From b8448ab6379ee2b516675e695ef1a6dd2336cc3b Mon Sep 17 00:00:00 2001
From: Ximin Luo
Date: Sun, 4 Nov 2018 11:50:33 +0000
Subject: [PATCH] Import cargo_0.31.0.orig-vendor.tar.gz

[dgit import orig cargo_0.31.0.orig-vendor.tar.gz]
---
 aho-corasick/.cargo-checksum.json | 1 +
 aho-corasick/COPYING | 3 +
 aho-corasick/Cargo.toml | 72 +
 aho-corasick/LICENSE-MIT | 21 +
 aho-corasick/README.md | 55 +
 aho-corasick/UNLICENSE | 24 +
 aho-corasick/benches/bench.rs | 373 +
 aho-corasick/benches/random.txt | 513 +
 aho-corasick/examples/dict-search.rs | 138 +
 aho-corasick/src/autiter.rs | 530 +
 aho-corasick/src/full.rs | 145 +
 aho-corasick/src/lib.rs | 1082 +
 aho-corasick/src/main.rs | 13 +
 ansi_term/.cargo-checksum.json | 1 +
 ansi_term/Cargo.toml | 27 +
 ansi_term/LICENCE | 21 +
 ansi_term/README.md | 174 +
 ansi_term/examples/colours.rs | 13 +
 ansi_term/src/ansi.rs | 258 +
 ansi_term/src/debug.rs | 122 +
 ansi_term/src/difference.rs | 179 +
 ansi_term/src/display.rs | 279 +
 ansi_term/src/lib.rs | 205 +
 ansi_term/src/style.rs | 259 +
 ansi_term/src/windows.rs | 40 +
 ansi_term/src/write.rs | 40 +
 arrayvec/.cargo-checksum.json | 1 +
 arrayvec/Cargo.toml | 57 +
 arrayvec/LICENSE-APACHE | 201 +
 arrayvec/LICENSE-MIT | 25 +
 arrayvec/README.rst | 184 +
 arrayvec/benches/arraystring.rs | 90 +
 arrayvec/benches/extend.rs | 43 +
 arrayvec/custom.css | 25 +
 arrayvec/src/array.rs | 93 +
 arrayvec/src/array_string.rs | 514 +
 arrayvec/src/char.rs | 54 +
 arrayvec/src/errors.rs | 53 +
 arrayvec/src/lib.rs | 1067 +
 arrayvec/src/range.rs | 42 +
 arrayvec/tests/serde.rs | 79 +
 arrayvec/tests/tests.rs | 468 +
 atty/.cargo-checksum.json | 1 +
 atty/CHANGELOG.md | 61 +
 atty/Cargo.toml | 33 +
 atty/LICENSE | 20 +
 atty/README.md | 76 +
 atty/appveyor.yml | 16 +
 atty/examples/atty.rs | 9 +
 atty/rustfmt.toml | 10 +
 atty/src/lib.rs | 210 +
 backtrace-sys/.cargo-checksum.json | 1 +
 backtrace-sys/Cargo.toml | 26 +
 backtrace-sys/LICENSE-APACHE | 201 +
 backtrace-sys/LICENSE-MIT | 25 +
 backtrace-sys/build.rs | 94 +
 backtrace-sys/src/lib.rs | 44 +
 backtrace-sys/src/libbacktrace/LICENSE | 29 +
 backtrace-sys/src/libbacktrace/Makefile.am | 206 +
 backtrace-sys/src/libbacktrace/Makefile.in | 1021 +
 .../libbacktrace/Mark.Twain-Tom.Sawyer.txt | 8465 +
 backtrace-sys/src/libbacktrace/README.md | 33 +
 backtrace-sys/src/libbacktrace/acinclude.m4 | 72 +
 backtrace-sys/src/libbacktrace/aclocal.m4 | 767 +
 backtrace-sys/src/libbacktrace/alloc.c | 156 +
 backtrace-sys/src/libbacktrace/atomic.c | 113 +
 .../src/libbacktrace/backtrace-supported.h.in | 66 +
 backtrace-sys/src/libbacktrace/backtrace.c | 129 +
 backtrace-sys/src/libbacktrace/backtrace.h | 182 +
 backtrace-sys/src/libbacktrace/btest.c | 500 +
 backtrace-sys/src/libbacktrace/config.guess | 1530 +
 backtrace-sys/src/libbacktrace/config.h.in | 149 +
 backtrace-sys/src/libbacktrace/config.sub | 1794 +
 .../src/libbacktrace/config/libtool.m4 | 7309 +
 .../src/libbacktrace/config/ltoptions.m4 | 368 +
 .../src/libbacktrace/config/ltsugar.m4 | 123 +
 .../src/libbacktrace/config/ltversion.m4 | 23 +
 .../src/libbacktrace/config/lt~obsolete.m4 | 92 +
 backtrace-sys/src/libbacktrace/configure | 14361 ++
 backtrace-sys/src/libbacktrace/configure.ac | 512 +
 backtrace-sys/src/libbacktrace/dwarf.c | 3126 +
 backtrace-sys/src/libbacktrace/edtest.c | 121 +
 backtrace-sys/src/libbacktrace/edtest2.c | 43 +
 backtrace-sys/src/libbacktrace/elf.c | 3340 +
 backtrace-sys/src/libbacktrace/fileline.c | 201 +
 backtrace-sys/src/libbacktrace/filenames.h | 49 +
 backtrace-sys/src/libbacktrace/filetype.awk | 11 +
 backtrace-sys/src/libbacktrace/install-sh | 527 +
backtrace-sys/src/libbacktrace/internal.h | 304 + backtrace-sys/src/libbacktrace/ltmain.sh | 7874 + backtrace-sys/src/libbacktrace/macho.c | 1418 + backtrace-sys/src/libbacktrace/missing | 331 + backtrace-sys/src/libbacktrace/mmap.c | 325 + backtrace-sys/src/libbacktrace/mmapio.c | 100 + backtrace-sys/src/libbacktrace/move-if-change | 83 + backtrace-sys/src/libbacktrace/nounwind.c | 66 + backtrace-sys/src/libbacktrace/pecoff.c | 943 + backtrace-sys/src/libbacktrace/posix.c | 100 + backtrace-sys/src/libbacktrace/print.c | 92 + backtrace-sys/src/libbacktrace/read.c | 96 + backtrace-sys/src/libbacktrace/simple.c | 108 + backtrace-sys/src/libbacktrace/sort.c | 108 + backtrace-sys/src/libbacktrace/state.c | 72 + backtrace-sys/src/libbacktrace/stest.c | 137 + backtrace-sys/src/libbacktrace/testlib.c | 234 + backtrace-sys/src/libbacktrace/testlib.h | 110 + backtrace-sys/src/libbacktrace/ttest.c | 161 + backtrace-sys/src/libbacktrace/unknown.c | 65 + backtrace-sys/src/libbacktrace/xcoff.c | 1642 + backtrace-sys/src/libbacktrace/ztest.c | 537 + backtrace-sys/symbol-map | 19 + backtrace/.cargo-checksum.json | 1 + backtrace/Cargo.toml | 86 + backtrace/LICENSE-APACHE | 201 + backtrace/LICENSE-MIT | 25 + backtrace/README.md | 91 + backtrace/appveyor.yml | 20 + backtrace/ci/android-ndk.sh | 23 + .../docker/aarch64-linux-android/Dockerfile | 18 + .../aarch64-unknown-linux-gnu/Dockerfile | 11 + .../docker/arm-linux-androideabi/Dockerfile | 18 + .../arm-unknown-linux-gnueabihf/Dockerfile | 10 + .../docker/armv7-linux-androideabi/Dockerfile | 18 + .../armv7-unknown-linux-gnueabihf/Dockerfile | 10 + .../docker/i586-unknown-linux-gnu/Dockerfile | 5 + .../ci/docker/i686-linux-android/Dockerfile | 18 + .../docker/i686-unknown-linux-gnu/Dockerfile | 5 + .../powerpc-unknown-linux-gnu/Dockerfile | 9 + .../powerpc64-unknown-linux-gnu/Dockerfile | 16 + .../ci/docker/x86_64-linux-android/Dockerfile | 18 + .../docker/x86_64-pc-windows-gnu/Dockerfile | 10 + .../x86_64-unknown-linux-gnu/Dockerfile | 5 + .../x86_64-unknown-linux-musl/Dockerfile | 6 + backtrace/ci/run-docker.sh | 32 + backtrace/ci/run.sh | 5 + backtrace/examples/backtrace.rs | 7 + backtrace/examples/raw.rs | 48 + backtrace/src/backtrace/dbghelp.rs | 103 + backtrace/src/backtrace/libunwind.rs | 200 + backtrace/src/backtrace/mod.rs | 113 + backtrace/src/backtrace/noop.rs | 16 + backtrace/src/backtrace/unix_backtrace.rs | 46 + backtrace/src/capture.rs | 237 + backtrace/src/dylib.rs | 70 + backtrace/src/lib.rs | 178 + backtrace/src/symbolize/coresymbolication.rs | 192 + backtrace/src/symbolize/dbghelp.rs | 118 + backtrace/src/symbolize/dladdr.rs | 59 + backtrace/src/symbolize/gimli.rs | 224 + backtrace/src/symbolize/libbacktrace.rs | 180 + backtrace/src/symbolize/mod.rs | 294 + backtrace/src/symbolize/noop.rs | 27 + backtrace/tests/long_fn_name.rs | 57 + backtrace/tests/smoke.rs | 173 + bitflags/.cargo-checksum.json | 1 + bitflags/CHANGELOG.md | 108 + bitflags/CODE_OF_CONDUCT.md | 73 + bitflags/Cargo.toml | 33 + bitflags/LICENSE-APACHE | 201 + bitflags/LICENSE-MIT | 25 + bitflags/README.md | 34 + bitflags/src/example_generated.rs | 14 + bitflags/src/lib.rs | 1229 + bufstream/.cargo-checksum.json | 1 + bufstream/Cargo.toml | 32 + bufstream/LICENSE-APACHE | 201 + bufstream/LICENSE-MIT | 25 + bufstream/README.md | 22 + bufstream/src/lib.rs | 262 + cc/.cargo-checksum.json | 1 + cc/Cargo.toml | 37 + cc/LICENSE-APACHE | 201 + cc/LICENSE-MIT | 25 + cc/README.md | 202 + cc/appveyor.yml | 55 + cc/src/bin/gcc-shim.rs | 23 + cc/src/com.rs | 155 + cc/src/lib.rs | 2275 + 
cc/src/registry.rs | 204 + cc/src/setup_config.rs | 283 + cc/src/winapi.rs | 218 + cc/src/windows_registry.rs | 696 + cc/tests/cc_env.rs | 119 + cc/tests/support/mod.rs | 131 + cc/tests/test.rs | 361 + cfg-if/.cargo-checksum.json | 1 + cfg-if/Cargo.toml | 24 + cfg-if/LICENSE-APACHE | 201 + cfg-if/LICENSE-MIT | 25 + cfg-if/README.md | 52 + cfg-if/src/lib.rs | 142 + cfg-if/tests/xcrate.rs | 17 + clap/.cargo-checksum.json | 1 + clap/.pc/.quilt_patches | 1 + clap/.pc/.quilt_series | 1 + clap/.pc/.version | 1 + clap/.pc/applied-patches | 1 + clap/.pc/no-clippy.patch/.timestamp | 0 clap/.pc/no-clippy.patch/Cargo.toml | 140 + clap/CHANGELOG.md | 2814 + clap/CONTRIBUTORS.md | 91 + clap/Cargo.toml | 135 + clap/LICENSE-MIT | 21 + clap/README.md | 575 + clap/SPONSORS.md | 10 + clap/clap-test.rs | 86 + clap/debian/patches/no-clippy.patch | 21 + clap/debian/patches/no-clippy.patch~ | 21 + clap/debian/patches/series | 1 + clap/index.html | 1 + clap/justfile | 39 + clap/rustfmt.toml | 4 + clap/src/app/help.rs | 1028 + clap/src/app/meta.rs | 33 + clap/src/app/mod.rs | 1842 + clap/src/app/parser.rs | 2150 + clap/src/app/settings.rs | 1134 + clap/src/app/usage.rs | 479 + clap/src/app/validator.rs | 573 + clap/src/args/any_arg.rs | 74 + clap/src/args/arg.rs | 3944 + clap/src/args/arg_builder/base.rs | 38 + clap/src/args/arg_builder/flag.rs | 159 + clap/src/args/arg_builder/mod.rs | 13 + clap/src/args/arg_builder/option.rs | 244 + clap/src/args/arg_builder/positional.rs | 229 + clap/src/args/arg_builder/switched.rs | 38 + clap/src/args/arg_builder/valued.rs | 67 + clap/src/args/arg_matcher.rs | 218 + clap/src/args/arg_matches.rs | 966 + clap/src/args/group.rs | 635 + clap/src/args/macros.rs | 109 + clap/src/args/matched_arg.rs | 24 + clap/src/args/mod.rs | 21 + clap/src/args/settings.rs | 230 + clap/src/args/subcommand.rs | 66 + clap/src/completions/bash.rs | 219 + clap/src/completions/elvish.rs | 126 + clap/src/completions/fish.rs | 99 + clap/src/completions/macros.rs | 28 + clap/src/completions/mod.rs | 179 + clap/src/completions/powershell.rs | 139 + clap/src/completions/shell.rs | 52 + clap/src/completions/zsh.rs | 472 + clap/src/errors.rs | 911 + clap/src/fmt.rs | 189 + clap/src/lib.rs | 625 + clap/src/macros.rs | 1103 + clap/src/map.rs | 74 + clap/src/osstringext.rs | 119 + clap/src/strext.rs | 16 + clap/src/suggestions.rs | 135 + clap/src/usage_parser.rs | 1347 + cloudabi/.cargo-checksum.json | 1 + cloudabi/Cargo.toml | 31 + cloudabi/bitflags.rs | 51 + cloudabi/cloudabi.rs | 2847 + commoncrypto-sys/.cargo-checksum.json | 1 + commoncrypto-sys/.pc/.quilt_patches | 1 + commoncrypto-sys/.pc/.quilt_series | 1 + commoncrypto-sys/.pc/.version | 1 + commoncrypto-sys/.pc/applied-patches | 1 + .../.pc/no-clippy.patch/.timestamp | 0 .../.pc/no-clippy.patch/Cargo.toml | 20 + commoncrypto-sys/Cargo.toml | 15 + .../debian/patches/no-clippy.patch | 18 + commoncrypto-sys/debian/patches/series | 1 + commoncrypto-sys/src/lib.rs | 237 + commoncrypto-sys/tests/hash.rs | 138 + commoncrypto-sys/tests/pbkdf2.rs | 48 + commoncrypto/.cargo-checksum.json | 1 + commoncrypto/.pc/.quilt_patches | 1 + commoncrypto/.pc/.quilt_series | 1 + commoncrypto/.pc/.version | 1 + commoncrypto/.pc/applied-patches | 1 + commoncrypto/.pc/no-clippy.patch/.timestamp | 0 commoncrypto/.pc/no-clippy.patch/Cargo.toml | 20 + commoncrypto/Cargo.toml | 15 + commoncrypto/debian/patches/no-clippy.patch | 18 + commoncrypto/debian/patches/series | 1 + commoncrypto/src/hash.rs | 127 + commoncrypto/src/lib.rs | 30 + commoncrypto/src/pbkdf2.rs | 66 + 
commoncrypto/tests/hash.rs | 18 + commoncrypto/tests/pbkdf2.rs | 16 + core-foundation-sys/.cargo-checksum.json | 1 + core-foundation-sys/Cargo.toml | 27 + core-foundation-sys/LICENSE-APACHE | 201 + core-foundation-sys/LICENSE-MIT | 25 + core-foundation-sys/build.rs | 14 + core-foundation-sys/src/array.rs | 55 + core-foundation-sys/src/attributed_string.rs | 56 + core-foundation-sys/src/base.rs | 154 + core-foundation-sys/src/bundle.rs | 36 + core-foundation-sys/src/data.rs | 31 + core-foundation-sys/src/date.rs | 34 + core-foundation-sys/src/dictionary.rs | 91 + core-foundation-sys/src/error.rs | 32 + core-foundation-sys/src/filedescriptor.rs | 58 + core-foundation-sys/src/lib.rs | 30 + core-foundation-sys/src/messageport.rs | 79 + core-foundation-sys/src/number.rs | 60 + core-foundation-sys/src/propertylist.rs | 46 + core-foundation-sys/src/runloop.rs | 164 + core-foundation-sys/src/set.rs | 58 + core-foundation-sys/src/string.rs | 319 + core-foundation-sys/src/timezone.rs | 27 + core-foundation-sys/src/url.rs | 164 + core-foundation-sys/src/uuid.rs | 49 + core-foundation/.cargo-checksum.json | 1 + core-foundation/Cargo.toml | 39 + core-foundation/LICENSE-APACHE | 201 + core-foundation/LICENSE-MIT | 25 + core-foundation/src/array.rs | 269 + core-foundation/src/attributed_string.rs | 79 + core-foundation/src/base.rs | 443 + core-foundation/src/boolean.rs | 70 + core-foundation/src/bundle.rs | 123 + core-foundation/src/data.rs | 63 + core-foundation/src/date.rs | 130 + core-foundation/src/dictionary.rs | 316 + core-foundation/src/error.rs | 71 + core-foundation/src/filedescriptor.rs | 210 + core-foundation/src/lib.rs | 178 + core-foundation/src/number.rs | 120 + core-foundation/src/propertylist.rs | 325 + core-foundation/src/runloop.rs | 199 + core-foundation/src/set.rs | 45 + core-foundation/src/string.rs | 150 + core-foundation/src/timezone.rs | 95 + core-foundation/src/url.rs | 156 + core-foundation/src/uuid.rs | 112 + .../tests/use_macro_outside_crate.rs | 28 + crossbeam-channel/.cargo-checksum.json | 1 + crossbeam-channel/CHANGELOG.md | 125 + crossbeam-channel/Cargo.toml | 40 + crossbeam-channel/LICENSE-APACHE | 201 + crossbeam-channel/LICENSE-MIT | 25 + crossbeam-channel/README.md | 36 + crossbeam-channel/examples/csp.rs | 60 + crossbeam-channel/examples/fibonacci.rs | 37 + crossbeam-channel/examples/mpsc.rs | 273 + crossbeam-channel/src/flavors/after.rs | 250 + crossbeam-channel/src/flavors/array.rs | 568 + crossbeam-channel/src/flavors/list.rs | 544 + crossbeam-channel/src/flavors/mod.rs | 15 + crossbeam-channel/src/flavors/tick.rs | 190 + crossbeam-channel/src/flavors/zero.rs | 465 + crossbeam-channel/src/internal/channel.rs | 931 + crossbeam-channel/src/internal/context.rs | 187 + crossbeam-channel/src/internal/mod.rs | 14 + crossbeam-channel/src/internal/select.rs | 1911 + crossbeam-channel/src/internal/utils.rs | 114 + crossbeam-channel/src/internal/waker.rs | 251 + crossbeam-channel/src/lib.rs | 380 + crossbeam-channel/tests/after.rs | 299 + crossbeam-channel/tests/array.rs | 580 + crossbeam-channel/tests/cmp.rs | 48 + crossbeam-channel/tests/golang.rs | 906 + crossbeam-channel/tests/iter.rs | 80 + crossbeam-channel/tests/list.rs | 430 + crossbeam-channel/tests/mpsc.rs | 1979 + crossbeam-channel/tests/parse.rs | 405 + crossbeam-channel/tests/select.rs | 893 + crossbeam-channel/tests/select_struct.rs | 861 + crossbeam-channel/tests/thread_locals.rs | 53 + crossbeam-channel/tests/tick.rs | 276 + crossbeam-channel/tests/wrappers/cloned.rs | 61 + 
crossbeam-channel/tests/wrappers/mod.rs | 16 + crossbeam-channel/tests/wrappers/normal.rs | 60 + crossbeam-channel/tests/wrappers/select.rs | 84 + .../tests/wrappers/select_multi.rs | 87 + .../tests/wrappers/select_spin.rs | 105 + crossbeam-channel/tests/zero.rs | 495 + crossbeam-epoch/.cargo-checksum.json | 1 + crossbeam-epoch/CHANGELOG.md | 100 + crossbeam-epoch/Cargo.toml | 53 + crossbeam-epoch/LICENSE-APACHE | 201 + crossbeam-epoch/LICENSE-MIT | 25 + crossbeam-epoch/README.md | 35 + crossbeam-epoch/benches/defer.rs | 73 + crossbeam-epoch/benches/flush.rs | 51 + crossbeam-epoch/benches/pin.rs | 31 + crossbeam-epoch/examples/sanitize.rs | 70 + crossbeam-epoch/src/atomic.rs | 1124 + crossbeam-epoch/src/collector.rs | 428 + crossbeam-epoch/src/default.rs | 73 + crossbeam-epoch/src/deferred.rs | 134 + crossbeam-epoch/src/epoch.rs | 106 + crossbeam-epoch/src/guard.rs | 547 + crossbeam-epoch/src/internal.rs | 543 + crossbeam-epoch/src/lib.rs | 99 + crossbeam-epoch/src/sync/list.rs | 473 + crossbeam-epoch/src/sync/mod.rs | 4 + crossbeam-epoch/src/sync/queue.rs | 428 + crossbeam-utils/.cargo-checksum.json | 1 + crossbeam-utils/CHANGELOG.md | 82 + crossbeam-utils/Cargo.toml | 31 + crossbeam-utils/LICENSE-APACHE | 201 + crossbeam-utils/LICENSE-MIT | 25 + crossbeam-utils/README.md | 31 + crossbeam-utils/src/cache_padded.rs | 172 + crossbeam-utils/src/consume.rs | 82 + crossbeam-utils/src/lib.rs | 14 + crossbeam-utils/src/thread.rs | 382 + crypto-hash/.cargo-checksum.json | 1 + crypto-hash/CONTRIBUTING.md | 77 + crypto-hash/Cargo.toml | 37 + crypto-hash/LICENSE | 19 + crypto-hash/Makefile | 29 + crypto-hash/NEWS.md | 62 + crypto-hash/README.md | 54 + crypto-hash/src/imp/commoncrypto.rs | 81 + crypto-hash/src/imp/cryptoapi.rs | 159 + crypto-hash/src/imp/openssl.rs | 85 + crypto-hash/src/lib.rs | 119 + crypto-hash/src/test.rs | 83 + curl-sys/.cargo-checksum.json | 1 + curl-sys/Cargo.toml | 61 + curl-sys/LICENSE | 19 + curl-sys/build.rs | 373 + curl-sys/lib.rs | 1062 + curl/.cargo-checksum.json | 1 + curl/.pc/.quilt_patches | 1 + curl/.pc/.quilt_series | 1 + curl/.pc/.version | 1 + curl/.pc/applied-patches | 1 + curl/.pc/winapi3.patch/.timestamp | 0 curl/.pc/winapi3.patch/Cargo.toml | 60 + curl/.pc/winapi3.patch/src/easy/windows.rs | 136 + curl/.pc/winapi3.patch/src/lib.rs | 129 + curl/.pc/winapi3.patch/src/multi.rs | 1069 + curl/Cargo.toml | 59 + curl/LICENSE | 19 + curl/README.md | 138 + curl/appveyor.yml | 40 + curl/ci/Dockerfile-linux32 | 14 + curl/ci/Dockerfile-linux64 | 7 + curl/ci/Dockerfile-linux64-curl | 6 + curl/ci/Dockerfile-mingw | 6 + curl/ci/Dockerfile-musl | 18 + curl/ci/run.sh | 11 + curl/debian/patches/series | 1 + curl/debian/patches/winapi3.patch | 62 + curl/debian/patches/winapi3.patch~ | 62 + curl/src/easy/form.rs | 333 + curl/src/easy/handle.rs | 1471 + curl/src/easy/handler.rs | 3241 + curl/src/easy/list.rs | 99 + curl/src/easy/mod.rs | 22 + curl/src/easy/windows.rs | 136 + curl/src/error.rs | 600 + curl/src/lib.rs | 127 + curl/src/multi.rs | 1069 + curl/src/panic.rs | 34 + curl/src/version.rs | 326 + curl/tests/atexit.rs | 20 + curl/tests/easy.rs | 693 + curl/tests/formdata | 1 + curl/tests/multi.rs | 253 + curl/tests/post.rs | 108 + curl/tests/server/mod.rs | 175 + env_logger/.cargo-checksum.json | 1 + env_logger/Cargo.toml | 50 + env_logger/LICENSE-APACHE | 201 + env_logger/LICENSE-MIT | 25 + env_logger/README.md | 140 + env_logger/examples/custom_default_format.rs | 44 + env_logger/examples/custom_format.rs | 52 + env_logger/examples/custom_logger.rs | 60 + 
env_logger/examples/default.rs | 36 + env_logger/examples/direct_logger.rs | 40 + env_logger/src/filter/mod.rs | 568 + env_logger/src/filter/regex.rs | 29 + env_logger/src/filter/string.rs | 22 + env_logger/src/fmt.rs | 844 + env_logger/src/lib.rs | 1125 + env_logger/tests/log-in-log.rs | 38 + env_logger/tests/regexp_filter.rs | 51 + failure/.cargo-checksum.json | 1 + failure/CODE_OF_CONDUCT.md | 46 + failure/Cargo.toml | 33 + failure/LICENSE-APACHE | 201 + failure/LICENSE-MIT | 23 + failure/Makefile | 15 + failure/README.md | 119 + failure/RELEASES.md | 43 + failure/book/src/SUMMARY.md | 14 + failure/book/src/bail-and-ensure.md | 18 + failure/book/src/custom-fail.md | 75 + failure/book/src/derive-fail.md | 177 + failure/book/src/error-errorkind.md | 143 + failure/book/src/error-msg.md | 59 + failure/book/src/error.md | 100 + failure/book/src/fail.md | 152 + failure/book/src/guidance.md | 24 + failure/book/src/howto.md | 8 + failure/book/src/intro.md | 77 + failure/book/src/string-custom-error.md | 160 + failure/book/src/use-error.md | 66 + failure/build-docs.sh | 8 + failure/examples/bail_ensure.rs | 26 + failure/examples/error_as_cause.rs | 18 + failure/examples/simple.rs | 22 + .../examples/string_custom_error_pattern.rs | 76 + failure/src/as_fail.rs | 37 + failure/src/backtrace/internal.rs | 130 + failure/src/backtrace/mod.rs | 144 + failure/src/box_std.rs | 19 + failure/src/compat.rs | 47 + failure/src/context.rs | 176 + failure/src/error/error_impl.rs | 50 + failure/src/error/error_impl_small.rs | 132 + failure/src/error/mod.rs | 243 + failure/src/error_message.rs | 28 + failure/src/lib.rs | 306 + failure/src/macros.rs | 51 + failure/src/result_ext.rs | 203 + failure/src/small_error.rs | 264 + failure/src/sync_failure.rs | 97 + failure/travis.sh | 39 + failure_derive/.cargo-checksum.json | 1 + failure_derive/Cargo.toml | 41 + failure_derive/build.rs | 39 + failure_derive/src/lib.rs | 200 + failure_derive/tests/backtrace.rs | 64 + failure_derive/tests/custom_type_bounds.rs | 45 + failure_derive/tests/no_derive_display.rs | 21 + failure_derive/tests/tests.rs | 55 + failure_derive/tests/wraps.rs | 99 + filetime/.cargo-checksum.json | 1 + filetime/Cargo.toml | 31 + filetime/LICENSE-APACHE | 201 + filetime/LICENSE-MIT | 25 + filetime/README.md | 34 + filetime/appveyor.yml | 17 + filetime/src/lib.rs | 429 + filetime/src/redox.rs | 57 + filetime/src/unix/linux.rs | 62 + filetime/src/unix/mod.rs | 125 + filetime/src/unix/utimensat.rs | 13 + filetime/src/unix/utimes.rs | 13 + filetime/src/windows.rs | 87 + flate2/.cargo-checksum.json | 1 + flate2/.pc/.quilt_patches | 1 + flate2/.pc/.quilt_series | 1 + flate2/.pc/.version | 1 + flate2/.pc/applied-patches | 2 + flate2/.pc/disable-miniz.patch/.timestamp | 0 flate2/.pc/disable-miniz.patch/Cargo.toml | 70 + .../.pc/drop-deps-for-cargo.patch/.timestamp | 0 .../.pc/drop-deps-for-cargo.patch/Cargo.toml | 67 + flate2/Cargo.toml | 59 + flate2/LICENSE-APACHE | 201 + flate2/LICENSE-MIT | 25 + flate2/README.md | 90 + flate2/appveyor.yml | 24 + flate2/debian/patches/disable-miniz.patch | 25 + .../debian/patches/drop-deps-for-cargo.patch | 30 + .../debian/patches/drop-deps-for-cargo.patch~ | 28 + flate2/debian/patches/series | 2 + flate2/examples/deflatedecoder-bufread.rs | 24 + flate2/examples/deflatedecoder-read.rs | 24 + flate2/examples/deflatedecoder-write.rs | 26 + flate2/examples/deflateencoder-bufread.rs | 24 + flate2/examples/deflateencoder-read.rs | 20 + flate2/examples/deflateencoder-write.rs | 12 + flate2/examples/gzbuilder.rs | 24 + 
flate2/examples/gzdecoder-bufread.rs | 24 + flate2/examples/gzdecoder-read.rs | 24 + flate2/examples/gzdecoder-write.rs | 26 + flate2/examples/gzencoder-bufread.rs | 24 + flate2/examples/gzencoder-read.rs | 20 + flate2/examples/gzencoder-write.rs | 12 + flate2/examples/gzmultidecoder-bufread.rs | 24 + flate2/examples/gzmultidecoder-read.rs | 24 + flate2/examples/hello_world.txt | 1 + flate2/examples/zlibdecoder-bufread.rs | 24 + flate2/examples/zlibdecoder-read.rs | 24 + flate2/examples/zlibdecoder-write.rs | 26 + flate2/examples/zlibencoder-bufread.rs | 24 + flate2/examples/zlibencoder-read.rs | 21 + flate2/examples/zlibencoder-write.rs | 12 + flate2/src/bufreader.rs | 104 + flate2/src/crc.rs | 182 + flate2/src/deflate/bufread.rs | 268 + flate2/src/deflate/mod.rs | 193 + flate2/src/deflate/read.rs | 266 + flate2/src/deflate/write.rs | 349 + flate2/src/ffi.rs | 278 + flate2/src/gz/bufread.rs | 556 + flate2/src/gz/mod.rs | 359 + flate2/src/gz/read.rs | 278 + flate2/src/gz/write.rs | 480 + flate2/src/lib.rs | 234 + flate2/src/mem.rs | 737 + flate2/src/zio.rs | 290 + flate2/src/zlib/bufread.rs | 258 + flate2/src/zlib/mod.rs | 159 + flate2/src/zlib/read.rs | 265 + flate2/src/zlib/write.rs | 348 + flate2/tests/corrupt-file.gz | Bin 0 -> 7128 bytes flate2/tests/early-flush.rs | 20 + flate2/tests/empty-read.rs | 82 + flate2/tests/good-file.gz | Bin 0 -> 6766 bytes flate2/tests/good-file.txt | 733 + flate2/tests/gunzip.rs | 77 + flate2/tests/multi.gz | Bin 0 -> 53 bytes flate2/tests/multi.txt | 2 + flate2/tests/tokio.rs | 130 + flate2/tests/zero-write.rs | 8 + fnv/.cargo-checksum.json | 1 + fnv/Cargo.toml | 25 + fnv/LICENSE-APACHE | 201 + fnv/LICENSE-MIT | 25 + fnv/README.md | 81 + fnv/lib.rs | 349 + foreign-types-shared/.cargo-checksum.json | 1 + foreign-types-shared/Cargo.toml | 21 + foreign-types-shared/LICENSE-APACHE | 202 + foreign-types-shared/LICENSE-MIT | 19 + foreign-types-shared/src/lib.rs | 51 + foreign-types/.cargo-checksum.json | 1 + foreign-types/Cargo.toml | 22 + foreign-types/LICENSE-APACHE | 202 + foreign-types/LICENSE-MIT | 19 + foreign-types/README.md | 23 + foreign-types/src/lib.rs | 306 + fs2/.cargo-checksum.json | 1 + fs2/Cargo.toml | 33 + fs2/LICENSE-APACHE | 201 + fs2/LICENSE-MIT | 25 + fs2/README.md | 50 + fs2/src/lib.rs | 458 + fs2/src/unix.rs | 250 + fs2/src/windows.rs | 279 + fwdansi/.cargo-checksum.json | 1 + fwdansi/Cargo.toml | 36 + fwdansi/appveyor.yml | 20 + fwdansi/examples/run-rustc.rs | 17 + fwdansi/src/lib.rs | 235 + fwdansi/tests/tests.proptest-regressions | 11 + fwdansi/tests/tests.rs | 99 + git2-curl/.cargo-checksum.json | 1 + git2-curl/Cargo.toml | 48 + git2-curl/LICENSE-APACHE | 201 + git2-curl/LICENSE-MIT | 25 + git2-curl/src/lib.rs | 280 + git2-curl/tests/all.rs | 68 + git2/.cargo-checksum.json | 1 + git2/Cargo.toml | 75 + git2/LICENSE-APACHE | 201 + git2/LICENSE-MIT | 25 + git2/README.md | 71 + git2/appveyor.yml | 19 + git2/examples/add.rs | 85 + git2/examples/blame.rs | 106 + git2/examples/cat-file.rs | 142 + git2/examples/clone.rs | 124 + git2/examples/diff.rs | 284 + git2/examples/fetch.rs | 128 + git2/examples/init.rs | 152 + git2/examples/log.rs | 263 + git2/examples/ls-remote.rs | 63 + git2/examples/rev-list.rs | 97 + git2/examples/rev-parse.rs | 70 + git2/examples/status.rs | 369 + git2/examples/tag.rs | 134 + git2/src/blame.rs | 315 + git2/src/blob.rs | 186 + git2/src/branch.rs | 162 + git2/src/buf.rs | 73 + git2/src/build.rs | 638 + git2/src/call.rs | 217 + git2/src/cert.rs | 97 + git2/src/commit.rs | 359 + git2/src/config.rs | 628 + 
git2/src/cred.rs | 577 + git2/src/describe.rs | 199 + git2/src/diff.rs | 1258 + git2/src/error.rs | 284 + git2/src/index.rs | 634 + git2/src/lib.rs | 1337 + git2/src/merge.rs | 160 + git2/src/message.rs | 52 + git2/src/note.rs | 130 + git2/src/object.rs | 234 + git2/src/odb.rs | 419 + git2/src/oid.rs | 214 + git2/src/oid_array.rs | 50 + git2/src/packbuilder.rs | 386 + git2/src/panic.rs | 55 + git2/src/patch.rs | 202 + git2/src/pathspec.rs | 301 + git2/src/proxy_options.rs | 56 + git2/src/reference.rs | 401 + git2/src/reflog.rs | 172 + git2/src/refspec.rs | 89 + git2/src/remote.rs | 753 + git2/src/remote_callbacks.rs | 391 + git2/src/repo.rs | 2562 + git2/src/revspec.rs | 26 + git2/src/revwalk.rs | 203 + git2/src/signature.rs | 175 + git2/src/stash.rs | 210 + git2/src/status.rs | 418 + git2/src/string_array.rs | 117 + git2/src/submodule.rs | 344 + git2/src/tag.rs | 191 + git2/src/test.rs | 61 + git2/src/time.rs | 100 + git2/src/transport.rs | 326 + git2/src/tree.rs | 406 + git2/src/treebuilder.rs | 199 + git2/src/util.rs | 152 + glob/.cargo-checksum.json | 1 + glob/Cargo.toml | 15 + glob/LICENSE-APACHE | 201 + glob/LICENSE-MIT | 25 + glob/README.md | 24 + glob/src/lib.rs | 1312 + glob/tests/glob-std.rs | 278 + globset/.cargo-checksum.json | 1 + globset/COPYING | 3 + globset/Cargo.toml | 46 + globset/LICENSE-MIT | 21 + globset/README.md | 122 + globset/UNLICENSE | 24 + globset/benches/bench.rs | 121 + globset/src/glob.rs | 1452 + globset/src/lib.rs | 866 + globset/src/pathutil.rs | 172 + hex/.cargo-checksum.json | 1 + hex/Cargo.toml | 23 + hex/Dockerfile | 1 + hex/LICENSE-APACHE | 202 + hex/LICENSE-MIT | 20 + hex/README.md | 17 + hex/src/lib.rs | 406 + home/.cargo-checksum.json | 1 + home/Cargo.toml | 26 + home/README.md | 24 + home/src/lib.rs | 297 + humantime/.cargo-checksum.json | 1 + humantime/Cargo.toml | 37 + humantime/LICENSE-APACHE | 202 + humantime/LICENSE-MIT | 26 + humantime/README.md | 67 + humantime/benches/datetime_format.rs | 58 + humantime/benches/datetime_parse.rs | 50 + humantime/bulk.yaml | 8 + humantime/src/date.rs | 530 + humantime/src/duration.rs | 411 + humantime/src/lib.rs | 30 + humantime/src/wrapper.rs | 107 + humantime/vagga.yaml | 92 + idna/.cargo-checksum.json | 1 + idna/Cargo.toml | 43 + idna/LICENSE-APACHE | 201 + idna/LICENSE-MIT | 25 + idna/src/IdnaMappingTable.txt | 8405 + idna/src/lib.rs | 73 + idna/src/make_uts46_mapping_table.py | 192 + idna/src/punycode.rs | 213 + idna/src/uts46.rs | 433 + idna/src/uts46_mapping_table.rs | 16005 ++ idna/tests/IdnaTest.txt | 7848 + idna/tests/punycode.rs | 65 + idna/tests/punycode_tests.json | 120 + idna/tests/tests.rs | 21 + idna/tests/unit.rs | 40 + idna/tests/uts46.rs | 124 + ignore/.cargo-checksum.json | 1 + ignore/COPYING | 3 + ignore/Cargo.toml | 60 + ignore/LICENSE-MIT | 21 + ignore/README.md | 66 + ignore/UNLICENSE | 24 + ignore/examples/walk.rs | 84 + ignore/src/dir.rs | 940 + ignore/src/gitignore.rs | 764 + ignore/src/lib.rs | 444 + ignore/src/overrides.rs | 259 + ignore/src/pathutil.rs | 108 + ignore/src/types.rs | 786 + ignore/src/walk.rs | 2050 + ...atched_path_or_any_parents_tests.gitignore | 216 + ...gnore_matched_path_or_any_parents_tests.rs | 323 + itoa/.cargo-checksum.json | 1 + itoa/Cargo.toml | 30 + itoa/LICENSE-APACHE | 201 + itoa/LICENSE-MIT | 23 + itoa/README.md | 82 + itoa/benches/bench.rs | 84 + itoa/src/lib.rs | 299 + itoa/src/udiv128.rs | 63 + itoa/tests/test.rs | 51 + jobserver/.cargo-checksum.json | 1 + jobserver/.pc/.quilt_patches | 1 + jobserver/.pc/.quilt_series | 1 + jobserver/.pc/.version 
| 1 + jobserver/.pc/applied-patches | 1 + .../.pc/relax-dep-version.patch/.timestamp | 0 .../.pc/relax-dep-version.patch/Cargo.toml | 64 + jobserver/Cargo.toml | 64 + jobserver/LICENSE-APACHE | 201 + jobserver/LICENSE-MIT | 25 + jobserver/README.md | 41 + .../debian/patches/relax-dep-version.patch | 8 + .../debian/patches/relax-dep-version.patch~ | 8 + jobserver/debian/patches/series | 1 + jobserver/src/lib.rs | 942 + jobserver/tests/client-of-myself.rs | 60 + jobserver/tests/client.rs | 191 + jobserver/tests/helper.rs | 44 + jobserver/tests/make-as-a-client.rs | 77 + jobserver/tests/server.rs | 145 + lazy_static/.cargo-checksum.json | 1 + lazy_static/Cargo.toml | 47 + lazy_static/LICENSE-APACHE | 201 + lazy_static/LICENSE-MIT | 25 + lazy_static/README.md | 73 + lazy_static/appveyor.yml | 61 + lazy_static/build.rs | 44 + lazy_static/src/core_lazy.rs | 34 + lazy_static/src/heap_lazy.rs | 43 + lazy_static/src/inline_lazy.rs | 59 + lazy_static/src/lib.rs | 223 + lazy_static/tests/no_std.rs | 21 + lazy_static/tests/test.rs | 162 + lazycell/.cargo-checksum.json | 1 + lazycell/.pc/.quilt_patches | 1 + lazycell/.pc/.quilt_series | 1 + lazycell/.pc/.version | 1 + lazycell/.pc/applied-patches | 1 + lazycell/.pc/no-clippy.patch/.timestamp | 0 lazycell/.pc/no-clippy.patch/Cargo.toml | 30 + lazycell/CHANGELOG.md | 168 + lazycell/Cargo.toml | 23 + lazycell/LICENSE-APACHE | 201 + lazycell/LICENSE-MIT | 26 + lazycell/README.md | 72 + lazycell/debian/patches/no-clippy.patch | 11 + lazycell/debian/patches/no-clippy.patch~ | 11 + lazycell/debian/patches/series | 1 + lazycell/src/lib.rs | 583 + libc/.cargo-checksum.json | 1 + libc/Cargo.toml | 33 + libc/LICENSE-APACHE | 201 + libc/LICENSE-MIT | 25 + libc/README.md | 174 + libc/appveyor.yml | 28 + libc/ci/README.md | 243 + libc/ci/android-install-ndk.sh | 37 + libc/ci/android-install-sdk.sh | 60 + libc/ci/android-sysimage.sh | 52 + .../docker/aarch64-linux-android/Dockerfile | 45 + .../aarch64-unknown-linux-gnu/Dockerfile | 7 + .../aarch64-unknown-linux-musl/Dockerfile | 27 + .../docker/arm-linux-androideabi/Dockerfile | 45 + .../arm-unknown-linux-gnueabihf/Dockerfile | 7 + .../arm-unknown-linux-musleabihf/Dockerfile | 25 + .../asmjs-unknown-emscripten/Dockerfile | 20 + libc/ci/docker/i686-linux-android/Dockerfile | 45 + .../docker/i686-unknown-linux-gnu/Dockerfile | 5 + .../docker/i686-unknown-linux-musl/Dockerfile | 31 + .../docker/mips-unknown-linux-gnu/Dockerfile | 10 + .../docker/mips-unknown-linux-musl/Dockerfile | 17 + .../mips64-unknown-linux-gnuabi64/Dockerfile | 11 + .../Dockerfile | 11 + .../mipsel-unknown-linux-musl/Dockerfile | 17 + .../powerpc-unknown-linux-gnu/Dockerfile | 10 + .../powerpc64-unknown-linux-gnu/Dockerfile | 11 + .../powerpc64le-unknown-linux-gnu/Dockerfile | 11 + .../docker/s390x-unknown-linux-gnu/Dockerfile | 18 + .../sparc64-unknown-linux-gnu/Dockerfile | 21 + .../wasm32-unknown-emscripten/Dockerfile | 21 + .../wasm32-unknown-emscripten/node-wrapper.sh | 15 + .../ci/docker/x86_64-linux-android/Dockerfile | 26 + .../docker/x86_64-rumprun-netbsd/Dockerfile | 10 + .../docker/x86_64-rumprun-netbsd/runtest.rs | 54 + .../docker/x86_64-unknown-freebsd/Dockerfile | 13 + .../x86_64-unknown-linux-gnu/Dockerfile | 5 + .../x86_64-unknown-linux-gnux32/Dockerfile | 5 + .../x86_64-unknown-linux-musl/Dockerfile | 20 + libc/ci/dox.sh | 33 + libc/ci/emscripten-entry.sh | 19 + libc/ci/emscripten.sh | 54 + .../ci/ios/deploy_and_run_on_ios_simulator.rs | 172 + libc/ci/landing-page-footer.html | 3 + libc/ci/landing-page-head.html | 7 + 
libc/ci/linux-s390x.sh | 18 + libc/ci/linux-sparc64.sh | 17 + libc/ci/run-docker.sh | 36 + libc/ci/run-qemu.sh | 32 + libc/ci/run.sh | 98 + libc/ci/runtest-android.rs | 41 + libc/ci/style.rs | 206 + libc/ci/test-runner-linux | 23 + libc/src/cloudabi/aarch64.rs | 4 + libc/src/cloudabi/arm.rs | 4 + libc/src/cloudabi/mod.rs | 166 + libc/src/cloudabi/x86.rs | 4 + libc/src/cloudabi/x86_64.rs | 4 + libc/src/dox.rs | 209 + libc/src/fuchsia/aarch64.rs | 336 + libc/src/fuchsia/mod.rs | 3999 + libc/src/fuchsia/powerpc64.rs | 79 + libc/src/fuchsia/x86_64.rs | 447 + libc/src/lib.rs | 305 + libc/src/macros.rs | 88 + libc/src/redox/mod.rs | 138 + libc/src/redox/net.rs | 124 + libc/src/unix/bsd/apple/b32.rs | 60 + libc/src/unix/bsd/apple/b64.rs | 65 + libc/src/unix/bsd/apple/mod.rs | 2560 + .../src/unix/bsd/freebsdlike/dragonfly/mod.rs | 787 + .../unix/bsd/freebsdlike/freebsd/aarch64.rs | 32 + libc/src/unix/bsd/freebsdlike/freebsd/mod.rs | 1040 + libc/src/unix/bsd/freebsdlike/freebsd/x86.rs | 31 + .../unix/bsd/freebsdlike/freebsd/x86_64.rs | 32 + libc/src/unix/bsd/freebsdlike/mod.rs | 1217 + libc/src/unix/bsd/mod.rs | 557 + libc/src/unix/bsd/netbsdlike/mod.rs | 667 + libc/src/unix/bsd/netbsdlike/netbsd/mod.rs | 1114 + .../bsd/netbsdlike/netbsd/other/b32/mod.rs | 2 + .../bsd/netbsdlike/netbsd/other/b64/mod.rs | 2 + .../unix/bsd/netbsdlike/netbsd/other/mod.rs | 14 + .../bsd/netbsdlike/openbsdlike/bitrig/mod.rs | 111 + .../bsd/netbsdlike/openbsdlike/bitrig/x86.rs | 2 + .../netbsdlike/openbsdlike/bitrig/x86_64.rs | 2 + .../unix/bsd/netbsdlike/openbsdlike/mod.rs | 749 + .../netbsdlike/openbsdlike/openbsd/aarch64.rs | 3 + .../bsd/netbsdlike/openbsdlike/openbsd/mod.rs | 279 + .../bsd/netbsdlike/openbsdlike/openbsd/x86.rs | 3 + .../netbsdlike/openbsdlike/openbsd/x86_64.rs | 3 + libc/src/unix/haiku/b32.rs | 3 + libc/src/unix/haiku/b64.rs | 3 + libc/src/unix/haiku/mod.rs | 1243 + libc/src/unix/hermit/aarch64.rs | 2 + libc/src/unix/hermit/mod.rs | 736 + libc/src/unix/hermit/x86_64.rs | 2 + libc/src/unix/mod.rs | 985 + libc/src/unix/newlib/aarch64/mod.rs | 5 + libc/src/unix/newlib/arm/mod.rs | 5 + libc/src/unix/newlib/mod.rs | 739 + libc/src/unix/notbsd/android/b32/arm.rs | 357 + libc/src/unix/notbsd/android/b32/mod.rs | 214 + libc/src/unix/notbsd/android/b32/x86.rs | 415 + libc/src/unix/notbsd/android/b64/aarch64.rs | 325 + libc/src/unix/notbsd/android/b64/mod.rs | 167 + libc/src/unix/notbsd/android/b64/x86_64.rs | 420 + libc/src/unix/notbsd/android/mod.rs | 1726 + libc/src/unix/notbsd/emscripten.rs | 1670 + libc/src/unix/notbsd/linux/mips/mips32.rs | 703 + libc/src/unix/notbsd/linux/mips/mips64.rs | 647 + libc/src/unix/notbsd/linux/mips/mod.rs | 963 + libc/src/unix/notbsd/linux/mod.rs | 2149 + libc/src/unix/notbsd/linux/musl/b32/arm.rs | 840 + libc/src/unix/notbsd/linux/musl/b32/mips.rs | 858 + libc/src/unix/notbsd/linux/musl/b32/mod.rs | 62 + .../src/unix/notbsd/linux/musl/b32/powerpc.rs | 866 + libc/src/unix/notbsd/linux/musl/b32/x86.rs | 901 + .../src/unix/notbsd/linux/musl/b64/aarch64.rs | 476 + libc/src/unix/notbsd/linux/musl/b64/mod.rs | 330 + .../unix/notbsd/linux/musl/b64/powerpc64.rs | 572 + libc/src/unix/notbsd/linux/musl/b64/x86_64.rs | 587 + libc/src/unix/notbsd/linux/musl/mod.rs | 268 + libc/src/unix/notbsd/linux/other/b32/arm.rs | 612 + libc/src/unix/notbsd/linux/other/b32/mod.rs | 384 + .../unix/notbsd/linux/other/b32/powerpc.rs | 614 + libc/src/unix/notbsd/linux/other/b32/x86.rs | 769 + .../unix/notbsd/linux/other/b64/aarch64.rs | 830 + libc/src/unix/notbsd/linux/other/b64/mod.rs | 85 + 
.../unix/notbsd/linux/other/b64/not_x32.rs | 421 + .../unix/notbsd/linux/other/b64/powerpc64.rs | 928 + .../unix/notbsd/linux/other/b64/sparc64.rs | 863 + libc/src/unix/notbsd/linux/other/b64/x32.rs | 374 + .../src/unix/notbsd/linux/other/b64/x86_64.rs | 660 + libc/src/unix/notbsd/linux/other/mod.rs | 927 + libc/src/unix/notbsd/linux/s390x.rs | 1333 + libc/src/unix/notbsd/mod.rs | 1214 + libc/src/unix/solaris/mod.rs | 1458 + libc/src/unix/uclibc/mips/mips32.rs | 639 + libc/src/unix/uclibc/mips/mips64.rs | 214 + libc/src/unix/uclibc/mips/mod.rs | 478 + libc/src/unix/uclibc/mod.rs | 1954 + libc/src/unix/uclibc/x86_64/l4re.rs | 47 + libc/src/unix/uclibc/x86_64/mod.rs | 416 + libc/src/unix/uclibc/x86_64/other.rs | 4 + libc/src/windows.rs | 248 + libgit2-sys/.cargo-checksum.json | 1 + libgit2-sys/.pc/.quilt_patches | 1 + libgit2-sys/.pc/.quilt_series | 1 + libgit2-sys/.pc/.version | 1 + libgit2-sys/.pc/applied-patches | 1 + .../no-special-snowflake-env.patch/.timestamp | 0 .../no-special-snowflake-env.patch/build.rs | 194 + libgit2-sys/Cargo.toml | 53 + libgit2-sys/LICENSE-APACHE | 201 + libgit2-sys/LICENSE-MIT | 25 + libgit2-sys/build.rs | 192 + .../patches/no-special-snowflake-env.patch | 15 + .../patches/no-special-snowflake-env.patch~ | 15 + libgit2-sys/debian/patches/series | 1 + libgit2-sys/lib.rs | 2997 + libssh2-sys/.cargo-checksum.json | 1 + libssh2-sys/.pc/.quilt_patches | 1 + libssh2-sys/.pc/.quilt_series | 1 + libssh2-sys/.pc/.version | 1 + libssh2-sys/.pc/applied-patches | 1 + .../no-special-snowflake-env.patch/.timestamp | 0 .../no-special-snowflake-env.patch/build.rs | 180 + libssh2-sys/Cargo.toml | 39 + libssh2-sys/build.rs | 181 + .../patches/no-special-snowflake-env.patch | 39 + .../patches/no-special-snowflake-env.patch~ | 39 + libssh2-sys/debian/patches/series | 1 + libssh2-sys/lib.rs | 600 + libz-sys/.cargo-checksum.json | 1 + libz-sys/Cargo.toml | 36 + libz-sys/LICENSE-APACHE | 201 + libz-sys/LICENSE-MIT | 25 + libz-sys/README.md | 25 + libz-sys/appveyor.yml | 30 + libz-sys/build.rs | 194 + libz-sys/ci/Dockerfile | 7 + libz-sys/ci/run-docker.sh | 18 + libz-sys/src/lib.rs | 231 + libz-sys/src/smoke.c | 5 + lock_api/.cargo-checksum.json | 1 + lock_api/Cargo.toml | 31 + lock_api/LICENSE-APACHE | 201 + lock_api/LICENSE-MIT | 25 + lock_api/src/lib.rs | 109 + lock_api/src/mutex.rs | 550 + lock_api/src/remutex.rs | 618 + lock_api/src/rwlock.rs | 1453 + log/.cargo-checksum.json | 1 + log/CHANGELOG.md | 123 + log/Cargo.toml | 59 + log/LICENSE-APACHE | 201 + log/LICENSE-MIT | 25 + log/README.md | 88 + log/appveyor.yml | 19 + log/src/lib.rs | 1469 + log/src/macros.rs | 253 + log/src/serde.rs | 327 + log/tests/filters.rs | 66 + matches/.cargo-checksum.json | 1 + matches/Cargo.toml | 24 + matches/LICENSE | 25 + matches/lib.rs | 126 + matches/tests/macro_use_one.rs | 11 + memchr/.cargo-checksum.json | 1 + memchr/COPYING | 3 + memchr/Cargo.toml | 51 + memchr/LICENSE-MIT | 21 + memchr/README.md | 45 + memchr/UNLICENSE | 24 + memchr/build.rs | 27 + memchr/src/c.rs | 44 + memchr/src/fallback.rs | 346 + memchr/src/iter.rs | 177 + memchr/src/lib.rs | 317 + memchr/src/naive.rs | 37 + memchr/src/tests/iter.rs | 228 + memchr/src/tests/memchr.rs | 131 + memchr/src/tests/mod.rs | 410 + memchr/src/x86/avx.rs | 739 + memchr/src/x86/mod.rs | 86 + memchr/src/x86/sse2.rs | 827 + memchr/src/x86/sse42.rs | 75 + memoffset/.cargo-checksum.json | 1 + memoffset/Cargo.toml | 24 + memoffset/LICENSE | 19 + memoffset/README.md | 47 + memoffset/src/lib.rs | 70 + memoffset/src/offset_of.rs | 119 + 
memoffset/src/span_of.rs | 274 + miow/.cargo-checksum.json | 1 + miow/Cargo.toml | 31 + miow/LICENSE-APACHE | 201 + miow/LICENSE-MIT | 25 + miow/README.md | 31 + miow/appveyor.yml | 20 + miow/src/handle.rs | 164 + miow/src/iocp.rs | 324 + miow/src/lib.rs | 57 + miow/src/net.rs | 1140 + miow/src/overlapped.rs | 95 + miow/src/pipe.rs | 716 + nodrop/.cargo-checksum.json | 1 + nodrop/Cargo.toml | 33 + nodrop/README.rst | 58 + nodrop/src/lib.rs | 186 + num_cpus/.cargo-checksum.json | 1 + num_cpus/CHANGELOG.md | 82 + num_cpus/CONTRIBUTING.md | 16 + num_cpus/Cargo.toml | 25 + num_cpus/LICENSE-APACHE | 201 + num_cpus/LICENSE-MIT | 20 + num_cpus/README.md | 28 + num_cpus/src/lib.rs | 414 + opener/.cargo-checksum.json | 1 + opener/Cargo.toml | 37 + opener/src/lib.rs | 212 + opener/src/xdg-open | 1066 + openssl-probe/.cargo-checksum.json | 1 + openssl-probe/Cargo.toml | 21 + openssl-probe/LICENSE-APACHE | 201 + openssl-probe/LICENSE-MIT | 25 + openssl-probe/README.md | 33 + openssl-probe/src/lib.rs | 83 + openssl-sys/.cargo-checksum.json | 1 + openssl-sys/.pc/.quilt_patches | 1 + openssl-sys/.pc/.quilt_series | 1 + openssl-sys/.pc/.version | 1 + openssl-sys/.pc/applied-patches | 1 + .../.pc/disable-vendor.patch/.timestamp | 0 .../.pc/disable-vendor.patch/Cargo.toml | 41 + openssl-sys/Cargo.toml | 38 + openssl-sys/LICENSE-MIT | 25 + openssl-sys/README.md | 24 + openssl-sys/build/cfgs.rs | 55 + openssl-sys/build/main.rs | 615 + .../debian/patches/disable-vendor.patch | 20 + openssl-sys/debian/patches/series | 1 + openssl-sys/src/aes.rs | 28 + openssl-sys/src/asn1.rs | 61 + openssl-sys/src/bio.rs | 151 + openssl-sys/src/bn.rs | 160 + openssl-sys/src/cms.rs | 79 + openssl-sys/src/conf.rs | 7 + openssl-sys/src/crypto.rs | 122 + openssl-sys/src/dh.rs | 24 + openssl-sys/src/dsa.rs | 66 + openssl-sys/src/dtls1.rs | 3 + openssl-sys/src/ec.rs | 211 + openssl-sys/src/err.rs | 50 + openssl-sys/src/evp.rs | 310 + openssl-sys/src/hmac.rs | 30 + openssl-sys/src/lib.rs | 167 + openssl-sys/src/macros.rs | 69 + openssl-sys/src/obj_mac.rs | 914 + openssl-sys/src/object.rs | 18 + openssl-sys/src/ocsp.rs | 118 + openssl-sys/src/ossl_typ.rs | 990 + openssl-sys/src/pem.rs | 146 + openssl-sys/src/pkcs12.rs | 56 + openssl-sys/src/pkcs7.rs | 74 + openssl-sys/src/rand.rs | 6 + openssl-sys/src/rsa.rs | 178 + openssl-sys/src/safestack.rs | 1 + openssl-sys/src/sha.rs | 70 + openssl-sys/src/srtp.rs | 18 + openssl-sys/src/ssl.rs | 1277 + openssl-sys/src/ssl3.rs | 5 + openssl-sys/src/stack.rs | 45 + openssl-sys/src/tls1.rs | 111 + openssl-sys/src/x509.rs | 348 + openssl-sys/src/x509_vfy.rs | 153 + openssl-sys/src/x509v3.rs | 92 + openssl/.cargo-checksum.json | 1 + openssl/Cargo.toml | 54 + openssl/LICENSE | 15 + openssl/README.md | 24 + openssl/build.rs | 57 + openssl/examples/mk_certs.rs | 154 + openssl/src/aes.rs | 169 + openssl/src/asn1.rs | 308 + openssl/src/bio.rs | 84 + openssl/src/bn.rs | 1405 + openssl/src/cms.rs | 164 + openssl/src/conf.rs | 52 + openssl/src/derive.rs | 123 + openssl/src/dh.rs | 187 + openssl/src/dsa.rs | 411 + openssl/src/ec.rs | 931 + openssl/src/ecdsa.rs | 233 + openssl/src/error.rs | 292 + openssl/src/ex_data.rs | 26 + openssl/src/fips.rs | 22 + openssl/src/hash.rs | 433 + openssl/src/lib.rs | 200 + openssl/src/macros.rs | 269 + openssl/src/memcmp.rs | 92 + openssl/src/nid.rs | 1120 + openssl/src/ocsp.rs | 347 + openssl/src/pkcs12.rs | 288 + openssl/src/pkcs5.rs | 296 + openssl/src/pkcs7.rs | 383 + openssl/src/pkey.rs | 658 + openssl/src/rand.rs | 52 + openssl/src/rsa.rs | 907 + openssl/src/sha.rs | 386 
+ openssl/src/sign.rs | 724 + openssl/src/srtp.rs | 57 + openssl/src/ssl/bio.rs | 275 + openssl/src/ssl/callbacks.rs | 684 + openssl/src/ssl/connector.rs | 507 + openssl/src/ssl/error.rs | 191 + openssl/src/ssl/mod.rs | 3728 + openssl/src/ssl/test.rs | 1841 + openssl/src/stack.rs | 369 + openssl/src/string.rs | 70 + openssl/src/symm.rs | 1259 + openssl/src/util.rs | 67 + openssl/src/version.rs | 131 + openssl/src/x509/extension.rs | 520 + openssl/src/x509/mod.rs | 1359 + openssl/src/x509/store.rs | 106 + openssl/src/x509/tests.rs | 365 + openssl/src/x509/verify.rs | 88 + openssl/test/alt_name_cert.pem | 22 + openssl/test/cert.pem | 19 + openssl/test/certs.pem | 40 + openssl/test/dhparams.pem | 8 + openssl/test/dsa.pem | 12 + openssl/test/dsa.pem.pub | 12 + openssl/test/dsaparam.pem | 9 + openssl/test/identity.p12 | Bin 0 -> 3386 bytes openssl/test/key.der | Bin 0 -> 1193 bytes openssl/test/key.der.pub | Bin 0 -> 294 bytes openssl/test/key.pem | 28 + openssl/test/key.pem.pub | 9 + openssl/test/keystore-empty-chain.p12 | Bin 0 -> 2514 bytes openssl/test/nid_test_cert.pem | 12 + openssl/test/nid_uid_test_cert.pem | 24 + openssl/test/pkcs1.pem.pub | 8 + openssl/test/pkcs8.der | Bin 0 -> 1298 bytes openssl/test/root-ca.key | 27 + openssl/test/root-ca.pem | 21 + openssl/test/rsa-encrypted.pem | 30 + openssl/test/rsa.pem | 27 + openssl/test/rsa.pem.pub | 9 + owning_ref/.cargo-checksum.json | 1 + owning_ref/Cargo.toml | 15 + owning_ref/LICENSE | 21 + owning_ref/README.md | 67 + owning_ref/src/lib.rs | 1891 + parking_lot/.cargo-checksum.json | 1 + parking_lot/CHANGELOG.md | 48 + parking_lot/Cargo.toml | 35 + parking_lot/LICENSE-APACHE | 201 + parking_lot/LICENSE-MIT | 25 + parking_lot/README.md | 138 + parking_lot/appveyor.yml | 29 + parking_lot/src/condvar.rs | 533 + parking_lot/src/deadlock.rs | 218 + parking_lot/src/elision.rs | 169 + parking_lot/src/lib.rs | 44 + parking_lot/src/mutex.rs | 298 + parking_lot/src/once.rs | 488 + parking_lot/src/raw_mutex.rs | 312 + parking_lot/src/raw_rwlock.rs | 1402 + parking_lot/src/remutex.rs | 126 + parking_lot/src/rwlock.rs | 564 + parking_lot/src/util.rs | 32 + parking_lot_core/.cargo-checksum.json | 1 + parking_lot_core/Cargo.toml | 49 + parking_lot_core/LICENSE-APACHE | 201 + parking_lot_core/LICENSE-MIT | 25 + parking_lot_core/build.rs | 8 + parking_lot_core/src/lib.rs | 84 + parking_lot_core/src/parking_lot.rs | 1391 + parking_lot_core/src/spinwait.rs | 125 + parking_lot_core/src/thread_parker/generic.rs | 98 + parking_lot_core/src/thread_parker/linux.rs | 137 + parking_lot_core/src/thread_parker/unix.rs | 242 + .../src/thread_parker/windows/keyed_event.rs | 197 + .../src/thread_parker/windows/mod.rs | 136 + .../src/thread_parker/windows/waitaddress.rs | 134 + parking_lot_core/src/util.rs | 32 + parking_lot_core/src/word_lock.rs | 279 + percent-encoding/.cargo-checksum.json | 1 + percent-encoding/Cargo.toml | 24 + percent-encoding/LICENSE-APACHE | 201 + percent-encoding/LICENSE-MIT | 25 + percent-encoding/lib.rs | 442 + pkg-config/.cargo-checksum.json | 1 + pkg-config/.pc/.quilt_patches | 1 + pkg-config/.pc/.quilt_series | 1 + pkg-config/.pc/.version | 1 + pkg-config/.pc/applied-patches | 1 + .../no-special-snowflake-env.patch/.timestamp | 0 .../no-special-snowflake-env.patch/src/lib.rs | 630 + .../tests/test.rs | 118 + pkg-config/CHANGELOG.md | 19 + pkg-config/Cargo.toml | 25 + pkg-config/LICENSE-APACHE | 201 + pkg-config/LICENSE-MIT | 25 + pkg-config/README.md | 73 + .../patches/no-special-snowflake-env.patch | 98 + 
.../patches/no-special-snowflake-env.patch~ | 98 + pkg-config/debian/patches/series | 1 + pkg-config/src/lib.rs | 623 + pkg-config/tests/escape.pc | 5 + pkg-config/tests/foo.pc | 16 + pkg-config/tests/framework.pc | 16 + pkg-config/tests/test.rs | 116 + proc-macro2/.cargo-checksum.json | 1 + proc-macro2/Cargo.toml | 38 + proc-macro2/LICENSE-APACHE | 201 + proc-macro2/LICENSE-MIT | 25 + proc-macro2/README.md | 82 + proc-macro2/build.rs | 61 + proc-macro2/src/lib.rs | 1099 + proc-macro2/src/stable.rs | 1395 + proc-macro2/src/strnom.rs | 393 + proc-macro2/src/unstable.rs | 920 + proc-macro2/tests/test.rs | 390 + quick-error/.cargo-checksum.json | 1 + quick-error/Cargo.toml | 23 + quick-error/LICENSE-APACHE | 202 + quick-error/LICENSE-MIT | 19 + quick-error/README.rst | 69 + quick-error/bulk.yaml | 8 + quick-error/examples/context.rs | 48 + quick-error/src/lib.rs | 1309 + quick-error/vagga.yaml | 36 + quote/.cargo-checksum.json | 1 + quote/Cargo.toml | 33 + quote/LICENSE-APACHE | 201 + quote/LICENSE-MIT | 25 + quote/README.md | 147 + quote/src/ext.rs | 115 + quote/src/lib.rs | 858 + quote/src/to_tokens.rs | 197 + quote/tests/test.rs | 290 + rand/.cargo-checksum.json | 1 + rand/.pc/.quilt_patches | 1 + rand/.pc/.quilt_series | 1 + rand/.pc/.version | 1 + rand/.pc/applied-patches | 1 + .../.timestamp | 0 .../Cargo.toml | 72 + rand/CHANGELOG.md | 406 + rand/CONTRIBUTING.md | 93 + rand/Cargo.toml | 72 + rand/LICENSE-APACHE | 201 + rand/LICENSE-MIT | 25 + rand/README.md | 143 + rand/UPDATING.md | 260 + rand/appveyor.yml | 39 + rand/benches/distributions.rs | 161 + rand/benches/generators.rs | 176 + rand/benches/misc.rs | 194 + .../patches/disable-fuchsia-zircon-dep.diff | 13 + rand/debian/patches/series | 1 + rand/examples/monte-carlo.rs | 52 + rand/examples/monty-hall.rs | 117 + rand/src/distributions/bernoulli.rs | 120 + rand/src/distributions/binomial.rs | 178 + rand/src/distributions/cauchy.rs | 116 + rand/src/distributions/exponential.rs | 122 + rand/src/distributions/float.rs | 206 + rand/src/distributions/gamma.rs | 360 + rand/src/distributions/integer.rs | 113 + rand/src/distributions/log_gamma.rs | 51 + rand/src/distributions/mod.rs | 784 + rand/src/distributions/normal.rs | 192 + rand/src/distributions/other.rs | 215 + rand/src/distributions/pareto.rs | 76 + rand/src/distributions/poisson.rs | 158 + rand/src/distributions/uniform.rs | 856 + rand/src/distributions/ziggurat_tables.rs | 280 + rand/src/lib.rs | 1252 + rand/src/prelude.rs | 28 + rand/src/prng/chacha.rs | 477 + rand/src/prng/hc128.rs | 464 + rand/src/prng/isaac.rs | 486 + rand/src/prng/isaac64.rs | 478 + rand/src/prng/isaac_array.rs | 137 + rand/src/prng/mod.rs | 330 + rand/src/prng/xorshift.rs | 225 + rand/src/rngs/adapter/mod.rs | 17 + rand/src/rngs/adapter/read.rs | 137 + rand/src/rngs/adapter/reseeding.rs | 260 + rand/src/rngs/entropy.rs | 296 + rand/src/rngs/jitter.rs | 887 + rand/src/rngs/mock.rs | 61 + rand/src/rngs/mod.rs | 218 + rand/src/rngs/os.rs | 1186 + rand/src/rngs/small.rs | 101 + rand/src/rngs/std.rs | 83 + rand/src/rngs/thread.rs | 141 + rand/src/seq.rs | 334 + rand/tests/bool.rs | 23 + rand/utils/ci/install.sh | 49 + rand/utils/ci/script.sh | 29 + rand/utils/ziggurat_tables.py | 127 + rand_core-0.2.2/.cargo-checksum.json | 1 + rand_core-0.2.2/CHANGELOG.md | 33 + rand_core-0.2.2/Cargo.toml | 37 + rand_core-0.2.2/LICENSE-APACHE | 201 + rand_core-0.2.2/LICENSE-MIT | 25 + rand_core-0.2.2/README.md | 64 + rand_core-0.2.2/src/block.rs | 502 + rand_core-0.2.2/src/error.rs | 179 + rand_core-0.2.2/src/impls.rs | 167 + 
rand_core-0.2.2/src/le.rs | 70 + rand_core-0.2.2/src/lib.rs | 52 + rand_core/.cargo-checksum.json | 1 + rand_core/CHANGELOG.md | 33 + rand_core/COPYRIGHT | 12 + rand_core/Cargo.toml | 42 + rand_core/LICENSE-APACHE | 201 + rand_core/LICENSE-MIT | 25 + rand_core/README.md | 64 + rand_core/src/block.rs | 508 + rand_core/src/error.rs | 177 + rand_core/src/impls.rs | 165 + rand_core/src/le.rs | 68 + rand_core/src/lib.rs | 486 + redox_syscall/.cargo-checksum.json | 1 + redox_syscall/Cargo.toml | 23 + redox_syscall/LICENSE | 22 + redox_syscall/README.md | 7 + redox_syscall/rust-toolchain | 1 + redox_syscall/src/arch/arm.rs | 73 + redox_syscall/src/arch/x86.rs | 73 + redox_syscall/src/arch/x86_64.rs | 74 + redox_syscall/src/call.rs | 346 + redox_syscall/src/data.rs | 167 + redox_syscall/src/error.rs | 315 + redox_syscall/src/flag.rs | 148 + redox_syscall/src/io/dma.rs | 76 + redox_syscall/src/io/io.rs | 67 + redox_syscall/src/io/mmio.rs | 31 + redox_syscall/src/io/mod.rs | 11 + redox_syscall/src/io/pio.rs | 89 + redox_syscall/src/lib.rs | 47 + redox_syscall/src/number.rs | 73 + redox_syscall/src/scheme/generate.sh | 20 + redox_syscall/src/scheme/mod.rs | 9 + redox_syscall/src/scheme/scheme.rs | 157 + redox_syscall/src/scheme/scheme_block.rs | 157 + redox_syscall/src/scheme/scheme_block_mut.rs | 157 + redox_syscall/src/scheme/scheme_mut.rs | 157 + redox_termios/.cargo-checksum.json | 1 + redox_termios/Cargo.toml | 26 + redox_termios/LICENSE | 21 + redox_termios/README.md | 2 + redox_termios/src/lib.rs | 218 + regex-syntax/.cargo-checksum.json | 1 + regex-syntax/Cargo.toml | 23 + regex-syntax/LICENSE-APACHE | 201 + regex-syntax/LICENSE-MIT | 25 + regex-syntax/benches/bench.rs | 73 + regex-syntax/src/ast/mod.rs | 1515 + regex-syntax/src/ast/parse.rs | 5365 + regex-syntax/src/ast/print.rs | 586 + regex-syntax/src/ast/visitor.rs | 557 + regex-syntax/src/either.rs | 8 + regex-syntax/src/error.rs | 297 + regex-syntax/src/hir/interval.rs | 490 + regex-syntax/src/hir/literal/mod.rs | 1551 + regex-syntax/src/hir/mod.rs | 2055 + regex-syntax/src/hir/print.rs | 375 + regex-syntax/src/hir/translate.rs | 2532 + regex-syntax/src/hir/visitor.rs | 222 + regex-syntax/src/lib.rs | 228 + regex-syntax/src/parser.rs | 206 + regex-syntax/src/unicode.rs | 437 + regex-syntax/src/unicode_tables/age.rs | 455 + .../src/unicode_tables/case_folding_simple.rs | 725 + .../src/unicode_tables/general_category.rs | 1907 + regex-syntax/src/unicode_tables/mod.rs | 9 + regex-syntax/src/unicode_tables/perl_word.rs | 188 + .../src/unicode_tables/property_bool.rs | 2658 + .../src/unicode_tables/property_names.rs | 147 + .../src/unicode_tables/property_values.rs | 289 + regex-syntax/src/unicode_tables/script.rs | 801 + .../src/unicode_tables/script_extension.rs | 829 + regex/.cargo-checksum.json | 1 + regex/CHANGELOG.md | 524 + regex/Cargo.toml | 105 + regex/HACKING.md | 341 + regex/LICENSE-APACHE | 201 + regex/LICENSE-MIT | 25 + regex/PERFORMANCE.md | 279 + regex/README.md | 234 + regex/UNICODE.md | 250 + regex/appveyor.yml | 19 + regex/build.rs | 86 + regex/ci/after_success.sh | 25 + regex/ci/run-kcov | 44 + regex/ci/run-shootout-test | 18 + regex/ci/script.sh | 56 + regex/ci/test-regex-capi | 7 + regex/examples/regexdna-input.txt | 1671 + regex/examples/regexdna-output.txt | 13 + regex/examples/shootout-regex-dna-bytes.rs | 66 + regex/examples/shootout-regex-dna-cheat.rs | 88 + regex/examples/shootout-regex-dna-replace.rs | 19 + .../shootout-regex-dna-single-cheat.rs | 73 + regex/examples/shootout-regex-dna-single.rs | 55 + 
regex/examples/shootout-regex-dna.rs | 66 + regex/scripts/frequencies.py | 82 + regex/scripts/generate.py | 45 + regex/scripts/regex-match-tests.py | 107 + regex/scripts/scrape_crates_io.py | 189 + regex/src/backtrack.rs | 302 + regex/src/compile.rs | 1151 + regex/src/dfa.rs | 1894 + regex/src/error.rs | 84 + regex/src/exec.rs | 1350 + regex/src/expand.rs | 215 + regex/src/freqs.rs | 271 + regex/src/input.rs | 420 + regex/src/lib.rs | 683 + regex/src/literal/mod.rs | 1141 + regex/src/literal/teddy_avx2/fallback.rs | 20 + regex/src/literal/teddy_avx2/imp.rs | 468 + regex/src/literal/teddy_avx2/mod.rs | 14 + regex/src/literal/teddy_ssse3/fallback.rs | 20 + regex/src/literal/teddy_ssse3/imp.rs | 776 + regex/src/literal/teddy_ssse3/mod.rs | 14 + regex/src/pattern.rs | 62 + regex/src/pikevm.rs | 377 + regex/src/prog.rs | 422 + regex/src/re_builder.rs | 388 + regex/src/re_bytes.rs | 1169 + regex/src/re_set.rs | 431 + regex/src/re_trait.rs | 268 + regex/src/re_unicode.rs | 1211 + regex/src/sparse.rs | 78 + regex/src/testdata/LICENSE | 19 + regex/src/testdata/README | 17 + regex/src/testdata/basic.dat | 221 + regex/src/testdata/nullsubexpr.dat | 79 + regex/src/testdata/repetition.dat | 163 + regex/src/utf8.rs | 259 + regex/src/vector/avx2.rs | 187 + regex/src/vector/mod.rs | 4 + regex/src/vector/ssse3.rs | 192 + regex/tests/api.rs | 182 + regex/tests/api_str.rs | 31 + regex/tests/bytes.rs | 72 + regex/tests/consistent.rs | 213 + regex/tests/crates_regex.rs | 3129 + regex/tests/crazy.rs | 401 + regex/tests/flags.rs | 11 + regex/tests/fowler.rs | 371 + regex/tests/macros.rs | 149 + regex/tests/macros_bytes.rs | 39 + regex/tests/macros_str.rs | 36 + regex/tests/misc.rs | 14 + regex/tests/multiline.rs | 49 + regex/tests/noparse.rs | 50 + regex/tests/regression.rs | 94 + regex/tests/replace.rs | 50 + regex/tests/searcher.rs | 66 + regex/tests/set.rs | 32 + regex/tests/shortest_match.rs | 14 + regex/tests/suffix_reverse.rs | 16 + regex/tests/test_backtrack.rs | 64 + regex/tests/test_backtrack_bytes.rs | 65 + regex/tests/test_backtrack_utf8bytes.rs | 65 + regex/tests/test_crates_regex.rs | 57 + regex/tests/test_default.rs | 87 + regex/tests/test_default_bytes.rs | 73 + regex/tests/test_nfa.rs | 60 + regex/tests/test_nfa_bytes.rs | 65 + regex/tests/test_nfa_utf8bytes.rs | 61 + regex/tests/unicode.rs | 110 + regex/tests/word_boundary.rs | 89 + regex/tests/word_boundary_ascii.rs | 9 + regex/tests/word_boundary_unicode.rs | 6 + remove_dir_all/.cargo-checksum.json | 1 + remove_dir_all/Cargo.toml | 26 + remove_dir_all/LICENCE-APACHE | 191 + remove_dir_all/LICENCE-MIT | 26 + remove_dir_all/src/fs.rs | 265 + remove_dir_all/src/lib.rs | 12 + rustc-demangle/.cargo-checksum.json | 1 + rustc-demangle/Cargo.toml | 22 + rustc-demangle/LICENSE-APACHE | 201 + rustc-demangle/LICENSE-MIT | 25 + rustc-demangle/README.md | 24 + rustc-demangle/src/lib.rs | 501 + rustc-workspace-hack/.cargo-checksum.json | 1 + rustc-workspace-hack/Cargo.toml | 20 + rustc-workspace-hack/src/lib.rs | 7 + rustc_version/.cargo-checksum.json | 1 + rustc_version/Cargo.toml | 26 + rustc_version/LICENSE-APACHE | 201 + rustc_version/LICENSE-MIT | 25 + rustc_version/README.md | 75 + rustc_version/src/errors.rs | 79 + rustc_version/src/lib.rs | 347 + rustfix/.cargo-checksum.json | 1 + rustfix/Cargo.toml | 53 + rustfix/LICENSE-APACHE | 202 + rustfix/LICENSE-MIT | 21 + rustfix/Readme.md | 44 + rustfix/bors.toml | 4 + rustfix/proptest-regressions/replace.txt | 8 + rustfix/src/diagnostics.rs | 87 + rustfix/src/lib.rs | 244 + rustfix/src/replace.rs | 327 + 
ryu/.cargo-checksum.json | 1 + ryu/Cargo.toml | 39 + ryu/LICENSE-APACHE | 201 + ryu/LICENSE-BOOST | 23 + ryu/README.md | 77 + ryu/benchmark/benchmark.rs | 86 + ryu/build.rs | 60 + ryu/src/buffer/mod.rs | 97 + ryu/src/common.rs | 72 + ryu/src/d2s.rs | 581 + ryu/src/d2s_full_table.rs | 643 + ryu/src/d2s_intrinsics.rs | 79 + ryu/src/d2s_small_table.rs | 206 + ryu/src/digit_table.rs | 28 + ryu/src/f2s.rs | 494 + ryu/src/lib.rs | 66 + ryu/src/pretty/exponent.rs | 49 + ryu/src/pretty/mantissa.rs | 51 + ryu/src/pretty/mod.rs | 154 + ryu/tests/d2s_table_test.rs | 52 + ryu/tests/d2s_test.rs | 154 + ryu/tests/exhaustive.rs | 55 + ryu/tests/f2s_test.rs | 183 + ryu/tests/macros/mod.rs | 12 + same-file/.cargo-checksum.json | 1 + same-file/COPYING | 3 + same-file/Cargo.toml | 28 + same-file/LICENSE-MIT | 21 + same-file/README.md | 45 + same-file/UNLICENSE | 24 + same-file/examples/is_same_file.rs | 13 + same-file/examples/is_stderr.rs | 33 + same-file/src/lib.rs | 530 + same-file/src/unix.rs | 112 + same-file/src/win.rs | 174 + schannel/.cargo-checksum.json | 1 + schannel/Cargo.toml | 28 + schannel/LICENSE.md | 7 + schannel/README.md | 6 + schannel/appveyor.yml | 26 + schannel/src/cert_chain.rs | 139 + schannel/src/cert_context.rs | 640 + schannel/src/cert_store.rs | 446 + schannel/src/context_buffer.rs | 21 + schannel/src/crypt_key.rs | 15 + schannel/src/crypt_prov.rs | 224 + schannel/src/ctl_context.rs | 187 + schannel/src/key_handle.rs | 5 + schannel/src/lib.rs | 87 + schannel/src/ncrypt_key.rs | 15 + schannel/src/schannel_cred.rs | 262 + schannel/src/security_context.rs | 112 + schannel/src/test.rs | 655 + schannel/src/tls_stream.rs | 944 + schannel/test/cert.der | Bin 0 -> 799 bytes schannel/test/cert.pem | 19 + schannel/test/identity.p12 | Bin 0 -> 3386 bytes schannel/test/key.key | Bin 0 -> 1193 bytes schannel/test/key.pem | 28 + schannel/test/self-signed.badssl.com.cer | Bin 0 -> 893 bytes scopeguard/.cargo-checksum.json | 1 + scopeguard/Cargo.toml | 28 + scopeguard/LICENSE-APACHE | 201 + scopeguard/LICENSE-MIT | 25 + scopeguard/README.rst | 81 + scopeguard/examples/readme.rs | 27 + scopeguard/src/lib.rs | 409 + semver-parser/.cargo-checksum.json | 1 + semver-parser/Cargo.toml | 11 + semver-parser/LICENSE-APACHE | 201 + semver-parser/LICENSE-MIT | 25 + semver-parser/src/common.rs | 66 + semver-parser/src/lib.rs | 8 + semver-parser/src/range.rs | 696 + semver-parser/src/recognize.rs | 154 + semver-parser/src/version.rs | 365 + semver/.cargo-checksum.json | 1 + semver/Cargo.toml | 45 + semver/LICENSE-APACHE | 201 + semver/LICENSE-MIT | 25 + semver/README.md | 103 + semver/src/lib.rs | 182 + semver/src/version.rs | 759 + semver/src/version_req.rs | 895 + semver/tests/deprecation.rs | 22 + semver/tests/regression.rs | 25 + semver/tests/serde.rs | 90 + serde/.cargo-checksum.json | 1 + serde/Cargo.toml | 44 + serde/LICENSE-APACHE | 201 + serde/LICENSE-MIT | 25 + serde/README.md | 105 + serde/build.rs | 90 + serde/crates-io.md | 56 + serde/src/de/from_primitive.rs | 268 + serde/src/de/ignored_any.rs | 226 + serde/src/de/impls.rs | 2451 + serde/src/de/mod.rs | 2274 + serde/src/de/utf8.rs | 54 + serde/src/de/value.rs | 1395 + serde/src/export.rs | 44 + serde/src/integer128.rs | 86 + serde/src/lib.rs | 305 + serde/src/macros.rs | 256 + serde/src/private/de.rs | 2941 + serde/src/private/macros.rs | 148 + serde/src/private/mod.rs | 12 + serde/src/private/ser.rs | 1337 + serde/src/ser/impls.rs | 809 + serde/src/ser/impossible.rs | 229 + serde/src/ser/mod.rs | 2084 + serde_derive/.cargo-checksum.json | 1 + 
serde_derive/Cargo.toml | 48 + serde_derive/LICENSE-APACHE | 201 + serde_derive/LICENSE-MIT | 25 + serde_derive/README.md | 105 + serde_derive/crates-io.md | 56 + serde_derive/src/bound.rs | 316 + serde_derive/src/de.rs | 2974 + serde_derive/src/fragment.rs | 82 + serde_derive/src/internals/ast.rs | 194 + serde_derive/src/internals/attr.rs | 1564 + serde_derive/src/internals/case.rs | 182 + serde_derive/src/internals/check.rs | 361 + serde_derive/src/internals/ctxt.rs | 68 + serde_derive/src/internals/mod.rs | 22 + serde_derive/src/lib.rs | 104 + serde_derive/src/pretend.rs | 139 + serde_derive/src/ser.rs | 1284 + serde_derive/src/try.rs | 24 + serde_ignored/.cargo-checksum.json | 1 + serde_ignored/Cargo.toml | 16 + serde_ignored/LICENSE-APACHE | 201 + serde_ignored/LICENSE-MIT | 25 + serde_ignored/README.md | 98 + serde_ignored/src/lib.rs | 1104 + serde_json/.cargo-checksum.json | 1 + serde_json/Cargo.toml | 60 + serde_json/LICENSE-APACHE | 201 + serde_json/LICENSE-MIT | 25 + serde_json/README.md | 365 + serde_json/src/de.rs | 2261 + serde_json/src/error.rs | 429 + serde_json/src/iter.rs | 78 + serde_json/src/lib.rs | 395 + serde_json/src/macros.rs | 309 + serde_json/src/map.rs | 852 + serde_json/src/number.rs | 761 + serde_json/src/raw.rs | 469 + serde_json/src/read.rs | 859 + serde_json/src/ser.rs | 2314 + serde_json/src/value/de.rs | 1389 + serde_json/src/value/from.rs | 266 + serde_json/src/value/index.rs | 274 + serde_json/src/value/mod.rs | 1129 + serde_json/src/value/partial_eq.rs | 102 + serde_json/src/value/ser.rs | 1012 + shell-escape/.cargo-checksum.json | 1 + shell-escape/Cargo.toml | 21 + shell-escape/LICENSE-APACHE | 201 + shell-escape/LICENSE-MIT | 25 + shell-escape/README.md | 23 + shell-escape/src/lib.rs | 133 + smallvec/.cargo-checksum.json | 1 + smallvec/Cargo.toml | 40 + smallvec/LICENSE-APACHE | 201 + smallvec/LICENSE-MIT | 25 + smallvec/README.md | 8 + smallvec/benches/bench.rs | 295 + smallvec/lib.rs | 2179 + socket2/.cargo-checksum.json | 1 + socket2/Cargo.toml | 40 + socket2/LICENSE-APACHE | 201 + socket2/LICENSE-MIT | 25 + socket2/README.md | 23 + socket2/src/lib.rs | 153 + socket2/src/sockaddr.rs | 212 + socket2/src/socket.rs | 985 + socket2/src/sys/redox/mod.rs | 832 + socket2/src/sys/unix/mod.rs | 1116 + socket2/src/sys/unix/weak.rs | 59 + socket2/src/sys/windows.rs | 974 + socket2/src/utils.rs | 48 + stable_deref_trait/.cargo-checksum.json | 1 + stable_deref_trait/Cargo.toml | 27 + stable_deref_trait/LICENSE-APACHE | 201 + stable_deref_trait/LICENSE-MIT | 25 + stable_deref_trait/README.md | 23 + stable_deref_trait/src/lib.rs | 201 + strsim/.cargo-checksum.json | 1 + strsim/CHANGELOG.md | 118 + strsim/Cargo.toml | 28 + strsim/LICENSE | 23 + strsim/README.md | 62 + strsim/appveyor.yml | 13 + strsim/dev | 41 + strsim/src/lib.rs | 698 + strsim/tests/lib.rs | 39 + syn/.cargo-checksum.json | 1 + syn/Cargo.toml | 70 + syn/LICENSE-APACHE | 201 + syn/LICENSE-MIT | 25 + syn/README.md | 257 + syn/src/attr.rs | 697 + syn/src/buffer.rs | 355 + syn/src/data.rs | 392 + syn/src/derive.rs | 263 + syn/src/error.rs | 137 + syn/src/export.rs | 35 + syn/src/expr.rs | 3781 + syn/src/ext.rs | 65 + syn/src/file.rs | 123 + syn/src/gen/fold.rs | 3047 + syn/src/gen/visit.rs | 3491 + syn/src/gen/visit_mut.rs | 3399 + syn/src/gen_helper.rs | 162 + syn/src/generics.rs | 1129 + syn/src/group.rs | 292 + syn/src/ident.rs | 86 + syn/src/item.rs | 2542 + syn/src/keyword.rs | 254 + syn/src/lib.rs | 748 + syn/src/lifetime.rs | 166 + syn/src/lit.rs | 1099 + syn/src/lookahead.rs | 175 + 
syn/src/mac.rs | 139 + syn/src/macros.rs | 173 + syn/src/op.rs | 239 + syn/src/parse.rs | 1112 + syn/src/parse_macro_input.rs | 107 + syn/src/parse_quote.rs | 155 + syn/src/path.rs | 686 + syn/src/print.rs | 16 + syn/src/punctuated.rs | 791 + syn/src/span.rs | 67 + syn/src/spanned.rs | 158 + syn/src/synom.rs | 0 syn/src/token.rs | 910 + syn/src/tt.rs | 118 + syn/src/ty.rs | 1002 + synstructure/.cargo-checksum.json | 1 + synstructure/Cargo.toml | 40 + synstructure/LICENSE | 7 + synstructure/README.md | 159 + synstructure/src/lib.rs | 2283 + synstructure/src/macros.rs | 415 + tar/.cargo-checksum.json | 1 + tar/Cargo.toml | 38 + tar/LICENSE-APACHE | 201 + tar/LICENSE-MIT | 25 + tar/README.md | 80 + tar/appveyor.yml | 20 + tar/examples/extract_file.rs | 25 + tar/examples/list.rs | 17 + tar/examples/raw_list.rs | 48 + tar/examples/write.rs | 13 + tar/src/archive.rs | 450 + tar/src/builder.rs | 436 + tar/src/entry.rs | 670 + tar/src/entry_type.rs | 193 + tar/src/error.rs | 40 + tar/src/header.rs | 1597 + tar/src/lib.rs | 75 + tar/src/pax.rs | 91 + tar/tests/all.rs | 863 + tar/tests/archives/directory.tar | Bin 0 -> 10240 bytes tar/tests/archives/duplicate_dirs.tar | Bin 0 -> 2048 bytes tar/tests/archives/empty_filename.tar | Bin 0 -> 512 bytes tar/tests/archives/file_times.tar | Bin 0 -> 1536 bytes tar/tests/archives/link.tar | Bin 0 -> 10240 bytes tar/tests/archives/pax.tar | Bin 0 -> 10240 bytes tar/tests/archives/pax2.tar | Bin 0 -> 10240 bytes tar/tests/archives/reading_files.tar | Bin 0 -> 10240 bytes tar/tests/archives/simple.tar | Bin 0 -> 10240 bytes tar/tests/archives/spaces.tar | Bin 0 -> 2048 bytes tar/tests/archives/sparse.tar | Bin 0 -> 10240 bytes tar/tests/archives/xattrs.tar | Bin 0 -> 10240 bytes tar/tests/entry.rs | 322 + tar/tests/header/mod.rs | 236 + tempfile/.cargo-checksum.json | 1 + tempfile/Cargo.toml | 37 + tempfile/LICENSE-APACHE | 201 + tempfile/LICENSE-MIT | 25 + tempfile/NEWS | 70 + tempfile/README.md | 52 + tempfile/appveyor.yml | 40 + tempfile/src/dir.rs | 406 + tempfile/src/file/imp/mod.rs | 12 + tempfile/src/file/imp/other.rs | 23 + tempfile/src/file/imp/unix.rs | 149 + tempfile/src/file/imp/windows.rs | 121 + tempfile/src/file/mod.rs | 802 + tempfile/src/lib.rs | 449 + tempfile/src/util.rs | 52 + tempfile/tests/namedtempfile.rs | 203 + tempfile/tests/tempdir.rs | 260 + tempfile/tests/tempfile.rs | 62 + termcolor/.cargo-checksum.json | 1 + termcolor/COPYING | 3 + termcolor/Cargo.toml | 30 + termcolor/LICENSE-MIT | 21 + termcolor/README.md | 86 + termcolor/UNLICENSE | 24 + termcolor/src/lib.rs | 1895 + termion/.cargo-checksum.json | 1 + termion/Cargo.toml | 17 + termion/LICENSE | 21 + termion/README.md | 181 + termion/examples/alternate_screen.rs | 17 + termion/examples/alternate_screen_raw.rs | 40 + termion/examples/async.rs | 39 + termion/examples/click.rs | 35 + termion/examples/color.rs | 10 + termion/examples/commie.rs | 51 + termion/examples/detect_color.rs | 19 + termion/examples/is_tty.rs | 11 + termion/examples/keys.rs | 44 + termion/examples/mouse.rs | 46 + termion/examples/rainbow.rs | 60 + termion/examples/read.rs | 23 + termion/examples/rustc_fun.rs | 24 + termion/examples/simple.rs | 42 + termion/examples/size.rs | 7 + termion/examples/truecolor.rs | 12 + termion/logo.svg | 9 + termion/src/async.rs | 78 + termion/src/clear.rs | 9 + termion/src/color.rs | 242 + termion/src/cursor.rs | 140 + termion/src/event.rs | 351 + termion/src/input.rs | 388 + termion/src/lib.rs | 61 + termion/src/macros.rs | 19 + termion/src/raw.rs | 117 + termion/src/screen.rs 
| 91 + termion/src/scroll.rs | 23 + termion/src/style.rs | 22 + termion/src/sys/redox/attr.rs | 33 + termion/src/sys/redox/mod.rs | 15 + termion/src/sys/redox/size.rs | 18 + termion/src/sys/redox/tty.rs | 22 + termion/src/sys/unix/attr.rs | 29 + termion/src/sys/unix/mod.rs | 33 + termion/src/sys/unix/size.rs | 48 + termion/src/sys/unix/tty.rs | 17 + textwrap/.cargo-checksum.json | 1 + textwrap/Cargo.toml | 49 + textwrap/LICENSE | 21 + textwrap/README.md | 339 + textwrap/benches/linear.rs | 120 + textwrap/examples/layout.rs | 38 + textwrap/examples/termwidth.rs | 21 + textwrap/src/lib.rs | 1305 + textwrap/tests/version-numbers.rs | 12 + thread_local/.cargo-checksum.json | 1 + thread_local/Cargo.toml | 26 + thread_local/LICENSE-APACHE | 201 + thread_local/LICENSE-MIT | 25 + thread_local/README.md | 41 + thread_local/benches/thread_local.rs | 18 + thread_local/src/lib.rs | 757 + thread_local/src/thread_id.rs | 61 + thread_local/src/unreachable.rs | 74 + toml/.cargo-checksum.json | 1 + toml/Cargo.toml | 33 + toml/LICENSE-APACHE | 201 + toml/LICENSE-MIT | 25 + toml/README.md | 40 + toml/examples/decode.rs | 55 + toml/examples/toml2json.rs | 51 + toml/src/datetime.rs | 424 + toml/src/de.rs | 1528 + toml/src/lib.rs | 175 + toml/src/macros.rs | 462 + toml/src/ser.rs | 1802 + toml/src/spanned.rs | 140 + toml/src/tokens.rs | 673 + toml/src/value.rs | 962 + ucd-util/.cargo-checksum.json | 1 + ucd-util/Cargo.toml | 23 + ucd-util/LICENSE-APACHE | 201 + ucd-util/LICENSE-MIT | 21 + ucd-util/README.md | 23 + ucd-util/src/hangul.rs | 105 + ucd-util/src/ideograph.rs | 83 + ucd-util/src/lib.rs | 28 + ucd-util/src/name.rs | 194 + ucd-util/src/property.rs | 124 + .../src/unicode_tables/jamo_short_name.rs | 22 + ucd-util/src/unicode_tables/mod.rs | 5 + ucd-util/src/unicode_tables/property_names.rs | 147 + .../src/unicode_tables/property_values.rs | 1122 + unicode-bidi/.cargo-checksum.json | 1 + unicode-bidi/.pc/.quilt_patches | 1 + unicode-bidi/.pc/.quilt_series | 1 + unicode-bidi/.pc/.version | 1 + unicode-bidi/.pc/applied-patches | 1 + .../.pc/no-flamegraphs.patch/.timestamp | 0 .../.pc/no-flamegraphs.patch/Cargo.toml | 49 + unicode-bidi/AUTHORS | 4 + unicode-bidi/COPYRIGHT | 8 + unicode-bidi/Cargo.toml | 41 + unicode-bidi/LICENSE-APACHE | 201 + unicode-bidi/LICENSE-MIT | 25 + unicode-bidi/README.md | 12 + .../debian/patches/no-flamegraphs.patch | 26 + unicode-bidi/debian/patches/series | 1 + unicode-bidi/src/char_data/mod.rs | 136 + unicode-bidi/src/char_data/tables.rs | 464 + unicode-bidi/src/deprecated.rs | 90 + unicode-bidi/src/explicit.rs | 186 + unicode-bidi/src/format_chars.rs | 42 + unicode-bidi/src/implicit.rs | 228 + unicode-bidi/src/level.rs | 382 + unicode-bidi/src/lib.rs | 890 + unicode-bidi/src/prepare.rs | 366 + unicode-normalization/.cargo-checksum.json | 1 + unicode-normalization/COPYRIGHT | 7 + unicode-normalization/Cargo.toml | 24 + unicode-normalization/LICENSE-APACHE | 201 + unicode-normalization/LICENSE-MIT | 25 + unicode-normalization/README.md | 32 + unicode-normalization/benches/bench.rs | 92 + unicode-normalization/scripts/unicode.py | 452 + unicode-normalization/src/decompose.rs | 126 + unicode-normalization/src/lib.rs | 167 + .../src/normalization_tests.rs | 131078 +++++++++++++++ unicode-normalization/src/normalize.rs | 152 + unicode-normalization/src/quick_check.rs | 156 + unicode-normalization/src/recompose.rs | 148 + unicode-normalization/src/stream_safe.rs | 164 + unicode-normalization/src/tables.rs | 11128 ++ unicode-normalization/src/test.rs | 193 + 
unicode-width/.cargo-checksum.json | 1 + unicode-width/COPYRIGHT | 7 + unicode-width/Cargo.toml | 29 + unicode-width/LICENSE-APACHE | 201 + unicode-width/LICENSE-MIT | 25 + unicode-width/README.md | 39 + unicode-width/scripts/unicode.py | 321 + unicode-width/src/lib.rs | 131 + unicode-width/src/tables.rs | 273 + unicode-width/src/tests.rs | 156 + unicode-xid/.cargo-checksum.json | 1 + unicode-xid/COPYRIGHT | 7 + unicode-xid/Cargo.toml | 26 + unicode-xid/LICENSE-APACHE | 201 + unicode-xid/LICENSE-MIT | 25 + unicode-xid/README.md | 34 + unicode-xid/scripts/unicode.py | 187 + unicode-xid/src/lib.rs | 87 + unicode-xid/src/tables.rs | 426 + unicode-xid/src/tests.rs | 113 + unreachable/.cargo-checksum.json | 1 + unreachable/Cargo.toml | 13 + unreachable/LICENSE-APACHE | 202 + unreachable/LICENSE-MIT | 19 + unreachable/README.md | 35 + unreachable/src/lib.rs | 77 + url/.cargo-checksum.json | 1 + url/Cargo.toml | 83 + url/LICENSE-APACHE | 201 + url/LICENSE-MIT | 25 + url/README.md | 10 + url/UPGRADING.md | 263 + url/appveyor.yml | 13 + url/benches/parse_url.rs | 18 + url/docs/404.html | 3 + url/docs/index.html | 3 + url/src/encoding.rs | 146 + url/src/form_urlencoded.rs | 411 + url/src/host.rs | 556 + url/src/lib.rs | 2481 + url/src/origin.rs | 130 + url/src/parser.rs | 1279 + url/src/path_segments.rs | 217 + url/src/quirks.rs | 217 + url/src/slicing.rs | 182 + url/tests/data.rs | 200 + url/tests/setters_tests.json | 1533 + url/tests/unit.rs | 545 + url/tests/urltestdata.json | 6148 + utf8-ranges/.cargo-checksum.json | 1 + utf8-ranges/COPYING | 3 + utf8-ranges/Cargo.toml | 29 + utf8-ranges/LICENSE-MIT | 21 + utf8-ranges/README.md | 54 + utf8-ranges/UNLICENSE | 24 + utf8-ranges/benches/bench.rs | 25 + utf8-ranges/src/char_utf8.rs | 36 + utf8-ranges/src/lib.rs | 527 + vcpkg/.cargo-checksum.json | 1 + vcpkg/Cargo.toml | 38 + vcpkg/src/lib.rs | 1154 + vec_map/.cargo-checksum.json | 1 + vec_map/Cargo.toml | 30 + vec_map/LICENSE-APACHE | 201 + vec_map/LICENSE-MIT | 25 + vec_map/README.md | 15 + vec_map/deploy-docs.sh | 20 + vec_map/src/lib.rs | 1623 + version_check/.cargo-checksum.json | 1 + version_check/Cargo.toml | 24 + version_check/LICENSE-APACHE | 201 + version_check/LICENSE-MIT | 19 + version_check/README.md | 67 + version_check/src/lib.rs | 255 + void/.cargo-checksum.json | 1 + void/Cargo.toml | 15 + void/README.md | 39 + void/src/lib.rs | 121 + walkdir/.cargo-checksum.json | 1 + walkdir/COPYING | 3 + walkdir/Cargo.toml | 53 + walkdir/LICENSE-MIT | 21 + walkdir/README.md | 140 + walkdir/UNLICENSE | 24 + walkdir/compare/nftw.c | 25 + walkdir/compare/walk.py | 10 + walkdir/examples/walkdir.rs | 103 + walkdir/src/lib.rs | 1681 + walkdir/src/tests.rs | 867 + walkdir/src/unix.rs | 16 + .../.cargo-checksum.json | 1 + winapi-i686-pc-windows-gnu/Cargo.toml | 22 + winapi-i686-pc-windows-gnu/build.rs | 18 + winapi-i686-pc-windows-gnu/src/lib.rs | 7 + winapi-util/.cargo-checksum.json | 1 + winapi-util/COPYING | 3 + winapi-util/Cargo.toml | 27 + winapi-util/LICENSE-MIT | 21 + winapi-util/README.md | 51 + winapi-util/UNLICENSE | 24 + winapi-util/appveyor.yml | 26 + winapi-util/ci/script.sh | 7 + winapi-util/src/console.rs | 121 + winapi-util/src/file.rs | 158 + winapi-util/src/lib.rs | 35 + winapi-util/src/win.rs | 247 + .../.cargo-checksum.json | 1 + winapi-x86_64-pc-windows-gnu/Cargo.toml | 22 + winapi-x86_64-pc-windows-gnu/build.rs | 18 + winapi-x86_64-pc-windows-gnu/src/lib.rs | 7 + winapi/.cargo-checksum.json | 1 + winapi/Cargo.toml | 380 + winapi/LICENSE-APACHE | 201 + winapi/LICENSE-MIT | 19 + 
winapi/README.md | 82 + winapi/build.rs | 460 + winapi/src/lib.rs | 68 + winapi/src/macros.rs | 406 + winapi/src/shared/basetsd.rs | 71 + winapi/src/shared/bcrypt.rs | 1002 + winapi/src/shared/bugcodes.rs | 457 + winapi/src/shared/cderr.rs | 45 + winapi/src/shared/cfg.rs | 139 + winapi/src/shared/d3d9.rs | 1269 + winapi/src/shared/d3d9caps.rs | 367 + winapi/src/shared/d3d9types.rs | 1488 + winapi/src/shared/dcomptypes.rs | 51 + winapi/src/shared/devguid.rs | 179 + winapi/src/shared/devpkey.rs | 402 + winapi/src/shared/devpropdef.rs | 84 + winapi/src/shared/dinputd.rs | 22 + winapi/src/shared/dxgi.rs | 412 + winapi/src/shared/dxgi1_2.rs | 356 + winapi/src/shared/dxgi1_3.rs | 191 + winapi/src/shared/dxgi1_4.rs | 113 + winapi/src/shared/dxgi1_5.rs | 93 + winapi/src/shared/dxgi1_6.rs | 99 + winapi/src/shared/dxgiformat.rs | 128 + winapi/src/shared/dxgitype.rs | 110 + winapi/src/shared/evntprov.rs | 310 + winapi/src/shared/evntrace.rs | 991 + winapi/src/shared/guiddef.rs | 37 + winapi/src/shared/hidclass.rs | 69 + winapi/src/shared/hidpi.rs | 394 + winapi/src/shared/hidsdi.rs | 111 + winapi/src/shared/hidusage.rs | 275 + winapi/src/shared/in6addr.rs | 18 + winapi/src/shared/inaddr.rs | 30 + winapi/src/shared/intsafe.rs | 6 + winapi/src/shared/ks.rs | 64 + winapi/src/shared/ksmedia.rs | 60 + winapi/src/shared/ktmtypes.rs | 139 + winapi/src/shared/lmcons.rs | 61 + winapi/src/shared/minwindef.rs | 103 + winapi/src/shared/mmreg.rs | 310 + winapi/src/shared/mod.rs | 71 + winapi/src/shared/mstcpip.rs | 496 + winapi/src/shared/ntddscsi.rs | 835 + winapi/src/shared/ntddser.rs | 18 + winapi/src/shared/ntdef.rs | 1074 + winapi/src/shared/ntstatus.rs | 2575 + winapi/src/shared/qos.rs | 21 + winapi/src/shared/rpc.rs | 10 + winapi/src/shared/rpcdce.rs | 564 + winapi/src/shared/rpcndr.rs | 26 + winapi/src/shared/sddl.rs | 218 + winapi/src/shared/sspi.rs | 1075 + winapi/src/shared/stralign.rs | 41 + winapi/src/shared/transportsettingcommon.rs | 11 + winapi/src/shared/tvout.rs | 73 + winapi/src/shared/usb.rs | 524 + winapi/src/shared/usbiodef.rs | 113 + winapi/src/shared/usbspec.rs | 861 + winapi/src/shared/windef.rs | 117 + winapi/src/shared/windowsx.rs | 18 + winapi/src/shared/winerror.rs | 6114 + winapi/src/shared/winusbio.rs | 39 + winapi/src/shared/wmistr.rs | 200 + winapi/src/shared/wnnc.rs | 78 + winapi/src/shared/ws2def.rs | 561 + winapi/src/shared/ws2ipdef.rs | 82 + winapi/src/shared/wtypes.rs | 108 + winapi/src/shared/wtypesbase.rs | 162 + winapi/src/um/accctrl.rs | 372 + winapi/src/um/aclapi.rs | 363 + winapi/src/um/appmgmt.rs | 123 + winapi/src/um/audioclient.rs | 173 + winapi/src/um/audiosessiontypes.rs | 38 + winapi/src/um/avrt.rs | 83 + winapi/src/um/bits.rs | 294 + winapi/src/um/bits10_1.rs | 38 + winapi/src/um/bits1_5.rs | 71 + winapi/src/um/bits2_0.rs | 52 + winapi/src/um/bits2_5.rs | 65 + winapi/src/um/bits3_0.rs | 180 + winapi/src/um/bits4_0.rs | 33 + winapi/src/um/bits5_0.rs | 96 + winapi/src/um/bitscfg.rs | 71 + winapi/src/um/bitsmsg.rs | 143 + winapi/src/um/cfgmgr32.rs | 2043 + winapi/src/um/cguid.rs | 135 + winapi/src/um/combaseapi.rs | 478 + winapi/src/um/coml2api.rs | 11 + winapi/src/um/commapi.rs | 88 + winapi/src/um/commctrl.rs | 4136 + winapi/src/um/commdlg.rs | 713 + winapi/src/um/commoncontrols.rs | 233 + winapi/src/um/consoleapi.rs | 77 + winapi/src/um/corsym.rs | 90 + winapi/src/um/d2d1.rs | 984 + winapi/src/um/d2d1_1.rs | 848 + winapi/src/um/d2d1_2.rs | 68 + winapi/src/um/d2d1_3.rs | 698 + winapi/src/um/d2d1effectauthor.rs | 513 + winapi/src/um/d2d1effects.rs | 618 + 
winapi/src/um/d2d1effects_1.rs | 32 + winapi/src/um/d2d1effects_2.rs | 41 + winapi/src/um/d2d1svg.rs | 412 + winapi/src/um/d2dbasetypes.rs | 16 + winapi/src/um/d3d.rs | 62 + winapi/src/um/d3d10.rs | 58 + winapi/src/um/d3d10_1.rs | 12 + winapi/src/um/d3d10_1shader.rs | 8 + winapi/src/um/d3d10effect.rs | 46 + winapi/src/um/d3d10misc.rs | 8 + winapi/src/um/d3d10sdklayers.rs | 14 + winapi/src/um/d3d10shader.rs | 207 + winapi/src/um/d3d11.rs | 3419 + winapi/src/um/d3d11_1.rs | 486 + winapi/src/um/d3d11_2.rs | 147 + winapi/src/um/d3d11_3.rs | 24 + winapi/src/um/d3d11_4.rs | 8 + winapi/src/um/d3d11on12.rs | 68 + winapi/src/um/d3d11sdklayers.rs | 2680 + winapi/src/um/d3d11shader.rs | 478 + winapi/src/um/d3d11tokenizedprogramformat.rs | 1191 + winapi/src/um/d3d12.rs | 2721 + winapi/src/um/d3d12sdklayers.rs | 1364 + winapi/src/um/d3d12shader.rs | 348 + winapi/src/um/d3dcommon.rs | 745 + winapi/src/um/d3dcompiler.rs | 275 + winapi/src/um/d3dcsx.rs | 12 + winapi/src/um/d3dx10core.rs | 12 + winapi/src/um/d3dx10math.rs | 8 + winapi/src/um/d3dx10mesh.rs | 20 + winapi/src/um/datetimeapi.rs | 61 + winapi/src/um/davclnt.rs | 105 + winapi/src/um/dbghelp.rs | 668 + winapi/src/um/dbt.rs | 193 + winapi/src/um/dcommon.rs | 115 + winapi/src/um/dcomp.rs | 1165 + winapi/src/um/dcompanimation.rs | 39 + winapi/src/um/dde.rs | 21 + winapi/src/um/ddraw.rs | 38 + winapi/src/um/ddrawi.rs | 14 + winapi/src/um/ddrawint.rs | 42 + winapi/src/um/debugapi.rs | 42 + winapi/src/um/devicetopology.rs | 461 + winapi/src/um/dinput.rs | 108 + winapi/src/um/dmksctl.rs | 12 + winapi/src/um/dmusicc.rs | 72 + winapi/src/um/docobj.rs | 137 + winapi/src/um/documenttarget.rs | 25 + winapi/src/um/dpa_dsa.rs | 284 + winapi/src/um/dpapi.rs | 101 + winapi/src/um/dsgetdc.rs | 268 + winapi/src/um/dsound.rs | 343 + winapi/src/um/dsrole.rs | 67 + winapi/src/um/dvp.rs | 26 + winapi/src/um/dwmapi.rs | 296 + winapi/src/um/dwrite.rs | 1478 + winapi/src/um/dwrite_1.rs | 747 + winapi/src/um/dwrite_2.rs | 294 + winapi/src/um/dwrite_3.rs | 579 + winapi/src/um/dxdiag.rs | 12 + winapi/src/um/dxfile.rs | 24 + winapi/src/um/dxgidebug.rs | 236 + winapi/src/um/dxva2api.rs | 707 + winapi/src/um/dxvahd.rs | 556 + winapi/src/um/endpointvolume.rs | 124 + winapi/src/um/errhandlingapi.rs | 76 + winapi/src/um/evntcons.rs | 229 + winapi/src/um/exdisp.rs | 221 + winapi/src/um/fibersapi.rs | 24 + winapi/src/um/fileapi.rs | 640 + winapi/src/um/gl/gl.rs | 53 + winapi/src/um/gl/mod.rs | 8 + winapi/src/um/handleapi.rs | 37 + winapi/src/um/heapapi.rs | 93 + .../um/highlevelmonitorconfigurationapi.rs | 172 + winapi/src/um/http.rs | 1073 + winapi/src/um/imm.rs | 43 + winapi/src/um/interlockedapi.rs | 32 + winapi/src/um/ioapiset.rs | 72 + winapi/src/um/jobapi.rs | 15 + winapi/src/um/jobapi2.rs | 64 + winapi/src/um/knownfolders.rs | 288 + winapi/src/um/ktmw32.rs | 64 + winapi/src/um/libloaderapi.rs | 237 + winapi/src/um/lmaccess.rs | 1215 + winapi/src/um/lmalert.rs | 76 + winapi/src/um/lmapibuf.rs | 31 + winapi/src/um/lmat.rs | 63 + winapi/src/um/lmdfs.rs | 484 + winapi/src/um/lmerrlog.rs | 269 + winapi/src/um/lmjoin.rs | 233 + winapi/src/um/lmmsg.rs | 57 + winapi/src/um/lmremutl.rs | 62 + winapi/src/um/lmrepl.rs | 201 + winapi/src/um/lmserver.rs | 1256 + winapi/src/um/lmshare.rs | 380 + winapi/src/um/lmstats.rs | 86 + winapi/src/um/lmsvc.rs | 181 + winapi/src/um/lmuse.rs | 102 + winapi/src/um/lmwksta.rs | 422 + .../src/um/lowlevelmonitorconfigurationapi.rs | 50 + winapi/src/um/lsalookup.rs | 110 + winapi/src/um/memoryapi.rs | 391 + winapi/src/um/minschannel.rs | 59 + 
winapi/src/um/minwinbase.rs | 334 + winapi/src/um/mmdeviceapi.rs | 219 + winapi/src/um/mmeapi.rs | 337 + winapi/src/um/mmsystem.rs | 267 + winapi/src/um/mod.rs | 267 + winapi/src/um/msaatext.rs | 60 + winapi/src/um/mscat.rs | 37 + winapi/src/um/mschapp.rs | 49 + winapi/src/um/mssip.rs | 256 + winapi/src/um/namedpipeapi.rs | 94 + winapi/src/um/namespaceapi.rs | 37 + winapi/src/um/nb30.rs | 215 + winapi/src/um/ncrypt.rs | 89 + winapi/src/um/ntlsa.rs | 1527 + winapi/src/um/ntsecapi.rs | 1729 + winapi/src/um/oaidl.rs | 848 + winapi/src/um/objbase.rs | 65 + winapi/src/um/objidl.rs | 508 + winapi/src/um/objidlbase.rs | 957 + winapi/src/um/ocidl.rs | 69 + winapi/src/um/ole2.rs | 22 + winapi/src/um/oleauto.rs | 824 + winapi/src/um/olectl.rs | 15 + winapi/src/um/oleidl.rs | 44 + winapi/src/um/opmapi.rs | 363 + winapi/src/um/pdh.rs | 807 + winapi/src/um/perflib.rs | 332 + .../src/um/physicalmonitorenumerationapi.rs | 44 + winapi/src/um/playsoundapi.rs | 47 + winapi/src/um/powerbase.rs | 36 + winapi/src/um/powersetting.rs | 61 + winapi/src/um/powrprof.rs | 551 + winapi/src/um/processenv.rs | 99 + winapi/src/um/processsnapshot.rs | 121 + winapi/src/um/processthreadsapi.rs | 442 + winapi/src/um/processtopologyapi.rs | 24 + winapi/src/um/profileapi.rs | 16 + winapi/src/um/propidl.rs | 16 + winapi/src/um/propkeydef.rs | 14 + winapi/src/um/propsys.rs | 47 + winapi/src/um/prsht.rs | 362 + winapi/src/um/psapi.rs | 353 + winapi/src/um/realtimeapiset.rs | 31 + winapi/src/um/reason.rs | 61 + winapi/src/um/restartmanager.rs | 151 + winapi/src/um/restrictederrorinfo.rs | 21 + winapi/src/um/rmxfguid.rs | 68 + winapi/src/um/sapi.rs | 1389 + winapi/src/um/sapi51.rs | 3729 + winapi/src/um/sapi53.rs | 1824 + winapi/src/um/sapiddk.rs | 239 + winapi/src/um/sapiddk51.rs | 652 + winapi/src/um/schannel.rs | 340 + winapi/src/um/securityappcontainer.rs | 17 + winapi/src/um/securitybaseapi.rs | 691 + winapi/src/um/servprov.rs | 24 + winapi/src/um/setupapi.rs | 3573 + winapi/src/um/shellapi.rs | 924 + winapi/src/um/shellscalingapi.rs | 45 + winapi/src/um/shlobj.rs | 261 + winapi/src/um/shobjidl.rs | 374 + winapi/src/um/shobjidl_core.rs | 94 + winapi/src/um/shtypes.rs | 45 + winapi/src/um/spapidef.rs | 54 + winapi/src/um/sporder.rs | 42 + winapi/src/um/sql.rs | 109 + winapi/src/um/sqlext.rs | 96 + winapi/src/um/sqltypes.rs | 143 + winapi/src/um/sqlucode.rs | 107 + winapi/src/um/sspi.rs | 8 + winapi/src/um/stringapiset.rs | 76 + winapi/src/um/strmif.rs | 8 + winapi/src/um/subauth.rs | 205 + winapi/src/um/synchapi.rs | 350 + winapi/src/um/sysinfoapi.rs | 218 + winapi/src/um/systemtopologyapi.rs | 21 + winapi/src/um/textstor.rs | 12 + winapi/src/um/threadpoolapiset.rs | 172 + winapi/src/um/threadpoollegacyapiset.rs | 45 + winapi/src/um/timeapi.rs | 21 + winapi/src/um/timezoneapi.rs | 90 + winapi/src/um/tlhelp32.rs | 195 + winapi/src/um/unknwnbase.rs | 44 + winapi/src/um/urlhist.rs | 98 + winapi/src/um/urlmon.rs | 22 + winapi/src/um/userenv.rs | 160 + winapi/src/um/usp10.rs | 563 + winapi/src/um/utilapiset.rs | 26 + winapi/src/um/uxtheme.rs | 773 + winapi/src/um/vsbackup.rs | 521 + winapi/src/um/vss.rs | 285 + winapi/src/um/vsserror.rs | 90 + winapi/src/um/vswriter.rs | 398 + winapi/src/um/wct.rs | 115 + winapi/src/um/werapi.rs | 54 + winapi/src/um/winbase.rs | 2804 + winapi/src/um/wincodec.rs | 1865 + winapi/src/um/wincodecsdk.rs | 565 + winapi/src/um/wincon.rs | 540 + winapi/src/um/wincred.rs | 533 + winapi/src/um/wincrypt.rs | 7366 + winapi/src/um/windowsceip.rs | 10 + winapi/src/um/winefs.rs | 179 + winapi/src/um/winevt.rs | 
544 + winapi/src/um/wingdi.rs | 5591 + winapi/src/um/winhttp.rs | 659 + winapi/src/um/wininet.rs | 2365 + winapi/src/um/winineti.rs | 143 + winapi/src/um/winioctl.rs | 1037 + winapi/src/um/winnetwk.rs | 447 + winapi/src/um/winnls.rs | 818 + winapi/src/um/winnt.rs | 8429 + winapi/src/um/winreg.rs | 491 + winapi/src/um/winsafer.rs | 229 + winapi/src/um/winscard.rs | 710 + winapi/src/um/winsmcrd.rs | 167 + winapi/src/um/winsock2.rs | 1451 + winapi/src/um/winspool.rs | 2434 + winapi/src/um/winsvc.rs | 666 + winapi/src/um/winusb.rs | 225 + winapi/src/um/winuser.rs | 6907 + winapi/src/um/winver.rs | 54 + winapi/src/um/wow64apiset.rs | 28 + winapi/src/um/ws2spi.rs | 910 + winapi/src/um/ws2tcpip.rs | 347 + winapi/src/um/xinput.rs | 166 + winapi/src/vc/excpt.rs | 19 + winapi/src/vc/limits.rs | 8 + winapi/src/vc/mod.rs | 11 + winapi/src/vc/vadefs.rs | 9 + winapi/src/vc/vcruntime.rs | 10 + winapi/src/winrt/activation.rs | 14 + winapi/src/winrt/hstring.rs | 26 + winapi/src/winrt/inspectable.rs | 30 + winapi/src/winrt/mod.rs | 13 + winapi/src/winrt/roapi.rs | 61 + winapi/src/winrt/robuffer.rs | 13 + winapi/src/winrt/roerrorapi.rs | 104 + winapi/src/winrt/winstring.rs | 151 + wincolor/.cargo-checksum.json | 1 + wincolor/COPYING | 3 + wincolor/Cargo.toml | 33 + wincolor/LICENSE-MIT | 21 + wincolor/README.md | 44 + wincolor/UNLICENSE | 24 + wincolor/src/lib.rs | 35 + wincolor/src/win.rs | 261 + 2501 files changed, 879286 insertions(+) create mode 100644 aho-corasick/.cargo-checksum.json create mode 100644 aho-corasick/COPYING create mode 100644 aho-corasick/Cargo.toml create mode 100644 aho-corasick/LICENSE-MIT create mode 100644 aho-corasick/README.md create mode 100644 aho-corasick/UNLICENSE create mode 100644 aho-corasick/benches/bench.rs create mode 100644 aho-corasick/benches/random.txt create mode 100644 aho-corasick/examples/dict-search.rs create mode 100644 aho-corasick/src/autiter.rs create mode 100644 aho-corasick/src/full.rs create mode 100644 aho-corasick/src/lib.rs create mode 100644 aho-corasick/src/main.rs create mode 100644 ansi_term/.cargo-checksum.json create mode 100644 ansi_term/Cargo.toml create mode 100644 ansi_term/LICENCE create mode 100644 ansi_term/README.md create mode 100644 ansi_term/examples/colours.rs create mode 100644 ansi_term/src/ansi.rs create mode 100644 ansi_term/src/debug.rs create mode 100644 ansi_term/src/difference.rs create mode 100644 ansi_term/src/display.rs create mode 100644 ansi_term/src/lib.rs create mode 100644 ansi_term/src/style.rs create mode 100644 ansi_term/src/windows.rs create mode 100644 ansi_term/src/write.rs create mode 100644 arrayvec/.cargo-checksum.json create mode 100644 arrayvec/Cargo.toml create mode 100644 arrayvec/LICENSE-APACHE create mode 100644 arrayvec/LICENSE-MIT create mode 100644 arrayvec/README.rst create mode 100644 arrayvec/benches/arraystring.rs create mode 100644 arrayvec/benches/extend.rs create mode 100644 arrayvec/custom.css create mode 100644 arrayvec/src/array.rs create mode 100644 arrayvec/src/array_string.rs create mode 100644 arrayvec/src/char.rs create mode 100644 arrayvec/src/errors.rs create mode 100644 arrayvec/src/lib.rs create mode 100644 arrayvec/src/range.rs create mode 100644 arrayvec/tests/serde.rs create mode 100644 arrayvec/tests/tests.rs create mode 100644 atty/.cargo-checksum.json create mode 100644 atty/CHANGELOG.md create mode 100644 atty/Cargo.toml create mode 100644 atty/LICENSE create mode 100644 atty/README.md create mode 100644 atty/appveyor.yml create mode 100644 atty/examples/atty.rs create mode 
100644 atty/rustfmt.toml create mode 100644 atty/src/lib.rs create mode 100644 backtrace-sys/.cargo-checksum.json create mode 100644 backtrace-sys/Cargo.toml create mode 100644 backtrace-sys/LICENSE-APACHE create mode 100644 backtrace-sys/LICENSE-MIT create mode 100644 backtrace-sys/build.rs create mode 100644 backtrace-sys/src/lib.rs create mode 100644 backtrace-sys/src/libbacktrace/LICENSE create mode 100644 backtrace-sys/src/libbacktrace/Makefile.am create mode 100644 backtrace-sys/src/libbacktrace/Makefile.in create mode 100644 backtrace-sys/src/libbacktrace/Mark.Twain-Tom.Sawyer.txt create mode 100644 backtrace-sys/src/libbacktrace/README.md create mode 100644 backtrace-sys/src/libbacktrace/acinclude.m4 create mode 100644 backtrace-sys/src/libbacktrace/aclocal.m4 create mode 100644 backtrace-sys/src/libbacktrace/alloc.c create mode 100644 backtrace-sys/src/libbacktrace/atomic.c create mode 100644 backtrace-sys/src/libbacktrace/backtrace-supported.h.in create mode 100644 backtrace-sys/src/libbacktrace/backtrace.c create mode 100644 backtrace-sys/src/libbacktrace/backtrace.h create mode 100644 backtrace-sys/src/libbacktrace/btest.c create mode 100755 backtrace-sys/src/libbacktrace/config.guess create mode 100644 backtrace-sys/src/libbacktrace/config.h.in create mode 100755 backtrace-sys/src/libbacktrace/config.sub create mode 100644 backtrace-sys/src/libbacktrace/config/libtool.m4 create mode 100644 backtrace-sys/src/libbacktrace/config/ltoptions.m4 create mode 100644 backtrace-sys/src/libbacktrace/config/ltsugar.m4 create mode 100644 backtrace-sys/src/libbacktrace/config/ltversion.m4 create mode 100644 backtrace-sys/src/libbacktrace/config/lt~obsolete.m4 create mode 100755 backtrace-sys/src/libbacktrace/configure create mode 100644 backtrace-sys/src/libbacktrace/configure.ac create mode 100644 backtrace-sys/src/libbacktrace/dwarf.c create mode 100644 backtrace-sys/src/libbacktrace/edtest.c create mode 100644 backtrace-sys/src/libbacktrace/edtest2.c create mode 100644 backtrace-sys/src/libbacktrace/elf.c create mode 100644 backtrace-sys/src/libbacktrace/fileline.c create mode 100644 backtrace-sys/src/libbacktrace/filenames.h create mode 100644 backtrace-sys/src/libbacktrace/filetype.awk create mode 100755 backtrace-sys/src/libbacktrace/install-sh create mode 100644 backtrace-sys/src/libbacktrace/internal.h create mode 100755 backtrace-sys/src/libbacktrace/ltmain.sh create mode 100644 backtrace-sys/src/libbacktrace/macho.c create mode 100755 backtrace-sys/src/libbacktrace/missing create mode 100644 backtrace-sys/src/libbacktrace/mmap.c create mode 100644 backtrace-sys/src/libbacktrace/mmapio.c create mode 100644 backtrace-sys/src/libbacktrace/move-if-change create mode 100644 backtrace-sys/src/libbacktrace/nounwind.c create mode 100644 backtrace-sys/src/libbacktrace/pecoff.c create mode 100644 backtrace-sys/src/libbacktrace/posix.c create mode 100644 backtrace-sys/src/libbacktrace/print.c create mode 100644 backtrace-sys/src/libbacktrace/read.c create mode 100644 backtrace-sys/src/libbacktrace/simple.c create mode 100644 backtrace-sys/src/libbacktrace/sort.c create mode 100644 backtrace-sys/src/libbacktrace/state.c create mode 100644 backtrace-sys/src/libbacktrace/stest.c create mode 100644 backtrace-sys/src/libbacktrace/testlib.c create mode 100644 backtrace-sys/src/libbacktrace/testlib.h create mode 100644 backtrace-sys/src/libbacktrace/ttest.c create mode 100644 backtrace-sys/src/libbacktrace/unknown.c create mode 100644 backtrace-sys/src/libbacktrace/xcoff.c create mode 100644 
backtrace-sys/src/libbacktrace/ztest.c create mode 100644 backtrace-sys/symbol-map create mode 100644 backtrace/.cargo-checksum.json create mode 100644 backtrace/Cargo.toml create mode 100644 backtrace/LICENSE-APACHE create mode 100644 backtrace/LICENSE-MIT create mode 100644 backtrace/README.md create mode 100644 backtrace/appveyor.yml create mode 100755 backtrace/ci/android-ndk.sh create mode 100644 backtrace/ci/docker/aarch64-linux-android/Dockerfile create mode 100644 backtrace/ci/docker/aarch64-unknown-linux-gnu/Dockerfile create mode 100644 backtrace/ci/docker/arm-linux-androideabi/Dockerfile create mode 100644 backtrace/ci/docker/arm-unknown-linux-gnueabihf/Dockerfile create mode 100644 backtrace/ci/docker/armv7-linux-androideabi/Dockerfile create mode 100644 backtrace/ci/docker/armv7-unknown-linux-gnueabihf/Dockerfile create mode 100644 backtrace/ci/docker/i586-unknown-linux-gnu/Dockerfile create mode 100644 backtrace/ci/docker/i686-linux-android/Dockerfile create mode 100644 backtrace/ci/docker/i686-unknown-linux-gnu/Dockerfile create mode 100644 backtrace/ci/docker/powerpc-unknown-linux-gnu/Dockerfile create mode 100644 backtrace/ci/docker/powerpc64-unknown-linux-gnu/Dockerfile create mode 100644 backtrace/ci/docker/x86_64-linux-android/Dockerfile create mode 100644 backtrace/ci/docker/x86_64-pc-windows-gnu/Dockerfile create mode 100644 backtrace/ci/docker/x86_64-unknown-linux-gnu/Dockerfile create mode 100644 backtrace/ci/docker/x86_64-unknown-linux-musl/Dockerfile create mode 100755 backtrace/ci/run-docker.sh create mode 100755 backtrace/ci/run.sh create mode 100644 backtrace/examples/backtrace.rs create mode 100644 backtrace/examples/raw.rs create mode 100644 backtrace/src/backtrace/dbghelp.rs create mode 100644 backtrace/src/backtrace/libunwind.rs create mode 100644 backtrace/src/backtrace/mod.rs create mode 100644 backtrace/src/backtrace/noop.rs create mode 100644 backtrace/src/backtrace/unix_backtrace.rs create mode 100644 backtrace/src/capture.rs create mode 100644 backtrace/src/dylib.rs create mode 100644 backtrace/src/lib.rs create mode 100644 backtrace/src/symbolize/coresymbolication.rs create mode 100644 backtrace/src/symbolize/dbghelp.rs create mode 100644 backtrace/src/symbolize/dladdr.rs create mode 100644 backtrace/src/symbolize/gimli.rs create mode 100644 backtrace/src/symbolize/libbacktrace.rs create mode 100644 backtrace/src/symbolize/mod.rs create mode 100644 backtrace/src/symbolize/noop.rs create mode 100644 backtrace/tests/long_fn_name.rs create mode 100644 backtrace/tests/smoke.rs create mode 100644 bitflags/.cargo-checksum.json create mode 100644 bitflags/CHANGELOG.md create mode 100644 bitflags/CODE_OF_CONDUCT.md create mode 100644 bitflags/Cargo.toml create mode 100644 bitflags/LICENSE-APACHE create mode 100644 bitflags/LICENSE-MIT create mode 100644 bitflags/README.md create mode 100644 bitflags/src/example_generated.rs create mode 100644 bitflags/src/lib.rs create mode 100644 bufstream/.cargo-checksum.json create mode 100644 bufstream/Cargo.toml create mode 100644 bufstream/LICENSE-APACHE create mode 100644 bufstream/LICENSE-MIT create mode 100644 bufstream/README.md create mode 100644 bufstream/src/lib.rs create mode 100644 cc/.cargo-checksum.json create mode 100644 cc/Cargo.toml create mode 100644 cc/LICENSE-APACHE create mode 100644 cc/LICENSE-MIT create mode 100644 cc/README.md create mode 100644 cc/appveyor.yml create mode 100644 cc/src/bin/gcc-shim.rs create mode 100644 cc/src/com.rs create mode 100644 cc/src/lib.rs create mode 100644 
cc/src/registry.rs create mode 100644 cc/src/setup_config.rs create mode 100644 cc/src/winapi.rs create mode 100644 cc/src/windows_registry.rs create mode 100644 cc/tests/cc_env.rs create mode 100644 cc/tests/support/mod.rs create mode 100644 cc/tests/test.rs create mode 100644 cfg-if/.cargo-checksum.json create mode 100644 cfg-if/Cargo.toml create mode 100644 cfg-if/LICENSE-APACHE create mode 100644 cfg-if/LICENSE-MIT create mode 100644 cfg-if/README.md create mode 100644 cfg-if/src/lib.rs create mode 100644 cfg-if/tests/xcrate.rs create mode 100644 clap/.cargo-checksum.json create mode 100644 clap/.pc/.quilt_patches create mode 100644 clap/.pc/.quilt_series create mode 100644 clap/.pc/.version create mode 100644 clap/.pc/applied-patches create mode 100644 clap/.pc/no-clippy.patch/.timestamp create mode 100644 clap/.pc/no-clippy.patch/Cargo.toml create mode 100644 clap/CHANGELOG.md create mode 100644 clap/CONTRIBUTORS.md create mode 100644 clap/Cargo.toml create mode 100644 clap/LICENSE-MIT create mode 100644 clap/README.md create mode 100644 clap/SPONSORS.md create mode 100644 clap/clap-test.rs create mode 100644 clap/debian/patches/no-clippy.patch create mode 100644 clap/debian/patches/no-clippy.patch~ create mode 100644 clap/debian/patches/series create mode 100644 clap/index.html create mode 100644 clap/justfile create mode 100644 clap/rustfmt.toml create mode 100644 clap/src/app/help.rs create mode 100644 clap/src/app/meta.rs create mode 100644 clap/src/app/mod.rs create mode 100644 clap/src/app/parser.rs create mode 100644 clap/src/app/settings.rs create mode 100644 clap/src/app/usage.rs create mode 100644 clap/src/app/validator.rs create mode 100644 clap/src/args/any_arg.rs create mode 100644 clap/src/args/arg.rs create mode 100644 clap/src/args/arg_builder/base.rs create mode 100644 clap/src/args/arg_builder/flag.rs create mode 100644 clap/src/args/arg_builder/mod.rs create mode 100644 clap/src/args/arg_builder/option.rs create mode 100644 clap/src/args/arg_builder/positional.rs create mode 100644 clap/src/args/arg_builder/switched.rs create mode 100644 clap/src/args/arg_builder/valued.rs create mode 100644 clap/src/args/arg_matcher.rs create mode 100644 clap/src/args/arg_matches.rs create mode 100644 clap/src/args/group.rs create mode 100644 clap/src/args/macros.rs create mode 100644 clap/src/args/matched_arg.rs create mode 100644 clap/src/args/mod.rs create mode 100644 clap/src/args/settings.rs create mode 100644 clap/src/args/subcommand.rs create mode 100644 clap/src/completions/bash.rs create mode 100644 clap/src/completions/elvish.rs create mode 100644 clap/src/completions/fish.rs create mode 100644 clap/src/completions/macros.rs create mode 100644 clap/src/completions/mod.rs create mode 100644 clap/src/completions/powershell.rs create mode 100644 clap/src/completions/shell.rs create mode 100644 clap/src/completions/zsh.rs create mode 100644 clap/src/errors.rs create mode 100644 clap/src/fmt.rs create mode 100644 clap/src/lib.rs create mode 100644 clap/src/macros.rs create mode 100644 clap/src/map.rs create mode 100644 clap/src/osstringext.rs create mode 100644 clap/src/strext.rs create mode 100644 clap/src/suggestions.rs create mode 100644 clap/src/usage_parser.rs create mode 100644 cloudabi/.cargo-checksum.json create mode 100644 cloudabi/Cargo.toml create mode 100644 cloudabi/bitflags.rs create mode 100644 cloudabi/cloudabi.rs create mode 100644 commoncrypto-sys/.cargo-checksum.json create mode 100644 commoncrypto-sys/.pc/.quilt_patches create mode 100644 
commoncrypto-sys/.pc/.quilt_series create mode 100644 commoncrypto-sys/.pc/.version create mode 100644 commoncrypto-sys/.pc/applied-patches create mode 100644 commoncrypto-sys/.pc/no-clippy.patch/.timestamp create mode 100644 commoncrypto-sys/.pc/no-clippy.patch/Cargo.toml create mode 100644 commoncrypto-sys/Cargo.toml create mode 100644 commoncrypto-sys/debian/patches/no-clippy.patch create mode 100644 commoncrypto-sys/debian/patches/series create mode 100644 commoncrypto-sys/src/lib.rs create mode 100644 commoncrypto-sys/tests/hash.rs create mode 100644 commoncrypto-sys/tests/pbkdf2.rs create mode 100644 commoncrypto/.cargo-checksum.json create mode 100644 commoncrypto/.pc/.quilt_patches create mode 100644 commoncrypto/.pc/.quilt_series create mode 100644 commoncrypto/.pc/.version create mode 100644 commoncrypto/.pc/applied-patches create mode 100644 commoncrypto/.pc/no-clippy.patch/.timestamp create mode 100644 commoncrypto/.pc/no-clippy.patch/Cargo.toml create mode 100644 commoncrypto/Cargo.toml create mode 100644 commoncrypto/debian/patches/no-clippy.patch create mode 100644 commoncrypto/debian/patches/series create mode 100644 commoncrypto/src/hash.rs create mode 100644 commoncrypto/src/lib.rs create mode 100644 commoncrypto/src/pbkdf2.rs create mode 100644 commoncrypto/tests/hash.rs create mode 100644 commoncrypto/tests/pbkdf2.rs create mode 100644 core-foundation-sys/.cargo-checksum.json create mode 100644 core-foundation-sys/Cargo.toml create mode 100644 core-foundation-sys/LICENSE-APACHE create mode 100644 core-foundation-sys/LICENSE-MIT create mode 100644 core-foundation-sys/build.rs create mode 100644 core-foundation-sys/src/array.rs create mode 100644 core-foundation-sys/src/attributed_string.rs create mode 100644 core-foundation-sys/src/base.rs create mode 100644 core-foundation-sys/src/bundle.rs create mode 100644 core-foundation-sys/src/data.rs create mode 100644 core-foundation-sys/src/date.rs create mode 100644 core-foundation-sys/src/dictionary.rs create mode 100644 core-foundation-sys/src/error.rs create mode 100644 core-foundation-sys/src/filedescriptor.rs create mode 100644 core-foundation-sys/src/lib.rs create mode 100644 core-foundation-sys/src/messageport.rs create mode 100644 core-foundation-sys/src/number.rs create mode 100644 core-foundation-sys/src/propertylist.rs create mode 100644 core-foundation-sys/src/runloop.rs create mode 100644 core-foundation-sys/src/set.rs create mode 100644 core-foundation-sys/src/string.rs create mode 100644 core-foundation-sys/src/timezone.rs create mode 100644 core-foundation-sys/src/url.rs create mode 100644 core-foundation-sys/src/uuid.rs create mode 100644 core-foundation/.cargo-checksum.json create mode 100644 core-foundation/Cargo.toml create mode 100644 core-foundation/LICENSE-APACHE create mode 100644 core-foundation/LICENSE-MIT create mode 100644 core-foundation/src/array.rs create mode 100644 core-foundation/src/attributed_string.rs create mode 100644 core-foundation/src/base.rs create mode 100644 core-foundation/src/boolean.rs create mode 100644 core-foundation/src/bundle.rs create mode 100644 core-foundation/src/data.rs create mode 100644 core-foundation/src/date.rs create mode 100644 core-foundation/src/dictionary.rs create mode 100644 core-foundation/src/error.rs create mode 100644 core-foundation/src/filedescriptor.rs create mode 100644 core-foundation/src/lib.rs create mode 100644 core-foundation/src/number.rs create mode 100644 core-foundation/src/propertylist.rs create mode 100644 core-foundation/src/runloop.rs 
create mode 100644 core-foundation/src/set.rs create mode 100644 core-foundation/src/string.rs create mode 100644 core-foundation/src/timezone.rs create mode 100644 core-foundation/src/url.rs create mode 100644 core-foundation/src/uuid.rs create mode 100644 core-foundation/tests/use_macro_outside_crate.rs create mode 100644 crossbeam-channel/.cargo-checksum.json create mode 100644 crossbeam-channel/CHANGELOG.md create mode 100644 crossbeam-channel/Cargo.toml create mode 100644 crossbeam-channel/LICENSE-APACHE create mode 100644 crossbeam-channel/LICENSE-MIT create mode 100644 crossbeam-channel/README.md create mode 100644 crossbeam-channel/examples/csp.rs create mode 100644 crossbeam-channel/examples/fibonacci.rs create mode 100644 crossbeam-channel/examples/mpsc.rs create mode 100644 crossbeam-channel/src/flavors/after.rs create mode 100644 crossbeam-channel/src/flavors/array.rs create mode 100644 crossbeam-channel/src/flavors/list.rs create mode 100644 crossbeam-channel/src/flavors/mod.rs create mode 100644 crossbeam-channel/src/flavors/tick.rs create mode 100644 crossbeam-channel/src/flavors/zero.rs create mode 100644 crossbeam-channel/src/internal/channel.rs create mode 100644 crossbeam-channel/src/internal/context.rs create mode 100644 crossbeam-channel/src/internal/mod.rs create mode 100644 crossbeam-channel/src/internal/select.rs create mode 100644 crossbeam-channel/src/internal/utils.rs create mode 100644 crossbeam-channel/src/internal/waker.rs create mode 100644 crossbeam-channel/src/lib.rs create mode 100644 crossbeam-channel/tests/after.rs create mode 100644 crossbeam-channel/tests/array.rs create mode 100644 crossbeam-channel/tests/cmp.rs create mode 100644 crossbeam-channel/tests/golang.rs create mode 100644 crossbeam-channel/tests/iter.rs create mode 100644 crossbeam-channel/tests/list.rs create mode 100644 crossbeam-channel/tests/mpsc.rs create mode 100644 crossbeam-channel/tests/parse.rs create mode 100644 crossbeam-channel/tests/select.rs create mode 100644 crossbeam-channel/tests/select_struct.rs create mode 100644 crossbeam-channel/tests/thread_locals.rs create mode 100644 crossbeam-channel/tests/tick.rs create mode 100644 crossbeam-channel/tests/wrappers/cloned.rs create mode 100644 crossbeam-channel/tests/wrappers/mod.rs create mode 100644 crossbeam-channel/tests/wrappers/normal.rs create mode 100644 crossbeam-channel/tests/wrappers/select.rs create mode 100644 crossbeam-channel/tests/wrappers/select_multi.rs create mode 100644 crossbeam-channel/tests/wrappers/select_spin.rs create mode 100644 crossbeam-channel/tests/zero.rs create mode 100644 crossbeam-epoch/.cargo-checksum.json create mode 100644 crossbeam-epoch/CHANGELOG.md create mode 100644 crossbeam-epoch/Cargo.toml create mode 100644 crossbeam-epoch/LICENSE-APACHE create mode 100644 crossbeam-epoch/LICENSE-MIT create mode 100644 crossbeam-epoch/README.md create mode 100644 crossbeam-epoch/benches/defer.rs create mode 100644 crossbeam-epoch/benches/flush.rs create mode 100644 crossbeam-epoch/benches/pin.rs create mode 100644 crossbeam-epoch/examples/sanitize.rs create mode 100644 crossbeam-epoch/src/atomic.rs create mode 100644 crossbeam-epoch/src/collector.rs create mode 100644 crossbeam-epoch/src/default.rs create mode 100644 crossbeam-epoch/src/deferred.rs create mode 100644 crossbeam-epoch/src/epoch.rs create mode 100644 crossbeam-epoch/src/guard.rs create mode 100644 crossbeam-epoch/src/internal.rs create mode 100644 crossbeam-epoch/src/lib.rs create mode 100644 crossbeam-epoch/src/sync/list.rs create mode 
100644 crossbeam-epoch/src/sync/mod.rs create mode 100644 crossbeam-epoch/src/sync/queue.rs create mode 100644 crossbeam-utils/.cargo-checksum.json create mode 100644 crossbeam-utils/CHANGELOG.md create mode 100644 crossbeam-utils/Cargo.toml create mode 100644 crossbeam-utils/LICENSE-APACHE create mode 100644 crossbeam-utils/LICENSE-MIT create mode 100644 crossbeam-utils/README.md create mode 100644 crossbeam-utils/src/cache_padded.rs create mode 100644 crossbeam-utils/src/consume.rs create mode 100644 crossbeam-utils/src/lib.rs create mode 100644 crossbeam-utils/src/thread.rs create mode 100644 crypto-hash/.cargo-checksum.json create mode 100644 crypto-hash/CONTRIBUTING.md create mode 100644 crypto-hash/Cargo.toml create mode 100644 crypto-hash/LICENSE create mode 100644 crypto-hash/Makefile create mode 100644 crypto-hash/NEWS.md create mode 100644 crypto-hash/README.md create mode 100644 crypto-hash/src/imp/commoncrypto.rs create mode 100644 crypto-hash/src/imp/cryptoapi.rs create mode 100644 crypto-hash/src/imp/openssl.rs create mode 100644 crypto-hash/src/lib.rs create mode 100644 crypto-hash/src/test.rs create mode 100644 curl-sys/.cargo-checksum.json create mode 100644 curl-sys/Cargo.toml create mode 100644 curl-sys/LICENSE create mode 100644 curl-sys/build.rs create mode 100644 curl-sys/lib.rs create mode 100644 curl/.cargo-checksum.json create mode 100644 curl/.pc/.quilt_patches create mode 100644 curl/.pc/.quilt_series create mode 100644 curl/.pc/.version create mode 100644 curl/.pc/applied-patches create mode 100644 curl/.pc/winapi3.patch/.timestamp create mode 100644 curl/.pc/winapi3.patch/Cargo.toml create mode 100644 curl/.pc/winapi3.patch/src/easy/windows.rs create mode 100644 curl/.pc/winapi3.patch/src/lib.rs create mode 100644 curl/.pc/winapi3.patch/src/multi.rs create mode 100644 curl/Cargo.toml create mode 100644 curl/LICENSE create mode 100644 curl/README.md create mode 100644 curl/appveyor.yml create mode 100644 curl/ci/Dockerfile-linux32 create mode 100644 curl/ci/Dockerfile-linux64 create mode 100644 curl/ci/Dockerfile-linux64-curl create mode 100644 curl/ci/Dockerfile-mingw create mode 100644 curl/ci/Dockerfile-musl create mode 100644 curl/ci/run.sh create mode 100644 curl/debian/patches/series create mode 100644 curl/debian/patches/winapi3.patch create mode 100644 curl/debian/patches/winapi3.patch~ create mode 100644 curl/src/easy/form.rs create mode 100644 curl/src/easy/handle.rs create mode 100644 curl/src/easy/handler.rs create mode 100644 curl/src/easy/list.rs create mode 100644 curl/src/easy/mod.rs create mode 100644 curl/src/easy/windows.rs create mode 100644 curl/src/error.rs create mode 100644 curl/src/lib.rs create mode 100644 curl/src/multi.rs create mode 100644 curl/src/panic.rs create mode 100644 curl/src/version.rs create mode 100644 curl/tests/atexit.rs create mode 100644 curl/tests/easy.rs create mode 100644 curl/tests/formdata create mode 100644 curl/tests/multi.rs create mode 100644 curl/tests/post.rs create mode 100644 curl/tests/server/mod.rs create mode 100644 env_logger/.cargo-checksum.json create mode 100644 env_logger/Cargo.toml create mode 100644 env_logger/LICENSE-APACHE create mode 100644 env_logger/LICENSE-MIT create mode 100644 env_logger/README.md create mode 100644 env_logger/examples/custom_default_format.rs create mode 100644 env_logger/examples/custom_format.rs create mode 100644 env_logger/examples/custom_logger.rs create mode 100644 env_logger/examples/default.rs create mode 100644 env_logger/examples/direct_logger.rs create mode 
100644 env_logger/src/filter/mod.rs create mode 100644 env_logger/src/filter/regex.rs create mode 100644 env_logger/src/filter/string.rs create mode 100644 env_logger/src/fmt.rs create mode 100644 env_logger/src/lib.rs create mode 100644 env_logger/tests/log-in-log.rs create mode 100644 env_logger/tests/regexp_filter.rs create mode 100644 failure/.cargo-checksum.json create mode 100644 failure/CODE_OF_CONDUCT.md create mode 100644 failure/Cargo.toml create mode 100644 failure/LICENSE-APACHE create mode 100644 failure/LICENSE-MIT create mode 100644 failure/Makefile create mode 100644 failure/README.md create mode 100644 failure/RELEASES.md create mode 100644 failure/book/src/SUMMARY.md create mode 100644 failure/book/src/bail-and-ensure.md create mode 100644 failure/book/src/custom-fail.md create mode 100644 failure/book/src/derive-fail.md create mode 100644 failure/book/src/error-errorkind.md create mode 100644 failure/book/src/error-msg.md create mode 100644 failure/book/src/error.md create mode 100644 failure/book/src/fail.md create mode 100644 failure/book/src/guidance.md create mode 100644 failure/book/src/howto.md create mode 100644 failure/book/src/intro.md create mode 100644 failure/book/src/string-custom-error.md create mode 100644 failure/book/src/use-error.md create mode 100755 failure/build-docs.sh create mode 100644 failure/examples/bail_ensure.rs create mode 100644 failure/examples/error_as_cause.rs create mode 100644 failure/examples/simple.rs create mode 100644 failure/examples/string_custom_error_pattern.rs create mode 100644 failure/src/as_fail.rs create mode 100644 failure/src/backtrace/internal.rs create mode 100644 failure/src/backtrace/mod.rs create mode 100644 failure/src/box_std.rs create mode 100644 failure/src/compat.rs create mode 100644 failure/src/context.rs create mode 100644 failure/src/error/error_impl.rs create mode 100644 failure/src/error/error_impl_small.rs create mode 100644 failure/src/error/mod.rs create mode 100644 failure/src/error_message.rs create mode 100644 failure/src/lib.rs create mode 100644 failure/src/macros.rs create mode 100644 failure/src/result_ext.rs create mode 100644 failure/src/small_error.rs create mode 100644 failure/src/sync_failure.rs create mode 100644 failure/travis.sh create mode 100644 failure_derive/.cargo-checksum.json create mode 100644 failure_derive/Cargo.toml create mode 100644 failure_derive/build.rs create mode 100644 failure_derive/src/lib.rs create mode 100644 failure_derive/tests/backtrace.rs create mode 100644 failure_derive/tests/custom_type_bounds.rs create mode 100644 failure_derive/tests/no_derive_display.rs create mode 100644 failure_derive/tests/tests.rs create mode 100644 failure_derive/tests/wraps.rs create mode 100644 filetime/.cargo-checksum.json create mode 100644 filetime/Cargo.toml create mode 100644 filetime/LICENSE-APACHE create mode 100644 filetime/LICENSE-MIT create mode 100644 filetime/README.md create mode 100644 filetime/appveyor.yml create mode 100644 filetime/src/lib.rs create mode 100644 filetime/src/redox.rs create mode 100644 filetime/src/unix/linux.rs create mode 100644 filetime/src/unix/mod.rs create mode 100644 filetime/src/unix/utimensat.rs create mode 100644 filetime/src/unix/utimes.rs create mode 100644 filetime/src/windows.rs create mode 100644 flate2/.cargo-checksum.json create mode 100644 flate2/.pc/.quilt_patches create mode 100644 flate2/.pc/.quilt_series create mode 100644 flate2/.pc/.version create mode 100644 flate2/.pc/applied-patches create mode 100644 
flate2/.pc/disable-miniz.patch/.timestamp create mode 100644 flate2/.pc/disable-miniz.patch/Cargo.toml create mode 100644 flate2/.pc/drop-deps-for-cargo.patch/.timestamp create mode 100644 flate2/.pc/drop-deps-for-cargo.patch/Cargo.toml create mode 100644 flate2/Cargo.toml create mode 100644 flate2/LICENSE-APACHE create mode 100644 flate2/LICENSE-MIT create mode 100644 flate2/README.md create mode 100644 flate2/appveyor.yml create mode 100644 flate2/debian/patches/disable-miniz.patch create mode 100644 flate2/debian/patches/drop-deps-for-cargo.patch create mode 100644 flate2/debian/patches/drop-deps-for-cargo.patch~ create mode 100644 flate2/debian/patches/series create mode 100644 flate2/examples/deflatedecoder-bufread.rs create mode 100644 flate2/examples/deflatedecoder-read.rs create mode 100644 flate2/examples/deflatedecoder-write.rs create mode 100644 flate2/examples/deflateencoder-bufread.rs create mode 100644 flate2/examples/deflateencoder-read.rs create mode 100644 flate2/examples/deflateencoder-write.rs create mode 100644 flate2/examples/gzbuilder.rs create mode 100644 flate2/examples/gzdecoder-bufread.rs create mode 100644 flate2/examples/gzdecoder-read.rs create mode 100644 flate2/examples/gzdecoder-write.rs create mode 100644 flate2/examples/gzencoder-bufread.rs create mode 100644 flate2/examples/gzencoder-read.rs create mode 100644 flate2/examples/gzencoder-write.rs create mode 100644 flate2/examples/gzmultidecoder-bufread.rs create mode 100644 flate2/examples/gzmultidecoder-read.rs create mode 100644 flate2/examples/hello_world.txt create mode 100644 flate2/examples/zlibdecoder-bufread.rs create mode 100644 flate2/examples/zlibdecoder-read.rs create mode 100644 flate2/examples/zlibdecoder-write.rs create mode 100644 flate2/examples/zlibencoder-bufread.rs create mode 100644 flate2/examples/zlibencoder-read.rs create mode 100644 flate2/examples/zlibencoder-write.rs create mode 100644 flate2/src/bufreader.rs create mode 100644 flate2/src/crc.rs create mode 100644 flate2/src/deflate/bufread.rs create mode 100644 flate2/src/deflate/mod.rs create mode 100644 flate2/src/deflate/read.rs create mode 100644 flate2/src/deflate/write.rs create mode 100644 flate2/src/ffi.rs create mode 100644 flate2/src/gz/bufread.rs create mode 100644 flate2/src/gz/mod.rs create mode 100644 flate2/src/gz/read.rs create mode 100644 flate2/src/gz/write.rs create mode 100644 flate2/src/lib.rs create mode 100644 flate2/src/mem.rs create mode 100644 flate2/src/zio.rs create mode 100644 flate2/src/zlib/bufread.rs create mode 100644 flate2/src/zlib/mod.rs create mode 100644 flate2/src/zlib/read.rs create mode 100644 flate2/src/zlib/write.rs create mode 100644 flate2/tests/corrupt-file.gz create mode 100644 flate2/tests/early-flush.rs create mode 100644 flate2/tests/empty-read.rs create mode 100644 flate2/tests/good-file.gz create mode 100644 flate2/tests/good-file.txt create mode 100644 flate2/tests/gunzip.rs create mode 100644 flate2/tests/multi.gz create mode 100644 flate2/tests/multi.txt create mode 100644 flate2/tests/tokio.rs create mode 100644 flate2/tests/zero-write.rs create mode 100644 fnv/.cargo-checksum.json create mode 100644 fnv/Cargo.toml create mode 100644 fnv/LICENSE-APACHE create mode 100644 fnv/LICENSE-MIT create mode 100644 fnv/README.md create mode 100644 fnv/lib.rs create mode 100644 foreign-types-shared/.cargo-checksum.json create mode 100644 foreign-types-shared/Cargo.toml create mode 100644 foreign-types-shared/LICENSE-APACHE create mode 100644 foreign-types-shared/LICENSE-MIT create 
mode 100644 foreign-types-shared/src/lib.rs create mode 100644 foreign-types/.cargo-checksum.json create mode 100644 foreign-types/Cargo.toml create mode 100644 foreign-types/LICENSE-APACHE create mode 100644 foreign-types/LICENSE-MIT create mode 100644 foreign-types/README.md create mode 100644 foreign-types/src/lib.rs create mode 100644 fs2/.cargo-checksum.json create mode 100644 fs2/Cargo.toml create mode 100644 fs2/LICENSE-APACHE create mode 100644 fs2/LICENSE-MIT create mode 100644 fs2/README.md create mode 100644 fs2/src/lib.rs create mode 100644 fs2/src/unix.rs create mode 100644 fs2/src/windows.rs create mode 100644 fwdansi/.cargo-checksum.json create mode 100644 fwdansi/Cargo.toml create mode 100644 fwdansi/appveyor.yml create mode 100644 fwdansi/examples/run-rustc.rs create mode 100644 fwdansi/src/lib.rs create mode 100644 fwdansi/tests/tests.proptest-regressions create mode 100644 fwdansi/tests/tests.rs create mode 100644 git2-curl/.cargo-checksum.json create mode 100644 git2-curl/Cargo.toml create mode 100644 git2-curl/LICENSE-APACHE create mode 100644 git2-curl/LICENSE-MIT create mode 100644 git2-curl/src/lib.rs create mode 100644 git2-curl/tests/all.rs create mode 100644 git2/.cargo-checksum.json create mode 100644 git2/Cargo.toml create mode 100644 git2/LICENSE-APACHE create mode 100644 git2/LICENSE-MIT create mode 100644 git2/README.md create mode 100644 git2/appveyor.yml create mode 100644 git2/examples/add.rs create mode 100644 git2/examples/blame.rs create mode 100644 git2/examples/cat-file.rs create mode 100644 git2/examples/clone.rs create mode 100644 git2/examples/diff.rs create mode 100644 git2/examples/fetch.rs create mode 100644 git2/examples/init.rs create mode 100644 git2/examples/log.rs create mode 100644 git2/examples/ls-remote.rs create mode 100644 git2/examples/rev-list.rs create mode 100644 git2/examples/rev-parse.rs create mode 100644 git2/examples/status.rs create mode 100644 git2/examples/tag.rs create mode 100644 git2/src/blame.rs create mode 100644 git2/src/blob.rs create mode 100644 git2/src/branch.rs create mode 100644 git2/src/buf.rs create mode 100644 git2/src/build.rs create mode 100644 git2/src/call.rs create mode 100644 git2/src/cert.rs create mode 100644 git2/src/commit.rs create mode 100644 git2/src/config.rs create mode 100644 git2/src/cred.rs create mode 100644 git2/src/describe.rs create mode 100644 git2/src/diff.rs create mode 100644 git2/src/error.rs create mode 100644 git2/src/index.rs create mode 100644 git2/src/lib.rs create mode 100644 git2/src/merge.rs create mode 100644 git2/src/message.rs create mode 100644 git2/src/note.rs create mode 100644 git2/src/object.rs create mode 100644 git2/src/odb.rs create mode 100644 git2/src/oid.rs create mode 100644 git2/src/oid_array.rs create mode 100644 git2/src/packbuilder.rs create mode 100644 git2/src/panic.rs create mode 100644 git2/src/patch.rs create mode 100644 git2/src/pathspec.rs create mode 100644 git2/src/proxy_options.rs create mode 100644 git2/src/reference.rs create mode 100644 git2/src/reflog.rs create mode 100644 git2/src/refspec.rs create mode 100644 git2/src/remote.rs create mode 100644 git2/src/remote_callbacks.rs create mode 100644 git2/src/repo.rs create mode 100644 git2/src/revspec.rs create mode 100644 git2/src/revwalk.rs create mode 100644 git2/src/signature.rs create mode 100644 git2/src/stash.rs create mode 100644 git2/src/status.rs create mode 100644 git2/src/string_array.rs create mode 100644 git2/src/submodule.rs create mode 100644 git2/src/tag.rs create mode 100644 
git2/src/test.rs create mode 100644 git2/src/time.rs create mode 100644 git2/src/transport.rs create mode 100644 git2/src/tree.rs create mode 100644 git2/src/treebuilder.rs create mode 100644 git2/src/util.rs create mode 100644 glob/.cargo-checksum.json create mode 100644 glob/Cargo.toml create mode 100644 glob/LICENSE-APACHE create mode 100644 glob/LICENSE-MIT create mode 100644 glob/README.md create mode 100644 glob/src/lib.rs create mode 100644 glob/tests/glob-std.rs create mode 100644 globset/.cargo-checksum.json create mode 100644 globset/COPYING create mode 100644 globset/Cargo.toml create mode 100644 globset/LICENSE-MIT create mode 100644 globset/README.md create mode 100644 globset/UNLICENSE create mode 100644 globset/benches/bench.rs create mode 100644 globset/src/glob.rs create mode 100644 globset/src/lib.rs create mode 100644 globset/src/pathutil.rs create mode 100644 hex/.cargo-checksum.json create mode 100644 hex/Cargo.toml create mode 100644 hex/Dockerfile create mode 100644 hex/LICENSE-APACHE create mode 100644 hex/LICENSE-MIT create mode 100644 hex/README.md create mode 100644 hex/src/lib.rs create mode 100644 home/.cargo-checksum.json create mode 100644 home/Cargo.toml create mode 100644 home/README.md create mode 100644 home/src/lib.rs create mode 100644 humantime/.cargo-checksum.json create mode 100644 humantime/Cargo.toml create mode 100644 humantime/LICENSE-APACHE create mode 100644 humantime/LICENSE-MIT create mode 100644 humantime/README.md create mode 100644 humantime/benches/datetime_format.rs create mode 100644 humantime/benches/datetime_parse.rs create mode 100644 humantime/bulk.yaml create mode 100644 humantime/src/date.rs create mode 100644 humantime/src/duration.rs create mode 100644 humantime/src/lib.rs create mode 100644 humantime/src/wrapper.rs create mode 100644 humantime/vagga.yaml create mode 100644 idna/.cargo-checksum.json create mode 100644 idna/Cargo.toml create mode 100644 idna/LICENSE-APACHE create mode 100644 idna/LICENSE-MIT create mode 100644 idna/src/IdnaMappingTable.txt create mode 100644 idna/src/lib.rs create mode 100644 idna/src/make_uts46_mapping_table.py create mode 100644 idna/src/punycode.rs create mode 100644 idna/src/uts46.rs create mode 100644 idna/src/uts46_mapping_table.rs create mode 100644 idna/tests/IdnaTest.txt create mode 100644 idna/tests/punycode.rs create mode 100644 idna/tests/punycode_tests.json create mode 100644 idna/tests/tests.rs create mode 100644 idna/tests/unit.rs create mode 100644 idna/tests/uts46.rs create mode 100644 ignore/.cargo-checksum.json create mode 100644 ignore/COPYING create mode 100644 ignore/Cargo.toml create mode 100644 ignore/LICENSE-MIT create mode 100644 ignore/README.md create mode 100644 ignore/UNLICENSE create mode 100644 ignore/examples/walk.rs create mode 100644 ignore/src/dir.rs create mode 100644 ignore/src/gitignore.rs create mode 100644 ignore/src/lib.rs create mode 100644 ignore/src/overrides.rs create mode 100644 ignore/src/pathutil.rs create mode 100644 ignore/src/types.rs create mode 100644 ignore/src/walk.rs create mode 100644 ignore/tests/gitignore_matched_path_or_any_parents_tests.gitignore create mode 100644 ignore/tests/gitignore_matched_path_or_any_parents_tests.rs create mode 100644 itoa/.cargo-checksum.json create mode 100644 itoa/Cargo.toml create mode 100644 itoa/LICENSE-APACHE create mode 100644 itoa/LICENSE-MIT create mode 100644 itoa/README.md create mode 100644 itoa/benches/bench.rs create mode 100644 itoa/src/lib.rs create mode 100644 itoa/src/udiv128.rs create mode 
100644 itoa/tests/test.rs create mode 100644 jobserver/.cargo-checksum.json create mode 100644 jobserver/.pc/.quilt_patches create mode 100644 jobserver/.pc/.quilt_series create mode 100644 jobserver/.pc/.version create mode 100644 jobserver/.pc/applied-patches create mode 100644 jobserver/.pc/relax-dep-version.patch/.timestamp create mode 100644 jobserver/.pc/relax-dep-version.patch/Cargo.toml create mode 100644 jobserver/Cargo.toml create mode 100644 jobserver/LICENSE-APACHE create mode 100644 jobserver/LICENSE-MIT create mode 100644 jobserver/README.md create mode 100644 jobserver/debian/patches/relax-dep-version.patch create mode 100644 jobserver/debian/patches/relax-dep-version.patch~ create mode 100644 jobserver/debian/patches/series create mode 100644 jobserver/src/lib.rs create mode 100644 jobserver/tests/client-of-myself.rs create mode 100644 jobserver/tests/client.rs create mode 100644 jobserver/tests/helper.rs create mode 100644 jobserver/tests/make-as-a-client.rs create mode 100644 jobserver/tests/server.rs create mode 100644 lazy_static/.cargo-checksum.json create mode 100644 lazy_static/Cargo.toml create mode 100644 lazy_static/LICENSE-APACHE create mode 100644 lazy_static/LICENSE-MIT create mode 100644 lazy_static/README.md create mode 100644 lazy_static/appveyor.yml create mode 100644 lazy_static/build.rs create mode 100644 lazy_static/src/core_lazy.rs create mode 100644 lazy_static/src/heap_lazy.rs create mode 100644 lazy_static/src/inline_lazy.rs create mode 100644 lazy_static/src/lib.rs create mode 100644 lazy_static/tests/no_std.rs create mode 100644 lazy_static/tests/test.rs create mode 100644 lazycell/.cargo-checksum.json create mode 100644 lazycell/.pc/.quilt_patches create mode 100644 lazycell/.pc/.quilt_series create mode 100644 lazycell/.pc/.version create mode 100644 lazycell/.pc/applied-patches create mode 100644 lazycell/.pc/no-clippy.patch/.timestamp create mode 100644 lazycell/.pc/no-clippy.patch/Cargo.toml create mode 100644 lazycell/CHANGELOG.md create mode 100644 lazycell/Cargo.toml create mode 100644 lazycell/LICENSE-APACHE create mode 100644 lazycell/LICENSE-MIT create mode 100644 lazycell/README.md create mode 100644 lazycell/debian/patches/no-clippy.patch create mode 100644 lazycell/debian/patches/no-clippy.patch~ create mode 100644 lazycell/debian/patches/series create mode 100644 lazycell/src/lib.rs create mode 100644 libc/.cargo-checksum.json create mode 100644 libc/Cargo.toml create mode 100644 libc/LICENSE-APACHE create mode 100644 libc/LICENSE-MIT create mode 100644 libc/README.md create mode 100644 libc/appveyor.yml create mode 100644 libc/ci/README.md create mode 100644 libc/ci/android-install-ndk.sh create mode 100644 libc/ci/android-install-sdk.sh create mode 100644 libc/ci/android-sysimage.sh create mode 100644 libc/ci/docker/aarch64-linux-android/Dockerfile create mode 100644 libc/ci/docker/aarch64-unknown-linux-gnu/Dockerfile create mode 100644 libc/ci/docker/aarch64-unknown-linux-musl/Dockerfile create mode 100644 libc/ci/docker/arm-linux-androideabi/Dockerfile create mode 100644 libc/ci/docker/arm-unknown-linux-gnueabihf/Dockerfile create mode 100644 libc/ci/docker/arm-unknown-linux-musleabihf/Dockerfile create mode 100644 libc/ci/docker/asmjs-unknown-emscripten/Dockerfile create mode 100644 libc/ci/docker/i686-linux-android/Dockerfile create mode 100644 libc/ci/docker/i686-unknown-linux-gnu/Dockerfile create mode 100644 libc/ci/docker/i686-unknown-linux-musl/Dockerfile create mode 100644 libc/ci/docker/mips-unknown-linux-gnu/Dockerfile 
create mode 100644 libc/ci/docker/mips-unknown-linux-musl/Dockerfile create mode 100644 libc/ci/docker/mips64-unknown-linux-gnuabi64/Dockerfile create mode 100644 libc/ci/docker/mips64el-unknown-linux-gnuabi64/Dockerfile create mode 100644 libc/ci/docker/mipsel-unknown-linux-musl/Dockerfile create mode 100644 libc/ci/docker/powerpc-unknown-linux-gnu/Dockerfile create mode 100644 libc/ci/docker/powerpc64-unknown-linux-gnu/Dockerfile create mode 100644 libc/ci/docker/powerpc64le-unknown-linux-gnu/Dockerfile create mode 100644 libc/ci/docker/s390x-unknown-linux-gnu/Dockerfile create mode 100644 libc/ci/docker/sparc64-unknown-linux-gnu/Dockerfile create mode 100644 libc/ci/docker/wasm32-unknown-emscripten/Dockerfile create mode 100755 libc/ci/docker/wasm32-unknown-emscripten/node-wrapper.sh create mode 100644 libc/ci/docker/x86_64-linux-android/Dockerfile create mode 100644 libc/ci/docker/x86_64-rumprun-netbsd/Dockerfile create mode 100644 libc/ci/docker/x86_64-rumprun-netbsd/runtest.rs create mode 100644 libc/ci/docker/x86_64-unknown-freebsd/Dockerfile create mode 100644 libc/ci/docker/x86_64-unknown-linux-gnu/Dockerfile create mode 100644 libc/ci/docker/x86_64-unknown-linux-gnux32/Dockerfile create mode 100644 libc/ci/docker/x86_64-unknown-linux-musl/Dockerfile create mode 100644 libc/ci/dox.sh create mode 100755 libc/ci/emscripten-entry.sh create mode 100644 libc/ci/emscripten.sh create mode 100644 libc/ci/ios/deploy_and_run_on_ios_simulator.rs create mode 100644 libc/ci/landing-page-footer.html create mode 100644 libc/ci/landing-page-head.html create mode 100644 libc/ci/linux-s390x.sh create mode 100644 libc/ci/linux-sparc64.sh create mode 100755 libc/ci/run-docker.sh create mode 100644 libc/ci/run-qemu.sh create mode 100755 libc/ci/run.sh create mode 100644 libc/ci/runtest-android.rs create mode 100644 libc/ci/style.rs create mode 100755 libc/ci/test-runner-linux create mode 100644 libc/src/cloudabi/aarch64.rs create mode 100644 libc/src/cloudabi/arm.rs create mode 100644 libc/src/cloudabi/mod.rs create mode 100644 libc/src/cloudabi/x86.rs create mode 100644 libc/src/cloudabi/x86_64.rs create mode 100644 libc/src/dox.rs create mode 100644 libc/src/fuchsia/aarch64.rs create mode 100644 libc/src/fuchsia/mod.rs create mode 100644 libc/src/fuchsia/powerpc64.rs create mode 100644 libc/src/fuchsia/x86_64.rs create mode 100644 libc/src/lib.rs create mode 100644 libc/src/macros.rs create mode 100644 libc/src/redox/mod.rs create mode 100644 libc/src/redox/net.rs create mode 100644 libc/src/unix/bsd/apple/b32.rs create mode 100644 libc/src/unix/bsd/apple/b64.rs create mode 100644 libc/src/unix/bsd/apple/mod.rs create mode 100644 libc/src/unix/bsd/freebsdlike/dragonfly/mod.rs create mode 100644 libc/src/unix/bsd/freebsdlike/freebsd/aarch64.rs create mode 100644 libc/src/unix/bsd/freebsdlike/freebsd/mod.rs create mode 100644 libc/src/unix/bsd/freebsdlike/freebsd/x86.rs create mode 100644 libc/src/unix/bsd/freebsdlike/freebsd/x86_64.rs create mode 100644 libc/src/unix/bsd/freebsdlike/mod.rs create mode 100644 libc/src/unix/bsd/mod.rs create mode 100644 libc/src/unix/bsd/netbsdlike/mod.rs create mode 100644 libc/src/unix/bsd/netbsdlike/netbsd/mod.rs create mode 100644 libc/src/unix/bsd/netbsdlike/netbsd/other/b32/mod.rs create mode 100644 libc/src/unix/bsd/netbsdlike/netbsd/other/b64/mod.rs create mode 100644 libc/src/unix/bsd/netbsdlike/netbsd/other/mod.rs create mode 100644 libc/src/unix/bsd/netbsdlike/openbsdlike/bitrig/mod.rs create mode 100644 libc/src/unix/bsd/netbsdlike/openbsdlike/bitrig/x86.rs 
create mode 100644 libc/src/unix/bsd/netbsdlike/openbsdlike/bitrig/x86_64.rs create mode 100644 libc/src/unix/bsd/netbsdlike/openbsdlike/mod.rs create mode 100644 libc/src/unix/bsd/netbsdlike/openbsdlike/openbsd/aarch64.rs create mode 100644 libc/src/unix/bsd/netbsdlike/openbsdlike/openbsd/mod.rs create mode 100644 libc/src/unix/bsd/netbsdlike/openbsdlike/openbsd/x86.rs create mode 100644 libc/src/unix/bsd/netbsdlike/openbsdlike/openbsd/x86_64.rs create mode 100644 libc/src/unix/haiku/b32.rs create mode 100644 libc/src/unix/haiku/b64.rs create mode 100644 libc/src/unix/haiku/mod.rs create mode 100644 libc/src/unix/hermit/aarch64.rs create mode 100644 libc/src/unix/hermit/mod.rs create mode 100644 libc/src/unix/hermit/x86_64.rs create mode 100644 libc/src/unix/mod.rs create mode 100644 libc/src/unix/newlib/aarch64/mod.rs create mode 100644 libc/src/unix/newlib/arm/mod.rs create mode 100644 libc/src/unix/newlib/mod.rs create mode 100644 libc/src/unix/notbsd/android/b32/arm.rs create mode 100644 libc/src/unix/notbsd/android/b32/mod.rs create mode 100644 libc/src/unix/notbsd/android/b32/x86.rs create mode 100644 libc/src/unix/notbsd/android/b64/aarch64.rs create mode 100644 libc/src/unix/notbsd/android/b64/mod.rs create mode 100644 libc/src/unix/notbsd/android/b64/x86_64.rs create mode 100644 libc/src/unix/notbsd/android/mod.rs create mode 100644 libc/src/unix/notbsd/emscripten.rs create mode 100644 libc/src/unix/notbsd/linux/mips/mips32.rs create mode 100644 libc/src/unix/notbsd/linux/mips/mips64.rs create mode 100644 libc/src/unix/notbsd/linux/mips/mod.rs create mode 100644 libc/src/unix/notbsd/linux/mod.rs create mode 100644 libc/src/unix/notbsd/linux/musl/b32/arm.rs create mode 100644 libc/src/unix/notbsd/linux/musl/b32/mips.rs create mode 100644 libc/src/unix/notbsd/linux/musl/b32/mod.rs create mode 100644 libc/src/unix/notbsd/linux/musl/b32/powerpc.rs create mode 100644 libc/src/unix/notbsd/linux/musl/b32/x86.rs create mode 100644 libc/src/unix/notbsd/linux/musl/b64/aarch64.rs create mode 100644 libc/src/unix/notbsd/linux/musl/b64/mod.rs create mode 100644 libc/src/unix/notbsd/linux/musl/b64/powerpc64.rs create mode 100644 libc/src/unix/notbsd/linux/musl/b64/x86_64.rs create mode 100644 libc/src/unix/notbsd/linux/musl/mod.rs create mode 100644 libc/src/unix/notbsd/linux/other/b32/arm.rs create mode 100644 libc/src/unix/notbsd/linux/other/b32/mod.rs create mode 100644 libc/src/unix/notbsd/linux/other/b32/powerpc.rs create mode 100644 libc/src/unix/notbsd/linux/other/b32/x86.rs create mode 100644 libc/src/unix/notbsd/linux/other/b64/aarch64.rs create mode 100644 libc/src/unix/notbsd/linux/other/b64/mod.rs create mode 100644 libc/src/unix/notbsd/linux/other/b64/not_x32.rs create mode 100644 libc/src/unix/notbsd/linux/other/b64/powerpc64.rs create mode 100644 libc/src/unix/notbsd/linux/other/b64/sparc64.rs create mode 100644 libc/src/unix/notbsd/linux/other/b64/x32.rs create mode 100644 libc/src/unix/notbsd/linux/other/b64/x86_64.rs create mode 100644 libc/src/unix/notbsd/linux/other/mod.rs create mode 100644 libc/src/unix/notbsd/linux/s390x.rs create mode 100644 libc/src/unix/notbsd/mod.rs create mode 100644 libc/src/unix/solaris/mod.rs create mode 100644 libc/src/unix/uclibc/mips/mips32.rs create mode 100644 libc/src/unix/uclibc/mips/mips64.rs create mode 100644 libc/src/unix/uclibc/mips/mod.rs create mode 100644 libc/src/unix/uclibc/mod.rs create mode 100644 libc/src/unix/uclibc/x86_64/l4re.rs create mode 100644 libc/src/unix/uclibc/x86_64/mod.rs create mode 100644 
libc/src/unix/uclibc/x86_64/other.rs create mode 100644 libc/src/windows.rs create mode 100644 libgit2-sys/.cargo-checksum.json create mode 100644 libgit2-sys/.pc/.quilt_patches create mode 100644 libgit2-sys/.pc/.quilt_series create mode 100644 libgit2-sys/.pc/.version create mode 100644 libgit2-sys/.pc/applied-patches create mode 100644 libgit2-sys/.pc/no-special-snowflake-env.patch/.timestamp create mode 100644 libgit2-sys/.pc/no-special-snowflake-env.patch/build.rs create mode 100644 libgit2-sys/Cargo.toml create mode 100644 libgit2-sys/LICENSE-APACHE create mode 100644 libgit2-sys/LICENSE-MIT create mode 100644 libgit2-sys/build.rs create mode 100644 libgit2-sys/debian/patches/no-special-snowflake-env.patch create mode 100644 libgit2-sys/debian/patches/no-special-snowflake-env.patch~ create mode 100644 libgit2-sys/debian/patches/series create mode 100644 libgit2-sys/lib.rs create mode 100644 libssh2-sys/.cargo-checksum.json create mode 100644 libssh2-sys/.pc/.quilt_patches create mode 100644 libssh2-sys/.pc/.quilt_series create mode 100644 libssh2-sys/.pc/.version create mode 100644 libssh2-sys/.pc/applied-patches create mode 100644 libssh2-sys/.pc/no-special-snowflake-env.patch/.timestamp create mode 100644 libssh2-sys/.pc/no-special-snowflake-env.patch/build.rs create mode 100644 libssh2-sys/Cargo.toml create mode 100644 libssh2-sys/build.rs create mode 100644 libssh2-sys/debian/patches/no-special-snowflake-env.patch create mode 100644 libssh2-sys/debian/patches/no-special-snowflake-env.patch~ create mode 100644 libssh2-sys/debian/patches/series create mode 100644 libssh2-sys/lib.rs create mode 100644 libz-sys/.cargo-checksum.json create mode 100644 libz-sys/Cargo.toml create mode 100644 libz-sys/LICENSE-APACHE create mode 100644 libz-sys/LICENSE-MIT create mode 100644 libz-sys/README.md create mode 100644 libz-sys/appveyor.yml create mode 100644 libz-sys/build.rs create mode 100644 libz-sys/ci/Dockerfile create mode 100755 libz-sys/ci/run-docker.sh create mode 100644 libz-sys/src/lib.rs create mode 100644 libz-sys/src/smoke.c create mode 100644 lock_api/.cargo-checksum.json create mode 100644 lock_api/Cargo.toml create mode 100644 lock_api/LICENSE-APACHE create mode 100644 lock_api/LICENSE-MIT create mode 100644 lock_api/src/lib.rs create mode 100644 lock_api/src/mutex.rs create mode 100644 lock_api/src/remutex.rs create mode 100644 lock_api/src/rwlock.rs create mode 100644 log/.cargo-checksum.json create mode 100644 log/CHANGELOG.md create mode 100644 log/Cargo.toml create mode 100644 log/LICENSE-APACHE create mode 100644 log/LICENSE-MIT create mode 100644 log/README.md create mode 100644 log/appveyor.yml create mode 100644 log/src/lib.rs create mode 100644 log/src/macros.rs create mode 100644 log/src/serde.rs create mode 100644 log/tests/filters.rs create mode 100644 matches/.cargo-checksum.json create mode 100644 matches/Cargo.toml create mode 100644 matches/LICENSE create mode 100644 matches/lib.rs create mode 100644 matches/tests/macro_use_one.rs create mode 100644 memchr/.cargo-checksum.json create mode 100644 memchr/COPYING create mode 100644 memchr/Cargo.toml create mode 100644 memchr/LICENSE-MIT create mode 100644 memchr/README.md create mode 100644 memchr/UNLICENSE create mode 100644 memchr/build.rs create mode 100644 memchr/src/c.rs create mode 100644 memchr/src/fallback.rs create mode 100644 memchr/src/iter.rs create mode 100644 memchr/src/lib.rs create mode 100644 memchr/src/naive.rs create mode 100644 memchr/src/tests/iter.rs create mode 100644 
memchr/src/tests/memchr.rs create mode 100644 memchr/src/tests/mod.rs create mode 100644 memchr/src/x86/avx.rs create mode 100644 memchr/src/x86/mod.rs create mode 100644 memchr/src/x86/sse2.rs create mode 100644 memchr/src/x86/sse42.rs create mode 100644 memoffset/.cargo-checksum.json create mode 100644 memoffset/Cargo.toml create mode 100644 memoffset/LICENSE create mode 100644 memoffset/README.md create mode 100644 memoffset/src/lib.rs create mode 100644 memoffset/src/offset_of.rs create mode 100644 memoffset/src/span_of.rs create mode 100644 miow/.cargo-checksum.json create mode 100644 miow/Cargo.toml create mode 100644 miow/LICENSE-APACHE create mode 100644 miow/LICENSE-MIT create mode 100644 miow/README.md create mode 100644 miow/appveyor.yml create mode 100644 miow/src/handle.rs create mode 100644 miow/src/iocp.rs create mode 100644 miow/src/lib.rs create mode 100644 miow/src/net.rs create mode 100644 miow/src/overlapped.rs create mode 100644 miow/src/pipe.rs create mode 100644 nodrop/.cargo-checksum.json create mode 100644 nodrop/Cargo.toml create mode 100644 nodrop/README.rst create mode 100644 nodrop/src/lib.rs create mode 100644 num_cpus/.cargo-checksum.json create mode 100644 num_cpus/CHANGELOG.md create mode 100644 num_cpus/CONTRIBUTING.md create mode 100644 num_cpus/Cargo.toml create mode 100644 num_cpus/LICENSE-APACHE create mode 100644 num_cpus/LICENSE-MIT create mode 100644 num_cpus/README.md create mode 100644 num_cpus/src/lib.rs create mode 100644 opener/.cargo-checksum.json create mode 100644 opener/Cargo.toml create mode 100644 opener/src/lib.rs create mode 100644 opener/src/xdg-open create mode 100644 openssl-probe/.cargo-checksum.json create mode 100644 openssl-probe/Cargo.toml create mode 100644 openssl-probe/LICENSE-APACHE create mode 100644 openssl-probe/LICENSE-MIT create mode 100644 openssl-probe/README.md create mode 100644 openssl-probe/src/lib.rs create mode 100644 openssl-sys/.cargo-checksum.json create mode 100644 openssl-sys/.pc/.quilt_patches create mode 100644 openssl-sys/.pc/.quilt_series create mode 100644 openssl-sys/.pc/.version create mode 100644 openssl-sys/.pc/applied-patches create mode 100644 openssl-sys/.pc/disable-vendor.patch/.timestamp create mode 100644 openssl-sys/.pc/disable-vendor.patch/Cargo.toml create mode 100644 openssl-sys/Cargo.toml create mode 100644 openssl-sys/LICENSE-MIT create mode 100644 openssl-sys/README.md create mode 100644 openssl-sys/build/cfgs.rs create mode 100644 openssl-sys/build/main.rs create mode 100644 openssl-sys/debian/patches/disable-vendor.patch create mode 100644 openssl-sys/debian/patches/series create mode 100644 openssl-sys/src/aes.rs create mode 100644 openssl-sys/src/asn1.rs create mode 100644 openssl-sys/src/bio.rs create mode 100644 openssl-sys/src/bn.rs create mode 100644 openssl-sys/src/cms.rs create mode 100644 openssl-sys/src/conf.rs create mode 100644 openssl-sys/src/crypto.rs create mode 100644 openssl-sys/src/dh.rs create mode 100644 openssl-sys/src/dsa.rs create mode 100644 openssl-sys/src/dtls1.rs create mode 100644 openssl-sys/src/ec.rs create mode 100644 openssl-sys/src/err.rs create mode 100644 openssl-sys/src/evp.rs create mode 100644 openssl-sys/src/hmac.rs create mode 100644 openssl-sys/src/lib.rs create mode 100644 openssl-sys/src/macros.rs create mode 100644 openssl-sys/src/obj_mac.rs create mode 100644 openssl-sys/src/object.rs create mode 100644 openssl-sys/src/ocsp.rs create mode 100644 openssl-sys/src/ossl_typ.rs create mode 100644 openssl-sys/src/pem.rs create mode 100644 
openssl-sys/src/pkcs12.rs create mode 100644 openssl-sys/src/pkcs7.rs create mode 100644 openssl-sys/src/rand.rs create mode 100644 openssl-sys/src/rsa.rs create mode 100644 openssl-sys/src/safestack.rs create mode 100644 openssl-sys/src/sha.rs create mode 100644 openssl-sys/src/srtp.rs create mode 100644 openssl-sys/src/ssl.rs create mode 100644 openssl-sys/src/ssl3.rs create mode 100644 openssl-sys/src/stack.rs create mode 100644 openssl-sys/src/tls1.rs create mode 100644 openssl-sys/src/x509.rs create mode 100644 openssl-sys/src/x509_vfy.rs create mode 100644 openssl-sys/src/x509v3.rs create mode 100644 openssl/.cargo-checksum.json create mode 100644 openssl/Cargo.toml create mode 100644 openssl/LICENSE create mode 100644 openssl/README.md create mode 100644 openssl/build.rs create mode 100644 openssl/examples/mk_certs.rs create mode 100644 openssl/src/aes.rs create mode 100644 openssl/src/asn1.rs create mode 100644 openssl/src/bio.rs create mode 100644 openssl/src/bn.rs create mode 100644 openssl/src/cms.rs create mode 100644 openssl/src/conf.rs create mode 100644 openssl/src/derive.rs create mode 100644 openssl/src/dh.rs create mode 100644 openssl/src/dsa.rs create mode 100644 openssl/src/ec.rs create mode 100644 openssl/src/ecdsa.rs create mode 100644 openssl/src/error.rs create mode 100644 openssl/src/ex_data.rs create mode 100644 openssl/src/fips.rs create mode 100644 openssl/src/hash.rs create mode 100644 openssl/src/lib.rs create mode 100644 openssl/src/macros.rs create mode 100644 openssl/src/memcmp.rs create mode 100644 openssl/src/nid.rs create mode 100644 openssl/src/ocsp.rs create mode 100644 openssl/src/pkcs12.rs create mode 100644 openssl/src/pkcs5.rs create mode 100644 openssl/src/pkcs7.rs create mode 100644 openssl/src/pkey.rs create mode 100644 openssl/src/rand.rs create mode 100644 openssl/src/rsa.rs create mode 100644 openssl/src/sha.rs create mode 100644 openssl/src/sign.rs create mode 100644 openssl/src/srtp.rs create mode 100644 openssl/src/ssl/bio.rs create mode 100644 openssl/src/ssl/callbacks.rs create mode 100644 openssl/src/ssl/connector.rs create mode 100644 openssl/src/ssl/error.rs create mode 100644 openssl/src/ssl/mod.rs create mode 100644 openssl/src/ssl/test.rs create mode 100644 openssl/src/stack.rs create mode 100644 openssl/src/string.rs create mode 100644 openssl/src/symm.rs create mode 100644 openssl/src/util.rs create mode 100644 openssl/src/version.rs create mode 100644 openssl/src/x509/extension.rs create mode 100644 openssl/src/x509/mod.rs create mode 100644 openssl/src/x509/store.rs create mode 100644 openssl/src/x509/tests.rs create mode 100644 openssl/src/x509/verify.rs create mode 100644 openssl/test/alt_name_cert.pem create mode 100644 openssl/test/cert.pem create mode 100644 openssl/test/certs.pem create mode 100644 openssl/test/dhparams.pem create mode 100644 openssl/test/dsa.pem create mode 100644 openssl/test/dsa.pem.pub create mode 100644 openssl/test/dsaparam.pem create mode 100644 openssl/test/identity.p12 create mode 100644 openssl/test/key.der create mode 100644 openssl/test/key.der.pub create mode 100644 openssl/test/key.pem create mode 100644 openssl/test/key.pem.pub create mode 100644 openssl/test/keystore-empty-chain.p12 create mode 100644 openssl/test/nid_test_cert.pem create mode 100644 openssl/test/nid_uid_test_cert.pem create mode 100644 openssl/test/pkcs1.pem.pub create mode 100644 openssl/test/pkcs8.der create mode 100644 openssl/test/root-ca.key create mode 100644 openssl/test/root-ca.pem create mode 100644 
openssl/test/rsa-encrypted.pem create mode 100644 openssl/test/rsa.pem create mode 100644 openssl/test/rsa.pem.pub create mode 100644 owning_ref/.cargo-checksum.json create mode 100644 owning_ref/Cargo.toml create mode 100644 owning_ref/LICENSE create mode 100644 owning_ref/README.md create mode 100644 owning_ref/src/lib.rs create mode 100644 parking_lot/.cargo-checksum.json create mode 100644 parking_lot/CHANGELOG.md create mode 100644 parking_lot/Cargo.toml create mode 100644 parking_lot/LICENSE-APACHE create mode 100644 parking_lot/LICENSE-MIT create mode 100644 parking_lot/README.md create mode 100644 parking_lot/appveyor.yml create mode 100644 parking_lot/src/condvar.rs create mode 100644 parking_lot/src/deadlock.rs create mode 100644 parking_lot/src/elision.rs create mode 100644 parking_lot/src/lib.rs create mode 100644 parking_lot/src/mutex.rs create mode 100644 parking_lot/src/once.rs create mode 100644 parking_lot/src/raw_mutex.rs create mode 100644 parking_lot/src/raw_rwlock.rs create mode 100644 parking_lot/src/remutex.rs create mode 100644 parking_lot/src/rwlock.rs create mode 100644 parking_lot/src/util.rs create mode 100644 parking_lot_core/.cargo-checksum.json create mode 100644 parking_lot_core/Cargo.toml create mode 100644 parking_lot_core/LICENSE-APACHE create mode 100644 parking_lot_core/LICENSE-MIT create mode 100644 parking_lot_core/build.rs create mode 100644 parking_lot_core/src/lib.rs create mode 100644 parking_lot_core/src/parking_lot.rs create mode 100644 parking_lot_core/src/spinwait.rs create mode 100644 parking_lot_core/src/thread_parker/generic.rs create mode 100644 parking_lot_core/src/thread_parker/linux.rs create mode 100644 parking_lot_core/src/thread_parker/unix.rs create mode 100644 parking_lot_core/src/thread_parker/windows/keyed_event.rs create mode 100644 parking_lot_core/src/thread_parker/windows/mod.rs create mode 100644 parking_lot_core/src/thread_parker/windows/waitaddress.rs create mode 100644 parking_lot_core/src/util.rs create mode 100644 parking_lot_core/src/word_lock.rs create mode 100644 percent-encoding/.cargo-checksum.json create mode 100644 percent-encoding/Cargo.toml create mode 100644 percent-encoding/LICENSE-APACHE create mode 100644 percent-encoding/LICENSE-MIT create mode 100644 percent-encoding/lib.rs create mode 100644 pkg-config/.cargo-checksum.json create mode 100644 pkg-config/.pc/.quilt_patches create mode 100644 pkg-config/.pc/.quilt_series create mode 100644 pkg-config/.pc/.version create mode 100644 pkg-config/.pc/applied-patches create mode 100644 pkg-config/.pc/no-special-snowflake-env.patch/.timestamp create mode 100644 pkg-config/.pc/no-special-snowflake-env.patch/src/lib.rs create mode 100644 pkg-config/.pc/no-special-snowflake-env.patch/tests/test.rs create mode 100644 pkg-config/CHANGELOG.md create mode 100644 pkg-config/Cargo.toml create mode 100644 pkg-config/LICENSE-APACHE create mode 100644 pkg-config/LICENSE-MIT create mode 100644 pkg-config/README.md create mode 100644 pkg-config/debian/patches/no-special-snowflake-env.patch create mode 100644 pkg-config/debian/patches/no-special-snowflake-env.patch~ create mode 100644 pkg-config/debian/patches/series create mode 100644 pkg-config/src/lib.rs create mode 100644 pkg-config/tests/escape.pc create mode 100644 pkg-config/tests/foo.pc create mode 100644 pkg-config/tests/framework.pc create mode 100644 pkg-config/tests/test.rs create mode 100644 proc-macro2/.cargo-checksum.json create mode 100644 proc-macro2/Cargo.toml create mode 100644 proc-macro2/LICENSE-APACHE 
create mode 100644 proc-macro2/LICENSE-MIT create mode 100644 proc-macro2/README.md create mode 100644 proc-macro2/build.rs create mode 100644 proc-macro2/src/lib.rs create mode 100644 proc-macro2/src/stable.rs create mode 100644 proc-macro2/src/strnom.rs create mode 100644 proc-macro2/src/unstable.rs create mode 100644 proc-macro2/tests/test.rs create mode 100644 quick-error/.cargo-checksum.json create mode 100644 quick-error/Cargo.toml create mode 100644 quick-error/LICENSE-APACHE create mode 100644 quick-error/LICENSE-MIT create mode 100644 quick-error/README.rst create mode 100644 quick-error/bulk.yaml create mode 100644 quick-error/examples/context.rs create mode 100644 quick-error/src/lib.rs create mode 100644 quick-error/vagga.yaml create mode 100644 quote/.cargo-checksum.json create mode 100644 quote/Cargo.toml create mode 100644 quote/LICENSE-APACHE create mode 100644 quote/LICENSE-MIT create mode 100644 quote/README.md create mode 100644 quote/src/ext.rs create mode 100644 quote/src/lib.rs create mode 100644 quote/src/to_tokens.rs create mode 100644 quote/tests/test.rs create mode 100644 rand/.cargo-checksum.json create mode 100644 rand/.pc/.quilt_patches create mode 100644 rand/.pc/.quilt_series create mode 100644 rand/.pc/.version create mode 100644 rand/.pc/applied-patches create mode 100644 rand/.pc/disable-fuchsia-zircon-dep.diff/.timestamp create mode 100644 rand/.pc/disable-fuchsia-zircon-dep.diff/Cargo.toml create mode 100644 rand/CHANGELOG.md create mode 100644 rand/CONTRIBUTING.md create mode 100644 rand/Cargo.toml create mode 100644 rand/LICENSE-APACHE create mode 100644 rand/LICENSE-MIT create mode 100644 rand/README.md create mode 100644 rand/UPDATING.md create mode 100644 rand/appveyor.yml create mode 100644 rand/benches/distributions.rs create mode 100644 rand/benches/generators.rs create mode 100644 rand/benches/misc.rs create mode 100644 rand/debian/patches/disable-fuchsia-zircon-dep.diff create mode 100644 rand/debian/patches/series create mode 100644 rand/examples/monte-carlo.rs create mode 100644 rand/examples/monty-hall.rs create mode 100644 rand/src/distributions/bernoulli.rs create mode 100644 rand/src/distributions/binomial.rs create mode 100644 rand/src/distributions/cauchy.rs create mode 100644 rand/src/distributions/exponential.rs create mode 100644 rand/src/distributions/float.rs create mode 100644 rand/src/distributions/gamma.rs create mode 100644 rand/src/distributions/integer.rs create mode 100644 rand/src/distributions/log_gamma.rs create mode 100644 rand/src/distributions/mod.rs create mode 100644 rand/src/distributions/normal.rs create mode 100644 rand/src/distributions/other.rs create mode 100644 rand/src/distributions/pareto.rs create mode 100644 rand/src/distributions/poisson.rs create mode 100644 rand/src/distributions/uniform.rs create mode 100644 rand/src/distributions/ziggurat_tables.rs create mode 100644 rand/src/lib.rs create mode 100644 rand/src/prelude.rs create mode 100644 rand/src/prng/chacha.rs create mode 100644 rand/src/prng/hc128.rs create mode 100644 rand/src/prng/isaac.rs create mode 100644 rand/src/prng/isaac64.rs create mode 100644 rand/src/prng/isaac_array.rs create mode 100644 rand/src/prng/mod.rs create mode 100644 rand/src/prng/xorshift.rs create mode 100644 rand/src/rngs/adapter/mod.rs create mode 100644 rand/src/rngs/adapter/read.rs create mode 100644 rand/src/rngs/adapter/reseeding.rs create mode 100644 rand/src/rngs/entropy.rs create mode 100644 rand/src/rngs/jitter.rs create mode 100644 rand/src/rngs/mock.rs create 
mode 100644 rand/src/rngs/mod.rs create mode 100644 rand/src/rngs/os.rs create mode 100644 rand/src/rngs/small.rs create mode 100644 rand/src/rngs/std.rs create mode 100644 rand/src/rngs/thread.rs create mode 100644 rand/src/seq.rs create mode 100644 rand/tests/bool.rs create mode 100644 rand/utils/ci/install.sh create mode 100644 rand/utils/ci/script.sh create mode 100755 rand/utils/ziggurat_tables.py create mode 100644 rand_core-0.2.2/.cargo-checksum.json create mode 100644 rand_core-0.2.2/CHANGELOG.md create mode 100644 rand_core-0.2.2/Cargo.toml create mode 100644 rand_core-0.2.2/LICENSE-APACHE create mode 100644 rand_core-0.2.2/LICENSE-MIT create mode 100644 rand_core-0.2.2/README.md create mode 100644 rand_core-0.2.2/src/block.rs create mode 100644 rand_core-0.2.2/src/error.rs create mode 100644 rand_core-0.2.2/src/impls.rs create mode 100644 rand_core-0.2.2/src/le.rs create mode 100644 rand_core-0.2.2/src/lib.rs create mode 100644 rand_core/.cargo-checksum.json create mode 100644 rand_core/CHANGELOG.md create mode 100644 rand_core/COPYRIGHT create mode 100644 rand_core/Cargo.toml create mode 100644 rand_core/LICENSE-APACHE create mode 100644 rand_core/LICENSE-MIT create mode 100644 rand_core/README.md create mode 100644 rand_core/src/block.rs create mode 100644 rand_core/src/error.rs create mode 100644 rand_core/src/impls.rs create mode 100644 rand_core/src/le.rs create mode 100644 rand_core/src/lib.rs create mode 100644 redox_syscall/.cargo-checksum.json create mode 100644 redox_syscall/Cargo.toml create mode 100644 redox_syscall/LICENSE create mode 100644 redox_syscall/README.md create mode 100644 redox_syscall/rust-toolchain create mode 100644 redox_syscall/src/arch/arm.rs create mode 100644 redox_syscall/src/arch/x86.rs create mode 100644 redox_syscall/src/arch/x86_64.rs create mode 100644 redox_syscall/src/call.rs create mode 100644 redox_syscall/src/data.rs create mode 100644 redox_syscall/src/error.rs create mode 100644 redox_syscall/src/flag.rs create mode 100644 redox_syscall/src/io/dma.rs create mode 100644 redox_syscall/src/io/io.rs create mode 100644 redox_syscall/src/io/mmio.rs create mode 100644 redox_syscall/src/io/mod.rs create mode 100644 redox_syscall/src/io/pio.rs create mode 100644 redox_syscall/src/lib.rs create mode 100644 redox_syscall/src/number.rs create mode 100755 redox_syscall/src/scheme/generate.sh create mode 100644 redox_syscall/src/scheme/mod.rs create mode 100644 redox_syscall/src/scheme/scheme.rs create mode 100644 redox_syscall/src/scheme/scheme_block.rs create mode 100644 redox_syscall/src/scheme/scheme_block_mut.rs create mode 100644 redox_syscall/src/scheme/scheme_mut.rs create mode 100644 redox_termios/.cargo-checksum.json create mode 100644 redox_termios/Cargo.toml create mode 100644 redox_termios/LICENSE create mode 100644 redox_termios/README.md create mode 100644 redox_termios/src/lib.rs create mode 100644 regex-syntax/.cargo-checksum.json create mode 100644 regex-syntax/Cargo.toml create mode 100644 regex-syntax/LICENSE-APACHE create mode 100644 regex-syntax/LICENSE-MIT create mode 100644 regex-syntax/benches/bench.rs create mode 100644 regex-syntax/src/ast/mod.rs create mode 100644 regex-syntax/src/ast/parse.rs create mode 100644 regex-syntax/src/ast/print.rs create mode 100644 regex-syntax/src/ast/visitor.rs create mode 100644 regex-syntax/src/either.rs create mode 100644 regex-syntax/src/error.rs create mode 100644 regex-syntax/src/hir/interval.rs create mode 100644 regex-syntax/src/hir/literal/mod.rs create mode 100644 
regex-syntax/src/hir/mod.rs create mode 100644 regex-syntax/src/hir/print.rs create mode 100644 regex-syntax/src/hir/translate.rs create mode 100644 regex-syntax/src/hir/visitor.rs create mode 100644 regex-syntax/src/lib.rs create mode 100644 regex-syntax/src/parser.rs create mode 100644 regex-syntax/src/unicode.rs create mode 100644 regex-syntax/src/unicode_tables/age.rs create mode 100644 regex-syntax/src/unicode_tables/case_folding_simple.rs create mode 100644 regex-syntax/src/unicode_tables/general_category.rs create mode 100644 regex-syntax/src/unicode_tables/mod.rs create mode 100644 regex-syntax/src/unicode_tables/perl_word.rs create mode 100644 regex-syntax/src/unicode_tables/property_bool.rs create mode 100644 regex-syntax/src/unicode_tables/property_names.rs create mode 100644 regex-syntax/src/unicode_tables/property_values.rs create mode 100644 regex-syntax/src/unicode_tables/script.rs create mode 100644 regex-syntax/src/unicode_tables/script_extension.rs create mode 100644 regex/.cargo-checksum.json create mode 100644 regex/CHANGELOG.md create mode 100644 regex/Cargo.toml create mode 100644 regex/HACKING.md create mode 100644 regex/LICENSE-APACHE create mode 100644 regex/LICENSE-MIT create mode 100644 regex/PERFORMANCE.md create mode 100644 regex/README.md create mode 100644 regex/UNICODE.md create mode 100644 regex/appveyor.yml create mode 100644 regex/build.rs create mode 100755 regex/ci/after_success.sh create mode 100755 regex/ci/run-kcov create mode 100755 regex/ci/run-shootout-test create mode 100755 regex/ci/script.sh create mode 100755 regex/ci/test-regex-capi create mode 100644 regex/examples/regexdna-input.txt create mode 100644 regex/examples/regexdna-output.txt create mode 100644 regex/examples/shootout-regex-dna-bytes.rs create mode 100644 regex/examples/shootout-regex-dna-cheat.rs create mode 100644 regex/examples/shootout-regex-dna-replace.rs create mode 100644 regex/examples/shootout-regex-dna-single-cheat.rs create mode 100644 regex/examples/shootout-regex-dna-single.rs create mode 100644 regex/examples/shootout-regex-dna.rs create mode 100755 regex/scripts/frequencies.py create mode 100755 regex/scripts/generate.py create mode 100755 regex/scripts/regex-match-tests.py create mode 100755 regex/scripts/scrape_crates_io.py create mode 100644 regex/src/backtrack.rs create mode 100644 regex/src/compile.rs create mode 100644 regex/src/dfa.rs create mode 100644 regex/src/error.rs create mode 100644 regex/src/exec.rs create mode 100644 regex/src/expand.rs create mode 100644 regex/src/freqs.rs create mode 100644 regex/src/input.rs create mode 100644 regex/src/lib.rs create mode 100644 regex/src/literal/mod.rs create mode 100644 regex/src/literal/teddy_avx2/fallback.rs create mode 100644 regex/src/literal/teddy_avx2/imp.rs create mode 100644 regex/src/literal/teddy_avx2/mod.rs create mode 100644 regex/src/literal/teddy_ssse3/fallback.rs create mode 100644 regex/src/literal/teddy_ssse3/imp.rs create mode 100644 regex/src/literal/teddy_ssse3/mod.rs create mode 100644 regex/src/pattern.rs create mode 100644 regex/src/pikevm.rs create mode 100644 regex/src/prog.rs create mode 100644 regex/src/re_builder.rs create mode 100644 regex/src/re_bytes.rs create mode 100644 regex/src/re_set.rs create mode 100644 regex/src/re_trait.rs create mode 100644 regex/src/re_unicode.rs create mode 100644 regex/src/sparse.rs create mode 100644 regex/src/testdata/LICENSE create mode 100644 regex/src/testdata/README create mode 100644 regex/src/testdata/basic.dat create mode 100644 
regex/src/testdata/nullsubexpr.dat create mode 100644 regex/src/testdata/repetition.dat create mode 100644 regex/src/utf8.rs create mode 100644 regex/src/vector/avx2.rs create mode 100644 regex/src/vector/mod.rs create mode 100644 regex/src/vector/ssse3.rs create mode 100644 regex/tests/api.rs create mode 100644 regex/tests/api_str.rs create mode 100644 regex/tests/bytes.rs create mode 100644 regex/tests/consistent.rs create mode 100644 regex/tests/crates_regex.rs create mode 100644 regex/tests/crazy.rs create mode 100644 regex/tests/flags.rs create mode 100644 regex/tests/fowler.rs create mode 100644 regex/tests/macros.rs create mode 100644 regex/tests/macros_bytes.rs create mode 100644 regex/tests/macros_str.rs create mode 100644 regex/tests/misc.rs create mode 100644 regex/tests/multiline.rs create mode 100644 regex/tests/noparse.rs create mode 100644 regex/tests/regression.rs create mode 100644 regex/tests/replace.rs create mode 100644 regex/tests/searcher.rs create mode 100644 regex/tests/set.rs create mode 100644 regex/tests/shortest_match.rs create mode 100644 regex/tests/suffix_reverse.rs create mode 100644 regex/tests/test_backtrack.rs create mode 100644 regex/tests/test_backtrack_bytes.rs create mode 100644 regex/tests/test_backtrack_utf8bytes.rs create mode 100644 regex/tests/test_crates_regex.rs create mode 100644 regex/tests/test_default.rs create mode 100644 regex/tests/test_default_bytes.rs create mode 100644 regex/tests/test_nfa.rs create mode 100644 regex/tests/test_nfa_bytes.rs create mode 100644 regex/tests/test_nfa_utf8bytes.rs create mode 100644 regex/tests/unicode.rs create mode 100644 regex/tests/word_boundary.rs create mode 100644 regex/tests/word_boundary_ascii.rs create mode 100644 regex/tests/word_boundary_unicode.rs create mode 100644 remove_dir_all/.cargo-checksum.json create mode 100644 remove_dir_all/Cargo.toml create mode 100644 remove_dir_all/LICENCE-APACHE create mode 100644 remove_dir_all/LICENCE-MIT create mode 100644 remove_dir_all/src/fs.rs create mode 100644 remove_dir_all/src/lib.rs create mode 100644 rustc-demangle/.cargo-checksum.json create mode 100644 rustc-demangle/Cargo.toml create mode 100644 rustc-demangle/LICENSE-APACHE create mode 100644 rustc-demangle/LICENSE-MIT create mode 100644 rustc-demangle/README.md create mode 100644 rustc-demangle/src/lib.rs create mode 100644 rustc-workspace-hack/.cargo-checksum.json create mode 100644 rustc-workspace-hack/Cargo.toml create mode 100644 rustc-workspace-hack/src/lib.rs create mode 100644 rustc_version/.cargo-checksum.json create mode 100644 rustc_version/Cargo.toml create mode 100644 rustc_version/LICENSE-APACHE create mode 100644 rustc_version/LICENSE-MIT create mode 100644 rustc_version/README.md create mode 100644 rustc_version/src/errors.rs create mode 100644 rustc_version/src/lib.rs create mode 100644 rustfix/.cargo-checksum.json create mode 100644 rustfix/Cargo.toml create mode 100644 rustfix/LICENSE-APACHE create mode 100644 rustfix/LICENSE-MIT create mode 100644 rustfix/Readme.md create mode 100644 rustfix/bors.toml create mode 100644 rustfix/proptest-regressions/replace.txt create mode 100644 rustfix/src/diagnostics.rs create mode 100644 rustfix/src/lib.rs create mode 100644 rustfix/src/replace.rs create mode 100644 ryu/.cargo-checksum.json create mode 100644 ryu/Cargo.toml create mode 100644 ryu/LICENSE-APACHE create mode 100644 ryu/LICENSE-BOOST create mode 100644 ryu/README.md create mode 100644 ryu/benchmark/benchmark.rs create mode 100644 ryu/build.rs create mode 100644 
ryu/src/buffer/mod.rs create mode 100644 ryu/src/common.rs create mode 100644 ryu/src/d2s.rs create mode 100644 ryu/src/d2s_full_table.rs create mode 100644 ryu/src/d2s_intrinsics.rs create mode 100644 ryu/src/d2s_small_table.rs create mode 100644 ryu/src/digit_table.rs create mode 100644 ryu/src/f2s.rs create mode 100644 ryu/src/lib.rs create mode 100644 ryu/src/pretty/exponent.rs create mode 100644 ryu/src/pretty/mantissa.rs create mode 100644 ryu/src/pretty/mod.rs create mode 100644 ryu/tests/d2s_table_test.rs create mode 100644 ryu/tests/d2s_test.rs create mode 100644 ryu/tests/exhaustive.rs create mode 100644 ryu/tests/f2s_test.rs create mode 100644 ryu/tests/macros/mod.rs create mode 100644 same-file/.cargo-checksum.json create mode 100644 same-file/COPYING create mode 100644 same-file/Cargo.toml create mode 100644 same-file/LICENSE-MIT create mode 100644 same-file/README.md create mode 100644 same-file/UNLICENSE create mode 100644 same-file/examples/is_same_file.rs create mode 100644 same-file/examples/is_stderr.rs create mode 100644 same-file/src/lib.rs create mode 100644 same-file/src/unix.rs create mode 100644 same-file/src/win.rs create mode 100644 schannel/.cargo-checksum.json create mode 100644 schannel/Cargo.toml create mode 100644 schannel/LICENSE.md create mode 100644 schannel/README.md create mode 100644 schannel/appveyor.yml create mode 100644 schannel/src/cert_chain.rs create mode 100644 schannel/src/cert_context.rs create mode 100644 schannel/src/cert_store.rs create mode 100644 schannel/src/context_buffer.rs create mode 100644 schannel/src/crypt_key.rs create mode 100644 schannel/src/crypt_prov.rs create mode 100644 schannel/src/ctl_context.rs create mode 100644 schannel/src/key_handle.rs create mode 100644 schannel/src/lib.rs create mode 100644 schannel/src/ncrypt_key.rs create mode 100644 schannel/src/schannel_cred.rs create mode 100644 schannel/src/security_context.rs create mode 100644 schannel/src/test.rs create mode 100644 schannel/src/tls_stream.rs create mode 100644 schannel/test/cert.der create mode 100644 schannel/test/cert.pem create mode 100644 schannel/test/identity.p12 create mode 100644 schannel/test/key.key create mode 100644 schannel/test/key.pem create mode 100644 schannel/test/self-signed.badssl.com.cer create mode 100644 scopeguard/.cargo-checksum.json create mode 100644 scopeguard/Cargo.toml create mode 100644 scopeguard/LICENSE-APACHE create mode 100644 scopeguard/LICENSE-MIT create mode 100644 scopeguard/README.rst create mode 100644 scopeguard/examples/readme.rs create mode 100644 scopeguard/src/lib.rs create mode 100644 semver-parser/.cargo-checksum.json create mode 100644 semver-parser/Cargo.toml create mode 100644 semver-parser/LICENSE-APACHE create mode 100644 semver-parser/LICENSE-MIT create mode 100644 semver-parser/src/common.rs create mode 100644 semver-parser/src/lib.rs create mode 100644 semver-parser/src/range.rs create mode 100644 semver-parser/src/recognize.rs create mode 100644 semver-parser/src/version.rs create mode 100644 semver/.cargo-checksum.json create mode 100644 semver/Cargo.toml create mode 100644 semver/LICENSE-APACHE create mode 100644 semver/LICENSE-MIT create mode 100644 semver/README.md create mode 100644 semver/src/lib.rs create mode 100644 semver/src/version.rs create mode 100644 semver/src/version_req.rs create mode 100644 semver/tests/deprecation.rs create mode 100644 semver/tests/regression.rs create mode 100644 semver/tests/serde.rs create mode 100644 serde/.cargo-checksum.json create mode 100644 
serde/Cargo.toml create mode 100644 serde/LICENSE-APACHE create mode 100644 serde/LICENSE-MIT create mode 100644 serde/README.md create mode 100644 serde/build.rs create mode 100644 serde/crates-io.md create mode 100644 serde/src/de/from_primitive.rs create mode 100644 serde/src/de/ignored_any.rs create mode 100644 serde/src/de/impls.rs create mode 100644 serde/src/de/mod.rs create mode 100644 serde/src/de/utf8.rs create mode 100644 serde/src/de/value.rs create mode 100644 serde/src/export.rs create mode 100644 serde/src/integer128.rs create mode 100644 serde/src/lib.rs create mode 100644 serde/src/macros.rs create mode 100644 serde/src/private/de.rs create mode 100644 serde/src/private/macros.rs create mode 100644 serde/src/private/mod.rs create mode 100644 serde/src/private/ser.rs create mode 100644 serde/src/ser/impls.rs create mode 100644 serde/src/ser/impossible.rs create mode 100644 serde/src/ser/mod.rs create mode 100644 serde_derive/.cargo-checksum.json create mode 100644 serde_derive/Cargo.toml create mode 100644 serde_derive/LICENSE-APACHE create mode 100644 serde_derive/LICENSE-MIT create mode 100644 serde_derive/README.md create mode 100644 serde_derive/crates-io.md create mode 100644 serde_derive/src/bound.rs create mode 100644 serde_derive/src/de.rs create mode 100644 serde_derive/src/fragment.rs create mode 100644 serde_derive/src/internals/ast.rs create mode 100644 serde_derive/src/internals/attr.rs create mode 100644 serde_derive/src/internals/case.rs create mode 100644 serde_derive/src/internals/check.rs create mode 100644 serde_derive/src/internals/ctxt.rs create mode 100644 serde_derive/src/internals/mod.rs create mode 100644 serde_derive/src/lib.rs create mode 100644 serde_derive/src/pretend.rs create mode 100644 serde_derive/src/ser.rs create mode 100644 serde_derive/src/try.rs create mode 100644 serde_ignored/.cargo-checksum.json create mode 100644 serde_ignored/Cargo.toml create mode 100644 serde_ignored/LICENSE-APACHE create mode 100644 serde_ignored/LICENSE-MIT create mode 100644 serde_ignored/README.md create mode 100644 serde_ignored/src/lib.rs create mode 100644 serde_json/.cargo-checksum.json create mode 100644 serde_json/Cargo.toml create mode 100644 serde_json/LICENSE-APACHE create mode 100644 serde_json/LICENSE-MIT create mode 100644 serde_json/README.md create mode 100644 serde_json/src/de.rs create mode 100644 serde_json/src/error.rs create mode 100644 serde_json/src/iter.rs create mode 100644 serde_json/src/lib.rs create mode 100644 serde_json/src/macros.rs create mode 100644 serde_json/src/map.rs create mode 100644 serde_json/src/number.rs create mode 100644 serde_json/src/raw.rs create mode 100644 serde_json/src/read.rs create mode 100644 serde_json/src/ser.rs create mode 100644 serde_json/src/value/de.rs create mode 100644 serde_json/src/value/from.rs create mode 100644 serde_json/src/value/index.rs create mode 100644 serde_json/src/value/mod.rs create mode 100644 serde_json/src/value/partial_eq.rs create mode 100644 serde_json/src/value/ser.rs create mode 100644 shell-escape/.cargo-checksum.json create mode 100644 shell-escape/Cargo.toml create mode 100644 shell-escape/LICENSE-APACHE create mode 100644 shell-escape/LICENSE-MIT create mode 100644 shell-escape/README.md create mode 100644 shell-escape/src/lib.rs create mode 100644 smallvec/.cargo-checksum.json create mode 100644 smallvec/Cargo.toml create mode 100644 smallvec/LICENSE-APACHE create mode 100644 smallvec/LICENSE-MIT create mode 100644 smallvec/README.md create mode 100644 
smallvec/benches/bench.rs create mode 100644 smallvec/lib.rs create mode 100644 socket2/.cargo-checksum.json create mode 100644 socket2/Cargo.toml create mode 100644 socket2/LICENSE-APACHE create mode 100644 socket2/LICENSE-MIT create mode 100644 socket2/README.md create mode 100644 socket2/src/lib.rs create mode 100644 socket2/src/sockaddr.rs create mode 100644 socket2/src/socket.rs create mode 100644 socket2/src/sys/redox/mod.rs create mode 100644 socket2/src/sys/unix/mod.rs create mode 100644 socket2/src/sys/unix/weak.rs create mode 100644 socket2/src/sys/windows.rs create mode 100644 socket2/src/utils.rs create mode 100644 stable_deref_trait/.cargo-checksum.json create mode 100644 stable_deref_trait/Cargo.toml create mode 100644 stable_deref_trait/LICENSE-APACHE create mode 100644 stable_deref_trait/LICENSE-MIT create mode 100644 stable_deref_trait/README.md create mode 100644 stable_deref_trait/src/lib.rs create mode 100644 strsim/.cargo-checksum.json create mode 100644 strsim/CHANGELOG.md create mode 100644 strsim/Cargo.toml create mode 100644 strsim/LICENSE create mode 100644 strsim/README.md create mode 100644 strsim/appveyor.yml create mode 100755 strsim/dev create mode 100644 strsim/src/lib.rs create mode 100644 strsim/tests/lib.rs create mode 100644 syn/.cargo-checksum.json create mode 100644 syn/Cargo.toml create mode 100644 syn/LICENSE-APACHE create mode 100644 syn/LICENSE-MIT create mode 100644 syn/README.md create mode 100644 syn/src/attr.rs create mode 100644 syn/src/buffer.rs create mode 100644 syn/src/data.rs create mode 100644 syn/src/derive.rs create mode 100644 syn/src/error.rs create mode 100644 syn/src/export.rs create mode 100644 syn/src/expr.rs create mode 100644 syn/src/ext.rs create mode 100644 syn/src/file.rs create mode 100644 syn/src/gen/fold.rs create mode 100644 syn/src/gen/visit.rs create mode 100644 syn/src/gen/visit_mut.rs create mode 100644 syn/src/gen_helper.rs create mode 100644 syn/src/generics.rs create mode 100644 syn/src/group.rs create mode 100644 syn/src/ident.rs create mode 100644 syn/src/item.rs create mode 100644 syn/src/keyword.rs create mode 100644 syn/src/lib.rs create mode 100644 syn/src/lifetime.rs create mode 100644 syn/src/lit.rs create mode 100644 syn/src/lookahead.rs create mode 100644 syn/src/mac.rs create mode 100644 syn/src/macros.rs create mode 100644 syn/src/op.rs create mode 100644 syn/src/parse.rs create mode 100644 syn/src/parse_macro_input.rs create mode 100644 syn/src/parse_quote.rs create mode 100644 syn/src/path.rs create mode 100644 syn/src/print.rs create mode 100644 syn/src/punctuated.rs create mode 100644 syn/src/span.rs create mode 100644 syn/src/spanned.rs create mode 100644 syn/src/synom.rs create mode 100644 syn/src/token.rs create mode 100644 syn/src/tt.rs create mode 100644 syn/src/ty.rs create mode 100644 synstructure/.cargo-checksum.json create mode 100644 synstructure/Cargo.toml create mode 100644 synstructure/LICENSE create mode 100644 synstructure/README.md create mode 100644 synstructure/src/lib.rs create mode 100644 synstructure/src/macros.rs create mode 100644 tar/.cargo-checksum.json create mode 100644 tar/Cargo.toml create mode 100644 tar/LICENSE-APACHE create mode 100644 tar/LICENSE-MIT create mode 100644 tar/README.md create mode 100644 tar/appveyor.yml create mode 100644 tar/examples/extract_file.rs create mode 100644 tar/examples/list.rs create mode 100644 tar/examples/raw_list.rs create mode 100644 tar/examples/write.rs create mode 100644 tar/src/archive.rs create mode 100644 tar/src/builder.rs 
create mode 100644 tar/src/entry.rs create mode 100644 tar/src/entry_type.rs create mode 100644 tar/src/error.rs create mode 100644 tar/src/header.rs create mode 100644 tar/src/lib.rs create mode 100644 tar/src/pax.rs create mode 100644 tar/tests/all.rs create mode 100644 tar/tests/archives/directory.tar create mode 100644 tar/tests/archives/duplicate_dirs.tar create mode 100644 tar/tests/archives/empty_filename.tar create mode 100644 tar/tests/archives/file_times.tar create mode 100644 tar/tests/archives/link.tar create mode 100644 tar/tests/archives/pax.tar create mode 100644 tar/tests/archives/pax2.tar create mode 100644 tar/tests/archives/reading_files.tar create mode 100644 tar/tests/archives/simple.tar create mode 100755 tar/tests/archives/spaces.tar create mode 100644 tar/tests/archives/sparse.tar create mode 100644 tar/tests/archives/xattrs.tar create mode 100644 tar/tests/entry.rs create mode 100644 tar/tests/header/mod.rs create mode 100644 tempfile/.cargo-checksum.json create mode 100644 tempfile/Cargo.toml create mode 100644 tempfile/LICENSE-APACHE create mode 100644 tempfile/LICENSE-MIT create mode 100644 tempfile/NEWS create mode 100644 tempfile/README.md create mode 100644 tempfile/appveyor.yml create mode 100644 tempfile/src/dir.rs create mode 100644 tempfile/src/file/imp/mod.rs create mode 100644 tempfile/src/file/imp/other.rs create mode 100644 tempfile/src/file/imp/unix.rs create mode 100644 tempfile/src/file/imp/windows.rs create mode 100644 tempfile/src/file/mod.rs create mode 100644 tempfile/src/lib.rs create mode 100644 tempfile/src/util.rs create mode 100644 tempfile/tests/namedtempfile.rs create mode 100644 tempfile/tests/tempdir.rs create mode 100644 tempfile/tests/tempfile.rs create mode 100644 termcolor/.cargo-checksum.json create mode 100644 termcolor/COPYING create mode 100644 termcolor/Cargo.toml create mode 100644 termcolor/LICENSE-MIT create mode 100644 termcolor/README.md create mode 100644 termcolor/UNLICENSE create mode 100644 termcolor/src/lib.rs create mode 100644 termion/.cargo-checksum.json create mode 100644 termion/Cargo.toml create mode 100644 termion/LICENSE create mode 100644 termion/README.md create mode 100644 termion/examples/alternate_screen.rs create mode 100644 termion/examples/alternate_screen_raw.rs create mode 100644 termion/examples/async.rs create mode 100644 termion/examples/click.rs create mode 100644 termion/examples/color.rs create mode 100644 termion/examples/commie.rs create mode 100644 termion/examples/detect_color.rs create mode 100644 termion/examples/is_tty.rs create mode 100644 termion/examples/keys.rs create mode 100644 termion/examples/mouse.rs create mode 100644 termion/examples/rainbow.rs create mode 100644 termion/examples/read.rs create mode 100644 termion/examples/rustc_fun.rs create mode 100644 termion/examples/simple.rs create mode 100644 termion/examples/size.rs create mode 100644 termion/examples/truecolor.rs create mode 100644 termion/logo.svg create mode 100644 termion/src/async.rs create mode 100644 termion/src/clear.rs create mode 100644 termion/src/color.rs create mode 100644 termion/src/cursor.rs create mode 100644 termion/src/event.rs create mode 100644 termion/src/input.rs create mode 100644 termion/src/lib.rs create mode 100644 termion/src/macros.rs create mode 100644 termion/src/raw.rs create mode 100644 termion/src/screen.rs create mode 100644 termion/src/scroll.rs create mode 100644 termion/src/style.rs create mode 100644 termion/src/sys/redox/attr.rs create mode 100644 termion/src/sys/redox/mod.rs 
create mode 100644 termion/src/sys/redox/size.rs create mode 100644 termion/src/sys/redox/tty.rs create mode 100644 termion/src/sys/unix/attr.rs create mode 100644 termion/src/sys/unix/mod.rs create mode 100644 termion/src/sys/unix/size.rs create mode 100644 termion/src/sys/unix/tty.rs create mode 100644 textwrap/.cargo-checksum.json create mode 100644 textwrap/Cargo.toml create mode 100644 textwrap/LICENSE create mode 100644 textwrap/README.md create mode 100644 textwrap/benches/linear.rs create mode 100644 textwrap/examples/layout.rs create mode 100644 textwrap/examples/termwidth.rs create mode 100644 textwrap/src/lib.rs create mode 100644 textwrap/tests/version-numbers.rs create mode 100644 thread_local/.cargo-checksum.json create mode 100644 thread_local/Cargo.toml create mode 100644 thread_local/LICENSE-APACHE create mode 100644 thread_local/LICENSE-MIT create mode 100644 thread_local/README.md create mode 100644 thread_local/benches/thread_local.rs create mode 100644 thread_local/src/lib.rs create mode 100644 thread_local/src/thread_id.rs create mode 100644 thread_local/src/unreachable.rs create mode 100644 toml/.cargo-checksum.json create mode 100644 toml/Cargo.toml create mode 100644 toml/LICENSE-APACHE create mode 100644 toml/LICENSE-MIT create mode 100644 toml/README.md create mode 100644 toml/examples/decode.rs create mode 100644 toml/examples/toml2json.rs create mode 100644 toml/src/datetime.rs create mode 100644 toml/src/de.rs create mode 100644 toml/src/lib.rs create mode 100644 toml/src/macros.rs create mode 100644 toml/src/ser.rs create mode 100644 toml/src/spanned.rs create mode 100644 toml/src/tokens.rs create mode 100644 toml/src/value.rs create mode 100644 ucd-util/.cargo-checksum.json create mode 100644 ucd-util/Cargo.toml create mode 100644 ucd-util/LICENSE-APACHE create mode 100644 ucd-util/LICENSE-MIT create mode 100644 ucd-util/README.md create mode 100644 ucd-util/src/hangul.rs create mode 100644 ucd-util/src/ideograph.rs create mode 100644 ucd-util/src/lib.rs create mode 100644 ucd-util/src/name.rs create mode 100644 ucd-util/src/property.rs create mode 100644 ucd-util/src/unicode_tables/jamo_short_name.rs create mode 100644 ucd-util/src/unicode_tables/mod.rs create mode 100644 ucd-util/src/unicode_tables/property_names.rs create mode 100644 ucd-util/src/unicode_tables/property_values.rs create mode 100644 unicode-bidi/.cargo-checksum.json create mode 100644 unicode-bidi/.pc/.quilt_patches create mode 100644 unicode-bidi/.pc/.quilt_series create mode 100644 unicode-bidi/.pc/.version create mode 100644 unicode-bidi/.pc/applied-patches create mode 100644 unicode-bidi/.pc/no-flamegraphs.patch/.timestamp create mode 100644 unicode-bidi/.pc/no-flamegraphs.patch/Cargo.toml create mode 100644 unicode-bidi/AUTHORS create mode 100644 unicode-bidi/COPYRIGHT create mode 100644 unicode-bidi/Cargo.toml create mode 100644 unicode-bidi/LICENSE-APACHE create mode 100644 unicode-bidi/LICENSE-MIT create mode 100644 unicode-bidi/README.md create mode 100644 unicode-bidi/debian/patches/no-flamegraphs.patch create mode 100644 unicode-bidi/debian/patches/series create mode 100644 unicode-bidi/src/char_data/mod.rs create mode 100644 unicode-bidi/src/char_data/tables.rs create mode 100644 unicode-bidi/src/deprecated.rs create mode 100644 unicode-bidi/src/explicit.rs create mode 100644 unicode-bidi/src/format_chars.rs create mode 100644 unicode-bidi/src/implicit.rs create mode 100644 unicode-bidi/src/level.rs create mode 100644 unicode-bidi/src/lib.rs create mode 100644 
unicode-bidi/src/prepare.rs create mode 100644 unicode-normalization/.cargo-checksum.json create mode 100644 unicode-normalization/COPYRIGHT create mode 100644 unicode-normalization/Cargo.toml create mode 100644 unicode-normalization/LICENSE-APACHE create mode 100644 unicode-normalization/LICENSE-MIT create mode 100644 unicode-normalization/README.md create mode 100644 unicode-normalization/benches/bench.rs create mode 100644 unicode-normalization/scripts/unicode.py create mode 100644 unicode-normalization/src/decompose.rs create mode 100644 unicode-normalization/src/lib.rs create mode 100644 unicode-normalization/src/normalization_tests.rs create mode 100644 unicode-normalization/src/normalize.rs create mode 100644 unicode-normalization/src/quick_check.rs create mode 100644 unicode-normalization/src/recompose.rs create mode 100644 unicode-normalization/src/stream_safe.rs create mode 100644 unicode-normalization/src/tables.rs create mode 100644 unicode-normalization/src/test.rs create mode 100644 unicode-width/.cargo-checksum.json create mode 100644 unicode-width/COPYRIGHT create mode 100644 unicode-width/Cargo.toml create mode 100644 unicode-width/LICENSE-APACHE create mode 100644 unicode-width/LICENSE-MIT create mode 100644 unicode-width/README.md create mode 100755 unicode-width/scripts/unicode.py create mode 100644 unicode-width/src/lib.rs create mode 100644 unicode-width/src/tables.rs create mode 100644 unicode-width/src/tests.rs create mode 100644 unicode-xid/.cargo-checksum.json create mode 100644 unicode-xid/COPYRIGHT create mode 100644 unicode-xid/Cargo.toml create mode 100644 unicode-xid/LICENSE-APACHE create mode 100644 unicode-xid/LICENSE-MIT create mode 100644 unicode-xid/README.md create mode 100755 unicode-xid/scripts/unicode.py create mode 100644 unicode-xid/src/lib.rs create mode 100644 unicode-xid/src/tables.rs create mode 100644 unicode-xid/src/tests.rs create mode 100644 unreachable/.cargo-checksum.json create mode 100644 unreachable/Cargo.toml create mode 100644 unreachable/LICENSE-APACHE create mode 100644 unreachable/LICENSE-MIT create mode 100644 unreachable/README.md create mode 100644 unreachable/src/lib.rs create mode 100644 url/.cargo-checksum.json create mode 100644 url/Cargo.toml create mode 100644 url/LICENSE-APACHE create mode 100644 url/LICENSE-MIT create mode 100644 url/README.md create mode 100644 url/UPGRADING.md create mode 100644 url/appveyor.yml create mode 100644 url/benches/parse_url.rs create mode 100644 url/docs/404.html create mode 100644 url/docs/index.html create mode 100644 url/src/encoding.rs create mode 100644 url/src/form_urlencoded.rs create mode 100644 url/src/host.rs create mode 100644 url/src/lib.rs create mode 100644 url/src/origin.rs create mode 100644 url/src/parser.rs create mode 100644 url/src/path_segments.rs create mode 100644 url/src/quirks.rs create mode 100644 url/src/slicing.rs create mode 100644 url/tests/data.rs create mode 100644 url/tests/setters_tests.json create mode 100644 url/tests/unit.rs create mode 100644 url/tests/urltestdata.json create mode 100644 utf8-ranges/.cargo-checksum.json create mode 100644 utf8-ranges/COPYING create mode 100644 utf8-ranges/Cargo.toml create mode 100644 utf8-ranges/LICENSE-MIT create mode 100644 utf8-ranges/README.md create mode 100644 utf8-ranges/UNLICENSE create mode 100644 utf8-ranges/benches/bench.rs create mode 100644 utf8-ranges/src/char_utf8.rs create mode 100644 utf8-ranges/src/lib.rs create mode 100644 vcpkg/.cargo-checksum.json create mode 100644 vcpkg/Cargo.toml create mode 
100644 vcpkg/src/lib.rs create mode 100644 vec_map/.cargo-checksum.json create mode 100644 vec_map/Cargo.toml create mode 100644 vec_map/LICENSE-APACHE create mode 100644 vec_map/LICENSE-MIT create mode 100644 vec_map/README.md create mode 100644 vec_map/deploy-docs.sh create mode 100644 vec_map/src/lib.rs create mode 100644 version_check/.cargo-checksum.json create mode 100644 version_check/Cargo.toml create mode 100644 version_check/LICENSE-APACHE create mode 100644 version_check/LICENSE-MIT create mode 100644 version_check/README.md create mode 100644 version_check/src/lib.rs create mode 100644 void/.cargo-checksum.json create mode 100644 void/Cargo.toml create mode 100644 void/README.md create mode 100644 void/src/lib.rs create mode 100644 walkdir/.cargo-checksum.json create mode 100644 walkdir/COPYING create mode 100644 walkdir/Cargo.toml create mode 100644 walkdir/LICENSE-MIT create mode 100644 walkdir/README.md create mode 100644 walkdir/UNLICENSE create mode 100644 walkdir/compare/nftw.c create mode 100644 walkdir/compare/walk.py create mode 100644 walkdir/examples/walkdir.rs create mode 100644 walkdir/src/lib.rs create mode 100644 walkdir/src/tests.rs create mode 100644 walkdir/src/unix.rs create mode 100644 winapi-i686-pc-windows-gnu/.cargo-checksum.json create mode 100644 winapi-i686-pc-windows-gnu/Cargo.toml create mode 100644 winapi-i686-pc-windows-gnu/build.rs create mode 100644 winapi-i686-pc-windows-gnu/src/lib.rs create mode 100644 winapi-util/.cargo-checksum.json create mode 100644 winapi-util/COPYING create mode 100644 winapi-util/Cargo.toml create mode 100644 winapi-util/LICENSE-MIT create mode 100644 winapi-util/README.md create mode 100644 winapi-util/UNLICENSE create mode 100644 winapi-util/appveyor.yml create mode 100755 winapi-util/ci/script.sh create mode 100644 winapi-util/src/console.rs create mode 100644 winapi-util/src/file.rs create mode 100644 winapi-util/src/lib.rs create mode 100644 winapi-util/src/win.rs create mode 100644 winapi-x86_64-pc-windows-gnu/.cargo-checksum.json create mode 100644 winapi-x86_64-pc-windows-gnu/Cargo.toml create mode 100644 winapi-x86_64-pc-windows-gnu/build.rs create mode 100644 winapi-x86_64-pc-windows-gnu/src/lib.rs create mode 100644 winapi/.cargo-checksum.json create mode 100644 winapi/Cargo.toml create mode 100644 winapi/LICENSE-APACHE create mode 100644 winapi/LICENSE-MIT create mode 100644 winapi/README.md create mode 100644 winapi/build.rs create mode 100644 winapi/src/lib.rs create mode 100644 winapi/src/macros.rs create mode 100644 winapi/src/shared/basetsd.rs create mode 100644 winapi/src/shared/bcrypt.rs create mode 100644 winapi/src/shared/bugcodes.rs create mode 100644 winapi/src/shared/cderr.rs create mode 100644 winapi/src/shared/cfg.rs create mode 100644 winapi/src/shared/d3d9.rs create mode 100644 winapi/src/shared/d3d9caps.rs create mode 100644 winapi/src/shared/d3d9types.rs create mode 100644 winapi/src/shared/dcomptypes.rs create mode 100644 winapi/src/shared/devguid.rs create mode 100644 winapi/src/shared/devpkey.rs create mode 100644 winapi/src/shared/devpropdef.rs create mode 100644 winapi/src/shared/dinputd.rs create mode 100644 winapi/src/shared/dxgi.rs create mode 100644 winapi/src/shared/dxgi1_2.rs create mode 100644 winapi/src/shared/dxgi1_3.rs create mode 100644 winapi/src/shared/dxgi1_4.rs create mode 100644 winapi/src/shared/dxgi1_5.rs create mode 100644 winapi/src/shared/dxgi1_6.rs create mode 100644 winapi/src/shared/dxgiformat.rs create mode 100644 winapi/src/shared/dxgitype.rs create mode 
100644 winapi/src/shared/evntprov.rs create mode 100644 winapi/src/shared/evntrace.rs create mode 100644 winapi/src/shared/guiddef.rs create mode 100644 winapi/src/shared/hidclass.rs create mode 100644 winapi/src/shared/hidpi.rs create mode 100644 winapi/src/shared/hidsdi.rs create mode 100644 winapi/src/shared/hidusage.rs create mode 100644 winapi/src/shared/in6addr.rs create mode 100644 winapi/src/shared/inaddr.rs create mode 100644 winapi/src/shared/intsafe.rs create mode 100644 winapi/src/shared/ks.rs create mode 100644 winapi/src/shared/ksmedia.rs create mode 100644 winapi/src/shared/ktmtypes.rs create mode 100644 winapi/src/shared/lmcons.rs create mode 100644 winapi/src/shared/minwindef.rs create mode 100644 winapi/src/shared/mmreg.rs create mode 100644 winapi/src/shared/mod.rs create mode 100644 winapi/src/shared/mstcpip.rs create mode 100644 winapi/src/shared/ntddscsi.rs create mode 100644 winapi/src/shared/ntddser.rs create mode 100644 winapi/src/shared/ntdef.rs create mode 100644 winapi/src/shared/ntstatus.rs create mode 100644 winapi/src/shared/qos.rs create mode 100644 winapi/src/shared/rpc.rs create mode 100644 winapi/src/shared/rpcdce.rs create mode 100644 winapi/src/shared/rpcndr.rs create mode 100644 winapi/src/shared/sddl.rs create mode 100644 winapi/src/shared/sspi.rs create mode 100644 winapi/src/shared/stralign.rs create mode 100644 winapi/src/shared/transportsettingcommon.rs create mode 100644 winapi/src/shared/tvout.rs create mode 100644 winapi/src/shared/usb.rs create mode 100644 winapi/src/shared/usbiodef.rs create mode 100644 winapi/src/shared/usbspec.rs create mode 100644 winapi/src/shared/windef.rs create mode 100644 winapi/src/shared/windowsx.rs create mode 100644 winapi/src/shared/winerror.rs create mode 100644 winapi/src/shared/winusbio.rs create mode 100644 winapi/src/shared/wmistr.rs create mode 100644 winapi/src/shared/wnnc.rs create mode 100644 winapi/src/shared/ws2def.rs create mode 100644 winapi/src/shared/ws2ipdef.rs create mode 100644 winapi/src/shared/wtypes.rs create mode 100644 winapi/src/shared/wtypesbase.rs create mode 100644 winapi/src/um/accctrl.rs create mode 100644 winapi/src/um/aclapi.rs create mode 100644 winapi/src/um/appmgmt.rs create mode 100644 winapi/src/um/audioclient.rs create mode 100644 winapi/src/um/audiosessiontypes.rs create mode 100644 winapi/src/um/avrt.rs create mode 100644 winapi/src/um/bits.rs create mode 100644 winapi/src/um/bits10_1.rs create mode 100644 winapi/src/um/bits1_5.rs create mode 100644 winapi/src/um/bits2_0.rs create mode 100644 winapi/src/um/bits2_5.rs create mode 100644 winapi/src/um/bits3_0.rs create mode 100644 winapi/src/um/bits4_0.rs create mode 100644 winapi/src/um/bits5_0.rs create mode 100644 winapi/src/um/bitscfg.rs create mode 100644 winapi/src/um/bitsmsg.rs create mode 100644 winapi/src/um/cfgmgr32.rs create mode 100644 winapi/src/um/cguid.rs create mode 100644 winapi/src/um/combaseapi.rs create mode 100644 winapi/src/um/coml2api.rs create mode 100644 winapi/src/um/commapi.rs create mode 100644 winapi/src/um/commctrl.rs create mode 100644 winapi/src/um/commdlg.rs create mode 100644 winapi/src/um/commoncontrols.rs create mode 100644 winapi/src/um/consoleapi.rs create mode 100644 winapi/src/um/corsym.rs create mode 100644 winapi/src/um/d2d1.rs create mode 100644 winapi/src/um/d2d1_1.rs create mode 100644 winapi/src/um/d2d1_2.rs create mode 100644 winapi/src/um/d2d1_3.rs create mode 100644 winapi/src/um/d2d1effectauthor.rs create mode 100644 winapi/src/um/d2d1effects.rs create mode 100644 
winapi/src/um/d2d1effects_1.rs create mode 100644 winapi/src/um/d2d1effects_2.rs create mode 100644 winapi/src/um/d2d1svg.rs create mode 100644 winapi/src/um/d2dbasetypes.rs create mode 100644 winapi/src/um/d3d.rs create mode 100644 winapi/src/um/d3d10.rs create mode 100644 winapi/src/um/d3d10_1.rs create mode 100644 winapi/src/um/d3d10_1shader.rs create mode 100644 winapi/src/um/d3d10effect.rs create mode 100644 winapi/src/um/d3d10misc.rs create mode 100644 winapi/src/um/d3d10sdklayers.rs create mode 100644 winapi/src/um/d3d10shader.rs create mode 100644 winapi/src/um/d3d11.rs create mode 100644 winapi/src/um/d3d11_1.rs create mode 100644 winapi/src/um/d3d11_2.rs create mode 100644 winapi/src/um/d3d11_3.rs create mode 100644 winapi/src/um/d3d11_4.rs create mode 100644 winapi/src/um/d3d11on12.rs create mode 100644 winapi/src/um/d3d11sdklayers.rs create mode 100644 winapi/src/um/d3d11shader.rs create mode 100644 winapi/src/um/d3d11tokenizedprogramformat.rs create mode 100644 winapi/src/um/d3d12.rs create mode 100644 winapi/src/um/d3d12sdklayers.rs create mode 100644 winapi/src/um/d3d12shader.rs create mode 100644 winapi/src/um/d3dcommon.rs create mode 100644 winapi/src/um/d3dcompiler.rs create mode 100644 winapi/src/um/d3dcsx.rs create mode 100644 winapi/src/um/d3dx10core.rs create mode 100644 winapi/src/um/d3dx10math.rs create mode 100644 winapi/src/um/d3dx10mesh.rs create mode 100644 winapi/src/um/datetimeapi.rs create mode 100644 winapi/src/um/davclnt.rs create mode 100644 winapi/src/um/dbghelp.rs create mode 100644 winapi/src/um/dbt.rs create mode 100644 winapi/src/um/dcommon.rs create mode 100644 winapi/src/um/dcomp.rs create mode 100644 winapi/src/um/dcompanimation.rs create mode 100644 winapi/src/um/dde.rs create mode 100644 winapi/src/um/ddraw.rs create mode 100644 winapi/src/um/ddrawi.rs create mode 100644 winapi/src/um/ddrawint.rs create mode 100644 winapi/src/um/debugapi.rs create mode 100644 winapi/src/um/devicetopology.rs create mode 100644 winapi/src/um/dinput.rs create mode 100644 winapi/src/um/dmksctl.rs create mode 100644 winapi/src/um/dmusicc.rs create mode 100644 winapi/src/um/docobj.rs create mode 100644 winapi/src/um/documenttarget.rs create mode 100644 winapi/src/um/dpa_dsa.rs create mode 100644 winapi/src/um/dpapi.rs create mode 100644 winapi/src/um/dsgetdc.rs create mode 100644 winapi/src/um/dsound.rs create mode 100644 winapi/src/um/dsrole.rs create mode 100644 winapi/src/um/dvp.rs create mode 100644 winapi/src/um/dwmapi.rs create mode 100644 winapi/src/um/dwrite.rs create mode 100644 winapi/src/um/dwrite_1.rs create mode 100644 winapi/src/um/dwrite_2.rs create mode 100644 winapi/src/um/dwrite_3.rs create mode 100644 winapi/src/um/dxdiag.rs create mode 100644 winapi/src/um/dxfile.rs create mode 100644 winapi/src/um/dxgidebug.rs create mode 100644 winapi/src/um/dxva2api.rs create mode 100644 winapi/src/um/dxvahd.rs create mode 100644 winapi/src/um/endpointvolume.rs create mode 100644 winapi/src/um/errhandlingapi.rs create mode 100644 winapi/src/um/evntcons.rs create mode 100644 winapi/src/um/exdisp.rs create mode 100644 winapi/src/um/fibersapi.rs create mode 100644 winapi/src/um/fileapi.rs create mode 100644 winapi/src/um/gl/gl.rs create mode 100644 winapi/src/um/gl/mod.rs create mode 100644 winapi/src/um/handleapi.rs create mode 100644 winapi/src/um/heapapi.rs create mode 100644 winapi/src/um/highlevelmonitorconfigurationapi.rs create mode 100644 winapi/src/um/http.rs create mode 100644 winapi/src/um/imm.rs create mode 100644 winapi/src/um/interlockedapi.rs create 
mode 100644 winapi/src/um/ioapiset.rs create mode 100644 winapi/src/um/jobapi.rs create mode 100644 winapi/src/um/jobapi2.rs create mode 100644 winapi/src/um/knownfolders.rs create mode 100644 winapi/src/um/ktmw32.rs create mode 100644 winapi/src/um/libloaderapi.rs create mode 100644 winapi/src/um/lmaccess.rs create mode 100644 winapi/src/um/lmalert.rs create mode 100644 winapi/src/um/lmapibuf.rs create mode 100644 winapi/src/um/lmat.rs create mode 100644 winapi/src/um/lmdfs.rs create mode 100644 winapi/src/um/lmerrlog.rs create mode 100644 winapi/src/um/lmjoin.rs create mode 100644 winapi/src/um/lmmsg.rs create mode 100644 winapi/src/um/lmremutl.rs create mode 100644 winapi/src/um/lmrepl.rs create mode 100644 winapi/src/um/lmserver.rs create mode 100644 winapi/src/um/lmshare.rs create mode 100644 winapi/src/um/lmstats.rs create mode 100644 winapi/src/um/lmsvc.rs create mode 100644 winapi/src/um/lmuse.rs create mode 100644 winapi/src/um/lmwksta.rs create mode 100644 winapi/src/um/lowlevelmonitorconfigurationapi.rs create mode 100644 winapi/src/um/lsalookup.rs create mode 100644 winapi/src/um/memoryapi.rs create mode 100644 winapi/src/um/minschannel.rs create mode 100644 winapi/src/um/minwinbase.rs create mode 100644 winapi/src/um/mmdeviceapi.rs create mode 100644 winapi/src/um/mmeapi.rs create mode 100644 winapi/src/um/mmsystem.rs create mode 100644 winapi/src/um/mod.rs create mode 100644 winapi/src/um/msaatext.rs create mode 100644 winapi/src/um/mscat.rs create mode 100644 winapi/src/um/mschapp.rs create mode 100644 winapi/src/um/mssip.rs create mode 100644 winapi/src/um/namedpipeapi.rs create mode 100644 winapi/src/um/namespaceapi.rs create mode 100644 winapi/src/um/nb30.rs create mode 100644 winapi/src/um/ncrypt.rs create mode 100644 winapi/src/um/ntlsa.rs create mode 100644 winapi/src/um/ntsecapi.rs create mode 100644 winapi/src/um/oaidl.rs create mode 100644 winapi/src/um/objbase.rs create mode 100644 winapi/src/um/objidl.rs create mode 100644 winapi/src/um/objidlbase.rs create mode 100644 winapi/src/um/ocidl.rs create mode 100644 winapi/src/um/ole2.rs create mode 100644 winapi/src/um/oleauto.rs create mode 100644 winapi/src/um/olectl.rs create mode 100644 winapi/src/um/oleidl.rs create mode 100644 winapi/src/um/opmapi.rs create mode 100644 winapi/src/um/pdh.rs create mode 100644 winapi/src/um/perflib.rs create mode 100644 winapi/src/um/physicalmonitorenumerationapi.rs create mode 100644 winapi/src/um/playsoundapi.rs create mode 100644 winapi/src/um/powerbase.rs create mode 100644 winapi/src/um/powersetting.rs create mode 100644 winapi/src/um/powrprof.rs create mode 100644 winapi/src/um/processenv.rs create mode 100644 winapi/src/um/processsnapshot.rs create mode 100644 winapi/src/um/processthreadsapi.rs create mode 100644 winapi/src/um/processtopologyapi.rs create mode 100644 winapi/src/um/profileapi.rs create mode 100644 winapi/src/um/propidl.rs create mode 100644 winapi/src/um/propkeydef.rs create mode 100644 winapi/src/um/propsys.rs create mode 100644 winapi/src/um/prsht.rs create mode 100644 winapi/src/um/psapi.rs create mode 100644 winapi/src/um/realtimeapiset.rs create mode 100644 winapi/src/um/reason.rs create mode 100644 winapi/src/um/restartmanager.rs create mode 100644 winapi/src/um/restrictederrorinfo.rs create mode 100644 winapi/src/um/rmxfguid.rs create mode 100644 winapi/src/um/sapi.rs create mode 100644 winapi/src/um/sapi51.rs create mode 100644 winapi/src/um/sapi53.rs create mode 100644 winapi/src/um/sapiddk.rs create mode 100644 winapi/src/um/sapiddk51.rs create mode 
100644 winapi/src/um/schannel.rs create mode 100644 winapi/src/um/securityappcontainer.rs create mode 100644 winapi/src/um/securitybaseapi.rs create mode 100644 winapi/src/um/servprov.rs create mode 100644 winapi/src/um/setupapi.rs create mode 100644 winapi/src/um/shellapi.rs create mode 100644 winapi/src/um/shellscalingapi.rs create mode 100644 winapi/src/um/shlobj.rs create mode 100644 winapi/src/um/shobjidl.rs create mode 100644 winapi/src/um/shobjidl_core.rs create mode 100644 winapi/src/um/shtypes.rs create mode 100644 winapi/src/um/spapidef.rs create mode 100644 winapi/src/um/sporder.rs create mode 100644 winapi/src/um/sql.rs create mode 100644 winapi/src/um/sqlext.rs create mode 100644 winapi/src/um/sqltypes.rs create mode 100644 winapi/src/um/sqlucode.rs create mode 100644 winapi/src/um/sspi.rs create mode 100644 winapi/src/um/stringapiset.rs create mode 100644 winapi/src/um/strmif.rs create mode 100644 winapi/src/um/subauth.rs create mode 100644 winapi/src/um/synchapi.rs create mode 100644 winapi/src/um/sysinfoapi.rs create mode 100644 winapi/src/um/systemtopologyapi.rs create mode 100644 winapi/src/um/textstor.rs create mode 100644 winapi/src/um/threadpoolapiset.rs create mode 100644 winapi/src/um/threadpoollegacyapiset.rs create mode 100644 winapi/src/um/timeapi.rs create mode 100644 winapi/src/um/timezoneapi.rs create mode 100644 winapi/src/um/tlhelp32.rs create mode 100644 winapi/src/um/unknwnbase.rs create mode 100644 winapi/src/um/urlhist.rs create mode 100644 winapi/src/um/urlmon.rs create mode 100644 winapi/src/um/userenv.rs create mode 100644 winapi/src/um/usp10.rs create mode 100644 winapi/src/um/utilapiset.rs create mode 100644 winapi/src/um/uxtheme.rs create mode 100644 winapi/src/um/vsbackup.rs create mode 100644 winapi/src/um/vss.rs create mode 100644 winapi/src/um/vsserror.rs create mode 100644 winapi/src/um/vswriter.rs create mode 100644 winapi/src/um/wct.rs create mode 100644 winapi/src/um/werapi.rs create mode 100644 winapi/src/um/winbase.rs create mode 100644 winapi/src/um/wincodec.rs create mode 100644 winapi/src/um/wincodecsdk.rs create mode 100644 winapi/src/um/wincon.rs create mode 100644 winapi/src/um/wincred.rs create mode 100644 winapi/src/um/wincrypt.rs create mode 100644 winapi/src/um/windowsceip.rs create mode 100644 winapi/src/um/winefs.rs create mode 100644 winapi/src/um/winevt.rs create mode 100644 winapi/src/um/wingdi.rs create mode 100644 winapi/src/um/winhttp.rs create mode 100644 winapi/src/um/wininet.rs create mode 100644 winapi/src/um/winineti.rs create mode 100644 winapi/src/um/winioctl.rs create mode 100644 winapi/src/um/winnetwk.rs create mode 100644 winapi/src/um/winnls.rs create mode 100644 winapi/src/um/winnt.rs create mode 100644 winapi/src/um/winreg.rs create mode 100644 winapi/src/um/winsafer.rs create mode 100644 winapi/src/um/winscard.rs create mode 100644 winapi/src/um/winsmcrd.rs create mode 100644 winapi/src/um/winsock2.rs create mode 100644 winapi/src/um/winspool.rs create mode 100644 winapi/src/um/winsvc.rs create mode 100644 winapi/src/um/winusb.rs create mode 100644 winapi/src/um/winuser.rs create mode 100644 winapi/src/um/winver.rs create mode 100644 winapi/src/um/wow64apiset.rs create mode 100644 winapi/src/um/ws2spi.rs create mode 100644 winapi/src/um/ws2tcpip.rs create mode 100644 winapi/src/um/xinput.rs create mode 100644 winapi/src/vc/excpt.rs create mode 100644 winapi/src/vc/limits.rs create mode 100644 winapi/src/vc/mod.rs create mode 100644 winapi/src/vc/vadefs.rs create mode 100644 winapi/src/vc/vcruntime.rs create 
mode 100644 winapi/src/winrt/activation.rs create mode 100644 winapi/src/winrt/hstring.rs create mode 100644 winapi/src/winrt/inspectable.rs create mode 100644 winapi/src/winrt/mod.rs create mode 100644 winapi/src/winrt/roapi.rs create mode 100644 winapi/src/winrt/robuffer.rs create mode 100644 winapi/src/winrt/roerrorapi.rs create mode 100644 winapi/src/winrt/winstring.rs create mode 100644 wincolor/.cargo-checksum.json create mode 100644 wincolor/COPYING create mode 100644 wincolor/Cargo.toml create mode 100644 wincolor/LICENSE-MIT create mode 100644 wincolor/README.md create mode 100644 wincolor/UNLICENSE create mode 100644 wincolor/src/lib.rs create mode 100644 wincolor/src/win.rs diff --git a/aho-corasick/.cargo-checksum.json b/aho-corasick/.cargo-checksum.json new file mode 100644 index 000000000..e304a3dc0 --- /dev/null +++ b/aho-corasick/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{},"package":"1e9a933f4e58658d7b12defcf96dc5c720f20832deebe3e0a19efd3b6aaeeb9e"} \ No newline at end of file diff --git a/aho-corasick/COPYING b/aho-corasick/COPYING new file mode 100644 index 000000000..bb9c20a09 --- /dev/null +++ b/aho-corasick/COPYING @@ -0,0 +1,3 @@ +This project is dual-licensed under the Unlicense and MIT licenses. + +You may use this code under the terms of either license. diff --git a/aho-corasick/Cargo.toml b/aho-corasick/Cargo.toml new file mode 100644 index 000000000..155d48e83 --- /dev/null +++ b/aho-corasick/Cargo.toml @@ -0,0 +1,72 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "aho-corasick" +version = "0.6.9" +authors = ["Andrew Gallant "] +exclude = ["/benches/sherlock.txt", "/ci/*", "/.travis.yml", "/Makefile", "/ctags.rust", "/session.vim"] +description = "Fast multiple substring searching with finite state machines." 
+homepage = "https://github.com/BurntSushi/aho-corasick" +readme = "README.md" +keywords = ["string", "search", "text", "aho", "corasick"] +license = "Unlicense/MIT" +repository = "https://github.com/BurntSushi/aho-corasick" +[profile.test] +debug = true + +[profile.bench] +debug = true + +[profile.release] +debug = true + +[lib] +name = "aho_corasick" + +[[bin]] +name = "aho-corasick-dot" +path = "src/main.rs" +test = false +bench = false +doc = false + +[[bench]] +name = "bench" +path = "benches/bench.rs" +test = false +bench = true +[dependencies.memchr] +version = "2" +[dev-dependencies.csv] +version = "1" + +[dev-dependencies.docopt] +version = "1" + +[dev-dependencies.memmap] +version = "0.6" + +[dev-dependencies.quickcheck] +version = "0.7" +default-features = false + +[dev-dependencies.rand] +version = "0.5" + +[dev-dependencies.serde] +version = "1" + +[dev-dependencies.serde_derive] +version = "1" +[badges.travis-ci] +repository = "BurntSushi/aho-corasick" diff --git a/aho-corasick/LICENSE-MIT b/aho-corasick/LICENSE-MIT new file mode 100644 index 000000000..3b0a5dc09 --- /dev/null +++ b/aho-corasick/LICENSE-MIT @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015 Andrew Gallant + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/aho-corasick/README.md b/aho-corasick/README.md new file mode 100644 index 000000000..25e9ef917 --- /dev/null +++ b/aho-corasick/README.md @@ -0,0 +1,55 @@ +This crate provides an implementation of the +[Aho-Corasick](http://en.wikipedia.org/wiki/Aho%E2%80%93Corasick_string_matching_algorithm) +algorithm. Its intended use case is for fast substring matching, particularly +when matching multiple substrings in a search text. This is achieved by +compiling the substrings into a finite state machine. + +This implementation provides optimal algorithmic time complexity. Construction +of the finite state machine is `O(p)` where `p` is the length of the substrings +concatenated. Matching against search text is `O(n + p + m)`, where `n` is +the length of the search text and `m` is the number of matches. + +[![Build status](https://api.travis-ci.org/BurntSushi/aho-corasick.png)](https://travis-ci.org/BurntSushi/aho-corasick) +[![](http://meritbadge.herokuapp.com/aho-corasick)](https://crates.io/crates/aho-corasick) + +Dual-licensed under MIT or the [UNLICENSE](http://unlicense.org). + + +### Documentation + +[https://docs.rs/aho-corasick/](https://docs.rs/aho-corasick/). 
+ + +### Example + +The documentation contains several examples, and there is a more complete +example as a full program in `examples/dict-search.rs`. + +Here is a quick example showing simple substring matching: + +```rust +use aho_corasick::{Automaton, AcAutomaton, Match}; + +let aut = AcAutomaton::new(vec!["apple", "maple"]); +let mut it = aut.find("I like maple apples."); +assert_eq!(it.next(), Some(Match { + pati: 1, + start: 7, + end: 12, +})); +assert_eq!(it.next(), Some(Match { + pati: 0, + start: 13, + end: 18, +})); +assert_eq!(it.next(), None); +``` + + +### Alternatives + +Aho-Corasick is useful for matching multiple substrings against many long +strings. If your long string is fixed, then you might consider building a +[suffix array](https://github.com/BurntSushi/suffix) +of the search text (which takes `O(n)` time). Matches can then be found in +`O(plogn)` time. diff --git a/aho-corasick/UNLICENSE b/aho-corasick/UNLICENSE new file mode 100644 index 000000000..68a49daad --- /dev/null +++ b/aho-corasick/UNLICENSE @@ -0,0 +1,24 @@ +This is free and unencumbered software released into the public domain. + +Anyone is free to copy, modify, publish, use, compile, sell, or +distribute this software, either in source code form or as a compiled +binary, for any purpose, commercial or non-commercial, and by any +means. + +In jurisdictions that recognize copyright laws, the author or authors +of this software dedicate any and all copyright interest in the +software to the public domain. We make this dedication for the benefit +of the public at large and to the detriment of our heirs and +successors. We intend this dedication to be an overt act of +relinquishment in perpetuity of all present and future rights to this +software under copyright law. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR +OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. + +For more information, please refer to diff --git a/aho-corasick/benches/bench.rs b/aho-corasick/benches/bench.rs new file mode 100644 index 000000000..0e68e2af0 --- /dev/null +++ b/aho-corasick/benches/bench.rs @@ -0,0 +1,373 @@ +#![feature(test)] + +extern crate aho_corasick; +extern crate test; + +use std::iter; + +use aho_corasick::{Automaton, AcAutomaton, Transitions}; +use test::Bencher; + +const HAYSTACK_RANDOM: &'static str = include_str!("random.txt"); +const HAYSTACK_SHERLOCK: &'static str = include_str!("sherlock.txt"); + +fn bench_aut_no_match, T: Transitions>( + b: &mut Bencher, + aut: AcAutomaton, + haystack: &str, +) { + b.bytes = haystack.len() as u64; + b.iter(|| assert!(aut.find(haystack).next().is_none())); +} + +fn bench_box_aut_no_match, T: Transitions>( + b: &mut Bencher, + aut: AcAutomaton, + haystack: &str, +) { + b.bytes = haystack.len() as u64; + let aut: &Automaton
<P>
= &aut; + b.iter(|| assert!(Automaton::find(&aut, haystack).next().is_none())); +} + +fn bench_full_aut_no_match, T: Transitions>( + b: &mut Bencher, + aut: AcAutomaton, + haystack: &str, +) { + let aut = aut.into_full(); + b.bytes = haystack.len() as u64; + b.iter(|| assert!(aut.find(haystack).next().is_none())); +} + +fn bench_full_aut_overlapping_no_match, T: Transitions>( + b: &mut Bencher, + aut: AcAutomaton, + haystack: &str, +) { + let aut = aut.into_full(); + b.bytes = haystack.len() as u64; + b.iter(|| assert!(aut.find_overlapping(haystack).count() == 0)); +} + +fn bench_naive_no_match(b: &mut Bencher, needles: Vec, haystack: &str) + where S: Into { + b.bytes = haystack.len() as u64; + let needles: Vec = needles.into_iter().map(Into::into).collect(); + b.iter(|| assert!(!naive_find(&needles, haystack))); +} + +#[bench] +fn bench_construction(b: &mut Bencher) { + b.iter(|| { + AcAutomaton::new(test::black_box( + [ + "ADL", "ADl", "AdL", "Adl", "BAK", "BAk", "BAK", "BaK", "Bak", "BaK", "HOL", + "HOl", "HoL", "Hol", "IRE", "IRe", "IrE", "Ire", "JOH", "JOh", "JoH", "Joh", "SHE", + "SHe", "ShE", "She", "WAT", "WAt", "WaT", "Wat", "aDL", "aDl", "adL", "adl", "bAK", + "bAk", "bAK", "baK", "bak", "baK", "hOL", "hOl", "hoL", "hol", "iRE", "iRe", + "irE", "ire", "jOH", "jOh", "joH", "joh", "sHE", "sHe", "shE", "she", "wAT", "wAt", + "waT", "wat", "ſHE", "ſHe", "ſhE", "ſhe", + ].iter() + .map(|x| *x), + )) + }) +} + +#[bench] +fn bench_full_construction(b: &mut Bencher) { + b.iter(|| { + AcAutomaton::new(test::black_box( + [ + "ADL", "ADl", "AdL", "Adl", "BAK", "BAk", "BAK", "BaK", "Bak", "BaK", "HOL", + "HOl", "HoL", "Hol", "IRE", "IRe", "IrE", "Ire", "JOH", "JOh", "JoH", "Joh", "SHE", + "SHe", "ShE", "She", "WAT", "WAt", "WaT", "Wat", "aDL", "aDl", "adL", "adl", "bAK", + "bAk", "bAK", "baK", "bak", "baK", "hOL", "hOl", "hoL", "hol", "iRE", "iRe", + "irE", "ire", "jOH", "jOh", "joH", "joh", "sHE", "sHe", "shE", "she", "wAT", "wAt", + "waT", "wat", "ſHE", "ſHe", "ſhE", "ſhe", + ].iter() + .map(|x| *x), + )).into_full() + }) +} + +fn haystack_same(letter: char) -> String { + iter::repeat(letter).take(10000).collect() +} + +macro_rules! aut_benches { + ($prefix:ident, $aut:expr, $bench:expr) => { + mod $prefix { +#![allow(unused_imports)] +use aho_corasick::{Automaton, AcAutomaton, Sparse}; +use test::Bencher; + +use super::{ + HAYSTACK_RANDOM, haystack_same, + bench_aut_no_match, bench_box_aut_no_match, + bench_full_aut_no_match, bench_full_aut_overlapping_no_match, +}; + +#[bench] +fn ac_one_byte(b: &mut Bencher) { + let aut = $aut(vec!["a"]); + $bench(b, aut, &haystack_same('z')); +} + +#[bench] +fn ac_one_prefix_byte_no_match(b: &mut Bencher) { + let aut = $aut(vec!["zbc"]); + $bench(b, aut, &haystack_same('y')); +} + +#[bench] +fn ac_one_prefix_byte_every_match(b: &mut Bencher) { + // We lose the benefit of `memchr` because the first byte matches + // in every position in the haystack. 
+ let aut = $aut(vec!["zbc"]); + $bench(b, aut, &haystack_same('z')); +} + +#[bench] +fn ac_one_prefix_byte_random(b: &mut Bencher) { + let aut = $aut(vec!["zbc\x00"]); + $bench(b, aut, HAYSTACK_RANDOM); +} + +#[bench] +fn ac_two_bytes(b: &mut Bencher) { + let aut = $aut(vec!["a", "b"]); + $bench(b, aut, &haystack_same('z')); +} + +#[bench] +fn ac_two_diff_prefix(b: &mut Bencher) { + let aut = $aut(vec!["abcdef", "bmnopq"]); + $bench(b, aut, &haystack_same('z')); +} + +#[bench] +fn ac_two_one_prefix_byte_every_match(b: &mut Bencher) { + let aut = $aut(vec!["zbcdef", "zmnopq"]); + $bench(b, aut, &haystack_same('z')); +} + +#[bench] +fn ac_two_one_prefix_byte_no_match(b: &mut Bencher) { + let aut = $aut(vec!["zbcdef", "zmnopq"]); + $bench(b, aut, &haystack_same('y')); +} + +#[bench] +fn ac_two_one_prefix_byte_random(b: &mut Bencher) { + let aut = $aut(vec!["zbcdef\x00", "zmnopq\x00"]); + $bench(b, aut, HAYSTACK_RANDOM); +} + +#[bench] +fn ac_ten_bytes(b: &mut Bencher) { + let aut = $aut(vec!["a", "b", "c", "d", "e", + "f", "g", "h", "i", "j"]); + $bench(b, aut, &haystack_same('z')); +} + +#[bench] +fn ac_ten_diff_prefix(b: &mut Bencher) { + let aut = $aut(vec!["abcdef", "bbcdef", "cbcdef", "dbcdef", + "ebcdef", "fbcdef", "gbcdef", "hbcdef", + "ibcdef", "jbcdef"]); + $bench(b, aut, &haystack_same('z')); +} + +#[bench] +fn ac_ten_one_prefix_byte_every_match(b: &mut Bencher) { + let aut = $aut(vec!["zacdef", "zbcdef", "zccdef", "zdcdef", + "zecdef", "zfcdef", "zgcdef", "zhcdef", + "zicdef", "zjcdef"]); + $bench(b, aut, &haystack_same('z')); +} + +#[bench] +fn ac_ten_one_prefix_byte_no_match(b: &mut Bencher) { + let aut = $aut(vec!["zacdef", "zbcdef", "zccdef", "zdcdef", + "zecdef", "zfcdef", "zgcdef", "zhcdef", + "zicdef", "zjcdef"]); + $bench(b, aut, &haystack_same('y')); +} + +#[bench] +fn ac_ten_one_prefix_byte_random(b: &mut Bencher) { + let aut = $aut(vec!["zacdef\x00", "zbcdef\x00", "zccdef\x00", + "zdcdef\x00", "zecdef\x00", "zfcdef\x00", + "zgcdef\x00", "zhcdef\x00", "zicdef\x00", + "zjcdef\x00"]); + $bench(b, aut, HAYSTACK_RANDOM); +} + } + } +} + +aut_benches!(dense, AcAutomaton::new, bench_aut_no_match); +aut_benches!(dense_boxed, AcAutomaton::new, bench_box_aut_no_match); +aut_benches!(sparse, AcAutomaton::<&str, Sparse>::with_transitions, + bench_aut_no_match); +aut_benches!(full, AcAutomaton::new, bench_full_aut_no_match); +aut_benches!(full_overlap, AcAutomaton::new, bench_full_aut_overlapping_no_match); + +// A naive multi-pattern search. +// We use this to benchmark *throughput*, so it should never match anything. +fn naive_find(needles: &[String], haystack: &str) -> bool { + for hi in 0..haystack.len() { + let rest = &haystack.as_bytes()[hi..]; + for needle in needles { + let needle = needle.as_bytes(); + if needle.len() > rest.len() { + continue; + } + if needle == &rest[..needle.len()] { + // should never happen in throughput benchmarks. 
+ return true; + } + } + } + false +} + +#[bench] +fn naive_one_byte(b: &mut Bencher) { + bench_naive_no_match(b, vec!["a"], &haystack_same('z')); +} + +#[bench] +fn naive_one_prefix_byte_no_match(b: &mut Bencher) { + bench_naive_no_match(b, vec!["zbc"], &haystack_same('y')); +} + +#[bench] +fn naive_one_prefix_byte_every_match(b: &mut Bencher) { + bench_naive_no_match(b, vec!["zbc"], &haystack_same('z')); +} + +#[bench] +fn naive_one_prefix_byte_random(b: &mut Bencher) { + bench_naive_no_match(b, vec!["zbc\x00"], HAYSTACK_RANDOM); +} + +#[bench] +fn naive_two_bytes(b: &mut Bencher) { + bench_naive_no_match(b, vec!["a", "b"], &haystack_same('z')); +} + +#[bench] +fn naive_two_diff_prefix(b: &mut Bencher) { + bench_naive_no_match(b, vec!["abcdef", "bmnopq"], &haystack_same('z')); +} + +#[bench] +fn naive_two_one_prefix_byte_every_match(b: &mut Bencher) { + bench_naive_no_match(b, vec!["zbcdef", "zmnopq"], &haystack_same('z')); +} + +#[bench] +fn naive_two_one_prefix_byte_no_match(b: &mut Bencher) { + bench_naive_no_match(b, vec!["zbcdef", "zmnopq"], &haystack_same('y')); +} + +#[bench] +fn naive_two_one_prefix_byte_random(b: &mut Bencher) { + bench_naive_no_match(b, vec!["zbcdef\x00", "zmnopq\x00"], HAYSTACK_RANDOM); +} + +#[bench] +fn naive_ten_bytes(b: &mut Bencher) { + let needles = vec!["a", "b", "c", "d", "e", + "f", "g", "h", "i", "j"]; + bench_naive_no_match(b, needles, &haystack_same('z')); +} + +#[bench] +fn naive_ten_diff_prefix(b: &mut Bencher) { + let needles = vec!["abcdef", "bbcdef", "cbcdef", "dbcdef", + "ebcdef", "fbcdef", "gbcdef", "hbcdef", + "ibcdef", "jbcdef"]; + bench_naive_no_match(b, needles, &haystack_same('z')); +} + +#[bench] +fn naive_ten_one_prefix_byte_every_match(b: &mut Bencher) { + let needles = vec!["zacdef", "zbcdef", "zccdef", "zdcdef", + "zecdef", "zfcdef", "zgcdef", "zhcdef", + "zicdef", "zjcdef"]; + bench_naive_no_match(b, needles, &haystack_same('z')); +} + +#[bench] +fn naive_ten_one_prefix_byte_no_match(b: &mut Bencher) { + let needles = vec!["zacdef", "zbcdef", "zccdef", "zdcdef", + "zecdef", "zfcdef", "zgcdef", "zhcdef", + "zicdef", "zjcdef"]; + bench_naive_no_match(b, needles, &haystack_same('y')); +} + +#[bench] +fn naive_ten_one_prefix_byte_random(b: &mut Bencher) { + let needles = vec!["zacdef\x00", "zbcdef\x00", "zccdef\x00", + "zdcdef\x00", "zecdef\x00", "zfcdef\x00", + "zgcdef\x00", "zhcdef\x00", "zicdef\x00", + "zjcdef\x00"]; + bench_naive_no_match(b, needles, HAYSTACK_RANDOM); +} + + +// The organization above is just awful. Let's start over... + +mod sherlock { + use aho_corasick::{Automaton, AcAutomaton}; + use test::Bencher; + use super::HAYSTACK_SHERLOCK; + + macro_rules! 
sherlock { + ($name:ident, $count:expr, $pats:expr) => { + #[bench] + fn $name(b: &mut Bencher) { + let haystack = HAYSTACK_SHERLOCK; + let aut = AcAutomaton::new($pats).into_full(); + b.bytes = haystack.len() as u64; + b.iter(|| assert_eq!($count, aut.find(haystack).count())); + } + } + } + + sherlock!(name_alt1, 158, vec!["Sherlock", "Street"]); + + sherlock!(name_alt2, 558, vec!["Sherlock", "Holmes"]); + + sherlock!(name_alt3, 740, vec![ + "Sherlock", "Holmes", "Watson", "Irene", "Adler", "John", "Baker", + ]); + + sherlock!(name_alt3_nocase, 1764, vec![ + "ADL", "ADl", "AdL", "Adl", "BAK", "BAk", "BAK", "BaK", "Bak", "BaK", + "HOL", "HOl", "HoL", "Hol", "IRE", "IRe", "IrE", "Ire", "JOH", "JOh", + "JoH", "Joh", "SHE", "SHe", "ShE", "She", "WAT", "WAt", "WaT", "Wat", + "aDL", "aDl", "adL", "adl", "bAK", "bAk", "bAK", "baK", "bak", "baK", + "hOL", "hOl", "hoL", "hol", "iRE", "iRe", "irE", "ire", "jOH", "jOh", + "joH", "joh", "sHE", "sHe", "shE", "she", "wAT", "wAt", "waT", "wat", + "ſHE", "ſHe", "ſhE", "ſhe", + ]); + + sherlock!(name_alt4, 582, vec!["Sher", "Hol"]); + + sherlock!(name_alt4_nocase, 1307, vec![ + "HOL", "HOl", "HoL", "Hol", "SHE", "SHe", "ShE", "She", "hOL", "hOl", + "hoL", "hol", "sHE", "sHe", "shE", "she", "ſHE", "ſHe", "ſhE", "ſhe", + ]); + + sherlock!(name_alt5, 639, vec!["Sherlock", "Holmes", "Watson"]); + + sherlock!(name_alt5_nocase, 1442, vec![ + "HOL", "HOl", "HoL", "Hol", "SHE", "SHe", "ShE", "She", "WAT", "WAt", + "WaT", "Wat", "hOL", "hOl", "hoL", "hol", "sHE", "sHe", "shE", "she", + "wAT", "wAt", "waT", "wat", "ſHE", "ſHe", "ſhE", "ſhe", + ]); +} diff --git a/aho-corasick/benches/random.txt b/aho-corasick/benches/random.txt new file mode 100644 index 000000000..dfae5cd14 --- /dev/null +++ b/aho-corasick/benches/random.txt @@ -0,0 +1,513 @@ + +mnxnsynfvuugtbxsxbfxwreuspglnplefzwsp +tacfqcwnmodnmgnyiuvqoco +z + +qjuozfkexn +zoaxzncje +sldhqtmgxzyurfyzwazmmu +bbeuv +mzsrihycwcb +xzfqozfmlnpmrzpxxxytqs +xrg +mcplby +nmslhfgjowhzfxsvyddydnsyehdskbydbjksqtpet +indvfw +bvjvvw + +pddufodyqtyixbndtumndyz +xjjhtuvmsxhuwqulqtjhqrdqrmtbcphvyuqllocrnkpfv +zemshhz +wss +xewlrxfmgxnwgphcgefa +mbgsgbzrtthxweimcqzcaaheurdmd +osqefupespvh +z +tvvlakwzwjbrgjzfgubsmmonav +pjdskxcfgapsm +zqktqgkrcdrlskx +zwwfebhguskho +zlvvw +czwm +gojnpmboehlsazbexjjnuscqftrfufngygjdxcydib +d +afigycivicnknfxl +ljuwuopctiftfwctxecwipjnljyef +jonwbkodomzhqvlf +jdkizhognqsdogunwedjsmsdzho +zxvni +oynfjf +muvokjuqz +azuwrwtuxzfopwrcex +ixrjinlvxjmn +blaegnmbhsgsbmebwazaeguugtkowexgnqtbfkldadddv +tzabyoftyov +ctbtqbzscxzviuvcigwuwusrdro +ljynr +gnnnyyxslrhsbj +hhzlw +hijalf +rxlfqk +mhaofforwznvmcgplinludpgkucpa +gvvxsqqfmu +xxqhoyosixjfhjuxpv +faadjpvamjekreepizurntvwdynozfawsfawyms + +lcbutr +aqyxvpozkjrecrkl +lfmochahrr +ptqyomjlwo +vcmslulznx +lmlsskcihrmxauztuarydlp +beiqsrfnmvmlmybmwpektjbikvpggthpabqsgmjhnthvysuhwbigillugjsp +dfsuegseffwcsnvsrqedytblbpzbfeyfsq +kypvqctrkuds +ylqeduokzgdqaxelhftxnxbidu +bprzyayfopxdsmfhhfqowa +ymiutdtlfaaxpbtaeslv +ggago + +owpbicekdeykzfgcbgzobdvvrtetvcv +xsrlgingstiez +gyncqvq +xasohmeiwyscpehctmzmsnjklg +xsudghakxlw +dzqlfptjogzpkvwuticcyugnyopypuqqc +wlxshxbhdvuherumoppcc + +znyaptivzncvkpeyeipynqefjxjjcsgfqbnezeebtowdrbjaqjlbxwvyikrmxjwoxngqgvfpbniftnmszuxg +umwpwwyvufy +pallkjtnrmtauqxauewgygwkjjwebbkabhtxticxmxfujpxlrpzlrozfslkzfdsswlmmsbdgjwmjnummk +dhsxylejzityahtqqzmohrpzjprrsraztpnuagtyzfjdekthvdogfidksrdppr +ybc +fyukknoqfnkllkwflwempjijxgo +dltvlau +rhvrvlwsribfctuzodfqkdczfzxnetqqzflnhiyl +goxmcasmq +wljbhwkpahdotqhhrbhqzijv 
+lszewkgdmkezvgmbmllhpksdkoiwgkvqjmurshrptlctqsosuurndcuzjfwherotv +dudxxihygxblhgchbgzyzffb +eht +fvwxvqoltdcsd +rkuig +e +axhsacsmnicugul +rubtdlhjqndxdzzwfnkuzy +swxteuyxxsktkjgv +hzwwodlqaq +vxgecev +qnwla +vdxjuzpyoqhpmuunyffptopmeauhycs +dkzo +awrfzatzohslgvqlaezepmli +qgxatixvpkkhvkumbwmwcagtgyfljdok +amdnzstpvcqj +xsrvwvhjirzfgkessve +qezwbfltfbikbmoasvoflozsjhrljnszqiciuqmflrlqowwkoevuumh +babskcvavmtvsxqsewirucwzajjcfcqwsydydqo +ywfurpsl +edacsjjkjjewkxfoh +dcgkfpcjezurnuhiatrczcp +xsatnimwbcciu +grzmbrsvvcyigcbmcqfwiiknrohveubhyijxeyzfm +kqyewccgcqrrrznwxmoztlyseagbpyho +najju +nis +awgzdvfjkzlrsjcqfeacx +oisuflfigrjaex +desbdulyuwqxuxianyypybxwlql +ekmqgspvqpftpwswayh +egbyj +fznzprhvnnwcxgcc +wfdsueieosmugirxbymbpmfrspvrktjzguxm +qkjrufshwnfwwpbhukdjlaqvljlgubmqmhnha +hwqpudgnblhlxppbrmbznotteivuzguuwlhtkytky +w +yofkyzbpg +cenolnfnllkvhikrpttcxgqxmufvorekjruyjxmr + +hyexmpjijgzumawp +cdbevdilgopbzlo +fivelagckslkugdxprjxkylizewcptwxfhomzuituujixchadmnjoktnqa +csojvlinzmmkkfzqueamnuwkanzdzsavgohposbuoamoevehqrmcxdsuyelvvctoejzoertqormhaaxwofvjzekwt +sbkghhnhutrvwtyjaxndzyjamrhx +jjyqy +majwbnrhveuhrsbbbjrwpwuplifeseylqh +wyvutpxnkrnkuxxetjkkifpqb +dyzucmbcvgnjeecm +hz +uhnuipthxrzkqluosvk +lwqqzsdwiwvwaqfwlvubadlyizlo +jbd +oyzjeu +kydjkbsqxnbfiuesc +smeubjqrcxdvhsabzceyglqjzbfmoacmwvwjbhhxbr +uabipgecujfdfxpmdzrscdyvefizabgspqjrrkmgjt +xgvdgzryz +lw +uimob +ifhn +bqph +ole +g +wt +k +yslzrkwkundxfdibwqvucemepqxlmlpyngabbeciuzhptpjdetyngrtxrdtzmvq +ccwapidp + +bwvrgvmtshevrophy +ni +fdkplu +mdykey +i +rhsrenoetdggpjb +djmkplpeabsholx +judxtub +fooakqwvocvpcrvxqhvtmpvhkrecy +uuxscjillynilbkrgt +evtinrmilniguarqritpeipwochmdw +sxaqzjybydyvnmmjtdcgkjnqfcklbfpkdfyewgcukqoiegyfp +kg +ovrwieqhy +jcxqtkerzjwhs +xeonglszbgypafhmqcaseimzjgebkvigbqwsayrnrprtuvhsxyitfqygohgorcdnufbcyvevvgzmjrgjqqquwkszplogx +zdketqqv +yebckucwayckeezfvtnavglpjh +zorkfrwk +pad +xqaquxudybwtgixbfktinctfirjfdayh +rieknj +ebk +qzbcfywfdmhsdruhopovemafijbscagllkmhmof + +asbsnbddlobwoqatfhkbhhsymzqxjuixwreheugvngmgcuqpkjhhfwpbarqaxrwgwnjbanljlds +etevdvlc +lqyjrnmenhn +k +tsf +zczgeavcexh +jlpuxywtsrvnvluruqhecjca +ir +rikrgkmhwaosodkxgcnrexfmdrszhnmutpvwztg +bffjqovvkemctnsgeh +weysbhzixiipfithjfsk +usyzvaiyuhmksfluoirfbnsu +o +cgawpdakaszeafdtbdkqtlzkrpnoqomqvuaqcfmzgvfegovtfaonelpv +izmrcjlk +xmzemniyrzy +knqexaafsdlimdamcrprlshq +qkmqw +dntgjwsibclvposdwjuklvtejjjdjibgpyynqpgprvvaetshhmvfkcpb +otvazkrkklrxfotpopyjte +fghkcnpi +rulyaihsowvcgbzeiblhuhhfbmncqsuuqcxvseorn +exirzfmojnxcoqom +zsgpgtokun +zvamxfocorganbtlafifwdqmqtsnktbwwtewborq + +cxlnaspjqvsitjyzyriqsuorjsrvzqenisprttudxntsbqrpjtdkxnwcwgjyxmgtqljcrmrbrmyvosojzlumcmjcgfjsdehec +mvx +mt +mckr +teulvroifk +laaicc +koufy +bexmwsvyarnznebdfy +ripvviosbqijsxnjilwddaqaqemzsdarnxmfooxghoypizwtbueo +ljycycuqwfnzbambibqdixmkkvwtubepla +cis +kcg +vmbbiuuoamenzepuagpfujevfstqtndjxjchdvycfrrrowochtjdmkklgnhf +pmorrwguxkvdxpluatagaziin + +uwvzbmkmykjkmknzppklx +pnzxuvsrjunqxercsnvayhykcazdeclomdsasgkpqpiufyfqsxhj +yceizkddwojgweegcllaagpvrpo +ek +kuxxgbezqyxvfaxdwnqdgqsmneijunxzlwxkrs +ldldbrxmvtjlqxifngmactzqcygkvuteffcmvphevilabgukatqakamjlridznodcvblvlogulmcixxfimh +iuzjootuywjqklolzzhpeaynydjwtufjavbozxnzckuzdodkvkjfmhinelv +swlfkcufscfcovmghqwcrtxjukwafoeogrkgubbqgwzm +gjcylkwgzroubdssuqeykqjcmguso +fzq +srfvysoxtlylctp + +pbfeiuzwoyixews +ocvvunfsjnrtklmuuzjojw +xdjcnrpqhmpmpcwacpcdtmbsczvhllkqapzjuaf +nfnuvjz +fwnuiyqpn +wshxxxpzzxp +hibrxcfeqca + +wqhlllarl +bukcbojv +plrytapy +xm +vlgfqoyzdczqbbaxjwbjjevjhxgopuqvqcrj +vpjqfbdnsdxlbuuiqocvrhap 
+mgumjbvnnzgnrdru +gcgzugazxdcamrhczfzhtmdjj +uislwq +vooai +zjuqfmebuzsqngzekyajujkopvayxtdzvugwwucvlsbrnhitfotmhhmgddlzlvqrkcponictrfweuilfjiuoabkfdvpjiqjrrgi +aptjfhmrnxaq +hbs +w +mwmoxqvucwygunplzvxtxpk +fgmqmtlorfzytjdzffsosfccnfwugrsrynuej +rpmpenrhsxoefnblyumjqwvuyszyppnttuyvazjdug +zdzxraxkroknkmqgvuoqeqdtvclsvvuwmdwzfugcpteohlogxubyoebvrzbqzklvehfcqadtdrkpubfhmokzwyosogepwragcpwxo +ax +dz +de + +thvkdmnbdws + +ejmubw +umvwkaubzurf +wyxtxeluaoox +wwbioobtgmkebxo +miglgnafmdarzkeblyjctuayzyoeqnfnbtrcbymdzkzg +loavxq +kzhllgsenxlbgdbfzwbg +yxflogzsohlcycbyzegeubfflouvtuatixhjvicjegltjiy +jigqfjppafdiarc +mcnmwtachgearonfcymvjbrnljjxmlzkudvzqsarnfysmxlfrtlvjxwvpdbhvwysnvcdozfcruhjwnucdzakkilmlfgjiolcatpfusm + +n +pdjunfcz +dc +edxkkxabsbvmvifiinnoccki +bc +gwtwsvorwzfqpz +exidmexstfflkhi +s +s +c +wtcjfywlayhpbqktcepoybowtkrmnumqsg +ozclkgjdmdk +jmegtbunyexurvfexhqptnqzie +tkoenpagzwqfawlxvzaijsjqhmg +swodqfjpdqcbkc +ujokogocyaygdibgpglecis +shlmdmgonvpuaxlhrymkxtiytmv +brhk +jmsyiuomiywxhegilycjprkyfgojdo + +wzdzrgpdiosdsvkcw +odlnmsfnjrcsnflviwvawybpczdkzvdocpwrmavz +p +ubowamlskcqhdxuckrxa +fawhntiwhmdwkddnahmtajqqazpdygttqivhdiodkcpcwv +gmxujmmaufmbipaiulhurzkfdg +eixjhmbaeoybiwk +kumntgrgiofcmujlzbcopuobambsw +mnjkqiyb +iktwnsnv +hfuzcl +tqiyqvagbqgtowpjbedgjot +dfemvamelxadkztogliizdtsddoboafawficudlefo +raecmxiiibljryswntpfed +mbwrtsebkeegw +x +epp +he + +vnztrswhiusokqdkmsnpuswucvfhcthjbtam +baxlwidsgbdpzvnlj +tcbjjoadrzo +aiidahyllzzsg + +igebuubweicbssgddpmqxunrawavuglmpxrtkqsvjjtscibqiejjfgfnovokodmqcqitlteiakooupvzkwucucrfdzjvjbqbkgutoybmpfvhbutigdxhfiqfplyciz +cnrhbjdnjftwfwlwzrdkwhajgsizsi +qfntnt +okqyfnbresp +asyg +mjqdkdyggdxzwuzglays +h +ifaqcazoy +fol +vvsusbnugduxsceozmsarbp +epjwtorx +bwiuxxiyc +cw +bwogruhctwkfvbexjnwircykxyzjmats +kygiochfwlpsvmxcgmtjrgvfdptd +q +qmpqe + +z +jghffhqfoecmszunhxmzmzhlmbrvjabhrkihgjmvckhkfpaygjkg + +kfiyfgounmhlvhupswqdgws +ezzdpyqucqoocsdcjtruqpokldfkmjhqzoynirybsifyaxnaxppthjoqy +nwetlgzwrhkhtuubbkbepuhbllxspvagxrqokwnrhkbwdwtp +hlazomrhqogoaxypqaszwfxxmutvbpuuvpdffuqskcbzlwyzcssnflkwiydoveyxjnzllzhyozbsa +hwnitkwbxcyibbqsluuqywbk + +ozpfjsdrc +yoepefuy +lvmspzepnetra +genbrcrmuqfvkaouvuymoxhcxotjjhk +pcshyqgbmqdubsdajnyfqvxkqvywffzn +ukhcbyzwslqeq +otfrmcbnhbyffxqregqoufdxucjunwdhlqqeiiawbxlpqeyzzopfungrryqdykgizrhqodirvazm +dhpfhzyq +cloz +eduupqifolfekve +qiec +ishnjukvomntmdthlkajxpiwk +y +axl +tmyskjqkjsvumizlal +wvvolwewsfxhhdieuagdcuhwsgqvswpbkdkpxskloalmr +ryfmhe +z +mmbpgsyrfvzdatbjrjhuipwt +llzwizmmuulgwocowwmugtaoewkhnqxparvtynlffffdfcocdbba + +pyczkzbmcgrdnxnmezsx +gsqe +mcocxcolcynhpecstsn +opnpplkccobjuhtbhirpzfxuktmpsiwbvsgiaavvdge +wpaldxzasnrbvtugjwytvtfttrh +zxecurevkjiyxy +wtnovebcmglkktic +fdpwfgvlvovxrwh +bmwgdullzy +uzwhagxinwqifxjbcntqzqoxkmpqxhe +jrfizsnwxwnnhb +inapddlahrp + +ndtvkceobe +buskgghihdjmjlwfc +j +rkvffxwtmzoeruhlsurwtnuh +cbvkhfepkdishfpqvijzrpleuy +jzdpxjhcgqnybssfegvrnpgyehdqpgjwudbwrjbavp +xzzvgqdrdwajmdmj +vfatwsxvwfdbdhnijdujoyotwvwjipuuetichcfmvgrsnjpqaaezjtkvc +lbfoqgfshrtwgdqufwnfuitdrjydqctqixlzufkdbp +zgau +qefdpmtkecvtj +kuphldkvnzdtpd +dti +fpd +gfrliyegxsb +i +qsddsrmkyfgzrjeqnitmnypbcakh +vfbvbrpuogzhzrbmklvhji +nkz +xlufbaoblbmeub +alwuzxzmobwdukvwnkiwmuqhuxfhevogdnqtmxjptqznrk +cngpoty + +ms +qvenfg +dmeaffm +jycfgnanbmoamhmarkmjcagbp +ysqmbhopgx +jczbzgwedsp + +zxzwjrxcwdtleizjlvifjwgxiibezwxhtzywqdi +mtgnlu +xboxirdchurkfnklnpkapnqfxnhrxyseiujrznjm + +zm +atddskbghcahlhql +szshwzmmvu +befdtpouamwhiisyybispkchpjhownatawjfbx + +ennkzbrlygd +zbt +upphzpdwzmlhhhbqvjsfmbnrar 
+ddcs +ipbxgzyudjyongtcyygncojdufnufqpdppgvq +gc +isu +foa +wf +jdlvqxgfbowhohhyyngbcs +zjuwjyucdwblatsnywaaoftlcamfbcnw +lzrioesuhoeevczuwrnltmkahfwiu +uicggfbddqltnjyxfltbnaekncnyxsit +zkxsqkqrwrzrxgxbsgxatybfr + +ptvmfyxdcglbfipcguqthjygzqnpqssscukzawynidtchjrrxwuxifoe +w +ohu +vg +zagpowezvbniybgold +lhqseqcxteiqtgnpanpvrmvvlltxh +mtfnxn +wyodtg + +rawpbgtpbaktqzmmpzxmrlwpvvmdsl +widcfbirvswraukbmkhf +vplrueuxomjkqrtjgyxjdkexttzyozawyq +hrpbahllznvmjudzxpbbv +tlavfrxygjfipkupwnbacltcfepeg +icu +otxcu +aewazy +hl + +fmrp +qaacthwzohenzjr +xbyebba +rvkph +mkhhmh +swme +zjmdoypaktglcyzobquunvthcdwegtbywpijxd +jvkuhnxqc +gibhqgjojsxt +bodbktzomiqujtbstqiyquwvqgufphqstenxvddkvtdh +bpusrxkfi +zgp +pmxvgamydyakituvvsucsuidrlznupcsinltmrahulhepxmhoqtfvpjkxzhrrinncuh +jzgkjjhjqykzelaszvcwvvwbnzsxdeaerfnaravk +ynanrqyrxo +zsmuxofullob +brklgrcqefdyoczy +qkpls +snhqumae +iqdtzjadzzvnqvdvjfsaf +nfqfdqiramueblxkaqxbbkxwywzgdbndjjiqk +tc +kp +cpuckbjsxhtxmomfesgxdpz +oseif +ybhxbvyxrpkrexrhjzoaxxohrhsniewsrktjnaztn +ggelspdzhzbchruhbjbjidgjwdlhdycetqaswh +jkgivsngygkbqtlmoj +dwpnanfvitxg +ospxbwxp +wgvmvrnjescemdoiralbkvemalifxnyhrbdgodml +hjtsnkzknkplbzsiwmneefdkihnhsamjsrxggclyjqgpqltizi + + +sykgbuypwwhweab +nvdkkkskmtiwpoerkon +sx +sbyflwwiqylbskdlxesmylpaz +dnwcjenaluwesyywfaezznwkdwpoesxpu +kie +dslccwfryol +gfhomgfn +zprjtfqvkotktzidmoyrivall +bunvsqkysdelozemnjoeqfolruulpbipm +ullyzfahpkhkja +hwd +kvyqtprpuulgsk +zotbkcadnxmfvqmtlbxalhughceyfcibtzzj +vvpjbgxygl +hpic +mhrqd +dv +thehuzdbaacoidjoljbysnqwrrxxplrdznmgiukkvjqbopb +moszjt +rmtbunktkywqirveeqfa +kse +wbfflnatgzobjrxghjgvcsyxoruenxhyomutbptswjajawqjpqafpdcstkiyjuilimecgejpqmyciolgcmdpcstzdozbmnza diff --git a/aho-corasick/examples/dict-search.rs b/aho-corasick/examples/dict-search.rs new file mode 100644 index 000000000..b620cc284 --- /dev/null +++ b/aho-corasick/examples/dict-search.rs @@ -0,0 +1,138 @@ +// This example demonstrates how to use the Aho-Corasick algorithm to rapidly +// scan text for matches in a large dictionary of keywords. This example by +// default reads your system's dictionary (~120,000 words). +extern crate aho_corasick; +extern crate csv; +extern crate docopt; +extern crate memmap; +extern crate serde; +#[macro_use] +extern crate serde_derive; + +use std::error::Error; +use std::fs::File; +use std::io::{self, BufRead, Write}; +use std::process; + +use aho_corasick::{Automaton, AcAutomaton, Match}; +use docopt::Docopt; +use memmap::Mmap; + +static USAGE: &'static str = " +Usage: dict-search [options] + dict-search --help + +Options: + -d , --dict Path to dictionary of keywords to search. + [default: /usr/share/dict/words] + -m , --min-len The minimum length for a keyword in UTF-8 + encoded bytes. [default: 5] + --overlapping Report overlapping matches. + -c, --count Show only the numebr of matches. + --memory-usage Show memory usage of automaton. + --full Use fully expanded transition matrix. + Warning: may use lots of memory. + -h, --help Show this usage message. 
+"; + +#[derive(Clone, Debug, Deserialize)] +struct Args { + arg_input: String, + flag_dict: String, + flag_min_len: usize, + flag_overlapping: bool, + flag_memory_usage: bool, + flag_full: bool, + flag_count: bool, +} + +fn main() { + let args: Args = Docopt::new(USAGE) + .and_then(|d| d.deserialize()) + .unwrap_or_else(|e| e.exit()); + match run(&args) { + Ok(()) => {} + Err(err) => { + writeln!(&mut io::stderr(), "{}", err).unwrap(); + process::exit(1); + } + } +} + +fn run(args: &Args) -> Result<(), Box> { + let aut = try!(build_automaton(&args.flag_dict, args.flag_min_len)); + if args.flag_memory_usage { + let (bytes, states) = if args.flag_full { + let aut = aut.into_full(); + (aut.heap_bytes(), aut.num_states()) + } else { + (aut.heap_bytes(), aut.num_states()) + }; + println!("{} bytes, {} states", bytes, states); + return Ok(()); + } + + let rdr = try!(File::open(&args.arg_input)); + if args.flag_full { + let aut = aut.into_full(); + if args.flag_overlapping { + if args.flag_count { + let mmap = unsafe { try!(Mmap::map(&rdr)) }; + println!("{}", aut.find_overlapping(&*mmap).count()); + } else { + try!(write_matches(&aut, aut.stream_find_overlapping(rdr))); + } + } else { + if args.flag_count { + let mmap = unsafe { try!(Mmap::map(&rdr)) }; + println!("{}", aut.find(&*mmap).count()); + } else { + try!(write_matches(&aut, aut.stream_find(rdr))); + } + } + } else { + if args.flag_overlapping { + if args.flag_count { + let mmap = unsafe { try!(Mmap::map(&rdr)) }; + println!("{}", aut.find_overlapping(&*mmap).count()); + } else { + try!(write_matches(&aut, aut.stream_find_overlapping(rdr))); + } + } else { + if args.flag_count { + let mmap = unsafe { try!(Mmap::map(&rdr)) }; + println!("{}", aut.find(&*mmap).count()); + } else { + try!(write_matches(&aut, aut.stream_find(rdr))); + } + } + } + Ok(()) +} + +fn write_matches(aut: &A, it: I) -> Result<(), Box> + where A: Automaton, I: Iterator> { + let mut wtr = csv::Writer::from_writer(io::stdout()); + try!(wtr.serialize(("pattern", "start", "end"))); + for m in it { + let m = try!(m); + try!(wtr.serialize((aut.pattern(m.pati), m.start, m.end))); + } + try!(wtr.flush()); + Ok(()) +} + +fn build_automaton( + dict_path: &str, + min_len: usize, +) -> Result, Box> { + let buf = io::BufReader::new(try!(File::open(dict_path))); + let mut lines = Vec::with_capacity(1 << 10); + for line in buf.lines() { + let line = try!(line); + if line.len() >= min_len { + lines.push(line); + } + } + Ok(AcAutomaton::with_transitions(lines)) +} diff --git a/aho-corasick/src/autiter.rs b/aho-corasick/src/autiter.rs new file mode 100644 index 000000000..dfad28f67 --- /dev/null +++ b/aho-corasick/src/autiter.rs @@ -0,0 +1,530 @@ +use std::io::{self, BufRead}; +use std::marker::PhantomData; + +use memchr::{memchr, memchr2, memchr3}; + +use super::{ROOT_STATE, StateIdx}; + +/// An abstraction over automatons and their corresponding iterators. +/// The type parameter `P` is the type of the pattern that was used to +/// construct this Automaton. +pub trait Automaton
<P>
{ + /// Return the next state given the current state and next character. + fn next_state(&self, si: StateIdx, b: u8) -> StateIdx; + + /// Return true if and only if the given state and current pattern index + /// indicate a match. + fn has_match(&self, si: StateIdx, outi: usize) -> bool; + + /// Build a match given the current state, pattern index and input index. + fn get_match(&self, si: StateIdx, outi: usize, texti: usize) -> Match; + + /// Return the set of bytes that have transitions in the root state. + fn start_bytes(&self) -> &[u8]; + + /// Returns all of the patterns matched by this automaton. + /// + /// The order of the patterns is the order in which they were added. + fn patterns(&self) -> &[P]; + + /// Returns the pattern indexed at `i`. + /// + /// The index corresponds to the position at which the pattern was added + /// to the automaton, starting at `0`. + fn pattern(&self, i: usize) -> &P; + + /// Return the number of patterns in the automaton. + #[inline] + fn len(&self) -> usize { + self.patterns().len() + } + + /// Returns true if the automaton has no patterns. + #[inline] + fn is_empty(&self) -> bool { + self.patterns().is_empty() + } + + /// Returns an iterator of non-overlapping matches in `s`. + fn find<'a, 's, Q: ?Sized + AsRef<[u8]>>( + &'a self, + s: &'s Q, + ) -> Matches<'a, 's, P, Self> + where Self: Sized { + Matches { + aut: self, + text: s.as_ref(), + texti: 0, + si: ROOT_STATE, + _m: PhantomData, + } + } + + /// Returns an iterator of overlapping matches in `s`. + fn find_overlapping<'a, 's, Q: ?Sized + AsRef<[u8]>>( + &'a self, + s: &'s Q, + ) -> MatchesOverlapping<'a, 's, P, Self> + where Self: Sized { + MatchesOverlapping { + aut: self, + text: s.as_ref(), + texti: 0, + si: ROOT_STATE, + outi: 0, + _m: PhantomData, + } + } + + /// Returns an iterator of non-overlapping matches in the given reader. + fn stream_find<'a, R: io::Read>( + &'a self, + rdr: R, + ) -> StreamMatches<'a, R, P, Self> + where Self: Sized { + StreamMatches { + aut: self, + buf: io::BufReader::new(rdr), + texti: 0, + si: ROOT_STATE, + _m: PhantomData, + } + } + + /// Returns an iterator of overlapping matches in the given reader. + fn stream_find_overlapping<'a, R: io::Read>( + &'a self, + rdr: R, + ) -> StreamMatchesOverlapping<'a, R, P, Self> + where Self: Sized { + StreamMatchesOverlapping { + aut: self, + buf: io::BufReader::new(rdr), + texti: 0, + si: ROOT_STATE, + outi: 0, + _m: PhantomData, + } + } +} + +impl<'a, P: AsRef<[u8]>, A: 'a + Automaton
<P>
+ ?Sized> + Automaton
<P>
for &'a A { + fn next_state(&self, si: StateIdx, b: u8) -> StateIdx { + (**self).next_state(si, b) + } + + fn has_match(&self, si: StateIdx, outi: usize) -> bool { + (**self).has_match(si, outi) + } + + fn start_bytes(&self) -> &[u8] { + (**self).start_bytes() + } + + fn patterns(&self) -> &[P] { + (**self).patterns() + } + + fn pattern(&self, i: usize) -> &P { + (**self).pattern(i) + } + + fn get_match(&self, si: StateIdx, outi: usize, texti: usize) -> Match { + (**self).get_match(si, outi, texti) + } +} + +/// Records a match in the search text. +#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] +pub struct Match { + /// The pattern index. + /// + /// This corresponds to the ordering in which the matched pattern was + /// added to the automaton, starting at `0`. + pub pati: usize, + /// The starting byte offset of the match in the search text. + pub start: usize, + /// The ending byte offset of the match in the search text. + /// + /// (This can be re-captiulated with `pati` and adding the pattern's + /// length to `start`, but it is convenient to have it here.) + pub end: usize, +} + +/// An iterator of non-overlapping matches for in-memory text. +/// +/// This iterator yields `Match` values. +/// +/// `'a` is the lifetime of the automaton, `'s` is the lifetime of the +/// search text, and `P` is the type of the Automaton's pattern. +#[derive(Debug)] +pub struct Matches<'a, 's, P, A: 'a + Automaton
<P>
+ ?Sized> { + aut: &'a A, + text: &'s [u8], + texti: usize, + si: StateIdx, + _m: PhantomData
<P>
, +} + +// When there's an initial lone start byte, it is usually worth it +// to use `memchr` to skip along the input. The problem is that +// the skipping function is called in the inner match loop, which +// can be quite costly if the skipping condition is never met. +// Therefore, we lift the case analysis outside of the inner loop at +// the cost of repeating code. +// +// `step_to_match` is the version of the inner loop without skipping, +// and `skip_to_match` is the version with skipping. +#[inline(never)] +fn step_to_match + ?Sized>( + aut: &A, + text: &[u8], + mut texti: usize, + mut si: StateIdx +) -> Option<(usize, StateIdx)> { + while texti < text.len() { + si = aut.next_state(si, text[texti]); + if aut.has_match(si, 0) { + return Some((texti, si)); + } + texti += 1; + if texti + 4 < text.len() { + si = aut.next_state(si, text[texti]); + if aut.has_match(si, 0) { + return Some((texti, si)); + } + texti += 1; + si = aut.next_state(si, text[texti]); + if aut.has_match(si, 0) { + return Some((texti, si)); + } + texti += 1; + si = aut.next_state(si, text[texti]); + if aut.has_match(si, 0) { + return Some((texti, si)); + } + texti += 1; + si = aut.next_state(si, text[texti]); + if aut.has_match(si, 0) { + return Some((texti, si)); + } + texti += 1; + si = aut.next_state(si, text[texti]); + if aut.has_match(si, 0) { + return Some((texti, si)); + } + texti += 1; + } + } + None +} + +fn skip_to_match + ?Sized, F: Fn(&A, &[u8], usize) -> usize>( + aut: &A, + text: &[u8], + mut texti: usize, + mut si: StateIdx, + skip: F, +) -> Option<(usize, StateIdx)> { + if si == ROOT_STATE { + texti = skip(aut, text, texti); + } + while texti < text.len() { + si = aut.next_state(si, text[texti]); + if aut.has_match(si, 0) { + return Some((texti, si)); + } + if si == ROOT_STATE { + texti = skip(aut, text, texti + 1); + } else { + texti += 1; + if texti + 4 < text.len() { + si = aut.next_state(si, text[texti]); + if aut.has_match(si, 0) { + return Some((texti, si)); + } + texti += 1; + si = aut.next_state(si, text[texti]); + if aut.has_match(si, 0) { + return Some((texti, si)); + } + texti += 1; + si = aut.next_state(si, text[texti]); + if aut.has_match(si, 0) { + return Some((texti, si)); + } + texti += 1; + si = aut.next_state(si, text[texti]); + if aut.has_match(si, 0) { + return Some((texti, si)); + } + texti += 1; + si = aut.next_state(si, text[texti]); + if aut.has_match(si, 0) { + return Some((texti, si)); + } + texti += 1; + } + } + } + None +} + +#[inline] +fn skip1 + ?Sized>( + aut: &A, + text: &[u8], + at: usize, +) -> usize { + debug_assert!(aut.start_bytes().len() == 1); + let b = aut.start_bytes()[0]; + match memchr(b, &text[at..]) { + None => text.len(), + Some(i) => at + i, + } +} + +#[inline] +fn skip2 + ?Sized>( + aut: &A, + text: &[u8], + at: usize, +) -> usize { + debug_assert!(aut.start_bytes().len() == 2); + let (b1, b2) = (aut.start_bytes()[0], aut.start_bytes()[1]); + match memchr2(b1, b2, &text[at..]) { + None => text.len(), + Some(i) => at + i, + } +} + +#[inline] +fn skip3 + ?Sized>( + aut: &A, + text: &[u8], + at: usize, +) -> usize { + debug_assert!(aut.start_bytes().len() == 3); + let (b1, b2, b3) = ( + aut.start_bytes()[0], aut.start_bytes()[1], aut.start_bytes()[2], + ); + match memchr3(b1, b2, b3, &text[at..]) { + None => text.len(), + Some(i) => at + i, + } +} + +impl<'a, 's, P, A: Automaton
<P>
+ ?Sized> Iterator for Matches<'a, 's, P, A> { + type Item = Match; + + fn next(&mut self) -> Option { + if self.aut.start_bytes().len() == 1 { + let skip = skip_to_match( + self.aut, self.text, self.texti, self.si, skip1); + if let Some((texti, si)) = skip { + self.texti = texti + 1; + self.si = ROOT_STATE; + return Some(self.aut.get_match(si, 0, texti)); + } + } else if self.aut.start_bytes().len() == 2 { + let skip = skip_to_match( + self.aut, self.text, self.texti, self.si, skip2); + if let Some((texti, si)) = skip { + self.texti = texti + 1; + self.si = ROOT_STATE; + return Some(self.aut.get_match(si, 0, texti)); + } + } else if self.aut.start_bytes().len() == 3 { + let skip = skip_to_match( + self.aut, self.text, self.texti, self.si, skip3); + if let Some((texti, si)) = skip { + self.texti = texti + 1; + self.si = ROOT_STATE; + return Some(self.aut.get_match(si, 0, texti)); + } + } else { + let step = step_to_match(self.aut, self.text, self.texti, self.si); + if let Some((texti, si)) = step { + self.texti = texti + 1; + self.si = ROOT_STATE; + return Some(self.aut.get_match(si, 0, texti)); + } + } + None + } +} + +/// An iterator of non-overlapping matches for streaming text. +/// +/// This iterator yields `io::Result` values. +/// +/// `'a` is the lifetime of the automaton, `R` is the type of the underlying +/// `io::Read`er, and P is the type of the Automaton's pattern. +#[derive(Debug)] +pub struct StreamMatches<'a, R, P, A: 'a + Automaton
<P>
+ ?Sized> { + aut: &'a A, + buf: io::BufReader, + texti: usize, + si: StateIdx, + _m: PhantomData
<P>
, +} + +impl<'a, R: io::Read, P, A: Automaton
<P>
> + Iterator for StreamMatches<'a, R, P, A> { + type Item = io::Result; + + fn next(&mut self) -> Option> { + let mut m = None; + let mut consumed = 0; +'LOOP: loop { + self.buf.consume(consumed); + let bs = match self.buf.fill_buf() { + Err(err) => return Some(Err(err)), + Ok(bs) if bs.is_empty() => break, + Ok(bs) => bs, + }; + consumed = bs.len(); // is shortened if we find a match + for (i, &b) in bs.iter().enumerate() { + self.si = self.aut.next_state(self.si, b); + if self.aut.has_match(self.si, 0) { + m = Some(Ok(self.aut.get_match(self.si, 0, self.texti))); + consumed = i + 1; + self.texti += 1; + self.si = ROOT_STATE; + break 'LOOP; + } + self.texti += 1; + } + } + self.buf.consume(consumed); + m + } +} + +/// An iterator of overlapping matches for in-memory text. +/// +/// This iterator yields `Match` values. +/// +/// `'a` is the lifetime of the automaton, `'s` is the lifetime of the +/// search text, and `P` is the type of the Automaton's pattern. +#[derive(Debug)] +pub struct MatchesOverlapping<'a, 's, P, A: 'a + Automaton
<P>
+ ?Sized> { + aut: &'a A, + text: &'s [u8], + texti: usize, + si: StateIdx, + outi: usize, + _m: PhantomData
<P>
, +} + +impl<'a, 's, P, A: Automaton
<P>
+ ?Sized> + Iterator for MatchesOverlapping<'a, 's, P, A> { + type Item = Match; + + fn next(&mut self) -> Option { + if self.aut.has_match(self.si, self.outi) { + let m = self.aut.get_match(self.si, self.outi, self.texti); + self.outi += 1; + if !self.aut.has_match(self.si, self.outi) { + self.texti += 1; + } + return Some(m); + } + + self.outi = 0; + if self.aut.start_bytes().len() == 1 { + let skip = skip_to_match( + self.aut, self.text, self.texti, self.si, skip1); + if let Some((texti, si)) = skip { + self.texti = texti; + self.si = si; + return self.next(); + } + } else if self.aut.start_bytes().len() == 2 { + let skip = skip_to_match( + self.aut, self.text, self.texti, self.si, skip2); + if let Some((texti, si)) = skip { + self.texti = texti; + self.si = si; + return self.next(); + } + } else if self.aut.start_bytes().len() == 3 { + let skip = skip_to_match( + self.aut, self.text, self.texti, self.si, skip3); + if let Some((texti, si)) = skip { + self.texti = texti; + self.si = si; + return self.next(); + } + } else { + let step = step_to_match(self.aut, self.text, self.texti, self.si); + if let Some((texti, si)) = step { + self.texti = texti; + self.si = si; + return self.next(); + } + } + None + } +} + +/// An iterator of overlapping matches for streaming text. +/// +/// This iterator yields `io::Result` values. +/// +/// `'a` is the lifetime of the automaton, `R` is the type of the underlying +/// `io::Read`er, and P is the type of the Automaton's pattern. +#[derive(Debug)] +pub struct StreamMatchesOverlapping<'a, R, P, A: 'a + Automaton
<P>
+ ?Sized> { + aut: &'a A, + buf: io::BufReader, + texti: usize, + si: StateIdx, + outi: usize, + _m: PhantomData
<P>
, +} + +impl<'a, R: io::Read, P, A: Automaton
<P>
+ ?Sized> + Iterator for StreamMatchesOverlapping<'a, R, P, A> { + type Item = io::Result; + + fn next(&mut self) -> Option> { + if self.aut.has_match(self.si, self.outi) { + let m = self.aut.get_match(self.si, self.outi, self.texti); + self.outi += 1; + if !self.aut.has_match(self.si, self.outi) { + self.texti += 1; + } + return Some(Ok(m)); + } + let mut m = None; + let mut consumed = 0; + self.outi = 0; +'LOOP: loop { + self.buf.consume(consumed); + let bs = match self.buf.fill_buf() { + Err(err) => return Some(Err(err)), + Ok(bs) if bs.is_empty() => break, + Ok(bs) => bs, + }; + consumed = bs.len(); // is shortened if we find a match + for (i, &b) in bs.iter().enumerate() { + self.si = self.aut.next_state(self.si, b); + if self.aut.has_match(self.si, self.outi) { + m = Some(Ok(self.aut.get_match( + self.si, self.outi, self.texti))); + consumed = i + 1; + self.outi += 1; + if !self.aut.has_match(self.si, self.outi) { + self.texti += 1; + } + break 'LOOP; + } + self.texti += 1; + } + } + self.buf.consume(consumed); + m + } +} diff --git a/aho-corasick/src/full.rs b/aho-corasick/src/full.rs new file mode 100644 index 000000000..377940afa --- /dev/null +++ b/aho-corasick/src/full.rs @@ -0,0 +1,145 @@ +use std::fmt; +use std::mem; + +use super::{ + FAIL_STATE, + StateIdx, AcAutomaton, Transitions, Match, + usize_bytes, vec_bytes, +}; +use super::autiter::Automaton; + +/// A complete Aho-Corasick automaton. +/// +/// This uses a single transition matrix that permits each input character +/// to move to the next state with a single lookup in the matrix. +/// +/// This is as fast as it gets, but it is guaranteed to use a lot of memory. +/// Namely, it will use at least `4 * 256 * #states`, where the number of +/// states is capped at length of all patterns concatenated. +#[derive(Clone)] +pub struct FullAcAutomaton
<P>
{ + pats: Vec
<P>
, + trans: Vec<StateIdx>, // row-major, where states are rows + out: Vec<Vec<usize>>, // indexed by StateIdx + start_bytes: Vec<u8>, +} + +impl<P: AsRef<[u8]>> FullAcAutomaton
<P>
{ + /// Build a new expanded Aho-Corasick automaton from an existing + /// Aho-Corasick automaton. + pub fn new(ac: AcAutomaton) -> FullAcAutomaton
<P>
{ + let mut fac = FullAcAutomaton { + pats: vec![], + trans: vec![FAIL_STATE; 256 * ac.states.len()], + out: vec![vec![]; ac.states.len()], + start_bytes: vec![], + }; + fac.build_matrix(&ac); + fac.pats = ac.pats; + fac.start_bytes = ac.start_bytes; + fac + } + + #[doc(hidden)] + pub fn memory_usage(&self) -> usize { + self.pats.iter() + .map(|p| vec_bytes() + p.as_ref().len()) + .sum::() + + (4 * self.trans.len()) + + self.out.iter() + .map(|v| vec_bytes() + (usize_bytes() * v.len())) + .sum::() + + self.start_bytes.len() + } + + #[doc(hidden)] + pub fn heap_bytes(&self) -> usize { + self.pats.iter() + .map(|p| mem::size_of::
<P>
() + p.as_ref().len()) + .sum::() + + (4 * self.trans.len()) + + self.out.iter() + .map(|v| vec_bytes() + (usize_bytes() * v.len())) + .sum::() + + self.start_bytes.len() + } + + fn set(&mut self, si: StateIdx, i: u8, goto: StateIdx) { + let ns = self.num_states(); + self.trans[i as usize * ns + si as usize] = goto; + } + + #[doc(hidden)] + #[inline] + pub fn num_states(&self) -> usize { + self.out.len() + } +} + +impl> Automaton
<P>
for FullAcAutomaton
<P>
{ + #[inline] + fn next_state(&self, si: StateIdx, i: u8) -> StateIdx { + let at = i as usize * self.num_states() + si as usize; + unsafe { *self.trans.get_unchecked(at) } + } + + #[inline] + fn get_match(&self, si: StateIdx, outi: usize, texti: usize) -> Match { + let pati = self.out[si as usize][outi]; + let patlen = self.pats[pati].as_ref().len(); + let start = texti + 1 - patlen; + Match { + pati: pati, + start: start, + end: start + patlen, + } + } + + #[inline] + fn has_match(&self, si: StateIdx, outi: usize) -> bool { + unsafe { outi < self.out.get_unchecked(si as usize).len() } + } + + #[inline] + fn start_bytes(&self) -> &[u8] { + &self.start_bytes + } + + #[inline] + fn patterns(&self) -> &[P] { + &self.pats + } + + #[inline] + fn pattern(&self, i: usize) -> &P { + &self.pats[i] + } +} + +impl> FullAcAutomaton
<P>
{ + fn build_matrix(&mut self, ac: &AcAutomaton) { + for (si, s) in ac.states.iter().enumerate().skip(1) { + self.set_states(ac, si as StateIdx); + self.out[si].extend_from_slice(&s.out); + } + } + + fn set_states(&mut self, ac: &AcAutomaton, si: StateIdx) { + let current_state = &ac.states[si as usize]; + let first_fail_state = current_state.fail; + current_state.for_each_transition(move |b, maybe_si| { + let goto = if maybe_si == FAIL_STATE { + ac.memoized_next_state(self, si, first_fail_state, b) + } else { + maybe_si + }; + self.set(si, b, goto); + }); + } +} + +impl + fmt::Debug> fmt::Debug for FullAcAutomaton
<P>
{ + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "FullAcAutomaton({:?})", self.pats) + } +} diff --git a/aho-corasick/src/lib.rs b/aho-corasick/src/lib.rs new file mode 100644 index 000000000..6ceb20641 --- /dev/null +++ b/aho-corasick/src/lib.rs @@ -0,0 +1,1082 @@ +/*! +An implementation of the +[Aho-Corasick string search algorithm](https://en.wikipedia.org/wiki/Aho%E2%80%93Corasick_string_matching_algorithm). + +The Aho-Corasick algorithm is principally useful when you need to search many +large texts for a fixed (possibly large) set of keywords. In particular, the +Aho-Corasick algorithm preprocesses the set of keywords by constructing a +finite state machine. The search phase is then a quick linear scan through the +text. Each character in the search text causes a state transition in the +automaton. Matches are reported when the automaton enters a match state. + +# Examples + +The main type exposed by this crate is `AcAutomaton`, which can be constructed +from an iterator of pattern strings: + +```rust +use aho_corasick::{Automaton, AcAutomaton}; + +let aut = AcAutomaton::new(vec!["apple", "maple"]); + +// AcAutomaton also implements `FromIterator`: +let aut: AcAutomaton<&str> = ["apple", "maple"].iter().cloned().collect(); +``` + +Finding matches can be done with `find`: + +```rust +use aho_corasick::{Automaton, AcAutomaton, Match}; + +let aut = AcAutomaton::new(vec!["apple", "maple"]); +let mut it = aut.find("I like maple apples."); +assert_eq!(it.next(), Some(Match { + pati: 1, + start: 7, + end: 12, +})); +assert_eq!(it.next(), Some(Match { + pati: 0, + start: 13, + end: 18, +})); +assert_eq!(it.next(), None); +``` + +Use `find_overlapping` if you want to report all matches, even if they +overlap with each other. + +```rust +use aho_corasick::{Automaton, AcAutomaton, Match}; + +let aut = AcAutomaton::new(vec!["abc", "a"]); +let matches: Vec<_> = aut.find_overlapping("abc").collect(); +assert_eq!(matches, vec![ + Match { pati: 1, start: 0, end: 1}, Match { pati: 0, start: 0, end: 3 }, +]); + +// Regular `find` will report only one match: +let matches: Vec<_> = aut.find("abc").collect(); +assert_eq!(matches, vec![Match { pati: 1, start: 0, end: 1}]); +``` + +Finally, there are also methods for finding matches on *streams*. Namely, the +search text does not have to live in memory. It's useful to run this on files +that can't fit into memory: + +```no_run +use std::fs::File; + +use aho_corasick::{Automaton, AcAutomaton}; + +let aut = AcAutomaton::new(vec!["foo", "bar", "baz"]); +let rdr = File::open("search.txt").unwrap(); +for m in aut.stream_find(rdr) { + let m = m.unwrap(); // could be an IO error + println!("Pattern '{}' matched at: ({}, {})", + aut.pattern(m.pati), m.start, m.end); +} +``` + +There is also `stream_find_overlapping`, which is just like `find_overlapping`, +but it operates on streams. + +Please see `dict-search.rs` in this crate's `examples` directory for a more +complete example. It creates a large automaton from a dictionary and can do a +streaming match over arbitrarily large data. + +# Memory usage + +A key aspect of an Aho-Corasick implementation is how the state transitions +are represented. The easiest way to make the automaton fast is to store a +sparse 256-slot map in each state. It maps an input byte to a state index. +This makes the matching loop extremely fast, since it translates to a simple +pointer read. 
+ +The problem is that as the automaton accumulates more states, you end up paying +a `256 * 4` (`4` is for the `u32` state index) byte penalty for every state +regardless of how many transitions it has. + +To solve this, only states near the root of the automaton have this sparse +map representation. States near the leaves of the automaton use a dense mapping +that requires a linear scan. + +(The specific limit currently set is `3`, so that states with a depth less than +or equal to `3` are less memory efficient. The result is that the memory usage +of the automaton stops growing rapidly past ~60MB, even for automatons with +thousands of patterns.) + +If you'd like to opt for the less-memory-efficient-but-faster version, then +you can construct an `AcAutomaton` with a `Sparse` transition strategy: + +```rust +use aho_corasick::{Automaton, AcAutomaton, Match, Sparse}; + +let aut = AcAutomaton::<&str, Sparse>::with_transitions(vec!["abc", "a"]); +let matches: Vec<_> = aut.find("abc").collect(); +assert_eq!(matches, vec![Match { pati: 1, start: 0, end: 1}]); +``` +*/ + +#![deny(missing_docs)] + +extern crate memchr; +#[cfg(test)] +extern crate quickcheck; +#[cfg(test)] +extern crate rand; + +use std::collections::VecDeque; +use std::fmt; +use std::iter::FromIterator; +use std::mem; + +pub use self::autiter::{ + Automaton, Match, + Matches, MatchesOverlapping, StreamMatches, StreamMatchesOverlapping, +}; +pub use self::full::FullAcAutomaton; + +// We're specifying paths explicitly so that we can use +// these modules simultaneously from `main.rs`. +// Should probably make just make `main.rs` a separate crate. +#[path = "autiter.rs"] +mod autiter; +#[path = "full.rs"] +mod full; + +/// The integer type used for the state index. +/// +/// Limiting this to 32 bit integers can have a big impact on memory usage +/// when using the `Sparse` transition representation. +pub type StateIdx = u32; + +// Constants for special state indexes. +const FAIL_STATE: u32 = 0; +const ROOT_STATE: u32 = 1; + +// Limit the depth at which we use a sparse alphabet map. Once the limit is +// reached, a dense set is used (and lookup becomes O(n)). +// +// This does have a performance hit, but the (straight forward) alternative +// is to have a `256 * 4` byte overhead for every state. +// Given that Aho-Corasick is typically used for dictionary searching, this +// can lead to dramatic memory bloat. +// +// This limit should only be increased at your peril. Namely, in the worst +// case, `256^DENSE_DEPTH_THRESHOLD * 4` corresponds to the memory usage in +// bytes. A value of `1` gives us a good balance. This is also a happy point +// in the benchmarks. A value of `0` gives considerably worse times on certain +// benchmarks (e.g., `ac_ten_one_prefix_byte_every_match`) than even a value +// of `1`. A value of `2` is slightly better than `1` and it looks like gains +// level off at that point with not much observable difference when set to +// `3`. +// +// Why not make this user configurable? Well, it doesn't make much sense +// because we pay for it with case analysis in the matching loop. Increasing it +// doesn't have much impact on performance (outside of pathological cases?). +// +// N.B. Someone else seems to have discovered an alternative, but I haven't +// grokked it yet: https://github.com/mischasan/aho-corasick +const DENSE_DEPTH_THRESHOLD: u32 = 1; + +/// An Aho-Corasick finite automaton. +/// +/// The type parameter `P` is the type of the pattern that was used to +/// construct this AcAutomaton. 
+#[derive(Clone)] +pub struct AcAutomaton { + pats: Vec
<P>
, + states: Vec>, + start_bytes: Vec, +} + +#[derive(Clone)] +struct State { + out: Vec, + fail: StateIdx, + goto: T, + depth: u32, +} + +impl> AcAutomaton
<P>
{ + /// Create a new automaton from an iterator of patterns. + /// + /// The patterns must be convertible to bytes (`&[u8]`) via the `AsRef` + /// trait. + pub fn new(pats: I) -> AcAutomaton + where I: IntoIterator { + AcAutomaton::with_transitions(pats) + } +} + +impl, T: Transitions> AcAutomaton { + /// Create a new automaton from an iterator of patterns. + /// + /// This constructor allows one to choose the transition representation. + /// + /// The patterns must be convertible to bytes (`&[u8]`) via the `AsRef` + /// trait. + pub fn with_transitions(pats: I) -> AcAutomaton + where I: IntoIterator { + AcAutomaton { + pats: vec![], // filled in later, avoid wrath of borrow checker + states: vec![State::new(0), State::new(0)], // empty and root + start_bytes: vec![], // also filled in later + }.build(pats.into_iter().collect()) + } + + /// Build out the entire automaton into a single matrix. + /// + /// This will make searching as fast as possible at the expense of using + /// at least `4 * 256 * #states` bytes of memory. + pub fn into_full(self) -> FullAcAutomaton
<P>
{ + FullAcAutomaton::new(self) + } + + #[doc(hidden)] + pub fn num_states(&self) -> usize { + self.states.len() + } + + #[doc(hidden)] + pub fn heap_bytes(&self) -> usize { + self.pats.iter() + .map(|p| mem::size_of::
<P>
() + p.as_ref().len()) + .sum::() + + self.states.iter() + .map(|s| mem::size_of::>() + s.heap_bytes()) + .sum::() + + self.start_bytes.len() + } + + // The states of `full_automaton` should be set for all states < si + fn memoized_next_state( + &self, + full_automaton: &FullAcAutomaton
<P>
, + current_si: StateIdx, + mut si: StateIdx, + b: u8, + ) -> StateIdx { + loop { + if si < current_si { + return full_automaton.next_state(si, b); + } + let state = &self.states[si as usize]; + let maybe_si = state.goto(b); + if maybe_si != FAIL_STATE { + return maybe_si; + } else { + si = state.fail; + } + } + } +} + +impl, T: Transitions> Automaton
<P>
for AcAutomaton { + #[inline] + fn next_state(&self, mut si: StateIdx, b: u8) -> StateIdx { + loop { + let state = &self.states[si as usize]; + let maybe_si = state.goto(b); + if maybe_si != FAIL_STATE { + si = maybe_si; + break; + } else { + si = state.fail; + } + } + si + } + + #[inline] + fn get_match(&self, si: StateIdx, outi: usize, texti: usize) -> Match { + let pati = self.states[si as usize].out[outi]; + let patlen = self.pats[pati].as_ref().len(); + let start = texti + 1 - patlen; + Match { + pati: pati, + start: start, + end: start + patlen, + } + } + + #[inline] + fn has_match(&self, si: StateIdx, outi: usize) -> bool { + outi < self.states[si as usize].out.len() + } + + #[inline] + fn start_bytes(&self) -> &[u8] { + &self.start_bytes + } + + #[inline] + fn patterns(&self) -> &[P] { + &self.pats + } + + #[inline] + fn pattern(&self, i: usize) -> &P { + &self.pats[i] + } +} + +// `(0..256).map(|b| b as u8)` optimizes poorly in debug builds so +// we use this small explicit iterator instead +struct AllBytesIter(i32); +impl Iterator for AllBytesIter { + type Item = u8; + #[inline] + fn next(&mut self) -> Option { + if self.0 < 256 { + let b = self.0 as u8; + self.0 += 1; + Some(b) + } else { + None + } + } +} + +impl AllBytesIter { + fn new() -> AllBytesIter { + AllBytesIter(0) + } +} + +// Below contains code for *building* the automaton. It's a reasonably faithful +// translation of the description/psuedo-code from: +// http://www.cs.uku.fi/~kilpelai/BSA05/lectures/slides04.pdf + +impl, T: Transitions> AcAutomaton { + // This is the first phase and builds the initial keyword tree. + fn build(mut self, pats: Vec
<P>
) -> AcAutomaton { + for (pati, pat) in pats.iter().enumerate() { + if pat.as_ref().is_empty() { + continue; + } + let mut previ = ROOT_STATE; + for &b in pat.as_ref() { + if self.states[previ as usize].goto(b) != FAIL_STATE { + previ = self.states[previ as usize].goto(b); + } else { + let depth = self.states[previ as usize].depth + 1; + let nexti = self.add_state(State::new(depth)); + self.states[previ as usize].set_goto(b, nexti); + previ = nexti; + } + } + self.states[previ as usize].out.push(pati); + } + { + let root_state = &mut self.states[ROOT_STATE as usize]; + for c in AllBytesIter::new() { + if root_state.goto(c) == FAIL_STATE { + root_state.set_goto(c, ROOT_STATE); + } else { + self.start_bytes.push(c); + } + } + } + // If any of the start bytes are non-ASCII, then remove them all, + // because we don't want to be calling memchr on non-ASCII bytes. + // (Well, we could, but it requires being more clever. Simply using + // the prefix byte isn't good enough.) + if self.start_bytes.iter().any(|&b| b > 0x7F) { + self.start_bytes.clear(); + } + self.pats = pats; + self.fill() + } + + // The second phase that fills in the back links. + fn fill(mut self) -> AcAutomaton { + // Fill up the queue with all non-root transitions out of the root + // node. Then proceed by breadth first traversal. + let mut q = VecDeque::new(); + self.states[ROOT_STATE as usize].for_each_transition(|_, si| { + if si != ROOT_STATE { + q.push_front(si); + } + }); + + let mut transitions = Vec::new(); + + while let Some(si) = q.pop_back() { + self.states[si as usize].for_each_ok_transition(|c, u| { + transitions.push((c, u)); + q.push_front(u); + }); + + for (c, u) in transitions.drain(..) { + let mut v = self.states[si as usize].fail; + loop { + let state = &self.states[v as usize]; + if state.goto(c) == FAIL_STATE { + v = state.fail; + } else { + break; + } + } + let ufail = self.states[v as usize].goto(c); + self.states[u as usize].fail = ufail; + + fn get_two(xs: &mut [T], i: usize, j: usize) -> (&mut T, &mut T) { + if i < j { + let (before, after) = xs.split_at_mut(j); + (&mut before[i], &mut after[0]) + } else { + let (before, after) = xs.split_at_mut(i); + (&mut after[0], &mut before[j]) + } + } + + let (ufail_out, out) = get_two(&mut self.states, ufail as usize, u as usize); + out.out.extend_from_slice(&ufail_out.out); + } + } + self + } + + fn add_state(&mut self, state: State) -> StateIdx { + let i = self.states.len(); + self.states.push(state); + i as StateIdx + } +} + +impl State { + fn new(depth: u32) -> State { + State { + out: vec![], + fail: 1, + goto: Transitions::new(depth), + depth: depth, + } + } + + fn goto(&self, b: u8) -> StateIdx { + self.goto.goto(b) + } + + fn set_goto(&mut self, b: u8, si: StateIdx) { + self.goto.set_goto(b, si); + } + + fn heap_bytes(&self) -> usize { + (self.out.len() * usize_bytes()) + + self.goto.heap_bytes() + } + + fn for_each_transition(&self, f: F) + where F: FnMut(u8, StateIdx) + { + self.goto.for_each_transition(f) + } + + fn for_each_ok_transition(&self, f: F) + where F: FnMut(u8, StateIdx) + { + self.goto.for_each_ok_transition(f) + } +} + +/// An abstraction over state transition strategies. +/// +/// This is an attempt to let the caller choose the space/time trade offs +/// used for state transitions. +/// +/// (It's possible that this interface is merely good enough for just the two +/// implementations in this crate.) +pub trait Transitions { + /// Return a new state at the given depth. 
+ fn new(depth: u32) -> Self; + /// Return the next state index given the next character. + fn goto(&self, alpha: u8) -> StateIdx; + /// Set the next state index for the character given. + fn set_goto(&mut self, alpha: u8, si: StateIdx); + /// The memory use in bytes (on the heap) of this set of transitions. + fn heap_bytes(&self) -> usize; + + /// Iterates over each state + fn for_each_transition(&self, mut f: F) + where F: FnMut(u8, StateIdx) + { + for b in AllBytesIter::new() { + f(b, self.goto(b)); + } + } + + /// Iterates over each non-fail state + fn for_each_ok_transition(&self, mut f: F) + where + F: FnMut(u8, StateIdx), + { + self.for_each_transition(|b, si| { + if si != FAIL_STATE { + f(b, si); + } + }); + } +} + +/// State transitions that can be stored either sparsely or densely. +/// +/// This uses less space but at the expense of slower matching. +#[derive(Clone, Debug)] +pub struct Dense(DenseChoice); + +#[derive(Clone, Debug)] +enum DenseChoice { + Sparse(Box), + Dense(Vec<(u8, StateIdx)>), +} + +impl Transitions for Dense { + fn new(depth: u32) -> Dense { + if depth <= DENSE_DEPTH_THRESHOLD { + Dense(DenseChoice::Sparse(Box::new(Sparse::new(depth)))) + } else { + Dense(DenseChoice::Dense(vec![])) + } + } + + fn goto(&self, b1: u8) -> StateIdx { + match self.0 { + DenseChoice::Sparse(ref m) => m.goto(b1), + DenseChoice::Dense(ref m) => { + for &(b2, si) in m { + if b1 == b2 { + return si; + } + } + FAIL_STATE + } + } + } + + fn set_goto(&mut self, b: u8, si: StateIdx) { + match self.0 { + DenseChoice::Sparse(ref mut m) => m.set_goto(b, si), + DenseChoice::Dense(ref mut m) => m.push((b, si)), + } + } + + fn heap_bytes(&self) -> usize { + match self.0 { + DenseChoice::Sparse(_) => mem::size_of::(), + DenseChoice::Dense(ref m) => m.len() * (1 + 4), + } + } + + fn for_each_transition(&self, mut f: F) + where F: FnMut(u8, StateIdx) + { + match self.0 { + DenseChoice::Sparse(ref m) => m.for_each_transition(f), + DenseChoice::Dense(ref m) => { + let mut iter = m.iter(); + let mut b = 0i32; + while let Some(&(next_b, next_si)) = iter.next() { + while (b as u8) < next_b { + f(b as u8, FAIL_STATE); + b += 1; + } + f(b as u8, next_si); + b += 1; + } + while b < 256 { + f(b as u8, FAIL_STATE); + b += 1; + } + } + } + } + fn for_each_ok_transition(&self, mut f: F) + where + F: FnMut(u8, StateIdx), + { + match self.0 { + DenseChoice::Sparse(ref m) => m.for_each_ok_transition(f), + DenseChoice::Dense(ref m) => for &(b, si) in m { + f(b, si) + } + } + } +} + +/// State transitions that are always sparse. +/// +/// This can use enormous amounts of memory when there are many patterns, +/// but matching is very fast. +pub struct Sparse([StateIdx; 256]); + +impl Clone for Sparse { + fn clone(&self) -> Sparse { + Sparse(self.0) + } +} + +impl fmt::Debug for Sparse { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_tuple("Sparse").field(&&self.0[..]).finish() + } +} + +impl Transitions for Sparse { + fn new(_: u32) -> Sparse { + Sparse([0; 256]) + } + + #[inline] + fn goto(&self, b: u8) -> StateIdx { + self.0[b as usize] + } + + fn set_goto(&mut self, b: u8, si: StateIdx) { + self.0[b as usize] = si; + } + + fn heap_bytes(&self) -> usize { + 0 + } +} + +impl> FromIterator for AcAutomaton { + /// Create an automaton from an iterator of strings. + fn from_iter(it: T) -> AcAutomaton where T: IntoIterator { + AcAutomaton::new(it) + } +} + +// Provide some question debug impls for viewing automatons. +// The custom impls mostly exist for special showing of sparse maps. 
+ +impl + fmt::Debug, T: Transitions> + fmt::Debug for AcAutomaton { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + use std::iter::repeat; + + try!(writeln!(f, "{}", repeat('-').take(79).collect::())); + try!(writeln!(f, "Patterns: {:?}", self.pats)); + for (i, state) in self.states.iter().enumerate().skip(1) { + try!(writeln!(f, "{:3}: {}", i, state.debug(i == 1))); + } + write!(f, "{}", repeat('-').take(79).collect::()) + } +} + +impl State { + fn debug(&self, root: bool) -> String { + format!("State {{ depth: {:?}, out: {:?}, fail: {:?}, goto: {{{}}} }}", + self.depth, self.out, self.fail, self.goto_string(root)) + } + + fn goto_string(&self, root: bool) -> String { + let mut goto = vec![]; + for b in AllBytesIter::new() { + let si = self.goto(b); + if (!root && si == FAIL_STATE) || (root && si == ROOT_STATE) { + continue; + } + goto.push(format!("{} => {}", b as char, si)); + } + goto.join(", ") + } +} + +impl fmt::Debug for State { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}", self.debug(false)) + } +} + +impl AcAutomaton { + #[doc(hidden)] + pub fn dot(&self) -> String { + use std::fmt::Write; + let mut out = String::new(); + macro_rules! w { + ($w:expr, $($tt:tt)*) => { {write!($w, $($tt)*)}.unwrap() } + } + + w!(out, r#" +digraph automaton {{ + label=<{}>; + labelloc="l"; + labeljust="l"; + rankdir="LR"; +"#, self.pats.join(", ")); + for (i, s) in self.states.iter().enumerate().skip(1) { + let i = i as u32; + if s.out.is_empty() { + w!(out, " {};\n", i); + } else { + w!(out, " {} [peripheries=2];\n", i); + } + w!(out, " {} -> {} [style=dashed];\n", i, s.fail); + for b in AllBytesIter::new() { + let si = s.goto(b); + if si == FAIL_STATE || (i == ROOT_STATE && si == ROOT_STATE) { + continue; + } + w!(out, " {} -> {} [label={}];\n", i, si, b as char); + } + } + w!(out, "}}"); + out + } +} + +fn vec_bytes() -> usize { + usize_bytes() * 3 +} + +fn usize_bytes() -> usize { + let bits = usize::max_value().count_ones() as usize; + bits / 8 +} + +#[cfg(test)] +mod tests { + use std::collections::HashSet; + use std::io; + + use quickcheck::{Arbitrary, Gen, quickcheck}; + use rand::Rng; + + use super::{AcAutomaton, Automaton, Match, AllBytesIter}; + + fn aut_find(xs: &[S], haystack: &str) -> Vec + where S: Clone + AsRef<[u8]> { + AcAutomaton::new(xs.to_vec()).find(&haystack).collect() + } + + fn aut_finds(xs: &[S], haystack: &str) -> Vec + where S: Clone + AsRef<[u8]> { + let cur = io::Cursor::new(haystack.as_bytes()); + AcAutomaton::new(xs.to_vec()) + .stream_find(cur).map(|r| r.unwrap()).collect() + } + + fn aut_findf(xs: &[S], haystack: &str) -> Vec + where S: Clone + AsRef<[u8]> { + AcAutomaton::new(xs.to_vec()).into_full().find(haystack).collect() + } + + fn aut_findfs(xs: &[S], haystack: &str) -> Vec + where S: Clone + AsRef<[u8]> { + let cur = io::Cursor::new(haystack.as_bytes()); + AcAutomaton::new(xs.to_vec()) + .into_full() + .stream_find(cur).map(|r| r.unwrap()).collect() + } + + fn aut_findo(xs: &[S], haystack: &str) -> Vec + where S: Clone + AsRef<[u8]> { + AcAutomaton::new(xs.to_vec()).find_overlapping(haystack).collect() + } + + fn aut_findos(xs: &[S], haystack: &str) -> Vec + where S: Clone + AsRef<[u8]> { + let cur = io::Cursor::new(haystack.as_bytes()); + AcAutomaton::new(xs.to_vec()) + .stream_find_overlapping(cur).map(|r| r.unwrap()).collect() + } + + fn aut_findfo(xs: &[S], haystack: &str) -> Vec + where S: Clone + AsRef<[u8]> { + AcAutomaton::new(xs.to_vec()) + .into_full().find_overlapping(haystack).collect() + } + + fn 
aut_findfos(xs: &[S], haystack: &str) -> Vec + where S: Clone + AsRef<[u8]> { + let cur = io::Cursor::new(haystack.as_bytes()); + AcAutomaton::new(xs.to_vec()) + .into_full() + .stream_find_overlapping(cur).map(|r| r.unwrap()).collect() + } + + #[test] + fn one_pattern_one_match() { + let ns = vec!["a"]; + let hay = "za"; + let matches = vec![ + Match { pati: 0, start: 1, end: 2 }, + ]; + assert_eq!(&aut_find(&ns, hay), &matches); + assert_eq!(&aut_finds(&ns, hay), &matches); + assert_eq!(&aut_findf(&ns, hay), &matches); + assert_eq!(&aut_findfs(&ns, hay), &matches); + } + + #[test] + fn one_pattern_many_match() { + let ns = vec!["a"]; + let hay = "zazazzzza"; + let matches = vec![ + Match { pati: 0, start: 1, end: 2 }, + Match { pati: 0, start: 3, end: 4 }, + Match { pati: 0, start: 8, end: 9 }, + ]; + assert_eq!(&aut_find(&ns, hay), &matches); + assert_eq!(&aut_finds(&ns, hay), &matches); + assert_eq!(&aut_findf(&ns, hay), &matches); + assert_eq!(&aut_findfs(&ns, hay), &matches); + } + + #[test] + fn one_longer_pattern_one_match() { + let ns = vec!["abc"]; + let hay = "zazabcz"; + let matches = vec![ Match { pati: 0, start: 3, end: 6 } ]; + assert_eq!(&aut_find(&ns, hay), &matches); + assert_eq!(&aut_finds(&ns, hay), &matches); + assert_eq!(&aut_findf(&ns, hay), &matches); + assert_eq!(&aut_findfs(&ns, hay), &matches); + } + + #[test] + fn one_longer_pattern_many_match() { + let ns = vec!["abc"]; + let hay = "zazabczzzzazzzabc"; + let matches = vec![ + Match { pati: 0, start: 3, end: 6 }, + Match { pati: 0, start: 14, end: 17 }, + ]; + assert_eq!(&aut_find(&ns, hay), &matches); + assert_eq!(&aut_finds(&ns, hay), &matches); + assert_eq!(&aut_findf(&ns, hay), &matches); + assert_eq!(&aut_findfs(&ns, hay), &matches); + } + + #[test] + fn many_pattern_one_match() { + let ns = vec!["a", "b"]; + let hay = "zb"; + let matches = vec![ Match { pati: 1, start: 1, end: 2 } ]; + assert_eq!(&aut_find(&ns, hay), &matches); + assert_eq!(&aut_finds(&ns, hay), &matches); + assert_eq!(&aut_findf(&ns, hay), &matches); + assert_eq!(&aut_findfs(&ns, hay), &matches); + } + + #[test] + fn many_pattern_many_match() { + let ns = vec!["a", "b"]; + let hay = "zbzazzzzb"; + let matches = vec![ + Match { pati: 1, start: 1, end: 2 }, + Match { pati: 0, start: 3, end: 4 }, + Match { pati: 1, start: 8, end: 9 }, + ]; + assert_eq!(&aut_find(&ns, hay), &matches); + assert_eq!(&aut_finds(&ns, hay), &matches); + assert_eq!(&aut_findf(&ns, hay), &matches); + assert_eq!(&aut_findfs(&ns, hay), &matches); + } + + #[test] + fn many_longer_pattern_one_match() { + let ns = vec!["abc", "xyz"]; + let hay = "zazxyzz"; + let matches = vec![ Match { pati: 1, start: 3, end: 6 } ]; + assert_eq!(&aut_find(&ns, hay), &matches); + assert_eq!(&aut_finds(&ns, hay), &matches); + assert_eq!(&aut_findf(&ns, hay), &matches); + assert_eq!(&aut_findfs(&ns, hay), &matches); + } + + #[test] + fn many_longer_pattern_many_match() { + let ns = vec!["abc", "xyz"]; + let hay = "zazxyzzzzzazzzabcxyz"; + let matches = vec![ + Match { pati: 1, start: 3, end: 6 }, + Match { pati: 0, start: 14, end: 17 }, + Match { pati: 1, start: 17, end: 20 }, + ]; + assert_eq!(&aut_find(&ns, hay), &matches); + assert_eq!(&aut_finds(&ns, hay), &matches); + assert_eq!(&aut_findf(&ns, hay), &matches); + assert_eq!(&aut_findfs(&ns, hay), &matches); + } + + #[test] + fn many_longer_pattern_overlap_one_match() { + let ns = vec!["abc", "bc"]; + let hay = "zazabcz"; + let matches = vec![ + Match { pati: 0, start: 3, end: 6 }, + Match { pati: 1, start: 4, end: 6 }, + ]; + 
assert_eq!(&aut_findo(&ns, hay), &matches); + assert_eq!(&aut_findos(&ns, hay), &matches); + assert_eq!(&aut_findfo(&ns, hay), &matches); + assert_eq!(&aut_findfos(&ns, hay), &matches); + } + + #[test] + fn many_longer_pattern_overlap_one_match_reverse() { + let ns = vec!["abc", "bc"]; + let hay = "xbc"; + let matches = vec![ Match { pati: 1, start: 1, end: 3 } ]; + assert_eq!(&aut_findo(&ns, hay), &matches); + assert_eq!(&aut_findos(&ns, hay), &matches); + assert_eq!(&aut_findfo(&ns, hay), &matches); + assert_eq!(&aut_findfos(&ns, hay), &matches); + } + + #[test] + fn many_longer_pattern_overlap_many_match() { + let ns = vec!["abc", "bc", "c"]; + let hay = "zzzabczzzbczzzc"; + let matches = vec![ + Match { pati: 0, start: 3, end: 6 }, + Match { pati: 1, start: 4, end: 6 }, + Match { pati: 2, start: 5, end: 6 }, + Match { pati: 1, start: 9, end: 11 }, + Match { pati: 2, start: 10, end: 11 }, + Match { pati: 2, start: 14, end: 15 }, + ]; + assert_eq!(&aut_findo(&ns, hay), &matches); + assert_eq!(&aut_findos(&ns, hay), &matches); + assert_eq!(&aut_findfo(&ns, hay), &matches); + assert_eq!(&aut_findfos(&ns, hay), &matches); + } + + #[test] + fn many_longer_pattern_overlap_many_match_reverse() { + let ns = vec!["abc", "bc", "c"]; + let hay = "zzzczzzbczzzabc"; + let matches = vec![ + Match { pati: 2, start: 3, end: 4 }, + Match { pati: 1, start: 7, end: 9 }, + Match { pati: 2, start: 8, end: 9 }, + Match { pati: 0, start: 12, end: 15 }, + Match { pati: 1, start: 13, end: 15 }, + Match { pati: 2, start: 14, end: 15 }, + ]; + assert_eq!(&aut_findo(&ns, hay), &matches); + assert_eq!(&aut_findos(&ns, hay), &matches); + assert_eq!(&aut_findfo(&ns, hay), &matches); + assert_eq!(&aut_findfos(&ns, hay), &matches); + } + + #[test] + fn pattern_returns_original_type() { + let aut = AcAutomaton::new(vec!["apple", "maple"]); + + // Explicitly given this type to assert that the thing returned + // from the function is our original type. + let pat: &str = aut.pattern(0); + assert_eq!(pat, "apple"); + + // Also check the return type of the `patterns` function. + let pats: &[&str] = aut.patterns(); + assert_eq!(pats, &["apple", "maple"]); + } + + // Quickcheck time. + + // This generates very small ascii strings, which makes them more likely + // to interact in interesting ways with larger haystack strings. + #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] + pub struct SmallAscii(String); + + impl Arbitrary for SmallAscii { + fn arbitrary(g: &mut G) -> SmallAscii { + use std::char::from_u32; + SmallAscii((0..2) + .map(|_| from_u32(g.gen_range(97, 123)).unwrap()) + .collect()) + } + + fn shrink(&self) -> Box> { + Box::new(self.0.shrink().map(SmallAscii)) + } + } + + impl From for String { + fn from(s: SmallAscii) -> String { s.0 } + } + + impl AsRef<[u8]> for SmallAscii { + fn as_ref(&self) -> &[u8] { self.0.as_ref() } + } + + // This is the same arbitrary impl as `String`, except it has a bias toward + // ASCII characters. 
+ #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] + pub struct BiasAscii(String); + + impl Arbitrary for BiasAscii { + fn arbitrary(g: &mut G) -> BiasAscii { + use std::char::from_u32; + let size = { let s = g.size(); g.gen_range(0, s) }; + let mut s = String::with_capacity(size); + for _ in 0..size { + if g.gen_bool(0.3) { + s.push(char::arbitrary(g)); + } else { + for _ in 0..5 { + s.push(from_u32(g.gen_range(97, 123)).unwrap()); + } + } + } + BiasAscii(s) + } + + fn shrink(&self) -> Box> { + Box::new(self.0.shrink().map(BiasAscii)) + } + } + + fn naive_find(xs: &[S], haystack: &str) -> Vec + where S: Clone + Into { + let needles: Vec = + xs.to_vec().into_iter().map(Into::into).collect(); + let mut matches = vec![]; + for hi in 0..haystack.len() { + for (pati, needle) in needles.iter().enumerate() { + let needle = needle.as_bytes(); + if needle.len() == 0 || needle.len() > haystack.len() - hi { + continue; + } + if needle == &haystack.as_bytes()[hi..hi+needle.len()] { + matches.push(Match { + pati: pati, + start: hi, + end: hi + needle.len(), + }); + } + } + } + matches + } + + #[test] + fn qc_ac_equals_naive() { + fn prop(needles: Vec, haystack: BiasAscii) -> bool { + let aut_matches = aut_findo(&needles, &haystack.0); + let naive_matches = naive_find(&needles, &haystack.0); + // Ordering isn't always the same. I don't think we care, so do + // an unordered comparison. + let aset: HashSet = aut_matches.iter().cloned().collect(); + let nset: HashSet = naive_matches.iter().cloned().collect(); + aset == nset + } + quickcheck(prop as fn(Vec, BiasAscii) -> bool); + } + + + #[test] + fn all_bytes_iter() { + let all_bytes = AllBytesIter::new().collect::>(); + assert_eq!(all_bytes[0], 0); + assert_eq!(all_bytes[255], 255); + assert!(AllBytesIter::new().enumerate().all(|(i, b)| b as usize == i)); + } +} diff --git a/aho-corasick/src/main.rs b/aho-corasick/src/main.rs new file mode 100644 index 000000000..60562ac6a --- /dev/null +++ b/aho-corasick/src/main.rs @@ -0,0 +1,13 @@ +extern crate memchr; + +use std::env; + +use lib::AcAutomaton; + +#[allow(dead_code)] +mod lib; + +fn main() { + let aut = AcAutomaton::new(env::args().skip(1)); + println!("{}", aut.dot().trim()); +} diff --git a/ansi_term/.cargo-checksum.json b/ansi_term/.cargo-checksum.json new file mode 100644 index 000000000..012a9823a --- /dev/null +++ b/ansi_term/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{},"package":"ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b"} \ No newline at end of file diff --git a/ansi_term/Cargo.toml b/ansi_term/Cargo.toml new file mode 100644 index 000000000..56a7f2d36 --- /dev/null +++ b/ansi_term/Cargo.toml @@ -0,0 +1,27 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. 
If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "ansi_term" +version = "0.11.0" +authors = ["ogham@bsago.me", "Ryan Scheel (Havvy) ", "Josh Triplett "] +description = "Library for ANSI terminal colours and styles (bold, underline)" +homepage = "https://github.com/ogham/rust-ansi-term" +documentation = "https://docs.rs/ansi_term" +readme = "README.md" +license = "MIT" + +[lib] +name = "ansi_term" +[target."cfg(target_os=\"windows\")".dependencies.winapi] +version = "0.3.4" +features = ["errhandlingapi", "consoleapi", "processenv"] diff --git a/ansi_term/LICENCE b/ansi_term/LICENCE new file mode 100644 index 000000000..3f39e72bd --- /dev/null +++ b/ansi_term/LICENCE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Benjamin Sago + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/ansi_term/README.md b/ansi_term/README.md new file mode 100644 index 000000000..1cb56a19a --- /dev/null +++ b/ansi_term/README.md @@ -0,0 +1,174 @@ +# rust-ansi-term [![ansi-term on crates.io](http://meritbadge.herokuapp.com/ansi-term)](https://crates.io/crates/ansi_term) [![Build status](https://travis-ci.org/ogham/rust-ansi-term.svg?branch=master)](https://travis-ci.org/ogham/rust-ansi-term) [![Coverage status](https://coveralls.io/repos/ogham/rust-ansi-term/badge.svg?branch=master&service=github)](https://coveralls.io/github/ogham/rust-ansi-term?branch=master) + +This is a library for controlling colours and formatting, such as red bold text or blue underlined text, on ANSI terminals. + +### [View the Rustdoc](https://docs.rs/ansi_term/0.9.0/ansi_term/) + + +# Installation + +This crate works with [Cargo](http://crates.io). Add the following to your `Cargo.toml` dependencies section: + +```toml +[dependencies] +ansi_term = "0.9" +``` + + +## Basic usage + +There are two main data structures in this crate that you need to be concerned with: `ANSIString` and `Style`. +A `Style` holds stylistic information: colours, whether the text should be bold, or blinking, or whatever. +There are also `Colour` variants that represent simple foreground colour styles. +An `ANSIString` is a string paired with a `Style`. + +(Yes, it’s British English, but you won’t have to write “colour” very often. `Style` is used the majority of the time.) + +To format a string, call the `paint` method on a `Style` or a `Colour`, passing in the string you want to format as the argument. 
+For example, here’s how to get some red text: + +```rust +use ansi_term::Colour::Red; +println!("This is in red: {}", Red.paint("a red string")); +``` + +It’s important to note that the `paint` method does *not* actually return a string with the ANSI control characters surrounding it. +Instead, it returns an `ANSIString` value that has a `Display` implementation that, when formatted, returns the characters. +This allows strings to be printed with a minimum of `String` allocations being performed behind the scenes. + +If you *do* want to get at the escape codes, then you can convert the `ANSIString` to a string as you would any other `Display` value: + +```rust +use ansi_term::Colour::Red; +use std::string::ToString; +let red_string = Red.paint("a red string").to_string(); +``` + +**Note for Windows 10 users:** On Windows 10, the application must enable ANSI support first: + +```rust +let enabled = ansi_term::enable_ansi_support(); +``` + +## Bold, underline, background, and other styles + +For anything more complex than plain foreground colour changes, you need to construct `Style` objects themselves, rather than beginning with a `Colour`. +You can do this by chaining methods based on a new `Style`, created with `Style::new()`. +Each method creates a new style that has that specific property set. +For example: + +```rust +use ansi_term::Style; +println!("How about some {} and {}?", + Style::new().bold().paint("bold"), + Style::new().underline().paint("underline")); +``` + +For brevity, these methods have also been implemented for `Colour` values, so you can give your styles a foreground colour without having to begin with an empty `Style` value: + +```rust +use ansi_term::Colour::{Blue, Yellow}; +println!("Demonstrating {} and {}!", + Blue.bold().paint("blue bold"), + Yellow.underline().paint("yellow underline")); +println!("Yellow on blue: {}", Yellow.on(Blue).paint("wow!")); +``` + +The complete list of styles you can use are: +`bold`, `dimmed`, `italic`, `underline`, `blink`, `reverse`, `hidden`, and `on` for background colours. + +In some cases, you may find it easier to change the foreground on an existing `Style` rather than starting from the appropriate `Colour`. +You can do this using the `fg` method: + +```rust + use ansi_term::Style; + use ansi_term::Colour::{Blue, Cyan, Yellow}; + println!("Yellow on blue: {}", Style::new().on(Blue).fg(Yellow).paint("yow!")); + println!("Also yellow on blue: {}", Cyan.on(Blue).fg(Yellow).paint("zow!")); +``` + +Finally, you can turn a `Colour` into a `Style` with the `normal` method. +This will produce the exact same `ANSIString` as if you just used the `paint` method on the `Colour` directly, but it’s useful in certain cases: for example, you may have a method that returns `Styles`, and need to represent both the “red bold” and “red, but not bold” styles with values of the same type. The `Style` struct also has a `Default` implementation if you want to have a style with *nothing* set. + +```rust +use ansi_term::Style; +use ansi_term::Colour::Red; +Red.normal().paint("yet another red string"); +Style::default().paint("a completely regular string"); +``` + + +## Extended colours + +You can access the extended range of 256 colours by using the `Fixed` colour variant, which takes an argument of the colour number to use. 
+This can be included wherever you would use a `Colour`: + +```rust +use ansi_term::Colour::Fixed; +Fixed(134).paint("A sort of light purple"); +Fixed(221).on(Fixed(124)).paint("Mustard in the ketchup"); +``` + +The first sixteen of these values are the same as the normal and bold standard colour variants. +There’s nothing stopping you from using these as `Fixed` colours instead, but there’s nothing to be gained by doing so either. + +You can also access full 24-bit color by using the `RGB` colour variant, which takes separate `u8` arguments for red, green, and blue: + +```rust + use ansi_term::Colour::RGB; + RGB(70, 130, 180).paint("Steel blue"); +``` + +## Combining successive coloured strings + +The benefit of writing ANSI escape codes to the terminal is that they *stack*: you do not need to end every coloured string with a reset code if the text that follows it is of a similar style. +For example, if you want to have some blue text followed by some blue bold text, it’s possible to send the ANSI code for blue, followed by the ANSI code for bold, and finishing with a reset code without having to have an extra one between the two strings. + +This crate can optimise the ANSI codes that get printed in situations like this, making life easier for your terminal renderer. +The `ANSIStrings` struct takes a slice of several `ANSIString` values, and will iterate over each of them, printing only the codes for the styles that need to be updated as part of its formatting routine. + +The following code snippet uses this to enclose a binary number displayed in red bold text inside some red, but not bold, brackets: + +```rust +use ansi_term::Colour::Red; +use ansi_term::{ANSIString, ANSIStrings}; +let some_value = format!("{:b}", 42); +let strings: &[ANSIString<'static>] = &[ + Red.paint("["), + Red.bold().paint(some_value), + Red.paint("]"), +]; +println!("Value: {}", ANSIStrings(strings)); +``` + +There are several things to note here. +Firstly, the `paint` method can take *either* an owned `String` or a borrowed `&str`. +Internally, an `ANSIString` holds a copy-on-write (`Cow`) string value to deal with both owned and borrowed strings at the same time. +This is used here to display a `String`, the result of the `format!` call, using the same mechanism as some statically-available `&str` slices. +Secondly, that the `ANSIStrings` value works in the same way as its singular counterpart, with a `Display` implementation that only performs the formatting when required. + +## Byte strings + +This library also supports formatting `[u8]` byte strings; this supports +applications working with text in an unknown encoding. `Style` and +`Color` support painting `[u8]` values, resulting in an `ANSIByteString`. 
+This type does not implement `Display`, as it may not contain UTF-8, but +it does provide a method `write_to` to write the result to any +`io::Write`: + +```rust +use ansi_term::Colour::Green; +Green.paint("user data".as_bytes()).write_to(&mut std::io::stdout()).unwrap(); +``` + +Similarly, the type `ANSIByteStrings` supports writing a list of +`ANSIByteString` values with minimal escape sequences: + +```rust +use ansi_term::Colour::Green; +use ansi_term::ANSIByteStrings; +ANSIByteStrings(&[ + Green.paint("user data 1\n".as_bytes()), + Green.bold().paint("user data 2\n".as_bytes()), +]).write_to(&mut std::io::stdout()).unwrap(); +``` diff --git a/ansi_term/examples/colours.rs b/ansi_term/examples/colours.rs new file mode 100644 index 000000000..187031063 --- /dev/null +++ b/ansi_term/examples/colours.rs @@ -0,0 +1,13 @@ +extern crate ansi_term; +use ansi_term::Colour::*; + +fn main() { + println!("{}", Black.paint("Black")); + println!("{}", Red.paint("Red")); + println!("{}", Green.paint("Green")); + println!("{}", Yellow.paint("Yellow")); + println!("{}", Blue.paint("Blue")); + println!("{}", Purple.paint("Purple")); + println!("{}", Cyan.paint("Cyan")); + println!("{}", White.paint("White")); +} diff --git a/ansi_term/src/ansi.rs b/ansi_term/src/ansi.rs new file mode 100644 index 000000000..009043ff7 --- /dev/null +++ b/ansi_term/src/ansi.rs @@ -0,0 +1,258 @@ +use style::{Colour, Style}; + +use std::fmt; + +use write::AnyWrite; + + +// ---- generating ANSI codes ---- + +impl Style { + + /// Write any ANSI codes that go *before* a piece of text. These should be + /// the codes to set the terminal to a different colour or font style. + fn write_prefix(&self, f: &mut W) -> Result<(), W::Error> { + + // If there are actually no styles here, then don’t write *any* codes + // as the prefix. An empty ANSI code may not affect the terminal + // output at all, but a user may just want a code-free string. + if self.is_plain() { + return Ok(()); + } + + // Write the codes’ prefix, then write numbers, separated by + // semicolons, for each text style we want to apply. + write!(f, "\x1B[")?; + let mut written_anything = false; + + { + let mut write_char = |c| { + if written_anything { write!(f, ";")?; } + written_anything = true; + write!(f, "{}", c)?; + Ok(()) + }; + + if self.is_bold { write_char('1')? } + if self.is_dimmed { write_char('2')? } + if self.is_italic { write_char('3')? } + if self.is_underline { write_char('4')? } + if self.is_blink { write_char('5')? } + if self.is_reverse { write_char('7')? } + if self.is_hidden { write_char('8')? } + if self.is_strikethrough { write_char('9')? } + } + + // The foreground and background colours, if specified, need to be + // handled specially because the number codes are more complicated. + // (see `write_background_code` and `write_foreground_code`) + if let Some(bg) = self.background { + if written_anything { write!(f, ";")?; } + written_anything = true; + bg.write_background_code(f)?; + } + + if let Some(fg) = self.foreground { + if written_anything { write!(f, ";")?; } + fg.write_foreground_code(f)?; + } + + // All the codes end with an `m`, because reasons. + write!(f, "m")?; + + Ok(()) + } + + /// Write any ANSI codes that go *after* a piece of text. These should be + /// the codes to *reset* the terminal back to its normal colour and style. 
+ fn write_suffix(&self, f: &mut W) -> Result<(), W::Error> { + if self.is_plain() { + Ok(()) + } + else { + write!(f, "{}", RESET) + } + } +} + + +/// The code to send to reset all styles and return to `Style::default()`. +pub static RESET: &str = "\x1B[0m"; + + + +impl Colour { + fn write_foreground_code(&self, f: &mut W) -> Result<(), W::Error> { + match *self { + Colour::Black => write!(f, "30"), + Colour::Red => write!(f, "31"), + Colour::Green => write!(f, "32"), + Colour::Yellow => write!(f, "33"), + Colour::Blue => write!(f, "34"), + Colour::Purple => write!(f, "35"), + Colour::Cyan => write!(f, "36"), + Colour::White => write!(f, "37"), + Colour::Fixed(num) => write!(f, "38;5;{}", &num), + Colour::RGB(r,g,b) => write!(f, "38;2;{};{};{}", &r, &g, &b), + } + } + + fn write_background_code(&self, f: &mut W) -> Result<(), W::Error> { + match *self { + Colour::Black => write!(f, "40"), + Colour::Red => write!(f, "41"), + Colour::Green => write!(f, "42"), + Colour::Yellow => write!(f, "43"), + Colour::Blue => write!(f, "44"), + Colour::Purple => write!(f, "45"), + Colour::Cyan => write!(f, "46"), + Colour::White => write!(f, "47"), + Colour::Fixed(num) => write!(f, "48;5;{}", &num), + Colour::RGB(r,g,b) => write!(f, "48;2;{};{};{}", &r, &g, &b), + } + } +} + + +/// Like `ANSIString`, but only displays the style prefix. +#[derive(Clone, Copy, Debug)] +pub struct Prefix(Style); + +/// Like `ANSIString`, but only displays the difference between two +/// styles. +#[derive(Clone, Copy, Debug)] +pub struct Infix(Style, Style); + +/// Like `ANSIString`, but only displays the style suffix. +#[derive(Clone, Copy, Debug)] +pub struct Suffix(Style); + + +impl Style { + + /// The prefix for this style. + pub fn prefix(self) -> Prefix { + Prefix(self) + } + + /// The infix between this style and another. + pub fn infix(self, other: Style) -> Infix { + Infix(self, other) + } + + /// The suffix for this style. + pub fn suffix(self) -> Suffix { + Suffix(self) + } +} + + +impl Colour { + + /// The prefix for this colour. + pub fn prefix(self) -> Prefix { + Prefix(self.normal()) + } + + /// The infix between this colour and another. + pub fn infix(self, other: Colour) -> Infix { + Infix(self.normal(), other.normal()) + } + + /// The suffix for this colour. + pub fn suffix(self) -> Suffix { + Suffix(self.normal()) + } +} + + +impl fmt::Display for Prefix { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let f: &mut fmt::Write = f; + self.0.write_prefix(f) + } +} + + +impl fmt::Display for Infix { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + use difference::Difference; + + match Difference::between(&self.0, &self.1) { + Difference::ExtraStyles(style) => { + let f: &mut fmt::Write = f; + style.write_prefix(f) + }, + Difference::Reset => { + let f: &mut fmt::Write = f; + write!(f, "{}{}", RESET, self.0.prefix()) + }, + Difference::NoDifference => { + Ok(()) // nothing to write + }, + } + } +} + + +impl fmt::Display for Suffix { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let f: &mut fmt::Write = f; + self.0.write_suffix(f) + } +} + + + +#[cfg(test)] +mod test { + use style::Style; + use style::Colour::*; + + macro_rules! 
test { + ($name: ident: $style: expr; $input: expr => $result: expr) => { + #[test] + fn $name() { + assert_eq!($style.paint($input).to_string(), $result.to_string()); + + let mut v = Vec::new(); + $style.paint($input.as_bytes()).write_to(&mut v).unwrap(); + assert_eq!(v.as_slice(), $result.as_bytes()); + } + }; + } + + test!(plain: Style::default(); "text/plain" => "text/plain"); + test!(red: Red; "hi" => "\x1B[31mhi\x1B[0m"); + test!(black: Black.normal(); "hi" => "\x1B[30mhi\x1B[0m"); + test!(yellow_bold: Yellow.bold(); "hi" => "\x1B[1;33mhi\x1B[0m"); + test!(yellow_bold_2: Yellow.normal().bold(); "hi" => "\x1B[1;33mhi\x1B[0m"); + test!(blue_underline: Blue.underline(); "hi" => "\x1B[4;34mhi\x1B[0m"); + test!(green_bold_ul: Green.bold().underline(); "hi" => "\x1B[1;4;32mhi\x1B[0m"); + test!(green_bold_ul_2: Green.underline().bold(); "hi" => "\x1B[1;4;32mhi\x1B[0m"); + test!(purple_on_white: Purple.on(White); "hi" => "\x1B[47;35mhi\x1B[0m"); + test!(purple_on_white_2: Purple.normal().on(White); "hi" => "\x1B[47;35mhi\x1B[0m"); + test!(yellow_on_blue: Style::new().on(Blue).fg(Yellow); "hi" => "\x1B[44;33mhi\x1B[0m"); + test!(yellow_on_blue_2: Cyan.on(Blue).fg(Yellow); "hi" => "\x1B[44;33mhi\x1B[0m"); + test!(cyan_bold_on_white: Cyan.bold().on(White); "hi" => "\x1B[1;47;36mhi\x1B[0m"); + test!(cyan_ul_on_white: Cyan.underline().on(White); "hi" => "\x1B[4;47;36mhi\x1B[0m"); + test!(cyan_bold_ul_on_white: Cyan.bold().underline().on(White); "hi" => "\x1B[1;4;47;36mhi\x1B[0m"); + test!(cyan_ul_bold_on_white: Cyan.underline().bold().on(White); "hi" => "\x1B[1;4;47;36mhi\x1B[0m"); + test!(fixed: Fixed(100); "hi" => "\x1B[38;5;100mhi\x1B[0m"); + test!(fixed_on_purple: Fixed(100).on(Purple); "hi" => "\x1B[45;38;5;100mhi\x1B[0m"); + test!(fixed_on_fixed: Fixed(100).on(Fixed(200)); "hi" => "\x1B[48;5;200;38;5;100mhi\x1B[0m"); + test!(rgb: RGB(70,130,180); "hi" => "\x1B[38;2;70;130;180mhi\x1B[0m"); + test!(rgb_on_blue: RGB(70,130,180).on(Blue); "hi" => "\x1B[44;38;2;70;130;180mhi\x1B[0m"); + test!(blue_on_rgb: Blue.on(RGB(70,130,180)); "hi" => "\x1B[48;2;70;130;180;34mhi\x1B[0m"); + test!(rgb_on_rgb: RGB(70,130,180).on(RGB(5,10,15)); "hi" => "\x1B[48;2;5;10;15;38;2;70;130;180mhi\x1B[0m"); + test!(bold: Style::new().bold(); "hi" => "\x1B[1mhi\x1B[0m"); + test!(underline: Style::new().underline(); "hi" => "\x1B[4mhi\x1B[0m"); + test!(bunderline: Style::new().bold().underline(); "hi" => "\x1B[1;4mhi\x1B[0m"); + test!(dimmed: Style::new().dimmed(); "hi" => "\x1B[2mhi\x1B[0m"); + test!(italic: Style::new().italic(); "hi" => "\x1B[3mhi\x1B[0m"); + test!(blink: Style::new().blink(); "hi" => "\x1B[5mhi\x1B[0m"); + test!(reverse: Style::new().reverse(); "hi" => "\x1B[7mhi\x1B[0m"); + test!(hidden: Style::new().hidden(); "hi" => "\x1B[8mhi\x1B[0m"); + test!(stricken: Style::new().strikethrough(); "hi" => "\x1B[9mhi\x1B[0m"); + +} diff --git a/ansi_term/src/debug.rs b/ansi_term/src/debug.rs new file mode 100644 index 000000000..da30c625c --- /dev/null +++ b/ansi_term/src/debug.rs @@ -0,0 +1,122 @@ +use std::fmt; + +use style::Style; + + +/// Styles have a special `Debug` implementation that only shows the fields that +/// are set. Fields that haven’t been touched aren’t included in the output. +/// +/// This behaviour gets bypassed when using the alternate formatting mode +/// `format!("{:#?}")`. 
+/// +/// use ansi_term::Colour::{Red, Blue}; +/// assert_eq!("Style { fg(Red), on(Blue), bold, italic }", +/// format!("{:?}", Red.on(Blue).bold().italic())); +impl fmt::Debug for Style { + fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { + if fmt.alternate() { + fmt.debug_struct("Style") + .field("foreground", &self.foreground) + .field("background", &self.background) + .field("blink", &self.is_blink) + .field("bold", &self.is_bold) + .field("dimmed", &self.is_dimmed) + .field("hidden", &self.is_hidden) + .field("italic", &self.is_italic) + .field("reverse", &self.is_reverse) + .field("strikethrough", &self.is_strikethrough) + .field("underline", &self.is_underline) + .finish() + } + else if self.is_plain() { + fmt.write_str("Style {}") + } + else { + fmt.write_str("Style { ")?; + + let mut written_anything = false; + + if let Some(fg) = self.foreground { + if written_anything { fmt.write_str(", ")? } + written_anything = true; + write!(fmt, "fg({:?})", fg)? + } + + if let Some(bg) = self.background { + if written_anything { fmt.write_str(", ")? } + written_anything = true; + write!(fmt, "on({:?})", bg)? + } + + { + let mut write_flag = |name| { + if written_anything { fmt.write_str(", ")? } + written_anything = true; + fmt.write_str(name) + }; + + if self.is_blink { write_flag("blink")? } + if self.is_bold { write_flag("bold")? } + if self.is_dimmed { write_flag("dimmed")? } + if self.is_hidden { write_flag("hidden")? } + if self.is_italic { write_flag("italic")? } + if self.is_reverse { write_flag("reverse")? } + if self.is_strikethrough { write_flag("strikethrough")? } + if self.is_underline { write_flag("underline")? } + } + + write!(fmt, " }}") + } + } +} + + +#[cfg(test)] +mod test { + use style::Colour::*; + use style::Style; + + fn style() -> Style { + Style::new() + } + + macro_rules! test { + ($name: ident: $obj: expr => $result: expr) => { + #[test] + fn $name() { + assert_eq!($result, format!("{:?}", $obj)); + } + }; + } + + test!(empty: style() => "Style {}"); + test!(bold: style().bold() => "Style { bold }"); + test!(italic: style().italic() => "Style { italic }"); + test!(both: style().bold().italic() => "Style { bold, italic }"); + + test!(red: Red.normal() => "Style { fg(Red) }"); + test!(redblue: Red.normal().on(RGB(3, 2, 4)) => "Style { fg(Red), on(RGB(3, 2, 4)) }"); + + test!(everything: + Red.on(Blue).blink().bold().dimmed().hidden().italic().reverse().strikethrough().underline() => + "Style { fg(Red), on(Blue), blink, bold, dimmed, hidden, italic, reverse, strikethrough, underline }"); + + #[test] + fn long_and_detailed() { + let debug = r##"Style { + foreground: Some( + Blue + ), + background: None, + blink: false, + bold: true, + dimmed: false, + hidden: false, + italic: false, + reverse: false, + strikethrough: false, + underline: false +}"##; + assert_eq!(debug, format!("{:#?}", Blue.bold())); + } +} diff --git a/ansi_term/src/difference.rs b/ansi_term/src/difference.rs new file mode 100644 index 000000000..571574634 --- /dev/null +++ b/ansi_term/src/difference.rs @@ -0,0 +1,179 @@ +use super::Style; + + +/// When printing out one coloured string followed by another, use one of +/// these rules to figure out which *extra* control codes need to be sent. +#[derive(PartialEq, Clone, Copy, Debug)] +pub enum Difference { + + /// Print out the control codes specified by this style to end up looking + /// like the second string's styles. 
+ ExtraStyles(Style), + + /// Converting between these two is impossible, so just send a reset + /// command and then the second string's styles. + Reset, + + /// The before style is exactly the same as the after style, so no further + /// control codes need to be printed. + NoDifference, +} + + +impl Difference { + + /// Compute the 'style difference' required to turn an existing style into + /// the given, second style. + /// + /// For example, to turn green text into green bold text, it's redundant + /// to write a reset command then a second green+bold command, instead of + /// just writing one bold command. This method should see that both styles + /// use the foreground colour green, and reduce it to a single command. + /// + /// This method returns an enum value because it's not actually always + /// possible to turn one style into another: for example, text could be + /// made bold and underlined, but you can't remove the bold property + /// without also removing the underline property. So when this has to + /// happen, this function returns None, meaning that the entire set of + /// styles should be reset and begun again. + pub fn between(first: &Style, next: &Style) -> Difference { + use self::Difference::*; + + // XXX(Havvy): This algorithm is kind of hard to replicate without + // having the Plain/Foreground enum variants, so I'm just leaving + // it commented out for now, and defaulting to Reset. + + if first == next { + return NoDifference; + } + + // Cannot un-bold, so must Reset. + if first.is_bold && !next.is_bold { + return Reset; + } + + if first.is_dimmed && !next.is_dimmed { + return Reset; + } + + if first.is_italic && !next.is_italic { + return Reset; + } + + // Cannot un-underline, so must Reset. + if first.is_underline && !next.is_underline { + return Reset; + } + + if first.is_blink && !next.is_blink { + return Reset; + } + + if first.is_reverse && !next.is_reverse { + return Reset; + } + + if first.is_hidden && !next.is_hidden { + return Reset; + } + + if first.is_strikethrough && !next.is_strikethrough { + return Reset; + } + + // Cannot go from foreground to no foreground, so must Reset. + if first.foreground.is_some() && next.foreground.is_none() { + return Reset; + } + + // Cannot go from background to no background, so must Reset. + if first.background.is_some() && next.background.is_none() { + return Reset; + } + + let mut extra_styles = Style::default(); + + if first.is_bold != next.is_bold { + extra_styles.is_bold = true; + } + + if first.is_dimmed != next.is_dimmed { + extra_styles.is_dimmed = true; + } + + if first.is_italic != next.is_italic { + extra_styles.is_italic = true; + } + + if first.is_underline != next.is_underline { + extra_styles.is_underline = true; + } + + if first.is_blink != next.is_blink { + extra_styles.is_blink = true; + } + + if first.is_reverse != next.is_reverse { + extra_styles.is_reverse = true; + } + + if first.is_hidden != next.is_hidden { + extra_styles.is_hidden = true; + } + + if first.is_strikethrough != next.is_strikethrough { + extra_styles.is_strikethrough = true; + } + + if first.foreground != next.foreground { + extra_styles.foreground = next.foreground; + } + + if first.background != next.background { + extra_styles.background = next.background; + } + + ExtraStyles(extra_styles) + } +} + + +#[cfg(test)] +mod test { + use super::*; + use super::Difference::*; + use style::Colour::*; + use style::Style; + + fn style() -> Style { + Style::new() + } + + macro_rules! 
test { + ($name: ident: $first: expr; $next: expr => $result: expr) => { + #[test] + fn $name() { + assert_eq!($result, Difference::between(&$first, &$next)); + } + }; + } + + test!(nothing: Green.normal(); Green.normal() => NoDifference); + test!(uppercase: Green.normal(); Green.bold() => ExtraStyles(style().bold())); + test!(lowercase: Green.bold(); Green.normal() => Reset); + test!(nothing2: Green.bold(); Green.bold() => NoDifference); + + test!(colour_change: Red.normal(); Blue.normal() => ExtraStyles(Blue.normal())); + + test!(addition_of_blink: style(); style().blink() => ExtraStyles(style().blink())); + test!(addition_of_dimmed: style(); style().dimmed() => ExtraStyles(style().dimmed())); + test!(addition_of_hidden: style(); style().hidden() => ExtraStyles(style().hidden())); + test!(addition_of_reverse: style(); style().reverse() => ExtraStyles(style().reverse())); + test!(addition_of_strikethrough: style(); style().strikethrough() => ExtraStyles(style().strikethrough())); + + test!(removal_of_strikethrough: style().strikethrough(); style() => Reset); + test!(removal_of_reverse: style().reverse(); style() => Reset); + test!(removal_of_hidden: style().hidden(); style() => Reset); + test!(removal_of_dimmed: style().dimmed(); style() => Reset); + test!(removal_of_blink: style().blink(); style() => Reset); +} diff --git a/ansi_term/src/display.rs b/ansi_term/src/display.rs new file mode 100644 index 000000000..8314ab85f --- /dev/null +++ b/ansi_term/src/display.rs @@ -0,0 +1,279 @@ +use std::borrow::Cow; +use std::fmt; +use std::io; +use std::ops::Deref; + +use ansi::RESET; +use difference::Difference; +use style::{Style, Colour}; +use write::AnyWrite; + + +/// An `ANSIGenericString` includes a generic string type and a `Style` to +/// display that string. `ANSIString` and `ANSIByteString` are aliases for +/// this type on `str` and `[u8]`, respectively. +#[derive(PartialEq, Debug)] +pub struct ANSIGenericString<'a, S: 'a + ToOwned + ?Sized> +where ::Owned: fmt::Debug { + style: Style, + string: Cow<'a, S>, +} + + +/// Cloning an `ANSIGenericString` will clone its underlying string. +/// +/// ### Examples +/// +/// ``` +/// use ansi_term::ANSIString; +/// +/// let plain_string = ANSIString::from("a plain string"); +/// let clone_string = plain_string.clone(); +/// assert_eq!(clone_string, plain_string); +/// ``` +impl<'a, S: 'a + ToOwned + ?Sized> Clone for ANSIGenericString<'a, S> +where ::Owned: fmt::Debug { + fn clone(&self) -> ANSIGenericString<'a, S> { + ANSIGenericString { + style: self.style, + string: self.string.clone(), + } + } +} + +// You might think that the hand-written Clone impl above is the same as the +// one that gets generated with #[derive]. But it’s not *quite* the same! +// +// `str` is not Clone, and the derived Clone implementation puts a Clone +// constraint on the S type parameter (generated using --pretty=expanded): +// +// ↓_________________↓ +// impl <'a, S: ::std::clone::Clone + 'a + ToOwned + ?Sized> ::std::clone::Clone +// for ANSIGenericString<'a, S> where +// ::Owned: fmt::Debug { ... } +// +// This resulted in compile errors when you tried to derive Clone on a type +// that used it: +// +// #[derive(PartialEq, Debug, Clone, Default)] +// pub struct TextCellContents(Vec>); +// ^^^^^^^^^^^^^^^^^^^^^^^^^ +// error[E0277]: the trait `std::clone::Clone` is not implemented for `str` +// +// The hand-written impl above can ignore that constraint and still compile. 
+ + + +/// An ANSI String is a string coupled with the `Style` to display it +/// in a terminal. +/// +/// Although not technically a string itself, it can be turned into +/// one with the `to_string` method. +/// +/// ### Examples +/// +/// ```no_run +/// use ansi_term::ANSIString; +/// use ansi_term::Colour::Red; +/// +/// let red_string = Red.paint("a red string"); +/// println!("{}", red_string); +/// ``` +/// +/// ``` +/// use ansi_term::ANSIString; +/// +/// let plain_string = ANSIString::from("a plain string"); +/// assert_eq!(&*plain_string, "a plain string"); +/// ``` +pub type ANSIString<'a> = ANSIGenericString<'a, str>; + +/// An `ANSIByteString` represents a formatted series of bytes. Use +/// `ANSIByteString` when styling text with an unknown encoding. +pub type ANSIByteString<'a> = ANSIGenericString<'a, [u8]>; + +impl<'a, I, S: 'a + ToOwned + ?Sized> From for ANSIGenericString<'a, S> +where I: Into>, + ::Owned: fmt::Debug { + fn from(input: I) -> ANSIGenericString<'a, S> { + ANSIGenericString { + string: input.into(), + style: Style::default(), + } + } +} + +impl<'a, S: 'a + ToOwned + ?Sized> Deref for ANSIGenericString<'a, S> +where ::Owned: fmt::Debug { + type Target = S; + + fn deref(&self) -> &S { + self.string.deref() + } +} + + +/// A set of `ANSIGenericString`s collected together, in order to be +/// written with a minimum of control characters. +pub struct ANSIGenericStrings<'a, S: 'a + ToOwned + ?Sized> + (pub &'a [ANSIGenericString<'a, S>]) + where ::Owned: fmt::Debug; + +/// A set of `ANSIString`s collected together, in order to be written with a +/// minimum of control characters. +pub type ANSIStrings<'a> = ANSIGenericStrings<'a, str>; + +/// A function to construct an `ANSIStrings` instance. +#[allow(non_snake_case)] +pub fn ANSIStrings<'a>(arg: &'a [ANSIString<'a>]) -> ANSIStrings<'a> { + ANSIGenericStrings(arg) +} + +/// A set of `ANSIByteString`s collected together, in order to be +/// written with a minimum of control characters. +pub type ANSIByteStrings<'a> = ANSIGenericStrings<'a, [u8]>; + +/// A function to construct an `ANSIByteStrings` instance. +#[allow(non_snake_case)] +pub fn ANSIByteStrings<'a>(arg: &'a [ANSIByteString<'a>]) -> ANSIByteStrings<'a> { + ANSIGenericStrings(arg) +} + + +// ---- paint functions ---- + +impl Style { + + /// Paints the given text with this colour, returning an ANSI string. + pub fn paint<'a, I, S: 'a + ToOwned + ?Sized>(self, input: I) -> ANSIGenericString<'a, S> + where I: Into>, + ::Owned: fmt::Debug { + ANSIGenericString { + string: input.into(), + style: self, + } + } +} + + +impl Colour { + + /// Paints the given text with this colour, returning an ANSI string. + /// This is a short-cut so you don’t have to use `Blue.normal()` just + /// to get blue text. + /// + /// ``` + /// use ansi_term::Colour::Blue; + /// println!("{}", Blue.paint("da ba dee")); + /// ``` + pub fn paint<'a, I, S: 'a + ToOwned + ?Sized>(self, input: I) -> ANSIGenericString<'a, S> + where I: Into>, + ::Owned: fmt::Debug { + ANSIGenericString { + string: input.into(), + style: self.normal(), + } + } +} + + +// ---- writers for individual ANSI strings ---- + +impl<'a> fmt::Display for ANSIString<'a> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let w: &mut fmt::Write = f; + self.write_to_any(w) + } +} + +impl<'a> ANSIByteString<'a> { + /// Write an `ANSIByteString` to an `io::Write`. This writes the escape + /// sequences for the associated `Style` around the bytes. 
+ pub fn write_to(&self, w: &mut W) -> io::Result<()> { + let w: &mut io::Write = w; + self.write_to_any(w) + } +} + +impl<'a, S: 'a + ToOwned + ?Sized> ANSIGenericString<'a, S> +where ::Owned: fmt::Debug, &'a S: AsRef<[u8]> { + fn write_to_any + ?Sized>(&self, w: &mut W) -> Result<(), W::Error> { + write!(w, "{}", self.style.prefix())?; + w.write_str(self.string.as_ref())?; + write!(w, "{}", self.style.suffix()) + } +} + + +// ---- writers for combined ANSI strings ---- + +impl<'a> fmt::Display for ANSIStrings<'a> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let f: &mut fmt::Write = f; + self.write_to_any(f) + } +} + +impl<'a> ANSIByteStrings<'a> { + /// Write `ANSIByteStrings` to an `io::Write`. This writes the minimal + /// escape sequences for the associated `Style`s around each set of + /// bytes. + pub fn write_to(&self, w: &mut W) -> io::Result<()> { + let w: &mut io::Write = w; + self.write_to_any(w) + } +} + +impl<'a, S: 'a + ToOwned + ?Sized> ANSIGenericStrings<'a, S> +where ::Owned: fmt::Debug, &'a S: AsRef<[u8]> { + fn write_to_any + ?Sized>(&self, w: &mut W) -> Result<(), W::Error> { + use self::Difference::*; + + let first = match self.0.first() { + None => return Ok(()), + Some(f) => f, + }; + + write!(w, "{}", first.style.prefix())?; + w.write_str(first.string.as_ref())?; + + for window in self.0.windows(2) { + match Difference::between(&window[0].style, &window[1].style) { + ExtraStyles(style) => write!(w, "{}", style.prefix())?, + Reset => write!(w, "{}{}", RESET, window[1].style.prefix())?, + NoDifference => {/* Do nothing! */}, + } + + w.write_str(&window[1].string)?; + } + + // Write the final reset string after all of the ANSIStrings have been + // written, *except* if the last one has no styles, because it would + // have already been written by this point. + if let Some(last) = self.0.last() { + if !last.style.is_plain() { + write!(w, "{}", RESET)?; + } + } + + Ok(()) + } +} + + +// ---- tests ---- + +#[cfg(test)] +mod tests { + pub use super::super::ANSIStrings; + pub use style::Style; + pub use style::Colour::*; + + #[test] + fn no_control_codes_for_plain() { + let one = Style::default().paint("one"); + let two = Style::default().paint("two"); + let output = format!("{}", ANSIStrings( &[ one, two ] )); + assert_eq!(&*output, "onetwo"); + } +} diff --git a/ansi_term/src/lib.rs b/ansi_term/src/lib.rs new file mode 100644 index 000000000..f2a676a1c --- /dev/null +++ b/ansi_term/src/lib.rs @@ -0,0 +1,205 @@ +//! This is a library for controlling colours and formatting, such as +//! red bold text or blue underlined text, on ANSI terminals. +//! +//! +//! ## Basic usage +//! +//! There are two main data structures in this crate that you need to be +//! concerned with: `ANSIString` and `Style`. A `Style` holds stylistic +//! information: colours, whether the text should be bold, or blinking, or +//! whatever. There are also `Colour` variants that represent simple foreground +//! colour styles. An `ANSIString` is a string paired with a `Style`. +//! +//! (Yes, it’s British English, but you won’t have to write “colour” very often. +//! `Style` is used the majority of the time.) +//! +//! To format a string, call the `paint` method on a `Style` or a `Colour`, +//! passing in the string you want to format as the argument. For example, +//! here’s how to get some red text: +//! +//! use ansi_term::Colour::Red; +//! println!("This is in red: {}", Red.paint("a red string")); +//! +//! 
It’s important to note that the `paint` method does *not* actually return a +//! string with the ANSI control characters surrounding it. Instead, it returns +//! an `ANSIString` value that has a `Display` implementation that, when +//! formatted, returns the characters. This allows strings to be printed with a +//! minimum of `String` allocations being performed behind the scenes. +//! +//! If you *do* want to get at the escape codes, then you can convert the +//! `ANSIString` to a string as you would any other `Display` value: +//! +//! use ansi_term::Colour::Red; +//! use std::string::ToString; +//! let red_string = Red.paint("a red string").to_string(); +//! +//! +//! ## Bold, underline, background, and other styles +//! +//! For anything more complex than plain foreground colour changes, you need to +//! construct `Style` objects themselves, rather than beginning with a `Colour`. +//! You can do this by chaining methods based on a new `Style`, created with +//! `Style::new()`. Each method creates a new style that has that specific +//! property set. For example: +//! +//! use ansi_term::Style; +//! println!("How about some {} and {}?", +//! Style::new().bold().paint("bold"), +//! Style::new().underline().paint("underline")); +//! +//! For brevity, these methods have also been implemented for `Colour` values, +//! so you can give your styles a foreground colour without having to begin with +//! an empty `Style` value: +//! +//! use ansi_term::Colour::{Blue, Yellow}; +//! println!("Demonstrating {} and {}!", +//! Blue.bold().paint("blue bold"), +//! Yellow.underline().paint("yellow underline")); +//! println!("Yellow on blue: {}", Yellow.on(Blue).paint("wow!")); +//! +//! The complete list of styles you can use are: `bold`, `dimmed`, `italic`, +//! `underline`, `blink`, `reverse`, `hidden`, `strikethrough`, and `on` for +//! background colours. +//! +//! In some cases, you may find it easier to change the foreground on an +//! existing `Style` rather than starting from the appropriate `Colour`. +//! You can do this using the `fg` method: +//! +//! use ansi_term::Style; +//! use ansi_term::Colour::{Blue, Cyan, Yellow}; +//! println!("Yellow on blue: {}", Style::new().on(Blue).fg(Yellow).paint("yow!")); +//! println!("Also yellow on blue: {}", Cyan.on(Blue).fg(Yellow).paint("zow!")); +//! +//! Finally, you can turn a `Colour` into a `Style` with the `normal` method. +//! This will produce the exact same `ANSIString` as if you just used the +//! `paint` method on the `Colour` directly, but it’s useful in certain cases: +//! for example, you may have a method that returns `Styles`, and need to +//! represent both the “red bold” and “red, but not bold” styles with values of +//! the same type. The `Style` struct also has a `Default` implementation if you +//! want to have a style with *nothing* set. +//! +//! use ansi_term::Style; +//! use ansi_term::Colour::Red; +//! Red.normal().paint("yet another red string"); +//! Style::default().paint("a completely regular string"); +//! +//! +//! ## Extended colours +//! +//! You can access the extended range of 256 colours by using the `Fixed` colour +//! variant, which takes an argument of the colour number to use. This can be +//! included wherever you would use a `Colour`: +//! +//! use ansi_term::Colour::Fixed; +//! Fixed(134).paint("A sort of light purple"); +//! Fixed(221).on(Fixed(124)).paint("Mustard in the ketchup"); +//! +//! The first sixteen of these values are the same as the normal and bold +//! standard colour variants. 
There’s nothing stopping you from using these as +//! `Fixed` colours instead, but there’s nothing to be gained by doing so +//! either. +//! +//! You can also access full 24-bit color by using the `RGB` colour variant, +//! which takes separate `u8` arguments for red, green, and blue: +//! +//! use ansi_term::Colour::RGB; +//! RGB(70, 130, 180).paint("Steel blue"); +//! +//! ## Combining successive coloured strings +//! +//! The benefit of writing ANSI escape codes to the terminal is that they +//! *stack*: you do not need to end every coloured string with a reset code if +//! the text that follows it is of a similar style. For example, if you want to +//! have some blue text followed by some blue bold text, it’s possible to send +//! the ANSI code for blue, followed by the ANSI code for bold, and finishing +//! with a reset code without having to have an extra one between the two +//! strings. +//! +//! This crate can optimise the ANSI codes that get printed in situations like +//! this, making life easier for your terminal renderer. The `ANSIStrings` +//! struct takes a slice of several `ANSIString` values, and will iterate over +//! each of them, printing only the codes for the styles that need to be updated +//! as part of its formatting routine. +//! +//! The following code snippet uses this to enclose a binary number displayed in +//! red bold text inside some red, but not bold, brackets: +//! +//! use ansi_term::Colour::Red; +//! use ansi_term::{ANSIString, ANSIStrings}; +//! let some_value = format!("{:b}", 42); +//! let strings: &[ANSIString<'static>] = &[ +//! Red.paint("["), +//! Red.bold().paint(some_value), +//! Red.paint("]"), +//! ]; +//! println!("Value: {}", ANSIStrings(strings)); +//! +//! There are several things to note here. Firstly, the `paint` method can take +//! *either* an owned `String` or a borrowed `&str`. Internally, an `ANSIString` +//! holds a copy-on-write (`Cow`) string value to deal with both owned and +//! borrowed strings at the same time. This is used here to display a `String`, +//! the result of the `format!` call, using the same mechanism as some +//! statically-available `&str` slices. Secondly, that the `ANSIStrings` value +//! works in the same way as its singular counterpart, with a `Display` +//! implementation that only performs the formatting when required. +//! +//! ## Byte strings +//! +//! This library also supports formatting `[u8]` byte strings; this supports +//! applications working with text in an unknown encoding. `Style` and +//! `Color` support painting `[u8]` values, resulting in an `ANSIByteString`. +//! This type does not implement `Display`, as it may not contain UTF-8, but +//! it does provide a method `write_to` to write the result to any +//! `io::Write`: +//! +//! use ansi_term::Colour::Green; +//! Green.paint("user data".as_bytes()).write_to(&mut std::io::stdout()).unwrap(); +//! +//! Similarly, the type `ANSIByteStrings` supports writing a list of +//! `ANSIByteString` values with minimal escape sequences: +//! +//! use ansi_term::Colour::Green; +//! use ansi_term::ANSIByteStrings; +//! ANSIByteStrings(&[ +//! Green.paint("user data 1\n".as_bytes()), +//! Green.bold().paint("user data 2\n".as_bytes()), +//! 
]).write_to(&mut std::io::stdout()).unwrap(); + + +#![crate_name = "ansi_term"] +#![crate_type = "rlib"] +#![crate_type = "dylib"] + +#![warn(missing_copy_implementations)] +#![warn(missing_docs)] +#![warn(trivial_casts, trivial_numeric_casts)] +#![warn(unused_extern_crates, unused_qualifications)] + +#[cfg(target_os="windows")] +extern crate winapi; + +mod ansi; +pub use ansi::{Prefix, Infix, Suffix}; + +mod style; +pub use style::{Colour, Style}; + +/// Color is a type alias for Colour for those who can't be bothered. +pub use Colour as Color; + +// I'm not beyond calling Colour Colour, rather than Color, but I did +// purposefully name this crate 'ansi-term' so people wouldn't get +// confused when they tried to install it. +// +// Only *after* they'd installed it. + +mod difference; +mod display; +pub use display::*; + +mod write; + +mod windows; +pub use windows::*; + +mod debug; diff --git a/ansi_term/src/style.rs b/ansi_term/src/style.rs new file mode 100644 index 000000000..b9fb52326 --- /dev/null +++ b/ansi_term/src/style.rs @@ -0,0 +1,259 @@ +/// A style is a collection of properties that can format a string +/// using ANSI escape codes. +#[derive(PartialEq, Clone, Copy)] +pub struct Style { + + /// The style's foreground colour, if it has one. + pub foreground: Option, + + /// The style's background colour, if it has one. + pub background: Option, + + /// Whether this style is bold. + pub is_bold: bool, + + /// Whether this style is dimmed. + pub is_dimmed: bool, + + /// Whether this style is italic. + pub is_italic: bool, + + /// Whether this style is underlined. + pub is_underline: bool, + + /// Whether this style is blinking. + pub is_blink: bool, + + /// Whether this style has reverse colours. + pub is_reverse: bool, + + /// Whether this style is hidden. + pub is_hidden: bool, + + /// Whether this style is struckthrough. + pub is_strikethrough: bool +} + +impl Style { + /// Creates a new Style with no differences. + pub fn new() -> Style { + Style::default() + } + + /// Returns a `Style` with the bold property set. + pub fn bold(&self) -> Style { + Style { is_bold: true, .. *self } + } + + /// Returns a `Style` with the dimmed property set. + pub fn dimmed(&self) -> Style { + Style { is_dimmed: true, .. *self } + } + + /// Returns a `Style` with the italic property set. + pub fn italic(&self) -> Style { + Style { is_italic: true, .. *self } + } + + /// Returns a `Style` with the underline property set. + pub fn underline(&self) -> Style { + Style { is_underline: true, .. *self } + } + + /// Returns a `Style` with the blink property set. + pub fn blink(&self) -> Style { + Style { is_blink: true, .. *self } + } + + /// Returns a `Style` with the reverse property set. + pub fn reverse(&self) -> Style { + Style { is_reverse: true, .. *self } + } + + /// Returns a `Style` with the hidden property set. + pub fn hidden(&self) -> Style { + Style { is_hidden: true, .. *self } + } + + /// Returns a `Style` with the hidden property set. + pub fn strikethrough(&self) -> Style { + Style { is_strikethrough: true, .. *self } + } + + /// Returns a `Style` with the foreground colour property set. + pub fn fg(&self, foreground: Colour) -> Style { + Style { foreground: Some(foreground), .. *self } + } + + /// Returns a `Style` with the background colour property set. + pub fn on(&self, background: Colour) -> Style { + Style { background: Some(background), .. *self } + } + + /// Return true if this `Style` has no actual styles, and can be written + /// without any control characters. 
+ pub fn is_plain(self) -> bool { + self == Style::default() + } +} + +impl Default for Style { + + /// Returns a style with *no* properties set. Formatting text using this + /// style returns the exact same text. + /// + /// ``` + /// use ansi_term::Style; + /// assert_eq!(None, Style::default().foreground); + /// assert_eq!(None, Style::default().background); + /// assert_eq!(false, Style::default().is_bold); + /// assert_eq!("txt", Style::default().paint("txt").to_string()); + /// ``` + fn default() -> Style { + Style { + foreground: None, + background: None, + is_bold: false, + is_dimmed: false, + is_italic: false, + is_underline: false, + is_blink: false, + is_reverse: false, + is_hidden: false, + is_strikethrough: false, + } + } +} + + +// ---- colours ---- + +/// A colour is one specific type of ANSI escape code, and can refer +/// to either the foreground or background colour. +/// +/// These use the standard numeric sequences. +/// See +#[derive(PartialEq, Clone, Copy, Debug)] +pub enum Colour { + + /// Colour #0 (foreground code `30`, background code `40`). + /// + /// This is not necessarily the background colour, and using it as one may + /// render the text hard to read on terminals with dark backgrounds. + Black, + + /// Colour #1 (foreground code `31`, background code `41`). + Red, + + /// Colour #2 (foreground code `32`, background code `42`). + Green, + + /// Colour #3 (foreground code `33`, background code `43`). + Yellow, + + /// Colour #4 (foreground code `34`, background code `44`). + Blue, + + /// Colour #5 (foreground code `35`, background code `45`). + Purple, + + /// Colour #6 (foreground code `36`, background code `46`). + Cyan, + + /// Colour #7 (foreground code `37`, background code `47`). + /// + /// As above, this is not necessarily the foreground colour, and may be + /// hard to read on terminals with light backgrounds. + White, + + /// A colour number from 0 to 255, for use in 256-colour terminal + /// environments. + /// + /// - Colours 0 to 7 are the `Black` to `White` variants respectively. + /// These colours can usually be changed in the terminal emulator. + /// - Colours 8 to 15 are brighter versions of the eight colours above. + /// These can also usually be changed in the terminal emulator, or it + /// could be configured to use the original colours and show the text in + /// bold instead. It varies depending on the program. + /// - Colours 16 to 231 contain several palettes of bright colours, + /// arranged in six squares measuring six by six each. + /// - Colours 232 to 255 are shades of grey from black to white. + /// + /// It might make more sense to look at a [colour chart][cc]. + /// + /// [cc]: https://upload.wikimedia.org/wikipedia/en/1/15/Xterm_256color_chart.svg + Fixed(u8), + + /// A 24-bit RGB color, as specified by ISO-8613-3. + RGB(u8, u8, u8), +} + + +impl Colour { + /// Return a `Style` with the foreground colour set to this colour. + pub fn normal(self) -> Style { + Style { foreground: Some(self), .. Style::default() } + } + + /// Returns a `Style` with the bold property set. + pub fn bold(self) -> Style { + Style { foreground: Some(self), is_bold: true, .. Style::default() } + } + + /// Returns a `Style` with the dimmed property set. + pub fn dimmed(self) -> Style { + Style { foreground: Some(self), is_dimmed: true, .. Style::default() } + } + + /// Returns a `Style` with the italic property set. + pub fn italic(self) -> Style { + Style { foreground: Some(self), is_italic: true, .. 
Style::default() } + } + + /// Returns a `Style` with the underline property set. + pub fn underline(self) -> Style { + Style { foreground: Some(self), is_underline: true, .. Style::default() } + } + + /// Returns a `Style` with the blink property set. + pub fn blink(self) -> Style { + Style { foreground: Some(self), is_blink: true, .. Style::default() } + } + + /// Returns a `Style` with the reverse property set. + pub fn reverse(self) -> Style { + Style { foreground: Some(self), is_reverse: true, .. Style::default() } + } + + /// Returns a `Style` with the hidden property set. + pub fn hidden(self) -> Style { + Style { foreground: Some(self), is_hidden: true, .. Style::default() } + } + + /// Returns a `Style` with the strikethrough property set. + pub fn strikethrough(self) -> Style { + Style { foreground: Some(self), is_strikethrough: true, .. Style::default() } + } + + /// Returns a `Style` with the background colour property set. + pub fn on(self, background: Colour) -> Style { + Style { foreground: Some(self), background: Some(background), .. Style::default() } + } +} + +impl From for Style { + + /// You can turn a `Colour` into a `Style` with the foreground colour set + /// with the `From` trait. + /// + /// ``` + /// use ansi_term::{Style, Colour}; + /// let green_foreground = Style::default().fg(Colour::Green); + /// assert_eq!(green_foreground, Colour::Green.normal()); + /// assert_eq!(green_foreground, Colour::Green.into()); + /// assert_eq!(green_foreground, Style::from(Colour::Green)); + /// ``` + fn from(colour: Colour) -> Style { + colour.normal() + } +} diff --git a/ansi_term/src/windows.rs b/ansi_term/src/windows.rs new file mode 100644 index 000000000..ff6fa683d --- /dev/null +++ b/ansi_term/src/windows.rs @@ -0,0 +1,40 @@ +/// Enables ANSI code support on Windows 10. +/// +/// This uses Windows API calls to alter the properties of the console that +/// the program is running in. +/// +/// https://msdn.microsoft.com/en-us/library/windows/desktop/mt638032(v=vs.85).aspx +/// +/// Returns a `Result` with the Windows error code if unsuccessful. +#[cfg(windows)] +pub fn enable_ansi_support() -> Result<(), u32> { + use winapi::um::processenv::GetStdHandle; + use winapi::um::errhandlingapi::GetLastError; + use winapi::um::consoleapi::{GetConsoleMode, SetConsoleMode}; + + const STD_OUT_HANDLE: u32 = -11i32 as u32; + const ENABLE_VIRTUAL_TERMINAL_PROCESSING: u32 = 0x0004; + + unsafe { + // https://docs.microsoft.com/en-us/windows/console/getstdhandle + let std_out_handle = GetStdHandle(STD_OUT_HANDLE); + let error_code = GetLastError(); + if error_code != 0 { return Err(error_code); } + + // https://docs.microsoft.com/en-us/windows/console/getconsolemode + let mut console_mode: u32 = 0; + GetConsoleMode(std_out_handle, &mut console_mode); + let error_code = GetLastError(); + if error_code != 0 { return Err(error_code); } + + // VT processing not already enabled? 
+ if console_mode & ENABLE_VIRTUAL_TERMINAL_PROCESSING == 0 { + // https://docs.microsoft.com/en-us/windows/console/setconsolemode + SetConsoleMode(std_out_handle, console_mode | ENABLE_VIRTUAL_TERMINAL_PROCESSING); + let error_code = GetLastError(); + if error_code != 0 { return Err(error_code); } + } + } + + return Ok(()); +} diff --git a/ansi_term/src/write.rs b/ansi_term/src/write.rs new file mode 100644 index 000000000..bb146ac15 --- /dev/null +++ b/ansi_term/src/write.rs @@ -0,0 +1,40 @@ +use std::fmt; +use std::io; + + +pub trait AnyWrite { + type wstr: ?Sized; + type Error; + + fn write_fmt(&mut self, fmt: fmt::Arguments) -> Result<(), Self::Error>; + + fn write_str(&mut self, s: &Self::wstr) -> Result<(), Self::Error>; +} + + +impl<'a> AnyWrite for fmt::Write + 'a { + type wstr = str; + type Error = fmt::Error; + + fn write_fmt(&mut self, fmt: fmt::Arguments) -> Result<(), Self::Error> { + fmt::Write::write_fmt(self, fmt) + } + + fn write_str(&mut self, s: &Self::wstr) -> Result<(), Self::Error> { + fmt::Write::write_str(self, s) + } +} + + +impl<'a> AnyWrite for io::Write + 'a { + type wstr = [u8]; + type Error = io::Error; + + fn write_fmt(&mut self, fmt: fmt::Arguments) -> Result<(), Self::Error> { + io::Write::write_fmt(self, fmt) + } + + fn write_str(&mut self, s: &Self::wstr) -> Result<(), Self::Error> { + io::Write::write_all(self, s) + } +} diff --git a/arrayvec/.cargo-checksum.json b/arrayvec/.cargo-checksum.json new file mode 100644 index 000000000..0890a9a06 --- /dev/null +++ b/arrayvec/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{},"package":"a1e964f9e24d588183fcb43503abda40d288c8657dfc27311516ce2f05675aef"} \ No newline at end of file diff --git a/arrayvec/Cargo.toml b/arrayvec/Cargo.toml new file mode 100644 index 000000000..9e5c0d36f --- /dev/null +++ b/arrayvec/Cargo.toml @@ -0,0 +1,57 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "arrayvec" +version = "0.4.7" +authors = ["bluss"] +description = "A vector with fixed capacity, backed by an array (it can be stored on the stack too). Implements fixed capacity ArrayVec and ArrayString." 
+documentation = "https://docs.rs/arrayvec/" +keywords = ["stack", "vector", "array", "data-structure", "no_std"] +categories = ["data-structures", "no-std"] +license = "MIT/Apache-2.0" +repository = "https://github.com/bluss/arrayvec" +[package.metadata.docs.rs] +features = ["serde-1"] + +[package.metadata.release] +no-dev-version = true + +[[bench]] +name = "extend" +harness = false + +[[bench]] +name = "arraystring" +harness = false +[dependencies.nodrop] +version = "0.1.12" +default-features = false + +[dependencies.serde] +version = "1.0" +optional = true +default-features = false +[dev-dependencies.bencher] +version = "0.1.4" + +[dev-dependencies.matches] +version = "0.1" + +[dev-dependencies.serde_test] +version = "1.0" + +[features] +default = ["std"] +serde-1 = ["serde"] +std = [] +use_union = [] diff --git a/arrayvec/LICENSE-APACHE b/arrayvec/LICENSE-APACHE new file mode 100644 index 000000000..16fe87b06 --- /dev/null +++ b/arrayvec/LICENSE-APACHE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/arrayvec/LICENSE-MIT b/arrayvec/LICENSE-MIT new file mode 100644 index 000000000..2c8f27dba --- /dev/null +++ b/arrayvec/LICENSE-MIT @@ -0,0 +1,25 @@ +Copyright (c) Ulrik Sverdrup "bluss" 2015-2017 + +Permission is hereby granted, free of charge, to any +person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without +limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software +is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions +of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/arrayvec/README.rst b/arrayvec/README.rst new file mode 100644 index 000000000..c10ae3238 --- /dev/null +++ b/arrayvec/README.rst @@ -0,0 +1,184 @@ + +arrayvec +======== + +A vector with fixed capacity. + +Please read the `API documentation here`__ + +__ https://docs.rs/arrayvec + +|build_status|_ |crates|_ |crates2|_ + +.. |build_status| image:: https://travis-ci.org/bluss/arrayvec.svg +.. _build_status: https://travis-ci.org/bluss/arrayvec + +.. |crates| image:: http://meritbadge.herokuapp.com/arrayvec +.. _crates: https://crates.io/crates/arrayvec + +.. |crates2| image:: http://meritbadge.herokuapp.com/nodrop +.. _crates2: https://crates.io/crates/nodrop + +Recent Changes (arrayvec) +------------------------- + +- 0.4.7 + + - Fix future compat warning about raw pointer casts + - Use ``drop_in_place`` when dropping the arrayvec by-value iterator + - Decrease mininum Rust version (see docs) by @jeehoonkang + +- 0.3.25 + + - Fix future compat warning about raw pointer casts + +- 0.4.6 + + - Fix compilation on 16-bit targets. This means, the 65536 array size is not + included on these targets. 
+ +- 0.3.24 + + - Fix compilation on 16-bit targets. This means, the 65536 array size is not + included on these targets. + - Fix license files so that they are both included (was fixed in 0.4 before) + +- 0.4.5 + + - Add methods to ``ArrayString`` by @DenialAdams: + + - ``.pop() -> Option`` + - ``.truncate(new_len)`` + - ``.remove(index) -> char`` + + - Remove dependency on crate odds + - Document debug assertions in unsafe methods better + +- 0.4.4 + + - Add method ``ArrayVec::truncate()`` by @niklasf + +- 0.4.3 + + - Improve performance for ``ArrayVec::extend`` with a lower level + implementation (#74) + - Small cleanup in dependencies (use no std for crates where we don't need more) + +- 0.4.2 + + - Add constructor method ``new`` to ``CapacityError``. + +- 0.4.1 + + - Add ``Default`` impl to ``ArrayString`` by @tbu- + +- 0.4.0 + + - Reformed signatures and error handling by @bluss and @tbu-: + + - ``ArrayVec``'s ``push, insert, remove, swap_remove`` now match ``Vec``'s + corresponding signature and panic on capacity errors where applicable. + - Add fallible methods ``try_push, insert`` and checked methods + ``pop_at, swap_pop``. + - Similar changes to ``ArrayString``'s push methods. + + - Use a local version of the ``RangeArgument`` trait + - Add array sizes 50, 150, 200 by @daboross + - Support serde 1.0 by @daboross + - New method ``.push_unchecked()`` by @niklasf + - ``ArrayString`` implements ``PartialOrd, Ord`` by @tbu- + - Require Rust 1.14 + - crate feature ``use_generic_array`` was dropped. + +- 0.3.23 + + - Implement ``PartialOrd, Ord`` as well as ``PartialOrd`` for + ``ArrayString``. + +- 0.3.22 + + - Implement ``Array`` for the 65536 size + +- 0.3.21 + + - Use ``encode_utf8`` from crate odds + - Add constructor ``ArrayString::from_byte_string`` + +- 0.3.20 + + - Simplify and speed up ``ArrayString``’s ``.push(char)``- + +- 0.3.19 + + - Add new crate feature ``use_generic_array`` which allows using their + ``GenericArray`` just like a regular fixed size array for the storage + of an ``ArrayVec``. + +- 0.3.18 + + - Fix bounds check in ``ArrayVec::insert``! + It would be buggy if ``self.len() < index < self.capacity()``. Take note of + the push out behavior specified in the docs. + +- 0.3.17 + + - Added crate feature ``use_union`` which forwards to the nodrop crate feature + - Added methods ``.is_full()`` to ``ArrayVec`` and ``ArrayString``. + +- 0.3.16 + + - Added method ``.retain()`` to ``ArrayVec``. + - Added methods ``.as_slice(), .as_mut_slice()`` to ``ArrayVec`` and ``.as_str()`` + to ``ArrayString``. + +- 0.3.15 + + - Add feature std, which you can opt out of to use ``no_std`` (requires Rust 1.6 + to opt out). + - Implement ``Clone::clone_from`` for ArrayVec and ArrayString + +- 0.3.14 + + - Add ``ArrayString::from(&str)`` + +- 0.3.13 + + - Added ``DerefMut`` impl for ``ArrayString``. + - Added method ``.simplify()`` to drop the element for ``CapacityError``. + - Added method ``.dispose()`` to ``ArrayVec`` + +- 0.3.12 + + - Added ArrayString, a fixed capacity analogy of String + +- 0.3.11 + + - Added trait impls Default, PartialOrd, Ord, Write for ArrayVec + +- 0.3.10 + + - Go back to using external NoDrop, fixing a panic safety bug (issue #3) + +- 0.3.8 + + - Inline the non-dropping logic to remove one drop flag in the + ArrayVec representation. + +- 0.3.7 + + - Added method .into_inner() + - Added unsafe method .set_len() + + +License +======= + +Dual-licensed to be compatible with the Rust project. 
+ +Licensed under the Apache License, Version 2.0 +http://www.apache.org/licenses/LICENSE-2.0 or the MIT license +http://opensource.org/licenses/MIT, at your +option. This file may not be copied, modified, or distributed +except according to those terms. + + diff --git a/arrayvec/benches/arraystring.rs b/arrayvec/benches/arraystring.rs new file mode 100644 index 000000000..9cff5875d --- /dev/null +++ b/arrayvec/benches/arraystring.rs @@ -0,0 +1,90 @@ + +extern crate arrayvec; +#[macro_use] extern crate bencher; + +use arrayvec::ArrayString; + +use bencher::Bencher; + +fn try_push_c(b: &mut Bencher) { + let mut v = ArrayString::<[u8; 512]>::new(); + b.iter(|| { + v.clear(); + while v.try_push('c').is_ok() { + } + v.len() + }); + b.bytes = v.capacity() as u64; +} + +fn try_push_alpha(b: &mut Bencher) { + let mut v = ArrayString::<[u8; 512]>::new(); + b.iter(|| { + v.clear(); + while v.try_push('α').is_ok() { + } + v.len() + }); + b.bytes = v.capacity() as u64; +} + +// Yes, pushing a string char-by-char is slow. Use .push_str. +fn try_push_string(b: &mut Bencher) { + let mut v = ArrayString::<[u8; 512]>::new(); + let input = "abcαβγ“”"; + b.iter(|| { + v.clear(); + for ch in input.chars().cycle() { + if !v.try_push(ch).is_ok() { + break; + } + } + v.len() + }); + b.bytes = v.capacity() as u64; +} + +fn push_c(b: &mut Bencher) { + let mut v = ArrayString::<[u8; 512]>::new(); + b.iter(|| { + v.clear(); + while !v.is_full() { + v.push('c'); + } + v.len() + }); + b.bytes = v.capacity() as u64; +} + +fn push_alpha(b: &mut Bencher) { + let mut v = ArrayString::<[u8; 512]>::new(); + b.iter(|| { + v.clear(); + while !v.is_full() { + v.push('α'); + } + v.len() + }); + b.bytes = v.capacity() as u64; +} + +fn push_string(b: &mut Bencher) { + let mut v = ArrayString::<[u8; 512]>::new(); + let input = "abcαβγ“”"; + b.iter(|| { + v.clear(); + for ch in input.chars().cycle() { + if !v.is_full() { + v.push(ch); + } else { + break; + } + } + v.len() + }); + b.bytes = v.capacity() as u64; +} + +benchmark_group!(benches, try_push_c, try_push_alpha, try_push_string, push_c, + push_alpha, push_string); +benchmark_main!(benches); diff --git a/arrayvec/benches/extend.rs b/arrayvec/benches/extend.rs new file mode 100644 index 000000000..d380a7ed2 --- /dev/null +++ b/arrayvec/benches/extend.rs @@ -0,0 +1,43 @@ + +extern crate arrayvec; +#[macro_use] extern crate bencher; + +use arrayvec::ArrayVec; + +use bencher::Bencher; + +fn extend_with_constant(b: &mut Bencher) { + let mut v = ArrayVec::<[u8; 512]>::new(); + let cap = v.capacity(); + b.iter(|| { + v.clear(); + v.extend((0..cap).map(|_| 1)); + v[0] + }); + b.bytes = v.capacity() as u64; +} + +fn extend_with_range(b: &mut Bencher) { + let mut v = ArrayVec::<[u8; 512]>::new(); + let cap = v.capacity(); + b.iter(|| { + v.clear(); + v.extend((0..cap).map(|x| x as _)); + v[0] + }); + b.bytes = v.capacity() as u64; +} + +fn extend_with_slice(b: &mut Bencher) { + let mut v = ArrayVec::<[u8; 512]>::new(); + let data = [1; 512]; + b.iter(|| { + v.clear(); + v.extend(data.iter().cloned()); + v[0] + }); + b.bytes = v.capacity() as u64; +} + +benchmark_group!(benches, extend_with_constant, extend_with_range, extend_with_slice); +benchmark_main!(benches); diff --git a/arrayvec/custom.css b/arrayvec/custom.css new file mode 100644 index 000000000..8e0b7053e --- /dev/null +++ b/arrayvec/custom.css @@ -0,0 +1,25 @@ + +.docblock pre.rust { background: #eeeeff; } +pre.trait, pre.fn, pre.struct, pre.enum, pre.typedef { background: #fcfefc; } + +/* Small “example” label for doc 
examples */ +.docblock pre.rust::before { + content: "example"; + float: right; + font-style: italic; + font-size: 0.8em; + margin-top: -10px; + margin-right: -5px; +} + + +/* Fixup where display in trait listing */ +pre.trait .where::before { +content: '\a '; +} + +.docblock code { + background-color: inherit; + font-weight: bold; + padding: 0 0.1em; +} diff --git a/arrayvec/src/array.rs b/arrayvec/src/array.rs new file mode 100644 index 000000000..2ca90b30d --- /dev/null +++ b/arrayvec/src/array.rs @@ -0,0 +1,93 @@ + +/// Trait for fixed size arrays. +pub unsafe trait Array { + /// The array’s element type + type Item; + #[doc(hidden)] + /// The smallest index type that indexes the array. + type Index: Index; + #[doc(hidden)] + fn as_ptr(&self) -> *const Self::Item; + #[doc(hidden)] + fn as_mut_ptr(&mut self) -> *mut Self::Item; + #[doc(hidden)] + fn capacity() -> usize; +} + +pub trait Index : PartialEq + Copy { + fn to_usize(self) -> usize; + fn from(usize) -> Self; +} + +use std::slice::{from_raw_parts}; + +pub trait ArrayExt : Array { + #[inline(always)] + fn as_slice(&self) -> &[Self::Item] { + unsafe { + from_raw_parts(self.as_ptr(), Self::capacity()) + } + } +} + +impl ArrayExt for A where A: Array { } + +impl Index for u8 { + #[inline(always)] + fn to_usize(self) -> usize { self as usize } + #[inline(always)] + fn from(ix: usize) -> Self { ix as u8 } +} + +impl Index for u16 { + #[inline(always)] + fn to_usize(self) -> usize { self as usize } + #[inline(always)] + fn from(ix: usize) -> Self { ix as u16 } +} + +impl Index for u32 { + #[inline(always)] + fn to_usize(self) -> usize { self as usize } + #[inline(always)] + fn from(ix: usize) -> Self { ix as u32 } +} + +impl Index for usize { + #[inline(always)] + fn to_usize(self) -> usize { self } + #[inline(always)] + fn from(ix: usize) -> Self { ix } +} + +macro_rules! fix_array_impl { + ($index_type:ty, $len:expr ) => ( + unsafe impl Array for [T; $len] { + type Item = T; + type Index = $index_type; + #[inline(always)] + fn as_ptr(&self) -> *const T { self as *const _ as *const _ } + #[inline(always)] + fn as_mut_ptr(&mut self) -> *mut T { self as *mut _ as *mut _} + #[inline(always)] + fn capacity() -> usize { $len } + } + ) +} + +macro_rules! fix_array_impl_recursive { + ($index_type:ty, ) => (); + ($index_type:ty, $len:expr, $($more:expr,)*) => ( + fix_array_impl!($index_type, $len); + fix_array_impl_recursive!($index_type, $($more,)*); + ); +} + +fix_array_impl_recursive!(u8, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, + 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, + 32, 40, 48, 50, 56, 64, 72, 96, 100, 128, 160, 192, 200, 224,); +fix_array_impl_recursive!(u16, 256, 384, 512, 768, 1024, 2048, 4096, 8192, 16384, 32768,); +// This array size doesn't exist on 16-bit +#[cfg(any(target_pointer_width="32", target_pointer_width="64"))] +fix_array_impl_recursive!(u32, 1 << 16,); + diff --git a/arrayvec/src/array_string.rs b/arrayvec/src/array_string.rs new file mode 100644 index 000000000..0dc097432 --- /dev/null +++ b/arrayvec/src/array_string.rs @@ -0,0 +1,514 @@ +use std::borrow::Borrow; +use std::cmp; +use std::fmt; +use std::hash::{Hash, Hasher}; +use std::mem; +use std::ptr; +use std::ops::{Deref, DerefMut}; +use std::str; +use std::str::Utf8Error; +use std::slice; + +use array::{Array, ArrayExt}; +use array::Index; +use CapacityError; +use char::encode_utf8; + +#[cfg(feature="serde-1")] +use serde::{Serialize, Deserialize, Serializer, Deserializer}; + +/// A string with a fixed capacity. 
+/// +/// The `ArrayString` is a string backed by a fixed size array. It keeps track +/// of its length. +/// +/// The string is a contiguous value that you can store directly on the stack +/// if needed. +#[derive(Copy)] +pub struct ArrayString> { + xs: A, + len: A::Index, +} + +impl> Default for ArrayString { + /// Return an empty `ArrayString` + fn default() -> ArrayString { + ArrayString::new() + } +} + +impl> ArrayString { + /// Create a new empty `ArrayString`. + /// + /// Capacity is inferred from the type parameter. + /// + /// ``` + /// use arrayvec::ArrayString; + /// + /// let mut string = ArrayString::<[_; 16]>::new(); + /// string.push_str("foo"); + /// assert_eq!(&string[..], "foo"); + /// assert_eq!(string.capacity(), 16); + /// ``` + pub fn new() -> ArrayString { + unsafe { + ArrayString { + xs: ::new_array(), + len: Index::from(0), + } + } + } + + /// Create a new `ArrayString` from a `str`. + /// + /// Capacity is inferred from the type parameter. + /// + /// **Errors** if the backing array is not large enough to fit the string. + /// + /// ``` + /// use arrayvec::ArrayString; + /// + /// let mut string = ArrayString::<[_; 3]>::from("foo").unwrap(); + /// assert_eq!(&string[..], "foo"); + /// assert_eq!(string.len(), 3); + /// assert_eq!(string.capacity(), 3); + /// ``` + pub fn from(s: &str) -> Result> { + let mut arraystr = Self::new(); + arraystr.try_push_str(s)?; + Ok(arraystr) + } + + /// Create a new `ArrayString` from a byte string literal. + /// + /// **Errors** if the byte string literal is not valid UTF-8. + /// + /// ``` + /// use arrayvec::ArrayString; + /// + /// let string = ArrayString::from_byte_string(b"hello world").unwrap(); + /// ``` + pub fn from_byte_string(b: &A) -> Result { + let mut arraystr = Self::new(); + let s = try!(str::from_utf8(b.as_slice())); + let _result = arraystr.try_push_str(s); + debug_assert!(_result.is_ok()); + Ok(arraystr) + } + + /// Return the capacity of the `ArrayString`. + /// + /// ``` + /// use arrayvec::ArrayString; + /// + /// let string = ArrayString::<[_; 3]>::new(); + /// assert_eq!(string.capacity(), 3); + /// ``` + #[inline] + pub fn capacity(&self) -> usize { A::capacity() } + + /// Return if the `ArrayString` is completely filled. + /// + /// ``` + /// use arrayvec::ArrayString; + /// + /// let mut string = ArrayString::<[_; 1]>::new(); + /// assert!(!string.is_full()); + /// string.push_str("A"); + /// assert!(string.is_full()); + /// ``` + pub fn is_full(&self) -> bool { self.len() == self.capacity() } + + /// Adds the given char to the end of the string. + /// + /// ***Panics*** if the backing array is not large enough to fit the additional char. + /// + /// ``` + /// use arrayvec::ArrayString; + /// + /// let mut string = ArrayString::<[_; 2]>::new(); + /// + /// string.push('a'); + /// string.push('b'); + /// + /// assert_eq!(&string[..], "ab"); + /// ``` + pub fn push(&mut self, c: char) { + self.try_push(c).unwrap(); + } + + /// Adds the given char to the end of the string. + /// + /// Returns `Ok` if the push succeeds. + /// + /// **Errors** if the backing array is not large enough to fit the additional char. 
+ /// + /// ``` + /// use arrayvec::ArrayString; + /// + /// let mut string = ArrayString::<[_; 2]>::new(); + /// + /// string.try_push('a').unwrap(); + /// string.try_push('b').unwrap(); + /// let overflow = string.try_push('c'); + /// + /// assert_eq!(&string[..], "ab"); + /// assert_eq!(overflow.unwrap_err().element(), 'c'); + /// ``` + pub fn try_push(&mut self, c: char) -> Result<(), CapacityError> { + let len = self.len(); + unsafe { + match encode_utf8(c, &mut self.raw_mut_bytes()[len..]) { + Ok(n) => { + self.set_len(len + n); + Ok(()) + } + Err(_) => Err(CapacityError::new(c)), + } + } + } + + /// Adds the given string slice to the end of the string. + /// + /// ***Panics*** if the backing array is not large enough to fit the string. + /// + /// ``` + /// use arrayvec::ArrayString; + /// + /// let mut string = ArrayString::<[_; 2]>::new(); + /// + /// string.push_str("a"); + /// string.push_str("d"); + /// + /// assert_eq!(&string[..], "ad"); + /// ``` + pub fn push_str(&mut self, s: &str) { + self.try_push_str(s).unwrap() + } + + /// Adds the given string slice to the end of the string. + /// + /// Returns `Ok` if the push succeeds. + /// + /// **Errors** if the backing array is not large enough to fit the string. + /// + /// ``` + /// use arrayvec::ArrayString; + /// + /// let mut string = ArrayString::<[_; 2]>::new(); + /// + /// string.try_push_str("a").unwrap(); + /// let overflow1 = string.try_push_str("bc"); + /// string.try_push_str("d").unwrap(); + /// let overflow2 = string.try_push_str("ef"); + /// + /// assert_eq!(&string[..], "ad"); + /// assert_eq!(overflow1.unwrap_err().element(), "bc"); + /// assert_eq!(overflow2.unwrap_err().element(), "ef"); + /// ``` + pub fn try_push_str<'a>(&mut self, s: &'a str) -> Result<(), CapacityError<&'a str>> { + if s.len() > self.capacity() - self.len() { + return Err(CapacityError::new(s)); + } + unsafe { + let dst = self.xs.as_mut_ptr().offset(self.len() as isize); + let src = s.as_ptr(); + ptr::copy_nonoverlapping(src, dst, s.len()); + let newl = self.len() + s.len(); + self.set_len(newl); + } + Ok(()) + } + + /// Removes the last character from the string and returns it. + /// + /// Returns `None` if this `ArrayString` is empty. + /// + /// ``` + /// use arrayvec::ArrayString; + /// + /// let mut s = ArrayString::<[_; 3]>::from("foo").unwrap(); + /// + /// assert_eq!(s.pop(), Some('o')); + /// assert_eq!(s.pop(), Some('o')); + /// assert_eq!(s.pop(), Some('f')); + /// + /// assert_eq!(s.pop(), None); + /// ``` + #[inline] + pub fn pop(&mut self) -> Option { + let ch = match self.chars().rev().next() { + Some(ch) => ch, + None => return None, + }; + let new_len = self.len() - ch.len_utf8(); + unsafe { + self.set_len(new_len); + } + Some(ch) + } + + /// Shortens this `ArrayString` to the specified length. + /// + /// If `new_len` is greater than the string’s current length, this has no + /// effect. + /// + /// ***Panics*** if `new_len` does not lie on a `char` boundary. + /// + /// ``` + /// use arrayvec::ArrayString; + /// + /// let mut string = ArrayString::<[_; 6]>::from("foobar").unwrap(); + /// string.truncate(3); + /// assert_eq!(&string[..], "foo"); + /// string.truncate(4); + /// assert_eq!(&string[..], "foo"); + /// ``` + #[inline] + pub fn truncate(&mut self, new_len: usize) { + if new_len <= self.len() { + assert!(self.is_char_boundary(new_len)); + unsafe { + // In libstd truncate is called on the underlying vector, + // which in turns drops each element. 
+ // As we know we don't have to worry about Drop, + // we can just set the length (a la clear.) + self.set_len(new_len); + } + } + } + + /// Removes a `char` from this `ArrayString` at a byte position and returns it. + /// + /// This is an `O(n)` operation, as it requires copying every element in the + /// array. + /// + /// ***Panics*** if `idx` is larger than or equal to the `ArrayString`’s length, + /// or if it does not lie on a `char` boundary. + /// + /// ``` + /// use arrayvec::ArrayString; + /// + /// let mut s = ArrayString::<[_; 3]>::from("foo").unwrap(); + /// + /// assert_eq!(s.remove(0), 'f'); + /// assert_eq!(s.remove(1), 'o'); + /// assert_eq!(s.remove(0), 'o'); + /// ``` + #[inline] + pub fn remove(&mut self, idx: usize) -> char { + let ch = match self[idx..].chars().next() { + Some(ch) => ch, + None => panic!("cannot remove a char from the end of a string"), + }; + + let next = idx + ch.len_utf8(); + let len = self.len(); + unsafe { + ptr::copy(self.xs.as_ptr().offset(next as isize), + self.xs.as_mut_ptr().offset(idx as isize), + len - next); + self.set_len(len - (next - idx)); + } + ch + } + + /// Make the string empty. + pub fn clear(&mut self) { + unsafe { + self.set_len(0); + } + } + + /// Set the strings’s length. + /// + /// This function is `unsafe` because it changes the notion of the + /// number of “valid” bytes in the string. Use with care. + /// + /// This method uses *debug assertions* to check the validity of `length` + /// and may use other debug assertions. + #[inline] + pub unsafe fn set_len(&mut self, length: usize) { + debug_assert!(length <= self.capacity()); + self.len = Index::from(length); + } + + /// Return a string slice of the whole `ArrayString`. + pub fn as_str(&self) -> &str { + self + } + + /// Return a mutable slice of the whole string’s buffer + unsafe fn raw_mut_bytes(&mut self) -> &mut [u8] { + slice::from_raw_parts_mut(self.xs.as_mut_ptr(), self.capacity()) + } +} + +impl> Deref for ArrayString { + type Target = str; + #[inline] + fn deref(&self) -> &str { + unsafe { + let sl = slice::from_raw_parts(self.xs.as_ptr(), self.len.to_usize()); + str::from_utf8_unchecked(sl) + } + } +} + +impl> DerefMut for ArrayString { + #[inline] + fn deref_mut(&mut self) -> &mut str { + unsafe { + let sl = slice::from_raw_parts_mut(self.xs.as_mut_ptr(), self.len.to_usize()); + // FIXME: Nothing but transmute to do this right now + mem::transmute(sl) + } + } +} + +impl> PartialEq for ArrayString { + fn eq(&self, rhs: &Self) -> bool { + **self == **rhs + } +} + +impl> PartialEq for ArrayString { + fn eq(&self, rhs: &str) -> bool { + &**self == rhs + } +} + +impl> PartialEq> for str { + fn eq(&self, rhs: &ArrayString) -> bool { + self == &**rhs + } +} + +impl> Eq for ArrayString { } + +impl> Hash for ArrayString { + fn hash(&self, h: &mut H) { + (**self).hash(h) + } +} + +impl> Borrow for ArrayString { + fn borrow(&self) -> &str { self } +} + +impl> AsRef for ArrayString { + fn as_ref(&self) -> &str { self } +} + +impl> fmt::Debug for ArrayString { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { (**self).fmt(f) } +} + +impl> fmt::Display for ArrayString { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { (**self).fmt(f) } +} + +/// `Write` appends written data to the end of the string. 
+impl> fmt::Write for ArrayString { + fn write_char(&mut self, c: char) -> fmt::Result { + self.try_push(c).map_err(|_| fmt::Error) + } + + fn write_str(&mut self, s: &str) -> fmt::Result { + self.try_push_str(s).map_err(|_| fmt::Error) + } +} + +impl + Copy> Clone for ArrayString { + fn clone(&self) -> ArrayString { + *self + } + fn clone_from(&mut self, rhs: &Self) { + // guaranteed to fit due to types matching. + self.clear(); + self.try_push_str(rhs).ok(); + } +} + +impl> PartialOrd for ArrayString { + fn partial_cmp(&self, rhs: &Self) -> Option { + (**self).partial_cmp(&**rhs) + } + fn lt(&self, rhs: &Self) -> bool { **self < **rhs } + fn le(&self, rhs: &Self) -> bool { **self <= **rhs } + fn gt(&self, rhs: &Self) -> bool { **self > **rhs } + fn ge(&self, rhs: &Self) -> bool { **self >= **rhs } +} + +impl> PartialOrd for ArrayString { + fn partial_cmp(&self, rhs: &str) -> Option { + (**self).partial_cmp(rhs) + } + fn lt(&self, rhs: &str) -> bool { &**self < rhs } + fn le(&self, rhs: &str) -> bool { &**self <= rhs } + fn gt(&self, rhs: &str) -> bool { &**self > rhs } + fn ge(&self, rhs: &str) -> bool { &**self >= rhs } +} + +impl> PartialOrd> for str { + fn partial_cmp(&self, rhs: &ArrayString) -> Option { + self.partial_cmp(&**rhs) + } + fn lt(&self, rhs: &ArrayString) -> bool { self < &**rhs } + fn le(&self, rhs: &ArrayString) -> bool { self <= &**rhs } + fn gt(&self, rhs: &ArrayString) -> bool { self > &**rhs } + fn ge(&self, rhs: &ArrayString) -> bool { self >= &**rhs } +} + +impl> Ord for ArrayString { + fn cmp(&self, rhs: &Self) -> cmp::Ordering { + (**self).cmp(&**rhs) + } +} + +#[cfg(feature="serde-1")] +/// Requires crate feature `"serde-1"` +impl> Serialize for ArrayString { + fn serialize(&self, serializer: S) -> Result + where S: Serializer + { + serializer.serialize_str(&*self) + } +} + +#[cfg(feature="serde-1")] +/// Requires crate feature `"serde-1"` +impl<'de, A: Array> Deserialize<'de> for ArrayString { + fn deserialize(deserializer: D) -> Result + where D: Deserializer<'de> + { + use serde::de::{self, Visitor}; + use std::marker::PhantomData; + + struct ArrayStringVisitor>(PhantomData); + + impl<'de, A: Array> Visitor<'de> for ArrayStringVisitor { + type Value = ArrayString; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + write!(formatter, "a string no more than {} bytes long", A::capacity()) + } + + fn visit_str(self, v: &str) -> Result + where E: de::Error, + { + ArrayString::from(v).map_err(|_| E::invalid_length(v.len(), &self)) + } + + fn visit_bytes(self, v: &[u8]) -> Result + where E: de::Error, + { + let s = try!(str::from_utf8(v).map_err(|_| E::invalid_value(de::Unexpected::Bytes(v), &self))); + + ArrayString::from(s).map_err(|_| E::invalid_length(s.len(), &self)) + } + } + + deserializer.deserialize_str(ArrayStringVisitor::(PhantomData)) + } +} diff --git a/arrayvec/src/char.rs b/arrayvec/src/char.rs new file mode 100644 index 000000000..8191dfbd8 --- /dev/null +++ b/arrayvec/src/char.rs @@ -0,0 +1,54 @@ +// Copyright 2012-2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+// +// Original authors: alexchrichton, bluss + +// UTF-8 ranges and tags for encoding characters +const TAG_CONT: u8 = 0b1000_0000; +const TAG_TWO_B: u8 = 0b1100_0000; +const TAG_THREE_B: u8 = 0b1110_0000; +const TAG_FOUR_B: u8 = 0b1111_0000; +const MAX_ONE_B: u32 = 0x80; +const MAX_TWO_B: u32 = 0x800; +const MAX_THREE_B: u32 = 0x10000; + +/// Placeholder +pub struct EncodeUtf8Error; + +/// Encode a char into buf using UTF-8. +/// +/// On success, return the byte length of the encoding (1, 2, 3 or 4).
+/// On error, return `EncodeUtf8Error` if the buffer was too short for the char. +#[inline] +pub fn encode_utf8(ch: char, buf: &mut [u8]) -> Result +{ + let code = ch as u32; + if code < MAX_ONE_B && buf.len() >= 1 { + buf[0] = code as u8; + return Ok(1); + } else if code < MAX_TWO_B && buf.len() >= 2 { + buf[0] = (code >> 6 & 0x1F) as u8 | TAG_TWO_B; + buf[1] = (code & 0x3F) as u8 | TAG_CONT; + return Ok(2); + } else if code < MAX_THREE_B && buf.len() >= 3 { + buf[0] = (code >> 12 & 0x0F) as u8 | TAG_THREE_B; + buf[1] = (code >> 6 & 0x3F) as u8 | TAG_CONT; + buf[2] = (code & 0x3F) as u8 | TAG_CONT; + return Ok(3); + } else if buf.len() >= 4 { + buf[0] = (code >> 18 & 0x07) as u8 | TAG_FOUR_B; + buf[1] = (code >> 12 & 0x3F) as u8 | TAG_CONT; + buf[2] = (code >> 6 & 0x3F) as u8 | TAG_CONT; + buf[3] = (code & 0x3F) as u8 | TAG_CONT; + return Ok(4); + }; + Err(EncodeUtf8Error) +} + diff --git a/arrayvec/src/errors.rs b/arrayvec/src/errors.rs new file mode 100644 index 000000000..5bea98068 --- /dev/null +++ b/arrayvec/src/errors.rs @@ -0,0 +1,53 @@ +use std::fmt; +#[cfg(feature="std")] +use std::any::Any; +#[cfg(feature="std")] +use std::error::Error; + +/// Error value indicating insufficient capacity +#[derive(Clone, Copy, Eq, Ord, PartialEq, PartialOrd)] +pub struct CapacityError { + element: T, +} + +impl CapacityError { + /// Create a new `CapacityError` from `element`. + pub fn new(element: T) -> CapacityError { + CapacityError { + element: element, + } + } + + /// Extract the overflowing element + pub fn element(self) -> T { + self.element + } + + /// Convert into a `CapacityError` that does not carry an element. + pub fn simplify(self) -> CapacityError { + CapacityError { element: () } + } +} + +const CAPERROR: &'static str = "insufficient capacity"; + +#[cfg(feature="std")] +/// Requires `features="std"`. +impl Error for CapacityError { + fn description(&self) -> &str { + CAPERROR + } +} + +impl fmt::Display for CapacityError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}", CAPERROR) + } +} + +impl fmt::Debug for CapacityError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}: {}", "CapacityError", CAPERROR) + } +} + diff --git a/arrayvec/src/lib.rs b/arrayvec/src/lib.rs new file mode 100644 index 000000000..74c884846 --- /dev/null +++ b/arrayvec/src/lib.rs @@ -0,0 +1,1067 @@ +//! **arrayvec** provides the types `ArrayVec` and `ArrayString`: +//! array-backed vector and string types, which store their contents inline. +//! +//! The arrayvec package has the following cargo features: +//! +//! - `std` +//! - Optional, enabled by default +//! - Use libstd; disable to use `no_std` instead. +//! +//! - `use_union` +//! - Optional +//! - Requires Rust nightly channel +//! - Experimental: This flag uses nightly so it *may break* unexpectedly +//! at some point; since it doesn't change API this flag may also change +//! to do nothing in the future. +//! - Use the unstable feature untagged unions for the internal implementation, +//! which may have reduced space overhead +//! - `serde-1` +//! - Optional +//! - Enable serialization for ArrayVec and ArrayString using serde 1.0 +//! +//! ## Rust Version +//! +//! This version of arrayvec requires Rust 1.13 or later. +//! 
+#![doc(html_root_url="https://docs.rs/arrayvec/0.4/")] +#![cfg_attr(not(feature="std"), no_std)] +extern crate nodrop; +#[cfg(feature="serde-1")] +extern crate serde; + +#[cfg(not(feature="std"))] +extern crate core as std; + +use std::cmp; +use std::iter; +use std::mem; +use std::ptr; +use std::ops::{ + Deref, + DerefMut, +}; +use std::slice; + +// extra traits +use std::borrow::{Borrow, BorrowMut}; +use std::hash::{Hash, Hasher}; +use std::fmt; + +#[cfg(feature="std")] +use std::io; + +#[cfg(not(feature="use_union"))] +use nodrop::NoDrop; + +#[cfg(feature="use_union")] +use std::mem::ManuallyDrop as NoDrop; + +#[cfg(feature="serde-1")] +use serde::{Serialize, Deserialize, Serializer, Deserializer}; + +mod array; +mod array_string; +mod char; +mod range; +mod errors; + +pub use array::Array; +pub use range::RangeArgument; +use array::Index; +pub use array_string::ArrayString; +pub use errors::CapacityError; + + +unsafe fn new_array() -> A { + // Note: Returning an uninitialized value here only works + // if we can be sure the data is never used. The nullable pointer + // inside enum optimization conflicts with this this for example, + // so we need to be extra careful. See `NoDrop` enum. + mem::uninitialized() +} + +/// A vector with a fixed capacity. +/// +/// The `ArrayVec` is a vector backed by a fixed size array. It keeps track of +/// the number of initialized elements. +/// +/// The vector is a contiguous value that you can store directly on the stack +/// if needed. +/// +/// It offers a simple API but also dereferences to a slice, so +/// that the full slice API is available. +/// +/// ArrayVec can be converted into a by value iterator. +pub struct ArrayVec { + xs: NoDrop
, + len: A::Index, +} + +impl Drop for ArrayVec { + fn drop(&mut self) { + self.clear(); + + // NoDrop inhibits array's drop + // panic safety: NoDrop::drop will trigger on panic, so the inner + // array will not drop even after panic. + } +} + +macro_rules! panic_oob { + ($method_name:expr, $index:expr, $len:expr) => { + panic!(concat!("ArrayVec::", $method_name, ": index {} is out of bounds in vector of length {}"), + $index, $len) + } +} + +impl ArrayVec { + /// Create a new empty `ArrayVec`. + /// + /// Capacity is inferred from the type parameter. + /// + /// ``` + /// use arrayvec::ArrayVec; + /// + /// let mut array = ArrayVec::<[_; 16]>::new(); + /// array.push(1); + /// array.push(2); + /// assert_eq!(&array[..], &[1, 2]); + /// assert_eq!(array.capacity(), 16); + /// ``` + pub fn new() -> ArrayVec { + unsafe { + ArrayVec { xs: NoDrop::new(new_array()), len: Index::from(0) } + } + } + + /// Return the number of elements in the `ArrayVec`. + /// + /// ``` + /// use arrayvec::ArrayVec; + /// + /// let mut array = ArrayVec::from([1, 2, 3]); + /// array.pop(); + /// assert_eq!(array.len(), 2); + /// ``` + #[inline] + pub fn len(&self) -> usize { self.len.to_usize() } + + /// Return the capacity of the `ArrayVec`. + /// + /// ``` + /// use arrayvec::ArrayVec; + /// + /// let array = ArrayVec::from([1, 2, 3]); + /// assert_eq!(array.capacity(), 3); + /// ``` + #[inline] + pub fn capacity(&self) -> usize { A::capacity() } + + /// Return if the `ArrayVec` is completely filled. + /// + /// ``` + /// use arrayvec::ArrayVec; + /// + /// let mut array = ArrayVec::<[_; 1]>::new(); + /// assert!(!array.is_full()); + /// array.push(1); + /// assert!(array.is_full()); + /// ``` + pub fn is_full(&self) -> bool { self.len() == self.capacity() } + + /// Push `element` to the end of the vector. + /// + /// ***Panics*** if the vector is already full. + /// + /// ``` + /// use arrayvec::ArrayVec; + /// + /// let mut array = ArrayVec::<[_; 2]>::new(); + /// + /// array.push(1); + /// array.push(2); + /// + /// assert_eq!(&array[..], &[1, 2]); + /// ``` + pub fn push(&mut self, element: A::Item) { + self.try_push(element).unwrap() + } + + /// Push `element` to the end of the vector. + /// + /// Return `Ok` if the push succeeds, or return an error if the vector + /// is already full. + /// + /// ``` + /// use arrayvec::ArrayVec; + /// + /// let mut array = ArrayVec::<[_; 2]>::new(); + /// + /// let push1 = array.try_push(1); + /// let push2 = array.try_push(2); + /// + /// assert!(push1.is_ok()); + /// assert!(push2.is_ok()); + /// + /// assert_eq!(&array[..], &[1, 2]); + /// + /// let overflow = array.try_push(3); + /// + /// assert!(overflow.is_err()); + /// ``` + pub fn try_push(&mut self, element: A::Item) -> Result<(), CapacityError> { + if self.len() < A::capacity() { + unsafe { + self.push_unchecked(element); + } + Ok(()) + } else { + Err(CapacityError::new(element)) + } + } + + + /// Push `element` to the end of the vector without checking the capacity. + /// + /// It is up to the caller to ensure the capacity of the vector is + /// sufficiently large. + /// + /// This method uses *debug assertions* to check that the arrayvec is not full. 
+ /// + /// ``` + /// use arrayvec::ArrayVec; + /// + /// let mut array = ArrayVec::<[_; 2]>::new(); + /// + /// if array.len() + 2 <= array.capacity() { + /// unsafe { + /// array.push_unchecked(1); + /// array.push_unchecked(2); + /// } + /// } + /// + /// assert_eq!(&array[..], &[1, 2]); + /// ``` + #[inline] + pub unsafe fn push_unchecked(&mut self, element: A::Item) { + let len = self.len(); + debug_assert!(len < A::capacity()); + ptr::write(self.get_unchecked_mut(len), element); + self.set_len(len + 1); + } + + /// Insert `element` at position `index`. + /// + /// Shift up all elements after `index`. + /// + /// It is an error if the index is greater than the length or if the + /// arrayvec is full. + /// + /// ***Panics*** on errors. See `try_result` for fallible version. + /// + /// ``` + /// use arrayvec::ArrayVec; + /// + /// let mut array = ArrayVec::<[_; 2]>::new(); + /// + /// array.insert(0, "x"); + /// array.insert(0, "y"); + /// assert_eq!(&array[..], &["y", "x"]); + /// + /// ``` + pub fn insert(&mut self, index: usize, element: A::Item) { + self.try_insert(index, element).unwrap() + } + + /// Insert `element` at position `index`. + /// + /// Shift up all elements after `index`; the `index` must be less than + /// or equal to the length. + /// + /// Returns an error if vector is already at full capacity. + /// + /// ***Panics*** `index` is out of bounds. + /// + /// ``` + /// use arrayvec::ArrayVec; + /// + /// let mut array = ArrayVec::<[_; 2]>::new(); + /// + /// assert!(array.try_insert(0, "x").is_ok()); + /// assert!(array.try_insert(0, "y").is_ok()); + /// assert!(array.try_insert(0, "z").is_err()); + /// assert_eq!(&array[..], &["y", "x"]); + /// + /// ``` + pub fn try_insert(&mut self, index: usize, element: A::Item) -> Result<(), CapacityError> { + if index > self.len() { + panic_oob!("try_insert", index, self.len()) + } + if self.len() == self.capacity() { + return Err(CapacityError::new(element)); + } + let len = self.len(); + + // follows is just like Vec + unsafe { // infallible + // The spot to put the new value + { + let p: *mut _ = self.get_unchecked_mut(index); + // Shift everything over to make space. (Duplicating the + // `index`th element into two consecutive places.) + ptr::copy(p, p.offset(1), len - index); + // Write it in, overwriting the first copy of the `index`th + // element. + ptr::write(p, element); + } + self.set_len(len + 1); + } + Ok(()) + } + + /// Remove the last element in the vector and return it. + /// + /// Return `Some(` *element* `)` if the vector is non-empty, else `None`. + /// + /// ``` + /// use arrayvec::ArrayVec; + /// + /// let mut array = ArrayVec::<[_; 2]>::new(); + /// + /// array.push(1); + /// + /// assert_eq!(array.pop(), Some(1)); + /// assert_eq!(array.pop(), None); + /// ``` + pub fn pop(&mut self) -> Option { + if self.len() == 0 { + return None + } + unsafe { + let new_len = self.len() - 1; + self.set_len(new_len); + Some(ptr::read(self.get_unchecked_mut(new_len))) + } + } + + /// Remove the element at `index` and swap the last element into its place. + /// + /// This operation is O(1). + /// + /// Return the *element* if the index is in bounds, else panic. + /// + /// ***Panics*** if the `index` is out of bounds. 
+ /// + /// ``` + /// use arrayvec::ArrayVec; + /// + /// let mut array = ArrayVec::from([1, 2, 3]); + /// + /// assert_eq!(array.swap_remove(0), 1); + /// assert_eq!(&array[..], &[3, 2]); + /// + /// assert_eq!(array.swap_remove(1), 2); + /// assert_eq!(&array[..], &[3]); + /// ``` + pub fn swap_remove(&mut self, index: usize) -> A::Item { + self.swap_pop(index) + .unwrap_or_else(|| { + panic_oob!("swap_remove", index, self.len()) + }) + } + + /// Remove the element at `index` and swap the last element into its place. + /// + /// This is a checked version of `.swap_remove`. + /// This operation is O(1). + /// + /// Return `Some(` *element* `)` if the index is in bounds, else `None`. + /// + /// ``` + /// use arrayvec::ArrayVec; + /// + /// let mut array = ArrayVec::from([1, 2, 3]); + /// + /// assert_eq!(array.swap_pop(0), Some(1)); + /// assert_eq!(&array[..], &[3, 2]); + /// + /// assert_eq!(array.swap_pop(10), None); + /// ``` + pub fn swap_pop(&mut self, index: usize) -> Option { + let len = self.len(); + if index >= len { + return None; + } + self.swap(index, len - 1); + self.pop() + } + + /// Remove the element at `index` and shift down the following elements. + /// + /// The `index` must be strictly less than the length of the vector. + /// + /// ***Panics*** if the `index` is out of bounds. + /// + /// ``` + /// use arrayvec::ArrayVec; + /// + /// let mut array = ArrayVec::from([1, 2, 3]); + /// + /// let removed_elt = array.remove(0); + /// assert_eq!(removed_elt, 1); + /// assert_eq!(&array[..], &[2, 3]); + /// ``` + pub fn remove(&mut self, index: usize) -> A::Item { + self.pop_at(index) + .unwrap_or_else(|| { + panic_oob!("remove", index, self.len()) + }) + } + + /// Remove the element at `index` and shift down the following elements. + /// + /// This is a checked version of `.remove(index)`. Returns `None` if there + /// is no element at `index`. Otherwise, return the element inside `Some`. + /// + /// ``` + /// use arrayvec::ArrayVec; + /// + /// let mut array = ArrayVec::from([1, 2, 3]); + /// + /// assert!(array.pop_at(0).is_some()); + /// assert_eq!(&array[..], &[2, 3]); + /// + /// assert!(array.pop_at(2).is_none()); + /// assert!(array.pop_at(10).is_none()); + /// ``` + pub fn pop_at(&mut self, index: usize) -> Option { + if index >= self.len() { + None + } else { + self.drain(index..index + 1).next() + } + } + + /// Shortens the vector, keeping the first `len` elements and dropping + /// the rest. + /// + /// If `len` is greater than the vector’s current length this has no + /// effect. + /// + /// ``` + /// use arrayvec::ArrayVec; + /// + /// let mut array = ArrayVec::from([1, 2, 3, 4, 5]); + /// array.truncate(3); + /// assert_eq!(&array[..], &[1, 2, 3]); + /// array.truncate(4); + /// assert_eq!(&array[..], &[1, 2, 3]); + /// ``` + pub fn truncate(&mut self, len: usize) { + while self.len() > len { self.pop(); } + } + + /// Remove all elements in the vector. + pub fn clear(&mut self) { + while let Some(_) = self.pop() { } + } + + /// Retains only the elements specified by the predicate. + /// + /// In other words, remove all elements `e` such that `f(&mut e)` returns false. + /// This method operates in place and preserves the order of the retained + /// elements. 
+ /// + /// ``` + /// use arrayvec::ArrayVec; + /// + /// let mut array = ArrayVec::from([1, 2, 3, 4]); + /// array.retain(|x| *x & 1 != 0 ); + /// assert_eq!(&array[..], &[1, 3]); + /// ``` + pub fn retain(&mut self, mut f: F) + where F: FnMut(&mut A::Item) -> bool + { + let len = self.len(); + let mut del = 0; + { + let v = &mut **self; + + for i in 0..len { + if !f(&mut v[i]) { + del += 1; + } else if del > 0 { + v.swap(i - del, i); + } + } + } + if del > 0 { + self.drain(len - del..); + } + } + + /// Set the vector’s length without dropping or moving out elements + /// + /// This method is `unsafe` because it changes the notion of the + /// number of “valid” elements in the vector. Use with care. + /// + /// This method uses *debug assertions* to check that check that `length` is + /// not greater than the capacity. + #[inline] + pub unsafe fn set_len(&mut self, length: usize) { + debug_assert!(length <= self.capacity()); + self.len = Index::from(length); + } + + + /// Create a draining iterator that removes the specified range in the vector + /// and yields the removed items from start to end. The element range is + /// removed even if the iterator is not consumed until the end. + /// + /// Note: It is unspecified how many elements are removed from the vector, + /// if the `Drain` value is leaked. + /// + /// **Panics** if the starting point is greater than the end point or if + /// the end point is greater than the length of the vector. + /// + /// ``` + /// use arrayvec::ArrayVec; + /// + /// let mut v = ArrayVec::from([1, 2, 3]); + /// let u: ArrayVec<[_; 3]> = v.drain(0..2).collect(); + /// assert_eq!(&v[..], &[3]); + /// assert_eq!(&u[..], &[1, 2]); + /// ``` + pub fn drain(&mut self, range: R) -> Drain { + // Memory safety + // + // When the Drain is first created, it shortens the length of + // the source vector to make sure no uninitalized or moved-from elements + // are accessible at all if the Drain's destructor never gets to run. + // + // Drain will ptr::read out the values to remove. + // When finished, remaining tail of the vec is copied back to cover + // the hole, and the vector length is restored to the new length. + // + let len = self.len(); + let start = range.start().unwrap_or(0); + let end = range.end().unwrap_or(len); + // bounds check happens here + let range_slice: *const _ = &self[start..end]; + + unsafe { + // set self.vec length's to start, to be safe in case Drain is leaked + self.set_len(start); + Drain { + tail_start: end, + tail_len: len - end, + iter: (*range_slice).iter(), + vec: self as *mut _, + } + } + } + + /// Return the inner fixed size array, if it is full to its capacity. + /// + /// Return an `Ok` value with the array if length equals capacity, + /// return an `Err` with self otherwise. + /// + /// `Note:` This function may incur unproportionally large overhead + /// to move the array out, its performance is not optimal. + pub fn into_inner(self) -> Result { + if self.len() < self.capacity() { + Err(self) + } else { + unsafe { + let array = ptr::read(&*self.xs); + mem::forget(self); + Ok(array) + } + } + } + + /// Dispose of `self` without the overwriting that is needed in Drop. + pub fn dispose(mut self) { + self.clear(); + mem::forget(self); + } + + /// Return a slice containing all elements of the vector. + pub fn as_slice(&self) -> &[A::Item] { + self + } + + /// Return a mutable slice containing all elements of the vector. 
+ pub fn as_mut_slice(&mut self) -> &mut [A::Item] { + self + } +} + +impl Deref for ArrayVec { + type Target = [A::Item]; + #[inline] + fn deref(&self) -> &[A::Item] { + unsafe { + slice::from_raw_parts(self.xs.as_ptr(), self.len()) + } + } +} + +impl DerefMut for ArrayVec { + #[inline] + fn deref_mut(&mut self) -> &mut [A::Item] { + let len = self.len(); + unsafe { + slice::from_raw_parts_mut(self.xs.as_mut_ptr(), len) + } + } +} + +/// Create an `ArrayVec` from an array. +/// +/// ``` +/// use arrayvec::ArrayVec; +/// +/// let mut array = ArrayVec::from([1, 2, 3]); +/// assert_eq!(array.len(), 3); +/// assert_eq!(array.capacity(), 3); +/// ``` +impl From for ArrayVec { + fn from(array: A) -> Self { + ArrayVec { xs: NoDrop::new(array), len: Index::from(A::capacity()) } + } +} + + +/// Iterate the `ArrayVec` with references to each element. +/// +/// ``` +/// use arrayvec::ArrayVec; +/// +/// let array = ArrayVec::from([1, 2, 3]); +/// +/// for elt in &array { +/// // ... +/// } +/// ``` +impl<'a, A: Array> IntoIterator for &'a ArrayVec { + type Item = &'a A::Item; + type IntoIter = slice::Iter<'a, A::Item>; + fn into_iter(self) -> Self::IntoIter { self.iter() } +} + +/// Iterate the `ArrayVec` with mutable references to each element. +/// +/// ``` +/// use arrayvec::ArrayVec; +/// +/// let mut array = ArrayVec::from([1, 2, 3]); +/// +/// for elt in &mut array { +/// // ... +/// } +/// ``` +impl<'a, A: Array> IntoIterator for &'a mut ArrayVec { + type Item = &'a mut A::Item; + type IntoIter = slice::IterMut<'a, A::Item>; + fn into_iter(self) -> Self::IntoIter { self.iter_mut() } +} + +/// Iterate the `ArrayVec` with each element by value. +/// +/// The vector is consumed by this operation. +/// +/// ``` +/// use arrayvec::ArrayVec; +/// +/// for elt in ArrayVec::from([1, 2, 3]) { +/// // ... +/// } +/// ``` +impl IntoIterator for ArrayVec { + type Item = A::Item; + type IntoIter = IntoIter; + fn into_iter(self) -> IntoIter { + IntoIter { index: Index::from(0), v: self, } + } +} + + +/// By-value iterator for `ArrayVec`. +pub struct IntoIter { + index: A::Index, + v: ArrayVec, +} + +impl Iterator for IntoIter { + type Item = A::Item; + + #[inline] + fn next(&mut self) -> Option { + if self.index == self.v.len { + None + } else { + unsafe { + let index = self.index.to_usize(); + self.index = Index::from(index + 1); + Some(ptr::read(self.v.get_unchecked_mut(index))) + } + } + } + + fn size_hint(&self) -> (usize, Option) { + let len = self.v.len() - self.index.to_usize(); + (len, Some(len)) + } +} + +impl DoubleEndedIterator for IntoIter { + #[inline] + fn next_back(&mut self) -> Option { + if self.index == self.v.len { + None + } else { + unsafe { + let new_len = self.v.len() - 1; + self.v.set_len(new_len); + Some(ptr::read(self.v.get_unchecked_mut(new_len))) + } + } + } +} + +impl ExactSizeIterator for IntoIter { } + +impl Drop for IntoIter { + fn drop(&mut self) { + // panic safety: Set length to 0 before dropping elements. + let index = self.index.to_usize(); + let len = self.v.len(); + unsafe { + self.v.set_len(0); + let elements = slice::from_raw_parts_mut( + self.v.get_unchecked_mut(index), + len - index); + ptr::drop_in_place(elements); + } + } +} + +/// A draining iterator for `ArrayVec`. 
+pub struct Drain<'a, A> + where A: Array, + A::Item: 'a, +{ + /// Index of tail to preserve + tail_start: usize, + /// Length of tail + tail_len: usize, + /// Current remaining range to remove + iter: slice::Iter<'a, A::Item>, + vec: *mut ArrayVec, +} + +unsafe impl<'a, A: Array + Sync> Sync for Drain<'a, A> {} +unsafe impl<'a, A: Array + Send> Send for Drain<'a, A> {} + +impl<'a, A: Array> Iterator for Drain<'a, A> + where A::Item: 'a, +{ + type Item = A::Item; + + #[inline] + fn next(&mut self) -> Option { + self.iter.next().map(|elt| + unsafe { + ptr::read(elt as *const _) + } + ) + } + + #[inline] + fn size_hint(&self) -> (usize, Option) { + self.iter.size_hint() + } +} + +impl<'a, A: Array> DoubleEndedIterator for Drain<'a, A> + where A::Item: 'a, +{ + #[inline] + fn next_back(&mut self) -> Option { + self.iter.next_back().map(|elt| + unsafe { + ptr::read(elt as *const _) + } + ) + } +} + +impl<'a, A: Array> ExactSizeIterator for Drain<'a, A> where A::Item: 'a {} + +impl<'a, A: Array> Drop for Drain<'a, A> + where A::Item: 'a +{ + fn drop(&mut self) { + // len is currently 0 so panicking while dropping will not cause a double drop. + + // exhaust self first + while let Some(_) = self.next() { } + + if self.tail_len > 0 { + unsafe { + let source_vec = &mut *self.vec; + // memmove back untouched tail, update to new length + let start = source_vec.len(); + let tail = self.tail_start; + let src = source_vec.as_ptr().offset(tail as isize); + let dst = source_vec.as_mut_ptr().offset(start as isize); + ptr::copy(src, dst, self.tail_len); + source_vec.set_len(start + self.tail_len); + } + } + } +} + +struct ScopeExitGuard + where F: FnMut(&Data, &mut T) +{ + value: T, + data: Data, + f: F, +} + +impl Drop for ScopeExitGuard + where F: FnMut(&Data, &mut T) +{ + fn drop(&mut self) { + (self.f)(&self.data, &mut self.value) + } +} + + + +/// Extend the `ArrayVec` with an iterator. +/// +/// Does not extract more items than there is space for. No error +/// occurs if there are more iterator elements. +impl Extend for ArrayVec { + fn extend>(&mut self, iter: T) { + let take = self.capacity() - self.len(); + unsafe { + let len = self.len(); + let mut ptr = self.as_mut_ptr().offset(len as isize); + // Keep the length in a separate variable, write it back on scope + // exit. To help the compiler with alias analysis and stuff. + // We update the length to handle panic in the iteration of the + // user's iterator, without dropping any elements on the floor. + let mut guard = ScopeExitGuard { + value: self, + data: len, + f: |&len, self_| { + self_.set_len(len) + } + }; + for elt in iter.into_iter().take(take) { + ptr::write(ptr, elt); + ptr = ptr.offset(1); + guard.data += 1; + } + } + } +} + +/// Create an `ArrayVec` from an iterator. +/// +/// Does not extract more items than there is space for. No error +/// occurs if there are more iterator elements. +impl iter::FromIterator for ArrayVec { + fn from_iter>(iter: T) -> Self { + let mut array = ArrayVec::new(); + array.extend(iter); + array + } +} + +impl Clone for ArrayVec + where A::Item: Clone +{ + fn clone(&self) -> Self { + self.iter().cloned().collect() + } + + fn clone_from(&mut self, rhs: &Self) { + // recursive case for the common prefix + let prefix = cmp::min(self.len(), rhs.len()); + { + let a = &mut self[..prefix]; + let b = &rhs[..prefix]; + for i in 0..prefix { + a[i].clone_from(&b[i]); + } + } + if prefix < self.len() { + // rhs was shorter + for _ in 0..self.len() - prefix { + self.pop(); + } + } else { + for elt in &rhs[self.len()..] 
{ + self.push(elt.clone()); + } + } + } +} + +impl Hash for ArrayVec + where A::Item: Hash +{ + fn hash(&self, state: &mut H) { + Hash::hash(&**self, state) + } +} + +impl PartialEq for ArrayVec + where A::Item: PartialEq +{ + fn eq(&self, other: &Self) -> bool { + **self == **other + } +} + +impl PartialEq<[A::Item]> for ArrayVec + where A::Item: PartialEq +{ + fn eq(&self, other: &[A::Item]) -> bool { + **self == *other + } +} + +impl Eq for ArrayVec where A::Item: Eq { } + +impl Borrow<[A::Item]> for ArrayVec { + fn borrow(&self) -> &[A::Item] { self } +} + +impl BorrowMut<[A::Item]> for ArrayVec { + fn borrow_mut(&mut self) -> &mut [A::Item] { self } +} + +impl AsRef<[A::Item]> for ArrayVec { + fn as_ref(&self) -> &[A::Item] { self } +} + +impl AsMut<[A::Item]> for ArrayVec { + fn as_mut(&mut self) -> &mut [A::Item] { self } +} + +impl fmt::Debug for ArrayVec where A::Item: fmt::Debug { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { (**self).fmt(f) } +} + +impl Default for ArrayVec { + /// Return an empty array + fn default() -> ArrayVec { + ArrayVec::new() + } +} + +impl PartialOrd for ArrayVec where A::Item: PartialOrd { + #[inline] + fn partial_cmp(&self, other: &ArrayVec) -> Option { + (**self).partial_cmp(other) + } + + #[inline] + fn lt(&self, other: &Self) -> bool { + (**self).lt(other) + } + + #[inline] + fn le(&self, other: &Self) -> bool { + (**self).le(other) + } + + #[inline] + fn ge(&self, other: &Self) -> bool { + (**self).ge(other) + } + + #[inline] + fn gt(&self, other: &Self) -> bool { + (**self).gt(other) + } +} + +impl Ord for ArrayVec where A::Item: Ord { + fn cmp(&self, other: &ArrayVec) -> cmp::Ordering { + (**self).cmp(other) + } +} + +#[cfg(feature="std")] +/// `Write` appends written data to the end of the vector. +/// +/// Requires `features="std"`. 
+impl> io::Write for ArrayVec { + fn write(&mut self, data: &[u8]) -> io::Result { + unsafe { + let len = self.len(); + let mut tail = slice::from_raw_parts_mut(self.get_unchecked_mut(len), + A::capacity() - len); + let result = tail.write(data); + if let Ok(written) = result { + self.set_len(len + written); + } + result + } + } + fn flush(&mut self) -> io::Result<()> { Ok(()) } +} + +#[cfg(feature="serde-1")] +/// Requires crate feature `"serde-1"` +impl> Serialize for ArrayVec { + fn serialize(&self, serializer: S) -> Result + where S: Serializer + { + serializer.collect_seq(self) + } +} + +#[cfg(feature="serde-1")] +/// Requires crate feature `"serde-1"` +impl<'de, T: Deserialize<'de>, A: Array> Deserialize<'de> for ArrayVec { + fn deserialize(deserializer: D) -> Result + where D: Deserializer<'de> + { + use serde::de::{Visitor, SeqAccess, Error}; + use std::marker::PhantomData; + + struct ArrayVecVisitor<'de, T: Deserialize<'de>, A: Array>(PhantomData<(&'de (), T, A)>); + + impl<'de, T: Deserialize<'de>, A: Array> Visitor<'de> for ArrayVecVisitor<'de, T, A> { + type Value = ArrayVec; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + write!(formatter, "an array with no more than {} items", A::capacity()) + } + + fn visit_seq(self, mut seq: SA) -> Result + where SA: SeqAccess<'de>, + { + let mut values = ArrayVec::::new(); + + while let Some(value) = try!(seq.next_element()) { + if let Err(_) = values.try_push(value) { + return Err(SA::Error::invalid_length(A::capacity() + 1, &self)); + } + } + + Ok(values) + } + } + + deserializer.deserialize_seq(ArrayVecVisitor::(PhantomData)) + } +} diff --git a/arrayvec/src/range.rs b/arrayvec/src/range.rs new file mode 100644 index 000000000..f90f5af43 --- /dev/null +++ b/arrayvec/src/range.rs @@ -0,0 +1,42 @@ + +use std::ops::{ + RangeFull, + RangeFrom, + RangeTo, + Range, +}; + +/// `RangeArgument` is implemented by Rust's built-in range types, produced +/// by range syntax like `..`, `a..`, `..b` or `c..d`. +/// +/// Note: This is arrayvec's provisional trait, waiting for stable Rust to +/// provide an equivalent. 
+pub trait RangeArgument { + #[inline] + /// Start index (inclusive) + fn start(&self) -> Option { None } + #[inline] + /// End index (exclusive) + fn end(&self) -> Option { None } +} + + +impl RangeArgument for RangeFull {} + +impl RangeArgument for RangeFrom { + #[inline] + fn start(&self) -> Option { Some(self.start) } +} + +impl RangeArgument for RangeTo { + #[inline] + fn end(&self) -> Option { Some(self.end) } +} + +impl RangeArgument for Range { + #[inline] + fn start(&self) -> Option { Some(self.start) } + #[inline] + fn end(&self) -> Option { Some(self.end) } +} + diff --git a/arrayvec/tests/serde.rs b/arrayvec/tests/serde.rs new file mode 100644 index 000000000..62acf25ba --- /dev/null +++ b/arrayvec/tests/serde.rs @@ -0,0 +1,79 @@ +#![cfg(feature = "serde-1")] +extern crate arrayvec; +extern crate serde_test; + +mod array_vec { + use arrayvec::ArrayVec; + + use serde_test::{Token, assert_tokens, assert_de_tokens_error}; + + #[test] + fn test_ser_de_empty() { + let vec = ArrayVec::<[u32; 0]>::new(); + + assert_tokens(&vec, &[ + Token::Seq { len: Some(0) }, + Token::SeqEnd, + ]); + } + + + #[test] + fn test_ser_de() { + let mut vec = ArrayVec::<[u32; 3]>::new(); + vec.push(20); + vec.push(55); + vec.push(123); + + assert_tokens(&vec, &[ + Token::Seq { len: Some(3) }, + Token::U32(20), + Token::U32(55), + Token::U32(123), + Token::SeqEnd, + ]); + } + + #[test] + fn test_de_too_large() { + assert_de_tokens_error::>(&[ + Token::Seq { len: Some(3) }, + Token::U32(13), + Token::U32(42), + Token::U32(68), + ], "invalid length 3, expected an array with no more than 2 items"); + } +} + +mod array_string { + use arrayvec::ArrayString; + + use serde_test::{Token, assert_tokens, assert_de_tokens_error}; + + #[test] + fn test_ser_de_empty() { + let string = ArrayString::<[u8; 0]>::new(); + + assert_tokens(&string, &[ + Token::Str(""), + ]); + } + + + #[test] + fn test_ser_de() { + let string = ArrayString::<[u8; 9]>::from("1234 abcd") + .expect("expected exact specified capacity to be enough"); + + assert_tokens(&string, &[ + Token::Str("1234 abcd"), + ]); + } + + #[test] + fn test_de_too_large() { + assert_de_tokens_error::>(&[ + Token::Str("afd") + ], "invalid length 3, expected a string no more than 2 bytes long"); + } +} diff --git a/arrayvec/tests/tests.rs b/arrayvec/tests/tests.rs new file mode 100644 index 000000000..de3507a54 --- /dev/null +++ b/arrayvec/tests/tests.rs @@ -0,0 +1,468 @@ +extern crate arrayvec; +#[macro_use] extern crate matches; + +use arrayvec::ArrayVec; +use arrayvec::ArrayString; +use std::mem; +use arrayvec::CapacityError; + +use std::collections::HashMap; + + +#[test] +fn test_simple() { + use std::ops::Add; + + let mut vec: ArrayVec<[Vec; 3]> = ArrayVec::new(); + + vec.push(vec![1, 2, 3, 4]); + vec.push(vec![10]); + vec.push(vec![-1, 13, -2]); + + for elt in &vec { + assert_eq!(elt.iter().fold(0, Add::add), 10); + } + + let sum_len = vec.into_iter().map(|x| x.len()).fold(0, Add::add); + assert_eq!(sum_len, 8); +} + +#[test] +fn test_u16_index() { + const N: usize = 4096; + let mut vec: ArrayVec<[_; N]> = ArrayVec::new(); + for _ in 0..N { + assert!(vec.try_push(1u8).is_ok()); + } + assert!(vec.try_push(0).is_err()); + assert_eq!(vec.len(), N); +} + +#[test] +fn test_iter() { + let mut iter = ArrayVec::from([1, 2, 3]).into_iter(); + assert_eq!(iter.size_hint(), (3, Some(3))); + assert_eq!(iter.next_back(), Some(3)); + assert_eq!(iter.next(), Some(1)); + assert_eq!(iter.next_back(), Some(2)); + assert_eq!(iter.size_hint(), (0, Some(0))); + 
assert_eq!(iter.next_back(), None); +} + +#[test] +fn test_drop() { + use std::cell::Cell; + + let flag = &Cell::new(0); + + struct Bump<'a>(&'a Cell); + + impl<'a> Drop for Bump<'a> { + fn drop(&mut self) { + let n = self.0.get(); + self.0.set(n + 1); + } + } + + { + let mut array = ArrayVec::<[Bump; 128]>::new(); + array.push(Bump(flag)); + array.push(Bump(flag)); + } + assert_eq!(flag.get(), 2); + + // test something with the nullable pointer optimization + flag.set(0); + + { + let mut array = ArrayVec::<[_; 3]>::new(); + array.push(vec![Bump(flag)]); + array.push(vec![Bump(flag), Bump(flag)]); + array.push(vec![]); + let push4 = array.try_push(vec![Bump(flag)]); + assert_eq!(flag.get(), 0); + drop(push4); + assert_eq!(flag.get(), 1); + drop(array.pop()); + assert_eq!(flag.get(), 1); + drop(array.pop()); + assert_eq!(flag.get(), 3); + } + + assert_eq!(flag.get(), 4); + + // test into_inner + flag.set(0); + { + let mut array = ArrayVec::<[_; 3]>::new(); + array.push(Bump(flag)); + array.push(Bump(flag)); + array.push(Bump(flag)); + let inner = array.into_inner(); + assert!(inner.is_ok()); + assert_eq!(flag.get(), 0); + drop(inner); + assert_eq!(flag.get(), 3); + } + +} + +#[test] +fn test_extend() { + let mut range = 0..10; + + let mut array: ArrayVec<[_; 5]> = range.by_ref().collect(); + assert_eq!(&array[..], &[0, 1, 2, 3, 4]); + assert_eq!(range.next(), Some(5)); + + array.extend(range.by_ref()); + assert_eq!(range.next(), Some(6)); + + let mut array: ArrayVec<[_; 10]> = (0..3).collect(); + assert_eq!(&array[..], &[0, 1, 2]); + array.extend(3..5); + assert_eq!(&array[..], &[0, 1, 2, 3, 4]); +} + +#[test] +fn test_is_send_sync() { + let data = ArrayVec::<[Vec; 5]>::new(); + &data as &Send; + &data as &Sync; +} + +#[test] +fn test_compact_size() { + // Future rust will kill these drop flags! 
+    // 4 elements size + 1 len + 1 enum tag + [1 drop flag]
+    type ByteArray = ArrayVec<[u8; 4]>;
+    println!("{}", mem::size_of::<ByteArray>());
+    assert!(mem::size_of::<ByteArray>() <= 8);
+
+    // 12 element size + 1 enum tag + 3 padding + 1 len + 1 drop flag + 2 padding
+    type QuadArray = ArrayVec<[u32; 3]>;
+    println!("{}", mem::size_of::<QuadArray>());
+    assert!(mem::size_of::<QuadArray>() <= 24);
+}
+
+#[test]
+fn test_drain() {
+    let mut v = ArrayVec::from([0; 8]);
+    v.pop();
+    v.drain(0..7);
+    assert_eq!(&v[..], &[]);
+
+    v.extend(0..);
+    v.drain(1..4);
+    assert_eq!(&v[..], &[0, 4, 5, 6, 7]);
+    let u: ArrayVec<[_; 3]> = v.drain(1..4).rev().collect();
+    assert_eq!(&u[..], &[6, 5, 4]);
+    assert_eq!(&v[..], &[0, 7]);
+    v.drain(..);
+    assert_eq!(&v[..], &[]);
+}
+
+#[test]
+fn test_retain() {
+    let mut v = ArrayVec::from([0; 8]);
+    for (i, elt) in v.iter_mut().enumerate() {
+        *elt = i;
+    }
+    v.retain(|_| true);
+    assert_eq!(&v[..], &[0, 1, 2, 3, 4, 5, 6, 7]);
+    v.retain(|elt| {
+        *elt /= 2;
+        *elt % 2 == 0
+    });
+    assert_eq!(&v[..], &[0, 0, 2, 2]);
+    v.retain(|_| false);
+    assert_eq!(&v[..], &[]);
+}
+
+#[test]
+#[should_panic]
+fn test_drain_oob() {
+    let mut v = ArrayVec::from([0; 8]);
+    v.pop();
+    v.drain(0..8);
+}
+
+#[test]
+#[should_panic]
+fn test_drop_panic() {
+    struct DropPanic;
+
+    impl Drop for DropPanic {
+        fn drop(&mut self) {
+            panic!("drop");
+        }
+    }
+
+    let mut array = ArrayVec::<[DropPanic; 1]>::new();
+    array.push(DropPanic);
+}
+
+#[test]
+#[should_panic]
+fn test_drop_panic_into_iter() {
+    struct DropPanic;
+
+    impl Drop for DropPanic {
+        fn drop(&mut self) {
+            panic!("drop");
+        }
+    }
+
+    let mut array = ArrayVec::<[DropPanic; 1]>::new();
+    array.push(DropPanic);
+    array.into_iter();
+}
+
+#[test]
+fn test_insert() {
+    let mut v = ArrayVec::from([]);
+    assert_matches!(v.try_push(1), Err(_));
+
+    let mut v = ArrayVec::<[_; 3]>::new();
+    v.insert(0, 0);
+    v.insert(1, 1);
+    //let ret1 = v.try_insert(3, 3);
+    //assert_matches!(ret1, Err(InsertError::OutOfBounds(_)));
+    assert_eq!(&v[..], &[0, 1]);
+    v.insert(2, 2);
+    assert_eq!(&v[..], &[0, 1, 2]);
+
+    let ret2 = v.try_insert(1, 9);
+    assert_eq!(&v[..], &[0, 1, 2]);
+    assert_matches!(ret2, Err(_));
+
+    let mut v = ArrayVec::from([2]);
+    assert_matches!(v.try_insert(0, 1), Err(CapacityError { .. }));
+    assert_matches!(v.try_insert(1, 1), Err(CapacityError { .. }));
+    //assert_matches!(v.try_insert(2, 1), Err(CapacityError { ..
})); +} + +#[test] +fn test_into_inner_1() { + let mut v = ArrayVec::from([1, 2]); + v.pop(); + let u = v.clone(); + assert_eq!(v.into_inner(), Err(u)); +} + +#[test] +fn test_into_inner_2() { + let mut v = ArrayVec::<[String; 4]>::new(); + v.push("a".into()); + v.push("b".into()); + v.push("c".into()); + v.push("d".into()); + assert_eq!(v.into_inner().unwrap(), ["a", "b", "c", "d"]); +} + +#[test] +fn test_into_inner_3_() { + let mut v = ArrayVec::<[i32; 4]>::new(); + v.extend(1..); + assert_eq!(v.into_inner().unwrap(), [1, 2, 3, 4]); +} + +#[test] +fn test_write() { + use std::io::Write; + let mut v = ArrayVec::<[_; 8]>::new(); + write!(&mut v, "\x01\x02\x03").unwrap(); + assert_eq!(&v[..], &[1, 2, 3]); + let r = v.write(&[9; 16]).unwrap(); + assert_eq!(r, 5); + assert_eq!(&v[..], &[1, 2, 3, 9, 9, 9, 9, 9]); +} + +#[test] +fn array_clone_from() { + let mut v = ArrayVec::<[_; 4]>::new(); + v.push(vec![1, 2]); + v.push(vec![3, 4, 5]); + v.push(vec![6]); + let reference = v.to_vec(); + let mut u = ArrayVec::<[_; 4]>::new(); + u.clone_from(&v); + assert_eq!(&u, &reference[..]); + + let mut t = ArrayVec::<[_; 4]>::new(); + t.push(vec![97]); + t.push(vec![]); + t.push(vec![5, 6, 2]); + t.push(vec![2]); + t.clone_from(&v); + assert_eq!(&t, &reference[..]); + t.clear(); + t.clone_from(&v); + assert_eq!(&t, &reference[..]); +} + +#[test] +fn test_string() { + use std::error::Error; + + let text = "hello world"; + let mut s = ArrayString::<[_; 16]>::new(); + s.try_push_str(text).unwrap(); + assert_eq!(&s, text); + assert_eq!(text, &s); + + // Make sure Hash / Eq / Borrow match up so we can use HashMap + let mut map = HashMap::new(); + map.insert(s, 1); + assert_eq!(map[text], 1); + + let mut t = ArrayString::<[_; 2]>::new(); + assert!(t.try_push_str(text).is_err()); + assert_eq!(&t, ""); + + t.push_str("ab"); + // DerefMut + let tmut: &mut str = &mut t; + assert_eq!(tmut, "ab"); + + // Test Error trait / try + let t = || -> Result<(), Box> { + let mut t = ArrayString::<[_; 2]>::new(); + try!(t.try_push_str(text)); + Ok(()) + }(); + assert!(t.is_err()); +} + +#[test] +fn test_string_from() { + let text = "hello world"; + // Test `from` constructor + let u = ArrayString::<[_; 11]>::from(text).unwrap(); + assert_eq!(&u, text); + assert_eq!(u.len(), text.len()); +} + +#[test] +fn test_string_from_bytes() { + let text = "hello world"; + let u = ArrayString::from_byte_string(b"hello world").unwrap(); + assert_eq!(&u, text); + assert_eq!(u.len(), text.len()); +} + +#[test] +fn test_string_clone() { + let text = "hi"; + let mut s = ArrayString::<[_; 4]>::new(); + s.push_str("abcd"); + let t = ArrayString::<[_; 4]>::from(text).unwrap(); + s.clone_from(&t); + assert_eq!(&t, &s); +} + +#[test] +fn test_string_push() { + let text = "abcαβγ"; + let mut s = ArrayString::<[_; 8]>::new(); + for c in text.chars() { + if let Err(_) = s.try_push(c) { + break; + } + } + assert_eq!("abcαβ", &s[..]); + s.push('x'); + assert_eq!("abcαβx", &s[..]); + assert!(s.try_push('x').is_err()); +} + + +#[test] +fn test_insert_at_length() { + let mut v = ArrayVec::<[_; 8]>::new(); + let result1 = v.try_insert(0, "a"); + let result2 = v.try_insert(1, "b"); + assert!(result1.is_ok() && result2.is_ok()); + assert_eq!(&v[..], &["a", "b"]); +} + +#[should_panic] +#[test] +fn test_insert_out_of_bounds() { + let mut v = ArrayVec::<[_; 8]>::new(); + let _ = v.try_insert(1, "test"); +} + +/* + * insert that pushes out the last + let mut u = ArrayVec::from([1, 2, 3, 4]); + let ret = u.try_insert(3, 99); + assert_eq!(&u[..], &[1, 2, 3, 99]); 
+ assert_matches!(ret, Err(_)); + let ret = u.try_insert(4, 77); + assert_eq!(&u[..], &[1, 2, 3, 99]); + assert_matches!(ret, Err(_)); +*/ + +#[test] +fn test_drop_in_insert() { + use std::cell::Cell; + + let flag = &Cell::new(0); + + struct Bump<'a>(&'a Cell); + + impl<'a> Drop for Bump<'a> { + fn drop(&mut self) { + let n = self.0.get(); + self.0.set(n + 1); + } + } + + flag.set(0); + + { + let mut array = ArrayVec::<[_; 2]>::new(); + array.push(Bump(flag)); + array.insert(0, Bump(flag)); + assert_eq!(flag.get(), 0); + let ret = array.try_insert(1, Bump(flag)); + assert_eq!(flag.get(), 0); + assert_matches!(ret, Err(_)); + drop(ret); + assert_eq!(flag.get(), 1); + } + assert_eq!(flag.get(), 3); +} + +#[test] +fn test_pop_at() { + let mut v = ArrayVec::<[String; 4]>::new(); + let s = String::from; + v.push(s("a")); + v.push(s("b")); + v.push(s("c")); + v.push(s("d")); + + assert_eq!(v.pop_at(4), None); + assert_eq!(v.pop_at(1), Some(s("b"))); + assert_eq!(v.pop_at(1), Some(s("c"))); + assert_eq!(v.pop_at(2), None); + assert_eq!(&v[..], &["a", "d"]); +} + +#[test] +fn test_sizes() { + let v = ArrayVec::from([0u8; 1 << 16]); + assert_eq!(vec![0u8; v.len()], &v[..]); +} + +#[test] +fn test_default() { + use std::net; + let s: ArrayString<[u8; 4]> = Default::default(); + // Something without `Default` implementation. + let v: ArrayVec<[net::TcpStream; 4]> = Default::default(); + assert_eq!(s.len(), 0); + assert_eq!(v.len(), 0); +} diff --git a/atty/.cargo-checksum.json b/atty/.cargo-checksum.json new file mode 100644 index 000000000..d828fa118 --- /dev/null +++ b/atty/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{},"package":"9a7d5b8723950951411ee34d271d99dddcc2035a16ab25310ea2c8cfd4369652"} \ No newline at end of file diff --git a/atty/CHANGELOG.md b/atty/CHANGELOG.md new file mode 100644 index 000000000..0b5c0eab5 --- /dev/null +++ b/atty/CHANGELOG.md @@ -0,0 +1,61 @@ +# 0.2.11 + +* fix msys detection with `winapi@0.3.5` [#28](https://github.com/softprops/atty/pull/28) + +# 0.2.10 + +* fix wasm regression [#27](https://github.com/softprops/atty/pull/27) + +# 0.2.9 + +* Fix fix pty detection [#25](https://github.com/softprops/atty/pull/25) + +# 0.2.8 + +* Fix an inverted condition on MinGW [#22](https://github.com/softprops/atty/pull/22) + +# 0.2.7 + +* Change `||` to `&&` for whether MSYS is a tty [#24](https://github.com/softprops/atty/pull/24/) + +# 0.2.6 + +* updated winapi dependency to [0.3](https://retep998.github.io/blog/winapi-0.3/) [#18](https://github.com/softprops/atty/pull/18) + +# 0.2.5 + +* added support for Wasm compile targets [#17](https://github.com/softprops/atty/pull/17) + +# 0.2.4 + +* added support for Wasm compile targets [#17](https://github.com/softprops/atty/pull/17) + +# 0.2.3 + +* added support for Redox OS [#14](https://github.com/softprops/atty/pull/14) + +# 0.2.2 + +* use target specific dependencies [#11](https://github.com/softprops/atty/pull/11) +* Add tty detection for MSYS terminals [#12](https://github.com/softprops/atty/pull/12) + +# 0.2.1 + +* fix windows bug + +# 0.2.0 + +* support for various stream types + +# 0.1.2 + +* windows support (with automated testing) +* automated code coverage + +# 0.1.1 + +* bumped libc dep from `0.1` to `0.2` + +# 0.1.0 + +* initial release diff --git a/atty/Cargo.toml b/atty/Cargo.toml new file mode 100644 index 000000000..0d02b6af2 --- /dev/null +++ b/atty/Cargo.toml @@ -0,0 +1,33 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" 
Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "atty" +version = "0.2.11" +authors = ["softprops "] +description = "A simple interface for querying atty" +homepage = "https://github.com/softprops/atty" +documentation = "http://softprops.github.io/atty" +readme = "README.md" +keywords = ["terminal", "tty"] +license = "MIT" +repository = "https://github.com/softprops/atty" +[target."cfg(target_os = \"redox\")".dependencies.termion] +version = "1.5" +[target."cfg(unix)".dependencies.libc] +version = "0.2" +default-features = false +[target."cfg(windows)".dependencies.winapi] +version = "0.3" +features = ["consoleapi", "processenv", "minwinbase", "minwindef", "winbase"] +[badges.travis-ci] +repository = "softprops/atty" diff --git a/atty/LICENSE b/atty/LICENSE new file mode 100644 index 000000000..d1f01c829 --- /dev/null +++ b/atty/LICENSE @@ -0,0 +1,20 @@ +Copyright (c) 2015-2017 Doug Tangren + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/atty/README.md b/atty/README.md new file mode 100644 index 000000000..5ce32d1d7 --- /dev/null +++ b/atty/README.md @@ -0,0 +1,76 @@ +# atty + +[![Build Status](https://travis-ci.org/softprops/atty.svg?branch=master)](https://travis-ci.org/softprops/atty) [![Build status](https://ci.appveyor.com/api/projects/status/geggrsnsjsuse8cv?svg=true)](https://ci.appveyor.com/project/softprops/atty) [![Coverage Status](https://coveralls.io/repos/softprops/atty/badge.svg?branch=master&service=github)](https://coveralls.io/github/softprops/atty?branch=master) [![crates.io](https://img.shields.io/crates/v/atty.svg)](https://crates.io/crates/atty) [![Released API docs](https://docs.rs/atty/badge.svg)](http://docs.rs/atty) [![Master API docs](https://img.shields.io/badge/docs-master-green.svg)](https://softprops.github.io/atty) + +> are you or are you not a tty? 
+ + +## install + +Add the following to your `Cargo.toml` + +```toml +[dependencies] +atty = "0.2" +``` + +## usage + +```rust +extern crate atty; + +use atty::Stream; + +fn main() { + if atty::is(Stream::Stdout) { + println!("I'm a terminal"); + } else { + println!("I'm not"); + } +} +``` + +## testing + +This library has been unit tested on both unix and windows platforms (via appveyor). + + +A simple example program is provided in this repo to test various tty's. By default. + +It prints + +```bash +$ cargo run --example atty +stdout? true +stderr? true +stdin? true +``` + +To test std in, pipe some text to the program + +```bash +$ echo "test" | cargo run --example atty +stdout? true +stderr? true +stdin? false +``` + +To test std out, pipe the program to something + +```bash +$ cargo run --example atty | grep std +stdout? false +stderr? true +stdin? true +``` + +To test std err, pipe the program to something redirecting std err + +```bash +$ cargo run --example atty 2>&1 | grep std +stdout? false +stderr? false +stdin? true +``` + +Doug Tangren (softprops) 2015-2017 diff --git a/atty/appveyor.yml b/atty/appveyor.yml new file mode 100644 index 000000000..d7fb12794 --- /dev/null +++ b/atty/appveyor.yml @@ -0,0 +1,16 @@ +environment: + matrix: + - TARGET: nightly-x86_64-pc-windows-msvc + - TARGET: nightly-i686-pc-windows-msvc + - TARGET: nightly-x86_64-pc-windows-gnu + - TARGET: nightly-i686-pc-windows-gnu +install: + - ps: Start-FileDownload "https://static.rust-lang.org/dist/rust-${env:TARGET}.exe" -FileName "rust-install.exe" + - ps: .\rust-install.exe /VERYSILENT /NORESTART /DIR="C:\rust" | Out-Null + - ps: $env:PATH="$env:PATH;C:\rust\bin" + - call "%VCVARS%" || ver>nul + - rustc -vV + - cargo -vV +build: false +test_script: + - cargo build diff --git a/atty/examples/atty.rs b/atty/examples/atty.rs new file mode 100644 index 000000000..3b3635e59 --- /dev/null +++ b/atty/examples/atty.rs @@ -0,0 +1,9 @@ +extern crate atty; + +use atty::{is, Stream}; + +fn main() { + println!("stdout? {}", is(Stream::Stdout)); + println!("stderr? {}", is(Stream::Stderr)); + println!("stdin? {}", is(Stream::Stdin)); +} diff --git a/atty/rustfmt.toml b/atty/rustfmt.toml new file mode 100644 index 000000000..d83987dca --- /dev/null +++ b/atty/rustfmt.toml @@ -0,0 +1,10 @@ +# keep imports tidy +reorder_imported_names = true +reorder_imports = true +reorder_imports_in_group = true +# there is no try! +use_try_shorthand = true +# don't create rustfmt artifacts +write_mode = "Replace" +# reduce wide load +max_width = 80 \ No newline at end of file diff --git a/atty/src/lib.rs b/atty/src/lib.rs new file mode 100644 index 000000000..6bffeadac --- /dev/null +++ b/atty/src/lib.rs @@ -0,0 +1,210 @@ +//! atty is a simple utility that answers one question +//! > is this a tty? +//! +//! usage is just as simple +//! +//! ``` +//! if atty::is(atty::Stream::Stdout) { +//! println!("i'm a tty") +//! } +//! ``` +//! +//! ``` +//! if atty::isnt(atty::Stream::Stdout) { +//! println!("i'm not a tty") +//! } +//! 
```
+
+#![cfg_attr(unix, no_std)]
+
+#[cfg(unix)]
+extern crate libc;
+#[cfg(windows)]
+extern crate winapi;
+#[cfg(target_os = "redox")]
+extern crate termion;
+
+#[cfg(windows)]
+use winapi::shared::minwindef::DWORD;
+#[cfg(windows)]
+use winapi::shared::ntdef::WCHAR;
+
+/// possible stream sources
+#[derive(Clone, Copy, Debug)]
+pub enum Stream {
+    Stdout,
+    Stderr,
+    Stdin,
+}
+
+/// returns true if this is a tty
+#[cfg(all(unix, not(target_arch = "wasm32")))]
+pub fn is(stream: Stream) -> bool {
+    extern crate libc;
+
+    let fd = match stream {
+        Stream::Stdout => libc::STDOUT_FILENO,
+        Stream::Stderr => libc::STDERR_FILENO,
+        Stream::Stdin => libc::STDIN_FILENO,
+    };
+    unsafe { libc::isatty(fd) != 0 }
+}
+
+/// returns true if this is a tty
+#[cfg(windows)]
+pub fn is(stream: Stream) -> bool {
+    use winapi::um::winbase::{STD_ERROR_HANDLE as STD_ERROR, STD_INPUT_HANDLE as STD_INPUT,
+                              STD_OUTPUT_HANDLE as STD_OUTPUT};
+
+    let (fd, others) = match stream {
+        Stream::Stdin => (STD_INPUT, [STD_ERROR, STD_OUTPUT]),
+        Stream::Stderr => (STD_ERROR, [STD_INPUT, STD_OUTPUT]),
+        Stream::Stdout => (STD_OUTPUT, [STD_INPUT, STD_ERROR]),
+    };
+    if unsafe { console_on_any(&[fd]) } {
+        // False positives aren't possible. If we got a console then
+        // we definitely have a tty on stdin.
+        return true;
+    }
+
+    // At this point, we *could* have a false negative. We can determine that
+    // this is true negative if we can detect the presence of a console on
+    // any of the other streams. If another stream has a console, then we know
+    // we're in a Windows console and can therefore trust the negative.
+    if unsafe { console_on_any(&others) } {
+        return false;
+    }
+
+    // Otherwise, we fall back to a very strange msys hack to see if we can
+    // sneakily detect the presence of a tty.
+    unsafe { msys_tty_on(fd) }
+}
+
+/// returns true if this is _not_ a tty
+pub fn isnt(stream: Stream) -> bool {
+    !is(stream)
+}
+
+/// Returns true if any of the given fds are on a console.
+#[cfg(windows)]
+unsafe fn console_on_any(fds: &[DWORD]) -> bool {
+    use winapi::um::consoleapi::GetConsoleMode;
+    use winapi::um::processenv::GetStdHandle;
+
+    for &fd in fds {
+        let mut out = 0;
+        let handle = GetStdHandle(fd);
+        if GetConsoleMode(handle, &mut out) != 0 {
+            return true;
+        }
+    }
+    false
+}
+
+/// Returns true if there is an MSYS tty on the given handle.
+#[cfg(windows)]
+unsafe fn msys_tty_on(fd: DWORD) -> bool {
+    use std::mem;
+    use std::slice;
+
+    use winapi::ctypes::c_void;
+    use winapi::um::winbase::GetFileInformationByHandleEx;
+    use winapi::um::fileapi::FILE_NAME_INFO;
+    use winapi::um::minwinbase::FileNameInfo;
+    use winapi::um::processenv::GetStdHandle;
+    use winapi::shared::minwindef::MAX_PATH;
+
+    let size = mem::size_of::<FILE_NAME_INFO>();
+    let mut name_info_bytes = vec![0u8; size + MAX_PATH * mem::size_of::<WCHAR>()];
+    let res = GetFileInformationByHandleEx(
+        GetStdHandle(fd),
+        FileNameInfo,
+        &mut *name_info_bytes as *mut _ as *mut c_void,
+        name_info_bytes.len() as u32,
+    );
+    if res == 0 {
+        return false;
+    }
+    let name_info: &FILE_NAME_INFO = &*(name_info_bytes.as_ptr() as *const FILE_NAME_INFO);
+    let s = slice::from_raw_parts(
+        name_info.FileName.as_ptr(),
+        name_info.FileNameLength as usize / 2,
+    );
+    let name = String::from_utf16_lossy(s);
+    // This checks whether 'pty' exists in the file name, which indicates that
+    // a pseudo-terminal is attached.
To mitigate against false positives + // (e.g., an actual file name that contains 'pty'), we also require that + // either the strings 'msys-' or 'cygwin-' are in the file name as well.) + let is_msys = name.contains("msys-") || name.contains("cygwin-"); + let is_pty = name.contains("-pty"); + is_msys && is_pty +} + +/// returns true if this is a tty +#[cfg(target_os = "redox")] +pub fn is(stream: Stream) -> bool { + use std::io; + use termion::is_tty; + + match stream { + Stream::Stdin => is_tty(&io::stdin()), + Stream::Stdout => is_tty(&io::stdout()), + Stream::Stderr => is_tty(&io::stderr()), + } +} + +/// returns true if this is a tty +#[cfg(target_arch = "wasm32")] +pub fn is(_stream: Stream) -> bool { + false +} + +#[cfg(test)] +mod tests { + use super::{Stream, is}; + + #[test] + #[cfg(windows)] + fn is_err() { + // appveyor pipes its output + assert!(!is(Stream::Stderr)) + } + + #[test] + #[cfg(windows)] + fn is_out() { + // appveyor pipes its output + assert!(!is(Stream::Stdout)) + } + + #[test] + #[cfg(windows)] + fn is_in() { + assert!(is(Stream::Stdin)) + } + + #[test] + #[cfg(unix)] + fn is_err() { + assert!(is(Stream::Stderr)) + } + + #[test] + #[cfg(unix)] + fn is_out() { + assert!(is(Stream::Stdout)) + } + + #[test] + #[cfg(target_os = "macos")] + fn is_in() { + // macos on travis seems to pipe its input + assert!(is(Stream::Stdin)) + } + + #[test] + #[cfg(all(not(target_os = "macos"), unix))] + fn is_in() { + assert!(is(Stream::Stdin)) + } +} diff --git a/backtrace-sys/.cargo-checksum.json b/backtrace-sys/.cargo-checksum.json new file mode 100644 index 000000000..505fbdd5d --- /dev/null +++ b/backtrace-sys/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{},"package":"c66d56ac8dabd07f6aacdaf633f4b8262f5b3601a810a0dcddffd5c22c69daa0"} \ No newline at end of file diff --git a/backtrace-sys/Cargo.toml b/backtrace-sys/Cargo.toml new file mode 100644 index 000000000..79927fb53 --- /dev/null +++ b/backtrace-sys/Cargo.toml @@ -0,0 +1,26 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "backtrace-sys" +version = "0.1.24" +authors = ["Alex Crichton "] +build = "build.rs" +description = "Bindings to the libbacktrace gcc library\n" +homepage = "https://github.com/alexcrichton/backtrace-rs" +documentation = "http://alexcrichton.com/backtrace-rs" +license = "MIT/Apache-2.0" +repository = "https://github.com/alexcrichton/backtrace-rs" +[dependencies.libc] +version = "0.2" +[build-dependencies.cc] +version = "1.0" diff --git a/backtrace-sys/LICENSE-APACHE b/backtrace-sys/LICENSE-APACHE new file mode 100644 index 000000000..16fe87b06 --- /dev/null +++ b/backtrace-sys/LICENSE-APACHE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
diff --git a/backtrace-sys/LICENSE-MIT b/backtrace-sys/LICENSE-MIT new file mode 100644 index 000000000..39e0ed660 --- /dev/null +++ b/backtrace-sys/LICENSE-MIT @@ -0,0 +1,25 @@ +Copyright (c) 2014 Alex Crichton + +Permission is hereby granted, free of charge, to any +person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without +limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software +is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions +of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/backtrace-sys/build.rs b/backtrace-sys/build.rs new file mode 100644 index 000000000..10486d1f8 --- /dev/null +++ b/backtrace-sys/build.rs @@ -0,0 +1,94 @@ +extern crate cc; + +use std::env; +use std::path::PathBuf; +use std::fs::File; + +fn main() { + let target = env::var("TARGET").unwrap(); + + // libbacktrace isn't used on windows + if target.contains("windows") { + return + } + + // no way this will ever compile for emscripten + if target.contains("emscripten") { + return + } + + let out_dir = PathBuf::from(env::var_os("OUT_DIR").unwrap()); + + let mut build = cc::Build::new(); + build + .include("src/libbacktrace") + .include(&out_dir) + .warnings(false) + .file("src/libbacktrace/alloc.c") + .file("src/libbacktrace/dwarf.c") + .file("src/libbacktrace/fileline.c") + .file("src/libbacktrace/posix.c") + .file("src/libbacktrace/read.c") + .file("src/libbacktrace/sort.c") + .file("src/libbacktrace/state.c"); + + if target.contains("darwin") { + build.file("src/libbacktrace/macho.c"); + } else if target.contains("windows") { + build.file("src/libbacktrace/pecoff.c"); + } else { + build.flag("-fvisibility=hidden"); + build.file("src/libbacktrace/elf.c"); + + let pointer_width = env::var("CARGO_CFG_TARGET_POINTER_WIDTH").unwrap(); + if pointer_width == "64" { + build.define("BACKTRACE_ELF_SIZE", "64"); + } else { + build.define("BACKTRACE_ELF_SIZE", "32"); + } + } + + File::create(out_dir.join("backtrace-supported.h")).unwrap(); + build.define("BACKTRACE_SUPPORTED", "1"); + build.define("BACKTRACE_USES_MALLOC", "1"); + build.define("BACKTRACE_SUPPORTS_THREADS", "0"); + build.define("BACKTRACE_SUPPORTS_DATA", "0"); + + File::create(out_dir.join("config.h")).unwrap(); + if !target.contains("apple-ios") && + !target.contains("solaris") && + !target.contains("redox") && + !target.contains("android") && + !target.contains("haiku") { + build.define("HAVE_DL_ITERATE_PHDR", "1"); + } + build.define("_GNU_SOURCE", "1"); + build.define("_LARGE_FILES", "1"); + + let syms = [ + "backtrace_full", + "backtrace_dwarf_add", + "backtrace_initialize", + "backtrace_pcinfo", + "backtrace_syminfo", + "backtrace_get_view", + "backtrace_release_view", + "backtrace_alloc", + "backtrace_free", + "backtrace_vector_finish", + "backtrace_vector_grow", + 
"backtrace_vector_release", + "backtrace_close", + "backtrace_open", + "backtrace_print", + "backtrace_simple", + "backtrace_qsort", + "backtrace_create_state", + "backtrace_uncompress_zdebug", + ]; + for sym in syms.iter() { + build.define(sym, &format!("__rbt_{}", sym)[..]); + } + + build.compile("backtrace"); +} diff --git a/backtrace-sys/src/lib.rs b/backtrace-sys/src/lib.rs new file mode 100644 index 000000000..0edc2674c --- /dev/null +++ b/backtrace-sys/src/lib.rs @@ -0,0 +1,44 @@ +#![allow(bad_style)] + +extern crate libc; + +use libc::uintptr_t; +use std::os::raw::{c_void, c_char, c_int}; + +pub type backtrace_syminfo_callback = + extern fn(data: *mut c_void, + pc: uintptr_t, + symname: *const c_char, + symval: uintptr_t, + symsize: uintptr_t); +pub type backtrace_full_callback = + extern fn(data: *mut c_void, + pc: uintptr_t, + filename: *const c_char, + lineno: c_int, + function: *const c_char) -> c_int; +pub type backtrace_error_callback = + extern fn(data: *mut c_void, + msg: *const c_char, + errnum: c_int); +pub enum backtrace_state {} + +extern { + #[link_name = "__rbt_backtrace_create_state"] + pub fn backtrace_create_state(filename: *const c_char, + threaded: c_int, + error: backtrace_error_callback, + data: *mut c_void) -> *mut backtrace_state; + #[link_name = "__rbt_backtrace_syminfo"] + pub fn backtrace_syminfo(state: *mut backtrace_state, + addr: uintptr_t, + cb: backtrace_syminfo_callback, + error: backtrace_error_callback, + data: *mut c_void) -> c_int; + #[link_name = "__rbt_backtrace_pcinfo"] + pub fn backtrace_pcinfo(state: *mut backtrace_state, + addr: uintptr_t, + cb: backtrace_full_callback, + error: backtrace_error_callback, + data: *mut c_void) -> c_int; +} diff --git a/backtrace-sys/src/libbacktrace/LICENSE b/backtrace-sys/src/libbacktrace/LICENSE new file mode 100644 index 000000000..097d2774e --- /dev/null +++ b/backtrace-sys/src/libbacktrace/LICENSE @@ -0,0 +1,29 @@ +# Copyright (C) 2012-2016 Free Software Foundation, Inc. + +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: + +# (1) Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. + +# (2) Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. + +# (3) The name of the author may not be used to +# endorse or promote products derived from this software without +# specific prior written permission. + +# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR +# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, +# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, +# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING +# IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
diff --git a/backtrace-sys/src/libbacktrace/Makefile.am b/backtrace-sys/src/libbacktrace/Makefile.am new file mode 100644 index 000000000..56a3d1b13 --- /dev/null +++ b/backtrace-sys/src/libbacktrace/Makefile.am @@ -0,0 +1,206 @@ +# Makefile.am -- Backtrace Makefile. +# Copyright (C) 2012-2018 Free Software Foundation, Inc. + +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: + +# (1) Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. + +# (2) Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. + +# (3) The name of the author may not be used to +# endorse or promote products derived from this software without +# specific prior written permission. + +# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR +# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, +# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, +# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING +# IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +ACLOCAL_AMFLAGS = -I config + +AM_CFLAGS = $(EXTRA_FLAGS) $(WARN_FLAGS) $(PIC_FLAG) + +include_HEADERS = backtrace.h backtrace-supported.h + +lib_LTLIBRARIES = libbacktrace.la + +libbacktrace_la_SOURCES = \ + backtrace.h \ + atomic.c \ + dwarf.c \ + fileline.c \ + internal.h \ + posix.c \ + print.c \ + sort.c \ + state.c + +BACKTRACE_FILES = \ + backtrace.c \ + simple.c \ + nounwind.c + +FORMAT_FILES = \ + elf.c \ + pecoff.c \ + unknown.c \ + xcoff.c + +VIEW_FILES = \ + read.c \ + mmapio.c + +ALLOC_FILES = \ + alloc.c \ + mmap.c + +EXTRA_libbacktrace_la_SOURCES = \ + $(BACKTRACE_FILES) \ + $(FORMAT_FILES) \ + $(VIEW_FILES) \ + $(ALLOC_FILES) + +libbacktrace_la_LIBADD = \ + $(BACKTRACE_FILE) \ + $(FORMAT_FILE) \ + $(VIEW_FILE) \ + $(ALLOC_FILE) + +libbacktrace_la_DEPENDENCIES = $(libbacktrace_la_LIBADD) + +# Testsuite. 
+ +check_PROGRAMS = +CLEANFILES = + +TESTS = $(check_PROGRAMS) + +if NATIVE + +btest_SOURCES = btest.c testlib.c +btest_CFLAGS = $(AM_CFLAGS) -g -O +btest_LDADD = libbacktrace.la + +check_PROGRAMS += btest + +btest_static_SOURCES = btest.c testlib.c +btest_static_CFLAGS = $(AM_CFLAGS) -g -O +btest_static_LDADD = libbacktrace.la +btest_static_LDFLAGS = -static-libtool-libs + +check_PROGRAMS += btest_static + +stest_SOURCES = stest.c +stest_LDADD = libbacktrace.la + +check_PROGRAMS += stest + +ztest_SOURCES = ztest.c testlib.c +ztest_CFLAGS = -DSRCDIR=\"$(srcdir)\" +ztest_LDADD = libbacktrace.la + +if HAVE_ZLIB +ztest_LDADD += -lz +endif +ztest_LDADD += $(CLOCK_GETTIME_LINK) + +check_PROGRAMS += ztest + +edtest_SOURCES = edtest.c edtest2_build.c testlib.c +edtest_LDADD = libbacktrace.la + +check_PROGRAMS += edtest + +edtest2_build.c: gen_edtest2_build; @true +gen_edtest2_build: $(srcdir)/edtest2.c + cat $(srcdir)/edtest2.c > tmp-edtest2_build.c + $(SHELL) $(srcdir)/move-if-change tmp-edtest2_build.c edtest2_build.c + echo timestamp > $@ + +CLEANFILES += edtest2_build.c gen_edtest2_build + +if HAVE_PTHREAD + +check_PROGRAMS += ttest + +ttest_SOURCES = ttest.c testlib.c +ttest_CFLAGS = $(AM_CFLAGS) -pthread +ttest_LDADD = libbacktrace.la + +endif HAVE_PTHREAD + +if HAVE_OBJCOPY_DEBUGLINK + +TESTS += dtest + +dtest: btest_static + $(OBJCOPY) --only-keep-debug btest_static btest.debug + $(OBJCOPY) --strip-debug --add-gnu-debuglink=btest.debug btest_static dtest + +CLEANFILES += dtest btest.debug + +endif HAVE_OBJCOPY_DEBUGLINK + +if HAVE_COMPRESSED_DEBUG + +ctestg_SOURCES = btest.c testlib.c +ctestg_CFLAGS = $(AM_CFLAGS) -g +ctestg_LDFLAGS = -Wl,--compress-debug-sections=zlib-gnu +ctestg_LDADD = libbacktrace.la + +ctesta_SOURCES = btest.c testlib.c +ctesta_CFLAGS = $(AM_CFLAGS) -g +ctesta_LDFLAGS = -Wl,--compress-debug-sections=zlib-gabi +ctesta_LDADD = libbacktrace.la + +check_PROGRAMS += ctestg ctesta + +endif + +endif NATIVE + +# We can't use automake's automatic dependency tracking, because it +# breaks when using bootstrap-lean. Automatic dependency tracking +# with GCC bootstrap will cause some of the objects to depend on +# header files in prev-gcc/include, e.g., stddef.h and stdarg.h. When +# using bootstrap-lean, prev-gcc is removed after each stage. When +# running "make install", those header files will be gone, causing the +# library to be rebuilt at install time. That may not succeed. + +# These manual dependencies do not include dependencies on unwind.h, +# even though that is part of GCC, because where to find it depends on +# whether we are being built as a host library or a target library. 
+ +alloc.lo: config.h backtrace.h internal.h +backtrace.lo: config.h backtrace.h internal.h +btest.lo: backtrace.h backtrace-supported.h filenames.h +dwarf.lo: config.h filenames.h backtrace.h internal.h +elf.lo: config.h backtrace.h internal.h +fileline.lo: config.h backtrace.h internal.h +mmap.lo: config.h backtrace.h internal.h +mmapio.lo: config.h backtrace.h internal.h +nounwind.lo: config.h internal.h +pecoff.lo: config.h backtrace.h internal.h +posix.lo: config.h backtrace.h internal.h +print.lo: config.h backtrace.h internal.h +read.lo: config.h backtrace.h internal.h +simple.lo: config.h backtrace.h internal.h +sort.lo: config.h backtrace.h internal.h +stest.lo: config.h backtrace.h internal.h +state.lo: config.h backtrace.h backtrace-supported.h internal.h +unknown.lo: config.h backtrace.h internal.h +xcoff.lo: config.h backtrace.h internal.h + diff --git a/backtrace-sys/src/libbacktrace/Makefile.in b/backtrace-sys/src/libbacktrace/Makefile.in new file mode 100644 index 000000000..498f7fb09 --- /dev/null +++ b/backtrace-sys/src/libbacktrace/Makefile.in @@ -0,0 +1,1021 @@ +# Makefile.in generated by automake 1.11.6 from Makefile.am. +# @configure_input@ + +# Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, +# 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011 Free Software +# Foundation, Inc. +# This Makefile.in is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY, to the extent permitted by law; without +# even the implied warranty of MERCHANTABILITY or FITNESS FOR A +# PARTICULAR PURPOSE. + +@SET_MAKE@ + +# Makefile.am -- Backtrace Makefile. +# Copyright (C) 2012-2018 Free Software Foundation, Inc. + +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: + +# (1) Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. + +# (2) Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. + +# (3) The name of the author may not be used to +# endorse or promote products derived from this software without +# specific prior written permission. + +# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR +# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, +# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, +# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING +# IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+ + +VPATH = @srcdir@ +am__make_dryrun = \ + { \ + am__dry=no; \ + case $$MAKEFLAGS in \ + *\\[\ \ ]*) \ + echo 'am--echo: ; @echo "AM" OK' | $(MAKE) -f - 2>/dev/null \ + | grep '^AM OK$$' >/dev/null || am__dry=yes;; \ + *) \ + for am__flg in $$MAKEFLAGS; do \ + case $$am__flg in \ + *=*|--*) ;; \ + *n*) am__dry=yes; break;; \ + esac; \ + done;; \ + esac; \ + test $$am__dry = yes; \ + } +pkgdatadir = $(datadir)/@PACKAGE@ +pkgincludedir = $(includedir)/@PACKAGE@ +pkglibdir = $(libdir)/@PACKAGE@ +pkglibexecdir = $(libexecdir)/@PACKAGE@ +am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd +install_sh_DATA = $(install_sh) -c -m 644 +install_sh_PROGRAM = $(install_sh) -c +install_sh_SCRIPT = $(install_sh) -c +INSTALL_HEADER = $(INSTALL_DATA) +transform = $(program_transform_name) +NORMAL_INSTALL = : +PRE_INSTALL = : +POST_INSTALL = : +NORMAL_UNINSTALL = : +PRE_UNINSTALL = : +POST_UNINSTALL = : +build_triplet = @build@ +host_triplet = @host@ +target_triplet = @target@ +check_PROGRAMS = $(am__EXEEXT_1) $(am__EXEEXT_2) $(am__EXEEXT_3) +@NATIVE_TRUE@am__append_1 = btest btest_static stest ztest edtest +@HAVE_ZLIB_TRUE@@NATIVE_TRUE@am__append_2 = -lz +@NATIVE_TRUE@am__append_3 = edtest2_build.c gen_edtest2_build +@HAVE_PTHREAD_TRUE@@NATIVE_TRUE@am__append_4 = ttest +@HAVE_OBJCOPY_DEBUGLINK_TRUE@@NATIVE_TRUE@am__append_5 = dtest +@HAVE_OBJCOPY_DEBUGLINK_TRUE@@NATIVE_TRUE@am__append_6 = dtest btest.debug +@HAVE_COMPRESSED_DEBUG_TRUE@@NATIVE_TRUE@am__append_7 = ctestg ctesta +subdir = . +DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \ + $(top_srcdir)/configure $(am__configure_deps) \ + $(srcdir)/config.h.in $(srcdir)/backtrace-supported.h.in \ + $(include_HEADERS) +ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 +am__aclocal_m4_deps = $(top_srcdir)/config/libtool.m4 \ + $(top_srcdir)/config/ltoptions.m4 \ + $(top_srcdir)/config/ltsugar.m4 \ + $(top_srcdir)/config/ltversion.m4 \ + $(top_srcdir)/config/lt~obsolete.m4 $(top_srcdir)/acinclude.m4 \ + $(top_srcdir)/configure.ac +am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ + $(ACLOCAL_M4) +am__CONFIG_DISTCLEAN_FILES = config.status config.cache config.log \ + configure.lineno config.status.lineno +mkinstalldirs = $(install_sh) -d +CONFIG_HEADER = config.h +CONFIG_CLEAN_FILES = backtrace-supported.h +CONFIG_CLEAN_VPATH_FILES = +am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; +am__vpath_adj = case $$p in \ + $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ + *) f=$$p;; \ + esac; +am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; +am__install_max = 40 +am__nobase_strip_setup = \ + srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` +am__nobase_strip = \ + for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" +am__nobase_list = $(am__nobase_strip_setup); \ + for p in $$list; do echo "$$p $$p"; done | \ + sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ + $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ + if (++n[$$2] == $(am__install_max)) \ + { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ + END { for (dir in files) print dir, files[dir] }' +am__base_list = \ + sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ + sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' +am__uninstall_files_from_dir = { \ + test -z "$$files" \ + || { test ! -d "$$dir" && test ! -f "$$dir" && test ! 
-r "$$dir"; } \ + || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ + $(am__cd) "$$dir" && rm -f $$files; }; \ + } +am__installdirs = "$(DESTDIR)$(libdir)" "$(DESTDIR)$(includedir)" +LTLIBRARIES = $(lib_LTLIBRARIES) +am__DEPENDENCIES_1 = +am_libbacktrace_la_OBJECTS = atomic.lo dwarf.lo fileline.lo posix.lo \ + print.lo sort.lo state.lo +libbacktrace_la_OBJECTS = $(am_libbacktrace_la_OBJECTS) +@NATIVE_TRUE@am__EXEEXT_1 = btest$(EXEEXT) btest_static$(EXEEXT) \ +@NATIVE_TRUE@ stest$(EXEEXT) ztest$(EXEEXT) edtest$(EXEEXT) +@HAVE_PTHREAD_TRUE@@NATIVE_TRUE@am__EXEEXT_2 = ttest$(EXEEXT) +@HAVE_COMPRESSED_DEBUG_TRUE@@NATIVE_TRUE@am__EXEEXT_3 = \ +@HAVE_COMPRESSED_DEBUG_TRUE@@NATIVE_TRUE@ ctestg$(EXEEXT) \ +@HAVE_COMPRESSED_DEBUG_TRUE@@NATIVE_TRUE@ ctesta$(EXEEXT) +@NATIVE_TRUE@am_btest_OBJECTS = btest-btest.$(OBJEXT) \ +@NATIVE_TRUE@ btest-testlib.$(OBJEXT) +btest_OBJECTS = $(am_btest_OBJECTS) +@NATIVE_TRUE@btest_DEPENDENCIES = libbacktrace.la +btest_LINK = $(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) \ + --mode=link $(CCLD) $(btest_CFLAGS) $(CFLAGS) $(AM_LDFLAGS) \ + $(LDFLAGS) -o $@ +@NATIVE_TRUE@am_btest_static_OBJECTS = btest_static-btest.$(OBJEXT) \ +@NATIVE_TRUE@ btest_static-testlib.$(OBJEXT) +btest_static_OBJECTS = $(am_btest_static_OBJECTS) +@NATIVE_TRUE@btest_static_DEPENDENCIES = libbacktrace.la +btest_static_LINK = $(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) \ + $(LIBTOOLFLAGS) --mode=link $(CCLD) $(btest_static_CFLAGS) \ + $(CFLAGS) $(btest_static_LDFLAGS) $(LDFLAGS) -o $@ +@HAVE_COMPRESSED_DEBUG_TRUE@@NATIVE_TRUE@am_ctesta_OBJECTS = ctesta-btest.$(OBJEXT) \ +@HAVE_COMPRESSED_DEBUG_TRUE@@NATIVE_TRUE@ ctesta-testlib.$(OBJEXT) +ctesta_OBJECTS = $(am_ctesta_OBJECTS) +@HAVE_COMPRESSED_DEBUG_TRUE@@NATIVE_TRUE@ctesta_DEPENDENCIES = \ +@HAVE_COMPRESSED_DEBUG_TRUE@@NATIVE_TRUE@ libbacktrace.la +ctesta_LINK = $(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) \ + --mode=link $(CCLD) $(ctesta_CFLAGS) $(CFLAGS) \ + $(ctesta_LDFLAGS) $(LDFLAGS) -o $@ +@HAVE_COMPRESSED_DEBUG_TRUE@@NATIVE_TRUE@am_ctestg_OBJECTS = ctestg-btest.$(OBJEXT) \ +@HAVE_COMPRESSED_DEBUG_TRUE@@NATIVE_TRUE@ ctestg-testlib.$(OBJEXT) +ctestg_OBJECTS = $(am_ctestg_OBJECTS) +@HAVE_COMPRESSED_DEBUG_TRUE@@NATIVE_TRUE@ctestg_DEPENDENCIES = \ +@HAVE_COMPRESSED_DEBUG_TRUE@@NATIVE_TRUE@ libbacktrace.la +ctestg_LINK = $(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) \ + --mode=link $(CCLD) $(ctestg_CFLAGS) $(CFLAGS) \ + $(ctestg_LDFLAGS) $(LDFLAGS) -o $@ +@NATIVE_TRUE@am_edtest_OBJECTS = edtest.$(OBJEXT) \ +@NATIVE_TRUE@ edtest2_build.$(OBJEXT) testlib.$(OBJEXT) +edtest_OBJECTS = $(am_edtest_OBJECTS) +@NATIVE_TRUE@edtest_DEPENDENCIES = libbacktrace.la +@NATIVE_TRUE@am_stest_OBJECTS = stest.$(OBJEXT) +stest_OBJECTS = $(am_stest_OBJECTS) +@NATIVE_TRUE@stest_DEPENDENCIES = libbacktrace.la +@HAVE_PTHREAD_TRUE@@NATIVE_TRUE@am_ttest_OBJECTS = \ +@HAVE_PTHREAD_TRUE@@NATIVE_TRUE@ ttest-ttest.$(OBJEXT) \ +@HAVE_PTHREAD_TRUE@@NATIVE_TRUE@ ttest-testlib.$(OBJEXT) +ttest_OBJECTS = $(am_ttest_OBJECTS) +@HAVE_PTHREAD_TRUE@@NATIVE_TRUE@ttest_DEPENDENCIES = libbacktrace.la +ttest_LINK = $(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) \ + --mode=link $(CCLD) $(ttest_CFLAGS) $(CFLAGS) $(AM_LDFLAGS) \ + $(LDFLAGS) -o $@ +@NATIVE_TRUE@am_ztest_OBJECTS = ztest-ztest.$(OBJEXT) \ +@NATIVE_TRUE@ ztest-testlib.$(OBJEXT) +ztest_OBJECTS = $(am_ztest_OBJECTS) +@NATIVE_TRUE@ztest_DEPENDENCIES = libbacktrace.la \ +@NATIVE_TRUE@ $(am__DEPENDENCIES_1) $(am__DEPENDENCIES_1) +ztest_LINK = $(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) \ + 
--mode=link $(CCLD) $(ztest_CFLAGS) $(CFLAGS) $(AM_LDFLAGS) \ + $(LDFLAGS) -o $@ +DEFAULT_INCLUDES = -I.@am__isrc@ +depcomp = +am__depfiles_maybe = +COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \ + $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) +LTCOMPILE = $(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) \ + --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \ + $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) +CCLD = $(CC) +LINK = $(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) \ + --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) $(AM_LDFLAGS) \ + $(LDFLAGS) -o $@ +SOURCES = $(libbacktrace_la_SOURCES) $(EXTRA_libbacktrace_la_SOURCES) \ + $(btest_SOURCES) $(btest_static_SOURCES) $(ctesta_SOURCES) \ + $(ctestg_SOURCES) $(edtest_SOURCES) $(stest_SOURCES) \ + $(ttest_SOURCES) $(ztest_SOURCES) +MULTISRCTOP = +MULTIBUILDTOP = +MULTIDIRS = +MULTISUBDIR = +MULTIDO = true +MULTICLEAN = true +am__can_run_installinfo = \ + case $$AM_UPDATE_INFO_DIR in \ + n|no|NO) false;; \ + *) (install-info --version) >/dev/null 2>&1;; \ + esac +HEADERS = $(include_HEADERS) +ETAGS = etags +CTAGS = ctags +am__tty_colors = \ +red=; grn=; lgn=; blu=; std= +ACLOCAL = @ACLOCAL@ +ALLOC_FILE = @ALLOC_FILE@ +AMTAR = @AMTAR@ +AR = @AR@ +AUTOCONF = @AUTOCONF@ +AUTOHEADER = @AUTOHEADER@ +AUTOMAKE = @AUTOMAKE@ +AWK = @AWK@ +BACKTRACE_FILE = @BACKTRACE_FILE@ +BACKTRACE_SUPPORTED = @BACKTRACE_SUPPORTED@ +BACKTRACE_SUPPORTS_DATA = @BACKTRACE_SUPPORTS_DATA@ +BACKTRACE_SUPPORTS_THREADS = @BACKTRACE_SUPPORTS_THREADS@ +BACKTRACE_USES_MALLOC = @BACKTRACE_USES_MALLOC@ +CC = @CC@ +CFLAGS = @CFLAGS@ +CLOCK_GETTIME_LINK = @CLOCK_GETTIME_LINK@ +CPP = @CPP@ +CPPFLAGS = @CPPFLAGS@ +CYGPATH_W = @CYGPATH_W@ +DEFS = @DEFS@ +DSYMUTIL = @DSYMUTIL@ +DUMPBIN = @DUMPBIN@ +ECHO_C = @ECHO_C@ +ECHO_N = @ECHO_N@ +ECHO_T = @ECHO_T@ +EGREP = @EGREP@ +EXEEXT = @EXEEXT@ +EXTRA_FLAGS = @EXTRA_FLAGS@ +FGREP = @FGREP@ +FORMAT_FILE = @FORMAT_FILE@ +GREP = @GREP@ +INSTALL = @INSTALL@ +INSTALL_DATA = @INSTALL_DATA@ +INSTALL_PROGRAM = @INSTALL_PROGRAM@ +INSTALL_SCRIPT = @INSTALL_SCRIPT@ +INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ +LD = @LD@ +LDFLAGS = @LDFLAGS@ +LIBOBJS = @LIBOBJS@ +LIBS = @LIBS@ +LIBTOOL = @LIBTOOL@ +LIPO = @LIPO@ +LN_S = @LN_S@ +LTLIBOBJS = @LTLIBOBJS@ +MAINT = @MAINT@ +MAKEINFO = @MAKEINFO@ +MKDIR_P = @MKDIR_P@ +NM = @NM@ +NMEDIT = @NMEDIT@ +OBJCOPY = @OBJCOPY@ +OBJEXT = @OBJEXT@ +OTOOL = @OTOOL@ +OTOOL64 = @OTOOL64@ +PACKAGE = @PACKAGE@ +PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ +PACKAGE_NAME = @PACKAGE_NAME@ +PACKAGE_STRING = @PACKAGE_STRING@ +PACKAGE_TARNAME = @PACKAGE_TARNAME@ +PACKAGE_URL = @PACKAGE_URL@ +PACKAGE_VERSION = @PACKAGE_VERSION@ +PATH_SEPARATOR = @PATH_SEPARATOR@ +PIC_FLAG = @PIC_FLAG@ +PTHREAD_CFLAGS = @PTHREAD_CFLAGS@ +RANLIB = @RANLIB@ +SED = @SED@ +SET_MAKE = @SET_MAKE@ +SHELL = @SHELL@ +STRIP = @STRIP@ +VERSION = @VERSION@ +VIEW_FILE = @VIEW_FILE@ +WARN_FLAGS = @WARN_FLAGS@ +abs_builddir = @abs_builddir@ +abs_srcdir = @abs_srcdir@ +abs_top_builddir = @abs_top_builddir@ +abs_top_srcdir = @abs_top_srcdir@ +ac_ct_CC = @ac_ct_CC@ +ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ +am__leading_dot = @am__leading_dot@ +am__tar = @am__tar@ +am__untar = @am__untar@ +bindir = @bindir@ +build = @build@ +build_alias = @build_alias@ +build_cpu = @build_cpu@ +build_os = @build_os@ +build_vendor = @build_vendor@ +builddir = @builddir@ +datadir = @datadir@ +datarootdir = @datarootdir@ +docdir = @docdir@ +dvidir = @dvidir@ +exec_prefix = @exec_prefix@ +host = @host@ +host_alias = @host_alias@ +host_cpu = @host_cpu@ 
+host_os = @host_os@ +host_vendor = @host_vendor@ +htmldir = @htmldir@ +includedir = @includedir@ +infodir = @infodir@ +install_sh = @install_sh@ +libdir = @libdir@ +libexecdir = @libexecdir@ +libtool_VERSION = @libtool_VERSION@ +localedir = @localedir@ +localstatedir = @localstatedir@ +lt_ECHO = @lt_ECHO@ +mandir = @mandir@ +mkdir_p = @mkdir_p@ +multi_basedir = @multi_basedir@ +oldincludedir = @oldincludedir@ +pdfdir = @pdfdir@ +prefix = @prefix@ +program_transform_name = @program_transform_name@ +psdir = @psdir@ +sbindir = @sbindir@ +sharedstatedir = @sharedstatedir@ +srcdir = @srcdir@ +sysconfdir = @sysconfdir@ +target = @target@ +target_alias = @target_alias@ +target_cpu = @target_cpu@ +target_os = @target_os@ +target_vendor = @target_vendor@ +top_build_prefix = @top_build_prefix@ +top_builddir = @top_builddir@ +top_srcdir = @top_srcdir@ +ACLOCAL_AMFLAGS = -I config +AM_CFLAGS = $(EXTRA_FLAGS) $(WARN_FLAGS) $(PIC_FLAG) +include_HEADERS = backtrace.h backtrace-supported.h +lib_LTLIBRARIES = libbacktrace.la +libbacktrace_la_SOURCES = \ + backtrace.h \ + atomic.c \ + dwarf.c \ + fileline.c \ + internal.h \ + posix.c \ + print.c \ + sort.c \ + state.c + +BACKTRACE_FILES = \ + backtrace.c \ + simple.c \ + nounwind.c + +FORMAT_FILES = \ + elf.c \ + pecoff.c \ + unknown.c \ + xcoff.c + +VIEW_FILES = \ + read.c \ + mmapio.c + +ALLOC_FILES = \ + alloc.c \ + mmap.c + +EXTRA_libbacktrace_la_SOURCES = \ + $(BACKTRACE_FILES) \ + $(FORMAT_FILES) \ + $(VIEW_FILES) \ + $(ALLOC_FILES) + +libbacktrace_la_LIBADD = \ + $(BACKTRACE_FILE) \ + $(FORMAT_FILE) \ + $(VIEW_FILE) \ + $(ALLOC_FILE) + +libbacktrace_la_DEPENDENCIES = $(libbacktrace_la_LIBADD) +CLEANFILES = $(am__append_3) $(am__append_6) +TESTS = $(check_PROGRAMS) $(am__append_5) +@NATIVE_TRUE@btest_SOURCES = btest.c testlib.c +@NATIVE_TRUE@btest_CFLAGS = $(AM_CFLAGS) -g -O +@NATIVE_TRUE@btest_LDADD = libbacktrace.la +@NATIVE_TRUE@btest_static_SOURCES = btest.c testlib.c +@NATIVE_TRUE@btest_static_CFLAGS = $(AM_CFLAGS) -g -O +@NATIVE_TRUE@btest_static_LDADD = libbacktrace.la +@NATIVE_TRUE@btest_static_LDFLAGS = -static-libtool-libs +@NATIVE_TRUE@stest_SOURCES = stest.c +@NATIVE_TRUE@stest_LDADD = libbacktrace.la +@NATIVE_TRUE@ztest_SOURCES = ztest.c testlib.c +@NATIVE_TRUE@ztest_CFLAGS = -DSRCDIR=\"$(srcdir)\" +@NATIVE_TRUE@ztest_LDADD = libbacktrace.la $(am__append_2) \ +@NATIVE_TRUE@ $(CLOCK_GETTIME_LINK) +@NATIVE_TRUE@edtest_SOURCES = edtest.c edtest2_build.c testlib.c +@NATIVE_TRUE@edtest_LDADD = libbacktrace.la +@HAVE_PTHREAD_TRUE@@NATIVE_TRUE@ttest_SOURCES = ttest.c testlib.c +@HAVE_PTHREAD_TRUE@@NATIVE_TRUE@ttest_CFLAGS = $(AM_CFLAGS) -pthread +@HAVE_PTHREAD_TRUE@@NATIVE_TRUE@ttest_LDADD = libbacktrace.la +@HAVE_COMPRESSED_DEBUG_TRUE@@NATIVE_TRUE@ctestg_SOURCES = btest.c testlib.c +@HAVE_COMPRESSED_DEBUG_TRUE@@NATIVE_TRUE@ctestg_CFLAGS = $(AM_CFLAGS) -g +@HAVE_COMPRESSED_DEBUG_TRUE@@NATIVE_TRUE@ctestg_LDFLAGS = -Wl,--compress-debug-sections=zlib-gnu +@HAVE_COMPRESSED_DEBUG_TRUE@@NATIVE_TRUE@ctestg_LDADD = libbacktrace.la +@HAVE_COMPRESSED_DEBUG_TRUE@@NATIVE_TRUE@ctesta_SOURCES = btest.c testlib.c +@HAVE_COMPRESSED_DEBUG_TRUE@@NATIVE_TRUE@ctesta_CFLAGS = $(AM_CFLAGS) -g +@HAVE_COMPRESSED_DEBUG_TRUE@@NATIVE_TRUE@ctesta_LDFLAGS = -Wl,--compress-debug-sections=zlib-gabi +@HAVE_COMPRESSED_DEBUG_TRUE@@NATIVE_TRUE@ctesta_LDADD = libbacktrace.la +all: config.h + $(MAKE) $(AM_MAKEFLAGS) all-am + +.SUFFIXES: +.SUFFIXES: .c .lo .o .obj +am--refresh: Makefile + @: +$(srcdir)/Makefile.in: @MAINTAINER_MODE_TRUE@ $(srcdir)/Makefile.am $(am__configure_deps) 
+ @for dep in $?; do \ + case '$(am__configure_deps)' in \ + *$$dep*) \ + echo ' cd $(srcdir) && $(AUTOMAKE) --foreign --ignore-deps'; \ + $(am__cd) $(srcdir) && $(AUTOMAKE) --foreign --ignore-deps \ + && exit 0; \ + exit 1;; \ + esac; \ + done; \ + echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign --ignore-deps Makefile'; \ + $(am__cd) $(top_srcdir) && \ + $(AUTOMAKE) --foreign --ignore-deps Makefile +.PRECIOUS: Makefile +Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status + @case '$?' in \ + *config.status*) \ + echo ' $(SHELL) ./config.status'; \ + $(SHELL) ./config.status;; \ + *) \ + echo ' cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe)'; \ + cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe);; \ + esac; + +$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) + $(SHELL) ./config.status --recheck + +$(top_srcdir)/configure: @MAINTAINER_MODE_TRUE@ $(am__configure_deps) + $(am__cd) $(srcdir) && $(AUTOCONF) +$(ACLOCAL_M4): @MAINTAINER_MODE_TRUE@ $(am__aclocal_m4_deps) + $(am__cd) $(srcdir) && $(ACLOCAL) $(ACLOCAL_AMFLAGS) +$(am__aclocal_m4_deps): + +config.h: stamp-h1 + @if test ! -f $@; then rm -f stamp-h1; else :; fi + @if test ! -f $@; then $(MAKE) $(AM_MAKEFLAGS) stamp-h1; else :; fi + +stamp-h1: $(srcdir)/config.h.in $(top_builddir)/config.status + @rm -f stamp-h1 + cd $(top_builddir) && $(SHELL) ./config.status config.h +$(srcdir)/config.h.in: @MAINTAINER_MODE_TRUE@ $(am__configure_deps) + ($(am__cd) $(top_srcdir) && $(AUTOHEADER)) + rm -f stamp-h1 + touch $@ + +distclean-hdr: + -rm -f config.h stamp-h1 +backtrace-supported.h: $(top_builddir)/config.status $(srcdir)/backtrace-supported.h.in + cd $(top_builddir) && $(SHELL) ./config.status $@ +install-libLTLIBRARIES: $(lib_LTLIBRARIES) + @$(NORMAL_INSTALL) + @list='$(lib_LTLIBRARIES)'; test -n "$(libdir)" || list=; \ + list2=; for p in $$list; do \ + if test -f $$p; then \ + list2="$$list2 $$p"; \ + else :; fi; \ + done; \ + test -z "$$list2" || { \ + echo " $(MKDIR_P) '$(DESTDIR)$(libdir)'"; \ + $(MKDIR_P) "$(DESTDIR)$(libdir)" || exit 1; \ + echo " $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL) $(INSTALL_STRIP_FLAG) $$list2 '$(DESTDIR)$(libdir)'"; \ + $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL) $(INSTALL_STRIP_FLAG) $$list2 "$(DESTDIR)$(libdir)"; \ + } + +uninstall-libLTLIBRARIES: + @$(NORMAL_UNINSTALL) + @list='$(lib_LTLIBRARIES)'; test -n "$(libdir)" || list=; \ + for p in $$list; do \ + $(am__strip_dir) \ + echo " $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=uninstall rm -f '$(DESTDIR)$(libdir)/$$f'"; \ + $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=uninstall rm -f "$(DESTDIR)$(libdir)/$$f"; \ + done + +clean-libLTLIBRARIES: + -test -z "$(lib_LTLIBRARIES)" || rm -f $(lib_LTLIBRARIES) + @list='$(lib_LTLIBRARIES)'; for p in $$list; do \ + dir="`echo $$p | sed -e 's|/[^/]*$$||'`"; \ + test "$$dir" != "$$p" || dir=.; \ + echo "rm -f \"$${dir}/so_locations\""; \ + rm -f "$${dir}/so_locations"; \ + done +libbacktrace.la: $(libbacktrace_la_OBJECTS) $(libbacktrace_la_DEPENDENCIES) $(EXTRA_libbacktrace_la_DEPENDENCIES) + $(LINK) -rpath $(libdir) $(libbacktrace_la_OBJECTS) $(libbacktrace_la_LIBADD) $(LIBS) + +clean-checkPROGRAMS: + @list='$(check_PROGRAMS)'; test -n "$$list" || exit 0; \ + echo " rm -f" $$list; \ + rm -f $$list || exit $$?; \ + test -n "$(EXEEXT)" || exit 0; \ + list=`for p in $$list; do echo "$$p"; done | sed 's/$(EXEEXT)$$//'`; \ + echo " rm -f" $$list; \ + rm -f 
$$list +btest$(EXEEXT): $(btest_OBJECTS) $(btest_DEPENDENCIES) $(EXTRA_btest_DEPENDENCIES) + @rm -f btest$(EXEEXT) + $(btest_LINK) $(btest_OBJECTS) $(btest_LDADD) $(LIBS) +btest_static$(EXEEXT): $(btest_static_OBJECTS) $(btest_static_DEPENDENCIES) $(EXTRA_btest_static_DEPENDENCIES) + @rm -f btest_static$(EXEEXT) + $(btest_static_LINK) $(btest_static_OBJECTS) $(btest_static_LDADD) $(LIBS) +ctesta$(EXEEXT): $(ctesta_OBJECTS) $(ctesta_DEPENDENCIES) $(EXTRA_ctesta_DEPENDENCIES) + @rm -f ctesta$(EXEEXT) + $(ctesta_LINK) $(ctesta_OBJECTS) $(ctesta_LDADD) $(LIBS) +ctestg$(EXEEXT): $(ctestg_OBJECTS) $(ctestg_DEPENDENCIES) $(EXTRA_ctestg_DEPENDENCIES) + @rm -f ctestg$(EXEEXT) + $(ctestg_LINK) $(ctestg_OBJECTS) $(ctestg_LDADD) $(LIBS) +edtest$(EXEEXT): $(edtest_OBJECTS) $(edtest_DEPENDENCIES) $(EXTRA_edtest_DEPENDENCIES) + @rm -f edtest$(EXEEXT) + $(LINK) $(edtest_OBJECTS) $(edtest_LDADD) $(LIBS) +stest$(EXEEXT): $(stest_OBJECTS) $(stest_DEPENDENCIES) $(EXTRA_stest_DEPENDENCIES) + @rm -f stest$(EXEEXT) + $(LINK) $(stest_OBJECTS) $(stest_LDADD) $(LIBS) +ttest$(EXEEXT): $(ttest_OBJECTS) $(ttest_DEPENDENCIES) $(EXTRA_ttest_DEPENDENCIES) + @rm -f ttest$(EXEEXT) + $(ttest_LINK) $(ttest_OBJECTS) $(ttest_LDADD) $(LIBS) +ztest$(EXEEXT): $(ztest_OBJECTS) $(ztest_DEPENDENCIES) $(EXTRA_ztest_DEPENDENCIES) + @rm -f ztest$(EXEEXT) + $(ztest_LINK) $(ztest_OBJECTS) $(ztest_LDADD) $(LIBS) + +mostlyclean-compile: + -rm -f *.$(OBJEXT) + +distclean-compile: + -rm -f *.tab.c + +.c.o: + $(COMPILE) -c $< + +.c.obj: + $(COMPILE) -c `$(CYGPATH_W) '$<'` + +.c.lo: + $(LTCOMPILE) -c -o $@ $< + +btest-btest.o: btest.c + $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(btest_CFLAGS) $(CFLAGS) -c -o btest-btest.o `test -f 'btest.c' || echo '$(srcdir)/'`btest.c + +btest-btest.obj: btest.c + $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(btest_CFLAGS) $(CFLAGS) -c -o btest-btest.obj `if test -f 'btest.c'; then $(CYGPATH_W) 'btest.c'; else $(CYGPATH_W) '$(srcdir)/btest.c'; fi` + +btest-testlib.o: testlib.c + $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(btest_CFLAGS) $(CFLAGS) -c -o btest-testlib.o `test -f 'testlib.c' || echo '$(srcdir)/'`testlib.c + +btest-testlib.obj: testlib.c + $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(btest_CFLAGS) $(CFLAGS) -c -o btest-testlib.obj `if test -f 'testlib.c'; then $(CYGPATH_W) 'testlib.c'; else $(CYGPATH_W) '$(srcdir)/testlib.c'; fi` + +btest_static-btest.o: btest.c + $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(btest_static_CFLAGS) $(CFLAGS) -c -o btest_static-btest.o `test -f 'btest.c' || echo '$(srcdir)/'`btest.c + +btest_static-btest.obj: btest.c + $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(btest_static_CFLAGS) $(CFLAGS) -c -o btest_static-btest.obj `if test -f 'btest.c'; then $(CYGPATH_W) 'btest.c'; else $(CYGPATH_W) '$(srcdir)/btest.c'; fi` + +btest_static-testlib.o: testlib.c + $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(btest_static_CFLAGS) $(CFLAGS) -c -o btest_static-testlib.o `test -f 'testlib.c' || echo '$(srcdir)/'`testlib.c + +btest_static-testlib.obj: testlib.c + $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(btest_static_CFLAGS) $(CFLAGS) -c -o btest_static-testlib.obj `if test -f 'testlib.c'; then $(CYGPATH_W) 'testlib.c'; else $(CYGPATH_W) '$(srcdir)/testlib.c'; fi` + +ctesta-btest.o: btest.c + $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) 
$(AM_CPPFLAGS) $(CPPFLAGS) $(ctesta_CFLAGS) $(CFLAGS) -c -o ctesta-btest.o `test -f 'btest.c' || echo '$(srcdir)/'`btest.c + +ctesta-btest.obj: btest.c + $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(ctesta_CFLAGS) $(CFLAGS) -c -o ctesta-btest.obj `if test -f 'btest.c'; then $(CYGPATH_W) 'btest.c'; else $(CYGPATH_W) '$(srcdir)/btest.c'; fi` + +ctesta-testlib.o: testlib.c + $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(ctesta_CFLAGS) $(CFLAGS) -c -o ctesta-testlib.o `test -f 'testlib.c' || echo '$(srcdir)/'`testlib.c + +ctesta-testlib.obj: testlib.c + $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(ctesta_CFLAGS) $(CFLAGS) -c -o ctesta-testlib.obj `if test -f 'testlib.c'; then $(CYGPATH_W) 'testlib.c'; else $(CYGPATH_W) '$(srcdir)/testlib.c'; fi` + +ctestg-btest.o: btest.c + $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(ctestg_CFLAGS) $(CFLAGS) -c -o ctestg-btest.o `test -f 'btest.c' || echo '$(srcdir)/'`btest.c + +ctestg-btest.obj: btest.c + $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(ctestg_CFLAGS) $(CFLAGS) -c -o ctestg-btest.obj `if test -f 'btest.c'; then $(CYGPATH_W) 'btest.c'; else $(CYGPATH_W) '$(srcdir)/btest.c'; fi` + +ctestg-testlib.o: testlib.c + $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(ctestg_CFLAGS) $(CFLAGS) -c -o ctestg-testlib.o `test -f 'testlib.c' || echo '$(srcdir)/'`testlib.c + +ctestg-testlib.obj: testlib.c + $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(ctestg_CFLAGS) $(CFLAGS) -c -o ctestg-testlib.obj `if test -f 'testlib.c'; then $(CYGPATH_W) 'testlib.c'; else $(CYGPATH_W) '$(srcdir)/testlib.c'; fi` + +ttest-ttest.o: ttest.c + $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(ttest_CFLAGS) $(CFLAGS) -c -o ttest-ttest.o `test -f 'ttest.c' || echo '$(srcdir)/'`ttest.c + +ttest-ttest.obj: ttest.c + $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(ttest_CFLAGS) $(CFLAGS) -c -o ttest-ttest.obj `if test -f 'ttest.c'; then $(CYGPATH_W) 'ttest.c'; else $(CYGPATH_W) '$(srcdir)/ttest.c'; fi` + +ttest-testlib.o: testlib.c + $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(ttest_CFLAGS) $(CFLAGS) -c -o ttest-testlib.o `test -f 'testlib.c' || echo '$(srcdir)/'`testlib.c + +ttest-testlib.obj: testlib.c + $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(ttest_CFLAGS) $(CFLAGS) -c -o ttest-testlib.obj `if test -f 'testlib.c'; then $(CYGPATH_W) 'testlib.c'; else $(CYGPATH_W) '$(srcdir)/testlib.c'; fi` + +ztest-ztest.o: ztest.c + $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(ztest_CFLAGS) $(CFLAGS) -c -o ztest-ztest.o `test -f 'ztest.c' || echo '$(srcdir)/'`ztest.c + +ztest-ztest.obj: ztest.c + $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(ztest_CFLAGS) $(CFLAGS) -c -o ztest-ztest.obj `if test -f 'ztest.c'; then $(CYGPATH_W) 'ztest.c'; else $(CYGPATH_W) '$(srcdir)/ztest.c'; fi` + +ztest-testlib.o: testlib.c + $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(ztest_CFLAGS) $(CFLAGS) -c -o ztest-testlib.o `test -f 'testlib.c' || echo '$(srcdir)/'`testlib.c + +ztest-testlib.obj: testlib.c + $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(ztest_CFLAGS) $(CFLAGS) -c -o ztest-testlib.obj `if test -f 'testlib.c'; then $(CYGPATH_W) 'testlib.c'; else $(CYGPATH_W) 
'$(srcdir)/testlib.c'; fi` + +mostlyclean-libtool: + -rm -f *.lo + +clean-libtool: + -rm -rf .libs _libs + +distclean-libtool: + -rm -f libtool config.lt + +# GNU Make needs to see an explicit $(MAKE) variable in the command it +# runs to enable its job server during parallel builds. Hence the +# comments below. +all-multi: + $(MULTIDO) $(AM_MAKEFLAGS) DO=all multi-do # $(MAKE) +install-multi: + $(MULTIDO) $(AM_MAKEFLAGS) DO=install multi-do # $(MAKE) + +mostlyclean-multi: + $(MULTICLEAN) $(AM_MAKEFLAGS) DO=mostlyclean multi-clean # $(MAKE) +clean-multi: + $(MULTICLEAN) $(AM_MAKEFLAGS) DO=clean multi-clean # $(MAKE) +distclean-multi: + $(MULTICLEAN) $(AM_MAKEFLAGS) DO=distclean multi-clean # $(MAKE) +maintainer-clean-multi: + $(MULTICLEAN) $(AM_MAKEFLAGS) DO=maintainer-clean multi-clean # $(MAKE) +install-includeHEADERS: $(include_HEADERS) + @$(NORMAL_INSTALL) + @list='$(include_HEADERS)'; test -n "$(includedir)" || list=; \ + if test -n "$$list"; then \ + echo " $(MKDIR_P) '$(DESTDIR)$(includedir)'"; \ + $(MKDIR_P) "$(DESTDIR)$(includedir)" || exit 1; \ + fi; \ + for p in $$list; do \ + if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ + echo "$$d$$p"; \ + done | $(am__base_list) | \ + while read files; do \ + echo " $(INSTALL_HEADER) $$files '$(DESTDIR)$(includedir)'"; \ + $(INSTALL_HEADER) $$files "$(DESTDIR)$(includedir)" || exit $$?; \ + done + +uninstall-includeHEADERS: + @$(NORMAL_UNINSTALL) + @list='$(include_HEADERS)'; test -n "$(includedir)" || list=; \ + files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ + dir='$(DESTDIR)$(includedir)'; $(am__uninstall_files_from_dir) + +ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES) + list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ + unique=`for i in $$list; do \ + if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ + done | \ + $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ + END { if (nonempty) { for (i in files) print i; }; }'`; \ + mkid -fID $$unique +tags: TAGS + +TAGS: $(HEADERS) $(SOURCES) config.h.in $(TAGS_DEPENDENCIES) \ + $(TAGS_FILES) $(LISP) + set x; \ + here=`pwd`; \ + list='$(SOURCES) $(HEADERS) config.h.in $(LISP) $(TAGS_FILES)'; \ + unique=`for i in $$list; do \ + if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ + done | \ + $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ + END { if (nonempty) { for (i in files) print i; }; }'`; \ + shift; \ + if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ + test -n "$$unique" || unique=$$empty_fix; \ + if test $$# -gt 0; then \ + $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ + "$$@" $$unique; \ + else \ + $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ + $$unique; \ + fi; \ + fi +ctags: CTAGS +CTAGS: $(HEADERS) $(SOURCES) config.h.in $(TAGS_DEPENDENCIES) \ + $(TAGS_FILES) $(LISP) + list='$(SOURCES) $(HEADERS) config.h.in $(LISP) $(TAGS_FILES)'; \ + unique=`for i in $$list; do \ + if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ + done | \ + $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ + END { if (nonempty) { for (i in files) print i; }; }'`; \ + test -z "$(CTAGS_ARGS)$$unique" \ + || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ + $$unique + +GTAGS: + here=`$(am__cd) $(top_builddir) && pwd` \ + && $(am__cd) $(top_srcdir) \ + && gtags -i $(GTAGS_ARGS) "$$here" + +distclean-tags: + -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags + +check-TESTS: $(TESTS) + @failed=0; all=0; xfail=0; xpass=0; skip=0; \ + srcdir=$(srcdir); export srcdir; \ + list=' $(TESTS) '; \ + $(am__tty_colors); \ + if test -n "$$list"; 
then \ + for tst in $$list; do \ + if test -f ./$$tst; then dir=./; \ + elif test -f $$tst; then dir=; \ + else dir="$(srcdir)/"; fi; \ + if $(TESTS_ENVIRONMENT) $${dir}$$tst; then \ + all=`expr $$all + 1`; \ + case " $(XFAIL_TESTS) " in \ + *[\ \ ]$$tst[\ \ ]*) \ + xpass=`expr $$xpass + 1`; \ + failed=`expr $$failed + 1`; \ + col=$$red; res=XPASS; \ + ;; \ + *) \ + col=$$grn; res=PASS; \ + ;; \ + esac; \ + elif test $$? -ne 77; then \ + all=`expr $$all + 1`; \ + case " $(XFAIL_TESTS) " in \ + *[\ \ ]$$tst[\ \ ]*) \ + xfail=`expr $$xfail + 1`; \ + col=$$lgn; res=XFAIL; \ + ;; \ + *) \ + failed=`expr $$failed + 1`; \ + col=$$red; res=FAIL; \ + ;; \ + esac; \ + else \ + skip=`expr $$skip + 1`; \ + col=$$blu; res=SKIP; \ + fi; \ + echo "$${col}$$res$${std}: $$tst"; \ + done; \ + if test "$$all" -eq 1; then \ + tests="test"; \ + All=""; \ + else \ + tests="tests"; \ + All="All "; \ + fi; \ + if test "$$failed" -eq 0; then \ + if test "$$xfail" -eq 0; then \ + banner="$$All$$all $$tests passed"; \ + else \ + if test "$$xfail" -eq 1; then failures=failure; else failures=failures; fi; \ + banner="$$All$$all $$tests behaved as expected ($$xfail expected $$failures)"; \ + fi; \ + else \ + if test "$$xpass" -eq 0; then \ + banner="$$failed of $$all $$tests failed"; \ + else \ + if test "$$xpass" -eq 1; then passes=pass; else passes=passes; fi; \ + banner="$$failed of $$all $$tests did not behave as expected ($$xpass unexpected $$passes)"; \ + fi; \ + fi; \ + dashes="$$banner"; \ + skipped=""; \ + if test "$$skip" -ne 0; then \ + if test "$$skip" -eq 1; then \ + skipped="($$skip test was not run)"; \ + else \ + skipped="($$skip tests were not run)"; \ + fi; \ + test `echo "$$skipped" | wc -c` -le `echo "$$banner" | wc -c` || \ + dashes="$$skipped"; \ + fi; \ + report=""; \ + if test "$$failed" -ne 0 && test -n "$(PACKAGE_BUGREPORT)"; then \ + report="Please report to $(PACKAGE_BUGREPORT)"; \ + test `echo "$$report" | wc -c` -le `echo "$$banner" | wc -c` || \ + dashes="$$report"; \ + fi; \ + dashes=`echo "$$dashes" | sed s/./=/g`; \ + if test "$$failed" -eq 0; then \ + col="$$grn"; \ + else \ + col="$$red"; \ + fi; \ + echo "$${col}$$dashes$${std}"; \ + echo "$${col}$$banner$${std}"; \ + test -z "$$skipped" || echo "$${col}$$skipped$${std}"; \ + test -z "$$report" || echo "$${col}$$report$${std}"; \ + echo "$${col}$$dashes$${std}"; \ + test "$$failed" -eq 0; \ + else :; fi +check-am: all-am + $(MAKE) $(AM_MAKEFLAGS) $(check_PROGRAMS) + $(MAKE) $(AM_MAKEFLAGS) check-TESTS +check: check-am +all-am: Makefile $(LTLIBRARIES) all-multi $(HEADERS) config.h +installdirs: + for dir in "$(DESTDIR)$(libdir)" "$(DESTDIR)$(includedir)"; do \ + test -z "$$dir" || $(MKDIR_P) "$$dir"; \ + done +install: install-am +install-exec: install-exec-am +install-data: install-data-am +uninstall: uninstall-am + +install-am: all-am + @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am + +installcheck: installcheck-am +install-strip: + if test -z '$(STRIP)'; then \ + $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ + install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ + install; \ + else \ + $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ + install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ + "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ + fi +mostlyclean-generic: + +clean-generic: + -test -z "$(CLEANFILES)" || rm -f $(CLEANFILES) + +distclean-generic: + -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) + -test . 
= "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) + +maintainer-clean-generic: + @echo "This command is intended for maintainers to use" + @echo "it deletes files that may require special tools to rebuild." +clean: clean-am clean-multi + +clean-am: clean-checkPROGRAMS clean-generic clean-libLTLIBRARIES \ + clean-libtool mostlyclean-am + +distclean: distclean-am distclean-multi + -rm -f $(am__CONFIG_DISTCLEAN_FILES) + -rm -f Makefile +distclean-am: clean-am distclean-compile distclean-generic \ + distclean-hdr distclean-libtool distclean-tags + +dvi: dvi-am + +dvi-am: + +html: html-am + +html-am: + +info: info-am + +info-am: + +install-data-am: install-includeHEADERS + +install-dvi: install-dvi-am + +install-dvi-am: + +install-exec-am: install-libLTLIBRARIES install-multi + +install-html: install-html-am + +install-html-am: + +install-info: install-info-am + +install-info-am: + +install-man: + +install-pdf: install-pdf-am + +install-pdf-am: + +install-ps: install-ps-am + +install-ps-am: + +installcheck-am: + +maintainer-clean: maintainer-clean-am maintainer-clean-multi + -rm -f $(am__CONFIG_DISTCLEAN_FILES) + -rm -rf $(top_srcdir)/autom4te.cache + -rm -f Makefile +maintainer-clean-am: distclean-am maintainer-clean-generic + +mostlyclean: mostlyclean-am mostlyclean-multi + +mostlyclean-am: mostlyclean-compile mostlyclean-generic \ + mostlyclean-libtool + +pdf: pdf-am + +pdf-am: + +ps: ps-am + +ps-am: + +uninstall-am: uninstall-includeHEADERS uninstall-libLTLIBRARIES + +.MAKE: all all-multi check-am clean-multi distclean-multi install-am \ + install-multi install-strip maintainer-clean-multi \ + mostlyclean-multi + +.PHONY: CTAGS GTAGS all all-am all-multi am--refresh check check-TESTS \ + check-am clean clean-checkPROGRAMS clean-generic \ + clean-libLTLIBRARIES clean-libtool clean-multi ctags distclean \ + distclean-compile distclean-generic distclean-hdr \ + distclean-libtool distclean-multi distclean-tags dvi dvi-am \ + html html-am info info-am install install-am install-data \ + install-data-am install-dvi install-dvi-am install-exec \ + install-exec-am install-html install-html-am \ + install-includeHEADERS install-info install-info-am \ + install-libLTLIBRARIES install-man install-multi install-pdf \ + install-pdf-am install-ps install-ps-am install-strip \ + installcheck installcheck-am installdirs maintainer-clean \ + maintainer-clean-generic maintainer-clean-multi mostlyclean \ + mostlyclean-compile mostlyclean-generic mostlyclean-libtool \ + mostlyclean-multi pdf pdf-am ps ps-am tags uninstall \ + uninstall-am uninstall-includeHEADERS uninstall-libLTLIBRARIES + + +@NATIVE_TRUE@edtest2_build.c: gen_edtest2_build; @true +@NATIVE_TRUE@gen_edtest2_build: $(srcdir)/edtest2.c +@NATIVE_TRUE@ cat $(srcdir)/edtest2.c > tmp-edtest2_build.c +@NATIVE_TRUE@ $(SHELL) $(srcdir)/move-if-change tmp-edtest2_build.c edtest2_build.c +@NATIVE_TRUE@ echo timestamp > $@ + +@HAVE_OBJCOPY_DEBUGLINK_TRUE@@NATIVE_TRUE@dtest: btest_static +@HAVE_OBJCOPY_DEBUGLINK_TRUE@@NATIVE_TRUE@ $(OBJCOPY) --only-keep-debug btest_static btest.debug +@HAVE_OBJCOPY_DEBUGLINK_TRUE@@NATIVE_TRUE@ $(OBJCOPY) --strip-debug --add-gnu-debuglink=btest.debug btest_static dtest + +# We can't use automake's automatic dependency tracking, because it +# breaks when using bootstrap-lean. Automatic dependency tracking +# with GCC bootstrap will cause some of the objects to depend on +# header files in prev-gcc/include, e.g., stddef.h and stdarg.h. 
When +# using bootstrap-lean, prev-gcc is removed after each stage. When +# running "make install", those header files will be gone, causing the +# library to be rebuilt at install time. That may not succeed. + +# These manual dependencies do not include dependencies on unwind.h, +# even though that is part of GCC, because where to find it depends on +# whether we are being built as a host library or a target library. + +alloc.lo: config.h backtrace.h internal.h +backtrace.lo: config.h backtrace.h internal.h +btest.lo: backtrace.h backtrace-supported.h filenames.h +dwarf.lo: config.h filenames.h backtrace.h internal.h +elf.lo: config.h backtrace.h internal.h +fileline.lo: config.h backtrace.h internal.h +mmap.lo: config.h backtrace.h internal.h +mmapio.lo: config.h backtrace.h internal.h +nounwind.lo: config.h internal.h +pecoff.lo: config.h backtrace.h internal.h +posix.lo: config.h backtrace.h internal.h +print.lo: config.h backtrace.h internal.h +read.lo: config.h backtrace.h internal.h +simple.lo: config.h backtrace.h internal.h +sort.lo: config.h backtrace.h internal.h +stest.lo: config.h backtrace.h internal.h +state.lo: config.h backtrace.h backtrace-supported.h internal.h +unknown.lo: config.h backtrace.h internal.h +xcoff.lo: config.h backtrace.h internal.h + +# Tell versions [3.59,3.63) of GNU make to not export all variables. +# Otherwise a system limit (for SysV at least) may be exceeded. +.NOEXPORT: diff --git a/backtrace-sys/src/libbacktrace/Mark.Twain-Tom.Sawyer.txt b/backtrace-sys/src/libbacktrace/Mark.Twain-Tom.Sawyer.txt new file mode 100644 index 000000000..c9106fd52 --- /dev/null +++ b/backtrace-sys/src/libbacktrace/Mark.Twain-Tom.Sawyer.txt @@ -0,0 +1,8465 @@ +Produced by David Widger. The previous edition was updated by Jose +Menendez. + + + + + + THE ADVENTURES OF TOM SAWYER + BY + MARK TWAIN + (Samuel Langhorne Clemens) + + + + + P R E F A C E + +MOST of the adventures recorded in this book really occurred; one or +two were experiences of my own, the rest those of boys who were +schoolmates of mine. Huck Finn is drawn from life; Tom Sawyer also, but +not from an individual--he is a combination of the characteristics of +three boys whom I knew, and therefore belongs to the composite order of +architecture. + +The odd superstitions touched upon were all prevalent among children +and slaves in the West at the period of this story--that is to say, +thirty or forty years ago. + +Although my book is intended mainly for the entertainment of boys and +girls, I hope it will not be shunned by men and women on that account, +for part of my plan has been to try to pleasantly remind adults of what +they once were themselves, and of how they felt and thought and talked, +and what queer enterprises they sometimes engaged in. + + THE AUTHOR. + +HARTFORD, 1876. + + + + T O M S A W Y E R + + + +CHAPTER I + +"TOM!" + +No answer. + +"TOM!" + +No answer. + +"What's gone with that boy, I wonder? You TOM!" + +No answer. + +The old lady pulled her spectacles down and looked over them about the +room; then she put them up and looked out under them. She seldom or +never looked THROUGH them for so small a thing as a boy; they were her +state pair, the pride of her heart, and were built for "style," not +service--she could have seen through a pair of stove-lids just as well. 
+She looked perplexed for a moment, and then said, not fiercely, but +still loud enough for the furniture to hear: + +"Well, I lay if I get hold of you I'll--" + +She did not finish, for by this time she was bending down and punching +under the bed with the broom, and so she needed breath to punctuate the +punches with. She resurrected nothing but the cat. + +"I never did see the beat of that boy!" + +She went to the open door and stood in it and looked out among the +tomato vines and "jimpson" weeds that constituted the garden. No Tom. +So she lifted up her voice at an angle calculated for distance and +shouted: + +"Y-o-u-u TOM!" + +There was a slight noise behind her and she turned just in time to +seize a small boy by the slack of his roundabout and arrest his flight. + +"There! I might 'a' thought of that closet. What you been doing in +there?" + +"Nothing." + +"Nothing! Look at your hands. And look at your mouth. What IS that +truck?" + +"I don't know, aunt." + +"Well, I know. It's jam--that's what it is. Forty times I've said if +you didn't let that jam alone I'd skin you. Hand me that switch." + +The switch hovered in the air--the peril was desperate-- + +"My! Look behind you, aunt!" + +The old lady whirled round, and snatched her skirts out of danger. The +lad fled on the instant, scrambled up the high board-fence, and +disappeared over it. + +His aunt Polly stood surprised a moment, and then broke into a gentle +laugh. + +"Hang the boy, can't I never learn anything? Ain't he played me tricks +enough like that for me to be looking out for him by this time? But old +fools is the biggest fools there is. Can't learn an old dog new tricks, +as the saying is. But my goodness, he never plays them alike, two days, +and how is a body to know what's coming? He 'pears to know just how +long he can torment me before I get my dander up, and he knows if he +can make out to put me off for a minute or make me laugh, it's all down +again and I can't hit him a lick. I ain't doing my duty by that boy, +and that's the Lord's truth, goodness knows. Spare the rod and spile +the child, as the Good Book says. I'm a laying up sin and suffering for +us both, I know. He's full of the Old Scratch, but laws-a-me! he's my +own dead sister's boy, poor thing, and I ain't got the heart to lash +him, somehow. Every time I let him off, my conscience does hurt me so, +and every time I hit him my old heart most breaks. Well-a-well, man +that is born of woman is of few days and full of trouble, as the +Scripture says, and I reckon it's so. He'll play hookey this evening, * +and [* Southwestern for "afternoon"] I'll just be obleeged to make him +work, to-morrow, to punish him. It's mighty hard to make him work +Saturdays, when all the boys is having holiday, but he hates work more +than he hates anything else, and I've GOT to do some of my duty by him, +or I'll be the ruination of the child." + +Tom did play hookey, and he had a very good time. He got back home +barely in season to help Jim, the small colored boy, saw next-day's +wood and split the kindlings before supper--at least he was there in +time to tell his adventures to Jim while Jim did three-fourths of the +work. Tom's younger brother (or rather half-brother) Sid was already +through with his part of the work (picking up chips), for he was a +quiet boy, and had no adventurous, troublesome ways. 
+ +While Tom was eating his supper, and stealing sugar as opportunity +offered, Aunt Polly asked him questions that were full of guile, and +very deep--for she wanted to trap him into damaging revealments. Like +many other simple-hearted souls, it was her pet vanity to believe she +was endowed with a talent for dark and mysterious diplomacy, and she +loved to contemplate her most transparent devices as marvels of low +cunning. Said she: + +"Tom, it was middling warm in school, warn't it?" + +"Yes'm." + +"Powerful warm, warn't it?" + +"Yes'm." + +"Didn't you want to go in a-swimming, Tom?" + +A bit of a scare shot through Tom--a touch of uncomfortable suspicion. +He searched Aunt Polly's face, but it told him nothing. So he said: + +"No'm--well, not very much." + +The old lady reached out her hand and felt Tom's shirt, and said: + +"But you ain't too warm now, though." And it flattered her to reflect +that she had discovered that the shirt was dry without anybody knowing +that that was what she had in her mind. But in spite of her, Tom knew +where the wind lay, now. So he forestalled what might be the next move: + +"Some of us pumped on our heads--mine's damp yet. See?" + +Aunt Polly was vexed to think she had overlooked that bit of +circumstantial evidence, and missed a trick. Then she had a new +inspiration: + +"Tom, you didn't have to undo your shirt collar where I sewed it, to +pump on your head, did you? Unbutton your jacket!" + +The trouble vanished out of Tom's face. He opened his jacket. His +shirt collar was securely sewed. + +"Bother! Well, go 'long with you. I'd made sure you'd played hookey +and been a-swimming. But I forgive ye, Tom. I reckon you're a kind of a +singed cat, as the saying is--better'n you look. THIS time." + +She was half sorry her sagacity had miscarried, and half glad that Tom +had stumbled into obedient conduct for once. + +But Sidney said: + +"Well, now, if I didn't think you sewed his collar with white thread, +but it's black." + +"Why, I did sew it with white! Tom!" + +But Tom did not wait for the rest. As he went out at the door he said: + +"Siddy, I'll lick you for that." + +In a safe place Tom examined two large needles which were thrust into +the lapels of his jacket, and had thread bound about them--one needle +carried white thread and the other black. He said: + +"She'd never noticed if it hadn't been for Sid. Confound it! sometimes +she sews it with white, and sometimes she sews it with black. I wish to +geeminy she'd stick to one or t'other--I can't keep the run of 'em. But +I bet you I'll lam Sid for that. I'll learn him!" + +He was not the Model Boy of the village. He knew the model boy very +well though--and loathed him. + +Within two minutes, or even less, he had forgotten all his troubles. +Not because his troubles were one whit less heavy and bitter to him +than a man's are to a man, but because a new and powerful interest bore +them down and drove them out of his mind for the time--just as men's +misfortunes are forgotten in the excitement of new enterprises. This +new interest was a valued novelty in whistling, which he had just +acquired from a negro, and he was suffering to practise it undisturbed. +It consisted in a peculiar bird-like turn, a sort of liquid warble, +produced by touching the tongue to the roof of the mouth at short +intervals in the midst of the music--the reader probably remembers how +to do it, if he has ever been a boy. 
Diligence and attention soon gave +him the knack of it, and he strode down the street with his mouth full +of harmony and his soul full of gratitude. He felt much as an +astronomer feels who has discovered a new planet--no doubt, as far as +strong, deep, unalloyed pleasure is concerned, the advantage was with +the boy, not the astronomer. + +The summer evenings were long. It was not dark, yet. Presently Tom +checked his whistle. A stranger was before him--a boy a shade larger +than himself. A new-comer of any age or either sex was an impressive +curiosity in the poor little shabby village of St. Petersburg. This boy +was well dressed, too--well dressed on a week-day. This was simply +astounding. His cap was a dainty thing, his close-buttoned blue cloth +roundabout was new and natty, and so were his pantaloons. He had shoes +on--and it was only Friday. He even wore a necktie, a bright bit of +ribbon. He had a citified air about him that ate into Tom's vitals. The +more Tom stared at the splendid marvel, the higher he turned up his +nose at his finery and the shabbier and shabbier his own outfit seemed +to him to grow. Neither boy spoke. If one moved, the other moved--but +only sidewise, in a circle; they kept face to face and eye to eye all +the time. Finally Tom said: + +"I can lick you!" + +"I'd like to see you try it." + +"Well, I can do it." + +"No you can't, either." + +"Yes I can." + +"No you can't." + +"I can." + +"You can't." + +"Can!" + +"Can't!" + +An uncomfortable pause. Then Tom said: + +"What's your name?" + +"'Tisn't any of your business, maybe." + +"Well I 'low I'll MAKE it my business." + +"Well why don't you?" + +"If you say much, I will." + +"Much--much--MUCH. There now." + +"Oh, you think you're mighty smart, DON'T you? I could lick you with +one hand tied behind me, if I wanted to." + +"Well why don't you DO it? You SAY you can do it." + +"Well I WILL, if you fool with me." + +"Oh yes--I've seen whole families in the same fix." + +"Smarty! You think you're SOME, now, DON'T you? Oh, what a hat!" + +"You can lump that hat if you don't like it. I dare you to knock it +off--and anybody that'll take a dare will suck eggs." + +"You're a liar!" + +"You're another." + +"You're a fighting liar and dasn't take it up." + +"Aw--take a walk!" + +"Say--if you give me much more of your sass I'll take and bounce a +rock off'n your head." + +"Oh, of COURSE you will." + +"Well I WILL." + +"Well why don't you DO it then? What do you keep SAYING you will for? +Why don't you DO it? It's because you're afraid." + +"I AIN'T afraid." + +"You are." + +"I ain't." + +"You are." + +Another pause, and more eying and sidling around each other. Presently +they were shoulder to shoulder. Tom said: + +"Get away from here!" + +"Go away yourself!" + +"I won't." + +"I won't either." + +So they stood, each with a foot placed at an angle as a brace, and +both shoving with might and main, and glowering at each other with +hate. But neither could get an advantage. After struggling till both +were hot and flushed, each relaxed his strain with watchful caution, +and Tom said: + +"You're a coward and a pup. I'll tell my big brother on you, and he +can thrash you with his little finger, and I'll make him do it, too." + +"What do I care for your big brother? I've got a brother that's bigger +than he is--and what's more, he can throw him over that fence, too." +[Both brothers were imaginary.] + +"That's a lie." + +"YOUR saying so don't make it so." 
+ +Tom drew a line in the dust with his big toe, and said: + +"I dare you to step over that, and I'll lick you till you can't stand +up. Anybody that'll take a dare will steal sheep." + +The new boy stepped over promptly, and said: + +"Now you said you'd do it, now let's see you do it." + +"Don't you crowd me now; you better look out." + +"Well, you SAID you'd do it--why don't you do it?" + +"By jingo! for two cents I WILL do it." + +The new boy took two broad coppers out of his pocket and held them out +with derision. Tom struck them to the ground. In an instant both boys +were rolling and tumbling in the dirt, gripped together like cats; and +for the space of a minute they tugged and tore at each other's hair and +clothes, punched and scratched each other's nose, and covered +themselves with dust and glory. Presently the confusion took form, and +through the fog of battle Tom appeared, seated astride the new boy, and +pounding him with his fists. "Holler 'nuff!" said he. + +The boy only struggled to free himself. He was crying--mainly from rage. + +"Holler 'nuff!"--and the pounding went on. + +At last the stranger got out a smothered "'Nuff!" and Tom let him up +and said: + +"Now that'll learn you. Better look out who you're fooling with next +time." + +The new boy went off brushing the dust from his clothes, sobbing, +snuffling, and occasionally looking back and shaking his head and +threatening what he would do to Tom the "next time he caught him out." +To which Tom responded with jeers, and started off in high feather, and +as soon as his back was turned the new boy snatched up a stone, threw +it and hit him between the shoulders and then turned tail and ran like +an antelope. Tom chased the traitor home, and thus found out where he +lived. He then held a position at the gate for some time, daring the +enemy to come outside, but the enemy only made faces at him through the +window and declined. At last the enemy's mother appeared, and called +Tom a bad, vicious, vulgar child, and ordered him away. So he went +away; but he said he "'lowed" to "lay" for that boy. + +He got home pretty late that night, and when he climbed cautiously in +at the window, he uncovered an ambuscade, in the person of his aunt; +and when she saw the state his clothes were in her resolution to turn +his Saturday holiday into captivity at hard labor became adamantine in +its firmness. + + + +CHAPTER II + +SATURDAY morning was come, and all the summer world was bright and +fresh, and brimming with life. There was a song in every heart; and if +the heart was young the music issued at the lips. There was cheer in +every face and a spring in every step. The locust-trees were in bloom +and the fragrance of the blossoms filled the air. Cardiff Hill, beyond +the village and above it, was green with vegetation and it lay just far +enough away to seem a Delectable Land, dreamy, reposeful, and inviting. + +Tom appeared on the sidewalk with a bucket of whitewash and a +long-handled brush. He surveyed the fence, and all gladness left him and +a deep melancholy settled down upon his spirit. Thirty yards of board +fence nine feet high. Life to him seemed hollow, and existence but a +burden. Sighing, he dipped his brush and passed it along the topmost +plank; repeated the operation; did it again; compared the insignificant +whitewashed streak with the far-reaching continent of unwhitewashed +fence, and sat down on a tree-box discouraged. Jim came skipping out at +the gate with a tin pail, and singing Buffalo Gals. 
Bringing water from +the town pump had always been hateful work in Tom's eyes, before, but +now it did not strike him so. He remembered that there was company at +the pump. White, mulatto, and negro boys and girls were always there +waiting their turns, resting, trading playthings, quarrelling, +fighting, skylarking. And he remembered that although the pump was only +a hundred and fifty yards off, Jim never got back with a bucket of +water under an hour--and even then somebody generally had to go after +him. Tom said: + +"Say, Jim, I'll fetch the water if you'll whitewash some." + +Jim shook his head and said: + +"Can't, Mars Tom. Ole missis, she tole me I got to go an' git dis +water an' not stop foolin' roun' wid anybody. She say she spec' Mars +Tom gwine to ax me to whitewash, an' so she tole me go 'long an' 'tend +to my own business--she 'lowed SHE'D 'tend to de whitewashin'." + +"Oh, never you mind what she said, Jim. That's the way she always +talks. Gimme the bucket--I won't be gone only a a minute. SHE won't +ever know." + +"Oh, I dasn't, Mars Tom. Ole missis she'd take an' tar de head off'n +me. 'Deed she would." + +"SHE! She never licks anybody--whacks 'em over the head with her +thimble--and who cares for that, I'd like to know. She talks awful, but +talk don't hurt--anyways it don't if she don't cry. Jim, I'll give you +a marvel. I'll give you a white alley!" + +Jim began to waver. + +"White alley, Jim! And it's a bully taw." + +"My! Dat's a mighty gay marvel, I tell you! But Mars Tom I's powerful +'fraid ole missis--" + +"And besides, if you will I'll show you my sore toe." + +Jim was only human--this attraction was too much for him. He put down +his pail, took the white alley, and bent over the toe with absorbing +interest while the bandage was being unwound. In another moment he was +flying down the street with his pail and a tingling rear, Tom was +whitewashing with vigor, and Aunt Polly was retiring from the field +with a slipper in her hand and triumph in her eye. + +But Tom's energy did not last. He began to think of the fun he had +planned for this day, and his sorrows multiplied. Soon the free boys +would come tripping along on all sorts of delicious expeditions, and +they would make a world of fun of him for having to work--the very +thought of it burnt him like fire. He got out his worldly wealth and +examined it--bits of toys, marbles, and trash; enough to buy an +exchange of WORK, maybe, but not half enough to buy so much as half an +hour of pure freedom. So he returned his straitened means to his +pocket, and gave up the idea of trying to buy the boys. At this dark +and hopeless moment an inspiration burst upon him! Nothing less than a +great, magnificent inspiration. + +He took up his brush and went tranquilly to work. Ben Rogers hove in +sight presently--the very boy, of all boys, whose ridicule he had been +dreading. Ben's gait was the hop-skip-and-jump--proof enough that his +heart was light and his anticipations high. He was eating an apple, and +giving a long, melodious whoop, at intervals, followed by a deep-toned +ding-dong-dong, ding-dong-dong, for he was personating a steamboat. As +he drew near, he slackened speed, took the middle of the street, leaned +far over to starboard and rounded to ponderously and with laborious +pomp and circumstance--for he was personating the Big Missouri, and +considered himself to be drawing nine feet of water. 
He was boat and +captain and engine-bells combined, so he had to imagine himself +standing on his own hurricane-deck giving the orders and executing them: + +"Stop her, sir! Ting-a-ling-ling!" The headway ran almost out, and he +drew up slowly toward the sidewalk. + +"Ship up to back! Ting-a-ling-ling!" His arms straightened and +stiffened down his sides. + +"Set her back on the stabboard! Ting-a-ling-ling! Chow! ch-chow-wow! +Chow!" His right hand, meantime, describing stately circles--for it was +representing a forty-foot wheel. + +"Let her go back on the labboard! Ting-a-lingling! Chow-ch-chow-chow!" +The left hand began to describe circles. + +"Stop the stabboard! Ting-a-ling-ling! Stop the labboard! Come ahead +on the stabboard! Stop her! Let your outside turn over slow! +Ting-a-ling-ling! Chow-ow-ow! Get out that head-line! LIVELY now! +Come--out with your spring-line--what're you about there! Take a turn +round that stump with the bight of it! Stand by that stage, now--let her +go! Done with the engines, sir! Ting-a-ling-ling! SH'T! S'H'T! SH'T!" +(trying the gauge-cocks). + +Tom went on whitewashing--paid no attention to the steamboat. Ben +stared a moment and then said: "Hi-YI! YOU'RE up a stump, ain't you!" + +No answer. Tom surveyed his last touch with the eye of an artist, then +he gave his brush another gentle sweep and surveyed the result, as +before. Ben ranged up alongside of him. Tom's mouth watered for the +apple, but he stuck to his work. Ben said: + +"Hello, old chap, you got to work, hey?" + +Tom wheeled suddenly and said: + +"Why, it's you, Ben! I warn't noticing." + +"Say--I'm going in a-swimming, I am. Don't you wish you could? But of +course you'd druther WORK--wouldn't you? Course you would!" + +Tom contemplated the boy a bit, and said: + +"What do you call work?" + +"Why, ain't THAT work?" + +Tom resumed his whitewashing, and answered carelessly: + +"Well, maybe it is, and maybe it ain't. All I know, is, it suits Tom +Sawyer." + +"Oh come, now, you don't mean to let on that you LIKE it?" + +The brush continued to move. + +"Like it? Well, I don't see why I oughtn't to like it. Does a boy get +a chance to whitewash a fence every day?" + +That put the thing in a new light. Ben stopped nibbling his apple. Tom +swept his brush daintily back and forth--stepped back to note the +effect--added a touch here and there--criticised the effect again--Ben +watching every move and getting more and more interested, more and more +absorbed. Presently he said: + +"Say, Tom, let ME whitewash a little." + +Tom considered, was about to consent; but he altered his mind: + +"No--no--I reckon it wouldn't hardly do, Ben. You see, Aunt Polly's +awful particular about this fence--right here on the street, you know +--but if it was the back fence I wouldn't mind and SHE wouldn't. Yes, +she's awful particular about this fence; it's got to be done very +careful; I reckon there ain't one boy in a thousand, maybe two +thousand, that can do it the way it's got to be done." + +"No--is that so? Oh come, now--lemme just try. Only just a little--I'd +let YOU, if you was me, Tom." + +"Ben, I'd like to, honest injun; but Aunt Polly--well, Jim wanted to +do it, but she wouldn't let him; Sid wanted to do it, and she wouldn't +let Sid. Now don't you see how I'm fixed? If you was to tackle this +fence and anything was to happen to it--" + +"Oh, shucks, I'll be just as careful. Now lemme try. Say--I'll give +you the core of my apple." + +"Well, here--No, Ben, now don't. 
I'm afeard--" + +"I'll give you ALL of it!" + +Tom gave up the brush with reluctance in his face, but alacrity in his +heart. And while the late steamer Big Missouri worked and sweated in +the sun, the retired artist sat on a barrel in the shade close by, +dangled his legs, munched his apple, and planned the slaughter of more +innocents. There was no lack of material; boys happened along every +little while; they came to jeer, but remained to whitewash. By the time +Ben was fagged out, Tom had traded the next chance to Billy Fisher for +a kite, in good repair; and when he played out, Johnny Miller bought in +for a dead rat and a string to swing it with--and so on, and so on, +hour after hour. And when the middle of the afternoon came, from being +a poor poverty-stricken boy in the morning, Tom was literally rolling +in wealth. He had besides the things before mentioned, twelve marbles, +part of a jews-harp, a piece of blue bottle-glass to look through, a +spool cannon, a key that wouldn't unlock anything, a fragment of chalk, +a glass stopper of a decanter, a tin soldier, a couple of tadpoles, six +fire-crackers, a kitten with only one eye, a brass doorknob, a +dog-collar--but no dog--the handle of a knife, four pieces of +orange-peel, and a dilapidated old window sash. + +He had had a nice, good, idle time all the while--plenty of company +--and the fence had three coats of whitewash on it! If he hadn't run out +of whitewash he would have bankrupted every boy in the village. + +Tom said to himself that it was not such a hollow world, after all. He +had discovered a great law of human action, without knowing it--namely, +that in order to make a man or a boy covet a thing, it is only +necessary to make the thing difficult to attain. If he had been a great +and wise philosopher, like the writer of this book, he would now have +comprehended that Work consists of whatever a body is OBLIGED to do, +and that Play consists of whatever a body is not obliged to do. And +this would help him to understand why constructing artificial flowers +or performing on a tread-mill is work, while rolling ten-pins or +climbing Mont Blanc is only amusement. There are wealthy gentlemen in +England who drive four-horse passenger-coaches twenty or thirty miles +on a daily line, in the summer, because the privilege costs them +considerable money; but if they were offered wages for the service, +that would turn it into work and then they would resign. + +The boy mused awhile over the substantial change which had taken place +in his worldly circumstances, and then wended toward headquarters to +report. + + + +CHAPTER III + +TOM presented himself before Aunt Polly, who was sitting by an open +window in a pleasant rearward apartment, which was bedroom, +breakfast-room, dining-room, and library, combined. The balmy summer +air, the restful quiet, the odor of the flowers, and the drowsing murmur +of the bees had had their effect, and she was nodding over her knitting +--for she had no company but the cat, and it was asleep in her lap. Her +spectacles were propped up on her gray head for safety. She had thought +that of course Tom had deserted long ago, and she wondered at seeing him +place himself in her power again in this intrepid way. He said: "Mayn't +I go and play now, aunt?" + +"What, a'ready? How much have you done?" + +"It's all done, aunt." + +"Tom, don't lie to me--I can't bear it." + +"I ain't, aunt; it IS all done." + +Aunt Polly placed small trust in such evidence. 
She went out to see +for herself; and she would have been content to find twenty per cent. +of Tom's statement true. When she found the entire fence whitewashed, +and not only whitewashed but elaborately coated and recoated, and even +a streak added to the ground, her astonishment was almost unspeakable. +She said: + +"Well, I never! There's no getting round it, you can work when you're +a mind to, Tom." And then she diluted the compliment by adding, "But +it's powerful seldom you're a mind to, I'm bound to say. Well, go 'long +and play; but mind you get back some time in a week, or I'll tan you." + +She was so overcome by the splendor of his achievement that she took +him into the closet and selected a choice apple and delivered it to +him, along with an improving lecture upon the added value and flavor a +treat took to itself when it came without sin through virtuous effort. +And while she closed with a happy Scriptural flourish, he "hooked" a +doughnut. + +Then he skipped out, and saw Sid just starting up the outside stairway +that led to the back rooms on the second floor. Clods were handy and +the air was full of them in a twinkling. They raged around Sid like a +hail-storm; and before Aunt Polly could collect her surprised faculties +and sally to the rescue, six or seven clods had taken personal effect, +and Tom was over the fence and gone. There was a gate, but as a general +thing he was too crowded for time to make use of it. His soul was at +peace, now that he had settled with Sid for calling attention to his +black thread and getting him into trouble. + +Tom skirted the block, and came round into a muddy alley that led by +the back of his aunt's cow-stable. He presently got safely beyond the +reach of capture and punishment, and hastened toward the public square +of the village, where two "military" companies of boys had met for +conflict, according to previous appointment. Tom was General of one of +these armies, Joe Harper (a bosom friend) General of the other. These +two great commanders did not condescend to fight in person--that being +better suited to the still smaller fry--but sat together on an eminence +and conducted the field operations by orders delivered through +aides-de-camp. Tom's army won a great victory, after a long and +hard-fought battle. Then the dead were counted, prisoners exchanged, +the terms of the next disagreement agreed upon, and the day for the +necessary battle appointed; after which the armies fell into line and +marched away, and Tom turned homeward alone. + +As he was passing by the house where Jeff Thatcher lived, he saw a new +girl in the garden--a lovely little blue-eyed creature with yellow hair +plaited into two long-tails, white summer frock and embroidered +pantalettes. The fresh-crowned hero fell without firing a shot. A +certain Amy Lawrence vanished out of his heart and left not even a +memory of herself behind. He had thought he loved her to distraction; +he had regarded his passion as adoration; and behold it was only a poor +little evanescent partiality. He had been months winning her; she had +confessed hardly a week ago; he had been the happiest and the proudest +boy in the world only seven short days, and here in one instant of time +she had gone out of his heart like a casual stranger whose visit is +done. 
+ +He worshipped this new angel with furtive eye, till he saw that she +had discovered him; then he pretended he did not know she was present, +and began to "show off" in all sorts of absurd boyish ways, in order to +win her admiration. He kept up this grotesque foolishness for some +time; but by-and-by, while he was in the midst of some dangerous +gymnastic performances, he glanced aside and saw that the little girl +was wending her way toward the house. Tom came up to the fence and +leaned on it, grieving, and hoping she would tarry yet awhile longer. +She halted a moment on the steps and then moved toward the door. Tom +heaved a great sigh as she put her foot on the threshold. But his face +lit up, right away, for she tossed a pansy over the fence a moment +before she disappeared. + +The boy ran around and stopped within a foot or two of the flower, and +then shaded his eyes with his hand and began to look down street as if +he had discovered something of interest going on in that direction. +Presently he picked up a straw and began trying to balance it on his +nose, with his head tilted far back; and as he moved from side to side, +in his efforts, he edged nearer and nearer toward the pansy; finally +his bare foot rested upon it, his pliant toes closed upon it, and he +hopped away with the treasure and disappeared round the corner. But +only for a minute--only while he could button the flower inside his +jacket, next his heart--or next his stomach, possibly, for he was not +much posted in anatomy, and not hypercritical, anyway. + +He returned, now, and hung about the fence till nightfall, "showing +off," as before; but the girl never exhibited herself again, though Tom +comforted himself a little with the hope that she had been near some +window, meantime, and been aware of his attentions. Finally he strode +home reluctantly, with his poor head full of visions. + +All through supper his spirits were so high that his aunt wondered +"what had got into the child." He took a good scolding about clodding +Sid, and did not seem to mind it in the least. He tried to steal sugar +under his aunt's very nose, and got his knuckles rapped for it. He said: + +"Aunt, you don't whack Sid when he takes it." + +"Well, Sid don't torment a body the way you do. You'd be always into +that sugar if I warn't watching you." + +Presently she stepped into the kitchen, and Sid, happy in his +immunity, reached for the sugar-bowl--a sort of glorying over Tom which +was wellnigh unbearable. But Sid's fingers slipped and the bowl dropped +and broke. Tom was in ecstasies. In such ecstasies that he even +controlled his tongue and was silent. He said to himself that he would +not speak a word, even when his aunt came in, but would sit perfectly +still till she asked who did the mischief; and then he would tell, and +there would be nothing so good in the world as to see that pet model +"catch it." He was so brimful of exultation that he could hardly hold +himself when the old lady came back and stood above the wreck +discharging lightnings of wrath from over her spectacles. He said to +himself, "Now it's coming!" And the next instant he was sprawling on +the floor! The potent palm was uplifted to strike again when Tom cried +out: + +"Hold on, now, what 'er you belting ME for?--Sid broke it!" + +Aunt Polly paused, perplexed, and Tom looked for healing pity. But +when she got her tongue again, she only said: + +"Umf! Well, you didn't get a lick amiss, I reckon. 
You been into some +other audacious mischief when I wasn't around, like enough." + +Then her conscience reproached her, and she yearned to say something +kind and loving; but she judged that this would be construed into a +confession that she had been in the wrong, and discipline forbade that. +So she kept silence, and went about her affairs with a troubled heart. +Tom sulked in a corner and exalted his woes. He knew that in her heart +his aunt was on her knees to him, and he was morosely gratified by the +consciousness of it. He would hang out no signals, he would take notice +of none. He knew that a yearning glance fell upon him, now and then, +through a film of tears, but he refused recognition of it. He pictured +himself lying sick unto death and his aunt bending over him beseeching +one little forgiving word, but he would turn his face to the wall, and +die with that word unsaid. Ah, how would she feel then? And he pictured +himself brought home from the river, dead, with his curls all wet, and +his sore heart at rest. How she would throw herself upon him, and how +her tears would fall like rain, and her lips pray God to give her back +her boy and she would never, never abuse him any more! But he would lie +there cold and white and make no sign--a poor little sufferer, whose +griefs were at an end. He so worked upon his feelings with the pathos +of these dreams, that he had to keep swallowing, he was so like to +choke; and his eyes swam in a blur of water, which overflowed when he +winked, and ran down and trickled from the end of his nose. And such a +luxury to him was this petting of his sorrows, that he could not bear +to have any worldly cheeriness or any grating delight intrude upon it; +it was too sacred for such contact; and so, presently, when his cousin +Mary danced in, all alive with the joy of seeing home again after an +age-long visit of one week to the country, he got up and moved in +clouds and darkness out at one door as she brought song and sunshine in +at the other. + +He wandered far from the accustomed haunts of boys, and sought +desolate places that were in harmony with his spirit. A log raft in the +river invited him, and he seated himself on its outer edge and +contemplated the dreary vastness of the stream, wishing, the while, +that he could only be drowned, all at once and unconsciously, without +undergoing the uncomfortable routine devised by nature. Then he thought +of his flower. He got it out, rumpled and wilted, and it mightily +increased his dismal felicity. He wondered if she would pity him if she +knew? Would she cry, and wish that she had a right to put her arms +around his neck and comfort him? Or would she turn coldly away like all +the hollow world? This picture brought such an agony of pleasurable +suffering that he worked it over and over again in his mind and set it +up in new and varied lights, till he wore it threadbare. At last he +rose up sighing and departed in the darkness. + +About half-past nine or ten o'clock he came along the deserted street +to where the Adored Unknown lived; he paused a moment; no sound fell +upon his listening ear; a candle was casting a dull glow upon the +curtain of a second-story window. Was the sacred presence there? 
He +climbed the fence, threaded his stealthy way through the plants, till +he stood under that window; he looked up at it long, and with emotion; +then he laid him down on the ground under it, disposing himself upon +his back, with his hands clasped upon his breast and holding his poor +wilted flower. And thus he would die--out in the cold world, with no +shelter over his homeless head, no friendly hand to wipe the +death-damps from his brow, no loving face to bend pityingly over him +when the great agony came. And thus SHE would see him when she looked +out upon the glad morning, and oh! would she drop one little tear upon +his poor, lifeless form, would she heave one little sigh to see a bright +young life so rudely blighted, so untimely cut down? + +The window went up, a maid-servant's discordant voice profaned the +holy calm, and a deluge of water drenched the prone martyr's remains! + +The strangling hero sprang up with a relieving snort. There was a whiz +as of a missile in the air, mingled with the murmur of a curse, a sound +as of shivering glass followed, and a small, vague form went over the +fence and shot away in the gloom. + +Not long after, as Tom, all undressed for bed, was surveying his +drenched garments by the light of a tallow dip, Sid woke up; but if he +had any dim idea of making any "references to allusions," he thought +better of it and held his peace, for there was danger in Tom's eye. + +Tom turned in without the added vexation of prayers, and Sid made +mental note of the omission. + + + +CHAPTER IV + +THE sun rose upon a tranquil world, and beamed down upon the peaceful +village like a benediction. Breakfast over, Aunt Polly had family +worship: it began with a prayer built from the ground up of solid +courses of Scriptural quotations, welded together with a thin mortar of +originality; and from the summit of this she delivered a grim chapter +of the Mosaic Law, as from Sinai. + +Then Tom girded up his loins, so to speak, and went to work to "get +his verses." Sid had learned his lesson days before. Tom bent all his +energies to the memorizing of five verses, and he chose part of the +Sermon on the Mount, because he could find no verses that were shorter. +At the end of half an hour Tom had a vague general idea of his lesson, +but no more, for his mind was traversing the whole field of human +thought, and his hands were busy with distracting recreations. Mary +took his book to hear him recite, and he tried to find his way through +the fog: + +"Blessed are the--a--a--" + +"Poor"-- + +"Yes--poor; blessed are the poor--a--a--" + +"In spirit--" + +"In spirit; blessed are the poor in spirit, for they--they--" + +"THEIRS--" + +"For THEIRS. Blessed are the poor in spirit, for theirs is the kingdom +of heaven. Blessed are they that mourn, for they--they--" + +"Sh--" + +"For they--a--" + +"S, H, A--" + +"For they S, H--Oh, I don't know what it is!" + +"SHALL!" + +"Oh, SHALL! for they shall--for they shall--a--a--shall mourn--a--a-- +blessed are they that shall--they that--a--they that shall mourn, for +they shall--a--shall WHAT? Why don't you tell me, Mary?--what do you +want to be so mean for?" + +"Oh, Tom, you poor thick-headed thing, I'm not teasing you. I wouldn't +do that. You must go and learn it again. Don't you be discouraged, Tom, +you'll manage it--and if you do, I'll give you something ever so nice. +There, now, that's a good boy." + +"All right! What is it, Mary, tell me what it is." + +"Never you mind, Tom. You know if I say it's nice, it is nice." 
+ +"You bet you that's so, Mary. All right, I'll tackle it again." + +And he did "tackle it again"--and under the double pressure of +curiosity and prospective gain he did it with such spirit that he +accomplished a shining success. Mary gave him a brand-new "Barlow" +knife worth twelve and a half cents; and the convulsion of delight that +swept his system shook him to his foundations. True, the knife would +not cut anything, but it was a "sure-enough" Barlow, and there was +inconceivable grandeur in that--though where the Western boys ever got +the idea that such a weapon could possibly be counterfeited to its +injury is an imposing mystery and will always remain so, perhaps. Tom +contrived to scarify the cupboard with it, and was arranging to begin +on the bureau, when he was called off to dress for Sunday-school. + +Mary gave him a tin basin of water and a piece of soap, and he went +outside the door and set the basin on a little bench there; then he +dipped the soap in the water and laid it down; turned up his sleeves; +poured out the water on the ground, gently, and then entered the +kitchen and began to wipe his face diligently on the towel behind the +door. But Mary removed the towel and said: + +"Now ain't you ashamed, Tom. You mustn't be so bad. Water won't hurt +you." + +Tom was a trifle disconcerted. The basin was refilled, and this time +he stood over it a little while, gathering resolution; took in a big +breath and began. When he entered the kitchen presently, with both eyes +shut and groping for the towel with his hands, an honorable testimony +of suds and water was dripping from his face. But when he emerged from +the towel, he was not yet satisfactory, for the clean territory stopped +short at his chin and his jaws, like a mask; below and beyond this line +there was a dark expanse of unirrigated soil that spread downward in +front and backward around his neck. Mary took him in hand, and when she +was done with him he was a man and a brother, without distinction of +color, and his saturated hair was neatly brushed, and its short curls +wrought into a dainty and symmetrical general effect. [He privately +smoothed out the curls, with labor and difficulty, and plastered his +hair close down to his head; for he held curls to be effeminate, and +his own filled his life with bitterness.] Then Mary got out a suit of +his clothing that had been used only on Sundays during two years--they +were simply called his "other clothes"--and so by that we know the +size of his wardrobe. The girl "put him to rights" after he had dressed +himself; she buttoned his neat roundabout up to his chin, turned his +vast shirt collar down over his shoulders, brushed him off and crowned +him with his speckled straw hat. He now looked exceedingly improved and +uncomfortable. He was fully as uncomfortable as he looked; for there +was a restraint about whole clothes and cleanliness that galled him. He +hoped that Mary would forget his shoes, but the hope was blighted; she +coated them thoroughly with tallow, as was the custom, and brought them +out. He lost his temper and said he was always being made to do +everything he didn't want to do. But Mary said, persuasively: + +"Please, Tom--that's a good boy." + +So he got into the shoes snarling. Mary was soon ready, and the three +children set out for Sunday-school--a place that Tom hated with his +whole heart; but Sid and Mary were fond of it. + +Sabbath-school hours were from nine to half-past ten; and then church +service. 
Two of the children always remained for the sermon +voluntarily, and the other always remained too--for stronger reasons. +The church's high-backed, uncushioned pews would seat about three +hundred persons; the edifice was but a small, plain affair, with a sort +of pine board tree-box on top of it for a steeple. At the door Tom +dropped back a step and accosted a Sunday-dressed comrade: + +"Say, Billy, got a yaller ticket?" + +"Yes." + +"What'll you take for her?" + +"What'll you give?" + +"Piece of lickrish and a fish-hook." + +"Less see 'em." + +Tom exhibited. They were satisfactory, and the property changed hands. +Then Tom traded a couple of white alleys for three red tickets, and +some small trifle or other for a couple of blue ones. He waylaid other +boys as they came, and went on buying tickets of various colors ten or +fifteen minutes longer. He entered the church, now, with a swarm of +clean and noisy boys and girls, proceeded to his seat and started a +quarrel with the first boy that came handy. The teacher, a grave, +elderly man, interfered; then turned his back a moment and Tom pulled a +boy's hair in the next bench, and was absorbed in his book when the boy +turned around; stuck a pin in another boy, presently, in order to hear +him say "Ouch!" and got a new reprimand from his teacher. Tom's whole +class were of a pattern--restless, noisy, and troublesome. When they +came to recite their lessons, not one of them knew his verses +perfectly, but had to be prompted all along. However, they worried +through, and each got his reward--in small blue tickets, each with a +passage of Scripture on it; each blue ticket was pay for two verses of +the recitation. Ten blue tickets equalled a red one, and could be +exchanged for it; ten red tickets equalled a yellow one; for ten yellow +tickets the superintendent gave a very plainly bound Bible (worth forty +cents in those easy times) to the pupil. How many of my readers would +have the industry and application to memorize two thousand verses, even +for a Dore Bible? And yet Mary had acquired two Bibles in this way--it +was the patient work of two years--and a boy of German parentage had +won four or five. He once recited three thousand verses without +stopping; but the strain upon his mental faculties was too great, and +he was little better than an idiot from that day forth--a grievous +misfortune for the school, for on great occasions, before company, the +superintendent (as Tom expressed it) had always made this boy come out +and "spread himself." Only the older pupils managed to keep their +tickets and stick to their tedious work long enough to get a Bible, and +so the delivery of one of these prizes was a rare and noteworthy +circumstance; the successful pupil was so great and conspicuous for +that day that on the spot every scholar's heart was fired with a fresh +ambition that often lasted a couple of weeks. It is possible that Tom's +mental stomach had never really hungered for one of those prizes, but +unquestionably his entire being had for many a day longed for the glory +and the eclat that came with it. + +In due course the superintendent stood up in front of the pulpit, with +a closed hymn-book in his hand and his forefinger inserted between its +leaves, and commanded attention. 
When a Sunday-school superintendent +makes his customary little speech, a hymn-book in the hand is as +necessary as is the inevitable sheet of music in the hand of a singer +who stands forward on the platform and sings a solo at a concert +--though why, is a mystery: for neither the hymn-book nor the sheet of +music is ever referred to by the sufferer. This superintendent was a +slim creature of thirty-five, with a sandy goatee and short sandy hair; +he wore a stiff standing-collar whose upper edge almost reached his +ears and whose sharp points curved forward abreast the corners of his +mouth--a fence that compelled a straight lookout ahead, and a turning +of the whole body when a side view was required; his chin was propped +on a spreading cravat which was as broad and as long as a bank-note, +and had fringed ends; his boot toes were turned sharply up, in the +fashion of the day, like sleigh-runners--an effect patiently and +laboriously produced by the young men by sitting with their toes +pressed against a wall for hours together. Mr. Walters was very earnest +of mien, and very sincere and honest at heart; and he held sacred +things and places in such reverence, and so separated them from worldly +matters, that unconsciously to himself his Sunday-school voice had +acquired a peculiar intonation which was wholly absent on week-days. He +began after this fashion: + +"Now, children, I want you all to sit up just as straight and pretty +as you can and give me all your attention for a minute or two. There +--that is it. That is the way good little boys and girls should do. I see +one little girl who is looking out of the window--I am afraid she +thinks I am out there somewhere--perhaps up in one of the trees making +a speech to the little birds. [Applausive titter.] I want to tell you +how good it makes me feel to see so many bright, clean little faces +assembled in a place like this, learning to do right and be good." And +so forth and so on. It is not necessary to set down the rest of the +oration. It was of a pattern which does not vary, and so it is familiar +to us all. + +The latter third of the speech was marred by the resumption of fights +and other recreations among certain of the bad boys, and by fidgetings +and whisperings that extended far and wide, washing even to the bases +of isolated and incorruptible rocks like Sid and Mary. But now every +sound ceased suddenly, with the subsidence of Mr. Walters' voice, and +the conclusion of the speech was received with a burst of silent +gratitude. + +A good part of the whispering had been occasioned by an event which +was more or less rare--the entrance of visitors: lawyer Thatcher, +accompanied by a very feeble and aged man; a fine, portly, middle-aged +gentleman with iron-gray hair; and a dignified lady who was doubtless +the latter's wife. The lady was leading a child. Tom had been restless +and full of chafings and repinings; conscience-smitten, too--he could +not meet Amy Lawrence's eye, he could not brook her loving gaze. But +when he saw this small new-comer his soul was all ablaze with bliss in +a moment. The next moment he was "showing off" with all his might +--cuffing boys, pulling hair, making faces--in a word, using every art +that seemed likely to fascinate a girl and win her applause. His +exaltation had but one alloy--the memory of his humiliation in this +angel's garden--and that record in sand was fast washing out, under +the waves of happiness that were sweeping over it now. 
+ +The visitors were given the highest seat of honor, and as soon as Mr. +Walters' speech was finished, he introduced them to the school. The +middle-aged man turned out to be a prodigious personage--no less a one +than the county judge--altogether the most august creation these +children had ever looked upon--and they wondered what kind of material +he was made of--and they half wanted to hear him roar, and were half +afraid he might, too. He was from Constantinople, twelve miles away--so +he had travelled, and seen the world--these very eyes had looked upon +the county court-house--which was said to have a tin roof. The awe +which these reflections inspired was attested by the impressive silence +and the ranks of staring eyes. This was the great Judge Thatcher, +brother of their own lawyer. Jeff Thatcher immediately went forward, to +be familiar with the great man and be envied by the school. It would +have been music to his soul to hear the whisperings: + +"Look at him, Jim! He's a going up there. Say--look! he's a going to +shake hands with him--he IS shaking hands with him! By jings, don't you +wish you was Jeff?" + +Mr. Walters fell to "showing off," with all sorts of official +bustlings and activities, giving orders, delivering judgments, +discharging directions here, there, everywhere that he could find a +target. The librarian "showed off"--running hither and thither with his +arms full of books and making a deal of the splutter and fuss that +insect authority delights in. The young lady teachers "showed off" +--bending sweetly over pupils that were lately being boxed, lifting +pretty warning fingers at bad little boys and patting good ones +lovingly. The young gentlemen teachers "showed off" with small +scoldings and other little displays of authority and fine attention to +discipline--and most of the teachers, of both sexes, found business up +at the library, by the pulpit; and it was business that frequently had +to be done over again two or three times (with much seeming vexation). +The little girls "showed off" in various ways, and the little boys +"showed off" with such diligence that the air was thick with paper wads +and the murmur of scufflings. And above it all the great man sat and +beamed a majestic judicial smile upon all the house, and warmed himself +in the sun of his own grandeur--for he was "showing off," too. + +There was only one thing wanting to make Mr. Walters' ecstasy +complete, and that was a chance to deliver a Bible-prize and exhibit a +prodigy. Several pupils had a few yellow tickets, but none had enough +--he had been around among the star pupils inquiring. He would have given +worlds, now, to have that German lad back again with a sound mind. + +And now at this moment, when hope was dead, Tom Sawyer came forward +with nine yellow tickets, nine red tickets, and ten blue ones, and +demanded a Bible. This was a thunderbolt out of a clear sky. Walters +was not expecting an application from this source for the next ten +years. But there was no getting around it--here were the certified +checks, and they were good for their face. Tom was therefore elevated +to a place with the Judge and the other elect, and the great news was +announced from headquarters. It was the most stunning surprise of the +decade, and so profound was the sensation that it lifted the new hero +up to the judicial one's altitude, and the school had two marvels to +gaze upon in place of one. 
The boys were all eaten up with envy--but +those that suffered the bitterest pangs were those who perceived too +late that they themselves had contributed to this hated splendor by +trading tickets to Tom for the wealth he had amassed in selling +whitewashing privileges. These despised themselves, as being the dupes +of a wily fraud, a guileful snake in the grass. + +The prize was delivered to Tom with as much effusion as the +superintendent could pump up under the circumstances; but it lacked +somewhat of the true gush, for the poor fellow's instinct taught him +that there was a mystery here that could not well bear the light, +perhaps; it was simply preposterous that this boy had warehoused two +thousand sheaves of Scriptural wisdom on his premises--a dozen would +strain his capacity, without a doubt. + +Amy Lawrence was proud and glad, and she tried to make Tom see it in +her face--but he wouldn't look. She wondered; then she was just a grain +troubled; next a dim suspicion came and went--came again; she watched; +a furtive glance told her worlds--and then her heart broke, and she was +jealous, and angry, and the tears came and she hated everybody. Tom +most of all (she thought). + +Tom was introduced to the Judge; but his tongue was tied, his breath +would hardly come, his heart quaked--partly because of the awful +greatness of the man, but mainly because he was her parent. He would +have liked to fall down and worship him, if it were in the dark. The +Judge put his hand on Tom's head and called him a fine little man, and +asked him what his name was. The boy stammered, gasped, and got it out: + +"Tom." + +"Oh, no, not Tom--it is--" + +"Thomas." + +"Ah, that's it. I thought there was more to it, maybe. That's very +well. But you've another one I daresay, and you'll tell it to me, won't +you?" + +"Tell the gentleman your other name, Thomas," said Walters, "and say +sir. You mustn't forget your manners." + +"Thomas Sawyer--sir." + +"That's it! That's a good boy. Fine boy. Fine, manly little fellow. +Two thousand verses is a great many--very, very great many. And you +never can be sorry for the trouble you took to learn them; for +knowledge is worth more than anything there is in the world; it's what +makes great men and good men; you'll be a great man and a good man +yourself, some day, Thomas, and then you'll look back and say, It's all +owing to the precious Sunday-school privileges of my boyhood--it's all +owing to my dear teachers that taught me to learn--it's all owing to +the good superintendent, who encouraged me, and watched over me, and +gave me a beautiful Bible--a splendid elegant Bible--to keep and have +it all for my own, always--it's all owing to right bringing up! That is +what you will say, Thomas--and you wouldn't take any money for those +two thousand verses--no indeed you wouldn't. And now you wouldn't mind +telling me and this lady some of the things you've learned--no, I know +you wouldn't--for we are proud of little boys that learn. Now, no +doubt you know the names of all the twelve disciples. Won't you tell us +the names of the first two that were appointed?" + +Tom was tugging at a button-hole and looking sheepish. He blushed, +now, and his eyes fell. Mr. Walters' heart sank within him. He said to +himself, it is not possible that the boy can answer the simplest +question--why DID the Judge ask him? Yet he felt obliged to speak up +and say: + +"Answer the gentleman, Thomas--don't be afraid." + +Tom still hung fire. + +"Now I know you'll tell me," said the lady. 
"The names of the first +two disciples were--" + +"DAVID AND GOLIAH!" + +Let us draw the curtain of charity over the rest of the scene. + + + +CHAPTER V + +ABOUT half-past ten the cracked bell of the small church began to +ring, and presently the people began to gather for the morning sermon. +The Sunday-school children distributed themselves about the house and +occupied pews with their parents, so as to be under supervision. Aunt +Polly came, and Tom and Sid and Mary sat with her--Tom being placed +next the aisle, in order that he might be as far away from the open +window and the seductive outside summer scenes as possible. The crowd +filed up the aisles: the aged and needy postmaster, who had seen better +days; the mayor and his wife--for they had a mayor there, among other +unnecessaries; the justice of the peace; the widow Douglass, fair, +smart, and forty, a generous, good-hearted soul and well-to-do, her +hill mansion the only palace in the town, and the most hospitable and +much the most lavish in the matter of festivities that St. Petersburg +could boast; the bent and venerable Major and Mrs. Ward; lawyer +Riverson, the new notable from a distance; next the belle of the +village, followed by a troop of lawn-clad and ribbon-decked young +heart-breakers; then all the young clerks in town in a body--for they +had stood in the vestibule sucking their cane-heads, a circling wall of +oiled and simpering admirers, till the last girl had run their gantlet; +and last of all came the Model Boy, Willie Mufferson, taking as heedful +care of his mother as if she were cut glass. He always brought his +mother to church, and was the pride of all the matrons. The boys all +hated him, he was so good. And besides, he had been "thrown up to them" +so much. His white handkerchief was hanging out of his pocket behind, as +usual on Sundays--accidentally. Tom had no handkerchief, and he looked +upon boys who had as snobs. + +The congregation being fully assembled, now, the bell rang once more, +to warn laggards and stragglers, and then a solemn hush fell upon the +church which was only broken by the tittering and whispering of the +choir in the gallery. The choir always tittered and whispered all +through service. There was once a church choir that was not ill-bred, +but I have forgotten where it was, now. It was a great many years ago, +and I can scarcely remember anything about it, but I think it was in +some foreign country. + +The minister gave out the hymn, and read it through with a relish, in +a peculiar style which was much admired in that part of the country. +His voice began on a medium key and climbed steadily up till it reached +a certain point, where it bore with strong emphasis upon the topmost +word and then plunged down as if from a spring-board: + + Shall I be car-ri-ed toe the skies, on flow'ry BEDS of ease, + + Whilst others fight to win the prize, and sail thro' BLOODY seas? + +He was regarded as a wonderful reader. At church "sociables" he was +always called upon to read poetry; and when he was through, the ladies +would lift up their hands and let them fall helplessly in their laps, +and "wall" their eyes, and shake their heads, as much as to say, "Words +cannot express it; it is too beautiful, TOO beautiful for this mortal +earth." + +After the hymn had been sung, the Rev. Mr. 
Sprague turned himself into +a bulletin-board, and read off "notices" of meetings and societies and +things till it seemed that the list would stretch out to the crack of +doom--a queer custom which is still kept up in America, even in cities, +away here in this age of abundant newspapers. Often, the less there is +to justify a traditional custom, the harder it is to get rid of it. + +And now the minister prayed. A good, generous prayer it was, and went +into details: it pleaded for the church, and the little children of the +church; for the other churches of the village; for the village itself; +for the county; for the State; for the State officers; for the United +States; for the churches of the United States; for Congress; for the +President; for the officers of the Government; for poor sailors, tossed +by stormy seas; for the oppressed millions groaning under the heel of +European monarchies and Oriental despotisms; for such as have the light +and the good tidings, and yet have not eyes to see nor ears to hear +withal; for the heathen in the far islands of the sea; and closed with +a supplication that the words he was about to speak might find grace +and favor, and be as seed sown in fertile ground, yielding in time a +grateful harvest of good. Amen. + +There was a rustling of dresses, and the standing congregation sat +down. The boy whose history this book relates did not enjoy the prayer, +he only endured it--if he even did that much. He was restive all +through it; he kept tally of the details of the prayer, unconsciously +--for he was not listening, but he knew the ground of old, and the +clergyman's regular route over it--and when a little trifle of new +matter was interlarded, his ear detected it and his whole nature +resented it; he considered additions unfair, and scoundrelly. In the +midst of the prayer a fly had lit on the back of the pew in front of +him and tortured his spirit by calmly rubbing its hands together, +embracing its head with its arms, and polishing it so vigorously that +it seemed to almost part company with the body, and the slender thread +of a neck was exposed to view; scraping its wings with its hind legs +and smoothing them to its body as if they had been coat-tails; going +through its whole toilet as tranquilly as if it knew it was perfectly +safe. As indeed it was; for as sorely as Tom's hands itched to grab for +it they did not dare--he believed his soul would be instantly destroyed +if he did such a thing while the prayer was going on. But with the +closing sentence his hand began to curve and steal forward; and the +instant the "Amen" was out the fly was a prisoner of war. His aunt +detected the act and made him let it go. + +The minister gave out his text and droned along monotonously through +an argument that was so prosy that many a head by and by began to nod +--and yet it was an argument that dealt in limitless fire and brimstone +and thinned the predestined elect down to a company so small as to be +hardly worth the saving. Tom counted the pages of the sermon; after +church he always knew how many pages there had been, but he seldom knew +anything else about the discourse. However, this time he was really +interested for a little while. The minister made a grand and moving +picture of the assembling together of the world's hosts at the +millennium when the lion and the lamb should lie down together and a +little child should lead them. 
But the pathos, the lesson, the moral of +the great spectacle were lost upon the boy; he only thought of the +conspicuousness of the principal character before the on-looking +nations; his face lit with the thought, and he said to himself that he +wished he could be that child, if it was a tame lion. + +Now he lapsed into suffering again, as the dry argument was resumed. +Presently he bethought him of a treasure he had and got it out. It was +a large black beetle with formidable jaws--a "pinchbug," he called it. +It was in a percussion-cap box. The first thing the beetle did was to +take him by the finger. A natural fillip followed, the beetle went +floundering into the aisle and lit on its back, and the hurt finger +went into the boy's mouth. The beetle lay there working its helpless +legs, unable to turn over. Tom eyed it, and longed for it; but it was +safe out of his reach. Other people uninterested in the sermon found +relief in the beetle, and they eyed it too. Presently a vagrant poodle +dog came idling along, sad at heart, lazy with the summer softness and +the quiet, weary of captivity, sighing for change. He spied the beetle; +the drooping tail lifted and wagged. He surveyed the prize; walked +around it; smelt at it from a safe distance; walked around it again; +grew bolder, and took a closer smell; then lifted his lip and made a +gingerly snatch at it, just missing it; made another, and another; +began to enjoy the diversion; subsided to his stomach with the beetle +between his paws, and continued his experiments; grew weary at last, +and then indifferent and absent-minded. His head nodded, and little by +little his chin descended and touched the enemy, who seized it. There +was a sharp yelp, a flirt of the poodle's head, and the beetle fell a +couple of yards away, and lit on its back once more. The neighboring +spectators shook with a gentle inward joy, several faces went behind +fans and handkerchiefs, and Tom was entirely happy. The dog looked +foolish, and probably felt so; but there was resentment in his heart, +too, and a craving for revenge. So he went to the beetle and began a +wary attack on it again; jumping at it from every point of a circle, +lighting with his fore-paws within an inch of the creature, making even +closer snatches at it with his teeth, and jerking his head till his +ears flapped again. But he grew tired once more, after a while; tried +to amuse himself with a fly but found no relief; followed an ant +around, with his nose close to the floor, and quickly wearied of that; +yawned, sighed, forgot the beetle entirely, and sat down on it. Then +there was a wild yelp of agony and the poodle went sailing up the +aisle; the yelps continued, and so did the dog; he crossed the house in +front of the altar; he flew down the other aisle; he crossed before the +doors; he clamored up the home-stretch; his anguish grew with his +progress, till presently he was but a woolly comet moving in its orbit +with the gleam and the speed of light. At last the frantic sufferer +sheered from its course, and sprang into its master's lap; he flung it +out of the window, and the voice of distress quickly thinned away and +died in the distance. + +By this time the whole church was red-faced and suffocating with +suppressed laughter, and the sermon had come to a dead standstill. 
The +discourse was resumed presently, but it went lame and halting, all +possibility of impressiveness being at an end; for even the gravest +sentiments were constantly being received with a smothered burst of +unholy mirth, under cover of some remote pew-back, as if the poor +parson had said a rarely facetious thing. It was a genuine relief to +the whole congregation when the ordeal was over and the benediction +pronounced. + +Tom Sawyer went home quite cheerful, thinking to himself that there +was some satisfaction about divine service when there was a bit of +variety in it. He had but one marring thought; he was willing that the +dog should play with his pinchbug, but he did not think it was upright +in him to carry it off. + + + +CHAPTER VI + +MONDAY morning found Tom Sawyer miserable. Monday morning always found +him so--because it began another week's slow suffering in school. He +generally began that day with wishing he had had no intervening +holiday, it made the going into captivity and fetters again so much +more odious. + +Tom lay thinking. Presently it occurred to him that he wished he was +sick; then he could stay home from school. Here was a vague +possibility. He canvassed his system. No ailment was found, and he +investigated again. This time he thought he could detect colicky +symptoms, and he began to encourage them with considerable hope. But +they soon grew feeble, and presently died wholly away. He reflected +further. Suddenly he discovered something. One of his upper front teeth +was loose. This was lucky; he was about to begin to groan, as a +"starter," as he called it, when it occurred to him that if he came +into court with that argument, his aunt would pull it out, and that +would hurt. So he thought he would hold the tooth in reserve for the +present, and seek further. Nothing offered for some little time, and +then he remembered hearing the doctor tell about a certain thing that +laid up a patient for two or three weeks and threatened to make him +lose a finger. So the boy eagerly drew his sore toe from under the +sheet and held it up for inspection. But now he did not know the +necessary symptoms. However, it seemed well worth while to chance it, +so he fell to groaning with considerable spirit. + +But Sid slept on unconscious. + +Tom groaned louder, and fancied that he began to feel pain in the toe. + +No result from Sid. + +Tom was panting with his exertions by this time. He took a rest and +then swelled himself up and fetched a succession of admirable groans. + +Sid snored on. + +Tom was aggravated. He said, "Sid, Sid!" and shook him. This course +worked well, and Tom began to groan again. Sid yawned, stretched, then +brought himself up on his elbow with a snort, and began to stare at +Tom. Tom went on groaning. Sid said: + +"Tom! Say, Tom!" [No response.] "Here, Tom! TOM! What is the matter, +Tom?" And he shook him and looked in his face anxiously. + +Tom moaned out: + +"Oh, don't, Sid. Don't joggle me." + +"Why, what's the matter, Tom? I must call auntie." + +"No--never mind. It'll be over by and by, maybe. Don't call anybody." + +"But I must! DON'T groan so, Tom, it's awful. How long you been this +way?" + +"Hours. Ouch! Oh, don't stir so, Sid, you'll kill me." + +"Tom, why didn't you wake me sooner? Oh, Tom, DON'T! It makes my +flesh crawl to hear you. Tom, what is the matter?" + +"I forgive you everything, Sid. [Groan.] Everything you've ever done +to me. When I'm gone--" + +"Oh, Tom, you ain't dying, are you? Don't, Tom--oh, don't. 
Maybe--" + +"I forgive everybody, Sid. [Groan.] Tell 'em so, Sid. And Sid, you +give my window-sash and my cat with one eye to that new girl that's +come to town, and tell her--" + +But Sid had snatched his clothes and gone. Tom was suffering in +reality, now, so handsomely was his imagination working, and so his +groans had gathered quite a genuine tone. + +Sid flew down-stairs and said: + +"Oh, Aunt Polly, come! Tom's dying!" + +"Dying!" + +"Yes'm. Don't wait--come quick!" + +"Rubbage! I don't believe it!" + +But she fled up-stairs, nevertheless, with Sid and Mary at her heels. +And her face grew white, too, and her lip trembled. When she reached +the bedside she gasped out: + +"You, Tom! Tom, what's the matter with you?" + +"Oh, auntie, I'm--" + +"What's the matter with you--what is the matter with you, child?" + +"Oh, auntie, my sore toe's mortified!" + +The old lady sank down into a chair and laughed a little, then cried a +little, then did both together. This restored her and she said: + +"Tom, what a turn you did give me. Now you shut up that nonsense and +climb out of this." + +The groans ceased and the pain vanished from the toe. The boy felt a +little foolish, and he said: + +"Aunt Polly, it SEEMED mortified, and it hurt so I never minded my +tooth at all." + +"Your tooth, indeed! What's the matter with your tooth?" + +"One of them's loose, and it aches perfectly awful." + +"There, there, now, don't begin that groaning again. Open your mouth. +Well--your tooth IS loose, but you're not going to die about that. +Mary, get me a silk thread, and a chunk of fire out of the kitchen." + +Tom said: + +"Oh, please, auntie, don't pull it out. It don't hurt any more. I wish +I may never stir if it does. Please don't, auntie. I don't want to stay +home from school." + +"Oh, you don't, don't you? So all this row was because you thought +you'd get to stay home from school and go a-fishing? Tom, Tom, I love +you so, and you seem to try every way you can to break my old heart +with your outrageousness." By this time the dental instruments were +ready. The old lady made one end of the silk thread fast to Tom's tooth +with a loop and tied the other to the bedpost. Then she seized the +chunk of fire and suddenly thrust it almost into the boy's face. The +tooth hung dangling by the bedpost, now. + +But all trials bring their compensations. As Tom wended to school +after breakfast, he was the envy of every boy he met because the gap in +his upper row of teeth enabled him to expectorate in a new and +admirable way. He gathered quite a following of lads interested in the +exhibition; and one that had cut his finger and had been a centre of +fascination and homage up to this time, now found himself suddenly +without an adherent, and shorn of his glory. His heart was heavy, and +he said with a disdain which he did not feel that it wasn't anything to +spit like Tom Sawyer; but another boy said, "Sour grapes!" and he +wandered away a dismantled hero. + +Shortly Tom came upon the juvenile pariah of the village, Huckleberry +Finn, son of the town drunkard. Huckleberry was cordially hated and +dreaded by all the mothers of the town, because he was idle and lawless +and vulgar and bad--and because all their children admired him so, and +delighted in his forbidden society, and wished they dared to be like +him. Tom was like the rest of the respectable boys, in that he envied +Huckleberry his gaudy outcast condition, and was under strict orders +not to play with him. 
So he played with him every time he got a chance. +Huckleberry was always dressed in the cast-off clothes of full-grown +men, and they were in perennial bloom and fluttering with rags. His hat +was a vast ruin with a wide crescent lopped out of its brim; his coat, +when he wore one, hung nearly to his heels and had the rearward buttons +far down the back; but one suspender supported his trousers; the seat +of the trousers bagged low and contained nothing, the fringed legs +dragged in the dirt when not rolled up. + +Huckleberry came and went, at his own free will. He slept on doorsteps +in fine weather and in empty hogsheads in wet; he did not have to go to +school or to church, or call any being master or obey anybody; he could +go fishing or swimming when and where he chose, and stay as long as it +suited him; nobody forbade him to fight; he could sit up as late as he +pleased; he was always the first boy that went barefoot in the spring +and the last to resume leather in the fall; he never had to wash, nor +put on clean clothes; he could swear wonderfully. In a word, everything +that goes to make life precious that boy had. So thought every +harassed, hampered, respectable boy in St. Petersburg. + +Tom hailed the romantic outcast: + +"Hello, Huckleberry!" + +"Hello yourself, and see how you like it." + +"What's that you got?" + +"Dead cat." + +"Lemme see him, Huck. My, he's pretty stiff. Where'd you get him?" + +"Bought him off'n a boy." + +"What did you give?" + +"I give a blue ticket and a bladder that I got at the slaughter-house." + +"Where'd you get the blue ticket?" + +"Bought it off'n Ben Rogers two weeks ago for a hoop-stick." + +"Say--what is dead cats good for, Huck?" + +"Good for? Cure warts with." + +"No! Is that so? I know something that's better." + +"I bet you don't. What is it?" + +"Why, spunk-water." + +"Spunk-water! I wouldn't give a dern for spunk-water." + +"You wouldn't, wouldn't you? D'you ever try it?" + +"No, I hain't. But Bob Tanner did." + +"Who told you so!" + +"Why, he told Jeff Thatcher, and Jeff told Johnny Baker, and Johnny +told Jim Hollis, and Jim told Ben Rogers, and Ben told a nigger, and +the nigger told me. There now!" + +"Well, what of it? They'll all lie. Leastways all but the nigger. I +don't know HIM. But I never see a nigger that WOULDN'T lie. Shucks! Now +you tell me how Bob Tanner done it, Huck." + +"Why, he took and dipped his hand in a rotten stump where the +rain-water was." + +"In the daytime?" + +"Certainly." + +"With his face to the stump?" + +"Yes. Least I reckon so." + +"Did he say anything?" + +"I don't reckon he did. I don't know." + +"Aha! Talk about trying to cure warts with spunk-water such a blame +fool way as that! Why, that ain't a-going to do any good. You got to go +all by yourself, to the middle of the woods, where you know there's a +spunk-water stump, and just as it's midnight you back up against the +stump and jam your hand in and say: + + 'Barley-corn, barley-corn, injun-meal shorts, + Spunk-water, spunk-water, swaller these warts,' + +and then walk away quick, eleven steps, with your eyes shut, and then +turn around three times and walk home without speaking to anybody. +Because if you speak the charm's busted." + +"Well, that sounds like a good way; but that ain't the way Bob Tanner +done." + +"No, sir, you can bet he didn't, becuz he's the wartiest boy in this +town; and he wouldn't have a wart on him if he'd knowed how to work +spunk-water. I've took off thousands of warts off of my hands that way, +Huck. 
I play with frogs so much that I've always got considerable many +warts. Sometimes I take 'em off with a bean." + +"Yes, bean's good. I've done that." + +"Have you? What's your way?" + +"You take and split the bean, and cut the wart so as to get some +blood, and then you put the blood on one piece of the bean and take and +dig a hole and bury it 'bout midnight at the crossroads in the dark of +the moon, and then you burn up the rest of the bean. You see that piece +that's got the blood on it will keep drawing and drawing, trying to +fetch the other piece to it, and so that helps the blood to draw the +wart, and pretty soon off she comes." + +"Yes, that's it, Huck--that's it; though when you're burying it if you +say 'Down bean; off wart; come no more to bother me!' it's better. +That's the way Joe Harper does, and he's been nearly to Coonville and +most everywheres. But say--how do you cure 'em with dead cats?" + +"Why, you take your cat and go and get in the graveyard 'long about +midnight when somebody that was wicked has been buried; and when it's +midnight a devil will come, or maybe two or three, but you can't see +'em, you can only hear something like the wind, or maybe hear 'em talk; +and when they're taking that feller away, you heave your cat after 'em +and say, 'Devil follow corpse, cat follow devil, warts follow cat, I'm +done with ye!' That'll fetch ANY wart." + +"Sounds right. D'you ever try it, Huck?" + +"No, but old Mother Hopkins told me." + +"Well, I reckon it's so, then. Becuz they say she's a witch." + +"Say! Why, Tom, I KNOW she is. She witched pap. Pap says so his own +self. He come along one day, and he see she was a-witching him, so he +took up a rock, and if she hadn't dodged, he'd a got her. Well, that +very night he rolled off'n a shed wher' he was a layin drunk, and broke +his arm." + +"Why, that's awful. How did he know she was a-witching him?" + +"Lord, pap can tell, easy. Pap says when they keep looking at you +right stiddy, they're a-witching you. Specially if they mumble. Becuz +when they mumble they're saying the Lord's Prayer backards." + +"Say, Hucky, when you going to try the cat?" + +"To-night. I reckon they'll come after old Hoss Williams to-night." + +"But they buried him Saturday. Didn't they get him Saturday night?" + +"Why, how you talk! How could their charms work till midnight?--and +THEN it's Sunday. Devils don't slosh around much of a Sunday, I don't +reckon." + +"I never thought of that. That's so. Lemme go with you?" + +"Of course--if you ain't afeard." + +"Afeard! 'Tain't likely. Will you meow?" + +"Yes--and you meow back, if you get a chance. Last time, you kep' me +a-meowing around till old Hays went to throwing rocks at me and says +'Dern that cat!' and so I hove a brick through his window--but don't +you tell." + +"I won't. I couldn't meow that night, becuz auntie was watching me, +but I'll meow this time. Say--what's that?" + +"Nothing but a tick." + +"Where'd you get him?" + +"Out in the woods." + +"What'll you take for him?" + +"I don't know. I don't want to sell him." + +"All right. It's a mighty small tick, anyway." + +"Oh, anybody can run a tick down that don't belong to them. I'm +satisfied with it. It's a good enough tick for me." + +"Sho, there's ticks a plenty. I could have a thousand of 'em if I +wanted to." + +"Well, why don't you? Becuz you know mighty well you can't. This is a +pretty early tick, I reckon. It's the first one I've seen this year." + +"Say, Huck--I'll give you my tooth for him." + +"Less see it." 
+ +Tom got out a bit of paper and carefully unrolled it. Huckleberry +viewed it wistfully. The temptation was very strong. At last he said: + +"Is it genuwyne?" + +Tom lifted his lip and showed the vacancy. + +"Well, all right," said Huckleberry, "it's a trade." + +Tom enclosed the tick in the percussion-cap box that had lately been +the pinchbug's prison, and the boys separated, each feeling wealthier +than before. + +When Tom reached the little isolated frame schoolhouse, he strode in +briskly, with the manner of one who had come with all honest speed. +He hung his hat on a peg and flung himself into his seat with +business-like alacrity. The master, throned on high in his great +splint-bottom arm-chair, was dozing, lulled by the drowsy hum of study. +The interruption roused him. + +"Thomas Sawyer!" + +Tom knew that when his name was pronounced in full, it meant trouble. + +"Sir!" + +"Come up here. Now, sir, why are you late again, as usual?" + +Tom was about to take refuge in a lie, when he saw two long tails of +yellow hair hanging down a back that he recognized by the electric +sympathy of love; and by that form was THE ONLY VACANT PLACE on the +girls' side of the schoolhouse. He instantly said: + +"I STOPPED TO TALK WITH HUCKLEBERRY FINN!" + +The master's pulse stood still, and he stared helplessly. The buzz of +study ceased. The pupils wondered if this foolhardy boy had lost his +mind. The master said: + +"You--you did what?" + +"Stopped to talk with Huckleberry Finn." + +There was no mistaking the words. + +"Thomas Sawyer, this is the most astounding confession I have ever +listened to. No mere ferule will answer for this offence. Take off your +jacket." + +The master's arm performed until it was tired and the stock of +switches notably diminished. Then the order followed: + +"Now, sir, go and sit with the girls! And let this be a warning to you." + +The titter that rippled around the room appeared to abash the boy, but +in reality that result was caused rather more by his worshipful awe of +his unknown idol and the dread pleasure that lay in his high good +fortune. He sat down upon the end of the pine bench and the girl +hitched herself away from him with a toss of her head. Nudges and winks +and whispers traversed the room, but Tom sat still, with his arms upon +the long, low desk before him, and seemed to study his book. + +By and by attention ceased from him, and the accustomed school murmur +rose upon the dull air once more. Presently the boy began to steal +furtive glances at the girl. She observed it, "made a mouth" at him and +gave him the back of her head for the space of a minute. When she +cautiously faced around again, a peach lay before her. She thrust it +away. Tom gently put it back. She thrust it away again, but with less +animosity. Tom patiently returned it to its place. Then she let it +remain. Tom scrawled on his slate, "Please take it--I got more." The +girl glanced at the words, but made no sign. Now the boy began to draw +something on the slate, hiding his work with his left hand. For a time +the girl refused to notice; but her human curiosity presently began to +manifest itself by hardly perceptible signs. The boy worked on, +apparently unconscious. The girl made a sort of noncommittal attempt to +see, but the boy did not betray that he was aware of it. At last she +gave in and hesitatingly whispered: + +"Let me see it." + +Tom partly uncovered a dismal caricature of a house with two gable +ends to it and a corkscrew of smoke issuing from the chimney. 
Then the +girl's interest began to fasten itself upon the work and she forgot +everything else. When it was finished, she gazed a moment, then +whispered: + +"It's nice--make a man." + +The artist erected a man in the front yard, that resembled a derrick. +He could have stepped over the house; but the girl was not +hypercritical; she was satisfied with the monster, and whispered: + +"It's a beautiful man--now make me coming along." + +Tom drew an hour-glass with a full moon and straw limbs to it and +armed the spreading fingers with a portentous fan. The girl said: + +"It's ever so nice--I wish I could draw." + +"It's easy," whispered Tom, "I'll learn you." + +"Oh, will you? When?" + +"At noon. Do you go home to dinner?" + +"I'll stay if you will." + +"Good--that's a whack. What's your name?" + +"Becky Thatcher. What's yours? Oh, I know. It's Thomas Sawyer." + +"That's the name they lick me by. I'm Tom when I'm good. You call me +Tom, will you?" + +"Yes." + +Now Tom began to scrawl something on the slate, hiding the words from +the girl. But she was not backward this time. She begged to see. Tom +said: + +"Oh, it ain't anything." + +"Yes it is." + +"No it ain't. You don't want to see." + +"Yes I do, indeed I do. Please let me." + +"You'll tell." + +"No I won't--deed and deed and double deed won't." + +"You won't tell anybody at all? Ever, as long as you live?" + +"No, I won't ever tell ANYbody. Now let me." + +"Oh, YOU don't want to see!" + +"Now that you treat me so, I WILL see." And she put her small hand +upon his and a little scuffle ensued, Tom pretending to resist in +earnest but letting his hand slip by degrees till these words were +revealed: "I LOVE YOU." + +"Oh, you bad thing!" And she hit his hand a smart rap, but reddened +and looked pleased, nevertheless. + +Just at this juncture the boy felt a slow, fateful grip closing on his +ear, and a steady lifting impulse. In that wise he was borne across the +house and deposited in his own seat, under a peppering fire of giggles +from the whole school. Then the master stood over him during a few +awful moments, and finally moved away to his throne without saying a +word. But although Tom's ear tingled, his heart was jubilant. + +As the school quieted down Tom made an honest effort to study, but the +turmoil within him was too great. In turn he took his place in the +reading class and made a botch of it; then in the geography class and +turned lakes into mountains, mountains into rivers, and rivers into +continents, till chaos was come again; then in the spelling class, and +got "turned down," by a succession of mere baby words, till he brought +up at the foot and yielded up the pewter medal which he had worn with +ostentation for months. + + + +CHAPTER VII + +THE harder Tom tried to fasten his mind on his book, the more his +ideas wandered. So at last, with a sigh and a yawn, he gave it up. It +seemed to him that the noon recess would never come. The air was +utterly dead. There was not a breath stirring. It was the sleepiest of +sleepy days. The drowsing murmur of the five and twenty studying +scholars soothed the soul like the spell that is in the murmur of bees. +Away off in the flaming sunshine, Cardiff Hill lifted its soft green +sides through a shimmering veil of heat, tinted with the purple of +distance; a few birds floated on lazy wing high in the air; no other +living thing was visible but some cows, and they were asleep. Tom's +heart ached to be free, or else to have something of interest to do to +pass the dreary time. 
His hand wandered into his pocket and his face +lit up with a glow of gratitude that was prayer, though he did not know +it. Then furtively the percussion-cap box came out. He released the +tick and put him on the long flat desk. The creature probably glowed +with a gratitude that amounted to prayer, too, at this moment, but it +was premature: for when he started thankfully to travel off, Tom turned +him aside with a pin and made him take a new direction. + +Tom's bosom friend sat next him, suffering just as Tom had been, and +now he was deeply and gratefully interested in this entertainment in an +instant. This bosom friend was Joe Harper. The two boys were sworn +friends all the week, and embattled enemies on Saturdays. Joe took a +pin out of his lapel and began to assist in exercising the prisoner. +The sport grew in interest momently. Soon Tom said that they were +interfering with each other, and neither getting the fullest benefit of +the tick. So he put Joe's slate on the desk and drew a line down the +middle of it from top to bottom. + +"Now," said he, "as long as he is on your side you can stir him up and +I'll let him alone; but if you let him get away and get on my side, +you're to leave him alone as long as I can keep him from crossing over." + +"All right, go ahead; start him up." + +The tick escaped from Tom, presently, and crossed the equator. Joe +harassed him awhile, and then he got away and crossed back again. This +change of base occurred often. While one boy was worrying the tick with +absorbing interest, the other would look on with interest as strong, +the two heads bowed together over the slate, and the two souls dead to +all things else. At last luck seemed to settle and abide with Joe. The +tick tried this, that, and the other course, and got as excited and as +anxious as the boys themselves, but time and again just as he would +have victory in his very grasp, so to speak, and Tom's fingers would be +twitching to begin, Joe's pin would deftly head him off, and keep +possession. At last Tom could stand it no longer. The temptation was +too strong. So he reached out and lent a hand with his pin. Joe was +angry in a moment. Said he: + +"Tom, you let him alone." + +"I only just want to stir him up a little, Joe." + +"No, sir, it ain't fair; you just let him alone." + +"Blame it, I ain't going to stir him much." + +"Let him alone, I tell you." + +"I won't!" + +"You shall--he's on my side of the line." + +"Look here, Joe Harper, whose is that tick?" + +"I don't care whose tick he is--he's on my side of the line, and you +sha'n't touch him." + +"Well, I'll just bet I will, though. He's my tick and I'll do what I +blame please with him, or die!" + +A tremendous whack came down on Tom's shoulders, and its duplicate on +Joe's; and for the space of two minutes the dust continued to fly from +the two jackets and the whole school to enjoy it. The boys had been too +absorbed to notice the hush that had stolen upon the school awhile +before when the master came tiptoeing down the room and stood over +them. He had contemplated a good part of the performance before he +contributed his bit of variety to it. + +When school broke up at noon, Tom flew to Becky Thatcher, and +whispered in her ear: + +"Put on your bonnet and let on you're going home; and when you get to +the corner, give the rest of 'em the slip, and turn down through the +lane and come back. I'll go the other way and come it over 'em the same +way." 
+ +So the one went off with one group of scholars, and the other with +another. In a little while the two met at the bottom of the lane, and +when they reached the school they had it all to themselves. Then they +sat together, with a slate before them, and Tom gave Becky the pencil +and held her hand in his, guiding it, and so created another surprising +house. When the interest in art began to wane, the two fell to talking. +Tom was swimming in bliss. He said: + +"Do you love rats?" + +"No! I hate them!" + +"Well, I do, too--LIVE ones. But I mean dead ones, to swing round your +head with a string." + +"No, I don't care for rats much, anyway. What I like is chewing-gum." + +"Oh, I should say so! I wish I had some now." + +"Do you? I've got some. I'll let you chew it awhile, but you must give +it back to me." + +That was agreeable, so they chewed it turn about, and dangled their +legs against the bench in excess of contentment. + +"Was you ever at a circus?" said Tom. + +"Yes, and my pa's going to take me again some time, if I'm good." + +"I been to the circus three or four times--lots of times. Church ain't +shucks to a circus. There's things going on at a circus all the time. +I'm going to be a clown in a circus when I grow up." + +"Oh, are you! That will be nice. They're so lovely, all spotted up." + +"Yes, that's so. And they get slathers of money--most a dollar a day, +Ben Rogers says. Say, Becky, was you ever engaged?" + +"What's that?" + +"Why, engaged to be married." + +"No." + +"Would you like to?" + +"I reckon so. I don't know. What is it like?" + +"Like? Why it ain't like anything. You only just tell a boy you won't +ever have anybody but him, ever ever ever, and then you kiss and that's +all. Anybody can do it." + +"Kiss? What do you kiss for?" + +"Why, that, you know, is to--well, they always do that." + +"Everybody?" + +"Why, yes, everybody that's in love with each other. Do you remember +what I wrote on the slate?" + +"Ye--yes." + +"What was it?" + +"I sha'n't tell you." + +"Shall I tell YOU?" + +"Ye--yes--but some other time." + +"No, now." + +"No, not now--to-morrow." + +"Oh, no, NOW. Please, Becky--I'll whisper it, I'll whisper it ever so +easy." + +Becky hesitating, Tom took silence for consent, and passed his arm +about her waist and whispered the tale ever so softly, with his mouth +close to her ear. And then he added: + +"Now you whisper it to me--just the same." + +She resisted, for a while, and then said: + +"You turn your face away so you can't see, and then I will. But you +mustn't ever tell anybody--WILL you, Tom? Now you won't, WILL you?" + +"No, indeed, indeed I won't. Now, Becky." + +He turned his face away. She bent timidly around till her breath +stirred his curls and whispered, "I--love--you!" + +Then she sprang away and ran around and around the desks and benches, +with Tom after her, and took refuge in a corner at last, with her +little white apron to her face. Tom clasped her about her neck and +pleaded: + +"Now, Becky, it's all done--all over but the kiss. Don't you be afraid +of that--it ain't anything at all. Please, Becky." And he tugged at her +apron and the hands. + +By and by she gave up, and let her hands drop; her face, all glowing +with the struggle, came up and submitted. Tom kissed the red lips and +said: + +"Now it's all done, Becky. And always after this, you know, you ain't +ever to love anybody but me, and you ain't ever to marry anybody but +me, ever never and forever. Will you?" 
+ +"No, I'll never love anybody but you, Tom, and I'll never marry +anybody but you--and you ain't to ever marry anybody but me, either." + +"Certainly. Of course. That's PART of it. And always coming to school +or when we're going home, you're to walk with me, when there ain't +anybody looking--and you choose me and I choose you at parties, because +that's the way you do when you're engaged." + +"It's so nice. I never heard of it before." + +"Oh, it's ever so gay! Why, me and Amy Lawrence--" + +The big eyes told Tom his blunder and he stopped, confused. + +"Oh, Tom! Then I ain't the first you've ever been engaged to!" + +The child began to cry. Tom said: + +"Oh, don't cry, Becky, I don't care for her any more." + +"Yes, you do, Tom--you know you do." + +Tom tried to put his arm about her neck, but she pushed him away and +turned her face to the wall, and went on crying. Tom tried again, with +soothing words in his mouth, and was repulsed again. Then his pride was +up, and he strode away and went outside. He stood about, restless and +uneasy, for a while, glancing at the door, every now and then, hoping +she would repent and come to find him. But she did not. Then he began +to feel badly and fear that he was in the wrong. It was a hard struggle +with him to make new advances, now, but he nerved himself to it and +entered. She was still standing back there in the corner, sobbing, with +her face to the wall. Tom's heart smote him. He went to her and stood a +moment, not knowing exactly how to proceed. Then he said hesitatingly: + +"Becky, I--I don't care for anybody but you." + +No reply--but sobs. + +"Becky"--pleadingly. "Becky, won't you say something?" + +More sobs. + +Tom got out his chiefest jewel, a brass knob from the top of an +andiron, and passed it around her so that she could see it, and said: + +"Please, Becky, won't you take it?" + +She struck it to the floor. Then Tom marched out of the house and over +the hills and far away, to return to school no more that day. Presently +Becky began to suspect. She ran to the door; he was not in sight; she +flew around to the play-yard; he was not there. Then she called: + +"Tom! Come back, Tom!" + +She listened intently, but there was no answer. She had no companions +but silence and loneliness. So she sat down to cry again and upbraid +herself; and by this time the scholars began to gather again, and she +had to hide her griefs and still her broken heart and take up the cross +of a long, dreary, aching afternoon, with none among the strangers +about her to exchange sorrows with. + + + +CHAPTER VIII + +TOM dodged hither and thither through lanes until he was well out of +the track of returning scholars, and then fell into a moody jog. He +crossed a small "branch" two or three times, because of a prevailing +juvenile superstition that to cross water baffled pursuit. Half an hour +later he was disappearing behind the Douglas mansion on the summit of +Cardiff Hill, and the schoolhouse was hardly distinguishable away off +in the valley behind him. He entered a dense wood, picked his pathless +way to the centre of it, and sat down on a mossy spot under a spreading +oak. There was not even a zephyr stirring; the dead noonday heat had +even stilled the songs of the birds; nature lay in a trance that was +broken by no sound but the occasional far-off hammering of a +woodpecker, and this seemed to render the pervading silence and sense +of loneliness the more profound. 
The boy's soul was steeped in +melancholy; his feelings were in happy accord with his surroundings. He +sat long with his elbows on his knees and his chin in his hands, +meditating. It seemed to him that life was but a trouble, at best, and +he more than half envied Jimmy Hodges, so lately released; it must be +very peaceful, he thought, to lie and slumber and dream forever and +ever, with the wind whispering through the trees and caressing the +grass and the flowers over the grave, and nothing to bother and grieve +about, ever any more. If he only had a clean Sunday-school record he +could be willing to go, and be done with it all. Now as to this girl. +What had he done? Nothing. He had meant the best in the world, and been +treated like a dog--like a very dog. She would be sorry some day--maybe +when it was too late. Ah, if he could only die TEMPORARILY! + +But the elastic heart of youth cannot be compressed into one +constrained shape long at a time. Tom presently began to drift +insensibly back into the concerns of this life again. What if he turned +his back, now, and disappeared mysteriously? What if he went away--ever +so far away, into unknown countries beyond the seas--and never came +back any more! How would she feel then! The idea of being a clown +recurred to him now, only to fill him with disgust. For frivolity and +jokes and spotted tights were an offense, when they intruded themselves +upon a spirit that was exalted into the vague august realm of the +romantic. No, he would be a soldier, and return after long years, all +war-worn and illustrious. No--better still, he would join the Indians, +and hunt buffaloes and go on the warpath in the mountain ranges and the +trackless great plains of the Far West, and away in the future come +back a great chief, bristling with feathers, hideous with paint, and +prance into Sunday-school, some drowsy summer morning, with a +bloodcurdling war-whoop, and sear the eyeballs of all his companions +with unappeasable envy. But no, there was something gaudier even than +this. He would be a pirate! That was it! NOW his future lay plain +before him, and glowing with unimaginable splendor. How his name would +fill the world, and make people shudder! How gloriously he would go +plowing the dancing seas, in his long, low, black-hulled racer, the +Spirit of the Storm, with his grisly flag flying at the fore! And at +the zenith of his fame, how he would suddenly appear at the old village +and stalk into church, brown and weather-beaten, in his black velvet +doublet and trunks, his great jack-boots, his crimson sash, his belt +bristling with horse-pistols, his crime-rusted cutlass at his side, his +slouch hat with waving plumes, his black flag unfurled, with the skull +and crossbones on it, and hear with swelling ecstasy the whisperings, +"It's Tom Sawyer the Pirate!--the Black Avenger of the Spanish Main!" + +Yes, it was settled; his career was determined. He would run away from +home and enter upon it. He would start the very next morning. Therefore +he must now begin to get ready. He would collect his resources +together. He went to a rotten log near at hand and began to dig under +one end of it with his Barlow knife. He soon struck wood that sounded +hollow. He put his hand there and uttered this incantation impressively: + +"What hasn't come here, come! What's here, stay here!" + +Then he scraped away the dirt, and exposed a pine shingle. He took it +up and disclosed a shapely little treasure-house whose bottom and sides +were of shingles. 
In it lay a marble. Tom's astonishment was boundless! +He scratched his head with a perplexed air, and said: + +"Well, that beats anything!" + +Then he tossed the marble away pettishly, and stood cogitating. The +truth was, that a superstition of his had failed, here, which he and +all his comrades had always looked upon as infallible. If you buried a +marble with certain necessary incantations, and left it alone a +fortnight, and then opened the place with the incantation he had just +used, you would find that all the marbles you had ever lost had +gathered themselves together there, meantime, no matter how widely they +had been separated. But now, this thing had actually and unquestionably +failed. Tom's whole structure of faith was shaken to its foundations. +He had many a time heard of this thing succeeding but never of its +failing before. It did not occur to him that he had tried it several +times before, himself, but could never find the hiding-places +afterward. He puzzled over the matter some time, and finally decided +that some witch had interfered and broken the charm. He thought he +would satisfy himself on that point; so he searched around till he +found a small sandy spot with a little funnel-shaped depression in it. +He laid himself down and put his mouth close to this depression and +called-- + +"Doodle-bug, doodle-bug, tell me what I want to know! Doodle-bug, +doodle-bug, tell me what I want to know!" + +The sand began to work, and presently a small black bug appeared for a +second and then darted under again in a fright. + +"He dasn't tell! So it WAS a witch that done it. I just knowed it." + +He well knew the futility of trying to contend against witches, so he +gave up discouraged. But it occurred to him that he might as well have +the marble he had just thrown away, and therefore he went and made a +patient search for it. But he could not find it. Now he went back to +his treasure-house and carefully placed himself just as he had been +standing when he tossed the marble away; then he took another marble +from his pocket and tossed it in the same way, saying: + +"Brother, go find your brother!" + +He watched where it stopped, and went there and looked. But it must +have fallen short or gone too far; so he tried twice more. The last +repetition was successful. The two marbles lay within a foot of each +other. + +Just here the blast of a toy tin trumpet came faintly down the green +aisles of the forest. Tom flung off his jacket and trousers, turned a +suspender into a belt, raked away some brush behind the rotten log, +disclosing a rude bow and arrow, a lath sword and a tin trumpet, and in +a moment had seized these things and bounded away, barelegged, with +fluttering shirt. He presently halted under a great elm, blew an +answering blast, and then began to tiptoe and look warily out, this way +and that. He said cautiously--to an imaginary company: + +"Hold, my merry men! Keep hid till I blow." + +Now appeared Joe Harper, as airily clad and elaborately armed as Tom. +Tom called: + +"Hold! Who comes here into Sherwood Forest without my pass?" + +"Guy of Guisborne wants no man's pass. Who art thou that--that--" + +"Dares to hold such language," said Tom, prompting--for they talked +"by the book," from memory. + +"Who art thou that dares to hold such language?" + +"I, indeed! I am Robin Hood, as thy caitiff carcase soon shall know." + +"Then art thou indeed that famous outlaw? Right gladly will I dispute +with thee the passes of the merry wood. Have at thee!" 
+ +They took their lath swords, dumped their other traps on the ground, +struck a fencing attitude, foot to foot, and began a grave, careful +combat, "two up and two down." Presently Tom said: + +"Now, if you've got the hang, go it lively!" + +So they "went it lively," panting and perspiring with the work. By and +by Tom shouted: + +"Fall! fall! Why don't you fall?" + +"I sha'n't! Why don't you fall yourself? You're getting the worst of +it." + +"Why, that ain't anything. I can't fall; that ain't the way it is in +the book. The book says, 'Then with one back-handed stroke he slew poor +Guy of Guisborne.' You're to turn around and let me hit you in the +back." + +There was no getting around the authorities, so Joe turned, received +the whack and fell. + +"Now," said Joe, getting up, "you got to let me kill YOU. That's fair." + +"Why, I can't do that, it ain't in the book." + +"Well, it's blamed mean--that's all." + +"Well, say, Joe, you can be Friar Tuck or Much the miller's son, and +lam me with a quarter-staff; or I'll be the Sheriff of Nottingham and +you be Robin Hood a little while and kill me." + +This was satisfactory, and so these adventures were carried out. Then +Tom became Robin Hood again, and was allowed by the treacherous nun to +bleed his strength away through his neglected wound. And at last Joe, +representing a whole tribe of weeping outlaws, dragged him sadly forth, +gave his bow into his feeble hands, and Tom said, "Where this arrow +falls, there bury poor Robin Hood under the greenwood tree." Then he +shot the arrow and fell back and would have died, but he lit on a +nettle and sprang up too gaily for a corpse. + +The boys dressed themselves, hid their accoutrements, and went off +grieving that there were no outlaws any more, and wondering what modern +civilization could claim to have done to compensate for their loss. +They said they would rather be outlaws a year in Sherwood Forest than +President of the United States forever. + + + +CHAPTER IX + +AT half-past nine, that night, Tom and Sid were sent to bed, as usual. +They said their prayers, and Sid was soon asleep. Tom lay awake and +waited, in restless impatience. When it seemed to him that it must be +nearly daylight, he heard the clock strike ten! This was despair. He +would have tossed and fidgeted, as his nerves demanded, but he was +afraid he might wake Sid. So he lay still, and stared up into the dark. +Everything was dismally still. By and by, out of the stillness, little, +scarcely perceptible noises began to emphasize themselves. The ticking +of the clock began to bring itself into notice. Old beams began to +crack mysteriously. The stairs creaked faintly. Evidently spirits were +abroad. A measured, muffled snore issued from Aunt Polly's chamber. And +now the tiresome chirping of a cricket that no human ingenuity could +locate, began. Next the ghastly ticking of a deathwatch in the wall at +the bed's head made Tom shudder--it meant that somebody's days were +numbered. Then the howl of a far-off dog rose on the night air, and was +answered by a fainter howl from a remoter distance. Tom was in an +agony. At last he was satisfied that time had ceased and eternity +begun; he began to doze, in spite of himself; the clock chimed eleven, +but he did not hear it. And then there came, mingling with his +half-formed dreams, a most melancholy caterwauling. The raising of a +neighboring window disturbed him. A cry of "Scat! you devil!" 
and the +crash of an empty bottle against the back of his aunt's woodshed +brought him wide awake, and a single minute later he was dressed and +out of the window and creeping along the roof of the "ell" on all +fours. He "meow'd" with caution once or twice, as he went; then jumped +to the roof of the woodshed and thence to the ground. Huckleberry Finn +was there, with his dead cat. The boys moved off and disappeared in the +gloom. At the end of half an hour they were wading through the tall +grass of the graveyard. + +It was a graveyard of the old-fashioned Western kind. It was on a +hill, about a mile and a half from the village. It had a crazy board +fence around it, which leaned inward in places, and outward the rest of +the time, but stood upright nowhere. Grass and weeds grew rank over the +whole cemetery. All the old graves were sunken in, there was not a +tombstone on the place; round-topped, worm-eaten boards staggered over +the graves, leaning for support and finding none. "Sacred to the memory +of" So-and-So had been painted on them once, but it could no longer +have been read, on the most of them, now, even if there had been light. + +A faint wind moaned through the trees, and Tom feared it might be the +spirits of the dead, complaining at being disturbed. The boys talked +little, and only under their breath, for the time and the place and the +pervading solemnity and silence oppressed their spirits. They found the +sharp new heap they were seeking, and ensconced themselves within the +protection of three great elms that grew in a bunch within a few feet +of the grave. + +Then they waited in silence for what seemed a long time. The hooting +of a distant owl was all the sound that troubled the dead stillness. +Tom's reflections grew oppressive. He must force some talk. So he said +in a whisper: + +"Hucky, do you believe the dead people like it for us to be here?" + +Huckleberry whispered: + +"I wisht I knowed. It's awful solemn like, AIN'T it?" + +"I bet it is." + +There was a considerable pause, while the boys canvassed this matter +inwardly. Then Tom whispered: + +"Say, Hucky--do you reckon Hoss Williams hears us talking?" + +"O' course he does. Least his sperrit does." + +Tom, after a pause: + +"I wish I'd said Mister Williams. But I never meant any harm. +Everybody calls him Hoss." + +"A body can't be too partic'lar how they talk 'bout these-yer dead +people, Tom." + +This was a damper, and conversation died again. + +Presently Tom seized his comrade's arm and said: + +"Sh!" + +"What is it, Tom?" And the two clung together with beating hearts. + +"Sh! There 'tis again! Didn't you hear it?" + +"I--" + +"There! Now you hear it." + +"Lord, Tom, they're coming! They're coming, sure. What'll we do?" + +"I dono. Think they'll see us?" + +"Oh, Tom, they can see in the dark, same as cats. I wisht I hadn't +come." + +"Oh, don't be afeard. I don't believe they'll bother us. We ain't +doing any harm. If we keep perfectly still, maybe they won't notice us +at all." + +"I'll try to, Tom, but, Lord, I'm all of a shiver." + +"Listen!" + +The boys bent their heads together and scarcely breathed. A muffled +sound of voices floated up from the far end of the graveyard. + +"Look! See there!" whispered Tom. "What is it?" + +"It's devil-fire. Oh, Tom, this is awful." + +Some vague figures approached through the gloom, swinging an +old-fashioned tin lantern that freckled the ground with innumerable +little spangles of light. 
Presently Huckleberry whispered with a +shudder: + +"It's the devils sure enough. Three of 'em! Lordy, Tom, we're goners! +Can you pray?" + +"I'll try, but don't you be afeard. They ain't going to hurt us. 'Now +I lay me down to sleep, I--'" + +"Sh!" + +"What is it, Huck?" + +"They're HUMANS! One of 'em is, anyway. One of 'em's old Muff Potter's +voice." + +"No--'tain't so, is it?" + +"I bet I know it. Don't you stir nor budge. He ain't sharp enough to +notice us. Drunk, the same as usual, likely--blamed old rip!" + +"All right, I'll keep still. Now they're stuck. Can't find it. Here +they come again. Now they're hot. Cold again. Hot again. Red hot! +They're p'inted right, this time. Say, Huck, I know another o' them +voices; it's Injun Joe." + +"That's so--that murderin' half-breed! I'd druther they was devils a +dern sight. What kin they be up to?" + +The whisper died wholly out, now, for the three men had reached the +grave and stood within a few feet of the boys' hiding-place. + +"Here it is," said the third voice; and the owner of it held the +lantern up and revealed the face of young Doctor Robinson. + +Potter and Injun Joe were carrying a handbarrow with a rope and a +couple of shovels on it. They cast down their load and began to open +the grave. The doctor put the lantern at the head of the grave and came +and sat down with his back against one of the elm trees. He was so +close the boys could have touched him. + +"Hurry, men!" he said, in a low voice; "the moon might come out at any +moment." + +They growled a response and went on digging. For some time there was +no noise but the grating sound of the spades discharging their freight +of mould and gravel. It was very monotonous. Finally a spade struck +upon the coffin with a dull woody accent, and within another minute or +two the men had hoisted it out on the ground. They pried off the lid +with their shovels, got out the body and dumped it rudely on the +ground. The moon drifted from behind the clouds and exposed the pallid +face. The barrow was got ready and the corpse placed on it, covered +with a blanket, and bound to its place with the rope. Potter took out a +large spring-knife and cut off the dangling end of the rope and then +said: + +"Now the cussed thing's ready, Sawbones, and you'll just out with +another five, or here she stays." + +"That's the talk!" said Injun Joe. + +"Look here, what does this mean?" said the doctor. "You required your +pay in advance, and I've paid you." + +"Yes, and you done more than that," said Injun Joe, approaching the +doctor, who was now standing. "Five years ago you drove me away from +your father's kitchen one night, when I come to ask for something to +eat, and you said I warn't there for any good; and when I swore I'd get +even with you if it took a hundred years, your father had me jailed for +a vagrant. Did you think I'd forget? The Injun blood ain't in me for +nothing. And now I've GOT you, and you got to SETTLE, you know!" + +He was threatening the doctor, with his fist in his face, by this +time. The doctor struck out suddenly and stretched the ruffian on the +ground. Potter dropped his knife, and exclaimed: + +"Here, now, don't you hit my pard!" and the next moment he had +grappled with the doctor and the two were struggling with might and +main, trampling the grass and tearing the ground with their heels. 
+Injun Joe sprang to his feet, his eyes flaming with passion, snatched +up Potter's knife, and went creeping, catlike and stooping, round and +round about the combatants, seeking an opportunity. All at once the +doctor flung himself free, seized the heavy headboard of Williams' +grave and felled Potter to the earth with it--and in the same instant +the half-breed saw his chance and drove the knife to the hilt in the +young man's breast. He reeled and fell partly upon Potter, flooding him +with his blood, and in the same moment the clouds blotted out the +dreadful spectacle and the two frightened boys went speeding away in +the dark. + +Presently, when the moon emerged again, Injun Joe was standing over +the two forms, contemplating them. The doctor murmured inarticulately, +gave a long gasp or two and was still. The half-breed muttered: + +"THAT score is settled--damn you." + +Then he robbed the body. After which he put the fatal knife in +Potter's open right hand, and sat down on the dismantled coffin. Three +--four--five minutes passed, and then Potter began to stir and moan. His +hand closed upon the knife; he raised it, glanced at it, and let it +fall, with a shudder. Then he sat up, pushing the body from him, and +gazed at it, and then around him, confusedly. His eyes met Joe's. + +"Lord, how is this, Joe?" he said. + +"It's a dirty business," said Joe, without moving. + +"What did you do it for?" + +"I! I never done it!" + +"Look here! That kind of talk won't wash." + +Potter trembled and grew white. + +"I thought I'd got sober. I'd no business to drink to-night. But it's +in my head yet--worse'n when we started here. I'm all in a muddle; +can't recollect anything of it, hardly. Tell me, Joe--HONEST, now, old +feller--did I do it? Joe, I never meant to--'pon my soul and honor, I +never meant to, Joe. Tell me how it was, Joe. Oh, it's awful--and him +so young and promising." + +"Why, you two was scuffling, and he fetched you one with the headboard +and you fell flat; and then up you come, all reeling and staggering +like, and snatched the knife and jammed it into him, just as he fetched +you another awful clip--and here you've laid, as dead as a wedge til +now." + +"Oh, I didn't know what I was a-doing. I wish I may die this minute if +I did. It was all on account of the whiskey and the excitement, I +reckon. I never used a weepon in my life before, Joe. I've fought, but +never with weepons. They'll all say that. Joe, don't tell! Say you +won't tell, Joe--that's a good feller. I always liked you, Joe, and +stood up for you, too. Don't you remember? You WON'T tell, WILL you, +Joe?" And the poor creature dropped on his knees before the stolid +murderer, and clasped his appealing hands. + +"No, you've always been fair and square with me, Muff Potter, and I +won't go back on you. There, now, that's as fair as a man can say." + +"Oh, Joe, you're an angel. I'll bless you for this the longest day I +live." And Potter began to cry. + +"Come, now, that's enough of that. This ain't any time for blubbering. +You be off yonder way and I'll go this. Move, now, and don't leave any +tracks behind you." + +Potter started on a trot that quickly increased to a run. The +half-breed stood looking after him. He muttered: + +"If he's as much stunned with the lick and fuddled with the rum as he +had the look of being, he won't think of the knife till he's gone so +far he'll be afraid to come back after it to such a place by himself +--chicken-heart!" 
+ +Two or three minutes later the murdered man, the blanketed corpse, the +lidless coffin, and the open grave were under no inspection but the +moon's. The stillness was complete again, too. + + + +CHAPTER X + +THE two boys flew on and on, toward the village, speechless with +horror. They glanced backward over their shoulders from time to time, +apprehensively, as if they feared they might be followed. Every stump +that started up in their path seemed a man and an enemy, and made them +catch their breath; and as they sped by some outlying cottages that lay +near the village, the barking of the aroused watch-dogs seemed to give +wings to their feet. + +"If we can only get to the old tannery before we break down!" +whispered Tom, in short catches between breaths. "I can't stand it much +longer." + +Huckleberry's hard pantings were his only reply, and the boys fixed +their eyes on the goal of their hopes and bent to their work to win it. +They gained steadily on it, and at last, breast to breast, they burst +through the open door and fell grateful and exhausted in the sheltering +shadows beyond. By and by their pulses slowed down, and Tom whispered: + +"Huckleberry, what do you reckon'll come of this?" + +"If Doctor Robinson dies, I reckon hanging'll come of it." + +"Do you though?" + +"Why, I KNOW it, Tom." + +Tom thought a while, then he said: + +"Who'll tell? We?" + +"What are you talking about? S'pose something happened and Injun Joe +DIDN'T hang? Why, he'd kill us some time or other, just as dead sure as +we're a laying here." + +"That's just what I was thinking to myself, Huck." + +"If anybody tells, let Muff Potter do it, if he's fool enough. He's +generally drunk enough." + +Tom said nothing--went on thinking. Presently he whispered: + +"Huck, Muff Potter don't know it. How can he tell?" + +"What's the reason he don't know it?" + +"Because he'd just got that whack when Injun Joe done it. D'you reckon +he could see anything? D'you reckon he knowed anything?" + +"By hokey, that's so, Tom!" + +"And besides, look-a-here--maybe that whack done for HIM!" + +"No, 'taint likely, Tom. He had liquor in him; I could see that; and +besides, he always has. Well, when pap's full, you might take and belt +him over the head with a church and you couldn't phase him. He says so, +his own self. So it's the same with Muff Potter, of course. But if a +man was dead sober, I reckon maybe that whack might fetch him; I dono." + +After another reflective silence, Tom said: + +"Hucky, you sure you can keep mum?" + +"Tom, we GOT to keep mum. You know that. That Injun devil wouldn't +make any more of drownding us than a couple of cats, if we was to +squeak 'bout this and they didn't hang him. Now, look-a-here, Tom, less +take and swear to one another--that's what we got to do--swear to keep +mum." + +"I'm agreed. It's the best thing. Would you just hold hands and swear +that we--" + +"Oh no, that wouldn't do for this. That's good enough for little +rubbishy common things--specially with gals, cuz THEY go back on you +anyway, and blab if they get in a huff--but there orter be writing +'bout a big thing like this. And blood." + +Tom's whole being applauded this idea. It was deep, and dark, and +awful; the hour, the circumstances, the surroundings, were in keeping +with it. 
He picked up a clean pine shingle that lay in the moonlight, +took a little fragment of "red keel" out of his pocket, got the moon on +his work, and painfully scrawled these lines, emphasizing each slow +down-stroke by clamping his tongue between his teeth, and letting up +the pressure on the up-strokes. [See next page.] + + "Huck Finn and + Tom Sawyer swears + they will keep mum + about This and They + wish They may Drop + down dead in Their + Tracks if They ever + Tell and Rot." + +Huckleberry was filled with admiration of Tom's facility in writing, +and the sublimity of his language. He at once took a pin from his lapel +and was going to prick his flesh, but Tom said: + +"Hold on! Don't do that. A pin's brass. It might have verdigrease on +it." + +"What's verdigrease?" + +"It's p'ison. That's what it is. You just swaller some of it once +--you'll see." + +So Tom unwound the thread from one of his needles, and each boy +pricked the ball of his thumb and squeezed out a drop of blood. In +time, after many squeezes, Tom managed to sign his initials, using the +ball of his little finger for a pen. Then he showed Huckleberry how to +make an H and an F, and the oath was complete. They buried the shingle +close to the wall, with some dismal ceremonies and incantations, and +the fetters that bound their tongues were considered to be locked and +the key thrown away. + +A figure crept stealthily through a break in the other end of the +ruined building, now, but they did not notice it. + +"Tom," whispered Huckleberry, "does this keep us from EVER telling +--ALWAYS?" + +"Of course it does. It don't make any difference WHAT happens, we got +to keep mum. We'd drop down dead--don't YOU know that?" + +"Yes, I reckon that's so." + +They continued to whisper for some little time. Presently a dog set up +a long, lugubrious howl just outside--within ten feet of them. The boys +clasped each other suddenly, in an agony of fright. + +"Which of us does he mean?" gasped Huckleberry. + +"I dono--peep through the crack. Quick!" + +"No, YOU, Tom!" + +"I can't--I can't DO it, Huck!" + +"Please, Tom. There 'tis again!" + +"Oh, lordy, I'm thankful!" whispered Tom. "I know his voice. It's Bull +Harbison." * + +[* If Mr. Harbison owned a slave named Bull, Tom would have spoken of +him as "Harbison's Bull," but a son or a dog of that name was "Bull +Harbison."] + +"Oh, that's good--I tell you, Tom, I was most scared to death; I'd a +bet anything it was a STRAY dog." + +The dog howled again. The boys' hearts sank once more. + +"Oh, my! that ain't no Bull Harbison!" whispered Huckleberry. "DO, Tom!" + +Tom, quaking with fear, yielded, and put his eye to the crack. His +whisper was hardly audible when he said: + +"Oh, Huck, IT S A STRAY DOG!" + +"Quick, Tom, quick! Who does he mean?" + +"Huck, he must mean us both--we're right together." + +"Oh, Tom, I reckon we're goners. I reckon there ain't no mistake 'bout +where I'LL go to. I been so wicked." + +"Dad fetch it! This comes of playing hookey and doing everything a +feller's told NOT to do. I might a been good, like Sid, if I'd a tried +--but no, I wouldn't, of course. But if ever I get off this time, I lay +I'll just WALLER in Sunday-schools!" And Tom began to snuffle a little. + +"YOU bad!" and Huckleberry began to snuffle too. "Consound it, Tom +Sawyer, you're just old pie, 'longside o' what I am. Oh, LORDY, lordy, +lordy, I wisht I only had half your chance." + +Tom choked off and whispered: + +"Look, Hucky, look! He's got his BACK to us!" 
+ +Hucky looked, with joy in his heart. + +"Well, he has, by jingoes! Did he before?" + +"Yes, he did. But I, like a fool, never thought. Oh, this is bully, +you know. NOW who can he mean?" + +The howling stopped. Tom pricked up his ears. + +"Sh! What's that?" he whispered. + +"Sounds like--like hogs grunting. No--it's somebody snoring, Tom." + +"That IS it! Where 'bouts is it, Huck?" + +"I bleeve it's down at 'tother end. Sounds so, anyway. Pap used to +sleep there, sometimes, 'long with the hogs, but laws bless you, he +just lifts things when HE snores. Besides, I reckon he ain't ever +coming back to this town any more." + +The spirit of adventure rose in the boys' souls once more. + +"Hucky, do you das't to go if I lead?" + +"I don't like to, much. Tom, s'pose it's Injun Joe!" + +Tom quailed. But presently the temptation rose up strong again and the +boys agreed to try, with the understanding that they would take to +their heels if the snoring stopped. So they went tiptoeing stealthily +down, the one behind the other. When they had got to within five steps +of the snorer, Tom stepped on a stick, and it broke with a sharp snap. +The man moaned, writhed a little, and his face came into the moonlight. +It was Muff Potter. The boys' hearts had stood still, and their hopes +too, when the man moved, but their fears passed away now. They tiptoed +out, through the broken weather-boarding, and stopped at a little +distance to exchange a parting word. That long, lugubrious howl rose on +the night air again! They turned and saw the strange dog standing +within a few feet of where Potter was lying, and FACING Potter, with +his nose pointing heavenward. + +"Oh, geeminy, it's HIM!" exclaimed both boys, in a breath. + +"Say, Tom--they say a stray dog come howling around Johnny Miller's +house, 'bout midnight, as much as two weeks ago; and a whippoorwill +come in and lit on the banisters and sung, the very same evening; and +there ain't anybody dead there yet." + +"Well, I know that. And suppose there ain't. Didn't Gracie Miller fall +in the kitchen fire and burn herself terrible the very next Saturday?" + +"Yes, but she ain't DEAD. And what's more, she's getting better, too." + +"All right, you wait and see. She's a goner, just as dead sure as Muff +Potter's a goner. That's what the niggers say, and they know all about +these kind of things, Huck." + +Then they separated, cogitating. When Tom crept in at his bedroom +window the night was almost spent. He undressed with excessive caution, +and fell asleep congratulating himself that nobody knew of his +escapade. He was not aware that the gently-snoring Sid was awake, and +had been so for an hour. + +When Tom awoke, Sid was dressed and gone. There was a late look in the +light, a late sense in the atmosphere. He was startled. Why had he not +been called--persecuted till he was up, as usual? The thought filled +him with bodings. Within five minutes he was dressed and down-stairs, +feeling sore and drowsy. The family were still at table, but they had +finished breakfast. There was no voice of rebuke; but there were +averted eyes; there was a silence and an air of solemnity that struck a +chill to the culprit's heart. He sat down and tried to seem gay, but it +was up-hill work; it roused no smile, no response, and he lapsed into +silence and let his heart sink down to the depths. + +After breakfast his aunt took him aside, and Tom almost brightened in +the hope that he was going to be flogged; but it was not so. 
His aunt +wept over him and asked him how he could go and break her old heart so; +and finally told him to go on, and ruin himself and bring her gray +hairs with sorrow to the grave, for it was no use for her to try any +more. This was worse than a thousand whippings, and Tom's heart was +sorer now than his body. He cried, he pleaded for forgiveness, promised +to reform over and over again, and then received his dismissal, feeling +that he had won but an imperfect forgiveness and established but a +feeble confidence. + +He left the presence too miserable to even feel revengeful toward Sid; +and so the latter's prompt retreat through the back gate was +unnecessary. He moped to school gloomy and sad, and took his flogging, +along with Joe Harper, for playing hookey the day before, with the air +of one whose heart was busy with heavier woes and wholly dead to +trifles. Then he betook himself to his seat, rested his elbows on his +desk and his jaws in his hands, and stared at the wall with the stony +stare of suffering that has reached the limit and can no further go. +His elbow was pressing against some hard substance. After a long time +he slowly and sadly changed his position, and took up this object with +a sigh. It was in a paper. He unrolled it. A long, lingering, colossal +sigh followed, and his heart broke. It was his brass andiron knob! + +This final feather broke the camel's back. + + + +CHAPTER XI + +CLOSE upon the hour of noon the whole village was suddenly electrified +with the ghastly news. No need of the as yet undreamed-of telegraph; +the tale flew from man to man, from group to group, from house to +house, with little less than telegraphic speed. Of course the +schoolmaster gave holiday for that afternoon; the town would have +thought strangely of him if he had not. + +A gory knife had been found close to the murdered man, and it had been +recognized by somebody as belonging to Muff Potter--so the story ran. +And it was said that a belated citizen had come upon Potter washing +himself in the "branch" about one or two o'clock in the morning, and +that Potter had at once sneaked off--suspicious circumstances, +especially the washing which was not a habit with Potter. It was also +said that the town had been ransacked for this "murderer" (the public +are not slow in the matter of sifting evidence and arriving at a +verdict), but that he could not be found. Horsemen had departed down +all the roads in every direction, and the Sheriff "was confident" that +he would be captured before night. + +All the town was drifting toward the graveyard. Tom's heartbreak +vanished and he joined the procession, not because he would not a +thousand times rather go anywhere else, but because an awful, +unaccountable fascination drew him on. Arrived at the dreadful place, +he wormed his small body through the crowd and saw the dismal +spectacle. It seemed to him an age since he was there before. Somebody +pinched his arm. He turned, and his eyes met Huckleberry's. Then both +looked elsewhere at once, and wondered if anybody had noticed anything +in their mutual glance. But everybody was talking, and intent upon the +grisly spectacle before them. + +"Poor fellow!" "Poor young fellow!" "This ought to be a lesson to +grave robbers!" "Muff Potter'll hang for this if they catch him!" This +was the drift of remark; and the minister said, "It was a judgment; His +hand is here." + +Now Tom shivered from head to heel; for his eye fell upon the stolid +face of Injun Joe. 
At this moment the crowd began to sway and struggle, +and voices shouted, "It's him! it's him! he's coming himself!" + +"Who? Who?" from twenty voices. + +"Muff Potter!" + +"Hallo, he's stopped!--Look out, he's turning! Don't let him get away!" + +People in the branches of the trees over Tom's head said he wasn't +trying to get away--he only looked doubtful and perplexed. + +"Infernal impudence!" said a bystander; "wanted to come and take a +quiet look at his work, I reckon--didn't expect any company." + +The crowd fell apart, now, and the Sheriff came through, +ostentatiously leading Potter by the arm. The poor fellow's face was +haggard, and his eyes showed the fear that was upon him. When he stood +before the murdered man, he shook as with a palsy, and he put his face +in his hands and burst into tears. + +"I didn't do it, friends," he sobbed; "'pon my word and honor I never +done it." + +"Who's accused you?" shouted a voice. + +This shot seemed to carry home. Potter lifted his face and looked +around him with a pathetic hopelessness in his eyes. He saw Injun Joe, +and exclaimed: + +"Oh, Injun Joe, you promised me you'd never--" + +"Is that your knife?" and it was thrust before him by the Sheriff. + +Potter would have fallen if they had not caught him and eased him to +the ground. Then he said: + +"Something told me 't if I didn't come back and get--" He shuddered; +then waved his nerveless hand with a vanquished gesture and said, "Tell +'em, Joe, tell 'em--it ain't any use any more." + +Then Huckleberry and Tom stood dumb and staring, and heard the +stony-hearted liar reel off his serene statement, they expecting every +moment that the clear sky would deliver God's lightnings upon his head, +and wondering to see how long the stroke was delayed. And when he had +finished and still stood alive and whole, their wavering impulse to +break their oath and save the poor betrayed prisoner's life faded and +vanished away, for plainly this miscreant had sold himself to Satan and +it would be fatal to meddle with the property of such a power as that. + +"Why didn't you leave? What did you want to come here for?" somebody +said. + +"I couldn't help it--I couldn't help it," Potter moaned. "I wanted to +run away, but I couldn't seem to come anywhere but here." And he fell +to sobbing again. + +Injun Joe repeated his statement, just as calmly, a few minutes +afterward on the inquest, under oath; and the boys, seeing that the +lightnings were still withheld, were confirmed in their belief that Joe +had sold himself to the devil. He was now become, to them, the most +balefully interesting object they had ever looked upon, and they could +not take their fascinated eyes from his face. + +They inwardly resolved to watch him nights, when opportunity should +offer, in the hope of getting a glimpse of his dread master. + +Injun Joe helped to raise the body of the murdered man and put it in a +wagon for removal; and it was whispered through the shuddering crowd +that the wound bled a little! The boys thought that this happy +circumstance would turn suspicion in the right direction; but they were +disappointed, for more than one villager remarked: + +"It was within three feet of Muff Potter when it done it." + +Tom's fearful secret and gnawing conscience disturbed his sleep for as +much as a week after this; and at breakfast one morning Sid said: + +"Tom, you pitch around and talk in your sleep so much that you keep me +awake half the time." + +Tom blanched and dropped his eyes. 
+ +"It's a bad sign," said Aunt Polly, gravely. "What you got on your +mind, Tom?" + +"Nothing. Nothing 't I know of." But the boy's hand shook so that he +spilled his coffee. + +"And you do talk such stuff," Sid said. "Last night you said, 'It's +blood, it's blood, that's what it is!' You said that over and over. And +you said, 'Don't torment me so--I'll tell!' Tell WHAT? What is it +you'll tell?" + +Everything was swimming before Tom. There is no telling what might +have happened, now, but luckily the concern passed out of Aunt Polly's +face and she came to Tom's relief without knowing it. She said: + +"Sho! It's that dreadful murder. I dream about it most every night +myself. Sometimes I dream it's me that done it." + +Mary said she had been affected much the same way. Sid seemed +satisfied. Tom got out of the presence as quick as he plausibly could, +and after that he complained of toothache for a week, and tied up his +jaws every night. He never knew that Sid lay nightly watching, and +frequently slipped the bandage free and then leaned on his elbow +listening a good while at a time, and afterward slipped the bandage +back to its place again. Tom's distress of mind wore off gradually and +the toothache grew irksome and was discarded. If Sid really managed to +make anything out of Tom's disjointed mutterings, he kept it to himself. + +It seemed to Tom that his schoolmates never would get done holding +inquests on dead cats, and thus keeping his trouble present to his +mind. Sid noticed that Tom never was coroner at one of these inquiries, +though it had been his habit to take the lead in all new enterprises; +he noticed, too, that Tom never acted as a witness--and that was +strange; and Sid did not overlook the fact that Tom even showed a +marked aversion to these inquests, and always avoided them when he +could. Sid marvelled, but said nothing. However, even inquests went out +of vogue at last, and ceased to torture Tom's conscience. + +Every day or two, during this time of sorrow, Tom watched his +opportunity and went to the little grated jail-window and smuggled such +small comforts through to the "murderer" as he could get hold of. The +jail was a trifling little brick den that stood in a marsh at the edge +of the village, and no guards were afforded for it; indeed, it was +seldom occupied. These offerings greatly helped to ease Tom's +conscience. + +The villagers had a strong desire to tar-and-feather Injun Joe and +ride him on a rail, for body-snatching, but so formidable was his +character that nobody could be found who was willing to take the lead +in the matter, so it was dropped. He had been careful to begin both of +his inquest-statements with the fight, without confessing the +grave-robbery that preceded it; therefore it was deemed wisest not +to try the case in the courts at present. + + + +CHAPTER XII + +ONE of the reasons why Tom's mind had drifted away from its secret +troubles was, that it had found a new and weighty matter to interest +itself about. Becky Thatcher had stopped coming to school. Tom had +struggled with his pride a few days, and tried to "whistle her down the +wind," but failed. He began to find himself hanging around her father's +house, nights, and feeling very miserable. She was ill. What if she +should die! There was distraction in the thought. He no longer took an +interest in war, nor even in piracy. The charm of life was gone; there +was nothing but dreariness left. He put his hoop away, and his bat; +there was no joy in them any more. 
His aunt was concerned. She began to +try all manner of remedies on him. She was one of those people who are +infatuated with patent medicines and all new-fangled methods of +producing health or mending it. She was an inveterate experimenter in +these things. When something fresh in this line came out she was in a +fever, right away, to try it; not on herself, for she was never ailing, +but on anybody else that came handy. She was a subscriber for all the +"Health" periodicals and phrenological frauds; and the solemn ignorance +they were inflated with was breath to her nostrils. All the "rot" they +contained about ventilation, and how to go to bed, and how to get up, +and what to eat, and what to drink, and how much exercise to take, and +what frame of mind to keep one's self in, and what sort of clothing to +wear, was all gospel to her, and she never observed that her +health-journals of the current month customarily upset everything they +had recommended the month before. She was as simple-hearted and honest +as the day was long, and so she was an easy victim. She gathered +together her quack periodicals and her quack medicines, and thus armed +with death, went about on her pale horse, metaphorically speaking, with +"hell following after." But she never suspected that she was not an +angel of healing and the balm of Gilead in disguise, to the suffering +neighbors. + +The water treatment was new, now, and Tom's low condition was a +windfall to her. She had him out at daylight every morning, stood him +up in the woodshed and drowned him with a deluge of cold water; then +she scrubbed him down with a towel like a file, and so brought him to; +then she rolled him up in a wet sheet and put him away under blankets +till she sweated his soul clean and "the yellow stains of it came +through his pores"--as Tom said. + +Yet notwithstanding all this, the boy grew more and more melancholy +and pale and dejected. She added hot baths, sitz baths, shower baths, +and plunges. The boy remained as dismal as a hearse. She began to +assist the water with a slim oatmeal diet and blister-plasters. She +calculated his capacity as she would a jug's, and filled him up every +day with quack cure-alls. + +Tom had become indifferent to persecution by this time. This phase +filled the old lady's heart with consternation. This indifference must +be broken up at any cost. Now she heard of Pain-killer for the first +time. She ordered a lot at once. She tasted it and was filled with +gratitude. It was simply fire in a liquid form. She dropped the water +treatment and everything else, and pinned her faith to Pain-killer. She +gave Tom a teaspoonful and watched with the deepest anxiety for the +result. Her troubles were instantly at rest, her soul at peace again; +for the "indifference" was broken up. The boy could not have shown a +wilder, heartier interest, if she had built a fire under him. + +Tom felt that it was time to wake up; this sort of life might be +romantic enough, in his blighted condition, but it was getting to have +too little sentiment and too much distracting variety about it. So he +thought over various plans for relief, and finally hit pon that of +professing to be fond of Pain-killer. He asked for it so often that he +became a nuisance, and his aunt ended by telling him to help himself +and quit bothering her. If it had been Sid, she would have had no +misgivings to alloy her delight; but since it was Tom, she watched the +bottle clandestinely. 
She found that the medicine did really diminish, +but it did not occur to her that the boy was mending the health of a +crack in the sitting-room floor with it. + +One day Tom was in the act of dosing the crack when his aunt's yellow +cat came along, purring, eying the teaspoon avariciously, and begging +for a taste. Tom said: + +"Don't ask for it unless you want it, Peter." + +But Peter signified that he did want it. + +"You better make sure." + +Peter was sure. + +"Now you've asked for it, and I'll give it to you, because there ain't +anything mean about me; but if you find you don't like it, you mustn't +blame anybody but your own self." + +Peter was agreeable. So Tom pried his mouth open and poured down the +Pain-killer. Peter sprang a couple of yards in the air, and then +delivered a war-whoop and set off round and round the room, banging +against furniture, upsetting flower-pots, and making general havoc. +Next he rose on his hind feet and pranced around, in a frenzy of +enjoyment, with his head over his shoulder and his voice proclaiming +his unappeasable happiness. Then he went tearing around the house again +spreading chaos and destruction in his path. Aunt Polly entered in time +to see him throw a few double summersets, deliver a final mighty +hurrah, and sail through the open window, carrying the rest of the +flower-pots with him. The old lady stood petrified with astonishment, +peering over her glasses; Tom lay on the floor expiring with laughter. + +"Tom, what on earth ails that cat?" + +"I don't know, aunt," gasped the boy. + +"Why, I never see anything like it. What did make him act so?" + +"Deed I don't know, Aunt Polly; cats always act so when they're having +a good time." + +"They do, do they?" There was something in the tone that made Tom +apprehensive. + +"Yes'm. That is, I believe they do." + +"You DO?" + +"Yes'm." + +The old lady was bending down, Tom watching, with interest emphasized +by anxiety. Too late he divined her "drift." The handle of the telltale +teaspoon was visible under the bed-valance. Aunt Polly took it, held it +up. Tom winced, and dropped his eyes. Aunt Polly raised him by the +usual handle--his ear--and cracked his head soundly with her thimble. + +"Now, sir, what did you want to treat that poor dumb beast so, for?" + +"I done it out of pity for him--because he hadn't any aunt." + +"Hadn't any aunt!--you numskull. What has that got to do with it?" + +"Heaps. Because if he'd had one she'd a burnt him out herself! She'd a +roasted his bowels out of him 'thout any more feeling than if he was a +human!" + +Aunt Polly felt a sudden pang of remorse. This was putting the thing +in a new light; what was cruelty to a cat MIGHT be cruelty to a boy, +too. She began to soften; she felt sorry. Her eyes watered a little, +and she put her hand on Tom's head and said gently: + +"I was meaning for the best, Tom. And, Tom, it DID do you good." + +Tom looked up in her face with just a perceptible twinkle peeping +through his gravity. + +"I know you was meaning for the best, aunty, and so was I with Peter. +It done HIM good, too. I never see him get around so since--" + +"Oh, go 'long with you, Tom, before you aggravate me again. And you +try and see if you can't be a good boy, for once, and you needn't take +any more medicine." + +Tom reached school ahead of time. It was noticed that this strange +thing had been occurring every day latterly. And now, as usual of late, +he hung about the gate of the schoolyard instead of playing with his +comrades. 
He was sick, he said, and he looked it. He tried to seem to +be looking everywhere but whither he really was looking--down the road. +Presently Jeff Thatcher hove in sight, and Tom's face lighted; he gazed +a moment, and then turned sorrowfully away. When Jeff arrived, Tom +accosted him; and "led up" warily to opportunities for remark about +Becky, but the giddy lad never could see the bait. Tom watched and +watched, hoping whenever a frisking frock came in sight, and hating the +owner of it as soon as he saw she was not the right one. At last frocks +ceased to appear, and he dropped hopelessly into the dumps; he entered +the empty schoolhouse and sat down to suffer. Then one more frock +passed in at the gate, and Tom's heart gave a great bound. The next +instant he was out, and "going on" like an Indian; yelling, laughing, +chasing boys, jumping over the fence at risk of life and limb, throwing +handsprings, standing on his head--doing all the heroic things he could +conceive of, and keeping a furtive eye out, all the while, to see if +Becky Thatcher was noticing. But she seemed to be unconscious of it +all; she never looked. Could it be possible that she was not aware that +he was there? He carried his exploits to her immediate vicinity; came +war-whooping around, snatched a boy's cap, hurled it to the roof of the +schoolhouse, broke through a group of boys, tumbling them in every +direction, and fell sprawling, himself, under Becky's nose, almost +upsetting her--and she turned, with her nose in the air, and he heard +her say: "Mf! some people think they're mighty smart--always showing +off!" + +Tom's cheeks burned. He gathered himself up and sneaked off, crushed +and crestfallen. + + + +CHAPTER XIII + +TOM'S mind was made up now. He was gloomy and desperate. He was a +forsaken, friendless boy, he said; nobody loved him; when they found +out what they had driven him to, perhaps they would be sorry; he had +tried to do right and get along, but they would not let him; since +nothing would do them but to be rid of him, let it be so; and let them +blame HIM for the consequences--why shouldn't they? What right had the +friendless to complain? Yes, they had forced him to it at last: he +would lead a life of crime. There was no choice. + +By this time he was far down Meadow Lane, and the bell for school to +"take up" tinkled faintly upon his ear. He sobbed, now, to think he +should never, never hear that old familiar sound any more--it was very +hard, but it was forced on him; since he was driven out into the cold +world, he must submit--but he forgave them. Then the sobs came thick +and fast. + +Just at this point he met his soul's sworn comrade, Joe Harper +--hard-eyed, and with evidently a great and dismal purpose in his heart. +Plainly here were "two souls with but a single thought." Tom, wiping +his eyes with his sleeve, began to blubber out something about a +resolution to escape from hard usage and lack of sympathy at home by +roaming abroad into the great world never to return; and ended by +hoping that Joe would not forget him. + +But it transpired that this was a request which Joe had just been +going to make of Tom, and had come to hunt him up for that purpose. 
His +mother had whipped him for drinking some cream which he had never +tasted and knew nothing about; it was plain that she was tired of him +and wished him to go; if she felt that way, there was nothing for him +to do but succumb; he hoped she would be happy, and never regret having +driven her poor boy out into the unfeeling world to suffer and die. + +As the two boys walked sorrowing along, they made a new compact to +stand by each other and be brothers and never separate till death +relieved them of their troubles. Then they began to lay their plans. +Joe was for being a hermit, and living on crusts in a remote cave, and +dying, some time, of cold and want and grief; but after listening to +Tom, he conceded that there were some conspicuous advantages about a +life of crime, and so he consented to be a pirate. + +Three miles below St. Petersburg, at a point where the Mississippi +River was a trifle over a mile wide, there was a long, narrow, wooded +island, with a shallow bar at the head of it, and this offered well as +a rendezvous. It was not inhabited; it lay far over toward the further +shore, abreast a dense and almost wholly unpeopled forest. So Jackson's +Island was chosen. Who were to be the subjects of their piracies was a +matter that did not occur to them. Then they hunted up Huckleberry +Finn, and he joined them promptly, for all careers were one to him; he +was indifferent. They presently separated to meet at a lonely spot on +the river-bank two miles above the village at the favorite hour--which +was midnight. There was a small log raft there which they meant to +capture. Each would bring hooks and lines, and such provision as he +could steal in the most dark and mysterious way--as became outlaws. And +before the afternoon was done, they had all managed to enjoy the sweet +glory of spreading the fact that pretty soon the town would "hear +something." All who got this vague hint were cautioned to "be mum and +wait." + +About midnight Tom arrived with a boiled ham and a few trifles, +and stopped in a dense undergrowth on a small bluff overlooking the +meeting-place. It was starlight, and very still. The mighty river lay +like an ocean at rest. Tom listened a moment, but no sound disturbed the +quiet. Then he gave a low, distinct whistle. It was answered from under +the bluff. Tom whistled twice more; these signals were answered in the +same way. Then a guarded voice said: + +"Who goes there?" + +"Tom Sawyer, the Black Avenger of the Spanish Main. Name your names." + +"Huck Finn the Red-Handed, and Joe Harper the Terror of the Seas." Tom +had furnished these titles, from his favorite literature. + +"'Tis well. Give the countersign." + +Two hoarse whispers delivered the same awful word simultaneously to +the brooding night: + +"BLOOD!" + +Then Tom tumbled his ham over the bluff and let himself down after it, +tearing both skin and clothes to some extent in the effort. There was +an easy, comfortable path along the shore under the bluff, but it +lacked the advantages of difficulty and danger so valued by a pirate. + +The Terror of the Seas had brought a side of bacon, and had about worn +himself out with getting it there. Finn the Red-Handed had stolen a +skillet and a quantity of half-cured leaf tobacco, and had also brought +a few corn-cobs to make pipes with. But none of the pirates smoked or +"chewed" but himself. The Black Avenger of the Spanish Main said it +would never do to start without some fire. That was a wise thought; +matches were hardly known there in that day. 
They saw a fire +smouldering upon a great raft a hundred yards above, and they went +stealthily thither and helped themselves to a chunk. They made an +imposing adventure of it, saying, "Hist!" every now and then, and +suddenly halting with finger on lip; moving with hands on imaginary +dagger-hilts; and giving orders in dismal whispers that if "the foe" +stirred, to "let him have it to the hilt," because "dead men tell no +tales." They knew well enough that the raftsmen were all down at the +village laying in stores or having a spree, but still that was no +excuse for their conducting this thing in an unpiratical way. + +They shoved off, presently, Tom in command, Huck at the after oar and +Joe at the forward. Tom stood amidships, gloomy-browed, and with folded +arms, and gave his orders in a low, stern whisper: + +"Luff, and bring her to the wind!" + +"Aye-aye, sir!" + +"Steady, steady-y-y-y!" + +"Steady it is, sir!" + +"Let her go off a point!" + +"Point it is, sir!" + +As the boys steadily and monotonously drove the raft toward mid-stream +it was no doubt understood that these orders were given only for +"style," and were not intended to mean anything in particular. + +"What sail's she carrying?" + +"Courses, tops'ls, and flying-jib, sir." + +"Send the r'yals up! Lay out aloft, there, half a dozen of ye +--foretopmaststuns'l! Lively, now!" + +"Aye-aye, sir!" + +"Shake out that maintogalans'l! Sheets and braces! NOW my hearties!" + +"Aye-aye, sir!" + +"Hellum-a-lee--hard a port! Stand by to meet her when she comes! Port, +port! NOW, men! With a will! Stead-y-y-y!" + +"Steady it is, sir!" + +The raft drew beyond the middle of the river; the boys pointed her +head right, and then lay on their oars. The river was not high, so +there was not more than a two or three mile current. Hardly a word was +said during the next three-quarters of an hour. Now the raft was +passing before the distant town. Two or three glimmering lights showed +where it lay, peacefully sleeping, beyond the vague vast sweep of +star-gemmed water, unconscious of the tremendous event that was happening. +The Black Avenger stood still with folded arms, "looking his last" upon +the scene of his former joys and his later sufferings, and wishing +"she" could see him now, abroad on the wild sea, facing peril and death +with dauntless heart, going to his doom with a grim smile on his lips. +It was but a small strain on his imagination to remove Jackson's Island +beyond eyeshot of the village, and so he "looked his last" with a +broken and satisfied heart. The other pirates were looking their last, +too; and they all looked so long that they came near letting the +current drift them out of the range of the island. But they discovered +the danger in time, and made shift to avert it. About two o'clock in +the morning the raft grounded on the bar two hundred yards above the +head of the island, and they waded back and forth until they had landed +their freight. Part of the little raft's belongings consisted of an old +sail, and this they spread over a nook in the bushes for a tent to +shelter their provisions; but they themselves would sleep in the open +air in good weather, as became outlaws. + +They built a fire against the side of a great log twenty or thirty +steps within the sombre depths of the forest, and then cooked some +bacon in the frying-pan for supper, and used up half of the corn "pone" +stock they had brought. 
It seemed glorious sport to be feasting in that +wild, free way in the virgin forest of an unexplored and uninhabited +island, far from the haunts of men, and they said they never would +return to civilization. The climbing fire lit up their faces and threw +its ruddy glare upon the pillared tree-trunks of their forest temple, +and upon the varnished foliage and festooning vines. + +When the last crisp slice of bacon was gone, and the last allowance of +corn pone devoured, the boys stretched themselves out on the grass, +filled with contentment. They could have found a cooler place, but they +would not deny themselves such a romantic feature as the roasting +camp-fire. + +"AIN'T it gay?" said Joe. + +"It's NUTS!" said Tom. "What would the boys say if they could see us?" + +"Say? Well, they'd just die to be here--hey, Hucky!" + +"I reckon so," said Huckleberry; "anyways, I'm suited. I don't want +nothing better'n this. I don't ever get enough to eat, gen'ally--and +here they can't come and pick at a feller and bullyrag him so." + +"It's just the life for me," said Tom. "You don't have to get up, +mornings, and you don't have to go to school, and wash, and all that +blame foolishness. You see a pirate don't have to do ANYTHING, Joe, +when he's ashore, but a hermit HE has to be praying considerable, and +then he don't have any fun, anyway, all by himself that way." + +"Oh yes, that's so," said Joe, "but I hadn't thought much about it, +you know. I'd a good deal rather be a pirate, now that I've tried it." + +"You see," said Tom, "people don't go much on hermits, nowadays, like +they used to in old times, but a pirate's always respected. And a +hermit's got to sleep on the hardest place he can find, and put +sackcloth and ashes on his head, and stand out in the rain, and--" + +"What does he put sackcloth and ashes on his head for?" inquired Huck. + +"I dono. But they've GOT to do it. Hermits always do. You'd have to do +that if you was a hermit." + +"Dern'd if I would," said Huck. + +"Well, what would you do?" + +"I dono. But I wouldn't do that." + +"Why, Huck, you'd HAVE to. How'd you get around it?" + +"Why, I just wouldn't stand it. I'd run away." + +"Run away! Well, you WOULD be a nice old slouch of a hermit. You'd be +a disgrace." + +The Red-Handed made no response, being better employed. He had +finished gouging out a cob, and now he fitted a weed stem to it, loaded +it with tobacco, and was pressing a coal to the charge and blowing a +cloud of fragrant smoke--he was in the full bloom of luxurious +contentment. The other pirates envied him this majestic vice, and +secretly resolved to acquire it shortly. Presently Huck said: + +"What does pirates have to do?" + +Tom said: + +"Oh, they have just a bully time--take ships and burn them, and get +the money and bury it in awful places in their island where there's +ghosts and things to watch it, and kill everybody in the ships--make +'em walk a plank." + +"And they carry the women to the island," said Joe; "they don't kill +the women." + +"No," assented Tom, "they don't kill the women--they're too noble. And +the women's always beautiful, too. + +"And don't they wear the bulliest clothes! Oh no! All gold and silver +and di'monds," said Joe, with enthusiasm. + +"Who?" said Huck. + +"Why, the pirates." + +Huck scanned his own clothing forlornly. + +"I reckon I ain't dressed fitten for a pirate," said he, with a +regretful pathos in his voice; "but I ain't got none but these." 
+ +But the other boys told him the fine clothes would come fast enough, +after they should have begun their adventures. They made him understand +that his poor rags would do to begin with, though it was customary for +wealthy pirates to start with a proper wardrobe. + +Gradually their talk died out and drowsiness began to steal upon the +eyelids of the little waifs. The pipe dropped from the fingers of the +Red-Handed, and he slept the sleep of the conscience-free and the +weary. The Terror of the Seas and the Black Avenger of the Spanish Main +had more difficulty in getting to sleep. They said their prayers +inwardly, and lying down, since there was nobody there with authority +to make them kneel and recite aloud; in truth, they had a mind not to +say them at all, but they were afraid to proceed to such lengths as +that, lest they might call down a sudden and special thunderbolt from +heaven. Then at once they reached and hovered upon the imminent verge +of sleep--but an intruder came, now, that would not "down." It was +conscience. They began to feel a vague fear that they had been doing +wrong to run away; and next they thought of the stolen meat, and then +the real torture came. They tried to argue it away by reminding +conscience that they had purloined sweetmeats and apples scores of +times; but conscience was not to be appeased by such thin +plausibilities; it seemed to them, in the end, that there was no +getting around the stubborn fact that taking sweetmeats was only +"hooking," while taking bacon and hams and such valuables was plain +simple stealing--and there was a command against that in the Bible. So +they inwardly resolved that so long as they remained in the business, +their piracies should not again be sullied with the crime of stealing. +Then conscience granted a truce, and these curiously inconsistent +pirates fell peacefully to sleep. + + + +CHAPTER XIV + +WHEN Tom awoke in the morning, he wondered where he was. He sat up and +rubbed his eyes and looked around. Then he comprehended. It was the +cool gray dawn, and there was a delicious sense of repose and peace in +the deep pervading calm and silence of the woods. Not a leaf stirred; +not a sound obtruded upon great Nature's meditation. Beaded dewdrops +stood upon the leaves and grasses. A white layer of ashes covered the +fire, and a thin blue breath of smoke rose straight into the air. Joe +and Huck still slept. + +Now, far away in the woods a bird called; another answered; presently +the hammering of a woodpecker was heard. Gradually the cool dim gray of +the morning whitened, and as gradually sounds multiplied and life +manifested itself. The marvel of Nature shaking off sleep and going to +work unfolded itself to the musing boy. A little green worm came +crawling over a dewy leaf, lifting two-thirds of his body into the air +from time to time and "sniffing around," then proceeding again--for he +was measuring, Tom said; and when the worm approached him, of its own +accord, he sat as still as a stone, with his hopes rising and falling, +by turns, as the creature still came toward him or seemed inclined to +go elsewhere; and when at last it considered a painful moment with its +curved body in the air and then came decisively down upon Tom's leg and +began a journey over him, his whole heart was glad--for that meant that +he was going to have a new suit of clothes--without the shadow of a +doubt a gaudy piratical uniform. 
Now a procession of ants appeared, +from nowhere in particular, and went about their labors; one struggled +manfully by with a dead spider five times as big as itself in its arms, +and lugged it straight up a tree-trunk. A brown spotted lady-bug +climbed the dizzy height of a grass blade, and Tom bent down close to +it and said, "Lady-bug, lady-bug, fly away home, your house is on fire, +your children's alone," and she took wing and went off to see about it +--which did not surprise the boy, for he knew of old that this insect was +credulous about conflagrations, and he had practised upon its +simplicity more than once. A tumblebug came next, heaving sturdily at +its ball, and Tom touched the creature, to see it shut its legs against +its body and pretend to be dead. The birds were fairly rioting by this +time. A catbird, the Northern mocker, lit in a tree over Tom's head, +and trilled out her imitations of her neighbors in a rapture of +enjoyment; then a shrill jay swept down, a flash of blue flame, and +stopped on a twig almost within the boy's reach, cocked his head to one +side and eyed the strangers with a consuming curiosity; a gray squirrel +and a big fellow of the "fox" kind came skurrying along, sitting up at +intervals to inspect and chatter at the boys, for the wild things had +probably never seen a human being before and scarcely knew whether to +be afraid or not. All Nature was wide awake and stirring, now; long +lances of sunlight pierced down through the dense foliage far and near, +and a few butterflies came fluttering upon the scene. + +Tom stirred up the other pirates and they all clattered away with a +shout, and in a minute or two were stripped and chasing after and +tumbling over each other in the shallow limpid water of the white +sandbar. They felt no longing for the little village sleeping in the +distance beyond the majestic waste of water. A vagrant current or a +slight rise in the river had carried off their raft, but this only +gratified them, since its going was something like burning the bridge +between them and civilization. + +They came back to camp wonderfully refreshed, glad-hearted, and +ravenous; and they soon had the camp-fire blazing up again. Huck found +a spring of clear cold water close by, and the boys made cups of broad +oak or hickory leaves, and felt that water, sweetened with such a +wildwood charm as that, would be a good enough substitute for coffee. +While Joe was slicing bacon for breakfast, Tom and Huck asked him to +hold on a minute; they stepped to a promising nook in the river-bank +and threw in their lines; almost immediately they had reward. Joe had +not had time to get impatient before they were back again with some +handsome bass, a couple of sun-perch and a small catfish--provisions +enough for quite a family. They fried the fish with the bacon, and were +astonished; for no fish had ever seemed so delicious before. They did +not know that the quicker a fresh-water fish is on the fire after he is +caught the better he is; and they reflected little upon what a sauce +open-air sleeping, open-air exercise, bathing, and a large ingredient +of hunger make, too. + +They lay around in the shade, after breakfast, while Huck had a smoke, +and then went off through the woods on an exploring expedition. They +tramped gayly along, over decaying logs, through tangled underbrush, +among solemn monarchs of the forest, hung from their crowns to the +ground with a drooping regalia of grape-vines. 
Now and then they came +upon snug nooks carpeted with grass and jeweled with flowers. + +They found plenty of things to be delighted with, but nothing to be +astonished at. They discovered that the island was about three miles +long and a quarter of a mile wide, and that the shore it lay closest to +was only separated from it by a narrow channel hardly two hundred yards +wide. They took a swim about every hour, so it was close upon the +middle of the afternoon when they got back to camp. They were too +hungry to stop to fish, but they fared sumptuously upon cold ham, and +then threw themselves down in the shade to talk. But the talk soon +began to drag, and then died. The stillness, the solemnity that brooded +in the woods, and the sense of loneliness, began to tell upon the +spirits of the boys. They fell to thinking. A sort of undefined longing +crept upon them. This took dim shape, presently--it was budding +homesickness. Even Finn the Red-Handed was dreaming of his doorsteps +and empty hogsheads. But they were all ashamed of their weakness, and +none was brave enough to speak his thought. + +For some time, now, the boys had been dully conscious of a peculiar +sound in the distance, just as one sometimes is of the ticking of a +clock which he takes no distinct note of. But now this mysterious sound +became more pronounced, and forced a recognition. The boys started, +glanced at each other, and then each assumed a listening attitude. +There was a long silence, profound and unbroken; then a deep, sullen +boom came floating down out of the distance. + +"What is it!" exclaimed Joe, under his breath. + +"I wonder," said Tom in a whisper. + +"'Tain't thunder," said Huckleberry, in an awed tone, "becuz thunder--" + +"Hark!" said Tom. "Listen--don't talk." + +They waited a time that seemed an age, and then the same muffled boom +troubled the solemn hush. + +"Let's go and see." + +They sprang to their feet and hurried to the shore toward the town. +They parted the bushes on the bank and peered out over the water. The +little steam ferryboat was about a mile below the village, drifting +with the current. Her broad deck seemed crowded with people. There were +a great many skiffs rowing about or floating with the stream in the +neighborhood of the ferryboat, but the boys could not determine what +the men in them were doing. Presently a great jet of white smoke burst +from the ferryboat's side, and as it expanded and rose in a lazy cloud, +that same dull throb of sound was borne to the listeners again. + +"I know now!" exclaimed Tom; "somebody's drownded!" + +"That's it!" said Huck; "they done that last summer, when Bill Turner +got drownded; they shoot a cannon over the water, and that makes him +come up to the top. Yes, and they take loaves of bread and put +quicksilver in 'em and set 'em afloat, and wherever there's anybody +that's drownded, they'll float right there and stop." + +"Yes, I've heard about that," said Joe. "I wonder what makes the bread +do that." + +"Oh, it ain't the bread, so much," said Tom; "I reckon it's mostly +what they SAY over it before they start it out." + +"But they don't say anything over it," said Huck. "I've seen 'em and +they don't." + +"Well, that's funny," said Tom. "But maybe they say it to themselves. +Of COURSE they do. Anybody might know that." 
+ +The other boys agreed that there was reason in what Tom said, because +an ignorant lump of bread, uninstructed by an incantation, could not be +expected to act very intelligently when set upon an errand of such +gravity. + +"By jings, I wish I was over there, now," said Joe. + +"I do too" said Huck "I'd give heaps to know who it is." + +The boys still listened and watched. Presently a revealing thought +flashed through Tom's mind, and he exclaimed: + +"Boys, I know who's drownded--it's us!" + +They felt like heroes in an instant. Here was a gorgeous triumph; they +were missed; they were mourned; hearts were breaking on their account; +tears were being shed; accusing memories of unkindness to these poor +lost lads were rising up, and unavailing regrets and remorse were being +indulged; and best of all, the departed were the talk of the whole +town, and the envy of all the boys, as far as this dazzling notoriety +was concerned. This was fine. It was worth while to be a pirate, after +all. + +As twilight drew on, the ferryboat went back to her accustomed +business and the skiffs disappeared. The pirates returned to camp. They +were jubilant with vanity over their new grandeur and the illustrious +trouble they were making. They caught fish, cooked supper and ate it, +and then fell to guessing at what the village was thinking and saying +about them; and the pictures they drew of the public distress on their +account were gratifying to look upon--from their point of view. But +when the shadows of night closed them in, they gradually ceased to +talk, and sat gazing into the fire, with their minds evidently +wandering elsewhere. The excitement was gone, now, and Tom and Joe +could not keep back thoughts of certain persons at home who were not +enjoying this fine frolic as much as they were. Misgivings came; they +grew troubled and unhappy; a sigh or two escaped, unawares. By and by +Joe timidly ventured upon a roundabout "feeler" as to how the others +might look upon a return to civilization--not right now, but-- + +Tom withered him with derision! Huck, being uncommitted as yet, joined +in with Tom, and the waverer quickly "explained," and was glad to get +out of the scrape with as little taint of chicken-hearted homesickness +clinging to his garments as he could. Mutiny was effectually laid to +rest for the moment. + +As the night deepened, Huck began to nod, and presently to snore. Joe +followed next. Tom lay upon his elbow motionless, for some time, +watching the two intently. At last he got up cautiously, on his knees, +and went searching among the grass and the flickering reflections flung +by the camp-fire. He picked up and inspected several large +semi-cylinders of the thin white bark of a sycamore, and finally chose +two which seemed to suit him. Then he knelt by the fire and painfully +wrote something upon each of these with his "red keel"; one he rolled up +and put in his jacket pocket, and the other he put in Joe's hat and +removed it to a little distance from the owner. And he also put into the +hat certain schoolboy treasures of almost inestimable value--among them +a lump of chalk, an India-rubber ball, three fishhooks, and one of that +kind of marbles known as a "sure 'nough crystal." Then he tiptoed his +way cautiously among the trees till he felt that he was out of hearing, +and straightway broke into a keen run in the direction of the sandbar. + + + +CHAPTER XV + +A FEW minutes later Tom was in the shoal water of the bar, wading +toward the Illinois shore. 
Before the depth reached his middle he was +half-way over; the current would permit no more wading, now, so he +struck out confidently to swim the remaining hundred yards. He swam +quartering upstream, but still was swept downward rather faster than he +had expected. However, he reached the shore finally, and drifted along +till he found a low place and drew himself out. He put his hand on his +jacket pocket, found his piece of bark safe, and then struck through +the woods, following the shore, with streaming garments. Shortly before +ten o'clock he came out into an open place opposite the village, and +saw the ferryboat lying in the shadow of the trees and the high bank. +Everything was quiet under the blinking stars. He crept down the bank, +watching with all his eyes, slipped into the water, swam three or four +strokes and climbed into the skiff that did "yawl" duty at the boat's +stern. He laid himself down under the thwarts and waited, panting. + +Presently the cracked bell tapped and a voice gave the order to "cast +off." A minute or two later the skiff's head was standing high up, +against the boat's swell, and the voyage was begun. Tom felt happy in +his success, for he knew it was the boat's last trip for the night. At +the end of a long twelve or fifteen minutes the wheels stopped, and Tom +slipped overboard and swam ashore in the dusk, landing fifty yards +downstream, out of danger of possible stragglers. + +He flew along unfrequented alleys, and shortly found himself at his +aunt's back fence. He climbed over, approached the "ell," and looked in +at the sitting-room window, for a light was burning there. There sat +Aunt Polly, Sid, Mary, and Joe Harper's mother, grouped together, +talking. They were by the bed, and the bed was between them and the +door. Tom went to the door and began to softly lift the latch; then he +pressed gently and the door yielded a crack; he continued pushing +cautiously, and quaking every time it creaked, till he judged he might +squeeze through on his knees; so he put his head through and began, +warily. + +"What makes the candle blow so?" said Aunt Polly. Tom hurried up. +"Why, that door's open, I believe. Why, of course it is. No end of +strange things now. Go 'long and shut it, Sid." + +Tom disappeared under the bed just in time. He lay and "breathed" +himself for a time, and then crept to where he could almost touch his +aunt's foot. + +"But as I was saying," said Aunt Polly, "he warn't BAD, so to say +--only mischEEvous. Only just giddy, and harum-scarum, you know. He +warn't any more responsible than a colt. HE never meant any harm, and +he was the best-hearted boy that ever was"--and she began to cry. + +"It was just so with my Joe--always full of his devilment, and up to +every kind of mischief, but he was just as unselfish and kind as he +could be--and laws bless me, to think I went and whipped him for taking +that cream, never once recollecting that I throwed it out myself +because it was sour, and I never to see him again in this world, never, +never, never, poor abused boy!" And Mrs. Harper sobbed as if her heart +would break. + +"I hope Tom's better off where he is," said Sid, "but if he'd been +better in some ways--" + +"SID!" Tom felt the glare of the old lady's eye, though he could not +see it. "Not a word against my Tom, now that he's gone! God'll take +care of HIM--never you trouble YOURself, sir! Oh, Mrs. Harper, I don't +know how to give him up! I don't know how to give him up! 
He was such a +comfort to me, although he tormented my old heart out of me, 'most." + +"The Lord giveth and the Lord hath taken away--Blessed be the name of +the Lord! But it's so hard--Oh, it's so hard! Only last Saturday my +Joe busted a firecracker right under my nose and I knocked him +sprawling. Little did I know then, how soon--Oh, if it was to do over +again I'd hug him and bless him for it." + +"Yes, yes, yes, I know just how you feel, Mrs. Harper, I know just +exactly how you feel. No longer ago than yesterday noon, my Tom took +and filled the cat full of Pain-killer, and I did think the cretur +would tear the house down. And God forgive me, I cracked Tom's head +with my thimble, poor boy, poor dead boy. But he's out of all his +troubles now. And the last words I ever heard him say was to reproach--" + +But this memory was too much for the old lady, and she broke entirely +down. Tom was snuffling, now, himself--and more in pity of himself than +anybody else. He could hear Mary crying, and putting in a kindly word +for him from time to time. He began to have a nobler opinion of himself +than ever before. Still, he was sufficiently touched by his aunt's +grief to long to rush out from under the bed and overwhelm her with +joy--and the theatrical gorgeousness of the thing appealed strongly to +his nature, too, but he resisted and lay still. + +He went on listening, and gathered by odds and ends that it was +conjectured at first that the boys had got drowned while taking a swim; +then the small raft had been missed; next, certain boys said the +missing lads had promised that the village should "hear something" +soon; the wise-heads had "put this and that together" and decided that +the lads had gone off on that raft and would turn up at the next town +below, presently; but toward noon the raft had been found, lodged +against the Missouri shore some five or six miles below the village +--and then hope perished; they must be drowned, else hunger would have +driven them home by nightfall if not sooner. It was believed that the +search for the bodies had been a fruitless effort merely because the +drowning must have occurred in mid-channel, since the boys, being good +swimmers, would otherwise have escaped to shore. This was Wednesday +night. If the bodies continued missing until Sunday, all hope would be +given over, and the funerals would be preached on that morning. Tom +shuddered. + +Mrs. Harper gave a sobbing good-night and turned to go. Then with a +mutual impulse the two bereaved women flung themselves into each +other's arms and had a good, consoling cry, and then parted. Aunt Polly +was tender far beyond her wont, in her good-night to Sid and Mary. Sid +snuffled a bit and Mary went off crying with all her heart. + +Aunt Polly knelt down and prayed for Tom so touchingly, so +appealingly, and with such measureless love in her words and her old +trembling voice, that he was weltering in tears again, long before she +was through. + +He had to keep still long after she went to bed, for she kept making +broken-hearted ejaculations from time to time, tossing unrestfully, and +turning over. But at last she was still, only moaning a little in her +sleep. Now the boy stole out, rose gradually by the bedside, shaded the +candle-light with his hand, and stood regarding her. His heart was full +of pity for her. He took out his sycamore scroll and placed it by the +candle. But something occurred to him, and he lingered considering. 
His +face lighted with a happy solution of his thought; he put the bark +hastily in his pocket. Then he bent over and kissed the faded lips, and +straightway made his stealthy exit, latching the door behind him. + +He threaded his way back to the ferry landing, found nobody at large +there, and walked boldly on board the boat, for he knew she was +tenantless except that there was a watchman, who always turned in and +slept like a graven image. He untied the skiff at the stern, slipped +into it, and was soon rowing cautiously upstream. When he had pulled a +mile above the village, he started quartering across and bent himself +stoutly to his work. He hit the landing on the other side neatly, for +this was a familiar bit of work to him. He was moved to capture the +skiff, arguing that it might be considered a ship and therefore +legitimate prey for a pirate, but he knew a thorough search would be +made for it and that might end in revelations. So he stepped ashore and +entered the woods. + +He sat down and took a long rest, torturing himself meanwhile to keep +awake, and then started warily down the home-stretch. The night was far +spent. It was broad daylight before he found himself fairly abreast the +island bar. He rested again until the sun was well up and gilding the +great river with its splendor, and then he plunged into the stream. A +little later he paused, dripping, upon the threshold of the camp, and +heard Joe say: + +"No, Tom's true-blue, Huck, and he'll come back. He won't desert. He +knows that would be a disgrace to a pirate, and Tom's too proud for +that sort of thing. He's up to something or other. Now I wonder what?" + +"Well, the things is ours, anyway, ain't they?" + +"Pretty near, but not yet, Huck. The writing says they are if he ain't +back here to breakfast." + +"Which he is!" exclaimed Tom, with fine dramatic effect, stepping +grandly into camp. + +A sumptuous breakfast of bacon and fish was shortly provided, and as +the boys set to work upon it, Tom recounted (and adorned) his +adventures. They were a vain and boastful company of heroes when the +tale was done. Then Tom hid himself away in a shady nook to sleep till +noon, and the other pirates got ready to fish and explore. + + + +CHAPTER XVI + +AFTER dinner all the gang turned out to hunt for turtle eggs on the +bar. They went about poking sticks into the sand, and when they found a +soft place they went down on their knees and dug with their hands. +Sometimes they would take fifty or sixty eggs out of one hole. They +were perfectly round white things a trifle smaller than an English +walnut. They had a famous fried-egg feast that night, and another on +Friday morning. + +After breakfast they went whooping and prancing out on the bar, and +chased each other round and round, shedding clothes as they went, until +they were naked, and then continued the frolic far away up the shoal +water of the bar, against the stiff current, which latter tripped their +legs from under them from time to time and greatly increased the fun. +And now and then they stooped in a group and splashed water in each +other's faces with their palms, gradually approaching each other, with +averted faces to avoid the strangling sprays, and finally gripping and +struggling till the best man ducked his neighbor, and then they all +went under in a tangle of white legs and arms and came up blowing, +sputtering, laughing, and gasping for breath at one and the same time. 
+ +When they were well exhausted, they would run out and sprawl on the +dry, hot sand, and lie there and cover themselves up with it, and by +and by break for the water again and go through the original +performance once more. Finally it occurred to them that their naked +skin represented flesh-colored "tights" very fairly; so they drew a +ring in the sand and had a circus--with three clowns in it, for none +would yield this proudest post to his neighbor. + +Next they got their marbles and played "knucks" and "ring-taw" and +"keeps" till that amusement grew stale. Then Joe and Huck had another +swim, but Tom would not venture, because he found that in kicking off +his trousers he had kicked his string of rattlesnake rattles off his +ankle, and he wondered how he had escaped cramp so long without the +protection of this mysterious charm. He did not venture again until he +had found it, and by that time the other boys were tired and ready to +rest. They gradually wandered apart, dropped into the "dumps," and fell +to gazing longingly across the wide river to where the village lay +drowsing in the sun. Tom found himself writing "BECKY" in the sand with +his big toe; he scratched it out, and was angry with himself for his +weakness. But he wrote it again, nevertheless; he could not help it. He +erased it once more and then took himself out of temptation by driving +the other boys together and joining them. + +But Joe's spirits had gone down almost beyond resurrection. He was so +homesick that he could hardly endure the misery of it. The tears lay +very near the surface. Huck was melancholy, too. Tom was downhearted, +but tried hard not to show it. He had a secret which he was not ready +to tell, yet, but if this mutinous depression was not broken up soon, +he would have to bring it out. He said, with a great show of +cheerfulness: + +"I bet there's been pirates on this island before, boys. We'll explore +it again. They've hid treasures here somewhere. How'd you feel to light +on a rotten chest full of gold and silver--hey?" + +But it roused only faint enthusiasm, which faded out, with no reply. +Tom tried one or two other seductions; but they failed, too. It was +discouraging work. Joe sat poking up the sand with a stick and looking +very gloomy. Finally he said: + +"Oh, boys, let's give it up. I want to go home. It's so lonesome." + +"Oh no, Joe, you'll feel better by and by," said Tom. "Just think of +the fishing that's here." + +"I don't care for fishing. I want to go home." + +"But, Joe, there ain't such another swimming-place anywhere." + +"Swimming's no good. I don't seem to care for it, somehow, when there +ain't anybody to say I sha'n't go in. I mean to go home." + +"Oh, shucks! Baby! You want to see your mother, I reckon." + +"Yes, I DO want to see my mother--and you would, too, if you had one. +I ain't any more baby than you are." And Joe snuffled a little. + +"Well, we'll let the cry-baby go home to his mother, won't we, Huck? +Poor thing--does it want to see its mother? And so it shall. You like +it here, don't you, Huck? We'll stay, won't we?" + +Huck said, "Y-e-s"--without any heart in it. + +"I'll never speak to you again as long as I live," said Joe, rising. +"There now!" And he moved moodily away and began to dress himself. + +"Who cares!" said Tom. "Nobody wants you to. Go 'long home and get +laughed at. Oh, you're a nice pirate. Huck and me ain't cry-babies. +We'll stay, won't we, Huck? Let him go if he wants to. I reckon we can +get along without him, per'aps." 
+ +But Tom was uneasy, nevertheless, and was alarmed to see Joe go +sullenly on with his dressing. And then it was discomforting to see +Huck eying Joe's preparations so wistfully, and keeping up such an +ominous silence. Presently, without a parting word, Joe began to wade +off toward the Illinois shore. Tom's heart began to sink. He glanced at +Huck. Huck could not bear the look, and dropped his eyes. Then he said: + +"I want to go, too, Tom. It was getting so lonesome anyway, and now +it'll be worse. Let's us go, too, Tom." + +"I won't! You can all go, if you want to. I mean to stay." + +"Tom, I better go." + +"Well, go 'long--who's hendering you." + +Huck began to pick up his scattered clothes. He said: + +"Tom, I wisht you'd come, too. Now you think it over. We'll wait for +you when we get to shore." + +"Well, you'll wait a blame long time, that's all." + +Huck started sorrowfully away, and Tom stood looking after him, with a +strong desire tugging at his heart to yield his pride and go along too. +He hoped the boys would stop, but they still waded slowly on. It +suddenly dawned on Tom that it was become very lonely and still. He +made one final struggle with his pride, and then darted after his +comrades, yelling: + +"Wait! Wait! I want to tell you something!" + +They presently stopped and turned around. When he got to where they +were, he began unfolding his secret, and they listened moodily till at +last they saw the "point" he was driving at, and then they set up a +war-whoop of applause and said it was "splendid!" and said if he had +told them at first, they wouldn't have started away. He made a plausible +excuse; but his real reason had been the fear that not even the secret +would keep them with him any very great length of time, and so he had +meant to hold it in reserve as a last seduction. + +The lads came gayly back and went at their sports again with a will, +chattering all the time about Tom's stupendous plan and admiring the +genius of it. After a dainty egg and fish dinner, Tom said he wanted to +learn to smoke, now. Joe caught at the idea and said he would like to +try, too. So Huck made pipes and filled them. These novices had never +smoked anything before but cigars made of grape-vine, and they "bit" +the tongue, and were not considered manly anyway. + +Now they stretched themselves out on their elbows and began to puff, +charily, and with slender confidence. The smoke had an unpleasant +taste, and they gagged a little, but Tom said: + +"Why, it's just as easy! If I'd a knowed this was all, I'd a learnt +long ago." + +"So would I," said Joe. "It's just nothing." + +"Why, many a time I've looked at people smoking, and thought well I +wish I could do that; but I never thought I could," said Tom. + +"That's just the way with me, hain't it, Huck? You've heard me talk +just that way--haven't you, Huck? I'll leave it to Huck if I haven't." + +"Yes--heaps of times," said Huck. + +"Well, I have too," said Tom; "oh, hundreds of times. Once down by the +slaughter-house. Don't you remember, Huck? Bob Tanner was there, and +Johnny Miller, and Jeff Thatcher, when I said it. Don't you remember, +Huck, 'bout me saying that?" + +"Yes, that's so," said Huck. "That was the day after I lost a white +alley. No, 'twas the day before." + +"There--I told you so," said Tom. "Huck recollects it." + +"I bleeve I could smoke this pipe all day," said Joe. "I don't feel +sick." + +"Neither do I," said Tom. "I could smoke it all day. But I bet you +Jeff Thatcher couldn't." + +"Jeff Thatcher! 
Why, he'd keel over just with two draws. Just let him +try it once. HE'D see!" + +"I bet he would. And Johnny Miller--I wish could see Johnny Miller +tackle it once." + +"Oh, don't I!" said Joe. "Why, I bet you Johnny Miller couldn't any +more do this than nothing. Just one little snifter would fetch HIM." + +"'Deed it would, Joe. Say--I wish the boys could see us now." + +"So do I." + +"Say--boys, don't say anything about it, and some time when they're +around, I'll come up to you and say, 'Joe, got a pipe? I want a smoke.' +And you'll say, kind of careless like, as if it warn't anything, you'll +say, 'Yes, I got my OLD pipe, and another one, but my tobacker ain't +very good.' And I'll say, 'Oh, that's all right, if it's STRONG +enough.' And then you'll out with the pipes, and we'll light up just as +ca'm, and then just see 'em look!" + +"By jings, that'll be gay, Tom! I wish it was NOW!" + +"So do I! And when we tell 'em we learned when we was off pirating, +won't they wish they'd been along?" + +"Oh, I reckon not! I'll just BET they will!" + +So the talk ran on. But presently it began to flag a trifle, and grow +disjointed. The silences widened; the expectoration marvellously +increased. Every pore inside the boys' cheeks became a spouting +fountain; they could scarcely bail out the cellars under their tongues +fast enough to prevent an inundation; little overflowings down their +throats occurred in spite of all they could do, and sudden retchings +followed every time. Both boys were looking very pale and miserable, +now. Joe's pipe dropped from his nerveless fingers. Tom's followed. +Both fountains were going furiously and both pumps bailing with might +and main. Joe said feebly: + +"I've lost my knife. I reckon I better go and find it." + +Tom said, with quivering lips and halting utterance: + +"I'll help you. You go over that way and I'll hunt around by the +spring. No, you needn't come, Huck--we can find it." + +So Huck sat down again, and waited an hour. Then he found it lonesome, +and went to find his comrades. They were wide apart in the woods, both +very pale, both fast asleep. But something informed him that if they +had had any trouble they had got rid of it. + +They were not talkative at supper that night. They had a humble look, +and when Huck prepared his pipe after the meal and was going to prepare +theirs, they said no, they were not feeling very well--something they +ate at dinner had disagreed with them. + +About midnight Joe awoke, and called the boys. There was a brooding +oppressiveness in the air that seemed to bode something. The boys +huddled themselves together and sought the friendly companionship of +the fire, though the dull dead heat of the breathless atmosphere was +stifling. They sat still, intent and waiting. The solemn hush +continued. Beyond the light of the fire everything was swallowed up in +the blackness of darkness. Presently there came a quivering glow that +vaguely revealed the foliage for a moment and then vanished. By and by +another came, a little stronger. Then another. Then a faint moan came +sighing through the branches of the forest and the boys felt a fleeting +breath upon their cheeks, and shuddered with the fancy that the Spirit +of the Night had gone by. There was a pause. Now a weird flash turned +night into day and showed every little grass-blade, separate and +distinct, that grew about their feet. And it showed three white, +startled faces, too. 
A deep peal of thunder went rolling and tumbling +down the heavens and lost itself in sullen rumblings in the distance. A +sweep of chilly air passed by, rustling all the leaves and snowing the +flaky ashes broadcast about the fire. Another fierce glare lit up the +forest and an instant crash followed that seemed to rend the tree-tops +right over the boys' heads. They clung together in terror, in the thick +gloom that followed. A few big rain-drops fell pattering upon the +leaves. + +"Quick! boys, go for the tent!" exclaimed Tom. + +They sprang away, stumbling over roots and among vines in the dark, no +two plunging in the same direction. A furious blast roared through the +trees, making everything sing as it went. One blinding flash after +another came, and peal on peal of deafening thunder. And now a +drenching rain poured down and the rising hurricane drove it in sheets +along the ground. The boys cried out to each other, but the roaring +wind and the booming thunder-blasts drowned their voices utterly. +However, one by one they straggled in at last and took shelter under +the tent, cold, scared, and streaming with water; but to have company +in misery seemed something to be grateful for. They could not talk, the +old sail flapped so furiously, even if the other noises would have +allowed them. The tempest rose higher and higher, and presently the +sail tore loose from its fastenings and went winging away on the blast. +The boys seized each others' hands and fled, with many tumblings and +bruises, to the shelter of a great oak that stood upon the river-bank. +Now the battle was at its highest. Under the ceaseless conflagration of +lightning that flamed in the skies, everything below stood out in +clean-cut and shadowless distinctness: the bending trees, the billowy +river, white with foam, the driving spray of spume-flakes, the dim +outlines of the high bluffs on the other side, glimpsed through the +drifting cloud-rack and the slanting veil of rain. Every little while +some giant tree yielded the fight and fell crashing through the younger +growth; and the unflagging thunder-peals came now in ear-splitting +explosive bursts, keen and sharp, and unspeakably appalling. The storm +culminated in one matchless effort that seemed likely to tear the island +to pieces, burn it up, drown it to the tree-tops, blow it away, and +deafen every creature in it, all at one and the same moment. It was a +wild night for homeless young heads to be out in. + +But at last the battle was done, and the forces retired with weaker +and weaker threatenings and grumblings, and peace resumed her sway. The +boys went back to camp, a good deal awed; but they found there was +still something to be thankful for, because the great sycamore, the +shelter of their beds, was a ruin, now, blasted by the lightnings, and +they were not under it when the catastrophe happened. + +Everything in camp was drenched, the camp-fire as well; for they were +but heedless lads, like their generation, and had made no provision +against rain. Here was matter for dismay, for they were soaked through +and chilled. They were eloquent in their distress; but they presently +discovered that the fire had eaten so far up under the great log it had +been built against (where it curved upward and separated itself from +the ground), that a handbreadth or so of it had escaped wetting; so +they patiently wrought until, with shreds and bark gathered from the +under sides of sheltered logs, they coaxed the fire to burn again. 
Then +they piled on great dead boughs till they had a roaring furnace, and +were glad-hearted once more. They dried their boiled ham and had a +feast, and after that they sat by the fire and expanded and glorified +their midnight adventure until morning, for there was not a dry spot to +sleep on, anywhere around. + +As the sun began to steal in upon the boys, drowsiness came over them, +and they went out on the sandbar and lay down to sleep. They got +scorched out by and by, and drearily set about getting breakfast. After +the meal they felt rusty, and stiff-jointed, and a little homesick once +more. Tom saw the signs, and fell to cheering up the pirates as well as +he could. But they cared nothing for marbles, or circus, or swimming, +or anything. He reminded them of the imposing secret, and raised a ray +of cheer. While it lasted, he got them interested in a new device. This +was to knock off being pirates, for a while, and be Indians for a +change. They were attracted by this idea; so it was not long before +they were stripped, and striped from head to heel with black mud, like +so many zebras--all of them chiefs, of course--and then they went +tearing through the woods to attack an English settlement. + +By and by they separated into three hostile tribes, and darted upon +each other from ambush with dreadful war-whoops, and killed and scalped +each other by thousands. It was a gory day. Consequently it was an +extremely satisfactory one. + +They assembled in camp toward supper-time, hungry and happy; but now a +difficulty arose--hostile Indians could not break the bread of +hospitality together without first making peace, and this was a simple +impossibility without smoking a pipe of peace. There was no other +process that ever they had heard of. Two of the savages almost wished +they had remained pirates. However, there was no other way; so with +such show of cheerfulness as they could muster they called for the pipe +and took their whiff as it passed, in due form. + +And behold, they were glad they had gone into savagery, for they had +gained something; they found that they could now smoke a little without +having to go and hunt for a lost knife; they did not get sick enough to +be seriously uncomfortable. They were not likely to fool away this high +promise for lack of effort. No, they practised cautiously, after +supper, with right fair success, and so they spent a jubilant evening. +They were prouder and happier in their new acquirement than they would +have been in the scalping and skinning of the Six Nations. We will +leave them to smoke and chatter and brag, since we have no further use +for them at present. + + + +CHAPTER XVII + +BUT there was no hilarity in the little town that same tranquil +Saturday afternoon. The Harpers, and Aunt Polly's family, were being +put into mourning, with great grief and many tears. An unusual quiet +possessed the village, although it was ordinarily quiet enough, in all +conscience. The villagers conducted their concerns with an absent air, +and talked little; but they sighed often. The Saturday holiday seemed a +burden to the children. They had no heart in their sports, and +gradually gave them up. + +In the afternoon Becky Thatcher found herself moping about the +deserted schoolhouse yard, and feeling very melancholy. But she found +nothing there to comfort her. She soliloquized: + +"Oh, if I only had a brass andiron-knob again! But I haven't got +anything now to remember him by." And she choked back a little sob. 
+ +Presently she stopped, and said to herself: + +"It was right here. Oh, if it was to do over again, I wouldn't say +that--I wouldn't say it for the whole world. But he's gone now; I'll +never, never, never see him any more." + +This thought broke her down, and she wandered away, with tears rolling +down her cheeks. Then quite a group of boys and girls--playmates of +Tom's and Joe's--came by, and stood looking over the paling fence and +talking in reverent tones of how Tom did so-and-so the last time they +saw him, and how Joe said this and that small trifle (pregnant with +awful prophecy, as they could easily see now!)--and each speaker +pointed out the exact spot where the lost lads stood at the time, and +then added something like "and I was a-standing just so--just as I am +now, and as if you was him--I was as close as that--and he smiled, just +this way--and then something seemed to go all over me, like--awful, you +know--and I never thought what it meant, of course, but I can see now!" + +Then there was a dispute about who saw the dead boys last in life, and +many claimed that dismal distinction, and offered evidences, more or +less tampered with by the witness; and when it was ultimately decided +who DID see the departed last, and exchanged the last words with them, +the lucky parties took upon themselves a sort of sacred importance, and +were gaped at and envied by all the rest. One poor chap, who had no +other grandeur to offer, said with tolerably manifest pride in the +remembrance: + +"Well, Tom Sawyer he licked me once." + +But that bid for glory was a failure. Most of the boys could say that, +and so that cheapened the distinction too much. The group loitered +away, still recalling memories of the lost heroes, in awed voices. + +When the Sunday-school hour was finished, the next morning, the bell +began to toll, instead of ringing in the usual way. It was a very still +Sabbath, and the mournful sound seemed in keeping with the musing hush +that lay upon nature. The villagers began to gather, loitering a moment +in the vestibule to converse in whispers about the sad event. But there +was no whispering in the house; only the funereal rustling of dresses +as the women gathered to their seats disturbed the silence there. None +could remember when the little church had been so full before. There +was finally a waiting pause, an expectant dumbness, and then Aunt Polly +entered, followed by Sid and Mary, and they by the Harper family, all +in deep black, and the whole congregation, the old minister as well, +rose reverently and stood until the mourners were seated in the front +pew. There was another communing silence, broken at intervals by +muffled sobs, and then the minister spread his hands abroad and prayed. +A moving hymn was sung, and the text followed: "I am the Resurrection +and the Life." + +As the service proceeded, the clergyman drew such pictures of the +graces, the winning ways, and the rare promise of the lost lads that +every soul there, thinking he recognized these pictures, felt a pang in +remembering that he had persistently blinded himself to them always +before, and had as persistently seen only faults and flaws in the poor +boys. 
The minister related many a touching incident in the lives of the +departed, too, which illustrated their sweet, generous natures, and the +people could easily see, now, how noble and beautiful those episodes +were, and remembered with grief that at the time they occurred they had +seemed rank rascalities, well deserving of the cowhide. The +congregation became more and more moved, as the pathetic tale went on, +till at last the whole company broke down and joined the weeping +mourners in a chorus of anguished sobs, the preacher himself giving way +to his feelings, and crying in the pulpit. + +There was a rustle in the gallery, which nobody noticed; a moment +later the church door creaked; the minister raised his streaming eyes +above his handkerchief, and stood transfixed! First one and then +another pair of eyes followed the minister's, and then almost with one +impulse the congregation rose and stared while the three dead boys came +marching up the aisle, Tom in the lead, Joe next, and Huck, a ruin of +drooping rags, sneaking sheepishly in the rear! They had been hid in +the unused gallery listening to their own funeral sermon! + +Aunt Polly, Mary, and the Harpers threw themselves upon their restored +ones, smothered them with kisses and poured out thanksgivings, while +poor Huck stood abashed and uncomfortable, not knowing exactly what to +do or where to hide from so many unwelcoming eyes. He wavered, and +started to slink away, but Tom seized him and said: + +"Aunt Polly, it ain't fair. Somebody's got to be glad to see Huck." + +"And so they shall. I'm glad to see him, poor motherless thing!" And +the loving attentions Aunt Polly lavished upon him were the one thing +capable of making him more uncomfortable than he was before. + +Suddenly the minister shouted at the top of his voice: "Praise God +from whom all blessings flow--SING!--and put your hearts in it!" + +And they did. Old Hundred swelled up with a triumphant burst, and +while it shook the rafters Tom Sawyer the Pirate looked around upon the +envying juveniles about him and confessed in his heart that this was +the proudest moment of his life. + +As the "sold" congregation trooped out they said they would almost be +willing to be made ridiculous again to hear Old Hundred sung like that +once more. + +Tom got more cuffs and kisses that day--according to Aunt Polly's +varying moods--than he had earned before in a year; and he hardly knew +which expressed the most gratefulness to God and affection for himself. + + + +CHAPTER XVIII + +THAT was Tom's great secret--the scheme to return home with his +brother pirates and attend their own funerals. They had paddled over to +the Missouri shore on a log, at dusk on Saturday, landing five or six +miles below the village; they had slept in the woods at the edge of the +town till nearly daylight, and had then crept through back lanes and +alleys and finished their sleep in the gallery of the church among a +chaos of invalided benches. + +At breakfast, Monday morning, Aunt Polly and Mary were very loving to +Tom, and very attentive to his wants. There was an unusual amount of +talk. In the course of it Aunt Polly said: + +"Well, I don't say it wasn't a fine joke, Tom, to keep everybody +suffering 'most a week so you boys had a good time, but it is a pity +you could be so hard-hearted as to let me suffer so. If you could come +over on a log to go to your funeral, you could have come over and give +me a hint some way that you warn't dead, but only run off." 
+ +"Yes, you could have done that, Tom," said Mary; "and I believe you +would if you had thought of it." + +"Would you, Tom?" said Aunt Polly, her face lighting wistfully. "Say, +now, would you, if you'd thought of it?" + +"I--well, I don't know. 'Twould 'a' spoiled everything." + +"Tom, I hoped you loved me that much," said Aunt Polly, with a grieved +tone that discomforted the boy. "It would have been something if you'd +cared enough to THINK of it, even if you didn't DO it." + +"Now, auntie, that ain't any harm," pleaded Mary; "it's only Tom's +giddy way--he is always in such a rush that he never thinks of +anything." + +"More's the pity. Sid would have thought. And Sid would have come and +DONE it, too. Tom, you'll look back, some day, when it's too late, and +wish you'd cared a little more for me when it would have cost you so +little." + +"Now, auntie, you know I do care for you," said Tom. + +"I'd know it better if you acted more like it." + +"I wish now I'd thought," said Tom, with a repentant tone; "but I +dreamt about you, anyway. That's something, ain't it?" + +"It ain't much--a cat does that much--but it's better than nothing. +What did you dream?" + +"Why, Wednesday night I dreamt that you was sitting over there by the +bed, and Sid was sitting by the woodbox, and Mary next to him." + +"Well, so we did. So we always do. I'm glad your dreams could take +even that much trouble about us." + +"And I dreamt that Joe Harper's mother was here." + +"Why, she was here! Did you dream any more?" + +"Oh, lots. But it's so dim, now." + +"Well, try to recollect--can't you?" + +"Somehow it seems to me that the wind--the wind blowed the--the--" + +"Try harder, Tom! The wind did blow something. Come!" + +Tom pressed his fingers on his forehead an anxious minute, and then +said: + +"I've got it now! I've got it now! It blowed the candle!" + +"Mercy on us! Go on, Tom--go on!" + +"And it seems to me that you said, 'Why, I believe that that door--'" + +"Go ON, Tom!" + +"Just let me study a moment--just a moment. Oh, yes--you said you +believed the door was open." + +"As I'm sitting here, I did! Didn't I, Mary! Go on!" + +"And then--and then--well I won't be certain, but it seems like as if +you made Sid go and--and--" + +"Well? Well? What did I make him do, Tom? What did I make him do?" + +"You made him--you--Oh, you made him shut it." + +"Well, for the land's sake! I never heard the beat of that in all my +days! Don't tell ME there ain't anything in dreams, any more. Sereny +Harper shall know of this before I'm an hour older. I'd like to see her +get around THIS with her rubbage 'bout superstition. Go on, Tom!" + +"Oh, it's all getting just as bright as day, now. Next you said I +warn't BAD, only mischeevous and harum-scarum, and not any more +responsible than--than--I think it was a colt, or something." + +"And so it was! Well, goodness gracious! Go on, Tom!" + +"And then you began to cry." + +"So I did. So I did. Not the first time, neither. And then--" + +"Then Mrs. Harper she began to cry, and said Joe was just the same, +and she wished she hadn't whipped him for taking cream when she'd +throwed it out her own self--" + +"Tom! The sperrit was upon you! You was a prophesying--that's what you +was doing! Land alive, go on, Tom!" + +"Then Sid he said--he said--" + +"I don't think I said anything," said Sid. + +"Yes you did, Sid," said Mary. + +"Shut your heads and let Tom go on! What did he say, Tom?" 
+ +"He said--I THINK he said he hoped I was better off where I was gone +to, but if I'd been better sometimes--" + +"THERE, d'you hear that! It was his very words!" + +"And you shut him up sharp." + +"I lay I did! There must 'a' been an angel there. There WAS an angel +there, somewheres!" + +"And Mrs. Harper told about Joe scaring her with a firecracker, and +you told about Peter and the Painkiller--" + +"Just as true as I live!" + +"And then there was a whole lot of talk 'bout dragging the river for +us, and 'bout having the funeral Sunday, and then you and old Miss +Harper hugged and cried, and she went." + +"It happened just so! It happened just so, as sure as I'm a-sitting in +these very tracks. Tom, you couldn't told it more like if you'd 'a' +seen it! And then what? Go on, Tom!" + +"Then I thought you prayed for me--and I could see you and hear every +word you said. And you went to bed, and I was so sorry that I took and +wrote on a piece of sycamore bark, 'We ain't dead--we are only off +being pirates,' and put it on the table by the candle; and then you +looked so good, laying there asleep, that I thought I went and leaned +over and kissed you on the lips." + +"Did you, Tom, DID you! I just forgive you everything for that!" And +she seized the boy in a crushing embrace that made him feel like the +guiltiest of villains. + +"It was very kind, even though it was only a--dream," Sid soliloquized +just audibly. + +"Shut up, Sid! A body does just the same in a dream as he'd do if he +was awake. Here's a big Milum apple I've been saving for you, Tom, if +you was ever found again--now go 'long to school. I'm thankful to the +good God and Father of us all I've got you back, that's long-suffering +and merciful to them that believe on Him and keep His word, though +goodness knows I'm unworthy of it, but if only the worthy ones got His +blessings and had His hand to help them over the rough places, there's +few enough would smile here or ever enter into His rest when the long +night comes. Go 'long Sid, Mary, Tom--take yourselves off--you've +hendered me long enough." + +The children left for school, and the old lady to call on Mrs. Harper +and vanquish her realism with Tom's marvellous dream. Sid had better +judgment than to utter the thought that was in his mind as he left the +house. It was this: "Pretty thin--as long a dream as that, without any +mistakes in it!" + +What a hero Tom was become, now! He did not go skipping and prancing, +but moved with a dignified swagger as became a pirate who felt that the +public eye was on him. And indeed it was; he tried not to seem to see +the looks or hear the remarks as he passed along, but they were food +and drink to him. Smaller boys than himself flocked at his heels, as +proud to be seen with him, and tolerated by him, as if he had been the +drummer at the head of a procession or the elephant leading a menagerie +into town. Boys of his own size pretended not to know he had been away +at all; but they were consuming with envy, nevertheless. They would +have given anything to have that swarthy suntanned skin of his, and his +glittering notoriety; and Tom would not have parted with either for a +circus. + +At school the children made so much of him and of Joe, and delivered +such eloquent admiration from their eyes, that the two heroes were not +long in becoming insufferably "stuck-up." 
They began to tell their +adventures to hungry listeners--but they only began; it was not a thing +likely to have an end, with imaginations like theirs to furnish +material. And finally, when they got out their pipes and went serenely +puffing around, the very summit of glory was reached. + +Tom decided that he could be independent of Becky Thatcher now. Glory +was sufficient. He would live for glory. Now that he was distinguished, +maybe she would be wanting to "make up." Well, let her--she should see +that he could be as indifferent as some other people. Presently she +arrived. Tom pretended not to see her. He moved away and joined a group +of boys and girls and began to talk. Soon he observed that she was +tripping gayly back and forth with flushed face and dancing eyes, +pretending to be busy chasing schoolmates, and screaming with laughter +when she made a capture; but he noticed that she always made her +captures in his vicinity, and that she seemed to cast a conscious eye +in his direction at such times, too. It gratified all the vicious +vanity that was in him; and so, instead of winning him, it only "set +him up" the more and made him the more diligent to avoid betraying that +he knew she was about. Presently she gave over skylarking, and moved +irresolutely about, sighing once or twice and glancing furtively and +wistfully toward Tom. Then she observed that now Tom was talking more +particularly to Amy Lawrence than to any one else. She felt a sharp +pang and grew disturbed and uneasy at once. She tried to go away, but +her feet were treacherous, and carried her to the group instead. She +said to a girl almost at Tom's elbow--with sham vivacity: + +"Why, Mary Austin! you bad girl, why didn't you come to Sunday-school?" + +"I did come--didn't you see me?" + +"Why, no! Did you? Where did you sit?" + +"I was in Miss Peters' class, where I always go. I saw YOU." + +"Did you? Why, it's funny I didn't see you. I wanted to tell you about +the picnic." + +"Oh, that's jolly. Who's going to give it?" + +"My ma's going to let me have one." + +"Oh, goody; I hope she'll let ME come." + +"Well, she will. The picnic's for me. She'll let anybody come that I +want, and I want you." + +"That's ever so nice. When is it going to be?" + +"By and by. Maybe about vacation." + +"Oh, won't it be fun! You going to have all the girls and boys?" + +"Yes, every one that's friends to me--or wants to be"; and she glanced +ever so furtively at Tom, but he talked right along to Amy Lawrence +about the terrible storm on the island, and how the lightning tore the +great sycamore tree "all to flinders" while he was "standing within +three feet of it." + +"Oh, may I come?" said Grace Miller. + +"Yes." + +"And me?" said Sally Rogers. + +"Yes." + +"And me, too?" said Susy Harper. "And Joe?" + +"Yes." + +And so on, with clapping of joyful hands till all the group had begged +for invitations but Tom and Amy. Then Tom turned coolly away, still +talking, and took Amy with him. Becky's lips trembled and the tears +came to her eyes; she hid these signs with a forced gayety and went on +chattering, but the life had gone out of the picnic, now, and out of +everything else; she got away as soon as she could and hid herself and +had what her sex call "a good cry." Then she sat moody, with wounded +pride, till the bell rang. She roused up, now, with a vindictive cast +in her eye, and gave her plaited tails a shake and said she knew what +SHE'D do. 
+ +At recess Tom continued his flirtation with Amy with jubilant +self-satisfaction. And he kept drifting about to find Becky and lacerate +her with the performance. At last he spied her, but there was a sudden +falling of his mercury. She was sitting cosily on a little bench behind +the schoolhouse looking at a picture-book with Alfred Temple--and so +absorbed were they, and their heads so close together over the book, +that they did not seem to be conscious of anything in the world besides. +Jealousy ran red-hot through Tom's veins. He began to hate himself for +throwing away the chance Becky had offered for a reconciliation. He +called himself a fool, and all the hard names he could think of. He +wanted to cry with vexation. Amy chatted happily along, as they walked, +for her heart was singing, but Tom's tongue had lost its function. He +did not hear what Amy was saying, and whenever she paused expectantly he +could only stammer an awkward assent, which was as often misplaced as +otherwise. He kept drifting to the rear of the schoolhouse, again and +again, to sear his eyeballs with the hateful spectacle there. He could +not help it. And it maddened him to see, as he thought he saw, that +Becky Thatcher never once suspected that he was even in the land of the +living. But she did see, nevertheless; and she knew she was winning her +fight, too, and was glad to see him suffer as she had suffered. + +Amy's happy prattle became intolerable. Tom hinted at things he had to +attend to; things that must be done; and time was fleeting. But in +vain--the girl chirped on. Tom thought, "Oh, hang her, ain't I ever +going to get rid of her?" At last he must be attending to those +things--and she said artlessly that she would be "around" when school +let out. And he hastened away, hating her for it. + +"Any other boy!" Tom thought, grating his teeth. "Any boy in the whole +town but that Saint Louis smarty that thinks he dresses so fine and is +aristocracy! Oh, all right, I licked you the first day you ever saw +this town, mister, and I'll lick you again! You just wait till I catch +you out! I'll just take and--" + +And he went through the motions of thrashing an imaginary boy +--pummelling the air, and kicking and gouging. "Oh, you do, do you? You +holler 'nough, do you? Now, then, let that learn you!" And so the +imaginary flogging was finished to his satisfaction. + +Tom fled home at noon. His conscience could not endure any more of +Amy's grateful happiness, and his jealousy could bear no more of the +other distress. Becky resumed her picture inspections with Alfred, but +as the minutes dragged along and no Tom came to suffer, her triumph +began to cloud and she lost interest; gravity and absent-mindedness +followed, and then melancholy; two or three times she pricked up her +ear at a footstep, but it was a false hope; no Tom came. At last she +grew entirely miserable and wished she hadn't carried it so far. When +poor Alfred, seeing that he was losing her, he did not know how, kept +exclaiming: "Oh, here's a jolly one! look at this!" she lost patience +at last, and said, "Oh, don't bother me! I don't care for them!" and +burst into tears, and got up and walked away. + +Alfred dropped alongside and was going to try to comfort her, but she +said: + +"Go away and leave me alone, can't you! I hate you!" + +So the boy halted, wondering what he could have done--for she had said +she would look at pictures all through the nooning--and she walked on, +crying. 
Then Alfred went musing into the deserted schoolhouse. He was +humiliated and angry. He easily guessed his way to the truth--the girl +had simply made a convenience of him to vent her spite upon Tom Sawyer. +He was far from hating Tom the less when this thought occurred to him. +He wished there was some way to get that boy into trouble without much +risk to himself. Tom's spelling-book fell under his eye. Here was his +opportunity. He gratefully opened to the lesson for the afternoon and +poured ink upon the page. + +Becky, glancing in at a window behind him at the moment, saw the act, +and moved on, without discovering herself. She started homeward, now, +intending to find Tom and tell him; Tom would be thankful and their +troubles would be healed. Before she was half way home, however, she +had changed her mind. The thought of Tom's treatment of her when she +was talking about her picnic came scorching back and filled her with +shame. She resolved to let him get whipped on the damaged +spelling-book's account, and to hate him forever, into the bargain. + + + +CHAPTER XIX + +TOM arrived at home in a dreary mood, and the first thing his aunt +said to him showed him that he had brought his sorrows to an +unpromising market: + +"Tom, I've a notion to skin you alive!" + +"Auntie, what have I done?" + +"Well, you've done enough. Here I go over to Sereny Harper, like an +old softy, expecting I'm going to make her believe all that rubbage +about that dream, when lo and behold you she'd found out from Joe that +you was over here and heard all the talk we had that night. Tom, I +don't know what is to become of a boy that will act like that. It makes +me feel so bad to think you could let me go to Sereny Harper and make +such a fool of myself and never say a word." + +This was a new aspect of the thing. His smartness of the morning had +seemed to Tom a good joke before, and very ingenious. It merely looked +mean and shabby now. He hung his head and could not think of anything +to say for a moment. Then he said: + +"Auntie, I wish I hadn't done it--but I didn't think." + +"Oh, child, you never think. You never think of anything but your own +selfishness. You could think to come all the way over here from +Jackson's Island in the night to laugh at our troubles, and you could +think to fool me with a lie about a dream; but you couldn't ever think +to pity us and save us from sorrow." + +"Auntie, I know now it was mean, but I didn't mean to be mean. I +didn't, honest. And besides, I didn't come over here to laugh at you +that night." + +"What did you come for, then?" + +"It was to tell you not to be uneasy about us, because we hadn't got +drownded." + +"Tom, Tom, I would be the thankfullest soul in this world if I could +believe you ever had as good a thought as that, but you know you never +did--and I know it, Tom." + +"Indeed and 'deed I did, auntie--I wish I may never stir if I didn't." + +"Oh, Tom, don't lie--don't do it. It only makes things a hundred times +worse." + +"It ain't a lie, auntie; it's the truth. I wanted to keep you from +grieving--that was all that made me come." + +"I'd give the whole world to believe that--it would cover up a power +of sins, Tom. I'd 'most be glad you'd run off and acted so bad. But it +ain't reasonable; because, why didn't you tell me, child?" + +"Why, you see, when you got to talking about the funeral, I just got +all full of the idea of our coming and hiding in the church, and I +couldn't somehow bear to spoil it. 
So I just put the bark back in my +pocket and kept mum." + +"What bark?" + +"The bark I had wrote on to tell you we'd gone pirating. I wish, now, +you'd waked up when I kissed you--I do, honest." + +The hard lines in his aunt's face relaxed and a sudden tenderness +dawned in her eyes. + +"DID you kiss me, Tom?" + +"Why, yes, I did." + +"Are you sure you did, Tom?" + +"Why, yes, I did, auntie--certain sure." + +"What did you kiss me for, Tom?" + +"Because I loved you so, and you laid there moaning and I was so sorry." + +The words sounded like truth. The old lady could not hide a tremor in +her voice when she said: + +"Kiss me again, Tom!--and be off with you to school, now, and don't +bother me any more." + +The moment he was gone, she ran to a closet and got out the ruin of a +jacket which Tom had gone pirating in. Then she stopped, with it in her +hand, and said to herself: + +"No, I don't dare. Poor boy, I reckon he's lied about it--but it's a +blessed, blessed lie, there's such a comfort come from it. I hope the +Lord--I KNOW the Lord will forgive him, because it was such +goodheartedness in him to tell it. But I don't want to find out it's a +lie. I won't look." + +She put the jacket away, and stood by musing a minute. Twice she put +out her hand to take the garment again, and twice she refrained. Once +more she ventured, and this time she fortified herself with the +thought: "It's a good lie--it's a good lie--I won't let it grieve me." +So she sought the jacket pocket. A moment later she was reading Tom's +piece of bark through flowing tears and saying: "I could forgive the +boy, now, if he'd committed a million sins!" + + + +CHAPTER XX + +THERE was something about Aunt Polly's manner, when she kissed Tom, +that swept away his low spirits and made him lighthearted and happy +again. He started to school and had the luck of coming upon Becky +Thatcher at the head of Meadow Lane. His mood always determined his +manner. Without a moment's hesitation he ran to her and said: + +"I acted mighty mean to-day, Becky, and I'm so sorry. I won't ever, +ever do that way again, as long as ever I live--please make up, won't +you?" + +The girl stopped and looked him scornfully in the face: + +"I'll thank you to keep yourself TO yourself, Mr. Thomas Sawyer. I'll +never speak to you again." + +She tossed her head and passed on. Tom was so stunned that he had not +even presence of mind enough to say "Who cares, Miss Smarty?" until the +right time to say it had gone by. So he said nothing. But he was in a +fine rage, nevertheless. He moped into the schoolyard wishing she were +a boy, and imagining how he would trounce her if she were. He presently +encountered her and delivered a stinging remark as he passed. She +hurled one in return, and the angry breach was complete. It seemed to +Becky, in her hot resentment, that she could hardly wait for school to +"take in," she was so impatient to see Tom flogged for the injured +spelling-book. If she had had any lingering notion of exposing Alfred +Temple, Tom's offensive fling had driven it entirely away. + +Poor girl, she did not know how fast she was nearing trouble herself. +The master, Mr. Dobbins, had reached middle age with an unsatisfied +ambition. The darling of his desires was, to be a doctor, but poverty +had decreed that he should be nothing higher than a village +schoolmaster. Every day he took a mysterious book out of his desk and +absorbed himself in it at times when no classes were reciting. He kept +that book under lock and key. 
There was not an urchin in school but was +perishing to have a glimpse of it, but the chance never came. Every boy +and girl had a theory about the nature of that book; but no two +theories were alike, and there was no way of getting at the facts in +the case. Now, as Becky was passing by the desk, which stood near the +door, she noticed that the key was in the lock! It was a precious +moment. She glanced around; found herself alone, and the next instant +she had the book in her hands. The title-page--Professor Somebody's +ANATOMY--carried no information to her mind; so she began to turn the +leaves. She came at once upon a handsomely engraved and colored +frontispiece--a human figure, stark naked. At that moment a shadow fell +on the page and Tom Sawyer stepped in at the door and caught a glimpse +of the picture. Becky snatched at the book to close it, and had the +hard luck to tear the pictured page half down the middle. She thrust +the volume into the desk, turned the key, and burst out crying with +shame and vexation. + +"Tom Sawyer, you are just as mean as you can be, to sneak up on a +person and look at what they're looking at." + +"How could I know you was looking at anything?" + +"You ought to be ashamed of yourself, Tom Sawyer; you know you're +going to tell on me, and oh, what shall I do, what shall I do! I'll be +whipped, and I never was whipped in school." + +Then she stamped her little foot and said: + +"BE so mean if you want to! I know something that's going to happen. +You just wait and you'll see! Hateful, hateful, hateful!"--and she +flung out of the house with a new explosion of crying. + +Tom stood still, rather flustered by this onslaught. Presently he said +to himself: + +"What a curious kind of a fool a girl is! Never been licked in school! +Shucks! What's a licking! That's just like a girl--they're so +thin-skinned and chicken-hearted. Well, of course I ain't going to tell +old Dobbins on this little fool, because there's other ways of getting +even on her, that ain't so mean; but what of it? Old Dobbins will ask +who it was tore his book. Nobody'll answer. Then he'll do just the way +he always does--ask first one and then t'other, and when he comes to the +right girl he'll know it, without any telling. Girls' faces always tell +on them. They ain't got any backbone. She'll get licked. Well, it's a +kind of a tight place for Becky Thatcher, because there ain't any way +out of it." Tom conned the thing a moment longer, and then added: "All +right, though; she'd like to see me in just such a fix--let her sweat it +out!" + +Tom joined the mob of skylarking scholars outside. In a few moments +the master arrived and school "took in." Tom did not feel a strong +interest in his studies. Every time he stole a glance at the girls' +side of the room Becky's face troubled him. Considering all things, he +did not want to pity her, and yet it was all he could do to help it. He +could get up no exultation that was really worthy the name. Presently +the spelling-book discovery was made, and Tom's mind was entirely full +of his own matters for a while after that. Becky roused up from her +lethargy of distress and showed good interest in the proceedings. She +did not expect that Tom could get out of his trouble by denying that he +spilt the ink on the book himself; and she was right. The denial only +seemed to make the thing worse for Tom. Becky supposed she would be +glad of that, and she tried to believe she was glad of it, but she +found she was not certain. 
When the worst came to the worst, she had an +impulse to get up and tell on Alfred Temple, but she made an effort and +forced herself to keep still--because, said she to herself, "he'll tell +about me tearing the picture sure. I wouldn't say a word, not to save +his life!" + +Tom took his whipping and went back to his seat not at all +broken-hearted, for he thought it was possible that he had unknowingly +upset the ink on the spelling-book himself, in some skylarking bout--he +had denied it for form's sake and because it was custom, and had stuck +to the denial from principle. + +A whole hour drifted by, the master sat nodding in his throne, the air +was drowsy with the hum of study. By and by, Mr. Dobbins straightened +himself up, yawned, then unlocked his desk, and reached for his book, +but seemed undecided whether to take it out or leave it. Most of the +pupils glanced up languidly, but there were two among them that watched +his movements with intent eyes. Mr. Dobbins fingered his book absently +for a while, then took it out and settled himself in his chair to read! +Tom shot a glance at Becky. He had seen a hunted and helpless rabbit +look as she did, with a gun levelled at its head. Instantly he forgot +his quarrel with her. Quick--something must be done! done in a flash, +too! But the very imminence of the emergency paralyzed his invention. +Good!--he had an inspiration! He would run and snatch the book, spring +through the door and fly. But his resolution shook for one little +instant, and the chance was lost--the master opened the volume. If Tom +only had the wasted opportunity back again! Too late. There was no help +for Becky now, he said. The next moment the master faced the school. +Every eye sank under his gaze. There was that in it which smote even +the innocent with fear. There was silence while one might count ten +--the master was gathering his wrath. Then he spoke: "Who tore this book?" + +There was not a sound. One could have heard a pin drop. The stillness +continued; the master searched face after face for signs of guilt. + +"Benjamin Rogers, did you tear this book?" + +A denial. Another pause. + +"Joseph Harper, did you?" + +Another denial. Tom's uneasiness grew more and more intense under the +slow torture of these proceedings. The master scanned the ranks of +boys--considered a while, then turned to the girls: + +"Amy Lawrence?" + +A shake of the head. + +"Gracie Miller?" + +The same sign. + +"Susan Harper, did you do this?" + +Another negative. The next girl was Becky Thatcher. Tom was trembling +from head to foot with excitement and a sense of the hopelessness of +the situation. + +"Rebecca Thatcher" [Tom glanced at her face--it was white with terror] +--"did you tear--no, look me in the face" [her hands rose in appeal] +--"did you tear this book?" + +A thought shot like lightning through Tom's brain. He sprang to his +feet and shouted--"I done it!" + +The school stared in perplexity at this incredible folly. Tom stood a +moment, to gather his dismembered faculties; and when he stepped +forward to go to his punishment the surprise, the gratitude, the +adoration that shone upon him out of poor Becky's eyes seemed pay +enough for a hundred floggings. Inspired by the splendor of his own +act, he took without an outcry the most merciless flaying that even Mr. 
+Dobbins had ever administered; and also received with indifference the +added cruelty of a command to remain two hours after school should be +dismissed--for he knew who would wait for him outside till his +captivity was done, and not count the tedious time as loss, either. + +Tom went to bed that night planning vengeance against Alfred Temple; +for with shame and repentance Becky had told him all, not forgetting +her own treachery; but even the longing for vengeance had to give way, +soon, to pleasanter musings, and he fell asleep at last with Becky's +latest words lingering dreamily in his ear-- + +"Tom, how COULD you be so noble!" + + + +CHAPTER XXI + +VACATION was approaching. The schoolmaster, always severe, grew +severer and more exacting than ever, for he wanted the school to make a +good showing on "Examination" day. His rod and his ferule were seldom +idle now--at least among the smaller pupils. Only the biggest boys, and +young ladies of eighteen and twenty, escaped lashing. Mr. Dobbins' +lashings were very vigorous ones, too; for although he carried, under +his wig, a perfectly bald and shiny head, he had only reached middle +age, and there was no sign of feebleness in his muscle. As the great +day approached, all the tyranny that was in him came to the surface; he +seemed to take a vindictive pleasure in punishing the least +shortcomings. The consequence was, that the smaller boys spent their +days in terror and suffering and their nights in plotting revenge. They +threw away no opportunity to do the master a mischief. But he kept +ahead all the time. The retribution that followed every vengeful +success was so sweeping and majestic that the boys always retired from +the field badly worsted. At last they conspired together and hit upon a +plan that promised a dazzling victory. They swore in the sign-painter's +boy, told him the scheme, and asked his help. He had his own reasons +for being delighted, for the master boarded in his father's family and +had given the boy ample cause to hate him. The master's wife would go +on a visit to the country in a few days, and there would be nothing to +interfere with the plan; the master always prepared himself for great +occasions by getting pretty well fuddled, and the sign-painter's boy +said that when the dominie had reached the proper condition on +Examination Evening he would "manage the thing" while he napped in his +chair; then he would have him awakened at the right time and hurried +away to school. + +In the fulness of time the interesting occasion arrived. At eight in +the evening the schoolhouse was brilliantly lighted, and adorned with +wreaths and festoons of foliage and flowers. The master sat throned in +his great chair upon a raised platform, with his blackboard behind him. +He was looking tolerably mellow. Three rows of benches on each side and +six rows in front of him were occupied by the dignitaries of the town +and by the parents of the pupils. To his left, back of the rows of +citizens, was a spacious temporary platform upon which were seated the +scholars who were to take part in the exercises of the evening; rows of +small boys, washed and dressed to an intolerable state of discomfort; +rows of gawky big boys; snowbanks of girls and young ladies clad in +lawn and muslin and conspicuously conscious of their bare arms, their +grandmothers' ancient trinkets, their bits of pink and blue ribbon and +the flowers in their hair. All the rest of the house was filled with +non-participating scholars. + +The exercises began. 
A very little boy stood up and sheepishly +recited, "You'd scarce expect one of my age to speak in public on the +stage," etc.--accompanying himself with the painfully exact and +spasmodic gestures which a machine might have used--supposing the +machine to be a trifle out of order. But he got through safely, though +cruelly scared, and got a fine round of applause when he made his +manufactured bow and retired. + +A little shamefaced girl lisped, "Mary had a little lamb," etc., +performed a compassion-inspiring curtsy, got her meed of applause, and +sat down flushed and happy. + +Tom Sawyer stepped forward with conceited confidence and soared into +the unquenchable and indestructible "Give me liberty or give me death" +speech, with fine fury and frantic gesticulation, and broke down in the +middle of it. A ghastly stage-fright seized him, his legs quaked under +him and he was like to choke. True, he had the manifest sympathy of the +house but he had the house's silence, too, which was even worse than +its sympathy. The master frowned, and this completed the disaster. Tom +struggled awhile and then retired, utterly defeated. There was a weak +attempt at applause, but it died early. + +"The Boy Stood on the Burning Deck" followed; also "The Assyrian Came +Down," and other declamatory gems. Then there were reading exercises, +and a spelling fight. The meagre Latin class recited with honor. The +prime feature of the evening was in order, now--original "compositions" +by the young ladies. Each in her turn stepped forward to the edge of +the platform, cleared her throat, held up her manuscript (tied with +dainty ribbon), and proceeded to read, with labored attention to +"expression" and punctuation. The themes were the same that had been +illuminated upon similar occasions by their mothers before them, their +grandmothers, and doubtless all their ancestors in the female line +clear back to the Crusades. "Friendship" was one; "Memories of Other +Days"; "Religion in History"; "Dream Land"; "The Advantages of +Culture"; "Forms of Political Government Compared and Contrasted"; +"Melancholy"; "Filial Love"; "Heart Longings," etc., etc. + +A prevalent feature in these compositions was a nursed and petted +melancholy; another was a wasteful and opulent gush of "fine language"; +another was a tendency to lug in by the ears particularly prized words +and phrases until they were worn entirely out; and a peculiarity that +conspicuously marked and marred them was the inveterate and intolerable +sermon that wagged its crippled tail at the end of each and every one +of them. No matter what the subject might be, a brain-racking effort +was made to squirm it into some aspect or other that the moral and +religious mind could contemplate with edification. The glaring +insincerity of these sermons was not sufficient to compass the +banishment of the fashion from the schools, and it is not sufficient +to-day; it never will be sufficient while the world stands, perhaps. +There is no school in all our land where the young ladies do not feel +obliged to close their compositions with a sermon; and you will find +that the sermon of the most frivolous and the least religious girl in +the school is always the longest and the most relentlessly pious. But +enough of this. Homely truth is unpalatable. + +Let us return to the "Examination." The first composition that was +read was one entitled "Is this, then, Life?" 
Perhaps the reader can +endure an extract from it: + + "In the common walks of life, with what delightful + emotions does the youthful mind look forward to some + anticipated scene of festivity! Imagination is busy + sketching rose-tinted pictures of joy. In fancy, the + voluptuous votary of fashion sees herself amid the + festive throng, 'the observed of all observers.' Her + graceful form, arrayed in snowy robes, is whirling + through the mazes of the joyous dance; her eye is + brightest, her step is lightest in the gay assembly. + + "In such delicious fancies time quickly glides by, + and the welcome hour arrives for her entrance into + the Elysian world, of which she has had such bright + dreams. How fairy-like does everything appear to + her enchanted vision! Each new scene is more charming + than the last. But after a while she finds that + beneath this goodly exterior, all is vanity, the + flattery which once charmed her soul, now grates + harshly upon her ear; the ball-room has lost its + charms; and with wasted health and imbittered heart, + she turns away with the conviction that earthly + pleasures cannot satisfy the longings of the soul!" + +And so forth and so on. There was a buzz of gratification from time to +time during the reading, accompanied by whispered ejaculations of "How +sweet!" "How eloquent!" "So true!" etc., and after the thing had closed +with a peculiarly afflicting sermon the applause was enthusiastic. + +Then arose a slim, melancholy girl, whose face had the "interesting" +paleness that comes of pills and indigestion, and read a "poem." Two +stanzas of it will do: + + "A MISSOURI MAIDEN'S FAREWELL TO ALABAMA + + "Alabama, good-bye! I love thee well! + But yet for a while do I leave thee now! + Sad, yes, sad thoughts of thee my heart doth swell, + And burning recollections throng my brow! + For I have wandered through thy flowery woods; + Have roamed and read near Tallapoosa's stream; + Have listened to Tallassee's warring floods, + And wooed on Coosa's side Aurora's beam. + + "Yet shame I not to bear an o'er-full heart, + Nor blush to turn behind my tearful eyes; + 'Tis from no stranger land I now must part, + 'Tis to no strangers left I yield these sighs. + Welcome and home were mine within this State, + Whose vales I leave--whose spires fade fast from me + And cold must be mine eyes, and heart, and tete, + When, dear Alabama! they turn cold on thee!" + +There were very few there who knew what "tete" meant, but the poem was +very satisfactory, nevertheless. + +Next appeared a dark-complexioned, black-eyed, black-haired young +lady, who paused an impressive moment, assumed a tragic expression, and +began to read in a measured, solemn tone: + + "A VISION + + "Dark and tempestuous was night. Around the + throne on high not a single star quivered; but + the deep intonations of the heavy thunder + constantly vibrated upon the ear; whilst the + terrific lightning revelled in angry mood + through the cloudy chambers of heaven, seeming + to scorn the power exerted over its terror by + the illustrious Franklin! Even the boisterous + winds unanimously came forth from their mystic + homes, and blustered about as if to enhance by + their aid the wildness of the scene. + + "At such a time, so dark, so dreary, for human + sympathy my very spirit sighed; but instead thereof, + + "'My dearest friend, my counsellor, my comforter + and guide--My joy in grief, my second bliss + in joy,' came to my side. 
She moved like one of + those bright beings pictured in the sunny walks + of fancy's Eden by the romantic and young, a + queen of beauty unadorned save by her own + transcendent loveliness. So soft was her step, it + failed to make even a sound, and but for the + magical thrill imparted by her genial touch, as + other unobtrusive beauties, she would have glided + away un-perceived--unsought. A strange sadness + rested upon her features, like icy tears upon + the robe of December, as she pointed to the + contending elements without, and bade me contemplate + the two beings presented." + +This nightmare occupied some ten pages of manuscript and wound up with +a sermon so destructive of all hope to non-Presbyterians that it took +the first prize. This composition was considered to be the very finest +effort of the evening. The mayor of the village, in delivering the +prize to the author of it, made a warm speech in which he said that it +was by far the most "eloquent" thing he had ever listened to, and that +Daniel Webster himself might well be proud of it. + +It may be remarked, in passing, that the number of compositions in +which the word "beauteous" was over-fondled, and human experience +referred to as "life's page," was up to the usual average. + +Now the master, mellow almost to the verge of geniality, put his chair +aside, turned his back to the audience, and began to draw a map of +America on the blackboard, to exercise the geography class upon. But he +made a sad business of it with his unsteady hand, and a smothered +titter rippled over the house. He knew what the matter was, and set +himself to right it. He sponged out lines and remade them; but he only +distorted them more than ever, and the tittering was more pronounced. +He threw his entire attention upon his work, now, as if determined not +to be put down by the mirth. He felt that all eyes were fastened upon +him; he imagined he was succeeding, and yet the tittering continued; it +even manifestly increased. And well it might. There was a garret above, +pierced with a scuttle over his head; and down through this scuttle +came a cat, suspended around the haunches by a string; she had a rag +tied about her head and jaws to keep her from mewing; as she slowly +descended she curved upward and clawed at the string, she swung +downward and clawed at the intangible air. The tittering rose higher +and higher--the cat was within six inches of the absorbed teacher's +head--down, down, a little lower, and she grabbed his wig with her +desperate claws, clung to it, and was snatched up into the garret in an +instant with her trophy still in her possession! And how the light did +blaze abroad from the master's bald pate--for the sign-painter's boy +had GILDED it! + +That broke up the meeting. The boys were avenged. Vacation had come. + + NOTE:--The pretended "compositions" quoted in + this chapter are taken without alteration from a + volume entitled "Prose and Poetry, by a Western + Lady"--but they are exactly and precisely after + the schoolgirl pattern, and hence are much + happier than any mere imitations could be. + + + +CHAPTER XXII + +TOM joined the new order of Cadets of Temperance, being attracted by +the showy character of their "regalia." He promised to abstain from +smoking, chewing, and profanity as long as he remained a member. Now he +found out a new thing--namely, that to promise not to do a thing is the +surest way in the world to make a body want to go and do that very +thing. 
Tom soon found himself tormented with a desire to drink and +swear; the desire grew to be so intense that nothing but the hope of a +chance to display himself in his red sash kept him from withdrawing +from the order. Fourth of July was coming; but he soon gave that up +--gave it up before he had worn his shackles over forty-eight hours--and +fixed his hopes upon old Judge Frazer, justice of the peace, who was +apparently on his deathbed and would have a big public funeral, since +he was so high an official. During three days Tom was deeply concerned +about the Judge's condition and hungry for news of it. Sometimes his +hopes ran high--so high that he would venture to get out his regalia +and practise before the looking-glass. But the Judge had a most +discouraging way of fluctuating. At last he was pronounced upon the +mend--and then convalescent. Tom was disgusted; and felt a sense of +injury, too. He handed in his resignation at once--and that night the +Judge suffered a relapse and died. Tom resolved that he would never +trust a man like that again. + +The funeral was a fine thing. The Cadets paraded in a style calculated +to kill the late member with envy. Tom was a free boy again, however +--there was something in that. He could drink and swear, now--but found +to his surprise that he did not want to. The simple fact that he could, +took the desire away, and the charm of it. + +Tom presently wondered to find that his coveted vacation was beginning +to hang a little heavily on his hands. + +He attempted a diary--but nothing happened during three days, and so +he abandoned it. + +The first of all the negro minstrel shows came to town, and made a +sensation. Tom and Joe Harper got up a band of performers and were +happy for two days. + +Even the Glorious Fourth was in some sense a failure, for it rained +hard, there was no procession in consequence, and the greatest man in +the world (as Tom supposed), Mr. Benton, an actual United States +Senator, proved an overwhelming disappointment--for he was not +twenty-five feet high, nor even anywhere in the neighborhood of it. + +A circus came. The boys played circus for three days afterward in +tents made of rag carpeting--admission, three pins for boys, two for +girls--and then circusing was abandoned. + +A phrenologist and a mesmerizer came--and went again and left the +village duller and drearier than ever. + +There were some boys-and-girls' parties, but they were so few and so +delightful that they only made the aching voids between ache the harder. + +Becky Thatcher was gone to her Constantinople home to stay with her +parents during vacation--so there was no bright side to life anywhere. + +The dreadful secret of the murder was a chronic misery. It was a very +cancer for permanency and pain. + +Then came the measles. + +During two long weeks Tom lay a prisoner, dead to the world and its +happenings. He was very ill, he was interested in nothing. When he got +upon his feet at last and moved feebly down-town, a melancholy change +had come over everything and every creature. There had been a +"revival," and everybody had "got religion," not only the adults, but +even the boys and girls. Tom went about, hoping against hope for the +sight of one blessed sinful face, but disappointment crossed him +everywhere. He found Joe Harper studying a Testament, and turned sadly +away from the depressing spectacle. He sought Ben Rogers, and found him +visiting the poor with a basket of tracts. 
He hunted up Jim Hollis, who +called his attention to the precious blessing of his late measles as a +warning. Every boy he encountered added another ton to his depression; +and when, in desperation, he flew for refuge at last to the bosom of +Huckleberry Finn and was received with a Scriptural quotation, his +heart broke and he crept home and to bed realizing that he alone of all +the town was lost, forever and forever. + +And that night there came on a terrific storm, with driving rain, +awful claps of thunder and blinding sheets of lightning. He covered his +head with the bedclothes and waited in a horror of suspense for his +doom; for he had not the shadow of a doubt that all this hubbub was +about him. He believed he had taxed the forbearance of the powers above +to the extremity of endurance and that this was the result. It might +have seemed to him a waste of pomp and ammunition to kill a bug with a +battery of artillery, but there seemed nothing incongruous about the +getting up such an expensive thunderstorm as this to knock the turf +from under an insect like himself. + +By and by the tempest spent itself and died without accomplishing its +object. The boy's first impulse was to be grateful, and reform. His +second was to wait--for there might not be any more storms. + +The next day the doctors were back; Tom had relapsed. The three weeks +he spent on his back this time seemed an entire age. When he got abroad +at last he was hardly grateful that he had been spared, remembering how +lonely was his estate, how companionless and forlorn he was. He drifted +listlessly down the street and found Jim Hollis acting as judge in a +juvenile court that was trying a cat for murder, in the presence of her +victim, a bird. He found Joe Harper and Huck Finn up an alley eating a +stolen melon. Poor lads! they--like Tom--had suffered a relapse. + + + +CHAPTER XXIII + +AT last the sleepy atmosphere was stirred--and vigorously: the murder +trial came on in the court. It became the absorbing topic of village +talk immediately. Tom could not get away from it. Every reference to +the murder sent a shudder to his heart, for his troubled conscience and +fears almost persuaded him that these remarks were put forth in his +hearing as "feelers"; he did not see how he could be suspected of +knowing anything about the murder, but still he could not be +comfortable in the midst of this gossip. It kept him in a cold shiver +all the time. He took Huck to a lonely place to have a talk with him. +It would be some relief to unseal his tongue for a little while; to +divide his burden of distress with another sufferer. Moreover, he +wanted to assure himself that Huck had remained discreet. + +"Huck, have you ever told anybody about--that?" + +"'Bout what?" + +"You know what." + +"Oh--'course I haven't." + +"Never a word?" + +"Never a solitary word, so help me. What makes you ask?" + +"Well, I was afeard." + +"Why, Tom Sawyer, we wouldn't be alive two days if that got found out. +YOU know that." + +Tom felt more comfortable. After a pause: + +"Huck, they couldn't anybody get you to tell, could they?" + +"Get me to tell? Why, if I wanted that half-breed devil to drownd me +they could get me to tell. They ain't no different way." + +"Well, that's all right, then. I reckon we're safe as long as we keep +mum. But let's swear again, anyway. It's more surer." + +"I'm agreed." + +So they swore again with dread solemnities. + +"What is the talk around, Huck? I've heard a power of it." + +"Talk? 
Well, it's just Muff Potter, Muff Potter, Muff Potter all the +time. It keeps me in a sweat, constant, so's I want to hide som'ers." + +"That's just the same way they go on round me. I reckon he's a goner. +Don't you feel sorry for him, sometimes?" + +"Most always--most always. He ain't no account; but then he hain't +ever done anything to hurt anybody. Just fishes a little, to get money +to get drunk on--and loafs around considerable; but lord, we all do +that--leastways most of us--preachers and such like. But he's kind of +good--he give me half a fish, once, when there warn't enough for two; +and lots of times he's kind of stood by me when I was out of luck." + +"Well, he's mended kites for me, Huck, and knitted hooks on to my +line. I wish we could get him out of there." + +"My! we couldn't get him out, Tom. And besides, 'twouldn't do any +good; they'd ketch him again." + +"Yes--so they would. But I hate to hear 'em abuse him so like the +dickens when he never done--that." + +"I do too, Tom. Lord, I hear 'em say he's the bloodiest looking +villain in this country, and they wonder he wasn't ever hung before." + +"Yes, they talk like that, all the time. I've heard 'em say that if he +was to get free they'd lynch him." + +"And they'd do it, too." + +The boys had a long talk, but it brought them little comfort. As the +twilight drew on, they found themselves hanging about the neighborhood +of the little isolated jail, perhaps with an undefined hope that +something would happen that might clear away their difficulties. But +nothing happened; there seemed to be no angels or fairies interested in +this luckless captive. + +The boys did as they had often done before--went to the cell grating +and gave Potter some tobacco and matches. He was on the ground floor +and there were no guards. + +His gratitude for their gifts had always smote their consciences +before--it cut deeper than ever, this time. They felt cowardly and +treacherous to the last degree when Potter said: + +"You've been mighty good to me, boys--better'n anybody else in this +town. And I don't forget it, I don't. Often I says to myself, says I, +'I used to mend all the boys' kites and things, and show 'em where the +good fishin' places was, and befriend 'em what I could, and now they've +all forgot old Muff when he's in trouble; but Tom don't, and Huck +don't--THEY don't forget him, says I, 'and I don't forget them.' Well, +boys, I done an awful thing--drunk and crazy at the time--that's the +only way I account for it--and now I got to swing for it, and it's +right. Right, and BEST, too, I reckon--hope so, anyway. Well, we won't +talk about that. I don't want to make YOU feel bad; you've befriended +me. But what I want to say, is, don't YOU ever get drunk--then you won't +ever get here. Stand a litter furder west--so--that's it; it's a prime +comfort to see faces that's friendly when a body's in such a muck of +trouble, and there don't none come here but yourn. Good friendly +faces--good friendly faces. Git up on one another's backs and let me +touch 'em. That's it. Shake hands--yourn'll come through the bars, but +mine's too big. Little hands, and weak--but they've helped Muff Potter +a power, and they'd help him more if they could." + +Tom went home miserable, and his dreams that night were full of +horrors. The next day and the day after, he hung about the court-room, +drawn by an almost irresistible impulse to go in, but forcing himself +to stay out. Huck was having the same experience. They studiously +avoided each other. 
Each wandered away, from time to time, but the same +dismal fascination always brought them back presently. Tom kept his +ears open when idlers sauntered out of the court-room, but invariably +heard distressing news--the toils were closing more and more +relentlessly around poor Potter. At the end of the second day the +village talk was to the effect that Injun Joe's evidence stood firm and +unshaken, and that there was not the slightest question as to what the +jury's verdict would be. + +Tom was out late, that night, and came to bed through the window. He +was in a tremendous state of excitement. It was hours before he got to +sleep. All the village flocked to the court-house the next morning, for +this was to be the great day. Both sexes were about equally represented +in the packed audience. After a long wait the jury filed in and took +their places; shortly afterward, Potter, pale and haggard, timid and +hopeless, was brought in, with chains upon him, and seated where all +the curious eyes could stare at him; no less conspicuous was Injun Joe, +stolid as ever. There was another pause, and then the judge arrived and +the sheriff proclaimed the opening of the court. The usual whisperings +among the lawyers and gathering together of papers followed. These +details and accompanying delays worked up an atmosphere of preparation +that was as impressive as it was fascinating. + +Now a witness was called who testified that he found Muff Potter +washing in the brook, at an early hour of the morning that the murder +was discovered, and that he immediately sneaked away. After some +further questioning, counsel for the prosecution said: + +"Take the witness." + +The prisoner raised his eyes for a moment, but dropped them again when +his own counsel said: + +"I have no questions to ask him." + +The next witness proved the finding of the knife near the corpse. +Counsel for the prosecution said: + +"Take the witness." + +"I have no questions to ask him," Potter's lawyer replied. + +A third witness swore he had often seen the knife in Potter's +possession. + +"Take the witness." + +Counsel for Potter declined to question him. The faces of the audience +began to betray annoyance. Did this attorney mean to throw away his +client's life without an effort? + +Several witnesses deposed concerning Potter's guilty behavior when +brought to the scene of the murder. They were allowed to leave the +stand without being cross-questioned. + +Every detail of the damaging circumstances that occurred in the +graveyard upon that morning which all present remembered so well was +brought out by credible witnesses, but none of them were cross-examined +by Potter's lawyer. The perplexity and dissatisfaction of the house +expressed itself in murmurs and provoked a reproof from the bench. +Counsel for the prosecution now said: + +"By the oaths of citizens whose simple word is above suspicion, we +have fastened this awful crime, beyond all possibility of question, +upon the unhappy prisoner at the bar. We rest our case here." + +A groan escaped from poor Potter, and he put his face in his hands and +rocked his body softly to and fro, while a painful silence reigned in +the court-room. Many men were moved, and many women's compassion +testified itself in tears. Counsel for the defence rose and said: + +"Your honor, in our remarks at the opening of this trial, we +foreshadowed our purpose to prove that our client did this fearful deed +while under the influence of a blind and irresponsible delirium +produced by drink. 
We have changed our mind. We shall not offer that +plea." [Then to the clerk:] "Call Thomas Sawyer!" + +A puzzled amazement awoke in every face in the house, not even +excepting Potter's. Every eye fastened itself with wondering interest +upon Tom as he rose and took his place upon the stand. The boy looked +wild enough, for he was badly scared. The oath was administered. + +"Thomas Sawyer, where were you on the seventeenth of June, about the +hour of midnight?" + +Tom glanced at Injun Joe's iron face and his tongue failed him. The +audience listened breathless, but the words refused to come. After a +few moments, however, the boy got a little of his strength back, and +managed to put enough of it into his voice to make part of the house +hear: + +"In the graveyard!" + +"A little bit louder, please. Don't be afraid. You were--" + +"In the graveyard." + +A contemptuous smile flitted across Injun Joe's face. + +"Were you anywhere near Horse Williams' grave?" + +"Yes, sir." + +"Speak up--just a trifle louder. How near were you?" + +"Near as I am to you." + +"Were you hidden, or not?" + +"I was hid." + +"Where?" + +"Behind the elms that's on the edge of the grave." + +Injun Joe gave a barely perceptible start. + +"Any one with you?" + +"Yes, sir. I went there with--" + +"Wait--wait a moment. Never mind mentioning your companion's name. We +will produce him at the proper time. Did you carry anything there with +you." + +Tom hesitated and looked confused. + +"Speak out, my boy--don't be diffident. The truth is always +respectable. What did you take there?" + +"Only a--a--dead cat." + +There was a ripple of mirth, which the court checked. + +"We will produce the skeleton of that cat. Now, my boy, tell us +everything that occurred--tell it in your own way--don't skip anything, +and don't be afraid." + +Tom began--hesitatingly at first, but as he warmed to his subject his +words flowed more and more easily; in a little while every sound ceased +but his own voice; every eye fixed itself upon him; with parted lips +and bated breath the audience hung upon his words, taking no note of +time, rapt in the ghastly fascinations of the tale. The strain upon +pent emotion reached its climax when the boy said: + +"--and as the doctor fetched the board around and Muff Potter fell, +Injun Joe jumped with the knife and--" + +Crash! Quick as lightning the half-breed sprang for a window, tore his +way through all opposers, and was gone! + + + +CHAPTER XXIV + +TOM was a glittering hero once more--the pet of the old, the envy of +the young. His name even went into immortal print, for the village +paper magnified him. There were some that believed he would be +President, yet, if he escaped hanging. + +As usual, the fickle, unreasoning world took Muff Potter to its bosom +and fondled him as lavishly as it had abused him before. But that sort +of conduct is to the world's credit; therefore it is not well to find +fault with it. + +Tom's days were days of splendor and exultation to him, but his nights +were seasons of horror. Injun Joe infested all his dreams, and always +with doom in his eye. Hardly any temptation could persuade the boy to +stir abroad after nightfall. Poor Huck was in the same state of +wretchedness and terror, for Tom had told the whole story to the lawyer +the night before the great day of the trial, and Huck was sore afraid +that his share in the business might leak out, yet, notwithstanding +Injun Joe's flight had saved him the suffering of testifying in court. 
+The poor fellow had got the attorney to promise secrecy, but what of +that? Since Tom's harassed conscience had managed to drive him to the +lawyer's house by night and wring a dread tale from lips that had been +sealed with the dismalest and most formidable of oaths, Huck's +confidence in the human race was well-nigh obliterated. + +Daily Muff Potter's gratitude made Tom glad he had spoken; but nightly +he wished he had sealed up his tongue. + +Half the time Tom was afraid Injun Joe would never be captured; the +other half he was afraid he would be. He felt sure he never could draw +a safe breath again until that man was dead and he had seen the corpse. + +Rewards had been offered, the country had been scoured, but no Injun +Joe was found. One of those omniscient and awe-inspiring marvels, a +detective, came up from St. Louis, moused around, shook his head, +looked wise, and made that sort of astounding success which members of +that craft usually achieve. That is to say, he "found a clew." But you +can't hang a "clew" for murder, and so after that detective had got +through and gone home, Tom felt just as insecure as he was before. + +The slow days drifted on, and each left behind it a slightly lightened +weight of apprehension. + + + +CHAPTER XXV + +THERE comes a time in every rightly-constructed boy's life when he has +a raging desire to go somewhere and dig for hidden treasure. This +desire suddenly came upon Tom one day. He sallied out to find Joe +Harper, but failed of success. Next he sought Ben Rogers; he had gone +fishing. Presently he stumbled upon Huck Finn the Red-Handed. Huck +would answer. Tom took him to a private place and opened the matter to +him confidentially. Huck was willing. Huck was always willing to take a +hand in any enterprise that offered entertainment and required no +capital, for he had a troublesome superabundance of that sort of time +which is not money. "Where'll we dig?" said Huck. + +"Oh, most anywhere." + +"Why, is it hid all around?" + +"No, indeed it ain't. It's hid in mighty particular places, Huck +--sometimes on islands, sometimes in rotten chests under the end of a +limb of an old dead tree, just where the shadow falls at midnight; but +mostly under the floor in ha'nted houses." + +"Who hides it?" + +"Why, robbers, of course--who'd you reckon? Sunday-school +sup'rintendents?" + +"I don't know. If 'twas mine I wouldn't hide it; I'd spend it and have +a good time." + +"So would I. But robbers don't do that way. They always hide it and +leave it there." + +"Don't they come after it any more?" + +"No, they think they will, but they generally forget the marks, or +else they die. Anyway, it lays there a long time and gets rusty; and by +and by somebody finds an old yellow paper that tells how to find the +marks--a paper that's got to be ciphered over about a week because it's +mostly signs and hy'roglyphics." + +"Hyro--which?" + +"Hy'roglyphics--pictures and things, you know, that don't seem to mean +anything." + +"Have you got one of them papers, Tom?" + +"No." + +"Well then, how you going to find the marks?" + +"I don't want any marks. They always bury it under a ha'nted house or +on an island, or under a dead tree that's got one limb sticking out. +Well, we've tried Jackson's Island a little, and we can try it again +some time; and there's the old ha'nted house up the Still-House branch, +and there's lots of dead-limb trees--dead loads of 'em." + +"Is it under all of them?" + +"How you talk! No!" + +"Then how you going to know which one to go for?" 
+ +"Go for all of 'em!" + +"Why, Tom, it'll take all summer." + +"Well, what of that? Suppose you find a brass pot with a hundred +dollars in it, all rusty and gray, or rotten chest full of di'monds. +How's that?" + +Huck's eyes glowed. + +"That's bully. Plenty bully enough for me. Just you gimme the hundred +dollars and I don't want no di'monds." + +"All right. But I bet you I ain't going to throw off on di'monds. Some +of 'em's worth twenty dollars apiece--there ain't any, hardly, but's +worth six bits or a dollar." + +"No! Is that so?" + +"Cert'nly--anybody'll tell you so. Hain't you ever seen one, Huck?" + +"Not as I remember." + +"Oh, kings have slathers of them." + +"Well, I don' know no kings, Tom." + +"I reckon you don't. But if you was to go to Europe you'd see a raft +of 'em hopping around." + +"Do they hop?" + +"Hop?--your granny! No!" + +"Well, what did you say they did, for?" + +"Shucks, I only meant you'd SEE 'em--not hopping, of course--what do +they want to hop for?--but I mean you'd just see 'em--scattered around, +you know, in a kind of a general way. Like that old humpbacked Richard." + +"Richard? What's his other name?" + +"He didn't have any other name. Kings don't have any but a given name." + +"No?" + +"But they don't." + +"Well, if they like it, Tom, all right; but I don't want to be a king +and have only just a given name, like a nigger. But say--where you +going to dig first?" + +"Well, I don't know. S'pose we tackle that old dead-limb tree on the +hill t'other side of Still-House branch?" + +"I'm agreed." + +So they got a crippled pick and a shovel, and set out on their +three-mile tramp. They arrived hot and panting, and threw themselves +down in the shade of a neighboring elm to rest and have a smoke. + +"I like this," said Tom. + +"So do I." + +"Say, Huck, if we find a treasure here, what you going to do with your +share?" + +"Well, I'll have pie and a glass of soda every day, and I'll go to +every circus that comes along. I bet I'll have a gay time." + +"Well, ain't you going to save any of it?" + +"Save it? What for?" + +"Why, so as to have something to live on, by and by." + +"Oh, that ain't any use. Pap would come back to thish-yer town some +day and get his claws on it if I didn't hurry up, and I tell you he'd +clean it out pretty quick. What you going to do with yourn, Tom?" + +"I'm going to buy a new drum, and a sure-'nough sword, and a red +necktie and a bull pup, and get married." + +"Married!" + +"That's it." + +"Tom, you--why, you ain't in your right mind." + +"Wait--you'll see." + +"Well, that's the foolishest thing you could do. Look at pap and my +mother. Fight! Why, they used to fight all the time. I remember, mighty +well." + +"That ain't anything. The girl I'm going to marry won't fight." + +"Tom, I reckon they're all alike. They'll all comb a body. Now you +better think 'bout this awhile. I tell you you better. What's the name +of the gal?" + +"It ain't a gal at all--it's a girl." + +"It's all the same, I reckon; some says gal, some says girl--both's +right, like enough. Anyway, what's her name, Tom?" + +"I'll tell you some time--not now." + +"All right--that'll do. Only if you get married I'll be more lonesomer +than ever." + +"No you won't. You'll come and live with me. Now stir out of this and +we'll go to digging." + +They worked and sweated for half an hour. No result. They toiled +another half-hour. Still no result. Huck said: + +"Do they always bury it as deep as this?" + +"Sometimes--not always. Not generally. 
I reckon we haven't got the +right place." + +So they chose a new spot and began again. The labor dragged a little, +but still they made progress. They pegged away in silence for some +time. Finally Huck leaned on his shovel, swabbed the beaded drops from +his brow with his sleeve, and said: + +"Where you going to dig next, after we get this one?" + +"I reckon maybe we'll tackle the old tree that's over yonder on +Cardiff Hill back of the widow's." + +"I reckon that'll be a good one. But won't the widow take it away from +us, Tom? It's on her land." + +"SHE take it away! Maybe she'd like to try it once. Whoever finds one +of these hid treasures, it belongs to him. It don't make any difference +whose land it's on." + +That was satisfactory. The work went on. By and by Huck said: + +"Blame it, we must be in the wrong place again. What do you think?" + +"It is mighty curious, Huck. I don't understand it. Sometimes witches +interfere. I reckon maybe that's what's the trouble now." + +"Shucks! Witches ain't got no power in the daytime." + +"Well, that's so. I didn't think of that. Oh, I know what the matter +is! What a blamed lot of fools we are! You got to find out where the +shadow of the limb falls at midnight, and that's where you dig!" + +"Then consound it, we've fooled away all this work for nothing. Now +hang it all, we got to come back in the night. It's an awful long way. +Can you get out?" + +"I bet I will. We've got to do it to-night, too, because if somebody +sees these holes they'll know in a minute what's here and they'll go +for it." + +"Well, I'll come around and maow to-night." + +"All right. Let's hide the tools in the bushes." + +The boys were there that night, about the appointed time. They sat in +the shadow waiting. It was a lonely place, and an hour made solemn by +old traditions. Spirits whispered in the rustling leaves, ghosts lurked +in the murky nooks, the deep baying of a hound floated up out of the +distance, an owl answered with his sepulchral note. The boys were +subdued by these solemnities, and talked little. By and by they judged +that twelve had come; they marked where the shadow fell, and began to +dig. Their hopes commenced to rise. Their interest grew stronger, and +their industry kept pace with it. The hole deepened and still deepened, +but every time their hearts jumped to hear the pick strike upon +something, they only suffered a new disappointment. It was only a stone +or a chunk. At last Tom said: + +"It ain't any use, Huck, we're wrong again." + +"Well, but we CAN'T be wrong. We spotted the shadder to a dot." + +"I know it, but then there's another thing." + +"What's that?". + +"Why, we only guessed at the time. Like enough it was too late or too +early." + +Huck dropped his shovel. + +"That's it," said he. "That's the very trouble. We got to give this +one up. We can't ever tell the right time, and besides this kind of +thing's too awful, here this time of night with witches and ghosts +a-fluttering around so. I feel as if something's behind me all the time; +and I'm afeard to turn around, becuz maybe there's others in front +a-waiting for a chance. I been creeping all over, ever since I got here." + +"Well, I've been pretty much so, too, Huck. They most always put in a +dead man when they bury a treasure under a tree, to look out for it." + +"Lordy!" + +"Yes, they do. I've always heard that." + +"Tom, I don't like to fool around much where there's dead people. A +body's bound to get into trouble with 'em, sure." 
+ +"I don't like to stir 'em up, either. S'pose this one here was to +stick his skull out and say something!" + +"Don't Tom! It's awful." + +"Well, it just is. Huck, I don't feel comfortable a bit." + +"Say, Tom, let's give this place up, and try somewheres else." + +"All right, I reckon we better." + +"What'll it be?" + +Tom considered awhile; and then said: + +"The ha'nted house. That's it!" + +"Blame it, I don't like ha'nted houses, Tom. Why, they're a dern sight +worse'n dead people. Dead people might talk, maybe, but they don't come +sliding around in a shroud, when you ain't noticing, and peep over your +shoulder all of a sudden and grit their teeth, the way a ghost does. I +couldn't stand such a thing as that, Tom--nobody could." + +"Yes, but, Huck, ghosts don't travel around only at night. They won't +hender us from digging there in the daytime." + +"Well, that's so. But you know mighty well people don't go about that +ha'nted house in the day nor the night." + +"Well, that's mostly because they don't like to go where a man's been +murdered, anyway--but nothing's ever been seen around that house except +in the night--just some blue lights slipping by the windows--no regular +ghosts." + +"Well, where you see one of them blue lights flickering around, Tom, +you can bet there's a ghost mighty close behind it. It stands to +reason. Becuz you know that they don't anybody but ghosts use 'em." + +"Yes, that's so. But anyway they don't come around in the daytime, so +what's the use of our being afeard?" + +"Well, all right. We'll tackle the ha'nted house if you say so--but I +reckon it's taking chances." + +They had started down the hill by this time. There in the middle of +the moonlit valley below them stood the "ha'nted" house, utterly +isolated, its fences gone long ago, rank weeds smothering the very +doorsteps, the chimney crumbled to ruin, the window-sashes vacant, a +corner of the roof caved in. The boys gazed awhile, half expecting to +see a blue light flit past a window; then talking in a low tone, as +befitted the time and the circumstances, they struck far off to the +right, to give the haunted house a wide berth, and took their way +homeward through the woods that adorned the rearward side of Cardiff +Hill. + + + +CHAPTER XXVI + +ABOUT noon the next day the boys arrived at the dead tree; they had +come for their tools. Tom was impatient to go to the haunted house; +Huck was measurably so, also--but suddenly said: + +"Lookyhere, Tom, do you know what day it is?" + +Tom mentally ran over the days of the week, and then quickly lifted +his eyes with a startled look in them-- + +"My! I never once thought of it, Huck!" + +"Well, I didn't neither, but all at once it popped onto me that it was +Friday." + +"Blame it, a body can't be too careful, Huck. We might 'a' got into an +awful scrape, tackling such a thing on a Friday." + +"MIGHT! Better say we WOULD! There's some lucky days, maybe, but +Friday ain't." + +"Any fool knows that. I don't reckon YOU was the first that found it +out, Huck." + +"Well, I never said I was, did I? And Friday ain't all, neither. I had +a rotten bad dream last night--dreampt about rats." + +"No! Sure sign of trouble. Did they fight?" + +"No." + +"Well, that's good, Huck. When they don't fight it's only a sign that +there's trouble around, you know. All we got to do is to look mighty +sharp and keep out of it. We'll drop this thing for to-day, and play. +Do you know Robin Hood, Huck?" + +"No. Who's Robin Hood?" 
+ +"Why, he was one of the greatest men that was ever in England--and the +best. He was a robber." + +"Cracky, I wisht I was. Who did he rob?" + +"Only sheriffs and bishops and rich people and kings, and such like. +But he never bothered the poor. He loved 'em. He always divided up with +'em perfectly square." + +"Well, he must 'a' been a brick." + +"I bet you he was, Huck. Oh, he was the noblest man that ever was. +They ain't any such men now, I can tell you. He could lick any man in +England, with one hand tied behind him; and he could take his yew bow +and plug a ten-cent piece every time, a mile and a half." + +"What's a YEW bow?" + +"I don't know. It's some kind of a bow, of course. And if he hit that +dime only on the edge he would set down and cry--and curse. But we'll +play Robin Hood--it's nobby fun. I'll learn you." + +"I'm agreed." + +So they played Robin Hood all the afternoon, now and then casting a +yearning eye down upon the haunted house and passing a remark about the +morrow's prospects and possibilities there. As the sun began to sink +into the west they took their way homeward athwart the long shadows of +the trees and soon were buried from sight in the forests of Cardiff +Hill. + +On Saturday, shortly after noon, the boys were at the dead tree again. +They had a smoke and a chat in the shade, and then dug a little in +their last hole, not with great hope, but merely because Tom said there +were so many cases where people had given up a treasure after getting +down within six inches of it, and then somebody else had come along and +turned it up with a single thrust of a shovel. The thing failed this +time, however, so the boys shouldered their tools and went away feeling +that they had not trifled with fortune, but had fulfilled all the +requirements that belong to the business of treasure-hunting. + +When they reached the haunted house there was something so weird and +grisly about the dead silence that reigned there under the baking sun, +and something so depressing about the loneliness and desolation of the +place, that they were afraid, for a moment, to venture in. Then they +crept to the door and took a trembling peep. They saw a weed-grown, +floorless room, unplastered, an ancient fireplace, vacant windows, a +ruinous staircase; and here, there, and everywhere hung ragged and +abandoned cobwebs. They presently entered, softly, with quickened +pulses, talking in whispers, ears alert to catch the slightest sound, +and muscles tense and ready for instant retreat. + +In a little while familiarity modified their fears and they gave the +place a critical and interested examination, rather admiring their own +boldness, and wondering at it, too. Next they wanted to look up-stairs. +This was something like cutting off retreat, but they got to daring +each other, and of course there could be but one result--they threw +their tools into a corner and made the ascent. Up there were the same +signs of decay. In one corner they found a closet that promised +mystery, but the promise was a fraud--there was nothing in it. Their +courage was up now and well in hand. They were about to go down and +begin work when-- + +"Sh!" said Tom. + +"What is it?" whispered Huck, blanching with fright. + +"Sh!... There!... Hear it?" + +"Yes!... Oh, my! Let's run!" + +"Keep still! Don't you budge! They're coming right toward the door." + +The boys stretched themselves upon the floor with their eyes to +knot-holes in the planking, and lay waiting, in a misery of fear. + +"They've stopped.... 
No--coming.... Here they are. Don't whisper +another word, Huck. My goodness, I wish I was out of this!" + +Two men entered. Each boy said to himself: "There's the old deaf and +dumb Spaniard that's been about town once or twice lately--never saw +t'other man before." + +"T'other" was a ragged, unkempt creature, with nothing very pleasant +in his face. The Spaniard was wrapped in a serape; he had bushy white +whiskers; long white hair flowed from under his sombrero, and he wore +green goggles. When they came in, "t'other" was talking in a low voice; +they sat down on the ground, facing the door, with their backs to the +wall, and the speaker continued his remarks. His manner became less +guarded and his words more distinct as he proceeded: + +"No," said he, "I've thought it all over, and I don't like it. It's +dangerous." + +"Dangerous!" grunted the "deaf and dumb" Spaniard--to the vast +surprise of the boys. "Milksop!" + +This voice made the boys gasp and quake. It was Injun Joe's! There was +silence for some time. Then Joe said: + +"What's any more dangerous than that job up yonder--but nothing's come +of it." + +"That's different. Away up the river so, and not another house about. +'Twon't ever be known that we tried, anyway, long as we didn't succeed." + +"Well, what's more dangerous than coming here in the daytime!--anybody +would suspicion us that saw us." + +"I know that. But there warn't any other place as handy after that +fool of a job. I want to quit this shanty. I wanted to yesterday, only +it warn't any use trying to stir out of here, with those infernal boys +playing over there on the hill right in full view." + +"Those infernal boys" quaked again under the inspiration of this +remark, and thought how lucky it was that they had remembered it was +Friday and concluded to wait a day. They wished in their hearts they +had waited a year. + +The two men got out some food and made a luncheon. After a long and +thoughtful silence, Injun Joe said: + +"Look here, lad--you go back up the river where you belong. Wait there +till you hear from me. I'll take the chances on dropping into this town +just once more, for a look. We'll do that 'dangerous' job after I've +spied around a little and think things look well for it. Then for +Texas! We'll leg it together!" + +This was satisfactory. Both men presently fell to yawning, and Injun +Joe said: + +"I'm dead for sleep! It's your turn to watch." + +He curled down in the weeds and soon began to snore. His comrade +stirred him once or twice and he became quiet. Presently the watcher +began to nod; his head drooped lower and lower, both men began to snore +now. + +The boys drew a long, grateful breath. Tom whispered: + +"Now's our chance--come!" + +Huck said: + +"I can't--I'd die if they was to wake." + +Tom urged--Huck held back. At last Tom rose slowly and softly, and +started alone. But the first step he made wrung such a hideous creak +from the crazy floor that he sank down almost dead with fright. He +never made a second attempt. The boys lay there counting the dragging +moments till it seemed to them that time must be done and eternity +growing gray; and then they were grateful to note that at last the sun +was setting. + +Now one snore ceased. Injun Joe sat up, stared around--smiled grimly +upon his comrade, whose head was drooping upon his knees--stirred him +up with his foot and said: + +"Here! YOU'RE a watchman, ain't you! All right, though--nothing's +happened." + +"My! have I been asleep?" + +"Oh, partly, partly. 
Nearly time for us to be moving, pard. What'll we +do with what little swag we've got left?" + +"I don't know--leave it here as we've always done, I reckon. No use to +take it away till we start south. Six hundred and fifty in silver's +something to carry." + +"Well--all right--it won't matter to come here once more." + +"No--but I'd say come in the night as we used to do--it's better." + +"Yes: but look here; it may be a good while before I get the right +chance at that job; accidents might happen; 'tain't in such a very good +place; we'll just regularly bury it--and bury it deep." + +"Good idea," said the comrade, who walked across the room, knelt down, +raised one of the rearward hearth-stones and took out a bag that +jingled pleasantly. He subtracted from it twenty or thirty dollars for +himself and as much for Injun Joe, and passed the bag to the latter, +who was on his knees in the corner, now, digging with his bowie-knife. + +The boys forgot all their fears, all their miseries in an instant. +With gloating eyes they watched every movement. Luck!--the splendor of +it was beyond all imagination! Six hundred dollars was money enough to +make half a dozen boys rich! Here was treasure-hunting under the +happiest auspices--there would not be any bothersome uncertainty as to +where to dig. They nudged each other every moment--eloquent nudges and +easily understood, for they simply meant--"Oh, but ain't you glad NOW +we're here!" + +Joe's knife struck upon something. + +"Hello!" said he. + +"What is it?" said his comrade. + +"Half-rotten plank--no, it's a box, I believe. Here--bear a hand and +we'll see what it's here for. Never mind, I've broke a hole." + +He reached his hand in and drew it out-- + +"Man, it's money!" + +The two men examined the handful of coins. They were gold. The boys +above were as excited as themselves, and as delighted. + +Joe's comrade said: + +"We'll make quick work of this. There's an old rusty pick over amongst +the weeds in the corner the other side of the fireplace--I saw it a +minute ago." + +He ran and brought the boys' pick and shovel. Injun Joe took the pick, +looked it over critically, shook his head, muttered something to +himself, and then began to use it. The box was soon unearthed. It was +not very large; it was iron bound and had been very strong before the +slow years had injured it. The men contemplated the treasure awhile in +blissful silence. + +"Pard, there's thousands of dollars here," said Injun Joe. + +"'Twas always said that Murrel's gang used to be around here one +summer," the stranger observed. + +"I know it," said Injun Joe; "and this looks like it, I should say." + +"Now you won't need to do that job." + +The half-breed frowned. Said he: + +"You don't know me. Least you don't know all about that thing. 'Tain't +robbery altogether--it's REVENGE!" and a wicked light flamed in his +eyes. "I'll need your help in it. When it's finished--then Texas. Go +home to your Nance and your kids, and stand by till you hear from me." + +"Well--if you say so; what'll we do with this--bury it again?" + +"Yes. [Ravishing delight overhead.] NO! by the great Sachem, no! +[Profound distress overhead.] I'd nearly forgot. That pick had fresh +earth on it! [The boys were sick with terror in a moment.] What +business has a pick and a shovel here? What business with fresh earth +on them? Who brought them here--and where are they gone? Have you heard +anybody?--seen anybody? What! bury it again and leave them to come and +see the ground disturbed? 
Not exactly--not exactly. We'll take it to my +den." + +"Why, of course! Might have thought of that before. You mean Number +One?" + +"No--Number Two--under the cross. The other place is bad--too common." + +"All right. It's nearly dark enough to start." + +Injun Joe got up and went about from window to window cautiously +peeping out. Presently he said: + +"Who could have brought those tools here? Do you reckon they can be +up-stairs?" + +The boys' breath forsook them. Injun Joe put his hand on his knife, +halted a moment, undecided, and then turned toward the stairway. The +boys thought of the closet, but their strength was gone. The steps came +creaking up the stairs--the intolerable distress of the situation woke +the stricken resolution of the lads--they were about to spring for the +closet, when there was a crash of rotten timbers and Injun Joe landed +on the ground amid the debris of the ruined stairway. He gathered +himself up cursing, and his comrade said: + +"Now what's the use of all that? If it's anybody, and they're up +there, let them STAY there--who cares? If they want to jump down, now, +and get into trouble, who objects? It will be dark in fifteen minutes +--and then let them follow us if they want to. I'm willing. In my +opinion, whoever hove those things in here caught a sight of us and +took us for ghosts or devils or something. I'll bet they're running +yet." + +Joe grumbled awhile; then he agreed with his friend that what daylight +was left ought to be economized in getting things ready for leaving. +Shortly afterward they slipped out of the house in the deepening +twilight, and moved toward the river with their precious box. + +Tom and Huck rose up, weak but vastly relieved, and stared after them +through the chinks between the logs of the house. Follow? Not they. +They were content to reach ground again without broken necks, and take +the townward track over the hill. They did not talk much. They were too +much absorbed in hating themselves--hating the ill luck that made them +take the spade and the pick there. But for that, Injun Joe never would +have suspected. He would have hidden the silver with the gold to wait +there till his "revenge" was satisfied, and then he would have had the +misfortune to find that money turn up missing. Bitter, bitter luck that +the tools were ever brought there! + +They resolved to keep a lookout for that Spaniard when he should come +to town spying out for chances to do his revengeful job, and follow him +to "Number Two," wherever that might be. Then a ghastly thought +occurred to Tom. + +"Revenge? What if he means US, Huck!" + +"Oh, don't!" said Huck, nearly fainting. + +They talked it all over, and as they entered town they agreed to +believe that he might possibly mean somebody else--at least that he +might at least mean nobody but Tom, since only Tom had testified. + +Very, very small comfort it was to Tom to be alone in danger! Company +would be a palpable improvement, he thought. + + + +CHAPTER XXVII + +THE adventure of the day mightily tormented Tom's dreams that night. +Four times he had his hands on that rich treasure and four times it +wasted to nothingness in his fingers as sleep forsook him and +wakefulness brought back the hard reality of his misfortune. As he lay +in the early morning recalling the incidents of his great adventure, he +noticed that they seemed curiously subdued and far away--somewhat as if +they had happened in another world, or in a time long gone by. 
Then it +occurred to him that the great adventure itself must be a dream! There +was one very strong argument in favor of this idea--namely, that the +quantity of coin he had seen was too vast to be real. He had never seen +as much as fifty dollars in one mass before, and he was like all boys +of his age and station in life, in that he imagined that all references +to "hundreds" and "thousands" were mere fanciful forms of speech, and +that no such sums really existed in the world. He never had supposed +for a moment that so large a sum as a hundred dollars was to be found +in actual money in any one's possession. If his notions of hidden +treasure had been analyzed, they would have been found to consist of a +handful of real dimes and a bushel of vague, splendid, ungraspable +dollars. + +But the incidents of his adventure grew sensibly sharper and clearer +under the attrition of thinking them over, and so he presently found +himself leaning to the impression that the thing might not have been a +dream, after all. This uncertainty must be swept away. He would snatch +a hurried breakfast and go and find Huck. Huck was sitting on the +gunwale of a flatboat, listlessly dangling his feet in the water and +looking very melancholy. Tom concluded to let Huck lead up to the +subject. If he did not do it, then the adventure would be proved to +have been only a dream. + +"Hello, Huck!" + +"Hello, yourself." + +Silence, for a minute. + +"Tom, if we'd 'a' left the blame tools at the dead tree, we'd 'a' got +the money. Oh, ain't it awful!" + +"'Tain't a dream, then, 'tain't a dream! Somehow I most wish it was. +Dog'd if I don't, Huck." + +"What ain't a dream?" + +"Oh, that thing yesterday. I been half thinking it was." + +"Dream! If them stairs hadn't broke down you'd 'a' seen how much dream +it was! I've had dreams enough all night--with that patch-eyed Spanish +devil going for me all through 'em--rot him!" + +"No, not rot him. FIND him! Track the money!" + +"Tom, we'll never find him. A feller don't have only one chance for +such a pile--and that one's lost. I'd feel mighty shaky if I was to see +him, anyway." + +"Well, so'd I; but I'd like to see him, anyway--and track him out--to +his Number Two." + +"Number Two--yes, that's it. I been thinking 'bout that. But I can't +make nothing out of it. What do you reckon it is?" + +"I dono. It's too deep. Say, Huck--maybe it's the number of a house!" + +"Goody!... No, Tom, that ain't it. If it is, it ain't in this +one-horse town. They ain't no numbers here." + +"Well, that's so. Lemme think a minute. Here--it's the number of a +room--in a tavern, you know!" + +"Oh, that's the trick! They ain't only two taverns. We can find out +quick." + +"You stay here, Huck, till I come." + +Tom was off at once. He did not care to have Huck's company in public +places. He was gone half an hour. He found that in the best tavern, No. +2 had long been occupied by a young lawyer, and was still so occupied. +In the less ostentatious house, No. 2 was a mystery. The +tavern-keeper's young son said it was kept locked all the time, and he +never saw anybody go into it or come out of it except at night; he did +not know any particular reason for this state of things; had had some +little curiosity, but it was rather feeble; had made the most of the +mystery by entertaining himself with the idea that that room was +"ha'nted"; had noticed that there was a light in there the night before. + +"That's what I've found out, Huck. I reckon that's the very No. 2 +we're after." 
+ +"I reckon it is, Tom. Now what you going to do?" + +"Lemme think." + +Tom thought a long time. Then he said: + +"I'll tell you. The back door of that No. 2 is the door that comes out +into that little close alley between the tavern and the old rattle trap +of a brick store. Now you get hold of all the door-keys you can find, +and I'll nip all of auntie's, and the first dark night we'll go there +and try 'em. And mind you, keep a lookout for Injun Joe, because he +said he was going to drop into town and spy around once more for a +chance to get his revenge. If you see him, you just follow him; and if +he don't go to that No. 2, that ain't the place." + +"Lordy, I don't want to foller him by myself!" + +"Why, it'll be night, sure. He mightn't ever see you--and if he did, +maybe he'd never think anything." + +"Well, if it's pretty dark I reckon I'll track him. I dono--I dono. +I'll try." + +"You bet I'll follow him, if it's dark, Huck. Why, he might 'a' found +out he couldn't get his revenge, and be going right after that money." + +"It's so, Tom, it's so. I'll foller him; I will, by jingoes!" + +"Now you're TALKING! Don't you ever weaken, Huck, and I won't." + + + +CHAPTER XXVIII + +THAT night Tom and Huck were ready for their adventure. They hung +about the neighborhood of the tavern until after nine, one watching the +alley at a distance and the other the tavern door. Nobody entered the +alley or left it; nobody resembling the Spaniard entered or left the +tavern door. The night promised to be a fair one; so Tom went home with +the understanding that if a considerable degree of darkness came on, +Huck was to come and "maow," whereupon he would slip out and try the +keys. But the night remained clear, and Huck closed his watch and +retired to bed in an empty sugar hogshead about twelve. + +Tuesday the boys had the same ill luck. Also Wednesday. But Thursday +night promised better. Tom slipped out in good season with his aunt's +old tin lantern, and a large towel to blindfold it with. He hid the +lantern in Huck's sugar hogshead and the watch began. An hour before +midnight the tavern closed up and its lights (the only ones +thereabouts) were put out. No Spaniard had been seen. Nobody had +entered or left the alley. Everything was auspicious. The blackness of +darkness reigned, the perfect stillness was interrupted only by +occasional mutterings of distant thunder. + +Tom got his lantern, lit it in the hogshead, wrapped it closely in the +towel, and the two adventurers crept in the gloom toward the tavern. +Huck stood sentry and Tom felt his way into the alley. Then there was a +season of waiting anxiety that weighed upon Huck's spirits like a +mountain. He began to wish he could see a flash from the lantern--it +would frighten him, but it would at least tell him that Tom was alive +yet. It seemed hours since Tom had disappeared. Surely he must have +fainted; maybe he was dead; maybe his heart had burst under terror and +excitement. In his uneasiness Huck found himself drawing closer and +closer to the alley; fearing all sorts of dreadful things, and +momentarily expecting some catastrophe to happen that would take away +his breath. There was not much to take away, for he seemed only able to +inhale it by thimblefuls, and his heart would soon wear itself out, the +way it was beating. Suddenly there was a flash of light and Tom came +tearing by him: "Run!" said he; "run, for your life!" 
+ +He needn't have repeated it; once was enough; Huck was making thirty +or forty miles an hour before the repetition was uttered. The boys +never stopped till they reached the shed of a deserted slaughter-house +at the lower end of the village. Just as they got within its shelter +the storm burst and the rain poured down. As soon as Tom got his breath +he said: + +"Huck, it was awful! I tried two of the keys, just as soft as I could; +but they seemed to make such a power of racket that I couldn't hardly +get my breath I was so scared. They wouldn't turn in the lock, either. +Well, without noticing what I was doing, I took hold of the knob, and +open comes the door! It warn't locked! I hopped in, and shook off the +towel, and, GREAT CAESAR'S GHOST!" + +"What!--what'd you see, Tom?" + +"Huck, I most stepped onto Injun Joe's hand!" + +"No!" + +"Yes! He was lying there, sound asleep on the floor, with his old +patch on his eye and his arms spread out." + +"Lordy, what did you do? Did he wake up?" + +"No, never budged. Drunk, I reckon. I just grabbed that towel and +started!" + +"I'd never 'a' thought of the towel, I bet!" + +"Well, I would. My aunt would make me mighty sick if I lost it." + +"Say, Tom, did you see that box?" + +"Huck, I didn't wait to look around. I didn't see the box, I didn't +see the cross. I didn't see anything but a bottle and a tin cup on the +floor by Injun Joe; yes, I saw two barrels and lots more bottles in the +room. Don't you see, now, what's the matter with that ha'nted room?" + +"How?" + +"Why, it's ha'nted with whiskey! Maybe ALL the Temperance Taverns have +got a ha'nted room, hey, Huck?" + +"Well, I reckon maybe that's so. Who'd 'a' thought such a thing? But +say, Tom, now's a mighty good time to get that box, if Injun Joe's +drunk." + +"It is, that! You try it!" + +Huck shuddered. + +"Well, no--I reckon not." + +"And I reckon not, Huck. Only one bottle alongside of Injun Joe ain't +enough. If there'd been three, he'd be drunk enough and I'd do it." + +There was a long pause for reflection, and then Tom said: + +"Lookyhere, Huck, less not try that thing any more till we know Injun +Joe's not in there. It's too scary. Now, if we watch every night, we'll +be dead sure to see him go out, some time or other, and then we'll +snatch that box quicker'n lightning." + +"Well, I'm agreed. I'll watch the whole night long, and I'll do it +every night, too, if you'll do the other part of the job." + +"All right, I will. All you got to do is to trot up Hooper Street a +block and maow--and if I'm asleep, you throw some gravel at the window +and that'll fetch me." + +"Agreed, and good as wheat!" + +"Now, Huck, the storm's over, and I'll go home. It'll begin to be +daylight in a couple of hours. You go back and watch that long, will +you?" + +"I said I would, Tom, and I will. I'll ha'nt that tavern every night +for a year! I'll sleep all day and I'll stand watch all night." + +"That's all right. Now, where you going to sleep?" + +"In Ben Rogers' hayloft. He lets me, and so does his pap's nigger man, +Uncle Jake. I tote water for Uncle Jake whenever he wants me to, and +any time I ask him he gives me a little something to eat if he can +spare it. That's a mighty good nigger, Tom. He likes me, becuz I don't +ever act as if I was above him. Sometime I've set right down and eat +WITH him. But you needn't tell that. A body's got to do things when +he's awful hungry he wouldn't want to do as a steady thing." + +"Well, if I don't want you in the daytime, I'll let you sleep. 
I won't +come bothering around. Any time you see something's up, in the night, +just skip right around and maow." + + + +CHAPTER XXIX + +THE first thing Tom heard on Friday morning was a glad piece of news +--Judge Thatcher's family had come back to town the night before. Both +Injun Joe and the treasure sunk into secondary importance for a moment, +and Becky took the chief place in the boy's interest. He saw her and +they had an exhausting good time playing "hi-spy" and "gully-keeper" +with a crowd of their school-mates. The day was completed and crowned +in a peculiarly satisfactory way: Becky teased her mother to appoint +the next day for the long-promised and long-delayed picnic, and she +consented. The child's delight was boundless; and Tom's not more +moderate. The invitations were sent out before sunset, and straightway +the young folks of the village were thrown into a fever of preparation +and pleasurable anticipation. Tom's excitement enabled him to keep +awake until a pretty late hour, and he had good hopes of hearing Huck's +"maow," and of having his treasure to astonish Becky and the picnickers +with, next day; but he was disappointed. No signal came that night. + +Morning came, eventually, and by ten or eleven o'clock a giddy and +rollicking company were gathered at Judge Thatcher's, and everything +was ready for a start. It was not the custom for elderly people to mar +the picnics with their presence. The children were considered safe +enough under the wings of a few young ladies of eighteen and a few +young gentlemen of twenty-three or thereabouts. The old steam ferryboat +was chartered for the occasion; presently the gay throng filed up the +main street laden with provision-baskets. Sid was sick and had to miss +the fun; Mary remained at home to entertain him. The last thing Mrs. +Thatcher said to Becky, was: + +"You'll not get back till late. Perhaps you'd better stay all night +with some of the girls that live near the ferry-landing, child." + +"Then I'll stay with Susy Harper, mamma." + +"Very well. And mind and behave yourself and don't be any trouble." + +Presently, as they tripped along, Tom said to Becky: + +"Say--I'll tell you what we'll do. 'Stead of going to Joe Harper's +we'll climb right up the hill and stop at the Widow Douglas'. She'll +have ice-cream! She has it most every day--dead loads of it. And she'll +be awful glad to have us." + +"Oh, that will be fun!" + +Then Becky reflected a moment and said: + +"But what will mamma say?" + +"How'll she ever know?" + +The girl turned the idea over in her mind, and said reluctantly: + +"I reckon it's wrong--but--" + +"But shucks! Your mother won't know, and so what's the harm? All she +wants is that you'll be safe; and I bet you she'd 'a' said go there if +she'd 'a' thought of it. I know she would!" + +The Widow Douglas' splendid hospitality was a tempting bait. It and +Tom's persuasions presently carried the day. So it was decided to say +nothing anybody about the night's programme. Presently it occurred to +Tom that maybe Huck might come this very night and give the signal. The +thought took a deal of the spirit out of his anticipations. Still he +could not bear to give up the fun at Widow Douglas'. And why should he +give it up, he reasoned--the signal did not come the night before, so +why should it be any more likely to come to-night? 
The sure fun of the +evening outweighed the uncertain treasure; and, boy-like, he determined +to yield to the stronger inclination and not allow himself to think of +the box of money another time that day. + +Three miles below town the ferryboat stopped at the mouth of a woody +hollow and tied up. The crowd swarmed ashore and soon the forest +distances and craggy heights echoed far and near with shoutings and +laughter. All the different ways of getting hot and tired were gone +through with, and by-and-by the rovers straggled back to camp fortified +with responsible appetites, and then the destruction of the good things +began. After the feast there was a refreshing season of rest and chat +in the shade of spreading oaks. By-and-by somebody shouted: + +"Who's ready for the cave?" + +Everybody was. Bundles of candles were procured, and straightway there +was a general scamper up the hill. The mouth of the cave was up the +hillside--an opening shaped like a letter A. Its massive oaken door +stood unbarred. Within was a small chamber, chilly as an ice-house, and +walled by Nature with solid limestone that was dewy with a cold sweat. +It was romantic and mysterious to stand here in the deep gloom and look +out upon the green valley shining in the sun. But the impressiveness of +the situation quickly wore off, and the romping began again. The moment +a candle was lighted there was a general rush upon the owner of it; a +struggle and a gallant defence followed, but the candle was soon +knocked down or blown out, and then there was a glad clamor of laughter +and a new chase. But all things have an end. By-and-by the procession +went filing down the steep descent of the main avenue, the flickering +rank of lights dimly revealing the lofty walls of rock almost to their +point of junction sixty feet overhead. This main avenue was not more +than eight or ten feet wide. Every few steps other lofty and still +narrower crevices branched from it on either hand--for McDougal's cave +was but a vast labyrinth of crooked aisles that ran into each other and +out again and led nowhere. It was said that one might wander days and +nights together through its intricate tangle of rifts and chasms, and +never find the end of the cave; and that he might go down, and down, +and still down, into the earth, and it was just the same--labyrinth +under labyrinth, and no end to any of them. No man "knew" the cave. +That was an impossible thing. Most of the young men knew a portion of +it, and it was not customary to venture much beyond this known portion. +Tom Sawyer knew as much of the cave as any one. + +The procession moved along the main avenue some three-quarters of a +mile, and then groups and couples began to slip aside into branch +avenues, fly along the dismal corridors, and take each other by +surprise at points where the corridors joined again. Parties were able +to elude each other for the space of half an hour without going beyond +the "known" ground. + +By-and-by, one group after another came straggling back to the mouth +of the cave, panting, hilarious, smeared from head to foot with tallow +drippings, daubed with clay, and entirely delighted with the success of +the day. Then they were astonished to find that they had been taking no +note of time and that night was about at hand. The clanging bell had +been calling for half an hour. However, this sort of close to the day's +adventures was romantic and therefore satisfactory. 
When the ferryboat +with her wild freight pushed into the stream, nobody cared sixpence for +the wasted time but the captain of the craft. + +Huck was already upon his watch when the ferryboat's lights went +glinting past the wharf. He heard no noise on board, for the young +people were as subdued and still as people usually are who are nearly +tired to death. He wondered what boat it was, and why she did not stop +at the wharf--and then he dropped her out of his mind and put his +attention upon his business. The night was growing cloudy and dark. Ten +o'clock came, and the noise of vehicles ceased, scattered lights began +to wink out, all straggling foot-passengers disappeared, the village +betook itself to its slumbers and left the small watcher alone with the +silence and the ghosts. Eleven o'clock came, and the tavern lights were +put out; darkness everywhere, now. Huck waited what seemed a weary long +time, but nothing happened. His faith was weakening. Was there any use? +Was there really any use? Why not give it up and turn in? + +A noise fell upon his ear. He was all attention in an instant. The +alley door closed softly. He sprang to the corner of the brick store. +The next moment two men brushed by him, and one seemed to have +something under his arm. It must be that box! So they were going to +remove the treasure. Why call Tom now? It would be absurd--the men +would get away with the box and never be found again. No, he would +stick to their wake and follow them; he would trust to the darkness for +security from discovery. So communing with himself, Huck stepped out +and glided along behind the men, cat-like, with bare feet, allowing +them to keep just far enough ahead not to be invisible. + +They moved up the river street three blocks, then turned to the left +up a cross-street. They went straight ahead, then, until they came to +the path that led up Cardiff Hill; this they took. They passed by the +old Welshman's house, half-way up the hill, without hesitating, and +still climbed upward. Good, thought Huck, they will bury it in the old +quarry. But they never stopped at the quarry. They passed on, up the +summit. They plunged into the narrow path between the tall sumach +bushes, and were at once hidden in the gloom. Huck closed up and +shortened his distance, now, for they would never be able to see him. +He trotted along awhile; then slackened his pace, fearing he was +gaining too fast; moved on a piece, then stopped altogether; listened; +no sound; none, save that he seemed to hear the beating of his own +heart. The hooting of an owl came over the hill--ominous sound! But no +footsteps. Heavens, was everything lost! He was about to spring with +winged feet, when a man cleared his throat not four feet from him! +Huck's heart shot into his throat, but he swallowed it again; and then +he stood there shaking as if a dozen agues had taken charge of him at +once, and so weak that he thought he must surely fall to the ground. He +knew where he was. He knew he was within five steps of the stile +leading into Widow Douglas' grounds. Very well, he thought, let them +bury it there; it won't be hard to find. + +Now there was a voice--a very low voice--Injun Joe's: + +"Damn her, maybe she's got company--there's lights, late as it is." + +"I can't see any." + +This was that stranger's voice--the stranger of the haunted house. A +deadly chill went to Huck's heart--this, then, was the "revenge" job! +His thought was, to fly. 
Then he remembered that the Widow Douglas had +been kind to him more than once, and maybe these men were going to +murder her. He wished he dared venture to warn her; but he knew he +didn't dare--they might come and catch him. He thought all this and +more in the moment that elapsed between the stranger's remark and Injun +Joe's next--which was-- + +"Because the bush is in your way. Now--this way--now you see, don't +you?" + +"Yes. Well, there IS company there, I reckon. Better give it up." + +"Give it up, and I just leaving this country forever! Give it up and +maybe never have another chance. I tell you again, as I've told you +before, I don't care for her swag--you may have it. But her husband was +rough on me--many times he was rough on me--and mainly he was the +justice of the peace that jugged me for a vagrant. And that ain't all. +It ain't a millionth part of it! He had me HORSEWHIPPED!--horsewhipped +in front of the jail, like a nigger!--with all the town looking on! +HORSEWHIPPED!--do you understand? He took advantage of me and died. But +I'll take it out of HER." + +"Oh, don't kill her! Don't do that!" + +"Kill? Who said anything about killing? I would kill HIM if he was +here; but not her. When you want to get revenge on a woman you don't +kill her--bosh! you go for her looks. You slit her nostrils--you notch +her ears like a sow!" + +"By God, that's--" + +"Keep your opinion to yourself! It will be safest for you. I'll tie +her to the bed. If she bleeds to death, is that my fault? I'll not cry, +if she does. My friend, you'll help me in this thing--for MY sake +--that's why you're here--I mightn't be able alone. If you flinch, I'll +kill you. Do you understand that? And if I have to kill you, I'll kill +her--and then I reckon nobody'll ever know much about who done this +business." + +"Well, if it's got to be done, let's get at it. The quicker the +better--I'm all in a shiver." + +"Do it NOW? And company there? Look here--I'll get suspicious of you, +first thing you know. No--we'll wait till the lights are out--there's +no hurry." + +Huck felt that a silence was going to ensue--a thing still more awful +than any amount of murderous talk; so he held his breath and stepped +gingerly back; planted his foot carefully and firmly, after balancing, +one-legged, in a precarious way and almost toppling over, first on one +side and then on the other. He took another step back, with the same +elaboration and the same risks; then another and another, and--a twig +snapped under his foot! His breath stopped and he listened. There was +no sound--the stillness was perfect. His gratitude was measureless. Now +he turned in his tracks, between the walls of sumach bushes--turned +himself as carefully as if he were a ship--and then stepped quickly but +cautiously along. When he emerged at the quarry he felt secure, and so +he picked up his nimble heels and flew. Down, down he sped, till he +reached the Welshman's. He banged at the door, and presently the heads +of the old man and his two stalwart sons were thrust from windows. + +"What's the row there? Who's banging? What do you want?" + +"Let me in--quick! I'll tell everything." + +"Why, who are you?" + +"Huckleberry Finn--quick, let me in!" + +"Huckleberry Finn, indeed! It ain't a name to open many doors, I +judge! But let him in, lads, and let's see what's the trouble." + +"Please don't ever tell I told you," were Huck's first words when he +got in. 
"Please don't--I'd be killed, sure--but the widow's been good +friends to me sometimes, and I want to tell--I WILL tell if you'll +promise you won't ever say it was me." + +"By George, he HAS got something to tell, or he wouldn't act so!" +exclaimed the old man; "out with it and nobody here'll ever tell, lad." + +Three minutes later the old man and his sons, well armed, were up the +hill, and just entering the sumach path on tiptoe, their weapons in +their hands. Huck accompanied them no further. He hid behind a great +bowlder and fell to listening. There was a lagging, anxious silence, +and then all of a sudden there was an explosion of firearms and a cry. + +Huck waited for no particulars. He sprang away and sped down the hill +as fast as his legs could carry him. + + + +CHAPTER XXX + +AS the earliest suspicion of dawn appeared on Sunday morning, Huck +came groping up the hill and rapped gently at the old Welshman's door. +The inmates were asleep, but it was a sleep that was set on a +hair-trigger, on account of the exciting episode of the night. A call +came from a window: + +"Who's there!" + +Huck's scared voice answered in a low tone: + +"Please let me in! It's only Huck Finn!" + +"It's a name that can open this door night or day, lad!--and welcome!" + +These were strange words to the vagabond boy's ears, and the +pleasantest he had ever heard. He could not recollect that the closing +word had ever been applied in his case before. The door was quickly +unlocked, and he entered. Huck was given a seat and the old man and his +brace of tall sons speedily dressed themselves. + +"Now, my boy, I hope you're good and hungry, because breakfast will be +ready as soon as the sun's up, and we'll have a piping hot one, too +--make yourself easy about that! I and the boys hoped you'd turn up and +stop here last night." + +"I was awful scared," said Huck, "and I run. I took out when the +pistols went off, and I didn't stop for three mile. I've come now becuz +I wanted to know about it, you know; and I come before daylight becuz I +didn't want to run across them devils, even if they was dead." + +"Well, poor chap, you do look as if you'd had a hard night of it--but +there's a bed here for you when you've had your breakfast. No, they +ain't dead, lad--we are sorry enough for that. You see we knew right +where to put our hands on them, by your description; so we crept along +on tiptoe till we got within fifteen feet of them--dark as a cellar +that sumach path was--and just then I found I was going to sneeze. It +was the meanest kind of luck! I tried to keep it back, but no use +--'twas bound to come, and it did come! I was in the lead with my pistol +raised, and when the sneeze started those scoundrels a-rustling to get +out of the path, I sung out, 'Fire boys!' and blazed away at the place +where the rustling was. So did the boys. But they were off in a jiffy, +those villains, and we after them, down through the woods. I judge we +never touched them. They fired a shot apiece as they started, but their +bullets whizzed by and didn't do us any harm. As soon as we lost the +sound of their feet we quit chasing, and went down and stirred up the +constables. They got a posse together, and went off to guard the river +bank, and as soon as it is light the sheriff and a gang are going to +beat up the woods. My boys will be with them presently. I wish we had +some sort of description of those rascals--'twould help a good deal. +But you couldn't see what they were like, in the dark, lad, I suppose?" 
+ +"Oh yes; I saw them down-town and follered them." + +"Splendid! Describe them--describe them, my boy!" + +"One's the old deaf and dumb Spaniard that's ben around here once or +twice, and t'other's a mean-looking, ragged--" + +"That's enough, lad, we know the men! Happened on them in the woods +back of the widow's one day, and they slunk away. Off with you, boys, +and tell the sheriff--get your breakfast to-morrow morning!" + +The Welshman's sons departed at once. As they were leaving the room +Huck sprang up and exclaimed: + +"Oh, please don't tell ANYbody it was me that blowed on them! Oh, +please!" + +"All right if you say it, Huck, but you ought to have the credit of +what you did." + +"Oh no, no! Please don't tell!" + +When the young men were gone, the old Welshman said: + +"They won't tell--and I won't. But why don't you want it known?" + +Huck would not explain, further than to say that he already knew too +much about one of those men and would not have the man know that he +knew anything against him for the whole world--he would be killed for +knowing it, sure. + +The old man promised secrecy once more, and said: + +"How did you come to follow these fellows, lad? Were they looking +suspicious?" + +Huck was silent while he framed a duly cautious reply. Then he said: + +"Well, you see, I'm a kind of a hard lot,--least everybody says so, +and I don't see nothing agin it--and sometimes I can't sleep much, on +account of thinking about it and sort of trying to strike out a new way +of doing. That was the way of it last night. I couldn't sleep, and so I +come along up-street 'bout midnight, a-turning it all over, and when I +got to that old shackly brick store by the Temperance Tavern, I backed +up agin the wall to have another think. Well, just then along comes +these two chaps slipping along close by me, with something under their +arm, and I reckoned they'd stole it. One was a-smoking, and t'other one +wanted a light; so they stopped right before me and the cigars lit up +their faces and I see that the big one was the deaf and dumb Spaniard, +by his white whiskers and the patch on his eye, and t'other one was a +rusty, ragged-looking devil." + +"Could you see the rags by the light of the cigars?" + +This staggered Huck for a moment. Then he said: + +"Well, I don't know--but somehow it seems as if I did." + +"Then they went on, and you--" + +"Follered 'em--yes. That was it. I wanted to see what was up--they +sneaked along so. I dogged 'em to the widder's stile, and stood in the +dark and heard the ragged one beg for the widder, and the Spaniard +swear he'd spile her looks just as I told you and your two--" + +"What! The DEAF AND DUMB man said all that!" + +Huck had made another terrible mistake! He was trying his best to keep +the old man from getting the faintest hint of who the Spaniard might +be, and yet his tongue seemed determined to get him into trouble in +spite of all he could do. He made several efforts to creep out of his +scrape, but the old man's eye was upon him and he made blunder after +blunder. Presently the Welshman said: + +"My boy, don't be afraid of me. I wouldn't hurt a hair of your head +for all the world. No--I'd protect you--I'd protect you. This Spaniard +is not deaf and dumb; you've let that slip without intending it; you +can't cover that up now. You know something about that Spaniard that +you want to keep dark. Now trust me--tell me what it is, and trust me +--I won't betray you." 
+ +Huck looked into the old man's honest eyes a moment, then bent over +and whispered in his ear: + +"'Tain't a Spaniard--it's Injun Joe!" + +The Welshman almost jumped out of his chair. In a moment he said: + +"It's all plain enough, now. When you talked about notching ears and +slitting noses I judged that that was your own embellishment, because +white men don't take that sort of revenge. But an Injun! That's a +different matter altogether." + +During breakfast the talk went on, and in the course of it the old man +said that the last thing which he and his sons had done, before going +to bed, was to get a lantern and examine the stile and its vicinity for +marks of blood. They found none, but captured a bulky bundle of-- + +"Of WHAT?" + +If the words had been lightning they could not have leaped with a more +stunning suddenness from Huck's blanched lips. His eyes were staring +wide, now, and his breath suspended--waiting for the answer. The +Welshman started--stared in return--three seconds--five seconds--ten +--then replied: + +"Of burglar's tools. Why, what's the MATTER with you?" + +Huck sank back, panting gently, but deeply, unutterably grateful. The +Welshman eyed him gravely, curiously--and presently said: + +"Yes, burglar's tools. That appears to relieve you a good deal. But +what did give you that turn? What were YOU expecting we'd found?" + +Huck was in a close place--the inquiring eye was upon him--he would +have given anything for material for a plausible answer--nothing +suggested itself--the inquiring eye was boring deeper and deeper--a +senseless reply offered--there was no time to weigh it, so at a venture +he uttered it--feebly: + +"Sunday-school books, maybe." + +Poor Huck was too distressed to smile, but the old man laughed loud +and joyously, shook up the details of his anatomy from head to foot, +and ended by saying that such a laugh was money in a-man's pocket, +because it cut down the doctor's bill like everything. Then he added: + +"Poor old chap, you're white and jaded--you ain't well a bit--no +wonder you're a little flighty and off your balance. But you'll come +out of it. Rest and sleep will fetch you out all right, I hope." + +Huck was irritated to think he had been such a goose and betrayed such +a suspicious excitement, for he had dropped the idea that the parcel +brought from the tavern was the treasure, as soon as he had heard the +talk at the widow's stile. He had only thought it was not the treasure, +however--he had not known that it wasn't--and so the suggestion of a +captured bundle was too much for his self-possession. But on the whole +he felt glad the little episode had happened, for now he knew beyond +all question that that bundle was not THE bundle, and so his mind was +at rest and exceedingly comfortable. In fact, everything seemed to be +drifting just in the right direction, now; the treasure must be still +in No. 2, the men would be captured and jailed that day, and he and Tom +could seize the gold that night without any trouble or any fear of +interruption. + +Just as breakfast was completed there was a knock at the door. Huck +jumped for a hiding-place, for he had no mind to be connected even +remotely with the late event. The Welshman admitted several ladies and +gentlemen, among them the Widow Douglas, and noticed that groups of +citizens were climbing up the hill--to stare at the stile. So the news +had spread. The Welshman had to tell the story of the night to the +visitors. The widow's gratitude for her preservation was outspoken. 
+ +"Don't say a word about it, madam. There's another that you're more +beholden to than you are to me and my boys, maybe, but he don't allow +me to tell his name. We wouldn't have been there but for him." + +Of course this excited a curiosity so vast that it almost belittled +the main matter--but the Welshman allowed it to eat into the vitals of +his visitors, and through them be transmitted to the whole town, for he +refused to part with his secret. When all else had been learned, the +widow said: + +"I went to sleep reading in bed and slept straight through all that +noise. Why didn't you come and wake me?" + +"We judged it warn't worth while. Those fellows warn't likely to come +again--they hadn't any tools left to work with, and what was the use of +waking you up and scaring you to death? My three negro men stood guard +at your house all the rest of the night. They've just come back." + +More visitors came, and the story had to be told and retold for a +couple of hours more. + +There was no Sabbath-school during day-school vacation, but everybody +was early at church. The stirring event was well canvassed. News came +that not a sign of the two villains had been yet discovered. When the +sermon was finished, Judge Thatcher's wife dropped alongside of Mrs. +Harper as she moved down the aisle with the crowd and said: + +"Is my Becky going to sleep all day? I just expected she would be +tired to death." + +"Your Becky?" + +"Yes," with a startled look--"didn't she stay with you last night?" + +"Why, no." + +Mrs. Thatcher turned pale, and sank into a pew, just as Aunt Polly, +talking briskly with a friend, passed by. Aunt Polly said: + +"Good-morning, Mrs. Thatcher. Good-morning, Mrs. Harper. I've got a +boy that's turned up missing. I reckon my Tom stayed at your house last +night--one of you. And now he's afraid to come to church. I've got to +settle with him." + +Mrs. Thatcher shook her head feebly and turned paler than ever. + +"He didn't stay with us," said Mrs. Harper, beginning to look uneasy. +A marked anxiety came into Aunt Polly's face. + +"Joe Harper, have you seen my Tom this morning?" + +"No'm." + +"When did you see him last?" + +Joe tried to remember, but was not sure he could say. The people had +stopped moving out of church. Whispers passed along, and a boding +uneasiness took possession of every countenance. Children were +anxiously questioned, and young teachers. They all said they had not +noticed whether Tom and Becky were on board the ferryboat on the +homeward trip; it was dark; no one thought of inquiring if any one was +missing. One young man finally blurted out his fear that they were +still in the cave! Mrs. Thatcher swooned away. Aunt Polly fell to +crying and wringing her hands. + +The alarm swept from lip to lip, from group to group, from street to +street, and within five minutes the bells were wildly clanging and the +whole town was up! The Cardiff Hill episode sank into instant +insignificance, the burglars were forgotten, horses were saddled, +skiffs were manned, the ferryboat ordered out, and before the horror +was half an hour old, two hundred men were pouring down highroad and +river toward the cave. + +All the long afternoon the village seemed empty and dead. Many women +visited Aunt Polly and Mrs. Thatcher and tried to comfort them. They +cried with them, too, and that was still better than words. All the +tedious night the town waited for news; but when the morning dawned at +last, all the word that came was, "Send more candles--and send food." +Mrs. 
Thatcher was almost crazed; and Aunt Polly, also. Judge Thatcher +sent messages of hope and encouragement from the cave, but they +conveyed no real cheer. + +The old Welshman came home toward daylight, spattered with +candle-grease, smeared with clay, and almost worn out. He found Huck +still in the bed that had been provided for him, and delirious with +fever. The physicians were all at the cave, so the Widow Douglas came +and took charge of the patient. She said she would do her best by him, +because, whether he was good, bad, or indifferent, he was the Lord's, +and nothing that was the Lord's was a thing to be neglected. The +Welshman said Huck had good spots in him, and the widow said: + +"You can depend on it. That's the Lord's mark. He don't leave it off. +He never does. Puts it somewhere on every creature that comes from his +hands." + +Early in the forenoon parties of jaded men began to straggle into the +village, but the strongest of the citizens continued searching. All the +news that could be gained was that remotenesses of the cavern were +being ransacked that had never been visited before; that every corner +and crevice was going to be thoroughly searched; that wherever one +wandered through the maze of passages, lights were to be seen flitting +hither and thither in the distance, and shoutings and pistol-shots sent +their hollow reverberations to the ear down the sombre aisles. In one +place, far from the section usually traversed by tourists, the names +"BECKY & TOM" had been found traced upon the rocky wall with +candle-smoke, and near at hand a grease-soiled bit of ribbon. Mrs. +Thatcher recognized the ribbon and cried over it. She said it was the +last relic she should ever have of her child; and that no other memorial +of her could ever be so precious, because this one parted latest from +the living body before the awful death came. Some said that now and +then, in the cave, a far-away speck of light would glimmer, and then a +glorious shout would burst forth and a score of men go trooping down the +echoing aisle--and then a sickening disappointment always followed; the +children were not there; it was only a searcher's light. + +Three dreadful days and nights dragged their tedious hours along, and +the village sank into a hopeless stupor. No one had heart for anything. +The accidental discovery, just made, that the proprietor of the +Temperance Tavern kept liquor on his premises, scarcely fluttered the +public pulse, tremendous as the fact was. In a lucid interval, Huck +feebly led up to the subject of taverns, and finally asked--dimly +dreading the worst--if anything had been discovered at the Temperance +Tavern since he had been ill. + +"Yes," said the widow. + +Huck started up in bed, wild-eyed: + +"What? What was it?" + +"Liquor!--and the place has been shut up. Lie down, child--what a turn +you did give me!" + +"Only tell me just one thing--only just one--please! Was it Tom Sawyer +that found it?" + +The widow burst into tears. "Hush, hush, child, hush! I've told you +before, you must NOT talk. You are very, very sick!" + +Then nothing but liquor had been found; there would have been a great +powwow if it had been the gold. So the treasure was gone forever--gone +forever! But what could she be crying about? Curious that she should +cry. + +These thoughts worked their dim way through Huck's mind, and under the +weariness they gave him he fell asleep. The widow said to herself: + +"There--he's asleep, poor wreck. Tom Sawyer find it! 
Pity but somebody +could find Tom Sawyer! Ah, there ain't many left, now, that's got hope +enough, or strength enough, either, to go on searching." + + + +CHAPTER XXXI + +NOW to return to Tom and Becky's share in the picnic. They tripped +along the murky aisles with the rest of the company, visiting the +familiar wonders of the cave--wonders dubbed with rather +over-descriptive names, such as "The Drawing-Room," "The Cathedral," +"Aladdin's Palace," and so on. Presently the hide-and-seek frolicking +began, and Tom and Becky engaged in it with zeal until the exertion +began to grow a trifle wearisome; then they wandered down a sinuous +avenue holding their candles aloft and reading the tangled web-work of +names, dates, post-office addresses, and mottoes with which the rocky +walls had been frescoed (in candle-smoke). Still drifting along and +talking, they scarcely noticed that they were now in a part of the cave +whose walls were not frescoed. They smoked their own names under an +overhanging shelf and moved on. Presently they came to a place where a +little stream of water, trickling over a ledge and carrying a limestone +sediment with it, had, in the slow-dragging ages, formed a laced and +ruffled Niagara in gleaming and imperishable stone. Tom squeezed his +small body behind it in order to illuminate it for Becky's +gratification. He found that it curtained a sort of steep natural +stairway which was enclosed between narrow walls, and at once the +ambition to be a discoverer seized him. Becky responded to his call, +and they made a smoke-mark for future guidance, and started upon their +quest. They wound this way and that, far down into the secret depths of +the cave, made another mark, and branched off in search of novelties to +tell the upper world about. In one place they found a spacious cavern, +from whose ceiling depended a multitude of shining stalactites of the +length and circumference of a man's leg; they walked all about it, +wondering and admiring, and presently left it by one of the numerous +passages that opened into it. This shortly brought them to a bewitching +spring, whose basin was incrusted with a frostwork of glittering +crystals; it was in the midst of a cavern whose walls were supported by +many fantastic pillars which had been formed by the joining of great +stalactites and stalagmites together, the result of the ceaseless +water-drip of centuries. Under the roof vast knots of bats had packed +themselves together, thousands in a bunch; the lights disturbed the +creatures and they came flocking down by hundreds, squeaking and +darting furiously at the candles. Tom knew their ways and the danger of +this sort of conduct. He seized Becky's hand and hurried her into the +first corridor that offered; and none too soon, for a bat struck +Becky's light out with its wing while she was passing out of the +cavern. The bats chased the children a good distance; but the fugitives +plunged into every new passage that offered, and at last got rid of the +perilous things. Tom found a subterranean lake, shortly, which +stretched its dim length away until its shape was lost in the shadows. +He wanted to explore its borders, but concluded that it would be best +to sit down and rest awhile, first. Now, for the first time, the deep +stillness of the place laid a clammy hand upon the spirits of the +children. Becky said: + +"Why, I didn't notice, but it seems ever so long since I heard any of +the others." 
+ +"Come to think, Becky, we are away down below them--and I don't know +how far away north, or south, or east, or whichever it is. We couldn't +hear them here." + +Becky grew apprehensive. + +"I wonder how long we've been down here, Tom? We better start back." + +"Yes, I reckon we better. P'raps we better." + +"Can you find the way, Tom? It's all a mixed-up crookedness to me." + +"I reckon I could find it--but then the bats. If they put our candles +out it will be an awful fix. Let's try some other way, so as not to go +through there." + +"Well. But I hope we won't get lost. It would be so awful!" and the +girl shuddered at the thought of the dreadful possibilities. + +They started through a corridor, and traversed it in silence a long +way, glancing at each new opening, to see if there was anything +familiar about the look of it; but they were all strange. Every time +Tom made an examination, Becky would watch his face for an encouraging +sign, and he would say cheerily: + +"Oh, it's all right. This ain't the one, but we'll come to it right +away!" + +But he felt less and less hopeful with each failure, and presently +began to turn off into diverging avenues at sheer random, in desperate +hope of finding the one that was wanted. He still said it was "all +right," but there was such a leaden dread at his heart that the words +had lost their ring and sounded just as if he had said, "All is lost!" +Becky clung to his side in an anguish of fear, and tried hard to keep +back the tears, but they would come. At last she said: + +"Oh, Tom, never mind the bats, let's go back that way! We seem to get +worse and worse off all the time." + +"Listen!" said he. + +Profound silence; silence so deep that even their breathings were +conspicuous in the hush. Tom shouted. The call went echoing down the +empty aisles and died out in the distance in a faint sound that +resembled a ripple of mocking laughter. + +"Oh, don't do it again, Tom, it is too horrid," said Becky. + +"It is horrid, but I better, Becky; they might hear us, you know," and +he shouted again. + +The "might" was even a chillier horror than the ghostly laughter, it +so confessed a perishing hope. The children stood still and listened; +but there was no result. Tom turned upon the back track at once, and +hurried his steps. It was but a little while before a certain +indecision in his manner revealed another fearful fact to Becky--he +could not find his way back! + +"Oh, Tom, you didn't make any marks!" + +"Becky, I was such a fool! Such a fool! I never thought we might want +to come back! No--I can't find the way. It's all mixed up." + +"Tom, Tom, we're lost! we're lost! We never can get out of this awful +place! Oh, why DID we ever leave the others!" + +She sank to the ground and burst into such a frenzy of crying that Tom +was appalled with the idea that she might die, or lose her reason. He +sat down by her and put his arms around her; she buried her face in his +bosom, she clung to him, she poured out her terrors, her unavailing +regrets, and the far echoes turned them all to jeering laughter. Tom +begged her to pluck up hope again, and she said she could not. He fell +to blaming and abusing himself for getting her into this miserable +situation; this had a better effect. She said she would try to hope +again, she would get up and follow wherever he might lead if only he +would not talk like that any more. For he was no more to blame than +she, she said. 
+ +So they moved on again--aimlessly--simply at random--all they could do +was to move, keep moving. For a little while, hope made a show of +reviving--not with any reason to back it, but only because it is its +nature to revive when the spring has not been taken out of it by age +and familiarity with failure. + +By-and-by Tom took Becky's candle and blew it out. This economy meant +so much! Words were not needed. Becky understood, and her hope died +again. She knew that Tom had a whole candle and three or four pieces in +his pockets--yet he must economize. + +By-and-by, fatigue began to assert its claims; the children tried to +pay attention, for it was dreadful to think of sitting down when time +was grown to be so precious, moving, in some direction, in any +direction, was at least progress and might bear fruit; but to sit down +was to invite death and shorten its pursuit. + +At last Becky's frail limbs refused to carry her farther. She sat +down. Tom rested with her, and they talked of home, and the friends +there, and the comfortable beds and, above all, the light! Becky cried, +and Tom tried to think of some way of comforting her, but all his +encouragements were grown threadbare with use, and sounded like +sarcasms. Fatigue bore so heavily upon Becky that she drowsed off to +sleep. Tom was grateful. He sat looking into her drawn face and saw it +grow smooth and natural under the influence of pleasant dreams; and +by-and-by a smile dawned and rested there. The peaceful face reflected +somewhat of peace and healing into his own spirit, and his thoughts +wandered away to bygone times and dreamy memories. While he was deep in +his musings, Becky woke up with a breezy little laugh--but it was +stricken dead upon her lips, and a groan followed it. + +"Oh, how COULD I sleep! I wish I never, never had waked! No! No, I +don't, Tom! Don't look so! I won't say it again." + +"I'm glad you've slept, Becky; you'll feel rested, now, and we'll find +the way out." + +"We can try, Tom; but I've seen such a beautiful country in my dream. +I reckon we are going there." + +"Maybe not, maybe not. Cheer up, Becky, and let's go on trying." + +They rose up and wandered along, hand in hand and hopeless. They tried +to estimate how long they had been in the cave, but all they knew was +that it seemed days and weeks, and yet it was plain that this could not +be, for their candles were not gone yet. A long time after this--they +could not tell how long--Tom said they must go softly and listen for +dripping water--they must find a spring. They found one presently, and +Tom said it was time to rest again. Both were cruelly tired, yet Becky +said she thought she could go a little farther. She was surprised to +hear Tom dissent. She could not understand it. They sat down, and Tom +fastened his candle to the wall in front of them with some clay. +Thought was soon busy; nothing was said for some time. Then Becky broke +the silence: + +"Tom, I am so hungry!" + +Tom took something out of his pocket. + +"Do you remember this?" said he. + +Becky almost smiled. + +"It's our wedding-cake, Tom." + +"Yes--I wish it was as big as a barrel, for it's all we've got." + +"I saved it from the picnic for us to dream on, Tom, the way grown-up +people do with wedding-cake--but it'll be our--" + +She dropped the sentence where it was. Tom divided the cake and Becky +ate with good appetite, while Tom nibbled at his moiety. There was +abundance of cold water to finish the feast with. By-and-by Becky +suggested that they move on again. 
Tom was silent a moment. Then he +said: + +"Becky, can you bear it if I tell you something?" + +Becky's face paled, but she thought she could. + +"Well, then, Becky, we must stay here, where there's water to drink. +That little piece is our last candle!" + +Becky gave loose to tears and wailings. Tom did what he could to +comfort her, but with little effect. At length Becky said: + +"Tom!" + +"Well, Becky?" + +"They'll miss us and hunt for us!" + +"Yes, they will! Certainly they will!" + +"Maybe they're hunting for us now, Tom." + +"Why, I reckon maybe they are. I hope they are." + +"When would they miss us, Tom?" + +"When they get back to the boat, I reckon." + +"Tom, it might be dark then--would they notice we hadn't come?" + +"I don't know. But anyway, your mother would miss you as soon as they +got home." + +A frightened look in Becky's face brought Tom to his senses and he saw +that he had made a blunder. Becky was not to have gone home that night! +The children became silent and thoughtful. In a moment a new burst of +grief from Becky showed Tom that the thing in his mind had struck hers +also--that the Sabbath morning might be half spent before Mrs. Thatcher +discovered that Becky was not at Mrs. Harper's. + +The children fastened their eyes upon their bit of candle and watched +it melt slowly and pitilessly away; saw the half inch of wick stand +alone at last; saw the feeble flame rise and fall, climb the thin +column of smoke, linger at its top a moment, and then--the horror of +utter darkness reigned! + +How long afterward it was that Becky came to a slow consciousness that +she was crying in Tom's arms, neither could tell. All that they knew +was, that after what seemed a mighty stretch of time, both awoke out of +a dead stupor of sleep and resumed their miseries once more. Tom said +it might be Sunday, now--maybe Monday. He tried to get Becky to talk, +but her sorrows were too oppressive, all her hopes were gone. Tom said +that they must have been missed long ago, and no doubt the search was +going on. He would shout and maybe some one would come. He tried it; +but in the darkness the distant echoes sounded so hideously that he +tried it no more. + +The hours wasted away, and hunger came to torment the captives again. +A portion of Tom's half of the cake was left; they divided and ate it. +But they seemed hungrier than before. The poor morsel of food only +whetted desire. + +By-and-by Tom said: + +"SH! Did you hear that?" + +Both held their breath and listened. There was a sound like the +faintest, far-off shout. Instantly Tom answered it, and leading Becky +by the hand, started groping down the corridor in its direction. +Presently he listened again; again the sound was heard, and apparently +a little nearer. + +"It's them!" said Tom; "they're coming! Come along, Becky--we're all +right now!" + +The joy of the prisoners was almost overwhelming. Their speed was +slow, however, because pitfalls were somewhat common, and had to be +guarded against. They shortly came to one and had to stop. It might be +three feet deep, it might be a hundred--there was no passing it at any +rate. Tom got down on his breast and reached as far down as he could. +No bottom. They must stay there and wait until the searchers came. They +listened; evidently the distant shoutings were growing more distant! a +moment or two more and they had gone altogether. The heart-sinking +misery of it! Tom whooped until he was hoarse, but it was of no use. 
He +talked hopefully to Becky; but an age of anxious waiting passed and no +sounds came again. + +The children groped their way back to the spring. The weary time +dragged on; they slept again, and awoke famished and woe-stricken. Tom +believed it must be Tuesday by this time. + +Now an idea struck him. There were some side passages near at hand. It +would be better to explore some of these than bear the weight of the +heavy time in idleness. He took a kite-line from his pocket, tied it to +a projection, and he and Becky started, Tom in the lead, unwinding the +line as he groped along. At the end of twenty steps the corridor ended +in a "jumping-off place." Tom got down on his knees and felt below, and +then as far around the corner as he could reach with his hands +conveniently; he made an effort to stretch yet a little farther to the +right, and at that moment, not twenty yards away, a human hand, holding +a candle, appeared from behind a rock! Tom lifted up a glorious shout, +and instantly that hand was followed by the body it belonged to--Injun +Joe's! Tom was paralyzed; he could not move. He was vastly gratified +the next moment, to see the "Spaniard" take to his heels and get +himself out of sight. Tom wondered that Joe had not recognized his +voice and come over and killed him for testifying in court. But the +echoes must have disguised the voice. Without doubt, that was it, he +reasoned. Tom's fright weakened every muscle in his body. He said to +himself that if he had strength enough to get back to the spring he +would stay there, and nothing should tempt him to run the risk of +meeting Injun Joe again. He was careful to keep from Becky what it was +he had seen. He told her he had only shouted "for luck." + +But hunger and wretchedness rise superior to fears in the long run. +Another tedious wait at the spring and another long sleep brought +changes. The children awoke tortured with a raging hunger. Tom believed +that it must be Wednesday or Thursday or even Friday or Saturday, now, +and that the search had been given over. He proposed to explore another +passage. He felt willing to risk Injun Joe and all other terrors. But +Becky was very weak. She had sunk into a dreary apathy and would not be +roused. She said she would wait, now, where she was, and die--it would +not be long. She told Tom to go with the kite-line and explore if he +chose; but she implored him to come back every little while and speak +to her; and she made him promise that when the awful time came, he +would stay by her and hold her hand until all was over. + +Tom kissed her, with a choking sensation in his throat, and made a +show of being confident of finding the searchers or an escape from the +cave; then he took the kite-line in his hand and went groping down one +of the passages on his hands and knees, distressed with hunger and sick +with bodings of coming doom. + + + +CHAPTER XXXII + +TUESDAY afternoon came, and waned to the twilight. The village of St. +Petersburg still mourned. The lost children had not been found. Public +prayers had been offered up for them, and many and many a private +prayer that had the petitioner's whole heart in it; but still no good +news came from the cave. The majority of the searchers had given up the +quest and gone back to their daily avocations, saying that it was plain +the children could never be found. Mrs. Thatcher was very ill, and a +great part of the time delirious. 
People said it was heartbreaking to +hear her call her child, and raise her head and listen a whole minute +at a time, then lay it wearily down again with a moan. Aunt Polly had +drooped into a settled melancholy, and her gray hair had grown almost +white. The village went to its rest on Tuesday night, sad and forlorn. + +Away in the middle of the night a wild peal burst from the village +bells, and in a moment the streets were swarming with frantic half-clad +people, who shouted, "Turn out! turn out! they're found! they're +found!" Tin pans and horns were added to the din, the population massed +itself and moved toward the river, met the children coming in an open +carriage drawn by shouting citizens, thronged around it, joined its +homeward march, and swept magnificently up the main street roaring +huzzah after huzzah! + +The village was illuminated; nobody went to bed again; it was the +greatest night the little town had ever seen. During the first half-hour +a procession of villagers filed through Judge Thatcher's house, seized +the saved ones and kissed them, squeezed Mrs. Thatcher's hand, tried to +speak but couldn't--and drifted out raining tears all over the place. + +Aunt Polly's happiness was complete, and Mrs. Thatcher's nearly so. It +would be complete, however, as soon as the messenger dispatched with +the great news to the cave should get the word to her husband. Tom lay +upon a sofa with an eager auditory about him and told the history of +the wonderful adventure, putting in many striking additions to adorn it +withal; and closed with a description of how he left Becky and went on +an exploring expedition; how he followed two avenues as far as his +kite-line would reach; how he followed a third to the fullest stretch of +the kite-line, and was about to turn back when he glimpsed a far-off +speck that looked like daylight; dropped the line and groped toward it, +pushed his head and shoulders through a small hole, and saw the broad +Mississippi rolling by! And if it had only happened to be night he would +not have seen that speck of daylight and would not have explored that +passage any more! He told how he went back for Becky and broke the good +news and she told him not to fret her with such stuff, for she was +tired, and knew she was going to die, and wanted to. He described how he +labored with her and convinced her; and how she almost died for joy when +she had groped to where she actually saw the blue speck of daylight; how +he pushed his way out at the hole and then helped her out; how they sat +there and cried for gladness; how some men came along in a skiff and Tom +hailed them and told them their situation and their famished condition; +how the men didn't believe the wild tale at first, "because," said they, +"you are five miles down the river below the valley the cave is in" +--then took them aboard, rowed to a house, gave them supper, made them +rest till two or three hours after dark and then brought them home. + +Before day-dawn, Judge Thatcher and the handful of searchers with him +were tracked out, in the cave, by the twine clews they had strung +behind them, and informed of the great news. + +Three days and nights of toil and hunger in the cave were not to be +shaken off at once, as Tom and Becky soon discovered. They were +bedridden all of Wednesday and Thursday, and seemed to grow more and +more tired and worn, all the time. 
Tom got about, a little, on +Thursday, was down-town Friday, and nearly as whole as ever Saturday; +but Becky did not leave her room until Sunday, and then she looked as +if she had passed through a wasting illness. + +Tom learned of Huck's sickness and went to see him on Friday, but +could not be admitted to the bedroom; neither could he on Saturday or +Sunday. He was admitted daily after that, but was warned to keep still +about his adventure and introduce no exciting topic. The Widow Douglas +stayed by to see that he obeyed. At home Tom learned of the Cardiff +Hill event; also that the "ragged man's" body had eventually been found +in the river near the ferry-landing; he had been drowned while trying +to escape, perhaps. + +About a fortnight after Tom's rescue from the cave, he started off to +visit Huck, who had grown plenty strong enough, now, to hear exciting +talk, and Tom had some that would interest him, he thought. Judge +Thatcher's house was on Tom's way, and he stopped to see Becky. The +Judge and some friends set Tom to talking, and some one asked him +ironically if he wouldn't like to go to the cave again. Tom said he +thought he wouldn't mind it. The Judge said: + +"Well, there are others just like you, Tom, I've not the least doubt. +But we have taken care of that. Nobody will get lost in that cave any +more." + +"Why?" + +"Because I had its big door sheathed with boiler iron two weeks ago, +and triple-locked--and I've got the keys." + +Tom turned as white as a sheet. + +"What's the matter, boy! Here, run, somebody! Fetch a glass of water!" + +The water was brought and thrown into Tom's face. + +"Ah, now you're all right. What was the matter with you, Tom?" + +"Oh, Judge, Injun Joe's in the cave!" + + + +CHAPTER XXXIII + +WITHIN a few minutes the news had spread, and a dozen skiff-loads of +men were on their way to McDougal's cave, and the ferryboat, well +filled with passengers, soon followed. Tom Sawyer was in the skiff that +bore Judge Thatcher. + +When the cave door was unlocked, a sorrowful sight presented itself in +the dim twilight of the place. Injun Joe lay stretched upon the ground, +dead, with his face close to the crack of the door, as if his longing +eyes had been fixed, to the latest moment, upon the light and the cheer +of the free world outside. Tom was touched, for he knew by his own +experience how this wretch had suffered. His pity was moved, but +nevertheless he felt an abounding sense of relief and security, now, +which revealed to him in a degree which he had not fully appreciated +before how vast a weight of dread had been lying upon him since the day +he lifted his voice against this bloody-minded outcast. + +Injun Joe's bowie-knife lay close by, its blade broken in two. The +great foundation-beam of the door had been chipped and hacked through, +with tedious labor; useless labor, too, it was, for the native rock +formed a sill outside it, and upon that stubborn material the knife had +wrought no effect; the only damage done was to the knife itself. But if +there had been no stony obstruction there the labor would have been +useless still, for if the beam had been wholly cut away Injun Joe could +not have squeezed his body under the door, and he knew it. So he had +only hacked that place in order to be doing something--in order to pass +the weary time--in order to employ his tortured faculties. Ordinarily +one could find half a dozen bits of candle stuck around in the crevices +of this vestibule, left there by tourists; but there were none now. 
The +prisoner had searched them out and eaten them. He had also contrived to +catch a few bats, and these, also, he had eaten, leaving only their +claws. The poor unfortunate had starved to death. In one place, near at +hand, a stalagmite had been slowly growing up from the ground for ages, +builded by the water-drip from a stalactite overhead. The captive had +broken off the stalagmite, and upon the stump had placed a stone, +wherein he had scooped a shallow hollow to catch the precious drop +that fell once in every three minutes with the dreary regularity of a +clock-tick--a dessertspoonful once in four and twenty hours. That drop +was falling when the Pyramids were new; when Troy fell; when the +foundations of Rome were laid; when Christ was crucified; when the +Conqueror created the British empire; when Columbus sailed; when the +massacre at Lexington was "news." It is falling now; it will still be +falling when all these things shall have sunk down the afternoon of +history, and the twilight of tradition, and been swallowed up in the +thick night of oblivion. Has everything a purpose and a mission? Did +this drop fall patiently during five thousand years to be ready for +this flitting human insect's need? and has it another important object +to accomplish ten thousand years to come? No matter. It is many and +many a year since the hapless half-breed scooped out the stone to catch +the priceless drops, but to this day the tourist stares longest at that +pathetic stone and that slow-dropping water when he comes to see the +wonders of McDougal's cave. Injun Joe's cup stands first in the list of +the cavern's marvels; even "Aladdin's Palace" cannot rival it. + +Injun Joe was buried near the mouth of the cave; and people flocked +there in boats and wagons from the towns and from all the farms and +hamlets for seven miles around; they brought their children, and all +sorts of provisions, and confessed that they had had almost as +satisfactory a time at the funeral as they could have had at the +hanging. + +This funeral stopped the further growth of one thing--the petition to +the governor for Injun Joe's pardon. The petition had been largely +signed; many tearful and eloquent meetings had been held, and a +committee of sappy women been appointed to go in deep mourning and wail +around the governor, and implore him to be a merciful ass and trample +his duty under foot. Injun Joe was believed to have killed five +citizens of the village, but what of that? If he had been Satan himself +there would have been plenty of weaklings ready to scribble their names +to a pardon-petition, and drip a tear on it from their permanently +impaired and leaky water-works. + +The morning after the funeral Tom took Huck to a private place to have +an important talk. Huck had learned all about Tom's adventure from the +Welshman and the Widow Douglas, by this time, but Tom said he reckoned +there was one thing they had not told him; that thing was what he +wanted to talk about now. Huck's face saddened. He said: + +"I know what it is. You got into No. 2 and never found anything but +whiskey. Nobody told me it was you; but I just knowed it must 'a' ben +you, soon as I heard 'bout that whiskey business; and I knowed you +hadn't got the money becuz you'd 'a' got at me some way or other and +told me even if you was mum to everybody else. Tom, something's always +told me we'd never get holt of that swag." + +"Why, Huck, I never told on that tavern-keeper. 
YOU know his tavern +was all right the Saturday I went to the picnic. Don't you remember you +was to watch there that night?" + +"Oh yes! Why, it seems 'bout a year ago. It was that very night that I +follered Injun Joe to the widder's." + +"YOU followed him?" + +"Yes--but you keep mum. I reckon Injun Joe's left friends behind him, +and I don't want 'em souring on me and doing me mean tricks. If it +hadn't ben for me he'd be down in Texas now, all right." + +Then Huck told his entire adventure in confidence to Tom, who had only +heard of the Welshman's part of it before. + +"Well," said Huck, presently, coming back to the main question, +"whoever nipped the whiskey in No. 2, nipped the money, too, I reckon +--anyways it's a goner for us, Tom." + +"Huck, that money wasn't ever in No. 2!" + +"What!" Huck searched his comrade's face keenly. "Tom, have you got on +the track of that money again?" + +"Huck, it's in the cave!" + +Huck's eyes blazed. + +"Say it again, Tom." + +"The money's in the cave!" + +"Tom--honest injun, now--is it fun, or earnest?" + +"Earnest, Huck--just as earnest as ever I was in my life. Will you go +in there with me and help get it out?" + +"I bet I will! I will if it's where we can blaze our way to it and not +get lost." + +"Huck, we can do that without the least little bit of trouble in the +world." + +"Good as wheat! What makes you think the money's--" + +"Huck, you just wait till we get in there. If we don't find it I'll +agree to give you my drum and every thing I've got in the world. I +will, by jings." + +"All right--it's a whiz. When do you say?" + +"Right now, if you say it. Are you strong enough?" + +"Is it far in the cave? I ben on my pins a little, three or four days, +now, but I can't walk more'n a mile, Tom--least I don't think I could." + +"It's about five mile into there the way anybody but me would go, +Huck, but there's a mighty short cut that they don't anybody but me +know about. Huck, I'll take you right to it in a skiff. I'll float the +skiff down there, and I'll pull it back again all by myself. You +needn't ever turn your hand over." + +"Less start right off, Tom." + +"All right. We want some bread and meat, and our pipes, and a little +bag or two, and two or three kite-strings, and some of these +new-fangled things they call lucifer matches. I tell you, many's +the time I wished I had some when I was in there before." + +A trifle after noon the boys borrowed a small skiff from a citizen who +was absent, and got under way at once. When they were several miles +below "Cave Hollow," Tom said: + +"Now you see this bluff here looks all alike all the way down from the +cave hollow--no houses, no wood-yards, bushes all alike. But do you see +that white place up yonder where there's been a landslide? Well, that's +one of my marks. We'll get ashore, now." + +They landed. + +"Now, Huck, where we're a-standing you could touch that hole I got out +of with a fishing-pole. See if you can find it." + +Huck searched all the place about, and found nothing. Tom proudly +marched into a thick clump of sumach bushes and said: + +"Here you are! Look at it, Huck; it's the snuggest hole in this +country. You just keep mum about it. All along I've been wanting to be +a robber, but I knew I'd got to have a thing like this, and where to +run across it was the bother. We've got it now, and we'll keep it +quiet, only we'll let Joe Harper and Ben Rogers in--because of course +there's got to be a Gang, or else there wouldn't be any style about it. 
+Tom Sawyer's Gang--it sounds splendid, don't it, Huck?" + +"Well, it just does, Tom. And who'll we rob?" + +"Oh, most anybody. Waylay people--that's mostly the way." + +"And kill them?" + +"No, not always. Hive them in the cave till they raise a ransom." + +"What's a ransom?" + +"Money. You make them raise all they can, off'n their friends; and +after you've kept them a year, if it ain't raised then you kill them. +That's the general way. Only you don't kill the women. You shut up the +women, but you don't kill them. They're always beautiful and rich, and +awfully scared. You take their watches and things, but you always take +your hat off and talk polite. They ain't anybody as polite as robbers +--you'll see that in any book. Well, the women get to loving you, and +after they've been in the cave a week or two weeks they stop crying and +after that you couldn't get them to leave. If you drove them out they'd +turn right around and come back. It's so in all the books." + +"Why, it's real bully, Tom. I believe it's better'n to be a pirate." + +"Yes, it's better in some ways, because it's close to home and +circuses and all that." + +By this time everything was ready and the boys entered the hole, Tom +in the lead. They toiled their way to the farther end of the tunnel, +then made their spliced kite-strings fast and moved on. A few steps +brought them to the spring, and Tom felt a shudder quiver all through +him. He showed Huck the fragment of candle-wick perched on a lump of +clay against the wall, and described how he and Becky had watched the +flame struggle and expire. + +The boys began to quiet down to whispers, now, for the stillness and +gloom of the place oppressed their spirits. They went on, and presently +entered and followed Tom's other corridor until they reached the +"jumping-off place." The candles revealed the fact that it was not +really a precipice, but only a steep clay hill twenty or thirty feet +high. Tom whispered: + +"Now I'll show you something, Huck." + +He held his candle aloft and said: + +"Look as far around the corner as you can. Do you see that? There--on +the big rock over yonder--done with candle-smoke." + +"Tom, it's a CROSS!" + +"NOW where's your Number Two? 'UNDER THE CROSS,' hey? Right yonder's +where I saw Injun Joe poke up his candle, Huck!" + +Huck stared at the mystic sign awhile, and then said with a shaky voice: + +"Tom, less git out of here!" + +"What! and leave the treasure?" + +"Yes--leave it. Injun Joe's ghost is round about there, certain." + +"No it ain't, Huck, no it ain't. It would ha'nt the place where he +died--away out at the mouth of the cave--five mile from here." + +"No, Tom, it wouldn't. It would hang round the money. I know the ways +of ghosts, and so do you." + +Tom began to fear that Huck was right. Misgivings gathered in his +mind. But presently an idea occurred to him-- + +"Lookyhere, Huck, what fools we're making of ourselves! Injun Joe's +ghost ain't a going to come around where there's a cross!" + +The point was well taken. It had its effect. + +"Tom, I didn't think of that. But that's so. It's luck for us, that +cross is. I reckon we'll climb down there and have a hunt for that box." + +Tom went first, cutting rude steps in the clay hill as he descended. +Huck followed. Four avenues opened out of the small cavern which the +great rock stood in. The boys examined three of them with no result. 
+They found a small recess in the one nearest the base of the rock, with +a pallet of blankets spread down in it; also an old suspender, some +bacon rind, and the well-gnawed bones of two or three fowls. But there +was no money-box. The lads searched and researched this place, but in +vain. Tom said: + +"He said UNDER the cross. Well, this comes nearest to being under the +cross. It can't be under the rock itself, because that sets solid on +the ground." + +They searched everywhere once more, and then sat down discouraged. +Huck could suggest nothing. By-and-by Tom said: + +"Lookyhere, Huck, there's footprints and some candle-grease on the +clay about one side of this rock, but not on the other sides. Now, +what's that for? I bet you the money IS under the rock. I'm going to +dig in the clay." + +"That ain't no bad notion, Tom!" said Huck with animation. + +Tom's "real Barlow" was out at once, and he had not dug four inches +before he struck wood. + +"Hey, Huck!--you hear that?" + +Huck began to dig and scratch now. Some boards were soon uncovered and +removed. They had concealed a natural chasm which led under the rock. +Tom got into this and held his candle as far under the rock as he +could, but said he could not see to the end of the rift. He proposed to +explore. He stooped and passed under; the narrow way descended +gradually. He followed its winding course, first to the right, then to +the left, Huck at his heels. Tom turned a short curve, by-and-by, and +exclaimed: + +"My goodness, Huck, lookyhere!" + +It was the treasure-box, sure enough, occupying a snug little cavern, +along with an empty powder-keg, a couple of guns in leather cases, two +or three pairs of old moccasins, a leather belt, and some other rubbish +well soaked with the water-drip. + +"Got it at last!" said Huck, ploughing among the tarnished coins with +his hand. "My, but we're rich, Tom!" + +"Huck, I always reckoned we'd get it. It's just too good to believe, +but we HAVE got it, sure! Say--let's not fool around here. Let's snake +it out. Lemme see if I can lift the box." + +It weighed about fifty pounds. Tom could lift it, after an awkward +fashion, but could not carry it conveniently. + +"I thought so," he said; "THEY carried it like it was heavy, that day +at the ha'nted house. I noticed that. I reckon I was right to think of +fetching the little bags along." + +The money was soon in the bags and the boys took it up to the cross +rock. + +"Now less fetch the guns and things," said Huck. + +"No, Huck--leave them there. They're just the tricks to have when we +go to robbing. We'll keep them there all the time, and we'll hold our +orgies there, too. It's an awful snug place for orgies." + +"What orgies?" + +"I dono. But robbers always have orgies, and of course we've got to +have them, too. Come along, Huck, we've been in here a long time. It's +getting late, I reckon. I'm hungry, too. We'll eat and smoke when we +get to the skiff." + +They presently emerged into the clump of sumach bushes, looked warily +out, found the coast clear, and were soon lunching and smoking in the +skiff. As the sun dipped toward the horizon they pushed out and got +under way. Tom skimmed up the shore through the long twilight, chatting +cheerily with Huck, and landed shortly after dark. + +"Now, Huck," said Tom, "we'll hide the money in the loft of the +widow's woodshed, and I'll come up in the morning and we'll count it +and divide, and then we'll hunt up a place out in the woods for it +where it will be safe. 
Just you lay quiet here and watch the stuff till +I run and hook Benny Taylor's little wagon; I won't be gone a minute." + +He disappeared, and presently returned with the wagon, put the two +small sacks into it, threw some old rags on top of them, and started +off, dragging his cargo behind him. When the boys reached the +Welshman's house, they stopped to rest. Just as they were about to move +on, the Welshman stepped out and said: + +"Hallo, who's that?" + +"Huck and Tom Sawyer." + +"Good! Come along with me, boys, you are keeping everybody waiting. +Here--hurry up, trot ahead--I'll haul the wagon for you. Why, it's not +as light as it might be. Got bricks in it?--or old metal?" + +"Old metal," said Tom. + +"I judged so; the boys in this town will take more trouble and fool +away more time hunting up six bits' worth of old iron to sell to the +foundry than they would to make twice the money at regular work. But +that's human nature--hurry along, hurry along!" + +The boys wanted to know what the hurry was about. + +"Never mind; you'll see, when we get to the Widow Douglas'." + +Huck said with some apprehension--for he was long used to being +falsely accused: + +"Mr. Jones, we haven't been doing nothing." + +The Welshman laughed. + +"Well, I don't know, Huck, my boy. I don't know about that. Ain't you +and the widow good friends?" + +"Yes. Well, she's ben good friends to me, anyway." + +"All right, then. What do you want to be afraid for?" + +This question was not entirely answered in Huck's slow mind before he +found himself pushed, along with Tom, into Mrs. Douglas' drawing-room. +Mr. Jones left the wagon near the door and followed. + +The place was grandly lighted, and everybody that was of any +consequence in the village was there. The Thatchers were there, the +Harpers, the Rogerses, Aunt Polly, Sid, Mary, the minister, the editor, +and a great many more, and all dressed in their best. The widow +received the boys as heartily as any one could well receive two such +looking beings. They were covered with clay and candle-grease. Aunt +Polly blushed crimson with humiliation, and frowned and shook her head +at Tom. Nobody suffered half as much as the two boys did, however. Mr. +Jones said: + +"Tom wasn't at home, yet, so I gave him up; but I stumbled on him and +Huck right at my door, and so I just brought them along in a hurry." + +"And you did just right," said the widow. "Come with me, boys." + +She took them to a bedchamber and said: + +"Now wash and dress yourselves. Here are two new suits of clothes +--shirts, socks, everything complete. They're Huck's--no, no thanks, +Huck--Mr. Jones bought one and I the other. But they'll fit both of you. +Get into them. We'll wait--come down when you are slicked up enough." + +Then she left. + + + +CHAPTER XXXIV + +HUCK said: "Tom, we can slope, if we can find a rope. The window ain't +high from the ground." + +"Shucks! what do you want to slope for?" + +"Well, I ain't used to that kind of a crowd. I can't stand it. I ain't +going down there, Tom." + +"Oh, bother! It ain't anything. I don't mind it a bit. I'll take care +of you." + +Sid appeared. + +"Tom," said he, "auntie has been waiting for you all the afternoon. +Mary got your Sunday clothes ready, and everybody's been fretting about +you. Say--ain't this grease and clay, on your clothes?" + +"Now, Mr. Siddy, you jist 'tend to your own business. What's all this +blow-out about, anyway?" + +"It's one of the widow's parties that she's always having. 
This time +it's for the Welshman and his sons, on account of that scrape they +helped her out of the other night. And say--I can tell you something, +if you want to know." + +"Well, what?" + +"Why, old Mr. Jones is going to try to spring something on the people +here to-night, but I overheard him tell auntie to-day about it, as a +secret, but I reckon it's not much of a secret now. Everybody knows +--the widow, too, for all she tries to let on she don't. Mr. Jones was +bound Huck should be here--couldn't get along with his grand secret +without Huck, you know!" + +"Secret about what, Sid?" + +"About Huck tracking the robbers to the widow's. I reckon Mr. Jones +was going to make a grand time over his surprise, but I bet you it will +drop pretty flat." + +Sid chuckled in a very contented and satisfied way. + +"Sid, was it you that told?" + +"Oh, never mind who it was. SOMEBODY told--that's enough." + +"Sid, there's only one person in this town mean enough to do that, and +that's you. If you had been in Huck's place you'd 'a' sneaked down the +hill and never told anybody on the robbers. You can't do any but mean +things, and you can't bear to see anybody praised for doing good ones. +There--no thanks, as the widow says"--and Tom cuffed Sid's ears and +helped him to the door with several kicks. "Now go and tell auntie if +you dare--and to-morrow you'll catch it!" + +Some minutes later the widow's guests were at the supper-table, and a +dozen children were propped up at little side-tables in the same room, +after the fashion of that country and that day. At the proper time Mr. +Jones made his little speech, in which he thanked the widow for the +honor she was doing himself and his sons, but said that there was +another person whose modesty-- + +And so forth and so on. He sprung his secret about Huck's share in the +adventure in the finest dramatic manner he was master of, but the +surprise it occasioned was largely counterfeit and not as clamorous and +effusive as it might have been under happier circumstances. However, +the widow made a pretty fair show of astonishment, and heaped so many +compliments and so much gratitude upon Huck that he almost forgot the +nearly intolerable discomfort of his new clothes in the entirely +intolerable discomfort of being set up as a target for everybody's gaze +and everybody's laudations. + +The widow said she meant to give Huck a home under her roof and have +him educated; and that when she could spare the money she would start +him in business in a modest way. Tom's chance was come. He said: + +"Huck don't need it. Huck's rich." + +Nothing but a heavy strain upon the good manners of the company kept +back the due and proper complimentary laugh at this pleasant joke. But +the silence was a little awkward. Tom broke it: + +"Huck's got money. Maybe you don't believe it, but he's got lots of +it. Oh, you needn't smile--I reckon I can show you. You just wait a +minute." + +Tom ran out of doors. The company looked at each other with a +perplexed interest--and inquiringly at Huck, who was tongue-tied. + +"Sid, what ails Tom?" said Aunt Polly. "He--well, there ain't ever any +making of that boy out. I never--" + +Tom entered, struggling with the weight of his sacks, and Aunt Polly +did not finish her sentence. Tom poured the mass of yellow coin upon +the table and said: + +"There--what did I tell you? Half of it's Huck's and half of it's mine!" + +The spectacle took the general breath away. All gazed, nobody spoke +for a moment. 
Then there was a unanimous call for an explanation. Tom +said he could furnish it, and he did. The tale was long, but brimful of +interest. There was scarcely an interruption from any one to break the +charm of its flow. When he had finished, Mr. Jones said: + +"I thought I had fixed up a little surprise for this occasion, but it +don't amount to anything now. This one makes it sing mighty small, I'm +willing to allow." + +The money was counted. The sum amounted to a little over twelve +thousand dollars. It was more than any one present had ever seen at one +time before, though several persons were there who were worth +considerably more than that in property. + + + +CHAPTER XXXV + +THE reader may rest satisfied that Tom's and Huck's windfall made a +mighty stir in the poor little village of St. Petersburg. So vast a +sum, all in actual cash, seemed next to incredible. It was talked +about, gloated over, glorified, until the reason of many of the +citizens tottered under the strain of the unhealthy excitement. Every +"haunted" house in St. Petersburg and the neighboring villages was +dissected, plank by plank, and its foundations dug up and ransacked for +hidden treasure--and not by boys, but men--pretty grave, unromantic +men, too, some of them. Wherever Tom and Huck appeared they were +courted, admired, stared at. The boys were not able to remember that +their remarks had possessed weight before; but now their sayings were +treasured and repeated; everything they did seemed somehow to be +regarded as remarkable; they had evidently lost the power of doing and +saying commonplace things; moreover, their past history was raked up +and discovered to bear marks of conspicuous originality. The village +paper published biographical sketches of the boys. + +The Widow Douglas put Huck's money out at six per cent., and Judge +Thatcher did the same with Tom's at Aunt Polly's request. Each lad had +an income, now, that was simply prodigious--a dollar for every week-day +in the year and half of the Sundays. It was just what the minister got +--no, it was what he was promised--he generally couldn't collect it. A +dollar and a quarter a week would board, lodge, and school a boy in +those old simple days--and clothe him and wash him, too, for that +matter. + +Judge Thatcher had conceived a great opinion of Tom. He said that no +commonplace boy would ever have got his daughter out of the cave. When +Becky told her father, in strict confidence, how Tom had taken her +whipping at school, the Judge was visibly moved; and when she pleaded +grace for the mighty lie which Tom had told in order to shift that +whipping from her shoulders to his own, the Judge said with a fine +outburst that it was a noble, a generous, a magnanimous lie--a lie that +was worthy to hold up its head and march down through history breast to +breast with George Washington's lauded Truth about the hatchet! Becky +thought her father had never looked so tall and so superb as when he +walked the floor and stamped his foot and said that. She went straight +off and told Tom about it. + +Judge Thatcher hoped to see Tom a great lawyer or a great soldier some +day. He said he meant to look to it that Tom should be admitted to the +National Military Academy and afterward trained in the best law school +in the country, in order that he might be ready for either career or +both. 
+ +Huck Finn's wealth and the fact that he was now under the Widow +Douglas' protection introduced him into society--no, dragged him into +it, hurled him into it--and his sufferings were almost more than he +could bear. The widow's servants kept him clean and neat, combed and +brushed, and they bedded him nightly in unsympathetic sheets that had +not one little spot or stain which he could press to his heart and know +for a friend. He had to eat with a knife and fork; he had to use +napkin, cup, and plate; he had to learn his book, he had to go to +church; he had to talk so properly that speech was become insipid in +his mouth; whithersoever he turned, the bars and shackles of +civilization shut him in and bound him hand and foot. + +He bravely bore his miseries three weeks, and then one day turned up +missing. For forty-eight hours the widow hunted for him everywhere in +great distress. The public were profoundly concerned; they searched +high and low, they dragged the river for his body. Early the third +morning Tom Sawyer wisely went poking among some old empty hogsheads +down behind the abandoned slaughter-house, and in one of them he found +the refugee. Huck had slept there; he had just breakfasted upon some +stolen odds and ends of food, and was lying off, now, in comfort, with +his pipe. He was unkempt, uncombed, and clad in the same old ruin of +rags that had made him picturesque in the days when he was free and +happy. Tom routed him out, told him the trouble he had been causing, +and urged him to go home. Huck's face lost its tranquil content, and +took a melancholy cast. He said: + +"Don't talk about it, Tom. I've tried it, and it don't work; it don't +work, Tom. It ain't for me; I ain't used to it. The widder's good to +me, and friendly; but I can't stand them ways. She makes me get up just +at the same time every morning; she makes me wash, they comb me all to +thunder; she won't let me sleep in the woodshed; I got to wear them +blamed clothes that just smothers me, Tom; they don't seem to any air +git through 'em, somehow; and they're so rotten nice that I can't set +down, nor lay down, nor roll around anywher's; I hain't slid on a +cellar-door for--well, it 'pears to be years; I got to go to church and +sweat and sweat--I hate them ornery sermons! I can't ketch a fly in +there, I can't chaw. I got to wear shoes all Sunday. The widder eats by +a bell; she goes to bed by a bell; she gits up by a bell--everything's +so awful reg'lar a body can't stand it." + +"Well, everybody does that way, Huck." + +"Tom, it don't make no difference. I ain't everybody, and I can't +STAND it. It's awful to be tied up so. And grub comes too easy--I don't +take no interest in vittles, that way. I got to ask to go a-fishing; I +got to ask to go in a-swimming--dern'd if I hain't got to ask to do +everything. Well, I'd got to talk so nice it wasn't no comfort--I'd got +to go up in the attic and rip out awhile, every day, to git a taste in +my mouth, or I'd a died, Tom. The widder wouldn't let me smoke; she +wouldn't let me yell, she wouldn't let me gape, nor stretch, nor +scratch, before folks--" [Then with a spasm of special irritation and +injury]--"And dad fetch it, she prayed all the time! I never see such a +woman! I HAD to shove, Tom--I just had to. And besides, that school's +going to open, and I'd a had to go to it--well, I wouldn't stand THAT, +Tom. Looky here, Tom, being rich ain't what it's cracked up to be. 
It's +just worry and worry, and sweat and sweat, and a-wishing you was dead +all the time. Now these clothes suits me, and this bar'l suits me, and +I ain't ever going to shake 'em any more. Tom, I wouldn't ever got into +all this trouble if it hadn't 'a' ben for that money; now you just take +my sheer of it along with your'n, and gimme a ten-center sometimes--not +many times, becuz I don't give a dern for a thing 'thout it's tollable +hard to git--and you go and beg off for me with the widder." + +"Oh, Huck, you know I can't do that. 'Tain't fair; and besides if +you'll try this thing just a while longer you'll come to like it." + +"Like it! Yes--the way I'd like a hot stove if I was to set on it long +enough. No, Tom, I won't be rich, and I won't live in them cussed +smothery houses. I like the woods, and the river, and hogsheads, and +I'll stick to 'em, too. Blame it all! just as we'd got guns, and a +cave, and all just fixed to rob, here this dern foolishness has got to +come up and spile it all!" + +Tom saw his opportunity-- + +"Lookyhere, Huck, being rich ain't going to keep me back from turning +robber." + +"No! Oh, good-licks; are you in real dead-wood earnest, Tom?" + +"Just as dead earnest as I'm sitting here. But Huck, we can't let you +into the gang if you ain't respectable, you know." + +Huck's joy was quenched. + +"Can't let me in, Tom? Didn't you let me go for a pirate?" + +"Yes, but that's different. A robber is more high-toned than what a +pirate is--as a general thing. In most countries they're awful high up +in the nobility--dukes and such." + +"Now, Tom, hain't you always ben friendly to me? You wouldn't shet me +out, would you, Tom? You wouldn't do that, now, WOULD you, Tom?" + +"Huck, I wouldn't want to, and I DON'T want to--but what would people +say? Why, they'd say, 'Mph! Tom Sawyer's Gang! pretty low characters in +it!' They'd mean you, Huck. You wouldn't like that, and I wouldn't." + +Huck was silent for some time, engaged in a mental struggle. Finally +he said: + +"Well, I'll go back to the widder for a month and tackle it and see if +I can come to stand it, if you'll let me b'long to the gang, Tom." + +"All right, Huck, it's a whiz! Come along, old chap, and I'll ask the +widow to let up on you a little, Huck." + +"Will you, Tom--now will you? That's good. If she'll let up on some of +the roughest things, I'll smoke private and cuss private, and crowd +through or bust. When you going to start the gang and turn robbers?" + +"Oh, right off. We'll get the boys together and have the initiation +to-night, maybe." + +"Have the which?" + +"Have the initiation." + +"What's that?" + +"It's to swear to stand by one another, and never tell the gang's +secrets, even if you're chopped all to flinders, and kill anybody and +all his family that hurts one of the gang." + +"That's gay--that's mighty gay, Tom, I tell you." + +"Well, I bet it is. And all that swearing's got to be done at +midnight, in the lonesomest, awfulest place you can find--a ha'nted +house is the best, but they're all ripped up now." + +"Well, midnight's good, anyway, Tom." + +"Yes, so it is. And you've got to swear on a coffin, and sign it with +blood." + +"Now, that's something LIKE! Why, it's a million times bullier than +pirating. I'll stick to the widder till I rot, Tom; and if I git to be +a reg'lar ripper of a robber, and everybody talking 'bout it, I reckon +she'll be proud she snaked me in out of the wet." + + + +CONCLUSION + +SO endeth this chronicle. 
It being strictly a history of a BOY, it +must stop here; the story could not go much further without becoming +the history of a MAN. When one writes a novel about grown people, he +knows exactly where to stop--that is, with a marriage; but when he +writes of juveniles, he must stop where he best can. + +Most of the characters that perform in this book still live, and are +prosperous and happy. Some day it may seem worth while to take up the +story of the younger ones again and see what sort of men and women they +turned out to be; therefore it will be wisest not to reveal any of that +part of their lives at present. diff --git a/backtrace-sys/src/libbacktrace/README.md b/backtrace-sys/src/libbacktrace/README.md new file mode 100644 index 000000000..da6d9667d --- /dev/null +++ b/backtrace-sys/src/libbacktrace/README.md @@ -0,0 +1,33 @@ +# libbacktrace +A C library that may be linked into a C/C++ program to produce symbolic backtraces + +Initially written by Ian Lance Taylor . + +This is version 1.0. +It is likely that this will always be version 1.0. + +The libbacktrace library may be linked into a program or library and +used to produce symbolic backtraces. +Sample uses would be to print a detailed backtrace when an error +occurs or to gather detailed profiling information. + +The libbacktrace library is provided under a BSD license. +See the source files for the exact license text. + +The public functions are declared and documented in the header file +backtrace.h, which should be #include'd by a user of the library. + +Building libbacktrace will generate a file backtrace-supported.h, +which a user of the library may use to determine whether backtraces +will work. +See the source file backtrace-supported.h.in for the macros that it +defines. + +As of January 2018, libbacktrace only supports ELF, PE/COFF, and XCOFF +executables with DWARF debugging information. +The library is written to make it straightforward to add support for +other object file and debugging formats. + +The library relies on the C++ unwind API defined at +https://itanium-cxx-abi.github.io/cxx-abi/abi-eh.html +This API is provided by GCC. diff --git a/backtrace-sys/src/libbacktrace/acinclude.m4 b/backtrace-sys/src/libbacktrace/acinclude.m4 new file mode 100644 index 000000000..daa73af9e --- /dev/null +++ b/backtrace-sys/src/libbacktrace/acinclude.m4 @@ -0,0 +1,72 @@ +dnl +dnl Check whether _Unwind_GetIPInfo is available without doing a link +dnl test so we can use this with libstdc++-v3 and libjava. Need to +dnl use $target to set defaults because automatic checking is not possible +dnl without a link test (and maybe even with a link test). +dnl + +AC_DEFUN([GCC_CHECK_UNWIND_GETIPINFO], [ + AC_ARG_WITH(system-libunwind, + [ --with-system-libunwind use installed libunwind]) + # If system-libunwind was not specifically set, pick a default setting. + if test x$with_system_libunwind = x; then + case ${target} in + ia64-*-hpux*) with_system_libunwind=yes ;; + *) with_system_libunwind=no ;; + esac + fi + # Based on system-libunwind and target, do we have ipinfo? + if test x$with_system_libunwind = xyes; then + case ${target} in + ia64-*-*) have_unwind_getipinfo=no ;; + *) have_unwind_getipinfo=yes ;; + esac + else + # Darwin before version 9 does not have _Unwind_GetIPInfo. 
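+    # A minimal sketch of how the HAVE_GETIPINFO define (set further below
+    # when this check succeeds) is typically consumed from C; it mirrors the
+    # pattern used by backtrace.c elsewhere in this tree:
+    #   #ifdef HAVE_GETIPINFO
+    #     pc = _Unwind_GetIPInfo (context, &ip_before_insn);
+    #   #else
+    #     pc = _Unwind_GetIP (context);
+    #   #endif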
+ changequote(,) + case ${target} in + *-*-darwin[3-8]|*-*-darwin[3-8].*) have_unwind_getipinfo=no ;; + *) have_unwind_getipinfo=yes ;; + esac + changequote([,]) + fi + + if test x$have_unwind_getipinfo = xyes; then + AC_DEFINE(HAVE_GETIPINFO, 1, [Define if _Unwind_GetIPInfo is available.]) + fi +]) + +# ACX_PROG_CC_WARNING_OPTS(WARNINGS, [VARIABLE = WARN_CFLAGS]) +# Sets @VARIABLE@ to the subset of the given options which the +# compiler accepts. +AC_DEFUN([ACX_PROG_CC_WARNING_OPTS], +[AC_REQUIRE([AC_PROG_CC])dnl +AC_LANG_PUSH(C) +m4_pushdef([acx_Var], [m4_default([$2], [WARN_CFLAGS])])dnl +AC_SUBST(acx_Var)dnl +m4_expand_once([acx_Var= +],m4_quote(acx_Var=))dnl +save_CFLAGS="$CFLAGS" +for real_option in $1; do + # Do the check with the no- prefix removed since gcc silently + # accepts any -Wno-* option on purpose + case $real_option in + -Wno-*) option=-W`expr x$real_option : 'x-Wno-\(.*\)'` ;; + *) option=$real_option ;; + esac + AS_VAR_PUSHDEF([acx_Woption], [acx_cv_prog_cc_warning_$option]) + AC_CACHE_CHECK([whether $CC supports $option], acx_Woption, + [CFLAGS="$option" + AC_COMPILE_IFELSE([AC_LANG_PROGRAM([],[])], + [AS_VAR_SET(acx_Woption, yes)], + [AS_VAR_SET(acx_Woption, no)]) + ]) + AS_IF([test AS_VAR_GET(acx_Woption) = yes], + [acx_Var="$acx_Var${acx_Var:+ }$real_option"]) + AS_VAR_POPDEF([acx_Woption])dnl +done +CFLAGS="$save_CFLAGS" +m4_popdef([acx_Var])dnl +AC_LANG_POP(C) +])# ACX_PROG_CC_WARNING_OPTS + diff --git a/backtrace-sys/src/libbacktrace/aclocal.m4 b/backtrace-sys/src/libbacktrace/aclocal.m4 new file mode 100644 index 000000000..32a1811f2 --- /dev/null +++ b/backtrace-sys/src/libbacktrace/aclocal.m4 @@ -0,0 +1,767 @@ +# generated automatically by aclocal 1.11.6 -*- Autoconf -*- + +# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, +# 2005, 2006, 2007, 2008, 2009, 2010, 2011 Free Software Foundation, +# Inc. +# This file is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY, to the extent permitted by law; without +# even the implied warranty of MERCHANTABILITY or FITNESS FOR A +# PARTICULAR PURPOSE. + +m4_ifndef([AC_AUTOCONF_VERSION], + [m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl +m4_if(m4_defn([AC_AUTOCONF_VERSION]), [2.64],, +[m4_warning([this file was generated for autoconf 2.64. +You have another version of autoconf. It may work, but is not guaranteed to. +If you have problems, you may need to regenerate the build system entirely. +To do so, use the procedure documented by the package, typically `autoreconf'.])]) + +# Copyright (C) 2002, 2003, 2005, 2006, 2007, 2008, 2011 Free Software +# Foundation, Inc. +# +# This file is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + +# serial 1 + +# AM_AUTOMAKE_VERSION(VERSION) +# ---------------------------- +# Automake X.Y traces this macro to ensure aclocal.m4 has been +# generated from the m4 files accompanying Automake X.Y. +# (This private macro should not be called outside this file.) +AC_DEFUN([AM_AUTOMAKE_VERSION], +[am__api_version='1.11' +dnl Some users find AM_AUTOMAKE_VERSION and mistake it for a way to +dnl require some minimum version. Point them to the right macro. 
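+dnl The macro to point them to is AM_INIT_AUTOMAKE itself; a minimal
+dnl configure.ac sketch of requiring a version would be:
+dnl   AM_INIT_AUTOMAKE([1.11.6])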
+m4_if([$1], [1.11.6], [], + [AC_FATAL([Do not call $0, use AM_INIT_AUTOMAKE([$1]).])])dnl +]) + +# _AM_AUTOCONF_VERSION(VERSION) +# ----------------------------- +# aclocal traces this macro to find the Autoconf version. +# This is a private macro too. Using m4_define simplifies +# the logic in aclocal, which can simply ignore this definition. +m4_define([_AM_AUTOCONF_VERSION], []) + +# AM_SET_CURRENT_AUTOMAKE_VERSION +# ------------------------------- +# Call AM_AUTOMAKE_VERSION and AM_AUTOMAKE_VERSION so they can be traced. +# This function is AC_REQUIREd by AM_INIT_AUTOMAKE. +AC_DEFUN([AM_SET_CURRENT_AUTOMAKE_VERSION], +[AM_AUTOMAKE_VERSION([1.11.6])dnl +m4_ifndef([AC_AUTOCONF_VERSION], + [m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl +_AM_AUTOCONF_VERSION(m4_defn([AC_AUTOCONF_VERSION]))]) + +# AM_AUX_DIR_EXPAND -*- Autoconf -*- + +# Copyright (C) 2001, 2003, 2005, 2011 Free Software Foundation, Inc. +# +# This file is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + +# serial 1 + +# For projects using AC_CONFIG_AUX_DIR([foo]), Autoconf sets +# $ac_aux_dir to `$srcdir/foo'. In other projects, it is set to +# `$srcdir', `$srcdir/..', or `$srcdir/../..'. +# +# Of course, Automake must honor this variable whenever it calls a +# tool from the auxiliary directory. The problem is that $srcdir (and +# therefore $ac_aux_dir as well) can be either absolute or relative, +# depending on how configure is run. This is pretty annoying, since +# it makes $ac_aux_dir quite unusable in subdirectories: in the top +# source directory, any form will work fine, but in subdirectories a +# relative path needs to be adjusted first. +# +# $ac_aux_dir/missing +# fails when called from a subdirectory if $ac_aux_dir is relative +# $top_srcdir/$ac_aux_dir/missing +# fails if $ac_aux_dir is absolute, +# fails when called from a subdirectory in a VPATH build with +# a relative $ac_aux_dir +# +# The reason of the latter failure is that $top_srcdir and $ac_aux_dir +# are both prefixed by $srcdir. In an in-source build this is usually +# harmless because $srcdir is `.', but things will broke when you +# start a VPATH build or use an absolute $srcdir. +# +# So we could use something similar to $top_srcdir/$ac_aux_dir/missing, +# iff we strip the leading $srcdir from $ac_aux_dir. That would be: +# am_aux_dir='\$(top_srcdir)/'`expr "$ac_aux_dir" : "$srcdir//*\(.*\)"` +# and then we would define $MISSING as +# MISSING="\${SHELL} $am_aux_dir/missing" +# This will work as long as MISSING is not called from configure, because +# unfortunately $(top_srcdir) has no meaning in configure. +# However there are other variables, like CC, which are often used in +# configure, and could therefore not use this "fixed" $ac_aux_dir. +# +# Another solution, used here, is to always expand $ac_aux_dir to an +# absolute PATH. The drawback is that using absolute paths prevent a +# configured tree to be moved without reconfiguration. + +AC_DEFUN([AM_AUX_DIR_EXPAND], +[dnl Rely on autoconf to set up CDPATH properly. +AC_PREREQ([2.50])dnl +# expand $ac_aux_dir to an absolute path +am_aux_dir=`cd $ac_aux_dir && pwd` +]) + +# AM_CONDITIONAL -*- Autoconf -*- + +# Copyright (C) 1997, 2000, 2001, 2003, 2004, 2005, 2006, 2008 +# Free Software Foundation, Inc. 
+# +# This file is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + +# serial 9 + +# AM_CONDITIONAL(NAME, SHELL-CONDITION) +# ------------------------------------- +# Define a conditional. +AC_DEFUN([AM_CONDITIONAL], +[AC_PREREQ(2.52)dnl + ifelse([$1], [TRUE], [AC_FATAL([$0: invalid condition: $1])], + [$1], [FALSE], [AC_FATAL([$0: invalid condition: $1])])dnl +AC_SUBST([$1_TRUE])dnl +AC_SUBST([$1_FALSE])dnl +_AM_SUBST_NOTMAKE([$1_TRUE])dnl +_AM_SUBST_NOTMAKE([$1_FALSE])dnl +m4_define([_AM_COND_VALUE_$1], [$2])dnl +if $2; then + $1_TRUE= + $1_FALSE='#' +else + $1_TRUE='#' + $1_FALSE= +fi +AC_CONFIG_COMMANDS_PRE( +[if test -z "${$1_TRUE}" && test -z "${$1_FALSE}"; then + AC_MSG_ERROR([[conditional "$1" was never defined. +Usually this means the macro was only invoked conditionally.]]) +fi])]) + +# Do all the work for Automake. -*- Autoconf -*- + +# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, +# 2005, 2006, 2008, 2009 Free Software Foundation, Inc. +# +# This file is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + +# serial 16 + +# This macro actually does too much. Some checks are only needed if +# your package does certain things. But this isn't really a big deal. + +# AM_INIT_AUTOMAKE(PACKAGE, VERSION, [NO-DEFINE]) +# AM_INIT_AUTOMAKE([OPTIONS]) +# ----------------------------------------------- +# The call with PACKAGE and VERSION arguments is the old style +# call (pre autoconf-2.50), which is being phased out. PACKAGE +# and VERSION should now be passed to AC_INIT and removed from +# the call to AM_INIT_AUTOMAKE. +# We support both call styles for the transition. After +# the next Automake release, Autoconf can make the AC_INIT +# arguments mandatory, and then we can depend on a new Autoconf +# release and drop the old call support. +AC_DEFUN([AM_INIT_AUTOMAKE], +[AC_PREREQ([2.62])dnl +dnl Autoconf wants to disallow AM_ names. We explicitly allow +dnl the ones we care about. +m4_pattern_allow([^AM_[A-Z]+FLAGS$])dnl +AC_REQUIRE([AM_SET_CURRENT_AUTOMAKE_VERSION])dnl +AC_REQUIRE([AC_PROG_INSTALL])dnl +if test "`cd $srcdir && pwd`" != "`pwd`"; then + # Use -I$(srcdir) only when $(srcdir) != ., so that make's output + # is not polluted with repeated "-I." + AC_SUBST([am__isrc], [' -I$(srcdir)'])_AM_SUBST_NOTMAKE([am__isrc])dnl + # test to see if srcdir already configured + if test -f $srcdir/config.status; then + AC_MSG_ERROR([source directory already configured; run "make distclean" there first]) + fi +fi + +# test whether we have cygpath +if test -z "$CYGPATH_W"; then + if (cygpath --version) >/dev/null 2>/dev/null; then + CYGPATH_W='cygpath -w' + else + CYGPATH_W=echo + fi +fi +AC_SUBST([CYGPATH_W]) + +# Define the identity of the package. +dnl Distinguish between old-style and new-style calls. +m4_ifval([$2], +[m4_ifval([$3], [_AM_SET_OPTION([no-define])])dnl + AC_SUBST([PACKAGE], [$1])dnl + AC_SUBST([VERSION], [$2])], +[_AM_SET_OPTIONS([$1])dnl +dnl Diagnose old-style AC_INIT with new-style AM_AUTOMAKE_INIT. 
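+dnl A minimal sketch of the two call styles handled here (package name,
+dnl version and option are illustrative only):
+dnl   old:  AM_INIT_AUTOMAKE(mypkg, 1.0)
+dnl   new:  AC_INIT([mypkg], [1.0])  followed by  AM_INIT_AUTOMAKE([foreign])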
+m4_if(m4_ifdef([AC_PACKAGE_NAME], 1)m4_ifdef([AC_PACKAGE_VERSION], 1), 11,, + [m4_fatal([AC_INIT should be called with package and version arguments])])dnl + AC_SUBST([PACKAGE], ['AC_PACKAGE_TARNAME'])dnl + AC_SUBST([VERSION], ['AC_PACKAGE_VERSION'])])dnl + +_AM_IF_OPTION([no-define],, +[AC_DEFINE_UNQUOTED(PACKAGE, "$PACKAGE", [Name of package]) + AC_DEFINE_UNQUOTED(VERSION, "$VERSION", [Version number of package])])dnl + +# Some tools Automake needs. +AC_REQUIRE([AM_SANITY_CHECK])dnl +AC_REQUIRE([AC_ARG_PROGRAM])dnl +AM_MISSING_PROG(ACLOCAL, aclocal-${am__api_version}) +AM_MISSING_PROG(AUTOCONF, autoconf) +AM_MISSING_PROG(AUTOMAKE, automake-${am__api_version}) +AM_MISSING_PROG(AUTOHEADER, autoheader) +AM_MISSING_PROG(MAKEINFO, makeinfo) +AC_REQUIRE([AM_PROG_INSTALL_SH])dnl +AC_REQUIRE([AM_PROG_INSTALL_STRIP])dnl +AC_REQUIRE([AM_PROG_MKDIR_P])dnl +# We need awk for the "check" target. The system "awk" is bad on +# some platforms. +AC_REQUIRE([AC_PROG_AWK])dnl +AC_REQUIRE([AC_PROG_MAKE_SET])dnl +AC_REQUIRE([AM_SET_LEADING_DOT])dnl +_AM_IF_OPTION([tar-ustar], [_AM_PROG_TAR([ustar])], + [_AM_IF_OPTION([tar-pax], [_AM_PROG_TAR([pax])], + [_AM_PROG_TAR([v7])])]) +_AM_IF_OPTION([no-dependencies],, +[AC_PROVIDE_IFELSE([AC_PROG_CC], + [_AM_DEPENDENCIES(CC)], + [define([AC_PROG_CC], + defn([AC_PROG_CC])[_AM_DEPENDENCIES(CC)])])dnl +AC_PROVIDE_IFELSE([AC_PROG_CXX], + [_AM_DEPENDENCIES(CXX)], + [define([AC_PROG_CXX], + defn([AC_PROG_CXX])[_AM_DEPENDENCIES(CXX)])])dnl +AC_PROVIDE_IFELSE([AC_PROG_OBJC], + [_AM_DEPENDENCIES(OBJC)], + [define([AC_PROG_OBJC], + defn([AC_PROG_OBJC])[_AM_DEPENDENCIES(OBJC)])])dnl +]) +_AM_IF_OPTION([silent-rules], [AC_REQUIRE([AM_SILENT_RULES])])dnl +dnl The `parallel-tests' driver may need to know about EXEEXT, so add the +dnl `am__EXEEXT' conditional if _AM_COMPILER_EXEEXT was seen. This macro +dnl is hooked onto _AC_COMPILER_EXEEXT early, see below. +AC_CONFIG_COMMANDS_PRE(dnl +[m4_provide_if([_AM_COMPILER_EXEEXT], + [AM_CONDITIONAL([am__EXEEXT], [test -n "$EXEEXT"])])])dnl +]) + +dnl Hook into `_AC_COMPILER_EXEEXT' early to learn its expansion. Do not +dnl add the conditional right here, as _AC_COMPILER_EXEEXT may be further +dnl mangled by Autoconf and run in a shell conditional statement. +m4_define([_AC_COMPILER_EXEEXT], +m4_defn([_AC_COMPILER_EXEEXT])[m4_provide([_AM_COMPILER_EXEEXT])]) + + +# When config.status generates a header, we must update the stamp-h file. +# This file resides in the same directory as the config header +# that is generated. The stamp files are numbered to have different names. + +# Autoconf calls _AC_AM_CONFIG_HEADER_HOOK (when defined) in the +# loop where config.status creates the headers, so we can generate +# our stamp files there. +AC_DEFUN([_AC_AM_CONFIG_HEADER_HOOK], +[# Compute $1's index in $config_headers. +_am_arg=$1 +_am_stamp_count=1 +for _am_header in $config_headers :; do + case $_am_header in + $_am_arg | $_am_arg:* ) + break ;; + * ) + _am_stamp_count=`expr $_am_stamp_count + 1` ;; + esac +done +echo "timestamp for $_am_arg" >`AS_DIRNAME(["$_am_arg"])`/stamp-h[]$_am_stamp_count]) + +# Copyright (C) 2001, 2003, 2005, 2008, 2011 Free Software Foundation, +# Inc. +# +# This file is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + +# serial 1 + +# AM_PROG_INSTALL_SH +# ------------------ +# Define $install_sh. 
+AC_DEFUN([AM_PROG_INSTALL_SH], +[AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl +if test x"${install_sh}" != xset; then + case $am_aux_dir in + *\ * | *\ *) + install_sh="\${SHELL} '$am_aux_dir/install-sh'" ;; + *) + install_sh="\${SHELL} $am_aux_dir/install-sh" + esac +fi +AC_SUBST(install_sh)]) + +# Copyright (C) 2003, 2005 Free Software Foundation, Inc. +# +# This file is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + +# serial 2 + +# Check whether the underlying file-system supports filenames +# with a leading dot. For instance MS-DOS doesn't. +AC_DEFUN([AM_SET_LEADING_DOT], +[rm -rf .tst 2>/dev/null +mkdir .tst 2>/dev/null +if test -d .tst; then + am__leading_dot=. +else + am__leading_dot=_ +fi +rmdir .tst 2>/dev/null +AC_SUBST([am__leading_dot])]) + +# Add --enable-maintainer-mode option to configure. -*- Autoconf -*- +# From Jim Meyering + +# Copyright (C) 1996, 1998, 2000, 2001, 2002, 2003, 2004, 2005, 2008, +# 2011 Free Software Foundation, Inc. +# +# This file is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + +# serial 5 + +# AM_MAINTAINER_MODE([DEFAULT-MODE]) +# ---------------------------------- +# Control maintainer-specific portions of Makefiles. +# Default is to disable them, unless `enable' is passed literally. +# For symmetry, `disable' may be passed as well. Anyway, the user +# can override the default with the --enable/--disable switch. +AC_DEFUN([AM_MAINTAINER_MODE], +[m4_case(m4_default([$1], [disable]), + [enable], [m4_define([am_maintainer_other], [disable])], + [disable], [m4_define([am_maintainer_other], [enable])], + [m4_define([am_maintainer_other], [enable]) + m4_warn([syntax], [unexpected argument to AM@&t@_MAINTAINER_MODE: $1])]) +AC_MSG_CHECKING([whether to enable maintainer-specific portions of Makefiles]) + dnl maintainer-mode's default is 'disable' unless 'enable' is passed + AC_ARG_ENABLE([maintainer-mode], +[ --][am_maintainer_other][-maintainer-mode am_maintainer_other make rules and dependencies not useful + (and sometimes confusing) to the casual installer], + [USE_MAINTAINER_MODE=$enableval], + [USE_MAINTAINER_MODE=]m4_if(am_maintainer_other, [enable], [no], [yes])) + AC_MSG_RESULT([$USE_MAINTAINER_MODE]) + AM_CONDITIONAL([MAINTAINER_MODE], [test $USE_MAINTAINER_MODE = yes]) + MAINT=$MAINTAINER_MODE_TRUE + AC_SUBST([MAINT])dnl +] +) + +AU_DEFUN([jm_MAINTAINER_MODE], [AM_MAINTAINER_MODE]) + +# Fake the existence of programs that GNU maintainers use. -*- Autoconf -*- + +# Copyright (C) 1997, 1999, 2000, 2001, 2003, 2004, 2005, 2008 +# Free Software Foundation, Inc. +# +# This file is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + +# serial 6 + +# AM_MISSING_PROG(NAME, PROGRAM) +# ------------------------------ +AC_DEFUN([AM_MISSING_PROG], +[AC_REQUIRE([AM_MISSING_HAS_RUN]) +$1=${$1-"${am_missing_run}$2"} +AC_SUBST($1)]) + + +# AM_MISSING_HAS_RUN +# ------------------ +# Define MISSING if not defined so far and test if it supports --run. +# If it does, set am_missing_run to use it, otherwise, to nothing. 
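+# As a minimal sketch (the auxiliary-directory path below is hypothetical):
+# once the macro above has run for, say, ACLOCAL, a tree whose aclocal is
+# missing or too old ends up with roughly
+#   ACLOCAL="${SHELL} /abs/aux-dir/missing --run aclocal-1.11"
+# so the generated rules go through the `missing' wrapper instead of failing
+# outright.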
+AC_DEFUN([AM_MISSING_HAS_RUN], +[AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl +AC_REQUIRE_AUX_FILE([missing])dnl +if test x"${MISSING+set}" != xset; then + case $am_aux_dir in + *\ * | *\ *) + MISSING="\${SHELL} \"$am_aux_dir/missing\"" ;; + *) + MISSING="\${SHELL} $am_aux_dir/missing" ;; + esac +fi +# Use eval to expand $SHELL +if eval "$MISSING --run true"; then + am_missing_run="$MISSING --run " +else + am_missing_run= + AC_MSG_WARN([`missing' script is too old or missing]) +fi +]) + +# Copyright (C) 2003, 2004, 2005, 2006, 2011 Free Software Foundation, +# Inc. +# +# This file is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + +# serial 1 + +# AM_PROG_MKDIR_P +# --------------- +# Check for `mkdir -p'. +AC_DEFUN([AM_PROG_MKDIR_P], +[AC_PREREQ([2.60])dnl +AC_REQUIRE([AC_PROG_MKDIR_P])dnl +dnl Automake 1.8 to 1.9.6 used to define mkdir_p. We now use MKDIR_P, +dnl while keeping a definition of mkdir_p for backward compatibility. +dnl @MKDIR_P@ is magic: AC_OUTPUT adjusts its value for each Makefile. +dnl However we cannot define mkdir_p as $(MKDIR_P) for the sake of +dnl Makefile.ins that do not define MKDIR_P, so we do our own +dnl adjustment using top_builddir (which is defined more often than +dnl MKDIR_P). +AC_SUBST([mkdir_p], ["$MKDIR_P"])dnl +case $mkdir_p in + [[\\/$]]* | ?:[[\\/]]*) ;; + */*) mkdir_p="\$(top_builddir)/$mkdir_p" ;; +esac +]) + +# Copyright (C) 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2012 +# Free Software Foundation, Inc. +# +# This file is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + +# serial 6 + +# AM_ENABLE_MULTILIB([MAKEFILE], [REL-TO-TOP-SRCDIR]) +# --------------------------------------------------- +# Add --enable-multilib to configure. +AC_DEFUN([AM_ENABLE_MULTILIB], +[m4_warn([obsolete], [$0 will be removed from Automake core soon. +Files implementing the "multilib" feature are (and will remain) available +to the 'contrib/' directory in the Automake distribution.])]dnl +[# Default to --enable-multilib +AC_ARG_ENABLE(multilib, +[ --enable-multilib build many library versions (default)], +[case "$enableval" in + yes) multilib=yes ;; + no) multilib=no ;; + *) AC_MSG_ERROR([bad value $enableval for multilib option]) ;; + esac], + [multilib=yes]) + +# We may get other options which we leave undocumented: +# --with-target-subdir, --with-multisrctop, --with-multisubdir +# See config-ml.in if you want the gory details. + +if test "$srcdir" = "."; then + if test "$with_target_subdir" != "."; then + multi_basedir="$srcdir/$with_multisrctop../$2" + else + multi_basedir="$srcdir/$with_multisrctop$2" + fi +else + multi_basedir="$srcdir/$2" +fi +AC_SUBST(multi_basedir) + +# Even if the default multilib is not a cross compilation, +# it may be that some of the other multilibs are. +if test $cross_compiling = no && test $multilib = yes \ + && test "x${with_multisubdir}" != x ; then + cross_compiling=maybe +fi + +AC_OUTPUT_COMMANDS([ +# Only add multilib support code if we just rebuilt the top-level +# Makefile. +case " $CONFIG_FILES " in + *" ]m4_default([$1],Makefile)[ "*) + ac_file=]m4_default([$1],Makefile)[ . 
${multi_basedir}/config-ml.in + ;; +esac], + [ +srcdir="$srcdir" +host="$host" +target="$target" +with_multisubdir="$with_multisubdir" +with_multisrctop="$with_multisrctop" +with_target_subdir="$with_target_subdir" +ac_configure_args="${multilib_arg} ${ac_configure_args}" +multi_basedir="$multi_basedir" +CONFIG_SHELL=${CONFIG_SHELL-/bin/sh} +CC="$CC"])])dnl + +# Helper functions for option handling. -*- Autoconf -*- + +# Copyright (C) 2001, 2002, 2003, 2005, 2008, 2010 Free Software +# Foundation, Inc. +# +# This file is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + +# serial 5 + +# _AM_MANGLE_OPTION(NAME) +# ----------------------- +AC_DEFUN([_AM_MANGLE_OPTION], +[[_AM_OPTION_]m4_bpatsubst($1, [[^a-zA-Z0-9_]], [_])]) + +# _AM_SET_OPTION(NAME) +# -------------------- +# Set option NAME. Presently that only means defining a flag for this option. +AC_DEFUN([_AM_SET_OPTION], +[m4_define(_AM_MANGLE_OPTION([$1]), 1)]) + +# _AM_SET_OPTIONS(OPTIONS) +# ------------------------ +# OPTIONS is a space-separated list of Automake options. +AC_DEFUN([_AM_SET_OPTIONS], +[m4_foreach_w([_AM_Option], [$1], [_AM_SET_OPTION(_AM_Option)])]) + +# _AM_IF_OPTION(OPTION, IF-SET, [IF-NOT-SET]) +# ------------------------------------------- +# Execute IF-SET if OPTION is set, IF-NOT-SET otherwise. +AC_DEFUN([_AM_IF_OPTION], +[m4_ifset(_AM_MANGLE_OPTION([$1]), [$2], [$3])]) + +# Check to make sure that the build environment is sane. -*- Autoconf -*- + +# Copyright (C) 1996, 1997, 2000, 2001, 2003, 2005, 2008 +# Free Software Foundation, Inc. +# +# This file is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + +# serial 5 + +# AM_SANITY_CHECK +# --------------- +AC_DEFUN([AM_SANITY_CHECK], +[AC_MSG_CHECKING([whether build environment is sane]) +# Just in case +sleep 1 +echo timestamp > conftest.file +# Reject unsafe characters in $srcdir or the absolute working directory +# name. Accept space and tab only in the latter. +am_lf=' +' +case `pwd` in + *[[\\\"\#\$\&\'\`$am_lf]]*) + AC_MSG_ERROR([unsafe absolute working directory name]);; +esac +case $srcdir in + *[[\\\"\#\$\&\'\`$am_lf\ \ ]]*) + AC_MSG_ERROR([unsafe srcdir value: `$srcdir']);; +esac + +# Do `set' in a subshell so we don't clobber the current shell's +# arguments. Must try -L first in case configure is actually a +# symlink; some systems play weird games with the mod time of symlinks +# (eg FreeBSD returns the mod time of the symlink's containing +# directory). +if ( + set X `ls -Lt "$srcdir/configure" conftest.file 2> /dev/null` + if test "$[*]" = "X"; then + # -L didn't work. + set X `ls -t "$srcdir/configure" conftest.file` + fi + rm -f conftest.file + if test "$[*]" != "X $srcdir/configure conftest.file" \ + && test "$[*]" != "X conftest.file $srcdir/configure"; then + + # If neither matched, then we have a broken ls. This can happen + # if, for instance, CONFIG_SHELL is bash and it inherits a + # broken ls alias from the environment. This has actually + # happened. Such a system could not be considered "sane". + AC_MSG_ERROR([ls -t appears to fail. Make sure there is not a broken +alias in your environment]) + fi + + test "$[2]" = conftest.file + ) +then + # Ok. + : +else + AC_MSG_ERROR([newly created file is older than distributed files! 
+Check your system clock]) +fi +AC_MSG_RESULT(yes)]) + +# Copyright (C) 2001, 2003, 2005, 2011 Free Software Foundation, Inc. +# +# This file is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + +# serial 1 + +# AM_PROG_INSTALL_STRIP +# --------------------- +# One issue with vendor `install' (even GNU) is that you can't +# specify the program used to strip binaries. This is especially +# annoying in cross-compiling environments, where the build's strip +# is unlikely to handle the host's binaries. +# Fortunately install-sh will honor a STRIPPROG variable, so we +# always use install-sh in `make install-strip', and initialize +# STRIPPROG with the value of the STRIP variable (set by the user). +AC_DEFUN([AM_PROG_INSTALL_STRIP], +[AC_REQUIRE([AM_PROG_INSTALL_SH])dnl +# Installed binaries are usually stripped using `strip' when the user +# run `make install-strip'. However `strip' might not be the right +# tool to use in cross-compilation environments, therefore Automake +# will honor the `STRIP' environment variable to overrule this program. +dnl Don't test for $cross_compiling = yes, because it might be `maybe'. +if test "$cross_compiling" != no; then + AC_CHECK_TOOL([STRIP], [strip], :) +fi +INSTALL_STRIP_PROGRAM="\$(install_sh) -c -s" +AC_SUBST([INSTALL_STRIP_PROGRAM])]) + +# Copyright (C) 2006, 2008, 2010 Free Software Foundation, Inc. +# +# This file is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + +# serial 3 + +# _AM_SUBST_NOTMAKE(VARIABLE) +# --------------------------- +# Prevent Automake from outputting VARIABLE = @VARIABLE@ in Makefile.in. +# This macro is traced by Automake. +AC_DEFUN([_AM_SUBST_NOTMAKE]) + +# AM_SUBST_NOTMAKE(VARIABLE) +# -------------------------- +# Public sister of _AM_SUBST_NOTMAKE. +AC_DEFUN([AM_SUBST_NOTMAKE], [_AM_SUBST_NOTMAKE($@)]) + +# Check how to create a tarball. -*- Autoconf -*- + +# Copyright (C) 2004, 2005, 2012 Free Software Foundation, Inc. +# +# This file is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + +# serial 2 + +# _AM_PROG_TAR(FORMAT) +# -------------------- +# Check how to create a tarball in format FORMAT. +# FORMAT should be one of `v7', `ustar', or `pax'. +# +# Substitute a variable $(am__tar) that is a command +# writing to stdout a FORMAT-tarball containing the directory +# $tardir. +# tardir=directory && $(am__tar) > result.tar +# +# Substitute a variable $(am__untar) that extract such +# a tarball read from stdin. +# $(am__untar) < result.tar +AC_DEFUN([_AM_PROG_TAR], +[# Always define AMTAR for backward compatibility. Yes, it's still used +# in the wild :-( We should find a proper way to deprecate it ... +AC_SUBST([AMTAR], ['$${TAR-tar}']) +m4_if([$1], [v7], + [am__tar='$${TAR-tar} chof - "$$tardir"' am__untar='$${TAR-tar} xf -'], + [m4_case([$1], [ustar],, [pax],, + [m4_fatal([Unknown tar format])]) +AC_MSG_CHECKING([how to create a $1 tar archive]) +# Loop over all known methods to create a tar archive until one works. +_am_tools='gnutar m4_if([$1], [ustar], [plaintar]) pax cpio none' +_am_tools=${am_cv_prog_tar_$1-$_am_tools} +# Do not fold the above two line into one, because Tru64 sh and +# Solaris sh will not grok spaces in the rhs of `-'. 
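+# Before the detection loop below, a minimal sketch of the documented
+# Makefile-level contract being implemented ("mypkg-1.0" is only an
+# illustrative directory name):
+#   tardir=mypkg-1.0 && $(am__tar) > mypkg-1.0.tar
+#   $(am__untar) < mypkg-1.0.tar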
+for _am_tool in $_am_tools +do + case $_am_tool in + gnutar) + for _am_tar in tar gnutar gtar; + do + AM_RUN_LOG([$_am_tar --version]) && break + done + am__tar="$_am_tar --format=m4_if([$1], [pax], [posix], [$1]) -chf - "'"$$tardir"' + am__tar_="$_am_tar --format=m4_if([$1], [pax], [posix], [$1]) -chf - "'"$tardir"' + am__untar="$_am_tar -xf -" + ;; + plaintar) + # Must skip GNU tar: if it does not support --format= it doesn't create + # ustar tarball either. + (tar --version) >/dev/null 2>&1 && continue + am__tar='tar chf - "$$tardir"' + am__tar_='tar chf - "$tardir"' + am__untar='tar xf -' + ;; + pax) + am__tar='pax -L -x $1 -w "$$tardir"' + am__tar_='pax -L -x $1 -w "$tardir"' + am__untar='pax -r' + ;; + cpio) + am__tar='find "$$tardir" -print | cpio -o -H $1 -L' + am__tar_='find "$tardir" -print | cpio -o -H $1 -L' + am__untar='cpio -i -H $1 -d' + ;; + none) + am__tar=false + am__tar_=false + am__untar=false + ;; + esac + + # If the value was cached, stop now. We just wanted to have am__tar + # and am__untar set. + test -n "${am_cv_prog_tar_$1}" && break + + # tar/untar a dummy directory, and stop if the command works + rm -rf conftest.dir + mkdir conftest.dir + echo GrepMe > conftest.dir/file + AM_RUN_LOG([tardir=conftest.dir && eval $am__tar_ >conftest.tar]) + rm -rf conftest.dir + if test -s conftest.tar; then + AM_RUN_LOG([$am__untar /dev/null 2>&1 && break + fi +done +rm -rf conftest.dir + +AC_CACHE_VAL([am_cv_prog_tar_$1], [am_cv_prog_tar_$1=$_am_tool]) +AC_MSG_RESULT([$am_cv_prog_tar_$1])]) +AC_SUBST([am__tar]) +AC_SUBST([am__untar]) +]) # _AM_PROG_TAR + +m4_include([config/libtool.m4]) +m4_include([config/ltoptions.m4]) +m4_include([config/ltsugar.m4]) +m4_include([config/ltversion.m4]) +m4_include([config/lt~obsolete.m4]) +m4_include([acinclude.m4]) diff --git a/backtrace-sys/src/libbacktrace/alloc.c b/backtrace-sys/src/libbacktrace/alloc.c new file mode 100644 index 000000000..7070afbf2 --- /dev/null +++ b/backtrace-sys/src/libbacktrace/alloc.c @@ -0,0 +1,156 @@ +/* alloc.c -- Memory allocation without mmap. + Copyright (C) 2012-2018 Free Software Foundation, Inc. + Written by Ian Lance Taylor, Google. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + (1) Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + (2) Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + + (3) The name of the author may not be used to + endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, +INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, +STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING +IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. 
*/ + +#include "config.h" + +#include +#include +#include + +#include "backtrace.h" +#include "internal.h" + +/* Allocation routines to use on systems that do not support anonymous + mmap. This implementation just uses malloc, which means that the + backtrace functions may not be safely invoked from a signal + handler. */ + +/* Allocate memory like malloc. If ERROR_CALLBACK is NULL, don't + report an error. */ + +void * +backtrace_alloc (struct backtrace_state *state ATTRIBUTE_UNUSED, + size_t size, backtrace_error_callback error_callback, + void *data) +{ + void *ret; + + ret = malloc (size); + if (ret == NULL) + { + if (error_callback) + error_callback (data, "malloc", errno); + } + return ret; +} + +/* Free memory. */ + +void +backtrace_free (struct backtrace_state *state ATTRIBUTE_UNUSED, + void *p, size_t size ATTRIBUTE_UNUSED, + backtrace_error_callback error_callback ATTRIBUTE_UNUSED, + void *data ATTRIBUTE_UNUSED) +{ + free (p); +} + +/* Grow VEC by SIZE bytes. */ + +void * +backtrace_vector_grow (struct backtrace_state *state ATTRIBUTE_UNUSED, + size_t size, backtrace_error_callback error_callback, + void *data, struct backtrace_vector *vec) +{ + void *ret; + + if (size > vec->alc) + { + size_t alc; + void *base; + + if (vec->size == 0) + alc = 32 * size; + else if (vec->size >= 4096) + alc = vec->size + 4096; + else + alc = 2 * vec->size; + + if (alc < vec->size + size) + alc = vec->size + size; + + base = realloc (vec->base, alc); + if (base == NULL) + { + error_callback (data, "realloc", errno); + return NULL; + } + + vec->base = base; + vec->alc = alc - vec->size; + } + + ret = (char *) vec->base + vec->size; + vec->size += size; + vec->alc -= size; + return ret; +} + +/* Finish the current allocation on VEC. */ + +void * +backtrace_vector_finish (struct backtrace_state *state, + struct backtrace_vector *vec, + backtrace_error_callback error_callback, + void *data) +{ + void *ret; + + /* With this allocator we call realloc in backtrace_vector_grow, + which means we can't easily reuse the memory here. So just + release it. */ + if (!backtrace_vector_release (state, vec, error_callback, data)) + return NULL; + ret = vec->base; + vec->base = NULL; + vec->size = 0; + vec->alc = 0; + return ret; +} + +/* Release any extra space allocated for VEC. */ + +int +backtrace_vector_release (struct backtrace_state *state ATTRIBUTE_UNUSED, + struct backtrace_vector *vec, + backtrace_error_callback error_callback, + void *data) +{ + vec->base = realloc (vec->base, vec->size); + if (vec->base == NULL) + { + error_callback (data, "realloc", errno); + return 0; + } + vec->alc = 0; + return 1; +} diff --git a/backtrace-sys/src/libbacktrace/atomic.c b/backtrace-sys/src/libbacktrace/atomic.c new file mode 100644 index 000000000..5ab160066 --- /dev/null +++ b/backtrace-sys/src/libbacktrace/atomic.c @@ -0,0 +1,113 @@ +/* atomic.c -- Support for atomic functions if not present. + Copyright (C) 2013-2018 Free Software Foundation, Inc. + Written by Ian Lance Taylor, Google. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + (1) Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + (2) Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. 
+ + (3) The name of the author may not be used to + endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, +INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, +STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING +IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. */ + +#include "config.h" + +#include + +#include "backtrace.h" +#include "backtrace-supported.h" +#include "internal.h" + +/* This file holds implementations of the atomic functions that are + used if the host compiler has the sync functions but not the atomic + functions, as is true of versions of GCC before 4.7. */ + +#if !defined (HAVE_ATOMIC_FUNCTIONS) && defined (HAVE_SYNC_FUNCTIONS) + +/* Do an atomic load of a pointer. */ + +void * +backtrace_atomic_load_pointer (void *arg) +{ + void **pp; + void *p; + + pp = (void **) arg; + p = *pp; + while (!__sync_bool_compare_and_swap (pp, p, p)) + p = *pp; + return p; +} + +/* Do an atomic load of an int. */ + +int +backtrace_atomic_load_int (int *p) +{ + int i; + + i = *p; + while (!__sync_bool_compare_and_swap (p, i, i)) + i = *p; + return i; +} + +/* Do an atomic store of a pointer. */ + +void +backtrace_atomic_store_pointer (void *arg, void *p) +{ + void **pp; + void *old; + + pp = (void **) arg; + old = *pp; + while (!__sync_bool_compare_and_swap (pp, old, p)) + old = *pp; +} + +/* Do an atomic store of a size_t value. */ + +void +backtrace_atomic_store_size_t (size_t *p, size_t v) +{ + size_t old; + + old = *p; + while (!__sync_bool_compare_and_swap (p, old, v)) + old = *p; +} + +/* Do an atomic store of a int value. */ + +void +backtrace_atomic_store_int (int *p, int v) +{ + size_t old; + + old = *p; + while (!__sync_bool_compare_and_swap (p, old, v)) + old = *p; +} + +#endif diff --git a/backtrace-sys/src/libbacktrace/backtrace-supported.h.in b/backtrace-sys/src/libbacktrace/backtrace-supported.h.in new file mode 100644 index 000000000..c2d03d241 --- /dev/null +++ b/backtrace-sys/src/libbacktrace/backtrace-supported.h.in @@ -0,0 +1,66 @@ +/* backtrace-supported.h.in -- Whether stack backtrace is supported. + Copyright (C) 2012-2016 Free Software Foundation, Inc. + Written by Ian Lance Taylor, Google. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + (1) Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + (2) Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + + (3) The name of the author may not be used to + endorse or promote products derived from this software without + specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, +INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, +STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING +IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. */ + +/* The file backtrace-supported.h.in is used by configure to generate + the file backtrace-supported.h. The file backtrace-supported.h may + be #include'd to see whether the backtrace library will be able to + get a backtrace and produce symbolic information. */ + + +/* BACKTRACE_SUPPORTED will be #define'd as 1 if the backtrace library + should work, 0 if it will not. Libraries may #include this to make + other arrangements. */ + +#define BACKTRACE_SUPPORTED @BACKTRACE_SUPPORTED@ + +/* BACKTRACE_USES_MALLOC will be #define'd as 1 if the backtrace + library will call malloc as it works, 0 if it will call mmap + instead. This may be used to determine whether it is safe to call + the backtrace functions from a signal handler. In general this + only applies to calls like backtrace and backtrace_pcinfo. It does + not apply to backtrace_simple, which never calls malloc. It does + not apply to backtrace_print, which always calls fprintf and + therefore malloc. */ + +#define BACKTRACE_USES_MALLOC @BACKTRACE_USES_MALLOC@ + +/* BACKTRACE_SUPPORTS_THREADS will be #define'd as 1 if the backtrace + library is configured with threading support, 0 if not. If this is + 0, the threaded parameter to backtrace_create_state must be passed + as 0. */ + +#define BACKTRACE_SUPPORTS_THREADS @BACKTRACE_SUPPORTS_THREADS@ + +/* BACKTRACE_SUPPORTS_DATA will be #defined'd as 1 if the backtrace_syminfo + will work for variables. It will always work for functions. */ + +#define BACKTRACE_SUPPORTS_DATA @BACKTRACE_SUPPORTS_DATA@ diff --git a/backtrace-sys/src/libbacktrace/backtrace.c b/backtrace-sys/src/libbacktrace/backtrace.c new file mode 100644 index 000000000..f8e3dc59d --- /dev/null +++ b/backtrace-sys/src/libbacktrace/backtrace.c @@ -0,0 +1,129 @@ +/* backtrace.c -- Entry point for stack backtrace library. + Copyright (C) 2012-2018 Free Software Foundation, Inc. + Written by Ian Lance Taylor, Google. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + (1) Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + (2) Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + + (3) The name of the author may not be used to + endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, +INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, +STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING +IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. */ + +#include "config.h" + +#include + +#include "unwind.h" +#include "backtrace.h" +#include "internal.h" + +/* The main backtrace_full routine. */ + +/* Data passed through _Unwind_Backtrace. */ + +struct backtrace_data +{ + /* Number of frames to skip. */ + int skip; + /* Library state. */ + struct backtrace_state *state; + /* Callback routine. */ + backtrace_full_callback callback; + /* Error callback routine. */ + backtrace_error_callback error_callback; + /* Data to pass to callback routines. */ + void *data; + /* Value to return from backtrace_full. */ + int ret; + /* Whether there is any memory available. */ + int can_alloc; +}; + +/* Unwind library callback routine. This is passed to + _Unwind_Backtrace. */ + +static _Unwind_Reason_Code +unwind (struct _Unwind_Context *context, void *vdata) +{ + struct backtrace_data *bdata = (struct backtrace_data *) vdata; + uintptr_t pc; + int ip_before_insn = 0; + +#ifdef HAVE_GETIPINFO + pc = _Unwind_GetIPInfo (context, &ip_before_insn); +#else + pc = _Unwind_GetIP (context); +#endif + + if (bdata->skip > 0) + { + --bdata->skip; + return _URC_NO_REASON; + } + + if (!ip_before_insn) + --pc; + + if (!bdata->can_alloc) + bdata->ret = bdata->callback (bdata->data, pc, NULL, 0, NULL); + else + bdata->ret = backtrace_pcinfo (bdata->state, pc, bdata->callback, + bdata->error_callback, bdata->data); + if (bdata->ret != 0) + return _URC_END_OF_STACK; + + return _URC_NO_REASON; +} + +/* Get a stack backtrace. */ + +int +backtrace_full (struct backtrace_state *state, int skip, + backtrace_full_callback callback, + backtrace_error_callback error_callback, void *data) +{ + struct backtrace_data bdata; + void *p; + + bdata.skip = skip + 1; + bdata.state = state; + bdata.callback = callback; + bdata.error_callback = error_callback; + bdata.data = data; + bdata.ret = 0; + + /* If we can't allocate any memory at all, don't try to produce + file/line information. */ + p = backtrace_alloc (state, 4096, NULL, NULL); + if (p == NULL) + bdata.can_alloc = 0; + else + { + backtrace_free (state, p, 4096, NULL, NULL); + bdata.can_alloc = 1; + } + + _Unwind_Backtrace (unwind, &bdata); + return bdata.ret; +} diff --git a/backtrace-sys/src/libbacktrace/backtrace.h b/backtrace-sys/src/libbacktrace/backtrace.h new file mode 100644 index 000000000..17b10197d --- /dev/null +++ b/backtrace-sys/src/libbacktrace/backtrace.h @@ -0,0 +1,182 @@ +/* backtrace.h -- Public header file for stack backtrace library. + Copyright (C) 2012-2018 Free Software Foundation, Inc. + Written by Ian Lance Taylor, Google. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + (1) Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. 
+ + (2) Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + + (3) The name of the author may not be used to + endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, +INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, +STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING +IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. */ + +#ifndef BACKTRACE_H +#define BACKTRACE_H + +#include +#include +#include + +#ifdef __cplusplus +extern "C" { +#endif + +/* The backtrace state. This struct is intentionally not defined in + the public interface. */ + +struct backtrace_state; + +/* The type of the error callback argument to backtrace functions. + This function, if not NULL, will be called for certain error cases. + The DATA argument is passed to the function that calls this one. + The MSG argument is an error message. The ERRNUM argument, if + greater than 0, holds an errno value. The MSG buffer may become + invalid after this function returns. + + As a special case, the ERRNUM argument will be passed as -1 if no + debug info can be found for the executable, but the function + requires debug info (e.g., backtrace_full, backtrace_pcinfo). The + MSG in this case will be something along the lines of "no debug + info". Similarly, ERRNUM will be passed as -1 if there is no + symbol table, but the function requires a symbol table (e.g., + backtrace_syminfo). This may be used as a signal that some other + approach should be tried. */ + +typedef void (*backtrace_error_callback) (void *data, const char *msg, + int errnum); + +/* Create state information for the backtrace routines. This must be + called before any of the other routines, and its return value must + be passed to all of the other routines. FILENAME is the path name + of the executable file; if it is NULL the library will try + system-specific path names. If not NULL, FILENAME must point to a + permanent buffer. If THREADED is non-zero the state may be + accessed by multiple threads simultaneously, and the library will + use appropriate atomic operations. If THREADED is zero the state + may only be accessed by one thread at a time. This returns a state + pointer on success, NULL on error. If an error occurs, this will + call the ERROR_CALLBACK routine. */ + +extern struct backtrace_state *backtrace_create_state ( + const char *filename, int threaded, + backtrace_error_callback error_callback, void *data); + +/* The type of the callback argument to the backtrace_full function. + DATA is the argument passed to backtrace_full. PC is the program + counter. FILENAME is the name of the file containing PC, or NULL + if not available. LINENO is the line number in FILENAME containing + PC, or 0 if not available. FUNCTION is the name of the function + containing PC, or NULL if not available. 
This should return 0 to + continuing tracing. The FILENAME and FUNCTION buffers may become + invalid after this function returns. */ + +typedef int (*backtrace_full_callback) (void *data, uintptr_t pc, + const char *filename, int lineno, + const char *function); + +/* Get a full stack backtrace. SKIP is the number of frames to skip; + passing 0 will start the trace with the function calling + backtrace_full. DATA is passed to the callback routine. If any + call to CALLBACK returns a non-zero value, the stack backtrace + stops, and backtrace returns that value; this may be used to limit + the number of stack frames desired. If all calls to CALLBACK + return 0, backtrace returns 0. The backtrace_full function will + make at least one call to either CALLBACK or ERROR_CALLBACK. This + function requires debug info for the executable. */ + +extern int backtrace_full (struct backtrace_state *state, int skip, + backtrace_full_callback callback, + backtrace_error_callback error_callback, + void *data); + +/* The type of the callback argument to the backtrace_simple function. + DATA is the argument passed to simple_backtrace. PC is the program + counter. This should return 0 to continue tracing. */ + +typedef int (*backtrace_simple_callback) (void *data, uintptr_t pc); + +/* Get a simple backtrace. SKIP is the number of frames to skip, as + in backtrace. DATA is passed to the callback routine. If any call + to CALLBACK returns a non-zero value, the stack backtrace stops, + and backtrace_simple returns that value. Otherwise + backtrace_simple returns 0. The backtrace_simple function will + make at least one call to either CALLBACK or ERROR_CALLBACK. This + function does not require any debug info for the executable. */ + +extern int backtrace_simple (struct backtrace_state *state, int skip, + backtrace_simple_callback callback, + backtrace_error_callback error_callback, + void *data); + +/* Print the current backtrace in a user readable format to a FILE. + SKIP is the number of frames to skip, as in backtrace_full. Any + error messages are printed to stderr. This function requires debug + info for the executable. */ + +extern void backtrace_print (struct backtrace_state *state, int skip, FILE *); + +/* Given PC, a program counter in the current program, call the + callback function with filename, line number, and function name + information. This will normally call the callback function exactly + once. However, if the PC happens to describe an inlined call, and + the debugging information contains the necessary information, then + this may call the callback function multiple times. This will make + at least one call to either CALLBACK or ERROR_CALLBACK. This + returns the first non-zero value returned by CALLBACK, or 0. */ + +extern int backtrace_pcinfo (struct backtrace_state *state, uintptr_t pc, + backtrace_full_callback callback, + backtrace_error_callback error_callback, + void *data); + +/* The type of the callback argument to backtrace_syminfo. DATA and + PC are the arguments passed to backtrace_syminfo. SYMNAME is the + name of the symbol for the corresponding code. SYMVAL is the + value and SYMSIZE is the size of the symbol. SYMNAME will be NULL + if no error occurred but the symbol could not be found. 
*/ + +typedef void (*backtrace_syminfo_callback) (void *data, uintptr_t pc, + const char *symname, + uintptr_t symval, + uintptr_t symsize); + +/* Given ADDR, an address or program counter in the current program, + call the callback information with the symbol name and value + describing the function or variable in which ADDR may be found. + This will call either CALLBACK or ERROR_CALLBACK exactly once. + This returns 1 on success, 0 on failure. This function requires + the symbol table but does not require the debug info. Note that if + the symbol table is present but ADDR could not be found in the + table, CALLBACK will be called with a NULL SYMNAME argument. + Returns 1 on success, 0 on error. */ + +extern int backtrace_syminfo (struct backtrace_state *state, uintptr_t addr, + backtrace_syminfo_callback callback, + backtrace_error_callback error_callback, + void *data); + +#ifdef __cplusplus +} /* End extern "C". */ +#endif + +#endif diff --git a/backtrace-sys/src/libbacktrace/btest.c b/backtrace-sys/src/libbacktrace/btest.c new file mode 100644 index 000000000..1348d54da --- /dev/null +++ b/backtrace-sys/src/libbacktrace/btest.c @@ -0,0 +1,500 @@ +/* btest.c -- Test for libbacktrace library + Copyright (C) 2012-2018 Free Software Foundation, Inc. + Written by Ian Lance Taylor, Google. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + (1) Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + (2) Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + + (3) The name of the author may not be used to + endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, +INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, +STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING +IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. */ + +/* This program tests the externally visible interfaces of the + libbacktrace library. */ + +#include +#include +#include +#include +#include + +#include "filenames.h" + +#include "backtrace.h" +#include "backtrace-supported.h" + +#include "testlib.h" + +/* Test the backtrace function with non-inlined functions. */ + +static int test1 (void) __attribute__ ((noinline, unused)); +static int f2 (int) __attribute__ ((noinline)); +static int f3 (int, int) __attribute__ ((noinline)); + +static int +test1 (void) +{ + /* Returning a value here and elsewhere avoids a tailcall which + would mess up the backtrace. 
*/ + return f2 (__LINE__) + 1; +} + +static int +f2 (int f1line) +{ + return f3 (f1line, __LINE__) + 2; +} + +static int +f3 (int f1line, int f2line) +{ + struct info all[20]; + struct bdata data; + int f3line; + int i; + + data.all = &all[0]; + data.index = 0; + data.max = 20; + data.failed = 0; + + f3line = __LINE__ + 1; + i = backtrace_full (state, 0, callback_one, error_callback_one, &data); + + if (i != 0) + { + fprintf (stderr, "test1: unexpected return value %d\n", i); + data.failed = 1; + } + + if (data.index < 3) + { + fprintf (stderr, + "test1: not enough frames; got %zu, expected at least 3\n", + data.index); + data.failed = 1; + } + + check ("test1", 0, all, f3line, "f3", "btest.c", &data.failed); + check ("test1", 1, all, f2line, "f2", "btest.c", &data.failed); + check ("test1", 2, all, f1line, "test1", "btest.c", &data.failed); + + printf ("%s: backtrace_full noinline\n", data.failed ? "FAIL" : "PASS"); + + if (data.failed) + ++failures; + + return failures; +} + +/* Test the backtrace function with inlined functions. */ + +static inline int test2 (void) __attribute__ ((always_inline, unused)); +static inline int f12 (int) __attribute__ ((always_inline)); +static inline int f13 (int, int) __attribute__ ((always_inline)); + +static inline int +test2 (void) +{ + return f12 (__LINE__) + 1; +} + +static inline int +f12 (int f1line) +{ + return f13 (f1line, __LINE__) + 2; +} + +static inline int +f13 (int f1line, int f2line) +{ + struct info all[20]; + struct bdata data; + int f3line; + int i; + + data.all = &all[0]; + data.index = 0; + data.max = 20; + data.failed = 0; + + f3line = __LINE__ + 1; + i = backtrace_full (state, 0, callback_one, error_callback_one, &data); + + if (i != 0) + { + fprintf (stderr, "test2: unexpected return value %d\n", i); + data.failed = 1; + } + + check ("test2", 0, all, f3line, "f13", "btest.c", &data.failed); + check ("test2", 1, all, f2line, "f12", "btest.c", &data.failed); + check ("test2", 2, all, f1line, "test2", "btest.c", &data.failed); + + printf ("%s: backtrace_full inline\n", data.failed ? "FAIL" : "PASS"); + + if (data.failed) + ++failures; + + return failures; +} + +/* Test the backtrace_simple function with non-inlined functions. 
*/ + +static int test3 (void) __attribute__ ((noinline, unused)); +static int f22 (int) __attribute__ ((noinline)); +static int f23 (int, int) __attribute__ ((noinline)); + +static int +test3 (void) +{ + return f22 (__LINE__) + 1; +} + +static int +f22 (int f1line) +{ + return f23 (f1line, __LINE__) + 2; +} + +static int +f23 (int f1line, int f2line) +{ + uintptr_t addrs[20]; + struct sdata data; + int f3line; + int i; + + data.addrs = &addrs[0]; + data.index = 0; + data.max = 20; + data.failed = 0; + + f3line = __LINE__ + 1; + i = backtrace_simple (state, 0, callback_two, error_callback_two, &data); + + if (i != 0) + { + fprintf (stderr, "test3: unexpected return value %d\n", i); + data.failed = 1; + } + + if (!data.failed) + { + struct info all[20]; + struct bdata bdata; + int j; + + bdata.all = &all[0]; + bdata.index = 0; + bdata.max = 20; + bdata.failed = 0; + + for (j = 0; j < 3; ++j) + { + i = backtrace_pcinfo (state, addrs[j], callback_one, + error_callback_one, &bdata); + if (i != 0) + { + fprintf (stderr, + ("test3: unexpected return value " + "from backtrace_pcinfo %d\n"), + i); + bdata.failed = 1; + } + if (!bdata.failed && bdata.index != (size_t) (j + 1)) + { + fprintf (stderr, + ("wrong number of calls from backtrace_pcinfo " + "got %u expected %d\n"), + (unsigned int) bdata.index, j + 1); + bdata.failed = 1; + } + } + + check ("test3", 0, all, f3line, "f23", "btest.c", &bdata.failed); + check ("test3", 1, all, f2line, "f22", "btest.c", &bdata.failed); + check ("test3", 2, all, f1line, "test3", "btest.c", &bdata.failed); + + if (bdata.failed) + data.failed = 1; + + for (j = 0; j < 3; ++j) + { + struct symdata symdata; + + symdata.name = NULL; + symdata.val = 0; + symdata.size = 0; + symdata.failed = 0; + + i = backtrace_syminfo (state, addrs[j], callback_three, + error_callback_three, &symdata); + if (i == 0) + { + fprintf (stderr, + ("test3: [%d]: unexpected return value " + "from backtrace_syminfo %d\n"), + j, i); + symdata.failed = 1; + } + + if (!symdata.failed) + { + const char *expected; + + switch (j) + { + case 0: + expected = "f23"; + break; + case 1: + expected = "f22"; + break; + case 2: + expected = "test3"; + break; + default: + assert (0); + } + + if (symdata.name == NULL) + { + fprintf (stderr, "test3: [%d]: NULL syminfo name\n", j); + symdata.failed = 1; + } + /* Use strncmp, not strcmp, because GCC might create a + clone. */ + else if (strncmp (symdata.name, expected, strlen (expected)) + != 0) + { + fprintf (stderr, + ("test3: [%d]: unexpected syminfo name " + "got %s expected %s\n"), + j, symdata.name, expected); + symdata.failed = 1; + } + } + + if (symdata.failed) + data.failed = 1; + } + } + + printf ("%s: backtrace_simple noinline\n", data.failed ? "FAIL" : "PASS"); + + if (data.failed) + ++failures; + + return failures; +} + +/* Test the backtrace_simple function with inlined functions. 
*/ + +static inline int test4 (void) __attribute__ ((always_inline, unused)); +static inline int f32 (int) __attribute__ ((always_inline)); +static inline int f33 (int, int) __attribute__ ((always_inline)); + +static inline int +test4 (void) +{ + return f32 (__LINE__) + 1; +} + +static inline int +f32 (int f1line) +{ + return f33 (f1line, __LINE__) + 2; +} + +static inline int +f33 (int f1line, int f2line) +{ + uintptr_t addrs[20]; + struct sdata data; + int f3line; + int i; + + data.addrs = &addrs[0]; + data.index = 0; + data.max = 20; + data.failed = 0; + + f3line = __LINE__ + 1; + i = backtrace_simple (state, 0, callback_two, error_callback_two, &data); + + if (i != 0) + { + fprintf (stderr, "test3: unexpected return value %d\n", i); + data.failed = 1; + } + + if (!data.failed) + { + struct info all[20]; + struct bdata bdata; + + bdata.all = &all[0]; + bdata.index = 0; + bdata.max = 20; + bdata.failed = 0; + + i = backtrace_pcinfo (state, addrs[0], callback_one, error_callback_one, + &bdata); + if (i != 0) + { + fprintf (stderr, + ("test4: unexpected return value " + "from backtrace_pcinfo %d\n"), + i); + bdata.failed = 1; + } + + check ("test4", 0, all, f3line, "f33", "btest.c", &bdata.failed); + check ("test4", 1, all, f2line, "f32", "btest.c", &bdata.failed); + check ("test4", 2, all, f1line, "test4", "btest.c", &bdata.failed); + + if (bdata.failed) + data.failed = 1; + } + + printf ("%s: backtrace_simple inline\n", data.failed ? "FAIL" : "PASS"); + + if (data.failed) + ++failures; + + return failures; +} + +static int test5 (void) __attribute__ ((unused)); + +int global = 1; + +static int +test5 (void) +{ + struct symdata symdata; + int i; + uintptr_t addr = (uintptr_t) &global; + + if (sizeof (global) > 1) + addr += 1; + + symdata.name = NULL; + symdata.val = 0; + symdata.size = 0; + symdata.failed = 0; + + i = backtrace_syminfo (state, addr, callback_three, + error_callback_three, &symdata); + if (i == 0) + { + fprintf (stderr, + "test5: unexpected return value from backtrace_syminfo %d\n", + i); + symdata.failed = 1; + } + + if (!symdata.failed) + { + if (symdata.name == NULL) + { + fprintf (stderr, "test5: NULL syminfo name\n"); + symdata.failed = 1; + } + else if (strcmp (symdata.name, "global") != 0) + { + fprintf (stderr, + "test5: unexpected syminfo name got %s expected %s\n", + symdata.name, "global"); + symdata.failed = 1; + } + else if (symdata.val != (uintptr_t) &global) + { + fprintf (stderr, + "test5: unexpected syminfo value got %lx expected %lx\n", + (unsigned long) symdata.val, + (unsigned long) (uintptr_t) &global); + symdata.failed = 1; + } + else if (symdata.size != sizeof (global)) + { + fprintf (stderr, + "test5: unexpected syminfo size got %lx expected %lx\n", + (unsigned long) symdata.size, + (unsigned long) sizeof (global)); + symdata.failed = 1; + } + } + + printf ("%s: backtrace_syminfo variable\n", + symdata.failed ? "FAIL" : "PASS"); + + if (symdata.failed) + ++failures; + + return failures; +} + +/* Check that are no files left open. */ + +static void +check_open_files (void) +{ + int i; + + for (i = 3; i < 10; i++) + { + if (close (i) == 0) + { + fprintf (stderr, + "ERROR: descriptor %d still open after tests complete\n", + i); + ++failures; + } + } +} + +/* Run all the tests. 
*/ + +int +main (int argc ATTRIBUTE_UNUSED, char **argv) +{ + state = backtrace_create_state (argv[0], BACKTRACE_SUPPORTS_THREADS, + error_callback_create, NULL); + +#if BACKTRACE_SUPPORTED + test1 (); + test2 (); + test3 (); + test4 (); +#if BACKTRACE_SUPPORTS_DATA + test5 (); +#endif +#endif + + check_open_files (); + + exit (failures ? EXIT_FAILURE : EXIT_SUCCESS); +} diff --git a/backtrace-sys/src/libbacktrace/config.guess b/backtrace-sys/src/libbacktrace/config.guess new file mode 100755 index 000000000..d622a44e5 --- /dev/null +++ b/backtrace-sys/src/libbacktrace/config.guess @@ -0,0 +1,1530 @@ +#! /bin/sh +# Attempt to guess a canonical system name. +# Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, +# 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +# 2011, 2012 Free Software Foundation, Inc. + +timestamp='2012-02-10' + +# This file is free software; you can redistribute it and/or modify it +# under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, see . +# +# As a special exception to the GNU General Public License, if you +# distribute this file as part of a program that contains a +# configuration script generated by Autoconf, you may include it under +# the same distribution terms that you use for the rest of that program. + + +# Originally written by Per Bothner. Please send patches (context +# diff format) to and include a ChangeLog +# entry. +# +# This script attempts to guess a canonical system name similar to +# config.sub. If it succeeds, it prints the system name on stdout, and +# exits with 0. Otherwise, it exits with 1. +# +# You can get the latest version of this script from: +# http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.guess;hb=HEAD + +me=`echo "$0" | sed -e 's,.*/,,'` + +usage="\ +Usage: $0 [OPTION] + +Output the configuration name of the system \`$me' is run on. + +Operation modes: + -h, --help print this help, then exit + -t, --time-stamp print date of last modification, then exit + -v, --version print version number, then exit + +Report bugs and patches to ." + +version="\ +GNU config.guess ($timestamp) + +Originally written by Per Bothner. +Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, +2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012 +Free Software Foundation, Inc. + +This is free software; see the source for copying conditions. There is NO +warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE." + +help=" +Try \`$me --help' for more information." + +# Parse command line +while test $# -gt 0 ; do + case $1 in + --time-stamp | --time* | -t ) + echo "$timestamp" ; exit ;; + --version | -v ) + echo "$version" ; exit ;; + --help | --h* | -h ) + echo "$usage"; exit ;; + -- ) # Stop option processing + shift; break ;; + - ) # Use stdin as input. 
+ break ;; + -* ) + echo "$me: invalid option $1$help" >&2 + exit 1 ;; + * ) + break ;; + esac +done + +if test $# != 0; then + echo "$me: too many arguments$help" >&2 + exit 1 +fi + +trap 'exit 1' 1 2 15 + +# CC_FOR_BUILD -- compiler used by this script. Note that the use of a +# compiler to aid in system detection is discouraged as it requires +# temporary files to be created and, as you can see below, it is a +# headache to deal with in a portable fashion. + +# Historically, `CC_FOR_BUILD' used to be named `HOST_CC'. We still +# use `HOST_CC' if defined, but it is deprecated. + +# Portable tmp directory creation inspired by the Autoconf team. + +set_cc_for_build=' +trap "exitcode=\$?; (rm -f \$tmpfiles 2>/dev/null; rmdir \$tmp 2>/dev/null) && exit \$exitcode" 0 ; +trap "rm -f \$tmpfiles 2>/dev/null; rmdir \$tmp 2>/dev/null; exit 1" 1 2 13 15 ; +: ${TMPDIR=/tmp} ; + { tmp=`(umask 077 && mktemp -d "$TMPDIR/cgXXXXXX") 2>/dev/null` && test -n "$tmp" && test -d "$tmp" ; } || + { test -n "$RANDOM" && tmp=$TMPDIR/cg$$-$RANDOM && (umask 077 && mkdir $tmp) ; } || + { tmp=$TMPDIR/cg-$$ && (umask 077 && mkdir $tmp) && echo "Warning: creating insecure temp directory" >&2 ; } || + { echo "$me: cannot create a temporary directory in $TMPDIR" >&2 ; exit 1 ; } ; +dummy=$tmp/dummy ; +tmpfiles="$dummy.c $dummy.o $dummy.rel $dummy" ; +case $CC_FOR_BUILD,$HOST_CC,$CC in + ,,) echo "int x;" > $dummy.c ; + for c in cc gcc c89 c99 ; do + if ($c -c -o $dummy.o $dummy.c) >/dev/null 2>&1 ; then + CC_FOR_BUILD="$c"; break ; + fi ; + done ; + if test x"$CC_FOR_BUILD" = x ; then + CC_FOR_BUILD=no_compiler_found ; + fi + ;; + ,,*) CC_FOR_BUILD=$CC ;; + ,*,*) CC_FOR_BUILD=$HOST_CC ;; +esac ; set_cc_for_build= ;' + +# This is needed to find uname on a Pyramid OSx when run in the BSD universe. +# (ghazi@noc.rutgers.edu 1994-08-24) +if (test -f /.attbin/uname) >/dev/null 2>&1 ; then + PATH=$PATH:/.attbin ; export PATH +fi + +UNAME_MACHINE=`(uname -m) 2>/dev/null` || UNAME_MACHINE=unknown +UNAME_RELEASE=`(uname -r) 2>/dev/null` || UNAME_RELEASE=unknown +UNAME_SYSTEM=`(uname -s) 2>/dev/null` || UNAME_SYSTEM=unknown +UNAME_VERSION=`(uname -v) 2>/dev/null` || UNAME_VERSION=unknown + +# Note: order is significant - the case branches are not exclusive. + +case "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" in + *:NetBSD:*:*) + # NetBSD (nbsd) targets should (where applicable) match one or + # more of the tuples: *-*-netbsdelf*, *-*-netbsdaout*, + # *-*-netbsdecoff* and *-*-netbsd*. For targets that recently + # switched to ELF, *-*-netbsd* would select the old + # object file format. This provides both forward + # compatibility and a consistent mechanism for selecting the + # object file format. + # + # Note: NetBSD doesn't particularly care about the vendor + # portion of the name. We always set it to "unknown". + sysctl="sysctl -n hw.machine_arch" + UNAME_MACHINE_ARCH=`(/sbin/$sysctl 2>/dev/null || \ + /usr/sbin/$sysctl 2>/dev/null || echo unknown)` + case "${UNAME_MACHINE_ARCH}" in + armeb) machine=armeb-unknown ;; + arm*) machine=arm-unknown ;; + sh3el) machine=shl-unknown ;; + sh3eb) machine=sh-unknown ;; + sh5el) machine=sh5le-unknown ;; + *) machine=${UNAME_MACHINE_ARCH}-unknown ;; + esac + # The Operating System including object format, if it has switched + # to ELF recently, or will in the future. 
+ case "${UNAME_MACHINE_ARCH}" in + arm*|i386|m68k|ns32k|sh3*|sparc|vax) + eval $set_cc_for_build + if echo __ELF__ | $CC_FOR_BUILD -E - 2>/dev/null \ + | grep -q __ELF__ + then + # Once all utilities can be ECOFF (netbsdecoff) or a.out (netbsdaout). + # Return netbsd for either. FIX? + os=netbsd + else + os=netbsdelf + fi + ;; + *) + os=netbsd + ;; + esac + # The OS release + # Debian GNU/NetBSD machines have a different userland, and + # thus, need a distinct triplet. However, they do not need + # kernel version information, so it can be replaced with a + # suitable tag, in the style of linux-gnu. + case "${UNAME_VERSION}" in + Debian*) + release='-gnu' + ;; + *) + release=`echo ${UNAME_RELEASE}|sed -e 's/[-_].*/\./'` + ;; + esac + # Since CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM: + # contains redundant information, the shorter form: + # CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM is used. + echo "${machine}-${os}${release}" + exit ;; + *:OpenBSD:*:*) + UNAME_MACHINE_ARCH=`arch | sed 's/OpenBSD.//'` + echo ${UNAME_MACHINE_ARCH}-unknown-openbsd${UNAME_RELEASE} + exit ;; + *:ekkoBSD:*:*) + echo ${UNAME_MACHINE}-unknown-ekkobsd${UNAME_RELEASE} + exit ;; + *:SolidBSD:*:*) + echo ${UNAME_MACHINE}-unknown-solidbsd${UNAME_RELEASE} + exit ;; + macppc:MirBSD:*:*) + echo powerpc-unknown-mirbsd${UNAME_RELEASE} + exit ;; + *:MirBSD:*:*) + echo ${UNAME_MACHINE}-unknown-mirbsd${UNAME_RELEASE} + exit ;; + alpha:OSF1:*:*) + case $UNAME_RELEASE in + *4.0) + UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $3}'` + ;; + *5.*) + UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $4}'` + ;; + esac + # According to Compaq, /usr/sbin/psrinfo has been available on + # OSF/1 and Tru64 systems produced since 1995. I hope that + # covers most systems running today. This code pipes the CPU + # types through head -n 1, so we only detect the type of CPU 0. + ALPHA_CPU_TYPE=`/usr/sbin/psrinfo -v | sed -n -e 's/^ The alpha \(.*\) processor.*$/\1/p' | head -n 1` + case "$ALPHA_CPU_TYPE" in + "EV4 (21064)") + UNAME_MACHINE="alpha" ;; + "EV4.5 (21064)") + UNAME_MACHINE="alpha" ;; + "LCA4 (21066/21068)") + UNAME_MACHINE="alpha" ;; + "EV5 (21164)") + UNAME_MACHINE="alphaev5" ;; + "EV5.6 (21164A)") + UNAME_MACHINE="alphaev56" ;; + "EV5.6 (21164PC)") + UNAME_MACHINE="alphapca56" ;; + "EV5.7 (21164PC)") + UNAME_MACHINE="alphapca57" ;; + "EV6 (21264)") + UNAME_MACHINE="alphaev6" ;; + "EV6.7 (21264A)") + UNAME_MACHINE="alphaev67" ;; + "EV6.8CB (21264C)") + UNAME_MACHINE="alphaev68" ;; + "EV6.8AL (21264B)") + UNAME_MACHINE="alphaev68" ;; + "EV6.8CX (21264D)") + UNAME_MACHINE="alphaev68" ;; + "EV6.9A (21264/EV69A)") + UNAME_MACHINE="alphaev69" ;; + "EV7 (21364)") + UNAME_MACHINE="alphaev7" ;; + "EV7.9 (21364A)") + UNAME_MACHINE="alphaev79" ;; + esac + # A Pn.n version is a patched version. + # A Vn.n version is a released version. + # A Tn.n version is a released field test version. + # A Xn.n version is an unreleased experimental baselevel. + # 1.2 uses "1.2" for uname -r. + echo ${UNAME_MACHINE}-dec-osf`echo ${UNAME_RELEASE} | sed -e 's/^[PVTX]//' | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz'` + # Reset EXIT trap before exiting to avoid spurious non-zero exit code. + exitcode=$? + trap '' 0 + exit $exitcode ;; + Alpha\ *:Windows_NT*:*) + # How do we know it's Interix rather than the generic POSIX subsystem? + # Should we change UNAME_MACHINE based on the output of uname instead + # of the specific Alpha model? 
+ echo alpha-pc-interix + exit ;; + 21064:Windows_NT:50:3) + echo alpha-dec-winnt3.5 + exit ;; + Amiga*:UNIX_System_V:4.0:*) + echo m68k-unknown-sysv4 + exit ;; + *:[Aa]miga[Oo][Ss]:*:*) + echo ${UNAME_MACHINE}-unknown-amigaos + exit ;; + *:[Mm]orph[Oo][Ss]:*:*) + echo ${UNAME_MACHINE}-unknown-morphos + exit ;; + *:OS/390:*:*) + echo i370-ibm-openedition + exit ;; + *:z/VM:*:*) + echo s390-ibm-zvmoe + exit ;; + *:OS400:*:*) + echo powerpc-ibm-os400 + exit ;; + arm:RISC*:1.[012]*:*|arm:riscix:1.[012]*:*) + echo arm-acorn-riscix${UNAME_RELEASE} + exit ;; + arm:riscos:*:*|arm:RISCOS:*:*) + echo arm-unknown-riscos + exit ;; + SR2?01:HI-UX/MPP:*:* | SR8000:HI-UX/MPP:*:*) + echo hppa1.1-hitachi-hiuxmpp + exit ;; + Pyramid*:OSx*:*:* | MIS*:OSx*:*:* | MIS*:SMP_DC-OSx*:*:*) + # akee@wpdis03.wpafb.af.mil (Earle F. Ake) contributed MIS and NILE. + if test "`(/bin/universe) 2>/dev/null`" = att ; then + echo pyramid-pyramid-sysv3 + else + echo pyramid-pyramid-bsd + fi + exit ;; + NILE*:*:*:dcosx) + echo pyramid-pyramid-svr4 + exit ;; + DRS?6000:unix:4.0:6*) + echo sparc-icl-nx6 + exit ;; + DRS?6000:UNIX_SV:4.2*:7* | DRS?6000:isis:4.2*:7*) + case `/usr/bin/uname -p` in + sparc) echo sparc-icl-nx7; exit ;; + esac ;; + s390x:SunOS:*:*) + echo ${UNAME_MACHINE}-ibm-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` + exit ;; + sun4H:SunOS:5.*:*) + echo sparc-hal-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` + exit ;; + sun4*:SunOS:5.*:* | tadpole*:SunOS:5.*:*) + echo sparc-sun-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` + exit ;; + i86pc:AuroraUX:5.*:* | i86xen:AuroraUX:5.*:*) + echo i386-pc-auroraux${UNAME_RELEASE} + exit ;; + i86pc:SunOS:5.*:* | i86xen:SunOS:5.*:*) + eval $set_cc_for_build + SUN_ARCH="i386" + # If there is a compiler, see if it is configured for 64-bit objects. + # Note that the Sun cc does not turn __LP64__ into 1 like gcc does. + # This test works for both compilers. + if [ "$CC_FOR_BUILD" != 'no_compiler_found' ]; then + if (echo '#ifdef __amd64'; echo IS_64BIT_ARCH; echo '#endif') | \ + (CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) | \ + grep IS_64BIT_ARCH >/dev/null + then + SUN_ARCH="x86_64" + fi + fi + echo ${SUN_ARCH}-pc-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` + exit ;; + sun4*:SunOS:6*:*) + # According to config.sub, this is the proper way to canonicalize + # SunOS6. Hard to guess exactly what SunOS6 will be like, but + # it's likely to be more like Solaris than SunOS4. + echo sparc-sun-solaris3`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` + exit ;; + sun4*:SunOS:*:*) + case "`/usr/bin/arch -k`" in + Series*|S4*) + UNAME_RELEASE=`uname -v` + ;; + esac + # Japanese Language versions have a version number like `4.1.3-JL'. + echo sparc-sun-sunos`echo ${UNAME_RELEASE}|sed -e 's/-/_/'` + exit ;; + sun3*:SunOS:*:*) + echo m68k-sun-sunos${UNAME_RELEASE} + exit ;; + sun*:*:4.2BSD:*) + UNAME_RELEASE=`(sed 1q /etc/motd | awk '{print substr($5,1,3)}') 2>/dev/null` + test "x${UNAME_RELEASE}" = "x" && UNAME_RELEASE=3 + case "`/bin/arch`" in + sun3) + echo m68k-sun-sunos${UNAME_RELEASE} + ;; + sun4) + echo sparc-sun-sunos${UNAME_RELEASE} + ;; + esac + exit ;; + aushp:SunOS:*:*) + echo sparc-auspex-sunos${UNAME_RELEASE} + exit ;; + # The situation for MiNT is a little confusing. The machine name + # can be virtually everything (everything which is not + # "atarist" or "atariste" at least should have a processor + # > m68000). The system name ranges from "MiNT" over "FreeMiNT" + # to the lowercase version "mint" (or "freemint"). 
Finally + # the system name "TOS" denotes a system which is actually not + # MiNT. But MiNT is downward compatible to TOS, so this should + # be no problem. + atarist[e]:*MiNT:*:* | atarist[e]:*mint:*:* | atarist[e]:*TOS:*:*) + echo m68k-atari-mint${UNAME_RELEASE} + exit ;; + atari*:*MiNT:*:* | atari*:*mint:*:* | atarist[e]:*TOS:*:*) + echo m68k-atari-mint${UNAME_RELEASE} + exit ;; + *falcon*:*MiNT:*:* | *falcon*:*mint:*:* | *falcon*:*TOS:*:*) + echo m68k-atari-mint${UNAME_RELEASE} + exit ;; + milan*:*MiNT:*:* | milan*:*mint:*:* | *milan*:*TOS:*:*) + echo m68k-milan-mint${UNAME_RELEASE} + exit ;; + hades*:*MiNT:*:* | hades*:*mint:*:* | *hades*:*TOS:*:*) + echo m68k-hades-mint${UNAME_RELEASE} + exit ;; + *:*MiNT:*:* | *:*mint:*:* | *:*TOS:*:*) + echo m68k-unknown-mint${UNAME_RELEASE} + exit ;; + m68k:machten:*:*) + echo m68k-apple-machten${UNAME_RELEASE} + exit ;; + powerpc:machten:*:*) + echo powerpc-apple-machten${UNAME_RELEASE} + exit ;; + RISC*:Mach:*:*) + echo mips-dec-mach_bsd4.3 + exit ;; + RISC*:ULTRIX:*:*) + echo mips-dec-ultrix${UNAME_RELEASE} + exit ;; + VAX*:ULTRIX*:*:*) + echo vax-dec-ultrix${UNAME_RELEASE} + exit ;; + 2020:CLIX:*:* | 2430:CLIX:*:*) + echo clipper-intergraph-clix${UNAME_RELEASE} + exit ;; + mips:*:*:UMIPS | mips:*:*:RISCos) + eval $set_cc_for_build + sed 's/^ //' << EOF >$dummy.c +#ifdef __cplusplus +#include /* for printf() prototype */ + int main (int argc, char *argv[]) { +#else + int main (argc, argv) int argc; char *argv[]; { +#endif + #if defined (host_mips) && defined (MIPSEB) + #if defined (SYSTYPE_SYSV) + printf ("mips-mips-riscos%ssysv\n", argv[1]); exit (0); + #endif + #if defined (SYSTYPE_SVR4) + printf ("mips-mips-riscos%ssvr4\n", argv[1]); exit (0); + #endif + #if defined (SYSTYPE_BSD43) || defined(SYSTYPE_BSD) + printf ("mips-mips-riscos%sbsd\n", argv[1]); exit (0); + #endif + #endif + exit (-1); + } +EOF + $CC_FOR_BUILD -o $dummy $dummy.c && + dummyarg=`echo "${UNAME_RELEASE}" | sed -n 's/\([0-9]*\).*/\1/p'` && + SYSTEM_NAME=`$dummy $dummyarg` && + { echo "$SYSTEM_NAME"; exit; } + echo mips-mips-riscos${UNAME_RELEASE} + exit ;; + Motorola:PowerMAX_OS:*:*) + echo powerpc-motorola-powermax + exit ;; + Motorola:*:4.3:PL8-*) + echo powerpc-harris-powermax + exit ;; + Night_Hawk:*:*:PowerMAX_OS | Synergy:PowerMAX_OS:*:*) + echo powerpc-harris-powermax + exit ;; + Night_Hawk:Power_UNIX:*:*) + echo powerpc-harris-powerunix + exit ;; + m88k:CX/UX:7*:*) + echo m88k-harris-cxux7 + exit ;; + m88k:*:4*:R4*) + echo m88k-motorola-sysv4 + exit ;; + m88k:*:3*:R3*) + echo m88k-motorola-sysv3 + exit ;; + AViiON:dgux:*:*) + # DG/UX returns AViiON for all architectures + UNAME_PROCESSOR=`/usr/bin/uname -p` + if [ $UNAME_PROCESSOR = mc88100 ] || [ $UNAME_PROCESSOR = mc88110 ] + then + if [ ${TARGET_BINARY_INTERFACE}x = m88kdguxelfx ] || \ + [ ${TARGET_BINARY_INTERFACE}x = x ] + then + echo m88k-dg-dgux${UNAME_RELEASE} + else + echo m88k-dg-dguxbcs${UNAME_RELEASE} + fi + else + echo i586-dg-dgux${UNAME_RELEASE} + fi + exit ;; + M88*:DolphinOS:*:*) # DolphinOS (SVR3) + echo m88k-dolphin-sysv3 + exit ;; + M88*:*:R3*:*) + # Delta 88k system running SVR3 + echo m88k-motorola-sysv3 + exit ;; + XD88*:*:*:*) # Tektronix XD88 system running UTekV (SVR3) + echo m88k-tektronix-sysv3 + exit ;; + Tek43[0-9][0-9]:UTek:*:*) # Tektronix 4300 system running UTek (BSD) + echo m68k-tektronix-bsd + exit ;; + *:IRIX*:*:*) + echo mips-sgi-irix`echo ${UNAME_RELEASE}|sed -e 's/-/_/g'` + exit ;; + ????????:AIX?:[12].1:2) # AIX 2.2.1 or AIX 2.1.1 is RT/PC AIX. 
+ echo romp-ibm-aix # uname -m gives an 8 hex-code CPU id + exit ;; # Note that: echo "'`uname -s`'" gives 'AIX ' + i*86:AIX:*:*) + echo i386-ibm-aix + exit ;; + ia64:AIX:*:*) + if [ -x /usr/bin/oslevel ] ; then + IBM_REV=`/usr/bin/oslevel` + else + IBM_REV=${UNAME_VERSION}.${UNAME_RELEASE} + fi + echo ${UNAME_MACHINE}-ibm-aix${IBM_REV} + exit ;; + *:AIX:2:3) + if grep bos325 /usr/include/stdio.h >/dev/null 2>&1; then + eval $set_cc_for_build + sed 's/^ //' << EOF >$dummy.c + #include + + main() + { + if (!__power_pc()) + exit(1); + puts("powerpc-ibm-aix3.2.5"); + exit(0); + } +EOF + if $CC_FOR_BUILD -o $dummy $dummy.c && SYSTEM_NAME=`$dummy` + then + echo "$SYSTEM_NAME" + else + echo rs6000-ibm-aix3.2.5 + fi + elif grep bos324 /usr/include/stdio.h >/dev/null 2>&1; then + echo rs6000-ibm-aix3.2.4 + else + echo rs6000-ibm-aix3.2 + fi + exit ;; + *:AIX:*:[4567]) + IBM_CPU_ID=`/usr/sbin/lsdev -C -c processor -S available | sed 1q | awk '{ print $1 }'` + if /usr/sbin/lsattr -El ${IBM_CPU_ID} | grep ' POWER' >/dev/null 2>&1; then + IBM_ARCH=rs6000 + else + IBM_ARCH=powerpc + fi + if [ -x /usr/bin/oslevel ] ; then + IBM_REV=`/usr/bin/oslevel` + else + IBM_REV=${UNAME_VERSION}.${UNAME_RELEASE} + fi + echo ${IBM_ARCH}-ibm-aix${IBM_REV} + exit ;; + *:AIX:*:*) + echo rs6000-ibm-aix + exit ;; + ibmrt:4.4BSD:*|romp-ibm:BSD:*) + echo romp-ibm-bsd4.4 + exit ;; + ibmrt:*BSD:*|romp-ibm:BSD:*) # covers RT/PC BSD and + echo romp-ibm-bsd${UNAME_RELEASE} # 4.3 with uname added to + exit ;; # report: romp-ibm BSD 4.3 + *:BOSX:*:*) + echo rs6000-bull-bosx + exit ;; + DPX/2?00:B.O.S.:*:*) + echo m68k-bull-sysv3 + exit ;; + 9000/[34]??:4.3bsd:1.*:*) + echo m68k-hp-bsd + exit ;; + hp300:4.4BSD:*:* | 9000/[34]??:4.3bsd:2.*:*) + echo m68k-hp-bsd4.4 + exit ;; + 9000/[34678]??:HP-UX:*:*) + HPUX_REV=`echo ${UNAME_RELEASE}|sed -e 's/[^.]*.[0B]*//'` + case "${UNAME_MACHINE}" in + 9000/31? ) HP_ARCH=m68000 ;; + 9000/[34]?? ) HP_ARCH=m68k ;; + 9000/[678][0-9][0-9]) + if [ -x /usr/bin/getconf ]; then + sc_cpu_version=`/usr/bin/getconf SC_CPU_VERSION 2>/dev/null` + sc_kernel_bits=`/usr/bin/getconf SC_KERNEL_BITS 2>/dev/null` + case "${sc_cpu_version}" in + 523) HP_ARCH="hppa1.0" ;; # CPU_PA_RISC1_0 + 528) HP_ARCH="hppa1.1" ;; # CPU_PA_RISC1_1 + 532) # CPU_PA_RISC2_0 + case "${sc_kernel_bits}" in + 32) HP_ARCH="hppa2.0n" ;; + 64) HP_ARCH="hppa2.0w" ;; + '') HP_ARCH="hppa2.0" ;; # HP-UX 10.20 + esac ;; + esac + fi + if [ "${HP_ARCH}" = "" ]; then + eval $set_cc_for_build + sed 's/^ //' << EOF >$dummy.c + + #define _HPUX_SOURCE + #include + #include + + int main () + { + #if defined(_SC_KERNEL_BITS) + long bits = sysconf(_SC_KERNEL_BITS); + #endif + long cpu = sysconf (_SC_CPU_VERSION); + + switch (cpu) + { + case CPU_PA_RISC1_0: puts ("hppa1.0"); break; + case CPU_PA_RISC1_1: puts ("hppa1.1"); break; + case CPU_PA_RISC2_0: + #if defined(_SC_KERNEL_BITS) + switch (bits) + { + case 64: puts ("hppa2.0w"); break; + case 32: puts ("hppa2.0n"); break; + default: puts ("hppa2.0"); break; + } break; + #else /* !defined(_SC_KERNEL_BITS) */ + puts ("hppa2.0"); break; + #endif + default: puts ("hppa1.0"); break; + } + exit (0); + } +EOF + (CCOPTS= $CC_FOR_BUILD -o $dummy $dummy.c 2>/dev/null) && HP_ARCH=`$dummy` + test -z "$HP_ARCH" && HP_ARCH=hppa + fi ;; + esac + if [ ${HP_ARCH} = "hppa2.0w" ] + then + eval $set_cc_for_build + + # hppa2.0w-hp-hpux* has a 64-bit kernel and a compiler generating + # 32-bit code. hppa64-hp-hpux* has the same kernel and a compiler + # generating 64-bit code. 
GNU and HP use different nomenclature: + # + # $ CC_FOR_BUILD=cc ./config.guess + # => hppa2.0w-hp-hpux11.23 + # $ CC_FOR_BUILD="cc +DA2.0w" ./config.guess + # => hppa64-hp-hpux11.23 + + if echo __LP64__ | (CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) | + grep -q __LP64__ + then + HP_ARCH="hppa2.0w" + else + HP_ARCH="hppa64" + fi + fi + echo ${HP_ARCH}-hp-hpux${HPUX_REV} + exit ;; + ia64:HP-UX:*:*) + HPUX_REV=`echo ${UNAME_RELEASE}|sed -e 's/[^.]*.[0B]*//'` + echo ia64-hp-hpux${HPUX_REV} + exit ;; + 3050*:HI-UX:*:*) + eval $set_cc_for_build + sed 's/^ //' << EOF >$dummy.c + #include + int + main () + { + long cpu = sysconf (_SC_CPU_VERSION); + /* The order matters, because CPU_IS_HP_MC68K erroneously returns + true for CPU_PA_RISC1_0. CPU_IS_PA_RISC returns correct + results, however. */ + if (CPU_IS_PA_RISC (cpu)) + { + switch (cpu) + { + case CPU_PA_RISC1_0: puts ("hppa1.0-hitachi-hiuxwe2"); break; + case CPU_PA_RISC1_1: puts ("hppa1.1-hitachi-hiuxwe2"); break; + case CPU_PA_RISC2_0: puts ("hppa2.0-hitachi-hiuxwe2"); break; + default: puts ("hppa-hitachi-hiuxwe2"); break; + } + } + else if (CPU_IS_HP_MC68K (cpu)) + puts ("m68k-hitachi-hiuxwe2"); + else puts ("unknown-hitachi-hiuxwe2"); + exit (0); + } +EOF + $CC_FOR_BUILD -o $dummy $dummy.c && SYSTEM_NAME=`$dummy` && + { echo "$SYSTEM_NAME"; exit; } + echo unknown-hitachi-hiuxwe2 + exit ;; + 9000/7??:4.3bsd:*:* | 9000/8?[79]:4.3bsd:*:* ) + echo hppa1.1-hp-bsd + exit ;; + 9000/8??:4.3bsd:*:*) + echo hppa1.0-hp-bsd + exit ;; + *9??*:MPE/iX:*:* | *3000*:MPE/iX:*:*) + echo hppa1.0-hp-mpeix + exit ;; + hp7??:OSF1:*:* | hp8?[79]:OSF1:*:* ) + echo hppa1.1-hp-osf + exit ;; + hp8??:OSF1:*:*) + echo hppa1.0-hp-osf + exit ;; + i*86:OSF1:*:*) + if [ -x /usr/sbin/sysversion ] ; then + echo ${UNAME_MACHINE}-unknown-osf1mk + else + echo ${UNAME_MACHINE}-unknown-osf1 + fi + exit ;; + parisc*:Lites*:*:*) + echo hppa1.1-hp-lites + exit ;; + C1*:ConvexOS:*:* | convex:ConvexOS:C1*:*) + echo c1-convex-bsd + exit ;; + C2*:ConvexOS:*:* | convex:ConvexOS:C2*:*) + if getsysinfo -f scalar_acc + then echo c32-convex-bsd + else echo c2-convex-bsd + fi + exit ;; + C34*:ConvexOS:*:* | convex:ConvexOS:C34*:*) + echo c34-convex-bsd + exit ;; + C38*:ConvexOS:*:* | convex:ConvexOS:C38*:*) + echo c38-convex-bsd + exit ;; + C4*:ConvexOS:*:* | convex:ConvexOS:C4*:*) + echo c4-convex-bsd + exit ;; + CRAY*Y-MP:*:*:*) + echo ymp-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' + exit ;; + CRAY*[A-Z]90:*:*:*) + echo ${UNAME_MACHINE}-cray-unicos${UNAME_RELEASE} \ + | sed -e 's/CRAY.*\([A-Z]90\)/\1/' \ + -e y/ABCDEFGHIJKLMNOPQRSTUVWXYZ/abcdefghijklmnopqrstuvwxyz/ \ + -e 's/\.[^.]*$/.X/' + exit ;; + CRAY*TS:*:*:*) + echo t90-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' + exit ;; + CRAY*T3E:*:*:*) + echo alphaev5-cray-unicosmk${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' + exit ;; + CRAY*SV1:*:*:*) + echo sv1-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' + exit ;; + *:UNICOS/mp:*:*) + echo craynv-cray-unicosmp${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' + exit ;; + F30[01]:UNIX_System_V:*:* | F700:UNIX_System_V:*:*) + FUJITSU_PROC=`uname -m | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz'` + FUJITSU_SYS=`uname -p | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/\///'` + FUJITSU_REL=`echo ${UNAME_RELEASE} | sed -e 's/ /_/'` + echo "${FUJITSU_PROC}-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}" + exit ;; + 5000:UNIX_System_V:4.*:*) + FUJITSU_SYS=`uname -p | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/\///'` + 
FUJITSU_REL=`echo ${UNAME_RELEASE} | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/ /_/'` + echo "sparc-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}" + exit ;; + i*86:BSD/386:*:* | i*86:BSD/OS:*:* | *:Ascend\ Embedded/OS:*:*) + echo ${UNAME_MACHINE}-pc-bsdi${UNAME_RELEASE} + exit ;; + sparc*:BSD/OS:*:*) + echo sparc-unknown-bsdi${UNAME_RELEASE} + exit ;; + *:BSD/OS:*:*) + echo ${UNAME_MACHINE}-unknown-bsdi${UNAME_RELEASE} + exit ;; + *:FreeBSD:*:*) + UNAME_PROCESSOR=`/usr/bin/uname -p` + case ${UNAME_PROCESSOR} in + amd64) + echo x86_64-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` ;; + *) + echo ${UNAME_PROCESSOR}-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` ;; + esac + exit ;; + i*:CYGWIN*:*) + echo ${UNAME_MACHINE}-pc-cygwin + exit ;; + *:MINGW*:*) + echo ${UNAME_MACHINE}-pc-mingw32 + exit ;; + i*:MSYS*:*) + echo ${UNAME_MACHINE}-pc-msys + exit ;; + i*:windows32*:*) + # uname -m includes "-pc" on this system. + echo ${UNAME_MACHINE}-mingw32 + exit ;; + i*:PW*:*) + echo ${UNAME_MACHINE}-pc-pw32 + exit ;; + *:Interix*:*) + case ${UNAME_MACHINE} in + x86) + echo i586-pc-interix${UNAME_RELEASE} + exit ;; + authenticamd | genuineintel | EM64T) + echo x86_64-unknown-interix${UNAME_RELEASE} + exit ;; + IA64) + echo ia64-unknown-interix${UNAME_RELEASE} + exit ;; + esac ;; + [345]86:Windows_95:* | [345]86:Windows_98:* | [345]86:Windows_NT:*) + echo i${UNAME_MACHINE}-pc-mks + exit ;; + 8664:Windows_NT:*) + echo x86_64-pc-mks + exit ;; + i*:Windows_NT*:* | Pentium*:Windows_NT*:*) + # How do we know it's Interix rather than the generic POSIX subsystem? + # It also conflicts with pre-2.0 versions of AT&T UWIN. Should we + # UNAME_MACHINE based on the output of uname instead of i386? + echo i586-pc-interix + exit ;; + i*:UWIN*:*) + echo ${UNAME_MACHINE}-pc-uwin + exit ;; + amd64:CYGWIN*:*:* | x86_64:CYGWIN*:*:*) + echo x86_64-unknown-cygwin + exit ;; + p*:CYGWIN*:*) + echo powerpcle-unknown-cygwin + exit ;; + prep*:SunOS:5.*:*) + echo powerpcle-unknown-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` + exit ;; + *:GNU:*:*) + # the GNU system + echo `echo ${UNAME_MACHINE}|sed -e 's,[-/].*$,,'`-unknown-gnu`echo ${UNAME_RELEASE}|sed -e 's,/.*$,,'` + exit ;; + *:GNU/*:*:*) + # other systems with GNU libc and userland + echo ${UNAME_MACHINE}-unknown-`echo ${UNAME_SYSTEM} | sed 's,^[^/]*/,,' | tr '[A-Z]' '[a-z]'``echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'`-gnu + exit ;; + i*86:Minix:*:*) + echo ${UNAME_MACHINE}-pc-minix + exit ;; + aarch64:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-gnu + exit ;; + aarch64_be:Linux:*:*) + UNAME_MACHINE=aarch64_be + echo ${UNAME_MACHINE}-unknown-linux-gnu + exit ;; + alpha:Linux:*:*) + case `sed -n '/^cpu model/s/^.*: \(.*\)/\1/p' < /proc/cpuinfo` in + EV5) UNAME_MACHINE=alphaev5 ;; + EV56) UNAME_MACHINE=alphaev56 ;; + PCA56) UNAME_MACHINE=alphapca56 ;; + PCA57) UNAME_MACHINE=alphapca56 ;; + EV6) UNAME_MACHINE=alphaev6 ;; + EV67) UNAME_MACHINE=alphaev67 ;; + EV68*) UNAME_MACHINE=alphaev68 ;; + esac + objdump --private-headers /bin/sh | grep -q ld.so.1 + if test "$?" 
= 0 ; then LIBC="libc1" ; else LIBC="" ; fi + echo ${UNAME_MACHINE}-unknown-linux-gnu${LIBC} + exit ;; + arm*:Linux:*:*) + eval $set_cc_for_build + if echo __ARM_EABI__ | $CC_FOR_BUILD -E - 2>/dev/null \ + | grep -q __ARM_EABI__ + then + echo ${UNAME_MACHINE}-unknown-linux-gnu + else + if echo __ARM_PCS_VFP | $CC_FOR_BUILD -E - 2>/dev/null \ + | grep -q __ARM_PCS_VFP + then + echo ${UNAME_MACHINE}-unknown-linux-gnueabi + else + echo ${UNAME_MACHINE}-unknown-linux-gnueabihf + fi + fi + exit ;; + avr32*:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-gnu + exit ;; + cris:Linux:*:*) + echo ${UNAME_MACHINE}-axis-linux-gnu + exit ;; + crisv32:Linux:*:*) + echo ${UNAME_MACHINE}-axis-linux-gnu + exit ;; + frv:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-gnu + exit ;; + hexagon:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-gnu + exit ;; + i*86:Linux:*:*) + LIBC=gnu + eval $set_cc_for_build + sed 's/^ //' << EOF >$dummy.c + #ifdef __dietlibc__ + LIBC=dietlibc + #endif +EOF + eval `$CC_FOR_BUILD -E $dummy.c 2>/dev/null | grep '^LIBC'` + echo "${UNAME_MACHINE}-pc-linux-${LIBC}" + exit ;; + ia64:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-gnu + exit ;; + m32r*:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-gnu + exit ;; + m68*:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-gnu + exit ;; + mips:Linux:*:* | mips64:Linux:*:*) + eval $set_cc_for_build + sed 's/^ //' << EOF >$dummy.c + #undef CPU + #undef ${UNAME_MACHINE} + #undef ${UNAME_MACHINE}el + #if defined(__MIPSEL__) || defined(__MIPSEL) || defined(_MIPSEL) || defined(MIPSEL) + CPU=${UNAME_MACHINE}el + #else + #if defined(__MIPSEB__) || defined(__MIPSEB) || defined(_MIPSEB) || defined(MIPSEB) + CPU=${UNAME_MACHINE} + #else + CPU= + #endif + #endif +EOF + eval `$CC_FOR_BUILD -E $dummy.c 2>/dev/null | grep '^CPU'` + test x"${CPU}" != x && { echo "${CPU}-unknown-linux-gnu"; exit; } + ;; + or32:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-gnu + exit ;; + padre:Linux:*:*) + echo sparc-unknown-linux-gnu + exit ;; + parisc64:Linux:*:* | hppa64:Linux:*:*) + echo hppa64-unknown-linux-gnu + exit ;; + parisc:Linux:*:* | hppa:Linux:*:*) + # Look for CPU level + case `grep '^cpu[^a-z]*:' /proc/cpuinfo 2>/dev/null | cut -d' ' -f2` in + PA7*) echo hppa1.1-unknown-linux-gnu ;; + PA8*) echo hppa2.0-unknown-linux-gnu ;; + *) echo hppa-unknown-linux-gnu ;; + esac + exit ;; + ppc64:Linux:*:*) + echo powerpc64-unknown-linux-gnu + exit ;; + ppc:Linux:*:*) + echo powerpc-unknown-linux-gnu + exit ;; + s390:Linux:*:* | s390x:Linux:*:*) + echo ${UNAME_MACHINE}-ibm-linux + exit ;; + sh64*:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-gnu + exit ;; + sh*:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-gnu + exit ;; + sparc:Linux:*:* | sparc64:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-gnu + exit ;; + tile*:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-gnu + exit ;; + vax:Linux:*:*) + echo ${UNAME_MACHINE}-dec-linux-gnu + exit ;; + x86_64:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-gnu + exit ;; + xtensa*:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-gnu + exit ;; + i*86:DYNIX/ptx:4*:*) + # ptx 4.0 does uname -s correctly, with DYNIX/ptx in there. + # earlier versions are messed up and put the nodename in both + # sysname and nodename. + echo i386-sequent-sysv4 + exit ;; + i*86:UNIX_SV:4.2MP:2.*) + # Unixware is an offshoot of SVR4, but it has its own version + # number series starting with 2... + # I am not positive that other SVR4 systems won't match this, + # I just have to hope. -- rms. + # Use sysv4.2uw... 
so that sysv4* matches it. + echo ${UNAME_MACHINE}-pc-sysv4.2uw${UNAME_VERSION} + exit ;; + i*86:OS/2:*:*) + # If we were able to find `uname', then EMX Unix compatibility + # is probably installed. + echo ${UNAME_MACHINE}-pc-os2-emx + exit ;; + i*86:XTS-300:*:STOP) + echo ${UNAME_MACHINE}-unknown-stop + exit ;; + i*86:atheos:*:*) + echo ${UNAME_MACHINE}-unknown-atheos + exit ;; + i*86:syllable:*:*) + echo ${UNAME_MACHINE}-pc-syllable + exit ;; + i*86:LynxOS:2.*:* | i*86:LynxOS:3.[01]*:* | i*86:LynxOS:4.[02]*:*) + echo i386-unknown-lynxos${UNAME_RELEASE} + exit ;; + i*86:*DOS:*:*) + echo ${UNAME_MACHINE}-pc-msdosdjgpp + exit ;; + i*86:*:4.*:* | i*86:SYSTEM_V:4.*:*) + UNAME_REL=`echo ${UNAME_RELEASE} | sed 's/\/MP$//'` + if grep Novell /usr/include/link.h >/dev/null 2>/dev/null; then + echo ${UNAME_MACHINE}-univel-sysv${UNAME_REL} + else + echo ${UNAME_MACHINE}-pc-sysv${UNAME_REL} + fi + exit ;; + i*86:*:5:[678]*) + # UnixWare 7.x, OpenUNIX and OpenServer 6. + case `/bin/uname -X | grep "^Machine"` in + *486*) UNAME_MACHINE=i486 ;; + *Pentium) UNAME_MACHINE=i586 ;; + *Pent*|*Celeron) UNAME_MACHINE=i686 ;; + esac + echo ${UNAME_MACHINE}-unknown-sysv${UNAME_RELEASE}${UNAME_SYSTEM}${UNAME_VERSION} + exit ;; + i*86:*:3.2:*) + if test -f /usr/options/cb.name; then + UNAME_REL=`sed -n 's/.*Version //p' /dev/null >/dev/null ; then + UNAME_REL=`(/bin/uname -X|grep Release|sed -e 's/.*= //')` + (/bin/uname -X|grep i80486 >/dev/null) && UNAME_MACHINE=i486 + (/bin/uname -X|grep '^Machine.*Pentium' >/dev/null) \ + && UNAME_MACHINE=i586 + (/bin/uname -X|grep '^Machine.*Pent *II' >/dev/null) \ + && UNAME_MACHINE=i686 + (/bin/uname -X|grep '^Machine.*Pentium Pro' >/dev/null) \ + && UNAME_MACHINE=i686 + echo ${UNAME_MACHINE}-pc-sco$UNAME_REL + else + echo ${UNAME_MACHINE}-pc-sysv32 + fi + exit ;; + pc:*:*:*) + # Left here for compatibility: + # uname -m prints for DJGPP always 'pc', but it prints nothing about + # the processor, so we play safe by assuming i586. + # Note: whatever this is, it MUST be the same as what config.sub + # prints for the "djgpp" host, or else GDB configury will decide that + # this is a cross-build. + echo i586-pc-msdosdjgpp + exit ;; + Intel:Mach:3*:*) + echo i386-pc-mach3 + exit ;; + paragon:*:*:*) + echo i860-intel-osf1 + exit ;; + i860:*:4.*:*) # i860-SVR4 + if grep Stardent /usr/include/sys/uadmin.h >/dev/null 2>&1 ; then + echo i860-stardent-sysv${UNAME_RELEASE} # Stardent Vistra i860-SVR4 + else # Add other i860-SVR4 vendors below as they are discovered. 
+ echo i860-unknown-sysv${UNAME_RELEASE} # Unknown i860-SVR4 + fi + exit ;; + mini*:CTIX:SYS*5:*) + # "miniframe" + echo m68010-convergent-sysv + exit ;; + mc68k:UNIX:SYSTEM5:3.51m) + echo m68k-convergent-sysv + exit ;; + M680?0:D-NIX:5.3:*) + echo m68k-diab-dnix + exit ;; + M68*:*:R3V[5678]*:*) + test -r /sysV68 && { echo 'm68k-motorola-sysv'; exit; } ;; + 3[345]??:*:4.0:3.0 | 3[34]??A:*:4.0:3.0 | 3[34]??,*:*:4.0:3.0 | 3[34]??/*:*:4.0:3.0 | 4400:*:4.0:3.0 | 4850:*:4.0:3.0 | SKA40:*:4.0:3.0 | SDS2:*:4.0:3.0 | SHG2:*:4.0:3.0 | S7501*:*:4.0:3.0) + OS_REL='' + test -r /etc/.relid \ + && OS_REL=.`sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid` + /bin/uname -p 2>/dev/null | grep 86 >/dev/null \ + && { echo i486-ncr-sysv4.3${OS_REL}; exit; } + /bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \ + && { echo i586-ncr-sysv4.3${OS_REL}; exit; } ;; + 3[34]??:*:4.0:* | 3[34]??,*:*:4.0:*) + /bin/uname -p 2>/dev/null | grep 86 >/dev/null \ + && { echo i486-ncr-sysv4; exit; } ;; + NCR*:*:4.2:* | MPRAS*:*:4.2:*) + OS_REL='.3' + test -r /etc/.relid \ + && OS_REL=.`sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid` + /bin/uname -p 2>/dev/null | grep 86 >/dev/null \ + && { echo i486-ncr-sysv4.3${OS_REL}; exit; } + /bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \ + && { echo i586-ncr-sysv4.3${OS_REL}; exit; } + /bin/uname -p 2>/dev/null | /bin/grep pteron >/dev/null \ + && { echo i586-ncr-sysv4.3${OS_REL}; exit; } ;; + m68*:LynxOS:2.*:* | m68*:LynxOS:3.0*:*) + echo m68k-unknown-lynxos${UNAME_RELEASE} + exit ;; + mc68030:UNIX_System_V:4.*:*) + echo m68k-atari-sysv4 + exit ;; + TSUNAMI:LynxOS:2.*:*) + echo sparc-unknown-lynxos${UNAME_RELEASE} + exit ;; + rs6000:LynxOS:2.*:*) + echo rs6000-unknown-lynxos${UNAME_RELEASE} + exit ;; + PowerPC:LynxOS:2.*:* | PowerPC:LynxOS:3.[01]*:* | PowerPC:LynxOS:4.[02]*:*) + echo powerpc-unknown-lynxos${UNAME_RELEASE} + exit ;; + SM[BE]S:UNIX_SV:*:*) + echo mips-dde-sysv${UNAME_RELEASE} + exit ;; + RM*:ReliantUNIX-*:*:*) + echo mips-sni-sysv4 + exit ;; + RM*:SINIX-*:*:*) + echo mips-sni-sysv4 + exit ;; + *:SINIX-*:*:*) + if uname -p 2>/dev/null >/dev/null ; then + UNAME_MACHINE=`(uname -p) 2>/dev/null` + echo ${UNAME_MACHINE}-sni-sysv4 + else + echo ns32k-sni-sysv + fi + exit ;; + PENTIUM:*:4.0*:*) # Unisys `ClearPath HMP IX 4000' SVR4/MP effort + # says + echo i586-unisys-sysv4 + exit ;; + *:UNIX_System_V:4*:FTX*) + # From Gerald Hewes . + # How about differentiating between stratus architectures? -djm + echo hppa1.1-stratus-sysv4 + exit ;; + *:*:*:FTX*) + # From seanf@swdc.stratus.com. + echo i860-stratus-sysv4 + exit ;; + i*86:VOS:*:*) + # From Paul.Green@stratus.com. + echo ${UNAME_MACHINE}-stratus-vos + exit ;; + *:VOS:*:*) + # From Paul.Green@stratus.com. + echo hppa1.1-stratus-vos + exit ;; + mc68*:A/UX:*:*) + echo m68k-apple-aux${UNAME_RELEASE} + exit ;; + news*:NEWS-OS:6*:*) + echo mips-sony-newsos6 + exit ;; + R[34]000:*System_V*:*:* | R4000:UNIX_SYSV:*:* | R*000:UNIX_SV:*:*) + if [ -d /usr/nec ]; then + echo mips-nec-sysv${UNAME_RELEASE} + else + echo mips-unknown-sysv${UNAME_RELEASE} + fi + exit ;; + BeBox:BeOS:*:*) # BeOS running on hardware made by Be, PPC only. + echo powerpc-be-beos + exit ;; + BeMac:BeOS:*:*) # BeOS running on Mac or Mac clone, PPC only. + echo powerpc-apple-beos + exit ;; + BePC:BeOS:*:*) # BeOS running on Intel PC compatible. + echo i586-pc-beos + exit ;; + BePC:Haiku:*:*) # Haiku running on Intel PC compatible. 
+ echo i586-pc-haiku + exit ;; + SX-4:SUPER-UX:*:*) + echo sx4-nec-superux${UNAME_RELEASE} + exit ;; + SX-5:SUPER-UX:*:*) + echo sx5-nec-superux${UNAME_RELEASE} + exit ;; + SX-6:SUPER-UX:*:*) + echo sx6-nec-superux${UNAME_RELEASE} + exit ;; + SX-7:SUPER-UX:*:*) + echo sx7-nec-superux${UNAME_RELEASE} + exit ;; + SX-8:SUPER-UX:*:*) + echo sx8-nec-superux${UNAME_RELEASE} + exit ;; + SX-8R:SUPER-UX:*:*) + echo sx8r-nec-superux${UNAME_RELEASE} + exit ;; + Power*:Rhapsody:*:*) + echo powerpc-apple-rhapsody${UNAME_RELEASE} + exit ;; + *:Rhapsody:*:*) + echo ${UNAME_MACHINE}-apple-rhapsody${UNAME_RELEASE} + exit ;; + *:Darwin:*:*) + UNAME_PROCESSOR=`uname -p` || UNAME_PROCESSOR=unknown + case $UNAME_PROCESSOR in + i386) + eval $set_cc_for_build + if [ "$CC_FOR_BUILD" != 'no_compiler_found' ]; then + if (echo '#ifdef __LP64__'; echo IS_64BIT_ARCH; echo '#endif') | \ + (CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) | \ + grep IS_64BIT_ARCH >/dev/null + then + UNAME_PROCESSOR="x86_64" + fi + fi ;; + unknown) UNAME_PROCESSOR=powerpc ;; + esac + echo ${UNAME_PROCESSOR}-apple-darwin${UNAME_RELEASE} + exit ;; + *:procnto*:*:* | *:QNX:[0123456789]*:*) + UNAME_PROCESSOR=`uname -p` + if test "$UNAME_PROCESSOR" = "x86"; then + UNAME_PROCESSOR=i386 + UNAME_MACHINE=pc + fi + echo ${UNAME_PROCESSOR}-${UNAME_MACHINE}-nto-qnx${UNAME_RELEASE} + exit ;; + *:QNX:*:4*) + echo i386-pc-qnx + exit ;; + NEO-?:NONSTOP_KERNEL:*:*) + echo neo-tandem-nsk${UNAME_RELEASE} + exit ;; + NSE-?:NONSTOP_KERNEL:*:*) + echo nse-tandem-nsk${UNAME_RELEASE} + exit ;; + NSR-?:NONSTOP_KERNEL:*:*) + echo nsr-tandem-nsk${UNAME_RELEASE} + exit ;; + *:NonStop-UX:*:*) + echo mips-compaq-nonstopux + exit ;; + BS2000:POSIX*:*:*) + echo bs2000-siemens-sysv + exit ;; + DS/*:UNIX_System_V:*:*) + echo ${UNAME_MACHINE}-${UNAME_SYSTEM}-${UNAME_RELEASE} + exit ;; + *:Plan9:*:*) + # "uname -m" is not consistent, so use $cputype instead. 386 + # is converted to i386 for consistency with other x86 + # operating systems. + if test "$cputype" = "386"; then + UNAME_MACHINE=i386 + else + UNAME_MACHINE="$cputype" + fi + echo ${UNAME_MACHINE}-unknown-plan9 + exit ;; + *:TOPS-10:*:*) + echo pdp10-unknown-tops10 + exit ;; + *:TENEX:*:*) + echo pdp10-unknown-tenex + exit ;; + KS10:TOPS-20:*:* | KL10:TOPS-20:*:* | TYPE4:TOPS-20:*:*) + echo pdp10-dec-tops20 + exit ;; + XKL-1:TOPS-20:*:* | TYPE5:TOPS-20:*:*) + echo pdp10-xkl-tops20 + exit ;; + *:TOPS-20:*:*) + echo pdp10-unknown-tops20 + exit ;; + *:ITS:*:*) + echo pdp10-unknown-its + exit ;; + SEI:*:*:SEIUX) + echo mips-sei-seiux${UNAME_RELEASE} + exit ;; + *:DragonFly:*:*) + echo ${UNAME_MACHINE}-unknown-dragonfly`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` + exit ;; + *:*VMS:*:*) + UNAME_MACHINE=`(uname -p) 2>/dev/null` + case "${UNAME_MACHINE}" in + A*) echo alpha-dec-vms ; exit ;; + I*) echo ia64-dec-vms ; exit ;; + V*) echo vax-dec-vms ; exit ;; + esac ;; + *:XENIX:*:SysV) + echo i386-pc-xenix + exit ;; + i*86:skyos:*:*) + echo ${UNAME_MACHINE}-pc-skyos`echo ${UNAME_RELEASE}` | sed -e 's/ .*$//' + exit ;; + i*86:rdos:*:*) + echo ${UNAME_MACHINE}-pc-rdos + exit ;; + i*86:AROS:*:*) + echo ${UNAME_MACHINE}-pc-aros + exit ;; + x86_64:VMkernel:*:*) + echo ${UNAME_MACHINE}-unknown-esx + exit ;; +esac + +#echo '(No uname command or uname output not recognized.)' 1>&2 +#echo "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" 1>&2 + +eval $set_cc_for_build +cat >$dummy.c < +# include +#endif +main () +{ +#if defined (sony) +#if defined (MIPSEB) + /* BFD wants "bsd" instead of "newsos". 
Perhaps BFD should be changed, + I don't know.... */ + printf ("mips-sony-bsd\n"); exit (0); +#else +#include + printf ("m68k-sony-newsos%s\n", +#ifdef NEWSOS4 + "4" +#else + "" +#endif + ); exit (0); +#endif +#endif + +#if defined (__arm) && defined (__acorn) && defined (__unix) + printf ("arm-acorn-riscix\n"); exit (0); +#endif + +#if defined (hp300) && !defined (hpux) + printf ("m68k-hp-bsd\n"); exit (0); +#endif + +#if defined (NeXT) +#if !defined (__ARCHITECTURE__) +#define __ARCHITECTURE__ "m68k" +#endif + int version; + version=`(hostinfo | sed -n 's/.*NeXT Mach \([0-9]*\).*/\1/p') 2>/dev/null`; + if (version < 4) + printf ("%s-next-nextstep%d\n", __ARCHITECTURE__, version); + else + printf ("%s-next-openstep%d\n", __ARCHITECTURE__, version); + exit (0); +#endif + +#if defined (MULTIMAX) || defined (n16) +#if defined (UMAXV) + printf ("ns32k-encore-sysv\n"); exit (0); +#else +#if defined (CMU) + printf ("ns32k-encore-mach\n"); exit (0); +#else + printf ("ns32k-encore-bsd\n"); exit (0); +#endif +#endif +#endif + +#if defined (__386BSD__) + printf ("i386-pc-bsd\n"); exit (0); +#endif + +#if defined (sequent) +#if defined (i386) + printf ("i386-sequent-dynix\n"); exit (0); +#endif +#if defined (ns32000) + printf ("ns32k-sequent-dynix\n"); exit (0); +#endif +#endif + +#if defined (_SEQUENT_) + struct utsname un; + + uname(&un); + + if (strncmp(un.version, "V2", 2) == 0) { + printf ("i386-sequent-ptx2\n"); exit (0); + } + if (strncmp(un.version, "V1", 2) == 0) { /* XXX is V1 correct? */ + printf ("i386-sequent-ptx1\n"); exit (0); + } + printf ("i386-sequent-ptx\n"); exit (0); + +#endif + +#if defined (vax) +# if !defined (ultrix) +# include +# if defined (BSD) +# if BSD == 43 + printf ("vax-dec-bsd4.3\n"); exit (0); +# else +# if BSD == 199006 + printf ("vax-dec-bsd4.3reno\n"); exit (0); +# else + printf ("vax-dec-bsd\n"); exit (0); +# endif +# endif +# else + printf ("vax-dec-bsd\n"); exit (0); +# endif +# else + printf ("vax-dec-ultrix\n"); exit (0); +# endif +#endif + +#if defined (alliant) && defined (i860) + printf ("i860-alliant-bsd\n"); exit (0); +#endif + + exit (1); +} +EOF + +$CC_FOR_BUILD -o $dummy $dummy.c 2>/dev/null && SYSTEM_NAME=`$dummy` && + { echo "$SYSTEM_NAME"; exit; } + +# Apollos put the system type in the environment. + +test -d /usr/apollo && { echo ${ISP}-apollo-${SYSTYPE}; exit; } + +# Convex versions that predate uname can use getsysinfo(1) + +if [ -x /usr/convex/getsysinfo ] +then + case `getsysinfo -f cpu_type` in + c1*) + echo c1-convex-bsd + exit ;; + c2*) + if getsysinfo -f scalar_acc + then echo c32-convex-bsd + else echo c2-convex-bsd + fi + exit ;; + c34*) + echo c34-convex-bsd + exit ;; + c38*) + echo c38-convex-bsd + exit ;; + c4*) + echo c4-convex-bsd + exit ;; + esac +fi + +cat >&2 < in order to provide the needed +information to handle your system. 
+ +config.guess timestamp = $timestamp + +uname -m = `(uname -m) 2>/dev/null || echo unknown` +uname -r = `(uname -r) 2>/dev/null || echo unknown` +uname -s = `(uname -s) 2>/dev/null || echo unknown` +uname -v = `(uname -v) 2>/dev/null || echo unknown` + +/usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null` +/bin/uname -X = `(/bin/uname -X) 2>/dev/null` + +hostinfo = `(hostinfo) 2>/dev/null` +/bin/universe = `(/bin/universe) 2>/dev/null` +/usr/bin/arch -k = `(/usr/bin/arch -k) 2>/dev/null` +/bin/arch = `(/bin/arch) 2>/dev/null` +/usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null` +/usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null` + +UNAME_MACHINE = ${UNAME_MACHINE} +UNAME_RELEASE = ${UNAME_RELEASE} +UNAME_SYSTEM = ${UNAME_SYSTEM} +UNAME_VERSION = ${UNAME_VERSION} +EOF + +exit 1 + +# Local variables: +# eval: (add-hook 'write-file-hooks 'time-stamp) +# time-stamp-start: "timestamp='" +# time-stamp-format: "%:y-%02m-%02d" +# time-stamp-end: "'" +# End: diff --git a/backtrace-sys/src/libbacktrace/config.h.in b/backtrace-sys/src/libbacktrace/config.h.in new file mode 100644 index 000000000..76baa0945 --- /dev/null +++ b/backtrace-sys/src/libbacktrace/config.h.in @@ -0,0 +1,149 @@ +/* config.h.in. Generated from configure.ac by autoheader. */ + +/* ELF size: 32 or 64 */ +#undef BACKTRACE_ELF_SIZE + +/* XCOFF size: 32 or 64 */ +#undef BACKTRACE_XCOFF_SIZE + +/* Define to 1 if you have the __atomic functions */ +#undef HAVE_ATOMIC_FUNCTIONS + +/* Define to 1 if you have the `clock_gettime' function. */ +#undef HAVE_CLOCK_GETTIME + +/* Define to 1 if you have the declaration of `strnlen', and to 0 if you + don't. */ +#undef HAVE_DECL_STRNLEN + +/* Define to 1 if you have the header file. */ +#undef HAVE_DLFCN_H + +/* Define if dl_iterate_phdr is available. */ +#undef HAVE_DL_ITERATE_PHDR + +/* Define to 1 if you have the fcntl function */ +#undef HAVE_FCNTL + +/* Define if getexecname is available. */ +#undef HAVE_GETEXECNAME + +/* Define if _Unwind_GetIPInfo is available. */ +#undef HAVE_GETIPINFO + +/* Define to 1 if you have the header file. */ +#undef HAVE_INTTYPES_H + +/* Define to 1 if you have the `z' library (-lz). */ +#undef HAVE_LIBZ + +/* Define to 1 if you have the header file. */ +#undef HAVE_LINK_H + +/* Define if AIX loadquery is available. */ +#undef HAVE_LOADQUERY + +/* Define to 1 if you have the `lstat' function. */ +#undef HAVE_LSTAT + +/* Define to 1 if you have the header file. */ +#undef HAVE_MEMORY_H + +/* Define to 1 if you have the `readlink' function. */ +#undef HAVE_READLINK + +/* Define to 1 if you have the header file. */ +#undef HAVE_STDINT_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_STDLIB_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_STRINGS_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_STRING_H + +/* Define to 1 if you have the __sync functions */ +#undef HAVE_SYNC_FUNCTIONS + +/* Define to 1 if you have the header file. */ +#undef HAVE_SYS_LDR_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_SYS_MMAN_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_SYS_STAT_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_SYS_TYPES_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_UNISTD_H + +/* Define if -lz is available. */ +#undef HAVE_ZLIB + +/* Define to the sub-directory in which libtool stores uninstalled libraries. + */ +#undef LT_OBJDIR + +/* Define to the address where bug reports for this package should be sent. 
*/ +#undef PACKAGE_BUGREPORT + +/* Define to the full name of this package. */ +#undef PACKAGE_NAME + +/* Define to the full name and version of this package. */ +#undef PACKAGE_STRING + +/* Define to the one symbol short name of this package. */ +#undef PACKAGE_TARNAME + +/* Define to the home page for this package. */ +#undef PACKAGE_URL + +/* Define to the version of this package. */ +#undef PACKAGE_VERSION + +/* Define to 1 if you have the ANSI C header files. */ +#undef STDC_HEADERS + +/* Enable extensions on AIX 3, Interix. */ +#ifndef _ALL_SOURCE +# undef _ALL_SOURCE +#endif +/* Enable GNU extensions on systems that have them. */ +#ifndef _GNU_SOURCE +# undef _GNU_SOURCE +#endif +/* Enable threading extensions on Solaris. */ +#ifndef _POSIX_PTHREAD_SEMANTICS +# undef _POSIX_PTHREAD_SEMANTICS +#endif +/* Enable extensions on HP NonStop. */ +#ifndef _TANDEM_SOURCE +# undef _TANDEM_SOURCE +#endif +/* Enable general extensions on Solaris. */ +#ifndef __EXTENSIONS__ +# undef __EXTENSIONS__ +#endif + + +/* Number of bits in a file offset, on hosts where this is settable. */ +#undef _FILE_OFFSET_BITS + +/* Define for large files, on AIX-style hosts. */ +#undef _LARGE_FILES + +/* Define to 1 if on MINIX. */ +#undef _MINIX + +/* Define to 2 if the system does not provide POSIX.1 features except with + this defined. */ +#undef _POSIX_1_SOURCE + +/* Define to 1 if you need to in order for `stat' and other things to work. */ +#undef _POSIX_SOURCE diff --git a/backtrace-sys/src/libbacktrace/config.sub b/backtrace-sys/src/libbacktrace/config.sub new file mode 100755 index 000000000..63634d9a6 --- /dev/null +++ b/backtrace-sys/src/libbacktrace/config.sub @@ -0,0 +1,1794 @@ +#! /bin/sh +# Configuration validation subroutine script. +# Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, +# 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +# 2011, 2012 Free Software Foundation, Inc. + +timestamp='2012-02-10' + +# This file is (in principle) common to ALL GNU software. +# The presence of a machine in this file suggests that SOME GNU software +# can handle that machine. It does not imply ALL GNU software can. +# +# This file is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, see . +# +# As a special exception to the GNU General Public License, if you +# distribute this file as part of a program that contains a +# configuration script generated by Autoconf, you may include it under +# the same distribution terms that you use for the rest of that program. + + +# Please send patches to . Submit a context +# diff and a properly formatted GNU ChangeLog entry. +# +# Configuration subroutine to validate and canonicalize a configuration type. +# Supply the specified configuration type as an argument. +# If it is invalid, we print an error message on stderr and exit with code 1. +# Otherwise, we print the canonical config type on stdout and succeed. 
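+#
+# Illustrative examples of that contract (editorial sketch, not upstream
+# text; the exact output depends on the alias and default-OS tables that
+# follow further below):
+#   ./config.sub amd64-linux        =>  x86_64-pc-linux-gnu
+#   ./config.sub decstation-3100    =>  mips-dec-ultrix4.2
+#   ./config.sub sun4               =>  sparc-sun-sunos4.1.1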
+ +# You can get the latest version of this script from: +# http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.sub;hb=HEAD + +# This file is supposed to be the same for all GNU packages +# and recognize all the CPU types, system types and aliases +# that are meaningful with *any* GNU software. +# Each package is responsible for reporting which valid configurations +# it does not support. The user should be able to distinguish +# a failure to support a valid configuration from a meaningless +# configuration. + +# The goal of this file is to map all the various variations of a given +# machine specification into a single specification in the form: +# CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM +# or in some cases, the newer four-part form: +# CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM +# It is wrong to echo any other type of specification. + +me=`echo "$0" | sed -e 's,.*/,,'` + +usage="\ +Usage: $0 [OPTION] CPU-MFR-OPSYS + $0 [OPTION] ALIAS + +Canonicalize a configuration name. + +Operation modes: + -h, --help print this help, then exit + -t, --time-stamp print date of last modification, then exit + -v, --version print version number, then exit + +Report bugs and patches to ." + +version="\ +GNU config.sub ($timestamp) + +Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, +2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012 +Free Software Foundation, Inc. + +This is free software; see the source for copying conditions. There is NO +warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE." + +help=" +Try \`$me --help' for more information." + +# Parse command line +while test $# -gt 0 ; do + case $1 in + --time-stamp | --time* | -t ) + echo "$timestamp" ; exit ;; + --version | -v ) + echo "$version" ; exit ;; + --help | --h* | -h ) + echo "$usage"; exit ;; + -- ) # Stop option processing + shift; break ;; + - ) # Use stdin as input. + break ;; + -* ) + echo "$me: invalid option $1$help" + exit 1 ;; + + *local*) + # First pass through any local machine types. + echo $1 + exit ;; + + * ) + break ;; + esac +done + +case $# in + 0) echo "$me: missing argument$help" >&2 + exit 1;; + 1) ;; + *) echo "$me: too many arguments$help" >&2 + exit 1;; +esac + +# Separate what the user gave into CPU-COMPANY and OS or KERNEL-OS (if any). +# Here we must recognize all the valid KERNEL-OS combinations. +maybe_os=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\2/'` +case $maybe_os in + nto-qnx* | linux-gnu* | linux-android* | linux-dietlibc | linux-newlib* | \ + linux-uclibc* | uclinux-uclibc* | uclinux-gnu* | kfreebsd*-gnu* | \ + knetbsd*-gnu* | netbsd*-gnu* | \ + kopensolaris*-gnu* | \ + storm-chaos* | os2-emx* | rtmk-nova*) + os=-$maybe_os + basic_machine=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\1/'` + ;; + android-linux) + os=-linux-android + basic_machine=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\1/'`-unknown + ;; + *) + basic_machine=`echo $1 | sed 's/-[^-]*$//'` + if [ $basic_machine != $1 ] + then os=`echo $1 | sed 's/.*-/-/'` + else os=; fi + ;; +esac + +### Let's recognize common machines as not being operating systems so +### that things like config.sub decstation-3100 work. We also +### recognize some manufacturers as not being operating systems, so we +### can provide default operating systems below. +case $os in + -sun*os*) + # Prevent following clause from handling this invalid input. 
+ ;; + -dec* | -mips* | -sequent* | -encore* | -pc532* | -sgi* | -sony* | \ + -att* | -7300* | -3300* | -delta* | -motorola* | -sun[234]* | \ + -unicom* | -ibm* | -next | -hp | -isi* | -apollo | -altos* | \ + -convergent* | -ncr* | -news | -32* | -3600* | -3100* | -hitachi* |\ + -c[123]* | -convex* | -sun | -crds | -omron* | -dg | -ultra | -tti* | \ + -harris | -dolphin | -highlevel | -gould | -cbm | -ns | -masscomp | \ + -apple | -axis | -knuth | -cray | -microblaze) + os= + basic_machine=$1 + ;; + -bluegene*) + os=-cnk + ;; + -sim | -cisco | -oki | -wec | -winbond) + os= + basic_machine=$1 + ;; + -scout) + ;; + -wrs) + os=-vxworks + basic_machine=$1 + ;; + -chorusos*) + os=-chorusos + basic_machine=$1 + ;; + -chorusrdb) + os=-chorusrdb + basic_machine=$1 + ;; + -hiux*) + os=-hiuxwe2 + ;; + -sco6) + os=-sco5v6 + basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` + ;; + -sco5) + os=-sco3.2v5 + basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` + ;; + -sco4) + os=-sco3.2v4 + basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` + ;; + -sco3.2.[4-9]*) + os=`echo $os | sed -e 's/sco3.2./sco3.2v/'` + basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` + ;; + -sco3.2v[4-9]*) + # Don't forget version if it is 3.2v4 or newer. + basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` + ;; + -sco5v6*) + # Don't forget version if it is 3.2v4 or newer. + basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` + ;; + -sco*) + os=-sco3.2v2 + basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` + ;; + -udk*) + basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` + ;; + -isc) + os=-isc2.2 + basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` + ;; + -clix*) + basic_machine=clipper-intergraph + ;; + -isc*) + basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` + ;; + -lynx*) + os=-lynxos + ;; + -ptx*) + basic_machine=`echo $1 | sed -e 's/86-.*/86-sequent/'` + ;; + -windowsnt*) + os=`echo $os | sed -e 's/windowsnt/winnt/'` + ;; + -psos*) + os=-psos + ;; + -mint | -mint[0-9]*) + basic_machine=m68k-atari + os=-mint + ;; +esac + +# Decode aliases for certain CPU-COMPANY combinations. +case $basic_machine in + # Recognize the basic CPU types without company name. + # Some are omitted here because they have special meanings below. 
+ 1750a | 580 \ + | a29k \ + | aarch64 | aarch64_be \ + | alpha | alphaev[4-8] | alphaev56 | alphaev6[78] | alphapca5[67] \ + | alpha64 | alpha64ev[4-8] | alpha64ev56 | alpha64ev6[78] | alpha64pca5[67] \ + | am33_2.0 \ + | arc | arm | arm[bl]e | arme[lb] | armv[2345] | armv[345][lb] | avr | avr32 \ + | be32 | be64 \ + | bfin \ + | c4x | clipper \ + | d10v | d30v | dlx | dsp16xx \ + | epiphany \ + | fido | fr30 | frv \ + | h8300 | h8500 | hppa | hppa1.[01] | hppa2.0 | hppa2.0[nw] | hppa64 \ + | hexagon \ + | i370 | i860 | i960 | ia16 | ia64 \ + | ip2k | iq2000 \ + | le32 | le64 \ + | lm32 \ + | m32c | m32r | m32rle | m68000 | m68k | m88k \ + | maxq | mb | microblaze | mcore | mep | metag \ + | mips | mipsbe | mipseb | mipsel | mipsle \ + | mips16 \ + | mips64 | mips64el \ + | mips64octeon | mips64octeonel \ + | mips64orion | mips64orionel \ + | mips64r5900 | mips64r5900el \ + | mips64vr | mips64vrel \ + | mips64vr4100 | mips64vr4100el \ + | mips64vr4300 | mips64vr4300el \ + | mips64vr5000 | mips64vr5000el \ + | mips64vr5900 | mips64vr5900el \ + | mipsisa32 | mipsisa32el \ + | mipsisa32r2 | mipsisa32r2el \ + | mipsisa64 | mipsisa64el \ + | mipsisa64r2 | mipsisa64r2el \ + | mipsisa64sb1 | mipsisa64sb1el \ + | mipsisa64sr71k | mipsisa64sr71kel \ + | mipstx39 | mipstx39el \ + | mn10200 | mn10300 \ + | moxie \ + | mt \ + | msp430 \ + | nds32 | nds32le | nds32be \ + | nios | nios2 \ + | ns16k | ns32k \ + | open8 \ + | or32 \ + | pdp10 | pdp11 | pj | pjl \ + | powerpc | powerpc64 | powerpc64le | powerpcle \ + | pru \ + | pyramid \ + | rl78 | rx \ + | score \ + | sh | sh[1234] | sh[24]a | sh[24]aeb | sh[23]e | sh[34]eb | sheb | shbe | shle | sh[1234]le | sh3ele \ + | sh64 | sh64le \ + | sparc | sparc64 | sparc64b | sparc64v | sparc86x | sparclet | sparclite \ + | sparcv8 | sparcv9 | sparcv9b | sparcv9v \ + | spu \ + | tahoe | tic4x | tic54x | tic55x | tic6x | tic80 | tron \ + | ubicom32 \ + | v850 | v850e | v850e1 | v850e2 | v850es | v850e2v3 \ + | we32k \ + | x86 | xc16x | xstormy16 | xtensa \ + | z8k | z80) + basic_machine=$basic_machine-unknown + ;; + c54x) + basic_machine=tic54x-unknown + ;; + c55x) + basic_machine=tic55x-unknown + ;; + c6x) + basic_machine=tic6x-unknown + ;; + m6811 | m68hc11 | m6812 | m68hc12 | m68hcs12x | picochip) + basic_machine=$basic_machine-unknown + os=-none + ;; + m88110 | m680[12346]0 | m683?2 | m68360 | m5200 | v70 | w65 | z8k) + ;; + ms1) + basic_machine=mt-unknown + ;; + + strongarm | thumb | xscale) + basic_machine=arm-unknown + ;; + xgate) + basic_machine=$basic_machine-unknown + os=-none + ;; + xscaleeb) + basic_machine=armeb-unknown + ;; + + xscaleel) + basic_machine=armel-unknown + ;; + + # We use `pc' rather than `unknown' + # because (1) that's what they normally are, and + # (2) the word "unknown" tends to confuse beginning users. + i*86 | x86_64) + basic_machine=$basic_machine-pc + ;; + # Object if more than one company name word. + *-*-*) + echo Invalid configuration \`$1\': machine \`$basic_machine\' not recognized 1>&2 + exit 1 + ;; + # Recognize the basic CPU types with company name. 
+ 580-* \ + | a29k-* \ + | aarch64-* | aarch64_be-* \ + | alpha-* | alphaev[4-8]-* | alphaev56-* | alphaev6[78]-* \ + | alpha64-* | alpha64ev[4-8]-* | alpha64ev56-* | alpha64ev6[78]-* \ + | alphapca5[67]-* | alpha64pca5[67]-* | arc-* \ + | arm-* | armbe-* | armle-* | armeb-* | armv*-* \ + | avr-* | avr32-* \ + | be32-* | be64-* \ + | bfin-* | bs2000-* \ + | c[123]* | c30-* | [cjt]90-* | c4x-* \ + | clipper-* | craynv-* | cydra-* \ + | d10v-* | d30v-* | dlx-* \ + | elxsi-* \ + | f30[01]-* | f700-* | fido-* | fr30-* | frv-* | fx80-* \ + | h8300-* | h8500-* \ + | hppa-* | hppa1.[01]-* | hppa2.0-* | hppa2.0[nw]-* | hppa64-* \ + | hexagon-* \ + | i*86-* | i860-* | i960-* | ia16-* | ia64-* \ + | ip2k-* | iq2000-* \ + | le32-* | le64-* \ + | lm32-* \ + | m32c-* | m32r-* | m32rle-* \ + | m68000-* | m680[012346]0-* | m68360-* | m683?2-* | m68k-* \ + | m88110-* | m88k-* | maxq-* | mcore-* | metag-* | microblaze-* \ + | mips-* | mipsbe-* | mipseb-* | mipsel-* | mipsle-* \ + | mips16-* \ + | mips64-* | mips64el-* \ + | mips64octeon-* | mips64octeonel-* \ + | mips64orion-* | mips64orionel-* \ + | mips64r5900-* | mips64r5900el-* \ + | mips64vr-* | mips64vrel-* \ + | mips64vr4100-* | mips64vr4100el-* \ + | mips64vr4300-* | mips64vr4300el-* \ + | mips64vr5000-* | mips64vr5000el-* \ + | mips64vr5900-* | mips64vr5900el-* \ + | mipsisa32-* | mipsisa32el-* \ + | mipsisa32r2-* | mipsisa32r2el-* \ + | mipsisa64-* | mipsisa64el-* \ + | mipsisa64r2-* | mipsisa64r2el-* \ + | mipsisa64sb1-* | mipsisa64sb1el-* \ + | mipsisa64sr71k-* | mipsisa64sr71kel-* \ + | mipstx39-* | mipstx39el-* \ + | mmix-* \ + | mt-* \ + | msp430-* \ + | nds32-* | nds32le-* | nds32be-* \ + | nios-* | nios2-* \ + | none-* | np1-* | ns16k-* | ns32k-* \ + | open8-* \ + | orion-* \ + | pdp10-* | pdp11-* | pj-* | pjl-* | pn-* | power-* \ + | powerpc-* | powerpc64-* | powerpc64le-* | powerpcle-* \ + | pru-* \ + | pyramid-* \ + | rl78-* | romp-* | rs6000-* | rx-* \ + | sh-* | sh[1234]-* | sh[24]a-* | sh[24]aeb-* | sh[23]e-* | sh[34]eb-* | sheb-* | shbe-* \ + | shle-* | sh[1234]le-* | sh3ele-* | sh64-* | sh64le-* \ + | sparc-* | sparc64-* | sparc64b-* | sparc64v-* | sparc86x-* | sparclet-* \ + | sparclite-* \ + | sparcv8-* | sparcv9-* | sparcv9b-* | sparcv9v-* | sv1-* | sx?-* \ + | tahoe-* \ + | tic30-* | tic4x-* | tic54x-* | tic55x-* | tic6x-* | tic80-* \ + | tile*-* \ + | tron-* \ + | ubicom32-* \ + | v850-* | v850e-* | v850e1-* | v850es-* | v850e2-* | v850e2v3-* \ + | vax-* \ + | we32k-* \ + | x86-* | x86_64-* | xc16x-* | xps100-* \ + | xstormy16-* | xtensa*-* \ + | ymp-* \ + | z8k-* | z80-*) + ;; + # Recognize the basic CPU types without company name, with glob match. + xtensa*) + basic_machine=$basic_machine-unknown + ;; + # Recognize the various machine names and aliases which stand + # for a CPU type and a company and sometimes even an OS. 
+ 386bsd) + basic_machine=i386-unknown + os=-bsd + ;; + 3b1 | 7300 | 7300-att | att-7300 | pc7300 | safari | unixpc) + basic_machine=m68000-att + ;; + 3b*) + basic_machine=we32k-att + ;; + a29khif) + basic_machine=a29k-amd + os=-udi + ;; + abacus) + basic_machine=abacus-unknown + ;; + adobe68k) + basic_machine=m68010-adobe + os=-scout + ;; + alliant | fx80) + basic_machine=fx80-alliant + ;; + altos | altos3068) + basic_machine=m68k-altos + ;; + am29k) + basic_machine=a29k-none + os=-bsd + ;; + amd64) + basic_machine=x86_64-pc + ;; + amd64-*) + basic_machine=x86_64-`echo $basic_machine | sed 's/^[^-]*-//'` + ;; + amdahl) + basic_machine=580-amdahl + os=-sysv + ;; + amiga | amiga-*) + basic_machine=m68k-unknown + ;; + amigaos | amigados) + basic_machine=m68k-unknown + os=-amigaos + ;; + amigaunix | amix) + basic_machine=m68k-unknown + os=-sysv4 + ;; + apollo68) + basic_machine=m68k-apollo + os=-sysv + ;; + apollo68bsd) + basic_machine=m68k-apollo + os=-bsd + ;; + aros) + basic_machine=i386-pc + os=-aros + ;; + aux) + basic_machine=m68k-apple + os=-aux + ;; + balance) + basic_machine=ns32k-sequent + os=-dynix + ;; + blackfin) + basic_machine=bfin-unknown + os=-linux + ;; + blackfin-*) + basic_machine=bfin-`echo $basic_machine | sed 's/^[^-]*-//'` + os=-linux + ;; + bluegene*) + basic_machine=powerpc-ibm + os=-cnk + ;; + c54x-*) + basic_machine=tic54x-`echo $basic_machine | sed 's/^[^-]*-//'` + ;; + c55x-*) + basic_machine=tic55x-`echo $basic_machine | sed 's/^[^-]*-//'` + ;; + c6x-*) + basic_machine=tic6x-`echo $basic_machine | sed 's/^[^-]*-//'` + ;; + c90) + basic_machine=c90-cray + os=-unicos + ;; + cegcc) + basic_machine=arm-unknown + os=-cegcc + ;; + convex-c1) + basic_machine=c1-convex + os=-bsd + ;; + convex-c2) + basic_machine=c2-convex + os=-bsd + ;; + convex-c32) + basic_machine=c32-convex + os=-bsd + ;; + convex-c34) + basic_machine=c34-convex + os=-bsd + ;; + convex-c38) + basic_machine=c38-convex + os=-bsd + ;; + cray | j90) + basic_machine=j90-cray + os=-unicos + ;; + craynv) + basic_machine=craynv-cray + os=-unicosmp + ;; + cr16 | cr16-*) + basic_machine=cr16-unknown + os=-elf + ;; + crds | unos) + basic_machine=m68k-crds + ;; + crisv32 | crisv32-* | etraxfs*) + basic_machine=crisv32-axis + ;; + cris | cris-* | etrax*) + basic_machine=cris-axis + ;; + crx) + basic_machine=crx-unknown + os=-elf + ;; + da30 | da30-*) + basic_machine=m68k-da30 + ;; + decstation | decstation-3100 | pmax | pmax-* | pmin | dec3100 | decstatn) + basic_machine=mips-dec + ;; + decsystem10* | dec10*) + basic_machine=pdp10-dec + os=-tops10 + ;; + decsystem20* | dec20*) + basic_machine=pdp10-dec + os=-tops20 + ;; + delta | 3300 | motorola-3300 | motorola-delta \ + | 3300-motorola | delta-motorola) + basic_machine=m68k-motorola + ;; + delta88) + basic_machine=m88k-motorola + os=-sysv3 + ;; + dicos) + basic_machine=i686-pc + os=-dicos + ;; + djgpp) + basic_machine=i586-pc + os=-msdosdjgpp + ;; + dpx20 | dpx20-*) + basic_machine=rs6000-bull + os=-bosx + ;; + dpx2* | dpx2*-bull) + basic_machine=m68k-bull + os=-sysv3 + ;; + e500v[12]) + basic_machine=powerpc-unknown + os=$os"spe" + ;; + e500v[12]-*) + basic_machine=powerpc-`echo $basic_machine | sed 's/^[^-]*-//'` + os=$os"spe" + ;; + ebmon29k) + basic_machine=a29k-amd + os=-ebmon + ;; + elxsi) + basic_machine=elxsi-elxsi + os=-bsd + ;; + encore | umax | mmax) + basic_machine=ns32k-encore + ;; + es1800 | OSE68k | ose68k | ose | OSE) + basic_machine=m68k-ericsson + os=-ose + ;; + fx2800) + basic_machine=i860-alliant + ;; + genix) + basic_machine=ns32k-ns + ;; + 
gmicro) + basic_machine=tron-gmicro + os=-sysv + ;; + go32) + basic_machine=i386-pc + os=-go32 + ;; + h3050r* | hiux*) + basic_machine=hppa1.1-hitachi + os=-hiuxwe2 + ;; + h8300hms) + basic_machine=h8300-hitachi + os=-hms + ;; + h8300xray) + basic_machine=h8300-hitachi + os=-xray + ;; + h8500hms) + basic_machine=h8500-hitachi + os=-hms + ;; + harris) + basic_machine=m88k-harris + os=-sysv3 + ;; + hp300-*) + basic_machine=m68k-hp + ;; + hp300bsd) + basic_machine=m68k-hp + os=-bsd + ;; + hp300hpux) + basic_machine=m68k-hp + os=-hpux + ;; + hp3k9[0-9][0-9] | hp9[0-9][0-9]) + basic_machine=hppa1.0-hp + ;; + hp9k2[0-9][0-9] | hp9k31[0-9]) + basic_machine=m68000-hp + ;; + hp9k3[2-9][0-9]) + basic_machine=m68k-hp + ;; + hp9k6[0-9][0-9] | hp6[0-9][0-9]) + basic_machine=hppa1.0-hp + ;; + hp9k7[0-79][0-9] | hp7[0-79][0-9]) + basic_machine=hppa1.1-hp + ;; + hp9k78[0-9] | hp78[0-9]) + # FIXME: really hppa2.0-hp + basic_machine=hppa1.1-hp + ;; + hp9k8[67]1 | hp8[67]1 | hp9k80[24] | hp80[24] | hp9k8[78]9 | hp8[78]9 | hp9k893 | hp893) + # FIXME: really hppa2.0-hp + basic_machine=hppa1.1-hp + ;; + hp9k8[0-9][13679] | hp8[0-9][13679]) + basic_machine=hppa1.1-hp + ;; + hp9k8[0-9][0-9] | hp8[0-9][0-9]) + basic_machine=hppa1.0-hp + ;; + hppa-next) + os=-nextstep3 + ;; + hppaosf) + basic_machine=hppa1.1-hp + os=-osf + ;; + hppro) + basic_machine=hppa1.1-hp + os=-proelf + ;; + i370-ibm* | ibm*) + basic_machine=i370-ibm + ;; + i*86v32) + basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` + os=-sysv32 + ;; + i*86v4*) + basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` + os=-sysv4 + ;; + i*86v) + basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` + os=-sysv + ;; + i*86sol2) + basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` + os=-solaris2 + ;; + i386mach) + basic_machine=i386-mach + os=-mach + ;; + i386-vsta | vsta) + basic_machine=i386-unknown + os=-vsta + ;; + iris | iris4d) + basic_machine=mips-sgi + case $os in + -irix*) + ;; + *) + os=-irix4 + ;; + esac + ;; + isi68 | isi) + basic_machine=m68k-isi + os=-sysv + ;; + m68knommu) + basic_machine=m68k-unknown + os=-linux + ;; + m68knommu-*) + basic_machine=m68k-`echo $basic_machine | sed 's/^[^-]*-//'` + os=-linux + ;; + m88k-omron*) + basic_machine=m88k-omron + ;; + magnum | m3230) + basic_machine=mips-mips + os=-sysv + ;; + merlin) + basic_machine=ns32k-utek + os=-sysv + ;; + microblaze) + basic_machine=microblaze-xilinx + ;; + mingw32) + basic_machine=i386-pc + os=-mingw32 + ;; + mingw32ce) + basic_machine=arm-unknown + os=-mingw32ce + ;; + miniframe) + basic_machine=m68000-convergent + ;; + *mint | -mint[0-9]* | *MiNT | *MiNT[0-9]*) + basic_machine=m68k-atari + os=-mint + ;; + mips3*-*) + basic_machine=`echo $basic_machine | sed -e 's/mips3/mips64/'` + ;; + mips3*) + basic_machine=`echo $basic_machine | sed -e 's/mips3/mips64/'`-unknown + ;; + monitor) + basic_machine=m68k-rom68k + os=-coff + ;; + morphos) + basic_machine=powerpc-unknown + os=-morphos + ;; + msdos) + basic_machine=i386-pc + os=-msdos + ;; + ms1-*) + basic_machine=`echo $basic_machine | sed -e 's/ms1-/mt-/'` + ;; + msys) + basic_machine=i386-pc + os=-msys + ;; + mvs) + basic_machine=i370-ibm + os=-mvs + ;; + nacl) + basic_machine=le32-unknown + os=-nacl + ;; + ncr3000) + basic_machine=i486-ncr + os=-sysv4 + ;; + netbsd386) + basic_machine=i386-unknown + os=-netbsd + ;; + netwinder) + basic_machine=armv4l-rebel + os=-linux + ;; + news | news700 | news800 | news900) + basic_machine=m68k-sony + os=-newsos + ;; + news1000) + basic_machine=m68030-sony + os=-newsos + ;; + news-3600 | risc-news) + 
basic_machine=mips-sony + os=-newsos + ;; + necv70) + basic_machine=v70-nec + os=-sysv + ;; + next | m*-next ) + basic_machine=m68k-next + case $os in + -nextstep* ) + ;; + -ns2*) + os=-nextstep2 + ;; + *) + os=-nextstep3 + ;; + esac + ;; + nh3000) + basic_machine=m68k-harris + os=-cxux + ;; + nh[45]000) + basic_machine=m88k-harris + os=-cxux + ;; + nindy960) + basic_machine=i960-intel + os=-nindy + ;; + mon960) + basic_machine=i960-intel + os=-mon960 + ;; + nonstopux) + basic_machine=mips-compaq + os=-nonstopux + ;; + np1) + basic_machine=np1-gould + ;; + neo-tandem) + basic_machine=neo-tandem + ;; + nse-tandem) + basic_machine=nse-tandem + ;; + nsr-tandem) + basic_machine=nsr-tandem + ;; + nsx-tandem) + basic_machine=nsx-tandem + ;; + op50n-* | op60c-*) + basic_machine=hppa1.1-oki + os=-proelf + ;; + openrisc | openrisc-*) + basic_machine=or32-unknown + ;; + os400) + basic_machine=powerpc-ibm + os=-os400 + ;; + OSE68000 | ose68000) + basic_machine=m68000-ericsson + os=-ose + ;; + os68k) + basic_machine=m68k-none + os=-os68k + ;; + pa-hitachi) + basic_machine=hppa1.1-hitachi + os=-hiuxwe2 + ;; + paragon) + basic_machine=i860-intel + os=-osf + ;; + parisc) + basic_machine=hppa-unknown + os=-linux + ;; + parisc-*) + basic_machine=hppa-`echo $basic_machine | sed 's/^[^-]*-//'` + os=-linux + ;; + pbd) + basic_machine=sparc-tti + ;; + pbb) + basic_machine=m68k-tti + ;; + pc532 | pc532-*) + basic_machine=ns32k-pc532 + ;; + pc98) + basic_machine=i386-pc + ;; + pc98-*) + basic_machine=i386-`echo $basic_machine | sed 's/^[^-]*-//'` + ;; + pentium | p5 | k5 | k6 | nexgen | viac3) + basic_machine=i586-pc + ;; + pentiumpro | p6 | 6x86 | athlon | athlon_*) + basic_machine=i686-pc + ;; + pentiumii | pentium2 | pentiumiii | pentium3) + basic_machine=i686-pc + ;; + pentium4) + basic_machine=i786-pc + ;; + pentium-* | p5-* | k5-* | k6-* | nexgen-* | viac3-*) + basic_machine=i586-`echo $basic_machine | sed 's/^[^-]*-//'` + ;; + pentiumpro-* | p6-* | 6x86-* | athlon-*) + basic_machine=i686-`echo $basic_machine | sed 's/^[^-]*-//'` + ;; + pentiumii-* | pentium2-* | pentiumiii-* | pentium3-*) + basic_machine=i686-`echo $basic_machine | sed 's/^[^-]*-//'` + ;; + pentium4-*) + basic_machine=i786-`echo $basic_machine | sed 's/^[^-]*-//'` + ;; + pn) + basic_machine=pn-gould + ;; + power) basic_machine=power-ibm + ;; + ppc | ppcbe) basic_machine=powerpc-unknown + ;; + ppc-* | ppcbe-*) + basic_machine=powerpc-`echo $basic_machine | sed 's/^[^-]*-//'` + ;; + ppcle | powerpclittle) + basic_machine=powerpcle-unknown + ;; + ppcle-* | powerpclittle-*) + basic_machine=powerpcle-`echo $basic_machine | sed 's/^[^-]*-//'` + ;; + ppc64) basic_machine=powerpc64-unknown + ;; + ppc64-*) basic_machine=powerpc64-`echo $basic_machine | sed 's/^[^-]*-//'` + ;; + ppc64le | powerpc64little) + basic_machine=powerpc64le-unknown + ;; + ppc64le-* | powerpc64little-*) + basic_machine=powerpc64le-`echo $basic_machine | sed 's/^[^-]*-//'` + ;; + ps2) + basic_machine=i386-ibm + ;; + pw32) + basic_machine=i586-unknown + os=-pw32 + ;; + rdos) + basic_machine=i386-pc + os=-rdos + ;; + rom68k) + basic_machine=m68k-rom68k + os=-coff + ;; + rm[46]00) + basic_machine=mips-siemens + ;; + rtpc | rtpc-*) + basic_machine=romp-ibm + ;; + s390 | s390-*) + basic_machine=s390-ibm + ;; + s390x | s390x-*) + basic_machine=s390x-ibm + ;; + sa29200) + basic_machine=a29k-amd + os=-udi + ;; + sb1) + basic_machine=mipsisa64sb1-unknown + ;; + sb1el) + basic_machine=mipsisa64sb1el-unknown + ;; + sde) + basic_machine=mipsisa32-sde + os=-elf + ;; + sei) + 
basic_machine=mips-sei + os=-seiux + ;; + sequent) + basic_machine=i386-sequent + ;; + sh) + basic_machine=sh-hitachi + os=-hms + ;; + sh5el) + basic_machine=sh5le-unknown + ;; + sh64) + basic_machine=sh64-unknown + ;; + sparclite-wrs | simso-wrs) + basic_machine=sparclite-wrs + os=-vxworks + ;; + sps7) + basic_machine=m68k-bull + os=-sysv2 + ;; + spur) + basic_machine=spur-unknown + ;; + st2000) + basic_machine=m68k-tandem + ;; + stratus) + basic_machine=i860-stratus + os=-sysv4 + ;; + strongarm-* | thumb-*) + basic_machine=arm-`echo $basic_machine | sed 's/^[^-]*-//'` + ;; + sun2) + basic_machine=m68000-sun + ;; + sun2os3) + basic_machine=m68000-sun + os=-sunos3 + ;; + sun2os4) + basic_machine=m68000-sun + os=-sunos4 + ;; + sun3os3) + basic_machine=m68k-sun + os=-sunos3 + ;; + sun3os4) + basic_machine=m68k-sun + os=-sunos4 + ;; + sun4os3) + basic_machine=sparc-sun + os=-sunos3 + ;; + sun4os4) + basic_machine=sparc-sun + os=-sunos4 + ;; + sun4sol2) + basic_machine=sparc-sun + os=-solaris2 + ;; + sun3 | sun3-*) + basic_machine=m68k-sun + ;; + sun4) + basic_machine=sparc-sun + ;; + sun386 | sun386i | roadrunner) + basic_machine=i386-sun + ;; + sv1) + basic_machine=sv1-cray + os=-unicos + ;; + symmetry) + basic_machine=i386-sequent + os=-dynix + ;; + t3e) + basic_machine=alphaev5-cray + os=-unicos + ;; + t90) + basic_machine=t90-cray + os=-unicos + ;; + tile*) + basic_machine=$basic_machine-unknown + os=-linux-gnu + ;; + tx39) + basic_machine=mipstx39-unknown + ;; + tx39el) + basic_machine=mipstx39el-unknown + ;; + toad1) + basic_machine=pdp10-xkl + os=-tops20 + ;; + tower | tower-32) + basic_machine=m68k-ncr + ;; + tpf) + basic_machine=s390x-ibm + os=-tpf + ;; + udi29k) + basic_machine=a29k-amd + os=-udi + ;; + ultra3) + basic_machine=a29k-nyu + os=-sym1 + ;; + v810 | necv810) + basic_machine=v810-nec + os=-none + ;; + vaxv) + basic_machine=vax-dec + os=-sysv + ;; + vms) + basic_machine=vax-dec + os=-vms + ;; + vpp*|vx|vx-*) + basic_machine=f301-fujitsu + ;; + vxworks960) + basic_machine=i960-wrs + os=-vxworks + ;; + vxworks68) + basic_machine=m68k-wrs + os=-vxworks + ;; + vxworks29k) + basic_machine=a29k-wrs + os=-vxworks + ;; + wasm32) + basic_machine=wasm32-unknown + ;; + w65*) + basic_machine=w65-wdc + os=-none + ;; + w89k-*) + basic_machine=hppa1.1-winbond + os=-proelf + ;; + xbox) + basic_machine=i686-pc + os=-mingw32 + ;; + xps | xps100) + basic_machine=xps100-honeywell + ;; + xscale-* | xscalee[bl]-*) + basic_machine=`echo $basic_machine | sed 's/^xscale/arm/'` + ;; + ymp) + basic_machine=ymp-cray + os=-unicos + ;; + z8k-*-coff) + basic_machine=z8k-unknown + os=-sim + ;; + z80-*-coff) + basic_machine=z80-unknown + os=-sim + ;; + none) + basic_machine=none-none + os=-none + ;; + +# Here we handle the default manufacturer of certain CPU types. It is in +# some cases the only manufacturer, in others, it is the most popular. 
+ w89k) + basic_machine=hppa1.1-winbond + ;; + op50n) + basic_machine=hppa1.1-oki + ;; + op60c) + basic_machine=hppa1.1-oki + ;; + romp) + basic_machine=romp-ibm + ;; + mmix) + basic_machine=mmix-knuth + ;; + rs6000) + basic_machine=rs6000-ibm + ;; + vax) + basic_machine=vax-dec + ;; + pdp10) + # there are many clones, so DEC is not a safe bet + basic_machine=pdp10-unknown + ;; + pdp11) + basic_machine=pdp11-dec + ;; + we32k) + basic_machine=we32k-att + ;; + sh[1234] | sh[24]a | sh[24]aeb | sh[34]eb | sh[1234]le | sh[23]ele) + basic_machine=sh-unknown + ;; + sparc | sparcv8 | sparcv9 | sparcv9b | sparcv9v) + basic_machine=sparc-sun + ;; + cydra) + basic_machine=cydra-cydrome + ;; + orion) + basic_machine=orion-highlevel + ;; + orion105) + basic_machine=clipper-highlevel + ;; + mac | mpw | mac-mpw) + basic_machine=m68k-apple + ;; + pmac | pmac-mpw) + basic_machine=powerpc-apple + ;; + *-unknown) + # Make sure to match an already-canonicalized machine name. + ;; + *) + echo Invalid configuration \`$1\': machine \`$basic_machine\' not recognized 1>&2 + exit 1 + ;; +esac + +# Here we canonicalize certain aliases for manufacturers. +case $basic_machine in + *-digital*) + basic_machine=`echo $basic_machine | sed 's/digital.*/dec/'` + ;; + *-commodore*) + basic_machine=`echo $basic_machine | sed 's/commodore.*/cbm/'` + ;; + *) + ;; +esac + +# Decode manufacturer-specific aliases for certain operating systems. + +if [ x"$os" != x"" ] +then +case $os in + # First match some system type aliases + # that might get confused with valid system types. + # -solaris* is a basic system type, with this one exception. + -auroraux) + os=-auroraux + ;; + -solaris1 | -solaris1.*) + os=`echo $os | sed -e 's|solaris1|sunos4|'` + ;; + -solaris) + os=-solaris2 + ;; + -svr4*) + os=-sysv4 + ;; + -unixware*) + os=-sysv4.2uw + ;; + -gnu/linux*) + os=`echo $os | sed -e 's|gnu/linux|linux-gnu|'` + ;; + # First accept the basic system types. + # The portable systems comes first. + # Each alternative MUST END IN A *, to match a version number. + # -sysv* is not here because it comes later, after sysvr4. 
+ -gnu* | -bsd* | -mach* | -minix* | -genix* | -ultrix* | -irix* \ + | -*vms* | -sco* | -esix* | -isc* | -aix* | -cnk* | -sunos | -sunos[34]*\ + | -hpux* | -unos* | -osf* | -luna* | -dgux* | -auroraux* | -solaris* \ + | -sym* | -kopensolaris* \ + | -amigaos* | -amigados* | -msdos* | -newsos* | -unicos* | -aof* \ + | -aos* | -aros* \ + | -nindy* | -vxsim* | -vxworks* | -ebmon* | -hms* | -mvs* \ + | -clix* | -riscos* | -uniplus* | -iris* | -rtu* | -xenix* \ + | -hiux* | -386bsd* | -knetbsd* | -mirbsd* | -netbsd* \ + | -openbsd* | -solidbsd* \ + | -ekkobsd* | -kfreebsd* | -freebsd* | -riscix* | -lynxos* \ + | -bosx* | -nextstep* | -cxux* | -aout* | -elf* | -oabi* \ + | -ptx* | -coff* | -ecoff* | -winnt* | -domain* | -vsta* \ + | -udi* | -eabi* | -lites* | -ieee* | -go32* | -aux* \ + | -chorusos* | -chorusrdb* | -cegcc* \ + | -cygwin* | -msys* | -pe* | -psos* | -moss* | -proelf* | -rtems* \ + | -mingw32* | -linux-gnu* | -linux-android* \ + | -linux-newlib* | -linux-uclibc* \ + | -uxpv* | -beos* | -mpeix* | -udk* \ + | -interix* | -uwin* | -mks* | -rhapsody* | -darwin* | -opened* \ + | -openstep* | -oskit* | -conix* | -pw32* | -nonstopux* \ + | -storm-chaos* | -tops10* | -tenex* | -tops20* | -its* \ + | -os2* | -vos* | -palmos* | -uclinux* | -nucleus* \ + | -morphos* | -superux* | -rtmk* | -rtmk-nova* | -windiss* \ + | -powermax* | -dnix* | -nx6 | -nx7 | -sei* | -dragonfly* \ + | -skyos* | -haiku* | -rdos* | -toppers* | -drops* | -es*) + # Remember, each alternative MUST END IN *, to match a version number. + ;; + -qnx*) + case $basic_machine in + x86-* | i*86-*) + ;; + *) + os=-nto$os + ;; + esac + ;; + -nto-qnx*) + ;; + -nto*) + os=`echo $os | sed -e 's|nto|nto-qnx|'` + ;; + -sim | -es1800* | -hms* | -xray | -os68k* | -none* | -v88r* \ + | -windows* | -osx | -abug | -netware* | -os9* | -beos* | -haiku* \ + | -macos* | -mpw* | -magic* | -mmixware* | -mon960* | -lnews*) + ;; + -mac*) + os=`echo $os | sed -e 's|mac|macos|'` + ;; + -linux-dietlibc) + os=-linux-dietlibc + ;; + -linux*) + os=`echo $os | sed -e 's|linux|linux-gnu|'` + ;; + -sunos5*) + os=`echo $os | sed -e 's|sunos5|solaris2|'` + ;; + -sunos6*) + os=`echo $os | sed -e 's|sunos6|solaris3|'` + ;; + -opened*) + os=-openedition + ;; + -os400*) + os=-os400 + ;; + -wince*) + os=-wince + ;; + -osfrose*) + os=-osfrose + ;; + -osf*) + os=-osf + ;; + -utek*) + os=-bsd + ;; + -dynix*) + os=-bsd + ;; + -acis*) + os=-aos + ;; + -atheos*) + os=-atheos + ;; + -syllable*) + os=-syllable + ;; + -386bsd) + os=-bsd + ;; + -ctix* | -uts*) + os=-sysv + ;; + -nova*) + os=-rtmk-nova + ;; + -ns2 ) + os=-nextstep2 + ;; + -nsk*) + os=-nsk + ;; + # Preserve the version number of sinix5. + -sinix5.*) + os=`echo $os | sed -e 's|sinix|sysv|'` + ;; + -sinix*) + os=-sysv4 + ;; + -tpf*) + os=-tpf + ;; + -triton*) + os=-sysv3 + ;; + -oss*) + os=-sysv3 + ;; + -svr4) + os=-sysv4 + ;; + -svr3) + os=-sysv3 + ;; + -sysvr4) + os=-sysv4 + ;; + # This must come after -sysvr4. + -sysv*) + ;; + -ose*) + os=-ose + ;; + -es1800*) + os=-ose + ;; + -xenix) + os=-xenix + ;; + -*mint | -mint[0-9]* | -*MiNT | -MiNT[0-9]*) + os=-mint + ;; + -aros*) + os=-aros + ;; + -kaos*) + os=-kaos + ;; + -zvmoe) + os=-zvmoe + ;; + -dicos*) + os=-dicos + ;; + -nacl*) + ;; + -ios) + ;; + -none) + ;; + *) + # Get rid of the `-' at the beginning of $os. + os=`echo $os | sed 's/[^-]*-//'` + echo Invalid configuration \`$1\': system \`$os\' not recognized 1>&2 + exit 1 + ;; +esac +else + +# Here we handle the default operating systems that come with various machines. 
+# The value should be what the vendor currently ships out the door with their +# machine or put another way, the most popular os provided with the machine. + +# Note that if you're going to try to match "-MANUFACTURER" here (say, +# "-sun"), then you have to tell the case statement up towards the top +# that MANUFACTURER isn't an operating system. Otherwise, code above +# will signal an error saying that MANUFACTURER isn't an operating +# system, and we'll never get to this point. + +case $basic_machine in + score-*) + os=-elf + ;; + spu-*) + os=-elf + ;; + *-acorn) + os=-riscix1.2 + ;; + arm*-rebel) + os=-linux + ;; + arm*-semi) + os=-aout + ;; + c4x-* | tic4x-*) + os=-coff + ;; + tic54x-*) + os=-coff + ;; + tic55x-*) + os=-coff + ;; + tic6x-*) + os=-coff + ;; + # This must come before the *-dec entry. + pdp10-*) + os=-tops20 + ;; + pdp11-*) + os=-none + ;; + *-dec | vax-*) + os=-ultrix4.2 + ;; + m68*-apollo) + os=-domain + ;; + i386-sun) + os=-sunos4.0.2 + ;; + m68000-sun) + os=-sunos3 + ;; + m68*-cisco) + os=-aout + ;; + mep-*) + os=-elf + ;; + mips*-cisco) + os=-elf + ;; + mips*-*) + os=-elf + ;; + or32-*) + os=-coff + ;; + *-tti) # must be before sparc entry or we get the wrong os. + os=-sysv3 + ;; + sparc-* | *-sun) + os=-sunos4.1.1 + ;; + pru-*) + os=-elf + ;; + *-be) + os=-beos + ;; + *-haiku) + os=-haiku + ;; + *-ibm) + os=-aix + ;; + *-knuth) + os=-mmixware + ;; + *-wec) + os=-proelf + ;; + *-winbond) + os=-proelf + ;; + *-oki) + os=-proelf + ;; + *-hp) + os=-hpux + ;; + *-hitachi) + os=-hiux + ;; + i860-* | *-att | *-ncr | *-altos | *-motorola | *-convergent) + os=-sysv + ;; + *-cbm) + os=-amigaos + ;; + *-dg) + os=-dgux + ;; + *-dolphin) + os=-sysv3 + ;; + m68k-ccur) + os=-rtu + ;; + m88k-omron*) + os=-luna + ;; + *-next ) + os=-nextstep + ;; + *-sequent) + os=-ptx + ;; + *-crds) + os=-unos + ;; + *-ns) + os=-genix + ;; + i370-*) + os=-mvs + ;; + *-next) + os=-nextstep3 + ;; + *-gould) + os=-sysv + ;; + *-highlevel) + os=-bsd + ;; + *-encore) + os=-bsd + ;; + *-sgi) + os=-irix + ;; + *-siemens) + os=-sysv4 + ;; + *-masscomp) + os=-rtu + ;; + f30[01]-fujitsu | f700-fujitsu) + os=-uxpv + ;; + *-rom68k) + os=-coff + ;; + *-*bug) + os=-coff + ;; + *-apple) + os=-macos + ;; + *-atari*) + os=-mint + ;; + *) + os=-none + ;; +esac +fi + +# Here we handle the case where we know the os, and the CPU type, but not the +# manufacturer. We pick the logical manufacturer. 
+vendor=unknown +case $basic_machine in + *-unknown) + case $os in + -riscix*) + vendor=acorn + ;; + -sunos*) + vendor=sun + ;; + -cnk*|-aix*) + vendor=ibm + ;; + -beos*) + vendor=be + ;; + -hpux*) + vendor=hp + ;; + -mpeix*) + vendor=hp + ;; + -hiux*) + vendor=hitachi + ;; + -unos*) + vendor=crds + ;; + -dgux*) + vendor=dg + ;; + -luna*) + vendor=omron + ;; + -genix*) + vendor=ns + ;; + -mvs* | -opened*) + vendor=ibm + ;; + -os400*) + vendor=ibm + ;; + -ptx*) + vendor=sequent + ;; + -tpf*) + vendor=ibm + ;; + -vxsim* | -vxworks* | -windiss*) + vendor=wrs + ;; + -aux*) + vendor=apple + ;; + -hms*) + vendor=hitachi + ;; + -mpw* | -macos*) + vendor=apple + ;; + -*mint | -mint[0-9]* | -*MiNT | -MiNT[0-9]*) + vendor=atari + ;; + -vos*) + vendor=stratus + ;; + esac + basic_machine=`echo $basic_machine | sed "s/unknown/$vendor/"` + ;; +esac + +echo $basic_machine$os +exit + +# Local variables: +# eval: (add-hook 'write-file-hooks 'time-stamp) +# time-stamp-start: "timestamp='" +# time-stamp-format: "%:y-%02m-%02d" +# time-stamp-end: "'" +# End: diff --git a/backtrace-sys/src/libbacktrace/config/libtool.m4 b/backtrace-sys/src/libbacktrace/config/libtool.m4 new file mode 100644 index 000000000..4ceb7f128 --- /dev/null +++ b/backtrace-sys/src/libbacktrace/config/libtool.m4 @@ -0,0 +1,7309 @@ +# libtool.m4 - Configure libtool for the host system. -*-Autoconf-*- +# +# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, +# 2006, 2007, 2008 Free Software Foundation, Inc. +# Written by Gordon Matzigkeit, 1996 +# +# This file is free software; the Free Software Foundation gives +# unlimited permission to copy and/or distribute it, with or without +# modifications, as long as this notice is preserved. + +m4_define([_LT_COPYING], [dnl +# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, +# 2006, 2007, 2008 Free Software Foundation, Inc. +# Written by Gordon Matzigkeit, 1996 +# +# This file is part of GNU Libtool. +# +# GNU Libtool is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License as +# published by the Free Software Foundation; either version 2 of +# the License, or (at your option) any later version. +# +# As a special exception to the GNU General Public License, +# if you distribute this file as part of a program or library that +# is built using GNU Libtool, you may include this file under the +# same distribution terms that you use for the rest of that program. +# +# GNU Libtool is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with GNU Libtool; see the file COPYING. If not, a copy +# can be downloaded from http://www.gnu.org/licenses/gpl.html, or +# obtained by writing to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. +]) + +# serial 56 LT_INIT + + +# LT_PREREQ(VERSION) +# ------------------ +# Complain and exit if this libtool version is less that VERSION. 
+m4_defun([LT_PREREQ], +[m4_if(m4_version_compare(m4_defn([LT_PACKAGE_VERSION]), [$1]), -1, + [m4_default([$3], + [m4_fatal([Libtool version $1 or higher is required], + 63)])], + [$2])]) + + +# _LT_CHECK_BUILDDIR +# ------------------ +# Complain if the absolute build directory name contains unusual characters +m4_defun([_LT_CHECK_BUILDDIR], +[case `pwd` in + *\ * | *\ *) + AC_MSG_WARN([Libtool does not cope well with whitespace in `pwd`]) ;; +esac +]) + + +# LT_INIT([OPTIONS]) +# ------------------ +AC_DEFUN([LT_INIT], +[AC_PREREQ([2.58])dnl We use AC_INCLUDES_DEFAULT +AC_BEFORE([$0], [LT_LANG])dnl +AC_BEFORE([$0], [LT_OUTPUT])dnl +AC_BEFORE([$0], [LTDL_INIT])dnl +m4_require([_LT_CHECK_BUILDDIR])dnl + +dnl Autoconf doesn't catch unexpanded LT_ macros by default: +m4_pattern_forbid([^_?LT_[A-Z_]+$])dnl +m4_pattern_allow([^(_LT_EOF|LT_DLGLOBAL|LT_DLLAZY_OR_NOW|LT_MULTI_MODULE)$])dnl +dnl aclocal doesn't pull ltoptions.m4, ltsugar.m4, or ltversion.m4 +dnl unless we require an AC_DEFUNed macro: +AC_REQUIRE([LTOPTIONS_VERSION])dnl +AC_REQUIRE([LTSUGAR_VERSION])dnl +AC_REQUIRE([LTVERSION_VERSION])dnl +AC_REQUIRE([LTOBSOLETE_VERSION])dnl +m4_require([_LT_PROG_LTMAIN])dnl + +dnl Parse OPTIONS +_LT_SET_OPTIONS([$0], [$1]) + +# This can be used to rebuild libtool when needed +LIBTOOL_DEPS="$ltmain" + +# Always use our own libtool. +LIBTOOL='$(SHELL) $(top_builddir)/libtool' +AC_SUBST(LIBTOOL)dnl + +_LT_SETUP + +# Only expand once: +m4_define([LT_INIT]) +])# LT_INIT + +# Old names: +AU_ALIAS([AC_PROG_LIBTOOL], [LT_INIT]) +AU_ALIAS([AM_PROG_LIBTOOL], [LT_INIT]) +dnl aclocal-1.4 backwards compatibility: +dnl AC_DEFUN([AC_PROG_LIBTOOL], []) +dnl AC_DEFUN([AM_PROG_LIBTOOL], []) + + +# _LT_CC_BASENAME(CC) +# ------------------- +# Calculate cc_basename. Skip known compiler wrappers and cross-prefix. +m4_defun([_LT_CC_BASENAME], +[for cc_temp in $1""; do + case $cc_temp in + compile | *[[\\/]]compile | ccache | *[[\\/]]ccache ) ;; + distcc | *[[\\/]]distcc | purify | *[[\\/]]purify ) ;; + \-*) ;; + *) break;; + esac +done +cc_basename=`$ECHO "X$cc_temp" | $Xsed -e 's%.*/%%' -e "s%^$host_alias-%%"` +]) + + +# _LT_FILEUTILS_DEFAULTS +# ---------------------- +# It is okay to use these file commands and assume they have been set +# sensibly after `m4_require([_LT_FILEUTILS_DEFAULTS])'. 
+m4_defun([_LT_FILEUTILS_DEFAULTS], +[: ${CP="cp -f"} +: ${MV="mv -f"} +: ${RM="rm -f"} +])# _LT_FILEUTILS_DEFAULTS + + +# _LT_SETUP +# --------- +m4_defun([_LT_SETUP], +[AC_REQUIRE([AC_CANONICAL_HOST])dnl +AC_REQUIRE([AC_CANONICAL_BUILD])dnl +_LT_DECL([], [host_alias], [0], [The host system])dnl +_LT_DECL([], [host], [0])dnl +_LT_DECL([], [host_os], [0])dnl +dnl +_LT_DECL([], [build_alias], [0], [The build system])dnl +_LT_DECL([], [build], [0])dnl +_LT_DECL([], [build_os], [0])dnl +dnl +AC_REQUIRE([AC_PROG_CC])dnl +AC_REQUIRE([LT_PATH_LD])dnl +AC_REQUIRE([LT_PATH_NM])dnl +dnl +AC_REQUIRE([AC_PROG_LN_S])dnl +test -z "$LN_S" && LN_S="ln -s" +_LT_DECL([], [LN_S], [1], [Whether we need soft or hard links])dnl +dnl +AC_REQUIRE([LT_CMD_MAX_LEN])dnl +_LT_DECL([objext], [ac_objext], [0], [Object file suffix (normally "o")])dnl +_LT_DECL([], [exeext], [0], [Executable file suffix (normally "")])dnl +dnl +m4_require([_LT_FILEUTILS_DEFAULTS])dnl +m4_require([_LT_CHECK_SHELL_FEATURES])dnl +m4_require([_LT_CMD_RELOAD])dnl +m4_require([_LT_CHECK_MAGIC_METHOD])dnl +m4_require([_LT_CMD_OLD_ARCHIVE])dnl +m4_require([_LT_CMD_GLOBAL_SYMBOLS])dnl + +_LT_CONFIG_LIBTOOL_INIT([ +# See if we are running on zsh, and set the options which allow our +# commands through without removal of \ escapes INIT. +if test -n "\${ZSH_VERSION+set}" ; then + setopt NO_GLOB_SUBST +fi +]) +if test -n "${ZSH_VERSION+set}" ; then + setopt NO_GLOB_SUBST +fi + +_LT_CHECK_OBJDIR + +m4_require([_LT_TAG_COMPILER])dnl +_LT_PROG_ECHO_BACKSLASH + +case $host_os in +aix3*) + # AIX sometimes has problems with the GCC collect2 program. For some + # reason, if we set the COLLECT_NAMES environment variable, the problems + # vanish in a puff of smoke. + if test "X${COLLECT_NAMES+set}" != Xset; then + COLLECT_NAMES= + export COLLECT_NAMES + fi + ;; +esac + +# Sed substitution that helps us do robust quoting. It backslashifies +# metacharacters that are still active within double-quoted strings. +sed_quote_subst='s/\([["`$\\]]\)/\\\1/g' + +# Same as above, but do not quote variable references. +double_quote_subst='s/\([["`\\]]\)/\\\1/g' + +# Sed substitution to delay expansion of an escaped shell variable in a +# double_quote_subst'ed string. +delay_variable_subst='s/\\\\\\\\\\\$/\\\\\\$/g' + +# Sed substitution to delay expansion of an escaped single quote. +delay_single_quote_subst='s/'\''/'\'\\\\\\\'\''/g' + +# Sed substitution to avoid accidental globbing in evaled expressions +no_glob_subst='s/\*/\\\*/g' + +# Global variables: +ofile=libtool +can_build_shared=yes + +# All known linkers require a `.a' archive for static linking (except MSVC, +# which needs '.lib'). 
+libext=a + +with_gnu_ld="$lt_cv_prog_gnu_ld" + +old_CC="$CC" +old_CFLAGS="$CFLAGS" + +# Set sane defaults for various variables +test -z "$CC" && CC=cc +test -z "$LTCC" && LTCC=$CC +test -z "$LTCFLAGS" && LTCFLAGS=$CFLAGS +test -z "$LD" && LD=ld +test -z "$ac_objext" && ac_objext=o + +_LT_CC_BASENAME([$compiler]) + +# Only perform the check for file, if the check method requires it +test -z "$MAGIC_CMD" && MAGIC_CMD=file +case $deplibs_check_method in +file_magic*) + if test "$file_magic_cmd" = '$MAGIC_CMD'; then + _LT_PATH_MAGIC + fi + ;; +esac + +# Use C for the default configuration in the libtool script +LT_SUPPORTED_TAG([CC]) +_LT_LANG_C_CONFIG +_LT_LANG_DEFAULT_CONFIG +_LT_CONFIG_COMMANDS +])# _LT_SETUP + + +# _LT_PROG_LTMAIN +# --------------- +# Note that this code is called both from `configure', and `config.status' +# now that we use AC_CONFIG_COMMANDS to generate libtool. Notably, +# `config.status' has no value for ac_aux_dir unless we are using Automake, +# so we pass a copy along to make sure it has a sensible value anyway. +m4_defun([_LT_PROG_LTMAIN], +[m4_ifdef([AC_REQUIRE_AUX_FILE], [AC_REQUIRE_AUX_FILE([ltmain.sh])])dnl +_LT_CONFIG_LIBTOOL_INIT([ac_aux_dir='$ac_aux_dir']) +ltmain="$ac_aux_dir/ltmain.sh" +])# _LT_PROG_LTMAIN + + +## ------------------------------------- ## +## Accumulate code for creating libtool. ## +## ------------------------------------- ## + +# So that we can recreate a full libtool script including additional +# tags, we accumulate the chunks of code to send to AC_CONFIG_COMMANDS +# in macros and then make a single call at the end using the `libtool' +# label. + + +# _LT_CONFIG_LIBTOOL_INIT([INIT-COMMANDS]) +# ---------------------------------------- +# Register INIT-COMMANDS to be passed to AC_CONFIG_COMMANDS later. +m4_define([_LT_CONFIG_LIBTOOL_INIT], +[m4_ifval([$1], + [m4_append([_LT_OUTPUT_LIBTOOL_INIT], + [$1 +])])]) + +# Initialize. +m4_define([_LT_OUTPUT_LIBTOOL_INIT]) + + +# _LT_CONFIG_LIBTOOL([COMMANDS]) +# ------------------------------ +# Register COMMANDS to be passed to AC_CONFIG_COMMANDS later. +m4_define([_LT_CONFIG_LIBTOOL], +[m4_ifval([$1], + [m4_append([_LT_OUTPUT_LIBTOOL_COMMANDS], + [$1 +])])]) + +# Initialize. +m4_define([_LT_OUTPUT_LIBTOOL_COMMANDS]) + + +# _LT_CONFIG_SAVE_COMMANDS([COMMANDS], [INIT_COMMANDS]) +# ----------------------------------------------------- +m4_defun([_LT_CONFIG_SAVE_COMMANDS], +[_LT_CONFIG_LIBTOOL([$1]) +_LT_CONFIG_LIBTOOL_INIT([$2]) +]) + + +# _LT_FORMAT_COMMENT([COMMENT]) +# ----------------------------- +# Add leading comment marks to the start of each line, and a trailing +# full-stop to the whole comment if one is not present already. +m4_define([_LT_FORMAT_COMMENT], +[m4_ifval([$1], [ +m4_bpatsubst([m4_bpatsubst([$1], [^ *], [# ])], + [['`$\]], [\\\&])]m4_bmatch([$1], [[!?.]$], [], [.]) +)]) + + + +## ------------------------ ## +## FIXME: Eliminate VARNAME ## +## ------------------------ ## + + +# _LT_DECL([CONFIGNAME], VARNAME, VALUE, [DESCRIPTION], [IS-TAGGED?]) +# ------------------------------------------------------------------- +# CONFIGNAME is the name given to the value in the libtool script. +# VARNAME is the (base) name used in the configure script. +# VALUE may be 0, 1 or 2 for a computed quote escaped value based on +# VARNAME. Any other value will be used directly. 
+m4_define([_LT_DECL], +[lt_if_append_uniq([lt_decl_varnames], [$2], [, ], + [lt_dict_add_subkey([lt_decl_dict], [$2], [libtool_name], + [m4_ifval([$1], [$1], [$2])]) + lt_dict_add_subkey([lt_decl_dict], [$2], [value], [$3]) + m4_ifval([$4], + [lt_dict_add_subkey([lt_decl_dict], [$2], [description], [$4])]) + lt_dict_add_subkey([lt_decl_dict], [$2], + [tagged?], [m4_ifval([$5], [yes], [no])])]) +]) + + +# _LT_TAGDECL([CONFIGNAME], VARNAME, VALUE, [DESCRIPTION]) +# -------------------------------------------------------- +m4_define([_LT_TAGDECL], [_LT_DECL([$1], [$2], [$3], [$4], [yes])]) + + +# lt_decl_tag_varnames([SEPARATOR], [VARNAME1...]) +# ------------------------------------------------ +m4_define([lt_decl_tag_varnames], +[_lt_decl_filter([tagged?], [yes], $@)]) + + +# _lt_decl_filter(SUBKEY, VALUE, [SEPARATOR], [VARNAME1..]) +# --------------------------------------------------------- +m4_define([_lt_decl_filter], +[m4_case([$#], + [0], [m4_fatal([$0: too few arguments: $#])], + [1], [m4_fatal([$0: too few arguments: $#: $1])], + [2], [lt_dict_filter([lt_decl_dict], [$1], [$2], [], lt_decl_varnames)], + [3], [lt_dict_filter([lt_decl_dict], [$1], [$2], [$3], lt_decl_varnames)], + [lt_dict_filter([lt_decl_dict], $@)])[]dnl +]) + + +# lt_decl_quote_varnames([SEPARATOR], [VARNAME1...]) +# -------------------------------------------------- +m4_define([lt_decl_quote_varnames], +[_lt_decl_filter([value], [1], $@)]) + + +# lt_decl_dquote_varnames([SEPARATOR], [VARNAME1...]) +# --------------------------------------------------- +m4_define([lt_decl_dquote_varnames], +[_lt_decl_filter([value], [2], $@)]) + + +# lt_decl_varnames_tagged([SEPARATOR], [VARNAME1...]) +# --------------------------------------------------- +m4_define([lt_decl_varnames_tagged], +[_$0(m4_quote(m4_default([$1], [[, ]])), + m4_quote(m4_if([$2], [], + m4_quote(lt_decl_tag_varnames), + m4_quote(m4_shift($@)))), + m4_split(m4_normalize(m4_quote(_LT_TAGS))))]) +m4_define([_lt_decl_varnames_tagged], [lt_combine([$1], [$2], [_], $3)]) + + +# lt_decl_all_varnames([SEPARATOR], [VARNAME1...]) +# ------------------------------------------------ +m4_define([lt_decl_all_varnames], +[_$0(m4_quote(m4_default([$1], [[, ]])), + m4_if([$2], [], + m4_quote(lt_decl_varnames), + m4_quote(m4_shift($@))))[]dnl +]) +m4_define([_lt_decl_all_varnames], +[lt_join($@, lt_decl_varnames_tagged([$1], + lt_decl_tag_varnames([[, ]], m4_shift($@))))dnl +]) + + +# _LT_CONFIG_STATUS_DECLARE([VARNAME]) +# ------------------------------------ +# Quote a variable value, and forward it to `config.status' so that its +# declaration there will have the same value as in `configure'. VARNAME +# must have a single quote delimited value for this to work. +m4_define([_LT_CONFIG_STATUS_DECLARE], +[$1='`$ECHO "X$][$1" | $Xsed -e "$delay_single_quote_subst"`']) + + +# _LT_CONFIG_STATUS_DECLARATIONS +# ------------------------------ +# We delimit libtool config variables with single quotes, so when +# we write them to config.status, we have to be sure to quote all +# embedded single quotes properly. 
In configure, this macro expands +# each variable declared with _LT_DECL (and _LT_TAGDECL) into: +# +# ='`$ECHO "X$" | $Xsed -e "$delay_single_quote_subst"`' +m4_defun([_LT_CONFIG_STATUS_DECLARATIONS], +[m4_foreach([_lt_var], m4_quote(lt_decl_all_varnames), + [m4_n([_LT_CONFIG_STATUS_DECLARE(_lt_var)])])]) + + +# _LT_LIBTOOL_TAGS +# ---------------- +# Output comment and list of tags supported by the script +m4_defun([_LT_LIBTOOL_TAGS], +[_LT_FORMAT_COMMENT([The names of the tagged configurations supported by this script])dnl +available_tags="_LT_TAGS"dnl +]) + + +# _LT_LIBTOOL_DECLARE(VARNAME, [TAG]) +# ----------------------------------- +# Extract the dictionary values for VARNAME (optionally with TAG) and +# expand to a commented shell variable setting: +# +# # Some comment about what VAR is for. +# visible_name=$lt_internal_name +m4_define([_LT_LIBTOOL_DECLARE], +[_LT_FORMAT_COMMENT(m4_quote(lt_dict_fetch([lt_decl_dict], [$1], + [description])))[]dnl +m4_pushdef([_libtool_name], + m4_quote(lt_dict_fetch([lt_decl_dict], [$1], [libtool_name])))[]dnl +m4_case(m4_quote(lt_dict_fetch([lt_decl_dict], [$1], [value])), + [0], [_libtool_name=[$]$1], + [1], [_libtool_name=$lt_[]$1], + [2], [_libtool_name=$lt_[]$1], + [_libtool_name=lt_dict_fetch([lt_decl_dict], [$1], [value])])[]dnl +m4_ifval([$2], [_$2])[]m4_popdef([_libtool_name])[]dnl +]) + + +# _LT_LIBTOOL_CONFIG_VARS +# ----------------------- +# Produce commented declarations of non-tagged libtool config variables +# suitable for insertion in the LIBTOOL CONFIG section of the `libtool' +# script. Tagged libtool config variables (even for the LIBTOOL CONFIG +# section) are produced by _LT_LIBTOOL_TAG_VARS. +m4_defun([_LT_LIBTOOL_CONFIG_VARS], +[m4_foreach([_lt_var], + m4_quote(_lt_decl_filter([tagged?], [no], [], lt_decl_varnames)), + [m4_n([_LT_LIBTOOL_DECLARE(_lt_var)])])]) + + +# _LT_LIBTOOL_TAG_VARS(TAG) +# ------------------------- +m4_define([_LT_LIBTOOL_TAG_VARS], +[m4_foreach([_lt_var], m4_quote(lt_decl_tag_varnames), + [m4_n([_LT_LIBTOOL_DECLARE(_lt_var, [$1])])])]) + + +# _LT_TAGVAR(VARNAME, [TAGNAME]) +# ------------------------------ +m4_define([_LT_TAGVAR], [m4_ifval([$2], [$1_$2], [$1])]) + + +# _LT_CONFIG_COMMANDS +# ------------------- +# Send accumulated output to $CONFIG_STATUS. Thanks to the lists of +# variables for single and double quote escaping we saved from calls +# to _LT_DECL, we can put quote escaped variables declarations +# into `config.status', and then the shell code to quote escape them in +# for loops in `config.status'. Finally, any additional code accumulated +# from calls to _LT_CONFIG_LIBTOOL_INIT is expanded. +m4_defun([_LT_CONFIG_COMMANDS], +[AC_PROVIDE_IFELSE([LT_OUTPUT], + dnl If the libtool generation code has been placed in $CONFIG_LT, + dnl instead of duplicating it all over again into config.status, + dnl then we will have config.status run $CONFIG_LT later, so it + dnl needs to know what name is stored there: + [AC_CONFIG_COMMANDS([libtool], + [$SHELL $CONFIG_LT || AS_EXIT(1)], [CONFIG_LT='$CONFIG_LT'])], + dnl If the libtool generation code is destined for config.status, + dnl expand the accumulated commands and init code now: + [AC_CONFIG_COMMANDS([libtool], + [_LT_OUTPUT_LIBTOOL_COMMANDS], [_LT_OUTPUT_LIBTOOL_COMMANDS_INIT])]) +])#_LT_CONFIG_COMMANDS + + +# Initialize. +m4_define([_LT_OUTPUT_LIBTOOL_COMMANDS_INIT], +[ + +# The HP-UX ksh and POSIX shell print the target directory to stdout +# if CDPATH is set. 
+(unset CDPATH) >/dev/null 2>&1 && unset CDPATH + +sed_quote_subst='$sed_quote_subst' +double_quote_subst='$double_quote_subst' +delay_variable_subst='$delay_variable_subst' +_LT_CONFIG_STATUS_DECLARATIONS +LTCC='$LTCC' +LTCFLAGS='$LTCFLAGS' +compiler='$compiler_DEFAULT' + +# Quote evaled strings. +for var in lt_decl_all_varnames([[ \ +]], lt_decl_quote_varnames); do + case \`eval \\\\\$ECHO "X\\\\\$\$var"\` in + *[[\\\\\\\`\\"\\\$]]*) + eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"X\\\$\$var\\" | \\\$Xsed -e \\"\\\$sed_quote_subst\\"\\\`\\\\\\"" + ;; + *) + eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\"" + ;; + esac +done + +# Double-quote double-evaled strings. +for var in lt_decl_all_varnames([[ \ +]], lt_decl_dquote_varnames); do + case \`eval \\\\\$ECHO "X\\\\\$\$var"\` in + *[[\\\\\\\`\\"\\\$]]*) + eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"X\\\$\$var\\" | \\\$Xsed -e \\"\\\$double_quote_subst\\" -e \\"\\\$sed_quote_subst\\" -e \\"\\\$delay_variable_subst\\"\\\`\\\\\\"" + ;; + *) + eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\"" + ;; + esac +done + +# Fix-up fallback echo if it was mangled by the above quoting rules. +case \$lt_ECHO in +*'\\\[$]0 --fallback-echo"')dnl " + lt_ECHO=\`\$ECHO "X\$lt_ECHO" | \$Xsed -e 's/\\\\\\\\\\\\\\\[$]0 --fallback-echo"\[$]/\[$]0 --fallback-echo"/'\` + ;; +esac + +_LT_OUTPUT_LIBTOOL_INIT +]) + + +# LT_OUTPUT +# --------- +# This macro allows early generation of the libtool script (before +# AC_OUTPUT is called), incase it is used in configure for compilation +# tests. +AC_DEFUN([LT_OUTPUT], +[: ${CONFIG_LT=./config.lt} +AC_MSG_NOTICE([creating $CONFIG_LT]) +cat >"$CONFIG_LT" <<_LTEOF +#! $SHELL +# Generated by $as_me. +# Run this file to recreate a libtool stub with the current configuration. + +lt_cl_silent=false +SHELL=\${CONFIG_SHELL-$SHELL} +_LTEOF + +cat >>"$CONFIG_LT" <<\_LTEOF +AS_SHELL_SANITIZE +_AS_PREPARE + +exec AS_MESSAGE_FD>&1 +exec AS_MESSAGE_LOG_FD>>config.log +{ + echo + AS_BOX([Running $as_me.]) +} >&AS_MESSAGE_LOG_FD + +lt_cl_help="\ +\`$as_me' creates a local libtool stub from the current configuration, +for use in further configure time tests before the real libtool is +generated. + +Usage: $[0] [[OPTIONS]] + + -h, --help print this help, then exit + -V, --version print version number, then exit + -q, --quiet do not print progress messages + -d, --debug don't remove temporary files + +Report bugs to ." + +lt_cl_version="\ +m4_ifset([AC_PACKAGE_NAME], [AC_PACKAGE_NAME ])config.lt[]dnl +m4_ifset([AC_PACKAGE_VERSION], [ AC_PACKAGE_VERSION]) +configured by $[0], generated by m4_PACKAGE_STRING. + +Copyright (C) 2008 Free Software Foundation, Inc. +This config.lt script is free software; the Free Software Foundation +gives unlimited permision to copy, distribute and modify it." 
+ +while test $[#] != 0 +do + case $[1] in + --version | --v* | -V ) + echo "$lt_cl_version"; exit 0 ;; + --help | --h* | -h ) + echo "$lt_cl_help"; exit 0 ;; + --debug | --d* | -d ) + debug=: ;; + --quiet | --q* | --silent | --s* | -q ) + lt_cl_silent=: ;; + + -*) AC_MSG_ERROR([unrecognized option: $[1] +Try \`$[0] --help' for more information.]) ;; + + *) AC_MSG_ERROR([unrecognized argument: $[1] +Try \`$[0] --help' for more information.]) ;; + esac + shift +done + +if $lt_cl_silent; then + exec AS_MESSAGE_FD>/dev/null +fi +_LTEOF + +cat >>"$CONFIG_LT" <<_LTEOF +_LT_OUTPUT_LIBTOOL_COMMANDS_INIT +_LTEOF + +cat >>"$CONFIG_LT" <<\_LTEOF +AC_MSG_NOTICE([creating $ofile]) +_LT_OUTPUT_LIBTOOL_COMMANDS +AS_EXIT(0) +_LTEOF +chmod +x "$CONFIG_LT" + +# configure is writing to config.log, but config.lt does its own redirection, +# appending to config.log, which fails on DOS, as config.log is still kept +# open by configure. Here we exec the FD to /dev/null, effectively closing +# config.log, so it can be properly (re)opened and appended to by config.lt. +if test "$no_create" != yes; then + lt_cl_success=: + test "$silent" = yes && + lt_config_lt_args="$lt_config_lt_args --quiet" + exec AS_MESSAGE_LOG_FD>/dev/null + $SHELL "$CONFIG_LT" $lt_config_lt_args || lt_cl_success=false + exec AS_MESSAGE_LOG_FD>>config.log + $lt_cl_success || AS_EXIT(1) +fi +])# LT_OUTPUT + + +# _LT_CONFIG(TAG) +# --------------- +# If TAG is the built-in tag, create an initial libtool script with a +# default configuration from the untagged config vars. Otherwise add code +# to config.status for appending the configuration named by TAG from the +# matching tagged config vars. +m4_defun([_LT_CONFIG], +[m4_require([_LT_FILEUTILS_DEFAULTS])dnl +_LT_CONFIG_SAVE_COMMANDS([ + m4_define([_LT_TAG], m4_if([$1], [], [C], [$1]))dnl + m4_if(_LT_TAG, [C], [ + # See if we are running on zsh, and set the options which allow our + # commands through without removal of \ escapes. + if test -n "${ZSH_VERSION+set}" ; then + setopt NO_GLOB_SUBST + fi + + cfgfile="${ofile}T" + trap "$RM \"$cfgfile\"; exit 1" 1 2 15 + $RM "$cfgfile" + + cat <<_LT_EOF >> "$cfgfile" +#! $SHELL + +# `$ECHO "$ofile" | sed 's%^.*/%%'` - Provide generalized library-building support services. +# Generated automatically by $as_me ($PACKAGE$TIMESTAMP) $VERSION +# Libtool was configured on host `(hostname || uname -n) 2>/dev/null | sed 1q`: +# NOTE: Changes made to this file will be lost: look at ltmain.sh. +# +_LT_COPYING +_LT_LIBTOOL_TAGS + +# ### BEGIN LIBTOOL CONFIG +_LT_LIBTOOL_CONFIG_VARS +_LT_LIBTOOL_TAG_VARS +# ### END LIBTOOL CONFIG + +_LT_EOF + + case $host_os in + aix3*) + cat <<\_LT_EOF >> "$cfgfile" +# AIX sometimes has problems with the GCC collect2 program. For some +# reason, if we set the COLLECT_NAMES environment variable, the problems +# vanish in a puff of smoke. +if test "X${COLLECT_NAMES+set}" != Xset; then + COLLECT_NAMES= + export COLLECT_NAMES +fi +_LT_EOF + ;; + esac + + _LT_PROG_LTMAIN + + # We use sed instead of cat because bash on DJGPP gets confused if + # if finds mixed CR/LF and LF-only lines. Since sed operates in + # text mode, it properly converts lines to CR/LF. This bash problem + # is reportedly fixed, but why not run on old versions too? 
+ sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \ + || (rm -f "$cfgfile"; exit 1) + + _LT_PROG_XSI_SHELLFNS + + sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \ + || (rm -f "$cfgfile"; exit 1) + + mv -f "$cfgfile" "$ofile" || + (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile") + chmod +x "$ofile" +], +[cat <<_LT_EOF >> "$ofile" + +dnl Unfortunately we have to use $1 here, since _LT_TAG is not expanded +dnl in a comment (ie after a #). +# ### BEGIN LIBTOOL TAG CONFIG: $1 +_LT_LIBTOOL_TAG_VARS(_LT_TAG) +# ### END LIBTOOL TAG CONFIG: $1 +_LT_EOF +])dnl /m4_if +], +[m4_if([$1], [], [ + PACKAGE='$PACKAGE' + VERSION='$VERSION' + TIMESTAMP='$TIMESTAMP' + RM='$RM' + ofile='$ofile'], []) +])dnl /_LT_CONFIG_SAVE_COMMANDS +])# _LT_CONFIG + + +# LT_SUPPORTED_TAG(TAG) +# --------------------- +# Trace this macro to discover what tags are supported by the libtool +# --tag option, using: +# autoconf --trace 'LT_SUPPORTED_TAG:$1' +AC_DEFUN([LT_SUPPORTED_TAG], []) + + +# C support is built-in for now +m4_define([_LT_LANG_C_enabled], []) +m4_define([_LT_TAGS], []) + + +# LT_LANG(LANG) +# ------------- +# Enable libtool support for the given language if not already enabled. +AC_DEFUN([LT_LANG], +[AC_BEFORE([$0], [LT_OUTPUT])dnl +m4_case([$1], + [C], [_LT_LANG(C)], + [C++], [_LT_LANG(CXX)], + [Java], [_LT_LANG(GCJ)], + [Fortran 77], [_LT_LANG(F77)], + [Fortran], [_LT_LANG(FC)], + [Windows Resource], [_LT_LANG(RC)], + [m4_ifdef([_LT_LANG_]$1[_CONFIG], + [_LT_LANG($1)], + [m4_fatal([$0: unsupported language: "$1"])])])dnl +])# LT_LANG + + +# _LT_LANG(LANGNAME) +# ------------------ +m4_defun([_LT_LANG], +[m4_ifdef([_LT_LANG_]$1[_enabled], [], + [LT_SUPPORTED_TAG([$1])dnl + m4_append([_LT_TAGS], [$1 ])dnl + m4_define([_LT_LANG_]$1[_enabled], [])dnl + _LT_LANG_$1_CONFIG($1)])dnl +])# _LT_LANG + + +# _LT_LANG_DEFAULT_CONFIG +# ----------------------- +m4_defun([_LT_LANG_DEFAULT_CONFIG], +[AC_PROVIDE_IFELSE([AC_PROG_CXX], + [LT_LANG(CXX)], + [m4_define([AC_PROG_CXX], defn([AC_PROG_CXX])[LT_LANG(CXX)])]) + +AC_PROVIDE_IFELSE([AC_PROG_F77], + [LT_LANG(F77)], + [m4_define([AC_PROG_F77], defn([AC_PROG_F77])[LT_LANG(F77)])]) + +AC_PROVIDE_IFELSE([AC_PROG_FC], + [LT_LANG(FC)], + [m4_define([AC_PROG_FC], defn([AC_PROG_FC])[LT_LANG(FC)])]) + +dnl The call to [A][M_PROG_GCJ] is quoted like that to stop aclocal +dnl pulling things in needlessly. 
+AC_PROVIDE_IFELSE([AC_PROG_GCJ], + [LT_LANG(GCJ)], + [AC_PROVIDE_IFELSE([A][M_PROG_GCJ], + [LT_LANG(GCJ)], + [AC_PROVIDE_IFELSE([LT_PROG_GCJ], + [LT_LANG(GCJ)], + [m4_ifdef([AC_PROG_GCJ], + [m4_define([AC_PROG_GCJ], defn([AC_PROG_GCJ])[LT_LANG(GCJ)])]) + m4_ifdef([A][M_PROG_GCJ], + [m4_define([A][M_PROG_GCJ], defn([A][M_PROG_GCJ])[LT_LANG(GCJ)])]) + m4_ifdef([LT_PROG_GCJ], + [m4_define([LT_PROG_GCJ], defn([LT_PROG_GCJ])[LT_LANG(GCJ)])])])])]) + +AC_PROVIDE_IFELSE([LT_PROG_RC], + [LT_LANG(RC)], + [m4_define([LT_PROG_RC], defn([LT_PROG_RC])[LT_LANG(RC)])]) +])# _LT_LANG_DEFAULT_CONFIG + +# Obsolete macros: +AU_DEFUN([AC_LIBTOOL_CXX], [LT_LANG(C++)]) +AU_DEFUN([AC_LIBTOOL_F77], [LT_LANG(Fortran 77)]) +AU_DEFUN([AC_LIBTOOL_FC], [LT_LANG(Fortran)]) +AU_DEFUN([AC_LIBTOOL_GCJ], [LT_LANG(Java)]) +dnl aclocal-1.4 backwards compatibility: +dnl AC_DEFUN([AC_LIBTOOL_CXX], []) +dnl AC_DEFUN([AC_LIBTOOL_F77], []) +dnl AC_DEFUN([AC_LIBTOOL_FC], []) +dnl AC_DEFUN([AC_LIBTOOL_GCJ], []) + + +# _LT_TAG_COMPILER +# ---------------- +m4_defun([_LT_TAG_COMPILER], +[AC_REQUIRE([AC_PROG_CC])dnl + +_LT_DECL([LTCC], [CC], [1], [A C compiler])dnl +_LT_DECL([LTCFLAGS], [CFLAGS], [1], [LTCC compiler flags])dnl +_LT_TAGDECL([CC], [compiler], [1], [A language specific compiler])dnl +_LT_TAGDECL([with_gcc], [GCC], [0], [Is the compiler the GNU compiler?])dnl + +# If no C compiler was specified, use CC. +LTCC=${LTCC-"$CC"} + +# If no C compiler flags were specified, use CFLAGS. +LTCFLAGS=${LTCFLAGS-"$CFLAGS"} + +# Allow CC to be a program name with arguments. +compiler=$CC +])# _LT_TAG_COMPILER + + +# _LT_COMPILER_BOILERPLATE +# ------------------------ +# Check for compiler boilerplate output or warnings with +# the simple compiler test code. +m4_defun([_LT_COMPILER_BOILERPLATE], +[m4_require([_LT_DECL_SED])dnl +ac_outfile=conftest.$ac_objext +echo "$lt_simple_compile_test_code" >conftest.$ac_ext +eval "$ac_compile" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err +_lt_compiler_boilerplate=`cat conftest.err` +$RM conftest* +])# _LT_COMPILER_BOILERPLATE + + +# _LT_LINKER_BOILERPLATE +# ---------------------- +# Check for linker boilerplate output or warnings with +# the simple link test code. +m4_defun([_LT_LINKER_BOILERPLATE], +[m4_require([_LT_DECL_SED])dnl +ac_outfile=conftest.$ac_objext +echo "$lt_simple_link_test_code" >conftest.$ac_ext +eval "$ac_link" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err +_lt_linker_boilerplate=`cat conftest.err` +$RM -r conftest* +])# _LT_LINKER_BOILERPLATE + +# _LT_REQUIRED_DARWIN_CHECKS +# ------------------------- +m4_defun_once([_LT_REQUIRED_DARWIN_CHECKS],[ + case $host_os in + rhapsody* | darwin*) + AC_CHECK_TOOL([DSYMUTIL], [dsymutil], [:]) + AC_CHECK_TOOL([NMEDIT], [nmedit], [:]) + AC_CHECK_TOOL([LIPO], [lipo], [:]) + AC_CHECK_TOOL([OTOOL], [otool], [:]) + AC_CHECK_TOOL([OTOOL64], [otool64], [:]) + _LT_DECL([], [DSYMUTIL], [1], + [Tool to manipulate archived DWARF debug symbol files on Mac OS X]) + _LT_DECL([], [NMEDIT], [1], + [Tool to change global to local symbols on Mac OS X]) + _LT_DECL([], [LIPO], [1], + [Tool to manipulate fat objects and archives on Mac OS X]) + _LT_DECL([], [OTOOL], [1], + [ldd/readelf like tool for Mach-O binaries on Mac OS X]) + _LT_DECL([], [OTOOL64], [1], + [ldd/readelf like tool for 64 bit Mach-O binaries on Mac OS X 10.4]) + + AC_CACHE_CHECK([for -single_module linker flag],[lt_cv_apple_cc_single_mod], + [lt_cv_apple_cc_single_mod=no + if test -z "${LT_MULTI_MODULE}"; then + # By default we will add the -single_module flag. 
You can override + # by either setting the environment variable LT_MULTI_MODULE + # non-empty at configure time, or by adding -multi_module to the + # link flags. + rm -rf libconftest.dylib* + echo "int foo(void){return 1;}" > conftest.c + echo "$LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \ +-dynamiclib -Wl,-single_module conftest.c" >&AS_MESSAGE_LOG_FD + $LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \ + -dynamiclib -Wl,-single_module conftest.c 2>conftest.err + _lt_result=$? + if test -f libconftest.dylib && test ! -s conftest.err && test $_lt_result = 0; then + lt_cv_apple_cc_single_mod=yes + else + cat conftest.err >&AS_MESSAGE_LOG_FD + fi + rm -rf libconftest.dylib* + rm -f conftest.* + fi]) + AC_CACHE_CHECK([for -exported_symbols_list linker flag], + [lt_cv_ld_exported_symbols_list], + [lt_cv_ld_exported_symbols_list=no + save_LDFLAGS=$LDFLAGS + echo "_main" > conftest.sym + LDFLAGS="$LDFLAGS -Wl,-exported_symbols_list,conftest.sym" + AC_LINK_IFELSE([AC_LANG_PROGRAM([],[])], + [lt_cv_ld_exported_symbols_list=yes], + [lt_cv_ld_exported_symbols_list=no]) + LDFLAGS="$save_LDFLAGS" + ]) + case $host_os in + rhapsody* | darwin1.[[012]]) + _lt_dar_allow_undefined='${wl}-undefined ${wl}suppress' ;; + darwin1.*) + _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;; + darwin*) # darwin 5.x on + # if running on 10.5 or later, the deployment target defaults + # to the OS version, if on x86, and 10.4, the deployment + # target defaults to 10.4. Don't you love it? + case ${MACOSX_DEPLOYMENT_TARGET-10.0},$host in + 10.0,*86*-darwin8*|10.0,*-darwin[[91]]*) + _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;; + 10.[[012]]*) + _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;; + 10.*) + _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;; + esac + ;; + esac + if test "$lt_cv_apple_cc_single_mod" = "yes"; then + _lt_dar_single_mod='$single_module' + fi + if test "$lt_cv_ld_exported_symbols_list" = "yes"; then + _lt_dar_export_syms=' ${wl}-exported_symbols_list,$output_objdir/${libname}-symbols.expsym' + else + _lt_dar_export_syms='~$NMEDIT -s $output_objdir/${libname}-symbols.expsym ${lib}' + fi + if test "$DSYMUTIL" != ":"; then + _lt_dsymutil='~$DSYMUTIL $lib || :' + else + _lt_dsymutil= + fi + ;; + esac +]) + + +# _LT_DARWIN_LINKER_FEATURES +# -------------------------- +# Checks for linker and compiler features on darwin +m4_defun([_LT_DARWIN_LINKER_FEATURES], +[ + m4_require([_LT_REQUIRED_DARWIN_CHECKS]) + _LT_TAGVAR(archive_cmds_need_lc, $1)=no + _LT_TAGVAR(hardcode_direct, $1)=no + _LT_TAGVAR(hardcode_automatic, $1)=yes + _LT_TAGVAR(hardcode_shlibpath_var, $1)=unsupported + _LT_TAGVAR(whole_archive_flag_spec, $1)='' + _LT_TAGVAR(link_all_deplibs, $1)=yes + _LT_TAGVAR(allow_undefined_flag, $1)="$_lt_dar_allow_undefined" + if test "$GCC" = "yes"; then + output_verbose_link_cmd=echo + _LT_TAGVAR(archive_cmds, $1)="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod${_lt_dsymutil}" + _LT_TAGVAR(module_cmds, $1)="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dsymutil}" + _LT_TAGVAR(archive_expsym_cmds, $1)="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring ${_lt_dar_single_mod}${_lt_dar_export_syms}${_lt_dsymutil}" + 
_LT_TAGVAR(module_expsym_cmds, $1)="sed -e 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dar_export_syms}${_lt_dsymutil}" + m4_if([$1], [CXX], +[ if test "$lt_cv_apple_cc_single_mod" != "yes"; then + _LT_TAGVAR(archive_cmds, $1)="\$CC -r -keep_private_externs -nostdlib -o \${lib}-master.o \$libobjs~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \${lib}-master.o \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring${_lt_dsymutil}" + _LT_TAGVAR(archive_expsym_cmds, $1)="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -r -keep_private_externs -nostdlib -o \${lib}-master.o \$libobjs~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \${lib}-master.o \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring${_lt_dar_export_syms}${_lt_dsymutil}" + fi +],[]) + else + _LT_TAGVAR(ld_shlibs, $1)=no + fi +]) + +# _LT_SYS_MODULE_PATH_AIX +# ----------------------- +# Links a minimal program and checks the executable +# for the system default hardcoded library path. In most cases, +# this is /usr/lib:/lib, but when the MPI compilers are used +# the location of the communication and MPI libs are included too. +# If we don't find anything, use the default library path according +# to the aix ld manual. +m4_defun([_LT_SYS_MODULE_PATH_AIX], +[m4_require([_LT_DECL_SED])dnl +AC_LINK_IFELSE(AC_LANG_PROGRAM,[ +lt_aix_libpath_sed=' + /Import File Strings/,/^$/ { + /^0/ { + s/^0 *\(.*\)$/\1/ + p + } + }' +aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` +# Check for a 64-bit object if we didn't find anything. +if test -z "$aix_libpath"; then + aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` +fi],[]) +if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi +])# _LT_SYS_MODULE_PATH_AIX + + +# _LT_SHELL_INIT(ARG) +# ------------------- +m4_define([_LT_SHELL_INIT], +[ifdef([AC_DIVERSION_NOTICE], + [AC_DIVERT_PUSH(AC_DIVERSION_NOTICE)], + [AC_DIVERT_PUSH(NOTICE)]) +$1 +AC_DIVERT_POP +])# _LT_SHELL_INIT + + +# _LT_PROG_ECHO_BACKSLASH +# ----------------------- +# Add some code to the start of the generated configure script which +# will find an echo command which doesn't interpret backslashes. +m4_defun([_LT_PROG_ECHO_BACKSLASH], +[_LT_SHELL_INIT([ +# Check that we are running under the correct shell. +SHELL=${CONFIG_SHELL-/bin/sh} + +case X$lt_ECHO in +X*--fallback-echo) + # Remove one level of quotation (which was required for Make). + ECHO=`echo "$lt_ECHO" | sed 's,\\\\\[$]\\[$]0,'[$]0','` + ;; +esac + +ECHO=${lt_ECHO-echo} +if test "X[$]1" = X--no-reexec; then + # Discard the --no-reexec flag, and continue. + shift +elif test "X[$]1" = X--fallback-echo; then + # Avoid inline document here, it may be left over + : +elif test "X`{ $ECHO '\t'; } 2>/dev/null`" = 'X\t' ; then + # Yippee, $ECHO works! + : +else + # Restart under the correct shell. + exec $SHELL "[$]0" --no-reexec ${1+"[$]@"} +fi + +if test "X[$]1" = X--fallback-echo; then + # used as fallback echo + shift + cat <<_LT_EOF +[$]* +_LT_EOF + exit 0 +fi + +# The HP-UX ksh and POSIX shell print the target directory to stdout +# if CDPATH is set. 
+(unset CDPATH) >/dev/null 2>&1 && unset CDPATH + +if test -z "$lt_ECHO"; then + if test "X${echo_test_string+set}" != Xset; then + # find a string as large as possible, as long as the shell can cope with it + for cmd in 'sed 50q "[$]0"' 'sed 20q "[$]0"' 'sed 10q "[$]0"' 'sed 2q "[$]0"' 'echo test'; do + # expected sizes: less than 2Kb, 1Kb, 512 bytes, 16 bytes, ... + if { echo_test_string=`eval $cmd`; } 2>/dev/null && + { test "X$echo_test_string" = "X$echo_test_string"; } 2>/dev/null + then + break + fi + done + fi + + if test "X`{ $ECHO '\t'; } 2>/dev/null`" = 'X\t' && + echo_testing_string=`{ $ECHO "$echo_test_string"; } 2>/dev/null` && + test "X$echo_testing_string" = "X$echo_test_string"; then + : + else + # The Solaris, AIX, and Digital Unix default echo programs unquote + # backslashes. This makes it impossible to quote backslashes using + # echo "$something" | sed 's/\\/\\\\/g' + # + # So, first we look for a working echo in the user's PATH. + + lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR + for dir in $PATH /usr/ucb; do + IFS="$lt_save_ifs" + if (test -f $dir/echo || test -f $dir/echo$ac_exeext) && + test "X`($dir/echo '\t') 2>/dev/null`" = 'X\t' && + echo_testing_string=`($dir/echo "$echo_test_string") 2>/dev/null` && + test "X$echo_testing_string" = "X$echo_test_string"; then + ECHO="$dir/echo" + break + fi + done + IFS="$lt_save_ifs" + + if test "X$ECHO" = Xecho; then + # We didn't find a better echo, so look for alternatives. + if test "X`{ print -r '\t'; } 2>/dev/null`" = 'X\t' && + echo_testing_string=`{ print -r "$echo_test_string"; } 2>/dev/null` && + test "X$echo_testing_string" = "X$echo_test_string"; then + # This shell has a builtin print -r that does the trick. + ECHO='print -r' + elif { test -f /bin/ksh || test -f /bin/ksh$ac_exeext; } && + test "X$CONFIG_SHELL" != X/bin/ksh; then + # If we have ksh, try running configure again with it. + ORIGINAL_CONFIG_SHELL=${CONFIG_SHELL-/bin/sh} + export ORIGINAL_CONFIG_SHELL + CONFIG_SHELL=/bin/ksh + export CONFIG_SHELL + exec $CONFIG_SHELL "[$]0" --no-reexec ${1+"[$]@"} + else + # Try using printf. + ECHO='printf %s\n' + if test "X`{ $ECHO '\t'; } 2>/dev/null`" = 'X\t' && + echo_testing_string=`{ $ECHO "$echo_test_string"; } 2>/dev/null` && + test "X$echo_testing_string" = "X$echo_test_string"; then + # Cool, printf works + : + elif echo_testing_string=`($ORIGINAL_CONFIG_SHELL "[$]0" --fallback-echo '\t') 2>/dev/null` && + test "X$echo_testing_string" = 'X\t' && + echo_testing_string=`($ORIGINAL_CONFIG_SHELL "[$]0" --fallback-echo "$echo_test_string") 2>/dev/null` && + test "X$echo_testing_string" = "X$echo_test_string"; then + CONFIG_SHELL=$ORIGINAL_CONFIG_SHELL + export CONFIG_SHELL + SHELL="$CONFIG_SHELL" + export SHELL + ECHO="$CONFIG_SHELL [$]0 --fallback-echo" + elif echo_testing_string=`($CONFIG_SHELL "[$]0" --fallback-echo '\t') 2>/dev/null` && + test "X$echo_testing_string" = 'X\t' && + echo_testing_string=`($CONFIG_SHELL "[$]0" --fallback-echo "$echo_test_string") 2>/dev/null` && + test "X$echo_testing_string" = "X$echo_test_string"; then + ECHO="$CONFIG_SHELL [$]0 --fallback-echo" + else + # maybe with a smaller string... 
+ prev=: + + for cmd in 'echo test' 'sed 2q "[$]0"' 'sed 10q "[$]0"' 'sed 20q "[$]0"' 'sed 50q "[$]0"'; do + if { test "X$echo_test_string" = "X`eval $cmd`"; } 2>/dev/null + then + break + fi + prev="$cmd" + done + + if test "$prev" != 'sed 50q "[$]0"'; then + echo_test_string=`eval $prev` + export echo_test_string + exec ${ORIGINAL_CONFIG_SHELL-${CONFIG_SHELL-/bin/sh}} "[$]0" ${1+"[$]@"} + else + # Oops. We lost completely, so just stick with echo. + ECHO=echo + fi + fi + fi + fi + fi +fi + +# Copy echo and quote the copy suitably for passing to libtool from +# the Makefile, instead of quoting the original, which is used later. +lt_ECHO=$ECHO +if test "X$lt_ECHO" = "X$CONFIG_SHELL [$]0 --fallback-echo"; then + lt_ECHO="$CONFIG_SHELL \\\$\[$]0 --fallback-echo" +fi + +AC_SUBST(lt_ECHO) +]) +_LT_DECL([], [SHELL], [1], [Shell to use when invoking shell scripts]) +_LT_DECL([], [ECHO], [1], + [An echo program that does not interpret backslashes]) +])# _LT_PROG_ECHO_BACKSLASH + + +# _LT_ENABLE_LOCK +# --------------- +m4_defun([_LT_ENABLE_LOCK], +[AC_ARG_ENABLE([libtool-lock], + [AS_HELP_STRING([--disable-libtool-lock], + [avoid locking (might break parallel builds)])]) +test "x$enable_libtool_lock" != xno && enable_libtool_lock=yes + +# Some flags need to be propagated to the compiler or linker for good +# libtool support. +case $host in +ia64-*-hpux*) + # Find out which ABI we are using. + echo 'int i;' > conftest.$ac_ext + if AC_TRY_EVAL(ac_compile); then + case `/usr/bin/file conftest.$ac_objext` in + *ELF-32*) + HPUX_IA64_MODE="32" + ;; + *ELF-64*) + HPUX_IA64_MODE="64" + ;; + esac + fi + rm -rf conftest* + ;; +*-*-irix6*) + # Find out which ABI we are using. + echo '[#]line __oline__ "configure"' > conftest.$ac_ext + if AC_TRY_EVAL(ac_compile); then + if test "$lt_cv_prog_gnu_ld" = yes; then + case `/usr/bin/file conftest.$ac_objext` in + *32-bit*) + LD="${LD-ld} -melf32bsmip" + ;; + *N32*) + LD="${LD-ld} -melf32bmipn32" + ;; + *64-bit*) + LD="${LD-ld} -melf64bmip" + ;; + esac + else + case `/usr/bin/file conftest.$ac_objext` in + *32-bit*) + LD="${LD-ld} -32" + ;; + *N32*) + LD="${LD-ld} -n32" + ;; + *64-bit*) + LD="${LD-ld} -64" + ;; + esac + fi + fi + rm -rf conftest* + ;; + +x86_64-*kfreebsd*-gnu|x86_64-*linux*|ppc*-*linux*|powerpc*-*linux*| \ +s390*-*linux*|s390*-*tpf*|sparc*-*linux*) + # Find out which ABI we are using. + echo 'int i;' > conftest.$ac_ext + if AC_TRY_EVAL(ac_compile); then + case `/usr/bin/file conftest.o` in + *32-bit*) + case $host in + x86_64-*kfreebsd*-gnu) + LD="${LD-ld} -m elf_i386_fbsd" + ;; + x86_64-*linux*) + LD="${LD-ld} -m elf_i386" + ;; + ppc64-*linux*|powerpc64-*linux*) + LD="${LD-ld} -m elf32ppclinux" + ;; + s390x-*linux*) + LD="${LD-ld} -m elf_s390" + ;; + sparc64-*linux*) + LD="${LD-ld} -m elf32_sparc" + ;; + esac + ;; + *64-bit*) + case $host in + x86_64-*kfreebsd*-gnu) + LD="${LD-ld} -m elf_x86_64_fbsd" + ;; + x86_64-*linux*) + LD="${LD-ld} -m elf_x86_64" + ;; + ppc*-*linux*|powerpc*-*linux*) + LD="${LD-ld} -m elf64ppc" + ;; + s390*-*linux*|s390*-*tpf*) + LD="${LD-ld} -m elf64_s390" + ;; + sparc*-*linux*) + LD="${LD-ld} -m elf64_sparc" + ;; + esac + ;; + esac + fi + rm -rf conftest* + ;; + +*-*-sco3.2v5*) + # On SCO OpenServer 5, we need -belf to get full-featured binaries. 
+ SAVE_CFLAGS="$CFLAGS" + CFLAGS="$CFLAGS -belf" + AC_CACHE_CHECK([whether the C compiler needs -belf], lt_cv_cc_needs_belf, + [AC_LANG_PUSH(C) + AC_LINK_IFELSE([AC_LANG_PROGRAM([[]],[[]])],[lt_cv_cc_needs_belf=yes],[lt_cv_cc_needs_belf=no]) + AC_LANG_POP]) + if test x"$lt_cv_cc_needs_belf" != x"yes"; then + # this is probably gcc 2.8.0, egcs 1.0 or newer; no need for -belf + CFLAGS="$SAVE_CFLAGS" + fi + ;; +sparc*-*solaris*) + # Find out which ABI we are using. + echo 'int i;' > conftest.$ac_ext + if AC_TRY_EVAL(ac_compile); then + case `/usr/bin/file conftest.o` in + *64-bit*) + case $lt_cv_prog_gnu_ld in + yes*) LD="${LD-ld} -m elf64_sparc" ;; + *) + if ${LD-ld} -64 -r -o conftest2.o conftest.o >/dev/null 2>&1; then + LD="${LD-ld} -64" + fi + ;; + esac + ;; + esac + fi + rm -rf conftest* + ;; +esac + +need_locks="$enable_libtool_lock" +])# _LT_ENABLE_LOCK + + +# _LT_CMD_OLD_ARCHIVE +# ------------------- +m4_defun([_LT_CMD_OLD_ARCHIVE], +[AC_CHECK_TOOL(AR, ar, false) +test -z "$AR" && AR=ar +test -z "$AR_FLAGS" && AR_FLAGS=cru +_LT_DECL([], [AR], [1], [The archiver]) +_LT_DECL([], [AR_FLAGS], [1]) + +AC_CHECK_TOOL(STRIP, strip, :) +test -z "$STRIP" && STRIP=: +_LT_DECL([], [STRIP], [1], [A symbol stripping program]) + +AC_CHECK_TOOL(RANLIB, ranlib, :) +test -z "$RANLIB" && RANLIB=: +_LT_DECL([], [RANLIB], [1], + [Commands used to install an old-style archive]) + +# Determine commands to create old-style static archives. +old_archive_cmds='$AR $AR_FLAGS $oldlib$oldobjs' +old_postinstall_cmds='chmod 644 $oldlib' +old_postuninstall_cmds= + +if test -n "$RANLIB"; then + case $host_os in + openbsd*) + old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB -t \$oldlib" + ;; + *) + old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB \$oldlib" + ;; + esac + old_archive_cmds="$old_archive_cmds~\$RANLIB \$oldlib" +fi +_LT_DECL([], [old_postinstall_cmds], [2]) +_LT_DECL([], [old_postuninstall_cmds], [2]) +_LT_TAGDECL([], [old_archive_cmds], [2], + [Commands used to build an old-style archive]) +])# _LT_CMD_OLD_ARCHIVE + + +# _LT_COMPILER_OPTION(MESSAGE, VARIABLE-NAME, FLAGS, +# [OUTPUT-FILE], [ACTION-SUCCESS], [ACTION-FAILURE]) +# ---------------------------------------------------------------- +# Check whether the given compiler option works +AC_DEFUN([_LT_COMPILER_OPTION], +[m4_require([_LT_FILEUTILS_DEFAULTS])dnl +m4_require([_LT_DECL_SED])dnl +AC_CACHE_CHECK([$1], [$2], + [$2=no + m4_if([$4], , [ac_outfile=conftest.$ac_objext], [ac_outfile=$4]) + echo "$lt_simple_compile_test_code" > conftest.$ac_ext + lt_compiler_flag="$3" + # Insert the option either (1) after the last *FLAGS variable, or + # (2) before a word containing "conftest.", or (3) at the end. + # Note that $ac_compile itself does not contain backslashes and begins + # with a dollar sign (not a hyphen), so the echo should work correctly. + # The option is referenced via a variable to avoid confusing sed. + lt_compile=`echo "$ac_compile" | $SED \ + -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ + -e 's: [[^ ]]*conftest\.: $lt_compiler_flag&:; t' \ + -e 's:$: $lt_compiler_flag:'` + (eval echo "\"\$as_me:__oline__: $lt_compile\"" >&AS_MESSAGE_LOG_FD) + (eval "$lt_compile" 2>conftest.err) + ac_status=$? + cat conftest.err >&AS_MESSAGE_LOG_FD + echo "$as_me:__oline__: \$? = $ac_status" >&AS_MESSAGE_LOG_FD + if (exit $ac_status) && test -s "$ac_outfile"; then + # The compiler can only warn and ignore the option if not recognized + # So say no if there are warnings other than the usual output. 
+ $ECHO "X$_lt_compiler_boilerplate" | $Xsed -e '/^$/d' >conftest.exp + $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 + if test ! -s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then + $2=yes + fi + fi + $RM conftest* +]) + +if test x"[$]$2" = xyes; then + m4_if([$5], , :, [$5]) +else + m4_if([$6], , :, [$6]) +fi +])# _LT_COMPILER_OPTION + +# Old name: +AU_ALIAS([AC_LIBTOOL_COMPILER_OPTION], [_LT_COMPILER_OPTION]) +dnl aclocal-1.4 backwards compatibility: +dnl AC_DEFUN([AC_LIBTOOL_COMPILER_OPTION], []) + + +# _LT_LINKER_OPTION(MESSAGE, VARIABLE-NAME, FLAGS, +# [ACTION-SUCCESS], [ACTION-FAILURE]) +# ---------------------------------------------------- +# Check whether the given linker option works +AC_DEFUN([_LT_LINKER_OPTION], +[m4_require([_LT_FILEUTILS_DEFAULTS])dnl +m4_require([_LT_DECL_SED])dnl +AC_CACHE_CHECK([$1], [$2], + [$2=no + save_LDFLAGS="$LDFLAGS" + LDFLAGS="$LDFLAGS $3" + echo "$lt_simple_link_test_code" > conftest.$ac_ext + if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then + # The linker can only warn and ignore the option if not recognized + # So say no if there are warnings + if test -s conftest.err; then + # Append any errors to the config.log. + cat conftest.err 1>&AS_MESSAGE_LOG_FD + $ECHO "X$_lt_linker_boilerplate" | $Xsed -e '/^$/d' > conftest.exp + $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 + if diff conftest.exp conftest.er2 >/dev/null; then + $2=yes + fi + else + $2=yes + fi + fi + $RM -r conftest* + LDFLAGS="$save_LDFLAGS" +]) + +if test x"[$]$2" = xyes; then + m4_if([$4], , :, [$4]) +else + m4_if([$5], , :, [$5]) +fi +])# _LT_LINKER_OPTION + +# Old name: +AU_ALIAS([AC_LIBTOOL_LINKER_OPTION], [_LT_LINKER_OPTION]) +dnl aclocal-1.4 backwards compatibility: +dnl AC_DEFUN([AC_LIBTOOL_LINKER_OPTION], []) + + +# LT_CMD_MAX_LEN +#--------------- +AC_DEFUN([LT_CMD_MAX_LEN], +[AC_REQUIRE([AC_CANONICAL_HOST])dnl +# find the maximum length of command line arguments +AC_MSG_CHECKING([the maximum length of command line arguments]) +AC_CACHE_VAL([lt_cv_sys_max_cmd_len], [dnl + i=0 + teststring="ABCD" + + case $build_os in + msdosdjgpp*) + # On DJGPP, this test can blow up pretty badly due to problems in libc + # (any single argument exceeding 2000 bytes causes a buffer overrun + # during glob expansion). Even if it were fixed, the result of this + # check would be larger than it should be. + lt_cv_sys_max_cmd_len=12288; # 12K is about right + ;; + + gnu*) + # Under GNU Hurd, this test is not required because there is + # no limit to the length of command line arguments. + # Libtool will interpret -1 as no limit whatsoever + lt_cv_sys_max_cmd_len=-1; + ;; + + cygwin* | mingw*) + # On Win9x/ME, this test blows up -- it succeeds, but takes + # about 5 minutes as the teststring grows exponentially. + # Worse, since 9x/ME are not pre-emptively multitasking, + # you end up with a "frozen" computer, even though with patience + # the test eventually succeeds (with a max line length of 256k). + # Instead, let's just punt: use the minimum linelength reported by + # all of the supported platforms: 8192 (on NT/2K/XP). + lt_cv_sys_max_cmd_len=8192; + ;; + + amigaos*) + # On AmigaOS with pdksh, this test takes hours, literally. + # So we just punt and use a minimum line length of 8192. + lt_cv_sys_max_cmd_len=8192; + ;; + + netbsd* | freebsd* | openbsd* | darwin* | dragonfly*) + # This has been around since 386BSD, at least. Likely further. 
+ if test -x /sbin/sysctl; then + lt_cv_sys_max_cmd_len=`/sbin/sysctl -n kern.argmax` + elif test -x /usr/sbin/sysctl; then + lt_cv_sys_max_cmd_len=`/usr/sbin/sysctl -n kern.argmax` + else + lt_cv_sys_max_cmd_len=65536 # usable default for all BSDs + fi + # And add a safety zone + lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4` + lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3` + ;; + + interix*) + # We know the value 262144 and hardcode it with a safety zone (like BSD) + lt_cv_sys_max_cmd_len=196608 + ;; + + osf*) + # Dr. Hans Ekkehard Plesser reports seeing a kernel panic running configure + # due to this test when exec_disable_arg_limit is 1 on Tru64. It is not + # nice to cause kernel panics so lets avoid the loop below. + # First set a reasonable default. + lt_cv_sys_max_cmd_len=16384 + # + if test -x /sbin/sysconfig; then + case `/sbin/sysconfig -q proc exec_disable_arg_limit` in + *1*) lt_cv_sys_max_cmd_len=-1 ;; + esac + fi + ;; + sco3.2v5*) + lt_cv_sys_max_cmd_len=102400 + ;; + sysv5* | sco5v6* | sysv4.2uw2*) + kargmax=`grep ARG_MAX /etc/conf/cf.d/stune 2>/dev/null` + if test -n "$kargmax"; then + lt_cv_sys_max_cmd_len=`echo $kargmax | sed 's/.*[[ ]]//'` + else + lt_cv_sys_max_cmd_len=32768 + fi + ;; + *) + lt_cv_sys_max_cmd_len=`(getconf ARG_MAX) 2> /dev/null` + if test -n "$lt_cv_sys_max_cmd_len"; then + lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4` + lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3` + else + # Make teststring a little bigger before we do anything with it. + # a 1K string should be a reasonable start. + for i in 1 2 3 4 5 6 7 8 ; do + teststring=$teststring$teststring + done + SHELL=${SHELL-${CONFIG_SHELL-/bin/sh}} + # If test is not a shell built-in, we'll probably end up computing a + # maximum length that is only half of the actual maximum length, but + # we can't tell. + while { test "X"`$SHELL [$]0 --fallback-echo "X$teststring$teststring" 2>/dev/null` \ + = "XX$teststring$teststring"; } >/dev/null 2>&1 && + test $i != 17 # 1/2 MB should be enough + do + i=`expr $i + 1` + teststring=$teststring$teststring + done + # Only check the string length outside the loop. + lt_cv_sys_max_cmd_len=`expr "X$teststring" : ".*" 2>&1` + teststring= + # Add a significant safety factor because C++ compilers can tack on + # massive amounts of additional arguments before passing them to the + # linker. It appears as though 1/2 is a usable value. 
+ lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 2` + fi + ;; + esac +]) +if test -n $lt_cv_sys_max_cmd_len ; then + AC_MSG_RESULT($lt_cv_sys_max_cmd_len) +else + AC_MSG_RESULT(none) +fi +max_cmd_len=$lt_cv_sys_max_cmd_len +_LT_DECL([], [max_cmd_len], [0], + [What is the maximum length of a command?]) +])# LT_CMD_MAX_LEN + +# Old name: +AU_ALIAS([AC_LIBTOOL_SYS_MAX_CMD_LEN], [LT_CMD_MAX_LEN]) +dnl aclocal-1.4 backwards compatibility: +dnl AC_DEFUN([AC_LIBTOOL_SYS_MAX_CMD_LEN], []) + + +# _LT_HEADER_DLFCN +# ---------------- +m4_defun([_LT_HEADER_DLFCN], +[AC_CHECK_HEADERS([dlfcn.h], [], [], [AC_INCLUDES_DEFAULT])dnl +])# _LT_HEADER_DLFCN + + +# _LT_TRY_DLOPEN_SELF (ACTION-IF-TRUE, ACTION-IF-TRUE-W-USCORE, +# ACTION-IF-FALSE, ACTION-IF-CROSS-COMPILING) +# ---------------------------------------------------------------- +m4_defun([_LT_TRY_DLOPEN_SELF], +[m4_require([_LT_HEADER_DLFCN])dnl +if test "$cross_compiling" = yes; then : + [$4] +else + lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2 + lt_status=$lt_dlunknown + cat > conftest.$ac_ext <<_LT_EOF +[#line __oline__ "configure" +#include "confdefs.h" + +#if HAVE_DLFCN_H +#include +#endif + +#include + +#ifdef RTLD_GLOBAL +# define LT_DLGLOBAL RTLD_GLOBAL +#else +# ifdef DL_GLOBAL +# define LT_DLGLOBAL DL_GLOBAL +# else +# define LT_DLGLOBAL 0 +# endif +#endif + +/* We may have to define LT_DLLAZY_OR_NOW in the command line if we + find out it does not work in some platform. */ +#ifndef LT_DLLAZY_OR_NOW +# ifdef RTLD_LAZY +# define LT_DLLAZY_OR_NOW RTLD_LAZY +# else +# ifdef DL_LAZY +# define LT_DLLAZY_OR_NOW DL_LAZY +# else +# ifdef RTLD_NOW +# define LT_DLLAZY_OR_NOW RTLD_NOW +# else +# ifdef DL_NOW +# define LT_DLLAZY_OR_NOW DL_NOW +# else +# define LT_DLLAZY_OR_NOW 0 +# endif +# endif +# endif +# endif +#endif + +#ifdef __cplusplus +extern "C" void exit (int); +#endif + +void fnord() { int i=42;} +int main () +{ + void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); + int status = $lt_dlunknown; + + if (self) + { + if (dlsym (self,"fnord")) status = $lt_dlno_uscore; + else if (dlsym( self,"_fnord")) status = $lt_dlneed_uscore; + /* dlclose (self); */ + } + else + puts (dlerror ()); + + exit (status); +}] +_LT_EOF + if AC_TRY_EVAL(ac_link) && test -s conftest${ac_exeext} 2>/dev/null; then + (./conftest; exit; ) >&AS_MESSAGE_LOG_FD 2>/dev/null + lt_status=$? 
+ case x$lt_status in + x$lt_dlno_uscore) $1 ;; + x$lt_dlneed_uscore) $2 ;; + x$lt_dlunknown|x*) $3 ;; + esac + else : + # compilation failed + $3 + fi +fi +rm -fr conftest* +])# _LT_TRY_DLOPEN_SELF + + +# LT_SYS_DLOPEN_SELF +# ------------------ +AC_DEFUN([LT_SYS_DLOPEN_SELF], +[m4_require([_LT_HEADER_DLFCN])dnl +if test "x$enable_dlopen" != xyes; then + enable_dlopen=unknown + enable_dlopen_self=unknown + enable_dlopen_self_static=unknown +else + lt_cv_dlopen=no + lt_cv_dlopen_libs= + + case $host_os in + beos*) + lt_cv_dlopen="load_add_on" + lt_cv_dlopen_libs= + lt_cv_dlopen_self=yes + ;; + + mingw* | pw32*) + lt_cv_dlopen="LoadLibrary" + lt_cv_dlopen_libs= + ;; + + cygwin*) + lt_cv_dlopen="dlopen" + lt_cv_dlopen_libs= + ;; + + darwin*) + # if libdl is installed we need to link against it + AC_CHECK_LIB([dl], [dlopen], + [lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl"],[ + lt_cv_dlopen="dyld" + lt_cv_dlopen_libs= + lt_cv_dlopen_self=yes + ]) + ;; + + *) + AC_CHECK_FUNC([shl_load], + [lt_cv_dlopen="shl_load"], + [AC_CHECK_LIB([dld], [shl_load], + [lt_cv_dlopen="shl_load" lt_cv_dlopen_libs="-ldld"], + [AC_CHECK_FUNC([dlopen], + [lt_cv_dlopen="dlopen"], + [AC_CHECK_LIB([dl], [dlopen], + [lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl"], + [AC_CHECK_LIB([svld], [dlopen], + [lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-lsvld"], + [AC_CHECK_LIB([dld], [dld_link], + [lt_cv_dlopen="dld_link" lt_cv_dlopen_libs="-ldld"]) + ]) + ]) + ]) + ]) + ]) + ;; + esac + + if test "x$lt_cv_dlopen" != xno; then + enable_dlopen=yes + else + enable_dlopen=no + fi + + case $lt_cv_dlopen in + dlopen) + save_CPPFLAGS="$CPPFLAGS" + test "x$ac_cv_header_dlfcn_h" = xyes && CPPFLAGS="$CPPFLAGS -DHAVE_DLFCN_H" + + save_LDFLAGS="$LDFLAGS" + wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $export_dynamic_flag_spec\" + + save_LIBS="$LIBS" + LIBS="$lt_cv_dlopen_libs $LIBS" + + AC_CACHE_CHECK([whether a program can dlopen itself], + lt_cv_dlopen_self, [dnl + _LT_TRY_DLOPEN_SELF( + lt_cv_dlopen_self=yes, lt_cv_dlopen_self=yes, + lt_cv_dlopen_self=no, lt_cv_dlopen_self=cross) + ]) + + if test "x$lt_cv_dlopen_self" = xyes; then + wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $lt_prog_compiler_static\" + AC_CACHE_CHECK([whether a statically linked program can dlopen itself], + lt_cv_dlopen_self_static, [dnl + _LT_TRY_DLOPEN_SELF( + lt_cv_dlopen_self_static=yes, lt_cv_dlopen_self_static=yes, + lt_cv_dlopen_self_static=no, lt_cv_dlopen_self_static=cross) + ]) + fi + + CPPFLAGS="$save_CPPFLAGS" + LDFLAGS="$save_LDFLAGS" + LIBS="$save_LIBS" + ;; + esac + + case $lt_cv_dlopen_self in + yes|no) enable_dlopen_self=$lt_cv_dlopen_self ;; + *) enable_dlopen_self=unknown ;; + esac + + case $lt_cv_dlopen_self_static in + yes|no) enable_dlopen_self_static=$lt_cv_dlopen_self_static ;; + *) enable_dlopen_self_static=unknown ;; + esac +fi +_LT_DECL([dlopen_support], [enable_dlopen], [0], + [Whether dlopen is supported]) +_LT_DECL([dlopen_self], [enable_dlopen_self], [0], + [Whether dlopen of programs is supported]) +_LT_DECL([dlopen_self_static], [enable_dlopen_self_static], [0], + [Whether dlopen of statically linked programs is supported]) +])# LT_SYS_DLOPEN_SELF + +# Old name: +AU_ALIAS([AC_LIBTOOL_DLOPEN_SELF], [LT_SYS_DLOPEN_SELF]) +dnl aclocal-1.4 backwards compatibility: +dnl AC_DEFUN([AC_LIBTOOL_DLOPEN_SELF], []) + + +# _LT_COMPILER_C_O([TAGNAME]) +# --------------------------- +# Check to see if options -c and -o are simultaneously supported by compiler. +# This macro does not hard code the compiler like AC_PROG_CC_C_O. 
+m4_defun([_LT_COMPILER_C_O], +[m4_require([_LT_DECL_SED])dnl +m4_require([_LT_FILEUTILS_DEFAULTS])dnl +m4_require([_LT_TAG_COMPILER])dnl +AC_CACHE_CHECK([if $compiler supports -c -o file.$ac_objext], + [_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)], + [_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)=no + $RM -r conftest 2>/dev/null + mkdir conftest + cd conftest + mkdir out + echo "$lt_simple_compile_test_code" > conftest.$ac_ext + + lt_compiler_flag="-o out/conftest2.$ac_objext" + # Insert the option either (1) after the last *FLAGS variable, or + # (2) before a word containing "conftest.", or (3) at the end. + # Note that $ac_compile itself does not contain backslashes and begins + # with a dollar sign (not a hyphen), so the echo should work correctly. + lt_compile=`echo "$ac_compile" | $SED \ + -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ + -e 's: [[^ ]]*conftest\.: $lt_compiler_flag&:; t' \ + -e 's:$: $lt_compiler_flag:'` + (eval echo "\"\$as_me:__oline__: $lt_compile\"" >&AS_MESSAGE_LOG_FD) + (eval "$lt_compile" 2>out/conftest.err) + ac_status=$? + cat out/conftest.err >&AS_MESSAGE_LOG_FD + echo "$as_me:__oline__: \$? = $ac_status" >&AS_MESSAGE_LOG_FD + if (exit $ac_status) && test -s out/conftest2.$ac_objext + then + # The compiler can only warn and ignore the option if not recognized + # So say no if there are warnings + $ECHO "X$_lt_compiler_boilerplate" | $Xsed -e '/^$/d' > out/conftest.exp + $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2 + if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then + _LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)=yes + fi + fi + chmod u+w . 2>&AS_MESSAGE_LOG_FD + $RM conftest* + # SGI C++ compiler will create directory out/ii_files/ for + # template instantiation + test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files + $RM out/* && rmdir out + cd .. + $RM -r conftest + $RM conftest* +]) +_LT_TAGDECL([compiler_c_o], [lt_cv_prog_compiler_c_o], [1], + [Does compiler simultaneously support -c and -o options?]) +])# _LT_COMPILER_C_O + + +# _LT_COMPILER_FILE_LOCKS([TAGNAME]) +# ---------------------------------- +# Check to see if we can do hard links to lock some files if needed +m4_defun([_LT_COMPILER_FILE_LOCKS], +[m4_require([_LT_ENABLE_LOCK])dnl +m4_require([_LT_FILEUTILS_DEFAULTS])dnl +_LT_COMPILER_C_O([$1]) + +hard_links="nottested" +if test "$_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)" = no && test "$need_locks" != no; then + # do not overwrite the value of need_locks provided by the user + AC_MSG_CHECKING([if we can lock with hard links]) + hard_links=yes + $RM conftest* + ln conftest.a conftest.b 2>/dev/null && hard_links=no + touch conftest.a + ln conftest.a conftest.b 2>&5 || hard_links=no + ln conftest.a conftest.b 2>/dev/null && hard_links=no + AC_MSG_RESULT([$hard_links]) + if test "$hard_links" = no; then + AC_MSG_WARN([`$CC' does not support `-c -o', so `make -j' may be unsafe]) + need_locks=warn + fi +else + need_locks=no +fi +_LT_DECL([], [need_locks], [1], [Must we lock files when doing compilation?]) +])# _LT_COMPILER_FILE_LOCKS + + +# _LT_CHECK_OBJDIR +# ---------------- +m4_defun([_LT_CHECK_OBJDIR], +[AC_CACHE_CHECK([for objdir], [lt_cv_objdir], +[rm -f .libs 2>/dev/null +mkdir .libs 2>/dev/null +if test -d .libs; then + lt_cv_objdir=.libs +else + # MS-DOS does not allow filenames that begin with a dot. 
+ lt_cv_objdir=_libs +fi +rmdir .libs 2>/dev/null]) +objdir=$lt_cv_objdir +_LT_DECL([], [objdir], [0], + [The name of the directory that contains temporary libtool files])dnl +m4_pattern_allow([LT_OBJDIR])dnl +AC_DEFINE_UNQUOTED(LT_OBJDIR, "$lt_cv_objdir/", + [Define to the sub-directory in which libtool stores uninstalled libraries.]) +])# _LT_CHECK_OBJDIR + + +# _LT_LINKER_HARDCODE_LIBPATH([TAGNAME]) +# -------------------------------------- +# Check hardcoding attributes. +m4_defun([_LT_LINKER_HARDCODE_LIBPATH], +[AC_MSG_CHECKING([how to hardcode library paths into programs]) +_LT_TAGVAR(hardcode_action, $1)= +if test -n "$_LT_TAGVAR(hardcode_libdir_flag_spec, $1)" || + test -n "$_LT_TAGVAR(runpath_var, $1)" || + test "X$_LT_TAGVAR(hardcode_automatic, $1)" = "Xyes" ; then + + # We can hardcode non-existent directories. + if test "$_LT_TAGVAR(hardcode_direct, $1)" != no && + # If the only mechanism to avoid hardcoding is shlibpath_var, we + # have to relink, otherwise we might link with an installed library + # when we should be linking with a yet-to-be-installed one + ## test "$_LT_TAGVAR(hardcode_shlibpath_var, $1)" != no && + test "$_LT_TAGVAR(hardcode_minus_L, $1)" != no; then + # Linking always hardcodes the temporary library directory. + _LT_TAGVAR(hardcode_action, $1)=relink + else + # We can link without hardcoding, and we can hardcode nonexisting dirs. + _LT_TAGVAR(hardcode_action, $1)=immediate + fi +else + # We cannot hardcode anything, or else we can only hardcode existing + # directories. + _LT_TAGVAR(hardcode_action, $1)=unsupported +fi +AC_MSG_RESULT([$_LT_TAGVAR(hardcode_action, $1)]) + +if test "$_LT_TAGVAR(hardcode_action, $1)" = relink || + test "$_LT_TAGVAR(inherit_rpath, $1)" = yes; then + # Fast installation is not supported + enable_fast_install=no +elif test "$shlibpath_overrides_runpath" = yes || + test "$enable_shared" = no; then + # Fast installation is not necessary + enable_fast_install=needless +fi +_LT_TAGDECL([], [hardcode_action], [0], + [How to hardcode a shared library path into an executable]) +])# _LT_LINKER_HARDCODE_LIBPATH + + +# _LT_CMD_STRIPLIB +# ---------------- +m4_defun([_LT_CMD_STRIPLIB], +[m4_require([_LT_DECL_EGREP]) +striplib= +old_striplib= +AC_MSG_CHECKING([whether stripping libraries is possible]) +if test -n "$STRIP" && $STRIP -V 2>&1 | $GREP "GNU strip" >/dev/null; then + test -z "$old_striplib" && old_striplib="$STRIP --strip-debug" + test -z "$striplib" && striplib="$STRIP --strip-unneeded" + AC_MSG_RESULT([yes]) +else +# FIXME - insert some real tests, host_os isn't really good enough + case $host_os in + darwin*) + if test -n "$STRIP" ; then + striplib="$STRIP -x" + old_striplib="$STRIP -S" + AC_MSG_RESULT([yes]) + else + AC_MSG_RESULT([no]) + fi + ;; + *) + AC_MSG_RESULT([no]) + ;; + esac +fi +_LT_DECL([], [old_striplib], [1], [Commands to strip libraries]) +_LT_DECL([], [striplib], [1]) +])# _LT_CMD_STRIPLIB + + +# _LT_SYS_DYNAMIC_LINKER([TAG]) +# ----------------------------- +# PORTME Fill in your ld.so characteristics +m4_defun([_LT_SYS_DYNAMIC_LINKER], +[AC_REQUIRE([AC_CANONICAL_HOST])dnl +m4_require([_LT_DECL_EGREP])dnl +m4_require([_LT_FILEUTILS_DEFAULTS])dnl +m4_require([_LT_DECL_SED])dnl +AC_MSG_CHECKING([dynamic linker characteristics]) +m4_if([$1], + [], [ +if test "$GCC" = yes; then + case $host_os in + darwin*) lt_awk_arg="/^libraries:/,/LR/" ;; + *) lt_awk_arg="/^libraries:/" ;; + esac + lt_search_path_spec=`$CC -print-search-dirs | awk $lt_awk_arg | $SED -e "s/^libraries://" -e "s,=/,/,g"` + if $ECHO 
"$lt_search_path_spec" | $GREP ';' >/dev/null ; then + # if the path contains ";" then we assume it to be the separator + # otherwise default to the standard path separator (i.e. ":") - it is + # assumed that no part of a normal pathname contains ";" but that should + # okay in the real world where ";" in dirpaths is itself problematic. + lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED -e 's/;/ /g'` + else + lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` + fi + # Ok, now we have the path, separated by spaces, we can step through it + # and add multilib dir if necessary. + lt_tmp_lt_search_path_spec= + lt_multi_os_dir=`$CC $CPPFLAGS $CFLAGS $LDFLAGS -print-multi-os-directory 2>/dev/null` + for lt_sys_path in $lt_search_path_spec; do + if test -d "$lt_sys_path/$lt_multi_os_dir"; then + lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path/$lt_multi_os_dir" + else + test -d "$lt_sys_path" && \ + lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path" + fi + done + lt_search_path_spec=`$ECHO $lt_tmp_lt_search_path_spec | awk ' +BEGIN {RS=" "; FS="/|\n";} { + lt_foo=""; + lt_count=0; + for (lt_i = NF; lt_i > 0; lt_i--) { + if ($lt_i != "" && $lt_i != ".") { + if ($lt_i == "..") { + lt_count++; + } else { + if (lt_count == 0) { + lt_foo="/" $lt_i lt_foo; + } else { + lt_count--; + } + } + } + } + if (lt_foo != "") { lt_freq[[lt_foo]]++; } + if (lt_freq[[lt_foo]] == 1) { print lt_foo; } +}'` + sys_lib_search_path_spec=`$ECHO $lt_search_path_spec` +else + sys_lib_search_path_spec="/lib /usr/lib /usr/local/lib" +fi]) +library_names_spec= +libname_spec='lib$name' +soname_spec= +shrext_cmds=".so" +postinstall_cmds= +postuninstall_cmds= +finish_cmds= +finish_eval= +shlibpath_var= +shlibpath_overrides_runpath=unknown +version_type=none +dynamic_linker="$host_os ld.so" +sys_lib_dlsearch_path_spec="/lib /usr/lib" +need_lib_prefix=unknown +hardcode_into_libs=no + +# when you set need_version to no, make sure it does not cause -set_version +# flags to be left without arguments +need_version=unknown + +case $host_os in +aix3*) + version_type=linux + library_names_spec='${libname}${release}${shared_ext}$versuffix $libname.a' + shlibpath_var=LIBPATH + + # AIX 3 has no versioning support, so we append a major version to the name. + soname_spec='${libname}${release}${shared_ext}$major' + ;; + +aix[[4-9]]*) + version_type=linux + need_lib_prefix=no + need_version=no + hardcode_into_libs=yes + if test "$host_cpu" = ia64; then + # AIX 5 supports IA64 + library_names_spec='${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext}$versuffix $libname${shared_ext}' + shlibpath_var=LD_LIBRARY_PATH + else + # With GCC up to 2.95.x, collect2 would create an import file + # for dependence libraries. The import file would start with + # the line `#! .'. This would cause the generated library to + # depend on `.', always an invalid library. This was fixed in + # development snapshots of GCC prior to 3.0. + case $host_os in + aix4 | aix4.[[01]] | aix4.[[01]].*) + if { echo '#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 97)' + echo ' yes ' + echo '#endif'; } | ${CC} -E - | $GREP yes > /dev/null; then + : + else + can_build_shared=no + fi + ;; + esac + # AIX (on Power*) has no versioning support, so currently we can not hardcode correct + # soname into executable. Probably we can add versioning support to + # collect2, so additional links can be useful in future. 
+ if test "$aix_use_runtimelinking" = yes; then + # If using run time linking (on AIX 4.2 or later) use lib.so + # instead of lib.a to let people know that these are not + # typical AIX shared libraries. + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' + else + # We preserve .a as extension for shared libraries through AIX4.2 + # and later when we are not doing run time linking. + library_names_spec='${libname}${release}.a $libname.a' + soname_spec='${libname}${release}${shared_ext}$major' + fi + shlibpath_var=LIBPATH + fi + ;; + +amigaos*) + case $host_cpu in + powerpc) + # Since July 2007 AmigaOS4 officially supports .so libraries. + # When compiling the executable, add -use-dynld -Lsobjs: to the compileline. + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' + ;; + m68k) + library_names_spec='$libname.ixlibrary $libname.a' + # Create ${libname}_ixlibrary.a entries in /sys/libs. + finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`$ECHO "X$lib" | $Xsed -e '\''s%^.*/\([[^/]]*\)\.ixlibrary$%\1%'\''`; test $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done' + ;; + esac + ;; + +beos*) + library_names_spec='${libname}${shared_ext}' + dynamic_linker="$host_os ld.so" + shlibpath_var=LIBRARY_PATH + ;; + +bsdi[[45]]*) + version_type=linux + need_version=no + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' + soname_spec='${libname}${release}${shared_ext}$major' + finish_cmds='PATH="\$PATH:/sbin" ldconfig $libdir' + shlibpath_var=LD_LIBRARY_PATH + sys_lib_search_path_spec="/shlib /usr/lib /usr/X11/lib /usr/contrib/lib /lib /usr/local/lib" + sys_lib_dlsearch_path_spec="/shlib /usr/lib /usr/local/lib" + # the default ld.so.conf also contains /usr/contrib/lib and + # /usr/X11R6/lib (/usr/X11 is a link to /usr/X11R6), but let us allow + # libtool to hard-code these into programs + ;; + +cygwin* | mingw* | pw32*) + version_type=windows + shrext_cmds=".dll" + need_version=no + need_lib_prefix=no + + case $GCC,$host_os in + yes,cygwin* | yes,mingw* | yes,pw32*) + library_names_spec='$libname.dll.a' + # DLL is installed to $(libdir)/../bin by postinstall_cmds + postinstall_cmds='base_file=`basename \${file}`~ + dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ + dldir=$destdir/`dirname \$dlpath`~ + test -d \$dldir || mkdir -p \$dldir~ + $install_prog $dir/$dlname \$dldir/$dlname~ + chmod a+x \$dldir/$dlname~ + if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then + eval '\''$striplib \$dldir/$dlname'\'' || exit \$?; + fi' + postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. 
$file; echo \$dlname'\''`~ + dlpath=$dir/\$dldll~ + $RM \$dlpath' + shlibpath_overrides_runpath=yes + + case $host_os in + cygwin*) + # Cygwin DLLs use 'cyg' prefix rather than 'lib' + soname_spec='`echo ${libname} | sed -e 's/^lib/cyg/'``echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}' + sys_lib_search_path_spec="/usr/lib /lib/w32api /lib /usr/local/lib" + ;; + mingw*) + # MinGW DLLs use traditional 'lib' prefix + soname_spec='${libname}`echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}' + sys_lib_search_path_spec=`$CC -print-search-dirs | $GREP "^libraries:" | $SED -e "s/^libraries://" -e "s,=/,/,g"` + if $ECHO "$sys_lib_search_path_spec" | [$GREP ';[c-zC-Z]:/' >/dev/null]; then + # It is most probably a Windows format PATH printed by + # mingw gcc, but we are running on Cygwin. Gcc prints its search + # path with ; separators, and with drive letters. We can handle the + # drive letters (cygwin fileutils understands them), so leave them, + # especially as we might pass files found there to a mingw objdump, + # which wouldn't understand a cygwinified path. Ahh. + sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` + else + sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` + fi + ;; + pw32*) + # pw32 DLLs use 'pw' prefix rather than 'lib' + library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}' + ;; + esac + ;; + + *) + library_names_spec='${libname}`echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext} $libname.lib' + ;; + esac + dynamic_linker='Win32 ld.exe' + # FIXME: first we should search . and the directory the executable is in + shlibpath_var=PATH + ;; + +darwin* | rhapsody*) + dynamic_linker="$host_os dyld" + version_type=darwin + need_lib_prefix=no + need_version=no + library_names_spec='${libname}${release}${major}$shared_ext ${libname}$shared_ext' + soname_spec='${libname}${release}${major}$shared_ext' + shlibpath_overrides_runpath=yes + shlibpath_var=DYLD_LIBRARY_PATH + shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`' +m4_if([$1], [],[ + sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/local/lib"]) + sys_lib_dlsearch_path_spec='/usr/local/lib /lib /usr/lib' + ;; + +dgux*) + version_type=linux + need_lib_prefix=no + need_version=no + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname$shared_ext' + soname_spec='${libname}${release}${shared_ext}$major' + shlibpath_var=LD_LIBRARY_PATH + ;; + +freebsd1*) + dynamic_linker=no + ;; + +freebsd* | dragonfly*) + # DragonFly does not have aout. When/if they implement a new + # versioning mechanism, adjust this. 
+ if test -x /usr/bin/objformat; then + objformat=`/usr/bin/objformat` + else + case $host_os in + freebsd[[123]]*) objformat=aout ;; + *) objformat=elf ;; + esac + fi + version_type=freebsd-$objformat + case $version_type in + freebsd-elf*) + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' + need_version=no + need_lib_prefix=no + ;; + freebsd-*) + library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix' + need_version=yes + ;; + esac + shlibpath_var=LD_LIBRARY_PATH + case $host_os in + freebsd2*) + shlibpath_overrides_runpath=yes + ;; + freebsd3.[[01]]* | freebsdelf3.[[01]]*) + shlibpath_overrides_runpath=yes + hardcode_into_libs=yes + ;; + freebsd3.[[2-9]]* | freebsdelf3.[[2-9]]* | \ + freebsd4.[[0-5]] | freebsdelf4.[[0-5]] | freebsd4.1.1 | freebsdelf4.1.1) + shlibpath_overrides_runpath=no + hardcode_into_libs=yes + ;; + *) # from 4.6 on, and DragonFly + shlibpath_overrides_runpath=yes + hardcode_into_libs=yes + ;; + esac + ;; + +gnu*) + version_type=linux + need_lib_prefix=no + need_version=no + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}' + soname_spec='${libname}${release}${shared_ext}$major' + shlibpath_var=LD_LIBRARY_PATH + hardcode_into_libs=yes + ;; + +hpux9* | hpux10* | hpux11*) + # Give a soname corresponding to the major version so that dld.sl refuses to + # link against other versions. + version_type=sunos + need_lib_prefix=no + need_version=no + case $host_cpu in + ia64*) + shrext_cmds='.so' + hardcode_into_libs=yes + dynamic_linker="$host_os dld.so" + shlibpath_var=LD_LIBRARY_PATH + shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' + soname_spec='${libname}${release}${shared_ext}$major' + if test "X$HPUX_IA64_MODE" = X32; then + sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib" + else + sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64" + fi + sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec + ;; + hppa*64*) + shrext_cmds='.sl' + hardcode_into_libs=yes + dynamic_linker="$host_os dld.sl" + shlibpath_var=LD_LIBRARY_PATH # How should we handle SHLIB_PATH + shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' + soname_spec='${libname}${release}${shared_ext}$major' + sys_lib_search_path_spec="/usr/lib/pa20_64 /usr/ccs/lib/pa20_64" + sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec + ;; + *) + shrext_cmds='.sl' + dynamic_linker="$host_os dld.sl" + shlibpath_var=SHLIB_PATH + shlibpath_overrides_runpath=no # +s is required to enable SHLIB_PATH + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' + soname_spec='${libname}${release}${shared_ext}$major' + ;; + esac + # HP-UX runs *really* slowly unless shared libraries are mode 555. 
+ postinstall_cmds='chmod 555 $lib' + ;; + +interix[[3-9]]*) + version_type=linux + need_lib_prefix=no + need_version=no + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' + soname_spec='${libname}${release}${shared_ext}$major' + dynamic_linker='Interix 3.x ld.so.1 (PE, like ELF)' + shlibpath_var=LD_LIBRARY_PATH + shlibpath_overrides_runpath=no + hardcode_into_libs=yes + ;; + +irix5* | irix6* | nonstopux*) + case $host_os in + nonstopux*) version_type=nonstopux ;; + *) + if test "$lt_cv_prog_gnu_ld" = yes; then + version_type=linux + else + version_type=irix + fi ;; + esac + need_lib_prefix=no + need_version=no + soname_spec='${libname}${release}${shared_ext}$major' + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext} $libname${shared_ext}' + case $host_os in + irix5* | nonstopux*) + libsuff= shlibsuff= + ;; + *) + case $LD in # libtool.m4 will add one of these switches to LD + *-32|*"-32 "|*-melf32bsmip|*"-melf32bsmip ") + libsuff= shlibsuff= libmagic=32-bit;; + *-n32|*"-n32 "|*-melf32bmipn32|*"-melf32bmipn32 ") + libsuff=32 shlibsuff=N32 libmagic=N32;; + *-64|*"-64 "|*-melf64bmip|*"-melf64bmip ") + libsuff=64 shlibsuff=64 libmagic=64-bit;; + *) libsuff= shlibsuff= libmagic=never-match;; + esac + ;; + esac + shlibpath_var=LD_LIBRARY${shlibsuff}_PATH + shlibpath_overrides_runpath=no + sys_lib_search_path_spec="/usr/lib${libsuff} /lib${libsuff} /usr/local/lib${libsuff}" + sys_lib_dlsearch_path_spec="/usr/lib${libsuff} /lib${libsuff}" + hardcode_into_libs=yes + ;; + +# No shared lib support for Linux oldld, aout, or coff. +linux*oldld* | linux*aout* | linux*coff*) + dynamic_linker=no + ;; + +# This must be Linux ELF. +linux* | k*bsd*-gnu) + version_type=linux + need_lib_prefix=no + need_version=no + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' + soname_spec='${libname}${release}${shared_ext}$major' + finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir' + shlibpath_var=LD_LIBRARY_PATH + shlibpath_overrides_runpath=no + # Some binutils ld are patched to set DT_RUNPATH + save_LDFLAGS=$LDFLAGS + save_libdir=$libdir + eval "libdir=/foo; wl=\"$_LT_TAGVAR(lt_prog_compiler_wl, $1)\"; \ + LDFLAGS=\"\$LDFLAGS $_LT_TAGVAR(hardcode_libdir_flag_spec, $1)\"" + AC_LINK_IFELSE([AC_LANG_PROGRAM([],[])], + [AS_IF([ ($OBJDUMP -p conftest$ac_exeext) 2>/dev/null | grep "RUNPATH.*$libdir" >/dev/null], + [shlibpath_overrides_runpath=yes])]) + LDFLAGS=$save_LDFLAGS + libdir=$save_libdir + + # This implies no fast_install, which is unacceptable. + # Some rework will be needed to allow for fast_install + # before this can be enabled. + hardcode_into_libs=yes + + # Append ld.so.conf contents to the search path + if test -f /etc/ld.so.conf; then + lt_ld_extra=`awk '/^include / { system(sprintf("cd /etc; cat %s 2>/dev/null", \[$]2)); skip = 1; } { if (!skip) print \[$]0; skip = 0; }' < /etc/ld.so.conf | $SED -e 's/#.*//;/^[ ]*hwcap[ ]/d;s/[:, ]/ /g;s/=[^=]*$//;s/=[^= ]* / /g;/^$/d' | tr '\n' ' '` + sys_lib_dlsearch_path_spec="/lib /usr/lib $lt_ld_extra" + fi + + # We used to test for /lib/ld.so.1 and disable shared libraries on + # powerpc, because MkLinux only supported shared libraries with the + # GNU dynamic linker. 
Since this was broken with cross compilers, + # most powerpc-linux boxes support dynamic linking these days and + # people can always --disable-shared, the test was removed, and we + # assume the GNU/Linux dynamic linker is in use. + dynamic_linker='GNU/Linux ld.so' + ;; + +netbsd*) + version_type=sunos + need_lib_prefix=no + need_version=no + if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' + finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' + dynamic_linker='NetBSD (a.out) ld.so' + else + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' + soname_spec='${libname}${release}${shared_ext}$major' + dynamic_linker='NetBSD ld.elf_so' + fi + shlibpath_var=LD_LIBRARY_PATH + shlibpath_overrides_runpath=yes + hardcode_into_libs=yes + ;; + +newsos6) + version_type=linux + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' + shlibpath_var=LD_LIBRARY_PATH + shlibpath_overrides_runpath=yes + ;; + +*nto* | *qnx*) + version_type=qnx + need_lib_prefix=no + need_version=no + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' + soname_spec='${libname}${release}${shared_ext}$major' + shlibpath_var=LD_LIBRARY_PATH + shlibpath_overrides_runpath=no + hardcode_into_libs=yes + dynamic_linker='ldqnx.so' + ;; + +openbsd*) + version_type=sunos + sys_lib_dlsearch_path_spec="/usr/lib" + need_lib_prefix=no + # Some older versions of OpenBSD (3.3 at least) *do* need versioned libs. + case $host_os in + openbsd3.3 | openbsd3.3.*) need_version=yes ;; + *) need_version=no ;; + esac + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' + finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' + shlibpath_var=LD_LIBRARY_PATH + if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then + case $host_os in + openbsd2.[[89]] | openbsd2.[[89]].*) + shlibpath_overrides_runpath=no + ;; + *) + shlibpath_overrides_runpath=yes + ;; + esac + else + shlibpath_overrides_runpath=yes + fi + ;; + +os2*) + libname_spec='$name' + shrext_cmds=".dll" + need_lib_prefix=no + library_names_spec='$libname${shared_ext} $libname.a' + dynamic_linker='OS/2 ld.exe' + shlibpath_var=LIBPATH + ;; + +osf3* | osf4* | osf5*) + version_type=osf + need_lib_prefix=no + need_version=no + soname_spec='${libname}${release}${shared_ext}$major' + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' + shlibpath_var=LD_LIBRARY_PATH + sys_lib_search_path_spec="/usr/shlib /usr/ccs/lib /usr/lib/cmplrs/cc /usr/lib /usr/local/lib /var/shlib" + sys_lib_dlsearch_path_spec="$sys_lib_search_path_spec" + ;; + +rdos*) + dynamic_linker=no + ;; + +solaris*) + version_type=linux + need_lib_prefix=no + need_version=no + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' + soname_spec='${libname}${release}${shared_ext}$major' + shlibpath_var=LD_LIBRARY_PATH + shlibpath_overrides_runpath=yes + hardcode_into_libs=yes + # ldd complains unless libraries are executable + postinstall_cmds='chmod +x $lib' + ;; + +sunos4*) + version_type=sunos + 
library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' + finish_cmds='PATH="\$PATH:/usr/etc" ldconfig $libdir' + shlibpath_var=LD_LIBRARY_PATH + shlibpath_overrides_runpath=yes + if test "$with_gnu_ld" = yes; then + need_lib_prefix=no + fi + need_version=yes + ;; + +sysv4 | sysv4.3*) + version_type=linux + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' + soname_spec='${libname}${release}${shared_ext}$major' + shlibpath_var=LD_LIBRARY_PATH + case $host_vendor in + sni) + shlibpath_overrides_runpath=no + need_lib_prefix=no + runpath_var=LD_RUN_PATH + ;; + siemens) + need_lib_prefix=no + ;; + motorola) + need_lib_prefix=no + need_version=no + shlibpath_overrides_runpath=no + sys_lib_search_path_spec='/lib /usr/lib /usr/ccs/lib' + ;; + esac + ;; + +sysv4*MP*) + if test -d /usr/nec ;then + version_type=linux + library_names_spec='$libname${shared_ext}.$versuffix $libname${shared_ext}.$major $libname${shared_ext}' + soname_spec='$libname${shared_ext}.$major' + shlibpath_var=LD_LIBRARY_PATH + fi + ;; + +sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*) + version_type=freebsd-elf + need_lib_prefix=no + need_version=no + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' + soname_spec='${libname}${release}${shared_ext}$major' + shlibpath_var=LD_LIBRARY_PATH + shlibpath_overrides_runpath=yes + hardcode_into_libs=yes + if test "$with_gnu_ld" = yes; then + sys_lib_search_path_spec='/usr/local/lib /usr/gnu/lib /usr/ccs/lib /usr/lib /lib' + else + sys_lib_search_path_spec='/usr/ccs/lib /usr/lib' + case $host_os in + sco3.2v5*) + sys_lib_search_path_spec="$sys_lib_search_path_spec /lib" + ;; + esac + fi + sys_lib_dlsearch_path_spec='/usr/lib' + ;; + +tpf*) + # TPF is a cross-target only. Preferred cross-host = GNU/Linux. 
+ version_type=linux + need_lib_prefix=no + need_version=no + library_name_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' + shlibpath_var=LD_LIBRARY_PATH + shlibpath_overrides_runpath=no + hardcode_into_libs=yes + ;; + +uts4*) + version_type=linux + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' + soname_spec='${libname}${release}${shared_ext}$major' + shlibpath_var=LD_LIBRARY_PATH + ;; + +*) + dynamic_linker=no + ;; +esac +AC_MSG_RESULT([$dynamic_linker]) +test "$dynamic_linker" = no && can_build_shared=no + +variables_saved_for_relink="PATH $shlibpath_var $runpath_var" +if test "$GCC" = yes; then + variables_saved_for_relink="$variables_saved_for_relink GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH" +fi + +if test "${lt_cv_sys_lib_search_path_spec+set}" = set; then + sys_lib_search_path_spec="$lt_cv_sys_lib_search_path_spec" +fi +if test "${lt_cv_sys_lib_dlsearch_path_spec+set}" = set; then + sys_lib_dlsearch_path_spec="$lt_cv_sys_lib_dlsearch_path_spec" +fi + +_LT_DECL([], [variables_saved_for_relink], [1], + [Variables whose values should be saved in libtool wrapper scripts and + restored at link time]) +_LT_DECL([], [need_lib_prefix], [0], + [Do we need the "lib" prefix for modules?]) +_LT_DECL([], [need_version], [0], [Do we need a version for libraries?]) +_LT_DECL([], [version_type], [0], [Library versioning type]) +_LT_DECL([], [runpath_var], [0], [Shared library runtime path variable]) +_LT_DECL([], [shlibpath_var], [0],[Shared library path variable]) +_LT_DECL([], [shlibpath_overrides_runpath], [0], + [Is shlibpath searched before the hard-coded library search path?]) +_LT_DECL([], [libname_spec], [1], [Format of library name prefix]) +_LT_DECL([], [library_names_spec], [1], + [[List of archive names. First name is the real one, the rest are links. + The last name is the one that the linker finds with -lNAME]]) +_LT_DECL([], [soname_spec], [1], + [[The coded name of the library, if different from the real name]]) +_LT_DECL([], [postinstall_cmds], [2], + [Command to use after installation of a shared archive]) +_LT_DECL([], [postuninstall_cmds], [2], + [Command to use after uninstallation of a shared archive]) +_LT_DECL([], [finish_cmds], [2], + [Commands used to finish a libtool library installation in a directory]) +_LT_DECL([], [finish_eval], [1], + [[As "finish_cmds", except a single script fragment to be evaled but + not shown]]) +_LT_DECL([], [hardcode_into_libs], [0], + [Whether we should hardcode library paths into libraries]) +_LT_DECL([], [sys_lib_search_path_spec], [2], + [Compile-time system search path for libraries]) +_LT_DECL([], [sys_lib_dlsearch_path_spec], [2], + [Run-time system search path for libraries]) +])# _LT_SYS_DYNAMIC_LINKER + + +# _LT_PATH_TOOL_PREFIX(TOOL) +# -------------------------- +# find a file program which can recognize shared library +AC_DEFUN([_LT_PATH_TOOL_PREFIX], +[m4_require([_LT_DECL_EGREP])dnl +AC_MSG_CHECKING([for $1]) +AC_CACHE_VAL(lt_cv_path_MAGIC_CMD, +[case $MAGIC_CMD in +[[\\/*] | ?:[\\/]*]) + lt_cv_path_MAGIC_CMD="$MAGIC_CMD" # Let the user override the test with a path. + ;; +*) + lt_save_MAGIC_CMD="$MAGIC_CMD" + lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR +dnl $ac_dummy forces splitting on constant user-supplied paths. +dnl POSIX.2 word splitting is done only on the output of word expansions, +dnl not every word. This closes a longstanding sh security hole. 
+ ac_dummy="m4_if([$2], , $PATH, [$2])" + for ac_dir in $ac_dummy; do + IFS="$lt_save_ifs" + test -z "$ac_dir" && ac_dir=. + if test -f $ac_dir/$1; then + lt_cv_path_MAGIC_CMD="$ac_dir/$1" + if test -n "$file_magic_test_file"; then + case $deplibs_check_method in + "file_magic "*) + file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"` + MAGIC_CMD="$lt_cv_path_MAGIC_CMD" + if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null | + $EGREP "$file_magic_regex" > /dev/null; then + : + else + cat <<_LT_EOF 1>&2 + +*** Warning: the command libtool uses to detect shared libraries, +*** $file_magic_cmd, produces output that libtool cannot recognize. +*** The result is that libtool may fail to recognize shared libraries +*** as such. This will affect the creation of libtool libraries that +*** depend on shared libraries, but programs linked with such libtool +*** libraries will work regardless of this problem. Nevertheless, you +*** may want to report the problem to your system manager and/or to +*** bug-libtool@gnu.org + +_LT_EOF + fi ;; + esac + fi + break + fi + done + IFS="$lt_save_ifs" + MAGIC_CMD="$lt_save_MAGIC_CMD" + ;; +esac]) +MAGIC_CMD="$lt_cv_path_MAGIC_CMD" +if test -n "$MAGIC_CMD"; then + AC_MSG_RESULT($MAGIC_CMD) +else + AC_MSG_RESULT(no) +fi +_LT_DECL([], [MAGIC_CMD], [0], + [Used to examine libraries when file_magic_cmd begins with "file"])dnl +])# _LT_PATH_TOOL_PREFIX + +# Old name: +AU_ALIAS([AC_PATH_TOOL_PREFIX], [_LT_PATH_TOOL_PREFIX]) +dnl aclocal-1.4 backwards compatibility: +dnl AC_DEFUN([AC_PATH_TOOL_PREFIX], []) + + +# _LT_PATH_MAGIC +# -------------- +# find a file program which can recognize a shared library +m4_defun([_LT_PATH_MAGIC], +[_LT_PATH_TOOL_PREFIX(${ac_tool_prefix}file, /usr/bin$PATH_SEPARATOR$PATH) +if test -z "$lt_cv_path_MAGIC_CMD"; then + if test -n "$ac_tool_prefix"; then + _LT_PATH_TOOL_PREFIX(file, /usr/bin$PATH_SEPARATOR$PATH) + else + MAGIC_CMD=: + fi +fi +])# _LT_PATH_MAGIC + + +# LT_PATH_LD +# ---------- +# find the pathname to the GNU or non-GNU linker +AC_DEFUN([LT_PATH_LD], +[AC_REQUIRE([AC_PROG_CC])dnl +AC_REQUIRE([AC_CANONICAL_HOST])dnl +AC_REQUIRE([AC_CANONICAL_BUILD])dnl +m4_require([_LT_DECL_SED])dnl +m4_require([_LT_DECL_EGREP])dnl + +AC_ARG_WITH([gnu-ld], + [AS_HELP_STRING([--with-gnu-ld], + [assume the C compiler uses GNU ld @<:@default=no@:>@])], + [test "$withval" = no || with_gnu_ld=yes], + [with_gnu_ld=no])dnl + +ac_prog=ld +if test "$GCC" = yes; then + # Check if gcc -print-prog-name=ld gives a path. + AC_MSG_CHECKING([for ld used by $CC]) + case $host in + *-*-mingw*) + # gcc leaves a trailing carriage return which upsets mingw + ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;; + *) + ac_prog=`($CC -print-prog-name=ld) 2>&5` ;; + esac + case $ac_prog in + # Accept absolute paths. + [[\\/]]* | ?:[[\\/]]*) + re_direlt='/[[^/]][[^/]]*/\.\./' + # Canonicalize the pathname of ld + ac_prog=`$ECHO "$ac_prog"| $SED 's%\\\\%/%g'` + while $ECHO "$ac_prog" | $GREP "$re_direlt" > /dev/null 2>&1; do + ac_prog=`$ECHO $ac_prog| $SED "s%$re_direlt%/%"` + done + test -z "$LD" && LD="$ac_prog" + ;; + "") + # If it fails, then pretend we aren't using GCC. + ac_prog=ld + ;; + *) + # If it is relative, then search for the first ld in PATH. 
+ with_gnu_ld=unknown + ;; + esac +elif test "$with_gnu_ld" = yes; then + AC_MSG_CHECKING([for GNU ld]) +else + AC_MSG_CHECKING([for non-GNU ld]) +fi +AC_CACHE_VAL(lt_cv_path_LD, +[if test -z "$LD"; then + lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR + for ac_dir in $PATH; do + IFS="$lt_save_ifs" + test -z "$ac_dir" && ac_dir=. + if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then + lt_cv_path_LD="$ac_dir/$ac_prog" + # Check to see if the program is GNU ld. I'd rather use --version, + # but apparently some variants of GNU ld only accept -v. + # Break only if it was the GNU/non-GNU ld that we prefer. + case `"$lt_cv_path_LD" -v 2>&1 &1 /dev/null 2>&1; then + lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL' + lt_cv_file_magic_cmd='func_win32_libid' + else + lt_cv_deplibs_check_method='file_magic file format pei*-i386(.*architecture: i386)?' + lt_cv_file_magic_cmd='$OBJDUMP -f' + fi + ;; + +darwin* | rhapsody*) + lt_cv_deplibs_check_method=pass_all + ;; + +freebsd* | dragonfly*) + if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then + case $host_cpu in + i*86 ) + # Not sure whether the presence of OpenBSD here was a mistake. + # Let's accept both of them until this is cleared up. + lt_cv_deplibs_check_method='file_magic (FreeBSD|OpenBSD|DragonFly)/i[[3-9]]86 (compact )?demand paged shared library' + lt_cv_file_magic_cmd=/usr/bin/file + lt_cv_file_magic_test_file=`echo /usr/lib/libc.so.*` + ;; + esac + else + lt_cv_deplibs_check_method=pass_all + fi + ;; + +gnu*) + lt_cv_deplibs_check_method=pass_all + ;; + +hpux10.20* | hpux11*) + lt_cv_file_magic_cmd=/usr/bin/file + case $host_cpu in + ia64*) + lt_cv_deplibs_check_method='file_magic (s[[0-9]][[0-9]][[0-9]]|ELF-[[0-9]][[0-9]]) shared object file - IA64' + lt_cv_file_magic_test_file=/usr/lib/hpux32/libc.so + ;; + hppa*64*) + [lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|ELF-[0-9][0-9]) shared object file - PA-RISC [0-9].[0-9]'] + lt_cv_file_magic_test_file=/usr/lib/pa20_64/libc.sl + ;; + *) + lt_cv_deplibs_check_method='file_magic (s[[0-9]][[0-9]][[0-9]]|PA-RISC[[0-9]].[[0-9]]) shared library' + lt_cv_file_magic_test_file=/usr/lib/libc.sl + ;; + esac + ;; + +interix[[3-9]]*) + # PIC code is broken on Interix 3.x, that's why |\.a not |_pic\.a here + lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so|\.a)$' + ;; + +irix5* | irix6* | nonstopux*) + case $LD in + *-32|*"-32 ") libmagic=32-bit;; + *-n32|*"-n32 ") libmagic=N32;; + *-64|*"-64 ") libmagic=64-bit;; + *) libmagic=never-match;; + esac + lt_cv_deplibs_check_method=pass_all + ;; + +# This must be Linux ELF. 
+linux* | k*bsd*-gnu) + lt_cv_deplibs_check_method=pass_all + ;; + +netbsd*) + if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then + lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so\.[[0-9]]+\.[[0-9]]+|_pic\.a)$' + else + lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so|_pic\.a)$' + fi + ;; + +newos6*) + lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[ML]]SB (executable|dynamic lib)' + lt_cv_file_magic_cmd=/usr/bin/file + lt_cv_file_magic_test_file=/usr/lib/libnls.so + ;; + +*nto* | *qnx*) + lt_cv_deplibs_check_method=pass_all + ;; + +openbsd*) + if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then + lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so\.[[0-9]]+\.[[0-9]]+|\.so|_pic\.a)$' + else + lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so\.[[0-9]]+\.[[0-9]]+|_pic\.a)$' + fi + ;; + +osf3* | osf4* | osf5*) + lt_cv_deplibs_check_method=pass_all + ;; + +rdos*) + lt_cv_deplibs_check_method=pass_all + ;; + +solaris*) + lt_cv_deplibs_check_method=pass_all + ;; + +sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*) + lt_cv_deplibs_check_method=pass_all + ;; + +sysv4 | sysv4.3*) + case $host_vendor in + motorola) + lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[ML]]SB (shared object|dynamic lib) M[[0-9]][[0-9]]* Version [[0-9]]' + lt_cv_file_magic_test_file=`echo /usr/lib/libc.so*` + ;; + ncr) + lt_cv_deplibs_check_method=pass_all + ;; + sequent) + lt_cv_file_magic_cmd='/bin/file' + lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[LM]]SB (shared object|dynamic lib )' + ;; + sni) + lt_cv_file_magic_cmd='/bin/file' + lt_cv_deplibs_check_method="file_magic ELF [[0-9]][[0-9]]*-bit [[LM]]SB dynamic lib" + lt_cv_file_magic_test_file=/lib/libc.so + ;; + siemens) + lt_cv_deplibs_check_method=pass_all + ;; + pc) + lt_cv_deplibs_check_method=pass_all + ;; + esac + ;; + +tpf*) + lt_cv_deplibs_check_method=pass_all + ;; +esac +]) +file_magic_cmd=$lt_cv_file_magic_cmd +deplibs_check_method=$lt_cv_deplibs_check_method +test -z "$deplibs_check_method" && deplibs_check_method=unknown + +_LT_DECL([], [deplibs_check_method], [1], + [Method to check whether dependent libraries are shared objects]) +_LT_DECL([], [file_magic_cmd], [1], + [Command to use when deplibs_check_method == "file_magic"]) +])# _LT_CHECK_MAGIC_METHOD + + +# LT_PATH_NM +# ---------- +# find the pathname to a BSD- or MS-compatible name lister +AC_DEFUN([LT_PATH_NM], +[AC_REQUIRE([AC_PROG_CC])dnl +AC_CACHE_CHECK([for BSD- or MS-compatible name lister (nm)], lt_cv_path_NM, +[if test -n "$NM"; then + # Let the user override the test. + lt_cv_path_NM="$NM" +else + lt_nm_to_check="${ac_tool_prefix}nm" + if test -n "$ac_tool_prefix" && test "$build" = "$host"; then + lt_nm_to_check="$lt_nm_to_check nm" + fi + for lt_tmp_nm in $lt_nm_to_check; do + lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR + for ac_dir in $PATH /usr/ccs/bin/elf /usr/ccs/bin /usr/ucb /bin; do + IFS="$lt_save_ifs" + test -z "$ac_dir" && ac_dir=. + tmp_nm="$ac_dir/$lt_tmp_nm" + if test -f "$tmp_nm" || test -f "$tmp_nm$ac_exeext" ; then + # Check to see if the nm accepts a BSD-compat flag. 
+ # Adding the `sed 1q' prevents false positives on HP-UX, which says: + # nm: unknown option "B" ignored + # Tru64's nm complains that /dev/null is an invalid object file + case `"$tmp_nm" -B /dev/null 2>&1 | sed '1q'` in + */dev/null* | *'Invalid file or object type'*) + lt_cv_path_NM="$tmp_nm -B" + break + ;; + *) + case `"$tmp_nm" -p /dev/null 2>&1 | sed '1q'` in + */dev/null*) + lt_cv_path_NM="$tmp_nm -p" + break + ;; + *) + lt_cv_path_NM=${lt_cv_path_NM="$tmp_nm"} # keep the first match, but + continue # so that we can try to find one that supports BSD flags + ;; + esac + ;; + esac + fi + done + IFS="$lt_save_ifs" + done + : ${lt_cv_path_NM=no} +fi]) +if test "$lt_cv_path_NM" != "no"; then + NM="$lt_cv_path_NM" +else + # Didn't find any BSD compatible name lister, look for dumpbin. + AC_CHECK_TOOLS(DUMPBIN, ["dumpbin -symbols" "link -dump -symbols"], :) + AC_SUBST([DUMPBIN]) + if test "$DUMPBIN" != ":"; then + NM="$DUMPBIN" + fi +fi +test -z "$NM" && NM=nm +AC_SUBST([NM]) +_LT_DECL([], [NM], [1], [A BSD- or MS-compatible name lister])dnl + +AC_CACHE_CHECK([the name lister ($NM) interface], [lt_cv_nm_interface], + [lt_cv_nm_interface="BSD nm" + echo "int some_variable = 0;" > conftest.$ac_ext + (eval echo "\"\$as_me:__oline__: $ac_compile\"" >&AS_MESSAGE_LOG_FD) + (eval "$ac_compile" 2>conftest.err) + cat conftest.err >&AS_MESSAGE_LOG_FD + (eval echo "\"\$as_me:__oline__: $NM \\\"conftest.$ac_objext\\\"\"" >&AS_MESSAGE_LOG_FD) + (eval "$NM \"conftest.$ac_objext\"" 2>conftest.err > conftest.out) + cat conftest.err >&AS_MESSAGE_LOG_FD + (eval echo "\"\$as_me:__oline__: output\"" >&AS_MESSAGE_LOG_FD) + cat conftest.out >&AS_MESSAGE_LOG_FD + if $GREP 'External.*some_variable' conftest.out > /dev/null; then + lt_cv_nm_interface="MS dumpbin" + fi + rm -f conftest*]) +])# LT_PATH_NM + +# Old names: +AU_ALIAS([AM_PROG_NM], [LT_PATH_NM]) +AU_ALIAS([AC_PROG_NM], [LT_PATH_NM]) +dnl aclocal-1.4 backwards compatibility: +dnl AC_DEFUN([AM_PROG_NM], []) +dnl AC_DEFUN([AC_PROG_NM], []) + + +# LT_LIB_M +# -------- +# check for math library +AC_DEFUN([LT_LIB_M], +[AC_REQUIRE([AC_CANONICAL_HOST])dnl +LIBM= +case $host in +*-*-beos* | *-*-cygwin* | *-*-pw32* | *-*-darwin*) + # These system don't have libm, or don't need it + ;; +*-ncr-sysv4.3*) + AC_CHECK_LIB(mw, _mwvalidcheckl, LIBM="-lmw") + AC_CHECK_LIB(m, cos, LIBM="$LIBM -lm") + ;; +*) + AC_CHECK_LIB(m, cos, LIBM="-lm") + ;; +esac +AC_SUBST([LIBM]) +])# LT_LIB_M + +# Old name: +AU_ALIAS([AC_CHECK_LIBM], [LT_LIB_M]) +dnl aclocal-1.4 backwards compatibility: +dnl AC_DEFUN([AC_CHECK_LIBM], []) + + +# _LT_COMPILER_NO_RTTI([TAGNAME]) +# ------------------------------- +m4_defun([_LT_COMPILER_NO_RTTI], +[m4_require([_LT_TAG_COMPILER])dnl + +_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)= + +if test "$GCC" = yes; then + _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=' -fno-builtin' + + _LT_COMPILER_OPTION([if $compiler supports -fno-rtti -fno-exceptions], + lt_cv_prog_compiler_rtti_exceptions, + [-fno-rtti -fno-exceptions], [], + [_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)="$_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1) -fno-rtti -fno-exceptions"]) +fi +_LT_TAGDECL([no_builtin_flag], [lt_prog_compiler_no_builtin_flag], [1], + [Compiler flag to turn off builtin functions]) +])# _LT_COMPILER_NO_RTTI + + +# _LT_CMD_GLOBAL_SYMBOLS +# ---------------------- +m4_defun([_LT_CMD_GLOBAL_SYMBOLS], +[AC_REQUIRE([AC_CANONICAL_HOST])dnl +AC_REQUIRE([AC_PROG_CC])dnl +AC_REQUIRE([LT_PATH_NM])dnl +AC_REQUIRE([LT_PATH_LD])dnl +m4_require([_LT_DECL_SED])dnl 
+m4_require([_LT_DECL_EGREP])dnl +m4_require([_LT_TAG_COMPILER])dnl + +# Check for command to grab the raw symbol name followed by C symbol from nm. +AC_MSG_CHECKING([command to parse $NM output from $compiler object]) +AC_CACHE_VAL([lt_cv_sys_global_symbol_pipe], +[ +# These are sane defaults that work on at least a few old systems. +# [They come from Ultrix. What could be older than Ultrix?!! ;)] + +# Character class describing NM global symbol codes. +symcode='[[BCDEGRST]]' + +# Regexp to match symbols that can be accessed directly from C. +sympat='\([[_A-Za-z]][[_A-Za-z0-9]]*\)' + +# Define system-specific variables. +case $host_os in +aix*) + symcode='[[BCDT]]' + ;; +cygwin* | mingw* | pw32*) + symcode='[[ABCDGISTW]]' + ;; +hpux*) + if test "$host_cpu" = ia64; then + symcode='[[ABCDEGRST]]' + fi + ;; +irix* | nonstopux*) + symcode='[[BCDEGRST]]' + ;; +osf*) + symcode='[[BCDEGQRST]]' + ;; +solaris*) + symcode='[[BDRT]]' + ;; +sco3.2v5*) + symcode='[[DT]]' + ;; +sysv4.2uw2*) + symcode='[[DT]]' + ;; +sysv5* | sco5v6* | unixware* | OpenUNIX*) + symcode='[[ABDT]]' + ;; +sysv4) + symcode='[[DFNSTU]]' + ;; +esac + +# If we're using GNU nm, then use its standard symbol codes. +case `$NM -V 2>&1` in +*GNU* | *'with BFD'*) + symcode='[[ABCDGIRSTW]]' ;; +esac + +# Transform an extracted symbol line into a proper C declaration. +# Some systems (esp. on ia64) link data and code symbols differently, +# so use this general approach. +lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'" + +# Transform an extracted symbol line into symbol name and symbol address +lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([[^ ]]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([[^ ]]*\) \([[^ ]]*\)$/ {\"\2\", (void *) \&\2},/p'" +lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([[^ ]]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([[^ ]]*\) \(lib[[^ ]]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([[^ ]]*\) \([[^ ]]*\)$/ {\"lib\2\", (void *) \&\2},/p'" + +# Handle CRLF in mingw tool chain +opt_cr= +case $build_os in +mingw*) + opt_cr=`$ECHO 'x\{0,1\}' | tr x '\015'` # option cr in regexp + ;; +esac + +# Try without a prefix underscore, then with it. +for ac_symprfx in "" "_"; do + + # Transform symcode, sympat, and symprfx into a raw symbol and a C symbol. + symxfrm="\\1 $ac_symprfx\\2 \\2" + + # Write the raw and C identifiers. + if test "$lt_cv_nm_interface" = "MS dumpbin"; then + # Fake it for dumpbin and say T for any non-static function + # and D for any global variable. + # Also find C++ and __fastcall symbols from MSVC++, + # which start with @ or ?. + lt_cv_sys_global_symbol_pipe="$AWK ['"\ +" {last_section=section; section=\$ 3};"\ +" /Section length .*#relocs.*(pick any)/{hide[last_section]=1};"\ +" \$ 0!~/External *\|/{next};"\ +" / 0+ UNDEF /{next}; / UNDEF \([^|]\)*()/{next};"\ +" {if(hide[section]) next};"\ +" {f=0}; \$ 0~/\(\).*\|/{f=1}; {printf f ? \"T \" : \"D \"};"\ +" {split(\$ 0, a, /\||\r/); split(a[2], s)};"\ +" s[1]~/^[@?]/{print s[1], s[1]; next};"\ +" s[1]~prfx {split(s[1],t,\"@\"); print t[1], substr(t[1],length(prfx))}"\ +" ' prfx=^$ac_symprfx]" + else + lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[[ ]]\($symcode$symcode*\)[[ ]][[ ]]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'" + fi + + # Check to see that the pipe works correctly. 
+ pipe_works=no + + rm -f conftest* + cat > conftest.$ac_ext <<_LT_EOF +#ifdef __cplusplus +extern "C" { +#endif +char nm_test_var; +void nm_test_func(void); +void nm_test_func(void){} +#ifdef __cplusplus +} +#endif +int main(){nm_test_var='a';nm_test_func();return(0);} +_LT_EOF + + if AC_TRY_EVAL(ac_compile); then + # Now try to grab the symbols. + nlist=conftest.nm + if AC_TRY_EVAL(NM conftest.$ac_objext \| $lt_cv_sys_global_symbol_pipe \> $nlist) && test -s "$nlist"; then + # Try sorting and uniquifying the output. + if sort "$nlist" | uniq > "$nlist"T; then + mv -f "$nlist"T "$nlist" + else + rm -f "$nlist"T + fi + + # Make sure that we snagged all the symbols we need. + if $GREP ' nm_test_var$' "$nlist" >/dev/null; then + if $GREP ' nm_test_func$' "$nlist" >/dev/null; then + cat <<_LT_EOF > conftest.$ac_ext +#ifdef __cplusplus +extern "C" { +#endif + +_LT_EOF + # Now generate the symbol file. + eval "$lt_cv_sys_global_symbol_to_cdecl"' < "$nlist" | $GREP -v main >> conftest.$ac_ext' + + cat <<_LT_EOF >> conftest.$ac_ext + +/* The mapping between symbol names and symbols. */ +const struct { + const char *name; + void *address; +} +lt__PROGRAM__LTX_preloaded_symbols[[]] = +{ + { "@PROGRAM@", (void *) 0 }, +_LT_EOF + $SED "s/^$symcode$symcode* \(.*\) \(.*\)$/ {\"\2\", (void *) \&\2},/" < "$nlist" | $GREP -v main >> conftest.$ac_ext + cat <<\_LT_EOF >> conftest.$ac_ext + {0, (void *) 0} +}; + +/* This works around a problem in FreeBSD linker */ +#ifdef FREEBSD_WORKAROUND +static const void *lt_preloaded_setup() { + return lt__PROGRAM__LTX_preloaded_symbols; +} +#endif + +#ifdef __cplusplus +} +#endif +_LT_EOF + # Now try linking the two files. + mv conftest.$ac_objext conftstm.$ac_objext + lt_save_LIBS="$LIBS" + lt_save_CFLAGS="$CFLAGS" + LIBS="conftstm.$ac_objext" + CFLAGS="$CFLAGS$_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)" + if AC_TRY_EVAL(ac_link) && test -s conftest${ac_exeext}; then + pipe_works=yes + fi + LIBS="$lt_save_LIBS" + CFLAGS="$lt_save_CFLAGS" + else + echo "cannot find nm_test_func in $nlist" >&AS_MESSAGE_LOG_FD + fi + else + echo "cannot find nm_test_var in $nlist" >&AS_MESSAGE_LOG_FD + fi + else + echo "cannot run $lt_cv_sys_global_symbol_pipe" >&AS_MESSAGE_LOG_FD + fi + else + echo "$progname: failed program was:" >&AS_MESSAGE_LOG_FD + cat conftest.$ac_ext >&5 + fi + rm -rf conftest* conftst* + + # Do not use the global_symbol_pipe unless it works. 
+ if test "$pipe_works" = yes; then + break + else + lt_cv_sys_global_symbol_pipe= + fi +done +]) +if test -z "$lt_cv_sys_global_symbol_pipe"; then + lt_cv_sys_global_symbol_to_cdecl= +fi +if test -z "$lt_cv_sys_global_symbol_pipe$lt_cv_sys_global_symbol_to_cdecl"; then + AC_MSG_RESULT(failed) +else + AC_MSG_RESULT(ok) +fi + +_LT_DECL([global_symbol_pipe], [lt_cv_sys_global_symbol_pipe], [1], + [Take the output of nm and produce a listing of raw symbols and C names]) +_LT_DECL([global_symbol_to_cdecl], [lt_cv_sys_global_symbol_to_cdecl], [1], + [Transform the output of nm in a proper C declaration]) +_LT_DECL([global_symbol_to_c_name_address], + [lt_cv_sys_global_symbol_to_c_name_address], [1], + [Transform the output of nm in a C name address pair]) +_LT_DECL([global_symbol_to_c_name_address_lib_prefix], + [lt_cv_sys_global_symbol_to_c_name_address_lib_prefix], [1], + [Transform the output of nm in a C name address pair when lib prefix is needed]) +]) # _LT_CMD_GLOBAL_SYMBOLS + + +# _LT_COMPILER_PIC([TAGNAME]) +# --------------------------- +m4_defun([_LT_COMPILER_PIC], +[m4_require([_LT_TAG_COMPILER])dnl +_LT_TAGVAR(lt_prog_compiler_wl, $1)= +_LT_TAGVAR(lt_prog_compiler_pic, $1)= +_LT_TAGVAR(lt_prog_compiler_static, $1)= + +AC_MSG_CHECKING([for $compiler option to produce PIC]) +m4_if([$1], [CXX], [ + # C++ specific cases for pic, static, wl, etc. + if test "$GXX" = yes; then + _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' + _LT_TAGVAR(lt_prog_compiler_static, $1)='-static' + + case $host_os in + aix*) + # All AIX code is PIC. + if test "$host_cpu" = ia64; then + # AIX 5 now supports IA64 processor + _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' + fi + ;; + + amigaos*) + case $host_cpu in + powerpc) + # see comment about AmigaOS4 .so support + _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' + ;; + m68k) + # FIXME: we need at least 68020 code to build shared libraries, but + # adding the `-m68020' flag to GCC prevents building anything better, + # like `-m68040'. + _LT_TAGVAR(lt_prog_compiler_pic, $1)='-m68020 -resident32 -malways-restore-a4' + ;; + esac + ;; + + beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*) + # PIC is the default for these OSes. + ;; + mingw* | cygwin* | os2* | pw32*) + # This hack is so that the source file can tell whether it is being + # built for inclusion in a dll (and should export symbols for example). + # Although the cygwin gcc ignores -fPIC, still need this for old-style + # (--disable-auto-import) libraries + m4_if([$1], [GCJ], [], + [_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT']) + ;; + darwin* | rhapsody*) + # PIC is the default on this platform + # Common symbols not allowed in MH_DYLIB files + _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fno-common' + ;; + *djgpp*) + # DJGPP does not support shared libraries at all + _LT_TAGVAR(lt_prog_compiler_pic, $1)= + ;; + interix[[3-9]]*) + # Interix 3.x gcc -fpic/-fPIC options generate broken code. + # Instead, we relocate shared libraries at runtime. + ;; + sysv4*MP*) + if test -d /usr/nec; then + _LT_TAGVAR(lt_prog_compiler_pic, $1)=-Kconform_pic + fi + ;; + hpux*) + # PIC is the default for IA64 HP-UX and 64-bit HP-UX, but + # not for PA HP-UX. + case $host_cpu in + hppa*64*|ia64*) + ;; + *) + _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' + ;; + esac + ;; + *qnx* | *nto*) + # QNX uses GNU C++, but need to define -shared option too, otherwise + # it will coredump. 
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared' + ;; + *) + _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' + ;; + esac + else + case $host_os in + aix[[4-9]]*) + # All AIX code is PIC. + if test "$host_cpu" = ia64; then + # AIX 5 now supports IA64 processor + _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' + else + _LT_TAGVAR(lt_prog_compiler_static, $1)='-bnso -bI:/lib/syscalls.exp' + fi + ;; + chorus*) + case $cc_basename in + cxch68*) + # Green Hills C++ Compiler + # _LT_TAGVAR(lt_prog_compiler_static, $1)="--no_auto_instantiation -u __main -u __premain -u _abort -r $COOL_DIR/lib/libOrb.a $MVME_DIR/lib/CC/libC.a $MVME_DIR/lib/classix/libcx.s.a" + ;; + esac + ;; + dgux*) + case $cc_basename in + ec++*) + _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' + ;; + ghcx*) + # Green Hills C++ Compiler + _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic' + ;; + *) + ;; + esac + ;; + freebsd* | dragonfly*) + # FreeBSD uses GNU C++ + ;; + hpux9* | hpux10* | hpux11*) + case $cc_basename in + CC*) + _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' + _LT_TAGVAR(lt_prog_compiler_static, $1)='${wl}-a ${wl}archive' + if test "$host_cpu" != ia64; then + _LT_TAGVAR(lt_prog_compiler_pic, $1)='+Z' + fi + ;; + aCC*) + _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' + _LT_TAGVAR(lt_prog_compiler_static, $1)='${wl}-a ${wl}archive' + case $host_cpu in + hppa*64*|ia64*) + # +Z the default + ;; + *) + _LT_TAGVAR(lt_prog_compiler_pic, $1)='+Z' + ;; + esac + ;; + *) + ;; + esac + ;; + interix*) + # This is c89, which is MS Visual C++ (no shared libs) + # Anyone wants to do a port? + ;; + irix5* | irix6* | nonstopux*) + case $cc_basename in + CC*) + _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' + _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared' + # CC pic flag -KPIC is the default. + ;; + *) + ;; + esac + ;; + linux* | k*bsd*-gnu) + case $cc_basename in + KCC*) + # KAI C++ Compiler + _LT_TAGVAR(lt_prog_compiler_wl, $1)='--backend -Wl,' + _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' + ;; + icpc* | ecpc* ) + # Intel C++ + _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' + _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' + _LT_TAGVAR(lt_prog_compiler_static, $1)='-static' + ;; + pgCC* | pgcpp*) + # Portland Group C++ compiler + _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' + _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fpic' + _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' + ;; + cxx*) + # Compaq C++ + # Make sure the PIC flag is empty. It appears that all Alpha + # Linux and Compaq Tru64 Unix objects are PIC. + _LT_TAGVAR(lt_prog_compiler_pic, $1)= + _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared' + ;; + xlc* | xlC*) + # IBM XL 8.0 on PPC + _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' + _LT_TAGVAR(lt_prog_compiler_pic, $1)='-qpic' + _LT_TAGVAR(lt_prog_compiler_static, $1)='-qstaticlink' + ;; + *) + case `$CC -V 2>&1 | sed 5q` in + *Sun\ C*) + # Sun C++ 5.9 + _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' + _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' + _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld ' + ;; + esac + ;; + esac + ;; + lynxos*) + ;; + m88k*) + ;; + mvs*) + case $cc_basename in + cxx*) + _LT_TAGVAR(lt_prog_compiler_pic, $1)='-W c,exportall' + ;; + *) + ;; + esac + ;; + netbsd*) + ;; + *qnx* | *nto*) + # QNX uses GNU C++, but need to define -shared option too, otherwise + # it will coredump. 
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared' + ;; + osf3* | osf4* | osf5*) + case $cc_basename in + KCC*) + _LT_TAGVAR(lt_prog_compiler_wl, $1)='--backend -Wl,' + ;; + RCC*) + # Rational C++ 2.4.1 + _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic' + ;; + cxx*) + # Digital/Compaq C++ + _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' + # Make sure the PIC flag is empty. It appears that all Alpha + # Linux and Compaq Tru64 Unix objects are PIC. + _LT_TAGVAR(lt_prog_compiler_pic, $1)= + _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared' + ;; + *) + ;; + esac + ;; + psos*) + ;; + solaris*) + case $cc_basename in + CC*) + # Sun C++ 4.2, 5.x and Centerline C++ + _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' + _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' + _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld ' + ;; + gcx*) + # Green Hills C++ Compiler + _LT_TAGVAR(lt_prog_compiler_pic, $1)='-PIC' + ;; + *) + ;; + esac + ;; + sunos4*) + case $cc_basename in + CC*) + # Sun C++ 4.x + _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic' + _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' + ;; + lcc*) + # Lucid + _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic' + ;; + *) + ;; + esac + ;; + sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*) + case $cc_basename in + CC*) + _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' + _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' + _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' + ;; + esac + ;; + tandem*) + case $cc_basename in + NCC*) + # NonStop-UX NCC 3.20 + _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' + ;; + *) + ;; + esac + ;; + vxworks*) + ;; + *) + _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no + ;; + esac + fi +], +[ + if test "$GCC" = yes; then + _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' + _LT_TAGVAR(lt_prog_compiler_static, $1)='-static' + + case $host_os in + aix*) + # All AIX code is PIC. + if test "$host_cpu" = ia64; then + # AIX 5 now supports IA64 processor + _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' + fi + ;; + + amigaos*) + case $host_cpu in + powerpc) + # see comment about AmigaOS4 .so support + _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' + ;; + m68k) + # FIXME: we need at least 68020 code to build shared libraries, but + # adding the `-m68020' flag to GCC prevents building anything better, + # like `-m68040'. + _LT_TAGVAR(lt_prog_compiler_pic, $1)='-m68020 -resident32 -malways-restore-a4' + ;; + esac + ;; + + beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*) + # PIC is the default for these OSes. + ;; + + mingw* | cygwin* | pw32* | os2*) + # This hack is so that the source file can tell whether it is being + # built for inclusion in a dll (and should export symbols for example). + # Although the cygwin gcc ignores -fPIC, still need this for old-style + # (--disable-auto-import) libraries + m4_if([$1], [GCJ], [], + [_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT']) + ;; + + darwin* | rhapsody*) + # PIC is the default on this platform + # Common symbols not allowed in MH_DYLIB files + _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fno-common' + ;; + + hpux*) + # PIC is the default for IA64 HP-UX and 64-bit HP-UX, but + # not for PA HP-UX. + case $host_cpu in + hppa*64*|ia64*) + # +Z the default + ;; + *) + _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' + ;; + esac + ;; + + interix[[3-9]]*) + # Interix 3.x gcc -fpic/-fPIC options generate broken code. + # Instead, we relocate shared libraries at runtime. 
+ ;; + + msdosdjgpp*) + # Just because we use GCC doesn't mean we suddenly get shared libraries + # on systems that don't support them. + _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no + enable_shared=no + ;; + + *nto* | *qnx*) + # QNX uses GNU C++, but need to define -shared option too, otherwise + # it will coredump. + _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared' + ;; + + sysv4*MP*) + if test -d /usr/nec; then + _LT_TAGVAR(lt_prog_compiler_pic, $1)=-Kconform_pic + fi + ;; + + *) + _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' + ;; + esac + else + # PORTME Check for flag to pass linker flags through the system compiler. + case $host_os in + aix*) + _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' + if test "$host_cpu" = ia64; then + # AIX 5 now supports IA64 processor + _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' + else + _LT_TAGVAR(lt_prog_compiler_static, $1)='-bnso -bI:/lib/syscalls.exp' + fi + ;; + + mingw* | cygwin* | pw32* | os2*) + # This hack is so that the source file can tell whether it is being + # built for inclusion in a dll (and should export symbols for example). + m4_if([$1], [GCJ], [], + [_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT']) + ;; + + hpux9* | hpux10* | hpux11*) + _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' + # PIC is the default for IA64 HP-UX and 64-bit HP-UX, but + # not for PA HP-UX. + case $host_cpu in + hppa*64*|ia64*) + # +Z the default + ;; + *) + _LT_TAGVAR(lt_prog_compiler_pic, $1)='+Z' + ;; + esac + # Is there a better lt_prog_compiler_static that works with the bundled CC? + _LT_TAGVAR(lt_prog_compiler_static, $1)='${wl}-a ${wl}archive' + ;; + + irix5* | irix6* | nonstopux*) + _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' + # PIC (with -KPIC) is the default. + _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared' + ;; + + linux* | k*bsd*-gnu) + case $cc_basename in + icc* | ecc* | ifort*) + _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' + _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' + _LT_TAGVAR(lt_prog_compiler_static, $1)='-static' + ;; + pgcc* | pgf77* | pgf90* | pgf95*) + # Portland Group compilers (*not* the Pentium gcc compiler, + # which looks to be a dead project) + _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' + _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fpic' + _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' + ;; + ccc*) + _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' + # All Alpha code is PIC. + _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared' + ;; + xl*) + # IBM XL C 8.0/Fortran 10.1 on PPC + _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' + _LT_TAGVAR(lt_prog_compiler_pic, $1)='-qpic' + _LT_TAGVAR(lt_prog_compiler_static, $1)='-qstaticlink' + ;; + *) + case `$CC -V 2>&1 | sed 5q` in + *Sun\ C*) + # Sun C 5.9 + _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' + _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' + _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' + ;; + *Sun\ F*) + # Sun Fortran 8.3 passes all unrecognized flags to the linker + _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' + _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' + _LT_TAGVAR(lt_prog_compiler_wl, $1)='' + ;; + esac + ;; + esac + ;; + + newsos6) + _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' + _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' + ;; + + *nto* | *qnx*) + # QNX uses GNU C++, but need to define -shared option too, otherwise + # it will coredump. + _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared' + ;; + + osf3* | osf4* | osf5*) + _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' + # All OSF/1 code is PIC. 
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared' + ;; + + rdos*) + _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared' + ;; + + solaris*) + _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' + _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' + case $cc_basename in + f77* | f90* | f95*) + _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld ';; + *) + _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,';; + esac + ;; + + sunos4*) + _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld ' + _LT_TAGVAR(lt_prog_compiler_pic, $1)='-PIC' + _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' + ;; + + sysv4 | sysv4.2uw2* | sysv4.3*) + _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' + _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' + _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' + ;; + + sysv4*MP*) + if test -d /usr/nec ;then + _LT_TAGVAR(lt_prog_compiler_pic, $1)='-Kconform_pic' + _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' + fi + ;; + + sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*) + _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' + _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' + _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' + ;; + + unicos*) + _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' + _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no + ;; + + uts4*) + _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic' + _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' + ;; + + *) + _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no + ;; + esac + fi +]) +case $host_os in + # For platforms which do not support PIC, -DPIC is meaningless: + *djgpp*) + _LT_TAGVAR(lt_prog_compiler_pic, $1)= + ;; + *) + _LT_TAGVAR(lt_prog_compiler_pic, $1)="$_LT_TAGVAR(lt_prog_compiler_pic, $1)@&t@m4_if([$1],[],[ -DPIC],[m4_if([$1],[CXX],[ -DPIC],[])])" + ;; +esac +AC_MSG_RESULT([$_LT_TAGVAR(lt_prog_compiler_pic, $1)]) +_LT_TAGDECL([wl], [lt_prog_compiler_wl], [1], + [How to pass a linker flag through the compiler]) + +# +# Check to make sure the PIC flag actually works. +# +if test -n "$_LT_TAGVAR(lt_prog_compiler_pic, $1)"; then + _LT_COMPILER_OPTION([if $compiler PIC flag $_LT_TAGVAR(lt_prog_compiler_pic, $1) works], + [_LT_TAGVAR(lt_cv_prog_compiler_pic_works, $1)], + [$_LT_TAGVAR(lt_prog_compiler_pic, $1)@&t@m4_if([$1],[],[ -DPIC],[m4_if([$1],[CXX],[ -DPIC],[])])], [], + [case $_LT_TAGVAR(lt_prog_compiler_pic, $1) in + "" | " "*) ;; + *) _LT_TAGVAR(lt_prog_compiler_pic, $1)=" $_LT_TAGVAR(lt_prog_compiler_pic, $1)" ;; + esac], + [_LT_TAGVAR(lt_prog_compiler_pic, $1)= + _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no]) +fi +_LT_TAGDECL([pic_flag], [lt_prog_compiler_pic], [1], + [Additional compiler flags for building library objects]) + +# +# Check to make sure the static flag actually works. +# +wl=$_LT_TAGVAR(lt_prog_compiler_wl, $1) eval lt_tmp_static_flag=\"$_LT_TAGVAR(lt_prog_compiler_static, $1)\" +_LT_LINKER_OPTION([if $compiler static flag $lt_tmp_static_flag works], + _LT_TAGVAR(lt_cv_prog_compiler_static_works, $1), + $lt_tmp_static_flag, + [], + [_LT_TAGVAR(lt_prog_compiler_static, $1)=]) +_LT_TAGDECL([link_static_flag], [lt_prog_compiler_static], [1], + [Compiler flag to prevent dynamic linking]) +])# _LT_COMPILER_PIC + + +# _LT_LINKER_SHLIBS([TAGNAME]) +# ---------------------------- +# See if the linker supports building shared libraries. 
+m4_defun([_LT_LINKER_SHLIBS], +[AC_REQUIRE([LT_PATH_LD])dnl +AC_REQUIRE([LT_PATH_NM])dnl +m4_require([_LT_FILEUTILS_DEFAULTS])dnl +m4_require([_LT_DECL_EGREP])dnl +m4_require([_LT_DECL_SED])dnl +m4_require([_LT_CMD_GLOBAL_SYMBOLS])dnl +m4_require([_LT_TAG_COMPILER])dnl +AC_MSG_CHECKING([whether the $compiler linker ($LD) supports shared libraries]) +m4_if([$1], [CXX], [ + _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' + case $host_os in + aix[[4-9]]*) + # If we're using GNU nm, then we don't want the "-C" option. + # -C means demangle to AIX nm, but means don't demangle with GNU nm + if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then + _LT_TAGVAR(export_symbols_cmds, $1)='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && ([substr](\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' + else + _LT_TAGVAR(export_symbols_cmds, $1)='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && ([substr](\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' + fi + ;; + pw32*) + _LT_TAGVAR(export_symbols_cmds, $1)="$ltdll_cmds" + ;; + cygwin* | mingw*) + _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1 DATA/;/^.*[[ ]]__nm__/s/^.*[[ ]]__nm__\([[^ ]]*\)[[ ]][[^ ]]*/\1 DATA/;/^I[[ ]]/d;/^[[AITW]][[ ]]/s/.* //'\'' | sort | uniq > $export_symbols' + ;; + *) + _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' + ;; + esac + _LT_TAGVAR(exclude_expsyms, $1)=['_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*'] +], [ + runpath_var= + _LT_TAGVAR(allow_undefined_flag, $1)= + _LT_TAGVAR(always_export_symbols, $1)=no + _LT_TAGVAR(archive_cmds, $1)= + _LT_TAGVAR(archive_expsym_cmds, $1)= + _LT_TAGVAR(compiler_needs_object, $1)=no + _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no + _LT_TAGVAR(export_dynamic_flag_spec, $1)= + _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' + _LT_TAGVAR(hardcode_automatic, $1)=no + _LT_TAGVAR(hardcode_direct, $1)=no + _LT_TAGVAR(hardcode_direct_absolute, $1)=no + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)= + _LT_TAGVAR(hardcode_libdir_flag_spec_ld, $1)= + _LT_TAGVAR(hardcode_libdir_separator, $1)= + _LT_TAGVAR(hardcode_minus_L, $1)=no + _LT_TAGVAR(hardcode_shlibpath_var, $1)=unsupported + _LT_TAGVAR(inherit_rpath, $1)=no + _LT_TAGVAR(link_all_deplibs, $1)=unknown + _LT_TAGVAR(module_cmds, $1)= + _LT_TAGVAR(module_expsym_cmds, $1)= + _LT_TAGVAR(old_archive_from_new_cmds, $1)= + _LT_TAGVAR(old_archive_from_expsyms_cmds, $1)= + _LT_TAGVAR(thread_safe_flag_spec, $1)= + _LT_TAGVAR(whole_archive_flag_spec, $1)= + # include_expsyms should be a list of space-separated symbols to be *always* + # included in the symbol list + _LT_TAGVAR(include_expsyms, $1)= + # exclude_expsyms can be an extended regexp of symbols to exclude + # it will be wrapped by ` (' and `)$', so one must not match beginning or + # end of line. Example: `a|bc|.*d.*' will exclude the symbols `a' and `bc', + # as well as any symbol that contains `d'. 
+ _LT_TAGVAR(exclude_expsyms, $1)=['_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*'] + # Although _GLOBAL_OFFSET_TABLE_ is a valid symbol C name, most a.out + # platforms (ab)use it in PIC code, but their linkers get confused if + # the symbol is explicitly referenced. Since portable code cannot + # rely on this symbol name, it's probably fine to never include it in + # preloaded symbol tables. + # Exclude shared library initialization/finalization symbols. +dnl Note also adjust exclude_expsyms for C++ above. + extract_expsyms_cmds= + + case $host_os in + cygwin* | mingw* | pw32*) + # FIXME: the MSVC++ port hasn't been tested in a loooong time + # When not using gcc, we currently assume that we are using + # Microsoft Visual C++. + if test "$GCC" != yes; then + with_gnu_ld=no + fi + ;; + interix*) + # we just hope/assume this is gcc and not c89 (= MSVC++) + with_gnu_ld=yes + ;; + openbsd*) + with_gnu_ld=no + ;; + esac + + _LT_TAGVAR(ld_shlibs, $1)=yes + if test "$with_gnu_ld" = yes; then + # If archive_cmds runs LD, not CC, wlarc should be empty + wlarc='${wl}' + + # Set some defaults for GNU ld with shared library support. These + # are reset later if shared libraries are not supported. Putting them + # here allows them to be overridden if necessary. + runpath_var=LD_RUN_PATH + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' + _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic' + # ancient GNU ld didn't support --whole-archive et. al. + if $LD --help 2>&1 | $GREP 'no-whole-archive' > /dev/null; then + _LT_TAGVAR(whole_archive_flag_spec, $1)="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive' + else + _LT_TAGVAR(whole_archive_flag_spec, $1)= + fi + supports_anon_versioning=no + case `$LD -v 2>&1` in + *\ [[01]].* | *\ 2.[[0-9]].* | *\ 2.10.*) ;; # catch versions < 2.11 + *\ 2.11.93.0.2\ *) supports_anon_versioning=yes ;; # RH7.3 ... + *\ 2.11.92.0.12\ *) supports_anon_versioning=yes ;; # Mandrake 8.2 ... + *\ 2.11.*) ;; # other 2.11 versions + *) supports_anon_versioning=yes ;; + esac + + # See if GNU ld supports shared libraries. + case $host_os in + aix[[3-9]]*) + # On AIX/PPC, the GNU linker is very broken + if test "$host_cpu" != ia64; then + _LT_TAGVAR(ld_shlibs, $1)=no + cat <<_LT_EOF 1>&2 + +*** Warning: the GNU linker, at least up to release 2.9.1, is reported +*** to be unable to reliably create shared libraries on AIX. +*** Therefore, libtool is disabling shared libraries support. If you +*** really care for shared libraries, you may want to modify your PATH +*** so that a non-GNU linker is found, and then restart. 
+ +_LT_EOF + fi + ;; + + amigaos*) + case $host_cpu in + powerpc) + # see comment about AmigaOS4 .so support + _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' + _LT_TAGVAR(archive_expsym_cmds, $1)='' + ;; + m68k) + _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)' + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' + _LT_TAGVAR(hardcode_minus_L, $1)=yes + ;; + esac + ;; + + beos*) + if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then + _LT_TAGVAR(allow_undefined_flag, $1)=unsupported + # Joseph Beckenbach says some releases of gcc + # support --undefined. This deserves some investigation. FIXME + _LT_TAGVAR(archive_cmds, $1)='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' + else + _LT_TAGVAR(ld_shlibs, $1)=no + fi + ;; + + cygwin* | mingw* | pw32*) + # _LT_TAGVAR(hardcode_libdir_flag_spec, $1) is actually meaningless, + # as there is no search path for DLLs. + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' + _LT_TAGVAR(allow_undefined_flag, $1)=unsupported + _LT_TAGVAR(always_export_symbols, $1)=no + _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes + _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1 DATA/'\'' | $SED -e '\''/^[[AITW]][[ ]]/s/.*[[ ]]//'\'' | sort | uniq > $export_symbols' + + if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then + _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' + # If the export-symbols file already is a .def file (1st line + # is EXPORTS), use it as is; otherwise, prepend... + _LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then + cp $export_symbols $output_objdir/$soname.def; + else + echo EXPORTS > $output_objdir/$soname.def; + cat $export_symbols >> $output_objdir/$soname.def; + fi~ + $CC -shared $output_objdir/$soname.def $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' + else + _LT_TAGVAR(ld_shlibs, $1)=no + fi + ;; + + interix[[3-9]]*) + _LT_TAGVAR(hardcode_direct, $1)=no + _LT_TAGVAR(hardcode_shlibpath_var, $1)=no + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir' + _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' + # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc. + # Instead, shared libraries are loaded at an image base (0x10000000 by + # default) and relocated if they conflict, which is a slow very memory + # consuming and fragmenting process. To avoid this, we pick a random, + # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link + # time. Moving up from 0x10000000 also allows more sbrk(2) space. 
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' + _LT_TAGVAR(archive_expsym_cmds, $1)='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' + ;; + + gnu* | linux* | tpf* | k*bsd*-gnu) + tmp_diet=no + if test "$host_os" = linux-dietlibc; then + case $cc_basename in + diet\ *) tmp_diet=yes;; # linux-dietlibc with static linking (!diet-dyn) + esac + fi + if $LD --help 2>&1 | $EGREP ': supported targets:.* elf' > /dev/null \ + && test "$tmp_diet" = no + then + tmp_addflag= + tmp_sharedflag='-shared' + case $cc_basename,$host_cpu in + pgcc*) # Portland Group C compiler + _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; $ECHO \"$new_convenience\"` ${wl}--no-whole-archive' + tmp_addflag=' $pic_flag' + ;; + pgf77* | pgf90* | pgf95*) # Portland Group f77 and f90 compilers + _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; $ECHO \"$new_convenience\"` ${wl}--no-whole-archive' + tmp_addflag=' $pic_flag -Mnomain' ;; + ecc*,ia64* | icc*,ia64*) # Intel C compiler on ia64 + tmp_addflag=' -i_dynamic' ;; + efc*,ia64* | ifort*,ia64*) # Intel Fortran compiler on ia64 + tmp_addflag=' -i_dynamic -nofor_main' ;; + ifc* | ifort*) # Intel Fortran compiler + tmp_addflag=' -nofor_main' ;; + xl[[cC]]*) # IBM XL C 8.0 on PPC (deal with xlf below) + tmp_sharedflag='-qmkshrobj' + tmp_addflag= ;; + esac + case `$CC -V 2>&1 | sed 5q` in + *Sun\ C*) # Sun C 5.9 + _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; $ECHO \"$new_convenience\"` ${wl}--no-whole-archive' + _LT_TAGVAR(compiler_needs_object, $1)=yes + tmp_sharedflag='-G' ;; + *Sun\ F*) # Sun Fortran 8.3 + tmp_sharedflag='-G' ;; + esac + _LT_TAGVAR(archive_cmds, $1)='$CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' + + if test "x$supports_anon_versioning" = xyes; then + _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $output_objdir/$libname.ver~ + cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ + echo "local: *; };" >> $output_objdir/$libname.ver~ + $CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib' + fi + + case $cc_basename in + xlf*) + # IBM XL Fortran 10.1 on PPC cannot create shared libs itself + _LT_TAGVAR(whole_archive_flag_spec, $1)='--whole-archive$convenience --no-whole-archive' + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)= + _LT_TAGVAR(hardcode_libdir_flag_spec_ld, $1)='-rpath $libdir' + _LT_TAGVAR(archive_cmds, $1)='$LD -shared $libobjs $deplibs $compiler_flags -soname $soname -o $lib' + if test "x$supports_anon_versioning" = xyes; then + _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $output_objdir/$libname.ver~ + cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ + echo "local: *; };" >> 
$output_objdir/$libname.ver~ + $LD -shared $libobjs $deplibs $compiler_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' + fi + ;; + esac + else + _LT_TAGVAR(ld_shlibs, $1)=no + fi + ;; + + netbsd*) + if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then + _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib' + wlarc= + else + _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' + _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' + fi + ;; + + solaris*) + if $LD -v 2>&1 | $GREP 'BFD 2\.8' > /dev/null; then + _LT_TAGVAR(ld_shlibs, $1)=no + cat <<_LT_EOF 1>&2 + +*** Warning: The releases 2.8.* of the GNU linker cannot reliably +*** create shared libraries on Solaris systems. Therefore, libtool +*** is disabling shared libraries support. We urge you to upgrade GNU +*** binutils to release 2.9.1 or newer. Another option is to modify +*** your PATH or compiler configuration so that the native linker is +*** used, and then restart. + +_LT_EOF + elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then + _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' + _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' + else + _LT_TAGVAR(ld_shlibs, $1)=no + fi + ;; + + sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX*) + case `$LD -v 2>&1` in + *\ [[01]].* | *\ 2.[[0-9]].* | *\ 2.1[[0-5]].*) + _LT_TAGVAR(ld_shlibs, $1)=no + cat <<_LT_EOF 1>&2 + +*** Warning: Releases of the GNU linker prior to 2.16.91.0.3 can not +*** reliably create shared libraries on SCO systems. Therefore, libtool +*** is disabling shared libraries support. We urge you to upgrade GNU +*** binutils to release 2.16.91.0.3 or newer. Another option is to modify +*** your PATH or compiler configuration so that the native linker is +*** used, and then restart. + +_LT_EOF + ;; + *) + # For security reasons, it is highly recommended that you always + # use absolute paths for naming shared libraries, and exclude the + # DT_RUNPATH tag from executables and libraries. But doing so + # requires that you compile everything twice, which is a pain. 
+ if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' + _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' + _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' + else + _LT_TAGVAR(ld_shlibs, $1)=no + fi + ;; + esac + ;; + + sunos4*) + _LT_TAGVAR(archive_cmds, $1)='$LD -assert pure-text -Bshareable -o $lib $libobjs $deplibs $linker_flags' + wlarc= + _LT_TAGVAR(hardcode_direct, $1)=yes + _LT_TAGVAR(hardcode_shlibpath_var, $1)=no + ;; + + *) + if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then + _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' + _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' + else + _LT_TAGVAR(ld_shlibs, $1)=no + fi + ;; + esac + + if test "$_LT_TAGVAR(ld_shlibs, $1)" = no; then + runpath_var= + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)= + _LT_TAGVAR(export_dynamic_flag_spec, $1)= + _LT_TAGVAR(whole_archive_flag_spec, $1)= + fi + else + # PORTME fill in a description of your system's linker (not GNU ld) + case $host_os in + aix3*) + _LT_TAGVAR(allow_undefined_flag, $1)=unsupported + _LT_TAGVAR(always_export_symbols, $1)=yes + _LT_TAGVAR(archive_expsym_cmds, $1)='$LD -o $output_objdir/$soname $libobjs $deplibs $linker_flags -bE:$export_symbols -T512 -H512 -bM:SRE~$AR $AR_FLAGS $lib $output_objdir/$soname' + # Note: this linker hardcodes the directories in LIBPATH if there + # are no directories specified by -L. + _LT_TAGVAR(hardcode_minus_L, $1)=yes + if test "$GCC" = yes && test -z "$lt_prog_compiler_static"; then + # Neither direct hardcoding nor static linking is supported with a + # broken collect2. + _LT_TAGVAR(hardcode_direct, $1)=unsupported + fi + ;; + + aix[[4-9]]*) + if test "$host_cpu" = ia64; then + # On IA64, the linker does run time linking by default, so we don't + # have to do anything special. + aix_use_runtimelinking=no + exp_sym_flag='-Bexport' + no_entry_flag="" + else + # If we're using GNU nm, then we don't want the "-C" option. + # -C means demangle to AIX nm, but means don't demangle with GNU nm + if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then + _LT_TAGVAR(export_symbols_cmds, $1)='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && ([substr](\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' + else + _LT_TAGVAR(export_symbols_cmds, $1)='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && ([substr](\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' + fi + aix_use_runtimelinking=no + + # Test if we are trying to use run time linking or normal + # AIX style linking. If -brtl is somewhere in LDFLAGS, we + # need to do runtime linking. + case $host_os in aix4.[[23]]|aix4.[[23]].*|aix[[5-9]]*) + for ld_flag in $LDFLAGS; do + if (test $ld_flag = "-brtl" || test $ld_flag = "-Wl,-brtl"); then + aix_use_runtimelinking=yes + break + fi + done + ;; + esac + + exp_sym_flag='-bexport' + no_entry_flag='-bnoentry' + fi + + # When large executables or shared objects are built, AIX ld can + # have problems creating the table of contents. 
If linking a library + # or program results in "error TOC overflow" add -mminimal-toc to + # CXXFLAGS/CFLAGS for g++/gcc. In the cases where that is not + # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS. + + _LT_TAGVAR(archive_cmds, $1)='' + _LT_TAGVAR(hardcode_direct, $1)=yes + _LT_TAGVAR(hardcode_direct_absolute, $1)=yes + _LT_TAGVAR(hardcode_libdir_separator, $1)=':' + _LT_TAGVAR(link_all_deplibs, $1)=yes + _LT_TAGVAR(file_list_spec, $1)='${wl}-f,' + + if test "$GCC" = yes; then + case $host_os in aix4.[[012]]|aix4.[[012]].*) + # We only want to do this on AIX 4.2 and lower, the check + # below for broken collect2 doesn't work under 4.3+ + collect2name=`${CC} -print-prog-name=collect2` + if test -f "$collect2name" && + strings "$collect2name" | $GREP resolve_lib_name >/dev/null + then + # We have reworked collect2 + : + else + # We have old collect2 + _LT_TAGVAR(hardcode_direct, $1)=unsupported + # It fails to find uninstalled libraries when the uninstalled + # path is not listed in the libpath. Setting hardcode_minus_L + # to unsupported forces relinking + _LT_TAGVAR(hardcode_minus_L, $1)=yes + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' + _LT_TAGVAR(hardcode_libdir_separator, $1)= + fi + ;; + esac + shared_flag='-shared' + if test "$aix_use_runtimelinking" = yes; then + shared_flag="$shared_flag "'${wl}-G' + fi + else + # not using gcc + if test "$host_cpu" = ia64; then + # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release + # chokes on -Wl,-G. The following line is correct: + shared_flag='-G' + else + if test "$aix_use_runtimelinking" = yes; then + shared_flag='${wl}-G' + else + shared_flag='${wl}-bM:SRE' + fi + fi + fi + + # It seems that -bexpall does not export symbols beginning with + # underscore (_), so it is better to generate a list of symbols to export. + _LT_TAGVAR(always_export_symbols, $1)=yes + if test "$aix_use_runtimelinking" = yes; then + # Warning - without using the other runtime loading flags (-brtl), + # -berok will link without error, but may produce a broken library. + _LT_TAGVAR(allow_undefined_flag, $1)='-berok' + # Determine the default libpath from the value encoded in an + # empty executable. + _LT_SYS_MODULE_PATH_AIX + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath" + _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then $ECHO "X${wl}${allow_undefined_flag}" | $Xsed; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag" + else + if test "$host_cpu" = ia64; then + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R $libdir:/usr/lib:/lib' + _LT_TAGVAR(allow_undefined_flag, $1)="-z nodefs" + _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$exp_sym_flag:\$export_symbols" + else + # Determine the default libpath from the value encoded in an + # empty executable. + _LT_SYS_MODULE_PATH_AIX + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath" + # Warning - without using the other run time loading flags, + # -berok will link without error, but may produce a broken library. 
+ _LT_TAGVAR(no_undefined_flag, $1)=' ${wl}-bernotok' + _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-berok' + # Exported symbols can be pulled into shared objects from archives + _LT_TAGVAR(whole_archive_flag_spec, $1)='$convenience' + _LT_TAGVAR(archive_cmds_need_lc, $1)=yes + # This is similar to how AIX traditionally builds its shared libraries. + _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs ${wl}-bnoentry $compiler_flags ${wl}-bE:$export_symbols${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname' + fi + fi + ;; + + amigaos*) + case $host_cpu in + powerpc) + # see comment about AmigaOS4 .so support + _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' + _LT_TAGVAR(archive_expsym_cmds, $1)='' + ;; + m68k) + _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)' + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' + _LT_TAGVAR(hardcode_minus_L, $1)=yes + ;; + esac + ;; + + bsdi[[45]]*) + _LT_TAGVAR(export_dynamic_flag_spec, $1)=-rdynamic + ;; + + cygwin* | mingw* | pw32*) + # When not using gcc, we currently assume that we are using + # Microsoft Visual C++. + # hardcode_libdir_flag_spec is actually meaningless, as there is + # no search path for DLLs. + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' ' + _LT_TAGVAR(allow_undefined_flag, $1)=unsupported + # Tell ltmain to make .lib files, not .a files. + libext=lib + # Tell ltmain to make .dll files, not .so files. + shrext_cmds=".dll" + # FIXME: Setting linknames here is a bad hack. + _LT_TAGVAR(archive_cmds, $1)='$CC -o $lib $libobjs $compiler_flags `$ECHO "X$deplibs" | $Xsed -e '\''s/ -lc$//'\''` -link -dll~linknames=' + # The linker will automatically build a .lib file if we build a DLL. + _LT_TAGVAR(old_archive_from_new_cmds, $1)='true' + # FIXME: Should let the user specify the lib program. + _LT_TAGVAR(old_archive_cmds, $1)='lib -OUT:$oldlib$oldobjs$old_deplibs' + _LT_TAGVAR(fix_srcfile_path, $1)='`cygpath -w "$srcfile"`' + _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes + ;; + + darwin* | rhapsody*) + _LT_DARWIN_LINKER_FEATURES($1) + ;; + + dgux*) + _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' + _LT_TAGVAR(hardcode_shlibpath_var, $1)=no + ;; + + freebsd1*) + _LT_TAGVAR(ld_shlibs, $1)=no + ;; + + # FreeBSD 2.2.[012] allows us to include c++rt0.o to get C++ constructor + # support. Future versions do this automatically, but an explicit c++rt0.o + # does not break anything, and helps significantly (at the cost of a little + # extra space). + freebsd2.2*) + _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags /usr/lib/c++rt0.o' + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' + _LT_TAGVAR(hardcode_direct, $1)=yes + _LT_TAGVAR(hardcode_shlibpath_var, $1)=no + ;; + + # Unfortunately, older versions of FreeBSD 2 do not have this feature. 
+ freebsd2*) + _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' + _LT_TAGVAR(hardcode_direct, $1)=yes + _LT_TAGVAR(hardcode_minus_L, $1)=yes + _LT_TAGVAR(hardcode_shlibpath_var, $1)=no + ;; + + # FreeBSD 3 and greater uses gcc -shared to do shared libraries. + freebsd* | dragonfly*) + _LT_TAGVAR(archive_cmds, $1)='$CC -shared -o $lib $libobjs $deplibs $compiler_flags' + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' + _LT_TAGVAR(hardcode_direct, $1)=yes + _LT_TAGVAR(hardcode_shlibpath_var, $1)=no + ;; + + hpux9*) + if test "$GCC" = yes; then + _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' + else + _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' + fi + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir' + _LT_TAGVAR(hardcode_libdir_separator, $1)=: + _LT_TAGVAR(hardcode_direct, $1)=yes + + # hardcode_minus_L: Not really in the search PATH, + # but as the default location of the library. + _LT_TAGVAR(hardcode_minus_L, $1)=yes + _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' + ;; + + hpux10*) + if test "$GCC" = yes -a "$with_gnu_ld" = no; then + _LT_TAGVAR(archive_cmds, $1)='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' + else + _LT_TAGVAR(archive_cmds, $1)='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' + fi + if test "$with_gnu_ld" = no; then + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir' + _LT_TAGVAR(hardcode_libdir_flag_spec_ld, $1)='+b $libdir' + _LT_TAGVAR(hardcode_libdir_separator, $1)=: + _LT_TAGVAR(hardcode_direct, $1)=yes + _LT_TAGVAR(hardcode_direct_absolute, $1)=yes + _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' + # hardcode_minus_L: Not really in the search PATH, + # but as the default location of the library. 
+ _LT_TAGVAR(hardcode_minus_L, $1)=yes + fi + ;; + + hpux11*) + if test "$GCC" = yes -a "$with_gnu_ld" = no; then + case $host_cpu in + hppa*64*) + _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' + ;; + ia64*) + _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' + ;; + *) + _LT_TAGVAR(archive_cmds, $1)='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' + ;; + esac + else + case $host_cpu in + hppa*64*) + _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' + ;; + ia64*) + _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' + ;; + *) + _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' + ;; + esac + fi + if test "$with_gnu_ld" = no; then + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir' + _LT_TAGVAR(hardcode_libdir_separator, $1)=: + + case $host_cpu in + hppa*64*|ia64*) + _LT_TAGVAR(hardcode_direct, $1)=no + _LT_TAGVAR(hardcode_shlibpath_var, $1)=no + ;; + *) + _LT_TAGVAR(hardcode_direct, $1)=yes + _LT_TAGVAR(hardcode_direct_absolute, $1)=yes + _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' + + # hardcode_minus_L: Not really in the search PATH, + # but as the default location of the library. + _LT_TAGVAR(hardcode_minus_L, $1)=yes + ;; + esac + fi + ;; + + irix5* | irix6* | nonstopux*) + if test "$GCC" = yes; then + _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && $ECHO "X${wl}-set_version ${wl}$verstring" | $Xsed` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' + # Try to use the -exported_symbol ld option, if it does not + # work, assume that -exports_file does not work either and + # implicitly export all symbols. 
+ save_LDFLAGS="$LDFLAGS" + LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" + AC_LINK_IFELSE(int foo(void) {}, + _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && $ECHO "X${wl}-set_version ${wl}$verstring" | $Xsed` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' + ) + LDFLAGS="$save_LDFLAGS" + else + _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && $ECHO "X-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -o $lib' + _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && $ECHO "X-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib' + fi + _LT_TAGVAR(archive_cmds_need_lc, $1)='no' + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' + _LT_TAGVAR(hardcode_libdir_separator, $1)=: + _LT_TAGVAR(inherit_rpath, $1)=yes + _LT_TAGVAR(link_all_deplibs, $1)=yes + ;; + + netbsd*) + if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then + _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' # a.out + else + _LT_TAGVAR(archive_cmds, $1)='$LD -shared -o $lib $libobjs $deplibs $linker_flags' # ELF + fi + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' + _LT_TAGVAR(hardcode_direct, $1)=yes + _LT_TAGVAR(hardcode_shlibpath_var, $1)=no + ;; + + newsos6) + _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' + _LT_TAGVAR(hardcode_direct, $1)=yes + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' + _LT_TAGVAR(hardcode_libdir_separator, $1)=: + _LT_TAGVAR(hardcode_shlibpath_var, $1)=no + ;; + + *nto* | *qnx*) + ;; + + openbsd*) + if test -f /usr/libexec/ld.so; then + _LT_TAGVAR(hardcode_direct, $1)=yes + _LT_TAGVAR(hardcode_shlibpath_var, $1)=no + _LT_TAGVAR(hardcode_direct_absolute, $1)=yes + if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then + _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' + _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags ${wl}-retain-symbols-file,$export_symbols' + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir' + _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' + else + case $host_os in + openbsd[[01]].* | openbsd2.[[0-7]] | openbsd2.[[0-7]].*) + _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' + ;; + *) + _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir' + ;; + esac + fi + else + _LT_TAGVAR(ld_shlibs, $1)=no + fi + ;; + + os2*) + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' + _LT_TAGVAR(hardcode_minus_L, $1)=yes + _LT_TAGVAR(allow_undefined_flag, $1)=unsupported + _LT_TAGVAR(archive_cmds, $1)='$ECHO "LIBRARY $libname INITINSTANCE" > $output_objdir/$libname.def~$ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~$ECHO DATA >> $output_objdir/$libname.def~$ECHO " SINGLE NONSHARED" >> $output_objdir/$libname.def~$ECHO EXPORTS >> $output_objdir/$libname.def~emxexp 
$libobjs >> $output_objdir/$libname.def~$CC -Zdll -Zcrtdll -o $lib $libobjs $deplibs $compiler_flags $output_objdir/$libname.def' + _LT_TAGVAR(old_archive_from_new_cmds, $1)='emximp -o $output_objdir/$libname.a $output_objdir/$libname.def' + ;; + + osf3*) + if test "$GCC" = yes; then + _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*' + _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && $ECHO "X${wl}-set_version ${wl}$verstring" | $Xsed` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' + else + _LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*' + _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && $ECHO "X-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -o $lib' + fi + _LT_TAGVAR(archive_cmds_need_lc, $1)='no' + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' + _LT_TAGVAR(hardcode_libdir_separator, $1)=: + ;; + + osf4* | osf5*) # as osf3* with the addition of -msym flag + if test "$GCC" = yes; then + _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*' + _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && $ECHO "X${wl}-set_version ${wl}$verstring" | $Xsed` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' + else + _LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*' + _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -msym -soname $soname `test -n "$verstring" && $ECHO "X-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -o $lib' + _LT_TAGVAR(archive_expsym_cmds, $1)='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done; printf "%s\\n" "-hidden">> $lib.exp~ + $CC -shared${allow_undefined_flag} ${wl}-input ${wl}$lib.exp $compiler_flags $libobjs $deplibs -soname $soname `test -n "$verstring" && $ECHO "X-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -o $lib~$RM $lib.exp' + + # Both c and cxx compiler support -rpath directly + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-rpath $libdir' + fi + _LT_TAGVAR(archive_cmds_need_lc, $1)='no' + _LT_TAGVAR(hardcode_libdir_separator, $1)=: + ;; + + solaris*) + _LT_TAGVAR(no_undefined_flag, $1)=' -z defs' + if test "$GCC" = yes; then + wlarc='${wl}' + _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' + _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ + $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' + else + case `$CC -V 2>&1` in + *"Compilers 5.0"*) + wlarc='' + _LT_TAGVAR(archive_cmds, $1)='$LD -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $linker_flags' + _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ + $LD -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$RM $lib.exp' + ;; + *) 
+ wlarc='${wl}' + _LT_TAGVAR(archive_cmds, $1)='$CC -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $compiler_flags' + _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ + $CC -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' + ;; + esac + fi + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' + _LT_TAGVAR(hardcode_shlibpath_var, $1)=no + case $host_os in + solaris2.[[0-5]] | solaris2.[[0-5]].*) ;; + *) + # The compiler driver will combine and reorder linker options, + # but understands `-z linker_flag'. GCC discards it without `$wl', + # but is careful enough not to reorder. + # Supported since Solaris 2.6 (maybe 2.5.1?) + if test "$GCC" = yes; then + _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract' + else + _LT_TAGVAR(whole_archive_flag_spec, $1)='-z allextract$convenience -z defaultextract' + fi + ;; + esac + _LT_TAGVAR(link_all_deplibs, $1)=yes + ;; + + sunos4*) + if test "x$host_vendor" = xsequent; then + # Use $CC to link under sequent, because it throws in some extra .o + # files that make .init and .fini sections work. + _LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h $soname -o $lib $libobjs $deplibs $compiler_flags' + else + _LT_TAGVAR(archive_cmds, $1)='$LD -assert pure-text -Bstatic -o $lib $libobjs $deplibs $linker_flags' + fi + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' + _LT_TAGVAR(hardcode_direct, $1)=yes + _LT_TAGVAR(hardcode_minus_L, $1)=yes + _LT_TAGVAR(hardcode_shlibpath_var, $1)=no + ;; + + sysv4) + case $host_vendor in + sni) + _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' + _LT_TAGVAR(hardcode_direct, $1)=yes # is this really true??? + ;; + siemens) + ## LD is ld it makes a PLAMLIB + ## CC just makes a GrossModule. 
+ _LT_TAGVAR(archive_cmds, $1)='$LD -G -o $lib $libobjs $deplibs $linker_flags' + _LT_TAGVAR(reload_cmds, $1)='$CC -r -o $output$reload_objs' + _LT_TAGVAR(hardcode_direct, $1)=no + ;; + motorola) + _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' + _LT_TAGVAR(hardcode_direct, $1)=no #Motorola manual says yes, but my tests say they lie + ;; + esac + runpath_var='LD_RUN_PATH' + _LT_TAGVAR(hardcode_shlibpath_var, $1)=no + ;; + + sysv4.3*) + _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' + _LT_TAGVAR(hardcode_shlibpath_var, $1)=no + _LT_TAGVAR(export_dynamic_flag_spec, $1)='-Bexport' + ;; + + sysv4*MP*) + if test -d /usr/nec; then + _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' + _LT_TAGVAR(hardcode_shlibpath_var, $1)=no + runpath_var=LD_RUN_PATH + hardcode_runpath_var=yes + _LT_TAGVAR(ld_shlibs, $1)=yes + fi + ;; + + sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[[01]].[[10]]* | unixware7* | sco3.2v5.0.[[024]]*) + _LT_TAGVAR(no_undefined_flag, $1)='${wl}-z,text' + _LT_TAGVAR(archive_cmds_need_lc, $1)=no + _LT_TAGVAR(hardcode_shlibpath_var, $1)=no + runpath_var='LD_RUN_PATH' + + if test "$GCC" = yes; then + _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' + _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' + else + _LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' + _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' + fi + ;; + + sysv5* | sco3.2v5* | sco5v6*) + # Note: We can NOT use -z defs as we might desire, because we do not + # link with -lc, and that would cause any symbols used from libc to + # always be unresolved, which means just about no library would + # ever link correctly. If we're not using GNU ld we use -z text + # though, which does catch some bad symbols but isn't as heavy-handed + # as -z defs. 
+ _LT_TAGVAR(no_undefined_flag, $1)='${wl}-z,text' + _LT_TAGVAR(allow_undefined_flag, $1)='${wl}-z,nodefs' + _LT_TAGVAR(archive_cmds_need_lc, $1)=no + _LT_TAGVAR(hardcode_shlibpath_var, $1)=no + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R,$libdir' + _LT_TAGVAR(hardcode_libdir_separator, $1)=':' + _LT_TAGVAR(link_all_deplibs, $1)=yes + _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-Bexport' + runpath_var='LD_RUN_PATH' + + if test "$GCC" = yes; then + _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' + _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' + else + _LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' + _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' + fi + ;; + + uts4*) + _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' + _LT_TAGVAR(hardcode_shlibpath_var, $1)=no + ;; + + *) + _LT_TAGVAR(ld_shlibs, $1)=no + ;; + esac + + if test x$host_vendor = xsni; then + case $host in + sysv4 | sysv4.2uw2* | sysv4.3* | sysv5*) + _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-Blargedynsym' + ;; + esac + fi + fi +]) +AC_MSG_RESULT([$_LT_TAGVAR(ld_shlibs, $1)]) +test "$_LT_TAGVAR(ld_shlibs, $1)" = no && can_build_shared=no + +_LT_TAGVAR(with_gnu_ld, $1)=$with_gnu_ld + +_LT_DECL([], [libext], [0], [Old archive suffix (normally "a")])dnl +_LT_DECL([], [shrext_cmds], [1], [Shared library suffix (normally ".so")])dnl +_LT_DECL([], [extract_expsyms_cmds], [2], + [The commands to extract the exported symbol list from a shared archive]) + +# +# Do we need to explicitly link libc? +# +case "x$_LT_TAGVAR(archive_cmds_need_lc, $1)" in +x|xyes) + # Assume -lc should be added + _LT_TAGVAR(archive_cmds_need_lc, $1)=yes + + if test "$enable_shared" = yes && test "$GCC" = yes; then + case $_LT_TAGVAR(archive_cmds, $1) in + *'~'*) + # FIXME: we may have to deal with multi-command sequences. + ;; + '$CC '*) + # Test whether the compiler implicitly links with -lc since on some + # systems, -lgcc has to come before -lc. If gcc already passes -lc + # to ld, don't add -lc before -lgcc. + AC_MSG_CHECKING([whether -lc should be explicitly linked in]) + $RM conftest* + echo "$lt_simple_compile_test_code" > conftest.$ac_ext + + if AC_TRY_EVAL(ac_compile) 2>conftest.err; then + soname=conftest + lib=conftest + libobjs=conftest.$ac_objext + deplibs= + wl=$_LT_TAGVAR(lt_prog_compiler_wl, $1) + pic_flag=$_LT_TAGVAR(lt_prog_compiler_pic, $1) + compiler_flags=-v + linker_flags=-v + verstring= + output_objdir=. 
+ libname=conftest + lt_save_allow_undefined_flag=$_LT_TAGVAR(allow_undefined_flag, $1) + _LT_TAGVAR(allow_undefined_flag, $1)= + if AC_TRY_EVAL(_LT_TAGVAR(archive_cmds, $1) 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1) + then + _LT_TAGVAR(archive_cmds_need_lc, $1)=no + else + _LT_TAGVAR(archive_cmds_need_lc, $1)=yes + fi + _LT_TAGVAR(allow_undefined_flag, $1)=$lt_save_allow_undefined_flag + else + cat conftest.err 1>&5 + fi + $RM conftest* + AC_MSG_RESULT([$_LT_TAGVAR(archive_cmds_need_lc, $1)]) + ;; + esac + fi + ;; +esac + +_LT_TAGDECL([build_libtool_need_lc], [archive_cmds_need_lc], [0], + [Whether or not to add -lc for building shared libraries]) +_LT_TAGDECL([allow_libtool_libs_with_static_runtimes], + [enable_shared_with_static_runtimes], [0], + [Whether or not to disallow shared libs when runtime libs are static]) +_LT_TAGDECL([], [export_dynamic_flag_spec], [1], + [Compiler flag to allow reflexive dlopens]) +_LT_TAGDECL([], [whole_archive_flag_spec], [1], + [Compiler flag to generate shared objects directly from archives]) +_LT_TAGDECL([], [compiler_needs_object], [1], + [Whether the compiler copes with passing no objects directly]) +_LT_TAGDECL([], [old_archive_from_new_cmds], [2], + [Create an old-style archive from a shared archive]) +_LT_TAGDECL([], [old_archive_from_expsyms_cmds], [2], + [Create a temporary old-style archive to link instead of a shared archive]) +_LT_TAGDECL([], [archive_cmds], [2], [Commands used to build a shared archive]) +_LT_TAGDECL([], [archive_expsym_cmds], [2]) +_LT_TAGDECL([], [module_cmds], [2], + [Commands used to build a loadable module if different from building + a shared archive.]) +_LT_TAGDECL([], [module_expsym_cmds], [2]) +_LT_TAGDECL([], [with_gnu_ld], [1], + [Whether we are building with GNU ld or not]) +_LT_TAGDECL([], [allow_undefined_flag], [1], + [Flag that allows shared libraries with undefined symbols to be built]) +_LT_TAGDECL([], [no_undefined_flag], [1], + [Flag that enforces no undefined symbols]) +_LT_TAGDECL([], [hardcode_libdir_flag_spec], [1], + [Flag to hardcode $libdir into a binary during linking. + This must work even if $libdir does not exist]) +_LT_TAGDECL([], [hardcode_libdir_flag_spec_ld], [1], + [[If ld is used when linking, flag to hardcode $libdir into a binary + during linking. 
This must work even if $libdir does not exist]]) +_LT_TAGDECL([], [hardcode_libdir_separator], [1], + [Whether we need a single "-rpath" flag with a separated argument]) +_LT_TAGDECL([], [hardcode_direct], [0], + [Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes + DIR into the resulting binary]) +_LT_TAGDECL([], [hardcode_direct_absolute], [0], + [Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes + DIR into the resulting binary and the resulting library dependency is + "absolute", i.e impossible to change by setting ${shlibpath_var} if the + library is relocated]) +_LT_TAGDECL([], [hardcode_minus_L], [0], + [Set to "yes" if using the -LDIR flag during linking hardcodes DIR + into the resulting binary]) +_LT_TAGDECL([], [hardcode_shlibpath_var], [0], + [Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR + into the resulting binary]) +_LT_TAGDECL([], [hardcode_automatic], [0], + [Set to "yes" if building a shared library automatically hardcodes DIR + into the library and all subsequent libraries and executables linked + against it]) +_LT_TAGDECL([], [inherit_rpath], [0], + [Set to yes if linker adds runtime paths of dependent libraries + to runtime path list]) +_LT_TAGDECL([], [link_all_deplibs], [0], + [Whether libtool must link a program against all its dependency libraries]) +_LT_TAGDECL([], [fix_srcfile_path], [1], + [Fix the shell variable $srcfile for the compiler]) +_LT_TAGDECL([], [always_export_symbols], [0], + [Set to "yes" if exported symbols are required]) +_LT_TAGDECL([], [export_symbols_cmds], [2], + [The commands to list exported symbols]) +_LT_TAGDECL([], [exclude_expsyms], [1], + [Symbols that should not be listed in the preloaded symbols]) +_LT_TAGDECL([], [include_expsyms], [1], + [Symbols that must always be exported]) +_LT_TAGDECL([], [prelink_cmds], [2], + [Commands necessary for linking programs (against libraries) with templates]) +_LT_TAGDECL([], [file_list_spec], [1], + [Specify filename containing input files]) +dnl FIXME: Not yet implemented +dnl _LT_TAGDECL([], [thread_safe_flag_spec], [1], +dnl [Compiler flag to generate thread safe objects]) +])# _LT_LINKER_SHLIBS + + +# _LT_LANG_C_CONFIG([TAG]) +# ------------------------ +# Ensure that the configuration variables for a C compiler are suitably +# defined. These variables are subsequently used by _LT_CONFIG to write +# the compiler configuration to `libtool'. +m4_defun([_LT_LANG_C_CONFIG], +[m4_require([_LT_DECL_EGREP])dnl +lt_save_CC="$CC" +AC_LANG_PUSH(C) + +# Source file extension for C test sources. +ac_ext=c + +# Object file extension for compiled C test sources. +objext=o +_LT_TAGVAR(objext, $1)=$objext + +# Code to be used in simple compile tests +lt_simple_compile_test_code="int some_variable = 0;" + +# Code to be used in simple link tests +lt_simple_link_test_code='int main(){return(0);}' + +_LT_TAG_COMPILER +# Save the default compiler, since it gets overwritten when the other +# tags are being tested, and _LT_TAGVAR(compiler, []) is a NOP. +compiler_DEFAULT=$CC + +# save warnings/boilerplate of simple test code +_LT_COMPILER_BOILERPLATE +_LT_LINKER_BOILERPLATE + +## CAVEAT EMPTOR: +## There is no encapsulation within the following macros, do not change +## the running order or otherwise move them around unless you know exactly +## what you are doing... 
+if test -n "$compiler"; then + _LT_COMPILER_NO_RTTI($1) + _LT_COMPILER_PIC($1) + _LT_COMPILER_C_O($1) + _LT_COMPILER_FILE_LOCKS($1) + _LT_LINKER_SHLIBS($1) + _LT_SYS_DYNAMIC_LINKER($1) + _LT_LINKER_HARDCODE_LIBPATH($1) + LT_SYS_DLOPEN_SELF + _LT_CMD_STRIPLIB + + # Report which library types will actually be built + AC_MSG_CHECKING([if libtool supports shared libraries]) + AC_MSG_RESULT([$can_build_shared]) + + AC_MSG_CHECKING([whether to build shared libraries]) + test "$can_build_shared" = "no" && enable_shared=no + + # On AIX, shared libraries and static libraries use the same namespace, and + # are all built from PIC. + case $host_os in + aix3*) + test "$enable_shared" = yes && enable_static=no + if test -n "$RANLIB"; then + archive_cmds="$archive_cmds~\$RANLIB \$lib" + postinstall_cmds='$RANLIB $lib' + fi + ;; + + aix[[4-9]]*) + if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then + test "$enable_shared" = yes && enable_static=no + fi + ;; + esac + AC_MSG_RESULT([$enable_shared]) + + AC_MSG_CHECKING([whether to build static libraries]) + # Make sure either enable_shared or enable_static is yes. + test "$enable_shared" = yes || enable_static=yes + AC_MSG_RESULT([$enable_static]) + + _LT_CONFIG($1) +fi +AC_LANG_POP +CC="$lt_save_CC" +])# _LT_LANG_C_CONFIG + + +# _LT_PROG_CXX +# ------------ +# Since AC_PROG_CXX is broken, in that it returns g++ if there is no c++ +# compiler, we have our own version here. +m4_defun([_LT_PROG_CXX], +[ +pushdef([AC_MSG_ERROR], [_lt_caught_CXX_error=yes]) +AC_PROG_CXX +if test -n "$CXX" && ( test "X$CXX" != "Xno" && + ( (test "X$CXX" = "Xg++" && `g++ -v >/dev/null 2>&1` ) || + (test "X$CXX" != "Xg++"))) ; then + AC_PROG_CXXCPP +else + _lt_caught_CXX_error=yes +fi +popdef([AC_MSG_ERROR]) +])# _LT_PROG_CXX + +dnl aclocal-1.4 backwards compatibility: +dnl AC_DEFUN([_LT_PROG_CXX], []) + + +# _LT_LANG_CXX_CONFIG([TAG]) +# -------------------------- +# Ensure that the configuration variables for a C++ compiler are suitably +# defined. These variables are subsequently used by _LT_CONFIG to write +# the compiler configuration to `libtool'. +m4_defun([_LT_LANG_CXX_CONFIG], +[AC_REQUIRE([_LT_PROG_CXX])dnl +m4_require([_LT_FILEUTILS_DEFAULTS])dnl +m4_require([_LT_DECL_EGREP])dnl + +AC_LANG_PUSH(C++) +_LT_TAGVAR(archive_cmds_need_lc, $1)=no +_LT_TAGVAR(allow_undefined_flag, $1)= +_LT_TAGVAR(always_export_symbols, $1)=no +_LT_TAGVAR(archive_expsym_cmds, $1)= +_LT_TAGVAR(compiler_needs_object, $1)=no +_LT_TAGVAR(export_dynamic_flag_spec, $1)= +_LT_TAGVAR(hardcode_direct, $1)=no +_LT_TAGVAR(hardcode_direct_absolute, $1)=no +_LT_TAGVAR(hardcode_libdir_flag_spec, $1)= +_LT_TAGVAR(hardcode_libdir_flag_spec_ld, $1)= +_LT_TAGVAR(hardcode_libdir_separator, $1)= +_LT_TAGVAR(hardcode_minus_L, $1)=no +_LT_TAGVAR(hardcode_shlibpath_var, $1)=unsupported +_LT_TAGVAR(hardcode_automatic, $1)=no +_LT_TAGVAR(inherit_rpath, $1)=no +_LT_TAGVAR(module_cmds, $1)= +_LT_TAGVAR(module_expsym_cmds, $1)= +_LT_TAGVAR(link_all_deplibs, $1)=unknown +_LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds +_LT_TAGVAR(no_undefined_flag, $1)= +_LT_TAGVAR(whole_archive_flag_spec, $1)= +_LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no + +# Source file extension for C++ test sources. +ac_ext=cpp + +# Object file extension for compiled C++ test sources. +objext=o +_LT_TAGVAR(objext, $1)=$objext + +# No sense in running all these tests if we already determined that +# the CXX compiler isn't working. 
Some variables (like enable_shared) +# are currently assumed to apply to all compilers on this platform, +# and will be corrupted by setting them based on a non-working compiler. +if test "$_lt_caught_CXX_error" != yes; then + # Code to be used in simple compile tests + lt_simple_compile_test_code="int some_variable = 0;" + + # Code to be used in simple link tests + lt_simple_link_test_code='int main(int, char *[[]]) { return(0); }' + + # ltmain only uses $CC for tagged configurations so make sure $CC is set. + _LT_TAG_COMPILER + + # save warnings/boilerplate of simple test code + _LT_COMPILER_BOILERPLATE + _LT_LINKER_BOILERPLATE + + # Allow CC to be a program name with arguments. + lt_save_CC=$CC + lt_save_LD=$LD + lt_save_GCC=$GCC + GCC=$GXX + lt_save_with_gnu_ld=$with_gnu_ld + lt_save_path_LD=$lt_cv_path_LD + if test -n "${lt_cv_prog_gnu_ldcxx+set}"; then + lt_cv_prog_gnu_ld=$lt_cv_prog_gnu_ldcxx + else + $as_unset lt_cv_prog_gnu_ld + fi + if test -n "${lt_cv_path_LDCXX+set}"; then + lt_cv_path_LD=$lt_cv_path_LDCXX + else + $as_unset lt_cv_path_LD + fi + test -z "${LDCXX+set}" || LD=$LDCXX + CC=${CXX-"c++"} + compiler=$CC + _LT_TAGVAR(compiler, $1)=$CC + _LT_CC_BASENAME([$compiler]) + + if test -n "$compiler"; then + # We don't want -fno-exception when compiling C++ code, so set the + # no_builtin_flag separately + if test "$GXX" = yes; then + _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=' -fno-builtin' + else + _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)= + fi + + if test "$GXX" = yes; then + # Set up default GNU C++ configuration + + LT_PATH_LD + + # Check if GNU C++ uses GNU ld as the underlying linker, since the + # archiving commands below assume that GNU ld is being used. + if test "$with_gnu_ld" = yes; then + _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib' + _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' + + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' + _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic' + + # If archive_cmds runs LD, not CC, wlarc should be empty + # XXX I think wlarc can be eliminated in ltcf-cxx, but I need to + # investigate it a little bit more. (MM) + wlarc='${wl}' + + # ancient GNU ld didn't support --whole-archive et. al. + if eval "`$CC -print-prog-name=ld` --help 2>&1" | + $GREP 'no-whole-archive' > /dev/null; then + _LT_TAGVAR(whole_archive_flag_spec, $1)="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive' + else + _LT_TAGVAR(whole_archive_flag_spec, $1)= + fi + else + with_gnu_ld=no + wlarc= + + # A generic and very simple default shared library creation + # command for GNU C++ for the case where it uses the native + # linker, instead of GNU ld. If possible, this setting should + # overridden to take advantage of the native linker features on + # the platform it is being used on. + _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $lib' + fi + + # Commands to make compiler produce verbose output that lists + # what "hidden" libraries, object files and flags are used when + # linking a shared library. 
+ output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "\-L"' + + else + GXX=no + with_gnu_ld=no + wlarc= + fi + + # PORTME: fill in a description of your system's C++ link characteristics + AC_MSG_CHECKING([whether the $compiler linker ($LD) supports shared libraries]) + _LT_TAGVAR(ld_shlibs, $1)=yes + case $host_os in + aix3*) + # FIXME: insert proper C++ library support + _LT_TAGVAR(ld_shlibs, $1)=no + ;; + aix[[4-9]]*) + if test "$host_cpu" = ia64; then + # On IA64, the linker does run time linking by default, so we don't + # have to do anything special. + aix_use_runtimelinking=no + exp_sym_flag='-Bexport' + no_entry_flag="" + else + aix_use_runtimelinking=no + + # Test if we are trying to use run time linking or normal + # AIX style linking. If -brtl is somewhere in LDFLAGS, we + # need to do runtime linking. + case $host_os in aix4.[[23]]|aix4.[[23]].*|aix[[5-9]]*) + for ld_flag in $LDFLAGS; do + case $ld_flag in + *-brtl*) + aix_use_runtimelinking=yes + break + ;; + esac + done + ;; + esac + + exp_sym_flag='-bexport' + no_entry_flag='-bnoentry' + fi + + # When large executables or shared objects are built, AIX ld can + # have problems creating the table of contents. If linking a library + # or program results in "error TOC overflow" add -mminimal-toc to + # CXXFLAGS/CFLAGS for g++/gcc. In the cases where that is not + # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS. + + _LT_TAGVAR(archive_cmds, $1)='' + _LT_TAGVAR(hardcode_direct, $1)=yes + _LT_TAGVAR(hardcode_direct_absolute, $1)=yes + _LT_TAGVAR(hardcode_libdir_separator, $1)=':' + _LT_TAGVAR(link_all_deplibs, $1)=yes + _LT_TAGVAR(file_list_spec, $1)='${wl}-f,' + + if test "$GXX" = yes; then + case $host_os in aix4.[[012]]|aix4.[[012]].*) + # We only want to do this on AIX 4.2 and lower, the check + # below for broken collect2 doesn't work under 4.3+ + collect2name=`${CC} -print-prog-name=collect2` + if test -f "$collect2name" && + strings "$collect2name" | $GREP resolve_lib_name >/dev/null + then + # We have reworked collect2 + : + else + # We have old collect2 + _LT_TAGVAR(hardcode_direct, $1)=unsupported + # It fails to find uninstalled libraries when the uninstalled + # path is not listed in the libpath. Setting hardcode_minus_L + # to unsupported forces relinking + _LT_TAGVAR(hardcode_minus_L, $1)=yes + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' + _LT_TAGVAR(hardcode_libdir_separator, $1)= + fi + esac + shared_flag='-shared' + if test "$aix_use_runtimelinking" = yes; then + shared_flag="$shared_flag "'${wl}-G' + fi + else + # not using gcc + if test "$host_cpu" = ia64; then + # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release + # chokes on -Wl,-G. The following line is correct: + shared_flag='-G' + else + if test "$aix_use_runtimelinking" = yes; then + shared_flag='${wl}-G' + else + shared_flag='${wl}-bM:SRE' + fi + fi + fi + + # It seems that -bexpall does not export symbols beginning with + # underscore (_), so it is better to generate a list of symbols to + # export. + _LT_TAGVAR(always_export_symbols, $1)=yes + if test "$aix_use_runtimelinking" = yes; then + # Warning - without using the other runtime loading flags (-brtl), + # -berok will link without error, but may produce a broken library. + _LT_TAGVAR(allow_undefined_flag, $1)='-berok' + # Determine the default libpath from the value encoded in an empty + # executable. 
+ _LT_SYS_MODULE_PATH_AIX + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath" + + _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then $ECHO "X${wl}${allow_undefined_flag}" | $Xsed; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag" + else + if test "$host_cpu" = ia64; then + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R $libdir:/usr/lib:/lib' + _LT_TAGVAR(allow_undefined_flag, $1)="-z nodefs" + _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$exp_sym_flag:\$export_symbols" + else + # Determine the default libpath from the value encoded in an + # empty executable. + _LT_SYS_MODULE_PATH_AIX + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath" + # Warning - without using the other run time loading flags, + # -berok will link without error, but may produce a broken library. + _LT_TAGVAR(no_undefined_flag, $1)=' ${wl}-bernotok' + _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-berok' + # Exported symbols can be pulled into shared objects from archives + _LT_TAGVAR(whole_archive_flag_spec, $1)='$convenience' + _LT_TAGVAR(archive_cmds_need_lc, $1)=yes + # This is similar to how AIX traditionally builds its shared + # libraries. + _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs ${wl}-bnoentry $compiler_flags ${wl}-bE:$export_symbols${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname' + fi + fi + ;; + + beos*) + if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then + _LT_TAGVAR(allow_undefined_flag, $1)=unsupported + # Joseph Beckenbach says some releases of gcc + # support --undefined. This deserves some investigation. FIXME + _LT_TAGVAR(archive_cmds, $1)='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' + else + _LT_TAGVAR(ld_shlibs, $1)=no + fi + ;; + + chorus*) + case $cc_basename in + *) + # FIXME: insert proper C++ library support + _LT_TAGVAR(ld_shlibs, $1)=no + ;; + esac + ;; + + cygwin* | mingw* | pw32*) + # _LT_TAGVAR(hardcode_libdir_flag_spec, $1) is actually meaningless, + # as there is no search path for DLLs. + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' + _LT_TAGVAR(allow_undefined_flag, $1)=unsupported + _LT_TAGVAR(always_export_symbols, $1)=no + _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes + + if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then + _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' + # If the export-symbols file already is a .def file (1st line + # is EXPORTS), use it as is; otherwise, prepend... 
+ _LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then + cp $export_symbols $output_objdir/$soname.def; + else + echo EXPORTS > $output_objdir/$soname.def; + cat $export_symbols >> $output_objdir/$soname.def; + fi~ + $CC -shared -nostdlib $output_objdir/$soname.def $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' + else + _LT_TAGVAR(ld_shlibs, $1)=no + fi + ;; + darwin* | rhapsody*) + _LT_DARWIN_LINKER_FEATURES($1) + ;; + + dgux*) + case $cc_basename in + ec++*) + # FIXME: insert proper C++ library support + _LT_TAGVAR(ld_shlibs, $1)=no + ;; + ghcx*) + # Green Hills C++ Compiler + # FIXME: insert proper C++ library support + _LT_TAGVAR(ld_shlibs, $1)=no + ;; + *) + # FIXME: insert proper C++ library support + _LT_TAGVAR(ld_shlibs, $1)=no + ;; + esac + ;; + + freebsd[[12]]*) + # C++ shared libraries reported to be fairly broken before + # switch to ELF + _LT_TAGVAR(ld_shlibs, $1)=no + ;; + + freebsd-elf*) + _LT_TAGVAR(archive_cmds_need_lc, $1)=no + ;; + + freebsd* | dragonfly*) + # FreeBSD 3 and later use GNU C++ and GNU ld with standard ELF + # conventions + _LT_TAGVAR(ld_shlibs, $1)=yes + ;; + + gnu*) + ;; + + hpux9*) + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir' + _LT_TAGVAR(hardcode_libdir_separator, $1)=: + _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' + _LT_TAGVAR(hardcode_direct, $1)=yes + _LT_TAGVAR(hardcode_minus_L, $1)=yes # Not in the search PATH, + # but as the default + # location of the library. + + case $cc_basename in + CC*) + # FIXME: insert proper C++ library support + _LT_TAGVAR(ld_shlibs, $1)=no + ;; + aCC*) + _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -b ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' + # Commands to make compiler produce verbose output that lists + # what "hidden" libraries, object files and flags are used when + # linking a shared library. + # + # There doesn't appear to be a way to prevent this compiler from + # explicitly linking system object files so we need to strip them + # from the output so that they don't get included in the library + # dependencies. 
+ output_verbose_link_cmd='templist=`($CC -b $CFLAGS -v conftest.$objext 2>&1) | $EGREP "\-L"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; $ECHO "X$list" | $Xsed' + ;; + *) + if test "$GXX" = yes; then + _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared -nostdlib -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' + else + # FIXME: insert proper C++ library support + _LT_TAGVAR(ld_shlibs, $1)=no + fi + ;; + esac + ;; + + hpux10*|hpux11*) + if test $with_gnu_ld = no; then + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir' + _LT_TAGVAR(hardcode_libdir_separator, $1)=: + + case $host_cpu in + hppa*64*|ia64*) + ;; + *) + _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' + ;; + esac + fi + case $host_cpu in + hppa*64*|ia64*) + _LT_TAGVAR(hardcode_direct, $1)=no + _LT_TAGVAR(hardcode_shlibpath_var, $1)=no + ;; + *) + _LT_TAGVAR(hardcode_direct, $1)=yes + _LT_TAGVAR(hardcode_direct_absolute, $1)=yes + _LT_TAGVAR(hardcode_minus_L, $1)=yes # Not in the search PATH, + # but as the default + # location of the library. + ;; + esac + + case $cc_basename in + CC*) + # FIXME: insert proper C++ library support + _LT_TAGVAR(ld_shlibs, $1)=no + ;; + aCC*) + case $host_cpu in + hppa*64*) + _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' + ;; + ia64*) + _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' + ;; + *) + _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' + ;; + esac + # Commands to make compiler produce verbose output that lists + # what "hidden" libraries, object files and flags are used when + # linking a shared library. + # + # There doesn't appear to be a way to prevent this compiler from + # explicitly linking system object files so we need to strip them + # from the output so that they don't get included in the library + # dependencies. 
+ output_verbose_link_cmd='templist=`($CC -b $CFLAGS -v conftest.$objext 2>&1) | $GREP "\-L"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; $ECHO "X$list" | $Xsed' + ;; + *) + if test "$GXX" = yes; then + if test $with_gnu_ld = no; then + case $host_cpu in + hppa*64*) + _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' + ;; + ia64*) + _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' + ;; + *) + _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' + ;; + esac + fi + else + # FIXME: insert proper C++ library support + _LT_TAGVAR(ld_shlibs, $1)=no + fi + ;; + esac + ;; + + interix[[3-9]]*) + _LT_TAGVAR(hardcode_direct, $1)=no + _LT_TAGVAR(hardcode_shlibpath_var, $1)=no + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir' + _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' + # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc. + # Instead, shared libraries are loaded at an image base (0x10000000 by + # default) and relocated if they conflict, which is a slow very memory + # consuming and fragmenting process. To avoid this, we pick a random, + # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link + # time. Moving up from 0x10000000 also allows more sbrk(2) space. + _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' + _LT_TAGVAR(archive_expsym_cmds, $1)='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' + ;; + irix5* | irix6*) + case $cc_basename in + CC*) + # SGI C++ + _LT_TAGVAR(archive_cmds, $1)='$CC -shared -all -multigot $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -soname $soname `test -n "$verstring" && $ECHO "X-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -o $lib' + + # Archives containing C++ object files must be created using + # "CC -ar", where "CC" is the IRIX C++ compiler. This is + # necessary to make sure instantiated templates are included + # in the archive. 
+ _LT_TAGVAR(old_archive_cmds, $1)='$CC -ar -WR,-u -o $oldlib $oldobjs' + ;; + *) + if test "$GXX" = yes; then + if test "$with_gnu_ld" = no; then + _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && $ECHO "X${wl}-set_version ${wl}$verstring" | $Xsed` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' + else + _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && $ECHO "X${wl}-set_version ${wl}$verstring" | $Xsed` -o $lib' + fi + fi + _LT_TAGVAR(link_all_deplibs, $1)=yes + ;; + esac + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' + _LT_TAGVAR(hardcode_libdir_separator, $1)=: + _LT_TAGVAR(inherit_rpath, $1)=yes + ;; + + linux* | k*bsd*-gnu) + case $cc_basename in + KCC*) + # Kuck and Associates, Inc. (KAI) C++ Compiler + + # KCC will only create a shared library if the output file + # ends with ".so" (or ".sl" for HP-UX), so rename the library + # to its proper name (with version) after linking. + _LT_TAGVAR(archive_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib; mv \$templib $lib' + _LT_TAGVAR(archive_expsym_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib ${wl}-retain-symbols-file,$export_symbols; mv \$templib $lib' + # Commands to make compiler produce verbose output that lists + # what "hidden" libraries, object files and flags are used when + # linking a shared library. + # + # There doesn't appear to be a way to prevent this compiler from + # explicitly linking system object files so we need to strip them + # from the output so that they don't get included in the library + # dependencies. + output_verbose_link_cmd='templist=`$CC $CFLAGS -v conftest.$objext -o libconftest$shared_ext 2>&1 | $GREP "ld"`; rm -f libconftest$shared_ext; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; $ECHO "X$list" | $Xsed' + + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir' + _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic' + + # Archives containing C++ object files must be created using + # "CC -Bstatic", where "CC" is the KAI C++ compiler. + _LT_TAGVAR(old_archive_cmds, $1)='$CC -Bstatic -o $oldlib $oldobjs' + ;; + icpc* | ecpc* ) + # Intel C++ + with_gnu_ld=yes + # version 8.0 and above of icpc choke on multiply defined symbols + # if we add $predep_objects and $postdep_objects, however 7.1 and + # earlier do not add the objects themselves. 
+ case `$CC -V 2>&1` in + *"Version 7."*) + _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib' + _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' + ;; + *) # Version 8.0 or newer + tmp_idyn= + case $host_cpu in + ia64*) tmp_idyn=' -i_dynamic';; + esac + _LT_TAGVAR(archive_cmds, $1)='$CC -shared'"$tmp_idyn"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' + _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared'"$tmp_idyn"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' + ;; + esac + _LT_TAGVAR(archive_cmds_need_lc, $1)=no + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir' + _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic' + _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive$convenience ${wl}--no-whole-archive' + ;; + pgCC* | pgcpp*) + # Portland Group C++ compiler + case `$CC -V` in + *pgCC\ [[1-5]]* | *pgcpp\ [[1-5]]*) + _LT_TAGVAR(prelink_cmds, $1)='tpldir=Template.dir~ + rm -rf $tpldir~ + $CC --prelink_objects --instantiation_dir $tpldir $objs $libobjs $compile_deplibs~ + compile_command="$compile_command `find $tpldir -name \*.o | $NL2SP`"' + _LT_TAGVAR(old_archive_cmds, $1)='tpldir=Template.dir~ + rm -rf $tpldir~ + $CC --prelink_objects --instantiation_dir $tpldir $oldobjs$old_deplibs~ + $AR $AR_FLAGS $oldlib$oldobjs$old_deplibs `find $tpldir -name \*.o | $NL2SP`~ + $RANLIB $oldlib' + _LT_TAGVAR(archive_cmds, $1)='tpldir=Template.dir~ + rm -rf $tpldir~ + $CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~ + $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname -o $lib' + _LT_TAGVAR(archive_expsym_cmds, $1)='tpldir=Template.dir~ + rm -rf $tpldir~ + $CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~ + $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname ${wl}-retain-symbols-file ${wl}$export_symbols -o $lib' + ;; + *) # Version 6 will use weak symbols + _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname -o $lib' + _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname ${wl}-retain-symbols-file ${wl}$export_symbols -o $lib' + ;; + esac + + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}--rpath ${wl}$libdir' + _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic' + _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; $ECHO \"$new_convenience\"` ${wl}--no-whole-archive' + ;; + cxx*) + # Compaq C++ + _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib' + _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib ${wl}-retain-symbols-file 
$wl$export_symbols' + + runpath_var=LD_RUN_PATH + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-rpath $libdir' + _LT_TAGVAR(hardcode_libdir_separator, $1)=: + + # Commands to make compiler produce verbose output that lists + # what "hidden" libraries, object files and flags are used when + # linking a shared library. + # + # There doesn't appear to be a way to prevent this compiler from + # explicitly linking system object files so we need to strip them + # from the output so that they don't get included in the library + # dependencies. + output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "ld"`; templist=`$ECHO "X$templist" | $Xsed -e "s/\(^.*ld.*\)\( .*ld .*$\)/\1/"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; $ECHO "X$list" | $Xsed' + ;; + xl*) + # IBM XL 8.0 on PPC, with GNU ld + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' + _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic' + _LT_TAGVAR(archive_cmds, $1)='$CC -qmkshrobj $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' + if test "x$supports_anon_versioning" = xyes; then + _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $output_objdir/$libname.ver~ + cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ + echo "local: *; };" >> $output_objdir/$libname.ver~ + $CC -qmkshrobj $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib' + fi + ;; + *) + case `$CC -V 2>&1 | sed 5q` in + *Sun\ C*) + # Sun C++ 5.9 + _LT_TAGVAR(no_undefined_flag, $1)=' -zdefs' + _LT_TAGVAR(archive_cmds, $1)='$CC -G${allow_undefined_flag} -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' + _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G${allow_undefined_flag} -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-retain-symbols-file ${wl}$export_symbols' + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' + _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; $ECHO \"$new_convenience\"` ${wl}--no-whole-archive' + _LT_TAGVAR(compiler_needs_object, $1)=yes + + # Not sure whether something based on + # $CC $CFLAGS -v conftest.$objext -o libconftest$shared_ext 2>&1 + # would be better. + output_verbose_link_cmd='echo' + + # Archives containing C++ object files must be created using + # "CC -xar", where "CC" is the Sun C++ compiler. This is + # necessary to make sure instantiated templates are included + # in the archive. 
+ _LT_TAGVAR(old_archive_cmds, $1)='$CC -xar -o $oldlib $oldobjs' + ;; + esac + ;; + esac + ;; + + lynxos*) + # FIXME: insert proper C++ library support + _LT_TAGVAR(ld_shlibs, $1)=no + ;; + + m88k*) + # FIXME: insert proper C++ library support + _LT_TAGVAR(ld_shlibs, $1)=no + ;; + + mvs*) + case $cc_basename in + cxx*) + # FIXME: insert proper C++ library support + _LT_TAGVAR(ld_shlibs, $1)=no + ;; + *) + # FIXME: insert proper C++ library support + _LT_TAGVAR(ld_shlibs, $1)=no + ;; + esac + ;; + + netbsd*) + if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then + _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $predep_objects $libobjs $deplibs $postdep_objects $linker_flags' + wlarc= + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' + _LT_TAGVAR(hardcode_direct, $1)=yes + _LT_TAGVAR(hardcode_shlibpath_var, $1)=no + fi + # Workaround some broken pre-1.5 toolchains + output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP conftest.$objext | $SED -e "s:-lgcc -lc -lgcc::"' + ;; + + *nto* | *qnx*) + _LT_TAGVAR(ld_shlibs, $1)=yes + ;; + + openbsd2*) + # C++ shared libraries are fairly broken + _LT_TAGVAR(ld_shlibs, $1)=no + ;; + + openbsd*) + if test -f /usr/libexec/ld.so; then + _LT_TAGVAR(hardcode_direct, $1)=yes + _LT_TAGVAR(hardcode_shlibpath_var, $1)=no + _LT_TAGVAR(hardcode_direct_absolute, $1)=yes + _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $lib' + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir' + if test -z "`echo __ELF__ | $CC -E - | grep __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then + _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-retain-symbols-file,$export_symbols -o $lib' + _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' + _LT_TAGVAR(whole_archive_flag_spec, $1)="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive' + fi + output_verbose_link_cmd=echo + else + _LT_TAGVAR(ld_shlibs, $1)=no + fi + ;; + + osf3* | osf4* | osf5*) + case $cc_basename in + KCC*) + # Kuck and Associates, Inc. (KAI) C++ Compiler + + # KCC will only create a shared library if the output file + # ends with ".so" (or ".sl" for HP-UX), so rename the library + # to its proper name (with version) after linking. + _LT_TAGVAR(archive_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo "$lib" | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib; mv \$templib $lib' + + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir' + _LT_TAGVAR(hardcode_libdir_separator, $1)=: + + # Archives containing C++ object files must be created using + # the KAI C++ compiler. 
+ case $host in + osf3*) _LT_TAGVAR(old_archive_cmds, $1)='$CC -Bstatic -o $oldlib $oldobjs' ;; + *) _LT_TAGVAR(old_archive_cmds, $1)='$CC -o $oldlib $oldobjs' ;; + esac + ;; + RCC*) + # Rational C++ 2.4.1 + # FIXME: insert proper C++ library support + _LT_TAGVAR(ld_shlibs, $1)=no + ;; + cxx*) + case $host in + osf3*) + _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*' + _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $soname `test -n "$verstring" && $ECHO "X${wl}-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -o $lib' + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' + ;; + *) + _LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*' + _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -msym -soname $soname `test -n "$verstring" && $ECHO "X-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -o $lib' + _LT_TAGVAR(archive_expsym_cmds, $1)='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done~ + echo "-hidden">> $lib.exp~ + $CC -shared$allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -msym -soname $soname ${wl}-input ${wl}$lib.exp `test -n "$verstring" && $ECHO "X-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -o $lib~ + $RM $lib.exp' + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-rpath $libdir' + ;; + esac + + _LT_TAGVAR(hardcode_libdir_separator, $1)=: + + # Commands to make compiler produce verbose output that lists + # what "hidden" libraries, object files and flags are used when + # linking a shared library. + # + # There doesn't appear to be a way to prevent this compiler from + # explicitly linking system object files so we need to strip them + # from the output so that they don't get included in the library + # dependencies. + output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "ld" | $GREP -v "ld:"`; templist=`$ECHO "X$templist" | $Xsed -e "s/\(^.*ld.*\)\( .*ld.*$\)/\1/"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; $ECHO "X$list" | $Xsed' + ;; + *) + if test "$GXX" = yes && test "$with_gnu_ld" = no; then + _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*' + case $host in + osf3*) + _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && $ECHO "X${wl}-set_version ${wl}$verstring" | $Xsed` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' + ;; + *) + _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && $ECHO "${wl}-set_version ${wl}$verstring" | $Xsed` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' + ;; + esac + + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' + _LT_TAGVAR(hardcode_libdir_separator, $1)=: + + # Commands to make compiler produce verbose output that lists + # what "hidden" libraries, object files and flags are used when + # linking a shared library. 
+ output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "\-L"' + + else + # FIXME: insert proper C++ library support + _LT_TAGVAR(ld_shlibs, $1)=no + fi + ;; + esac + ;; + + psos*) + # FIXME: insert proper C++ library support + _LT_TAGVAR(ld_shlibs, $1)=no + ;; + + sunos4*) + case $cc_basename in + CC*) + # Sun C++ 4.x + # FIXME: insert proper C++ library support + _LT_TAGVAR(ld_shlibs, $1)=no + ;; + lcc*) + # Lucid + # FIXME: insert proper C++ library support + _LT_TAGVAR(ld_shlibs, $1)=no + ;; + *) + # FIXME: insert proper C++ library support + _LT_TAGVAR(ld_shlibs, $1)=no + ;; + esac + ;; + + solaris*) + case $cc_basename in + CC*) + # Sun C++ 4.2, 5.x and Centerline C++ + _LT_TAGVAR(archive_cmds_need_lc,$1)=yes + _LT_TAGVAR(no_undefined_flag, $1)=' -zdefs' + _LT_TAGVAR(archive_cmds, $1)='$CC -G${allow_undefined_flag} -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' + _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ + $CC -G${allow_undefined_flag} ${wl}-M ${wl}$lib.exp -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' + + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' + _LT_TAGVAR(hardcode_shlibpath_var, $1)=no + case $host_os in + solaris2.[[0-5]] | solaris2.[[0-5]].*) ;; + *) + # The compiler driver will combine and reorder linker options, + # but understands `-z linker_flag'. + # Supported since Solaris 2.6 (maybe 2.5.1?) + _LT_TAGVAR(whole_archive_flag_spec, $1)='-z allextract$convenience -z defaultextract' + ;; + esac + _LT_TAGVAR(link_all_deplibs, $1)=yes + + output_verbose_link_cmd='echo' + + # Archives containing C++ object files must be created using + # "CC -xar", where "CC" is the Sun C++ compiler. This is + # necessary to make sure instantiated templates are included + # in the archive. + _LT_TAGVAR(old_archive_cmds, $1)='$CC -xar -o $oldlib $oldobjs' + ;; + gcx*) + # Green Hills C++ Compiler + _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib' + + # The C++ compiler must be used to create the archive. + _LT_TAGVAR(old_archive_cmds, $1)='$CC $LDFLAGS -archive -o $oldlib $oldobjs' + ;; + *) + # GNU C++ compiler with Solaris linker + if test "$GXX" = yes && test "$with_gnu_ld" = no; then + _LT_TAGVAR(no_undefined_flag, $1)=' ${wl}-z ${wl}defs' + if $CC --version | $GREP -v '^2\.7' > /dev/null; then + _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib' + _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ + $CC -shared -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' + + # Commands to make compiler produce verbose output that lists + # what "hidden" libraries, object files and flags are used when + # linking a shared library. + output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "\-L"' + else + # g++ 2.7 appears to require `-G' NOT `-shared' on this + # platform. 
+ _LT_TAGVAR(archive_cmds, $1)='$CC -G -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib' + _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ + $CC -G -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' + + # Commands to make compiler produce verbose output that lists + # what "hidden" libraries, object files and flags are used when + # linking a shared library. + output_verbose_link_cmd='$CC -G $CFLAGS -v conftest.$objext 2>&1 | $GREP "\-L"' + fi + + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R $wl$libdir' + case $host_os in + solaris2.[[0-5]] | solaris2.[[0-5]].*) ;; + *) + _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract' + ;; + esac + fi + ;; + esac + ;; + + sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[[01]].[[10]]* | unixware7* | sco3.2v5.0.[[024]]*) + _LT_TAGVAR(no_undefined_flag, $1)='${wl}-z,text' + _LT_TAGVAR(archive_cmds_need_lc, $1)=no + _LT_TAGVAR(hardcode_shlibpath_var, $1)=no + runpath_var='LD_RUN_PATH' + + case $cc_basename in + CC*) + _LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' + _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' + ;; + *) + _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' + _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' + ;; + esac + ;; + + sysv5* | sco3.2v5* | sco5v6*) + # Note: We can NOT use -z defs as we might desire, because we do not + # link with -lc, and that would cause any symbols used from libc to + # always be unresolved, which means just about no library would + # ever link correctly. If we're not using GNU ld we use -z text + # though, which does catch some bad symbols but isn't as heavy-handed + # as -z defs. 
+ _LT_TAGVAR(no_undefined_flag, $1)='${wl}-z,text' + _LT_TAGVAR(allow_undefined_flag, $1)='${wl}-z,nodefs' + _LT_TAGVAR(archive_cmds_need_lc, $1)=no + _LT_TAGVAR(hardcode_shlibpath_var, $1)=no + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R,$libdir' + _LT_TAGVAR(hardcode_libdir_separator, $1)=':' + _LT_TAGVAR(link_all_deplibs, $1)=yes + _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-Bexport' + runpath_var='LD_RUN_PATH' + + case $cc_basename in + CC*) + _LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' + _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' + ;; + *) + _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' + _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' + ;; + esac + ;; + + tandem*) + case $cc_basename in + NCC*) + # NonStop-UX NCC 3.20 + # FIXME: insert proper C++ library support + _LT_TAGVAR(ld_shlibs, $1)=no + ;; + *) + # FIXME: insert proper C++ library support + _LT_TAGVAR(ld_shlibs, $1)=no + ;; + esac + ;; + + vxworks*) + # FIXME: insert proper C++ library support + _LT_TAGVAR(ld_shlibs, $1)=no + ;; + + *) + # FIXME: insert proper C++ library support + _LT_TAGVAR(ld_shlibs, $1)=no + ;; + esac + + AC_MSG_RESULT([$_LT_TAGVAR(ld_shlibs, $1)]) + test "$_LT_TAGVAR(ld_shlibs, $1)" = no && can_build_shared=no + + _LT_TAGVAR(GCC, $1)="$GXX" + _LT_TAGVAR(LD, $1)="$LD" + + ## CAVEAT EMPTOR: + ## There is no encapsulation within the following macros, do not change + ## the running order or otherwise move them around unless you know exactly + ## what you are doing... + _LT_SYS_HIDDEN_LIBDEPS($1) + _LT_COMPILER_PIC($1) + _LT_COMPILER_C_O($1) + _LT_COMPILER_FILE_LOCKS($1) + _LT_LINKER_SHLIBS($1) + _LT_SYS_DYNAMIC_LINKER($1) + _LT_LINKER_HARDCODE_LIBPATH($1) + + _LT_CONFIG($1) + fi # test -n "$compiler" + + CC=$lt_save_CC + LDCXX=$LD + LD=$lt_save_LD + GCC=$lt_save_GCC + with_gnu_ld=$lt_save_with_gnu_ld + lt_cv_path_LDCXX=$lt_cv_path_LD + lt_cv_path_LD=$lt_save_path_LD + lt_cv_prog_gnu_ldcxx=$lt_cv_prog_gnu_ld + lt_cv_prog_gnu_ld=$lt_save_with_gnu_ld +fi # test "$_lt_caught_CXX_error" != yes + +AC_LANG_POP +])# _LT_LANG_CXX_CONFIG + + +# _LT_SYS_HIDDEN_LIBDEPS([TAGNAME]) +# --------------------------------- +# Figure out "hidden" library dependencies from verbose +# compiler output when linking a shared library. +# Parse the compiler output and extract the necessary +# objects, libraries and library flags. +m4_defun([_LT_SYS_HIDDEN_LIBDEPS], +[m4_require([_LT_FILEUTILS_DEFAULTS])dnl +# Dependencies to place before and after the object being linked: +_LT_TAGVAR(predep_objects, $1)= +_LT_TAGVAR(postdep_objects, $1)= +_LT_TAGVAR(predeps, $1)= +_LT_TAGVAR(postdeps, $1)= +_LT_TAGVAR(compiler_lib_search_path, $1)= + +dnl we can't use the lt_simple_compile_test_code here, +dnl because it contains code intended for an executable, +dnl not a library. It's possible we should let each +dnl tag define a new lt_????_link_test_code variable, +dnl but it's only used here... 
+m4_if([$1], [], [cat > conftest.$ac_ext <<_LT_EOF +int a; +void foo (void) { a = 0; } +_LT_EOF +], [$1], [CXX], [cat > conftest.$ac_ext <<_LT_EOF +class Foo +{ +public: + Foo (void) { a = 0; } +private: + int a; +}; +_LT_EOF +], [$1], [F77], [cat > conftest.$ac_ext <<_LT_EOF + subroutine foo + implicit none + integer*4 a + a=0 + return + end +_LT_EOF +], [$1], [FC], [cat > conftest.$ac_ext <<_LT_EOF + subroutine foo + implicit none + integer a + a=0 + return + end +_LT_EOF +], [$1], [GCJ], [cat > conftest.$ac_ext <<_LT_EOF +public class foo { + private int a; + public void bar (void) { + a = 0; + } +}; +_LT_EOF +]) +dnl Parse the compiler output and extract the necessary +dnl objects, libraries and library flags. +if AC_TRY_EVAL(ac_compile); then + # Parse the compiler output and extract the necessary + # objects, libraries and library flags. + + # Sentinel used to keep track of whether or not we are before + # the conftest object file. + pre_test_object_deps_done=no + + for p in `eval "$output_verbose_link_cmd"`; do + case $p in + + -L* | -R* | -l*) + # Some compilers place space between "-{L,R}" and the path. + # Remove the space. + if test $p = "-L" || + test $p = "-R"; then + prev=$p + continue + else + prev= + fi + + if test "$pre_test_object_deps_done" = no; then + case $p in + -L* | -R*) + # Internal compiler library paths should come after those + # provided the user. The postdeps already come after the + # user supplied libs so there is no need to process them. + if test -z "$_LT_TAGVAR(compiler_lib_search_path, $1)"; then + _LT_TAGVAR(compiler_lib_search_path, $1)="${prev}${p}" + else + _LT_TAGVAR(compiler_lib_search_path, $1)="${_LT_TAGVAR(compiler_lib_search_path, $1)} ${prev}${p}" + fi + ;; + # The "-l" case would never come before the object being + # linked, so don't bother handling this case. + esac + else + if test -z "$_LT_TAGVAR(postdeps, $1)"; then + _LT_TAGVAR(postdeps, $1)="${prev}${p}" + else + _LT_TAGVAR(postdeps, $1)="${_LT_TAGVAR(postdeps, $1)} ${prev}${p}" + fi + fi + ;; + + *.$objext) + # This assumes that the test object file only shows up + # once in the compiler output. + if test "$p" = "conftest.$objext"; then + pre_test_object_deps_done=yes + continue + fi + + if test "$pre_test_object_deps_done" = no; then + if test -z "$_LT_TAGVAR(predep_objects, $1)"; then + _LT_TAGVAR(predep_objects, $1)="$p" + else + _LT_TAGVAR(predep_objects, $1)="$_LT_TAGVAR(predep_objects, $1) $p" + fi + else + if test -z "$_LT_TAGVAR(postdep_objects, $1)"; then + _LT_TAGVAR(postdep_objects, $1)="$p" + else + _LT_TAGVAR(postdep_objects, $1)="$_LT_TAGVAR(postdep_objects, $1) $p" + fi + fi + ;; + + *) ;; # Ignore the rest. + + esac + done + + # Clean up. + rm -f a.out a.exe +else + echo "libtool.m4: error: problem compiling $1 test program" +fi + +$RM -f confest.$objext + +# PORTME: override above test on systems where it is broken +m4_if([$1], [CXX], +[case $host_os in +interix[[3-9]]*) + # Interix 3.5 installs completely hosed .la files for C++, so rather than + # hack all around it, let's just trust "g++" to DTRT. + _LT_TAGVAR(predep_objects,$1)= + _LT_TAGVAR(postdep_objects,$1)= + _LT_TAGVAR(postdeps,$1)= + ;; + +linux*) + case `$CC -V 2>&1 | sed 5q` in + *Sun\ C*) + # Sun C++ 5.9 + + # The more standards-conforming stlport4 library is + # incompatible with the Cstd library. Avoid specifying + # it if it's in CXXFLAGS. Ignore libCrun as + # -library=stlport4 depends on it. 
+ case " $CXX $CXXFLAGS " in + *" -library=stlport4 "*) + solaris_use_stlport4=yes + ;; + esac + + if test "$solaris_use_stlport4" != yes; then + _LT_TAGVAR(postdeps,$1)='-library=Cstd -library=Crun' + fi + ;; + esac + ;; + +solaris*) + case $cc_basename in + CC*) + # The more standards-conforming stlport4 library is + # incompatible with the Cstd library. Avoid specifying + # it if it's in CXXFLAGS. Ignore libCrun as + # -library=stlport4 depends on it. + case " $CXX $CXXFLAGS " in + *" -library=stlport4 "*) + solaris_use_stlport4=yes + ;; + esac + + # Adding this requires a known-good setup of shared libraries for + # Sun compiler versions before 5.6, else PIC objects from an old + # archive will be linked into the output, leading to subtle bugs. + if test "$solaris_use_stlport4" != yes; then + _LT_TAGVAR(postdeps,$1)='-library=Cstd -library=Crun' + fi + ;; + esac + ;; +esac +]) + +case " $_LT_TAGVAR(postdeps, $1) " in +*" -lc "*) _LT_TAGVAR(archive_cmds_need_lc, $1)=no ;; +esac + _LT_TAGVAR(compiler_lib_search_dirs, $1)= +if test -n "${_LT_TAGVAR(compiler_lib_search_path, $1)}"; then + _LT_TAGVAR(compiler_lib_search_dirs, $1)=`echo " ${_LT_TAGVAR(compiler_lib_search_path, $1)}" | ${SED} -e 's! -L! !g' -e 's!^ !!'` +fi +_LT_TAGDECL([], [compiler_lib_search_dirs], [1], + [The directories searched by this compiler when creating a shared library]) +_LT_TAGDECL([], [predep_objects], [1], + [Dependencies to place before and after the objects being linked to + create a shared library]) +_LT_TAGDECL([], [postdep_objects], [1]) +_LT_TAGDECL([], [predeps], [1]) +_LT_TAGDECL([], [postdeps], [1]) +_LT_TAGDECL([], [compiler_lib_search_path], [1], + [The library search path used internally by the compiler when linking + a shared library]) +])# _LT_SYS_HIDDEN_LIBDEPS + + +# _LT_PROG_F77 +# ------------ +# Since AC_PROG_F77 is broken, in that it returns the empty string +# if there is no fortran compiler, we have our own version here. +m4_defun([_LT_PROG_F77], +[ +pushdef([AC_MSG_ERROR], [_lt_disable_F77=yes]) +AC_PROG_F77 +if test -z "$F77" || test "X$F77" = "Xno"; then + _lt_disable_F77=yes +fi +popdef([AC_MSG_ERROR]) +])# _LT_PROG_F77 + +dnl aclocal-1.4 backwards compatibility: +dnl AC_DEFUN([_LT_PROG_F77], []) + + +# _LT_LANG_F77_CONFIG([TAG]) +# -------------------------- +# Ensure that the configuration variables for a Fortran 77 compiler are +# suitably defined. These variables are subsequently used by _LT_CONFIG +# to write the compiler configuration to `libtool'. +m4_defun([_LT_LANG_F77_CONFIG], +[AC_REQUIRE([_LT_PROG_F77])dnl +AC_LANG_PUSH(Fortran 77) + +_LT_TAGVAR(archive_cmds_need_lc, $1)=no +_LT_TAGVAR(allow_undefined_flag, $1)= +_LT_TAGVAR(always_export_symbols, $1)=no +_LT_TAGVAR(archive_expsym_cmds, $1)= +_LT_TAGVAR(export_dynamic_flag_spec, $1)= +_LT_TAGVAR(hardcode_direct, $1)=no +_LT_TAGVAR(hardcode_direct_absolute, $1)=no +_LT_TAGVAR(hardcode_libdir_flag_spec, $1)= +_LT_TAGVAR(hardcode_libdir_flag_spec_ld, $1)= +_LT_TAGVAR(hardcode_libdir_separator, $1)= +_LT_TAGVAR(hardcode_minus_L, $1)=no +_LT_TAGVAR(hardcode_automatic, $1)=no +_LT_TAGVAR(inherit_rpath, $1)=no +_LT_TAGVAR(module_cmds, $1)= +_LT_TAGVAR(module_expsym_cmds, $1)= +_LT_TAGVAR(link_all_deplibs, $1)=unknown +_LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds +_LT_TAGVAR(no_undefined_flag, $1)= +_LT_TAGVAR(whole_archive_flag_spec, $1)= +_LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no + +# Source file extension for f77 test sources. +ac_ext=f + +# Object file extension for compiled f77 test sources. 
+objext=o +_LT_TAGVAR(objext, $1)=$objext + +# No sense in running all these tests if we already determined that +# the F77 compiler isn't working. Some variables (like enable_shared) +# are currently assumed to apply to all compilers on this platform, +# and will be corrupted by setting them based on a non-working compiler. +if test "$_lt_disable_F77" != yes; then + # Code to be used in simple compile tests + lt_simple_compile_test_code="\ + subroutine t + return + end +" + + # Code to be used in simple link tests + lt_simple_link_test_code="\ + program t + end +" + + # ltmain only uses $CC for tagged configurations so make sure $CC is set. + _LT_TAG_COMPILER + + # save warnings/boilerplate of simple test code + _LT_COMPILER_BOILERPLATE + _LT_LINKER_BOILERPLATE + + # Allow CC to be a program name with arguments. + lt_save_CC="$CC" + lt_save_GCC=$GCC + CC=${F77-"f77"} + compiler=$CC + _LT_TAGVAR(compiler, $1)=$CC + _LT_CC_BASENAME([$compiler]) + GCC=$G77 + if test -n "$compiler"; then + AC_MSG_CHECKING([if libtool supports shared libraries]) + AC_MSG_RESULT([$can_build_shared]) + + AC_MSG_CHECKING([whether to build shared libraries]) + test "$can_build_shared" = "no" && enable_shared=no + + # On AIX, shared libraries and static libraries use the same namespace, and + # are all built from PIC. + case $host_os in + aix3*) + test "$enable_shared" = yes && enable_static=no + if test -n "$RANLIB"; then + archive_cmds="$archive_cmds~\$RANLIB \$lib" + postinstall_cmds='$RANLIB $lib' + fi + ;; + aix[[4-9]]*) + if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then + test "$enable_shared" = yes && enable_static=no + fi + ;; + esac + AC_MSG_RESULT([$enable_shared]) + + AC_MSG_CHECKING([whether to build static libraries]) + # Make sure either enable_shared or enable_static is yes. + test "$enable_shared" = yes || enable_static=yes + AC_MSG_RESULT([$enable_static]) + + _LT_TAGVAR(GCC, $1)="$G77" + _LT_TAGVAR(LD, $1)="$LD" + + ## CAVEAT EMPTOR: + ## There is no encapsulation within the following macros, do not change + ## the running order or otherwise move them around unless you know exactly + ## what you are doing... + _LT_COMPILER_PIC($1) + _LT_COMPILER_C_O($1) + _LT_COMPILER_FILE_LOCKS($1) + _LT_LINKER_SHLIBS($1) + _LT_SYS_DYNAMIC_LINKER($1) + _LT_LINKER_HARDCODE_LIBPATH($1) + + _LT_CONFIG($1) + fi # test -n "$compiler" + + GCC=$lt_save_GCC + CC="$lt_save_CC" +fi # test "$_lt_disable_F77" != yes + +AC_LANG_POP +])# _LT_LANG_F77_CONFIG + + +# _LT_PROG_FC +# ----------- +# Since AC_PROG_FC is broken, in that it returns the empty string +# if there is no fortran compiler, we have our own version here. +m4_defun([_LT_PROG_FC], +[ +pushdef([AC_MSG_ERROR], [_lt_disable_FC=yes]) +AC_PROG_FC +if test -z "$FC" || test "X$FC" = "Xno"; then + _lt_disable_FC=yes +fi +popdef([AC_MSG_ERROR]) +])# _LT_PROG_FC + +dnl aclocal-1.4 backwards compatibility: +dnl AC_DEFUN([_LT_PROG_FC], []) + + +# _LT_LANG_FC_CONFIG([TAG]) +# ------------------------- +# Ensure that the configuration variables for a Fortran compiler are +# suitably defined. These variables are subsequently used by _LT_CONFIG +# to write the compiler configuration to `libtool'. 
+m4_defun([_LT_LANG_FC_CONFIG], +[AC_REQUIRE([_LT_PROG_FC])dnl +AC_LANG_PUSH(Fortran) + +_LT_TAGVAR(archive_cmds_need_lc, $1)=no +_LT_TAGVAR(allow_undefined_flag, $1)= +_LT_TAGVAR(always_export_symbols, $1)=no +_LT_TAGVAR(archive_expsym_cmds, $1)= +_LT_TAGVAR(export_dynamic_flag_spec, $1)= +_LT_TAGVAR(hardcode_direct, $1)=no +_LT_TAGVAR(hardcode_direct_absolute, $1)=no +_LT_TAGVAR(hardcode_libdir_flag_spec, $1)= +_LT_TAGVAR(hardcode_libdir_flag_spec_ld, $1)= +_LT_TAGVAR(hardcode_libdir_separator, $1)= +_LT_TAGVAR(hardcode_minus_L, $1)=no +_LT_TAGVAR(hardcode_automatic, $1)=no +_LT_TAGVAR(inherit_rpath, $1)=no +_LT_TAGVAR(module_cmds, $1)= +_LT_TAGVAR(module_expsym_cmds, $1)= +_LT_TAGVAR(link_all_deplibs, $1)=unknown +_LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds +_LT_TAGVAR(no_undefined_flag, $1)= +_LT_TAGVAR(whole_archive_flag_spec, $1)= +_LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no + +# Source file extension for fc test sources. +ac_ext=${ac_fc_srcext-f} + +# Object file extension for compiled fc test sources. +objext=o +_LT_TAGVAR(objext, $1)=$objext + +# No sense in running all these tests if we already determined that +# the FC compiler isn't working. Some variables (like enable_shared) +# are currently assumed to apply to all compilers on this platform, +# and will be corrupted by setting them based on a non-working compiler. +if test "$_lt_disable_FC" != yes; then + # Code to be used in simple compile tests + lt_simple_compile_test_code="\ + subroutine t + return + end +" + + # Code to be used in simple link tests + lt_simple_link_test_code="\ + program t + end +" + + # ltmain only uses $CC for tagged configurations so make sure $CC is set. + _LT_TAG_COMPILER + + # save warnings/boilerplate of simple test code + _LT_COMPILER_BOILERPLATE + _LT_LINKER_BOILERPLATE + + # Allow CC to be a program name with arguments. + lt_save_CC="$CC" + lt_save_GCC=$GCC + CC=${FC-"f95"} + compiler=$CC + GCC=$ac_cv_fc_compiler_gnu + + _LT_TAGVAR(compiler, $1)=$CC + _LT_CC_BASENAME([$compiler]) + + if test -n "$compiler"; then + AC_MSG_CHECKING([if libtool supports shared libraries]) + AC_MSG_RESULT([$can_build_shared]) + + AC_MSG_CHECKING([whether to build shared libraries]) + test "$can_build_shared" = "no" && enable_shared=no + + # On AIX, shared libraries and static libraries use the same namespace, and + # are all built from PIC. + case $host_os in + aix3*) + test "$enable_shared" = yes && enable_static=no + if test -n "$RANLIB"; then + archive_cmds="$archive_cmds~\$RANLIB \$lib" + postinstall_cmds='$RANLIB $lib' + fi + ;; + aix[[4-9]]*) + if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then + test "$enable_shared" = yes && enable_static=no + fi + ;; + esac + AC_MSG_RESULT([$enable_shared]) + + AC_MSG_CHECKING([whether to build static libraries]) + # Make sure either enable_shared or enable_static is yes. + test "$enable_shared" = yes || enable_static=yes + AC_MSG_RESULT([$enable_static]) + + _LT_TAGVAR(GCC, $1)="$ac_cv_fc_compiler_gnu" + _LT_TAGVAR(LD, $1)="$LD" + + ## CAVEAT EMPTOR: + ## There is no encapsulation within the following macros, do not change + ## the running order or otherwise move them around unless you know exactly + ## what you are doing... 
+ _LT_SYS_HIDDEN_LIBDEPS($1) + _LT_COMPILER_PIC($1) + _LT_COMPILER_C_O($1) + _LT_COMPILER_FILE_LOCKS($1) + _LT_LINKER_SHLIBS($1) + _LT_SYS_DYNAMIC_LINKER($1) + _LT_LINKER_HARDCODE_LIBPATH($1) + + _LT_CONFIG($1) + fi # test -n "$compiler" + + GCC=$lt_save_GCC + CC="$lt_save_CC" +fi # test "$_lt_disable_FC" != yes + +AC_LANG_POP +])# _LT_LANG_FC_CONFIG + + +# _LT_LANG_GCJ_CONFIG([TAG]) +# -------------------------- +# Ensure that the configuration variables for the GNU Java Compiler compiler +# are suitably defined. These variables are subsequently used by _LT_CONFIG +# to write the compiler configuration to `libtool'. +m4_defun([_LT_LANG_GCJ_CONFIG], +[AC_REQUIRE([LT_PROG_GCJ])dnl +AC_LANG_SAVE + +# Source file extension for Java test sources. +ac_ext=java + +# Object file extension for compiled Java test sources. +objext=o +_LT_TAGVAR(objext, $1)=$objext + +# Code to be used in simple compile tests +lt_simple_compile_test_code="class foo {}" + +# Code to be used in simple link tests +lt_simple_link_test_code='public class conftest { public static void main(String[[]] argv) {}; }' + +# ltmain only uses $CC for tagged configurations so make sure $CC is set. +_LT_TAG_COMPILER + +# save warnings/boilerplate of simple test code +_LT_COMPILER_BOILERPLATE +_LT_LINKER_BOILERPLATE + +# Allow CC to be a program name with arguments. +lt_save_CC="$CC" +lt_save_GCC=$GCC +GCC=yes +CC=${GCJ-"gcj"} +compiler=$CC +_LT_TAGVAR(compiler, $1)=$CC +_LT_TAGVAR(LD, $1)="$LD" +_LT_CC_BASENAME([$compiler]) + +# GCJ did not exist at the time GCC didn't implicitly link libc in. +_LT_TAGVAR(archive_cmds_need_lc, $1)=no + +_LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds + +## CAVEAT EMPTOR: +## There is no encapsulation within the following macros, do not change +## the running order or otherwise move them around unless you know exactly +## what you are doing... +if test -n "$compiler"; then + _LT_COMPILER_NO_RTTI($1) + _LT_COMPILER_PIC($1) + _LT_COMPILER_C_O($1) + _LT_COMPILER_FILE_LOCKS($1) + _LT_LINKER_SHLIBS($1) + _LT_LINKER_HARDCODE_LIBPATH($1) + + _LT_CONFIG($1) +fi + +AC_LANG_RESTORE + +GCC=$lt_save_GCC +CC="$lt_save_CC" +])# _LT_LANG_GCJ_CONFIG + + +# _LT_LANG_RC_CONFIG([TAG]) +# ------------------------- +# Ensure that the configuration variables for the Windows resource compiler +# are suitably defined. These variables are subsequently used by _LT_CONFIG +# to write the compiler configuration to `libtool'. +m4_defun([_LT_LANG_RC_CONFIG], +[AC_REQUIRE([LT_PROG_RC])dnl +AC_LANG_SAVE + +# Source file extension for RC test sources. +ac_ext=rc + +# Object file extension for compiled RC test sources. +objext=o +_LT_TAGVAR(objext, $1)=$objext + +# Code to be used in simple compile tests +lt_simple_compile_test_code='sample MENU { MENUITEM "&Soup", 100, CHECKED }' + +# Code to be used in simple link tests +lt_simple_link_test_code="$lt_simple_compile_test_code" + +# ltmain only uses $CC for tagged configurations so make sure $CC is set. +_LT_TAG_COMPILER + +# save warnings/boilerplate of simple test code +_LT_COMPILER_BOILERPLATE +_LT_LINKER_BOILERPLATE + +# Allow CC to be a program name with arguments. 
+lt_save_CC="$CC" +lt_save_GCC=$GCC +GCC= +CC=${RC-"windres"} +compiler=$CC +_LT_TAGVAR(compiler, $1)=$CC +_LT_CC_BASENAME([$compiler]) +_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)=yes + +if test -n "$compiler"; then + : + _LT_CONFIG($1) +fi + +GCC=$lt_save_GCC +AC_LANG_RESTORE +CC="$lt_save_CC" +])# _LT_LANG_RC_CONFIG + + +# LT_PROG_GCJ +# ----------- +AC_DEFUN([LT_PROG_GCJ], +[m4_ifdef([AC_PROG_GCJ], [AC_PROG_GCJ], + [m4_ifdef([A][M_PROG_GCJ], [A][M_PROG_GCJ], + [AC_CHECK_TOOL(GCJ, gcj,) + test "x${GCJFLAGS+set}" = xset || GCJFLAGS="-g -O2" + AC_SUBST(GCJFLAGS)])])[]dnl +]) + +# Old name: +AU_ALIAS([LT_AC_PROG_GCJ], [LT_PROG_GCJ]) +dnl aclocal-1.4 backwards compatibility: +dnl AC_DEFUN([LT_AC_PROG_GCJ], []) + + +# LT_PROG_RC +# ---------- +AC_DEFUN([LT_PROG_RC], +[AC_CHECK_TOOL(RC, windres,) +]) + +# Old name: +AU_ALIAS([LT_AC_PROG_RC], [LT_PROG_RC]) +dnl aclocal-1.4 backwards compatibility: +dnl AC_DEFUN([LT_AC_PROG_RC], []) + + +# _LT_DECL_EGREP +# -------------- +# If we don't have a new enough Autoconf to choose the best grep +# available, choose the one first in the user's PATH. +m4_defun([_LT_DECL_EGREP], +[AC_REQUIRE([AC_PROG_EGREP])dnl +AC_REQUIRE([AC_PROG_FGREP])dnl +test -z "$GREP" && GREP=grep +_LT_DECL([], [GREP], [1], [A grep program that handles long lines]) +_LT_DECL([], [EGREP], [1], [An ERE matcher]) +_LT_DECL([], [FGREP], [1], [A literal string matcher]) +dnl Non-bleeding-edge autoconf doesn't subst GREP, so do it here too +AC_SUBST([GREP]) +]) + + +# _LT_DECL_SED +# ------------ +# Check for a fully-functional sed program, that truncates +# as few characters as possible. Prefer GNU sed if found. +m4_defun([_LT_DECL_SED], +[AC_PROG_SED +test -z "$SED" && SED=sed +Xsed="$SED -e 1s/^X//" +_LT_DECL([], [SED], [1], [A sed program that does not truncate output]) +_LT_DECL([], [Xsed], ["\$SED -e 1s/^X//"], + [Sed that helps us avoid accidentally triggering echo(1) options like -n]) +])# _LT_DECL_SED + +m4_ifndef([AC_PROG_SED], [ +############################################################ +# NOTE: This macro has been submitted for inclusion into # +# GNU Autoconf as AC_PROG_SED. When it is available in # +# a released version of Autoconf we should remove this # +# macro and use it instead. # +############################################################ + +m4_defun([AC_PROG_SED], +[AC_MSG_CHECKING([for a sed that does not truncate output]) +AC_CACHE_VAL(lt_cv_path_SED, +[# Loop through the user's path and test for sed and gsed. +# Then use that list of sed's as ones to test for truncation. +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for lt_ac_prog in sed gsed; do + for ac_exec_ext in '' $ac_executable_extensions; do + if $as_executable_p "$as_dir/$lt_ac_prog$ac_exec_ext"; then + lt_ac_sed_list="$lt_ac_sed_list $as_dir/$lt_ac_prog$ac_exec_ext" + fi + done + done +done +IFS=$as_save_IFS +lt_ac_max=0 +lt_ac_count=0 +# Add /usr/xpg4/bin/sed as it is typically found on Solaris +# along with /bin/sed that truncates output. +for lt_ac_sed in $lt_ac_sed_list /usr/xpg4/bin/sed; do + test ! -f $lt_ac_sed && continue + cat /dev/null > conftest.in + lt_ac_count=0 + echo $ECHO_N "0123456789$ECHO_C" >conftest.in + # Check for GNU sed and select it if it is found. 
+ if "$lt_ac_sed" --version 2>&1 < /dev/null | grep 'GNU' > /dev/null; then + lt_cv_path_SED=$lt_ac_sed + break + fi + while true; do + cat conftest.in conftest.in >conftest.tmp + mv conftest.tmp conftest.in + cp conftest.in conftest.nl + echo >>conftest.nl + $lt_ac_sed -e 's/a$//' < conftest.nl >conftest.out || break + cmp -s conftest.out conftest.nl || break + # 10000 chars as input seems more than enough + test $lt_ac_count -gt 10 && break + lt_ac_count=`expr $lt_ac_count + 1` + if test $lt_ac_count -gt $lt_ac_max; then + lt_ac_max=$lt_ac_count + lt_cv_path_SED=$lt_ac_sed + fi + done +done +]) +SED=$lt_cv_path_SED +AC_SUBST([SED]) +AC_MSG_RESULT([$SED]) +])#AC_PROG_SED +])#m4_ifndef + +# Old name: +AU_ALIAS([LT_AC_PROG_SED], [AC_PROG_SED]) +dnl aclocal-1.4 backwards compatibility: +dnl AC_DEFUN([LT_AC_PROG_SED], []) + + +# _LT_CHECK_SHELL_FEATURES +# ------------------------ +# Find out whether the shell is Bourne or XSI compatible, +# or has some other useful features. +m4_defun([_LT_CHECK_SHELL_FEATURES], +[AC_MSG_CHECKING([whether the shell understands some XSI constructs]) +# Try some XSI features +xsi_shell=no +( _lt_dummy="a/b/c" + test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \ + = c,a/b,, \ + && eval 'test $(( 1 + 1 )) -eq 2 \ + && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \ + && xsi_shell=yes +AC_MSG_RESULT([$xsi_shell]) +_LT_CONFIG_LIBTOOL_INIT([xsi_shell='$xsi_shell']) + +AC_MSG_CHECKING([whether the shell understands "+="]) +lt_shell_append=no +( foo=bar; set foo baz; eval "$[1]+=\$[2]" && test "$foo" = barbaz ) \ + >/dev/null 2>&1 \ + && lt_shell_append=yes +AC_MSG_RESULT([$lt_shell_append]) +_LT_CONFIG_LIBTOOL_INIT([lt_shell_append='$lt_shell_append']) + +if ( (MAIL=60; unset MAIL) || exit) >/dev/null 2>&1; then + lt_unset=unset +else + lt_unset=false +fi +_LT_DECL([], [lt_unset], [0], [whether the shell understands "unset"])dnl + +# test EBCDIC or ASCII +case `echo X|tr X '\101'` in + A) # ASCII based system + # \n is not interpreted correctly by Solaris 8 /usr/ucb/tr + lt_SP2NL='tr \040 \012' + lt_NL2SP='tr \015\012 \040\040' + ;; + *) # EBCDIC based system + lt_SP2NL='tr \100 \n' + lt_NL2SP='tr \r\n \100\100' + ;; +esac +_LT_DECL([SP2NL], [lt_SP2NL], [1], [turn spaces into newlines])dnl +_LT_DECL([NL2SP], [lt_NL2SP], [1], [turn newlines into spaces])dnl +])# _LT_CHECK_SHELL_FEATURES + + +# _LT_PROG_XSI_SHELLFNS +# --------------------- +# Bourne and XSI compatible variants of some useful shell functions. +m4_defun([_LT_PROG_XSI_SHELLFNS], +[case $xsi_shell in + yes) + cat << \_LT_EOF >> "$cfgfile" + +# func_dirname file append nondir_replacement +# Compute the dirname of FILE. If nonempty, add APPEND to the result, +# otherwise set result to NONDIR_REPLACEMENT. +func_dirname () +{ + case ${1} in + */*) func_dirname_result="${1%/*}${2}" ;; + * ) func_dirname_result="${3}" ;; + esac +} + +# func_basename file +func_basename () +{ + func_basename_result="${1##*/}" +} + +# func_dirname_and_basename file append nondir_replacement +# perform func_basename and func_dirname in a single function +# call: +# dirname: Compute the dirname of FILE. If nonempty, +# add APPEND to the result, otherwise set result +# to NONDIR_REPLACEMENT. +# value returned in "$func_dirname_result" +# basename: Compute filename of FILE. +# value retuned in "$func_basename_result" +# Implementation must be kept synchronized with func_dirname +# and func_basename. For efficiency, we do not delegate to +# those functions but instead duplicate the functionality here. 
+func_dirname_and_basename () +{ + case ${1} in + */*) func_dirname_result="${1%/*}${2}" ;; + * ) func_dirname_result="${3}" ;; + esac + func_basename_result="${1##*/}" +} + +# func_stripname prefix suffix name +# strip PREFIX and SUFFIX off of NAME. +# PREFIX and SUFFIX must not contain globbing or regex special +# characters, hashes, percent signs, but SUFFIX may contain a leading +# dot (in which case that matches only a dot). +func_stripname () +{ + # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are + # positional parameters, so assign one to ordinary parameter first. + func_stripname_result=${3} + func_stripname_result=${func_stripname_result#"${1}"} + func_stripname_result=${func_stripname_result%"${2}"} +} + +# func_opt_split +func_opt_split () +{ + func_opt_split_opt=${1%%=*} + func_opt_split_arg=${1#*=} +} + +# func_lo2o object +func_lo2o () +{ + case ${1} in + *.lo) func_lo2o_result=${1%.lo}.${objext} ;; + *) func_lo2o_result=${1} ;; + esac +} + +# func_xform libobj-or-source +func_xform () +{ + func_xform_result=${1%.*}.lo +} + +# func_arith arithmetic-term... +func_arith () +{ + func_arith_result=$(( $[*] )) +} + +# func_len string +# STRING may not start with a hyphen. +func_len () +{ + func_len_result=${#1} +} + +_LT_EOF + ;; + *) # Bourne compatible functions. + cat << \_LT_EOF >> "$cfgfile" + +# func_dirname file append nondir_replacement +# Compute the dirname of FILE. If nonempty, add APPEND to the result, +# otherwise set result to NONDIR_REPLACEMENT. +func_dirname () +{ + # Extract subdirectory from the argument. + func_dirname_result=`$ECHO "X${1}" | $Xsed -e "$dirname"` + if test "X$func_dirname_result" = "X${1}"; then + func_dirname_result="${3}" + else + func_dirname_result="$func_dirname_result${2}" + fi +} + +# func_basename file +func_basename () +{ + func_basename_result=`$ECHO "X${1}" | $Xsed -e "$basename"` +} + +dnl func_dirname_and_basename +dnl A portable version of this function is already defined in general.m4sh +dnl so there is no need for it here. + +# func_stripname prefix suffix name +# strip PREFIX and SUFFIX off of NAME. +# PREFIX and SUFFIX must not contain globbing or regex special +# characters, hashes, percent signs, but SUFFIX may contain a leading +# dot (in which case that matches only a dot). +# func_strip_suffix prefix name +func_stripname () +{ + case ${2} in + .*) func_stripname_result=`$ECHO "X${3}" \ + | $Xsed -e "s%^${1}%%" -e "s%\\\\${2}\$%%"`;; + *) func_stripname_result=`$ECHO "X${3}" \ + | $Xsed -e "s%^${1}%%" -e "s%${2}\$%%"`;; + esac +} + +# sed scripts: +my_sed_long_opt='1s/^\(-[[^=]]*\)=.*/\1/;q' +my_sed_long_arg='1s/^-[[^=]]*=//' + +# func_opt_split +func_opt_split () +{ + func_opt_split_opt=`$ECHO "X${1}" | $Xsed -e "$my_sed_long_opt"` + func_opt_split_arg=`$ECHO "X${1}" | $Xsed -e "$my_sed_long_arg"` +} + +# func_lo2o object +func_lo2o () +{ + func_lo2o_result=`$ECHO "X${1}" | $Xsed -e "$lo2o"` +} + +# func_xform libobj-or-source +func_xform () +{ + func_xform_result=`$ECHO "X${1}" | $Xsed -e 's/\.[[^.]]*$/.lo/'` +} + +# func_arith arithmetic-term... +func_arith () +{ + func_arith_result=`expr "$[@]"` +} + +# func_len string +# STRING may not start with a hyphen. +func_len () +{ + func_len_result=`expr "$[1]" : ".*" 2>/dev/null || echo $max_cmd_len` +} + +_LT_EOF +esac + +case $lt_shell_append in + yes) + cat << \_LT_EOF >> "$cfgfile" + +# func_append var value +# Append VALUE to the end of shell variable VAR. 
+func_append () +{ + eval "$[1]+=\$[2]" +} +_LT_EOF + ;; + *) + cat << \_LT_EOF >> "$cfgfile" + +# func_append var value +# Append VALUE to the end of shell variable VAR. +func_append () +{ + eval "$[1]=\$$[1]\$[2]" +} + +_LT_EOF + ;; + esac +]) diff --git a/backtrace-sys/src/libbacktrace/config/ltoptions.m4 b/backtrace-sys/src/libbacktrace/config/ltoptions.m4 new file mode 100644 index 000000000..e97011948 --- /dev/null +++ b/backtrace-sys/src/libbacktrace/config/ltoptions.m4 @@ -0,0 +1,368 @@ +# Helper functions for option handling. -*- Autoconf -*- +# +# Copyright (C) 2004, 2005, 2007, 2008 Free Software Foundation, Inc. +# Written by Gary V. Vaughan, 2004 +# +# This file is free software; the Free Software Foundation gives +# unlimited permission to copy and/or distribute it, with or without +# modifications, as long as this notice is preserved. + +# serial 6 ltoptions.m4 + +# This is to help aclocal find these macros, as it can't see m4_define. +AC_DEFUN([LTOPTIONS_VERSION], [m4_if([1])]) + + +# _LT_MANGLE_OPTION(MACRO-NAME, OPTION-NAME) +# ------------------------------------------ +m4_define([_LT_MANGLE_OPTION], +[[_LT_OPTION_]m4_bpatsubst($1__$2, [[^a-zA-Z0-9_]], [_])]) + + +# _LT_SET_OPTION(MACRO-NAME, OPTION-NAME) +# --------------------------------------- +# Set option OPTION-NAME for macro MACRO-NAME, and if there is a +# matching handler defined, dispatch to it. Other OPTION-NAMEs are +# saved as a flag. +m4_define([_LT_SET_OPTION], +[m4_define(_LT_MANGLE_OPTION([$1], [$2]))dnl +m4_ifdef(_LT_MANGLE_DEFUN([$1], [$2]), + _LT_MANGLE_DEFUN([$1], [$2]), + [m4_warning([Unknown $1 option `$2'])])[]dnl +]) + + +# _LT_IF_OPTION(MACRO-NAME, OPTION-NAME, IF-SET, [IF-NOT-SET]) +# ------------------------------------------------------------ +# Execute IF-SET if OPTION is set, IF-NOT-SET otherwise. +m4_define([_LT_IF_OPTION], +[m4_ifdef(_LT_MANGLE_OPTION([$1], [$2]), [$3], [$4])]) + + +# _LT_UNLESS_OPTIONS(MACRO-NAME, OPTION-LIST, IF-NOT-SET) +# ------------------------------------------------------- +# Execute IF-NOT-SET unless all options in OPTION-LIST for MACRO-NAME +# are set. +m4_define([_LT_UNLESS_OPTIONS], +[m4_foreach([_LT_Option], m4_split(m4_normalize([$2])), + [m4_ifdef(_LT_MANGLE_OPTION([$1], _LT_Option), + [m4_define([$0_found])])])[]dnl +m4_ifdef([$0_found], [m4_undefine([$0_found])], [$3 +])[]dnl +]) + + +# _LT_SET_OPTIONS(MACRO-NAME, OPTION-LIST) +# ---------------------------------------- +# OPTION-LIST is a space-separated list of Libtool options associated +# with MACRO-NAME. If any OPTION has a matching handler declared with +# LT_OPTION_DEFINE, dispatch to that macro; otherwise complain about +# the unknown option and exit. +m4_defun([_LT_SET_OPTIONS], +[# Set options +m4_foreach([_LT_Option], m4_split(m4_normalize([$2])), + [_LT_SET_OPTION([$1], _LT_Option)]) + +m4_if([$1],[LT_INIT],[ + dnl + dnl Simply set some default values (i.e off) if boolean options were not + dnl specified: + _LT_UNLESS_OPTIONS([LT_INIT], [dlopen], [enable_dlopen=no + ]) + _LT_UNLESS_OPTIONS([LT_INIT], [win32-dll], [enable_win32_dll=no + ]) + dnl + dnl If no reference was made to various pairs of opposing options, then + dnl we run the default mode handler for the pair. 
For example, if neither + dnl `shared' nor `disable-shared' was passed, we enable building of shared + dnl archives by default: + _LT_UNLESS_OPTIONS([LT_INIT], [shared disable-shared], [_LT_ENABLE_SHARED]) + _LT_UNLESS_OPTIONS([LT_INIT], [static disable-static], [_LT_ENABLE_STATIC]) + _LT_UNLESS_OPTIONS([LT_INIT], [pic-only no-pic], [_LT_WITH_PIC]) + _LT_UNLESS_OPTIONS([LT_INIT], [fast-install disable-fast-install], + [_LT_ENABLE_FAST_INSTALL]) + ]) +])# _LT_SET_OPTIONS + + +## --------------------------------- ## +## Macros to handle LT_INIT options. ## +## --------------------------------- ## + +# _LT_MANGLE_DEFUN(MACRO-NAME, OPTION-NAME) +# ----------------------------------------- +m4_define([_LT_MANGLE_DEFUN], +[[_LT_OPTION_DEFUN_]m4_bpatsubst(m4_toupper([$1__$2]), [[^A-Z0-9_]], [_])]) + + +# LT_OPTION_DEFINE(MACRO-NAME, OPTION-NAME, CODE) +# ----------------------------------------------- +m4_define([LT_OPTION_DEFINE], +[m4_define(_LT_MANGLE_DEFUN([$1], [$2]), [$3])[]dnl +])# LT_OPTION_DEFINE + + +# dlopen +# ------ +LT_OPTION_DEFINE([LT_INIT], [dlopen], [enable_dlopen=yes +]) + +AU_DEFUN([AC_LIBTOOL_DLOPEN], +[_LT_SET_OPTION([LT_INIT], [dlopen]) +AC_DIAGNOSE([obsolete], +[$0: Remove this warning and the call to _LT_SET_OPTION when you +put the `dlopen' option into LT_INIT's first parameter.]) +]) + +dnl aclocal-1.4 backwards compatibility: +dnl AC_DEFUN([AC_LIBTOOL_DLOPEN], []) + + +# win32-dll +# --------- +# Declare package support for building win32 dll's. +LT_OPTION_DEFINE([LT_INIT], [win32-dll], +[enable_win32_dll=yes + +case $host in +*-*-cygwin* | *-*-mingw* | *-*-pw32*) + AC_CHECK_TOOL(AS, as, false) + AC_CHECK_TOOL(DLLTOOL, dlltool, false) + AC_CHECK_TOOL(OBJDUMP, objdump, false) + ;; +esac + +test -z "$AS" && AS=as +_LT_DECL([], [AS], [0], [Assembler program])dnl + +test -z "$DLLTOOL" && DLLTOOL=dlltool +_LT_DECL([], [DLLTOOL], [0], [DLL creation program])dnl + +test -z "$OBJDUMP" && OBJDUMP=objdump +_LT_DECL([], [OBJDUMP], [0], [Object dumper program])dnl +])# win32-dll + +AU_DEFUN([AC_LIBTOOL_WIN32_DLL], +[AC_REQUIRE([AC_CANONICAL_HOST])dnl +_LT_SET_OPTION([LT_INIT], [win32-dll]) +AC_DIAGNOSE([obsolete], +[$0: Remove this warning and the call to _LT_SET_OPTION when you +put the `win32-dll' option into LT_INIT's first parameter.]) +]) + +dnl aclocal-1.4 backwards compatibility: +dnl AC_DEFUN([AC_LIBTOOL_WIN32_DLL], []) + + +# _LT_ENABLE_SHARED([DEFAULT]) +# ---------------------------- +# implement the --enable-shared flag, and supports the `shared' and +# `disable-shared' LT_INIT options. +# DEFAULT is either `yes' or `no'. If omitted, it defaults to `yes'. +m4_define([_LT_ENABLE_SHARED], +[m4_define([_LT_ENABLE_SHARED_DEFAULT], [m4_if($1, no, no, yes)])dnl +AC_ARG_ENABLE([shared], + [AS_HELP_STRING([--enable-shared@<:@=PKGS@:>@], + [build shared libraries @<:@default=]_LT_ENABLE_SHARED_DEFAULT[@:>@])], + [p=${PACKAGE-default} + case $enableval in + yes) enable_shared=yes ;; + no) enable_shared=no ;; + *) + enable_shared=no + # Look at the argument we got. We use all the common list separators. 
+ lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," + for pkg in $enableval; do + IFS="$lt_save_ifs" + if test "X$pkg" = "X$p"; then + enable_shared=yes + fi + done + IFS="$lt_save_ifs" + ;; + esac], + [enable_shared=]_LT_ENABLE_SHARED_DEFAULT) + + _LT_DECL([build_libtool_libs], [enable_shared], [0], + [Whether or not to build shared libraries]) +])# _LT_ENABLE_SHARED + +LT_OPTION_DEFINE([LT_INIT], [shared], [_LT_ENABLE_SHARED([yes])]) +LT_OPTION_DEFINE([LT_INIT], [disable-shared], [_LT_ENABLE_SHARED([no])]) + +# Old names: +AC_DEFUN([AC_ENABLE_SHARED], +[_LT_SET_OPTION([LT_INIT], m4_if([$1], [no], [disable-])[shared]) +]) + +AC_DEFUN([AC_DISABLE_SHARED], +[_LT_SET_OPTION([LT_INIT], [disable-shared]) +]) + +AU_DEFUN([AM_ENABLE_SHARED], [AC_ENABLE_SHARED($@)]) +AU_DEFUN([AM_DISABLE_SHARED], [AC_DISABLE_SHARED($@)]) + +dnl aclocal-1.4 backwards compatibility: +dnl AC_DEFUN([AM_ENABLE_SHARED], []) +dnl AC_DEFUN([AM_DISABLE_SHARED], []) + + + +# _LT_ENABLE_STATIC([DEFAULT]) +# ---------------------------- +# implement the --enable-static flag, and support the `static' and +# `disable-static' LT_INIT options. +# DEFAULT is either `yes' or `no'. If omitted, it defaults to `yes'. +m4_define([_LT_ENABLE_STATIC], +[m4_define([_LT_ENABLE_STATIC_DEFAULT], [m4_if($1, no, no, yes)])dnl +AC_ARG_ENABLE([static], + [AS_HELP_STRING([--enable-static@<:@=PKGS@:>@], + [build static libraries @<:@default=]_LT_ENABLE_STATIC_DEFAULT[@:>@])], + [p=${PACKAGE-default} + case $enableval in + yes) enable_static=yes ;; + no) enable_static=no ;; + *) + enable_static=no + # Look at the argument we got. We use all the common list separators. + lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," + for pkg in $enableval; do + IFS="$lt_save_ifs" + if test "X$pkg" = "X$p"; then + enable_static=yes + fi + done + IFS="$lt_save_ifs" + ;; + esac], + [enable_static=]_LT_ENABLE_STATIC_DEFAULT) + + _LT_DECL([build_old_libs], [enable_static], [0], + [Whether or not to build static libraries]) +])# _LT_ENABLE_STATIC + +LT_OPTION_DEFINE([LT_INIT], [static], [_LT_ENABLE_STATIC([yes])]) +LT_OPTION_DEFINE([LT_INIT], [disable-static], [_LT_ENABLE_STATIC([no])]) + +# Old names: +AC_DEFUN([AC_ENABLE_STATIC], +[_LT_SET_OPTION([LT_INIT], m4_if([$1], [no], [disable-])[static]) +]) + +AC_DEFUN([AC_DISABLE_STATIC], +[_LT_SET_OPTION([LT_INIT], [disable-static]) +]) + +AU_DEFUN([AM_ENABLE_STATIC], [AC_ENABLE_STATIC($@)]) +AU_DEFUN([AM_DISABLE_STATIC], [AC_DISABLE_STATIC($@)]) + +dnl aclocal-1.4 backwards compatibility: +dnl AC_DEFUN([AM_ENABLE_STATIC], []) +dnl AC_DEFUN([AM_DISABLE_STATIC], []) + + + +# _LT_ENABLE_FAST_INSTALL([DEFAULT]) +# ---------------------------------- +# implement the --enable-fast-install flag, and support the `fast-install' +# and `disable-fast-install' LT_INIT options. +# DEFAULT is either `yes' or `no'. If omitted, it defaults to `yes'. +m4_define([_LT_ENABLE_FAST_INSTALL], +[m4_define([_LT_ENABLE_FAST_INSTALL_DEFAULT], [m4_if($1, no, no, yes)])dnl +AC_ARG_ENABLE([fast-install], + [AS_HELP_STRING([--enable-fast-install@<:@=PKGS@:>@], + [optimize for fast installation @<:@default=]_LT_ENABLE_FAST_INSTALL_DEFAULT[@:>@])], + [p=${PACKAGE-default} + case $enableval in + yes) enable_fast_install=yes ;; + no) enable_fast_install=no ;; + *) + enable_fast_install=no + # Look at the argument we got. We use all the common list separators. 
+ lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," + for pkg in $enableval; do + IFS="$lt_save_ifs" + if test "X$pkg" = "X$p"; then + enable_fast_install=yes + fi + done + IFS="$lt_save_ifs" + ;; + esac], + [enable_fast_install=]_LT_ENABLE_FAST_INSTALL_DEFAULT) + +_LT_DECL([fast_install], [enable_fast_install], [0], + [Whether or not to optimize for fast installation])dnl +])# _LT_ENABLE_FAST_INSTALL + +LT_OPTION_DEFINE([LT_INIT], [fast-install], [_LT_ENABLE_FAST_INSTALL([yes])]) +LT_OPTION_DEFINE([LT_INIT], [disable-fast-install], [_LT_ENABLE_FAST_INSTALL([no])]) + +# Old names: +AU_DEFUN([AC_ENABLE_FAST_INSTALL], +[_LT_SET_OPTION([LT_INIT], m4_if([$1], [no], [disable-])[fast-install]) +AC_DIAGNOSE([obsolete], +[$0: Remove this warning and the call to _LT_SET_OPTION when you put +the `fast-install' option into LT_INIT's first parameter.]) +]) + +AU_DEFUN([AC_DISABLE_FAST_INSTALL], +[_LT_SET_OPTION([LT_INIT], [disable-fast-install]) +AC_DIAGNOSE([obsolete], +[$0: Remove this warning and the call to _LT_SET_OPTION when you put +the `disable-fast-install' option into LT_INIT's first parameter.]) +]) + +dnl aclocal-1.4 backwards compatibility: +dnl AC_DEFUN([AC_ENABLE_FAST_INSTALL], []) +dnl AC_DEFUN([AM_DISABLE_FAST_INSTALL], []) + + +# _LT_WITH_PIC([MODE]) +# -------------------- +# implement the --with-pic flag, and support the `pic-only' and `no-pic' +# LT_INIT options. +# MODE is either `yes' or `no'. If omitted, it defaults to `both'. +m4_define([_LT_WITH_PIC], +[AC_ARG_WITH([pic], + [AS_HELP_STRING([--with-pic], + [try to use only PIC/non-PIC objects @<:@default=use both@:>@])], + [pic_mode="$withval"], + [pic_mode=default]) + +test -z "$pic_mode" && pic_mode=m4_default([$1], [default]) + +_LT_DECL([], [pic_mode], [0], [What type of objects to build])dnl +])# _LT_WITH_PIC + +LT_OPTION_DEFINE([LT_INIT], [pic-only], [_LT_WITH_PIC([yes])]) +LT_OPTION_DEFINE([LT_INIT], [no-pic], [_LT_WITH_PIC([no])]) + +# Old name: +AU_DEFUN([AC_LIBTOOL_PICMODE], +[_LT_SET_OPTION([LT_INIT], [pic-only]) +AC_DIAGNOSE([obsolete], +[$0: Remove this warning and the call to _LT_SET_OPTION when you +put the `pic-only' option into LT_INIT's first parameter.]) +]) + +dnl aclocal-1.4 backwards compatibility: +dnl AC_DEFUN([AC_LIBTOOL_PICMODE], []) + +## ----------------- ## +## LTDL_INIT Options ## +## ----------------- ## + +m4_define([_LTDL_MODE], []) +LT_OPTION_DEFINE([LTDL_INIT], [nonrecursive], + [m4_define([_LTDL_MODE], [nonrecursive])]) +LT_OPTION_DEFINE([LTDL_INIT], [recursive], + [m4_define([_LTDL_MODE], [recursive])]) +LT_OPTION_DEFINE([LTDL_INIT], [subproject], + [m4_define([_LTDL_MODE], [subproject])]) + +m4_define([_LTDL_TYPE], []) +LT_OPTION_DEFINE([LTDL_INIT], [installable], + [m4_define([_LTDL_TYPE], [installable])]) +LT_OPTION_DEFINE([LTDL_INIT], [convenience], + [m4_define([_LTDL_TYPE], [convenience])]) diff --git a/backtrace-sys/src/libbacktrace/config/ltsugar.m4 b/backtrace-sys/src/libbacktrace/config/ltsugar.m4 new file mode 100644 index 000000000..0d258e070 --- /dev/null +++ b/backtrace-sys/src/libbacktrace/config/ltsugar.m4 @@ -0,0 +1,123 @@ +# ltsugar.m4 -- libtool m4 base layer. -*-Autoconf-*- +# +# Copyright (C) 2004, 2005, 2007 Free Software Foundation, Inc. +# Written by Gary V. Vaughan, 2004 +# +# This file is free software; the Free Software Foundation gives +# unlimited permission to copy and/or distribute it, with or without +# modifications, as long as this notice is preserved. + +# serial 5 ltsugar.m4 + +# This is to help aclocal find these macros, as it can't see m4_define. 
+AC_DEFUN([LTSUGAR_VERSION], [m4_if([0.1])]) + + +# lt_join(SEP, ARG1, [ARG2...]) +# ----------------------------- +# Produce ARG1SEPARG2...SEPARGn, omitting [] arguments and their +# associated separator. +# Needed until we can rely on m4_join from Autoconf 2.62, since all earlier +# versions in m4sugar had bugs. +m4_define([lt_join], +[m4_if([$#], [1], [], + [$#], [2], [[$2]], + [m4_if([$2], [], [], [[$2]_])$0([$1], m4_shift(m4_shift($@)))])]) +m4_define([_lt_join], +[m4_if([$#$2], [2], [], + [m4_if([$2], [], [], [[$1$2]])$0([$1], m4_shift(m4_shift($@)))])]) + + +# lt_car(LIST) +# lt_cdr(LIST) +# ------------ +# Manipulate m4 lists. +# These macros are necessary as long as will still need to support +# Autoconf-2.59 which quotes differently. +m4_define([lt_car], [[$1]]) +m4_define([lt_cdr], +[m4_if([$#], 0, [m4_fatal([$0: cannot be called without arguments])], + [$#], 1, [], + [m4_dquote(m4_shift($@))])]) +m4_define([lt_unquote], $1) + + +# lt_append(MACRO-NAME, STRING, [SEPARATOR]) +# ------------------------------------------ +# Redefine MACRO-NAME to hold its former content plus `SEPARATOR'`STRING'. +# Note that neither SEPARATOR nor STRING are expanded; they are appended +# to MACRO-NAME as is (leaving the expansion for when MACRO-NAME is invoked). +# No SEPARATOR is output if MACRO-NAME was previously undefined (different +# than defined and empty). +# +# This macro is needed until we can rely on Autoconf 2.62, since earlier +# versions of m4sugar mistakenly expanded SEPARATOR but not STRING. +m4_define([lt_append], +[m4_define([$1], + m4_ifdef([$1], [m4_defn([$1])[$3]])[$2])]) + + + +# lt_combine(SEP, PREFIX-LIST, INFIX, SUFFIX1, [SUFFIX2...]) +# ---------------------------------------------------------- +# Produce a SEP delimited list of all paired combinations of elements of +# PREFIX-LIST with SUFFIX1 through SUFFIXn. Each element of the list +# has the form PREFIXmINFIXSUFFIXn. +m4_define([lt_combine], +[m4_if([$2], [], [], + [m4_if([$4], [], [], + [lt_join(m4_quote(m4_default([$1], [[, ]])), + lt_unquote(m4_split(m4_normalize(m4_foreach(_Lt_prefix, [$2], + [m4_foreach(_Lt_suffix, lt_car([m4_shiftn(3, $@)]), + [_Lt_prefix[]$3[]_Lt_suffix ])])))))])])dnl +]) + + +# lt_if_append_uniq(MACRO-NAME, VARNAME, [SEPARATOR], [UNIQ], [NOT-UNIQ]) +# ----------------------------------------------------------------------- +# Iff MACRO-NAME does not yet contain VARNAME, then append it (delimited +# by SEPARATOR if supplied) and expand UNIQ, else NOT-UNIQ. 
+m4_define([lt_if_append_uniq], +[m4_ifdef([$1], + [m4_if(m4_index([$3]m4_defn([$1])[$3], [$3$2$3]), [-1], + [lt_append([$1], [$2], [$3])$4], + [$5])], + [lt_append([$1], [$2], [$3])$4])]) + + +# lt_dict_add(DICT, KEY, VALUE) +# ----------------------------- +m4_define([lt_dict_add], +[m4_define([$1($2)], [$3])]) + + +# lt_dict_add_subkey(DICT, KEY, SUBKEY, VALUE) +# -------------------------------------------- +m4_define([lt_dict_add_subkey], +[m4_define([$1($2:$3)], [$4])]) + + +# lt_dict_fetch(DICT, KEY, [SUBKEY]) +# ---------------------------------- +m4_define([lt_dict_fetch], +[m4_ifval([$3], + m4_ifdef([$1($2:$3)], [m4_defn([$1($2:$3)])]), + m4_ifdef([$1($2)], [m4_defn([$1($2)])]))]) + + +# lt_if_dict_fetch(DICT, KEY, [SUBKEY], VALUE, IF-TRUE, [IF-FALSE]) +# ----------------------------------------------------------------- +m4_define([lt_if_dict_fetch], +[m4_if(lt_dict_fetch([$1], [$2], [$3]), [$4], + [$5], + [$6])]) + + +# lt_dict_filter(DICT, [SUBKEY], VALUE, [SEPARATOR], KEY, [...]) +# -------------------------------------------------------------- +m4_define([lt_dict_filter], +[m4_if([$5], [], [], + [lt_join(m4_quote(m4_default([$4], [[, ]])), + lt_unquote(m4_split(m4_normalize(m4_foreach(_Lt_key, lt_car([m4_shiftn(4, $@)]), + [lt_if_dict_fetch([$1], _Lt_key, [$2], [$3], [_Lt_key ])])))))])[]dnl +]) diff --git a/backtrace-sys/src/libbacktrace/config/ltversion.m4 b/backtrace-sys/src/libbacktrace/config/ltversion.m4 new file mode 100644 index 000000000..45cb1557c --- /dev/null +++ b/backtrace-sys/src/libbacktrace/config/ltversion.m4 @@ -0,0 +1,23 @@ +# ltversion.m4 -- version numbers -*- Autoconf -*- +# +# Copyright (C) 2004 Free Software Foundation, Inc. +# Written by Scott James Remnant, 2004 +# +# This file is free software; the Free Software Foundation gives +# unlimited permission to copy and/or distribute it, with or without +# modifications, as long as this notice is preserved. + +# Generated from ltversion.in. + +# serial 2976 ltversion.m4 +# This file is part of GNU Libtool + +m4_define([LT_PACKAGE_VERSION], [2.2.4]) +m4_define([LT_PACKAGE_REVISION], [1.2976]) + +AC_DEFUN([LTVERSION_VERSION], +[macro_version='2.2.4' +macro_revision='1.2976' +_LT_DECL(, macro_version, 0, [Which release of libtool.m4 was used?]) +_LT_DECL(, macro_revision, 0) +]) diff --git a/backtrace-sys/src/libbacktrace/config/lt~obsolete.m4 b/backtrace-sys/src/libbacktrace/config/lt~obsolete.m4 new file mode 100644 index 000000000..637bb2066 --- /dev/null +++ b/backtrace-sys/src/libbacktrace/config/lt~obsolete.m4 @@ -0,0 +1,92 @@ +# lt~obsolete.m4 -- aclocal satisfying obsolete definitions. -*-Autoconf-*- +# +# Copyright (C) 2004, 2005, 2007 Free Software Foundation, Inc. +# Written by Scott James Remnant, 2004. +# +# This file is free software; the Free Software Foundation gives +# unlimited permission to copy and/or distribute it, with or without +# modifications, as long as this notice is preserved. + +# serial 4 lt~obsolete.m4 + +# These exist entirely to fool aclocal when bootstrapping libtool. +# +# In the past libtool.m4 has provided macros via AC_DEFUN (or AU_DEFUN) +# which have later been changed to m4_define as they aren't part of the +# exported API, or moved to Autoconf or Automake where they belong. +# +# The trouble is, aclocal is a bit thick. 
It'll see the old AC_DEFUN +# in /usr/share/aclocal/libtool.m4 and remember it, then when it sees us +# using a macro with the same name in our local m4/libtool.m4 it'll +# pull the old libtool.m4 in (it doesn't see our shiny new m4_define +# and doesn't know about Autoconf macros at all.) +# +# So we provide this file, which has a silly filename so it's always +# included after everything else. This provides aclocal with the +# AC_DEFUNs it wants, but when m4 processes it, it doesn't do anything +# because those macros already exist, or will be overwritten later. +# We use AC_DEFUN over AU_DEFUN for compatibility with aclocal-1.6. +# +# Anytime we withdraw an AC_DEFUN or AU_DEFUN, remember to add it here. +# Yes, that means every name once taken will need to remain here until +# we give up compatibility with versions before 1.7, at which point +# we need to keep only those names which we still refer to. + +# This is to help aclocal find these macros, as it can't see m4_define. +AC_DEFUN([LTOBSOLETE_VERSION], [m4_if([1])]) + +m4_ifndef([AC_LIBTOOL_LINKER_OPTION], [AC_DEFUN([AC_LIBTOOL_LINKER_OPTION])]) +m4_ifndef([AC_PROG_EGREP], [AC_DEFUN([AC_PROG_EGREP])]) +m4_ifndef([_LT_AC_PROG_ECHO_BACKSLASH], [AC_DEFUN([_LT_AC_PROG_ECHO_BACKSLASH])]) +m4_ifndef([_LT_AC_SHELL_INIT], [AC_DEFUN([_LT_AC_SHELL_INIT])]) +m4_ifndef([_LT_AC_SYS_LIBPATH_AIX], [AC_DEFUN([_LT_AC_SYS_LIBPATH_AIX])]) +m4_ifndef([_LT_PROG_LTMAIN], [AC_DEFUN([_LT_PROG_LTMAIN])]) +m4_ifndef([_LT_AC_TAGVAR], [AC_DEFUN([_LT_AC_TAGVAR])]) +m4_ifndef([AC_LTDL_ENABLE_INSTALL], [AC_DEFUN([AC_LTDL_ENABLE_INSTALL])]) +m4_ifndef([AC_LTDL_PREOPEN], [AC_DEFUN([AC_LTDL_PREOPEN])]) +m4_ifndef([_LT_AC_SYS_COMPILER], [AC_DEFUN([_LT_AC_SYS_COMPILER])]) +m4_ifndef([_LT_AC_LOCK], [AC_DEFUN([_LT_AC_LOCK])]) +m4_ifndef([AC_LIBTOOL_SYS_OLD_ARCHIVE], [AC_DEFUN([AC_LIBTOOL_SYS_OLD_ARCHIVE])]) +m4_ifndef([_LT_AC_TRY_DLOPEN_SELF], [AC_DEFUN([_LT_AC_TRY_DLOPEN_SELF])]) +m4_ifndef([AC_LIBTOOL_PROG_CC_C_O], [AC_DEFUN([AC_LIBTOOL_PROG_CC_C_O])]) +m4_ifndef([AC_LIBTOOL_SYS_HARD_LINK_LOCKS], [AC_DEFUN([AC_LIBTOOL_SYS_HARD_LINK_LOCKS])]) +m4_ifndef([AC_LIBTOOL_OBJDIR], [AC_DEFUN([AC_LIBTOOL_OBJDIR])]) +m4_ifndef([AC_LTDL_OBJDIR], [AC_DEFUN([AC_LTDL_OBJDIR])]) +m4_ifndef([AC_LIBTOOL_PROG_LD_HARDCODE_LIBPATH], [AC_DEFUN([AC_LIBTOOL_PROG_LD_HARDCODE_LIBPATH])]) +m4_ifndef([AC_LIBTOOL_SYS_LIB_STRIP], [AC_DEFUN([AC_LIBTOOL_SYS_LIB_STRIP])]) +m4_ifndef([AC_PATH_MAGIC], [AC_DEFUN([AC_PATH_MAGIC])]) +m4_ifndef([AC_PROG_LD_GNU], [AC_DEFUN([AC_PROG_LD_GNU])]) +m4_ifndef([AC_PROG_LD_RELOAD_FLAG], [AC_DEFUN([AC_PROG_LD_RELOAD_FLAG])]) +m4_ifndef([AC_DEPLIBS_CHECK_METHOD], [AC_DEFUN([AC_DEPLIBS_CHECK_METHOD])]) +m4_ifndef([AC_LIBTOOL_PROG_COMPILER_NO_RTTI], [AC_DEFUN([AC_LIBTOOL_PROG_COMPILER_NO_RTTI])]) +m4_ifndef([AC_LIBTOOL_SYS_GLOBAL_SYMBOL_PIPE], [AC_DEFUN([AC_LIBTOOL_SYS_GLOBAL_SYMBOL_PIPE])]) +m4_ifndef([AC_LIBTOOL_PROG_COMPILER_PIC], [AC_DEFUN([AC_LIBTOOL_PROG_COMPILER_PIC])]) +m4_ifndef([AC_LIBTOOL_PROG_LD_SHLIBS], [AC_DEFUN([AC_LIBTOOL_PROG_LD_SHLIBS])]) +m4_ifndef([AC_LIBTOOL_POSTDEP_PREDEP], [AC_DEFUN([AC_LIBTOOL_POSTDEP_PREDEP])]) +m4_ifndef([LT_AC_PROG_EGREP], [AC_DEFUN([LT_AC_PROG_EGREP])]) +m4_ifndef([LT_AC_PROG_SED], [AC_DEFUN([LT_AC_PROG_SED])]) +m4_ifndef([_LT_CC_BASENAME], [AC_DEFUN([_LT_CC_BASENAME])]) +m4_ifndef([_LT_COMPILER_BOILERPLATE], [AC_DEFUN([_LT_COMPILER_BOILERPLATE])]) +m4_ifndef([_LT_LINKER_BOILERPLATE], [AC_DEFUN([_LT_LINKER_BOILERPLATE])]) +m4_ifndef([_AC_PROG_LIBTOOL], [AC_DEFUN([_AC_PROG_LIBTOOL])]) +m4_ifndef([AC_LIBTOOL_SETUP], 
[AC_DEFUN([AC_LIBTOOL_SETUP])]) +m4_ifndef([_LT_AC_CHECK_DLFCN], [AC_DEFUN([_LT_AC_CHECK_DLFCN])]) +m4_ifndef([AC_LIBTOOL_SYS_DYNAMIC_LINKER], [AC_DEFUN([AC_LIBTOOL_SYS_DYNAMIC_LINKER])]) +m4_ifndef([_LT_AC_TAGCONFIG], [AC_DEFUN([_LT_AC_TAGCONFIG])]) +m4_ifndef([AC_DISABLE_FAST_INSTALL], [AC_DEFUN([AC_DISABLE_FAST_INSTALL])]) +m4_ifndef([_LT_AC_LANG_CXX], [AC_DEFUN([_LT_AC_LANG_CXX])]) +m4_ifndef([_LT_AC_LANG_F77], [AC_DEFUN([_LT_AC_LANG_F77])]) +m4_ifndef([_LT_AC_LANG_GCJ], [AC_DEFUN([_LT_AC_LANG_GCJ])]) +m4_ifndef([AC_LIBTOOL_RC], [AC_DEFUN([AC_LIBTOOL_RC])]) +m4_ifndef([AC_LIBTOOL_LANG_C_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_C_CONFIG])]) +m4_ifndef([_LT_AC_LANG_C_CONFIG], [AC_DEFUN([_LT_AC_LANG_C_CONFIG])]) +m4_ifndef([AC_LIBTOOL_LANG_CXX_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_CXX_CONFIG])]) +m4_ifndef([_LT_AC_LANG_CXX_CONFIG], [AC_DEFUN([_LT_AC_LANG_CXX_CONFIG])]) +m4_ifndef([AC_LIBTOOL_LANG_F77_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_F77_CONFIG])]) +m4_ifndef([_LT_AC_LANG_F77_CONFIG], [AC_DEFUN([_LT_AC_LANG_F77_CONFIG])]) +m4_ifndef([AC_LIBTOOL_LANG_GCJ_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_GCJ_CONFIG])]) +m4_ifndef([_LT_AC_LANG_GCJ_CONFIG], [AC_DEFUN([_LT_AC_LANG_GCJ_CONFIG])]) +m4_ifndef([AC_LIBTOOL_LANG_RC_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_RC_CONFIG])]) +m4_ifndef([_LT_AC_LANG_RC_CONFIG], [AC_DEFUN([_LT_AC_LANG_RC_CONFIG])]) +m4_ifndef([AC_LIBTOOL_CONFIG], [AC_DEFUN([AC_LIBTOOL_CONFIG])]) +m4_ifndef([_LT_AC_FILE_LTDLL_C], [AC_DEFUN([_LT_AC_FILE_LTDLL_C])]) diff --git a/backtrace-sys/src/libbacktrace/configure b/backtrace-sys/src/libbacktrace/configure new file mode 100755 index 000000000..b984f5ff5 --- /dev/null +++ b/backtrace-sys/src/libbacktrace/configure @@ -0,0 +1,14361 @@ +#! /bin/sh +# Guess values for system-dependent variables and create Makefiles. +# Generated by GNU Autoconf 2.64 for package-unused version-unused. +# +# Copyright (C) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001, +# 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 Free Software +# Foundation, Inc. +# +# This configure script is free software; the Free Software Foundation +# gives unlimited permission to copy, distribute and modify it. +## -------------------- ## +## M4sh Initialization. ## +## -------------------- ## + +# Be more Bourne compatible +DUALCASE=1; export DUALCASE # for MKS sh +if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then : + emulate sh + NULLCMD=: + # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which + # is contrary to our usage. Disable this feature. + alias -g '${1+"$@"}'='"$@"' + setopt NO_GLOB_SUBST +else + case `(set -o) 2>/dev/null` in #( + *posix*) : + set -o posix ;; #( + *) : + ;; +esac +fi + + +as_nl=' +' +export as_nl +# Printing a long string crashes Solaris 7 /usr/bin/printf. +as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' +as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo +as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo +# Prefer a ksh shell builtin over an external printf program on Solaris, +# but without wasting forks for bash or zsh. 
+if test -z "$BASH_VERSION$ZSH_VERSION" \ + && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then + as_echo='print -r --' + as_echo_n='print -rn --' +elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then + as_echo='printf %s\n' + as_echo_n='printf %s' +else + if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then + as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"' + as_echo_n='/usr/ucb/echo -n' + else + as_echo_body='eval expr "X$1" : "X\\(.*\\)"' + as_echo_n_body='eval + arg=$1; + case $arg in #( + *"$as_nl"*) + expr "X$arg" : "X\\(.*\\)$as_nl"; + arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;; + esac; + expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl" + ' + export as_echo_n_body + as_echo_n='sh -c $as_echo_n_body as_echo' + fi + export as_echo_body + as_echo='sh -c $as_echo_body as_echo' +fi + +# The user is always right. +if test "${PATH_SEPARATOR+set}" != set; then + PATH_SEPARATOR=: + (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { + (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || + PATH_SEPARATOR=';' + } +fi + + +# IFS +# We need space, tab and new line, in precisely that order. Quoting is +# there to prevent editors from complaining about space-tab. +# (If _AS_PATH_WALK were called with IFS unset, it would disable word +# splitting by setting IFS to empty value.) +IFS=" "" $as_nl" + +# Find who we are. Look in the path if we contain no directory separator. +case $0 in #(( + *[\\/]* ) as_myself=$0 ;; + *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break + done +IFS=$as_save_IFS + + ;; +esac +# We did not find ourselves, most probably we were run as `sh COMMAND' +# in which case we are not to be found in the path. +if test "x$as_myself" = x; then + as_myself=$0 +fi +if test ! -f "$as_myself"; then + $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 + exit 1 +fi + +# Unset variables that we do not need and which cause bugs (e.g. in +# pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1" +# suppresses any "Segmentation fault" message there. '((' could +# trigger a bug in pdksh 5.2.14. +for as_var in BASH_ENV ENV MAIL MAILPATH +do eval test x\${$as_var+set} = xset \ + && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : +done +PS1='$ ' +PS2='> ' +PS4='+ ' + +# NLS nuisances. +LC_ALL=C +export LC_ALL +LANGUAGE=C +export LANGUAGE + +# CDPATH. +(unset CDPATH) >/dev/null 2>&1 && unset CDPATH + +if test "x$CONFIG_SHELL" = x; then + as_bourne_compatible="if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then : + emulate sh + NULLCMD=: + # Pre-4.2 versions of Zsh do word splitting on \${1+\"\$@\"}, which + # is contrary to our usage. Disable this feature. 
+ alias -g '\${1+\"\$@\"}'='\"\$@\"' + setopt NO_GLOB_SUBST +else + case \`(set -o) 2>/dev/null\` in #( + *posix*) : + set -o posix ;; #( + *) : + ;; +esac +fi +" + as_required="as_fn_return () { (exit \$1); } +as_fn_success () { as_fn_return 0; } +as_fn_failure () { as_fn_return 1; } +as_fn_ret_success () { return 0; } +as_fn_ret_failure () { return 1; } + +exitcode=0 +as_fn_success || { exitcode=1; echo as_fn_success failed.; } +as_fn_failure && { exitcode=1; echo as_fn_failure succeeded.; } +as_fn_ret_success || { exitcode=1; echo as_fn_ret_success failed.; } +as_fn_ret_failure && { exitcode=1; echo as_fn_ret_failure succeeded.; } +if ( set x; as_fn_ret_success y && test x = \"\$1\" ); then : + +else + exitcode=1; echo positional parameters were not saved. +fi +test x\$exitcode = x0 || exit 1" + as_suggested=" as_lineno_1=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_1a=\$LINENO + as_lineno_2=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_2a=\$LINENO + eval 'test \"x\$as_lineno_1'\$as_run'\" != \"x\$as_lineno_2'\$as_run'\" && + test \"x\`expr \$as_lineno_1'\$as_run' + 1\`\" = \"x\$as_lineno_2'\$as_run'\"' || exit 1 +test \$(( 1 + 1 )) = 2 || exit 1" + if (eval "$as_required") 2>/dev/null; then : + as_have_required=yes +else + as_have_required=no +fi + if test x$as_have_required = xyes && (eval "$as_suggested") 2>/dev/null; then : + +else + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +as_found=false +for as_dir in /bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + as_found=: + case $as_dir in #( + /*) + for as_base in sh bash ksh sh5; do + # Try only shells that exist, to save several forks. + as_shell=$as_dir/$as_base + if { test -f "$as_shell" || test -f "$as_shell.exe"; } && + { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$as_shell"; } 2>/dev/null; then : + CONFIG_SHELL=$as_shell as_have_required=yes + if { $as_echo "$as_bourne_compatible""$as_suggested" | as_run=a "$as_shell"; } 2>/dev/null; then : + break 2 +fi +fi + done;; + esac + as_found=false +done +$as_found || { if { test -f "$SHELL" || test -f "$SHELL.exe"; } && + { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$SHELL"; } 2>/dev/null; then : + CONFIG_SHELL=$SHELL as_have_required=yes +fi; } +IFS=$as_save_IFS + + + if test "x$CONFIG_SHELL" != x; then : + # We cannot yet assume a decent shell, so we have to provide a + # neutralization value for shells without unset; and this also + # works around shells that cannot unset nonexistent variables. + BASH_ENV=/dev/null + ENV=/dev/null + (unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV + export CONFIG_SHELL + exec "$CONFIG_SHELL" "$as_myself" ${1+"$@"} +fi + + if test x$as_have_required = xno; then : + $as_echo "$0: This script requires a shell more modern than all" + $as_echo "$0: the shells that I found on your system." + if test x${ZSH_VERSION+set} = xset ; then + $as_echo "$0: In particular, zsh $ZSH_VERSION has bugs and should" + $as_echo "$0: be upgraded to zsh 4.3.4 or later." + else + $as_echo "$0: Please tell bug-autoconf@gnu.org about your system, +$0: including any error possibly output before this +$0: message. Then install a modern shell, or manually run +$0: the script under such a shell if you do have one." + fi + exit 1 +fi +fi +fi +SHELL=${CONFIG_SHELL-/bin/sh} +export SHELL +# Unset more variables known to interfere with behavior of common tools. 
+CLICOLOR_FORCE= GREP_OPTIONS= +unset CLICOLOR_FORCE GREP_OPTIONS + +## --------------------- ## +## M4sh Shell Functions. ## +## --------------------- ## +# as_fn_unset VAR +# --------------- +# Portably unset VAR. +as_fn_unset () +{ + { eval $1=; unset $1;} +} +as_unset=as_fn_unset + +# as_fn_set_status STATUS +# ----------------------- +# Set $? to STATUS, without forking. +as_fn_set_status () +{ + return $1 +} # as_fn_set_status + +# as_fn_exit STATUS +# ----------------- +# Exit the shell with STATUS, even in a "trap 0" or "set -e" context. +as_fn_exit () +{ + set +e + as_fn_set_status $1 + exit $1 +} # as_fn_exit + +# as_fn_mkdir_p +# ------------- +# Create "$as_dir" as a directory, including parents if necessary. +as_fn_mkdir_p () +{ + + case $as_dir in #( + -*) as_dir=./$as_dir;; + esac + test -d "$as_dir" || eval $as_mkdir_p || { + as_dirs= + while :; do + case $as_dir in #( + *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( + *) as_qdir=$as_dir;; + esac + as_dirs="'$as_qdir' $as_dirs" + as_dir=`$as_dirname -- "$as_dir" || +$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ + X"$as_dir" : 'X\(//\)[^/]' \| \ + X"$as_dir" : 'X\(//\)$' \| \ + X"$as_dir" : 'X\(/\)' \| . 2>/dev/null || +$as_echo X"$as_dir" | + sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ + s//\1/ + q + } + /^X\(\/\/\)[^/].*/{ + s//\1/ + q + } + /^X\(\/\/\)$/{ + s//\1/ + q + } + /^X\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + test -d "$as_dir" && break + done + test -z "$as_dirs" || eval "mkdir $as_dirs" + } || test -d "$as_dir" || as_fn_error "cannot create directory $as_dir" + + +} # as_fn_mkdir_p +# as_fn_append VAR VALUE +# ---------------------- +# Append the text in VALUE to the end of the definition contained in VAR. Take +# advantage of any shell optimizations that allow amortized linear growth over +# repeated appends, instead of the typical quadratic growth present in naive +# implementations. +if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then : + eval 'as_fn_append () + { + eval $1+=\$2 + }' +else + as_fn_append () + { + eval $1=\$$1\$2 + } +fi # as_fn_append + +# as_fn_arith ARG... +# ------------------ +# Perform arithmetic evaluation on the ARGs, and store the result in the +# global $as_val. Take advantage of shells that can avoid forks. The arguments +# must be portable across $(()) and expr. +if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then : + eval 'as_fn_arith () + { + as_val=$(( $* )) + }' +else + as_fn_arith () + { + as_val=`expr "$@" || test $? -eq 1` + } +fi # as_fn_arith + + +# as_fn_error ERROR [LINENO LOG_FD] +# --------------------------------- +# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are +# provided, also output the error to LOG_FD, referencing LINENO. Then exit the +# script with status $?, using 1 if that was 0. 
+as_fn_error () +{ + as_status=$?; test $as_status -eq 0 && as_status=1 + if test "$3"; then + as_lineno=${as_lineno-"$2"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + $as_echo "$as_me:${as_lineno-$LINENO}: error: $1" >&$3 + fi + $as_echo "$as_me: error: $1" >&2 + as_fn_exit $as_status +} # as_fn_error + +if expr a : '\(a\)' >/dev/null 2>&1 && + test "X`expr 00001 : '.*\(...\)'`" = X001; then + as_expr=expr +else + as_expr=false +fi + +if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then + as_basename=basename +else + as_basename=false +fi + +if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then + as_dirname=dirname +else + as_dirname=false +fi + +as_me=`$as_basename -- "$0" || +$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ + X"$0" : 'X\(//\)$' \| \ + X"$0" : 'X\(/\)' \| . 2>/dev/null || +$as_echo X/"$0" | + sed '/^.*\/\([^/][^/]*\)\/*$/{ + s//\1/ + q + } + /^X\/\(\/\/\)$/{ + s//\1/ + q + } + /^X\/\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + +# Avoid depending upon Character Ranges. +as_cr_letters='abcdefghijklmnopqrstuvwxyz' +as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' +as_cr_Letters=$as_cr_letters$as_cr_LETTERS +as_cr_digits='0123456789' +as_cr_alnum=$as_cr_Letters$as_cr_digits + + + as_lineno_1=$LINENO as_lineno_1a=$LINENO + as_lineno_2=$LINENO as_lineno_2a=$LINENO + eval 'test "x$as_lineno_1'$as_run'" != "x$as_lineno_2'$as_run'" && + test "x`expr $as_lineno_1'$as_run' + 1`" = "x$as_lineno_2'$as_run'"' || { + # Blame Lee E. McMahon (1931-1989) for sed's syntax. :-) + sed -n ' + p + /[$]LINENO/= + ' <$as_myself | + sed ' + s/[$]LINENO.*/&-/ + t lineno + b + :lineno + N + :loop + s/[$]LINENO\([^'$as_cr_alnum'_].*\n\)\(.*\)/\2\1\2/ + t loop + s/-\n.*// + ' >$as_me.lineno && + chmod +x "$as_me.lineno" || + { $as_echo "$as_me: error: cannot create $as_me.lineno; rerun with a POSIX shell" >&2; as_fn_exit 1; } + + # Don't try to exec as it changes $[0], causing all sort of problems + # (the dirname of $[0] is not the place where we might find the + # original and so on. Autoconf is especially sensitive to this). + . "./$as_me.lineno" + # Exit status is that of the last command. + exit +} + +ECHO_C= ECHO_N= ECHO_T= +case `echo -n x` in #((((( +-n*) + case `echo 'xy\c'` in + *c*) ECHO_T=' ';; # ECHO_T is single tab character. + xy) ECHO_C='\c';; + *) echo `echo ksh88 bug on AIX 6.1` > /dev/null + ECHO_T=' ';; + esac;; +*) + ECHO_N='-n';; +esac + +rm -f conf$$ conf$$.exe conf$$.file +if test -d conf$$.dir; then + rm -f conf$$.dir/conf$$.file +else + rm -f conf$$.dir + mkdir conf$$.dir 2>/dev/null +fi +if (echo >conf$$.file) 2>/dev/null; then + if ln -s conf$$.file conf$$ 2>/dev/null; then + as_ln_s='ln -s' + # ... but there are two gotchas: + # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. + # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. + # In both cases, we have to default to `cp -p'. + ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || + as_ln_s='cp -p' + elif ln conf$$.file conf$$ 2>/dev/null; then + as_ln_s=ln + else + as_ln_s='cp -p' + fi +else + as_ln_s='cp -p' +fi +rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file +rmdir conf$$.dir 2>/dev/null + +if mkdir -p . 
2>/dev/null; then + as_mkdir_p='mkdir -p "$as_dir"' +else + test -d ./-p && rmdir ./-p + as_mkdir_p=false +fi + +if test -x / >/dev/null 2>&1; then + as_test_x='test -x' +else + if ls -dL / >/dev/null 2>&1; then + as_ls_L_option=L + else + as_ls_L_option= + fi + as_test_x=' + eval sh -c '\'' + if test -d "$1"; then + test -d "$1/."; + else + case $1 in #( + -*)set "./$1";; + esac; + case `ls -ld'$as_ls_L_option' "$1" 2>/dev/null` in #(( + ???[sx]*):;;*)false;;esac;fi + '\'' sh + ' +fi +as_executable_p=$as_test_x + +# Sed expression to map a string onto a valid CPP name. +as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" + +# Sed expression to map a string onto a valid variable name. +as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" + + + +# Check that we are running under the correct shell. +SHELL=${CONFIG_SHELL-/bin/sh} + +case X$lt_ECHO in +X*--fallback-echo) + # Remove one level of quotation (which was required for Make). + ECHO=`echo "$lt_ECHO" | sed 's,\\\\\$\\$0,'$0','` + ;; +esac + +ECHO=${lt_ECHO-echo} +if test "X$1" = X--no-reexec; then + # Discard the --no-reexec flag, and continue. + shift +elif test "X$1" = X--fallback-echo; then + # Avoid inline document here, it may be left over + : +elif test "X`{ $ECHO '\t'; } 2>/dev/null`" = 'X\t' ; then + # Yippee, $ECHO works! + : +else + # Restart under the correct shell. + exec $SHELL "$0" --no-reexec ${1+"$@"} +fi + +if test "X$1" = X--fallback-echo; then + # used as fallback echo + shift + cat <<_LT_EOF +$* +_LT_EOF + exit 0 +fi + +# The HP-UX ksh and POSIX shell print the target directory to stdout +# if CDPATH is set. +(unset CDPATH) >/dev/null 2>&1 && unset CDPATH + +if test -z "$lt_ECHO"; then + if test "X${echo_test_string+set}" != Xset; then + # find a string as large as possible, as long as the shell can cope with it + for cmd in 'sed 50q "$0"' 'sed 20q "$0"' 'sed 10q "$0"' 'sed 2q "$0"' 'echo test'; do + # expected sizes: less than 2Kb, 1Kb, 512 bytes, 16 bytes, ... + if { echo_test_string=`eval $cmd`; } 2>/dev/null && + { test "X$echo_test_string" = "X$echo_test_string"; } 2>/dev/null + then + break + fi + done + fi + + if test "X`{ $ECHO '\t'; } 2>/dev/null`" = 'X\t' && + echo_testing_string=`{ $ECHO "$echo_test_string"; } 2>/dev/null` && + test "X$echo_testing_string" = "X$echo_test_string"; then + : + else + # The Solaris, AIX, and Digital Unix default echo programs unquote + # backslashes. This makes it impossible to quote backslashes using + # echo "$something" | sed 's/\\/\\\\/g' + # + # So, first we look for a working echo in the user's PATH. + + lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR + for dir in $PATH /usr/ucb; do + IFS="$lt_save_ifs" + if (test -f $dir/echo || test -f $dir/echo$ac_exeext) && + test "X`($dir/echo '\t') 2>/dev/null`" = 'X\t' && + echo_testing_string=`($dir/echo "$echo_test_string") 2>/dev/null` && + test "X$echo_testing_string" = "X$echo_test_string"; then + ECHO="$dir/echo" + break + fi + done + IFS="$lt_save_ifs" + + if test "X$ECHO" = Xecho; then + # We didn't find a better echo, so look for alternatives. + if test "X`{ print -r '\t'; } 2>/dev/null`" = 'X\t' && + echo_testing_string=`{ print -r "$echo_test_string"; } 2>/dev/null` && + test "X$echo_testing_string" = "X$echo_test_string"; then + # This shell has a builtin print -r that does the trick. + ECHO='print -r' + elif { test -f /bin/ksh || test -f /bin/ksh$ac_exeext; } && + test "X$CONFIG_SHELL" != X/bin/ksh; then + # If we have ksh, try running configure again with it. 
+ ORIGINAL_CONFIG_SHELL=${CONFIG_SHELL-/bin/sh} + export ORIGINAL_CONFIG_SHELL + CONFIG_SHELL=/bin/ksh + export CONFIG_SHELL + exec $CONFIG_SHELL "$0" --no-reexec ${1+"$@"} + else + # Try using printf. + ECHO='printf %s\n' + if test "X`{ $ECHO '\t'; } 2>/dev/null`" = 'X\t' && + echo_testing_string=`{ $ECHO "$echo_test_string"; } 2>/dev/null` && + test "X$echo_testing_string" = "X$echo_test_string"; then + # Cool, printf works + : + elif echo_testing_string=`($ORIGINAL_CONFIG_SHELL "$0" --fallback-echo '\t') 2>/dev/null` && + test "X$echo_testing_string" = 'X\t' && + echo_testing_string=`($ORIGINAL_CONFIG_SHELL "$0" --fallback-echo "$echo_test_string") 2>/dev/null` && + test "X$echo_testing_string" = "X$echo_test_string"; then + CONFIG_SHELL=$ORIGINAL_CONFIG_SHELL + export CONFIG_SHELL + SHELL="$CONFIG_SHELL" + export SHELL + ECHO="$CONFIG_SHELL $0 --fallback-echo" + elif echo_testing_string=`($CONFIG_SHELL "$0" --fallback-echo '\t') 2>/dev/null` && + test "X$echo_testing_string" = 'X\t' && + echo_testing_string=`($CONFIG_SHELL "$0" --fallback-echo "$echo_test_string") 2>/dev/null` && + test "X$echo_testing_string" = "X$echo_test_string"; then + ECHO="$CONFIG_SHELL $0 --fallback-echo" + else + # maybe with a smaller string... + prev=: + + for cmd in 'echo test' 'sed 2q "$0"' 'sed 10q "$0"' 'sed 20q "$0"' 'sed 50q "$0"'; do + if { test "X$echo_test_string" = "X`eval $cmd`"; } 2>/dev/null + then + break + fi + prev="$cmd" + done + + if test "$prev" != 'sed 50q "$0"'; then + echo_test_string=`eval $prev` + export echo_test_string + exec ${ORIGINAL_CONFIG_SHELL-${CONFIG_SHELL-/bin/sh}} "$0" ${1+"$@"} + else + # Oops. We lost completely, so just stick with echo. + ECHO=echo + fi + fi + fi + fi + fi +fi + +# Copy echo and quote the copy suitably for passing to libtool from +# the Makefile, instead of quoting the original, which is used later. +lt_ECHO=$ECHO +if test "X$lt_ECHO" = "X$CONFIG_SHELL $0 --fallback-echo"; then + lt_ECHO="$CONFIG_SHELL \\\$\$0 --fallback-echo" +fi + + + + +exec 7<&0 &1 + +# Name of the host. +# hostname on some systems (SVR3.2, Linux) returns a bogus exit status, +# so uname gets run too. +ac_hostname=`(hostname || uname -n) 2>/dev/null | sed 1q` + +# +# Initializations. +# +ac_default_prefix=/usr/local +ac_clean_files= +ac_config_libobj_dir=. +LIBOBJS= +cross_compiling=no +subdirs= +MFLAGS= +MAKEFLAGS= + +# Identity of this package. +PACKAGE_NAME='package-unused' +PACKAGE_TARNAME='libbacktrace' +PACKAGE_VERSION='version-unused' +PACKAGE_STRING='package-unused version-unused' +PACKAGE_BUGREPORT='' +PACKAGE_URL='' + +ac_unique_file="backtrace.h" +# Factoring default headers for most tests. 
+ac_includes_default="\
+#include <stdio.h>
+#ifdef HAVE_SYS_TYPES_H
+# include <sys/types.h>
+#endif
+#ifdef HAVE_SYS_STAT_H
+# include <sys/stat.h>
+#endif
+#ifdef STDC_HEADERS
+# include <stdlib.h>
+# include <stddef.h>
+#else
+# ifdef HAVE_STDLIB_H
+#  include <stdlib.h>
+# endif
+#endif
+#ifdef HAVE_STRING_H
+# if !defined STDC_HEADERS && defined HAVE_MEMORY_H
+#  include <memory.h>
+# endif
+# include <string.h>
+#endif
+#ifdef HAVE_STRINGS_H
+# include <strings.h>
+#endif
+#ifdef HAVE_INTTYPES_H
+# include <inttypes.h>
+#endif
+#ifdef HAVE_STDINT_H
+# include <stdint.h>
+#endif
+#ifdef HAVE_UNISTD_H
+# include <unistd.h>
+#endif"
+
+ac_subst_vars='am__EXEEXT_FALSE
+am__EXEEXT_TRUE
+LTLIBOBJS
+LIBOBJS
+NATIVE_FALSE
+NATIVE_TRUE
+HAVE_OBJCOPY_DEBUGLINK_FALSE
+HAVE_OBJCOPY_DEBUGLINK_TRUE
+OBJCOPY
+HAVE_COMPRESSED_DEBUG_FALSE
+HAVE_COMPRESSED_DEBUG_TRUE
+HAVE_ZLIB_FALSE
+HAVE_ZLIB_TRUE
+HAVE_PTHREAD_FALSE
+HAVE_PTHREAD_TRUE
+PTHREAD_CFLAGS
+CLOCK_GETTIME_LINK
+BACKTRACE_USES_MALLOC
+ALLOC_FILE
+VIEW_FILE
+BACKTRACE_SUPPORTS_DATA
+BACKTRACE_SUPPORTED
+FORMAT_FILE
+BACKTRACE_SUPPORTS_THREADS
+PIC_FLAG
+WARN_FLAGS
+EXTRA_FLAGS
+BACKTRACE_FILE
+OTOOL64
+OTOOL
+LIPO
+NMEDIT
+DSYMUTIL
+lt_ECHO
+RANLIB
+AR
+LN_S
+NM
+ac_ct_DUMPBIN
+DUMPBIN
+LD
+FGREP
+SED
+LIBTOOL
+MAINT
+MAINTAINER_MODE_FALSE
+MAINTAINER_MODE_TRUE
+am__untar
+am__tar
+AMTAR
+am__leading_dot
+SET_MAKE
+AWK
+mkdir_p
+MKDIR_P
+INSTALL_STRIP_PROGRAM
+STRIP
+install_sh
+MAKEINFO
+AUTOHEADER
+AUTOMAKE
+AUTOCONF
+ACLOCAL
+VERSION
+PACKAGE
+CYGPATH_W
+am__isrc
+INSTALL_DATA
+INSTALL_SCRIPT
+INSTALL_PROGRAM
+libtool_VERSION
+EGREP
+GREP
+CPP
+OBJEXT
+EXEEXT
+ac_ct_CC
+CPPFLAGS
+LDFLAGS
+CFLAGS
+CC
+target_os
+target_vendor
+target_cpu
+target
+host_os
+host_vendor
+host_cpu
+host
+build_os
+build_vendor
+build_cpu
+build
+multi_basedir
+target_alias
+host_alias
+build_alias
+LIBS
+ECHO_T
+ECHO_N
+ECHO_C
+DEFS
+mandir
+localedir
+libdir
+psdir
+pdfdir
+dvidir
+htmldir
+infodir
+docdir
+oldincludedir
+includedir
+localstatedir
+sharedstatedir
+sysconfdir
+datadir
+datarootdir
+libexecdir
+sbindir
+bindir
+program_transform_name
+prefix
+exec_prefix
+PACKAGE_URL
+PACKAGE_BUGREPORT
+PACKAGE_STRING
+PACKAGE_VERSION
+PACKAGE_TARNAME
+PACKAGE_NAME
+PATH_SEPARATOR
+SHELL'
+ac_subst_files=''
+ac_user_opts='
+enable_option_checking
+enable_multilib
+enable_maintainer_mode
+with_target_subdir
+enable_shared
+enable_static
+with_pic
+enable_fast_install
+with_gnu_ld
+enable_libtool_lock
+enable_largefile
+with_system_libunwind
+enable_host_shared
+'
+ ac_precious_vars='build_alias
+host_alias
+target_alias
+CC
+CFLAGS
+LDFLAGS
+LIBS
+CPPFLAGS
+CPP
+OBJCOPY'
+
+
+# Initialize some variables set by options.
+ac_init_help=
+ac_init_version=false
+ac_unrecognized_opts=
+ac_unrecognized_sep=
+# The variables have the same names as the options, with
+# dashes changed to underlines.
+cache_file=/dev/null
+exec_prefix=NONE
+no_create=
+no_recursion=
+prefix=NONE
+program_prefix=NONE
+program_suffix=NONE
+program_transform_name=s,x,x,
+silent=
+site=
+srcdir=
+verbose=
+x_includes=NONE
+x_libraries=NONE
+
+# Installation directory options.
+# These are left unexpanded so users can "make install exec_prefix=/foo"
+# and all the variables that are supposed to be based on exec_prefix
+# by default will actually change.
+# Use braces instead of parens because sh, perl, etc. also accept them.
+# (The list follows the same order as the GNU Coding Standards.)
+bindir='${exec_prefix}/bin' +sbindir='${exec_prefix}/sbin' +libexecdir='${exec_prefix}/libexec' +datarootdir='${prefix}/share' +datadir='${datarootdir}' +sysconfdir='${prefix}/etc' +sharedstatedir='${prefix}/com' +localstatedir='${prefix}/var' +includedir='${prefix}/include' +oldincludedir='/usr/include' +docdir='${datarootdir}/doc/${PACKAGE_TARNAME}' +infodir='${datarootdir}/info' +htmldir='${docdir}' +dvidir='${docdir}' +pdfdir='${docdir}' +psdir='${docdir}' +libdir='${exec_prefix}/lib' +localedir='${datarootdir}/locale' +mandir='${datarootdir}/man' + +ac_prev= +ac_dashdash= +for ac_option +do + # If the previous option needs an argument, assign it. + if test -n "$ac_prev"; then + eval $ac_prev=\$ac_option + ac_prev= + continue + fi + + case $ac_option in + *=*) ac_optarg=`expr "X$ac_option" : '[^=]*=\(.*\)'` ;; + *) ac_optarg=yes ;; + esac + + # Accept the important Cygnus configure options, so we can diagnose typos. + + case $ac_dashdash$ac_option in + --) + ac_dashdash=yes ;; + + -bindir | --bindir | --bindi | --bind | --bin | --bi) + ac_prev=bindir ;; + -bindir=* | --bindir=* | --bindi=* | --bind=* | --bin=* | --bi=*) + bindir=$ac_optarg ;; + + -build | --build | --buil | --bui | --bu) + ac_prev=build_alias ;; + -build=* | --build=* | --buil=* | --bui=* | --bu=*) + build_alias=$ac_optarg ;; + + -cache-file | --cache-file | --cache-fil | --cache-fi \ + | --cache-f | --cache- | --cache | --cach | --cac | --ca | --c) + ac_prev=cache_file ;; + -cache-file=* | --cache-file=* | --cache-fil=* | --cache-fi=* \ + | --cache-f=* | --cache-=* | --cache=* | --cach=* | --cac=* | --ca=* | --c=*) + cache_file=$ac_optarg ;; + + --config-cache | -C) + cache_file=config.cache ;; + + -datadir | --datadir | --datadi | --datad) + ac_prev=datadir ;; + -datadir=* | --datadir=* | --datadi=* | --datad=*) + datadir=$ac_optarg ;; + + -datarootdir | --datarootdir | --datarootdi | --datarootd | --dataroot \ + | --dataroo | --dataro | --datar) + ac_prev=datarootdir ;; + -datarootdir=* | --datarootdir=* | --datarootdi=* | --datarootd=* \ + | --dataroot=* | --dataroo=* | --dataro=* | --datar=*) + datarootdir=$ac_optarg ;; + + -disable-* | --disable-*) + ac_useropt=`expr "x$ac_option" : 'x-*disable-\(.*\)'` + # Reject names that are not valid shell variable names. + expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && + as_fn_error "invalid feature name: $ac_useropt" + ac_useropt_orig=$ac_useropt + ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` + case $ac_user_opts in + *" +"enable_$ac_useropt" +"*) ;; + *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--disable-$ac_useropt_orig" + ac_unrecognized_sep=', ';; + esac + eval enable_$ac_useropt=no ;; + + -docdir | --docdir | --docdi | --doc | --do) + ac_prev=docdir ;; + -docdir=* | --docdir=* | --docdi=* | --doc=* | --do=*) + docdir=$ac_optarg ;; + + -dvidir | --dvidir | --dvidi | --dvid | --dvi | --dv) + ac_prev=dvidir ;; + -dvidir=* | --dvidir=* | --dvidi=* | --dvid=* | --dvi=* | --dv=*) + dvidir=$ac_optarg ;; + + -enable-* | --enable-*) + ac_useropt=`expr "x$ac_option" : 'x-*enable-\([^=]*\)'` + # Reject names that are not valid shell variable names. 
+ expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && + as_fn_error "invalid feature name: $ac_useropt" + ac_useropt_orig=$ac_useropt + ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` + case $ac_user_opts in + *" +"enable_$ac_useropt" +"*) ;; + *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--enable-$ac_useropt_orig" + ac_unrecognized_sep=', ';; + esac + eval enable_$ac_useropt=\$ac_optarg ;; + + -exec-prefix | --exec_prefix | --exec-prefix | --exec-prefi \ + | --exec-pref | --exec-pre | --exec-pr | --exec-p | --exec- \ + | --exec | --exe | --ex) + ac_prev=exec_prefix ;; + -exec-prefix=* | --exec_prefix=* | --exec-prefix=* | --exec-prefi=* \ + | --exec-pref=* | --exec-pre=* | --exec-pr=* | --exec-p=* | --exec-=* \ + | --exec=* | --exe=* | --ex=*) + exec_prefix=$ac_optarg ;; + + -gas | --gas | --ga | --g) + # Obsolete; use --with-gas. + with_gas=yes ;; + + -help | --help | --hel | --he | -h) + ac_init_help=long ;; + -help=r* | --help=r* | --hel=r* | --he=r* | -hr*) + ac_init_help=recursive ;; + -help=s* | --help=s* | --hel=s* | --he=s* | -hs*) + ac_init_help=short ;; + + -host | --host | --hos | --ho) + ac_prev=host_alias ;; + -host=* | --host=* | --hos=* | --ho=*) + host_alias=$ac_optarg ;; + + -htmldir | --htmldir | --htmldi | --htmld | --html | --htm | --ht) + ac_prev=htmldir ;; + -htmldir=* | --htmldir=* | --htmldi=* | --htmld=* | --html=* | --htm=* \ + | --ht=*) + htmldir=$ac_optarg ;; + + -includedir | --includedir | --includedi | --included | --include \ + | --includ | --inclu | --incl | --inc) + ac_prev=includedir ;; + -includedir=* | --includedir=* | --includedi=* | --included=* | --include=* \ + | --includ=* | --inclu=* | --incl=* | --inc=*) + includedir=$ac_optarg ;; + + -infodir | --infodir | --infodi | --infod | --info | --inf) + ac_prev=infodir ;; + -infodir=* | --infodir=* | --infodi=* | --infod=* | --info=* | --inf=*) + infodir=$ac_optarg ;; + + -libdir | --libdir | --libdi | --libd) + ac_prev=libdir ;; + -libdir=* | --libdir=* | --libdi=* | --libd=*) + libdir=$ac_optarg ;; + + -libexecdir | --libexecdir | --libexecdi | --libexecd | --libexec \ + | --libexe | --libex | --libe) + ac_prev=libexecdir ;; + -libexecdir=* | --libexecdir=* | --libexecdi=* | --libexecd=* | --libexec=* \ + | --libexe=* | --libex=* | --libe=*) + libexecdir=$ac_optarg ;; + + -localedir | --localedir | --localedi | --localed | --locale) + ac_prev=localedir ;; + -localedir=* | --localedir=* | --localedi=* | --localed=* | --locale=*) + localedir=$ac_optarg ;; + + -localstatedir | --localstatedir | --localstatedi | --localstated \ + | --localstate | --localstat | --localsta | --localst | --locals) + ac_prev=localstatedir ;; + -localstatedir=* | --localstatedir=* | --localstatedi=* | --localstated=* \ + | --localstate=* | --localstat=* | --localsta=* | --localst=* | --locals=*) + localstatedir=$ac_optarg ;; + + -mandir | --mandir | --mandi | --mand | --man | --ma | --m) + ac_prev=mandir ;; + -mandir=* | --mandir=* | --mandi=* | --mand=* | --man=* | --ma=* | --m=*) + mandir=$ac_optarg ;; + + -nfp | --nfp | --nf) + # Obsolete; use --without-fp. 
+ with_fp=no ;; + + -no-create | --no-create | --no-creat | --no-crea | --no-cre \ + | --no-cr | --no-c | -n) + no_create=yes ;; + + -no-recursion | --no-recursion | --no-recursio | --no-recursi \ + | --no-recurs | --no-recur | --no-recu | --no-rec | --no-re | --no-r) + no_recursion=yes ;; + + -oldincludedir | --oldincludedir | --oldincludedi | --oldincluded \ + | --oldinclude | --oldinclud | --oldinclu | --oldincl | --oldinc \ + | --oldin | --oldi | --old | --ol | --o) + ac_prev=oldincludedir ;; + -oldincludedir=* | --oldincludedir=* | --oldincludedi=* | --oldincluded=* \ + | --oldinclude=* | --oldinclud=* | --oldinclu=* | --oldincl=* | --oldinc=* \ + | --oldin=* | --oldi=* | --old=* | --ol=* | --o=*) + oldincludedir=$ac_optarg ;; + + -prefix | --prefix | --prefi | --pref | --pre | --pr | --p) + ac_prev=prefix ;; + -prefix=* | --prefix=* | --prefi=* | --pref=* | --pre=* | --pr=* | --p=*) + prefix=$ac_optarg ;; + + -program-prefix | --program-prefix | --program-prefi | --program-pref \ + | --program-pre | --program-pr | --program-p) + ac_prev=program_prefix ;; + -program-prefix=* | --program-prefix=* | --program-prefi=* \ + | --program-pref=* | --program-pre=* | --program-pr=* | --program-p=*) + program_prefix=$ac_optarg ;; + + -program-suffix | --program-suffix | --program-suffi | --program-suff \ + | --program-suf | --program-su | --program-s) + ac_prev=program_suffix ;; + -program-suffix=* | --program-suffix=* | --program-suffi=* \ + | --program-suff=* | --program-suf=* | --program-su=* | --program-s=*) + program_suffix=$ac_optarg ;; + + -program-transform-name | --program-transform-name \ + | --program-transform-nam | --program-transform-na \ + | --program-transform-n | --program-transform- \ + | --program-transform | --program-transfor \ + | --program-transfo | --program-transf \ + | --program-trans | --program-tran \ + | --progr-tra | --program-tr | --program-t) + ac_prev=program_transform_name ;; + -program-transform-name=* | --program-transform-name=* \ + | --program-transform-nam=* | --program-transform-na=* \ + | --program-transform-n=* | --program-transform-=* \ + | --program-transform=* | --program-transfor=* \ + | --program-transfo=* | --program-transf=* \ + | --program-trans=* | --program-tran=* \ + | --progr-tra=* | --program-tr=* | --program-t=*) + program_transform_name=$ac_optarg ;; + + -pdfdir | --pdfdir | --pdfdi | --pdfd | --pdf | --pd) + ac_prev=pdfdir ;; + -pdfdir=* | --pdfdir=* | --pdfdi=* | --pdfd=* | --pdf=* | --pd=*) + pdfdir=$ac_optarg ;; + + -psdir | --psdir | --psdi | --psd | --ps) + ac_prev=psdir ;; + -psdir=* | --psdir=* | --psdi=* | --psd=* | --ps=*) + psdir=$ac_optarg ;; + + -q | -quiet | --quiet | --quie | --qui | --qu | --q \ + | -silent | --silent | --silen | --sile | --sil) + silent=yes ;; + + -sbindir | --sbindir | --sbindi | --sbind | --sbin | --sbi | --sb) + ac_prev=sbindir ;; + -sbindir=* | --sbindir=* | --sbindi=* | --sbind=* | --sbin=* \ + | --sbi=* | --sb=*) + sbindir=$ac_optarg ;; + + -sharedstatedir | --sharedstatedir | --sharedstatedi \ + | --sharedstated | --sharedstate | --sharedstat | --sharedsta \ + | --sharedst | --shareds | --shared | --share | --shar \ + | --sha | --sh) + ac_prev=sharedstatedir ;; + -sharedstatedir=* | --sharedstatedir=* | --sharedstatedi=* \ + | --sharedstated=* | --sharedstate=* | --sharedstat=* | --sharedsta=* \ + | --sharedst=* | --shareds=* | --shared=* | --share=* | --shar=* \ + | --sha=* | --sh=*) + sharedstatedir=$ac_optarg ;; + + -site | --site | --sit) + ac_prev=site ;; + -site=* | --site=* | --sit=*) + 
site=$ac_optarg ;; + + -srcdir | --srcdir | --srcdi | --srcd | --src | --sr) + ac_prev=srcdir ;; + -srcdir=* | --srcdir=* | --srcdi=* | --srcd=* | --src=* | --sr=*) + srcdir=$ac_optarg ;; + + -sysconfdir | --sysconfdir | --sysconfdi | --sysconfd | --sysconf \ + | --syscon | --sysco | --sysc | --sys | --sy) + ac_prev=sysconfdir ;; + -sysconfdir=* | --sysconfdir=* | --sysconfdi=* | --sysconfd=* | --sysconf=* \ + | --syscon=* | --sysco=* | --sysc=* | --sys=* | --sy=*) + sysconfdir=$ac_optarg ;; + + -target | --target | --targe | --targ | --tar | --ta | --t) + ac_prev=target_alias ;; + -target=* | --target=* | --targe=* | --targ=* | --tar=* | --ta=* | --t=*) + target_alias=$ac_optarg ;; + + -v | -verbose | --verbose | --verbos | --verbo | --verb) + verbose=yes ;; + + -version | --version | --versio | --versi | --vers | -V) + ac_init_version=: ;; + + -with-* | --with-*) + ac_useropt=`expr "x$ac_option" : 'x-*with-\([^=]*\)'` + # Reject names that are not valid shell variable names. + expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && + as_fn_error "invalid package name: $ac_useropt" + ac_useropt_orig=$ac_useropt + ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` + case $ac_user_opts in + *" +"with_$ac_useropt" +"*) ;; + *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--with-$ac_useropt_orig" + ac_unrecognized_sep=', ';; + esac + eval with_$ac_useropt=\$ac_optarg ;; + + -without-* | --without-*) + ac_useropt=`expr "x$ac_option" : 'x-*without-\(.*\)'` + # Reject names that are not valid shell variable names. + expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && + as_fn_error "invalid package name: $ac_useropt" + ac_useropt_orig=$ac_useropt + ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` + case $ac_user_opts in + *" +"with_$ac_useropt" +"*) ;; + *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--without-$ac_useropt_orig" + ac_unrecognized_sep=', ';; + esac + eval with_$ac_useropt=no ;; + + --x) + # Obsolete; use --with-x. + with_x=yes ;; + + -x-includes | --x-includes | --x-include | --x-includ | --x-inclu \ + | --x-incl | --x-inc | --x-in | --x-i) + ac_prev=x_includes ;; + -x-includes=* | --x-includes=* | --x-include=* | --x-includ=* | --x-inclu=* \ + | --x-incl=* | --x-inc=* | --x-in=* | --x-i=*) + x_includes=$ac_optarg ;; + + -x-libraries | --x-libraries | --x-librarie | --x-librari \ + | --x-librar | --x-libra | --x-libr | --x-lib | --x-li | --x-l) + ac_prev=x_libraries ;; + -x-libraries=* | --x-libraries=* | --x-librarie=* | --x-librari=* \ + | --x-librar=* | --x-libra=* | --x-libr=* | --x-lib=* | --x-li=* | --x-l=*) + x_libraries=$ac_optarg ;; + + -*) as_fn_error "unrecognized option: \`$ac_option' +Try \`$0 --help' for more information." + ;; + + *=*) + ac_envvar=`expr "x$ac_option" : 'x\([^=]*\)='` + # Reject names that are not valid shell variable names. + case $ac_envvar in #( + '' | [0-9]* | *[!_$as_cr_alnum]* ) + as_fn_error "invalid variable name: \`$ac_envvar'" ;; + esac + eval $ac_envvar=\$ac_optarg + export $ac_envvar ;; + + *) + # FIXME: should be removed in autoconf 3.0. 
+ $as_echo "$as_me: WARNING: you should use --build, --host, --target" >&2 + expr "x$ac_option" : ".*[^-._$as_cr_alnum]" >/dev/null && + $as_echo "$as_me: WARNING: invalid host type: $ac_option" >&2 + : ${build_alias=$ac_option} ${host_alias=$ac_option} ${target_alias=$ac_option} + ;; + + esac +done + +if test -n "$ac_prev"; then + ac_option=--`echo $ac_prev | sed 's/_/-/g'` + as_fn_error "missing argument to $ac_option" +fi + +if test -n "$ac_unrecognized_opts"; then + case $enable_option_checking in + no) ;; + fatal) as_fn_error "unrecognized options: $ac_unrecognized_opts" ;; + *) $as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2 ;; + esac +fi + +# Check all directory arguments for consistency. +for ac_var in exec_prefix prefix bindir sbindir libexecdir datarootdir \ + datadir sysconfdir sharedstatedir localstatedir includedir \ + oldincludedir docdir infodir htmldir dvidir pdfdir psdir \ + libdir localedir mandir +do + eval ac_val=\$$ac_var + # Remove trailing slashes. + case $ac_val in + */ ) + ac_val=`expr "X$ac_val" : 'X\(.*[^/]\)' \| "X$ac_val" : 'X\(.*\)'` + eval $ac_var=\$ac_val;; + esac + # Be sure to have absolute directory names. + case $ac_val in + [\\/$]* | ?:[\\/]* ) continue;; + NONE | '' ) case $ac_var in *prefix ) continue;; esac;; + esac + as_fn_error "expected an absolute directory name for --$ac_var: $ac_val" +done + +# There might be people who depend on the old broken behavior: `$host' +# used to hold the argument of --host etc. +# FIXME: To remove some day. +build=$build_alias +host=$host_alias +target=$target_alias + +# FIXME: To remove some day. +if test "x$host_alias" != x; then + if test "x$build_alias" = x; then + cross_compiling=maybe + $as_echo "$as_me: WARNING: If you wanted to set the --build type, don't use --host. + If a cross compiler is detected then cross compile mode will be used." >&2 + elif test "x$build_alias" != "x$host_alias"; then + cross_compiling=yes + fi +fi + +ac_tool_prefix= +test -n "$host_alias" && ac_tool_prefix=$host_alias- + +test "$silent" = yes && exec 6>/dev/null + + +ac_pwd=`pwd` && test -n "$ac_pwd" && +ac_ls_di=`ls -di .` && +ac_pwd_ls_di=`cd "$ac_pwd" && ls -di .` || + as_fn_error "working directory cannot be determined" +test "X$ac_ls_di" = "X$ac_pwd_ls_di" || + as_fn_error "pwd does not report name of working directory" + + +# Find the source files, if location was not specified. +if test -z "$srcdir"; then + ac_srcdir_defaulted=yes + # Try the directory containing this script, then the parent directory. + ac_confdir=`$as_dirname -- "$as_myself" || +$as_expr X"$as_myself" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ + X"$as_myself" : 'X\(//\)[^/]' \| \ + X"$as_myself" : 'X\(//\)$' \| \ + X"$as_myself" : 'X\(/\)' \| . 2>/dev/null || +$as_echo X"$as_myself" | + sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ + s//\1/ + q + } + /^X\(\/\/\)[^/].*/{ + s//\1/ + q + } + /^X\(\/\/\)$/{ + s//\1/ + q + } + /^X\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + srcdir=$ac_confdir + if test ! -r "$srcdir/$ac_unique_file"; then + srcdir=.. + fi +else + ac_srcdir_defaulted=no +fi +if test ! -r "$srcdir/$ac_unique_file"; then + test "$ac_srcdir_defaulted" = yes && srcdir="$ac_confdir or .." + as_fn_error "cannot find sources ($ac_unique_file) in $srcdir" +fi +ac_msg="sources are in $srcdir, but \`cd $srcdir' does not work" +ac_abs_confdir=`( + cd "$srcdir" && test -r "./$ac_unique_file" || as_fn_error "$ac_msg" + pwd)` +# When building in place, set srcdir=. +if test "$ac_abs_confdir" = "$ac_pwd"; then + srcdir=. 
+fi +# Remove unnecessary trailing slashes from srcdir. +# Double slashes in file names in object file debugging info +# mess up M-x gdb in Emacs. +case $srcdir in +*/) srcdir=`expr "X$srcdir" : 'X\(.*[^/]\)' \| "X$srcdir" : 'X\(.*\)'`;; +esac +for ac_var in $ac_precious_vars; do + eval ac_env_${ac_var}_set=\${${ac_var}+set} + eval ac_env_${ac_var}_value=\$${ac_var} + eval ac_cv_env_${ac_var}_set=\${${ac_var}+set} + eval ac_cv_env_${ac_var}_value=\$${ac_var} +done + +# +# Report the --help message. +# +if test "$ac_init_help" = "long"; then + # Omit some internal or obsolete options to make the list less imposing. + # This message is too long to be a string in the A/UX 3.1 sh. + cat <<_ACEOF +\`configure' configures package-unused version-unused to adapt to many kinds of systems. + +Usage: $0 [OPTION]... [VAR=VALUE]... + +To assign environment variables (e.g., CC, CFLAGS...), specify them as +VAR=VALUE. See below for descriptions of some of the useful variables. + +Defaults for the options are specified in brackets. + +Configuration: + -h, --help display this help and exit + --help=short display options specific to this package + --help=recursive display the short help of all the included packages + -V, --version display version information and exit + -q, --quiet, --silent do not print \`checking...' messages + --cache-file=FILE cache test results in FILE [disabled] + -C, --config-cache alias for \`--cache-file=config.cache' + -n, --no-create do not create output files + --srcdir=DIR find the sources in DIR [configure dir or \`..'] + +Installation directories: + --prefix=PREFIX install architecture-independent files in PREFIX + [$ac_default_prefix] + --exec-prefix=EPREFIX install architecture-dependent files in EPREFIX + [PREFIX] + +By default, \`make install' will install all the files in +\`$ac_default_prefix/bin', \`$ac_default_prefix/lib' etc. You can specify +an installation prefix other than \`$ac_default_prefix' using \`--prefix', +for instance \`--prefix=\$HOME'. + +For better control, use the options below. 
+ +Fine tuning of the installation directories: + --bindir=DIR user executables [EPREFIX/bin] + --sbindir=DIR system admin executables [EPREFIX/sbin] + --libexecdir=DIR program executables [EPREFIX/libexec] + --sysconfdir=DIR read-only single-machine data [PREFIX/etc] + --sharedstatedir=DIR modifiable architecture-independent data [PREFIX/com] + --localstatedir=DIR modifiable single-machine data [PREFIX/var] + --libdir=DIR object code libraries [EPREFIX/lib] + --includedir=DIR C header files [PREFIX/include] + --oldincludedir=DIR C header files for non-gcc [/usr/include] + --datarootdir=DIR read-only arch.-independent data root [PREFIX/share] + --datadir=DIR read-only architecture-independent data [DATAROOTDIR] + --infodir=DIR info documentation [DATAROOTDIR/info] + --localedir=DIR locale-dependent data [DATAROOTDIR/locale] + --mandir=DIR man documentation [DATAROOTDIR/man] + --docdir=DIR documentation root [DATAROOTDIR/doc/libbacktrace] + --htmldir=DIR html documentation [DOCDIR] + --dvidir=DIR dvi documentation [DOCDIR] + --pdfdir=DIR pdf documentation [DOCDIR] + --psdir=DIR ps documentation [DOCDIR] +_ACEOF + + cat <<\_ACEOF + +Program names: + --program-prefix=PREFIX prepend PREFIX to installed program names + --program-suffix=SUFFIX append SUFFIX to installed program names + --program-transform-name=PROGRAM run sed PROGRAM on installed program names + +System types: + --build=BUILD configure for building on BUILD [guessed] + --host=HOST cross-compile to build programs to run on HOST [BUILD] + --target=TARGET configure for building compilers for TARGET [HOST] +_ACEOF +fi + +if test -n "$ac_init_help"; then + case $ac_init_help in + short | recursive ) echo "Configuration of package-unused version-unused:";; + esac + cat <<\_ACEOF + +Optional Features: + --disable-option-checking ignore unrecognized --enable/--with options + --disable-FEATURE do not include FEATURE (same as --enable-FEATURE=no) + --enable-FEATURE[=ARG] include FEATURE [ARG=yes] + --enable-multilib build many library versions (default) + --enable-maintainer-mode enable make rules and dependencies not useful + (and sometimes confusing) to the casual installer + --enable-shared[=PKGS] build shared libraries [default=yes] + --enable-static[=PKGS] build static libraries [default=yes] + --enable-fast-install[=PKGS] + optimize for fast installation [default=yes] + --disable-libtool-lock avoid locking (might break parallel builds) + --disable-largefile omit support for large files + --enable-host-shared build host code as shared libraries + +Optional Packages: + --with-PACKAGE[=ARG] use PACKAGE [ARG=yes] + --without-PACKAGE do not use PACKAGE (same as --with-PACKAGE=no) + --with-target-subdir=SUBDIR Configuring in a subdirectory for target + --with-pic try to use only PIC/non-PIC objects [default=use + both] + --with-gnu-ld assume the C compiler uses GNU ld [default=no] + --with-system-libunwind use installed libunwind + +Some influential environment variables: + CC C compiler command + CFLAGS C compiler flags + LDFLAGS linker flags, e.g. -L if you have libraries in a + nonstandard directory + LIBS libraries to pass to the linker, e.g. -l + CPPFLAGS C/C++/Objective C preprocessor flags, e.g. -I if + you have headers in a nonstandard directory + CPP C preprocessor + OBJCOPY location of objcopy + +Use these variables to override the choices made by `configure' or to help +it to find libraries and programs with nonstandard names/locations. + +Report bugs to the package provider. +_ACEOF +ac_status=$? 
+fi + +if test "$ac_init_help" = "recursive"; then + # If there are subdirs, report their specific --help. + for ac_dir in : $ac_subdirs_all; do test "x$ac_dir" = x: && continue + test -d "$ac_dir" || + { cd "$srcdir" && ac_pwd=`pwd` && srcdir=. && test -d "$ac_dir"; } || + continue + ac_builddir=. + +case "$ac_dir" in +.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;; +*) + ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'` + # A ".." for each directory in $ac_dir_suffix. + ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'` + case $ac_top_builddir_sub in + "") ac_top_builddir_sub=. ac_top_build_prefix= ;; + *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; + esac ;; +esac +ac_abs_top_builddir=$ac_pwd +ac_abs_builddir=$ac_pwd$ac_dir_suffix +# for backward compatibility: +ac_top_builddir=$ac_top_build_prefix + +case $srcdir in + .) # We are building in place. + ac_srcdir=. + ac_top_srcdir=$ac_top_builddir_sub + ac_abs_top_srcdir=$ac_pwd ;; + [\\/]* | ?:[\\/]* ) # Absolute name. + ac_srcdir=$srcdir$ac_dir_suffix; + ac_top_srcdir=$srcdir + ac_abs_top_srcdir=$srcdir ;; + *) # Relative name. + ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix + ac_top_srcdir=$ac_top_build_prefix$srcdir + ac_abs_top_srcdir=$ac_pwd/$srcdir ;; +esac +ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix + + cd "$ac_dir" || { ac_status=$?; continue; } + # Check for guested configure. + if test -f "$ac_srcdir/configure.gnu"; then + echo && + $SHELL "$ac_srcdir/configure.gnu" --help=recursive + elif test -f "$ac_srcdir/configure"; then + echo && + $SHELL "$ac_srcdir/configure" --help=recursive + else + $as_echo "$as_me: WARNING: no configuration information is in $ac_dir" >&2 + fi || ac_status=$? + cd "$ac_pwd" || { ac_status=$?; break; } + done +fi + +test -n "$ac_init_help" && exit $ac_status +if $ac_init_version; then + cat <<\_ACEOF +package-unused configure version-unused +generated by GNU Autoconf 2.64 + +Copyright (C) 2009 Free Software Foundation, Inc. +This configure script is free software; the Free Software Foundation +gives unlimited permission to copy, distribute and modify it. +_ACEOF + exit +fi + +## ------------------------ ## +## Autoconf initialization. ## +## ------------------------ ## + +# ac_fn_c_try_compile LINENO +# -------------------------- +# Try to compile conftest.$ac_ext, and return whether this succeeded. +ac_fn_c_try_compile () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + rm -f conftest.$ac_objext + if { { ac_try="$ac_compile" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_compile") 2>conftest.err + ac_status=$? + if test -s conftest.err; then + grep -v '^ *+' conftest.err >conftest.er1 + cat conftest.er1 >&5 + mv -f conftest.er1 conftest.err + fi + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } && { + test -z "$ac_c_werror_flag" || + test ! -s conftest.err + } && test -s conftest.$ac_objext; then : + ac_retval=0 +else + $as_echo "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 + + ac_retval=1 +fi + eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;} + return $ac_retval + +} # ac_fn_c_try_compile + +# ac_fn_c_try_cpp LINENO +# ---------------------- +# Try to preprocess conftest.$ac_ext, and return whether this succeeded. 
+ac_fn_c_try_cpp () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + if { { ac_try="$ac_cpp conftest.$ac_ext" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_cpp conftest.$ac_ext") 2>conftest.err + ac_status=$? + if test -s conftest.err; then + grep -v '^ *+' conftest.err >conftest.er1 + cat conftest.er1 >&5 + mv -f conftest.er1 conftest.err + fi + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } >/dev/null && { + test -z "$ac_c_preproc_warn_flag$ac_c_werror_flag" || + test ! -s conftest.err + }; then : + ac_retval=0 +else + $as_echo "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 + + ac_retval=1 +fi + eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;} + return $ac_retval + +} # ac_fn_c_try_cpp + +# ac_fn_c_check_header_mongrel LINENO HEADER VAR INCLUDES +# ------------------------------------------------------- +# Tests whether HEADER exists, giving a warning if it cannot be compiled using +# the include files in INCLUDES and setting the cache variable VAR +# accordingly. +ac_fn_c_check_header_mongrel () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + if { as_var=$3; eval "test \"\${$as_var+set}\" = set"; }; then : + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 +$as_echo_n "checking for $2... " >&6; } +if { as_var=$3; eval "test \"\${$as_var+set}\" = set"; }; then : + $as_echo_n "(cached) " >&6 +fi +eval ac_res=\$$3 + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } +else + # Is the header compilable? +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking $2 usability" >&5 +$as_echo_n "checking $2 usability... " >&6; } +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +$4 +#include <$2> +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_header_compiler=yes +else + ac_header_compiler=no +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_header_compiler" >&5 +$as_echo "$ac_header_compiler" >&6; } + +# Is the header present? +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking $2 presence" >&5 +$as_echo_n "checking $2 presence... " >&6; } +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include <$2> +_ACEOF +if ac_fn_c_try_cpp "$LINENO"; then : + ac_header_preproc=yes +else + ac_header_preproc=no +fi +rm -f conftest.err conftest.$ac_ext +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_header_preproc" >&5 +$as_echo "$ac_header_preproc" >&6; } + +# So? What about this header? +case $ac_header_compiler:$ac_header_preproc:$ac_c_preproc_warn_flag in #(( + yes:no: ) + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: accepted by the compiler, rejected by the preprocessor!" >&5 +$as_echo "$as_me: WARNING: $2: accepted by the compiler, rejected by the preprocessor!" 
>&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: proceeding with the compiler's result" >&5 +$as_echo "$as_me: WARNING: $2: proceeding with the compiler's result" >&2;} + ;; + no:yes:* ) + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: present but cannot be compiled" >&5 +$as_echo "$as_me: WARNING: $2: present but cannot be compiled" >&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: check for missing prerequisite headers?" >&5 +$as_echo "$as_me: WARNING: $2: check for missing prerequisite headers?" >&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: see the Autoconf documentation" >&5 +$as_echo "$as_me: WARNING: $2: see the Autoconf documentation" >&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: section \"Present But Cannot Be Compiled\"" >&5 +$as_echo "$as_me: WARNING: $2: section \"Present But Cannot Be Compiled\"" >&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: proceeding with the compiler's result" >&5 +$as_echo "$as_me: WARNING: $2: proceeding with the compiler's result" >&2;} + ;; +esac + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 +$as_echo_n "checking for $2... " >&6; } +if { as_var=$3; eval "test \"\${$as_var+set}\" = set"; }; then : + $as_echo_n "(cached) " >&6 +else + eval "$3=\$ac_header_compiler" +fi +eval ac_res=\$$3 + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } +fi + eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;} + +} # ac_fn_c_check_header_mongrel + +# ac_fn_c_try_run LINENO +# ---------------------- +# Try to link conftest.$ac_ext, and return whether this succeeded. Assumes +# that executables *can* be run. +ac_fn_c_try_run () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + if { { ac_try="$ac_link" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_link") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } && { ac_try='./conftest$ac_exeext' + { { case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_try") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; }; then : + ac_retval=0 +else + $as_echo "$as_me: program exited with status $ac_status" >&5 + $as_echo "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 + + ac_retval=$ac_status +fi + rm -rf conftest.dSYM conftest_ipa8_conftest.oo + eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;} + return $ac_retval + +} # ac_fn_c_try_run + +# ac_fn_c_check_header_compile LINENO HEADER VAR INCLUDES +# ------------------------------------------------------- +# Tests whether HEADER exists and can be compiled using the include files in +# INCLUDES, setting the cache variable VAR accordingly. +ac_fn_c_check_header_compile () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 +$as_echo_n "checking for $2... 
" >&6; } +if { as_var=$3; eval "test \"\${$as_var+set}\" = set"; }; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +$4 +#include <$2> +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + eval "$3=yes" +else + eval "$3=no" +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +fi +eval ac_res=\$$3 + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } + eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;} + +} # ac_fn_c_check_header_compile + +# ac_fn_c_try_link LINENO +# ----------------------- +# Try to link conftest.$ac_ext, and return whether this succeeded. +ac_fn_c_try_link () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + rm -f conftest.$ac_objext conftest$ac_exeext + if { { ac_try="$ac_link" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_link") 2>conftest.err + ac_status=$? + if test -s conftest.err; then + grep -v '^ *+' conftest.err >conftest.er1 + cat conftest.er1 >&5 + mv -f conftest.er1 conftest.err + fi + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } && { + test -z "$ac_c_werror_flag" || + test ! -s conftest.err + } && test -s conftest$ac_exeext && { + test "$cross_compiling" = yes || + $as_test_x conftest$ac_exeext + }; then : + ac_retval=0 +else + $as_echo "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 + + ac_retval=1 +fi + # Delete the IPA/IPO (Inter Procedural Analysis/Optimization) information + # created by the PGI compiler (conftest_ipa8_conftest.oo), as it would + # interfere with the next link command; also delete a directory that is + # left behind by Apple's compiler. We do this before executing the actions. + rm -rf conftest.dSYM conftest_ipa8_conftest.oo + eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;} + return $ac_retval + +} # ac_fn_c_try_link + +# ac_fn_c_check_func LINENO FUNC VAR +# ---------------------------------- +# Tests whether FUNC exists, setting the cache variable VAR accordingly +ac_fn_c_check_func () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 +$as_echo_n "checking for $2... " >&6; } +if { as_var=$3; eval "test \"\${$as_var+set}\" = set"; }; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +/* Define $2 to an innocuous variant, in case declares $2. + For example, HP-UX 11i declares gettimeofday. */ +#define $2 innocuous_$2 + +/* System header to define __stub macros and hopefully few prototypes, + which can conflict with char $2 (); below. + Prefer to if __STDC__ is defined, since + exists even on freestanding compilers. */ + +#ifdef __STDC__ +# include +#else +# include +#endif + +#undef $2 + +/* Override any GCC internal prototype to avoid an error. + Use char because int might match the return type of a GCC + builtin and then its argument prototype would still apply. */ +#ifdef __cplusplus +extern "C" +#endif +char $2 (); +/* The GNU C library defines this for functions which it implements + to always fail with ENOSYS. Some functions are actually named + something starting with __ and the normal name is an alias. 
*/ +#if defined __stub_$2 || defined __stub___$2 +choke me +#endif + +int +main () +{ +return $2 (); + ; + return 0; +} +_ACEOF +if ac_fn_c_try_link "$LINENO"; then : + eval "$3=yes" +else + eval "$3=no" +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +fi +eval ac_res=\$$3 + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } + eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;} + +} # ac_fn_c_check_func + +# ac_fn_c_check_decl LINENO SYMBOL VAR +# ------------------------------------ +# Tests whether SYMBOL is declared, setting cache variable VAR accordingly. +ac_fn_c_check_decl () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $2 is declared" >&5 +$as_echo_n "checking whether $2 is declared... " >&6; } +if { as_var=$3; eval "test \"\${$as_var+set}\" = set"; }; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +$4 +int +main () +{ +#ifndef $2 + (void) $2; +#endif + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + eval "$3=yes" +else + eval "$3=no" +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +fi +eval ac_res=\$$3 + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } + eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;} + +} # ac_fn_c_check_decl +cat >config.log <<_ACEOF +This file contains any messages produced by compilers while +running configure, to aid debugging if configure makes a mistake. + +It was created by package-unused $as_me version-unused, which was +generated by GNU Autoconf 2.64. Invocation command line was + + $ $0 $@ + +_ACEOF +exec 5>>config.log +{ +cat <<_ASUNAME +## --------- ## +## Platform. ## +## --------- ## + +hostname = `(hostname || uname -n) 2>/dev/null | sed 1q` +uname -m = `(uname -m) 2>/dev/null || echo unknown` +uname -r = `(uname -r) 2>/dev/null || echo unknown` +uname -s = `(uname -s) 2>/dev/null || echo unknown` +uname -v = `(uname -v) 2>/dev/null || echo unknown` + +/usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null || echo unknown` +/bin/uname -X = `(/bin/uname -X) 2>/dev/null || echo unknown` + +/bin/arch = `(/bin/arch) 2>/dev/null || echo unknown` +/usr/bin/arch -k = `(/usr/bin/arch -k) 2>/dev/null || echo unknown` +/usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null || echo unknown` +/usr/bin/hostinfo = `(/usr/bin/hostinfo) 2>/dev/null || echo unknown` +/bin/machine = `(/bin/machine) 2>/dev/null || echo unknown` +/usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null || echo unknown` +/bin/universe = `(/bin/universe) 2>/dev/null || echo unknown` + +_ASUNAME + +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + $as_echo "PATH: $as_dir" + done +IFS=$as_save_IFS + +} >&5 + +cat >&5 <<_ACEOF + + +## ----------- ## +## Core tests. ## +## ----------- ## + +_ACEOF + + +# Keep a trace of the command line. +# Strip out --no-create and --no-recursion so they do not pile up. +# Strip out --silent because we don't want to record it for future runs. +# Also quote any args containing shell meta-characters. +# Make two passes to allow for proper duplicate-argument suppression. 
+ac_configure_args= +ac_configure_args0= +ac_configure_args1= +ac_must_keep_next=false +for ac_pass in 1 2 +do + for ac_arg + do + case $ac_arg in + -no-create | --no-c* | -n | -no-recursion | --no-r*) continue ;; + -q | -quiet | --quiet | --quie | --qui | --qu | --q \ + | -silent | --silent | --silen | --sile | --sil) + continue ;; + *\'*) + ac_arg=`$as_echo "$ac_arg" | sed "s/'/'\\\\\\\\''/g"` ;; + esac + case $ac_pass in + 1) as_fn_append ac_configure_args0 " '$ac_arg'" ;; + 2) + as_fn_append ac_configure_args1 " '$ac_arg'" + if test $ac_must_keep_next = true; then + ac_must_keep_next=false # Got value, back to normal. + else + case $ac_arg in + *=* | --config-cache | -C | -disable-* | --disable-* \ + | -enable-* | --enable-* | -gas | --g* | -nfp | --nf* \ + | -q | -quiet | --q* | -silent | --sil* | -v | -verb* \ + | -with-* | --with-* | -without-* | --without-* | --x) + case "$ac_configure_args0 " in + "$ac_configure_args1"*" '$ac_arg' "* ) continue ;; + esac + ;; + -* ) ac_must_keep_next=true ;; + esac + fi + as_fn_append ac_configure_args " '$ac_arg'" + ;; + esac + done +done +{ ac_configure_args0=; unset ac_configure_args0;} +{ ac_configure_args1=; unset ac_configure_args1;} + +# When interrupted or exit'd, cleanup temporary files, and complete +# config.log. We remove comments because anyway the quotes in there +# would cause problems or look ugly. +# WARNING: Use '\'' to represent an apostrophe within the trap. +# WARNING: Do not start the trap code with a newline, due to a FreeBSD 4.0 bug. +trap 'exit_status=$? + # Save into config.log some information that might help in debugging. + { + echo + + cat <<\_ASBOX +## ---------------- ## +## Cache variables. ## +## ---------------- ## +_ASBOX + echo + # The following way of writing the cache mishandles newlines in values, +( + for ac_var in `(set) 2>&1 | sed -n '\''s/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'\''`; do + eval ac_val=\$$ac_var + case $ac_val in #( + *${as_nl}*) + case $ac_var in #( + *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 +$as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; + esac + case $ac_var in #( + _ | IFS | as_nl) ;; #( + BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( + *) { eval $ac_var=; unset $ac_var;} ;; + esac ;; + esac + done + (set) 2>&1 | + case $as_nl`(ac_space='\'' '\''; set) 2>&1` in #( + *${as_nl}ac_space=\ *) + sed -n \ + "s/'\''/'\''\\\\'\'''\''/g; + s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\''\\2'\''/p" + ;; #( + *) + sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" + ;; + esac | + sort +) + echo + + cat <<\_ASBOX +## ----------------- ## +## Output variables. ## +## ----------------- ## +_ASBOX + echo + for ac_var in $ac_subst_vars + do + eval ac_val=\$$ac_var + case $ac_val in + *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; + esac + $as_echo "$ac_var='\''$ac_val'\''" + done | sort + echo + + if test -n "$ac_subst_files"; then + cat <<\_ASBOX +## ------------------- ## +## File substitutions. ## +## ------------------- ## +_ASBOX + echo + for ac_var in $ac_subst_files + do + eval ac_val=\$$ac_var + case $ac_val in + *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; + esac + $as_echo "$ac_var='\''$ac_val'\''" + done | sort + echo + fi + + if test -s confdefs.h; then + cat <<\_ASBOX +## ----------- ## +## confdefs.h. 
## +## ----------- ## +_ASBOX + echo + cat confdefs.h + echo + fi + test "$ac_signal" != 0 && + $as_echo "$as_me: caught signal $ac_signal" + $as_echo "$as_me: exit $exit_status" + } >&5 + rm -f core *.core core.conftest.* && + rm -f -r conftest* confdefs* conf$$* $ac_clean_files && + exit $exit_status +' 0 +for ac_signal in 1 2 13 15; do + trap 'ac_signal='$ac_signal'; as_fn_exit 1' $ac_signal +done +ac_signal=0 + +# confdefs.h avoids OS command line length limits that DEFS can exceed. +rm -f -r conftest* confdefs.h + +$as_echo "/* confdefs.h */" > confdefs.h + +# Predefined preprocessor variables. + +cat >>confdefs.h <<_ACEOF +#define PACKAGE_NAME "$PACKAGE_NAME" +_ACEOF + +cat >>confdefs.h <<_ACEOF +#define PACKAGE_TARNAME "$PACKAGE_TARNAME" +_ACEOF + +cat >>confdefs.h <<_ACEOF +#define PACKAGE_VERSION "$PACKAGE_VERSION" +_ACEOF + +cat >>confdefs.h <<_ACEOF +#define PACKAGE_STRING "$PACKAGE_STRING" +_ACEOF + +cat >>confdefs.h <<_ACEOF +#define PACKAGE_BUGREPORT "$PACKAGE_BUGREPORT" +_ACEOF + +cat >>confdefs.h <<_ACEOF +#define PACKAGE_URL "$PACKAGE_URL" +_ACEOF + + +# Let the site file select an alternate cache file if it wants to. +# Prefer an explicitly selected file to automatically selected ones. +ac_site_file1=NONE +ac_site_file2=NONE +if test -n "$CONFIG_SITE"; then + ac_site_file1=$CONFIG_SITE +elif test "x$prefix" != xNONE; then + ac_site_file1=$prefix/share/config.site + ac_site_file2=$prefix/etc/config.site +else + ac_site_file1=$ac_default_prefix/share/config.site + ac_site_file2=$ac_default_prefix/etc/config.site +fi +for ac_site_file in "$ac_site_file1" "$ac_site_file2" +do + test "x$ac_site_file" = xNONE && continue + if test -r "$ac_site_file"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: loading site script $ac_site_file" >&5 +$as_echo "$as_me: loading site script $ac_site_file" >&6;} + sed 's/^/| /' "$ac_site_file" >&5 + . "$ac_site_file" + fi +done + +if test -r "$cache_file"; then + # Some versions of bash will fail to source /dev/null (special + # files actually), so we avoid doing that. + if test -f "$cache_file"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: loading cache $cache_file" >&5 +$as_echo "$as_me: loading cache $cache_file" >&6;} + case $cache_file in + [\\/]* | ?:[\\/]* ) . "$cache_file";; + *) . "./$cache_file";; + esac + fi +else + { $as_echo "$as_me:${as_lineno-$LINENO}: creating cache $cache_file" >&5 +$as_echo "$as_me: creating cache $cache_file" >&6;} + >$cache_file +fi + +# Check that the precious variables saved in the cache have kept the same +# value. +ac_cache_corrupted=false +for ac_var in $ac_precious_vars; do + eval ac_old_set=\$ac_cv_env_${ac_var}_set + eval ac_new_set=\$ac_env_${ac_var}_set + eval ac_old_val=\$ac_cv_env_${ac_var}_value + eval ac_new_val=\$ac_env_${ac_var}_value + case $ac_old_set,$ac_new_set in + set,) + { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&5 +$as_echo "$as_me: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&2;} + ac_cache_corrupted=: ;; + ,set) + { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was not set in the previous run" >&5 +$as_echo "$as_me: error: \`$ac_var' was not set in the previous run" >&2;} + ac_cache_corrupted=: ;; + ,);; + *) + if test "x$ac_old_val" != "x$ac_new_val"; then + # differences in whitespace do not lead to failure. 
+ ac_old_val_w=`echo x $ac_old_val` + ac_new_val_w=`echo x $ac_new_val` + if test "$ac_old_val_w" != "$ac_new_val_w"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' has changed since the previous run:" >&5 +$as_echo "$as_me: error: \`$ac_var' has changed since the previous run:" >&2;} + ac_cache_corrupted=: + else + { $as_echo "$as_me:${as_lineno-$LINENO}: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&5 +$as_echo "$as_me: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&2;} + eval $ac_var=\$ac_old_val + fi + { $as_echo "$as_me:${as_lineno-$LINENO}: former value: \`$ac_old_val'" >&5 +$as_echo "$as_me: former value: \`$ac_old_val'" >&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: current value: \`$ac_new_val'" >&5 +$as_echo "$as_me: current value: \`$ac_new_val'" >&2;} + fi;; + esac + # Pass precious variables to config.status. + if test "$ac_new_set" = set; then + case $ac_new_val in + *\'*) ac_arg=$ac_var=`$as_echo "$ac_new_val" | sed "s/'/'\\\\\\\\''/g"` ;; + *) ac_arg=$ac_var=$ac_new_val ;; + esac + case " $ac_configure_args " in + *" '$ac_arg' "*) ;; # Avoid dups. Use of quotes ensures accuracy. + *) as_fn_append ac_configure_args " '$ac_arg'" ;; + esac + fi +done +if $ac_cache_corrupted; then + { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: error: changes in the environment can compromise the build" >&5 +$as_echo "$as_me: error: changes in the environment can compromise the build" >&2;} + as_fn_error "run \`make distclean' and/or \`rm $cache_file' and start over" "$LINENO" 5 +fi +## -------------------- ## +## Main body of script. ## +## -------------------- ## + +ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu + + + +ac_config_headers="$ac_config_headers config.h" + + + +# with_target_subdir is used when configured as part of a GCC tree. +if test -n "${with_target_subdir}"; then + # Default to --enable-multilib +# Check whether --enable-multilib was given. +if test "${enable_multilib+set}" = set; then : + enableval=$enable_multilib; case "$enableval" in + yes) multilib=yes ;; + no) multilib=no ;; + *) as_fn_error "bad value $enableval for multilib option" "$LINENO" 5 ;; + esac +else + multilib=yes +fi + + +# We may get other options which we leave undocumented: +# --with-target-subdir, --with-multisrctop, --with-multisubdir +# See config-ml.in if you want the gory details. + +if test "$srcdir" = "."; then + if test "$with_target_subdir" != "."; then + multi_basedir="$srcdir/$with_multisrctop../.." + else + multi_basedir="$srcdir/$with_multisrctop.." + fi +else + multi_basedir="$srcdir/.." +fi + + +# Even if the default multilib is not a cross compilation, +# it may be that some of the other multilibs are. +if test $cross_compiling = no && test $multilib = yes \ + && test "x${with_multisubdir}" != x ; then + cross_compiling=maybe +fi + +ac_config_commands="$ac_config_commands default-1" + +fi + +ac_aux_dir= +for ac_dir in "$srcdir" "$srcdir/.." 
"$srcdir/../.."; do + for ac_t in install-sh install.sh shtool; do + if test -f "$ac_dir/$ac_t"; then + ac_aux_dir=$ac_dir + ac_install_sh="$ac_aux_dir/$ac_t -c" + break 2 + fi + done +done +if test -z "$ac_aux_dir"; then + as_fn_error "cannot find install-sh, install.sh, or shtool in \"$srcdir\" \"$srcdir/..\" \"$srcdir/../..\"" "$LINENO" 5 +fi + +# These three variables are undocumented and unsupported, +# and are intended to be withdrawn in a future Autoconf release. +# They can cause serious problems if a builder's source tree is in a directory +# whose full name contains unusual characters. +ac_config_guess="$SHELL $ac_aux_dir/config.guess" # Please don't use this var. +ac_config_sub="$SHELL $ac_aux_dir/config.sub" # Please don't use this var. +ac_configure="$SHELL $ac_aux_dir/configure" # Please don't use this var. + + +# Make sure we can run config.sub. +$SHELL "$ac_aux_dir/config.sub" sun4 >/dev/null 2>&1 || + as_fn_error "cannot run $SHELL $ac_aux_dir/config.sub" "$LINENO" 5 + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking build system type" >&5 +$as_echo_n "checking build system type... " >&6; } +if test "${ac_cv_build+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + ac_build_alias=$build_alias +test "x$ac_build_alias" = x && + ac_build_alias=`$SHELL "$ac_aux_dir/config.guess"` +test "x$ac_build_alias" = x && + as_fn_error "cannot guess build type; you must specify one" "$LINENO" 5 +ac_cv_build=`$SHELL "$ac_aux_dir/config.sub" $ac_build_alias` || + as_fn_error "$SHELL $ac_aux_dir/config.sub $ac_build_alias failed" "$LINENO" 5 + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_build" >&5 +$as_echo "$ac_cv_build" >&6; } +case $ac_cv_build in +*-*-*) ;; +*) as_fn_error "invalid value of canonical build" "$LINENO" 5;; +esac +build=$ac_cv_build +ac_save_IFS=$IFS; IFS='-' +set x $ac_cv_build +shift +build_cpu=$1 +build_vendor=$2 +shift; shift +# Remember, the first character of IFS is used to create $*, +# except with old shells: +build_os=$* +IFS=$ac_save_IFS +case $build_os in *\ *) build_os=`echo "$build_os" | sed 's/ /-/g'`;; esac + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking host system type" >&5 +$as_echo_n "checking host system type... " >&6; } +if test "${ac_cv_host+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + if test "x$host_alias" = x; then + ac_cv_host=$ac_cv_build +else + ac_cv_host=`$SHELL "$ac_aux_dir/config.sub" $host_alias` || + as_fn_error "$SHELL $ac_aux_dir/config.sub $host_alias failed" "$LINENO" 5 +fi + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_host" >&5 +$as_echo "$ac_cv_host" >&6; } +case $ac_cv_host in +*-*-*) ;; +*) as_fn_error "invalid value of canonical host" "$LINENO" 5;; +esac +host=$ac_cv_host +ac_save_IFS=$IFS; IFS='-' +set x $ac_cv_host +shift +host_cpu=$1 +host_vendor=$2 +shift; shift +# Remember, the first character of IFS is used to create $*, +# except with old shells: +host_os=$* +IFS=$ac_save_IFS +case $host_os in *\ *) host_os=`echo "$host_os" | sed 's/ /-/g'`;; esac + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking target system type" >&5 +$as_echo_n "checking target system type... 
" >&6; } +if test "${ac_cv_target+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + if test "x$target_alias" = x; then + ac_cv_target=$ac_cv_host +else + ac_cv_target=`$SHELL "$ac_aux_dir/config.sub" $target_alias` || + as_fn_error "$SHELL $ac_aux_dir/config.sub $target_alias failed" "$LINENO" 5 +fi + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_target" >&5 +$as_echo "$ac_cv_target" >&6; } +case $ac_cv_target in +*-*-*) ;; +*) as_fn_error "invalid value of canonical target" "$LINENO" 5;; +esac +target=$ac_cv_target +ac_save_IFS=$IFS; IFS='-' +set x $ac_cv_target +shift +target_cpu=$1 +target_vendor=$2 +shift; shift +# Remember, the first character of IFS is used to create $*, +# except with old shells: +target_os=$* +IFS=$ac_save_IFS +case $target_os in *\ *) target_os=`echo "$target_os" | sed 's/ /-/g'`;; esac + + +# The aliases save the names the user supplied, while $host etc. +# will get canonicalized. +test -n "$target_alias" && + test "$program_prefix$program_suffix$program_transform_name" = \ + NONENONEs,x,x, && + program_prefix=${target_alias}- + +target_alias=${target_alias-$host_alias} + +ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu +if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}gcc", so it can be a program name with args. +set dummy ${ac_tool_prefix}gcc; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if test "${ac_cv_prog_CC+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$CC"; then + ac_cv_prog_CC="$CC" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + ac_cv_prog_CC="${ac_tool_prefix}gcc" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +CC=$ac_cv_prog_CC +if test -n "$CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 +$as_echo "$CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + +fi +if test -z "$ac_cv_prog_CC"; then + ac_ct_CC=$CC + # Extract the first word of "gcc", so it can be a program name with args. +set dummy gcc; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if test "${ac_cv_prog_ac_ct_CC+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_CC"; then + ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + ac_cv_prog_ac_ct_CC="gcc" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +ac_ct_CC=$ac_cv_prog_ac_ct_CC +if test -n "$ac_ct_CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 +$as_echo "$ac_ct_CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + if test "x$ac_ct_CC" = x; then + CC="" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + CC=$ac_ct_CC + fi +else + CC="$ac_cv_prog_CC" +fi + +if test -z "$CC"; then + if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}cc", so it can be a program name with args. +set dummy ${ac_tool_prefix}cc; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if test "${ac_cv_prog_CC+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$CC"; then + ac_cv_prog_CC="$CC" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + ac_cv_prog_CC="${ac_tool_prefix}cc" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +CC=$ac_cv_prog_CC +if test -n "$CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 +$as_echo "$CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + fi +fi +if test -z "$CC"; then + # Extract the first word of "cc", so it can be a program name with args. +set dummy cc; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if test "${ac_cv_prog_CC+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$CC"; then + ac_cv_prog_CC="$CC" # Let the user override the test. +else + ac_prog_rejected=no +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if test "$as_dir/$ac_word$ac_exec_ext" = "/usr/ucb/cc"; then + ac_prog_rejected=yes + continue + fi + ac_cv_prog_CC="cc" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +if test $ac_prog_rejected = yes; then + # We found a bogon in the path, so make sure we never use it. + set dummy $ac_cv_prog_CC + shift + if test $# != 0; then + # We chose a different compiler from the bogus one. + # However, it has the same basename, so the bogon will be chosen + # first if we set CC to just the basename; use the full file name. 
+ shift + ac_cv_prog_CC="$as_dir/$ac_word${1+' '}$@" + fi +fi +fi +fi +CC=$ac_cv_prog_CC +if test -n "$CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 +$as_echo "$CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + +fi +if test -z "$CC"; then + if test -n "$ac_tool_prefix"; then + for ac_prog in cl.exe + do + # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. +set dummy $ac_tool_prefix$ac_prog; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if test "${ac_cv_prog_CC+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$CC"; then + ac_cv_prog_CC="$CC" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + ac_cv_prog_CC="$ac_tool_prefix$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +CC=$ac_cv_prog_CC +if test -n "$CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 +$as_echo "$CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + test -n "$CC" && break + done +fi +if test -z "$CC"; then + ac_ct_CC=$CC + for ac_prog in cl.exe +do + # Extract the first word of "$ac_prog", so it can be a program name with args. +set dummy $ac_prog; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if test "${ac_cv_prog_ac_ct_CC+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_CC"; then + ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + ac_cv_prog_ac_ct_CC="$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +ac_ct_CC=$ac_cv_prog_ac_ct_CC +if test -n "$ac_ct_CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 +$as_echo "$ac_ct_CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + test -n "$ac_ct_CC" && break +done + + if test "x$ac_ct_CC" = x; then + CC="" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + CC=$ac_ct_CC + fi +fi + +fi + + +test -z "$CC" && { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error "no acceptable C compiler found in \$PATH +See \`config.log' for more details." "$LINENO" 5; } + +# Provide some information about the compiler. 
+$as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler version" >&5 +set X $ac_compile +ac_compiler=$2 +for ac_option in --version -v -V -qversion; do + { { ac_try="$ac_compiler $ac_option >&5" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_compiler $ac_option >&5") 2>conftest.err + ac_status=$? + if test -s conftest.err; then + sed '10a\ +... rest of stderr output deleted ... + 10q' conftest.err >conftest.er1 + cat conftest.er1 >&5 + rm -f conftest.er1 conftest.err + fi + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } +done + +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include +int +main () +{ +FILE *f = fopen ("conftest.out", "w"); + return ferror (f) || fclose (f) != 0; + + ; + return 0; +} +_ACEOF +ac_clean_files_save=$ac_clean_files +ac_clean_files="$ac_clean_files a.out a.out.dSYM a.exe b.out conftest.out" +# Try to create an executable without -o first, disregard a.out. +# It will help us diagnose broken compilers, and finding out an intuition +# of exeext. +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler default output file name" >&5 +$as_echo_n "checking for C compiler default output file name... " >&6; } +ac_link_default=`$as_echo "$ac_link" | sed 's/ -o *conftest[^ ]*//'` + +# The possible output files: +ac_files="a.out conftest.exe conftest a.exe a_out.exe b.out conftest.*" + +ac_rmfiles= +for ac_file in $ac_files +do + case $ac_file in + *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;; + * ) ac_rmfiles="$ac_rmfiles $ac_file";; + esac +done +rm -f $ac_rmfiles + +if { { ac_try="$ac_link_default" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_link_default") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then : + # Autoconf-2.13 could set the ac_cv_exeext variable to `no'. +# So ignore a value of `no', otherwise this would lead to `EXEEXT = no' +# in a Makefile. We should not override ac_cv_exeext if it was cached, +# so that the user can short-circuit this test for compilers unknown to +# Autoconf. +for ac_file in $ac_files '' +do + test -f "$ac_file" || continue + case $ac_file in + *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) + ;; + [ab].out ) + # We found the default executable, but exeext='' is most + # certainly right. + break;; + *.* ) + if test "${ac_cv_exeext+set}" = set && test "$ac_cv_exeext" != no; + then :; else + ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'` + fi + # We set ac_cv_exeext here because the later test for it is not + # safe: cross compilers may not add the suffix if given an `-o' + # argument, so we may need to know it at that point already. + # Even if this section looks crufty: it has the advantage of + # actually working. 
+ break;; + * ) + break;; + esac +done +test "$ac_cv_exeext" = no && ac_cv_exeext= + +else + ac_file='' +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_file" >&5 +$as_echo "$ac_file" >&6; } +if test -z "$ac_file"; then : + $as_echo "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 + +{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} +{ as_fn_set_status 77 +as_fn_error "C compiler cannot create executables +See \`config.log' for more details." "$LINENO" 5; }; } +fi +ac_exeext=$ac_cv_exeext + +# Check that the compiler produces executables we can run. If not, either +# the compiler is broken, or we cross compile. +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the C compiler works" >&5 +$as_echo_n "checking whether the C compiler works... " >&6; } +# If not cross compiling, check that we can run a simple program. +if test "$cross_compiling" != yes; then + if { ac_try='./$ac_file' + { { case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_try") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; }; then + cross_compiling=no + else + if test "$cross_compiling" = maybe; then + cross_compiling=yes + else + { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error "cannot run C compiled programs. +If you meant to cross compile, use \`--host'. +See \`config.log' for more details." "$LINENO" 5; } + fi + fi +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } + +rm -f -r a.out a.out.dSYM a.exe conftest$ac_cv_exeext b.out conftest.out +ac_clean_files=$ac_clean_files_save +# Check that the compiler produces executables we can run. If not, either +# the compiler is broken, or we cross compile. +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are cross compiling" >&5 +$as_echo_n "checking whether we are cross compiling... " >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $cross_compiling" >&5 +$as_echo "$cross_compiling" >&6; } + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of executables" >&5 +$as_echo_n "checking for suffix of executables... " >&6; } +if { { ac_try="$ac_link" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_link") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then : + # If both `conftest.exe' and `conftest' are `present' (well, observable) +# catch `conftest.exe'. For instance with Cygwin, `ls conftest' will +# work properly (i.e., refer to `conftest.exe'), while it won't with +# `rm'. 
+for ac_file in conftest.exe conftest conftest.*; do + test -f "$ac_file" || continue + case $ac_file in + *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;; + *.* ) ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'` + break;; + * ) break;; + esac +done +else + { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error "cannot compute suffix of executables: cannot compile and link +See \`config.log' for more details." "$LINENO" 5; } +fi +rm -f conftest$ac_cv_exeext +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_exeext" >&5 +$as_echo "$ac_cv_exeext" >&6; } + +rm -f conftest.$ac_ext +EXEEXT=$ac_cv_exeext +ac_exeext=$EXEEXT +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of object files" >&5 +$as_echo_n "checking for suffix of object files... " >&6; } +if test "${ac_cv_objext+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ + + ; + return 0; +} +_ACEOF +rm -f conftest.o conftest.obj +if { { ac_try="$ac_compile" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_compile") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then : + for ac_file in conftest.o conftest.obj conftest.*; do + test -f "$ac_file" || continue; + case $ac_file in + *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM ) ;; + *) ac_cv_objext=`expr "$ac_file" : '.*\.\(.*\)'` + break;; + esac +done +else + $as_echo "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 + +{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error "cannot compute suffix of object files: cannot compile +See \`config.log' for more details." "$LINENO" 5; } +fi +rm -f conftest.$ac_cv_objext conftest.$ac_ext +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_objext" >&5 +$as_echo "$ac_cv_objext" >&6; } +OBJEXT=$ac_cv_objext +ac_objext=$OBJEXT +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C compiler" >&5 +$as_echo_n "checking whether we are using the GNU C compiler... " >&6; } +if test "${ac_cv_c_compiler_gnu+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ +#ifndef __GNUC__ + choke me +#endif + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_compiler_gnu=yes +else + ac_compiler_gnu=no +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +ac_cv_c_compiler_gnu=$ac_compiler_gnu + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_compiler_gnu" >&5 +$as_echo "$ac_cv_c_compiler_gnu" >&6; } +if test $ac_compiler_gnu = yes; then + GCC=yes +else + GCC= +fi +ac_test_CFLAGS=${CFLAGS+set} +ac_save_CFLAGS=$CFLAGS +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC accepts -g" >&5 +$as_echo_n "checking whether $CC accepts -g... " >&6; } +if test "${ac_cv_prog_cc_g+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + ac_save_c_werror_flag=$ac_c_werror_flag + ac_c_werror_flag=yes + ac_cv_prog_cc_g=no + CFLAGS="-g" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. 
*/
+
+int
+main ()
+{
+
+ ;
+ return 0;
+}
+_ACEOF
+if ac_fn_c_try_compile "$LINENO"; then :
+ ac_cv_prog_cc_g=yes
+else
+ CFLAGS=""
+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h. */
+
+int
+main ()
+{
+
+ ;
+ return 0;
+}
+_ACEOF
+if ac_fn_c_try_compile "$LINENO"; then :
+
+else
+ ac_c_werror_flag=$ac_save_c_werror_flag
+ CFLAGS="-g"
+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h. */
+
+int
+main ()
+{
+
+ ;
+ return 0;
+}
+_ACEOF
+if ac_fn_c_try_compile "$LINENO"; then :
+ ac_cv_prog_cc_g=yes
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+ ac_c_werror_flag=$ac_save_c_werror_flag
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_g" >&5
+$as_echo "$ac_cv_prog_cc_g" >&6; }
+if test "$ac_test_CFLAGS" = set; then
+ CFLAGS=$ac_save_CFLAGS
+elif test $ac_cv_prog_cc_g = yes; then
+ if test "$GCC" = yes; then
+ CFLAGS="-g -O2"
+ else
+ CFLAGS="-g"
+ fi
+else
+ if test "$GCC" = yes; then
+ CFLAGS="-O2"
+ else
+ CFLAGS=
+ fi
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $CC option to accept ISO C89" >&5
+$as_echo_n "checking for $CC option to accept ISO C89... " >&6; }
+if test "${ac_cv_prog_cc_c89+set}" = set; then :
+ $as_echo_n "(cached) " >&6
+else
+ ac_cv_prog_cc_c89=no
+ac_save_CC=$CC
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h. */
+#include <stdarg.h>
+#include <stdio.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+/* Most of the following tests are stolen from RCS 5.7's src/conf.sh. */
+struct buf { int x; };
+FILE * (*rcsopen) (struct buf *, struct stat *, int);
+static char *e (p, i)
+ char **p;
+ int i;
+{
+ return p[i];
+}
+static char *f (char * (*g) (char **, int), char **p, ...)
+{
+ char *s;
+ va_list v;
+ va_start (v,p);
+ s = g (p, va_arg (v,int));
+ va_end (v);
+ return s;
+}
+
+/* OSF 4.0 Compaq cc is some sort of almost-ANSI by default. It has
+ function prototypes and stuff, but not '\xHH' hex character constants.
+ These don't provoke an error unfortunately, instead are silently treated
+ as 'x'. The following induces an error, until -std is added to get
+ proper ANSI mode. Curiously '\x00'!='x' always comes out true, for an
+ array size at least. It's necessary to write '\x00'==0 to get something
+ that's true only with -std. */
+int osf4_cc_array ['\x00' == 0 ? 1 : -1];
+
+/* IBM C 6 for AIX is almost-ANSI by default, but it replaces macro parameters
+ inside strings and character constants. */
+#define FOO(x) 'x'
+int xlc6_cc_array[FOO(a) == 'x' ?
1 : -1];
+
+int test (int i, double x);
+struct s1 {int (*f) (int a);};
+struct s2 {int (*f) (double a);};
+int pairnames (int, char **, FILE *(*)(struct buf *, struct stat *, int), int, int);
+int argc;
+char **argv;
+int
+main ()
+{
+return f (e, argv, 0) != argv[0] || f (e, argv, 1) != argv[1];
+ ;
+ return 0;
+}
+_ACEOF
+for ac_arg in '' -qlanglvl=extc89 -qlanglvl=ansi -std \
+ -Ae "-Aa -D_HPUX_SOURCE" "-Xc -D__EXTENSIONS__"
+do
+ CC="$ac_save_CC $ac_arg"
+ if ac_fn_c_try_compile "$LINENO"; then :
+ ac_cv_prog_cc_c89=$ac_arg
+fi
+rm -f core conftest.err conftest.$ac_objext
+ test "x$ac_cv_prog_cc_c89" != "xno" && break
+done
+rm -f conftest.$ac_ext
+CC=$ac_save_CC
+
+fi
+# AC_CACHE_VAL
+case "x$ac_cv_prog_cc_c89" in
+ x)
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: none needed" >&5
+$as_echo "none needed" >&6; } ;;
+ xno)
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: unsupported" >&5
+$as_echo "unsupported" >&6; } ;;
+ *)
+ CC="$CC $ac_cv_prog_cc_c89"
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_c89" >&5
+$as_echo "$ac_cv_prog_cc_c89" >&6; } ;;
+esac
+if test "x$ac_cv_prog_cc_c89" != xno; then :
+
+fi
+
+ac_ext=c
+ac_cpp='$CPP $CPPFLAGS'
+ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
+ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
+ac_compiler_gnu=$ac_cv_c_compiler_gnu
+
+
+ac_ext=c
+ac_cpp='$CPP $CPPFLAGS'
+ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
+ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
+ac_compiler_gnu=$ac_cv_c_compiler_gnu
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to run the C preprocessor" >&5
+$as_echo_n "checking how to run the C preprocessor... " >&6; }
+# On Suns, sometimes $CPP names a directory.
+if test -n "$CPP" && test -d "$CPP"; then
+ CPP=
+fi
+if test -z "$CPP"; then
+ if test "${ac_cv_prog_CPP+set}" = set; then :
+ $as_echo_n "(cached) " >&6
+else
+ # Double quotes because CPP needs to be expanded
+ for CPP in "$CC -E" "$CC -E -traditional-cpp" "/lib/cpp"
+ do
+ ac_preproc_ok=false
+for ac_c_preproc_warn_flag in '' yes
+do
+ # Use a header file that comes with gcc, so configuring glibc
+ # with a fresh cross-compiler works.
+ # Prefer <limits.h> to <assert.h> if __STDC__ is defined, since
+ # <limits.h> exists even on freestanding compilers.
+ # On the NeXT, cc -E runs the code through the compiler's parser,
+ # not just through cpp. "Syntax error" is here to catch this case.
+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h. */
+#ifdef __STDC__
+# include <limits.h>
+#else
+# include <assert.h>
+#endif
+ Syntax error
+_ACEOF
+if ac_fn_c_try_cpp "$LINENO"; then :
+
+else
+ # Broken: fails on valid input.
+continue
+fi
+rm -f conftest.err conftest.$ac_ext
+
+ # OK, works on sane cases. Now check whether nonexistent headers
+ # can be detected and how.
+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h. */
+#include <ac_nonexistent.h>
+_ACEOF
+if ac_fn_c_try_cpp "$LINENO"; then :
+ # Broken: success on invalid input.
+continue
+else
+ # Passes both tests.
+ac_preproc_ok=:
+break
+fi
+rm -f conftest.err conftest.$ac_ext
+
+done
+# Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped.
+rm -f conftest.err conftest.$ac_ext
+if $ac_preproc_ok; then :
+ break
+fi
+
+ done
+ ac_cv_prog_CPP=$CPP
+
+fi
+ CPP=$ac_cv_prog_CPP
+else
+ ac_cv_prog_CPP=$CPP
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $CPP" >&5
+$as_echo "$CPP" >&6; }
+ac_preproc_ok=false
+for ac_c_preproc_warn_flag in '' yes
+do
+ # Use a header file that comes with gcc, so configuring glibc
+ # with a fresh cross-compiler works.
+ # Prefer <limits.h> to <assert.h> if __STDC__ is defined, since
+ # <limits.h> exists even on freestanding compilers.
+ # On the NeXT, cc -E runs the code through the compiler's parser,
+ # not just through cpp. "Syntax error" is here to catch this case.
+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h. */
+#ifdef __STDC__
+# include <limits.h>
+#else
+# include <assert.h>
+#endif
+ Syntax error
+_ACEOF
+if ac_fn_c_try_cpp "$LINENO"; then :
+
+else
+ # Broken: fails on valid input.
+continue
+fi
+rm -f conftest.err conftest.$ac_ext
+
+ # OK, works on sane cases. Now check whether nonexistent headers
+ # can be detected and how.
+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h. */
+#include <ac_nonexistent.h>
+_ACEOF
+if ac_fn_c_try_cpp "$LINENO"; then :
+ # Broken: success on invalid input.
+continue
+else
+ # Passes both tests.
+ac_preproc_ok=:
+break
+fi
+rm -f conftest.err conftest.$ac_ext
+
+done
+# Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped.
+rm -f conftest.err conftest.$ac_ext
+if $ac_preproc_ok; then :
+
+else
+ { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
+$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
+as_fn_error "C preprocessor \"$CPP\" fails sanity check
+See \`config.log' for more details." "$LINENO" 5; }
+fi
+
+ac_ext=c
+ac_cpp='$CPP $CPPFLAGS'
+ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
+ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
+ac_compiler_gnu=$ac_cv_c_compiler_gnu
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for grep that handles long lines and -e" >&5
+$as_echo_n "checking for grep that handles long lines and -e... " >&6; }
+if test "${ac_cv_path_GREP+set}" = set; then :
+ $as_echo_n "(cached) " >&6
+else
+ if test -z "$GREP"; then
+ ac_path_GREP_found=false
+ # Loop through the user's path and test for each of PROGNAME-LIST
+ as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin
+do
+ IFS=$as_save_IFS
+ test -z "$as_dir" && as_dir=.
+ for ac_prog in grep ggrep; do
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ ac_path_GREP="$as_dir/$ac_prog$ac_exec_ext"
+ { test -f "$ac_path_GREP" && $as_test_x "$ac_path_GREP"; } || continue
+# Check for GNU ac_path_GREP and select it if it is found.
+ # Check for GNU $ac_path_GREP +case `"$ac_path_GREP" --version 2>&1` in +*GNU*) + ac_cv_path_GREP="$ac_path_GREP" ac_path_GREP_found=:;; +*) + ac_count=0 + $as_echo_n 0123456789 >"conftest.in" + while : + do + cat "conftest.in" "conftest.in" >"conftest.tmp" + mv "conftest.tmp" "conftest.in" + cp "conftest.in" "conftest.nl" + $as_echo 'GREP' >> "conftest.nl" + "$ac_path_GREP" -e 'GREP$' -e '-(cannot match)-' < "conftest.nl" >"conftest.out" 2>/dev/null || break + diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break + as_fn_arith $ac_count + 1 && ac_count=$as_val + if test $ac_count -gt ${ac_path_GREP_max-0}; then + # Best one so far, save it but keep looking for a better one + ac_cv_path_GREP="$ac_path_GREP" + ac_path_GREP_max=$ac_count + fi + # 10*(2^10) chars as input seems more than enough + test $ac_count -gt 10 && break + done + rm -f conftest.in conftest.tmp conftest.nl conftest.out;; +esac + + $ac_path_GREP_found && break 3 + done + done + done +IFS=$as_save_IFS + if test -z "$ac_cv_path_GREP"; then + as_fn_error "no acceptable grep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5 + fi +else + ac_cv_path_GREP=$GREP +fi + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_GREP" >&5 +$as_echo "$ac_cv_path_GREP" >&6; } + GREP="$ac_cv_path_GREP" + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for egrep" >&5 +$as_echo_n "checking for egrep... " >&6; } +if test "${ac_cv_path_EGREP+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + if echo a | $GREP -E '(a|b)' >/dev/null 2>&1 + then ac_cv_path_EGREP="$GREP -E" + else + if test -z "$EGREP"; then + ac_path_EGREP_found=false + # Loop through the user's path and test for each of PROGNAME-LIST + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_prog in egrep; do + for ac_exec_ext in '' $ac_executable_extensions; do + ac_path_EGREP="$as_dir/$ac_prog$ac_exec_ext" + { test -f "$ac_path_EGREP" && $as_test_x "$ac_path_EGREP"; } || continue +# Check for GNU ac_path_EGREP and select it if it is found. 
+ # Check for GNU $ac_path_EGREP
+case `"$ac_path_EGREP" --version 2>&1` in
+*GNU*)
+ ac_cv_path_EGREP="$ac_path_EGREP" ac_path_EGREP_found=:;;
+*)
+ ac_count=0
+ $as_echo_n 0123456789 >"conftest.in"
+ while :
+ do
+ cat "conftest.in" "conftest.in" >"conftest.tmp"
+ mv "conftest.tmp" "conftest.in"
+ cp "conftest.in" "conftest.nl"
+ $as_echo 'EGREP' >> "conftest.nl"
+ "$ac_path_EGREP" 'EGREP$' < "conftest.nl" >"conftest.out" 2>/dev/null || break
+ diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break
+ as_fn_arith $ac_count + 1 && ac_count=$as_val
+ if test $ac_count -gt ${ac_path_EGREP_max-0}; then
+ # Best one so far, save it but keep looking for a better one
+ ac_cv_path_EGREP="$ac_path_EGREP"
+ ac_path_EGREP_max=$ac_count
+ fi
+ # 10*(2^10) chars as input seems more than enough
+ test $ac_count -gt 10 && break
+ done
+ rm -f conftest.in conftest.tmp conftest.nl conftest.out;;
+esac
+
+ $ac_path_EGREP_found && break 3
+ done
+ done
+ done
+IFS=$as_save_IFS
+ if test -z "$ac_cv_path_EGREP"; then
+ as_fn_error "no acceptable egrep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5
+ fi
+else
+ ac_cv_path_EGREP=$EGREP
+fi
+
+ fi
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_EGREP" >&5
+$as_echo "$ac_cv_path_EGREP" >&6; }
+ EGREP="$ac_cv_path_EGREP"
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for ANSI C header files" >&5
+$as_echo_n "checking for ANSI C header files... " >&6; }
+if test "${ac_cv_header_stdc+set}" = set; then :
+ $as_echo_n "(cached) " >&6
+else
+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h. */
+#include <stdlib.h>
+#include <stdarg.h>
+#include <string.h>
+#include <float.h>
+
+int
+main ()
+{
+
+ ;
+ return 0;
+}
+_ACEOF
+if ac_fn_c_try_compile "$LINENO"; then :
+ ac_cv_header_stdc=yes
+else
+ ac_cv_header_stdc=no
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+
+if test $ac_cv_header_stdc = yes; then
+ # SunOS 4.x string.h does not declare mem*, contrary to ANSI.
+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h. */
+#include <string.h>
+
+_ACEOF
+if (eval "$ac_cpp conftest.$ac_ext") 2>&5 |
+ $EGREP "memchr" >/dev/null 2>&1; then :
+
+else
+ ac_cv_header_stdc=no
+fi
+rm -f conftest*
+
+fi
+
+if test $ac_cv_header_stdc = yes; then
+ # ISC 2.0.2 stdlib.h does not declare free, contrary to ANSI.
+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h. */
+#include <stdlib.h>
+
+_ACEOF
+if (eval "$ac_cpp conftest.$ac_ext") 2>&5 |
+ $EGREP "free" >/dev/null 2>&1; then :
+
+else
+ ac_cv_header_stdc=no
+fi
+rm -f conftest*
+
+fi
+
+if test $ac_cv_header_stdc = yes; then
+ # /bin/cc in Irix-4.0.5 gets non-ANSI ctype macros unless using -ansi.
+ if test "$cross_compiling" = yes; then :
+ :
+else
+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h. */
+#include <ctype.h>
+#include <stdlib.h>
+#if ((' ' & 0x0FF) == 0x020)
+# define ISLOWER(c) ('a' <= (c) && (c) <= 'z')
+# define TOUPPER(c) (ISLOWER(c) ? 'A' + ((c) - 'a') : (c))
+#else
+# define ISLOWER(c) \
+ (('a' <= (c) && (c) <= 'i') \
+ || ('j' <= (c) && (c) <= 'r') \
+ || ('s' <= (c) && (c) <= 'z'))
+# define TOUPPER(c) (ISLOWER(c) ?
((c) | 0x40) : (c)) +#endif + +#define XOR(e, f) (((e) && !(f)) || (!(e) && (f))) +int +main () +{ + int i; + for (i = 0; i < 256; i++) + if (XOR (islower (i), ISLOWER (i)) + || toupper (i) != TOUPPER (i)) + return 2; + return 0; +} +_ACEOF +if ac_fn_c_try_run "$LINENO"; then : + +else + ac_cv_header_stdc=no +fi +rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ + conftest.$ac_objext conftest.beam conftest.$ac_ext +fi + +fi +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_header_stdc" >&5 +$as_echo "$ac_cv_header_stdc" >&6; } +if test $ac_cv_header_stdc = yes; then + +$as_echo "#define STDC_HEADERS 1" >>confdefs.h + +fi + +# On IRIX 5.3, sys/types and inttypes.h are conflicting. +for ac_header in sys/types.h sys/stat.h stdlib.h string.h memory.h strings.h \ + inttypes.h stdint.h unistd.h +do : + as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` +ac_fn_c_check_header_compile "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default +" +eval as_val=\$$as_ac_Header + if test "x$as_val" = x""yes; then : + cat >>confdefs.h <<_ACEOF +#define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1 +_ACEOF + +fi + +done + + + + ac_fn_c_check_header_mongrel "$LINENO" "minix/config.h" "ac_cv_header_minix_config_h" "$ac_includes_default" +if test "x$ac_cv_header_minix_config_h" = x""yes; then : + MINIX=yes +else + MINIX= +fi + + + if test "$MINIX" = yes; then + +$as_echo "#define _POSIX_SOURCE 1" >>confdefs.h + + +$as_echo "#define _POSIX_1_SOURCE 2" >>confdefs.h + + +$as_echo "#define _MINIX 1" >>confdefs.h + + fi + + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether it is safe to define __EXTENSIONS__" >&5 +$as_echo_n "checking whether it is safe to define __EXTENSIONS__... " >&6; } +if test "${ac_cv_safe_to_define___extensions__+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +# define __EXTENSIONS__ 1 + $ac_includes_default +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_cv_safe_to_define___extensions__=yes +else + ac_cv_safe_to_define___extensions__=no +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_safe_to_define___extensions__" >&5 +$as_echo "$ac_cv_safe_to_define___extensions__" >&6; } + test $ac_cv_safe_to_define___extensions__ = yes && + $as_echo "#define __EXTENSIONS__ 1" >>confdefs.h + + $as_echo "#define _ALL_SOURCE 1" >>confdefs.h + + $as_echo "#define _GNU_SOURCE 1" >>confdefs.h + + $as_echo "#define _POSIX_PTHREAD_SEMANTICS 1" >>confdefs.h + + $as_echo "#define _TANDEM_SOURCE 1" >>confdefs.h + + + +libtool_VERSION=1:0:0 + + +# 1.11.1: Require that version of automake. +# foreign: Don't require README, INSTALL, NEWS, etc. +# no-define: Don't define PACKAGE and VERSION. +# no-dependencies: Don't generate automatic dependencies. +# (because it breaks when using bootstrap-lean, since some of the +# headers are gone at "make install" time). +# -Wall: Issue all automake warnings. +# -Wno-portability: Don't warn about constructs supported by GNU make. +# (because GCC requires GNU make anyhow). +am__api_version='1.11' + +# Find a good install program. We prefer a C program (faster), +# so one script is as good as another. 
But avoid the broken or +# incompatible versions: +# SysV /etc/install, /usr/sbin/install +# SunOS /usr/etc/install +# IRIX /sbin/install +# AIX /bin/install +# AmigaOS /C/install, which installs bootblocks on floppy discs +# AIX 4 /usr/bin/installbsd, which doesn't work without a -g flag +# AFS /usr/afsws/bin/install, which mishandles nonexistent args +# SVR4 /usr/ucb/install, which tries to use the nonexistent group "staff" +# OS/2's system install, which has a completely different semantic +# ./install, which can be erroneously created by make from ./install.sh. +# Reject install programs that cannot install multiple files. +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for a BSD-compatible install" >&5 +$as_echo_n "checking for a BSD-compatible install... " >&6; } +if test -z "$INSTALL"; then +if test "${ac_cv_path_install+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + # Account for people who put trailing slashes in PATH elements. +case $as_dir/ in #(( + ./ | .// | /[cC]/* | \ + /etc/* | /usr/sbin/* | /usr/etc/* | /sbin/* | /usr/afsws/bin/* | \ + ?:[\\/]os2[\\/]install[\\/]* | ?:[\\/]OS2[\\/]INSTALL[\\/]* | \ + /usr/ucb/* ) ;; + *) + # OSF1 and SCO ODT 3.0 have their own names for install. + # Don't use installbsd from OSF since it installs stuff as root + # by default. + for ac_prog in ginstall scoinst install; do + for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_prog$ac_exec_ext" && $as_test_x "$as_dir/$ac_prog$ac_exec_ext"; }; then + if test $ac_prog = install && + grep dspmsg "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then + # AIX install. It has an incompatible calling convention. + : + elif test $ac_prog = install && + grep pwplus "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then + # program-specific install script used by HP pwplus--don't use. + : + else + rm -rf conftest.one conftest.two conftest.dir + echo one > conftest.one + echo two > conftest.two + mkdir conftest.dir + if "$as_dir/$ac_prog$ac_exec_ext" -c conftest.one conftest.two "`pwd`/conftest.dir" && + test -s conftest.one && test -s conftest.two && + test -s conftest.dir/conftest.one && + test -s conftest.dir/conftest.two + then + ac_cv_path_install="$as_dir/$ac_prog$ac_exec_ext -c" + break 3 + fi + fi + fi + done + done + ;; +esac + + done +IFS=$as_save_IFS + +rm -rf conftest.one conftest.two conftest.dir + +fi + if test "${ac_cv_path_install+set}" = set; then + INSTALL=$ac_cv_path_install + else + # As a last resort, use the slow shell script. Don't cache a + # value for INSTALL within a source directory, because that will + # break other packages using the cache if that directory is + # removed, or if the value is a relative name. + INSTALL=$ac_install_sh + fi +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $INSTALL" >&5 +$as_echo "$INSTALL" >&6; } + +# Use test -z because SunOS4 sh mishandles braces in ${var-val}. +# It thinks the first close brace ends the variable substitution. +test -z "$INSTALL_PROGRAM" && INSTALL_PROGRAM='${INSTALL}' + +test -z "$INSTALL_SCRIPT" && INSTALL_SCRIPT='${INSTALL}' + +test -z "$INSTALL_DATA" && INSTALL_DATA='${INSTALL} -m 644' + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether build environment is sane" >&5 +$as_echo_n "checking whether build environment is sane... 
" >&6; } +# Just in case +sleep 1 +echo timestamp > conftest.file +# Reject unsafe characters in $srcdir or the absolute working directory +# name. Accept space and tab only in the latter. +am_lf=' +' +case `pwd` in + *[\\\"\#\$\&\'\`$am_lf]*) + as_fn_error "unsafe absolute working directory name" "$LINENO" 5;; +esac +case $srcdir in + *[\\\"\#\$\&\'\`$am_lf\ \ ]*) + as_fn_error "unsafe srcdir value: \`$srcdir'" "$LINENO" 5;; +esac + +# Do `set' in a subshell so we don't clobber the current shell's +# arguments. Must try -L first in case configure is actually a +# symlink; some systems play weird games with the mod time of symlinks +# (eg FreeBSD returns the mod time of the symlink's containing +# directory). +if ( + set X `ls -Lt "$srcdir/configure" conftest.file 2> /dev/null` + if test "$*" = "X"; then + # -L didn't work. + set X `ls -t "$srcdir/configure" conftest.file` + fi + rm -f conftest.file + if test "$*" != "X $srcdir/configure conftest.file" \ + && test "$*" != "X conftest.file $srcdir/configure"; then + + # If neither matched, then we have a broken ls. This can happen + # if, for instance, CONFIG_SHELL is bash and it inherits a + # broken ls alias from the environment. This has actually + # happened. Such a system could not be considered "sane". + as_fn_error "ls -t appears to fail. Make sure there is not a broken +alias in your environment" "$LINENO" 5 + fi + + test "$2" = conftest.file + ) +then + # Ok. + : +else + as_fn_error "newly created file is older than distributed files! +Check your system clock" "$LINENO" 5 +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } +test "$program_prefix" != NONE && + program_transform_name="s&^&$program_prefix&;$program_transform_name" +# Use a double $ so make ignores it. +test "$program_suffix" != NONE && + program_transform_name="s&\$&$program_suffix&;$program_transform_name" +# Double any \ or $. +# By default was `s,x,x', remove it if useless. +ac_script='s/[\\$]/&&/g;s/;s,x,x,$//' +program_transform_name=`$as_echo "$program_transform_name" | sed "$ac_script"` + +# expand $ac_aux_dir to an absolute path +am_aux_dir=`cd $ac_aux_dir && pwd` + +if test x"${MISSING+set}" != xset; then + case $am_aux_dir in + *\ * | *\ *) + MISSING="\${SHELL} \"$am_aux_dir/missing\"" ;; + *) + MISSING="\${SHELL} $am_aux_dir/missing" ;; + esac +fi +# Use eval to expand $SHELL +if eval "$MISSING --run true"; then + am_missing_run="$MISSING --run " +else + am_missing_run= + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: \`missing' script is too old or missing" >&5 +$as_echo "$as_me: WARNING: \`missing' script is too old or missing" >&2;} +fi + +if test x"${install_sh}" != xset; then + case $am_aux_dir in + *\ * | *\ *) + install_sh="\${SHELL} '$am_aux_dir/install-sh'" ;; + *) + install_sh="\${SHELL} $am_aux_dir/install-sh" + esac +fi + +# Installed binaries are usually stripped using `strip' when the user +# run `make install-strip'. However `strip' might not be the right +# tool to use in cross-compilation environments, therefore Automake +# will honor the `STRIP' environment variable to overrule this program. +if test "$cross_compiling" != no; then + if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args. +set dummy ${ac_tool_prefix}strip; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... 
" >&6; } +if test "${ac_cv_prog_STRIP+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$STRIP"; then + ac_cv_prog_STRIP="$STRIP" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + ac_cv_prog_STRIP="${ac_tool_prefix}strip" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +STRIP=$ac_cv_prog_STRIP +if test -n "$STRIP"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $STRIP" >&5 +$as_echo "$STRIP" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + +fi +if test -z "$ac_cv_prog_STRIP"; then + ac_ct_STRIP=$STRIP + # Extract the first word of "strip", so it can be a program name with args. +set dummy strip; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if test "${ac_cv_prog_ac_ct_STRIP+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_STRIP"; then + ac_cv_prog_ac_ct_STRIP="$ac_ct_STRIP" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + ac_cv_prog_ac_ct_STRIP="strip" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +ac_ct_STRIP=$ac_cv_prog_ac_ct_STRIP +if test -n "$ac_ct_STRIP"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_STRIP" >&5 +$as_echo "$ac_ct_STRIP" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + if test "x$ac_ct_STRIP" = x; then + STRIP=":" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + STRIP=$ac_ct_STRIP + fi +else + STRIP="$ac_cv_prog_STRIP" +fi + +fi +INSTALL_STRIP_PROGRAM="\$(install_sh) -c -s" + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for a thread-safe mkdir -p" >&5 +$as_echo_n "checking for a thread-safe mkdir -p... " >&6; } +if test -z "$MKDIR_P"; then + if test "${ac_cv_path_mkdir+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH$PATH_SEPARATOR/opt/sfw/bin +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_prog in mkdir gmkdir; do + for ac_exec_ext in '' $ac_executable_extensions; do + { test -f "$as_dir/$ac_prog$ac_exec_ext" && $as_test_x "$as_dir/$ac_prog$ac_exec_ext"; } || continue + case `"$as_dir/$ac_prog$ac_exec_ext" --version 2>&1` in #( + 'mkdir (GNU coreutils) '* | \ + 'mkdir (coreutils) '* | \ + 'mkdir (fileutils) '4.1*) + ac_cv_path_mkdir=$as_dir/$ac_prog$ac_exec_ext + break 3;; + esac + done + done + done +IFS=$as_save_IFS + +fi + + if test "${ac_cv_path_mkdir+set}" = set; then + MKDIR_P="$ac_cv_path_mkdir -p" + else + # As a last resort, use the slow shell script. 
Don't cache a + # value for MKDIR_P within a source directory, because that will + # break other packages using the cache if that directory is + # removed, or if the value is a relative name. + test -d ./--version && rmdir ./--version + MKDIR_P="$ac_install_sh -d" + fi +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $MKDIR_P" >&5 +$as_echo "$MKDIR_P" >&6; } + +mkdir_p="$MKDIR_P" +case $mkdir_p in + [\\/$]* | ?:[\\/]*) ;; + */*) mkdir_p="\$(top_builddir)/$mkdir_p" ;; +esac + +for ac_prog in gawk mawk nawk awk +do + # Extract the first word of "$ac_prog", so it can be a program name with args. +set dummy $ac_prog; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if test "${ac_cv_prog_AWK+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$AWK"; then + ac_cv_prog_AWK="$AWK" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + ac_cv_prog_AWK="$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +AWK=$ac_cv_prog_AWK +if test -n "$AWK"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $AWK" >&5 +$as_echo "$AWK" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + test -n "$AWK" && break +done + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ${MAKE-make} sets \$(MAKE)" >&5 +$as_echo_n "checking whether ${MAKE-make} sets \$(MAKE)... " >&6; } +set x ${MAKE-make} +ac_make=`$as_echo "$2" | sed 's/+/p/g; s/[^a-zA-Z0-9_]/_/g'` +if { as_var=ac_cv_prog_make_${ac_make}_set; eval "test \"\${$as_var+set}\" = set"; }; then : + $as_echo_n "(cached) " >&6 +else + cat >conftest.make <<\_ACEOF +SHELL = /bin/sh +all: + @echo '@@@%%%=$(MAKE)=@@@%%%' +_ACEOF +# GNU make sometimes prints "make[1]: Entering...", which would confuse us. +case `${MAKE-make} -f conftest.make 2>/dev/null` in + *@@@%%%=?*=@@@%%%*) + eval ac_cv_prog_make_${ac_make}_set=yes;; + *) + eval ac_cv_prog_make_${ac_make}_set=no;; +esac +rm -f conftest.make +fi +if eval test \$ac_cv_prog_make_${ac_make}_set = yes; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } + SET_MAKE= +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + SET_MAKE="MAKE=${MAKE-make}" +fi + +rm -rf .tst 2>/dev/null +mkdir .tst 2>/dev/null +if test -d .tst; then + am__leading_dot=. +else + am__leading_dot=_ +fi +rmdir .tst 2>/dev/null + +if test "`cd $srcdir && pwd`" != "`pwd`"; then + # Use -I$(srcdir) only when $(srcdir) != ., so that make's output + # is not polluted with repeated "-I." + am__isrc=' -I$(srcdir)' + # test to see if srcdir already configured + if test -f $srcdir/config.status; then + as_fn_error "source directory already configured; run \"make distclean\" there first" "$LINENO" 5 + fi +fi + +# test whether we have cygpath +if test -z "$CYGPATH_W"; then + if (cygpath --version) >/dev/null 2>/dev/null; then + CYGPATH_W='cygpath -w' + else + CYGPATH_W=echo + fi +fi + + +# Define the identity of the package. + PACKAGE='libbacktrace' + VERSION='version-unused' + + +# Some tools Automake needs. 
+ +ACLOCAL=${ACLOCAL-"${am_missing_run}aclocal-${am__api_version}"} + + +AUTOCONF=${AUTOCONF-"${am_missing_run}autoconf"} + + +AUTOMAKE=${AUTOMAKE-"${am_missing_run}automake-${am__api_version}"} + + +AUTOHEADER=${AUTOHEADER-"${am_missing_run}autoheader"} + + +MAKEINFO=${MAKEINFO-"${am_missing_run}makeinfo"} + +# We need awk for the "check" target. The system "awk" is bad on +# some platforms. +# Always define AMTAR for backward compatibility. Yes, it's still used +# in the wild :-( We should find a proper way to deprecate it ... +AMTAR='$${TAR-tar}' + +am__tar='$${TAR-tar} chof - "$$tardir"' am__untar='$${TAR-tar} xf -' + + + + + + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to enable maintainer-specific portions of Makefiles" >&5 +$as_echo_n "checking whether to enable maintainer-specific portions of Makefiles... " >&6; } + # Check whether --enable-maintainer-mode was given. +if test "${enable_maintainer_mode+set}" = set; then : + enableval=$enable_maintainer_mode; USE_MAINTAINER_MODE=$enableval +else + USE_MAINTAINER_MODE=no +fi + + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $USE_MAINTAINER_MODE" >&5 +$as_echo "$USE_MAINTAINER_MODE" >&6; } + if test $USE_MAINTAINER_MODE = yes; then + MAINTAINER_MODE_TRUE= + MAINTAINER_MODE_FALSE='#' +else + MAINTAINER_MODE_TRUE='#' + MAINTAINER_MODE_FALSE= +fi + + MAINT=$MAINTAINER_MODE_TRUE + + + + +# Check whether --with-target-subdir was given. +if test "${with_target_subdir+set}" = set; then : + withval=$with_target_subdir; +fi + + +# We must force CC to /not/ be precious variables; otherwise +# the wrong, non-multilib-adjusted value will be used in multilibs. +# As a side effect, we have to subst CFLAGS ourselves. + + +ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu +if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}gcc", so it can be a program name with args. +set dummy ${ac_tool_prefix}gcc; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if test "${ac_cv_prog_CC+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$CC"; then + ac_cv_prog_CC="$CC" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + ac_cv_prog_CC="${ac_tool_prefix}gcc" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +CC=$ac_cv_prog_CC +if test -n "$CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 +$as_echo "$CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + +fi +if test -z "$ac_cv_prog_CC"; then + ac_ct_CC=$CC + # Extract the first word of "gcc", so it can be a program name with args. +set dummy gcc; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if test "${ac_cv_prog_ac_ct_CC+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_CC"; then + ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. 
+else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + ac_cv_prog_ac_ct_CC="gcc" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +ac_ct_CC=$ac_cv_prog_ac_ct_CC +if test -n "$ac_ct_CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 +$as_echo "$ac_ct_CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + if test "x$ac_ct_CC" = x; then + CC="" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + CC=$ac_ct_CC + fi +else + CC="$ac_cv_prog_CC" +fi + +if test -z "$CC"; then + if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}cc", so it can be a program name with args. +set dummy ${ac_tool_prefix}cc; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if test "${ac_cv_prog_CC+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$CC"; then + ac_cv_prog_CC="$CC" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + ac_cv_prog_CC="${ac_tool_prefix}cc" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +CC=$ac_cv_prog_CC +if test -n "$CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 +$as_echo "$CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + fi +fi +if test -z "$CC"; then + # Extract the first word of "cc", so it can be a program name with args. +set dummy cc; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if test "${ac_cv_prog_CC+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$CC"; then + ac_cv_prog_CC="$CC" # Let the user override the test. +else + ac_prog_rejected=no +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if test "$as_dir/$ac_word$ac_exec_ext" = "/usr/ucb/cc"; then + ac_prog_rejected=yes + continue + fi + ac_cv_prog_CC="cc" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +if test $ac_prog_rejected = yes; then + # We found a bogon in the path, so make sure we never use it. + set dummy $ac_cv_prog_CC + shift + if test $# != 0; then + # We chose a different compiler from the bogus one. + # However, it has the same basename, so the bogon will be chosen + # first if we set CC to just the basename; use the full file name. 
+ shift + ac_cv_prog_CC="$as_dir/$ac_word${1+' '}$@" + fi +fi +fi +fi +CC=$ac_cv_prog_CC +if test -n "$CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 +$as_echo "$CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + +fi +if test -z "$CC"; then + if test -n "$ac_tool_prefix"; then + for ac_prog in cl.exe + do + # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. +set dummy $ac_tool_prefix$ac_prog; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if test "${ac_cv_prog_CC+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$CC"; then + ac_cv_prog_CC="$CC" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + ac_cv_prog_CC="$ac_tool_prefix$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +CC=$ac_cv_prog_CC +if test -n "$CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 +$as_echo "$CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + test -n "$CC" && break + done +fi +if test -z "$CC"; then + ac_ct_CC=$CC + for ac_prog in cl.exe +do + # Extract the first word of "$ac_prog", so it can be a program name with args. +set dummy $ac_prog; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if test "${ac_cv_prog_ac_ct_CC+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_CC"; then + ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + ac_cv_prog_ac_ct_CC="$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +ac_ct_CC=$ac_cv_prog_ac_ct_CC +if test -n "$ac_ct_CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 +$as_echo "$ac_ct_CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + test -n "$ac_ct_CC" && break +done + + if test "x$ac_ct_CC" = x; then + CC="" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + CC=$ac_ct_CC + fi +fi + +fi + + +test -z "$CC" && { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error "no acceptable C compiler found in \$PATH +See \`config.log' for more details." "$LINENO" 5; } + +# Provide some information about the compiler. 
+$as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler version" >&5 +set X $ac_compile +ac_compiler=$2 +for ac_option in --version -v -V -qversion; do + { { ac_try="$ac_compiler $ac_option >&5" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_compiler $ac_option >&5") 2>conftest.err + ac_status=$? + if test -s conftest.err; then + sed '10a\ +... rest of stderr output deleted ... + 10q' conftest.err >conftest.er1 + cat conftest.er1 >&5 + rm -f conftest.er1 conftest.err + fi + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } +done + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C compiler" >&5 +$as_echo_n "checking whether we are using the GNU C compiler... " >&6; } +if test "${ac_cv_c_compiler_gnu+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ +#ifndef __GNUC__ + choke me +#endif + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_compiler_gnu=yes +else + ac_compiler_gnu=no +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +ac_cv_c_compiler_gnu=$ac_compiler_gnu + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_compiler_gnu" >&5 +$as_echo "$ac_cv_c_compiler_gnu" >&6; } +if test $ac_compiler_gnu = yes; then + GCC=yes +else + GCC= +fi +ac_test_CFLAGS=${CFLAGS+set} +ac_save_CFLAGS=$CFLAGS +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC accepts -g" >&5 +$as_echo_n "checking whether $CC accepts -g... " >&6; } +if test "${ac_cv_prog_cc_g+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + ac_save_c_werror_flag=$ac_c_werror_flag + ac_c_werror_flag=yes + ac_cv_prog_cc_g=no + CFLAGS="-g" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_cv_prog_cc_g=yes +else + CFLAGS="" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + +else + ac_c_werror_flag=$ac_save_c_werror_flag + CFLAGS="-g" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_cv_prog_cc_g=yes +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext + ac_c_werror_flag=$ac_save_c_werror_flag +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_g" >&5 +$as_echo "$ac_cv_prog_cc_g" >&6; } +if test "$ac_test_CFLAGS" = set; then + CFLAGS=$ac_save_CFLAGS +elif test $ac_cv_prog_cc_g = yes; then + if test "$GCC" = yes; then + CFLAGS="-g -O2" + else + CFLAGS="-g" + fi +else + if test "$GCC" = yes; then + CFLAGS="-O2" + else + CFLAGS= + fi +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $CC option to accept ISO C89" >&5 +$as_echo_n "checking for $CC option to accept ISO C89... " >&6; } +if test "${ac_cv_prog_cc_c89+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + ac_cv_prog_cc_c89=no +ac_save_CC=$CC +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. 
*/
+#include <stdarg.h>
+#include <stdio.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+/* Most of the following tests are stolen from RCS 5.7's src/conf.sh. */
+struct buf { int x; };
+FILE * (*rcsopen) (struct buf *, struct stat *, int);
+static char *e (p, i)
+ char **p;
+ int i;
+{
+ return p[i];
+}
+static char *f (char * (*g) (char **, int), char **p, ...)
+{
+ char *s;
+ va_list v;
+ va_start (v,p);
+ s = g (p, va_arg (v,int));
+ va_end (v);
+ return s;
+}
+
+/* OSF 4.0 Compaq cc is some sort of almost-ANSI by default. It has
+ function prototypes and stuff, but not '\xHH' hex character constants.
+ These don't provoke an error unfortunately, instead are silently treated
+ as 'x'. The following induces an error, until -std is added to get
+ proper ANSI mode. Curiously '\x00'!='x' always comes out true, for an
+ array size at least. It's necessary to write '\x00'==0 to get something
+ that's true only with -std. */
+int osf4_cc_array ['\x00' == 0 ? 1 : -1];
+
+/* IBM C 6 for AIX is almost-ANSI by default, but it replaces macro parameters
+ inside strings and character constants. */
+#define FOO(x) 'x'
+int xlc6_cc_array[FOO(a) == 'x' ? 1 : -1];
+
+int test (int i, double x);
+struct s1 {int (*f) (int a);};
+struct s2 {int (*f) (double a);};
+int pairnames (int, char **, FILE *(*)(struct buf *, struct stat *, int), int, int);
+int argc;
+char **argv;
+int
+main ()
+{
+return f (e, argv, 0) != argv[0] || f (e, argv, 1) != argv[1];
+ ;
+ return 0;
+}
+_ACEOF
+for ac_arg in '' -qlanglvl=extc89 -qlanglvl=ansi -std \
+ -Ae "-Aa -D_HPUX_SOURCE" "-Xc -D__EXTENSIONS__"
+do
+ CC="$ac_save_CC $ac_arg"
+ if ac_fn_c_try_compile "$LINENO"; then :
+ ac_cv_prog_cc_c89=$ac_arg
+fi
+rm -f core conftest.err conftest.$ac_objext
+ test "x$ac_cv_prog_cc_c89" != "xno" && break
+done
+rm -f conftest.$ac_ext
+CC=$ac_save_CC
+
+fi
+# AC_CACHE_VAL
+case "x$ac_cv_prog_cc_c89" in
+ x)
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: none needed" >&5
+$as_echo "none needed" >&6; } ;;
+ xno)
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: unsupported" >&5
+$as_echo "unsupported" >&6; } ;;
+ *)
+ CC="$CC $ac_cv_prog_cc_c89"
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_c89" >&5
+$as_echo "$ac_cv_prog_cc_c89" >&6; } ;;
+esac
+if test "x$ac_cv_prog_cc_c89" != xno; then :
+
+fi
+
+ac_ext=c
+ac_cpp='$CPP $CPPFLAGS'
+ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
+ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
+ac_compiler_gnu=$ac_cv_c_compiler_gnu
+
+
+
+
+
+for ac_prog in gawk mawk nawk awk
+do
+ # Extract the first word of "$ac_prog", so it can be a program name with args.
+set dummy $ac_prog; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if test "${ac_cv_prog_AWK+set}" = set; then :
+ $as_echo_n "(cached) " >&6
+else
+ if test -n "$AWK"; then
+ ac_cv_prog_AWK="$AWK" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+ IFS=$as_save_IFS
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + ac_cv_prog_AWK="$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +AWK=$ac_cv_prog_AWK +if test -n "$AWK"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $AWK" >&5 +$as_echo "$AWK" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + test -n "$AWK" && break +done + +case "$AWK" in +"") as_fn_error "can't build without awk" "$LINENO" 5 ;; +esac + +case `pwd` in + *\ * | *\ *) + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Libtool does not cope well with whitespace in \`pwd\`" >&5 +$as_echo "$as_me: WARNING: Libtool does not cope well with whitespace in \`pwd\`" >&2;} ;; +esac + + + +macro_version='2.2.4' +macro_revision='1.2976' + + + + + + + + + + + + + +ltmain="$ac_aux_dir/ltmain.sh" + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for a sed that does not truncate output" >&5 +$as_echo_n "checking for a sed that does not truncate output... " >&6; } +if test "${ac_cv_path_SED+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + ac_script=s/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb/ + for ac_i in 1 2 3 4 5 6 7; do + ac_script="$ac_script$as_nl$ac_script" + done + echo "$ac_script" 2>/dev/null | sed 99q >conftest.sed + { ac_script=; unset ac_script;} + if test -z "$SED"; then + ac_path_SED_found=false + # Loop through the user's path and test for each of PROGNAME-LIST + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_prog in sed gsed; do + for ac_exec_ext in '' $ac_executable_extensions; do + ac_path_SED="$as_dir/$ac_prog$ac_exec_ext" + { test -f "$ac_path_SED" && $as_test_x "$ac_path_SED"; } || continue +# Check for GNU ac_path_SED and select it if it is found. + # Check for GNU $ac_path_SED +case `"$ac_path_SED" --version 2>&1` in +*GNU*) + ac_cv_path_SED="$ac_path_SED" ac_path_SED_found=:;; +*) + ac_count=0 + $as_echo_n 0123456789 >"conftest.in" + while : + do + cat "conftest.in" "conftest.in" >"conftest.tmp" + mv "conftest.tmp" "conftest.in" + cp "conftest.in" "conftest.nl" + $as_echo '' >> "conftest.nl" + "$ac_path_SED" -f conftest.sed < "conftest.nl" >"conftest.out" 2>/dev/null || break + diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break + as_fn_arith $ac_count + 1 && ac_count=$as_val + if test $ac_count -gt ${ac_path_SED_max-0}; then + # Best one so far, save it but keep looking for a better one + ac_cv_path_SED="$ac_path_SED" + ac_path_SED_max=$ac_count + fi + # 10*(2^10) chars as input seems more than enough + test $ac_count -gt 10 && break + done + rm -f conftest.in conftest.tmp conftest.nl conftest.out;; +esac + + $ac_path_SED_found && break 3 + done + done + done +IFS=$as_save_IFS + if test -z "$ac_cv_path_SED"; then + as_fn_error "no acceptable sed could be found in \$PATH" "$LINENO" 5 + fi +else + ac_cv_path_SED=$SED +fi + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_SED" >&5 +$as_echo "$ac_cv_path_SED" >&6; } + SED="$ac_cv_path_SED" + rm -f conftest.sed + +test -z "$SED" && SED=sed +Xsed="$SED -e 1s/^X//" + + + + + + + + + + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for fgrep" >&5 +$as_echo_n "checking for fgrep... 
" >&6; } +if test "${ac_cv_path_FGREP+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + if echo 'ab*c' | $GREP -F 'ab*c' >/dev/null 2>&1 + then ac_cv_path_FGREP="$GREP -F" + else + if test -z "$FGREP"; then + ac_path_FGREP_found=false + # Loop through the user's path and test for each of PROGNAME-LIST + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_prog in fgrep; do + for ac_exec_ext in '' $ac_executable_extensions; do + ac_path_FGREP="$as_dir/$ac_prog$ac_exec_ext" + { test -f "$ac_path_FGREP" && $as_test_x "$ac_path_FGREP"; } || continue +# Check for GNU ac_path_FGREP and select it if it is found. + # Check for GNU $ac_path_FGREP +case `"$ac_path_FGREP" --version 2>&1` in +*GNU*) + ac_cv_path_FGREP="$ac_path_FGREP" ac_path_FGREP_found=:;; +*) + ac_count=0 + $as_echo_n 0123456789 >"conftest.in" + while : + do + cat "conftest.in" "conftest.in" >"conftest.tmp" + mv "conftest.tmp" "conftest.in" + cp "conftest.in" "conftest.nl" + $as_echo 'FGREP' >> "conftest.nl" + "$ac_path_FGREP" FGREP < "conftest.nl" >"conftest.out" 2>/dev/null || break + diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break + as_fn_arith $ac_count + 1 && ac_count=$as_val + if test $ac_count -gt ${ac_path_FGREP_max-0}; then + # Best one so far, save it but keep looking for a better one + ac_cv_path_FGREP="$ac_path_FGREP" + ac_path_FGREP_max=$ac_count + fi + # 10*(2^10) chars as input seems more than enough + test $ac_count -gt 10 && break + done + rm -f conftest.in conftest.tmp conftest.nl conftest.out;; +esac + + $ac_path_FGREP_found && break 3 + done + done + done +IFS=$as_save_IFS + if test -z "$ac_cv_path_FGREP"; then + as_fn_error "no acceptable fgrep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5 + fi +else + ac_cv_path_FGREP=$FGREP +fi + + fi +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_FGREP" >&5 +$as_echo "$ac_cv_path_FGREP" >&6; } + FGREP="$ac_cv_path_FGREP" + + +test -z "$GREP" && GREP=grep + + + + + + + + + + + + + + + + + + + +# Check whether --with-gnu-ld was given. +if test "${with_gnu_ld+set}" = set; then : + withval=$with_gnu_ld; test "$withval" = no || with_gnu_ld=yes +else + with_gnu_ld=no +fi + +ac_prog=ld +if test "$GCC" = yes; then + # Check if gcc -print-prog-name=ld gives a path. + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ld used by $CC" >&5 +$as_echo_n "checking for ld used by $CC... " >&6; } + case $host in + *-*-mingw*) + # gcc leaves a trailing carriage return which upsets mingw + ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;; + *) + ac_prog=`($CC -print-prog-name=ld) 2>&5` ;; + esac + case $ac_prog in + # Accept absolute paths. + [\\/]* | ?:[\\/]*) + re_direlt='/[^/][^/]*/\.\./' + # Canonicalize the pathname of ld + ac_prog=`$ECHO "$ac_prog"| $SED 's%\\\\%/%g'` + while $ECHO "$ac_prog" | $GREP "$re_direlt" > /dev/null 2>&1; do + ac_prog=`$ECHO $ac_prog| $SED "s%$re_direlt%/%"` + done + test -z "$LD" && LD="$ac_prog" + ;; + "") + # If it fails, then pretend we aren't using GCC. + ac_prog=ld + ;; + *) + # If it is relative, then search for the first ld in PATH. + with_gnu_ld=unknown + ;; + esac +elif test "$with_gnu_ld" = yes; then + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GNU ld" >&5 +$as_echo_n "checking for GNU ld... " >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for non-GNU ld" >&5 +$as_echo_n "checking for non-GNU ld... 
" >&6; } +fi +if test "${lt_cv_path_LD+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + if test -z "$LD"; then + lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR + for ac_dir in $PATH; do + IFS="$lt_save_ifs" + test -z "$ac_dir" && ac_dir=. + if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then + lt_cv_path_LD="$ac_dir/$ac_prog" + # Check to see if the program is GNU ld. I'd rather use --version, + # but apparently some variants of GNU ld only accept -v. + # Break only if it was the GNU/non-GNU ld that we prefer. + case `"$lt_cv_path_LD" -v 2>&1 &5 +$as_echo "$LD" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi +test -z "$LD" && as_fn_error "no acceptable ld found in \$PATH" "$LINENO" 5 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if the linker ($LD) is GNU ld" >&5 +$as_echo_n "checking if the linker ($LD) is GNU ld... " >&6; } +if test "${lt_cv_prog_gnu_ld+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + # I'd rather use --version here, but apparently some GNU lds only accept -v. +case `$LD -v 2>&1 &5 +$as_echo "$lt_cv_prog_gnu_ld" >&6; } +with_gnu_ld=$lt_cv_prog_gnu_ld + + + + + + + + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for BSD- or MS-compatible name lister (nm)" >&5 +$as_echo_n "checking for BSD- or MS-compatible name lister (nm)... " >&6; } +if test "${lt_cv_path_NM+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$NM"; then + # Let the user override the test. + lt_cv_path_NM="$NM" +else + lt_nm_to_check="${ac_tool_prefix}nm" + if test -n "$ac_tool_prefix" && test "$build" = "$host"; then + lt_nm_to_check="$lt_nm_to_check nm" + fi + for lt_tmp_nm in $lt_nm_to_check; do + lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR + for ac_dir in $PATH /usr/ccs/bin/elf /usr/ccs/bin /usr/ucb /bin; do + IFS="$lt_save_ifs" + test -z "$ac_dir" && ac_dir=. + tmp_nm="$ac_dir/$lt_tmp_nm" + if test -f "$tmp_nm" || test -f "$tmp_nm$ac_exeext" ; then + # Check to see if the nm accepts a BSD-compat flag. + # Adding the `sed 1q' prevents false positives on HP-UX, which says: + # nm: unknown option "B" ignored + # Tru64's nm complains that /dev/null is an invalid object file + case `"$tmp_nm" -B /dev/null 2>&1 | sed '1q'` in + */dev/null* | *'Invalid file or object type'*) + lt_cv_path_NM="$tmp_nm -B" + break + ;; + *) + case `"$tmp_nm" -p /dev/null 2>&1 | sed '1q'` in + */dev/null*) + lt_cv_path_NM="$tmp_nm -p" + break + ;; + *) + lt_cv_path_NM=${lt_cv_path_NM="$tmp_nm"} # keep the first match, but + continue # so that we can try to find one that supports BSD flags + ;; + esac + ;; + esac + fi + done + IFS="$lt_save_ifs" + done + : ${lt_cv_path_NM=no} +fi +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_NM" >&5 +$as_echo "$lt_cv_path_NM" >&6; } +if test "$lt_cv_path_NM" != "no"; then + NM="$lt_cv_path_NM" +else + # Didn't find any BSD compatible name lister, look for dumpbin. + if test -n "$ac_tool_prefix"; then + for ac_prog in "dumpbin -symbols" "link -dump -symbols" + do + # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. +set dummy $ac_tool_prefix$ac_prog; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if test "${ac_cv_prog_DUMPBIN+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$DUMPBIN"; then + ac_cv_prog_DUMPBIN="$DUMPBIN" # Let the user override the test. 
+else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + ac_cv_prog_DUMPBIN="$ac_tool_prefix$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +DUMPBIN=$ac_cv_prog_DUMPBIN +if test -n "$DUMPBIN"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DUMPBIN" >&5 +$as_echo "$DUMPBIN" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + test -n "$DUMPBIN" && break + done +fi +if test -z "$DUMPBIN"; then + ac_ct_DUMPBIN=$DUMPBIN + for ac_prog in "dumpbin -symbols" "link -dump -symbols" +do + # Extract the first word of "$ac_prog", so it can be a program name with args. +set dummy $ac_prog; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if test "${ac_cv_prog_ac_ct_DUMPBIN+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_DUMPBIN"; then + ac_cv_prog_ac_ct_DUMPBIN="$ac_ct_DUMPBIN" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + ac_cv_prog_ac_ct_DUMPBIN="$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +ac_ct_DUMPBIN=$ac_cv_prog_ac_ct_DUMPBIN +if test -n "$ac_ct_DUMPBIN"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DUMPBIN" >&5 +$as_echo "$ac_ct_DUMPBIN" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + test -n "$ac_ct_DUMPBIN" && break +done + + if test "x$ac_ct_DUMPBIN" = x; then + DUMPBIN=":" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + DUMPBIN=$ac_ct_DUMPBIN + fi +fi + + + if test "$DUMPBIN" != ":"; then + NM="$DUMPBIN" + fi +fi +test -z "$NM" && NM=nm + + + + + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking the name lister ($NM) interface" >&5 +$as_echo_n "checking the name lister ($NM) interface... " >&6; } +if test "${lt_cv_nm_interface+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + lt_cv_nm_interface="BSD nm" + echo "int some_variable = 0;" > conftest.$ac_ext + (eval echo "\"\$as_me:5363: $ac_compile\"" >&5) + (eval "$ac_compile" 2>conftest.err) + cat conftest.err >&5 + (eval echo "\"\$as_me:5366: $NM \\\"conftest.$ac_objext\\\"\"" >&5) + (eval "$NM \"conftest.$ac_objext\"" 2>conftest.err > conftest.out) + cat conftest.err >&5 + (eval echo "\"\$as_me:5369: output\"" >&5) + cat conftest.out >&5 + if $GREP 'External.*some_variable' conftest.out > /dev/null; then + lt_cv_nm_interface="MS dumpbin" + fi + rm -f conftest* +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_nm_interface" >&5 +$as_echo "$lt_cv_nm_interface" >&6; } + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ln -s works" >&5 +$as_echo_n "checking whether ln -s works... 
" >&6; } +LN_S=$as_ln_s +if test "$LN_S" = "ln -s"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no, using $LN_S" >&5 +$as_echo "no, using $LN_S" >&6; } +fi + +# find the maximum length of command line arguments +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking the maximum length of command line arguments" >&5 +$as_echo_n "checking the maximum length of command line arguments... " >&6; } +if test "${lt_cv_sys_max_cmd_len+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + i=0 + teststring="ABCD" + + case $build_os in + msdosdjgpp*) + # On DJGPP, this test can blow up pretty badly due to problems in libc + # (any single argument exceeding 2000 bytes causes a buffer overrun + # during glob expansion). Even if it were fixed, the result of this + # check would be larger than it should be. + lt_cv_sys_max_cmd_len=12288; # 12K is about right + ;; + + gnu*) + # Under GNU Hurd, this test is not required because there is + # no limit to the length of command line arguments. + # Libtool will interpret -1 as no limit whatsoever + lt_cv_sys_max_cmd_len=-1; + ;; + + cygwin* | mingw*) + # On Win9x/ME, this test blows up -- it succeeds, but takes + # about 5 minutes as the teststring grows exponentially. + # Worse, since 9x/ME are not pre-emptively multitasking, + # you end up with a "frozen" computer, even though with patience + # the test eventually succeeds (with a max line length of 256k). + # Instead, let's just punt: use the minimum linelength reported by + # all of the supported platforms: 8192 (on NT/2K/XP). + lt_cv_sys_max_cmd_len=8192; + ;; + + amigaos*) + # On AmigaOS with pdksh, this test takes hours, literally. + # So we just punt and use a minimum line length of 8192. + lt_cv_sys_max_cmd_len=8192; + ;; + + netbsd* | freebsd* | openbsd* | darwin* | dragonfly* | bitrig*) + # This has been around since 386BSD, at least. Likely further. + if test -x /sbin/sysctl; then + lt_cv_sys_max_cmd_len=`/sbin/sysctl -n kern.argmax` + elif test -x /usr/sbin/sysctl; then + lt_cv_sys_max_cmd_len=`/usr/sbin/sysctl -n kern.argmax` + else + lt_cv_sys_max_cmd_len=65536 # usable default for all BSDs + fi + # And add a safety zone + lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4` + lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3` + ;; + + interix*) + # We know the value 262144 and hardcode it with a safety zone (like BSD) + lt_cv_sys_max_cmd_len=196608 + ;; + + osf*) + # Dr. Hans Ekkehard Plesser reports seeing a kernel panic running configure + # due to this test when exec_disable_arg_limit is 1 on Tru64. It is not + # nice to cause kernel panics so lets avoid the loop below. + # First set a reasonable default. + lt_cv_sys_max_cmd_len=16384 + # + if test -x /sbin/sysconfig; then + case `/sbin/sysconfig -q proc exec_disable_arg_limit` in + *1*) lt_cv_sys_max_cmd_len=-1 ;; + esac + fi + ;; + sco3.2v5*) + lt_cv_sys_max_cmd_len=102400 + ;; + sysv5* | sco5v6* | sysv4.2uw2*) + kargmax=`grep ARG_MAX /etc/conf/cf.d/stune 2>/dev/null` + if test -n "$kargmax"; then + lt_cv_sys_max_cmd_len=`echo $kargmax | sed 's/.*[ ]//'` + else + lt_cv_sys_max_cmd_len=32768 + fi + ;; + *) + lt_cv_sys_max_cmd_len=`(getconf ARG_MAX) 2> /dev/null` + if test -n "$lt_cv_sys_max_cmd_len"; then + lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4` + lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3` + else + # Make teststring a little bigger before we do anything with it. 
+ # a 1K string should be a reasonable start. + for i in 1 2 3 4 5 6 7 8 ; do + teststring=$teststring$teststring + done + SHELL=${SHELL-${CONFIG_SHELL-/bin/sh}} + # If test is not a shell built-in, we'll probably end up computing a + # maximum length that is only half of the actual maximum length, but + # we can't tell. + while { test "X"`$SHELL $0 --fallback-echo "X$teststring$teststring" 2>/dev/null` \ + = "XX$teststring$teststring"; } >/dev/null 2>&1 && + test $i != 17 # 1/2 MB should be enough + do + i=`expr $i + 1` + teststring=$teststring$teststring + done + # Only check the string length outside the loop. + lt_cv_sys_max_cmd_len=`expr "X$teststring" : ".*" 2>&1` + teststring= + # Add a significant safety factor because C++ compilers can tack on + # massive amounts of additional arguments before passing them to the + # linker. It appears as though 1/2 is a usable value. + lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 2` + fi + ;; + esac + +fi + +if test -n $lt_cv_sys_max_cmd_len ; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sys_max_cmd_len" >&5 +$as_echo "$lt_cv_sys_max_cmd_len" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: none" >&5 +$as_echo "none" >&6; } +fi +max_cmd_len=$lt_cv_sys_max_cmd_len + + + + + + +: ${CP="cp -f"} +: ${MV="mv -f"} +: ${RM="rm -f"} + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the shell understands some XSI constructs" >&5 +$as_echo_n "checking whether the shell understands some XSI constructs... " >&6; } +# Try some XSI features +xsi_shell=no +( _lt_dummy="a/b/c" + test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \ + = c,a/b,, \ + && eval 'test $(( 1 + 1 )) -eq 2 \ + && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \ + && xsi_shell=yes +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $xsi_shell" >&5 +$as_echo "$xsi_shell" >&6; } + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the shell understands \"+=\"" >&5 +$as_echo_n "checking whether the shell understands \"+=\"... " >&6; } +lt_shell_append=no +( foo=bar; set foo baz; eval "$1+=\$2" && test "$foo" = barbaz ) \ + >/dev/null 2>&1 \ + && lt_shell_append=yes +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_shell_append" >&5 +$as_echo "$lt_shell_append" >&6; } + + +if ( (MAIL=60; unset MAIL) || exit) >/dev/null 2>&1; then + lt_unset=unset +else + lt_unset=false +fi + + + + + +# test EBCDIC or ASCII +case `echo X|tr X '\101'` in + A) # ASCII based system + # \n is not interpreted correctly by Solaris 8 /usr/ucb/tr + lt_SP2NL='tr \040 \012' + lt_NL2SP='tr \015\012 \040\040' + ;; + *) # EBCDIC based system + lt_SP2NL='tr \100 \n' + lt_NL2SP='tr \r\n \100\100' + ;; +esac + + + + + + + + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5 +$as_echo_n "checking for $LD option to reload object files... 
" >&6; } +if test "${lt_cv_ld_reload_flag+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + lt_cv_ld_reload_flag='-r' +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ld_reload_flag" >&5 +$as_echo "$lt_cv_ld_reload_flag" >&6; } +reload_flag=$lt_cv_ld_reload_flag +case $reload_flag in +"" | " "*) ;; +*) reload_flag=" $reload_flag" ;; +esac +reload_cmds='$LD$reload_flag -o $output$reload_objs' +case $host_os in + darwin*) + if test "$GCC" = yes; then + reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs' + else + reload_cmds='$LD$reload_flag -o $output$reload_objs' + fi + ;; +esac + + + + + + + + + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to recognize dependent libraries" >&5 +$as_echo_n "checking how to recognize dependent libraries... " >&6; } +if test "${lt_cv_deplibs_check_method+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + lt_cv_file_magic_cmd='$MAGIC_CMD' +lt_cv_file_magic_test_file= +lt_cv_deplibs_check_method='unknown' +# Need to set the preceding variable on all platforms that support +# interlibrary dependencies. +# 'none' -- dependencies not supported. +# `unknown' -- same as none, but documents that we really don't know. +# 'pass_all' -- all dependencies passed with no checks. +# 'test_compile' -- check by making test program. +# 'file_magic [[regex]]' -- check by looking for files in library path +# which responds to the $file_magic_cmd with a given extended regex. +# If you have `file' or equivalent on your system and you're not sure +# whether `pass_all' will *always* work, you probably want this one. + +case $host_os in +aix[4-9]*) + lt_cv_deplibs_check_method=pass_all + ;; + +beos*) + lt_cv_deplibs_check_method=pass_all + ;; + +bsdi[45]*) + lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [ML]SB (shared object|dynamic lib)' + lt_cv_file_magic_cmd='/usr/bin/file -L' + lt_cv_file_magic_test_file=/shlib/libc.so + ;; + +cygwin*) + # func_win32_libid is a shell function defined in ltmain.sh + lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL' + lt_cv_file_magic_cmd='func_win32_libid' + ;; + +mingw* | pw32*) + # Base MSYS/MinGW do not provide the 'file' command needed by + # func_win32_libid shell function, so use a weaker test based on 'objdump', + # unless we find 'file', for example because we are cross-compiling. + if ( file / ) >/dev/null 2>&1; then + lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL' + lt_cv_file_magic_cmd='func_win32_libid' + else + lt_cv_deplibs_check_method='file_magic file format pei*-i386(.*architecture: i386)?' + lt_cv_file_magic_cmd='$OBJDUMP -f' + fi + ;; + +darwin* | rhapsody*) + lt_cv_deplibs_check_method=pass_all + ;; + +freebsd* | dragonfly*) + if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then + case $host_cpu in + i*86 ) + # Not sure whether the presence of OpenBSD here was a mistake. + # Let's accept both of them until this is cleared up. 
+ lt_cv_deplibs_check_method='file_magic (FreeBSD|OpenBSD|DragonFly)/i[3-9]86 (compact )?demand paged shared library' + lt_cv_file_magic_cmd=/usr/bin/file + lt_cv_file_magic_test_file=`echo /usr/lib/libc.so.*` + ;; + esac + else + lt_cv_deplibs_check_method=pass_all + fi + ;; + +gnu*) + lt_cv_deplibs_check_method=pass_all + ;; + +hpux10.20* | hpux11*) + lt_cv_file_magic_cmd=/usr/bin/file + case $host_cpu in + ia64*) + lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|ELF-[0-9][0-9]) shared object file - IA64' + lt_cv_file_magic_test_file=/usr/lib/hpux32/libc.so + ;; + hppa*64*) + lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|ELF-[0-9][0-9]) shared object file - PA-RISC [0-9].[0-9]' + lt_cv_file_magic_test_file=/usr/lib/pa20_64/libc.sl + ;; + *) + lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|PA-RISC[0-9].[0-9]) shared library' + lt_cv_file_magic_test_file=/usr/lib/libc.sl + ;; + esac + ;; + +interix[3-9]*) + # PIC code is broken on Interix 3.x, that's why |\.a not |_pic\.a here + lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so|\.a)$' + ;; + +irix5* | irix6* | nonstopux*) + case $LD in + *-32|*"-32 ") libmagic=32-bit;; + *-n32|*"-n32 ") libmagic=N32;; + *-64|*"-64 ") libmagic=64-bit;; + *) libmagic=never-match;; + esac + lt_cv_deplibs_check_method=pass_all + ;; + +# This must be Linux ELF. +linux* | k*bsd*-gnu) + lt_cv_deplibs_check_method=pass_all + ;; + +netbsd*) + if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then + lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|_pic\.a)$' + else + lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so|_pic\.a)$' + fi + ;; + +newos6*) + lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [ML]SB (executable|dynamic lib)' + lt_cv_file_magic_cmd=/usr/bin/file + lt_cv_file_magic_test_file=/usr/lib/libnls.so + ;; + +*nto* | *qnx*) + lt_cv_deplibs_check_method=pass_all + ;; + +openbsd*) + if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then + lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|\.so|_pic\.a)$' + else + lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|_pic\.a)$' + fi + ;; + +osf3* | osf4* | osf5*) + lt_cv_deplibs_check_method=pass_all + ;; + +rdos*) + lt_cv_deplibs_check_method=pass_all + ;; + +solaris*) + lt_cv_deplibs_check_method=pass_all + ;; + +sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*) + lt_cv_deplibs_check_method=pass_all + ;; + +sysv4 | sysv4.3*) + case $host_vendor in + motorola) + lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [ML]SB (shared object|dynamic lib) M[0-9][0-9]* Version [0-9]' + lt_cv_file_magic_test_file=`echo /usr/lib/libc.so*` + ;; + ncr) + lt_cv_deplibs_check_method=pass_all + ;; + sequent) + lt_cv_file_magic_cmd='/bin/file' + lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [LM]SB (shared object|dynamic lib )' + ;; + sni) + lt_cv_file_magic_cmd='/bin/file' + lt_cv_deplibs_check_method="file_magic ELF [0-9][0-9]*-bit [LM]SB dynamic lib" + lt_cv_file_magic_test_file=/lib/libc.so + ;; + siemens) + lt_cv_deplibs_check_method=pass_all + ;; + pc) + lt_cv_deplibs_check_method=pass_all + ;; + esac + ;; + +tpf*) + lt_cv_deplibs_check_method=pass_all + ;; +esac + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5 +$as_echo "$lt_cv_deplibs_check_method" >&6; } +file_magic_cmd=$lt_cv_file_magic_cmd +deplibs_check_method=$lt_cv_deplibs_check_method +test -z 
"$deplibs_check_method" && deplibs_check_method=unknown + + + + + + + + + + + + +if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}ar", so it can be a program name with args. +set dummy ${ac_tool_prefix}ar; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if test "${ac_cv_prog_AR+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$AR"; then + ac_cv_prog_AR="$AR" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + ac_cv_prog_AR="${ac_tool_prefix}ar" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +AR=$ac_cv_prog_AR +if test -n "$AR"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $AR" >&5 +$as_echo "$AR" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + +fi +if test -z "$ac_cv_prog_AR"; then + ac_ct_AR=$AR + # Extract the first word of "ar", so it can be a program name with args. +set dummy ar; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if test "${ac_cv_prog_ac_ct_AR+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_AR"; then + ac_cv_prog_ac_ct_AR="$ac_ct_AR" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + ac_cv_prog_ac_ct_AR="ar" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +ac_ct_AR=$ac_cv_prog_ac_ct_AR +if test -n "$ac_ct_AR"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_AR" >&5 +$as_echo "$ac_ct_AR" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + if test "x$ac_ct_AR" = x; then + AR="false" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + AR=$ac_ct_AR + fi +else + AR="$ac_cv_prog_AR" +fi + +test -z "$AR" && AR=ar +test -z "$AR_FLAGS" && AR_FLAGS=cru + + + + + + + + + + + +if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args. +set dummy ${ac_tool_prefix}strip; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if test "${ac_cv_prog_STRIP+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$STRIP"; then + ac_cv_prog_STRIP="$STRIP" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + ac_cv_prog_STRIP="${ac_tool_prefix}strip" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +STRIP=$ac_cv_prog_STRIP +if test -n "$STRIP"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $STRIP" >&5 +$as_echo "$STRIP" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + +fi +if test -z "$ac_cv_prog_STRIP"; then + ac_ct_STRIP=$STRIP + # Extract the first word of "strip", so it can be a program name with args. +set dummy strip; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if test "${ac_cv_prog_ac_ct_STRIP+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_STRIP"; then + ac_cv_prog_ac_ct_STRIP="$ac_ct_STRIP" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + ac_cv_prog_ac_ct_STRIP="strip" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +ac_ct_STRIP=$ac_cv_prog_ac_ct_STRIP +if test -n "$ac_ct_STRIP"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_STRIP" >&5 +$as_echo "$ac_ct_STRIP" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + if test "x$ac_ct_STRIP" = x; then + STRIP=":" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + STRIP=$ac_ct_STRIP + fi +else + STRIP="$ac_cv_prog_STRIP" +fi + +test -z "$STRIP" && STRIP=: + + + + + + +if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}ranlib", so it can be a program name with args. +set dummy ${ac_tool_prefix}ranlib; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if test "${ac_cv_prog_RANLIB+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$RANLIB"; then + ac_cv_prog_RANLIB="$RANLIB" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + ac_cv_prog_RANLIB="${ac_tool_prefix}ranlib" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +RANLIB=$ac_cv_prog_RANLIB +if test -n "$RANLIB"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $RANLIB" >&5 +$as_echo "$RANLIB" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + +fi +if test -z "$ac_cv_prog_RANLIB"; then + ac_ct_RANLIB=$RANLIB + # Extract the first word of "ranlib", so it can be a program name with args. 
+set dummy ranlib; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if test "${ac_cv_prog_ac_ct_RANLIB+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_RANLIB"; then + ac_cv_prog_ac_ct_RANLIB="$ac_ct_RANLIB" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + ac_cv_prog_ac_ct_RANLIB="ranlib" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +ac_ct_RANLIB=$ac_cv_prog_ac_ct_RANLIB +if test -n "$ac_ct_RANLIB"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_RANLIB" >&5 +$as_echo "$ac_ct_RANLIB" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + if test "x$ac_ct_RANLIB" = x; then + RANLIB=":" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + RANLIB=$ac_ct_RANLIB + fi +else + RANLIB="$ac_cv_prog_RANLIB" +fi + +test -z "$RANLIB" && RANLIB=: + + + + + + +# Determine commands to create old-style static archives. +old_archive_cmds='$AR $AR_FLAGS $oldlib$oldobjs' +old_postinstall_cmds='chmod 644 $oldlib' +old_postuninstall_cmds= + +if test -n "$RANLIB"; then + case $host_os in + openbsd*) + old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB -t \$oldlib" + ;; + *) + old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB \$oldlib" + ;; + esac + old_archive_cmds="$old_archive_cmds~\$RANLIB \$oldlib" +fi + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +# If no C compiler was specified, use CC. +LTCC=${LTCC-"$CC"} + +# If no C compiler flags were specified, use CFLAGS. +LTCFLAGS=${LTCFLAGS-"$CFLAGS"} + +# Allow CC to be a program name with arguments. +compiler=$CC + + +# Check for command to grab the raw symbol name followed by C symbol from nm. +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking command to parse $NM output from $compiler object" >&5 +$as_echo_n "checking command to parse $NM output from $compiler object... " >&6; } +if test "${lt_cv_sys_global_symbol_pipe+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + +# These are sane defaults that work on at least a few old systems. +# [They come from Ultrix. What could be older than Ultrix?!! ;)] + +# Character class describing NM global symbol codes. +symcode='[BCDEGRST]' + +# Regexp to match symbols that can be accessed directly from C. +sympat='\([_A-Za-z][_A-Za-z0-9]*\)' + +# Define system-specific variables. +case $host_os in +aix*) + symcode='[BCDT]' + ;; +cygwin* | mingw* | pw32*) + symcode='[ABCDGISTW]' + ;; +hpux*) + if test "$host_cpu" = ia64; then + symcode='[ABCDEGRST]' + fi + ;; +irix* | nonstopux*) + symcode='[BCDEGRST]' + ;; +osf*) + symcode='[BCDEGQRST]' + ;; +solaris*) + symcode='[BDRT]' + ;; +sco3.2v5*) + symcode='[DT]' + ;; +sysv4.2uw2*) + symcode='[DT]' + ;; +sysv5* | sco5v6* | unixware* | OpenUNIX*) + symcode='[ABDT]' + ;; +sysv4) + symcode='[DFNSTU]' + ;; +esac + +# If we're using GNU nm, then use its standard symbol codes. 
+case `$NM -V 2>&1` in +*GNU* | *'with BFD'*) + symcode='[ABCDGIRSTW]' ;; +esac + +# Transform an extracted symbol line into a proper C declaration. +# Some systems (esp. on ia64) link data and code symbols differently, +# so use this general approach. +lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'" + +# Transform an extracted symbol line into symbol name and symbol address +lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'" +lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'" + +# Handle CRLF in mingw tool chain +opt_cr= +case $build_os in +mingw*) + opt_cr=`$ECHO 'x\{0,1\}' | tr x '\015'` # option cr in regexp + ;; +esac + +# Try without a prefix underscore, then with it. +for ac_symprfx in "" "_"; do + + # Transform symcode, sympat, and symprfx into a raw symbol and a C symbol. + symxfrm="\\1 $ac_symprfx\\2 \\2" + + # Write the raw and C identifiers. + if test "$lt_cv_nm_interface" = "MS dumpbin"; then + # Fake it for dumpbin and say T for any non-static function + # and D for any global variable. + # Also find C++ and __fastcall symbols from MSVC++, + # which start with @ or ?. + lt_cv_sys_global_symbol_pipe="$AWK '"\ +" {last_section=section; section=\$ 3};"\ +" /Section length .*#relocs.*(pick any)/{hide[last_section]=1};"\ +" \$ 0!~/External *\|/{next};"\ +" / 0+ UNDEF /{next}; / UNDEF \([^|]\)*()/{next};"\ +" {if(hide[section]) next};"\ +" {f=0}; \$ 0~/\(\).*\|/{f=1}; {printf f ? \"T \" : \"D \"};"\ +" {split(\$ 0, a, /\||\r/); split(a[2], s)};"\ +" s[1]~/^[@?]/{print s[1], s[1]; next};"\ +" s[1]~prfx {split(s[1],t,\"@\"); print t[1], substr(t[1],length(prfx))}"\ +" ' prfx=^$ac_symprfx" + else + lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'" + fi + + # Check to see that the pipe works correctly. + pipe_works=no + + rm -f conftest* + cat > conftest.$ac_ext <<_LT_EOF +#ifdef __cplusplus +extern "C" { +#endif +char nm_test_var; +void nm_test_func(void); +void nm_test_func(void){} +#ifdef __cplusplus +} +#endif +int main(){nm_test_var='a';nm_test_func();return(0);} +_LT_EOF + + if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 + (eval $ac_compile) 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + # Now try to grab the symbols. + nlist=conftest.nm + if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$NM conftest.$ac_objext \| $lt_cv_sys_global_symbol_pipe \> $nlist\""; } >&5 + (eval $NM conftest.$ac_objext \| $lt_cv_sys_global_symbol_pipe \> $nlist) 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } && test -s "$nlist"; then + # Try sorting and uniquifying the output. + if sort "$nlist" | uniq > "$nlist"T; then + mv -f "$nlist"T "$nlist" + else + rm -f "$nlist"T + fi + + # Make sure that we snagged all the symbols we need. + if $GREP ' nm_test_var$' "$nlist" >/dev/null; then + if $GREP ' nm_test_func$' "$nlist" >/dev/null; then + cat <<_LT_EOF > conftest.$ac_ext +#ifdef __cplusplus +extern "C" { +#endif + +_LT_EOF + # Now generate the symbol file. 
+ eval "$lt_cv_sys_global_symbol_to_cdecl"' < "$nlist" | $GREP -v main >> conftest.$ac_ext' + + cat <<_LT_EOF >> conftest.$ac_ext + +/* The mapping between symbol names and symbols. */ +const struct { + const char *name; + void *address; +} +lt__PROGRAM__LTX_preloaded_symbols[] = +{ + { "@PROGRAM@", (void *) 0 }, +_LT_EOF + $SED "s/^$symcode$symcode* \(.*\) \(.*\)$/ {\"\2\", (void *) \&\2},/" < "$nlist" | $GREP -v main >> conftest.$ac_ext + cat <<\_LT_EOF >> conftest.$ac_ext + {0, (void *) 0} +}; + +/* This works around a problem in FreeBSD linker */ +#ifdef FREEBSD_WORKAROUND +static const void *lt_preloaded_setup() { + return lt__PROGRAM__LTX_preloaded_symbols; +} +#endif + +#ifdef __cplusplus +} +#endif +_LT_EOF + # Now try linking the two files. + mv conftest.$ac_objext conftstm.$ac_objext + lt_save_LIBS="$LIBS" + lt_save_CFLAGS="$CFLAGS" + LIBS="conftstm.$ac_objext" + CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag" + if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5 + (eval $ac_link) 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } && test -s conftest${ac_exeext}; then + pipe_works=yes + fi + LIBS="$lt_save_LIBS" + CFLAGS="$lt_save_CFLAGS" + else + echo "cannot find nm_test_func in $nlist" >&5 + fi + else + echo "cannot find nm_test_var in $nlist" >&5 + fi + else + echo "cannot run $lt_cv_sys_global_symbol_pipe" >&5 + fi + else + echo "$progname: failed program was:" >&5 + cat conftest.$ac_ext >&5 + fi + rm -rf conftest* conftst* + + # Do not use the global_symbol_pipe unless it works. + if test "$pipe_works" = yes; then + break + else + lt_cv_sys_global_symbol_pipe= + fi +done + +fi + +if test -z "$lt_cv_sys_global_symbol_pipe"; then + lt_cv_sys_global_symbol_to_cdecl= +fi +if test -z "$lt_cv_sys_global_symbol_pipe$lt_cv_sys_global_symbol_to_cdecl"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: failed" >&5 +$as_echo "failed" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: ok" >&5 +$as_echo "ok" >&6; } +fi + + + + + + + + + + + + + + + + + + + + + + +# Check whether --enable-libtool-lock was given. +if test "${enable_libtool_lock+set}" = set; then : + enableval=$enable_libtool_lock; +fi + +test "x$enable_libtool_lock" != xno && enable_libtool_lock=yes + +# Some flags need to be propagated to the compiler or linker for good +# libtool support. +case $host in +ia64-*-hpux*) + # Find out which ABI we are using. + echo 'int i;' > conftest.$ac_ext + if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 + (eval $ac_compile) 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + case `/usr/bin/file conftest.$ac_objext` in + *ELF-32*) + HPUX_IA64_MODE="32" + ;; + *ELF-64*) + HPUX_IA64_MODE="64" + ;; + esac + fi + rm -rf conftest* + ;; +*-*-irix6*) + # Find out which ABI we are using. + echo '#line 6467 "configure"' > conftest.$ac_ext + if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 + (eval $ac_compile) 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 + test $ac_status = 0; }; then + if test "$lt_cv_prog_gnu_ld" = yes; then + case `/usr/bin/file conftest.$ac_objext` in + *32-bit*) + LD="${LD-ld} -melf32bsmip" + ;; + *N32*) + LD="${LD-ld} -melf32bmipn32" + ;; + *64-bit*) + LD="${LD-ld} -melf64bmip" + ;; + esac + else + case `/usr/bin/file conftest.$ac_objext` in + *32-bit*) + LD="${LD-ld} -32" + ;; + *N32*) + LD="${LD-ld} -n32" + ;; + *64-bit*) + LD="${LD-ld} -64" + ;; + esac + fi + fi + rm -rf conftest* + ;; + +x86_64-*kfreebsd*-gnu|x86_64-*linux*|ppc*-*linux*|powerpc*-*linux*| \ +s390*-*linux*|s390*-*tpf*|sparc*-*linux*) + # Find out which ABI we are using. + echo 'int i;' > conftest.$ac_ext + if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 + (eval $ac_compile) 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + case `/usr/bin/file conftest.o` in + *32-bit*) + case $host in + x86_64-*kfreebsd*-gnu) + LD="${LD-ld} -m elf_i386_fbsd" + ;; + x86_64-*linux*) + LD="${LD-ld} -m elf_i386" + ;; + ppc64-*linux*|powerpc64-*linux*) + LD="${LD-ld} -m elf32ppclinux" + ;; + s390x-*linux*) + LD="${LD-ld} -m elf_s390" + ;; + sparc64-*linux*) + LD="${LD-ld} -m elf32_sparc" + ;; + esac + ;; + *64-bit*) + case $host in + x86_64-*kfreebsd*-gnu) + LD="${LD-ld} -m elf_x86_64_fbsd" + ;; + x86_64-*linux*) + LD="${LD-ld} -m elf_x86_64" + ;; + ppc*-*linux*|powerpc*-*linux*) + LD="${LD-ld} -m elf64ppc" + ;; + s390*-*linux*|s390*-*tpf*) + LD="${LD-ld} -m elf64_s390" + ;; + sparc*-*linux*) + LD="${LD-ld} -m elf64_sparc" + ;; + esac + ;; + esac + fi + rm -rf conftest* + ;; + +*-*-sco3.2v5*) + # On SCO OpenServer 5, we need -belf to get full-featured binaries. + SAVE_CFLAGS="$CFLAGS" + CFLAGS="$CFLAGS -belf" + { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the C compiler needs -belf" >&5 +$as_echo_n "checking whether the C compiler needs -belf... " >&6; } +if test "${lt_cv_cc_needs_belf+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu + + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_link "$LINENO"; then : + lt_cv_cc_needs_belf=yes +else + lt_cv_cc_needs_belf=no +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext + ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_cc_needs_belf" >&5 +$as_echo "$lt_cv_cc_needs_belf" >&6; } + if test x"$lt_cv_cc_needs_belf" != x"yes"; then + # this is probably gcc 2.8.0, egcs 1.0 or newer; no need for -belf + CFLAGS="$SAVE_CFLAGS" + fi + ;; +sparc*-*solaris*) + # Find out which ABI we are using. + echo 'int i;' > conftest.$ac_ext + if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 + (eval $ac_compile) 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 + test $ac_status = 0; }; then + case `/usr/bin/file conftest.o` in + *64-bit*) + case $lt_cv_prog_gnu_ld in + yes*) LD="${LD-ld} -m elf64_sparc" ;; + *) + if ${LD-ld} -64 -r -o conftest2.o conftest.o >/dev/null 2>&1; then + LD="${LD-ld} -64" + fi + ;; + esac + ;; + esac + fi + rm -rf conftest* + ;; +esac + +need_locks="$enable_libtool_lock" + + + case $host_os in + rhapsody* | darwin*) + if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}dsymutil", so it can be a program name with args. +set dummy ${ac_tool_prefix}dsymutil; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if test "${ac_cv_prog_DSYMUTIL+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$DSYMUTIL"; then + ac_cv_prog_DSYMUTIL="$DSYMUTIL" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + ac_cv_prog_DSYMUTIL="${ac_tool_prefix}dsymutil" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +DSYMUTIL=$ac_cv_prog_DSYMUTIL +if test -n "$DSYMUTIL"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DSYMUTIL" >&5 +$as_echo "$DSYMUTIL" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + +fi +if test -z "$ac_cv_prog_DSYMUTIL"; then + ac_ct_DSYMUTIL=$DSYMUTIL + # Extract the first word of "dsymutil", so it can be a program name with args. +set dummy dsymutil; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if test "${ac_cv_prog_ac_ct_DSYMUTIL+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_DSYMUTIL"; then + ac_cv_prog_ac_ct_DSYMUTIL="$ac_ct_DSYMUTIL" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + ac_cv_prog_ac_ct_DSYMUTIL="dsymutil" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +ac_ct_DSYMUTIL=$ac_cv_prog_ac_ct_DSYMUTIL +if test -n "$ac_ct_DSYMUTIL"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DSYMUTIL" >&5 +$as_echo "$ac_ct_DSYMUTIL" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + if test "x$ac_ct_DSYMUTIL" = x; then + DSYMUTIL=":" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + DSYMUTIL=$ac_ct_DSYMUTIL + fi +else + DSYMUTIL="$ac_cv_prog_DSYMUTIL" +fi + + if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}nmedit", so it can be a program name with args. +set dummy ${ac_tool_prefix}nmedit; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... 
" >&6; } +if test "${ac_cv_prog_NMEDIT+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$NMEDIT"; then + ac_cv_prog_NMEDIT="$NMEDIT" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + ac_cv_prog_NMEDIT="${ac_tool_prefix}nmedit" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +NMEDIT=$ac_cv_prog_NMEDIT +if test -n "$NMEDIT"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $NMEDIT" >&5 +$as_echo "$NMEDIT" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + +fi +if test -z "$ac_cv_prog_NMEDIT"; then + ac_ct_NMEDIT=$NMEDIT + # Extract the first word of "nmedit", so it can be a program name with args. +set dummy nmedit; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if test "${ac_cv_prog_ac_ct_NMEDIT+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_NMEDIT"; then + ac_cv_prog_ac_ct_NMEDIT="$ac_ct_NMEDIT" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + ac_cv_prog_ac_ct_NMEDIT="nmedit" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +ac_ct_NMEDIT=$ac_cv_prog_ac_ct_NMEDIT +if test -n "$ac_ct_NMEDIT"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_NMEDIT" >&5 +$as_echo "$ac_ct_NMEDIT" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + if test "x$ac_ct_NMEDIT" = x; then + NMEDIT=":" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + NMEDIT=$ac_ct_NMEDIT + fi +else + NMEDIT="$ac_cv_prog_NMEDIT" +fi + + if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}lipo", so it can be a program name with args. +set dummy ${ac_tool_prefix}lipo; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if test "${ac_cv_prog_LIPO+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$LIPO"; then + ac_cv_prog_LIPO="$LIPO" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + ac_cv_prog_LIPO="${ac_tool_prefix}lipo" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +LIPO=$ac_cv_prog_LIPO +if test -n "$LIPO"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LIPO" >&5 +$as_echo "$LIPO" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + +fi +if test -z "$ac_cv_prog_LIPO"; then + ac_ct_LIPO=$LIPO + # Extract the first word of "lipo", so it can be a program name with args. +set dummy lipo; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if test "${ac_cv_prog_ac_ct_LIPO+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_LIPO"; then + ac_cv_prog_ac_ct_LIPO="$ac_ct_LIPO" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + ac_cv_prog_ac_ct_LIPO="lipo" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +ac_ct_LIPO=$ac_cv_prog_ac_ct_LIPO +if test -n "$ac_ct_LIPO"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_LIPO" >&5 +$as_echo "$ac_ct_LIPO" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + if test "x$ac_ct_LIPO" = x; then + LIPO=":" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + LIPO=$ac_ct_LIPO + fi +else + LIPO="$ac_cv_prog_LIPO" +fi + + if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}otool", so it can be a program name with args. +set dummy ${ac_tool_prefix}otool; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if test "${ac_cv_prog_OTOOL+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$OTOOL"; then + ac_cv_prog_OTOOL="$OTOOL" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + ac_cv_prog_OTOOL="${ac_tool_prefix}otool" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +OTOOL=$ac_cv_prog_OTOOL +if test -n "$OTOOL"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OTOOL" >&5 +$as_echo "$OTOOL" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + +fi +if test -z "$ac_cv_prog_OTOOL"; then + ac_ct_OTOOL=$OTOOL + # Extract the first word of "otool", so it can be a program name with args. 
+set dummy otool; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if test "${ac_cv_prog_ac_ct_OTOOL+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_OTOOL"; then + ac_cv_prog_ac_ct_OTOOL="$ac_ct_OTOOL" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + ac_cv_prog_ac_ct_OTOOL="otool" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +ac_ct_OTOOL=$ac_cv_prog_ac_ct_OTOOL +if test -n "$ac_ct_OTOOL"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OTOOL" >&5 +$as_echo "$ac_ct_OTOOL" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + if test "x$ac_ct_OTOOL" = x; then + OTOOL=":" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + OTOOL=$ac_ct_OTOOL + fi +else + OTOOL="$ac_cv_prog_OTOOL" +fi + + if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}otool64", so it can be a program name with args. +set dummy ${ac_tool_prefix}otool64; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if test "${ac_cv_prog_OTOOL64+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$OTOOL64"; then + ac_cv_prog_OTOOL64="$OTOOL64" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + ac_cv_prog_OTOOL64="${ac_tool_prefix}otool64" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +OTOOL64=$ac_cv_prog_OTOOL64 +if test -n "$OTOOL64"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OTOOL64" >&5 +$as_echo "$OTOOL64" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + +fi +if test -z "$ac_cv_prog_OTOOL64"; then + ac_ct_OTOOL64=$OTOOL64 + # Extract the first word of "otool64", so it can be a program name with args. +set dummy otool64; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if test "${ac_cv_prog_ac_ct_OTOOL64+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_OTOOL64"; then + ac_cv_prog_ac_ct_OTOOL64="$ac_ct_OTOOL64" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + ac_cv_prog_ac_ct_OTOOL64="otool64" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +ac_ct_OTOOL64=$ac_cv_prog_ac_ct_OTOOL64 +if test -n "$ac_ct_OTOOL64"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OTOOL64" >&5 +$as_echo "$ac_ct_OTOOL64" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + if test "x$ac_ct_OTOOL64" = x; then + OTOOL64=":" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + OTOOL64=$ac_ct_OTOOL64 + fi +else + OTOOL64="$ac_cv_prog_OTOOL64" +fi + + + + + + + + + + + + + + + + + + + + + + + + + + + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -single_module linker flag" >&5 +$as_echo_n "checking for -single_module linker flag... " >&6; } +if test "${lt_cv_apple_cc_single_mod+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + lt_cv_apple_cc_single_mod=no + if test -z "${LT_MULTI_MODULE}"; then + # By default we will add the -single_module flag. You can override + # by either setting the environment variable LT_MULTI_MODULE + # non-empty at configure time, or by adding -multi_module to the + # link flags. + rm -rf libconftest.dylib* + echo "int foo(void){return 1;}" > conftest.c + echo "$LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \ +-dynamiclib -Wl,-single_module conftest.c" >&5 + $LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \ + -dynamiclib -Wl,-single_module conftest.c 2>conftest.err + _lt_result=$? + if test -f libconftest.dylib && test ! -s conftest.err && test $_lt_result = 0; then + lt_cv_apple_cc_single_mod=yes + else + cat conftest.err >&5 + fi + rm -rf libconftest.dylib* + rm -f conftest.* + fi +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_apple_cc_single_mod" >&5 +$as_echo "$lt_cv_apple_cc_single_mod" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -exported_symbols_list linker flag" >&5 +$as_echo_n "checking for -exported_symbols_list linker flag... " >&6; } +if test "${lt_cv_ld_exported_symbols_list+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + lt_cv_ld_exported_symbols_list=no + save_LDFLAGS=$LDFLAGS + echo "_main" > conftest.sym + LDFLAGS="$LDFLAGS -Wl,-exported_symbols_list,conftest.sym" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_link "$LINENO"; then : + lt_cv_ld_exported_symbols_list=yes +else + lt_cv_ld_exported_symbols_list=no +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext + LDFLAGS="$save_LDFLAGS" + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ld_exported_symbols_list" >&5 +$as_echo "$lt_cv_ld_exported_symbols_list" >&6; } + case $host_os in + rhapsody* | darwin1.[012]) + _lt_dar_allow_undefined='${wl}-undefined ${wl}suppress' ;; + darwin1.*) + _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;; + darwin*) # darwin 5.x on + # if running on 10.5 or later, the deployment target defaults + # to the OS version, if on x86, and 10.4, the deployment + # target defaults to 10.4. Don't you love it? 
+ case ${MACOSX_DEPLOYMENT_TARGET-10.0},$host in + 10.0,*86*-darwin8*|10.0,*-darwin[91]*) + _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;; + 10.[012]*) + _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;; + 10.*) + _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;; + esac + ;; + esac + if test "$lt_cv_apple_cc_single_mod" = "yes"; then + _lt_dar_single_mod='$single_module' + fi + if test "$lt_cv_ld_exported_symbols_list" = "yes"; then + _lt_dar_export_syms=' ${wl}-exported_symbols_list,$output_objdir/${libname}-symbols.expsym' + else + _lt_dar_export_syms='~$NMEDIT -s $output_objdir/${libname}-symbols.expsym ${lib}' + fi + if test "$DSYMUTIL" != ":"; then + _lt_dsymutil='~$DSYMUTIL $lib || :' + else + _lt_dsymutil= + fi + ;; + esac + +for ac_header in dlfcn.h +do : + ac_fn_c_check_header_compile "$LINENO" "dlfcn.h" "ac_cv_header_dlfcn_h" "$ac_includes_default +" +if test "x$ac_cv_header_dlfcn_h" = x""yes; then : + cat >>confdefs.h <<_ACEOF +#define HAVE_DLFCN_H 1 +_ACEOF + +fi + +done + + + +# Set options + + + + enable_dlopen=no + + + enable_win32_dll=no + + + # Check whether --enable-shared was given. +if test "${enable_shared+set}" = set; then : + enableval=$enable_shared; p=${PACKAGE-default} + case $enableval in + yes) enable_shared=yes ;; + no) enable_shared=no ;; + *) + enable_shared=no + # Look at the argument we got. We use all the common list separators. + lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," + for pkg in $enableval; do + IFS="$lt_save_ifs" + if test "X$pkg" = "X$p"; then + enable_shared=yes + fi + done + IFS="$lt_save_ifs" + ;; + esac +else + enable_shared=yes +fi + + + + + + + + + + # Check whether --enable-static was given. +if test "${enable_static+set}" = set; then : + enableval=$enable_static; p=${PACKAGE-default} + case $enableval in + yes) enable_static=yes ;; + no) enable_static=no ;; + *) + enable_static=no + # Look at the argument we got. We use all the common list separators. + lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," + for pkg in $enableval; do + IFS="$lt_save_ifs" + if test "X$pkg" = "X$p"; then + enable_static=yes + fi + done + IFS="$lt_save_ifs" + ;; + esac +else + enable_static=yes +fi + + + + + + + + + + +# Check whether --with-pic was given. +if test "${with_pic+set}" = set; then : + withval=$with_pic; pic_mode="$withval" +else + pic_mode=default +fi + + +test -z "$pic_mode" && pic_mode=default + + + + + + + + # Check whether --enable-fast-install was given. +if test "${enable_fast_install+set}" = set; then : + enableval=$enable_fast_install; p=${PACKAGE-default} + case $enableval in + yes) enable_fast_install=yes ;; + no) enable_fast_install=no ;; + *) + enable_fast_install=no + # Look at the argument we got. We use all the common list separators. + lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," + for pkg in $enableval; do + IFS="$lt_save_ifs" + if test "X$pkg" = "X$p"; then + enable_fast_install=yes + fi + done + IFS="$lt_save_ifs" + ;; + esac +else + enable_fast_install=yes +fi + + + + + + + + + + + +# This can be used to rebuild libtool when needed +LIBTOOL_DEPS="$ltmain" + +# Always use our own libtool. +LIBTOOL='$(SHELL) $(top_builddir)/libtool' + + + + + + + + + + + + + + + + + + + + + + + + + +test -z "$LN_S" && LN_S="ln -s" + + + + + + + + + + + + + + +if test -n "${ZSH_VERSION+set}" ; then + setopt NO_GLOB_SUBST +fi + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for objdir" >&5 +$as_echo_n "checking for objdir... 
" >&6; } +if test "${lt_cv_objdir+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + rm -f .libs 2>/dev/null +mkdir .libs 2>/dev/null +if test -d .libs; then + lt_cv_objdir=.libs +else + # MS-DOS does not allow filenames that begin with a dot. + lt_cv_objdir=_libs +fi +rmdir .libs 2>/dev/null +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_objdir" >&5 +$as_echo "$lt_cv_objdir" >&6; } +objdir=$lt_cv_objdir + + + + + +cat >>confdefs.h <<_ACEOF +#define LT_OBJDIR "$lt_cv_objdir/" +_ACEOF + + + + + + + + + + + + + + + + + +case $host_os in +aix3*) + # AIX sometimes has problems with the GCC collect2 program. For some + # reason, if we set the COLLECT_NAMES environment variable, the problems + # vanish in a puff of smoke. + if test "X${COLLECT_NAMES+set}" != Xset; then + COLLECT_NAMES= + export COLLECT_NAMES + fi + ;; +esac + +# Sed substitution that helps us do robust quoting. It backslashifies +# metacharacters that are still active within double-quoted strings. +sed_quote_subst='s/\(["`$\\]\)/\\\1/g' + +# Same as above, but do not quote variable references. +double_quote_subst='s/\(["`\\]\)/\\\1/g' + +# Sed substitution to delay expansion of an escaped shell variable in a +# double_quote_subst'ed string. +delay_variable_subst='s/\\\\\\\\\\\$/\\\\\\$/g' + +# Sed substitution to delay expansion of an escaped single quote. +delay_single_quote_subst='s/'\''/'\'\\\\\\\'\''/g' + +# Sed substitution to avoid accidental globbing in evaled expressions +no_glob_subst='s/\*/\\\*/g' + +# Global variables: +ofile=libtool +can_build_shared=yes + +# All known linkers require a `.a' archive for static linking (except MSVC, +# which needs '.lib'). +libext=a + +with_gnu_ld="$lt_cv_prog_gnu_ld" + +old_CC="$CC" +old_CFLAGS="$CFLAGS" + +# Set sane defaults for various variables +test -z "$CC" && CC=cc +test -z "$LTCC" && LTCC=$CC +test -z "$LTCFLAGS" && LTCFLAGS=$CFLAGS +test -z "$LD" && LD=ld +test -z "$ac_objext" && ac_objext=o + +for cc_temp in $compiler""; do + case $cc_temp in + compile | *[\\/]compile | ccache | *[\\/]ccache ) ;; + distcc | *[\\/]distcc | purify | *[\\/]purify ) ;; + \-*) ;; + *) break;; + esac +done +cc_basename=`$ECHO "X$cc_temp" | $Xsed -e 's%.*/%%' -e "s%^$host_alias-%%"` + + +# Only perform the check for file, if the check method requires it +test -z "$MAGIC_CMD" && MAGIC_CMD=file +case $deplibs_check_method in +file_magic*) + if test "$file_magic_cmd" = '$MAGIC_CMD'; then + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ${ac_tool_prefix}file" >&5 +$as_echo_n "checking for ${ac_tool_prefix}file... " >&6; } +if test "${lt_cv_path_MAGIC_CMD+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + case $MAGIC_CMD in +[\\/*] | ?:[\\/]*) + lt_cv_path_MAGIC_CMD="$MAGIC_CMD" # Let the user override the test with a path. + ;; +*) + lt_save_MAGIC_CMD="$MAGIC_CMD" + lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR + ac_dummy="/usr/bin$PATH_SEPARATOR$PATH" + for ac_dir in $ac_dummy; do + IFS="$lt_save_ifs" + test -z "$ac_dir" && ac_dir=. 
+ if test -f $ac_dir/${ac_tool_prefix}file; then + lt_cv_path_MAGIC_CMD="$ac_dir/${ac_tool_prefix}file" + if test -n "$file_magic_test_file"; then + case $deplibs_check_method in + "file_magic "*) + file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"` + MAGIC_CMD="$lt_cv_path_MAGIC_CMD" + if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null | + $EGREP "$file_magic_regex" > /dev/null; then + : + else + cat <<_LT_EOF 1>&2 + +*** Warning: the command libtool uses to detect shared libraries, +*** $file_magic_cmd, produces output that libtool cannot recognize. +*** The result is that libtool may fail to recognize shared libraries +*** as such. This will affect the creation of libtool libraries that +*** depend on shared libraries, but programs linked with such libtool +*** libraries will work regardless of this problem. Nevertheless, you +*** may want to report the problem to your system manager and/or to +*** bug-libtool@gnu.org + +_LT_EOF + fi ;; + esac + fi + break + fi + done + IFS="$lt_save_ifs" + MAGIC_CMD="$lt_save_MAGIC_CMD" + ;; +esac +fi + +MAGIC_CMD="$lt_cv_path_MAGIC_CMD" +if test -n "$MAGIC_CMD"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MAGIC_CMD" >&5 +$as_echo "$MAGIC_CMD" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + + + +if test -z "$lt_cv_path_MAGIC_CMD"; then + if test -n "$ac_tool_prefix"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for file" >&5 +$as_echo_n "checking for file... " >&6; } +if test "${lt_cv_path_MAGIC_CMD+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + case $MAGIC_CMD in +[\\/*] | ?:[\\/]*) + lt_cv_path_MAGIC_CMD="$MAGIC_CMD" # Let the user override the test with a path. + ;; +*) + lt_save_MAGIC_CMD="$MAGIC_CMD" + lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR + ac_dummy="/usr/bin$PATH_SEPARATOR$PATH" + for ac_dir in $ac_dummy; do + IFS="$lt_save_ifs" + test -z "$ac_dir" && ac_dir=. + if test -f $ac_dir/file; then + lt_cv_path_MAGIC_CMD="$ac_dir/file" + if test -n "$file_magic_test_file"; then + case $deplibs_check_method in + "file_magic "*) + file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"` + MAGIC_CMD="$lt_cv_path_MAGIC_CMD" + if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null | + $EGREP "$file_magic_regex" > /dev/null; then + : + else + cat <<_LT_EOF 1>&2 + +*** Warning: the command libtool uses to detect shared libraries, +*** $file_magic_cmd, produces output that libtool cannot recognize. +*** The result is that libtool may fail to recognize shared libraries +*** as such. This will affect the creation of libtool libraries that +*** depend on shared libraries, but programs linked with such libtool +*** libraries will work regardless of this problem. 
Nevertheless, you +*** may want to report the problem to your system manager and/or to +*** bug-libtool@gnu.org + +_LT_EOF + fi ;; + esac + fi + break + fi + done + IFS="$lt_save_ifs" + MAGIC_CMD="$lt_save_MAGIC_CMD" + ;; +esac +fi + +MAGIC_CMD="$lt_cv_path_MAGIC_CMD" +if test -n "$MAGIC_CMD"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MAGIC_CMD" >&5 +$as_echo "$MAGIC_CMD" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + else + MAGIC_CMD=: + fi +fi + + fi + ;; +esac + +# Use C for the default configuration in the libtool script + +lt_save_CC="$CC" +ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu + + +# Source file extension for C test sources. +ac_ext=c + +# Object file extension for compiled C test sources. +objext=o +objext=$objext + +# Code to be used in simple compile tests +lt_simple_compile_test_code="int some_variable = 0;" + +# Code to be used in simple link tests +lt_simple_link_test_code='int main(){return(0);}' + + + + + + + +# If no C compiler was specified, use CC. +LTCC=${LTCC-"$CC"} + +# If no C compiler flags were specified, use CFLAGS. +LTCFLAGS=${LTCFLAGS-"$CFLAGS"} + +# Allow CC to be a program name with arguments. +compiler=$CC + +# Save the default compiler, since it gets overwritten when the other +# tags are being tested, and _LT_TAGVAR(compiler, []) is a NOP. +compiler_DEFAULT=$CC + +# save warnings/boilerplate of simple test code +ac_outfile=conftest.$ac_objext +echo "$lt_simple_compile_test_code" >conftest.$ac_ext +eval "$ac_compile" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err +_lt_compiler_boilerplate=`cat conftest.err` +$RM conftest* + +ac_outfile=conftest.$ac_objext +echo "$lt_simple_link_test_code" >conftest.$ac_ext +eval "$ac_link" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err +_lt_linker_boilerplate=`cat conftest.err` +$RM -r conftest* + + +## CAVEAT EMPTOR: +## There is no encapsulation within the following macros, do not change +## the running order or otherwise move them around unless you know exactly +## what you are doing... +if test -n "$compiler"; then + +lt_prog_compiler_no_builtin_flag= + +if test "$GCC" = yes; then + lt_prog_compiler_no_builtin_flag=' -fno-builtin' + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -fno-rtti -fno-exceptions" >&5 +$as_echo_n "checking if $compiler supports -fno-rtti -fno-exceptions... " >&6; } +if test "${lt_cv_prog_compiler_rtti_exceptions+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + lt_cv_prog_compiler_rtti_exceptions=no + ac_outfile=conftest.$ac_objext + echo "$lt_simple_compile_test_code" > conftest.$ac_ext + lt_compiler_flag="-fno-rtti -fno-exceptions" + # Insert the option either (1) after the last *FLAGS variable, or + # (2) before a word containing "conftest.", or (3) at the end. + # Note that $ac_compile itself does not contain backslashes and begins + # with a dollar sign (not a hyphen), so the echo should work correctly. + # The option is referenced via a variable to avoid confusing sed. + lt_compile=`echo "$ac_compile" | $SED \ + -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ + -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ + -e 's:$: $lt_compiler_flag:'` + (eval echo "\"\$as_me:7729: $lt_compile\"" >&5) + (eval "$lt_compile" 2>conftest.err) + ac_status=$? + cat conftest.err >&5 + echo "$as_me:7733: \$? 
= $ac_status" >&5 + if (exit $ac_status) && test -s "$ac_outfile"; then + # The compiler can only warn and ignore the option if not recognized + # So say no if there are warnings other than the usual output. + $ECHO "X$_lt_compiler_boilerplate" | $Xsed -e '/^$/d' >conftest.exp + $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 + if test ! -s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then + lt_cv_prog_compiler_rtti_exceptions=yes + fi + fi + $RM conftest* + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_rtti_exceptions" >&5 +$as_echo "$lt_cv_prog_compiler_rtti_exceptions" >&6; } + +if test x"$lt_cv_prog_compiler_rtti_exceptions" = xyes; then + lt_prog_compiler_no_builtin_flag="$lt_prog_compiler_no_builtin_flag -fno-rtti -fno-exceptions" +else + : +fi + +fi + + + + + + + lt_prog_compiler_wl= +lt_prog_compiler_pic= +lt_prog_compiler_static= + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 +$as_echo_n "checking for $compiler option to produce PIC... " >&6; } + + if test "$GCC" = yes; then + lt_prog_compiler_wl='-Wl,' + lt_prog_compiler_static='-static' + + case $host_os in + aix*) + # All AIX code is PIC. + if test "$host_cpu" = ia64; then + # AIX 5 now supports IA64 processor + lt_prog_compiler_static='-Bstatic' + fi + ;; + + amigaos*) + case $host_cpu in + powerpc) + # see comment about AmigaOS4 .so support + lt_prog_compiler_pic='-fPIC' + ;; + m68k) + # FIXME: we need at least 68020 code to build shared libraries, but + # adding the `-m68020' flag to GCC prevents building anything better, + # like `-m68040'. + lt_prog_compiler_pic='-m68020 -resident32 -malways-restore-a4' + ;; + esac + ;; + + beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*) + # PIC is the default for these OSes. + ;; + + mingw* | cygwin* | pw32* | os2*) + # This hack is so that the source file can tell whether it is being + # built for inclusion in a dll (and should export symbols for example). + # Although the cygwin gcc ignores -fPIC, still need this for old-style + # (--disable-auto-import) libraries + lt_prog_compiler_pic='-DDLL_EXPORT' + ;; + + darwin* | rhapsody*) + # PIC is the default on this platform + # Common symbols not allowed in MH_DYLIB files + lt_prog_compiler_pic='-fno-common' + ;; + + hpux*) + # PIC is the default for IA64 HP-UX and 64-bit HP-UX, but + # not for PA HP-UX. + case $host_cpu in + hppa*64*|ia64*) + # +Z the default + ;; + *) + lt_prog_compiler_pic='-fPIC' + ;; + esac + ;; + + interix[3-9]*) + # Interix 3.x gcc -fpic/-fPIC options generate broken code. + # Instead, we relocate shared libraries at runtime. + ;; + + msdosdjgpp*) + # Just because we use GCC doesn't mean we suddenly get shared libraries + # on systems that don't support them. + lt_prog_compiler_can_build_shared=no + enable_shared=no + ;; + + *nto* | *qnx*) + # QNX uses GNU C++, but need to define -shared option too, otherwise + # it will coredump. + lt_prog_compiler_pic='-fPIC -shared' + ;; + + sysv4*MP*) + if test -d /usr/nec; then + lt_prog_compiler_pic=-Kconform_pic + fi + ;; + + *) + lt_prog_compiler_pic='-fPIC' + ;; + esac + else + # PORTME Check for flag to pass linker flags through the system compiler. 
+ case $host_os in + aix*) + lt_prog_compiler_wl='-Wl,' + if test "$host_cpu" = ia64; then + # AIX 5 now supports IA64 processor + lt_prog_compiler_static='-Bstatic' + else + lt_prog_compiler_static='-bnso -bI:/lib/syscalls.exp' + fi + ;; + + mingw* | cygwin* | pw32* | os2*) + # This hack is so that the source file can tell whether it is being + # built for inclusion in a dll (and should export symbols for example). + lt_prog_compiler_pic='-DDLL_EXPORT' + ;; + + hpux9* | hpux10* | hpux11*) + lt_prog_compiler_wl='-Wl,' + # PIC is the default for IA64 HP-UX and 64-bit HP-UX, but + # not for PA HP-UX. + case $host_cpu in + hppa*64*|ia64*) + # +Z the default + ;; + *) + lt_prog_compiler_pic='+Z' + ;; + esac + # Is there a better lt_prog_compiler_static that works with the bundled CC? + lt_prog_compiler_static='${wl}-a ${wl}archive' + ;; + + irix5* | irix6* | nonstopux*) + lt_prog_compiler_wl='-Wl,' + # PIC (with -KPIC) is the default. + lt_prog_compiler_static='-non_shared' + ;; + + linux* | k*bsd*-gnu) + case $cc_basename in + icc* | ecc* | ifort*) + lt_prog_compiler_wl='-Wl,' + lt_prog_compiler_pic='-KPIC' + lt_prog_compiler_static='-static' + ;; + pgcc* | pgf77* | pgf90* | pgf95*) + # Portland Group compilers (*not* the Pentium gcc compiler, + # which looks to be a dead project) + lt_prog_compiler_wl='-Wl,' + lt_prog_compiler_pic='-fpic' + lt_prog_compiler_static='-Bstatic' + ;; + ccc*) + lt_prog_compiler_wl='-Wl,' + # All Alpha code is PIC. + lt_prog_compiler_static='-non_shared' + ;; + xl*) + # IBM XL C 8.0/Fortran 10.1 on PPC + lt_prog_compiler_wl='-Wl,' + lt_prog_compiler_pic='-qpic' + lt_prog_compiler_static='-qstaticlink' + ;; + *) + case `$CC -V 2>&1 | sed 5q` in + *Sun\ C*) + # Sun C 5.9 + lt_prog_compiler_pic='-KPIC' + lt_prog_compiler_static='-Bstatic' + lt_prog_compiler_wl='-Wl,' + ;; + *Sun\ F*) + # Sun Fortran 8.3 passes all unrecognized flags to the linker + lt_prog_compiler_pic='-KPIC' + lt_prog_compiler_static='-Bstatic' + lt_prog_compiler_wl='' + ;; + esac + ;; + esac + ;; + + newsos6) + lt_prog_compiler_pic='-KPIC' + lt_prog_compiler_static='-Bstatic' + ;; + + *nto* | *qnx*) + # QNX uses GNU C++, but need to define -shared option too, otherwise + # it will coredump. + lt_prog_compiler_pic='-fPIC -shared' + ;; + + osf3* | osf4* | osf5*) + lt_prog_compiler_wl='-Wl,' + # All OSF/1 code is PIC. 
+ lt_prog_compiler_static='-non_shared' + ;; + + rdos*) + lt_prog_compiler_static='-non_shared' + ;; + + solaris*) + lt_prog_compiler_pic='-KPIC' + lt_prog_compiler_static='-Bstatic' + case $cc_basename in + f77* | f90* | f95*) + lt_prog_compiler_wl='-Qoption ld ';; + *) + lt_prog_compiler_wl='-Wl,';; + esac + ;; + + sunos4*) + lt_prog_compiler_wl='-Qoption ld ' + lt_prog_compiler_pic='-PIC' + lt_prog_compiler_static='-Bstatic' + ;; + + sysv4 | sysv4.2uw2* | sysv4.3*) + lt_prog_compiler_wl='-Wl,' + lt_prog_compiler_pic='-KPIC' + lt_prog_compiler_static='-Bstatic' + ;; + + sysv4*MP*) + if test -d /usr/nec ;then + lt_prog_compiler_pic='-Kconform_pic' + lt_prog_compiler_static='-Bstatic' + fi + ;; + + sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*) + lt_prog_compiler_wl='-Wl,' + lt_prog_compiler_pic='-KPIC' + lt_prog_compiler_static='-Bstatic' + ;; + + unicos*) + lt_prog_compiler_wl='-Wl,' + lt_prog_compiler_can_build_shared=no + ;; + + uts4*) + lt_prog_compiler_pic='-pic' + lt_prog_compiler_static='-Bstatic' + ;; + + *) + lt_prog_compiler_can_build_shared=no + ;; + esac + fi + +case $host_os in + # For platforms which do not support PIC, -DPIC is meaningless: + *djgpp*) + lt_prog_compiler_pic= + ;; + *) + lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC" + ;; +esac +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_prog_compiler_pic" >&5 +$as_echo "$lt_prog_compiler_pic" >&6; } + + + + + + +# +# Check to make sure the PIC flag actually works. +# +if test -n "$lt_prog_compiler_pic"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler PIC flag $lt_prog_compiler_pic works" >&5 +$as_echo_n "checking if $compiler PIC flag $lt_prog_compiler_pic works... " >&6; } +if test "${lt_cv_prog_compiler_pic_works+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + lt_cv_prog_compiler_pic_works=no + ac_outfile=conftest.$ac_objext + echo "$lt_simple_compile_test_code" > conftest.$ac_ext + lt_compiler_flag="$lt_prog_compiler_pic -DPIC" + # Insert the option either (1) after the last *FLAGS variable, or + # (2) before a word containing "conftest.", or (3) at the end. + # Note that $ac_compile itself does not contain backslashes and begins + # with a dollar sign (not a hyphen), so the echo should work correctly. + # The option is referenced via a variable to avoid confusing sed. + lt_compile=`echo "$ac_compile" | $SED \ + -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ + -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ + -e 's:$: $lt_compiler_flag:'` + (eval echo "\"\$as_me:8053: $lt_compile\"" >&5) + (eval "$lt_compile" 2>conftest.err) + ac_status=$? + cat conftest.err >&5 + echo "$as_me:8057: \$? = $ac_status" >&5 + if (exit $ac_status) && test -s "$ac_outfile"; then + # The compiler can only warn and ignore the option if not recognized + # So say no if there are warnings other than the usual output. + $ECHO "X$_lt_compiler_boilerplate" | $Xsed -e '/^$/d' >conftest.exp + $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 + if test ! 
-s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then + lt_cv_prog_compiler_pic_works=yes + fi + fi + $RM conftest* + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic_works" >&5 +$as_echo "$lt_cv_prog_compiler_pic_works" >&6; } + +if test x"$lt_cv_prog_compiler_pic_works" = xyes; then + case $lt_prog_compiler_pic in + "" | " "*) ;; + *) lt_prog_compiler_pic=" $lt_prog_compiler_pic" ;; + esac +else + lt_prog_compiler_pic= + lt_prog_compiler_can_build_shared=no +fi + +fi + + + + + + +# +# Check to make sure the static flag actually works. +# +wl=$lt_prog_compiler_wl eval lt_tmp_static_flag=\"$lt_prog_compiler_static\" +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler static flag $lt_tmp_static_flag works" >&5 +$as_echo_n "checking if $compiler static flag $lt_tmp_static_flag works... " >&6; } +if test "${lt_cv_prog_compiler_static_works+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + lt_cv_prog_compiler_static_works=no + save_LDFLAGS="$LDFLAGS" + LDFLAGS="$LDFLAGS $lt_tmp_static_flag" + echo "$lt_simple_link_test_code" > conftest.$ac_ext + if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then + # The linker can only warn and ignore the option if not recognized + # So say no if there are warnings + if test -s conftest.err; then + # Append any errors to the config.log. + cat conftest.err 1>&5 + $ECHO "X$_lt_linker_boilerplate" | $Xsed -e '/^$/d' > conftest.exp + $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 + if diff conftest.exp conftest.er2 >/dev/null; then + lt_cv_prog_compiler_static_works=yes + fi + else + lt_cv_prog_compiler_static_works=yes + fi + fi + $RM -r conftest* + LDFLAGS="$save_LDFLAGS" + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_static_works" >&5 +$as_echo "$lt_cv_prog_compiler_static_works" >&6; } + +if test x"$lt_cv_prog_compiler_static_works" = xyes; then + : +else + lt_prog_compiler_static= +fi + + + + + + + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5 +$as_echo_n "checking if $compiler supports -c -o file.$ac_objext... " >&6; } +if test "${lt_cv_prog_compiler_c_o+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + lt_cv_prog_compiler_c_o=no + $RM -r conftest 2>/dev/null + mkdir conftest + cd conftest + mkdir out + echo "$lt_simple_compile_test_code" > conftest.$ac_ext + + lt_compiler_flag="-o out/conftest2.$ac_objext" + # Insert the option either (1) after the last *FLAGS variable, or + # (2) before a word containing "conftest.", or (3) at the end. + # Note that $ac_compile itself does not contain backslashes and begins + # with a dollar sign (not a hyphen), so the echo should work correctly. + lt_compile=`echo "$ac_compile" | $SED \ + -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ + -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ + -e 's:$: $lt_compiler_flag:'` + (eval echo "\"\$as_me:8158: $lt_compile\"" >&5) + (eval "$lt_compile" 2>out/conftest.err) + ac_status=$? + cat out/conftest.err >&5 + echo "$as_me:8162: \$? = $ac_status" >&5 + if (exit $ac_status) && test -s out/conftest2.$ac_objext + then + # The compiler can only warn and ignore the option if not recognized + # So say no if there are warnings + $ECHO "X$_lt_compiler_boilerplate" | $Xsed -e '/^$/d' > out/conftest.exp + $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2 + if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then + lt_cv_prog_compiler_c_o=yes + fi + fi + chmod u+w . 
2>&5 + $RM conftest* + # SGI C++ compiler will create directory out/ii_files/ for + # template instantiation + test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files + $RM out/* && rmdir out + cd .. + $RM -r conftest + $RM conftest* + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o" >&5 +$as_echo "$lt_cv_prog_compiler_c_o" >&6; } + + + + + + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5 +$as_echo_n "checking if $compiler supports -c -o file.$ac_objext... " >&6; } +if test "${lt_cv_prog_compiler_c_o+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + lt_cv_prog_compiler_c_o=no + $RM -r conftest 2>/dev/null + mkdir conftest + cd conftest + mkdir out + echo "$lt_simple_compile_test_code" > conftest.$ac_ext + + lt_compiler_flag="-o out/conftest2.$ac_objext" + # Insert the option either (1) after the last *FLAGS variable, or + # (2) before a word containing "conftest.", or (3) at the end. + # Note that $ac_compile itself does not contain backslashes and begins + # with a dollar sign (not a hyphen), so the echo should work correctly. + lt_compile=`echo "$ac_compile" | $SED \ + -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ + -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ + -e 's:$: $lt_compiler_flag:'` + (eval echo "\"\$as_me:8213: $lt_compile\"" >&5) + (eval "$lt_compile" 2>out/conftest.err) + ac_status=$? + cat out/conftest.err >&5 + echo "$as_me:8217: \$? = $ac_status" >&5 + if (exit $ac_status) && test -s out/conftest2.$ac_objext + then + # The compiler can only warn and ignore the option if not recognized + # So say no if there are warnings + $ECHO "X$_lt_compiler_boilerplate" | $Xsed -e '/^$/d' > out/conftest.exp + $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2 + if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then + lt_cv_prog_compiler_c_o=yes + fi + fi + chmod u+w . 2>&5 + $RM conftest* + # SGI C++ compiler will create directory out/ii_files/ for + # template instantiation + test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files + $RM out/* && rmdir out + cd .. + $RM -r conftest + $RM conftest* + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o" >&5 +$as_echo "$lt_cv_prog_compiler_c_o" >&6; } + + + + +hard_links="nottested" +if test "$lt_cv_prog_compiler_c_o" = no && test "$need_locks" != no; then + # do not overwrite the value of need_locks provided by the user + { $as_echo "$as_me:${as_lineno-$LINENO}: checking if we can lock with hard links" >&5 +$as_echo_n "checking if we can lock with hard links... " >&6; } + hard_links=yes + $RM conftest* + ln conftest.a conftest.b 2>/dev/null && hard_links=no + touch conftest.a + ln conftest.a conftest.b 2>&5 || hard_links=no + ln conftest.a conftest.b 2>/dev/null && hard_links=no + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $hard_links" >&5 +$as_echo "$hard_links" >&6; } + if test "$hard_links" = no; then + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&5 +$as_echo "$as_me: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&2;} + need_locks=warn + fi +else + need_locks=no +fi + + + + + + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $compiler linker ($LD) supports shared libraries" >&5 +$as_echo_n "checking whether the $compiler linker ($LD) supports shared libraries... 
" >&6; } + + runpath_var= + allow_undefined_flag= + always_export_symbols=no + archive_cmds= + archive_expsym_cmds= + compiler_needs_object=no + enable_shared_with_static_runtimes=no + export_dynamic_flag_spec= + export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' + hardcode_automatic=no + hardcode_direct=no + hardcode_direct_absolute=no + hardcode_libdir_flag_spec= + hardcode_libdir_flag_spec_ld= + hardcode_libdir_separator= + hardcode_minus_L=no + hardcode_shlibpath_var=unsupported + inherit_rpath=no + link_all_deplibs=unknown + module_cmds= + module_expsym_cmds= + old_archive_from_new_cmds= + old_archive_from_expsyms_cmds= + thread_safe_flag_spec= + whole_archive_flag_spec= + # include_expsyms should be a list of space-separated symbols to be *always* + # included in the symbol list + include_expsyms= + # exclude_expsyms can be an extended regexp of symbols to exclude + # it will be wrapped by ` (' and `)$', so one must not match beginning or + # end of line. Example: `a|bc|.*d.*' will exclude the symbols `a' and `bc', + # as well as any symbol that contains `d'. + exclude_expsyms='_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*' + # Although _GLOBAL_OFFSET_TABLE_ is a valid symbol C name, most a.out + # platforms (ab)use it in PIC code, but their linkers get confused if + # the symbol is explicitly referenced. Since portable code cannot + # rely on this symbol name, it's probably fine to never include it in + # preloaded symbol tables. + # Exclude shared library initialization/finalization symbols. + extract_expsyms_cmds= + + case $host_os in + cygwin* | mingw* | pw32*) + # FIXME: the MSVC++ port hasn't been tested in a loooong time + # When not using gcc, we currently assume that we are using + # Microsoft Visual C++. + if test "$GCC" != yes; then + with_gnu_ld=no + fi + ;; + interix*) + # we just hope/assume this is gcc and not c89 (= MSVC++) + with_gnu_ld=yes + ;; + openbsd*) + with_gnu_ld=no + ;; + esac + + ld_shlibs=yes + if test "$with_gnu_ld" = yes; then + # If archive_cmds runs LD, not CC, wlarc should be empty + wlarc='${wl}' + + # Set some defaults for GNU ld with shared library support. These + # are reset later if shared libraries are not supported. Putting them + # here allows them to be overridden if necessary. + runpath_var=LD_RUN_PATH + hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' + export_dynamic_flag_spec='${wl}--export-dynamic' + # ancient GNU ld didn't support --whole-archive et. al. + if $LD --help 2>&1 | $GREP 'no-whole-archive' > /dev/null; then + whole_archive_flag_spec="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive' + else + whole_archive_flag_spec= + fi + supports_anon_versioning=no + case `$LD -v 2>&1` in + *\ [01].* | *\ 2.[0-9].* | *\ 2.10.*) ;; # catch versions < 2.11 + *\ 2.11.93.0.2\ *) supports_anon_versioning=yes ;; # RH7.3 ... + *\ 2.11.92.0.12\ *) supports_anon_versioning=yes ;; # Mandrake 8.2 ... + *\ 2.11.*) ;; # other 2.11 versions + *) supports_anon_versioning=yes ;; + esac + + # See if GNU ld supports shared libraries. + case $host_os in + aix[3-9]*) + # On AIX/PPC, the GNU linker is very broken + if test "$host_cpu" != ia64; then + ld_shlibs=no + cat <<_LT_EOF 1>&2 + +*** Warning: the GNU linker, at least up to release 2.9.1, is reported +*** to be unable to reliably create shared libraries on AIX. +*** Therefore, libtool is disabling shared libraries support. 
If you +*** really care for shared libraries, you may want to modify your PATH +*** so that a non-GNU linker is found, and then restart. + +_LT_EOF + fi + ;; + + amigaos*) + case $host_cpu in + powerpc) + # see comment about AmigaOS4 .so support + archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' + archive_expsym_cmds='' + ;; + m68k) + archive_cmds='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)' + hardcode_libdir_flag_spec='-L$libdir' + hardcode_minus_L=yes + ;; + esac + ;; + + beos*) + if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then + allow_undefined_flag=unsupported + # Joseph Beckenbach says some releases of gcc + # support --undefined. This deserves some investigation. FIXME + archive_cmds='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' + else + ld_shlibs=no + fi + ;; + + cygwin* | mingw* | pw32*) + # _LT_TAGVAR(hardcode_libdir_flag_spec, ) is actually meaningless, + # as there is no search path for DLLs. + hardcode_libdir_flag_spec='-L$libdir' + allow_undefined_flag=unsupported + always_export_symbols=no + enable_shared_with_static_runtimes=yes + export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols' + + if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then + archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' + # If the export-symbols file already is a .def file (1st line + # is EXPORTS), use it as is; otherwise, prepend... + archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then + cp $export_symbols $output_objdir/$soname.def; + else + echo EXPORTS > $output_objdir/$soname.def; + cat $export_symbols >> $output_objdir/$soname.def; + fi~ + $CC -shared $output_objdir/$soname.def $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' + else + ld_shlibs=no + fi + ;; + + interix[3-9]*) + hardcode_direct=no + hardcode_shlibpath_var=no + hardcode_libdir_flag_spec='${wl}-rpath,$libdir' + export_dynamic_flag_spec='${wl}-E' + # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc. + # Instead, shared libraries are loaded at an image base (0x10000000 by + # default) and relocated if they conflict, which is a slow very memory + # consuming and fragmenting process. To avoid this, we pick a random, + # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link + # time. Moving up from 0x10000000 also allows more sbrk(2) space. 
+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' + archive_expsym_cmds='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' + ;; + + gnu* | linux* | tpf* | k*bsd*-gnu) + tmp_diet=no + if test "$host_os" = linux-dietlibc; then + case $cc_basename in + diet\ *) tmp_diet=yes;; # linux-dietlibc with static linking (!diet-dyn) + esac + fi + if $LD --help 2>&1 | $EGREP ': supported targets:.* elf' > /dev/null \ + && test "$tmp_diet" = no + then + tmp_addflag= + tmp_sharedflag='-shared' + case $cc_basename,$host_cpu in + pgcc*) # Portland Group C compiler + whole_archive_flag_spec='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; $ECHO \"$new_convenience\"` ${wl}--no-whole-archive' + tmp_addflag=' $pic_flag' + ;; + pgf77* | pgf90* | pgf95*) # Portland Group f77 and f90 compilers + whole_archive_flag_spec='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; $ECHO \"$new_convenience\"` ${wl}--no-whole-archive' + tmp_addflag=' $pic_flag -Mnomain' ;; + ecc*,ia64* | icc*,ia64*) # Intel C compiler on ia64 + tmp_addflag=' -i_dynamic' ;; + efc*,ia64* | ifort*,ia64*) # Intel Fortran compiler on ia64 + tmp_addflag=' -i_dynamic -nofor_main' ;; + ifc* | ifort*) # Intel Fortran compiler + tmp_addflag=' -nofor_main' ;; + xl[cC]*) # IBM XL C 8.0 on PPC (deal with xlf below) + tmp_sharedflag='-qmkshrobj' + tmp_addflag= ;; + esac + case `$CC -V 2>&1 | sed 5q` in + *Sun\ C*) # Sun C 5.9 + whole_archive_flag_spec='${wl}--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; $ECHO \"$new_convenience\"` ${wl}--no-whole-archive' + compiler_needs_object=yes + tmp_sharedflag='-G' ;; + *Sun\ F*) # Sun Fortran 8.3 + tmp_sharedflag='-G' ;; + esac + archive_cmds='$CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' + + if test "x$supports_anon_versioning" = xyes; then + archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~ + cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ + echo "local: *; };" >> $output_objdir/$libname.ver~ + $CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib' + fi + + case $cc_basename in + xlf*) + # IBM XL Fortran 10.1 on PPC cannot create shared libs itself + whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive' + hardcode_libdir_flag_spec= + hardcode_libdir_flag_spec_ld='-rpath $libdir' + archive_cmds='$LD -shared $libobjs $deplibs $compiler_flags -soname $soname -o $lib' + if test "x$supports_anon_versioning" = xyes; then + archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~ + cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ + echo "local: *; };" >> $output_objdir/$libname.ver~ + $LD -shared $libobjs $deplibs $compiler_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' + fi + ;; + esac + else + ld_shlibs=no + fi + ;; + + netbsd*) + if echo __ELF__ | $CC -E - 
| $GREP __ELF__ >/dev/null; then + archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib' + wlarc= + else + archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' + archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' + fi + ;; + + solaris*) + if $LD -v 2>&1 | $GREP 'BFD 2\.8' > /dev/null; then + ld_shlibs=no + cat <<_LT_EOF 1>&2 + +*** Warning: The releases 2.8.* of the GNU linker cannot reliably +*** create shared libraries on Solaris systems. Therefore, libtool +*** is disabling shared libraries support. We urge you to upgrade GNU +*** binutils to release 2.9.1 or newer. Another option is to modify +*** your PATH or compiler configuration so that the native linker is +*** used, and then restart. + +_LT_EOF + elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then + archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' + archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' + else + ld_shlibs=no + fi + ;; + + sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX*) + case `$LD -v 2>&1` in + *\ [01].* | *\ 2.[0-9].* | *\ 2.1[0-5].*) + ld_shlibs=no + cat <<_LT_EOF 1>&2 + +*** Warning: Releases of the GNU linker prior to 2.16.91.0.3 can not +*** reliably create shared libraries on SCO systems. Therefore, libtool +*** is disabling shared libraries support. We urge you to upgrade GNU +*** binutils to release 2.16.91.0.3 or newer. Another option is to modify +*** your PATH or compiler configuration so that the native linker is +*** used, and then restart. + +_LT_EOF + ;; + *) + # For security reasons, it is highly recommended that you always + # use absolute paths for naming shared libraries, and exclude the + # DT_RUNPATH tag from executables and libraries. But doing so + # requires that you compile everything twice, which is a pain. 
+ if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then + hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' + archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' + archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' + else + ld_shlibs=no + fi + ;; + esac + ;; + + sunos4*) + archive_cmds='$LD -assert pure-text -Bshareable -o $lib $libobjs $deplibs $linker_flags' + wlarc= + hardcode_direct=yes + hardcode_shlibpath_var=no + ;; + + *) + if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then + archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' + archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' + else + ld_shlibs=no + fi + ;; + esac + + if test "$ld_shlibs" = no; then + runpath_var= + hardcode_libdir_flag_spec= + export_dynamic_flag_spec= + whole_archive_flag_spec= + fi + else + # PORTME fill in a description of your system's linker (not GNU ld) + case $host_os in + aix3*) + allow_undefined_flag=unsupported + always_export_symbols=yes + archive_expsym_cmds='$LD -o $output_objdir/$soname $libobjs $deplibs $linker_flags -bE:$export_symbols -T512 -H512 -bM:SRE~$AR $AR_FLAGS $lib $output_objdir/$soname' + # Note: this linker hardcodes the directories in LIBPATH if there + # are no directories specified by -L. + hardcode_minus_L=yes + if test "$GCC" = yes && test -z "$lt_prog_compiler_static"; then + # Neither direct hardcoding nor static linking is supported with a + # broken collect2. + hardcode_direct=unsupported + fi + ;; + + aix[4-9]*) + if test "$host_cpu" = ia64; then + # On IA64, the linker does run time linking by default, so we don't + # have to do anything special. + aix_use_runtimelinking=no + exp_sym_flag='-Bexport' + no_entry_flag="" + else + # If we're using GNU nm, then we don't want the "-C" option. + # -C means demangle to AIX nm, but means don't demangle with GNU nm + if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then + export_symbols_cmds='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' + else + export_symbols_cmds='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' + fi + aix_use_runtimelinking=no + + # Test if we are trying to use run time linking or normal + # AIX style linking. If -brtl is somewhere in LDFLAGS, we + # need to do runtime linking. + case $host_os in aix4.[23]|aix4.[23].*|aix[5-9]*) + for ld_flag in $LDFLAGS; do + if (test $ld_flag = "-brtl" || test $ld_flag = "-Wl,-brtl"); then + aix_use_runtimelinking=yes + break + fi + done + ;; + esac + + exp_sym_flag='-bexport' + no_entry_flag='-bnoentry' + fi + + # When large executables or shared objects are built, AIX ld can + # have problems creating the table of contents. If linking a library + # or program results in "error TOC overflow" add -mminimal-toc to + # CXXFLAGS/CFLAGS for g++/gcc. In the cases where that is not + # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS. 
+ + archive_cmds='' + hardcode_direct=yes + hardcode_direct_absolute=yes + hardcode_libdir_separator=':' + link_all_deplibs=yes + file_list_spec='${wl}-f,' + + if test "$GCC" = yes; then + case $host_os in aix4.[012]|aix4.[012].*) + # We only want to do this on AIX 4.2 and lower, the check + # below for broken collect2 doesn't work under 4.3+ + collect2name=`${CC} -print-prog-name=collect2` + if test -f "$collect2name" && + strings "$collect2name" | $GREP resolve_lib_name >/dev/null + then + # We have reworked collect2 + : + else + # We have old collect2 + hardcode_direct=unsupported + # It fails to find uninstalled libraries when the uninstalled + # path is not listed in the libpath. Setting hardcode_minus_L + # to unsupported forces relinking + hardcode_minus_L=yes + hardcode_libdir_flag_spec='-L$libdir' + hardcode_libdir_separator= + fi + ;; + esac + shared_flag='-shared' + if test "$aix_use_runtimelinking" = yes; then + shared_flag="$shared_flag "'${wl}-G' + fi + else + # not using gcc + if test "$host_cpu" = ia64; then + # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release + # chokes on -Wl,-G. The following line is correct: + shared_flag='-G' + else + if test "$aix_use_runtimelinking" = yes; then + shared_flag='${wl}-G' + else + shared_flag='${wl}-bM:SRE' + fi + fi + fi + + # It seems that -bexpall does not export symbols beginning with + # underscore (_), so it is better to generate a list of symbols to export. + always_export_symbols=yes + if test "$aix_use_runtimelinking" = yes; then + # Warning - without using the other runtime loading flags (-brtl), + # -berok will link without error, but may produce a broken library. + allow_undefined_flag='-berok' + # Determine the default libpath from the value encoded in an + # empty executable. + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_link "$LINENO"; then : + +lt_aix_libpath_sed=' + /Import File Strings/,/^$/ { + /^0/ { + s/^0 *\(.*\)$/\1/ + p + } + }' +aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` +# Check for a 64-bit object if we didn't find anything. +if test -z "$aix_libpath"; then + aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` +fi +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + + hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" + archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then $ECHO "X${wl}${allow_undefined_flag}" | $Xsed; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag" + else + if test "$host_cpu" = ia64; then + hardcode_libdir_flag_spec='${wl}-R $libdir:/usr/lib:/lib' + allow_undefined_flag="-z nodefs" + archive_expsym_cmds="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$exp_sym_flag:\$export_symbols" + else + # Determine the default libpath from the value encoded in an + # empty executable. + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. 
*/ + +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_link "$LINENO"; then : + +lt_aix_libpath_sed=' + /Import File Strings/,/^$/ { + /^0/ { + s/^0 *\(.*\)$/\1/ + p + } + }' +aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` +# Check for a 64-bit object if we didn't find anything. +if test -z "$aix_libpath"; then + aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` +fi +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + + hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" + # Warning - without using the other run time loading flags, + # -berok will link without error, but may produce a broken library. + no_undefined_flag=' ${wl}-bernotok' + allow_undefined_flag=' ${wl}-berok' + # Exported symbols can be pulled into shared objects from archives + whole_archive_flag_spec='$convenience' + archive_cmds_need_lc=yes + # This is similar to how AIX traditionally builds its shared libraries. + archive_expsym_cmds="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs ${wl}-bnoentry $compiler_flags ${wl}-bE:$export_symbols${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname' + fi + fi + ;; + + amigaos*) + case $host_cpu in + powerpc) + # see comment about AmigaOS4 .so support + archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' + archive_expsym_cmds='' + ;; + m68k) + archive_cmds='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)' + hardcode_libdir_flag_spec='-L$libdir' + hardcode_minus_L=yes + ;; + esac + ;; + + bsdi[45]*) + export_dynamic_flag_spec=-rdynamic + ;; + + cygwin* | mingw* | pw32*) + # When not using gcc, we currently assume that we are using + # Microsoft Visual C++. + # hardcode_libdir_flag_spec is actually meaningless, as there is + # no search path for DLLs. + hardcode_libdir_flag_spec=' ' + allow_undefined_flag=unsupported + # Tell ltmain to make .lib files, not .a files. + libext=lib + # Tell ltmain to make .dll files, not .so files. + shrext_cmds=".dll" + # FIXME: Setting linknames here is a bad hack. + archive_cmds='$CC -o $lib $libobjs $compiler_flags `$ECHO "X$deplibs" | $Xsed -e '\''s/ -lc$//'\''` -link -dll~linknames=' + # The linker will automatically build a .lib file if we build a DLL. + old_archive_from_new_cmds='true' + # FIXME: Should let the user specify the lib program. 
+ old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs' + fix_srcfile_path='`cygpath -w "$srcfile"`' + enable_shared_with_static_runtimes=yes + ;; + + darwin* | rhapsody*) + + + archive_cmds_need_lc=no + hardcode_direct=no + hardcode_automatic=yes + hardcode_shlibpath_var=unsupported + whole_archive_flag_spec='' + link_all_deplibs=yes + allow_undefined_flag="$_lt_dar_allow_undefined" + if test "$GCC" = "yes"; then + output_verbose_link_cmd=echo + archive_cmds="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod${_lt_dsymutil}" + module_cmds="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dsymutil}" + archive_expsym_cmds="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring ${_lt_dar_single_mod}${_lt_dar_export_syms}${_lt_dsymutil}" + module_expsym_cmds="sed -e 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dar_export_syms}${_lt_dsymutil}" + + else + ld_shlibs=no + fi + + ;; + + dgux*) + archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' + hardcode_libdir_flag_spec='-L$libdir' + hardcode_shlibpath_var=no + ;; + + freebsd1*) + ld_shlibs=no + ;; + + # FreeBSD 2.2.[012] allows us to include c++rt0.o to get C++ constructor + # support. Future versions do this automatically, but an explicit c++rt0.o + # does not break anything, and helps significantly (at the cost of a little + # extra space). + freebsd2.2*) + archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags /usr/lib/c++rt0.o' + hardcode_libdir_flag_spec='-R$libdir' + hardcode_direct=yes + hardcode_shlibpath_var=no + ;; + + # Unfortunately, older versions of FreeBSD 2 do not have this feature. + freebsd2*) + archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' + hardcode_direct=yes + hardcode_minus_L=yes + hardcode_shlibpath_var=no + ;; + + # FreeBSD 3 and greater uses gcc -shared to do shared libraries. + freebsd* | dragonfly* | openbsd* | bitrig*) + archive_cmds='$CC -shared -o $lib $libobjs $deplibs $compiler_flags' + hardcode_libdir_flag_spec='-R$libdir' + hardcode_direct=yes + hardcode_shlibpath_var=no + ;; + + hpux9*) + if test "$GCC" = yes; then + archive_cmds='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' + else + archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' + fi + hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir' + hardcode_libdir_separator=: + hardcode_direct=yes + + # hardcode_minus_L: Not really in the search PATH, + # but as the default location of the library. 
+ hardcode_minus_L=yes + export_dynamic_flag_spec='${wl}-E' + ;; + + hpux10*) + if test "$GCC" = yes -a "$with_gnu_ld" = no; then + archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' + else + archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' + fi + if test "$with_gnu_ld" = no; then + hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir' + hardcode_libdir_flag_spec_ld='+b $libdir' + hardcode_libdir_separator=: + hardcode_direct=yes + hardcode_direct_absolute=yes + export_dynamic_flag_spec='${wl}-E' + # hardcode_minus_L: Not really in the search PATH, + # but as the default location of the library. + hardcode_minus_L=yes + fi + ;; + + hpux11*) + if test "$GCC" = yes -a "$with_gnu_ld" = no; then + case $host_cpu in + hppa*64*) + archive_cmds='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' + ;; + ia64*) + archive_cmds='$CC -shared ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' + ;; + *) + archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' + ;; + esac + else + case $host_cpu in + hppa*64*) + archive_cmds='$CC -b ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' + ;; + ia64*) + archive_cmds='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' + ;; + *) + archive_cmds='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' + ;; + esac + fi + if test "$with_gnu_ld" = no; then + hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir' + hardcode_libdir_separator=: + + case $host_cpu in + hppa*64*|ia64*) + hardcode_direct=no + hardcode_shlibpath_var=no + ;; + *) + hardcode_direct=yes + hardcode_direct_absolute=yes + export_dynamic_flag_spec='${wl}-E' + + # hardcode_minus_L: Not really in the search PATH, + # but as the default location of the library. + hardcode_minus_L=yes + ;; + esac + fi + ;; + + irix5* | irix6* | nonstopux*) + if test "$GCC" = yes; then + archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && $ECHO "X${wl}-set_version ${wl}$verstring" | $Xsed` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' + # Try to use the -exported_symbol ld option, if it does not + # work, assume that -exports_file does not work either and + # implicitly export all symbols. + save_LDFLAGS="$LDFLAGS" + LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. 
*/ +int foo(void) {} +_ACEOF +if ac_fn_c_try_link "$LINENO"; then : + archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && $ECHO "X${wl}-set_version ${wl}$verstring" | $Xsed` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' + +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext + LDFLAGS="$save_LDFLAGS" + else + archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && $ECHO "X-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -o $lib' + archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && $ECHO "X-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib' + fi + archive_cmds_need_lc='no' + hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' + hardcode_libdir_separator=: + inherit_rpath=yes + link_all_deplibs=yes + ;; + + netbsd*) + if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then + archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' # a.out + else + archive_cmds='$LD -shared -o $lib $libobjs $deplibs $linker_flags' # ELF + fi + hardcode_libdir_flag_spec='-R$libdir' + hardcode_direct=yes + hardcode_shlibpath_var=no + ;; + + newsos6) + archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' + hardcode_direct=yes + hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' + hardcode_libdir_separator=: + hardcode_shlibpath_var=no + ;; + + *nto* | *qnx*) + ;; + + openbsd*) + if test -f /usr/libexec/ld.so; then + hardcode_direct=yes + hardcode_shlibpath_var=no + hardcode_direct_absolute=yes + if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then + archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' + archive_expsym_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags ${wl}-retain-symbols-file,$export_symbols' + hardcode_libdir_flag_spec='${wl}-rpath,$libdir' + export_dynamic_flag_spec='${wl}-E' + else + case $host_os in + openbsd[01].* | openbsd2.[0-7] | openbsd2.[0-7].*) + archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' + hardcode_libdir_flag_spec='-R$libdir' + ;; + *) + archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' + hardcode_libdir_flag_spec='${wl}-rpath,$libdir' + ;; + esac + fi + else + ld_shlibs=no + fi + ;; + + os2*) + hardcode_libdir_flag_spec='-L$libdir' + hardcode_minus_L=yes + allow_undefined_flag=unsupported + archive_cmds='$ECHO "LIBRARY $libname INITINSTANCE" > $output_objdir/$libname.def~$ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~$ECHO DATA >> $output_objdir/$libname.def~$ECHO " SINGLE NONSHARED" >> $output_objdir/$libname.def~$ECHO EXPORTS >> $output_objdir/$libname.def~emxexp $libobjs >> $output_objdir/$libname.def~$CC -Zdll -Zcrtdll -o $lib $libobjs $deplibs $compiler_flags $output_objdir/$libname.def' + old_archive_from_new_cmds='emximp -o $output_objdir/$libname.a $output_objdir/$libname.def' + ;; + + osf3*) + if test "$GCC" = yes; then + allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*' + archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && $ECHO "X${wl}-set_version ${wl}$verstring" | $Xsed` ${wl}-update_registry 
${wl}${output_objdir}/so_locations -o $lib' + else + allow_undefined_flag=' -expect_unresolved \*' + archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && $ECHO "X-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -o $lib' + fi + archive_cmds_need_lc='no' + hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' + hardcode_libdir_separator=: + ;; + + osf4* | osf5*) # as osf3* with the addition of -msym flag + if test "$GCC" = yes; then + allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*' + archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && $ECHO "X${wl}-set_version ${wl}$verstring" | $Xsed` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' + hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' + else + allow_undefined_flag=' -expect_unresolved \*' + archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -msym -soname $soname `test -n "$verstring" && $ECHO "X-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -o $lib' + archive_expsym_cmds='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done; printf "%s\\n" "-hidden">> $lib.exp~ + $CC -shared${allow_undefined_flag} ${wl}-input ${wl}$lib.exp $compiler_flags $libobjs $deplibs -soname $soname `test -n "$verstring" && $ECHO "X-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -o $lib~$RM $lib.exp' + + # Both c and cxx compiler support -rpath directly + hardcode_libdir_flag_spec='-rpath $libdir' + fi + archive_cmds_need_lc='no' + hardcode_libdir_separator=: + ;; + + solaris*) + no_undefined_flag=' -z defs' + if test "$GCC" = yes; then + wlarc='${wl}' + archive_cmds='$CC -shared ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' + archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ + $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' + else + case `$CC -V 2>&1` in + *"Compilers 5.0"*) + wlarc='' + archive_cmds='$LD -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $linker_flags' + archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ + $LD -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$RM $lib.exp' + ;; + *) + wlarc='${wl}' + archive_cmds='$CC -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $compiler_flags' + archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ + $CC -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' + ;; + esac + fi + hardcode_libdir_flag_spec='-R$libdir' + hardcode_shlibpath_var=no + case $host_os in + solaris2.[0-5] | solaris2.[0-5].*) ;; + *) + # The compiler driver will combine and reorder linker options, + # but understands `-z linker_flag'. GCC discards it without `$wl', + # but is careful enough not to reorder. + # Supported since Solaris 2.6 (maybe 2.5.1?) 
+ if test "$GCC" = yes; then + whole_archive_flag_spec='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract' + else + whole_archive_flag_spec='-z allextract$convenience -z defaultextract' + fi + ;; + esac + link_all_deplibs=yes + ;; + + sunos4*) + if test "x$host_vendor" = xsequent; then + # Use $CC to link under sequent, because it throws in some extra .o + # files that make .init and .fini sections work. + archive_cmds='$CC -G ${wl}-h $soname -o $lib $libobjs $deplibs $compiler_flags' + else + archive_cmds='$LD -assert pure-text -Bstatic -o $lib $libobjs $deplibs $linker_flags' + fi + hardcode_libdir_flag_spec='-L$libdir' + hardcode_direct=yes + hardcode_minus_L=yes + hardcode_shlibpath_var=no + ;; + + sysv4) + case $host_vendor in + sni) + archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' + hardcode_direct=yes # is this really true??? + ;; + siemens) + ## LD is ld it makes a PLAMLIB + ## CC just makes a GrossModule. + archive_cmds='$LD -G -o $lib $libobjs $deplibs $linker_flags' + reload_cmds='$CC -r -o $output$reload_objs' + hardcode_direct=no + ;; + motorola) + archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' + hardcode_direct=no #Motorola manual says yes, but my tests say they lie + ;; + esac + runpath_var='LD_RUN_PATH' + hardcode_shlibpath_var=no + ;; + + sysv4.3*) + archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' + hardcode_shlibpath_var=no + export_dynamic_flag_spec='-Bexport' + ;; + + sysv4*MP*) + if test -d /usr/nec; then + archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' + hardcode_shlibpath_var=no + runpath_var=LD_RUN_PATH + hardcode_runpath_var=yes + ld_shlibs=yes + fi + ;; + + sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[01].[10]* | unixware7* | sco3.2v5.0.[024]*) + no_undefined_flag='${wl}-z,text' + archive_cmds_need_lc=no + hardcode_shlibpath_var=no + runpath_var='LD_RUN_PATH' + + if test "$GCC" = yes; then + archive_cmds='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' + archive_expsym_cmds='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' + else + archive_cmds='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' + archive_expsym_cmds='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' + fi + ;; + + sysv5* | sco3.2v5* | sco5v6*) + # Note: We can NOT use -z defs as we might desire, because we do not + # link with -lc, and that would cause any symbols used from libc to + # always be unresolved, which means just about no library would + # ever link correctly. If we're not using GNU ld we use -z text + # though, which does catch some bad symbols but isn't as heavy-handed + # as -z defs. 
+ no_undefined_flag='${wl}-z,text' + allow_undefined_flag='${wl}-z,nodefs' + archive_cmds_need_lc=no + hardcode_shlibpath_var=no + hardcode_libdir_flag_spec='${wl}-R,$libdir' + hardcode_libdir_separator=':' + link_all_deplibs=yes + export_dynamic_flag_spec='${wl}-Bexport' + runpath_var='LD_RUN_PATH' + + if test "$GCC" = yes; then + archive_cmds='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' + archive_expsym_cmds='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' + else + archive_cmds='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' + archive_expsym_cmds='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' + fi + ;; + + uts4*) + archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' + hardcode_libdir_flag_spec='-L$libdir' + hardcode_shlibpath_var=no + ;; + + *) + ld_shlibs=no + ;; + esac + + if test x$host_vendor = xsni; then + case $host in + sysv4 | sysv4.2uw2* | sysv4.3* | sysv5*) + export_dynamic_flag_spec='${wl}-Blargedynsym' + ;; + esac + fi + fi + +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ld_shlibs" >&5 +$as_echo "$ld_shlibs" >&6; } +test "$ld_shlibs" = no && can_build_shared=no + +with_gnu_ld=$with_gnu_ld + + + + + + + + + + + + + + + +# +# Do we need to explicitly link libc? +# +case "x$archive_cmds_need_lc" in +x|xyes) + # Assume -lc should be added + archive_cmds_need_lc=yes + + if test "$enable_shared" = yes && test "$GCC" = yes; then + case $archive_cmds in + *'~'*) + # FIXME: we may have to deal with multi-command sequences. + ;; + '$CC '*) + # Test whether the compiler implicitly links with -lc since on some + # systems, -lgcc has to come before -lc. If gcc already passes -lc + # to ld, don't add -lc before -lgcc. + { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether -lc should be explicitly linked in" >&5 +$as_echo_n "checking whether -lc should be explicitly linked in... " >&6; } + $RM conftest* + echo "$lt_simple_compile_test_code" > conftest.$ac_ext + + if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 + (eval $ac_compile) 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } 2>conftest.err; then + soname=conftest + lib=conftest + libobjs=conftest.$ac_objext + deplibs= + wl=$lt_prog_compiler_wl + pic_flag=$lt_prog_compiler_pic + compiler_flags=-v + linker_flags=-v + verstring= + output_objdir=. + libname=conftest + lt_save_allow_undefined_flag=$allow_undefined_flag + allow_undefined_flag= + if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$archive_cmds 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1\""; } >&5 + (eval $archive_cmds 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1) 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 + test $ac_status = 0; } + then + archive_cmds_need_lc=no + else + archive_cmds_need_lc=yes + fi + allow_undefined_flag=$lt_save_allow_undefined_flag + else + cat conftest.err 1>&5 + fi + $RM conftest* + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $archive_cmds_need_lc" >&5 +$as_echo "$archive_cmds_need_lc" >&6; } + ;; + esac + fi + ;; +esac + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking dynamic linker characteristics" >&5 +$as_echo_n "checking dynamic linker characteristics... " >&6; } + +if test "$GCC" = yes; then + case $host_os in + darwin*) lt_awk_arg="/^libraries:/,/LR/" ;; + *) lt_awk_arg="/^libraries:/" ;; + esac + lt_search_path_spec=`$CC -print-search-dirs | awk $lt_awk_arg | $SED -e "s/^libraries://" -e "s,=/,/,g"` + if $ECHO "$lt_search_path_spec" | $GREP ';' >/dev/null ; then + # if the path contains ";" then we assume it to be the separator + # otherwise default to the standard path separator (i.e. ":") - it is + # assumed that no part of a normal pathname contains ";" but that should + # okay in the real world where ";" in dirpaths is itself problematic. + lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED -e 's/;/ /g'` + else + lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` + fi + # Ok, now we have the path, separated by spaces, we can step through it + # and add multilib dir if necessary. + lt_tmp_lt_search_path_spec= + lt_multi_os_dir=`$CC $CPPFLAGS $CFLAGS $LDFLAGS -print-multi-os-directory 2>/dev/null` + for lt_sys_path in $lt_search_path_spec; do + if test -d "$lt_sys_path/$lt_multi_os_dir"; then + lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path/$lt_multi_os_dir" + else + test -d "$lt_sys_path" && \ + lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path" + fi + done + lt_search_path_spec=`$ECHO $lt_tmp_lt_search_path_spec | awk ' +BEGIN {RS=" "; FS="/|\n";} { + lt_foo=""; + lt_count=0; + for (lt_i = NF; lt_i > 0; lt_i--) { + if ($lt_i != "" && $lt_i != ".") { + if ($lt_i == "..") { + lt_count++; + } else { + if (lt_count == 0) { + lt_foo="/" $lt_i lt_foo; + } else { + lt_count--; + } + } + } + } + if (lt_foo != "") { lt_freq[lt_foo]++; } + if (lt_freq[lt_foo] == 1) { print lt_foo; } +}'` + sys_lib_search_path_spec=`$ECHO $lt_search_path_spec` +else + sys_lib_search_path_spec="/lib /usr/lib /usr/local/lib" +fi +library_names_spec= +libname_spec='lib$name' +soname_spec= +shrext_cmds=".so" +postinstall_cmds= +postuninstall_cmds= +finish_cmds= +finish_eval= +shlibpath_var= +shlibpath_overrides_runpath=unknown +version_type=none +dynamic_linker="$host_os ld.so" +sys_lib_dlsearch_path_spec="/lib /usr/lib" +need_lib_prefix=unknown +hardcode_into_libs=no + +# when you set need_version to no, make sure it does not cause -set_version +# flags to be left without arguments +need_version=unknown + +case $host_os in +aix3*) + version_type=linux + library_names_spec='${libname}${release}${shared_ext}$versuffix $libname.a' + shlibpath_var=LIBPATH + + # AIX 3 has no versioning support, so we append a major version to the name. 
+ soname_spec='${libname}${release}${shared_ext}$major' + ;; + +aix[4-9]*) + version_type=linux + need_lib_prefix=no + need_version=no + hardcode_into_libs=yes + if test "$host_cpu" = ia64; then + # AIX 5 supports IA64 + library_names_spec='${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext}$versuffix $libname${shared_ext}' + shlibpath_var=LD_LIBRARY_PATH + else + # With GCC up to 2.95.x, collect2 would create an import file + # for dependence libraries. The import file would start with + # the line `#! .'. This would cause the generated library to + # depend on `.', always an invalid library. This was fixed in + # development snapshots of GCC prior to 3.0. + case $host_os in + aix4 | aix4.[01] | aix4.[01].*) + if { echo '#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 97)' + echo ' yes ' + echo '#endif'; } | ${CC} -E - | $GREP yes > /dev/null; then + : + else + can_build_shared=no + fi + ;; + esac + # AIX (on Power*) has no versioning support, so currently we can not hardcode correct + # soname into executable. Probably we can add versioning support to + # collect2, so additional links can be useful in future. + if test "$aix_use_runtimelinking" = yes; then + # If using run time linking (on AIX 4.2 or later) use lib.so + # instead of lib.a to let people know that these are not + # typical AIX shared libraries. + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' + else + # We preserve .a as extension for shared libraries through AIX4.2 + # and later when we are not doing run time linking. + library_names_spec='${libname}${release}.a $libname.a' + soname_spec='${libname}${release}${shared_ext}$major' + fi + shlibpath_var=LIBPATH + fi + ;; + +amigaos*) + case $host_cpu in + powerpc) + # Since July 2007 AmigaOS4 officially supports .so libraries. + # When compiling the executable, add -use-dynld -Lsobjs: to the compileline. + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' + ;; + m68k) + library_names_spec='$libname.ixlibrary $libname.a' + # Create ${libname}_ixlibrary.a entries in /sys/libs. 
+ finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`$ECHO "X$lib" | $Xsed -e '\''s%^.*/\([^/]*\)\.ixlibrary$%\1%'\''`; test $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done' + ;; + esac + ;; + +beos*) + library_names_spec='${libname}${shared_ext}' + dynamic_linker="$host_os ld.so" + shlibpath_var=LIBRARY_PATH + ;; + +bsdi[45]*) + version_type=linux + need_version=no + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' + soname_spec='${libname}${release}${shared_ext}$major' + finish_cmds='PATH="\$PATH:/sbin" ldconfig $libdir' + shlibpath_var=LD_LIBRARY_PATH + sys_lib_search_path_spec="/shlib /usr/lib /usr/X11/lib /usr/contrib/lib /lib /usr/local/lib" + sys_lib_dlsearch_path_spec="/shlib /usr/lib /usr/local/lib" + # the default ld.so.conf also contains /usr/contrib/lib and + # /usr/X11R6/lib (/usr/X11 is a link to /usr/X11R6), but let us allow + # libtool to hard-code these into programs + ;; + +cygwin* | mingw* | pw32*) + version_type=windows + shrext_cmds=".dll" + need_version=no + need_lib_prefix=no + + case $GCC,$host_os in + yes,cygwin* | yes,mingw* | yes,pw32*) + library_names_spec='$libname.dll.a' + # DLL is installed to $(libdir)/../bin by postinstall_cmds + postinstall_cmds='base_file=`basename \${file}`~ + dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ + dldir=$destdir/`dirname \$dlpath`~ + test -d \$dldir || mkdir -p \$dldir~ + $install_prog $dir/$dlname \$dldir/$dlname~ + chmod a+x \$dldir/$dlname~ + if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then + eval '\''$striplib \$dldir/$dlname'\'' || exit \$?; + fi' + postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~ + dlpath=$dir/\$dldll~ + $RM \$dlpath' + shlibpath_overrides_runpath=yes + + case $host_os in + cygwin*) + # Cygwin DLLs use 'cyg' prefix rather than 'lib' + soname_spec='`echo ${libname} | sed -e 's/^lib/cyg/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' + sys_lib_search_path_spec="/usr/lib /lib/w32api /lib /usr/local/lib" + ;; + mingw*) + # MinGW DLLs use traditional 'lib' prefix + soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' + sys_lib_search_path_spec=`$CC -print-search-dirs | $GREP "^libraries:" | $SED -e "s/^libraries://" -e "s,=/,/,g"` + if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then + # It is most probably a Windows format PATH printed by + # mingw gcc, but we are running on Cygwin. Gcc prints its search + # path with ; separators, and with drive letters. We can handle the + # drive letters (cygwin fileutils understands them), so leave them, + # especially as we might pass files found there to a mingw objdump, + # which wouldn't understand a cygwinified path. Ahh. + sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` + else + sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` + fi + ;; + pw32*) + # pw32 DLLs use 'pw' prefix rather than 'lib' + library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' + ;; + esac + ;; + + *) + library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib' + ;; + esac + dynamic_linker='Win32 ld.exe' + # FIXME: first we should search . 
and the directory the executable is in + shlibpath_var=PATH + ;; + +darwin* | rhapsody*) + dynamic_linker="$host_os dyld" + version_type=darwin + need_lib_prefix=no + need_version=no + library_names_spec='${libname}${release}${major}$shared_ext ${libname}$shared_ext' + soname_spec='${libname}${release}${major}$shared_ext' + shlibpath_overrides_runpath=yes + shlibpath_var=DYLD_LIBRARY_PATH + shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`' + + sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/local/lib" + sys_lib_dlsearch_path_spec='/usr/local/lib /lib /usr/lib' + ;; + +dgux*) + version_type=linux + need_lib_prefix=no + need_version=no + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname$shared_ext' + soname_spec='${libname}${release}${shared_ext}$major' + shlibpath_var=LD_LIBRARY_PATH + ;; + +freebsd1*) + dynamic_linker=no + ;; + +freebsd* | dragonfly*) + # DragonFly does not have aout. When/if they implement a new + # versioning mechanism, adjust this. + if test -x /usr/bin/objformat; then + objformat=`/usr/bin/objformat` + else + case $host_os in + freebsd[123]*) objformat=aout ;; + *) objformat=elf ;; + esac + fi + version_type=freebsd-$objformat + case $version_type in + freebsd-elf*) + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' + need_version=no + need_lib_prefix=no + ;; + freebsd-*) + library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix' + need_version=yes + ;; + esac + shlibpath_var=LD_LIBRARY_PATH + case $host_os in + freebsd2*) + shlibpath_overrides_runpath=yes + ;; + freebsd3.[01]* | freebsdelf3.[01]*) + shlibpath_overrides_runpath=yes + hardcode_into_libs=yes + ;; + freebsd3.[2-9]* | freebsdelf3.[2-9]* | \ + freebsd4.[0-5] | freebsdelf4.[0-5] | freebsd4.1.1 | freebsdelf4.1.1) + shlibpath_overrides_runpath=no + hardcode_into_libs=yes + ;; + *) # from 4.6 on, and DragonFly + shlibpath_overrides_runpath=yes + hardcode_into_libs=yes + ;; + esac + ;; + +gnu*) + version_type=linux + need_lib_prefix=no + need_version=no + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}' + soname_spec='${libname}${release}${shared_ext}$major' + shlibpath_var=LD_LIBRARY_PATH + hardcode_into_libs=yes + ;; + +hpux9* | hpux10* | hpux11*) + # Give a soname corresponding to the major version so that dld.sl refuses to + # link against other versions. + version_type=sunos + need_lib_prefix=no + need_version=no + case $host_cpu in + ia64*) + shrext_cmds='.so' + hardcode_into_libs=yes + dynamic_linker="$host_os dld.so" + shlibpath_var=LD_LIBRARY_PATH + shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' + soname_spec='${libname}${release}${shared_ext}$major' + if test "X$HPUX_IA64_MODE" = X32; then + sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib" + else + sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64" + fi + sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec + ;; + hppa*64*) + shrext_cmds='.sl' + hardcode_into_libs=yes + dynamic_linker="$host_os dld.sl" + shlibpath_var=LD_LIBRARY_PATH # How should we handle SHLIB_PATH + shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. 
+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' + soname_spec='${libname}${release}${shared_ext}$major' + sys_lib_search_path_spec="/usr/lib/pa20_64 /usr/ccs/lib/pa20_64" + sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec + ;; + *) + shrext_cmds='.sl' + dynamic_linker="$host_os dld.sl" + shlibpath_var=SHLIB_PATH + shlibpath_overrides_runpath=no # +s is required to enable SHLIB_PATH + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' + soname_spec='${libname}${release}${shared_ext}$major' + ;; + esac + # HP-UX runs *really* slowly unless shared libraries are mode 555. + postinstall_cmds='chmod 555 $lib' + ;; + +interix[3-9]*) + version_type=linux + need_lib_prefix=no + need_version=no + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' + soname_spec='${libname}${release}${shared_ext}$major' + dynamic_linker='Interix 3.x ld.so.1 (PE, like ELF)' + shlibpath_var=LD_LIBRARY_PATH + shlibpath_overrides_runpath=no + hardcode_into_libs=yes + ;; + +irix5* | irix6* | nonstopux*) + case $host_os in + nonstopux*) version_type=nonstopux ;; + *) + if test "$lt_cv_prog_gnu_ld" = yes; then + version_type=linux + else + version_type=irix + fi ;; + esac + need_lib_prefix=no + need_version=no + soname_spec='${libname}${release}${shared_ext}$major' + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext} $libname${shared_ext}' + case $host_os in + irix5* | nonstopux*) + libsuff= shlibsuff= + ;; + *) + case $LD in # libtool.m4 will add one of these switches to LD + *-32|*"-32 "|*-melf32bsmip|*"-melf32bsmip ") + libsuff= shlibsuff= libmagic=32-bit;; + *-n32|*"-n32 "|*-melf32bmipn32|*"-melf32bmipn32 ") + libsuff=32 shlibsuff=N32 libmagic=N32;; + *-64|*"-64 "|*-melf64bmip|*"-melf64bmip ") + libsuff=64 shlibsuff=64 libmagic=64-bit;; + *) libsuff= shlibsuff= libmagic=never-match;; + esac + ;; + esac + shlibpath_var=LD_LIBRARY${shlibsuff}_PATH + shlibpath_overrides_runpath=no + sys_lib_search_path_spec="/usr/lib${libsuff} /lib${libsuff} /usr/local/lib${libsuff}" + sys_lib_dlsearch_path_spec="/usr/lib${libsuff} /lib${libsuff}" + hardcode_into_libs=yes + ;; + +# No shared lib support for Linux oldld, aout, or coff. +linux*oldld* | linux*aout* | linux*coff*) + dynamic_linker=no + ;; + +# This must be Linux ELF. +linux* | k*bsd*-gnu) + version_type=linux + need_lib_prefix=no + need_version=no + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' + soname_spec='${libname}${release}${shared_ext}$major' + finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir' + shlibpath_var=LD_LIBRARY_PATH + shlibpath_overrides_runpath=no + # Some binutils ld are patched to set DT_RUNPATH + save_LDFLAGS=$LDFLAGS + save_libdir=$libdir + eval "libdir=/foo; wl=\"$lt_prog_compiler_wl\"; \ + LDFLAGS=\"\$LDFLAGS $hardcode_libdir_flag_spec\"" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. 
*/ + +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_link "$LINENO"; then : + if ($OBJDUMP -p conftest$ac_exeext) 2>/dev/null | grep "RUNPATH.*$libdir" >/dev/null; then : + shlibpath_overrides_runpath=yes +fi +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext + LDFLAGS=$save_LDFLAGS + libdir=$save_libdir + + # This implies no fast_install, which is unacceptable. + # Some rework will be needed to allow for fast_install + # before this can be enabled. + hardcode_into_libs=yes + + # Append ld.so.conf contents to the search path + if test -f /etc/ld.so.conf; then + lt_ld_extra=`awk '/^include / { system(sprintf("cd /etc; cat %s 2>/dev/null", \$2)); skip = 1; } { if (!skip) print \$0; skip = 0; }' < /etc/ld.so.conf | $SED -e 's/#.*//;/^[ ]*hwcap[ ]/d;s/[:, ]/ /g;s/=[^=]*$//;s/=[^= ]* / /g;/^$/d' | tr '\n' ' '` + sys_lib_dlsearch_path_spec="/lib /usr/lib $lt_ld_extra" + fi + + # We used to test for /lib/ld.so.1 and disable shared libraries on + # powerpc, because MkLinux only supported shared libraries with the + # GNU dynamic linker. Since this was broken with cross compilers, + # most powerpc-linux boxes support dynamic linking these days and + # people can always --disable-shared, the test was removed, and we + # assume the GNU/Linux dynamic linker is in use. + dynamic_linker='GNU/Linux ld.so' + ;; + +netbsd*) + version_type=sunos + need_lib_prefix=no + need_version=no + if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' + finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' + dynamic_linker='NetBSD (a.out) ld.so' + else + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' + soname_spec='${libname}${release}${shared_ext}$major' + dynamic_linker='NetBSD ld.elf_so' + fi + shlibpath_var=LD_LIBRARY_PATH + shlibpath_overrides_runpath=yes + hardcode_into_libs=yes + ;; + +newsos6) + version_type=linux + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' + shlibpath_var=LD_LIBRARY_PATH + shlibpath_overrides_runpath=yes + ;; + +*nto* | *qnx*) + version_type=qnx + need_lib_prefix=no + need_version=no + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' + soname_spec='${libname}${release}${shared_ext}$major' + shlibpath_var=LD_LIBRARY_PATH + shlibpath_overrides_runpath=no + hardcode_into_libs=yes + dynamic_linker='ldqnx.so' + ;; + +openbsd*) + version_type=sunos + sys_lib_dlsearch_path_spec="/usr/lib" + need_lib_prefix=no + # Some older versions of OpenBSD (3.3 at least) *do* need versioned libs. 
+ case $host_os in + openbsd3.3 | openbsd3.3.*) need_version=yes ;; + *) need_version=no ;; + esac + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' + finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' + shlibpath_var=LD_LIBRARY_PATH + if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then + case $host_os in + openbsd2.[89] | openbsd2.[89].*) + shlibpath_overrides_runpath=no + ;; + *) + shlibpath_overrides_runpath=yes + ;; + esac + else + shlibpath_overrides_runpath=yes + fi + ;; + +os2*) + libname_spec='$name' + shrext_cmds=".dll" + need_lib_prefix=no + library_names_spec='$libname${shared_ext} $libname.a' + dynamic_linker='OS/2 ld.exe' + shlibpath_var=LIBPATH + ;; + +osf3* | osf4* | osf5*) + version_type=osf + need_lib_prefix=no + need_version=no + soname_spec='${libname}${release}${shared_ext}$major' + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' + shlibpath_var=LD_LIBRARY_PATH + sys_lib_search_path_spec="/usr/shlib /usr/ccs/lib /usr/lib/cmplrs/cc /usr/lib /usr/local/lib /var/shlib" + sys_lib_dlsearch_path_spec="$sys_lib_search_path_spec" + ;; + +rdos*) + dynamic_linker=no + ;; + +solaris*) + version_type=linux + need_lib_prefix=no + need_version=no + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' + soname_spec='${libname}${release}${shared_ext}$major' + shlibpath_var=LD_LIBRARY_PATH + shlibpath_overrides_runpath=yes + hardcode_into_libs=yes + # ldd complains unless libraries are executable + postinstall_cmds='chmod +x $lib' + ;; + +sunos4*) + version_type=sunos + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' + finish_cmds='PATH="\$PATH:/usr/etc" ldconfig $libdir' + shlibpath_var=LD_LIBRARY_PATH + shlibpath_overrides_runpath=yes + if test "$with_gnu_ld" = yes; then + need_lib_prefix=no + fi + need_version=yes + ;; + +sysv4 | sysv4.3*) + version_type=linux + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' + soname_spec='${libname}${release}${shared_ext}$major' + shlibpath_var=LD_LIBRARY_PATH + case $host_vendor in + sni) + shlibpath_overrides_runpath=no + need_lib_prefix=no + runpath_var=LD_RUN_PATH + ;; + siemens) + need_lib_prefix=no + ;; + motorola) + need_lib_prefix=no + need_version=no + shlibpath_overrides_runpath=no + sys_lib_search_path_spec='/lib /usr/lib /usr/ccs/lib' + ;; + esac + ;; + +sysv4*MP*) + if test -d /usr/nec ;then + version_type=linux + library_names_spec='$libname${shared_ext}.$versuffix $libname${shared_ext}.$major $libname${shared_ext}' + soname_spec='$libname${shared_ext}.$major' + shlibpath_var=LD_LIBRARY_PATH + fi + ;; + +sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*) + version_type=freebsd-elf + need_lib_prefix=no + need_version=no + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' + soname_spec='${libname}${release}${shared_ext}$major' + shlibpath_var=LD_LIBRARY_PATH + shlibpath_overrides_runpath=yes + hardcode_into_libs=yes + if test "$with_gnu_ld" = yes; then + sys_lib_search_path_spec='/usr/local/lib /usr/gnu/lib /usr/ccs/lib /usr/lib /lib' + else + sys_lib_search_path_spec='/usr/ccs/lib /usr/lib' + case $host_os in + sco3.2v5*) + 
sys_lib_search_path_spec="$sys_lib_search_path_spec /lib" + ;; + esac + fi + sys_lib_dlsearch_path_spec='/usr/lib' + ;; + +tpf*) + # TPF is a cross-target only. Preferred cross-host = GNU/Linux. + version_type=linux + need_lib_prefix=no + need_version=no + library_name_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' + shlibpath_var=LD_LIBRARY_PATH + shlibpath_overrides_runpath=no + hardcode_into_libs=yes + ;; + +uts4*) + version_type=linux + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' + soname_spec='${libname}${release}${shared_ext}$major' + shlibpath_var=LD_LIBRARY_PATH + ;; + +*) + dynamic_linker=no + ;; +esac +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $dynamic_linker" >&5 +$as_echo "$dynamic_linker" >&6; } +test "$dynamic_linker" = no && can_build_shared=no + +variables_saved_for_relink="PATH $shlibpath_var $runpath_var" +if test "$GCC" = yes; then + variables_saved_for_relink="$variables_saved_for_relink GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH" +fi + +if test "${lt_cv_sys_lib_search_path_spec+set}" = set; then + sys_lib_search_path_spec="$lt_cv_sys_lib_search_path_spec" +fi +if test "${lt_cv_sys_lib_dlsearch_path_spec+set}" = set; then + sys_lib_dlsearch_path_spec="$lt_cv_sys_lib_dlsearch_path_spec" +fi + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to hardcode library paths into programs" >&5 +$as_echo_n "checking how to hardcode library paths into programs... " >&6; } +hardcode_action= +if test -n "$hardcode_libdir_flag_spec" || + test -n "$runpath_var" || + test "X$hardcode_automatic" = "Xyes" ; then + + # We can hardcode non-existent directories. + if test "$hardcode_direct" != no && + # If the only mechanism to avoid hardcoding is shlibpath_var, we + # have to relink, otherwise we might link with an installed library + # when we should be linking with a yet-to-be-installed one + ## test "$_LT_TAGVAR(hardcode_shlibpath_var, )" != no && + test "$hardcode_minus_L" != no; then + # Linking always hardcodes the temporary library directory. + hardcode_action=relink + else + # We can link without hardcoding, and we can hardcode nonexisting dirs. + hardcode_action=immediate + fi +else + # We cannot hardcode anything, or else we can only hardcode existing + # directories. 
+ hardcode_action=unsupported +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $hardcode_action" >&5 +$as_echo "$hardcode_action" >&6; } + +if test "$hardcode_action" = relink || + test "$inherit_rpath" = yes; then + # Fast installation is not supported + enable_fast_install=no +elif test "$shlibpath_overrides_runpath" = yes || + test "$enable_shared" = no; then + # Fast installation is not necessary + enable_fast_install=needless +fi + + + + + + + if test "x$enable_dlopen" != xyes; then + enable_dlopen=unknown + enable_dlopen_self=unknown + enable_dlopen_self_static=unknown +else + lt_cv_dlopen=no + lt_cv_dlopen_libs= + + case $host_os in + beos*) + lt_cv_dlopen="load_add_on" + lt_cv_dlopen_libs= + lt_cv_dlopen_self=yes + ;; + + mingw* | pw32*) + lt_cv_dlopen="LoadLibrary" + lt_cv_dlopen_libs= + ;; + + cygwin*) + lt_cv_dlopen="dlopen" + lt_cv_dlopen_libs= + ;; + + darwin*) + # if libdl is installed we need to link against it + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -ldl" >&5 +$as_echo_n "checking for dlopen in -ldl... " >&6; } +if test "${ac_cv_lib_dl_dlopen+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + ac_check_lib_save_LIBS=$LIBS +LIBS="-ldl $LIBS" +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +/* Override any GCC internal prototype to avoid an error. + Use char because int might match the return type of a GCC + builtin and then its argument prototype would still apply. */ +#ifdef __cplusplus +extern "C" +#endif +char dlopen (); +int +main () +{ +return dlopen (); + ; + return 0; +} +_ACEOF +if ac_fn_c_try_link "$LINENO"; then : + ac_cv_lib_dl_dlopen=yes +else + ac_cv_lib_dl_dlopen=no +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +LIBS=$ac_check_lib_save_LIBS +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dl_dlopen" >&5 +$as_echo "$ac_cv_lib_dl_dlopen" >&6; } +if test "x$ac_cv_lib_dl_dlopen" = x""yes; then : + lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl" +else + + lt_cv_dlopen="dyld" + lt_cv_dlopen_libs= + lt_cv_dlopen_self=yes + +fi + + ;; + + *) + ac_fn_c_check_func "$LINENO" "shl_load" "ac_cv_func_shl_load" +if test "x$ac_cv_func_shl_load" = x""yes; then : + lt_cv_dlopen="shl_load" +else + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for shl_load in -ldld" >&5 +$as_echo_n "checking for shl_load in -ldld... " >&6; } +if test "${ac_cv_lib_dld_shl_load+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + ac_check_lib_save_LIBS=$LIBS +LIBS="-ldld $LIBS" +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +/* Override any GCC internal prototype to avoid an error. + Use char because int might match the return type of a GCC + builtin and then its argument prototype would still apply. 
*/ +#ifdef __cplusplus +extern "C" +#endif +char shl_load (); +int +main () +{ +return shl_load (); + ; + return 0; +} +_ACEOF +if ac_fn_c_try_link "$LINENO"; then : + ac_cv_lib_dld_shl_load=yes +else + ac_cv_lib_dld_shl_load=no +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +LIBS=$ac_check_lib_save_LIBS +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dld_shl_load" >&5 +$as_echo "$ac_cv_lib_dld_shl_load" >&6; } +if test "x$ac_cv_lib_dld_shl_load" = x""yes; then : + lt_cv_dlopen="shl_load" lt_cv_dlopen_libs="-ldld" +else + ac_fn_c_check_func "$LINENO" "dlopen" "ac_cv_func_dlopen" +if test "x$ac_cv_func_dlopen" = x""yes; then : + lt_cv_dlopen="dlopen" +else + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -ldl" >&5 +$as_echo_n "checking for dlopen in -ldl... " >&6; } +if test "${ac_cv_lib_dl_dlopen+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + ac_check_lib_save_LIBS=$LIBS +LIBS="-ldl $LIBS" +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +/* Override any GCC internal prototype to avoid an error. + Use char because int might match the return type of a GCC + builtin and then its argument prototype would still apply. */ +#ifdef __cplusplus +extern "C" +#endif +char dlopen (); +int +main () +{ +return dlopen (); + ; + return 0; +} +_ACEOF +if ac_fn_c_try_link "$LINENO"; then : + ac_cv_lib_dl_dlopen=yes +else + ac_cv_lib_dl_dlopen=no +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +LIBS=$ac_check_lib_save_LIBS +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dl_dlopen" >&5 +$as_echo "$ac_cv_lib_dl_dlopen" >&6; } +if test "x$ac_cv_lib_dl_dlopen" = x""yes; then : + lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl" +else + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -lsvld" >&5 +$as_echo_n "checking for dlopen in -lsvld... " >&6; } +if test "${ac_cv_lib_svld_dlopen+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + ac_check_lib_save_LIBS=$LIBS +LIBS="-lsvld $LIBS" +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +/* Override any GCC internal prototype to avoid an error. + Use char because int might match the return type of a GCC + builtin and then its argument prototype would still apply. */ +#ifdef __cplusplus +extern "C" +#endif +char dlopen (); +int +main () +{ +return dlopen (); + ; + return 0; +} +_ACEOF +if ac_fn_c_try_link "$LINENO"; then : + ac_cv_lib_svld_dlopen=yes +else + ac_cv_lib_svld_dlopen=no +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +LIBS=$ac_check_lib_save_LIBS +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_svld_dlopen" >&5 +$as_echo "$ac_cv_lib_svld_dlopen" >&6; } +if test "x$ac_cv_lib_svld_dlopen" = x""yes; then : + lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-lsvld" +else + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dld_link in -ldld" >&5 +$as_echo_n "checking for dld_link in -ldld... " >&6; } +if test "${ac_cv_lib_dld_dld_link+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + ac_check_lib_save_LIBS=$LIBS +LIBS="-ldld $LIBS" +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +/* Override any GCC internal prototype to avoid an error. + Use char because int might match the return type of a GCC + builtin and then its argument prototype would still apply. 
*/ +#ifdef __cplusplus +extern "C" +#endif +char dld_link (); +int +main () +{ +return dld_link (); + ; + return 0; +} +_ACEOF +if ac_fn_c_try_link "$LINENO"; then : + ac_cv_lib_dld_dld_link=yes +else + ac_cv_lib_dld_dld_link=no +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +LIBS=$ac_check_lib_save_LIBS +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dld_dld_link" >&5 +$as_echo "$ac_cv_lib_dld_dld_link" >&6; } +if test "x$ac_cv_lib_dld_dld_link" = x""yes; then : + lt_cv_dlopen="dld_link" lt_cv_dlopen_libs="-ldld" +fi + + +fi + + +fi + + +fi + + +fi + + +fi + + ;; + esac + + if test "x$lt_cv_dlopen" != xno; then + enable_dlopen=yes + else + enable_dlopen=no + fi + + case $lt_cv_dlopen in + dlopen) + save_CPPFLAGS="$CPPFLAGS" + test "x$ac_cv_header_dlfcn_h" = xyes && CPPFLAGS="$CPPFLAGS -DHAVE_DLFCN_H" + + save_LDFLAGS="$LDFLAGS" + wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $export_dynamic_flag_spec\" + + save_LIBS="$LIBS" + LIBS="$lt_cv_dlopen_libs $LIBS" + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether a program can dlopen itself" >&5 +$as_echo_n "checking whether a program can dlopen itself... " >&6; } +if test "${lt_cv_dlopen_self+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + if test "$cross_compiling" = yes; then : + lt_cv_dlopen_self=cross +else + lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2 + lt_status=$lt_dlunknown + cat > conftest.$ac_ext <<_LT_EOF +#line 10572 "configure" +#include "confdefs.h" + +#if HAVE_DLFCN_H +#include +#endif + +#include + +#ifdef RTLD_GLOBAL +# define LT_DLGLOBAL RTLD_GLOBAL +#else +# ifdef DL_GLOBAL +# define LT_DLGLOBAL DL_GLOBAL +# else +# define LT_DLGLOBAL 0 +# endif +#endif + +/* We may have to define LT_DLLAZY_OR_NOW in the command line if we + find out it does not work in some platform. */ +#ifndef LT_DLLAZY_OR_NOW +# ifdef RTLD_LAZY +# define LT_DLLAZY_OR_NOW RTLD_LAZY +# else +# ifdef DL_LAZY +# define LT_DLLAZY_OR_NOW DL_LAZY +# else +# ifdef RTLD_NOW +# define LT_DLLAZY_OR_NOW RTLD_NOW +# else +# ifdef DL_NOW +# define LT_DLLAZY_OR_NOW DL_NOW +# else +# define LT_DLLAZY_OR_NOW 0 +# endif +# endif +# endif +# endif +#endif + +#ifdef __cplusplus +extern "C" void exit (int); +#endif + +void fnord() { int i=42;} +int main () +{ + void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); + int status = $lt_dlunknown; + + if (self) + { + if (dlsym (self,"fnord")) status = $lt_dlno_uscore; + else if (dlsym( self,"_fnord")) status = $lt_dlneed_uscore; + /* dlclose (self); */ + } + else + puts (dlerror ()); + + exit (status); +} +_LT_EOF + if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5 + (eval $ac_link) 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } && test -s conftest${ac_exeext} 2>/dev/null; then + (./conftest; exit; ) >&5 2>/dev/null + lt_status=$? 
+ case x$lt_status in + x$lt_dlno_uscore) lt_cv_dlopen_self=yes ;; + x$lt_dlneed_uscore) lt_cv_dlopen_self=yes ;; + x$lt_dlunknown|x*) lt_cv_dlopen_self=no ;; + esac + else : + # compilation failed + lt_cv_dlopen_self=no + fi +fi +rm -fr conftest* + + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_dlopen_self" >&5 +$as_echo "$lt_cv_dlopen_self" >&6; } + + if test "x$lt_cv_dlopen_self" = xyes; then + wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $lt_prog_compiler_static\" + { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether a statically linked program can dlopen itself" >&5 +$as_echo_n "checking whether a statically linked program can dlopen itself... " >&6; } +if test "${lt_cv_dlopen_self_static+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + if test "$cross_compiling" = yes; then : + lt_cv_dlopen_self_static=cross +else + lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2 + lt_status=$lt_dlunknown + cat > conftest.$ac_ext <<_LT_EOF +#line 10672 "configure" +#include "confdefs.h" + +#if HAVE_DLFCN_H +#include +#endif + +#include + +#ifdef RTLD_GLOBAL +# define LT_DLGLOBAL RTLD_GLOBAL +#else +# ifdef DL_GLOBAL +# define LT_DLGLOBAL DL_GLOBAL +# else +# define LT_DLGLOBAL 0 +# endif +#endif + +/* We may have to define LT_DLLAZY_OR_NOW in the command line if we + find out it does not work in some platform. */ +#ifndef LT_DLLAZY_OR_NOW +# ifdef RTLD_LAZY +# define LT_DLLAZY_OR_NOW RTLD_LAZY +# else +# ifdef DL_LAZY +# define LT_DLLAZY_OR_NOW DL_LAZY +# else +# ifdef RTLD_NOW +# define LT_DLLAZY_OR_NOW RTLD_NOW +# else +# ifdef DL_NOW +# define LT_DLLAZY_OR_NOW DL_NOW +# else +# define LT_DLLAZY_OR_NOW 0 +# endif +# endif +# endif +# endif +#endif + +#ifdef __cplusplus +extern "C" void exit (int); +#endif + +void fnord() { int i=42;} +int main () +{ + void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); + int status = $lt_dlunknown; + + if (self) + { + if (dlsym (self,"fnord")) status = $lt_dlno_uscore; + else if (dlsym( self,"_fnord")) status = $lt_dlneed_uscore; + /* dlclose (self); */ + } + else + puts (dlerror ()); + + exit (status); +} +_LT_EOF + if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5 + (eval $ac_link) 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } && test -s conftest${ac_exeext} 2>/dev/null; then + (./conftest; exit; ) >&5 2>/dev/null + lt_status=$? + case x$lt_status in + x$lt_dlno_uscore) lt_cv_dlopen_self_static=yes ;; + x$lt_dlneed_uscore) lt_cv_dlopen_self_static=yes ;; + x$lt_dlunknown|x*) lt_cv_dlopen_self_static=no ;; + esac + else : + # compilation failed + lt_cv_dlopen_self_static=no + fi +fi +rm -fr conftest* + + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_dlopen_self_static" >&5 +$as_echo "$lt_cv_dlopen_self_static" >&6; } + fi + + CPPFLAGS="$save_CPPFLAGS" + LDFLAGS="$save_LDFLAGS" + LIBS="$save_LIBS" + ;; + esac + + case $lt_cv_dlopen_self in + yes|no) enable_dlopen_self=$lt_cv_dlopen_self ;; + *) enable_dlopen_self=unknown ;; + esac + + case $lt_cv_dlopen_self_static in + yes|no) enable_dlopen_self_static=$lt_cv_dlopen_self_static ;; + *) enable_dlopen_self_static=unknown ;; + esac +fi + + + + + + + + + + + + + + + + + +striplib= +old_striplib= +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether stripping libraries is possible" >&5 +$as_echo_n "checking whether stripping libraries is possible... 
" >&6; } +if test -n "$STRIP" && $STRIP -V 2>&1 | $GREP "GNU strip" >/dev/null; then + test -z "$old_striplib" && old_striplib="$STRIP --strip-debug" + test -z "$striplib" && striplib="$STRIP --strip-unneeded" + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } +else +# FIXME - insert some real tests, host_os isn't really good enough + case $host_os in + darwin*) + if test -n "$STRIP" ; then + striplib="$STRIP -x" + old_striplib="$STRIP -S" + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } + else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + fi + ;; + *) + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + ;; + esac +fi + + + + + + + + + + + + + # Report which library types will actually be built + { $as_echo "$as_me:${as_lineno-$LINENO}: checking if libtool supports shared libraries" >&5 +$as_echo_n "checking if libtool supports shared libraries... " >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $can_build_shared" >&5 +$as_echo "$can_build_shared" >&6; } + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build shared libraries" >&5 +$as_echo_n "checking whether to build shared libraries... " >&6; } + test "$can_build_shared" = "no" && enable_shared=no + + # On AIX, shared libraries and static libraries use the same namespace, and + # are all built from PIC. + case $host_os in + aix3*) + test "$enable_shared" = yes && enable_static=no + if test -n "$RANLIB"; then + archive_cmds="$archive_cmds~\$RANLIB \$lib" + postinstall_cmds='$RANLIB $lib' + fi + ;; + + aix[4-9]*) + if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then + test "$enable_shared" = yes && enable_static=no + fi + ;; + esac + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $enable_shared" >&5 +$as_echo "$enable_shared" >&6; } + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build static libraries" >&5 +$as_echo_n "checking whether to build static libraries... " >&6; } + # Make sure either enable_shared or enable_static is yes. + test "$enable_shared" = yes || enable_static=yes + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $enable_static" >&5 +$as_echo "$enable_static" >&6; } + + + + +fi +ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu + +CC="$lt_save_CC" + + + + + + + + + + + + + + ac_config_commands="$ac_config_commands libtool" + + + + +# Only expand once: + + + + +# Check whether --enable-largefile was given. +if test "${enable_largefile+set}" = set; then : + enableval=$enable_largefile; +fi + +if test "$enable_largefile" != no; then + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for special C compiler options needed for large files" >&5 +$as_echo_n "checking for special C compiler options needed for large files... " >&6; } +if test "${ac_cv_sys_largefile_CC+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + ac_cv_sys_largefile_CC=no + if test "$GCC" != yes; then + ac_save_CC=$CC + while :; do + # IRIX 6.2 and later do not support large files by default, + # so use the C compiler's -n32 option if that helps. + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include + /* Check that off_t can represent 2**63 - 1 correctly. 
+ We can't simply define LARGE_OFF_T to be 9223372036854775807, + since some C++ compilers masquerading as C compilers + incorrectly reject 9223372036854775807. */ +#define LARGE_OFF_T (((off_t) 1 << 62) - 1 + ((off_t) 1 << 62)) + int off_t_is_large[(LARGE_OFF_T % 2147483629 == 721 + && LARGE_OFF_T % 2147483647 == 1) + ? 1 : -1]; +int +main () +{ + + ; + return 0; +} +_ACEOF + if ac_fn_c_try_compile "$LINENO"; then : + break +fi +rm -f core conftest.err conftest.$ac_objext + CC="$CC -n32" + if ac_fn_c_try_compile "$LINENO"; then : + ac_cv_sys_largefile_CC=' -n32'; break +fi +rm -f core conftest.err conftest.$ac_objext + break + done + CC=$ac_save_CC + rm -f conftest.$ac_ext + fi +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_sys_largefile_CC" >&5 +$as_echo "$ac_cv_sys_largefile_CC" >&6; } + if test "$ac_cv_sys_largefile_CC" != no; then + CC=$CC$ac_cv_sys_largefile_CC + fi + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for _FILE_OFFSET_BITS value needed for large files" >&5 +$as_echo_n "checking for _FILE_OFFSET_BITS value needed for large files... " >&6; } +if test "${ac_cv_sys_file_offset_bits+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + while :; do + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include + /* Check that off_t can represent 2**63 - 1 correctly. + We can't simply define LARGE_OFF_T to be 9223372036854775807, + since some C++ compilers masquerading as C compilers + incorrectly reject 9223372036854775807. */ +#define LARGE_OFF_T (((off_t) 1 << 62) - 1 + ((off_t) 1 << 62)) + int off_t_is_large[(LARGE_OFF_T % 2147483629 == 721 + && LARGE_OFF_T % 2147483647 == 1) + ? 1 : -1]; +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_cv_sys_file_offset_bits=no; break +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#define _FILE_OFFSET_BITS 64 +#include + /* Check that off_t can represent 2**63 - 1 correctly. + We can't simply define LARGE_OFF_T to be 9223372036854775807, + since some C++ compilers masquerading as C compilers + incorrectly reject 9223372036854775807. */ +#define LARGE_OFF_T (((off_t) 1 << 62) - 1 + ((off_t) 1 << 62)) + int off_t_is_large[(LARGE_OFF_T % 2147483629 == 721 + && LARGE_OFF_T % 2147483647 == 1) + ? 1 : -1]; +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_cv_sys_file_offset_bits=64; break +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext + ac_cv_sys_file_offset_bits=unknown + break +done +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_sys_file_offset_bits" >&5 +$as_echo "$ac_cv_sys_file_offset_bits" >&6; } +case $ac_cv_sys_file_offset_bits in #( + no | unknown) ;; + *) +cat >>confdefs.h <<_ACEOF +#define _FILE_OFFSET_BITS $ac_cv_sys_file_offset_bits +_ACEOF +;; +esac +rm -rf conftest* + if test $ac_cv_sys_file_offset_bits = unknown; then + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for _LARGE_FILES value needed for large files" >&5 +$as_echo_n "checking for _LARGE_FILES value needed for large files... " >&6; } +if test "${ac_cv_sys_large_files+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + while :; do + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include + /* Check that off_t can represent 2**63 - 1 correctly. 
+ We can't simply define LARGE_OFF_T to be 9223372036854775807, + since some C++ compilers masquerading as C compilers + incorrectly reject 9223372036854775807. */ +#define LARGE_OFF_T (((off_t) 1 << 62) - 1 + ((off_t) 1 << 62)) + int off_t_is_large[(LARGE_OFF_T % 2147483629 == 721 + && LARGE_OFF_T % 2147483647 == 1) + ? 1 : -1]; +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_cv_sys_large_files=no; break +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#define _LARGE_FILES 1 +#include + /* Check that off_t can represent 2**63 - 1 correctly. + We can't simply define LARGE_OFF_T to be 9223372036854775807, + since some C++ compilers masquerading as C compilers + incorrectly reject 9223372036854775807. */ +#define LARGE_OFF_T (((off_t) 1 << 62) - 1 + ((off_t) 1 << 62)) + int off_t_is_large[(LARGE_OFF_T % 2147483629 == 721 + && LARGE_OFF_T % 2147483647 == 1) + ? 1 : -1]; +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_cv_sys_large_files=1; break +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext + ac_cv_sys_large_files=unknown + break +done +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_sys_large_files" >&5 +$as_echo "$ac_cv_sys_large_files" >&6; } +case $ac_cv_sys_large_files in #( + no | unknown) ;; + *) +cat >>confdefs.h <<_ACEOF +#define _LARGE_FILES $ac_cv_sys_large_files +_ACEOF +;; +esac +rm -rf conftest* + fi +fi + + +backtrace_supported=yes + +if test -n "${with_target_subdir}"; then + # We are compiling a GCC library. We can assume that the unwind + # library exists. + BACKTRACE_FILE="backtrace.lo simple.lo" +else + ac_fn_c_check_header_mongrel "$LINENO" "unwind.h" "ac_cv_header_unwind_h" "$ac_includes_default" +if test "x$ac_cv_header_unwind_h" = x""yes; then : + ac_fn_c_check_func "$LINENO" "_Unwind_Backtrace" "ac_cv_func__Unwind_Backtrace" +if test "x$ac_cv_func__Unwind_Backtrace" = x""yes; then : + BACKTRACE_FILE="backtrace.lo simple.lo" +else + BACKTRACE_FILE="nounwind.lo" + backtrace_supported=no +fi + +else + BACKTRACE_FILE="nounwind.lo" + backtrace_supported=no +fi + + +fi + + +EXTRA_FLAGS= +if test -n "${with_target_subdir}"; then + EXTRA_FLAGS="-funwind-tables -frandom-seed=\$@" +else + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -funwind-tables option" >&5 +$as_echo_n "checking for -funwind-tables option... " >&6; } +if test "${libbacktrace_cv_c_unwind_tables+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + CFLAGS_hold="$CFLAGS" + CFLAGS="$CFLAGS -funwind-tables" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +static int f() { return 0; } +int +main () +{ +return f(); + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + libbacktrace_cv_c_unwind_tables=yes +else + libbacktrace_cv_c_unwind_tables=no +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext + CFLAGS="$CFLAGS_hold" +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $libbacktrace_cv_c_unwind_tables" >&5 +$as_echo "$libbacktrace_cv_c_unwind_tables" >&6; } + if test "$libbacktrace_cv_c_unwind_tables" = "yes"; then + EXTRA_FLAGS=-funwind-tables + fi + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -frandom-seed=string option" >&5 +$as_echo_n "checking for -frandom-seed=string option... 
" >&6; } +if test "${libbacktrace_cv_c_random_seed_string+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + CFLAGS_hold="$CFLAGS" + CFLAGS="$CFLAGS -frandom-seed=conftest.lo" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ +return 0; + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + libbacktrace_cv_c_random_seed_string=yes +else + libbacktrace_cv_c_random_seed_string=no +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext + CFLAGS="$CFLAGS_hold" +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $libbacktrace_cv_c_random_seed_string" >&5 +$as_echo "$libbacktrace_cv_c_random_seed_string" >&6; } + if test "$libbacktrace_cv_c_random_seed_string" = "yes"; then + EXTRA_FLAGS="$EXTRA_FLAGS -frandom-seed=\$@" + fi +fi + + +ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu + +WARN_FLAGS= +save_CFLAGS="$CFLAGS" +for real_option in -W -Wall -Wwrite-strings -Wstrict-prototypes \ + -Wmissing-prototypes -Wold-style-definition \ + -Wmissing-format-attribute -Wcast-qual; do + # Do the check with the no- prefix removed since gcc silently + # accepts any -Wno-* option on purpose + case $real_option in + -Wno-*) option=-W`expr x$real_option : 'x-Wno-\(.*\)'` ;; + *) option=$real_option ;; + esac + as_acx_Woption=`$as_echo "acx_cv_prog_cc_warning_$option" | $as_tr_sh` + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC supports $option" >&5 +$as_echo_n "checking whether $CC supports $option... " >&6; } +if { as_var=$as_acx_Woption; eval "test \"\${$as_var+set}\" = set"; }; then : + $as_echo_n "(cached) " >&6 +else + CFLAGS="$option" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + eval "$as_acx_Woption=yes" +else + eval "$as_acx_Woption=no" +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext + +fi +eval ac_res=\$$as_acx_Woption + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } + if test `eval 'as_val=${'$as_acx_Woption'};$as_echo "$as_val"'` = yes; then : + WARN_FLAGS="$WARN_FLAGS${WARN_FLAGS:+ }$real_option" +fi + done +CFLAGS="$save_CFLAGS" +ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu + + + +if test -n "${with_target_subdir}"; then + WARN_FLAGS="$WARN_FLAGS -Werror" +fi + + + +if test -n "${with_target_subdir}"; then + + +# Check whether --with-system-libunwind was given. +if test "${with_system_libunwind+set}" = set; then : + withval=$with_system_libunwind; +fi + + # If system-libunwind was not specifically set, pick a default setting. + if test x$with_system_libunwind = x; then + case ${target} in + ia64-*-hpux*) with_system_libunwind=yes ;; + *) with_system_libunwind=no ;; + esac + fi + # Based on system-libunwind and target, do we have ipinfo? + if test x$with_system_libunwind = xyes; then + case ${target} in + ia64-*-*) have_unwind_getipinfo=no ;; + *) have_unwind_getipinfo=yes ;; + esac + else + # Darwin before version 9 does not have _Unwind_GetIPInfo. 
+ + case ${target} in + *-*-darwin[3-8]|*-*-darwin[3-8].*) have_unwind_getipinfo=no ;; + *) have_unwind_getipinfo=yes ;; + esac + + fi + + if test x$have_unwind_getipinfo = xyes; then + +$as_echo "#define HAVE_GETIPINFO 1" >>confdefs.h + + fi + +else + ac_save_CFFLAGS="$CFLAGS" + CFLAGS="$CFLAGS -Werror-implicit-function-declaration" + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for _Unwind_GetIPInfo" >&5 +$as_echo_n "checking for _Unwind_GetIPInfo... " >&6; } + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include "unwind.h" + struct _Unwind_Context *context; + int ip_before_insn = 0; +int +main () +{ +return _Unwind_GetIPInfo (context, &ip_before_insn); + ; + return 0; +} +_ACEOF +if ac_fn_c_try_link "$LINENO"; then : + have_unwind_getipinfo=yes +else + have_unwind_getipinfo=no +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext + CFLAGS="$ac_save_CFLAGS" + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $have_unwind_getipinfo" >&5 +$as_echo "$have_unwind_getipinfo" >&6; } + if test "$have_unwind_getipinfo" = "yes"; then + +$as_echo "#define HAVE_GETIPINFO 1" >>confdefs.h + + fi +fi + +# Enable --enable-host-shared. +# Check whether --enable-host-shared was given. +if test "${enable_host_shared+set}" = set; then : + enableval=$enable_host_shared; PIC_FLAG=-fPIC +else + PIC_FLAG= +fi + + + +# Test for __sync support. +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking __sync extensions" >&5 +$as_echo_n "checking __sync extensions... " >&6; } +if test "${libbacktrace_cv_sys_sync+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + if test -n "${with_target_subdir}"; then + case "${host}" in + hppa*-*-hpux*) libbacktrace_cv_sys_sync=no ;; + *) libbacktrace_cv_sys_sync=yes ;; + esac + else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +int i; +int +main () +{ +__sync_bool_compare_and_swap (&i, i, i); + __sync_lock_test_and_set (&i, 1); + __sync_lock_release (&i); + ; + return 0; +} +_ACEOF +if ac_fn_c_try_link "$LINENO"; then : + libbacktrace_cv_sys_sync=yes +else + libbacktrace_cv_sys_sync=no +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext + fi +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $libbacktrace_cv_sys_sync" >&5 +$as_echo "$libbacktrace_cv_sys_sync" >&6; } +BACKTRACE_SUPPORTS_THREADS=0 +if test "$libbacktrace_cv_sys_sync" = "yes"; then + BACKTRACE_SUPPORTS_THREADS=1 + +$as_echo "#define HAVE_SYNC_FUNCTIONS 1" >>confdefs.h + +fi + + +# Test for __atomic support. +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking __atomic extensions" >&5 +$as_echo_n "checking __atomic extensions... " >&6; } +if test "${libbacktrace_cv_sys_atomic+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + if test -n "${with_target_subdir}"; then + libbacktrace_cv_sys_atomic=yes + else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. 
*/ +int i; +int +main () +{ +__atomic_load_n (&i, __ATOMIC_ACQUIRE); + __atomic_store_n (&i, 1, __ATOMIC_RELEASE); + ; + return 0; +} +_ACEOF +if ac_fn_c_try_link "$LINENO"; then : + libbacktrace_cv_sys_atomic=yes +else + libbacktrace_cv_sys_atomic=no +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext + fi +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $libbacktrace_cv_sys_atomic" >&5 +$as_echo "$libbacktrace_cv_sys_atomic" >&6; } +if test "$libbacktrace_cv_sys_atomic" = "yes"; then + +$as_echo "#define HAVE_ATOMIC_FUNCTIONS 1" >>confdefs.h + +fi + +# The library needs to be able to read the executable itself. Compile +# a file to determine the executable format. The awk script +# filetype.awk prints out the file type. +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking output filetype" >&5 +$as_echo_n "checking output filetype... " >&6; } +if test "${libbacktrace_cv_sys_filetype+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + filetype= +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +int i; +int +main () +{ +int j; + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + filetype=`${AWK} -f $srcdir/filetype.awk conftest.$ac_objext` +else + { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error "compiler failed +See \`config.log' for more details." "$LINENO" 5; } +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +libbacktrace_cv_sys_filetype=$filetype +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $libbacktrace_cv_sys_filetype" >&5 +$as_echo "$libbacktrace_cv_sys_filetype" >&6; } + +# Match the file type to decide what files to compile. +FORMAT_FILE= +backtrace_supports_data=yes +case "$libbacktrace_cv_sys_filetype" in +elf*) FORMAT_FILE="elf.lo" ;; +pecoff) FORMAT_FILE="pecoff.lo" + backtrace_supports_data=no + ;; +xcoff*) FORMAT_FILE="xcoff.lo" + backtrace_supports_data=no + ;; +macho*) FORMAT_FILE="macho.lo" + backtrace_supports_data=no + ;; +*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: could not determine output file type" >&5 +$as_echo "$as_me: WARNING: could not determine output file type" >&2;} + FORMAT_FILE="unknown.lo" + backtrace_supported=no + ;; +esac + + +# ELF defines. +elfsize= +case "$libbacktrace_cv_sys_filetype" in +elf32) elfsize=32 ;; +elf64) elfsize=64 ;; +*) elfsize=unused +esac + +cat >>confdefs.h <<_ACEOF +#define BACKTRACE_ELF_SIZE $elfsize +_ACEOF + + +# XCOFF defines. +xcoffsize= +case "$libbacktrace_cv_sys_filetype" in +xcoff32) xcoffsize=32 ;; +xcoff64) xcoffsize=64 ;; +*) xcoffsize=unused +esac + +cat >>confdefs.h <<_ACEOF +#define BACKTRACE_XCOFF_SIZE $xcoffsize +_ACEOF + + +BACKTRACE_SUPPORTED=0 +if test "$backtrace_supported" = "yes"; then + BACKTRACE_SUPPORTED=1 +fi + + +BACKTRACE_SUPPORTS_DATA=0 +if test "$backtrace_supports_data" = "yes"; then + BACKTRACE_SUPPORTS_DATA=1 +fi + + +for ac_header in sys/mman.h +do : + ac_fn_c_check_header_mongrel "$LINENO" "sys/mman.h" "ac_cv_header_sys_mman_h" "$ac_includes_default" +if test "x$ac_cv_header_sys_mman_h" = x""yes; then : + cat >>confdefs.h <<_ACEOF +#define HAVE_SYS_MMAN_H 1 +_ACEOF + +fi + +done + +if test "$ac_cv_header_sys_mman_h" = "no"; then + have_mmap=no +else + if test -n "${with_target_subdir}"; then + # When built as a GCC target library, we can't do a link test. We + # simply assume that if we have mman.h, we have mmap. 
+ have_mmap=yes
+ case "${host}" in
+ spu-*-*|*-*-msdosdjgpp)
+ # The SPU does not have mmap, but it has a sys/mman.h header file
+ # containing "mmap_eaddr" and the mmap flags, confusing the test.
+ # DJGPP also has sys/man.h, but no mmap
+ have_mmap=no ;;
+ esac
+ else
+ ac_fn_c_check_func "$LINENO" "mmap" "ac_cv_func_mmap"
+if test "x$ac_cv_func_mmap" = x""yes; then :
+ have_mmap=yes
+else
+ have_mmap=no
+fi
+
+ fi
+fi
+
+case "${host_os}" in
+darwin*)
+ have_mmap=no ;;
+esac
+
+if test "$have_mmap" = "no"; then
+ VIEW_FILE=read.lo
+ ALLOC_FILE=alloc.lo
+else
+ VIEW_FILE=mmapio.lo
+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h. */
+
+#include <sys/mman.h>
+#if !defined(MAP_ANONYMOUS) && !defined(MAP_ANON)
+ #error no MAP_ANONYMOUS
+#endif
+
+_ACEOF
+if ac_fn_c_try_cpp "$LINENO"; then :
+ ALLOC_FILE=alloc.lo
+else
+ ALLOC_FILE=alloc.lo
+fi
+rm -f conftest.err conftest.$ac_ext
+fi
+
+
+
+BACKTRACE_USES_MALLOC=0
+if test "$ALLOC_FILE" = "alloc.lo"; then
+ BACKTRACE_USES_MALLOC=1
+fi
+
+
+# Check for dl_iterate_phdr.
+for ac_header in link.h
+do :
+ ac_fn_c_check_header_mongrel "$LINENO" "link.h" "ac_cv_header_link_h" "$ac_includes_default"
+if test "x$ac_cv_header_link_h" = x""yes; then :
+ cat >>confdefs.h <<_ACEOF
+#define HAVE_LINK_H 1
+_ACEOF
+
+fi
+
+done
+
+if test "$ac_cv_header_link_h" = "no"; then
+ have_dl_iterate_phdr=no
+else
+ if test -n "${with_target_subdir}"; then
+ # When built as a GCC target library, we can't do a link test.
+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h. */
+#include <link.h>
+
+_ACEOF
+if (eval "$ac_cpp conftest.$ac_ext") 2>&5 |
+ $EGREP "dl_iterate_phdr" >/dev/null 2>&1; then :
+ have_dl_iterate_phdr=yes
+else
+ have_dl_iterate_phdr=no
+fi
+rm -f conftest*
+
+ case "${host}" in
+ *-*-solaris2.10*)
+ # Avoid dl_iterate_phdr on Solaris 10, where it is in the
+ # header file but is only in -ldl.
+ have_dl_iterate_phdr=no ;;
+ esac
+ else
+ ac_fn_c_check_func "$LINENO" "dl_iterate_phdr" "ac_cv_func_dl_iterate_phdr"
+if test "x$ac_cv_func_dl_iterate_phdr" = x""yes; then :
+ have_dl_iterate_phdr=yes
+else
+ have_dl_iterate_phdr=no
+fi
+
+ fi
+fi
+if test "$have_dl_iterate_phdr" = "yes"; then
+
+$as_echo "#define HAVE_DL_ITERATE_PHDR 1" >>confdefs.h
+
+fi
+
+# Check for loadquery.
+for ac_header in sys/ldr.h
+do :
+ ac_fn_c_check_header_mongrel "$LINENO" "sys/ldr.h" "ac_cv_header_sys_ldr_h" "$ac_includes_default"
+if test "x$ac_cv_header_sys_ldr_h" = x""yes; then :
+ cat >>confdefs.h <<_ACEOF
+#define HAVE_SYS_LDR_H 1
+_ACEOF
+
+fi
+
+done
+
+if test "$ac_cv_header_sys_ldr_h" = "no"; then
+ have_loadquery=no
+else
+ if test -n "${with_target_subdir}"; then
+ # When built as a GCC target library, we can't do a link test.
+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h. */
+#include <sys/ldr.h>
+
+_ACEOF
+if (eval "$ac_cpp conftest.$ac_ext") 2>&5 |
+ $EGREP "loadquery" >/dev/null 2>&1; then :
+ have_loadquery=yes
+else
+ have_loadquery=no
+fi
+rm -f conftest*
+
+ else
+ ac_fn_c_check_func "$LINENO" "loadquery" "ac_cv_func_loadquery"
+if test "x$ac_cv_func_loadquery" = x""yes; then :
+ have_loadquery=yes
+else
+ have_loadquery=no
+fi
+
+ fi
+fi
+if test "$have_loadquery" = "yes"; then
+
+$as_echo "#define HAVE_LOADQUERY 1" >>confdefs.h
+
+fi
+
+# Check for the fcntl function.
+if test -n "${with_target_subdir}"; then + case "${host}" in + *-*-mingw*) have_fcntl=no ;; + spu-*-*) have_fcntl=no ;; + *) have_fcntl=yes ;; + esac +else + ac_fn_c_check_func "$LINENO" "fcntl" "ac_cv_func_fcntl" +if test "x$ac_cv_func_fcntl" = x""yes; then : + have_fcntl=yes +else + have_fcntl=no +fi + +fi +if test "$have_fcntl" = "yes"; then + +$as_echo "#define HAVE_FCNTL 1" >>confdefs.h + +fi + +ac_fn_c_check_decl "$LINENO" "strnlen" "ac_cv_have_decl_strnlen" "$ac_includes_default" +if test "x$ac_cv_have_decl_strnlen" = x""yes; then : + ac_have_decl=1 +else + ac_have_decl=0 +fi + +cat >>confdefs.h <<_ACEOF +#define HAVE_DECL_STRNLEN $ac_have_decl +_ACEOF + +for ac_func in lstat readlink +do : + as_ac_var=`$as_echo "ac_cv_func_$ac_func" | $as_tr_sh` +ac_fn_c_check_func "$LINENO" "$ac_func" "$as_ac_var" +eval as_val=\$$as_ac_var + if test "x$as_val" = x""yes; then : + cat >>confdefs.h <<_ACEOF +#define `$as_echo "HAVE_$ac_func" | $as_tr_cpp` 1 +_ACEOF + +fi +done + + +# Check for getexecname function. +if test -n "${with_target_subdir}"; then + case "${host}" in + *-*-solaris2*) have_getexecname=yes ;; + *) have_getexecname=no ;; + esac +else + ac_fn_c_check_func "$LINENO" "getexecname" "ac_cv_func_getexecname" +if test "x$ac_cv_func_getexecname" = x""yes; then : + have_getexecname=yes +else + have_getexecname=no +fi + +fi +if test "$have_getexecname" = "yes"; then + +$as_echo "#define HAVE_GETEXECNAME 1" >>confdefs.h + +fi + +# Check for the clock_gettime function. +for ac_func in clock_gettime +do : + ac_fn_c_check_func "$LINENO" "clock_gettime" "ac_cv_func_clock_gettime" +if test "x$ac_cv_func_clock_gettime" = x""yes; then : + cat >>confdefs.h <<_ACEOF +#define HAVE_CLOCK_GETTIME 1 +_ACEOF + +fi +done + +clock_gettime_link= +# At least for glibc, clock_gettime is in librt. But don't +# pull that in if it still doesn't give us the function we want. This +# test is copied from libgomp, and modified to not link in -lrt as +# we're using this for test timing only. +if test "$ac_cv_func_clock_gettime" = no; then + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for clock_gettime in -lrt" >&5 +$as_echo_n "checking for clock_gettime in -lrt... " >&6; } +if test "${ac_cv_lib_rt_clock_gettime+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + ac_check_lib_save_LIBS=$LIBS +LIBS="-lrt $LIBS" +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +/* Override any GCC internal prototype to avoid an error. + Use char because int might match the return type of a GCC + builtin and then its argument prototype would still apply. */ +#ifdef __cplusplus +extern "C" +#endif +char clock_gettime (); +int +main () +{ +return clock_gettime (); + ; + return 0; +} +_ACEOF +if ac_fn_c_try_link "$LINENO"; then : + ac_cv_lib_rt_clock_gettime=yes +else + ac_cv_lib_rt_clock_gettime=no +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +LIBS=$ac_check_lib_save_LIBS +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_rt_clock_gettime" >&5 +$as_echo "$ac_cv_lib_rt_clock_gettime" >&6; } +if test "x$ac_cv_lib_rt_clock_gettime" = x""yes; then : + CLOCK_GETTIME_LINK=-lrt + +$as_echo "#define HAVE_CLOCK_GETTIME 1" >>confdefs.h + +fi + +fi + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether -pthread is supported" >&5 +$as_echo_n "checking whether -pthread is supported... 
" >&6; } +if test "${libgo_cv_lib_pthread+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + CFLAGS_hold=$CFLAGS +CFLAGS="$CFLAGS -pthread" +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +int i; +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + libgo_cv_lib_pthread=yes +else + libgo_cv_lib_pthread=no +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +CFLAGS=$CFLAGS_hold +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $libgo_cv_lib_pthread" >&5 +$as_echo "$libgo_cv_lib_pthread" >&6; } +PTHREAD_CFLAGS= +if test "$libgo_cv_lib_pthread" = yes; then + PTHREAD_CFLAGS=-pthread +fi + + + if test "$libgo_cv_lib_pthread" = yes; then + HAVE_PTHREAD_TRUE= + HAVE_PTHREAD_FALSE='#' +else + HAVE_PTHREAD_TRUE='#' + HAVE_PTHREAD_FALSE= +fi + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for compress in -lz" >&5 +$as_echo_n "checking for compress in -lz... " >&6; } +if test "${ac_cv_lib_z_compress+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + ac_check_lib_save_LIBS=$LIBS +LIBS="-lz $LIBS" +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +/* Override any GCC internal prototype to avoid an error. + Use char because int might match the return type of a GCC + builtin and then its argument prototype would still apply. */ +#ifdef __cplusplus +extern "C" +#endif +char compress (); +int +main () +{ +return compress (); + ; + return 0; +} +_ACEOF +if ac_fn_c_try_link "$LINENO"; then : + ac_cv_lib_z_compress=yes +else + ac_cv_lib_z_compress=no +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +LIBS=$ac_check_lib_save_LIBS +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_z_compress" >&5 +$as_echo "$ac_cv_lib_z_compress" >&6; } +if test "x$ac_cv_lib_z_compress" = x""yes; then : + cat >>confdefs.h <<_ACEOF +#define HAVE_LIBZ 1 +_ACEOF + + LIBS="-lz $LIBS" + +fi + +if test $ac_cv_lib_z_compress = "yes"; then + +$as_echo "#define HAVE_ZLIB 1" >>confdefs.h + +fi + if test "$ac_cv_lib_z_compress" = yes; then + HAVE_ZLIB_TRUE= + HAVE_ZLIB_FALSE='#' +else + HAVE_ZLIB_TRUE='#' + HAVE_ZLIB_FALSE= +fi + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether --compress-debug-sections is supported" >&5 +$as_echo_n "checking whether --compress-debug-sections is supported... " >&6; } +if test "${libgo_cv_ld_compress+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + LDFLAGS_hold=$LDFLAGS +LDFLAGS="$LDFLAGS -Wl,--compress-debug-sections=zlib-gnu" +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_link "$LINENO"; then : + libgo_cv_ld_compress=yes +else + libgo_cv_ld_compress=no +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +LDFLAGS=$LDFLAGS_hold +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $libgo_cv_ld_compress" >&5 +$as_echo "$libgo_cv_ld_compress" >&6; } + if test "$libgo_cv_ld_compress" = yes; then + HAVE_COMPRESSED_DEBUG_TRUE= + HAVE_COMPRESSED_DEBUG_FALSE='#' +else + HAVE_COMPRESSED_DEBUG_TRUE='#' + HAVE_COMPRESSED_DEBUG_FALSE= +fi + + + +# Extract the first word of "objcopy", so it can be a program name with args. +set dummy objcopy; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if test "${ac_cv_prog_OBJCOPY+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$OBJCOPY"; then + ac_cv_prog_OBJCOPY="$OBJCOPY" # Let the user override the test. 
+else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + ac_cv_prog_OBJCOPY="objcopy" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +OBJCOPY=$ac_cv_prog_OBJCOPY +if test -n "$OBJCOPY"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OBJCOPY" >&5 +$as_echo "$OBJCOPY" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether objcopy supports debuglink" >&5 +$as_echo_n "checking whether objcopy supports debuglink... " >&6; } +if test "${libbacktrace_cv_objcopy_debuglink+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + if test -n "${with_target_subdir}"; then + libbacktrace_cv_objcopy_debuglink=no +elif ${OBJCOPY} --add-gnu-debuglink=x /bin/ls /tmp/ls$$; then + rm -f /tmp/ls$$ + libbacktrace_cv_objcopy_debuglink=yes +else + libbacktrace_cv_objcopy_debuglink=no +fi +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $libbacktrace_cv_objcopy_debuglink" >&5 +$as_echo "$libbacktrace_cv_objcopy_debuglink" >&6; } + if test "$libbacktrace_cv_objcopy_debuglink" = yes; then + HAVE_OBJCOPY_DEBUGLINK_TRUE= + HAVE_OBJCOPY_DEBUGLINK_FALSE='#' +else + HAVE_OBJCOPY_DEBUGLINK_TRUE='#' + HAVE_OBJCOPY_DEBUGLINK_FALSE= +fi + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether tests can run" >&5 +$as_echo_n "checking whether tests can run... " >&6; } +if test "${libbacktrace_cv_sys_native+set}" = set; then : + $as_echo_n "(cached) " >&6 +else + if test "$cross_compiling" = yes; then : + libbacktrace_cv_sys_native=no +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ +return 0; + ; + return 0; +} +_ACEOF +if ac_fn_c_try_run "$LINENO"; then : + libbacktrace_cv_sys_native=yes +else + libbacktrace_cv_sys_native=no +fi +rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ + conftest.$ac_objext conftest.beam conftest.$ac_ext +fi + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $libbacktrace_cv_sys_native" >&5 +$as_echo "$libbacktrace_cv_sys_native" >&6; } + if test "$libbacktrace_cv_sys_native" = "yes"; then + NATIVE_TRUE= + NATIVE_FALSE='#' +else + NATIVE_TRUE='#' + NATIVE_FALSE= +fi + + +if test "${multilib}" = "yes"; then + multilib_arg="--enable-multilib" +else + multilib_arg= +fi + +ac_config_files="$ac_config_files Makefile backtrace-supported.h" + + +# We need multilib support, but only if configuring for the target. +ac_config_commands="$ac_config_commands default" + + +cat >confcache <<\_ACEOF +# This file is a shell script that caches the results of configure +# tests run on this system so they can be shared between configure +# scripts and configure runs, see configure's option --config-cache. +# It is not useful on other systems. If it contains results you don't +# want to keep, you may remove or edit it. +# +# config.status only pays attention to the cache file if you give it +# the --recheck option to rerun configure. +# +# `ac_cv_env_foo' variables (set or unset) will be overridden when +# loading this file, other *unset* `ac_cv_foo' will be assigned the +# following values. 
+ +_ACEOF + +# The following way of writing the cache mishandles newlines in values, +# but we know of no workaround that is simple, portable, and efficient. +# So, we kill variables containing newlines. +# Ultrix sh set writes to stderr and can't be redirected directly, +# and sets the high bit in the cache file unless we assign to the vars. +( + for ac_var in `(set) 2>&1 | sed -n 's/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'`; do + eval ac_val=\$$ac_var + case $ac_val in #( + *${as_nl}*) + case $ac_var in #( + *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 +$as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; + esac + case $ac_var in #( + _ | IFS | as_nl) ;; #( + BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( + *) { eval $ac_var=; unset $ac_var;} ;; + esac ;; + esac + done + + (set) 2>&1 | + case $as_nl`(ac_space=' '; set) 2>&1` in #( + *${as_nl}ac_space=\ *) + # `set' does not quote correctly, so add quotes: double-quote + # substitution turns \\\\ into \\, and sed turns \\ into \. + sed -n \ + "s/'/'\\\\''/g; + s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\\2'/p" + ;; #( + *) + # `set' quotes correctly as required by POSIX, so do not add quotes. + sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" + ;; + esac | + sort +) | + sed ' + /^ac_cv_env_/b end + t clear + :clear + s/^\([^=]*\)=\(.*[{}].*\)$/test "${\1+set}" = set || &/ + t end + s/^\([^=]*\)=\(.*\)$/\1=${\1=\2}/ + :end' >>confcache +if diff "$cache_file" confcache >/dev/null 2>&1; then :; else + if test -w "$cache_file"; then + test "x$cache_file" != "x/dev/null" && + { $as_echo "$as_me:${as_lineno-$LINENO}: updating cache $cache_file" >&5 +$as_echo "$as_me: updating cache $cache_file" >&6;} + cat confcache >$cache_file + else + { $as_echo "$as_me:${as_lineno-$LINENO}: not updating unwritable cache $cache_file" >&5 +$as_echo "$as_me: not updating unwritable cache $cache_file" >&6;} + fi +fi +rm -f confcache + +test "x$prefix" = xNONE && prefix=$ac_default_prefix +# Let make expand exec_prefix. +test "x$exec_prefix" = xNONE && exec_prefix='${prefix}' + +DEFS=-DHAVE_CONFIG_H + +ac_libobjs= +ac_ltlibobjs= +for ac_i in : $LIBOBJS; do test "x$ac_i" = x: && continue + # 1. Remove the extension, and $U if already installed. + ac_script='s/\$U\././;s/\.o$//;s/\.obj$//' + ac_i=`$as_echo "$ac_i" | sed "$ac_script"` + # 2. Prepend LIBOBJDIR. When used with automake>=1.10 LIBOBJDIR + # will be set to the directory where LIBOBJS objects are built. + as_fn_append ac_libobjs " \${LIBOBJDIR}$ac_i\$U.$ac_objext" + as_fn_append ac_ltlibobjs " \${LIBOBJDIR}$ac_i"'$U.lo' +done +LIBOBJS=$ac_libobjs + +LTLIBOBJS=$ac_ltlibobjs + + + if test -n "$EXEEXT"; then + am__EXEEXT_TRUE= + am__EXEEXT_FALSE='#' +else + am__EXEEXT_TRUE='#' + am__EXEEXT_FALSE= +fi + +if test -z "${MAINTAINER_MODE_TRUE}" && test -z "${MAINTAINER_MODE_FALSE}"; then + as_fn_error "conditional \"MAINTAINER_MODE\" was never defined. +Usually this means the macro was only invoked conditionally." "$LINENO" 5 +fi +if test -z "${HAVE_PTHREAD_TRUE}" && test -z "${HAVE_PTHREAD_FALSE}"; then + as_fn_error "conditional \"HAVE_PTHREAD\" was never defined. +Usually this means the macro was only invoked conditionally." "$LINENO" 5 +fi +if test -z "${HAVE_ZLIB_TRUE}" && test -z "${HAVE_ZLIB_FALSE}"; then + as_fn_error "conditional \"HAVE_ZLIB\" was never defined. +Usually this means the macro was only invoked conditionally." 
"$LINENO" 5 +fi +if test -z "${HAVE_COMPRESSED_DEBUG_TRUE}" && test -z "${HAVE_COMPRESSED_DEBUG_FALSE}"; then + as_fn_error "conditional \"HAVE_COMPRESSED_DEBUG\" was never defined. +Usually this means the macro was only invoked conditionally." "$LINENO" 5 +fi +if test -z "${HAVE_OBJCOPY_DEBUGLINK_TRUE}" && test -z "${HAVE_OBJCOPY_DEBUGLINK_FALSE}"; then + as_fn_error "conditional \"HAVE_OBJCOPY_DEBUGLINK\" was never defined. +Usually this means the macro was only invoked conditionally." "$LINENO" 5 +fi +if test -z "${NATIVE_TRUE}" && test -z "${NATIVE_FALSE}"; then + as_fn_error "conditional \"NATIVE\" was never defined. +Usually this means the macro was only invoked conditionally." "$LINENO" 5 +fi + +: ${CONFIG_STATUS=./config.status} +ac_write_fail=0 +ac_clean_files_save=$ac_clean_files +ac_clean_files="$ac_clean_files $CONFIG_STATUS" +{ $as_echo "$as_me:${as_lineno-$LINENO}: creating $CONFIG_STATUS" >&5 +$as_echo "$as_me: creating $CONFIG_STATUS" >&6;} +as_write_fail=0 +cat >$CONFIG_STATUS <<_ASEOF || as_write_fail=1 +#! $SHELL +# Generated by $as_me. +# Run this file to recreate the current configuration. +# Compiler output produced by configure, useful for debugging +# configure, is in config.log if it exists. + +debug=false +ac_cs_recheck=false +ac_cs_silent=false + +SHELL=\${CONFIG_SHELL-$SHELL} +export SHELL +_ASEOF +cat >>$CONFIG_STATUS <<\_ASEOF || as_write_fail=1 +## -------------------- ## +## M4sh Initialization. ## +## -------------------- ## + +# Be more Bourne compatible +DUALCASE=1; export DUALCASE # for MKS sh +if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then : + emulate sh + NULLCMD=: + # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which + # is contrary to our usage. Disable this feature. + alias -g '${1+"$@"}'='"$@"' + setopt NO_GLOB_SUBST +else + case `(set -o) 2>/dev/null` in #( + *posix*) : + set -o posix ;; #( + *) : + ;; +esac +fi + + +as_nl=' +' +export as_nl +# Printing a long string crashes Solaris 7 /usr/bin/printf. +as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' +as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo +as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo +# Prefer a ksh shell builtin over an external printf program on Solaris, +# but without wasting forks for bash or zsh. +if test -z "$BASH_VERSION$ZSH_VERSION" \ + && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then + as_echo='print -r --' + as_echo_n='print -rn --' +elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then + as_echo='printf %s\n' + as_echo_n='printf %s' +else + if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then + as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"' + as_echo_n='/usr/ucb/echo -n' + else + as_echo_body='eval expr "X$1" : "X\\(.*\\)"' + as_echo_n_body='eval + arg=$1; + case $arg in #( + *"$as_nl"*) + expr "X$arg" : "X\\(.*\\)$as_nl"; + arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;; + esac; + expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl" + ' + export as_echo_n_body + as_echo_n='sh -c $as_echo_n_body as_echo' + fi + export as_echo_body + as_echo='sh -c $as_echo_body as_echo' +fi + +# The user is always right. +if test "${PATH_SEPARATOR+set}" != set; then + PATH_SEPARATOR=: + (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { + (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || + PATH_SEPARATOR=';' + } +fi + + +# IFS +# We need space, tab and new line, in precisely that order. 
Quoting is +# there to prevent editors from complaining about space-tab. +# (If _AS_PATH_WALK were called with IFS unset, it would disable word +# splitting by setting IFS to empty value.) +IFS=" "" $as_nl" + +# Find who we are. Look in the path if we contain no directory separator. +case $0 in #(( + *[\\/]* ) as_myself=$0 ;; + *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break + done +IFS=$as_save_IFS + + ;; +esac +# We did not find ourselves, most probably we were run as `sh COMMAND' +# in which case we are not to be found in the path. +if test "x$as_myself" = x; then + as_myself=$0 +fi +if test ! -f "$as_myself"; then + $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 + exit 1 +fi + +# Unset variables that we do not need and which cause bugs (e.g. in +# pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1" +# suppresses any "Segmentation fault" message there. '((' could +# trigger a bug in pdksh 5.2.14. +for as_var in BASH_ENV ENV MAIL MAILPATH +do eval test x\${$as_var+set} = xset \ + && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : +done +PS1='$ ' +PS2='> ' +PS4='+ ' + +# NLS nuisances. +LC_ALL=C +export LC_ALL +LANGUAGE=C +export LANGUAGE + +# CDPATH. +(unset CDPATH) >/dev/null 2>&1 && unset CDPATH + + +# as_fn_error ERROR [LINENO LOG_FD] +# --------------------------------- +# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are +# provided, also output the error to LOG_FD, referencing LINENO. Then exit the +# script with status $?, using 1 if that was 0. +as_fn_error () +{ + as_status=$?; test $as_status -eq 0 && as_status=1 + if test "$3"; then + as_lineno=${as_lineno-"$2"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + $as_echo "$as_me:${as_lineno-$LINENO}: error: $1" >&$3 + fi + $as_echo "$as_me: error: $1" >&2 + as_fn_exit $as_status +} # as_fn_error + + +# as_fn_set_status STATUS +# ----------------------- +# Set $? to STATUS, without forking. +as_fn_set_status () +{ + return $1 +} # as_fn_set_status + +# as_fn_exit STATUS +# ----------------- +# Exit the shell with STATUS, even in a "trap 0" or "set -e" context. +as_fn_exit () +{ + set +e + as_fn_set_status $1 + exit $1 +} # as_fn_exit + +# as_fn_unset VAR +# --------------- +# Portably unset VAR. +as_fn_unset () +{ + { eval $1=; unset $1;} +} +as_unset=as_fn_unset +# as_fn_append VAR VALUE +# ---------------------- +# Append the text in VALUE to the end of the definition contained in VAR. Take +# advantage of any shell optimizations that allow amortized linear growth over +# repeated appends, instead of the typical quadratic growth present in naive +# implementations. +if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then : + eval 'as_fn_append () + { + eval $1+=\$2 + }' +else + as_fn_append () + { + eval $1=\$$1\$2 + } +fi # as_fn_append + +# as_fn_arith ARG... +# ------------------ +# Perform arithmetic evaluation on the ARGs, and store the result in the +# global $as_val. Take advantage of shells that can avoid forks. The arguments +# must be portable across $(()) and expr. +if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then : + eval 'as_fn_arith () + { + as_val=$(( $* )) + }' +else + as_fn_arith () + { + as_val=`expr "$@" || test $? 
-eq 1` + } +fi # as_fn_arith + + +if expr a : '\(a\)' >/dev/null 2>&1 && + test "X`expr 00001 : '.*\(...\)'`" = X001; then + as_expr=expr +else + as_expr=false +fi + +if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then + as_basename=basename +else + as_basename=false +fi + +if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then + as_dirname=dirname +else + as_dirname=false +fi + +as_me=`$as_basename -- "$0" || +$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ + X"$0" : 'X\(//\)$' \| \ + X"$0" : 'X\(/\)' \| . 2>/dev/null || +$as_echo X/"$0" | + sed '/^.*\/\([^/][^/]*\)\/*$/{ + s//\1/ + q + } + /^X\/\(\/\/\)$/{ + s//\1/ + q + } + /^X\/\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + +# Avoid depending upon Character Ranges. +as_cr_letters='abcdefghijklmnopqrstuvwxyz' +as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' +as_cr_Letters=$as_cr_letters$as_cr_LETTERS +as_cr_digits='0123456789' +as_cr_alnum=$as_cr_Letters$as_cr_digits + +ECHO_C= ECHO_N= ECHO_T= +case `echo -n x` in #((((( +-n*) + case `echo 'xy\c'` in + *c*) ECHO_T=' ';; # ECHO_T is single tab character. + xy) ECHO_C='\c';; + *) echo `echo ksh88 bug on AIX 6.1` > /dev/null + ECHO_T=' ';; + esac;; +*) + ECHO_N='-n';; +esac + +rm -f conf$$ conf$$.exe conf$$.file +if test -d conf$$.dir; then + rm -f conf$$.dir/conf$$.file +else + rm -f conf$$.dir + mkdir conf$$.dir 2>/dev/null +fi +if (echo >conf$$.file) 2>/dev/null; then + if ln -s conf$$.file conf$$ 2>/dev/null; then + as_ln_s='ln -s' + # ... but there are two gotchas: + # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. + # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. + # In both cases, we have to default to `cp -p'. + ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || + as_ln_s='cp -p' + elif ln conf$$.file conf$$ 2>/dev/null; then + as_ln_s=ln + else + as_ln_s='cp -p' + fi +else + as_ln_s='cp -p' +fi +rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file +rmdir conf$$.dir 2>/dev/null + + +# as_fn_mkdir_p +# ------------- +# Create "$as_dir" as a directory, including parents if necessary. +as_fn_mkdir_p () +{ + + case $as_dir in #( + -*) as_dir=./$as_dir;; + esac + test -d "$as_dir" || eval $as_mkdir_p || { + as_dirs= + while :; do + case $as_dir in #( + *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( + *) as_qdir=$as_dir;; + esac + as_dirs="'$as_qdir' $as_dirs" + as_dir=`$as_dirname -- "$as_dir" || +$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ + X"$as_dir" : 'X\(//\)[^/]' \| \ + X"$as_dir" : 'X\(//\)$' \| \ + X"$as_dir" : 'X\(/\)' \| . 2>/dev/null || +$as_echo X"$as_dir" | + sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ + s//\1/ + q + } + /^X\(\/\/\)[^/].*/{ + s//\1/ + q + } + /^X\(\/\/\)$/{ + s//\1/ + q + } + /^X\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + test -d "$as_dir" && break + done + test -z "$as_dirs" || eval "mkdir $as_dirs" + } || test -d "$as_dir" || as_fn_error "cannot create directory $as_dir" + + +} # as_fn_mkdir_p +if mkdir -p . 
2>/dev/null; then + as_mkdir_p='mkdir -p "$as_dir"' +else + test -d ./-p && rmdir ./-p + as_mkdir_p=false +fi + +if test -x / >/dev/null 2>&1; then + as_test_x='test -x' +else + if ls -dL / >/dev/null 2>&1; then + as_ls_L_option=L + else + as_ls_L_option= + fi + as_test_x=' + eval sh -c '\'' + if test -d "$1"; then + test -d "$1/."; + else + case $1 in #( + -*)set "./$1";; + esac; + case `ls -ld'$as_ls_L_option' "$1" 2>/dev/null` in #(( + ???[sx]*):;;*)false;;esac;fi + '\'' sh + ' +fi +as_executable_p=$as_test_x + +# Sed expression to map a string onto a valid CPP name. +as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" + +# Sed expression to map a string onto a valid variable name. +as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" + + +exec 6>&1 +## ----------------------------------- ## +## Main body of $CONFIG_STATUS script. ## +## ----------------------------------- ## +_ASEOF +test $as_write_fail = 0 && chmod +x $CONFIG_STATUS || ac_write_fail=1 + +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +# Save the log message, to keep $0 and so on meaningful, and to +# report actual input values of CONFIG_FILES etc. instead of their +# values after options handling. +ac_log=" +This file was extended by package-unused $as_me version-unused, which was +generated by GNU Autoconf 2.64. Invocation command line was + + CONFIG_FILES = $CONFIG_FILES + CONFIG_HEADERS = $CONFIG_HEADERS + CONFIG_LINKS = $CONFIG_LINKS + CONFIG_COMMANDS = $CONFIG_COMMANDS + $ $0 $@ + +on `(hostname || uname -n) 2>/dev/null | sed 1q` +" + +_ACEOF + +case $ac_config_files in *" +"*) set x $ac_config_files; shift; ac_config_files=$*;; +esac + +case $ac_config_headers in *" +"*) set x $ac_config_headers; shift; ac_config_headers=$*;; +esac + + +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +# Files that config.status was made for. +config_files="$ac_config_files" +config_headers="$ac_config_headers" +config_commands="$ac_config_commands" + +_ACEOF + +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +ac_cs_usage="\ +\`$as_me' instantiates files and other configuration actions +from templates according to the current configuration. Unless the files +and actions are specified as TAGs, all are instantiated by default. + +Usage: $0 [OPTION]... [TAG]... + + -h, --help print this help, then exit + -V, --version print version number and configuration settings, then exit + -q, --quiet, --silent + do not print progress messages + -d, --debug don't remove temporary files + --recheck update $as_me by reconfiguring in the same conditions + --file=FILE[:TEMPLATE] + instantiate the configuration file FILE + --header=FILE[:TEMPLATE] + instantiate the configuration header FILE + +Configuration files: +$config_files + +Configuration headers: +$config_headers + +Configuration commands: +$config_commands + +Report bugs to the package provider." + +_ACEOF +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +ac_cs_version="\\ +package-unused config.status version-unused +configured by $0, generated by GNU Autoconf 2.64, + with options \\"`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`\\" + +Copyright (C) 2009 Free Software Foundation, Inc. +This config.status script is free software; the Free Software Foundation +gives unlimited permission to copy, distribute and modify it." 
+ +ac_pwd='$ac_pwd' +srcdir='$srcdir' +INSTALL='$INSTALL' +MKDIR_P='$MKDIR_P' +AWK='$AWK' +test -n "\$AWK" || AWK=awk +_ACEOF + +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +# The default lists apply if the user does not specify any file. +ac_need_defaults=: +while test $# != 0 +do + case $1 in + --*=*) + ac_option=`expr "X$1" : 'X\([^=]*\)='` + ac_optarg=`expr "X$1" : 'X[^=]*=\(.*\)'` + ac_shift=: + ;; + *) + ac_option=$1 + ac_optarg=$2 + ac_shift=shift + ;; + esac + + case $ac_option in + # Handling of the options. + -recheck | --recheck | --rechec | --reche | --rech | --rec | --re | --r) + ac_cs_recheck=: ;; + --version | --versio | --versi | --vers | --ver | --ve | --v | -V ) + $as_echo "$ac_cs_version"; exit ;; + --debug | --debu | --deb | --de | --d | -d ) + debug=: ;; + --file | --fil | --fi | --f ) + $ac_shift + case $ac_optarg in + *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;; + esac + as_fn_append CONFIG_FILES " '$ac_optarg'" + ac_need_defaults=false;; + --header | --heade | --head | --hea ) + $ac_shift + case $ac_optarg in + *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;; + esac + as_fn_append CONFIG_HEADERS " '$ac_optarg'" + ac_need_defaults=false;; + --he | --h) + # Conflict between --help and --header + as_fn_error "ambiguous option: \`$1' +Try \`$0 --help' for more information.";; + --help | --hel | -h ) + $as_echo "$ac_cs_usage"; exit ;; + -q | -quiet | --quiet | --quie | --qui | --qu | --q \ + | -silent | --silent | --silen | --sile | --sil | --si | --s) + ac_cs_silent=: ;; + + # This is an error. + -*) as_fn_error "unrecognized option: \`$1' +Try \`$0 --help' for more information." ;; + + *) as_fn_append ac_config_targets " $1" + ac_need_defaults=false ;; + + esac + shift +done + +ac_configure_extra_args= + +if $ac_cs_silent; then + exec 6>/dev/null + ac_configure_extra_args="$ac_configure_extra_args --silent" +fi + +_ACEOF +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +if \$ac_cs_recheck; then + set X '$SHELL' '$0' $ac_configure_args \$ac_configure_extra_args --no-create --no-recursion + shift + \$as_echo "running CONFIG_SHELL=$SHELL \$*" >&6 + CONFIG_SHELL='$SHELL' + export CONFIG_SHELL + exec "\$@" +fi + +_ACEOF +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +exec 5>>config.log +{ + echo + sed 'h;s/./-/g;s/^.../## /;s/...$/ ##/;p;x;p;x' <<_ASBOX +## Running $as_me. ## +_ASBOX + $as_echo "$ac_log" +} >&5 + +_ACEOF +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +# +# INIT-COMMANDS +# + +srcdir="$srcdir" +host="$host" +target="$target" +with_multisubdir="$with_multisubdir" +with_multisrctop="$with_multisrctop" +with_target_subdir="$with_target_subdir" +ac_configure_args="${multilib_arg} ${ac_configure_args}" +multi_basedir="$multi_basedir" +CONFIG_SHELL=${CONFIG_SHELL-/bin/sh} +CC="$CC" + + +# The HP-UX ksh and POSIX shell print the target directory to stdout +# if CDPATH is set. 
+(unset CDPATH) >/dev/null 2>&1 && unset CDPATH + +sed_quote_subst='$sed_quote_subst' +double_quote_subst='$double_quote_subst' +delay_variable_subst='$delay_variable_subst' +macro_version='`$ECHO "X$macro_version" | $Xsed -e "$delay_single_quote_subst"`' +macro_revision='`$ECHO "X$macro_revision" | $Xsed -e "$delay_single_quote_subst"`' +enable_shared='`$ECHO "X$enable_shared" | $Xsed -e "$delay_single_quote_subst"`' +enable_static='`$ECHO "X$enable_static" | $Xsed -e "$delay_single_quote_subst"`' +pic_mode='`$ECHO "X$pic_mode" | $Xsed -e "$delay_single_quote_subst"`' +enable_fast_install='`$ECHO "X$enable_fast_install" | $Xsed -e "$delay_single_quote_subst"`' +host_alias='`$ECHO "X$host_alias" | $Xsed -e "$delay_single_quote_subst"`' +host='`$ECHO "X$host" | $Xsed -e "$delay_single_quote_subst"`' +host_os='`$ECHO "X$host_os" | $Xsed -e "$delay_single_quote_subst"`' +build_alias='`$ECHO "X$build_alias" | $Xsed -e "$delay_single_quote_subst"`' +build='`$ECHO "X$build" | $Xsed -e "$delay_single_quote_subst"`' +build_os='`$ECHO "X$build_os" | $Xsed -e "$delay_single_quote_subst"`' +SED='`$ECHO "X$SED" | $Xsed -e "$delay_single_quote_subst"`' +Xsed='`$ECHO "X$Xsed" | $Xsed -e "$delay_single_quote_subst"`' +GREP='`$ECHO "X$GREP" | $Xsed -e "$delay_single_quote_subst"`' +EGREP='`$ECHO "X$EGREP" | $Xsed -e "$delay_single_quote_subst"`' +FGREP='`$ECHO "X$FGREP" | $Xsed -e "$delay_single_quote_subst"`' +LD='`$ECHO "X$LD" | $Xsed -e "$delay_single_quote_subst"`' +NM='`$ECHO "X$NM" | $Xsed -e "$delay_single_quote_subst"`' +LN_S='`$ECHO "X$LN_S" | $Xsed -e "$delay_single_quote_subst"`' +max_cmd_len='`$ECHO "X$max_cmd_len" | $Xsed -e "$delay_single_quote_subst"`' +ac_objext='`$ECHO "X$ac_objext" | $Xsed -e "$delay_single_quote_subst"`' +exeext='`$ECHO "X$exeext" | $Xsed -e "$delay_single_quote_subst"`' +lt_unset='`$ECHO "X$lt_unset" | $Xsed -e "$delay_single_quote_subst"`' +lt_SP2NL='`$ECHO "X$lt_SP2NL" | $Xsed -e "$delay_single_quote_subst"`' +lt_NL2SP='`$ECHO "X$lt_NL2SP" | $Xsed -e "$delay_single_quote_subst"`' +reload_flag='`$ECHO "X$reload_flag" | $Xsed -e "$delay_single_quote_subst"`' +reload_cmds='`$ECHO "X$reload_cmds" | $Xsed -e "$delay_single_quote_subst"`' +deplibs_check_method='`$ECHO "X$deplibs_check_method" | $Xsed -e "$delay_single_quote_subst"`' +file_magic_cmd='`$ECHO "X$file_magic_cmd" | $Xsed -e "$delay_single_quote_subst"`' +AR='`$ECHO "X$AR" | $Xsed -e "$delay_single_quote_subst"`' +AR_FLAGS='`$ECHO "X$AR_FLAGS" | $Xsed -e "$delay_single_quote_subst"`' +STRIP='`$ECHO "X$STRIP" | $Xsed -e "$delay_single_quote_subst"`' +RANLIB='`$ECHO "X$RANLIB" | $Xsed -e "$delay_single_quote_subst"`' +old_postinstall_cmds='`$ECHO "X$old_postinstall_cmds" | $Xsed -e "$delay_single_quote_subst"`' +old_postuninstall_cmds='`$ECHO "X$old_postuninstall_cmds" | $Xsed -e "$delay_single_quote_subst"`' +old_archive_cmds='`$ECHO "X$old_archive_cmds" | $Xsed -e "$delay_single_quote_subst"`' +CC='`$ECHO "X$CC" | $Xsed -e "$delay_single_quote_subst"`' +CFLAGS='`$ECHO "X$CFLAGS" | $Xsed -e "$delay_single_quote_subst"`' +compiler='`$ECHO "X$compiler" | $Xsed -e "$delay_single_quote_subst"`' +GCC='`$ECHO "X$GCC" | $Xsed -e "$delay_single_quote_subst"`' +lt_cv_sys_global_symbol_pipe='`$ECHO "X$lt_cv_sys_global_symbol_pipe" | $Xsed -e "$delay_single_quote_subst"`' +lt_cv_sys_global_symbol_to_cdecl='`$ECHO "X$lt_cv_sys_global_symbol_to_cdecl" | $Xsed -e "$delay_single_quote_subst"`' +lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "X$lt_cv_sys_global_symbol_to_c_name_address" | $Xsed -e 
"$delay_single_quote_subst"`' +lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "X$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $Xsed -e "$delay_single_quote_subst"`' +objdir='`$ECHO "X$objdir" | $Xsed -e "$delay_single_quote_subst"`' +SHELL='`$ECHO "X$SHELL" | $Xsed -e "$delay_single_quote_subst"`' +ECHO='`$ECHO "X$ECHO" | $Xsed -e "$delay_single_quote_subst"`' +MAGIC_CMD='`$ECHO "X$MAGIC_CMD" | $Xsed -e "$delay_single_quote_subst"`' +lt_prog_compiler_no_builtin_flag='`$ECHO "X$lt_prog_compiler_no_builtin_flag" | $Xsed -e "$delay_single_quote_subst"`' +lt_prog_compiler_wl='`$ECHO "X$lt_prog_compiler_wl" | $Xsed -e "$delay_single_quote_subst"`' +lt_prog_compiler_pic='`$ECHO "X$lt_prog_compiler_pic" | $Xsed -e "$delay_single_quote_subst"`' +lt_prog_compiler_static='`$ECHO "X$lt_prog_compiler_static" | $Xsed -e "$delay_single_quote_subst"`' +lt_cv_prog_compiler_c_o='`$ECHO "X$lt_cv_prog_compiler_c_o" | $Xsed -e "$delay_single_quote_subst"`' +need_locks='`$ECHO "X$need_locks" | $Xsed -e "$delay_single_quote_subst"`' +DSYMUTIL='`$ECHO "X$DSYMUTIL" | $Xsed -e "$delay_single_quote_subst"`' +NMEDIT='`$ECHO "X$NMEDIT" | $Xsed -e "$delay_single_quote_subst"`' +LIPO='`$ECHO "X$LIPO" | $Xsed -e "$delay_single_quote_subst"`' +OTOOL='`$ECHO "X$OTOOL" | $Xsed -e "$delay_single_quote_subst"`' +OTOOL64='`$ECHO "X$OTOOL64" | $Xsed -e "$delay_single_quote_subst"`' +libext='`$ECHO "X$libext" | $Xsed -e "$delay_single_quote_subst"`' +shrext_cmds='`$ECHO "X$shrext_cmds" | $Xsed -e "$delay_single_quote_subst"`' +extract_expsyms_cmds='`$ECHO "X$extract_expsyms_cmds" | $Xsed -e "$delay_single_quote_subst"`' +archive_cmds_need_lc='`$ECHO "X$archive_cmds_need_lc" | $Xsed -e "$delay_single_quote_subst"`' +enable_shared_with_static_runtimes='`$ECHO "X$enable_shared_with_static_runtimes" | $Xsed -e "$delay_single_quote_subst"`' +export_dynamic_flag_spec='`$ECHO "X$export_dynamic_flag_spec" | $Xsed -e "$delay_single_quote_subst"`' +whole_archive_flag_spec='`$ECHO "X$whole_archive_flag_spec" | $Xsed -e "$delay_single_quote_subst"`' +compiler_needs_object='`$ECHO "X$compiler_needs_object" | $Xsed -e "$delay_single_quote_subst"`' +old_archive_from_new_cmds='`$ECHO "X$old_archive_from_new_cmds" | $Xsed -e "$delay_single_quote_subst"`' +old_archive_from_expsyms_cmds='`$ECHO "X$old_archive_from_expsyms_cmds" | $Xsed -e "$delay_single_quote_subst"`' +archive_cmds='`$ECHO "X$archive_cmds" | $Xsed -e "$delay_single_quote_subst"`' +archive_expsym_cmds='`$ECHO "X$archive_expsym_cmds" | $Xsed -e "$delay_single_quote_subst"`' +module_cmds='`$ECHO "X$module_cmds" | $Xsed -e "$delay_single_quote_subst"`' +module_expsym_cmds='`$ECHO "X$module_expsym_cmds" | $Xsed -e "$delay_single_quote_subst"`' +with_gnu_ld='`$ECHO "X$with_gnu_ld" | $Xsed -e "$delay_single_quote_subst"`' +allow_undefined_flag='`$ECHO "X$allow_undefined_flag" | $Xsed -e "$delay_single_quote_subst"`' +no_undefined_flag='`$ECHO "X$no_undefined_flag" | $Xsed -e "$delay_single_quote_subst"`' +hardcode_libdir_flag_spec='`$ECHO "X$hardcode_libdir_flag_spec" | $Xsed -e "$delay_single_quote_subst"`' +hardcode_libdir_flag_spec_ld='`$ECHO "X$hardcode_libdir_flag_spec_ld" | $Xsed -e "$delay_single_quote_subst"`' +hardcode_libdir_separator='`$ECHO "X$hardcode_libdir_separator" | $Xsed -e "$delay_single_quote_subst"`' +hardcode_direct='`$ECHO "X$hardcode_direct" | $Xsed -e "$delay_single_quote_subst"`' +hardcode_direct_absolute='`$ECHO "X$hardcode_direct_absolute" | $Xsed -e "$delay_single_quote_subst"`' +hardcode_minus_L='`$ECHO "X$hardcode_minus_L" | $Xsed 
-e "$delay_single_quote_subst"`' +hardcode_shlibpath_var='`$ECHO "X$hardcode_shlibpath_var" | $Xsed -e "$delay_single_quote_subst"`' +hardcode_automatic='`$ECHO "X$hardcode_automatic" | $Xsed -e "$delay_single_quote_subst"`' +inherit_rpath='`$ECHO "X$inherit_rpath" | $Xsed -e "$delay_single_quote_subst"`' +link_all_deplibs='`$ECHO "X$link_all_deplibs" | $Xsed -e "$delay_single_quote_subst"`' +fix_srcfile_path='`$ECHO "X$fix_srcfile_path" | $Xsed -e "$delay_single_quote_subst"`' +always_export_symbols='`$ECHO "X$always_export_symbols" | $Xsed -e "$delay_single_quote_subst"`' +export_symbols_cmds='`$ECHO "X$export_symbols_cmds" | $Xsed -e "$delay_single_quote_subst"`' +exclude_expsyms='`$ECHO "X$exclude_expsyms" | $Xsed -e "$delay_single_quote_subst"`' +include_expsyms='`$ECHO "X$include_expsyms" | $Xsed -e "$delay_single_quote_subst"`' +prelink_cmds='`$ECHO "X$prelink_cmds" | $Xsed -e "$delay_single_quote_subst"`' +file_list_spec='`$ECHO "X$file_list_spec" | $Xsed -e "$delay_single_quote_subst"`' +variables_saved_for_relink='`$ECHO "X$variables_saved_for_relink" | $Xsed -e "$delay_single_quote_subst"`' +need_lib_prefix='`$ECHO "X$need_lib_prefix" | $Xsed -e "$delay_single_quote_subst"`' +need_version='`$ECHO "X$need_version" | $Xsed -e "$delay_single_quote_subst"`' +version_type='`$ECHO "X$version_type" | $Xsed -e "$delay_single_quote_subst"`' +runpath_var='`$ECHO "X$runpath_var" | $Xsed -e "$delay_single_quote_subst"`' +shlibpath_var='`$ECHO "X$shlibpath_var" | $Xsed -e "$delay_single_quote_subst"`' +shlibpath_overrides_runpath='`$ECHO "X$shlibpath_overrides_runpath" | $Xsed -e "$delay_single_quote_subst"`' +libname_spec='`$ECHO "X$libname_spec" | $Xsed -e "$delay_single_quote_subst"`' +library_names_spec='`$ECHO "X$library_names_spec" | $Xsed -e "$delay_single_quote_subst"`' +soname_spec='`$ECHO "X$soname_spec" | $Xsed -e "$delay_single_quote_subst"`' +postinstall_cmds='`$ECHO "X$postinstall_cmds" | $Xsed -e "$delay_single_quote_subst"`' +postuninstall_cmds='`$ECHO "X$postuninstall_cmds" | $Xsed -e "$delay_single_quote_subst"`' +finish_cmds='`$ECHO "X$finish_cmds" | $Xsed -e "$delay_single_quote_subst"`' +finish_eval='`$ECHO "X$finish_eval" | $Xsed -e "$delay_single_quote_subst"`' +hardcode_into_libs='`$ECHO "X$hardcode_into_libs" | $Xsed -e "$delay_single_quote_subst"`' +sys_lib_search_path_spec='`$ECHO "X$sys_lib_search_path_spec" | $Xsed -e "$delay_single_quote_subst"`' +sys_lib_dlsearch_path_spec='`$ECHO "X$sys_lib_dlsearch_path_spec" | $Xsed -e "$delay_single_quote_subst"`' +hardcode_action='`$ECHO "X$hardcode_action" | $Xsed -e "$delay_single_quote_subst"`' +enable_dlopen='`$ECHO "X$enable_dlopen" | $Xsed -e "$delay_single_quote_subst"`' +enable_dlopen_self='`$ECHO "X$enable_dlopen_self" | $Xsed -e "$delay_single_quote_subst"`' +enable_dlopen_self_static='`$ECHO "X$enable_dlopen_self_static" | $Xsed -e "$delay_single_quote_subst"`' +old_striplib='`$ECHO "X$old_striplib" | $Xsed -e "$delay_single_quote_subst"`' +striplib='`$ECHO "X$striplib" | $Xsed -e "$delay_single_quote_subst"`' + +LTCC='$LTCC' +LTCFLAGS='$LTCFLAGS' +compiler='$compiler_DEFAULT' + +# Quote evaled strings. 
+for var in SED \ +GREP \ +EGREP \ +FGREP \ +LD \ +NM \ +LN_S \ +lt_SP2NL \ +lt_NL2SP \ +reload_flag \ +deplibs_check_method \ +file_magic_cmd \ +AR \ +AR_FLAGS \ +STRIP \ +RANLIB \ +CC \ +CFLAGS \ +compiler \ +lt_cv_sys_global_symbol_pipe \ +lt_cv_sys_global_symbol_to_cdecl \ +lt_cv_sys_global_symbol_to_c_name_address \ +lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \ +SHELL \ +ECHO \ +lt_prog_compiler_no_builtin_flag \ +lt_prog_compiler_wl \ +lt_prog_compiler_pic \ +lt_prog_compiler_static \ +lt_cv_prog_compiler_c_o \ +need_locks \ +DSYMUTIL \ +NMEDIT \ +LIPO \ +OTOOL \ +OTOOL64 \ +shrext_cmds \ +export_dynamic_flag_spec \ +whole_archive_flag_spec \ +compiler_needs_object \ +with_gnu_ld \ +allow_undefined_flag \ +no_undefined_flag \ +hardcode_libdir_flag_spec \ +hardcode_libdir_flag_spec_ld \ +hardcode_libdir_separator \ +fix_srcfile_path \ +exclude_expsyms \ +include_expsyms \ +file_list_spec \ +variables_saved_for_relink \ +libname_spec \ +library_names_spec \ +soname_spec \ +finish_eval \ +old_striplib \ +striplib; do + case \`eval \\\\\$ECHO "X\\\\\$\$var"\` in + *[\\\\\\\`\\"\\\$]*) + eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"X\\\$\$var\\" | \\\$Xsed -e \\"\\\$sed_quote_subst\\"\\\`\\\\\\"" + ;; + *) + eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\"" + ;; + esac +done + +# Double-quote double-evaled strings. +for var in reload_cmds \ +old_postinstall_cmds \ +old_postuninstall_cmds \ +old_archive_cmds \ +extract_expsyms_cmds \ +old_archive_from_new_cmds \ +old_archive_from_expsyms_cmds \ +archive_cmds \ +archive_expsym_cmds \ +module_cmds \ +module_expsym_cmds \ +export_symbols_cmds \ +prelink_cmds \ +postinstall_cmds \ +postuninstall_cmds \ +finish_cmds \ +sys_lib_search_path_spec \ +sys_lib_dlsearch_path_spec; do + case \`eval \\\\\$ECHO "X\\\\\$\$var"\` in + *[\\\\\\\`\\"\\\$]*) + eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"X\\\$\$var\\" | \\\$Xsed -e \\"\\\$double_quote_subst\\" -e \\"\\\$sed_quote_subst\\" -e \\"\\\$delay_variable_subst\\"\\\`\\\\\\"" + ;; + *) + eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\"" + ;; + esac +done + +# Fix-up fallback echo if it was mangled by the above quoting rules. +case \$lt_ECHO in +*'\\\$0 --fallback-echo"') lt_ECHO=\`\$ECHO "X\$lt_ECHO" | \$Xsed -e 's/\\\\\\\\\\\\\\\$0 --fallback-echo"\$/\$0 --fallback-echo"/'\` + ;; +esac + +ac_aux_dir='$ac_aux_dir' +xsi_shell='$xsi_shell' +lt_shell_append='$lt_shell_append' + +# See if we are running on zsh, and set the options which allow our +# commands through without removal of \ escapes INIT. +if test -n "\${ZSH_VERSION+set}" ; then + setopt NO_GLOB_SUBST +fi + + + PACKAGE='$PACKAGE' + VERSION='$VERSION' + TIMESTAMP='$TIMESTAMP' + RM='$RM' + ofile='$ofile' + + + + +# Variables needed in config.status (file generation) which aren't already +# passed by autoconf. +SUBDIRS="$SUBDIRS" + + +_ACEOF + +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 + +# Handling of arguments. +for ac_config_target in $ac_config_targets +do + case $ac_config_target in + "config.h") CONFIG_HEADERS="$CONFIG_HEADERS config.h" ;; + "default-1") CONFIG_COMMANDS="$CONFIG_COMMANDS default-1" ;; + "libtool") CONFIG_COMMANDS="$CONFIG_COMMANDS libtool" ;; + "Makefile") CONFIG_FILES="$CONFIG_FILES Makefile" ;; + "backtrace-supported.h") CONFIG_FILES="$CONFIG_FILES backtrace-supported.h" ;; + "default") CONFIG_COMMANDS="$CONFIG_COMMANDS default" ;; + + *) as_fn_error "invalid argument: \`$ac_config_target'" "$LINENO" 5;; + esac +done + + +# If the user did not use the arguments to specify the items to instantiate, +# then the envvar interface is used. 
Set only those that are not. +# We use the long form for the default assignment because of an extremely +# bizarre bug on SunOS 4.1.3. +if $ac_need_defaults; then + test "${CONFIG_FILES+set}" = set || CONFIG_FILES=$config_files + test "${CONFIG_HEADERS+set}" = set || CONFIG_HEADERS=$config_headers + test "${CONFIG_COMMANDS+set}" = set || CONFIG_COMMANDS=$config_commands +fi + +# Have a temporary directory for convenience. Make it in the build tree +# simply because there is no reason against having it here, and in addition, +# creating and moving files from /tmp can sometimes cause problems. +# Hook for its removal unless debugging. +# Note that there is a small window in which the directory will not be cleaned: +# after its creation but before its name has been assigned to `$tmp'. +$debug || +{ + tmp= + trap 'exit_status=$? + { test -z "$tmp" || test ! -d "$tmp" || rm -fr "$tmp"; } && exit $exit_status +' 0 + trap 'as_fn_exit 1' 1 2 13 15 +} +# Create a (secure) tmp directory for tmp files. + +{ + tmp=`(umask 077 && mktemp -d "./confXXXXXX") 2>/dev/null` && + test -n "$tmp" && test -d "$tmp" +} || +{ + tmp=./conf$$-$RANDOM + (umask 077 && mkdir "$tmp") +} || as_fn_error "cannot create a temporary directory in ." "$LINENO" 5 + +# Set up the scripts for CONFIG_FILES section. +# No need to generate them if there are no CONFIG_FILES. +# This happens for instance with `./config.status config.h'. +if test -n "$CONFIG_FILES"; then + + +ac_cr=`echo X | tr X '\015'` +# On cygwin, bash can eat \r inside `` if the user requested igncr. +# But we know of no other shell where ac_cr would be empty at this +# point, so we can use a bashism as a fallback. +if test "x$ac_cr" = x; then + eval ac_cr=\$\'\\r\' +fi +ac_cs_awk_cr=`$AWK 'BEGIN { print "a\rb" }' /dev/null` +if test "$ac_cs_awk_cr" = "a${ac_cr}b"; then + ac_cs_awk_cr='\r' +else + ac_cs_awk_cr=$ac_cr +fi + +echo 'BEGIN {' >"$tmp/subs1.awk" && +_ACEOF + + +{ + echo "cat >conf$$subs.awk <<_ACEOF" && + echo "$ac_subst_vars" | sed 's/.*/&!$&$ac_delim/' && + echo "_ACEOF" +} >conf$$subs.sh || + as_fn_error "could not make $CONFIG_STATUS" "$LINENO" 5 +ac_delim_num=`echo "$ac_subst_vars" | grep -c '$'` +ac_delim='%!_!# ' +for ac_last_try in false false false false false :; do + . ./conf$$subs.sh || + as_fn_error "could not make $CONFIG_STATUS" "$LINENO" 5 + + ac_delim_n=`sed -n "s/.*$ac_delim\$/X/p" conf$$subs.awk | grep -c X` + if test $ac_delim_n = $ac_delim_num; then + break + elif $ac_last_try; then + as_fn_error "could not make $CONFIG_STATUS" "$LINENO" 5 + else + ac_delim="$ac_delim!$ac_delim _$ac_delim!! 
" + fi +done +rm -f conf$$subs.sh + +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +cat >>"\$tmp/subs1.awk" <<\\_ACAWK && +_ACEOF +sed -n ' +h +s/^/S["/; s/!.*/"]=/ +p +g +s/^[^!]*!// +:repl +t repl +s/'"$ac_delim"'$// +t delim +:nl +h +s/\(.\{148\}\).*/\1/ +t more1 +s/["\\]/\\&/g; s/^/"/; s/$/\\n"\\/ +p +n +b repl +:more1 +s/["\\]/\\&/g; s/^/"/; s/$/"\\/ +p +g +s/.\{148\}// +t nl +:delim +h +s/\(.\{148\}\).*/\1/ +t more2 +s/["\\]/\\&/g; s/^/"/; s/$/"/ +p +b +:more2 +s/["\\]/\\&/g; s/^/"/; s/$/"\\/ +p +g +s/.\{148\}// +t delim +' >$CONFIG_STATUS || ac_write_fail=1 +rm -f conf$$subs.awk +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +_ACAWK +cat >>"\$tmp/subs1.awk" <<_ACAWK && + for (key in S) S_is_set[key] = 1 + FS = "" + +} +{ + line = $ 0 + nfields = split(line, field, "@") + substed = 0 + len = length(field[1]) + for (i = 2; i < nfields; i++) { + key = field[i] + keylen = length(key) + if (S_is_set[key]) { + value = S[key] + line = substr(line, 1, len) "" value "" substr(line, len + keylen + 3) + len += length(value) + length(field[++i]) + substed = 1 + } else + len += 1 + keylen + } + + print line +} + +_ACAWK +_ACEOF +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +if sed "s/$ac_cr//" < /dev/null > /dev/null 2>&1; then + sed "s/$ac_cr\$//; s/$ac_cr/$ac_cs_awk_cr/g" +else + cat +fi < "$tmp/subs1.awk" > "$tmp/subs.awk" \ + || as_fn_error "could not setup config files machinery" "$LINENO" 5 +_ACEOF + +# VPATH may cause trouble with some makes, so we remove $(srcdir), +# ${srcdir} and @srcdir@ from VPATH if srcdir is ".", strip leading and +# trailing colons and then remove the whole line if VPATH becomes empty +# (actually we leave an empty line to preserve line numbers). +if test "x$srcdir" = x.; then + ac_vpsub='/^[ ]*VPATH[ ]*=/{ +s/:*\$(srcdir):*/:/ +s/:*\${srcdir}:*/:/ +s/:*@srcdir@:*/:/ +s/^\([^=]*=[ ]*\):*/\1/ +s/:*$// +s/^[^=]*=[ ]*$// +}' +fi + +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +fi # test -n "$CONFIG_FILES" + +# Set up the scripts for CONFIG_HEADERS section. +# No need to generate them if there are no CONFIG_HEADERS. +# This happens for instance with `./config.status Makefile'. +if test -n "$CONFIG_HEADERS"; then +cat >"$tmp/defines.awk" <<\_ACAWK || +BEGIN { +_ACEOF + +# Transform confdefs.h into an awk script `defines.awk', embedded as +# here-document in config.status, that substitutes the proper values into +# config.h.in to produce config.h. + +# Create a delimiter string that does not exist in confdefs.h, to ease +# handling of long lines. +ac_delim='%!_!# ' +for ac_last_try in false false :; do + ac_t=`sed -n "/$ac_delim/p" confdefs.h` + if test -z "$ac_t"; then + break + elif $ac_last_try; then + as_fn_error "could not make $CONFIG_HEADERS" "$LINENO" 5 + else + ac_delim="$ac_delim!$ac_delim _$ac_delim!! " + fi +done + +# For the awk script, D is an array of macro values keyed by name, +# likewise P contains macro parameters if any. Preserve backslash +# newline sequences. 
+ +ac_word_re=[_$as_cr_Letters][_$as_cr_alnum]* +sed -n ' +s/.\{148\}/&'"$ac_delim"'/g +t rset +:rset +s/^[ ]*#[ ]*define[ ][ ]*/ / +t def +d +:def +s/\\$// +t bsnl +s/["\\]/\\&/g +s/^ \('"$ac_word_re"'\)\(([^()]*)\)[ ]*\(.*\)/P["\1"]="\2"\ +D["\1"]=" \3"/p +s/^ \('"$ac_word_re"'\)[ ]*\(.*\)/D["\1"]=" \2"/p +d +:bsnl +s/["\\]/\\&/g +s/^ \('"$ac_word_re"'\)\(([^()]*)\)[ ]*\(.*\)/P["\1"]="\2"\ +D["\1"]=" \3\\\\\\n"\\/p +t cont +s/^ \('"$ac_word_re"'\)[ ]*\(.*\)/D["\1"]=" \2\\\\\\n"\\/p +t cont +d +:cont +n +s/.\{148\}/&'"$ac_delim"'/g +t clear +:clear +s/\\$// +t bsnlc +s/["\\]/\\&/g; s/^/"/; s/$/"/p +d +:bsnlc +s/["\\]/\\&/g; s/^/"/; s/$/\\\\\\n"\\/p +b cont +' >$CONFIG_STATUS || ac_write_fail=1 + +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 + for (key in D) D_is_set[key] = 1 + FS = "" +} +/^[\t ]*#[\t ]*(define|undef)[\t ]+$ac_word_re([\t (]|\$)/ { + line = \$ 0 + split(line, arg, " ") + if (arg[1] == "#") { + defundef = arg[2] + mac1 = arg[3] + } else { + defundef = substr(arg[1], 2) + mac1 = arg[2] + } + split(mac1, mac2, "(") #) + macro = mac2[1] + prefix = substr(line, 1, index(line, defundef) - 1) + if (D_is_set[macro]) { + # Preserve the white space surrounding the "#". + print prefix "define", macro P[macro] D[macro] + next + } else { + # Replace #undef with comments. This is necessary, for example, + # in the case of _POSIX_SOURCE, which is predefined and required + # on some systems where configure will not decide to define it. + if (defundef == "undef") { + print "/*", prefix defundef, macro, "*/" + next + } + } +} +{ print } +_ACAWK +_ACEOF +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 + as_fn_error "could not setup config headers machinery" "$LINENO" 5 +fi # test -n "$CONFIG_HEADERS" + + +eval set X " :F $CONFIG_FILES :H $CONFIG_HEADERS :C $CONFIG_COMMANDS" +shift +for ac_tag +do + case $ac_tag in + :[FHLC]) ac_mode=$ac_tag; continue;; + esac + case $ac_mode$ac_tag in + :[FHL]*:*);; + :L* | :C*:*) as_fn_error "invalid tag \`$ac_tag'" "$LINENO" 5;; + :[FH]-) ac_tag=-:-;; + :[FH]*) ac_tag=$ac_tag:$ac_tag.in;; + esac + ac_save_IFS=$IFS + IFS=: + set x $ac_tag + IFS=$ac_save_IFS + shift + ac_file=$1 + shift + + case $ac_mode in + :L) ac_source=$1;; + :[FH]) + ac_file_inputs= + for ac_f + do + case $ac_f in + -) ac_f="$tmp/stdin";; + *) # Look for the file first in the build tree, then in the source tree + # (if the path is not absolute). The absolute path cannot be DOS-style, + # because $ac_f cannot contain `:'. + test -f "$ac_f" || + case $ac_f in + [\\/$]*) false;; + *) test -f "$srcdir/$ac_f" && ac_f="$srcdir/$ac_f";; + esac || + as_fn_error "cannot find input file: \`$ac_f'" "$LINENO" 5;; + esac + case $ac_f in *\'*) ac_f=`$as_echo "$ac_f" | sed "s/'/'\\\\\\\\''/g"`;; esac + as_fn_append ac_file_inputs " '$ac_f'" + done + + # Let's still pretend it is `configure' which instantiates (i.e., don't + # use $as_me), people would be surprised to read: + # /* config.h. Generated by config.status. */ + configure_input='Generated from '` + $as_echo "$*" | sed 's|^[^:]*/||;s|:[^:]*/|, |g' + `' by configure.' + if test x"$ac_file" != x-; then + configure_input="$ac_file. $configure_input" + { $as_echo "$as_me:${as_lineno-$LINENO}: creating $ac_file" >&5 +$as_echo "$as_me: creating $ac_file" >&6;} + fi + # Neutralize special characters interpreted by sed in replacement strings. 
+ case $configure_input in #( + *\&* | *\|* | *\\* ) + ac_sed_conf_input=`$as_echo "$configure_input" | + sed 's/[\\\\&|]/\\\\&/g'`;; #( + *) ac_sed_conf_input=$configure_input;; + esac + + case $ac_tag in + *:-:* | *:-) cat >"$tmp/stdin" \ + || as_fn_error "could not create $ac_file" "$LINENO" 5 ;; + esac + ;; + esac + + ac_dir=`$as_dirname -- "$ac_file" || +$as_expr X"$ac_file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ + X"$ac_file" : 'X\(//\)[^/]' \| \ + X"$ac_file" : 'X\(//\)$' \| \ + X"$ac_file" : 'X\(/\)' \| . 2>/dev/null || +$as_echo X"$ac_file" | + sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ + s//\1/ + q + } + /^X\(\/\/\)[^/].*/{ + s//\1/ + q + } + /^X\(\/\/\)$/{ + s//\1/ + q + } + /^X\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + as_dir="$ac_dir"; as_fn_mkdir_p + ac_builddir=. + +case "$ac_dir" in +.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;; +*) + ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'` + # A ".." for each directory in $ac_dir_suffix. + ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'` + case $ac_top_builddir_sub in + "") ac_top_builddir_sub=. ac_top_build_prefix= ;; + *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; + esac ;; +esac +ac_abs_top_builddir=$ac_pwd +ac_abs_builddir=$ac_pwd$ac_dir_suffix +# for backward compatibility: +ac_top_builddir=$ac_top_build_prefix + +case $srcdir in + .) # We are building in place. + ac_srcdir=. + ac_top_srcdir=$ac_top_builddir_sub + ac_abs_top_srcdir=$ac_pwd ;; + [\\/]* | ?:[\\/]* ) # Absolute name. + ac_srcdir=$srcdir$ac_dir_suffix; + ac_top_srcdir=$srcdir + ac_abs_top_srcdir=$srcdir ;; + *) # Relative name. + ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix + ac_top_srcdir=$ac_top_build_prefix$srcdir + ac_abs_top_srcdir=$ac_pwd/$srcdir ;; +esac +ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix + + + case $ac_mode in + :F) + # + # CONFIG_FILE + # + + case $INSTALL in + [\\/$]* | ?:[\\/]* ) ac_INSTALL=$INSTALL ;; + *) ac_INSTALL=$ac_top_build_prefix$INSTALL ;; + esac + ac_MKDIR_P=$MKDIR_P + case $MKDIR_P in + [\\/$]* | ?:[\\/]* ) ;; + */*) ac_MKDIR_P=$ac_top_build_prefix$MKDIR_P ;; + esac +_ACEOF + +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +# If the template does not know about datarootdir, expand it. +# FIXME: This hack should be removed a few years after 2.60. +ac_datarootdir_hack=; ac_datarootdir_seen= +ac_sed_dataroot=' +/datarootdir/ { + p + q +} +/@datadir@/p +/@docdir@/p +/@infodir@/p +/@localedir@/p +/@mandir@/p' +case `eval "sed -n \"\$ac_sed_dataroot\" $ac_file_inputs"` in +*datarootdir*) ac_datarootdir_seen=yes;; +*@datadir@*|*@docdir@*|*@infodir@*|*@localedir@*|*@mandir@*) + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&5 +$as_echo "$as_me: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&2;} +_ACEOF +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 + ac_datarootdir_hack=' + s&@datadir@&$datadir&g + s&@docdir@&$docdir&g + s&@infodir@&$infodir&g + s&@localedir@&$localedir&g + s&@mandir@&$mandir&g + s&\\\${datarootdir}&$datarootdir&g' ;; +esac +_ACEOF + +# Neutralize VPATH when `$srcdir' = `.'. +# Shell code in configure.ac might set extrasub. +# FIXME: do we really want to maintain this feature? 
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +ac_sed_extra="$ac_vpsub +$extrasub +_ACEOF +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +:t +/@[a-zA-Z_][a-zA-Z_0-9]*@/!b +s|@configure_input@|$ac_sed_conf_input|;t t +s&@top_builddir@&$ac_top_builddir_sub&;t t +s&@top_build_prefix@&$ac_top_build_prefix&;t t +s&@srcdir@&$ac_srcdir&;t t +s&@abs_srcdir@&$ac_abs_srcdir&;t t +s&@top_srcdir@&$ac_top_srcdir&;t t +s&@abs_top_srcdir@&$ac_abs_top_srcdir&;t t +s&@builddir@&$ac_builddir&;t t +s&@abs_builddir@&$ac_abs_builddir&;t t +s&@abs_top_builddir@&$ac_abs_top_builddir&;t t +s&@INSTALL@&$ac_INSTALL&;t t +s&@MKDIR_P@&$ac_MKDIR_P&;t t +$ac_datarootdir_hack +" +eval sed \"\$ac_sed_extra\" "$ac_file_inputs" | $AWK -f "$tmp/subs.awk" >$tmp/out \ + || as_fn_error "could not create $ac_file" "$LINENO" 5 + +test -z "$ac_datarootdir_hack$ac_datarootdir_seen" && + { ac_out=`sed -n '/\${datarootdir}/p' "$tmp/out"`; test -n "$ac_out"; } && + { ac_out=`sed -n '/^[ ]*datarootdir[ ]*:*=/p' "$tmp/out"`; test -z "$ac_out"; } && + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file contains a reference to the variable \`datarootdir' +which seems to be undefined. Please make sure it is defined." >&5 +$as_echo "$as_me: WARNING: $ac_file contains a reference to the variable \`datarootdir' +which seems to be undefined. Please make sure it is defined." >&2;} + + rm -f "$tmp/stdin" + case $ac_file in + -) cat "$tmp/out" && rm -f "$tmp/out";; + *) rm -f "$ac_file" && mv "$tmp/out" "$ac_file";; + esac \ + || as_fn_error "could not create $ac_file" "$LINENO" 5 + ;; + :H) + # + # CONFIG_HEADER + # + if test x"$ac_file" != x-; then + { + $as_echo "/* $configure_input */" \ + && eval '$AWK -f "$tmp/defines.awk"' "$ac_file_inputs" + } >"$tmp/config.h" \ + || as_fn_error "could not create $ac_file" "$LINENO" 5 + if diff "$ac_file" "$tmp/config.h" >/dev/null 2>&1; then + { $as_echo "$as_me:${as_lineno-$LINENO}: $ac_file is unchanged" >&5 +$as_echo "$as_me: $ac_file is unchanged" >&6;} + else + rm -f "$ac_file" + mv "$tmp/config.h" "$ac_file" \ + || as_fn_error "could not create $ac_file" "$LINENO" 5 + fi + else + $as_echo "/* $configure_input */" \ + && eval '$AWK -f "$tmp/defines.awk"' "$ac_file_inputs" \ + || as_fn_error "could not create -" "$LINENO" 5 + fi +# Compute "$ac_file"'s index in $config_headers. +_am_arg="$ac_file" +_am_stamp_count=1 +for _am_header in $config_headers :; do + case $_am_header in + $_am_arg | $_am_arg:* ) + break ;; + * ) + _am_stamp_count=`expr $_am_stamp_count + 1` ;; + esac +done +echo "timestamp for $_am_arg" >`$as_dirname -- "$_am_arg" || +$as_expr X"$_am_arg" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ + X"$_am_arg" : 'X\(//\)[^/]' \| \ + X"$_am_arg" : 'X\(//\)$' \| \ + X"$_am_arg" : 'X\(/\)' \| . 2>/dev/null || +$as_echo X"$_am_arg" | + sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ + s//\1/ + q + } + /^X\(\/\/\)[^/].*/{ + s//\1/ + q + } + /^X\(\/\/\)$/{ + s//\1/ + q + } + /^X\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'`/stamp-h$_am_stamp_count + ;; + + :C) { $as_echo "$as_me:${as_lineno-$LINENO}: executing $ac_file commands" >&5 +$as_echo "$as_me: executing $ac_file commands" >&6;} + ;; + esac + + + case $ac_file$ac_mode in + "default-1":C) +# Only add multilib support code if we just rebuilt the top-level +# Makefile. +case " $CONFIG_FILES " in + *" Makefile "*) + ac_file=Makefile . ${multi_basedir}/config-ml.in + ;; +esac ;; + "libtool":C) + + # See if we are running on zsh, and set the options which allow our + # commands through without removal of \ escapes. 
+ if test -n "${ZSH_VERSION+set}" ; then + setopt NO_GLOB_SUBST + fi + + cfgfile="${ofile}T" + trap "$RM \"$cfgfile\"; exit 1" 1 2 15 + $RM "$cfgfile" + + cat <<_LT_EOF >> "$cfgfile" +#! $SHELL + +# `$ECHO "$ofile" | sed 's%^.*/%%'` - Provide generalized library-building support services. +# Generated automatically by $as_me ($PACKAGE$TIMESTAMP) $VERSION +# Libtool was configured on host `(hostname || uname -n) 2>/dev/null | sed 1q`: +# NOTE: Changes made to this file will be lost: look at ltmain.sh. +# +# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, +# 2006, 2007, 2008 Free Software Foundation, Inc. +# Written by Gordon Matzigkeit, 1996 +# +# This file is part of GNU Libtool. +# +# GNU Libtool is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License as +# published by the Free Software Foundation; either version 2 of +# the License, or (at your option) any later version. +# +# As a special exception to the GNU General Public License, +# if you distribute this file as part of a program or library that +# is built using GNU Libtool, you may include this file under the +# same distribution terms that you use for the rest of that program. +# +# GNU Libtool is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with GNU Libtool; see the file COPYING. If not, a copy +# can be downloaded from http://www.gnu.org/licenses/gpl.html, or +# obtained by writing to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. + + +# The names of the tagged configurations supported by this script. +available_tags="" + +# ### BEGIN LIBTOOL CONFIG + +# Which release of libtool.m4 was used? +macro_version=$macro_version +macro_revision=$macro_revision + +# Whether or not to build shared libraries. +build_libtool_libs=$enable_shared + +# Whether or not to build static libraries. +build_old_libs=$enable_static + +# What type of objects to build. +pic_mode=$pic_mode + +# Whether or not to optimize for fast installation. +fast_install=$enable_fast_install + +# The host system. +host_alias=$host_alias +host=$host +host_os=$host_os + +# The build system. +build_alias=$build_alias +build=$build +build_os=$build_os + +# A sed program that does not truncate output. +SED=$lt_SED + +# Sed that helps us avoid accidentally triggering echo(1) options like -n. +Xsed="\$SED -e 1s/^X//" + +# A grep program that handles long lines. +GREP=$lt_GREP + +# An ERE matcher. +EGREP=$lt_EGREP + +# A literal string matcher. +FGREP=$lt_FGREP + +# A BSD- or MS-compatible name lister. +NM=$lt_NM + +# Whether we need soft or hard links. +LN_S=$lt_LN_S + +# What is the maximum length of a command? +max_cmd_len=$max_cmd_len + +# Object file suffix (normally "o"). +objext=$ac_objext + +# Executable file suffix (normally ""). +exeext=$exeext + +# whether the shell understands "unset". +lt_unset=$lt_unset + +# turn spaces into newlines. +SP2NL=$lt_lt_SP2NL + +# turn newlines into spaces. +NL2SP=$lt_lt_NL2SP + +# How to create reloadable object files. +reload_flag=$lt_reload_flag +reload_cmds=$lt_reload_cmds + +# Method to check whether dependent libraries are shared objects. 
+deplibs_check_method=$lt_deplibs_check_method + +# Command to use when deplibs_check_method == "file_magic". +file_magic_cmd=$lt_file_magic_cmd + +# The archiver. +AR=$lt_AR +AR_FLAGS=$lt_AR_FLAGS + +# A symbol stripping program. +STRIP=$lt_STRIP + +# Commands used to install an old-style archive. +RANLIB=$lt_RANLIB +old_postinstall_cmds=$lt_old_postinstall_cmds +old_postuninstall_cmds=$lt_old_postuninstall_cmds + +# A C compiler. +LTCC=$lt_CC + +# LTCC compiler flags. +LTCFLAGS=$lt_CFLAGS + +# Take the output of nm and produce a listing of raw symbols and C names. +global_symbol_pipe=$lt_lt_cv_sys_global_symbol_pipe + +# Transform the output of nm in a proper C declaration. +global_symbol_to_cdecl=$lt_lt_cv_sys_global_symbol_to_cdecl + +# Transform the output of nm in a C name address pair. +global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address + +# Transform the output of nm in a C name address pair when lib prefix is needed. +global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix + +# The name of the directory that contains temporary libtool files. +objdir=$objdir + +# Shell to use when invoking shell scripts. +SHELL=$lt_SHELL + +# An echo program that does not interpret backslashes. +ECHO=$lt_ECHO + +# Used to examine libraries when file_magic_cmd begins with "file". +MAGIC_CMD=$MAGIC_CMD + +# Must we lock files when doing compilation? +need_locks=$lt_need_locks + +# Tool to manipulate archived DWARF debug symbol files on Mac OS X. +DSYMUTIL=$lt_DSYMUTIL + +# Tool to change global to local symbols on Mac OS X. +NMEDIT=$lt_NMEDIT + +# Tool to manipulate fat objects and archives on Mac OS X. +LIPO=$lt_LIPO + +# ldd/readelf like tool for Mach-O binaries on Mac OS X. +OTOOL=$lt_OTOOL + +# ldd/readelf like tool for 64 bit Mach-O binaries on Mac OS X 10.4. +OTOOL64=$lt_OTOOL64 + +# Old archive suffix (normally "a"). +libext=$libext + +# Shared library suffix (normally ".so"). +shrext_cmds=$lt_shrext_cmds + +# The commands to extract the exported symbol list from a shared archive. +extract_expsyms_cmds=$lt_extract_expsyms_cmds + +# Variables whose values should be saved in libtool wrapper scripts and +# restored at link time. +variables_saved_for_relink=$lt_variables_saved_for_relink + +# Do we need the "lib" prefix for modules? +need_lib_prefix=$need_lib_prefix + +# Do we need a version for libraries? +need_version=$need_version + +# Library versioning type. +version_type=$version_type + +# Shared library runtime path variable. +runpath_var=$runpath_var + +# Shared library path variable. +shlibpath_var=$shlibpath_var + +# Is shlibpath searched before the hard-coded library search path? +shlibpath_overrides_runpath=$shlibpath_overrides_runpath + +# Format of library name prefix. +libname_spec=$lt_libname_spec + +# List of archive names. First name is the real one, the rest are links. +# The last name is the one that the linker finds with -lNAME +library_names_spec=$lt_library_names_spec + +# The coded name of the library, if different from the real name. +soname_spec=$lt_soname_spec + +# Command to use after installation of a shared archive. +postinstall_cmds=$lt_postinstall_cmds + +# Command to use after uninstallation of a shared archive. +postuninstall_cmds=$lt_postuninstall_cmds + +# Commands used to finish a libtool library installation in a directory. +finish_cmds=$lt_finish_cmds + +# As "finish_cmds", except a single script fragment to be evaled but +# not shown. 
+finish_eval=$lt_finish_eval + +# Whether we should hardcode library paths into libraries. +hardcode_into_libs=$hardcode_into_libs + +# Compile-time system search path for libraries. +sys_lib_search_path_spec=$lt_sys_lib_search_path_spec + +# Run-time system search path for libraries. +sys_lib_dlsearch_path_spec=$lt_sys_lib_dlsearch_path_spec + +# Whether dlopen is supported. +dlopen_support=$enable_dlopen + +# Whether dlopen of programs is supported. +dlopen_self=$enable_dlopen_self + +# Whether dlopen of statically linked programs is supported. +dlopen_self_static=$enable_dlopen_self_static + +# Commands to strip libraries. +old_striplib=$lt_old_striplib +striplib=$lt_striplib + + +# The linker used to build libraries. +LD=$lt_LD + +# Commands used to build an old-style archive. +old_archive_cmds=$lt_old_archive_cmds + +# A language specific compiler. +CC=$lt_compiler + +# Is the compiler the GNU compiler? +with_gcc=$GCC + +# Compiler flag to turn off builtin functions. +no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag + +# How to pass a linker flag through the compiler. +wl=$lt_lt_prog_compiler_wl + +# Additional compiler flags for building library objects. +pic_flag=$lt_lt_prog_compiler_pic + +# Compiler flag to prevent dynamic linking. +link_static_flag=$lt_lt_prog_compiler_static + +# Does compiler simultaneously support -c and -o options? +compiler_c_o=$lt_lt_cv_prog_compiler_c_o + +# Whether or not to add -lc for building shared libraries. +build_libtool_need_lc=$archive_cmds_need_lc + +# Whether or not to disallow shared libs when runtime libs are static. +allow_libtool_libs_with_static_runtimes=$enable_shared_with_static_runtimes + +# Compiler flag to allow reflexive dlopens. +export_dynamic_flag_spec=$lt_export_dynamic_flag_spec + +# Compiler flag to generate shared objects directly from archives. +whole_archive_flag_spec=$lt_whole_archive_flag_spec + +# Whether the compiler copes with passing no objects directly. +compiler_needs_object=$lt_compiler_needs_object + +# Create an old-style archive from a shared archive. +old_archive_from_new_cmds=$lt_old_archive_from_new_cmds + +# Create a temporary old-style archive to link instead of a shared archive. +old_archive_from_expsyms_cmds=$lt_old_archive_from_expsyms_cmds + +# Commands used to build a shared archive. +archive_cmds=$lt_archive_cmds +archive_expsym_cmds=$lt_archive_expsym_cmds + +# Commands used to build a loadable module if different from building +# a shared archive. +module_cmds=$lt_module_cmds +module_expsym_cmds=$lt_module_expsym_cmds + +# Whether we are building with GNU ld or not. +with_gnu_ld=$lt_with_gnu_ld + +# Flag that allows shared libraries with undefined symbols to be built. +allow_undefined_flag=$lt_allow_undefined_flag + +# Flag that enforces no undefined symbols. +no_undefined_flag=$lt_no_undefined_flag + +# Flag to hardcode \$libdir into a binary during linking. +# This must work even if \$libdir does not exist +hardcode_libdir_flag_spec=$lt_hardcode_libdir_flag_spec + +# If ld is used when linking, flag to hardcode \$libdir into a binary +# during linking. This must work even if \$libdir does not exist. +hardcode_libdir_flag_spec_ld=$lt_hardcode_libdir_flag_spec_ld + +# Whether we need a single "-rpath" flag with a separated argument. +hardcode_libdir_separator=$lt_hardcode_libdir_separator + +# Set to "yes" if using DIR/libNAME\${shared_ext} during linking hardcodes +# DIR into the resulting binary. 
+hardcode_direct=$hardcode_direct + +# Set to "yes" if using DIR/libNAME\${shared_ext} during linking hardcodes +# DIR into the resulting binary and the resulting library dependency is +# "absolute",i.e impossible to change by setting \${shlibpath_var} if the +# library is relocated. +hardcode_direct_absolute=$hardcode_direct_absolute + +# Set to "yes" if using the -LDIR flag during linking hardcodes DIR +# into the resulting binary. +hardcode_minus_L=$hardcode_minus_L + +# Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR +# into the resulting binary. +hardcode_shlibpath_var=$hardcode_shlibpath_var + +# Set to "yes" if building a shared library automatically hardcodes DIR +# into the library and all subsequent libraries and executables linked +# against it. +hardcode_automatic=$hardcode_automatic + +# Set to yes if linker adds runtime paths of dependent libraries +# to runtime path list. +inherit_rpath=$inherit_rpath + +# Whether libtool must link a program against all its dependency libraries. +link_all_deplibs=$link_all_deplibs + +# Fix the shell variable \$srcfile for the compiler. +fix_srcfile_path=$lt_fix_srcfile_path + +# Set to "yes" if exported symbols are required. +always_export_symbols=$always_export_symbols + +# The commands to list exported symbols. +export_symbols_cmds=$lt_export_symbols_cmds + +# Symbols that should not be listed in the preloaded symbols. +exclude_expsyms=$lt_exclude_expsyms + +# Symbols that must always be exported. +include_expsyms=$lt_include_expsyms + +# Commands necessary for linking programs (against libraries) with templates. +prelink_cmds=$lt_prelink_cmds + +# Specify filename containing input files. +file_list_spec=$lt_file_list_spec + +# How to hardcode a shared library path into an executable. +hardcode_action=$hardcode_action + +# ### END LIBTOOL CONFIG + +_LT_EOF + + case $host_os in + aix3*) + cat <<\_LT_EOF >> "$cfgfile" +# AIX sometimes has problems with the GCC collect2 program. For some +# reason, if we set the COLLECT_NAMES environment variable, the problems +# vanish in a puff of smoke. +if test "X${COLLECT_NAMES+set}" != Xset; then + COLLECT_NAMES= + export COLLECT_NAMES +fi +_LT_EOF + ;; + esac + + +ltmain="$ac_aux_dir/ltmain.sh" + + + # We use sed instead of cat because bash on DJGPP gets confused if + # if finds mixed CR/LF and LF-only lines. Since sed operates in + # text mode, it properly converts lines to CR/LF. This bash problem + # is reportedly fixed, but why not run on old versions too? + sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \ + || (rm -f "$cfgfile"; exit 1) + + case $xsi_shell in + yes) + cat << \_LT_EOF >> "$cfgfile" + +# func_dirname file append nondir_replacement +# Compute the dirname of FILE. If nonempty, add APPEND to the result, +# otherwise set result to NONDIR_REPLACEMENT. +func_dirname () +{ + case ${1} in + */*) func_dirname_result="${1%/*}${2}" ;; + * ) func_dirname_result="${3}" ;; + esac +} + +# func_basename file +func_basename () +{ + func_basename_result="${1##*/}" +} + +# func_dirname_and_basename file append nondir_replacement +# perform func_basename and func_dirname in a single function +# call: +# dirname: Compute the dirname of FILE. If nonempty, +# add APPEND to the result, otherwise set result +# to NONDIR_REPLACEMENT. +# value returned in "$func_dirname_result" +# basename: Compute filename of FILE. +# value retuned in "$func_basename_result" +# Implementation must be kept synchronized with func_dirname +# and func_basename. 
For efficiency, we do not delegate to +# those functions but instead duplicate the functionality here. +func_dirname_and_basename () +{ + case ${1} in + */*) func_dirname_result="${1%/*}${2}" ;; + * ) func_dirname_result="${3}" ;; + esac + func_basename_result="${1##*/}" +} + +# func_stripname prefix suffix name +# strip PREFIX and SUFFIX off of NAME. +# PREFIX and SUFFIX must not contain globbing or regex special +# characters, hashes, percent signs, but SUFFIX may contain a leading +# dot (in which case that matches only a dot). +func_stripname () +{ + # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are + # positional parameters, so assign one to ordinary parameter first. + func_stripname_result=${3} + func_stripname_result=${func_stripname_result#"${1}"} + func_stripname_result=${func_stripname_result%"${2}"} +} + +# func_opt_split +func_opt_split () +{ + func_opt_split_opt=${1%%=*} + func_opt_split_arg=${1#*=} +} + +# func_lo2o object +func_lo2o () +{ + case ${1} in + *.lo) func_lo2o_result=${1%.lo}.${objext} ;; + *) func_lo2o_result=${1} ;; + esac +} + +# func_xform libobj-or-source +func_xform () +{ + func_xform_result=${1%.*}.lo +} + +# func_arith arithmetic-term... +func_arith () +{ + func_arith_result=$(( $* )) +} + +# func_len string +# STRING may not start with a hyphen. +func_len () +{ + func_len_result=${#1} +} + +_LT_EOF + ;; + *) # Bourne compatible functions. + cat << \_LT_EOF >> "$cfgfile" + +# func_dirname file append nondir_replacement +# Compute the dirname of FILE. If nonempty, add APPEND to the result, +# otherwise set result to NONDIR_REPLACEMENT. +func_dirname () +{ + # Extract subdirectory from the argument. + func_dirname_result=`$ECHO "X${1}" | $Xsed -e "$dirname"` + if test "X$func_dirname_result" = "X${1}"; then + func_dirname_result="${3}" + else + func_dirname_result="$func_dirname_result${2}" + fi +} + +# func_basename file +func_basename () +{ + func_basename_result=`$ECHO "X${1}" | $Xsed -e "$basename"` +} + + +# func_stripname prefix suffix name +# strip PREFIX and SUFFIX off of NAME. +# PREFIX and SUFFIX must not contain globbing or regex special +# characters, hashes, percent signs, but SUFFIX may contain a leading +# dot (in which case that matches only a dot). +# func_strip_suffix prefix name +func_stripname () +{ + case ${2} in + .*) func_stripname_result=`$ECHO "X${3}" \ + | $Xsed -e "s%^${1}%%" -e "s%\\\\${2}\$%%"`;; + *) func_stripname_result=`$ECHO "X${3}" \ + | $Xsed -e "s%^${1}%%" -e "s%${2}\$%%"`;; + esac +} + +# sed scripts: +my_sed_long_opt='1s/^\(-[^=]*\)=.*/\1/;q' +my_sed_long_arg='1s/^-[^=]*=//' + +# func_opt_split +func_opt_split () +{ + func_opt_split_opt=`$ECHO "X${1}" | $Xsed -e "$my_sed_long_opt"` + func_opt_split_arg=`$ECHO "X${1}" | $Xsed -e "$my_sed_long_arg"` +} + +# func_lo2o object +func_lo2o () +{ + func_lo2o_result=`$ECHO "X${1}" | $Xsed -e "$lo2o"` +} + +# func_xform libobj-or-source +func_xform () +{ + func_xform_result=`$ECHO "X${1}" | $Xsed -e 's/\.[^.]*$/.lo/'` +} + +# func_arith arithmetic-term... +func_arith () +{ + func_arith_result=`expr "$@"` +} + +# func_len string +# STRING may not start with a hyphen. +func_len () +{ + func_len_result=`expr "$1" : ".*" 2>/dev/null || echo $max_cmd_len` +} + +_LT_EOF +esac + +case $lt_shell_append in + yes) + cat << \_LT_EOF >> "$cfgfile" + +# func_append var value +# Append VALUE to the end of shell variable VAR. 
+func_append () +{ + eval "$1+=\$2" +} +_LT_EOF + ;; + *) + cat << \_LT_EOF >> "$cfgfile" + +# func_append var value +# Append VALUE to the end of shell variable VAR. +func_append () +{ + eval "$1=\$$1\$2" +} + +_LT_EOF + ;; + esac + + + sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \ + || (rm -f "$cfgfile"; exit 1) + + mv -f "$cfgfile" "$ofile" || + (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile") + chmod +x "$ofile" + + ;; + "default":C) if test -n "$CONFIG_FILES"; then + if test -n "${with_target_subdir}"; then + # Multilibs need MULTISUBDIR defined correctly in certain makefiles so + # that multilib installs will end up installed in the correct place. + # The testsuite needs it for multilib-aware ABI baseline files. + # To work around this not being passed down from config-ml.in -> + # srcdir/Makefile.am -> srcdir/{src,libsupc++,...}/Makefile.am, manually + # append it here. Only modify Makefiles that have just been created. + # + # Also, get rid of this simulated-VPATH thing that automake does. + cat > vpsed << \_EOF + s!`test -f '$<' || echo '$(srcdir)/'`!! +_EOF + for i in $SUBDIRS; do + case $CONFIG_FILES in + *${i}/Makefile*) + #echo "Adding MULTISUBDIR to $i/Makefile" + sed -f vpsed $i/Makefile > tmp + grep '^MULTISUBDIR =' Makefile >> tmp + mv tmp $i/Makefile + ;; + esac + done + rm vpsed + fi + fi + ;; + + esac +done # for ac_tag + + +as_fn_exit 0 +_ACEOF +ac_clean_files=$ac_clean_files_save + +test $ac_write_fail = 0 || + as_fn_error "write failure creating $CONFIG_STATUS" "$LINENO" 5 + + +# configure is writing to config.log, and then calls config.status. +# config.status does its own redirection, appending to config.log. +# Unfortunately, on DOS this fails, as config.log is still kept open +# by configure, so config.status won't be able to write to it; its +# output is simply discarded. So we exec the FD to /dev/null, +# effectively closing config.log, so it can be properly (re)opened and +# appended to by config.status. When coming back to configure, we +# need to make the FD available again. +if test "$no_create" != yes; then + ac_cs_success=: + ac_config_status_args= + test "$silent" = yes && + ac_config_status_args="$ac_config_status_args --quiet" + exec 5>/dev/null + $SHELL $CONFIG_STATUS $ac_config_status_args || ac_cs_success=false + exec 5>>config.log + # Use ||, not &&, to avoid exiting from the if with $? = 1, which + # would make configure fail if this is the last instruction. + $ac_cs_success || as_fn_exit $? +fi +if test -n "$ac_unrecognized_opts" && test "$enable_option_checking" != no; then + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: unrecognized options: $ac_unrecognized_opts" >&5 +$as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2;} +fi + diff --git a/backtrace-sys/src/libbacktrace/configure.ac b/backtrace-sys/src/libbacktrace/configure.ac new file mode 100644 index 000000000..b9056cc74 --- /dev/null +++ b/backtrace-sys/src/libbacktrace/configure.ac @@ -0,0 +1,512 @@ +# configure.ac -- Backtrace configure script. +# Copyright (C) 2012-2018 Free Software Foundation, Inc. + +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: + +# (1) Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. 
+ +# (2) Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. + +# (3) The name of the author may not be used to +# endorse or promote products derived from this software without +# specific prior written permission. + +# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR +# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, +# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, +# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING +# IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +AC_PREREQ(2.64) +AC_INIT(package-unused, version-unused,, libbacktrace) +AC_CONFIG_SRCDIR(backtrace.h) +AC_CONFIG_HEADER(config.h) +AC_CONFIG_MACRO_DIR(config) + +# with_target_subdir is used when configured as part of a GCC tree. +if test -n "${with_target_subdir}"; then + AM_ENABLE_MULTILIB(, ..) +fi + +AC_CANONICAL_SYSTEM +target_alias=${target_alias-$host_alias} + +AC_USE_SYSTEM_EXTENSIONS + +libtool_VERSION=1:0:0 +AC_SUBST(libtool_VERSION) + +# 1.11.1: Require that version of automake. +# foreign: Don't require README, INSTALL, NEWS, etc. +# no-define: Don't define PACKAGE and VERSION. +# no-dependencies: Don't generate automatic dependencies. +# (because it breaks when using bootstrap-lean, since some of the +# headers are gone at "make install" time). +# -Wall: Issue all automake warnings. +# -Wno-portability: Don't warn about constructs supported by GNU make. +# (because GCC requires GNU make anyhow). +AM_INIT_AUTOMAKE([1.11.1 foreign no-dist no-define no-dependencies -Wall -Wno-portability]) + +AM_MAINTAINER_MODE + +AC_ARG_WITH(target-subdir, +[ --with-target-subdir=SUBDIR Configuring in a subdirectory for target]) + +# We must force CC to /not/ be precious variables; otherwise +# the wrong, non-multilib-adjusted value will be used in multilibs. +# As a side effect, we have to subst CFLAGS ourselves. +m4_rename([_AC_ARG_VAR_PRECIOUS],[backtrace_PRECIOUS]) +m4_define([_AC_ARG_VAR_PRECIOUS],[]) +AC_PROG_CC +m4_rename_force([backtrace_PRECIOUS],[_AC_ARG_VAR_PRECIOUS]) + +AC_SUBST(CFLAGS) + +AC_PROG_AWK +case "$AWK" in +"") AC_MSG_ERROR([can't build without awk]) ;; +esac + +LT_INIT +AM_PROG_LIBTOOL + +AC_SYS_LARGEFILE + +backtrace_supported=yes + +if test -n "${with_target_subdir}"; then + # We are compiling a GCC library. We can assume that the unwind + # library exists. 
+ BACKTRACE_FILE="backtrace.lo simple.lo" +else + AC_CHECK_HEADER([unwind.h], + [AC_CHECK_FUNC([_Unwind_Backtrace], + [BACKTRACE_FILE="backtrace.lo simple.lo"], + [BACKTRACE_FILE="nounwind.lo" + backtrace_supported=no])], + [BACKTRACE_FILE="nounwind.lo" + backtrace_supported=no]) +fi +AC_SUBST(BACKTRACE_FILE) + +EXTRA_FLAGS= +if test -n "${with_target_subdir}"; then + EXTRA_FLAGS="-funwind-tables -frandom-seed=\$@" +else + AC_CACHE_CHECK([for -funwind-tables option], + [libbacktrace_cv_c_unwind_tables], + [CFLAGS_hold="$CFLAGS" + CFLAGS="$CFLAGS -funwind-tables" + AC_COMPILE_IFELSE( + [AC_LANG_PROGRAM([static int f() { return 0; }], [return f();])], + [libbacktrace_cv_c_unwind_tables=yes], + [libbacktrace_cv_c_unwind_tables=no]) + CFLAGS="$CFLAGS_hold"]) + if test "$libbacktrace_cv_c_unwind_tables" = "yes"; then + EXTRA_FLAGS=-funwind-tables + fi + AC_CACHE_CHECK([for -frandom-seed=string option], + [libbacktrace_cv_c_random_seed_string], + [CFLAGS_hold="$CFLAGS" + CFLAGS="$CFLAGS -frandom-seed=conftest.lo" + AC_COMPILE_IFELSE( + [AC_LANG_PROGRAM([], [return 0;])], + [libbacktrace_cv_c_random_seed_string=yes], + [libbacktrace_cv_c_random_seed_string=no]) + CFLAGS="$CFLAGS_hold"]) + if test "$libbacktrace_cv_c_random_seed_string" = "yes"; then + EXTRA_FLAGS="$EXTRA_FLAGS -frandom-seed=\$@" + fi +fi +AC_SUBST(EXTRA_FLAGS) + +ACX_PROG_CC_WARNING_OPTS([-W -Wall -Wwrite-strings -Wstrict-prototypes \ + -Wmissing-prototypes -Wold-style-definition \ + -Wmissing-format-attribute -Wcast-qual], + [WARN_FLAGS]) + +if test -n "${with_target_subdir}"; then + WARN_FLAGS="$WARN_FLAGS -Werror" +fi + +AC_SUBST(WARN_FLAGS) + +if test -n "${with_target_subdir}"; then + GCC_CHECK_UNWIND_GETIPINFO +else + ac_save_CFFLAGS="$CFLAGS" + CFLAGS="$CFLAGS -Werror-implicit-function-declaration" + AC_MSG_CHECKING([for _Unwind_GetIPInfo]) + AC_LINK_IFELSE( + [AC_LANG_PROGRAM( + [#include "unwind.h" + struct _Unwind_Context *context; + int ip_before_insn = 0;], + [return _Unwind_GetIPInfo (context, &ip_before_insn);])], + [have_unwind_getipinfo=yes], [have_unwind_getipinfo=no]) + CFLAGS="$ac_save_CFLAGS" + AC_MSG_RESULT([$have_unwind_getipinfo]) + if test "$have_unwind_getipinfo" = "yes"; then + AC_DEFINE(HAVE_GETIPINFO, 1, [Define if _Unwind_GetIPInfo is available.]) + fi +fi + +# Enable --enable-host-shared. +AC_ARG_ENABLE(host-shared, +[AS_HELP_STRING([--enable-host-shared], + [build host code as shared libraries])], +[PIC_FLAG=-fPIC], [PIC_FLAG=]) +AC_SUBST(PIC_FLAG) + +# Test for __sync support. +AC_CACHE_CHECK([__sync extensions], +[libbacktrace_cv_sys_sync], +[if test -n "${with_target_subdir}"; then + case "${host}" in + hppa*-*-hpux*) libbacktrace_cv_sys_sync=no ;; + *) libbacktrace_cv_sys_sync=yes ;; + esac + else + AC_LINK_IFELSE( + [AC_LANG_PROGRAM([int i;], + [__sync_bool_compare_and_swap (&i, i, i); + __sync_lock_test_and_set (&i, 1); + __sync_lock_release (&i);])], + [libbacktrace_cv_sys_sync=yes], + [libbacktrace_cv_sys_sync=no]) + fi]) +BACKTRACE_SUPPORTS_THREADS=0 +if test "$libbacktrace_cv_sys_sync" = "yes"; then + BACKTRACE_SUPPORTS_THREADS=1 + AC_DEFINE([HAVE_SYNC_FUNCTIONS], 1, + [Define to 1 if you have the __sync functions]) +fi +AC_SUBST(BACKTRACE_SUPPORTS_THREADS) + +# Test for __atomic support. 
+AC_CACHE_CHECK([__atomic extensions], +[libbacktrace_cv_sys_atomic], +[if test -n "${with_target_subdir}"; then + libbacktrace_cv_sys_atomic=yes + else + AC_LINK_IFELSE( + [AC_LANG_PROGRAM([int i;], + [__atomic_load_n (&i, __ATOMIC_ACQUIRE); + __atomic_store_n (&i, 1, __ATOMIC_RELEASE);])], + [libbacktrace_cv_sys_atomic=yes], + [libbacktrace_cv_sys_atomic=no]) + fi]) +if test "$libbacktrace_cv_sys_atomic" = "yes"; then + AC_DEFINE([HAVE_ATOMIC_FUNCTIONS], 1, + [Define to 1 if you have the __atomic functions]) +fi + +# The library needs to be able to read the executable itself. Compile +# a file to determine the executable format. The awk script +# filetype.awk prints out the file type. +AC_CACHE_CHECK([output filetype], +[libbacktrace_cv_sys_filetype], +[filetype= +AC_COMPILE_IFELSE( + [AC_LANG_PROGRAM([int i;], [int j;])], + [filetype=`${AWK} -f $srcdir/filetype.awk conftest.$ac_objext`], + [AC_MSG_FAILURE([compiler failed])]) +libbacktrace_cv_sys_filetype=$filetype]) + +# Match the file type to decide what files to compile. +FORMAT_FILE= +backtrace_supports_data=yes +case "$libbacktrace_cv_sys_filetype" in +elf*) FORMAT_FILE="elf.lo" ;; +pecoff) FORMAT_FILE="pecoff.lo" + backtrace_supports_data=no + ;; +xcoff*) FORMAT_FILE="xcoff.lo" + backtrace_supports_data=no + ;; +macho*) FORMAT_FILE="macho.lo" + backtrace_supports_data=no + ;; +*) AC_MSG_WARN([could not determine output file type]) + FORMAT_FILE="unknown.lo" + backtrace_supported=no + ;; +esac +AC_SUBST(FORMAT_FILE) + +# ELF defines. +elfsize= +case "$libbacktrace_cv_sys_filetype" in +elf32) elfsize=32 ;; +elf64) elfsize=64 ;; +*) elfsize=unused +esac +AC_DEFINE_UNQUOTED([BACKTRACE_ELF_SIZE], [$elfsize], [ELF size: 32 or 64]) + +# XCOFF defines. +xcoffsize= +case "$libbacktrace_cv_sys_filetype" in +xcoff32) xcoffsize=32 ;; +xcoff64) xcoffsize=64 ;; +*) xcoffsize=unused +esac +AC_DEFINE_UNQUOTED([BACKTRACE_XCOFF_SIZE], [$xcoffsize], [XCOFF size: 32 or 64]) + +BACKTRACE_SUPPORTED=0 +if test "$backtrace_supported" = "yes"; then + BACKTRACE_SUPPORTED=1 +fi +AC_SUBST(BACKTRACE_SUPPORTED) + +BACKTRACE_SUPPORTS_DATA=0 +if test "$backtrace_supports_data" = "yes"; then + BACKTRACE_SUPPORTS_DATA=1 +fi +AC_SUBST(BACKTRACE_SUPPORTS_DATA) + +AC_CHECK_HEADERS(sys/mman.h) +if test "$ac_cv_header_sys_mman_h" = "no"; then + have_mmap=no +else + if test -n "${with_target_subdir}"; then + # When built as a GCC target library, we can't do a link test. We + # simply assume that if we have mman.h, we have mmap. + have_mmap=yes + case "${host}" in + spu-*-*|*-*-msdosdjgpp) + # The SPU does not have mmap, but it has a sys/mman.h header file + # containing "mmap_eaddr" and the mmap flags, confusing the test. + # DJGPP also has sys/mman.h, but no mmap + have_mmap=no ;; + esac + else + AC_CHECK_FUNC(mmap, [have_mmap=yes], [have_mmap=no]) + fi +fi + +case "${host_os}" in +darwin*) + have_mmap=no ;; +esac + +if test "$have_mmap" = "no"; then + VIEW_FILE=read.lo + ALLOC_FILE=alloc.lo +else + VIEW_FILE=mmapio.lo + AC_PREPROC_IFELSE([ +#include <sys/mman.h> +#if !defined(MAP_ANONYMOUS) && !defined(MAP_ANON) + #error no MAP_ANONYMOUS +#endif +], [ALLOC_FILE=mmap.lo], [ALLOC_FILE=alloc.lo]) +fi +AC_SUBST(VIEW_FILE) +AC_SUBST(ALLOC_FILE) + +BACKTRACE_USES_MALLOC=0 +if test "$ALLOC_FILE" = "alloc.lo"; then + BACKTRACE_USES_MALLOC=1 +fi +AC_SUBST(BACKTRACE_USES_MALLOC) + +# Check for dl_iterate_phdr. 
+AC_CHECK_HEADERS(link.h) +if test "$ac_cv_header_link_h" = "no"; then + have_dl_iterate_phdr=no +else + if test -n "${with_target_subdir}"; then + # When built as a GCC target library, we can't do a link test. + AC_EGREP_HEADER([dl_iterate_phdr], [link.h], [have_dl_iterate_phdr=yes], + [have_dl_iterate_phdr=no]) + case "${host}" in + *-*-solaris2.10*) + # Avoid dl_iterate_phdr on Solaris 10, where it is in the + # header file but is only in -ldl. + have_dl_iterate_phdr=no ;; + esac + else + AC_CHECK_FUNC([dl_iterate_phdr], [have_dl_iterate_phdr=yes], + [have_dl_iterate_phdr=no]) + fi +fi +if test "$have_dl_iterate_phdr" = "yes"; then + AC_DEFINE(HAVE_DL_ITERATE_PHDR, 1, [Define if dl_iterate_phdr is available.]) +fi + +# Check for loadquery. +AC_CHECK_HEADERS(sys/ldr.h) +if test "$ac_cv_header_sys_ldr_h" = "no"; then + have_loadquery=no +else + if test -n "${with_target_subdir}"; then + # When built as a GCC target library, we can't do a link test. + AC_EGREP_HEADER([loadquery], [sys/ldr.h], [have_loadquery=yes], + [have_loadquery=no]) + else + AC_CHECK_FUNC([loadquery], [have_loadquery=yes], + [have_loadquery=no]) + fi +fi +if test "$have_loadquery" = "yes"; then + AC_DEFINE(HAVE_LOADQUERY, 1, [Define if AIX loadquery is available.]) +fi + +# Check for the fcntl function. +if test -n "${with_target_subdir}"; then + case "${host}" in + *-*-mingw*) have_fcntl=no ;; + spu-*-*) have_fcntl=no ;; + *) have_fcntl=yes ;; + esac +else + AC_CHECK_FUNC(fcntl, [have_fcntl=yes], [have_fcntl=no]) +fi +if test "$have_fcntl" = "yes"; then + AC_DEFINE([HAVE_FCNTL], 1, + [Define to 1 if you have the fcntl function]) +fi + +AC_CHECK_DECLS(strnlen) +AC_CHECK_FUNCS(lstat readlink) + +# Check for getexecname function. +if test -n "${with_target_subdir}"; then + case "${host}" in + *-*-solaris2*) have_getexecname=yes ;; + *) have_getexecname=no ;; + esac +else + AC_CHECK_FUNC(getexecname, [have_getexecname=yes], [have_getexecname=no]) +fi +if test "$have_getexecname" = "yes"; then + AC_DEFINE(HAVE_GETEXECNAME, 1, [Define if getexecname is available.]) +fi + +# Check for the clock_gettime function. +AC_CHECK_FUNCS(clock_gettime) +clock_gettime_link= +# At least for glibc, clock_gettime is in librt. But don't +# pull that in if it still doesn't give us the function we want. This +# test is copied from libgomp, and modified to not link in -lrt as +# we're using this for test timing only. +if test "$ac_cv_func_clock_gettime" = no; then + AC_CHECK_LIB(rt, clock_gettime, + [CLOCK_GETTIME_LINK=-lrt + AC_DEFINE(HAVE_CLOCK_GETTIME, 1, + [Define to 1 if you have the `clock_gettime' function.])]) +fi +AC_SUBST(CLOCK_GETTIME_LINK) + +dnl Test whether the compiler supports the -pthread option. +AC_CACHE_CHECK([whether -pthread is supported], +[libgo_cv_lib_pthread], +[CFLAGS_hold=$CFLAGS +CFLAGS="$CFLAGS -pthread" +AC_COMPILE_IFELSE([[int i;]], +[libgo_cv_lib_pthread=yes], +[libgo_cv_lib_pthread=no]) +CFLAGS=$CFLAGS_hold]) +PTHREAD_CFLAGS= +if test "$libgo_cv_lib_pthread" = yes; then + PTHREAD_CFLAGS=-pthread +fi +AC_SUBST(PTHREAD_CFLAGS) + +AM_CONDITIONAL(HAVE_PTHREAD, test "$libgo_cv_lib_pthread" = yes) + +AC_CHECK_LIB([z], [compress], []) +if test $ac_cv_lib_z_compress = "yes"; then + AC_DEFINE(HAVE_ZLIB, 1, [Define if -lz is available.]) +fi +AM_CONDITIONAL(HAVE_ZLIB, test "$ac_cv_lib_z_compress" = yes) + +dnl Test whether the linker supports the --compress_debug_sections option. 
+AC_CACHE_CHECK([whether --compress-debug-sections is supported], +[libgo_cv_ld_compress], +[LDFLAGS_hold=$LDFLAGS +LDFLAGS="$LDFLAGS -Wl,--compress-debug-sections=zlib-gnu" +AC_LINK_IFELSE([AC_LANG_PROGRAM(,)], +[libgo_cv_ld_compress=yes], +[libgo_cv_ld_compress=no]) +LDFLAGS=$LDFLAGS_hold]) +AM_CONDITIONAL(HAVE_COMPRESSED_DEBUG, test "$libgo_cv_ld_compress" = yes) + +AC_ARG_VAR(OBJCOPY, [location of objcopy]) +AC_CHECK_PROG(OBJCOPY, objcopy, objcopy,) +AC_CACHE_CHECK([whether objcopy supports debuglink], +[libbacktrace_cv_objcopy_debuglink], +[if test -n "${with_target_subdir}"; then + libbacktrace_cv_objcopy_debuglink=no +elif ${OBJCOPY} --add-gnu-debuglink=x /bin/ls /tmp/ls$$; then + rm -f /tmp/ls$$ + libbacktrace_cv_objcopy_debuglink=yes +else + libbacktrace_cv_objcopy_debuglink=no +fi]) +AM_CONDITIONAL(HAVE_OBJCOPY_DEBUGLINK, test "$libbacktrace_cv_objcopy_debuglink" = yes) + +AC_CACHE_CHECK([whether tests can run], + [libbacktrace_cv_sys_native], + [AC_RUN_IFELSE([AC_LANG_PROGRAM([], [return 0;])], + [libbacktrace_cv_sys_native=yes], + [libbacktrace_cv_sys_native=no], + [libbacktrace_cv_sys_native=no])]) +AM_CONDITIONAL(NATIVE, test "$libbacktrace_cv_sys_native" = "yes") + +if test "${multilib}" = "yes"; then + multilib_arg="--enable-multilib" +else + multilib_arg= +fi + +AC_CONFIG_FILES(Makefile backtrace-supported.h) + +# We need multilib support, but only if configuring for the target. +AC_CONFIG_COMMANDS([default], +[if test -n "$CONFIG_FILES"; then + if test -n "${with_target_subdir}"; then + # Multilibs need MULTISUBDIR defined correctly in certain makefiles so + # that multilib installs will end up installed in the correct place. + # The testsuite needs it for multilib-aware ABI baseline files. + # To work around this not being passed down from config-ml.in -> + # srcdir/Makefile.am -> srcdir/{src,libsupc++,...}/Makefile.am, manually + # append it here. Only modify Makefiles that have just been created. + # + # Also, get rid of this simulated-VPATH thing that automake does. + cat > vpsed << \_EOF + s!`test -f '$<' || echo '$(srcdir)/'`!! +_EOF + for i in $SUBDIRS; do + case $CONFIG_FILES in + *${i}/Makefile*) + #echo "Adding MULTISUBDIR to $i/Makefile" + sed -f vpsed $i/Makefile > tmp + grep '^MULTISUBDIR =' Makefile >> tmp + mv tmp $i/Makefile + ;; + esac + done + rm vpsed + fi + fi +], +[ +# Variables needed in config.status (file generation) which aren't already +# passed by autoconf. +SUBDIRS="$SUBDIRS" +]) + +AC_OUTPUT diff --git a/backtrace-sys/src/libbacktrace/dwarf.c b/backtrace-sys/src/libbacktrace/dwarf.c new file mode 100644 index 000000000..1bd1488c6 --- /dev/null +++ b/backtrace-sys/src/libbacktrace/dwarf.c @@ -0,0 +1,3126 @@ +/* dwarf.c -- Get file/line information from DWARF for backtraces. + Copyright (C) 2012-2018 Free Software Foundation, Inc. + Written by Ian Lance Taylor, Google. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + (1) Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + (2) Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + + (3) The name of the author may not be used to + endorse or promote products derived from this software without + specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, +INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, +STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING +IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. */ + +#include "config.h" + +#include <errno.h> +#include <stdlib.h> +#include <string.h> +#include <sys/types.h> + +#include "backtrace.h" +#include "internal.h" + +/* DWARF constants. */ + +enum dwarf_tag { + DW_TAG_entry_point = 0x3, + DW_TAG_compile_unit = 0x11, + DW_TAG_inlined_subroutine = 0x1d, + DW_TAG_subprogram = 0x2e, +}; + +enum dwarf_form { + DW_FORM_addr = 0x1, + DW_FORM_block2 = 0x3, + DW_FORM_block4 = 0x4, + DW_FORM_data2 = 0x5, + DW_FORM_data4 = 0x6, + DW_FORM_data8 = 0x07, + DW_FORM_string = 0x08, + DW_FORM_block = 0x09, + DW_FORM_block1 = 0x0a, + DW_FORM_data1 = 0x0b, + DW_FORM_flag = 0x0c, + DW_FORM_sdata = 0x0d, + DW_FORM_strp = 0x0e, + DW_FORM_udata = 0x0f, + DW_FORM_ref_addr = 0x10, + DW_FORM_ref1 = 0x11, + DW_FORM_ref2 = 0x12, + DW_FORM_ref4 = 0x13, + DW_FORM_ref8 = 0x14, + DW_FORM_ref_udata = 0x15, + DW_FORM_indirect = 0x16, + DW_FORM_sec_offset = 0x17, + DW_FORM_exprloc = 0x18, + DW_FORM_flag_present = 0x19, + DW_FORM_ref_sig8 = 0x20, + DW_FORM_GNU_addr_index = 0x1f01, + DW_FORM_GNU_str_index = 0x1f02, + DW_FORM_GNU_ref_alt = 0x1f20, + DW_FORM_GNU_strp_alt = 0x1f21, +}; + +enum dwarf_attribute { + DW_AT_name = 0x3, + DW_AT_stmt_list = 0x10, + DW_AT_low_pc = 0x11, + DW_AT_high_pc = 0x12, + DW_AT_comp_dir = 0x1b, + DW_AT_abstract_origin = 0x31, + DW_AT_specification = 0x47, + DW_AT_ranges = 0x55, + DW_AT_call_file = 0x58, + DW_AT_call_line = 0x59, + DW_AT_linkage_name = 0x6e, + DW_AT_MIPS_linkage_name = 0x2007, +}; + +enum dwarf_line_number_op { + DW_LNS_extended_op = 0x0, + DW_LNS_copy = 0x1, + DW_LNS_advance_pc = 0x2, + DW_LNS_advance_line = 0x3, + DW_LNS_set_file = 0x4, + DW_LNS_set_column = 0x5, + DW_LNS_negate_stmt = 0x6, + DW_LNS_set_basic_block = 0x7, + DW_LNS_const_add_pc = 0x8, + DW_LNS_fixed_advance_pc = 0x9, + DW_LNS_set_prologue_end = 0xa, + DW_LNS_set_epilogue_begin = 0xb, + DW_LNS_set_isa = 0xc, +}; + +enum dwarf_extedned_line_number_op { + DW_LNE_end_sequence = 0x1, + DW_LNE_set_address = 0x2, + DW_LNE_define_file = 0x3, + DW_LNE_set_discriminator = 0x4, +}; + +#if defined(__MSDOS__) || defined(_WIN32) || defined(__OS2__) || defined (__CYGWIN__) +# define IS_DIR_SEPARATOR(c) ((c) == '/' || (c) == '\\') +#define HAS_DRIVE_SPEC(f) ((f)[0] && (f)[1] == ':') +# define IS_ABSOLUTE_PATH(f) (IS_DIR_SEPARATOR(f[0]) || HAS_DRIVE_SPEC(f)) +#else +# define IS_DIR_SEPARATOR(c) ((c) == '/') +# define IS_ABSOLUTE_PATH(f) IS_DIR_SEPARATOR(f[0]) +#endif + +#if !defined(HAVE_DECL_STRNLEN) || !HAVE_DECL_STRNLEN + +/* If strnlen is not declared, provide our own version. */ + +static size_t +xstrnlen (const char *s, size_t maxlen) +{ + size_t i; + + for (i = 0; i < maxlen; ++i) + if (s[i] == '\0') + break; + return i; +} + +#define strnlen xstrnlen + +#endif + +/* A buffer to read DWARF info. */ + +struct dwarf_buf +{ + /* Buffer name for error messages. */ + const char *name; + /* Start of the buffer. 
+ */ + const unsigned char *start; + /* Next byte to read. */ + const unsigned char *buf; + /* The number of bytes remaining. */ + size_t left; + /* Whether the data is big-endian. */ + int is_bigendian; + /* Error callback routine. */ + backtrace_error_callback error_callback; + /* Data for error_callback. */ + void *data; + /* Non-zero if we've reported an underflow error. */ + int reported_underflow; +}; + +/* A single attribute in a DWARF abbreviation. */ + +struct attr +{ + /* The attribute name. */ + enum dwarf_attribute name; + /* The attribute form. */ + enum dwarf_form form; +}; + +/* A single DWARF abbreviation. */ + +struct abbrev +{ + /* The abbrev code--the number used to refer to the abbrev. */ + uint64_t code; + /* The entry tag. */ + enum dwarf_tag tag; + /* Non-zero if this abbrev has child entries. */ + int has_children; + /* The number of attributes. */ + size_t num_attrs; + /* The attributes. */ + struct attr *attrs; +}; + +/* The DWARF abbreviations for a compilation unit. This structure + only exists while reading the compilation unit. Most DWARF readers + seem to use a hash table to map abbrev ID's to abbrev entries. + However, we primarily care about GCC, and GCC simply issues ID's in + numerical order starting at 1. So we simply keep a sorted vector, + and try to just look up the code. */ + +struct abbrevs +{ + /* The number of abbrevs in the vector. */ + size_t num_abbrevs; + /* The abbrevs, sorted by the code field. */ + struct abbrev *abbrevs; +}; + +/* The different kinds of attribute values. */ + +enum attr_val_encoding +{ + /* An address. */ + ATTR_VAL_ADDRESS, + /* An unsigned integer. */ + ATTR_VAL_UINT, + /* A signed integer. */ + ATTR_VAL_SINT, + /* A string. */ + ATTR_VAL_STRING, + /* An offset to other data in the containing unit. */ + ATTR_VAL_REF_UNIT, + /* An offset to other data within the .dwarf_info section. */ + ATTR_VAL_REF_INFO, + /* An offset to data in some other section. */ + ATTR_VAL_REF_SECTION, + /* A type signature. */ + ATTR_VAL_REF_TYPE, + /* A block of data (not represented). */ + ATTR_VAL_BLOCK, + /* An expression (not represented). */ + ATTR_VAL_EXPR, +}; + +/* An attribute value. */ + +struct attr_val +{ + /* How the value is stored in the field u. */ + enum attr_val_encoding encoding; + union + { + /* ATTR_VAL_ADDRESS, ATTR_VAL_UINT, ATTR_VAL_REF*. */ + uint64_t uint; + /* ATTR_VAL_SINT. */ + int64_t sint; + /* ATTR_VAL_STRING. */ + const char *string; + /* ATTR_VAL_BLOCK not stored. */ + } u; +}; + +/* The line number program header. */ + +struct line_header +{ + /* The version of the line number information. */ + int version; + /* The minimum instruction length. */ + unsigned int min_insn_len; + /* The maximum number of ops per instruction. */ + unsigned int max_ops_per_insn; + /* The line base for special opcodes. */ + int line_base; + /* The line range for special opcodes. */ + unsigned int line_range; + /* The opcode base--the first special opcode. */ + unsigned int opcode_base; + /* Opcode lengths, indexed by opcode - 1. */ + const unsigned char *opcode_lengths; + /* The number of directory entries. */ + size_t dirs_count; + /* The directory entries. */ + const char **dirs; + /* The number of filenames. */ + size_t filenames_count; + /* The filenames. */ + const char **filenames; +}; + +/* Map a single PC value to a file/line. We will keep a vector of + these sorted by PC value. Each file/line will be correct from the + PC up to the PC of the next entry if there is one. 
We allocate one + extra entry at the end so that we can use bsearch. */ + +struct line +{ + /* PC. */ + uintptr_t pc; + /* File name. Many entries in the array are expected to point to + the same file name. */ + const char *filename; + /* Line number. */ + int lineno; + /* Index of the object in the original array read from the DWARF + section, before it has been sorted. The index makes it possible + to use Quicksort and maintain stability. */ + int idx; +}; + +/* A growable vector of line number information. This is used while + reading the line numbers. */ + +struct line_vector +{ + /* Memory. This is an array of struct line. */ + struct backtrace_vector vec; + /* Number of valid mappings. */ + size_t count; +}; + +/* A function described in the debug info. */ + +struct function +{ + /* The name of the function. */ + const char *name; + /* If this is an inlined function, the filename of the call + site. */ + const char *caller_filename; + /* If this is an inlined function, the line number of the call + site. */ + int caller_lineno; + /* Map PC ranges to inlined functions. */ + struct function_addrs *function_addrs; + size_t function_addrs_count; +}; + +/* An address range for a function. This maps a PC value to a + specific function. */ + +struct function_addrs +{ + /* Range is LOW <= PC < HIGH. */ + uint64_t low; + uint64_t high; + /* Function for this address range. */ + struct function *function; +}; + +/* A growable vector of function address ranges. */ + +struct function_vector +{ + /* Memory. This is an array of struct function_addrs. */ + struct backtrace_vector vec; + /* Number of address ranges present. */ + size_t count; +}; + +/* A DWARF compilation unit. This only holds the information we need + to map a PC to a file and line. */ + +struct unit +{ + /* The first entry for this compilation unit. */ + const unsigned char *unit_data; + /* The length of the data for this compilation unit. */ + size_t unit_data_len; + /* The offset of UNIT_DATA from the start of the information for + this compilation unit. */ + size_t unit_data_offset; + /* DWARF version. */ + int version; + /* Whether unit is DWARF64. */ + int is_dwarf64; + /* Address size. */ + int addrsize; + /* Offset into line number information. */ + off_t lineoff; + /* Primary source file. */ + const char *filename; + /* Compilation command working directory. */ + const char *comp_dir; + /* Absolute file name, only set if needed. */ + const char *abs_filename; + /* The abbreviations for this unit. */ + struct abbrevs abbrevs; + + /* The fields above this point are read in during initialization and + may be accessed freely. The fields below this point are read in + as needed, and therefore require care, as different threads may + try to initialize them simultaneously. */ + + /* PC to line number mapping. This is NULL if the values have not + been read. This is (struct line *) -1 if there was an error + reading the values. */ + struct line *lines; + /* Number of entries in lines. */ + size_t lines_count; + /* PC ranges to function. */ + struct function_addrs *function_addrs; + size_t function_addrs_count; +}; + +/* An address range for a compilation unit. This maps a PC value to a + specific compilation unit. Note that we invert the representation + in DWARF: instead of listing the units and attaching a list of + ranges, we list the ranges and have each one point to the unit. + This lets us do a binary search to find the unit. */ + +struct unit_addrs +{ + /* Range is LOW <= PC < HIGH. 
*/ + uint64_t low; + uint64_t high; + /* Compilation unit for this address range. */ + struct unit *u; +}; + +/* A growable vector of compilation unit address ranges. */ + +struct unit_addrs_vector +{ + /* Memory. This is an array of struct unit_addrs. */ + struct backtrace_vector vec; + /* Number of address ranges present. */ + size_t count; +}; + +/* The information we need to map a PC to a file and line. */ + +struct dwarf_data +{ + /* The data for the next file we know about. */ + struct dwarf_data *next; + /* The base address for this file. */ + uintptr_t base_address; + /* A sorted list of address ranges. */ + struct unit_addrs *addrs; + /* Number of address ranges in list. */ + size_t addrs_count; + /* The unparsed .debug_info section. */ + const unsigned char *dwarf_info; + size_t dwarf_info_size; + /* The unparsed .debug_line section. */ + const unsigned char *dwarf_line; + size_t dwarf_line_size; + /* The unparsed .debug_ranges section. */ + const unsigned char *dwarf_ranges; + size_t dwarf_ranges_size; + /* The unparsed .debug_str section. */ + const unsigned char *dwarf_str; + size_t dwarf_str_size; + /* Whether the data is big-endian or not. */ + int is_bigendian; + /* A vector used for function addresses. We keep this here so that + we can grow the vector as we read more functions. */ + struct function_vector fvec; +}; + +/* Report an error for a DWARF buffer. */ + +static void +dwarf_buf_error (struct dwarf_buf *buf, const char *msg) +{ + char b[200]; + + snprintf (b, sizeof b, "%s in %s at %d", + msg, buf->name, (int) (buf->buf - buf->start)); + buf->error_callback (buf->data, b, 0); +} + +/* Require at least COUNT bytes in BUF. Return 1 if all is well, 0 on + error. */ + +static int +require (struct dwarf_buf *buf, size_t count) +{ + if (buf->left >= count) + return 1; + + if (!buf->reported_underflow) + { + dwarf_buf_error (buf, "DWARF underflow"); + buf->reported_underflow = 1; + } + + return 0; +} + +/* Advance COUNT bytes in BUF. Return 1 if all is well, 0 on + error. */ + +static int +advance (struct dwarf_buf *buf, size_t count) +{ + if (!require (buf, count)) + return 0; + buf->buf += count; + buf->left -= count; + return 1; +} + +/* Read one byte from BUF and advance 1 byte. */ + +static unsigned char +read_byte (struct dwarf_buf *buf) +{ + const unsigned char *p = buf->buf; + + if (!advance (buf, 1)) + return 0; + return p[0]; +} + +/* Read a signed char from BUF and advance 1 byte. */ + +static signed char +read_sbyte (struct dwarf_buf *buf) +{ + const unsigned char *p = buf->buf; + + if (!advance (buf, 1)) + return 0; + return (*p ^ 0x80) - 0x80; +} + +/* Read a uint16 from BUF and advance 2 bytes. */ + +static uint16_t +read_uint16 (struct dwarf_buf *buf) +{ + const unsigned char *p = buf->buf; + + if (!advance (buf, 2)) + return 0; + if (buf->is_bigendian) + return ((uint16_t) p[0] << 8) | (uint16_t) p[1]; + else + return ((uint16_t) p[1] << 8) | (uint16_t) p[0]; +} + +/* Read a uint32 from BUF and advance 4 bytes. */ + +static uint32_t +read_uint32 (struct dwarf_buf *buf) +{ + const unsigned char *p = buf->buf; + + if (!advance (buf, 4)) + return 0; + if (buf->is_bigendian) + return (((uint32_t) p[0] << 24) | ((uint32_t) p[1] << 16) + | ((uint32_t) p[2] << 8) | (uint32_t) p[3]); + else + return (((uint32_t) p[3] << 24) | ((uint32_t) p[2] << 16) + | ((uint32_t) p[1] << 8) | (uint32_t) p[0]); +} + +/* Read a uint64 from BUF and advance 8 bytes. 
*/ + +static uint64_t +read_uint64 (struct dwarf_buf *buf) +{ + const unsigned char *p = buf->buf; + + if (!advance (buf, 8)) + return 0; + if (buf->is_bigendian) + return (((uint64_t) p[0] << 56) | ((uint64_t) p[1] << 48) + | ((uint64_t) p[2] << 40) | ((uint64_t) p[3] << 32) + | ((uint64_t) p[4] << 24) | ((uint64_t) p[5] << 16) + | ((uint64_t) p[6] << 8) | (uint64_t) p[7]); + else + return (((uint64_t) p[7] << 56) | ((uint64_t) p[6] << 48) + | ((uint64_t) p[5] << 40) | ((uint64_t) p[4] << 32) + | ((uint64_t) p[3] << 24) | ((uint64_t) p[2] << 16) + | ((uint64_t) p[1] << 8) | (uint64_t) p[0]); +} + +/* Read an offset from BUF and advance the appropriate number of + bytes. */ + +static uint64_t +read_offset (struct dwarf_buf *buf, int is_dwarf64) +{ + if (is_dwarf64) + return read_uint64 (buf); + else + return read_uint32 (buf); +} + +/* Read an address from BUF and advance the appropriate number of + bytes. */ + +static uint64_t +read_address (struct dwarf_buf *buf, int addrsize) +{ + switch (addrsize) + { + case 1: + return read_byte (buf); + case 2: + return read_uint16 (buf); + case 4: + return read_uint32 (buf); + case 8: + return read_uint64 (buf); + default: + dwarf_buf_error (buf, "unrecognized address size"); + return 0; + } +} + +/* Return whether a value is the highest possible address, given the + address size. */ + +static int +is_highest_address (uint64_t address, int addrsize) +{ + switch (addrsize) + { + case 1: + return address == (unsigned char) -1; + case 2: + return address == (uint16_t) -1; + case 4: + return address == (uint32_t) -1; + case 8: + return address == (uint64_t) -1; + default: + return 0; + } +} + +/* Read an unsigned LEB128 number. */ + +static uint64_t +read_uleb128 (struct dwarf_buf *buf) +{ + uint64_t ret; + unsigned int shift; + int overflow; + unsigned char b; + + ret = 0; + shift = 0; + overflow = 0; + do + { + const unsigned char *p; + + p = buf->buf; + if (!advance (buf, 1)) + return 0; + b = *p; + if (shift < 64) + ret |= ((uint64_t) (b & 0x7f)) << shift; + else if (!overflow) + { + dwarf_buf_error (buf, "LEB128 overflows uint64_t"); + overflow = 1; + } + shift += 7; + } + while ((b & 0x80) != 0); + + return ret; +} + +/* Read a signed LEB128 number. */ + +static int64_t +read_sleb128 (struct dwarf_buf *buf) +{ + uint64_t val; + unsigned int shift; + int overflow; + unsigned char b; + + val = 0; + shift = 0; + overflow = 0; + do + { + const unsigned char *p; + + p = buf->buf; + if (!advance (buf, 1)) + return 0; + b = *p; + if (shift < 64) + val |= ((uint64_t) (b & 0x7f)) << shift; + else if (!overflow) + { + dwarf_buf_error (buf, "signed LEB128 overflows uint64_t"); + overflow = 1; + } + shift += 7; + } + while ((b & 0x80) != 0); + + if ((b & 0x40) != 0 && shift < 64) + val |= ((uint64_t) -1) << shift; + + return (int64_t) val; +} + +/* Return the length of an LEB128 number. */ + +static size_t +leb128_len (const unsigned char *p) +{ + size_t ret; + + ret = 1; + while ((*p & 0x80) != 0) + { + ++p; + ++ret; + } + return ret; +} + +/* Free an abbreviations structure. 
*/ + +static void +free_abbrevs (struct backtrace_state *state, struct abbrevs *abbrevs, + backtrace_error_callback error_callback, void *data) +{ + size_t i; + + for (i = 0; i < abbrevs->num_abbrevs; ++i) + backtrace_free (state, abbrevs->abbrevs[i].attrs, + abbrevs->abbrevs[i].num_attrs * sizeof (struct attr), + error_callback, data); + backtrace_free (state, abbrevs->abbrevs, + abbrevs->num_abbrevs * sizeof (struct abbrev), + error_callback, data); + abbrevs->num_abbrevs = 0; + abbrevs->abbrevs = NULL; +} + +/* Read an attribute value. Returns 1 on success, 0 on failure. If + the value can be represented as a uint64_t, sets *VAL and sets + *IS_VALID to 1. We don't try to store the value of other attribute + forms, because we don't care about them. */ + +static int +read_attribute (enum dwarf_form form, struct dwarf_buf *buf, + int is_dwarf64, int version, int addrsize, + const unsigned char *dwarf_str, size_t dwarf_str_size, + struct attr_val *val) +{ + /* Avoid warnings about val.u.FIELD may be used uninitialized if + this function is inlined. The warnings aren't valid but can + occur because the different fields are set and used + conditionally. */ + memset (val, 0, sizeof *val); + + switch (form) + { + case DW_FORM_addr: + val->encoding = ATTR_VAL_ADDRESS; + val->u.uint = read_address (buf, addrsize); + return 1; + case DW_FORM_block2: + val->encoding = ATTR_VAL_BLOCK; + return advance (buf, read_uint16 (buf)); + case DW_FORM_block4: + val->encoding = ATTR_VAL_BLOCK; + return advance (buf, read_uint32 (buf)); + case DW_FORM_data2: + val->encoding = ATTR_VAL_UINT; + val->u.uint = read_uint16 (buf); + return 1; + case DW_FORM_data4: + val->encoding = ATTR_VAL_UINT; + val->u.uint = read_uint32 (buf); + return 1; + case DW_FORM_data8: + val->encoding = ATTR_VAL_UINT; + val->u.uint = read_uint64 (buf); + return 1; + case DW_FORM_string: + val->encoding = ATTR_VAL_STRING; + val->u.string = (const char *) buf->buf; + return advance (buf, strnlen ((const char *) buf->buf, buf->left) + 1); + case DW_FORM_block: + val->encoding = ATTR_VAL_BLOCK; + return advance (buf, read_uleb128 (buf)); + case DW_FORM_block1: + val->encoding = ATTR_VAL_BLOCK; + return advance (buf, read_byte (buf)); + case DW_FORM_data1: + val->encoding = ATTR_VAL_UINT; + val->u.uint = read_byte (buf); + return 1; + case DW_FORM_flag: + val->encoding = ATTR_VAL_UINT; + val->u.uint = read_byte (buf); + return 1; + case DW_FORM_sdata: + val->encoding = ATTR_VAL_SINT; + val->u.sint = read_sleb128 (buf); + return 1; + case DW_FORM_strp: + { + uint64_t offset; + + offset = read_offset (buf, is_dwarf64); + if (offset >= dwarf_str_size) + { + dwarf_buf_error (buf, "DW_FORM_strp out of range"); + return 0; + } + val->encoding = ATTR_VAL_STRING; + val->u.string = (const char *) dwarf_str + offset; + return 1; + } + case DW_FORM_udata: + val->encoding = ATTR_VAL_UINT; + val->u.uint = read_uleb128 (buf); + return 1; + case DW_FORM_ref_addr: + val->encoding = ATTR_VAL_REF_INFO; + if (version == 2) + val->u.uint = read_address (buf, addrsize); + else + val->u.uint = read_offset (buf, is_dwarf64); + return 1; + case DW_FORM_ref1: + val->encoding = ATTR_VAL_REF_UNIT; + val->u.uint = read_byte (buf); + return 1; + case DW_FORM_ref2: + val->encoding = ATTR_VAL_REF_UNIT; + val->u.uint = read_uint16 (buf); + return 1; + case DW_FORM_ref4: + val->encoding = ATTR_VAL_REF_UNIT; + val->u.uint = read_uint32 (buf); + return 1; + case DW_FORM_ref8: + val->encoding = ATTR_VAL_REF_UNIT; + val->u.uint = read_uint64 (buf); + return 1; + case 
DW_FORM_ref_udata: + val->encoding = ATTR_VAL_REF_UNIT; + val->u.uint = read_uleb128 (buf); + return 1; + case DW_FORM_indirect: + { + uint64_t form; + + form = read_uleb128 (buf); + return read_attribute ((enum dwarf_form) form, buf, is_dwarf64, + version, addrsize, dwarf_str, dwarf_str_size, + val); + } + case DW_FORM_sec_offset: + val->encoding = ATTR_VAL_REF_SECTION; + val->u.uint = read_offset (buf, is_dwarf64); + return 1; + case DW_FORM_exprloc: + val->encoding = ATTR_VAL_EXPR; + return advance (buf, read_uleb128 (buf)); + case DW_FORM_flag_present: + val->encoding = ATTR_VAL_UINT; + val->u.uint = 1; + return 1; + case DW_FORM_ref_sig8: + val->encoding = ATTR_VAL_REF_TYPE; + val->u.uint = read_uint64 (buf); + return 1; + case DW_FORM_GNU_addr_index: + val->encoding = ATTR_VAL_REF_SECTION; + val->u.uint = read_uleb128 (buf); + return 1; + case DW_FORM_GNU_str_index: + val->encoding = ATTR_VAL_REF_SECTION; + val->u.uint = read_uleb128 (buf); + return 1; + case DW_FORM_GNU_ref_alt: + val->encoding = ATTR_VAL_REF_SECTION; + val->u.uint = read_offset (buf, is_dwarf64); + return 1; + case DW_FORM_GNU_strp_alt: + val->encoding = ATTR_VAL_REF_SECTION; + val->u.uint = read_offset (buf, is_dwarf64); + return 1; + default: + dwarf_buf_error (buf, "unrecognized DWARF form"); + return 0; + } +} + +/* Compare function_addrs for qsort. When ranges are nested, make the + smallest one sort last. */ + +static int +function_addrs_compare (const void *v1, const void *v2) +{ + const struct function_addrs *a1 = (const struct function_addrs *) v1; + const struct function_addrs *a2 = (const struct function_addrs *) v2; + + if (a1->low < a2->low) + return -1; + if (a1->low > a2->low) + return 1; + if (a1->high < a2->high) + return 1; + if (a1->high > a2->high) + return -1; + return strcmp (a1->function->name, a2->function->name); +} + +/* Compare a PC against a function_addrs for bsearch. Note that if + there are multiple ranges containing PC, which one will be returned + is unpredictable. We compensate for that in dwarf_fileline. */ + +static int +function_addrs_search (const void *vkey, const void *ventry) +{ + const uintptr_t *key = (const uintptr_t *) vkey; + const struct function_addrs *entry = (const struct function_addrs *) ventry; + uintptr_t pc; + + pc = *key; + if (pc < entry->low) + return -1; + else if (pc >= entry->high) + return 1; + else + return 0; +} + +/* Add a new compilation unit address range to a vector. Returns 1 on + success, 0 on failure. */ + +static int +add_unit_addr (struct backtrace_state *state, uintptr_t base_address, + struct unit_addrs addrs, + backtrace_error_callback error_callback, void *data, + struct unit_addrs_vector *vec) +{ + struct unit_addrs *p; + + /* Add in the base address of the module here, so that we can look + up the PC directly. */ + addrs.low += base_address; + addrs.high += base_address; + + /* Try to merge with the last entry. */ + if (vec->count > 0) + { + p = (struct unit_addrs *) vec->vec.base + (vec->count - 1); + if ((addrs.low == p->high || addrs.low == p->high + 1) + && addrs.u == p->u) + { + if (addrs.high > p->high) + p->high = addrs.high; + return 1; + } + } + + p = ((struct unit_addrs *) + backtrace_vector_grow (state, sizeof (struct unit_addrs), + error_callback, data, &vec->vec)); + if (p == NULL) + return 0; + + *p = addrs; + ++vec->count; + return 1; +} + +/* Free a unit address vector. 
*/ + +static void +free_unit_addrs_vector (struct backtrace_state *state, + struct unit_addrs_vector *vec, + backtrace_error_callback error_callback, void *data) +{ + struct unit_addrs *addrs; + size_t i; + + addrs = (struct unit_addrs *) vec->vec.base; + for (i = 0; i < vec->count; ++i) + free_abbrevs (state, &addrs[i].u->abbrevs, error_callback, data); +} + +/* Compare unit_addrs for qsort. When ranges are nested, make the + smallest one sort last. */ + +static int +unit_addrs_compare (const void *v1, const void *v2) +{ + const struct unit_addrs *a1 = (const struct unit_addrs *) v1; + const struct unit_addrs *a2 = (const struct unit_addrs *) v2; + + if (a1->low < a2->low) + return -1; + if (a1->low > a2->low) + return 1; + if (a1->high < a2->high) + return 1; + if (a1->high > a2->high) + return -1; + if (a1->u->lineoff < a2->u->lineoff) + return -1; + if (a1->u->lineoff > a2->u->lineoff) + return 1; + return 0; +} + +/* Compare a PC against a unit_addrs for bsearch. Note that if there + are multiple ranges containing PC, which one will be returned is + unpredictable. We compensate for that in dwarf_fileline. */ + +static int +unit_addrs_search (const void *vkey, const void *ventry) +{ + const uintptr_t *key = (const uintptr_t *) vkey; + const struct unit_addrs *entry = (const struct unit_addrs *) ventry; + uintptr_t pc; + + pc = *key; + if (pc < entry->low) + return -1; + else if (pc >= entry->high) + return 1; + else + return 0; +} + +/* Sort the line vector by PC. We want a stable sort here to maintain + the order of lines for the same PC values. Since the sequence is + being sorted in place, their addresses cannot be relied on to + maintain stability. That is the purpose of the index member. */ + +static int +line_compare (const void *v1, const void *v2) +{ + const struct line *ln1 = (const struct line *) v1; + const struct line *ln2 = (const struct line *) v2; + + if (ln1->pc < ln2->pc) + return -1; + else if (ln1->pc > ln2->pc) + return 1; + else if (ln1->idx < ln2->idx) + return -1; + else if (ln1->idx > ln2->idx) + return 1; + else + return 0; +} + +/* Find a PC in a line vector. We always allocate an extra entry at + the end of the lines vector, so that this routine can safely look + at the next entry. Note that when there are multiple mappings for + the same PC value, this will return the last one. */ + +static int +line_search (const void *vkey, const void *ventry) +{ + const uintptr_t *key = (const uintptr_t *) vkey; + const struct line *entry = (const struct line *) ventry; + uintptr_t pc; + + pc = *key; + if (pc < entry->pc) + return -1; + else if (pc >= (entry + 1)->pc) + return 1; + else + return 0; +} + +/* Sort the abbrevs by the abbrev code. This function is passed to + both qsort and bsearch. */ + +static int +abbrev_compare (const void *v1, const void *v2) +{ + const struct abbrev *a1 = (const struct abbrev *) v1; + const struct abbrev *a2 = (const struct abbrev *) v2; + + if (a1->code < a2->code) + return -1; + else if (a1->code > a2->code) + return 1; + else + { + /* This really shouldn't happen. It means there are two + different abbrevs with the same code, and that means we don't + know which one lookup_abbrev should return. */ + return 0; + } +} + +/* Read the abbreviation table for a compilation unit. Returns 1 on + success, 0 on failure. 
*/ + +static int +read_abbrevs (struct backtrace_state *state, uint64_t abbrev_offset, + const unsigned char *dwarf_abbrev, size_t dwarf_abbrev_size, + int is_bigendian, backtrace_error_callback error_callback, + void *data, struct abbrevs *abbrevs) +{ + struct dwarf_buf abbrev_buf; + struct dwarf_buf count_buf; + size_t num_abbrevs; + + abbrevs->num_abbrevs = 0; + abbrevs->abbrevs = NULL; + + if (abbrev_offset >= dwarf_abbrev_size) + { + error_callback (data, "abbrev offset out of range", 0); + return 0; + } + + abbrev_buf.name = ".debug_abbrev"; + abbrev_buf.start = dwarf_abbrev; + abbrev_buf.buf = dwarf_abbrev + abbrev_offset; + abbrev_buf.left = dwarf_abbrev_size - abbrev_offset; + abbrev_buf.is_bigendian = is_bigendian; + abbrev_buf.error_callback = error_callback; + abbrev_buf.data = data; + abbrev_buf.reported_underflow = 0; + + /* Count the number of abbrevs in this list. */ + + count_buf = abbrev_buf; + num_abbrevs = 0; + while (read_uleb128 (&count_buf) != 0) + { + if (count_buf.reported_underflow) + return 0; + ++num_abbrevs; + // Skip tag. + read_uleb128 (&count_buf); + // Skip has_children. + read_byte (&count_buf); + // Skip attributes. + while (read_uleb128 (&count_buf) != 0) + read_uleb128 (&count_buf); + // Skip form of last attribute. + read_uleb128 (&count_buf); + } + + if (count_buf.reported_underflow) + return 0; + + if (num_abbrevs == 0) + return 1; + + abbrevs->num_abbrevs = num_abbrevs; + abbrevs->abbrevs = ((struct abbrev *) + backtrace_alloc (state, + num_abbrevs * sizeof (struct abbrev), + error_callback, data)); + if (abbrevs->abbrevs == NULL) + return 0; + memset (abbrevs->abbrevs, 0, num_abbrevs * sizeof (struct abbrev)); + + num_abbrevs = 0; + while (1) + { + uint64_t code; + struct abbrev a; + size_t num_attrs; + struct attr *attrs; + + if (abbrev_buf.reported_underflow) + goto fail; + + code = read_uleb128 (&abbrev_buf); + if (code == 0) + break; + + a.code = code; + a.tag = (enum dwarf_tag) read_uleb128 (&abbrev_buf); + a.has_children = read_byte (&abbrev_buf); + + count_buf = abbrev_buf; + num_attrs = 0; + while (read_uleb128 (&count_buf) != 0) + { + ++num_attrs; + read_uleb128 (&count_buf); + } + + if (num_attrs == 0) + { + attrs = NULL; + read_uleb128 (&abbrev_buf); + read_uleb128 (&abbrev_buf); + } + else + { + attrs = ((struct attr *) + backtrace_alloc (state, num_attrs * sizeof *attrs, + error_callback, data)); + if (attrs == NULL) + goto fail; + num_attrs = 0; + while (1) + { + uint64_t name; + uint64_t form; + + name = read_uleb128 (&abbrev_buf); + form = read_uleb128 (&abbrev_buf); + if (name == 0) + break; + attrs[num_attrs].name = (enum dwarf_attribute) name; + attrs[num_attrs].form = (enum dwarf_form) form; + ++num_attrs; + } + } + + a.num_attrs = num_attrs; + a.attrs = attrs; + + abbrevs->abbrevs[num_abbrevs] = a; + ++num_abbrevs; + } + + backtrace_qsort (abbrevs->abbrevs, abbrevs->num_abbrevs, + sizeof (struct abbrev), abbrev_compare); + + return 1; + + fail: + free_abbrevs (state, abbrevs, error_callback, data); + return 0; +} + +/* Return the abbrev information for an abbrev code. */ + +static const struct abbrev * +lookup_abbrev (struct abbrevs *abbrevs, uint64_t code, + backtrace_error_callback error_callback, void *data) +{ + struct abbrev key; + void *p; + + /* With GCC, where abbrevs are simply numbered in order, we should + be able to just look up the entry. */ + if (code - 1 < abbrevs->num_abbrevs + && abbrevs->abbrevs[code - 1].code == code) + return &abbrevs->abbrevs[code - 1]; + + /* Otherwise we have to search. 
*/ + memset (&key, 0, sizeof key); + key.code = code; + p = bsearch (&key, abbrevs->abbrevs, abbrevs->num_abbrevs, + sizeof (struct abbrev), abbrev_compare); + if (p == NULL) + { + error_callback (data, "invalid abbreviation code", 0); + return NULL; + } + return (const struct abbrev *) p; +} + +/* Add non-contiguous address ranges for a compilation unit. Returns + 1 on success, 0 on failure. */ + +static int +add_unit_ranges (struct backtrace_state *state, uintptr_t base_address, + struct unit *u, uint64_t ranges, uint64_t base, + int is_bigendian, const unsigned char *dwarf_ranges, + size_t dwarf_ranges_size, + backtrace_error_callback error_callback, void *data, + struct unit_addrs_vector *addrs) +{ + struct dwarf_buf ranges_buf; + + if (ranges >= dwarf_ranges_size) + { + error_callback (data, "ranges offset out of range", 0); + return 0; + } + + ranges_buf.name = ".debug_ranges"; + ranges_buf.start = dwarf_ranges; + ranges_buf.buf = dwarf_ranges + ranges; + ranges_buf.left = dwarf_ranges_size - ranges; + ranges_buf.is_bigendian = is_bigendian; + ranges_buf.error_callback = error_callback; + ranges_buf.data = data; + ranges_buf.reported_underflow = 0; + + while (1) + { + uint64_t low; + uint64_t high; + + if (ranges_buf.reported_underflow) + return 0; + + low = read_address (&ranges_buf, u->addrsize); + high = read_address (&ranges_buf, u->addrsize); + + if (low == 0 && high == 0) + break; + + if (is_highest_address (low, u->addrsize)) + base = high; + else + { + struct unit_addrs a; + + a.low = low + base; + a.high = high + base; + a.u = u; + if (!add_unit_addr (state, base_address, a, error_callback, data, + addrs)) + return 0; + } + } + + if (ranges_buf.reported_underflow) + return 0; + + return 1; +} + +/* Find the address range covered by a compilation unit, reading from + UNIT_BUF and adding values to U. Returns 1 if all data could be + read, 0 if there is some error. 
*/ + +static int +find_address_ranges (struct backtrace_state *state, uintptr_t base_address, + struct dwarf_buf *unit_buf, + const unsigned char *dwarf_str, size_t dwarf_str_size, + const unsigned char *dwarf_ranges, + size_t dwarf_ranges_size, + int is_bigendian, backtrace_error_callback error_callback, + void *data, struct unit *u, + struct unit_addrs_vector *addrs) +{ + while (unit_buf->left > 0) + { + uint64_t code; + const struct abbrev *abbrev; + uint64_t lowpc; + int have_lowpc; + uint64_t highpc; + int have_highpc; + int highpc_is_relative; + uint64_t ranges; + int have_ranges; + size_t i; + + code = read_uleb128 (unit_buf); + if (code == 0) + return 1; + + abbrev = lookup_abbrev (&u->abbrevs, code, error_callback, data); + if (abbrev == NULL) + return 0; + + lowpc = 0; + have_lowpc = 0; + highpc = 0; + have_highpc = 0; + highpc_is_relative = 0; + ranges = 0; + have_ranges = 0; + for (i = 0; i < abbrev->num_attrs; ++i) + { + struct attr_val val; + + if (!read_attribute (abbrev->attrs[i].form, unit_buf, + u->is_dwarf64, u->version, u->addrsize, + dwarf_str, dwarf_str_size, &val)) + return 0; + + switch (abbrev->attrs[i].name) + { + case DW_AT_low_pc: + if (val.encoding == ATTR_VAL_ADDRESS) + { + lowpc = val.u.uint; + have_lowpc = 1; + } + break; + + case DW_AT_high_pc: + if (val.encoding == ATTR_VAL_ADDRESS) + { + highpc = val.u.uint; + have_highpc = 1; + } + else if (val.encoding == ATTR_VAL_UINT) + { + highpc = val.u.uint; + have_highpc = 1; + highpc_is_relative = 1; + } + break; + + case DW_AT_ranges: + if (val.encoding == ATTR_VAL_UINT + || val.encoding == ATTR_VAL_REF_SECTION) + { + ranges = val.u.uint; + have_ranges = 1; + } + break; + + case DW_AT_stmt_list: + if (abbrev->tag == DW_TAG_compile_unit + && (val.encoding == ATTR_VAL_UINT + || val.encoding == ATTR_VAL_REF_SECTION)) + u->lineoff = val.u.uint; + break; + + case DW_AT_name: + if (abbrev->tag == DW_TAG_compile_unit + && val.encoding == ATTR_VAL_STRING) + u->filename = val.u.string; + break; + + case DW_AT_comp_dir: + if (abbrev->tag == DW_TAG_compile_unit + && val.encoding == ATTR_VAL_STRING) + u->comp_dir = val.u.string; + break; + + default: + break; + } + } + + if (abbrev->tag == DW_TAG_compile_unit + || abbrev->tag == DW_TAG_subprogram) + { + if (have_ranges) + { + if (!add_unit_ranges (state, base_address, u, ranges, lowpc, + is_bigendian, dwarf_ranges, + dwarf_ranges_size, error_callback, + data, addrs)) + return 0; + } + else if (have_lowpc && have_highpc) + { + struct unit_addrs a; + + if (highpc_is_relative) + highpc += lowpc; + a.low = lowpc; + a.high = highpc; + a.u = u; + + if (!add_unit_addr (state, base_address, a, error_callback, data, + addrs)) + return 0; + } + + /* If we found the PC range in the DW_TAG_compile_unit, we + can stop now. */ + if (abbrev->tag == DW_TAG_compile_unit + && (have_ranges || (have_lowpc && have_highpc))) + return 1; + } + + if (abbrev->has_children) + { + if (!find_address_ranges (state, base_address, unit_buf, + dwarf_str, dwarf_str_size, + dwarf_ranges, dwarf_ranges_size, + is_bigendian, error_callback, data, + u, addrs)) + return 0; + } + } + + return 1; +} + +/* Build a mapping from address ranges to the compilation units where + the line number information for that range can be found. Returns 1 + on success, 0 on failure. 
*/ + +static int +build_address_map (struct backtrace_state *state, uintptr_t base_address, + const unsigned char *dwarf_info, size_t dwarf_info_size, + const unsigned char *dwarf_abbrev, size_t dwarf_abbrev_size, + const unsigned char *dwarf_ranges, size_t dwarf_ranges_size, + const unsigned char *dwarf_str, size_t dwarf_str_size, + int is_bigendian, backtrace_error_callback error_callback, + void *data, struct unit_addrs_vector *addrs) +{ + struct dwarf_buf info; + struct abbrevs abbrevs; + + memset (&addrs->vec, 0, sizeof addrs->vec); + addrs->count = 0; + + /* Read through the .debug_info section. FIXME: Should we use the + .debug_aranges section? gdb and addr2line don't use it, but I'm + not sure why. */ + + info.name = ".debug_info"; + info.start = dwarf_info; + info.buf = dwarf_info; + info.left = dwarf_info_size; + info.is_bigendian = is_bigendian; + info.error_callback = error_callback; + info.data = data; + info.reported_underflow = 0; + + memset (&abbrevs, 0, sizeof abbrevs); + while (info.left > 0) + { + const unsigned char *unit_data_start; + uint64_t len; + int is_dwarf64; + struct dwarf_buf unit_buf; + int version; + uint64_t abbrev_offset; + int addrsize; + struct unit *u; + + if (info.reported_underflow) + goto fail; + + unit_data_start = info.buf; + + is_dwarf64 = 0; + len = read_uint32 (&info); + if (len == 0xffffffff) + { + len = read_uint64 (&info); + is_dwarf64 = 1; + } + + unit_buf = info; + unit_buf.left = len; + + if (!advance (&info, len)) + goto fail; + + version = read_uint16 (&unit_buf); + if (version < 2 || version > 4) + { + dwarf_buf_error (&unit_buf, "unrecognized DWARF version"); + goto fail; + } + + abbrev_offset = read_offset (&unit_buf, is_dwarf64); + if (!read_abbrevs (state, abbrev_offset, dwarf_abbrev, dwarf_abbrev_size, + is_bigendian, error_callback, data, &abbrevs)) + goto fail; + + addrsize = read_byte (&unit_buf); + + u = ((struct unit *) + backtrace_alloc (state, sizeof *u, error_callback, data)); + if (u == NULL) + goto fail; + u->unit_data = unit_buf.buf; + u->unit_data_len = unit_buf.left; + u->unit_data_offset = unit_buf.buf - unit_data_start; + u->version = version; + u->is_dwarf64 = is_dwarf64; + u->addrsize = addrsize; + u->filename = NULL; + u->comp_dir = NULL; + u->abs_filename = NULL; + u->lineoff = 0; + u->abbrevs = abbrevs; + memset (&abbrevs, 0, sizeof abbrevs); + + /* The actual line number mappings will be read as needed. */ + u->lines = NULL; + u->lines_count = 0; + u->function_addrs = NULL; + u->function_addrs_count = 0; + + if (!find_address_ranges (state, base_address, &unit_buf, + dwarf_str, dwarf_str_size, + dwarf_ranges, dwarf_ranges_size, + is_bigendian, error_callback, data, + u, addrs)) + { + free_abbrevs (state, &u->abbrevs, error_callback, data); + backtrace_free (state, u, sizeof *u, error_callback, data); + goto fail; + } + + if (unit_buf.reported_underflow) + { + free_abbrevs (state, &u->abbrevs, error_callback, data); + backtrace_free (state, u, sizeof *u, error_callback, data); + goto fail; + } + } + if (info.reported_underflow) + goto fail; + + return 1; + + fail: + free_abbrevs (state, &abbrevs, error_callback, data); + free_unit_addrs_vector (state, addrs, error_callback, data); + return 0; +} + +/* Add a new mapping to the vector of line mappings that we are + building. Returns 1 on success, 0 on failure. 
*/ + +static int +add_line (struct backtrace_state *state, struct dwarf_data *ddata, + uintptr_t pc, const char *filename, int lineno, + backtrace_error_callback error_callback, void *data, + struct line_vector *vec) +{ + struct line *ln; + + /* If we are adding the same mapping, ignore it. This can happen + when using discriminators. */ + if (vec->count > 0) + { + ln = (struct line *) vec->vec.base + (vec->count - 1); + if (pc == ln->pc && filename == ln->filename && lineno == ln->lineno) + return 1; + } + + ln = ((struct line *) + backtrace_vector_grow (state, sizeof (struct line), error_callback, + data, &vec->vec)); + if (ln == NULL) + return 0; + + /* Add in the base address here, so that we can look up the PC + directly. */ + ln->pc = pc + ddata->base_address; + + ln->filename = filename; + ln->lineno = lineno; + ln->idx = vec->count; + + ++vec->count; + + return 1; +} + +/* Free the line header information. */ + +static void +free_line_header (struct backtrace_state *state, struct line_header *hdr, + backtrace_error_callback error_callback, void *data) +{ + if (hdr->dirs_count != 0) + backtrace_free (state, hdr->dirs, hdr->dirs_count * sizeof (const char *), + error_callback, data); + backtrace_free (state, hdr->filenames, + hdr->filenames_count * sizeof (char *), + error_callback, data); +} + +/* Read the line header. Return 1 on success, 0 on failure. */ + +static int +read_line_header (struct backtrace_state *state, struct unit *u, + int is_dwarf64, struct dwarf_buf *line_buf, + struct line_header *hdr) +{ + uint64_t hdrlen; + struct dwarf_buf hdr_buf; + const unsigned char *p; + const unsigned char *pend; + size_t i; + + hdr->version = read_uint16 (line_buf); + if (hdr->version < 2 || hdr->version > 4) + { + dwarf_buf_error (line_buf, "unsupported line number version"); + return 0; + } + + hdrlen = read_offset (line_buf, is_dwarf64); + + hdr_buf = *line_buf; + hdr_buf.left = hdrlen; + + if (!advance (line_buf, hdrlen)) + return 0; + + hdr->min_insn_len = read_byte (&hdr_buf); + if (hdr->version < 4) + hdr->max_ops_per_insn = 1; + else + hdr->max_ops_per_insn = read_byte (&hdr_buf); + + /* We don't care about default_is_stmt. */ + read_byte (&hdr_buf); + + hdr->line_base = read_sbyte (&hdr_buf); + hdr->line_range = read_byte (&hdr_buf); + + hdr->opcode_base = read_byte (&hdr_buf); + hdr->opcode_lengths = hdr_buf.buf; + if (!advance (&hdr_buf, hdr->opcode_base - 1)) + return 0; + + /* Count the number of directory entries. */ + hdr->dirs_count = 0; + p = hdr_buf.buf; + pend = p + hdr_buf.left; + while (p < pend && *p != '\0') + { + p += strnlen((const char *) p, pend - p) + 1; + ++hdr->dirs_count; + } + + hdr->dirs = NULL; + if (hdr->dirs_count != 0) + { + hdr->dirs = ((const char **) + backtrace_alloc (state, + hdr->dirs_count * sizeof (const char *), + line_buf->error_callback, line_buf->data)); + if (hdr->dirs == NULL) + return 0; + } + + i = 0; + while (*hdr_buf.buf != '\0') + { + if (hdr_buf.reported_underflow) + return 0; + + hdr->dirs[i] = (const char *) hdr_buf.buf; + ++i; + if (!advance (&hdr_buf, + strnlen ((const char *) hdr_buf.buf, hdr_buf.left) + 1)) + return 0; + } + if (!advance (&hdr_buf, 1)) + return 0; + + /* Count the number of file entries. 
*/ + hdr->filenames_count = 0; + p = hdr_buf.buf; + pend = p + hdr_buf.left; + while (p < pend && *p != '\0') + { + p += strnlen ((const char *) p, pend - p) + 1; + p += leb128_len (p); + p += leb128_len (p); + p += leb128_len (p); + ++hdr->filenames_count; + } + + hdr->filenames = ((const char **) + backtrace_alloc (state, + hdr->filenames_count * sizeof (char *), + line_buf->error_callback, + line_buf->data)); + if (hdr->filenames == NULL) + return 0; + i = 0; + while (*hdr_buf.buf != '\0') + { + const char *filename; + uint64_t dir_index; + + if (hdr_buf.reported_underflow) + return 0; + + filename = (const char *) hdr_buf.buf; + if (!advance (&hdr_buf, + strnlen ((const char *) hdr_buf.buf, hdr_buf.left) + 1)) + return 0; + dir_index = read_uleb128 (&hdr_buf); + if (IS_ABSOLUTE_PATH (filename) + || (dir_index == 0 && u->comp_dir == NULL)) + hdr->filenames[i] = filename; + else + { + const char *dir; + size_t dir_len; + size_t filename_len; + char *s; + + if (dir_index == 0) + dir = u->comp_dir; + else if (dir_index - 1 < hdr->dirs_count) + dir = hdr->dirs[dir_index - 1]; + else + { + dwarf_buf_error (line_buf, + ("invalid directory index in " + "line number program header")); + return 0; + } + dir_len = strlen (dir); + filename_len = strlen (filename); + s = ((char *) + backtrace_alloc (state, dir_len + filename_len + 2, + line_buf->error_callback, line_buf->data)); + if (s == NULL) + return 0; + memcpy (s, dir, dir_len); + /* FIXME: If we are on a DOS-based file system, and the + directory or the file name use backslashes, then we + should use a backslash here. */ + s[dir_len] = '/'; + memcpy (s + dir_len + 1, filename, filename_len + 1); + hdr->filenames[i] = s; + } + + /* Ignore the modification time and size. */ + read_uleb128 (&hdr_buf); + read_uleb128 (&hdr_buf); + + ++i; + } + + if (hdr_buf.reported_underflow) + return 0; + + return 1; +} + +/* Read the line program, adding line mappings to VEC. Return 1 on + success, 0 on failure. */ + +static int +read_line_program (struct backtrace_state *state, struct dwarf_data *ddata, + struct unit *u, const struct line_header *hdr, + struct dwarf_buf *line_buf, struct line_vector *vec) +{ + uint64_t address; + unsigned int op_index; + const char *reset_filename; + const char *filename; + int lineno; + + address = 0; + op_index = 0; + if (hdr->filenames_count > 0) + reset_filename = hdr->filenames[0]; + else + reset_filename = ""; + filename = reset_filename; + lineno = 1; + while (line_buf->left > 0) + { + unsigned int op; + + op = read_byte (line_buf); + if (op >= hdr->opcode_base) + { + unsigned int advance; + + /* Special opcode. */ + op -= hdr->opcode_base; + advance = op / hdr->line_range; + address += (hdr->min_insn_len * (op_index + advance) + / hdr->max_ops_per_insn); + op_index = (op_index + advance) % hdr->max_ops_per_insn; + lineno += hdr->line_base + (int) (op % hdr->line_range); + add_line (state, ddata, address, filename, lineno, + line_buf->error_callback, line_buf->data, vec); + } + else if (op == DW_LNS_extended_op) + { + uint64_t len; + + len = read_uleb128 (line_buf); + op = read_byte (line_buf); + switch (op) + { + case DW_LNE_end_sequence: + /* FIXME: Should we mark the high PC here? It seems + that we already have that information from the + compilation unit. 
*/ + address = 0; + op_index = 0; + filename = reset_filename; + lineno = 1; + break; + case DW_LNE_set_address: + address = read_address (line_buf, u->addrsize); + break; + case DW_LNE_define_file: + { + const char *f; + unsigned int dir_index; + + f = (const char *) line_buf->buf; + if (!advance (line_buf, strnlen (f, line_buf->left) + 1)) + return 0; + dir_index = read_uleb128 (line_buf); + /* Ignore that time and length. */ + read_uleb128 (line_buf); + read_uleb128 (line_buf); + if (IS_ABSOLUTE_PATH (f)) + filename = f; + else + { + const char *dir; + size_t dir_len; + size_t f_len; + char *p; + + if (dir_index == 0) + dir = u->comp_dir; + else if (dir_index - 1 < hdr->dirs_count) + dir = hdr->dirs[dir_index - 1]; + else + { + dwarf_buf_error (line_buf, + ("invalid directory index " + "in line number program")); + return 0; + } + dir_len = strlen (dir); + f_len = strlen (f); + p = ((char *) + backtrace_alloc (state, dir_len + f_len + 2, + line_buf->error_callback, + line_buf->data)); + if (p == NULL) + return 0; + memcpy (p, dir, dir_len); + /* FIXME: If we are on a DOS-based file system, + and the directory or the file name use + backslashes, then we should use a backslash + here. */ + p[dir_len] = '/'; + memcpy (p + dir_len + 1, f, f_len + 1); + filename = p; + } + } + break; + case DW_LNE_set_discriminator: + /* We don't care about discriminators. */ + read_uleb128 (line_buf); + break; + default: + if (!advance (line_buf, len - 1)) + return 0; + break; + } + } + else + { + switch (op) + { + case DW_LNS_copy: + add_line (state, ddata, address, filename, lineno, + line_buf->error_callback, line_buf->data, vec); + break; + case DW_LNS_advance_pc: + { + uint64_t advance; + + advance = read_uleb128 (line_buf); + address += (hdr->min_insn_len * (op_index + advance) + / hdr->max_ops_per_insn); + op_index = (op_index + advance) % hdr->max_ops_per_insn; + } + break; + case DW_LNS_advance_line: + lineno += (int) read_sleb128 (line_buf); + break; + case DW_LNS_set_file: + { + uint64_t fileno; + + fileno = read_uleb128 (line_buf); + if (fileno == 0) + filename = ""; + else + { + if (fileno - 1 >= hdr->filenames_count) + { + dwarf_buf_error (line_buf, + ("invalid file number in " + "line number program")); + return 0; + } + filename = hdr->filenames[fileno - 1]; + } + } + break; + case DW_LNS_set_column: + read_uleb128 (line_buf); + break; + case DW_LNS_negate_stmt: + break; + case DW_LNS_set_basic_block: + break; + case DW_LNS_const_add_pc: + { + unsigned int advance; + + op = 255 - hdr->opcode_base; + advance = op / hdr->line_range; + address += (hdr->min_insn_len * (op_index + advance) + / hdr->max_ops_per_insn); + op_index = (op_index + advance) % hdr->max_ops_per_insn; + } + break; + case DW_LNS_fixed_advance_pc: + address += read_uint16 (line_buf); + op_index = 0; + break; + case DW_LNS_set_prologue_end: + break; + case DW_LNS_set_epilogue_begin: + break; + case DW_LNS_set_isa: + read_uleb128 (line_buf); + break; + default: + { + unsigned int i; + + for (i = hdr->opcode_lengths[op - 1]; i > 0; --i) + read_uleb128 (line_buf); + } + break; + } + } + } + + return 1; +} + +/* Read the line number information for a compilation unit. Returns 1 + on success, 0 on failure. 
*/ + +static int +read_line_info (struct backtrace_state *state, struct dwarf_data *ddata, + backtrace_error_callback error_callback, void *data, + struct unit *u, struct line_header *hdr, struct line **lines, + size_t *lines_count) +{ + struct line_vector vec; + struct dwarf_buf line_buf; + uint64_t len; + int is_dwarf64; + struct line *ln; + + memset (&vec.vec, 0, sizeof vec.vec); + vec.count = 0; + + memset (hdr, 0, sizeof *hdr); + + if (u->lineoff != (off_t) (size_t) u->lineoff + || (size_t) u->lineoff >= ddata->dwarf_line_size) + { + error_callback (data, "unit line offset out of range", 0); + goto fail; + } + + line_buf.name = ".debug_line"; + line_buf.start = ddata->dwarf_line; + line_buf.buf = ddata->dwarf_line + u->lineoff; + line_buf.left = ddata->dwarf_line_size - u->lineoff; + line_buf.is_bigendian = ddata->is_bigendian; + line_buf.error_callback = error_callback; + line_buf.data = data; + line_buf.reported_underflow = 0; + + is_dwarf64 = 0; + len = read_uint32 (&line_buf); + if (len == 0xffffffff) + { + len = read_uint64 (&line_buf); + is_dwarf64 = 1; + } + line_buf.left = len; + + if (!read_line_header (state, u, is_dwarf64, &line_buf, hdr)) + goto fail; + + if (!read_line_program (state, ddata, u, hdr, &line_buf, &vec)) + goto fail; + + if (line_buf.reported_underflow) + goto fail; + + if (vec.count == 0) + { + /* This is not a failure in the sense of generating an error, + but it is a failure in the sense that we have no useful + information. */ + goto fail; + } + + /* Allocate one extra entry at the end. */ + ln = ((struct line *) + backtrace_vector_grow (state, sizeof (struct line), error_callback, + data, &vec.vec)); + if (ln == NULL) + goto fail; + ln->pc = (uintptr_t) -1; + ln->filename = NULL; + ln->lineno = 0; + ln->idx = 0; + + if (!backtrace_vector_release (state, &vec.vec, error_callback, data)) + goto fail; + + ln = (struct line *) vec.vec.base; + backtrace_qsort (ln, vec.count, sizeof (struct line), line_compare); + + *lines = ln; + *lines_count = vec.count; + + return 1; + + fail: + vec.vec.alc += vec.vec.size; + vec.vec.size = 0; + backtrace_vector_release (state, &vec.vec, error_callback, data); + free_line_header (state, hdr, error_callback, data); + *lines = (struct line *) (uintptr_t) -1; + *lines_count = 0; + return 0; +} + +/* Read the name of a function from a DIE referenced by a + DW_AT_abstract_origin or DW_AT_specification tag. OFFSET is within + the same compilation unit. */ + +static const char * +read_referenced_name (struct dwarf_data *ddata, struct unit *u, + uint64_t offset, backtrace_error_callback error_callback, + void *data) +{ + struct dwarf_buf unit_buf; + uint64_t code; + const struct abbrev *abbrev; + const char *ret; + size_t i; + + /* OFFSET is from the start of the data for this compilation unit. + U->unit_data is the data, but it starts U->unit_data_offset bytes + from the beginning. 
*/ + + if (offset < u->unit_data_offset + || offset - u->unit_data_offset >= u->unit_data_len) + { + error_callback (data, + "abstract origin or specification out of range", + 0); + return NULL; + } + + offset -= u->unit_data_offset; + + unit_buf.name = ".debug_info"; + unit_buf.start = ddata->dwarf_info; + unit_buf.buf = u->unit_data + offset; + unit_buf.left = u->unit_data_len - offset; + unit_buf.is_bigendian = ddata->is_bigendian; + unit_buf.error_callback = error_callback; + unit_buf.data = data; + unit_buf.reported_underflow = 0; + + code = read_uleb128 (&unit_buf); + if (code == 0) + { + dwarf_buf_error (&unit_buf, "invalid abstract origin or specification"); + return NULL; + } + + abbrev = lookup_abbrev (&u->abbrevs, code, error_callback, data); + if (abbrev == NULL) + return NULL; + + ret = NULL; + for (i = 0; i < abbrev->num_attrs; ++i) + { + struct attr_val val; + + if (!read_attribute (abbrev->attrs[i].form, &unit_buf, + u->is_dwarf64, u->version, u->addrsize, + ddata->dwarf_str, ddata->dwarf_str_size, + &val)) + return NULL; + + switch (abbrev->attrs[i].name) + { + case DW_AT_name: + /* We prefer the linkage name if we get one. */ + if (val.encoding == ATTR_VAL_STRING) + ret = val.u.string; + break; + + case DW_AT_linkage_name: + case DW_AT_MIPS_linkage_name: + if (val.encoding == ATTR_VAL_STRING) + return val.u.string; + break; + + case DW_AT_specification: + if (abbrev->attrs[i].form == DW_FORM_ref_addr + || abbrev->attrs[i].form == DW_FORM_ref_sig8) + { + /* This refers to a specification defined in some other + compilation unit. We can handle this case if we + must, but it's harder. */ + break; + } + if (val.encoding == ATTR_VAL_UINT + || val.encoding == ATTR_VAL_REF_UNIT) + { + const char *name; + + name = read_referenced_name (ddata, u, val.u.uint, + error_callback, data); + if (name != NULL) + ret = name; + } + break; + + default: + break; + } + } + + return ret; +} + +/* Add a single range to U that maps to function. Returns 1 on + success, 0 on error. */ + +static int +add_function_range (struct backtrace_state *state, struct dwarf_data *ddata, + struct function *function, uint64_t lowpc, uint64_t highpc, + backtrace_error_callback error_callback, + void *data, struct function_vector *vec) +{ + struct function_addrs *p; + + /* Add in the base address here, so that we can look up the PC + directly. */ + lowpc += ddata->base_address; + highpc += ddata->base_address; + + if (vec->count > 0) + { + p = (struct function_addrs *) vec->vec.base + vec->count - 1; + if ((lowpc == p->high || lowpc == p->high + 1) + && function == p->function) + { + if (highpc > p->high) + p->high = highpc; + return 1; + } + } + + p = ((struct function_addrs *) + backtrace_vector_grow (state, sizeof (struct function_addrs), + error_callback, data, &vec->vec)); + if (p == NULL) + return 0; + + p->low = lowpc; + p->high = highpc; + p->function = function; + ++vec->count; + return 1; +} + +/* Add PC ranges to U that map to FUNCTION. Returns 1 on success, 0 + on error. 
*/ + +static int +add_function_ranges (struct backtrace_state *state, struct dwarf_data *ddata, + struct unit *u, struct function *function, + uint64_t ranges, uint64_t base, + backtrace_error_callback error_callback, void *data, + struct function_vector *vec) +{ + struct dwarf_buf ranges_buf; + + if (ranges >= ddata->dwarf_ranges_size) + { + error_callback (data, "function ranges offset out of range", 0); + return 0; + } + + ranges_buf.name = ".debug_ranges"; + ranges_buf.start = ddata->dwarf_ranges; + ranges_buf.buf = ddata->dwarf_ranges + ranges; + ranges_buf.left = ddata->dwarf_ranges_size - ranges; + ranges_buf.is_bigendian = ddata->is_bigendian; + ranges_buf.error_callback = error_callback; + ranges_buf.data = data; + ranges_buf.reported_underflow = 0; + + while (1) + { + uint64_t low; + uint64_t high; + + if (ranges_buf.reported_underflow) + return 0; + + low = read_address (&ranges_buf, u->addrsize); + high = read_address (&ranges_buf, u->addrsize); + + if (low == 0 && high == 0) + break; + + if (is_highest_address (low, u->addrsize)) + base = high; + else + { + if (!add_function_range (state, ddata, function, low + base, + high + base, error_callback, data, vec)) + return 0; + } + } + + if (ranges_buf.reported_underflow) + return 0; + + return 1; +} + +/* Read one entry plus all its children. Add function addresses to + VEC. Returns 1 on success, 0 on error. */ + +static int +read_function_entry (struct backtrace_state *state, struct dwarf_data *ddata, + struct unit *u, uint64_t base, struct dwarf_buf *unit_buf, + const struct line_header *lhdr, + backtrace_error_callback error_callback, void *data, + struct function_vector *vec_function, + struct function_vector *vec_inlined) +{ + while (unit_buf->left > 0) + { + uint64_t code; + const struct abbrev *abbrev; + int is_function; + struct function *function; + struct function_vector *vec; + size_t i; + uint64_t lowpc; + int have_lowpc; + uint64_t highpc; + int have_highpc; + int highpc_is_relative; + uint64_t ranges; + int have_ranges; + + code = read_uleb128 (unit_buf); + if (code == 0) + return 1; + + abbrev = lookup_abbrev (&u->abbrevs, code, error_callback, data); + if (abbrev == NULL) + return 0; + + is_function = (abbrev->tag == DW_TAG_subprogram + || abbrev->tag == DW_TAG_entry_point + || abbrev->tag == DW_TAG_inlined_subroutine); + + if (abbrev->tag == DW_TAG_inlined_subroutine) + vec = vec_inlined; + else + vec = vec_function; + + function = NULL; + if (is_function) + { + function = ((struct function *) + backtrace_alloc (state, sizeof *function, + error_callback, data)); + if (function == NULL) + return 0; + memset (function, 0, sizeof *function); + } + + lowpc = 0; + have_lowpc = 0; + highpc = 0; + have_highpc = 0; + highpc_is_relative = 0; + ranges = 0; + have_ranges = 0; + for (i = 0; i < abbrev->num_attrs; ++i) + { + struct attr_val val; + + if (!read_attribute (abbrev->attrs[i].form, unit_buf, + u->is_dwarf64, u->version, u->addrsize, + ddata->dwarf_str, ddata->dwarf_str_size, + &val)) + return 0; + + /* The compile unit sets the base address for any address + ranges in the function entries. 
*/ + if (abbrev->tag == DW_TAG_compile_unit + && abbrev->attrs[i].name == DW_AT_low_pc + && val.encoding == ATTR_VAL_ADDRESS) + base = val.u.uint; + + if (is_function) + { + switch (abbrev->attrs[i].name) + { + case DW_AT_call_file: + if (val.encoding == ATTR_VAL_UINT) + { + if (val.u.uint == 0) + function->caller_filename = ""; + else + { + if (val.u.uint - 1 >= lhdr->filenames_count) + { + dwarf_buf_error (unit_buf, + ("invalid file number in " + "DW_AT_call_file attribute")); + return 0; + } + function->caller_filename = + lhdr->filenames[val.u.uint - 1]; + } + } + break; + + case DW_AT_call_line: + if (val.encoding == ATTR_VAL_UINT) + function->caller_lineno = val.u.uint; + break; + + case DW_AT_abstract_origin: + case DW_AT_specification: + if (abbrev->attrs[i].form == DW_FORM_ref_addr + || abbrev->attrs[i].form == DW_FORM_ref_sig8) + { + /* This refers to an abstract origin defined in + some other compilation unit. We can handle + this case if we must, but it's harder. */ + break; + } + if (val.encoding == ATTR_VAL_UINT + || val.encoding == ATTR_VAL_REF_UNIT) + { + const char *name; + + name = read_referenced_name (ddata, u, val.u.uint, + error_callback, data); + if (name != NULL) + function->name = name; + } + break; + + case DW_AT_name: + if (val.encoding == ATTR_VAL_STRING) + { + /* Don't override a name we found in some other + way, as it will normally be more + useful--e.g., this name is normally not + mangled. */ + if (function->name == NULL) + function->name = val.u.string; + } + break; + + case DW_AT_linkage_name: + case DW_AT_MIPS_linkage_name: + if (val.encoding == ATTR_VAL_STRING) + function->name = val.u.string; + break; + + case DW_AT_low_pc: + if (val.encoding == ATTR_VAL_ADDRESS) + { + lowpc = val.u.uint; + have_lowpc = 1; + } + break; + + case DW_AT_high_pc: + if (val.encoding == ATTR_VAL_ADDRESS) + { + highpc = val.u.uint; + have_highpc = 1; + } + else if (val.encoding == ATTR_VAL_UINT) + { + highpc = val.u.uint; + have_highpc = 1; + highpc_is_relative = 1; + } + break; + + case DW_AT_ranges: + if (val.encoding == ATTR_VAL_UINT + || val.encoding == ATTR_VAL_REF_SECTION) + { + ranges = val.u.uint; + have_ranges = 1; + } + break; + + default: + break; + } + } + } + + /* If we couldn't find a name for the function, we have no use + for it. */ + if (is_function && function->name == NULL) + { + backtrace_free (state, function, sizeof *function, + error_callback, data); + is_function = 0; + } + + if (is_function) + { + if (have_ranges) + { + if (!add_function_ranges (state, ddata, u, function, ranges, + base, error_callback, data, vec)) + return 0; + } + else if (have_lowpc && have_highpc) + { + if (highpc_is_relative) + highpc += lowpc; + if (!add_function_range (state, ddata, function, lowpc, highpc, + error_callback, data, vec)) + return 0; + } + else + { + backtrace_free (state, function, sizeof *function, + error_callback, data); + is_function = 0; + } + } + + if (abbrev->has_children) + { + if (!is_function) + { + if (!read_function_entry (state, ddata, u, base, unit_buf, lhdr, + error_callback, data, vec_function, + vec_inlined)) + return 0; + } + else + { + struct function_vector fvec; + + /* Gather any information for inlined functions in + FVEC. 
*/ + + memset (&fvec, 0, sizeof fvec); + + if (!read_function_entry (state, ddata, u, base, unit_buf, lhdr, + error_callback, data, vec_function, + &fvec)) + return 0; + + if (fvec.count > 0) + { + struct function_addrs *faddrs; + + if (!backtrace_vector_release (state, &fvec.vec, + error_callback, data)) + return 0; + + faddrs = (struct function_addrs *) fvec.vec.base; + backtrace_qsort (faddrs, fvec.count, + sizeof (struct function_addrs), + function_addrs_compare); + + function->function_addrs = faddrs; + function->function_addrs_count = fvec.count; + } + } + } + } + + return 1; +} + +/* Read function name information for a compilation unit. We look + through the whole unit looking for function tags. */ + +static void +read_function_info (struct backtrace_state *state, struct dwarf_data *ddata, + const struct line_header *lhdr, + backtrace_error_callback error_callback, void *data, + struct unit *u, struct function_vector *fvec, + struct function_addrs **ret_addrs, + size_t *ret_addrs_count) +{ + struct function_vector lvec; + struct function_vector *pfvec; + struct dwarf_buf unit_buf; + struct function_addrs *addrs; + size_t addrs_count; + + /* Use FVEC if it is not NULL. Otherwise use our own vector. */ + if (fvec != NULL) + pfvec = fvec; + else + { + memset (&lvec, 0, sizeof lvec); + pfvec = &lvec; + } + + unit_buf.name = ".debug_info"; + unit_buf.start = ddata->dwarf_info; + unit_buf.buf = u->unit_data; + unit_buf.left = u->unit_data_len; + unit_buf.is_bigendian = ddata->is_bigendian; + unit_buf.error_callback = error_callback; + unit_buf.data = data; + unit_buf.reported_underflow = 0; + + while (unit_buf.left > 0) + { + if (!read_function_entry (state, ddata, u, 0, &unit_buf, lhdr, + error_callback, data, pfvec, pfvec)) + return; + } + + if (pfvec->count == 0) + return; + + addrs_count = pfvec->count; + + if (fvec == NULL) + { + if (!backtrace_vector_release (state, &lvec.vec, error_callback, data)) + return; + addrs = (struct function_addrs *) pfvec->vec.base; + } + else + { + /* Finish this list of addresses, but leave the remaining space in + the vector available for the next function unit. */ + addrs = ((struct function_addrs *) + backtrace_vector_finish (state, &fvec->vec, + error_callback, data)); + if (addrs == NULL) + return; + fvec->count = 0; + } + + backtrace_qsort (addrs, addrs_count, sizeof (struct function_addrs), + function_addrs_compare); + + *ret_addrs = addrs; + *ret_addrs_count = addrs_count; +} + +/* See if PC is inlined in FUNCTION. If it is, print out the inlined + information, and update FILENAME and LINENO for the caller. + Returns whatever CALLBACK returns, or 0 to keep going. */ + +static int +report_inlined_functions (uintptr_t pc, struct function *function, + backtrace_full_callback callback, void *data, + const char **filename, int *lineno) +{ + struct function_addrs *function_addrs; + struct function *inlined; + int ret; + + if (function->function_addrs_count == 0) + return 0; + + function_addrs = ((struct function_addrs *) + bsearch (&pc, function->function_addrs, + function->function_addrs_count, + sizeof (struct function_addrs), + function_addrs_search)); + if (function_addrs == NULL) + return 0; + + while (((size_t) (function_addrs - function->function_addrs) + 1 + < function->function_addrs_count) + && pc >= (function_addrs + 1)->low + && pc < (function_addrs + 1)->high) + ++function_addrs; + + /* We found an inlined call. */ + + inlined = function_addrs->function; + + /* Report any calls inlined into this one. 
*/ + ret = report_inlined_functions (pc, inlined, callback, data, + filename, lineno); + if (ret != 0) + return ret; + + /* Report this inlined call. */ + ret = callback (data, pc, *filename, *lineno, inlined->name); + if (ret != 0) + return ret; + + /* Our caller will report the caller of the inlined function; tell + it the appropriate filename and line number. */ + *filename = inlined->caller_filename; + *lineno = inlined->caller_lineno; + + return 0; +} + +/* Look for a PC in the DWARF mapping for one module. On success, + call CALLBACK and return whatever it returns. On error, call + ERROR_CALLBACK and return 0. Sets *FOUND to 1 if the PC is found, + 0 if not. */ + +static int +dwarf_lookup_pc (struct backtrace_state *state, struct dwarf_data *ddata, + uintptr_t pc, backtrace_full_callback callback, + backtrace_error_callback error_callback, void *data, + int *found) +{ + struct unit_addrs *entry; + struct unit *u; + int new_data; + struct line *lines; + struct line *ln; + struct function_addrs *function_addrs; + struct function *function; + const char *filename; + int lineno; + int ret; + + *found = 1; + + /* Find an address range that includes PC. */ + entry = bsearch (&pc, ddata->addrs, ddata->addrs_count, + sizeof (struct unit_addrs), unit_addrs_search); + + if (entry == NULL) + { + *found = 0; + return 0; + } + + /* If there are multiple ranges that contain PC, use the last one, + in order to produce predictable results. If we assume that all + ranges are properly nested, then the last range will be the + smallest one. */ + while ((size_t) (entry - ddata->addrs) + 1 < ddata->addrs_count + && pc >= (entry + 1)->low + && pc < (entry + 1)->high) + ++entry; + + /* We need the lines, lines_count, function_addrs, + function_addrs_count fields of u. If they are not set, we need + to set them. When running in threaded mode, we need to allow for + the possibility that some other thread is setting them + simultaneously. */ + + u = entry->u; + lines = u->lines; + + /* Skip units with no useful line number information by walking + backward. Useless line number information is marked by setting + lines == -1. */ + while (entry > ddata->addrs + && pc >= (entry - 1)->low + && pc < (entry - 1)->high) + { + if (state->threaded) + lines = (struct line *) backtrace_atomic_load_pointer (&u->lines); + + if (lines != (struct line *) (uintptr_t) -1) + break; + + --entry; + + u = entry->u; + lines = u->lines; + } + + if (state->threaded) + lines = backtrace_atomic_load_pointer (&u->lines); + + new_data = 0; + if (lines == NULL) + { + size_t function_addrs_count; + struct line_header lhdr; + size_t count; + + /* We have never read the line information for this unit. Read + it now. */ + + function_addrs = NULL; + function_addrs_count = 0; + if (read_line_info (state, ddata, error_callback, data, entry->u, &lhdr, + &lines, &count)) + { + struct function_vector *pfvec; + + /* If not threaded, reuse DDATA->FVEC for better memory + consumption. */ + if (state->threaded) + pfvec = NULL; + else + pfvec = &ddata->fvec; + read_function_info (state, ddata, &lhdr, error_callback, data, + entry->u, pfvec, &function_addrs, + &function_addrs_count); + free_line_header (state, &lhdr, error_callback, data); + new_data = 1; + } + + /* Atomically store the information we just read into the unit. + If another thread is simultaneously writing, it presumably + read the same information, and we don't care which one we + wind up with; we just leak the other one. 
We do have to + write the lines field last, so that the acquire-loads above + ensure that the other fields are set. */ + + if (!state->threaded) + { + u->lines_count = count; + u->function_addrs = function_addrs; + u->function_addrs_count = function_addrs_count; + u->lines = lines; + } + else + { + backtrace_atomic_store_size_t (&u->lines_count, count); + backtrace_atomic_store_pointer (&u->function_addrs, function_addrs); + backtrace_atomic_store_size_t (&u->function_addrs_count, + function_addrs_count); + backtrace_atomic_store_pointer (&u->lines, lines); + } + } + + /* Now all fields of U have been initialized. */ + + if (lines == (struct line *) (uintptr_t) -1) + { + /* If reading the line number information failed in some way, + try again to see if there is a better compilation unit for + this PC. */ + if (new_data) + return dwarf_lookup_pc (state, ddata, pc, callback, error_callback, + data, found); + return callback (data, pc, NULL, 0, NULL); + } + + /* Search for PC within this unit. */ + + ln = (struct line *) bsearch (&pc, lines, entry->u->lines_count, + sizeof (struct line), line_search); + if (ln == NULL) + { + /* The PC is between the low_pc and high_pc attributes of the + compilation unit, but no entry in the line table covers it. + This implies that the start of the compilation unit has no + line number information. */ + + if (entry->u->abs_filename == NULL) + { + const char *filename; + + filename = entry->u->filename; + if (filename != NULL + && !IS_ABSOLUTE_PATH (filename) + && entry->u->comp_dir != NULL) + { + size_t filename_len; + const char *dir; + size_t dir_len; + char *s; + + filename_len = strlen (filename); + dir = entry->u->comp_dir; + dir_len = strlen (dir); + s = (char *) backtrace_alloc (state, dir_len + filename_len + 2, + error_callback, data); + if (s == NULL) + { + *found = 0; + return 0; + } + memcpy (s, dir, dir_len); + /* FIXME: Should use backslash if DOS file system. */ + s[dir_len] = '/'; + memcpy (s + dir_len + 1, filename, filename_len + 1); + filename = s; + } + entry->u->abs_filename = filename; + } + + return callback (data, pc, entry->u->abs_filename, 0, NULL); + } + + /* Search for function name within this unit. */ + + if (entry->u->function_addrs_count == 0) + return callback (data, pc, ln->filename, ln->lineno, NULL); + + function_addrs = ((struct function_addrs *) + bsearch (&pc, entry->u->function_addrs, + entry->u->function_addrs_count, + sizeof (struct function_addrs), + function_addrs_search)); + if (function_addrs == NULL) + return callback (data, pc, ln->filename, ln->lineno, NULL); + + /* If there are multiple function ranges that contain PC, use the + last one, in order to produce predictable results. */ + + while (((size_t) (function_addrs - entry->u->function_addrs + 1) + < entry->u->function_addrs_count) + && pc >= (function_addrs + 1)->low + && pc < (function_addrs + 1)->high) + ++function_addrs; + + function = function_addrs->function; + + filename = ln->filename; + lineno = ln->lineno; + + ret = report_inlined_functions (pc, function, callback, data, + &filename, &lineno); + if (ret != 0) + return ret; + + return callback (data, pc, filename, lineno, function->name); +} + + +/* Return the file/line information for a PC using the DWARF mapping + we built earlier. 
*/ + +static int +dwarf_fileline (struct backtrace_state *state, uintptr_t pc, + backtrace_full_callback callback, + backtrace_error_callback error_callback, void *data) +{ + struct dwarf_data *ddata; + int found; + int ret; + + if (!state->threaded) + { + for (ddata = (struct dwarf_data *) state->fileline_data; + ddata != NULL; + ddata = ddata->next) + { + ret = dwarf_lookup_pc (state, ddata, pc, callback, error_callback, + data, &found); + if (ret != 0 || found) + return ret; + } + } + else + { + struct dwarf_data **pp; + + pp = (struct dwarf_data **) (void *) &state->fileline_data; + while (1) + { + ddata = backtrace_atomic_load_pointer (pp); + if (ddata == NULL) + break; + + ret = dwarf_lookup_pc (state, ddata, pc, callback, error_callback, + data, &found); + if (ret != 0 || found) + return ret; + + pp = &ddata->next; + } + } + + /* FIXME: See if any libraries have been dlopen'ed. */ + + return callback (data, pc, NULL, 0, NULL); +} + +/* Initialize our data structures from the DWARF debug info for a + file. Return NULL on failure. */ + +static struct dwarf_data * +build_dwarf_data (struct backtrace_state *state, + uintptr_t base_address, + const unsigned char *dwarf_info, + size_t dwarf_info_size, + const unsigned char *dwarf_line, + size_t dwarf_line_size, + const unsigned char *dwarf_abbrev, + size_t dwarf_abbrev_size, + const unsigned char *dwarf_ranges, + size_t dwarf_ranges_size, + const unsigned char *dwarf_str, + size_t dwarf_str_size, + int is_bigendian, + backtrace_error_callback error_callback, + void *data) +{ + struct unit_addrs_vector addrs_vec; + struct unit_addrs *addrs; + size_t addrs_count; + struct dwarf_data *fdata; + + if (!build_address_map (state, base_address, dwarf_info, dwarf_info_size, + dwarf_abbrev, dwarf_abbrev_size, dwarf_ranges, + dwarf_ranges_size, dwarf_str, dwarf_str_size, + is_bigendian, error_callback, data, &addrs_vec)) + return NULL; + + if (!backtrace_vector_release (state, &addrs_vec.vec, error_callback, data)) + return NULL; + addrs = (struct unit_addrs *) addrs_vec.vec.base; + addrs_count = addrs_vec.count; + backtrace_qsort (addrs, addrs_count, sizeof (struct unit_addrs), + unit_addrs_compare); + + fdata = ((struct dwarf_data *) + backtrace_alloc (state, sizeof (struct dwarf_data), + error_callback, data)); + if (fdata == NULL) + return NULL; + + fdata->next = NULL; + fdata->base_address = base_address; + fdata->addrs = addrs; + fdata->addrs_count = addrs_count; + fdata->dwarf_info = dwarf_info; + fdata->dwarf_info_size = dwarf_info_size; + fdata->dwarf_line = dwarf_line; + fdata->dwarf_line_size = dwarf_line_size; + fdata->dwarf_ranges = dwarf_ranges; + fdata->dwarf_ranges_size = dwarf_ranges_size; + fdata->dwarf_str = dwarf_str; + fdata->dwarf_str_size = dwarf_str_size; + fdata->is_bigendian = is_bigendian; + memset (&fdata->fvec, 0, sizeof fdata->fvec); + + return fdata; +} + +/* Build our data structures from the DWARF sections for a module. + Set FILELINE_FN and STATE->FILELINE_DATA. Return 1 on success, 0 + on failure. 
*/ + +int +backtrace_dwarf_add (struct backtrace_state *state, + uintptr_t base_address, + const unsigned char *dwarf_info, + size_t dwarf_info_size, + const unsigned char *dwarf_line, + size_t dwarf_line_size, + const unsigned char *dwarf_abbrev, + size_t dwarf_abbrev_size, + const unsigned char *dwarf_ranges, + size_t dwarf_ranges_size, + const unsigned char *dwarf_str, + size_t dwarf_str_size, + int is_bigendian, + backtrace_error_callback error_callback, + void *data, fileline *fileline_fn) +{ + struct dwarf_data *fdata; + + fdata = build_dwarf_data (state, base_address, dwarf_info, dwarf_info_size, + dwarf_line, dwarf_line_size, dwarf_abbrev, + dwarf_abbrev_size, dwarf_ranges, dwarf_ranges_size, + dwarf_str, dwarf_str_size, is_bigendian, + error_callback, data); + if (fdata == NULL) + return 0; + + if (!state->threaded) + { + struct dwarf_data **pp; + + for (pp = (struct dwarf_data **) (void *) &state->fileline_data; + *pp != NULL; + pp = &(*pp)->next) + ; + *pp = fdata; + } + else + { + while (1) + { + struct dwarf_data **pp; + + pp = (struct dwarf_data **) (void *) &state->fileline_data; + + while (1) + { + struct dwarf_data *p; + + p = backtrace_atomic_load_pointer (pp); + + if (p == NULL) + break; + + pp = &p->next; + } + + if (__sync_bool_compare_and_swap (pp, NULL, fdata)) + break; + } + } + + *fileline_fn = dwarf_fileline; + + return 1; +} diff --git a/backtrace-sys/src/libbacktrace/edtest.c b/backtrace-sys/src/libbacktrace/edtest.c new file mode 100644 index 000000000..3a2cac411 --- /dev/null +++ b/backtrace-sys/src/libbacktrace/edtest.c @@ -0,0 +1,121 @@ +/* edtest.c -- Test for libbacktrace storage allocation stress handling + Copyright (C) 2017-2018 Free Software Foundation, Inc. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + (1) Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + (2) Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + + (3) The name of the author may not be used to + endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, +INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, +STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING +IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. 
*/ + +#include "config.h" + +#include +#include +#include +#include +#include + +#include "backtrace.h" +#include "backtrace-supported.h" +#include "internal.h" + +#include "testlib.h" + +static int test1 (void) __attribute__ ((noinline, unused)); +static int test1 (void) __attribute__ ((noinline, unused)); +extern int f2 (int); +extern int f3 (int, int); + +static int +test1 (void) +{ + /* Returning a value here and elsewhere avoids a tailcall which + would mess up the backtrace. */ + return f2 (__LINE__) + 1; +} + +int +f3 (int f1line, int f2line) +{ + struct info all[20]; + struct bdata data; + int f3line; + int i; + + data.all = &all[0]; + data.index = 0; + data.max = 20; + data.failed = 0; + + f3line = __LINE__ + 1; + i = backtrace_full (state, 0, callback_one, error_callback_one, &data); + + if (i != 0) + { + fprintf (stderr, "test1: unexpected return value %d\n", i); + data.failed = 1; + } + + if (data.index < 3) + { + fprintf (stderr, + "test1: not enough frames; got %zu, expected at least 3\n", + data.index); + data.failed = 1; + } + + check ("test1", 0, all, f3line, "f3", "edtest.c", &data.failed); + check ("test1", 1, all, f2line, "f2", "edtest2_build.c", &data.failed); + check ("test1", 2, all, f1line, "test1", "edtest.c", &data.failed); + + printf ("%s: backtrace_full alloc stress\n", data.failed ? "FAIL" : "PASS"); + + if (data.failed) + ++failures; + + return failures; +} + +int +main (int argc ATTRIBUTE_UNUSED, char **argv ATTRIBUTE_UNUSED) +{ + state = backtrace_create_state (argv[0], BACKTRACE_SUPPORTS_THREADS, + error_callback_create, NULL); + + // Grab the storage allocation lock prior to doing anything interesting. + // The intent here is to insure that the backtrace_alloc code is forced + // to always call mmap() for new memory as opposed to reusing previously + // allocated memory from the free list. Doing things this way helps + // simulate what you might see in a multithreaded program in which there + // are racing calls to the allocator. + struct backtrace_state *state_internal = + (struct backtrace_state *) state; + state_internal->lock_alloc = 1; + + // Kick off the test + test1(); + + exit (failures > 0 ? EXIT_FAILURE : EXIT_SUCCESS); +} diff --git a/backtrace-sys/src/libbacktrace/edtest2.c b/backtrace-sys/src/libbacktrace/edtest2.c new file mode 100644 index 000000000..e0c0470be --- /dev/null +++ b/backtrace-sys/src/libbacktrace/edtest2.c @@ -0,0 +1,43 @@ +/* edtest2.c -- Test for libbacktrace storage allocation stress handling (p2) + Copyright (C) 2017-2018 Free Software Foundation, Inc. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + (1) Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + (2) Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + + (3) The name of the author may not be used to + endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, +INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, +STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING +IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. */ + +/* This file intentionally written without any #include's + */ + +extern int f3(int, int); +extern int f2(int); + +int f2(int x) +{ + /* Returning a value here and elsewhere avoids a tailcall which + would mess up the backtrace. */ + return f3(x, __LINE__) + 3; +} diff --git a/backtrace-sys/src/libbacktrace/elf.c b/backtrace-sys/src/libbacktrace/elf.c new file mode 100644 index 000000000..0fd5e6f9d --- /dev/null +++ b/backtrace-sys/src/libbacktrace/elf.c @@ -0,0 +1,3340 @@ +/* elf.c -- Get debug data from an ELF file for backtraces. + Copyright (C) 2012-2018 Free Software Foundation, Inc. + Written by Ian Lance Taylor, Google. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + (1) Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + (2) Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + + (3) The name of the author may not be used to + endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, +INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, +STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING +IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. */ + +#include "config.h" + +#include +#include +#include +#include +#include +#include + +#ifdef HAVE_DL_ITERATE_PHDR +#include +#endif + +#include "backtrace.h" +#include "internal.h" + +#ifndef S_ISLNK + #ifndef S_IFLNK + #define S_IFLNK 0120000 + #endif + #ifndef S_IFMT + #define S_IFMT 0170000 + #endif + #define S_ISLNK(m) (((m) & S_IFMT) == S_IFLNK) +#endif + +#ifndef __GNUC__ +#define __builtin_prefetch(p, r, l) +#define unlikely(x) (x) +#else +#define unlikely(x) __builtin_expect(!!(x), 0) +#endif + +#if !defined(HAVE_DECL_STRNLEN) || !HAVE_DECL_STRNLEN + +/* If strnlen is not declared, provide our own version. */ + +static size_t +xstrnlen (const char *s, size_t maxlen) +{ + size_t i; + + for (i = 0; i < maxlen; ++i) + if (s[i] == '\0') + break; + return i; +} + +#define strnlen xstrnlen + +#endif + +#ifndef HAVE_LSTAT + +/* Dummy version of lstat for systems that don't have it. 
*/ + +static int +xlstat (const char *path ATTRIBUTE_UNUSED, struct stat *st ATTRIBUTE_UNUSED) +{ + return -1; +} + +#define lstat xlstat + +#endif + +#ifndef HAVE_READLINK + +/* Dummy version of readlink for systems that don't have it. */ + +static ssize_t +xreadlink (const char *path ATTRIBUTE_UNUSED, char *buf ATTRIBUTE_UNUSED, + size_t bufsz ATTRIBUTE_UNUSED) +{ + return -1; +} + +#define readlink xreadlink + +#endif + +#ifndef HAVE_DL_ITERATE_PHDR + +/* Dummy version of dl_iterate_phdr for systems that don't have it. */ + +#define dl_phdr_info x_dl_phdr_info +#define dl_iterate_phdr x_dl_iterate_phdr + +struct dl_phdr_info +{ + uintptr_t dlpi_addr; + const char *dlpi_name; +}; + +static int +dl_iterate_phdr (int (*callback) (struct dl_phdr_info *, + size_t, void *) ATTRIBUTE_UNUSED, + void *data ATTRIBUTE_UNUSED) +{ + return 0; +} + +#endif /* ! defined (HAVE_DL_ITERATE_PHDR) */ + +/* The configure script must tell us whether we are 32-bit or 64-bit + ELF. We could make this code test and support either possibility, + but there is no point. This code only works for the currently + running executable, which means that we know the ELF mode at + configure time. */ + +#if BACKTRACE_ELF_SIZE != 32 && BACKTRACE_ELF_SIZE != 64 +#error "Unknown BACKTRACE_ELF_SIZE" +#endif + +/* might #include which might define our constants + with slightly different values. Undefine them to be safe. */ + +#undef EI_NIDENT +#undef EI_MAG0 +#undef EI_MAG1 +#undef EI_MAG2 +#undef EI_MAG3 +#undef EI_CLASS +#undef EI_DATA +#undef EI_VERSION +#undef ELF_MAG0 +#undef ELF_MAG1 +#undef ELF_MAG2 +#undef ELF_MAG3 +#undef ELFCLASS32 +#undef ELFCLASS64 +#undef ELFDATA2LSB +#undef ELFDATA2MSB +#undef EV_CURRENT +#undef ET_DYN +#undef EM_PPC64 +#undef EF_PPC64_ABI +#undef SHN_LORESERVE +#undef SHN_XINDEX +#undef SHN_UNDEF +#undef SHT_PROGBITS +#undef SHT_SYMTAB +#undef SHT_STRTAB +#undef SHT_DYNSYM +#undef SHF_COMPRESSED +#undef STT_OBJECT +#undef STT_FUNC +#undef NT_GNU_BUILD_ID +#undef ELFCOMPRESS_ZLIB + +/* Basic types. */ + +typedef uint16_t b_elf_half; /* Elf_Half. */ +typedef uint32_t b_elf_word; /* Elf_Word. */ +typedef int32_t b_elf_sword; /* Elf_Sword. */ + +#if BACKTRACE_ELF_SIZE == 32 + +typedef uint32_t b_elf_addr; /* Elf_Addr. */ +typedef uint32_t b_elf_off; /* Elf_Off. */ + +typedef uint32_t b_elf_wxword; /* 32-bit Elf_Word, 64-bit ELF_Xword. */ + +#else + +typedef uint64_t b_elf_addr; /* Elf_Addr. */ +typedef uint64_t b_elf_off; /* Elf_Off. */ +typedef uint64_t b_elf_xword; /* Elf_Xword. */ +typedef int64_t b_elf_sxword; /* Elf_Sxword. */ + +typedef uint64_t b_elf_wxword; /* 32-bit Elf_Word, 64-bit ELF_Xword. */ + +#endif + +/* Data structures and associated constants. 
*/ + +#define EI_NIDENT 16 + +typedef struct { + unsigned char e_ident[EI_NIDENT]; /* ELF "magic number" */ + b_elf_half e_type; /* Identifies object file type */ + b_elf_half e_machine; /* Specifies required architecture */ + b_elf_word e_version; /* Identifies object file version */ + b_elf_addr e_entry; /* Entry point virtual address */ + b_elf_off e_phoff; /* Program header table file offset */ + b_elf_off e_shoff; /* Section header table file offset */ + b_elf_word e_flags; /* Processor-specific flags */ + b_elf_half e_ehsize; /* ELF header size in bytes */ + b_elf_half e_phentsize; /* Program header table entry size */ + b_elf_half e_phnum; /* Program header table entry count */ + b_elf_half e_shentsize; /* Section header table entry size */ + b_elf_half e_shnum; /* Section header table entry count */ + b_elf_half e_shstrndx; /* Section header string table index */ +} b_elf_ehdr; /* Elf_Ehdr. */ + +#define EI_MAG0 0 +#define EI_MAG1 1 +#define EI_MAG2 2 +#define EI_MAG3 3 +#define EI_CLASS 4 +#define EI_DATA 5 +#define EI_VERSION 6 + +#define ELFMAG0 0x7f +#define ELFMAG1 'E' +#define ELFMAG2 'L' +#define ELFMAG3 'F' + +#define ELFCLASS32 1 +#define ELFCLASS64 2 + +#define ELFDATA2LSB 1 +#define ELFDATA2MSB 2 + +#define EV_CURRENT 1 + +#define ET_DYN 3 + +#define EM_PPC64 21 +#define EF_PPC64_ABI 3 + +typedef struct { + b_elf_word sh_name; /* Section name, index in string tbl */ + b_elf_word sh_type; /* Type of section */ + b_elf_wxword sh_flags; /* Miscellaneous section attributes */ + b_elf_addr sh_addr; /* Section virtual addr at execution */ + b_elf_off sh_offset; /* Section file offset */ + b_elf_wxword sh_size; /* Size of section in bytes */ + b_elf_word sh_link; /* Index of another section */ + b_elf_word sh_info; /* Additional section information */ + b_elf_wxword sh_addralign; /* Section alignment */ + b_elf_wxword sh_entsize; /* Entry size if section holds table */ +} b_elf_shdr; /* Elf_Shdr. */ + +#define SHN_UNDEF 0x0000 /* Undefined section */ +#define SHN_LORESERVE 0xFF00 /* Begin range of reserved indices */ +#define SHN_XINDEX 0xFFFF /* Section index is held elsewhere */ + +#define SHT_PROGBITS 1 +#define SHT_SYMTAB 2 +#define SHT_STRTAB 3 +#define SHT_DYNSYM 11 + +#define SHF_COMPRESSED 0x800 + +#if BACKTRACE_ELF_SIZE == 32 + +typedef struct +{ + b_elf_word st_name; /* Symbol name, index in string tbl */ + b_elf_addr st_value; /* Symbol value */ + b_elf_word st_size; /* Symbol size */ + unsigned char st_info; /* Symbol binding and type */ + unsigned char st_other; /* Visibility and other data */ + b_elf_half st_shndx; /* Symbol section index */ +} b_elf_sym; /* Elf_Sym. */ + +#else /* BACKTRACE_ELF_SIZE != 32 */ + +typedef struct +{ + b_elf_word st_name; /* Symbol name, index in string tbl */ + unsigned char st_info; /* Symbol binding and type */ + unsigned char st_other; /* Visibility and other data */ + b_elf_half st_shndx; /* Symbol section index */ + b_elf_addr st_value; /* Symbol value */ + b_elf_xword st_size; /* Symbol size */ +} b_elf_sym; /* Elf_Sym. 
*/ + +#endif /* BACKTRACE_ELF_SIZE != 32 */ + +#define STT_OBJECT 1 +#define STT_FUNC 2 + +typedef struct +{ + uint32_t namesz; + uint32_t descsz; + uint32_t type; + char name[1]; +} b_elf_note; + +#define NT_GNU_BUILD_ID 3 + +#if BACKTRACE_ELF_SIZE == 32 + +typedef struct +{ + b_elf_word ch_type; /* Compresstion algorithm */ + b_elf_word ch_size; /* Uncompressed size */ + b_elf_word ch_addralign; /* Alignment for uncompressed data */ +} b_elf_chdr; /* Elf_Chdr */ + +#else /* BACKTRACE_ELF_SIZE != 32 */ + +typedef struct +{ + b_elf_word ch_type; /* Compression algorithm */ + b_elf_word ch_reserved; /* Reserved */ + b_elf_xword ch_size; /* Uncompressed size */ + b_elf_xword ch_addralign; /* Alignment for uncompressed data */ +} b_elf_chdr; /* Elf_Chdr */ + +#endif /* BACKTRACE_ELF_SIZE != 32 */ + +#define ELFCOMPRESS_ZLIB 1 + +/* An index of ELF sections we care about. */ + +enum debug_section +{ + DEBUG_INFO, + DEBUG_LINE, + DEBUG_ABBREV, + DEBUG_RANGES, + DEBUG_STR, + + /* The old style compressed sections. This list must correspond to + the list of normal debug sections. */ + ZDEBUG_INFO, + ZDEBUG_LINE, + ZDEBUG_ABBREV, + ZDEBUG_RANGES, + ZDEBUG_STR, + + DEBUG_MAX +}; + +/* Names of sections, indexed by enum elf_section. */ + +static const char * const debug_section_names[DEBUG_MAX] = +{ + ".debug_info", + ".debug_line", + ".debug_abbrev", + ".debug_ranges", + ".debug_str", + ".zdebug_info", + ".zdebug_line", + ".zdebug_abbrev", + ".zdebug_ranges", + ".zdebug_str" +}; + +/* Information we gather for the sections we care about. */ + +struct debug_section_info +{ + /* Section file offset. */ + off_t offset; + /* Section size. */ + size_t size; + /* Section contents, after read from file. */ + const unsigned char *data; + /* Whether the SHF_COMPRESSED flag is set for the section. */ + int compressed; +}; + +/* Information we keep for an ELF symbol. */ + +struct elf_symbol +{ + /* The name of the symbol. */ + const char *name; + /* The address of the symbol. */ + uintptr_t address; + /* The size of the symbol. */ + size_t size; +}; + +/* Information to pass to elf_syminfo. */ + +struct elf_syminfo_data +{ + /* Symbols for the next module. */ + struct elf_syminfo_data *next; + /* The ELF symbols, sorted by address. */ + struct elf_symbol *symbols; + /* The number of symbols. */ + size_t count; +}; + +/* Information about PowerPC64 ELFv1 .opd section. */ + +struct elf_ppc64_opd_data +{ + /* Address of the .opd section. */ + b_elf_addr addr; + /* Section data. */ + const char *data; + /* Size of the .opd section. */ + size_t size; + /* Corresponding section view. */ + struct backtrace_view view; +}; + +/* Compute the CRC-32 of BUF/LEN. This uses the CRC used for + .gnu_debuglink files. 
*/ + +static uint32_t +elf_crc32 (uint32_t crc, const unsigned char *buf, size_t len) +{ + static const uint32_t crc32_table[256] = + { + 0x00000000, 0x77073096, 0xee0e612c, 0x990951ba, 0x076dc419, + 0x706af48f, 0xe963a535, 0x9e6495a3, 0x0edb8832, 0x79dcb8a4, + 0xe0d5e91e, 0x97d2d988, 0x09b64c2b, 0x7eb17cbd, 0xe7b82d07, + 0x90bf1d91, 0x1db71064, 0x6ab020f2, 0xf3b97148, 0x84be41de, + 0x1adad47d, 0x6ddde4eb, 0xf4d4b551, 0x83d385c7, 0x136c9856, + 0x646ba8c0, 0xfd62f97a, 0x8a65c9ec, 0x14015c4f, 0x63066cd9, + 0xfa0f3d63, 0x8d080df5, 0x3b6e20c8, 0x4c69105e, 0xd56041e4, + 0xa2677172, 0x3c03e4d1, 0x4b04d447, 0xd20d85fd, 0xa50ab56b, + 0x35b5a8fa, 0x42b2986c, 0xdbbbc9d6, 0xacbcf940, 0x32d86ce3, + 0x45df5c75, 0xdcd60dcf, 0xabd13d59, 0x26d930ac, 0x51de003a, + 0xc8d75180, 0xbfd06116, 0x21b4f4b5, 0x56b3c423, 0xcfba9599, + 0xb8bda50f, 0x2802b89e, 0x5f058808, 0xc60cd9b2, 0xb10be924, + 0x2f6f7c87, 0x58684c11, 0xc1611dab, 0xb6662d3d, 0x76dc4190, + 0x01db7106, 0x98d220bc, 0xefd5102a, 0x71b18589, 0x06b6b51f, + 0x9fbfe4a5, 0xe8b8d433, 0x7807c9a2, 0x0f00f934, 0x9609a88e, + 0xe10e9818, 0x7f6a0dbb, 0x086d3d2d, 0x91646c97, 0xe6635c01, + 0x6b6b51f4, 0x1c6c6162, 0x856530d8, 0xf262004e, 0x6c0695ed, + 0x1b01a57b, 0x8208f4c1, 0xf50fc457, 0x65b0d9c6, 0x12b7e950, + 0x8bbeb8ea, 0xfcb9887c, 0x62dd1ddf, 0x15da2d49, 0x8cd37cf3, + 0xfbd44c65, 0x4db26158, 0x3ab551ce, 0xa3bc0074, 0xd4bb30e2, + 0x4adfa541, 0x3dd895d7, 0xa4d1c46d, 0xd3d6f4fb, 0x4369e96a, + 0x346ed9fc, 0xad678846, 0xda60b8d0, 0x44042d73, 0x33031de5, + 0xaa0a4c5f, 0xdd0d7cc9, 0x5005713c, 0x270241aa, 0xbe0b1010, + 0xc90c2086, 0x5768b525, 0x206f85b3, 0xb966d409, 0xce61e49f, + 0x5edef90e, 0x29d9c998, 0xb0d09822, 0xc7d7a8b4, 0x59b33d17, + 0x2eb40d81, 0xb7bd5c3b, 0xc0ba6cad, 0xedb88320, 0x9abfb3b6, + 0x03b6e20c, 0x74b1d29a, 0xead54739, 0x9dd277af, 0x04db2615, + 0x73dc1683, 0xe3630b12, 0x94643b84, 0x0d6d6a3e, 0x7a6a5aa8, + 0xe40ecf0b, 0x9309ff9d, 0x0a00ae27, 0x7d079eb1, 0xf00f9344, + 0x8708a3d2, 0x1e01f268, 0x6906c2fe, 0xf762575d, 0x806567cb, + 0x196c3671, 0x6e6b06e7, 0xfed41b76, 0x89d32be0, 0x10da7a5a, + 0x67dd4acc, 0xf9b9df6f, 0x8ebeeff9, 0x17b7be43, 0x60b08ed5, + 0xd6d6a3e8, 0xa1d1937e, 0x38d8c2c4, 0x4fdff252, 0xd1bb67f1, + 0xa6bc5767, 0x3fb506dd, 0x48b2364b, 0xd80d2bda, 0xaf0a1b4c, + 0x36034af6, 0x41047a60, 0xdf60efc3, 0xa867df55, 0x316e8eef, + 0x4669be79, 0xcb61b38c, 0xbc66831a, 0x256fd2a0, 0x5268e236, + 0xcc0c7795, 0xbb0b4703, 0x220216b9, 0x5505262f, 0xc5ba3bbe, + 0xb2bd0b28, 0x2bb45a92, 0x5cb36a04, 0xc2d7ffa7, 0xb5d0cf31, + 0x2cd99e8b, 0x5bdeae1d, 0x9b64c2b0, 0xec63f226, 0x756aa39c, + 0x026d930a, 0x9c0906a9, 0xeb0e363f, 0x72076785, 0x05005713, + 0x95bf4a82, 0xe2b87a14, 0x7bb12bae, 0x0cb61b38, 0x92d28e9b, + 0xe5d5be0d, 0x7cdcefb7, 0x0bdbdf21, 0x86d3d2d4, 0xf1d4e242, + 0x68ddb3f8, 0x1fda836e, 0x81be16cd, 0xf6b9265b, 0x6fb077e1, + 0x18b74777, 0x88085ae6, 0xff0f6a70, 0x66063bca, 0x11010b5c, + 0x8f659eff, 0xf862ae69, 0x616bffd3, 0x166ccf45, 0xa00ae278, + 0xd70dd2ee, 0x4e048354, 0x3903b3c2, 0xa7672661, 0xd06016f7, + 0x4969474d, 0x3e6e77db, 0xaed16a4a, 0xd9d65adc, 0x40df0b66, + 0x37d83bf0, 0xa9bcae53, 0xdebb9ec5, 0x47b2cf7f, 0x30b5ffe9, + 0xbdbdf21c, 0xcabac28a, 0x53b39330, 0x24b4a3a6, 0xbad03605, + 0xcdd70693, 0x54de5729, 0x23d967bf, 0xb3667a2e, 0xc4614ab8, + 0x5d681b02, 0x2a6f2b94, 0xb40bbe37, 0xc30c8ea1, 0x5a05df1b, + 0x2d02ef8d + }; + const unsigned char *end; + + crc = ~crc; + for (end = buf + len; buf < end; ++ buf) + crc = crc32_table[(crc ^ *buf) & 0xff] ^ (crc >> 8); + return ~crc; +} + +/* Return the CRC-32 of the entire file open at DESCRIPTOR. 
*/ + +static uint32_t +elf_crc32_file (struct backtrace_state *state, int descriptor, + backtrace_error_callback error_callback, void *data) +{ + struct stat st; + struct backtrace_view file_view; + uint32_t ret; + + if (fstat (descriptor, &st) < 0) + { + error_callback (data, "fstat", errno); + return 0; + } + + if (!backtrace_get_view (state, descriptor, 0, st.st_size, error_callback, + data, &file_view)) + return 0; + + ret = elf_crc32 (0, (const unsigned char *) file_view.data, st.st_size); + + backtrace_release_view (state, &file_view, error_callback, data); + + return ret; +} + +/* A dummy callback function used when we can't find any debug info. */ + +static int +elf_nodebug (struct backtrace_state *state ATTRIBUTE_UNUSED, + uintptr_t pc ATTRIBUTE_UNUSED, + backtrace_full_callback callback ATTRIBUTE_UNUSED, + backtrace_error_callback error_callback, void *data) +{ + error_callback (data, "no debug info in ELF executable", -1); + return 0; +} + +/* A dummy callback function used when we can't find a symbol + table. */ + +static void +elf_nosyms (struct backtrace_state *state ATTRIBUTE_UNUSED, + uintptr_t addr ATTRIBUTE_UNUSED, + backtrace_syminfo_callback callback ATTRIBUTE_UNUSED, + backtrace_error_callback error_callback, void *data) +{ + error_callback (data, "no symbol table in ELF executable", -1); +} + +/* Compare struct elf_symbol for qsort. */ + +static int +elf_symbol_compare (const void *v1, const void *v2) +{ + const struct elf_symbol *e1 = (const struct elf_symbol *) v1; + const struct elf_symbol *e2 = (const struct elf_symbol *) v2; + + if (e1->address < e2->address) + return -1; + else if (e1->address > e2->address) + return 1; + else + return 0; +} + +/* Compare an ADDR against an elf_symbol for bsearch. We allocate one + extra entry in the array so that this can look safely at the next + entry. */ + +static int +elf_symbol_search (const void *vkey, const void *ventry) +{ + const uintptr_t *key = (const uintptr_t *) vkey; + const struct elf_symbol *entry = (const struct elf_symbol *) ventry; + uintptr_t addr; + + addr = *key; + if (addr < entry->address) + return -1; + else if (addr >= entry->address + entry->size) + return 1; + else + return 0; +} + +/* Initialize the symbol table info for elf_syminfo. */ + +static int +elf_initialize_syminfo (struct backtrace_state *state, + uintptr_t base_address, + const unsigned char *symtab_data, size_t symtab_size, + const unsigned char *strtab, size_t strtab_size, + backtrace_error_callback error_callback, + void *data, struct elf_syminfo_data *sdata, + struct elf_ppc64_opd_data *opd) +{ + size_t sym_count; + const b_elf_sym *sym; + size_t elf_symbol_count; + size_t elf_symbol_size; + struct elf_symbol *elf_symbols; + size_t i; + unsigned int j; + + sym_count = symtab_size / sizeof (b_elf_sym); + + /* We only care about function symbols. Count them. 
*/ + sym = (const b_elf_sym *) symtab_data; + elf_symbol_count = 0; + for (i = 0; i < sym_count; ++i, ++sym) + { + int info; + + info = sym->st_info & 0xf; + if ((info == STT_FUNC || info == STT_OBJECT) + && sym->st_shndx != SHN_UNDEF) + ++elf_symbol_count; + } + + elf_symbol_size = elf_symbol_count * sizeof (struct elf_symbol); + elf_symbols = ((struct elf_symbol *) + backtrace_alloc (state, elf_symbol_size, error_callback, + data)); + if (elf_symbols == NULL) + return 0; + + sym = (const b_elf_sym *) symtab_data; + j = 0; + for (i = 0; i < sym_count; ++i, ++sym) + { + int info; + + info = sym->st_info & 0xf; + if (info != STT_FUNC && info != STT_OBJECT) + continue; + if (sym->st_shndx == SHN_UNDEF) + continue; + if (sym->st_name >= strtab_size) + { + error_callback (data, "symbol string index out of range", 0); + backtrace_free (state, elf_symbols, elf_symbol_size, error_callback, + data); + return 0; + } + elf_symbols[j].name = (const char *) strtab + sym->st_name; + /* Special case PowerPC64 ELFv1 symbols in .opd section, if the symbol + is a function descriptor, read the actual code address from the + descriptor. */ + if (opd + && sym->st_value >= opd->addr + && sym->st_value < opd->addr + opd->size) + elf_symbols[j].address + = *(const b_elf_addr *) (opd->data + (sym->st_value - opd->addr)); + else + elf_symbols[j].address = sym->st_value; + elf_symbols[j].address += base_address; + elf_symbols[j].size = sym->st_size; + ++j; + } + + backtrace_qsort (elf_symbols, elf_symbol_count, sizeof (struct elf_symbol), + elf_symbol_compare); + + sdata->next = NULL; + sdata->symbols = elf_symbols; + sdata->count = elf_symbol_count; + + return 1; +} + +/* Add EDATA to the list in STATE. */ + +static void +elf_add_syminfo_data (struct backtrace_state *state, + struct elf_syminfo_data *edata) +{ + if (!state->threaded) + { + struct elf_syminfo_data **pp; + + for (pp = (struct elf_syminfo_data **) (void *) &state->syminfo_data; + *pp != NULL; + pp = &(*pp)->next) + ; + *pp = edata; + } + else + { + while (1) + { + struct elf_syminfo_data **pp; + + pp = (struct elf_syminfo_data **) (void *) &state->syminfo_data; + + while (1) + { + struct elf_syminfo_data *p; + + p = backtrace_atomic_load_pointer (pp); + + if (p == NULL) + break; + + pp = &p->next; + } + + if (__sync_bool_compare_and_swap (pp, NULL, edata)) + break; + } + } +} + +/* Return the symbol name and value for an ADDR. */ + +static void +elf_syminfo (struct backtrace_state *state, uintptr_t addr, + backtrace_syminfo_callback callback, + backtrace_error_callback error_callback ATTRIBUTE_UNUSED, + void *data) +{ + struct elf_syminfo_data *edata; + struct elf_symbol *sym = NULL; + + if (!state->threaded) + { + for (edata = (struct elf_syminfo_data *) state->syminfo_data; + edata != NULL; + edata = edata->next) + { + sym = ((struct elf_symbol *) + bsearch (&addr, edata->symbols, edata->count, + sizeof (struct elf_symbol), elf_symbol_search)); + if (sym != NULL) + break; + } + } + else + { + struct elf_syminfo_data **pp; + + pp = (struct elf_syminfo_data **) (void *) &state->syminfo_data; + while (1) + { + edata = backtrace_atomic_load_pointer (pp); + if (edata == NULL) + break; + + sym = ((struct elf_symbol *) + bsearch (&addr, edata->symbols, edata->count, + sizeof (struct elf_symbol), elf_symbol_search)); + if (sym != NULL) + break; + + pp = &edata->next; + } + } + + if (sym == NULL) + callback (data, addr, NULL, 0, 0); + else + callback (data, addr, sym->name, sym->address, sym->size); +} + +/* Return whether FILENAME is a symlink. 
*/ + +static int +elf_is_symlink (const char *filename) +{ + struct stat st; + + if (lstat (filename, &st) < 0) + return 0; + return S_ISLNK (st.st_mode); +} + +/* Return the results of reading the symlink FILENAME in a buffer + allocated by backtrace_alloc. Return the length of the buffer in + *LEN. */ + +static char * +elf_readlink (struct backtrace_state *state, const char *filename, + backtrace_error_callback error_callback, void *data, + size_t *plen) +{ + size_t len; + char *buf; + + len = 128; + while (1) + { + ssize_t rl; + + buf = backtrace_alloc (state, len, error_callback, data); + if (buf == NULL) + return NULL; + rl = readlink (filename, buf, len); + if (rl < 0) + { + backtrace_free (state, buf, len, error_callback, data); + return NULL; + } + if ((size_t) rl < len - 1) + { + buf[rl] = '\0'; + *plen = len; + return buf; + } + backtrace_free (state, buf, len, error_callback, data); + len *= 2; + } +} + +/* Open a separate debug info file, using the build ID to find it. + Returns an open file descriptor, or -1. + + The GDB manual says that the only place gdb looks for a debug file + when the build ID is known is in /usr/lib/debug/.build-id. */ + +static int +elf_open_debugfile_by_buildid (struct backtrace_state *state, + const char *buildid_data, size_t buildid_size, + backtrace_error_callback error_callback, + void *data) +{ + const char * const prefix = "/usr/lib/debug/.build-id/"; + const size_t prefix_len = strlen (prefix); + const char * const suffix = ".debug"; + const size_t suffix_len = strlen (suffix); + size_t len; + char *bd_filename; + char *t; + size_t i; + int ret; + int does_not_exist; + + len = prefix_len + buildid_size * 2 + suffix_len + 2; + bd_filename = backtrace_alloc (state, len, error_callback, data); + if (bd_filename == NULL) + return -1; + + t = bd_filename; + memcpy (t, prefix, prefix_len); + t += prefix_len; + for (i = 0; i < buildid_size; i++) + { + unsigned char b; + unsigned char nib; + + b = (unsigned char) buildid_data[i]; + nib = (b & 0xf0) >> 4; + *t++ = nib < 10 ? '0' + nib : 'a' + nib - 10; + nib = b & 0x0f; + *t++ = nib < 10 ? '0' + nib : 'a' + nib - 10; + if (i == 0) + *t++ = '/'; + } + memcpy (t, suffix, suffix_len); + t[suffix_len] = '\0'; + + ret = backtrace_open (bd_filename, error_callback, data, &does_not_exist); + + backtrace_free (state, bd_filename, len, error_callback, data); + + /* gdb checks that the debuginfo file has the same build ID note. + That seems kind of pointless to me--why would it have the right + name but not the right build ID?--so skipping the check. */ + + return ret; +} + +/* Try to open a file whose name is PREFIX (length PREFIX_LEN) + concatenated with PREFIX2 (length PREFIX2_LEN) concatenated with + DEBUGLINK_NAME. Returns an open file descriptor, or -1. 
*/ + +static int +elf_try_debugfile (struct backtrace_state *state, const char *prefix, + size_t prefix_len, const char *prefix2, size_t prefix2_len, + const char *debuglink_name, + backtrace_error_callback error_callback, void *data) +{ + size_t debuglink_len; + size_t try_len; + char *try; + int does_not_exist; + int ret; + + debuglink_len = strlen (debuglink_name); + try_len = prefix_len + prefix2_len + debuglink_len + 1; + try = backtrace_alloc (state, try_len, error_callback, data); + if (try == NULL) + return -1; + + memcpy (try, prefix, prefix_len); + memcpy (try + prefix_len, prefix2, prefix2_len); + memcpy (try + prefix_len + prefix2_len, debuglink_name, debuglink_len); + try[prefix_len + prefix2_len + debuglink_len] = '\0'; + + ret = backtrace_open (try, error_callback, data, &does_not_exist); + + backtrace_free (state, try, try_len, error_callback, data); + + return ret; +} + +/* Find a separate debug info file, using the debuglink section data + to find it. Returns an open file descriptor, or -1. */ + +static int +elf_find_debugfile_by_debuglink (struct backtrace_state *state, + const char *filename, + const char *debuglink_name, + backtrace_error_callback error_callback, + void *data) +{ + int ret; + char *alc; + size_t alc_len; + const char *slash; + int ddescriptor; + const char *prefix; + size_t prefix_len; + + /* Resolve symlinks in FILENAME. Since FILENAME is fairly likely to + be /proc/self/exe, symlinks are common. We don't try to resolve + the whole path name, just the base name. */ + ret = -1; + alc = NULL; + alc_len = 0; + while (elf_is_symlink (filename)) + { + char *new_buf; + size_t new_len; + + new_buf = elf_readlink (state, filename, error_callback, data, &new_len); + if (new_buf == NULL) + break; + + if (new_buf[0] == '/') + filename = new_buf; + else + { + slash = strrchr (filename, '/'); + if (slash == NULL) + filename = new_buf; + else + { + size_t clen; + char *c; + + slash++; + clen = slash - filename + strlen (new_buf) + 1; + c = backtrace_alloc (state, clen, error_callback, data); + if (c == NULL) + goto done; + + memcpy (c, filename, slash - filename); + memcpy (c + (slash - filename), new_buf, strlen (new_buf)); + c[slash - filename + strlen (new_buf)] = '\0'; + backtrace_free (state, new_buf, new_len, error_callback, data); + filename = c; + new_buf = c; + new_len = clen; + } + } + + if (alc != NULL) + backtrace_free (state, alc, alc_len, error_callback, data); + alc = new_buf; + alc_len = new_len; + } + + /* Look for DEBUGLINK_NAME in the same directory as FILENAME. */ + + slash = strrchr (filename, '/'); + if (slash == NULL) + { + prefix = ""; + prefix_len = 0; + } + else + { + slash++; + prefix = filename; + prefix_len = slash - filename; + } + + ddescriptor = elf_try_debugfile (state, prefix, prefix_len, "", 0, + debuglink_name, error_callback, data); + if (ddescriptor >= 0) + { + ret = ddescriptor; + goto done; + } + + /* Look for DEBUGLINK_NAME in a .debug subdirectory of FILENAME. */ + + ddescriptor = elf_try_debugfile (state, prefix, prefix_len, ".debug/", + strlen (".debug/"), debuglink_name, + error_callback, data); + if (ddescriptor >= 0) + { + ret = ddescriptor; + goto done; + } + + /* Look for DEBUGLINK_NAME in /usr/lib/debug. 
*/ + + ddescriptor = elf_try_debugfile (state, "/usr/lib/debug/", + strlen ("/usr/lib/debug/"), prefix, + prefix_len, debuglink_name, + error_callback, data); + if (ddescriptor >= 0) + ret = ddescriptor; + + done: + if (alc != NULL && alc_len > 0) + backtrace_free (state, alc, alc_len, error_callback, data); + return ret; +} + +/* Open a separate debug info file, using the debuglink section data + to find it. Returns an open file descriptor, or -1. */ + +static int +elf_open_debugfile_by_debuglink (struct backtrace_state *state, + const char *filename, + const char *debuglink_name, + uint32_t debuglink_crc, + backtrace_error_callback error_callback, + void *data) +{ + int ddescriptor; + + ddescriptor = elf_find_debugfile_by_debuglink (state, filename, + debuglink_name, + error_callback, data); + if (ddescriptor < 0) + return -1; + + if (debuglink_crc != 0) + { + uint32_t got_crc; + + got_crc = elf_crc32_file (state, ddescriptor, error_callback, data); + if (got_crc != debuglink_crc) + { + backtrace_close (ddescriptor, error_callback, data); + return -1; + } + } + + return ddescriptor; +} + +/* A function useful for setting a breakpoint for an inflation failure + when this code is compiled with -g. */ + +static void +elf_zlib_failed(void) +{ +} + +/* *PVAL is the current value being read from the stream, and *PBITS + is the number of valid bits. Ensure that *PVAL holds at least 15 + bits by reading additional bits from *PPIN, up to PINEND, as + needed. Updates *PPIN, *PVAL and *PBITS. Returns 1 on success, 0 + on error. */ + +static int +elf_zlib_fetch (const unsigned char **ppin, const unsigned char *pinend, + uint64_t *pval, unsigned int *pbits) +{ + unsigned int bits; + const unsigned char *pin; + uint64_t val; + uint32_t next; + + bits = *pbits; + if (bits >= 15) + return 1; + pin = *ppin; + val = *pval; + + if (unlikely (pinend - pin < 4)) + { + elf_zlib_failed (); + return 0; + } + +#if defined(__BYTE_ORDER__) && defined(__ORDER_LITTLE_ENDIAN__) \ + && defined(__ORDER_BIG_ENDIAN__) \ + && (__BYTE_ORDER__ == __ORDER_BIG_ENDIAN__ \ + || __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__) + /* We've ensured that PIN is aligned. */ + next = *(const uint32_t *)pin; + +#if __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__ + next = __builtin_bswap32 (next); +#endif +#else + next = pin[0] | (pin[1] << 8) | (pin[2] << 16) | (pin[3] << 24); +#endif + + val |= (uint64_t)next << bits; + bits += 32; + pin += 4; + + /* We will need the next four bytes soon. */ + __builtin_prefetch (pin, 0, 0); + + *ppin = pin; + *pval = val; + *pbits = bits; + return 1; +} + +/* Huffman code tables, like the rest of the zlib format, are defined + by RFC 1951. We store a Huffman code table as a series of tables + stored sequentially in memory. Each entry in a table is 16 bits. + The first, main, table has 256 entries. It is followed by a set of + secondary tables of length 2 to 128 entries. The maximum length of + a code sequence in the deflate format is 15 bits, so that is all we + need. Each secondary table has an index, which is the offset of + the table in the overall memory storage. + + The deflate format says that all codes of a given bit length are + lexicographically consecutive. Perhaps we could have 130 values + that require a 15-bit code, perhaps requiring three secondary + tables of size 128. I don't know if this is actually possible, but + it suggests that the maximum size required for secondary tables is + 3 * 128 + 3 * 64 ... == 768. The zlib enough program reports 660 + as the maximum. 
We permit 768, since in addition to the 256 for + the primary table, with two bytes per entry, and with the two + tables we need, that gives us a page. + + A single table entry needs to store a value or (for the main table + only) the index and size of a secondary table. Values range from 0 + to 285, inclusive. Secondary table indexes, per above, range from + 0 to 510. For a value we need to store the number of bits we need + to determine that value (one value may appear multiple times in the + table), which is 1 to 8. For a secondary table we need to store + the number of bits used to index into the table, which is 1 to 7. + And of course we need 1 bit to decide whether we have a value or a + secondary table index. So each entry needs 9 bits for value/table + index, 3 bits for size, 1 bit what it is. For simplicity we use 16 + bits per entry. */ + +/* Number of entries we allocate to for one code table. We get a page + for the two code tables we need. */ + +#define HUFFMAN_TABLE_SIZE (1024) + +/* Bit masks and shifts for the values in the table. */ + +#define HUFFMAN_VALUE_MASK 0x01ff +#define HUFFMAN_BITS_SHIFT 9 +#define HUFFMAN_BITS_MASK 0x7 +#define HUFFMAN_SECONDARY_SHIFT 12 + +/* For working memory while inflating we need two code tables, we need + an array of code lengths (max value 15, so we use unsigned char), + and an array of unsigned shorts used while building a table. The + latter two arrays must be large enough to hold the maximum number + of code lengths, which RFC 1951 defines as 286 + 30. */ + +#define ZDEBUG_TABLE_SIZE \ + (2 * HUFFMAN_TABLE_SIZE * sizeof (uint16_t) \ + + (286 + 30) * sizeof (uint16_t) \ + + (286 + 30) * sizeof (unsigned char)) + +#define ZDEBUG_TABLE_CODELEN_OFFSET \ + (2 * HUFFMAN_TABLE_SIZE * sizeof (uint16_t) \ + + (286 + 30) * sizeof (uint16_t)) + +#define ZDEBUG_TABLE_WORK_OFFSET \ + (2 * HUFFMAN_TABLE_SIZE * sizeof (uint16_t)) + +#ifdef BACKTRACE_GENERATE_FIXED_HUFFMAN_TABLE + +/* Used by the main function that generates the fixed table to learn + the table size. */ +static size_t final_next_secondary; + +#endif + +/* Build a Huffman code table from an array of lengths in CODES of + length CODES_LEN. The table is stored into *TABLE. ZDEBUG_TABLE + is the same as for elf_zlib_inflate, used to find some work space. + Returns 1 on success, 0 on error. */ + +static int +elf_zlib_inflate_table (unsigned char *codes, size_t codes_len, + uint16_t *zdebug_table, uint16_t *table) +{ + uint16_t count[16]; + uint16_t start[16]; + uint16_t prev[16]; + uint16_t firstcode[7]; + uint16_t *next; + size_t i; + size_t j; + unsigned int code; + size_t next_secondary; + + /* Count the number of code of each length. Set NEXT[val] to be the + next value after VAL with the same bit length. */ + + next = (uint16_t *) (((unsigned char *) zdebug_table) + + ZDEBUG_TABLE_WORK_OFFSET); + + memset (&count[0], 0, 16 * sizeof (uint16_t)); + for (i = 0; i < codes_len; ++i) + { + if (unlikely (codes[i] >= 16)) + { + elf_zlib_failed (); + return 0; + } + + if (count[codes[i]] == 0) + { + start[codes[i]] = i; + prev[codes[i]] = i; + } + else + { + next[prev[codes[i]]] = i; + prev[codes[i]] = i; + } + + ++count[codes[i]]; + } + + /* For each length, fill in the table for the codes of that + length. */ + + memset (table, 0, HUFFMAN_TABLE_SIZE * sizeof (uint16_t)); + + /* Handle the values that do not require a secondary table. 
*/ + + code = 0; + for (j = 1; j <= 8; ++j) + { + unsigned int jcnt; + unsigned int val; + + jcnt = count[j]; + if (jcnt == 0) + continue; + + if (unlikely (jcnt > (1U << j))) + { + elf_zlib_failed (); + return 0; + } + + /* There are JCNT values that have this length, the values + starting from START[j] continuing through NEXT[VAL]. Those + values are assigned consecutive values starting at CODE. */ + + val = start[j]; + for (i = 0; i < jcnt; ++i) + { + uint16_t tval; + size_t ind; + unsigned int incr; + + /* In the compressed bit stream, the value VAL is encoded as + J bits with the value C. */ + + if (unlikely ((val & ~HUFFMAN_VALUE_MASK) != 0)) + { + elf_zlib_failed (); + return 0; + } + + tval = val | ((j - 1) << HUFFMAN_BITS_SHIFT); + + /* The table lookup uses 8 bits. If J is less than 8, we + don't know what the other bits will be. We need to fill + in all possibilities in the table. Since the Huffman + code is unambiguous, those entries can't be used for any + other code. */ + + for (ind = code; ind < 0x100; ind += 1 << j) + { + if (unlikely (table[ind] != 0)) + { + elf_zlib_failed (); + return 0; + } + table[ind] = tval; + } + + /* Advance to the next value with this length. */ + if (i + 1 < jcnt) + val = next[val]; + + /* The Huffman codes are stored in the bitstream with the + most significant bit first, as is required to make them + unambiguous. The effect is that when we read them from + the bitstream we see the bit sequence in reverse order: + the most significant bit of the Huffman code is the least + significant bit of the value we read from the bitstream. + That means that to make our table lookups work, we need + to reverse the bits of CODE. Since reversing bits is + tedious and in general requires using a table, we instead + increment CODE in reverse order. That is, if the number + of bits we are currently using, here named J, is 3, we + count as 000, 100, 010, 110, 001, 101, 011, 111, which is + to say the numbers from 0 to 7 but with the bits + reversed. Going to more bits, aka incrementing J, + effectively just adds more zero bits as the beginning, + and as such does not change the numeric value of CODE. + + To increment CODE of length J in reverse order, find the + most significant zero bit and set it to one while + clearing all higher bits. In other words, add 1 modulo + 2^J, only reversed. */ + + incr = 1U << (j - 1); + while ((code & incr) != 0) + incr >>= 1; + if (incr == 0) + code = 0; + else + { + code &= incr - 1; + code += incr; + } + } + } + + /* Handle the values that require a secondary table. */ + + /* Set FIRSTCODE, the number at which the codes start, for each + length. */ + + for (j = 9; j < 16; j++) + { + unsigned int jcnt; + unsigned int k; + + jcnt = count[j]; + if (jcnt == 0) + continue; + + /* There are JCNT values that have this length, the values + starting from START[j]. Those values are assigned + consecutive values starting at CODE. */ + + firstcode[j - 9] = code; + + /* Reverse add JCNT to CODE modulo 2^J. */ + for (k = 0; k < j; ++k) + { + if ((jcnt & (1U << k)) != 0) + { + unsigned int m; + unsigned int bit; + + bit = 1U << (j - k - 1); + for (m = 0; m < j - k; ++m, bit >>= 1) + { + if ((code & bit) == 0) + { + code += bit; + break; + } + code &= ~bit; + } + jcnt &= ~(1U << k); + } + } + if (unlikely (jcnt != 0)) + { + elf_zlib_failed (); + return 0; + } + } + + /* For J from 9 to 15, inclusive, we store COUNT[J] consecutive + values starting at START[J] with consecutive codes starting at + FIRSTCODE[J - 9]. 
In the primary table we need to point to the + secondary table, and the secondary table will be indexed by J - 9 + bits. We count down from 15 so that we install the larger + secondary tables first, as the smaller ones may be embedded in + the larger ones. */ + + next_secondary = 0; /* Index of next secondary table (after primary). */ + for (j = 15; j >= 9; j--) + { + unsigned int jcnt; + unsigned int val; + size_t primary; /* Current primary index. */ + size_t secondary; /* Offset to current secondary table. */ + size_t secondary_bits; /* Bit size of current secondary table. */ + + jcnt = count[j]; + if (jcnt == 0) + continue; + + val = start[j]; + code = firstcode[j - 9]; + primary = 0x100; + secondary = 0; + secondary_bits = 0; + for (i = 0; i < jcnt; ++i) + { + uint16_t tval; + size_t ind; + unsigned int incr; + + if ((code & 0xff) != primary) + { + uint16_t tprimary; + + /* Fill in a new primary table entry. */ + + primary = code & 0xff; + + tprimary = table[primary]; + if (tprimary == 0) + { + /* Start a new secondary table. */ + + if (unlikely ((next_secondary & HUFFMAN_VALUE_MASK) + != next_secondary)) + { + elf_zlib_failed (); + return 0; + } + + secondary = next_secondary; + secondary_bits = j - 8; + next_secondary += 1 << secondary_bits; + table[primary] = (secondary + + ((j - 8) << HUFFMAN_BITS_SHIFT) + + (1U << HUFFMAN_SECONDARY_SHIFT)); + } + else + { + /* There is an existing entry. It had better be a + secondary table with enough bits. */ + if (unlikely ((tprimary & (1U << HUFFMAN_SECONDARY_SHIFT)) + == 0)) + { + elf_zlib_failed (); + return 0; + } + secondary = tprimary & HUFFMAN_VALUE_MASK; + secondary_bits = ((tprimary >> HUFFMAN_BITS_SHIFT) + & HUFFMAN_BITS_MASK); + if (unlikely (secondary_bits < j - 8)) + { + elf_zlib_failed (); + return 0; + } + } + } + + /* Fill in secondary table entries. */ + + tval = val | ((j - 8) << HUFFMAN_BITS_SHIFT); + + for (ind = code >> 8; + ind < (1U << secondary_bits); + ind += 1U << (j - 8)) + { + if (unlikely (table[secondary + 0x100 + ind] != 0)) + { + elf_zlib_failed (); + return 0; + } + table[secondary + 0x100 + ind] = tval; + } + + if (i + 1 < jcnt) + val = next[val]; + + incr = 1U << (j - 1); + while ((code & incr) != 0) + incr >>= 1; + if (incr == 0) + code = 0; + else + { + code &= incr - 1; + code += incr; + } + } + } + +#ifdef BACKTRACE_GENERATE_FIXED_HUFFMAN_TABLE + final_next_secondary = next_secondary; +#endif + + return 1; +} + +#ifdef BACKTRACE_GENERATE_FIXED_HUFFMAN_TABLE + +/* Used to generate the fixed Huffman table for block type 1. 
*/ + +#include + +static uint16_t table[ZDEBUG_TABLE_SIZE]; +static unsigned char codes[288]; + +int +main () +{ + size_t i; + + for (i = 0; i <= 143; ++i) + codes[i] = 8; + for (i = 144; i <= 255; ++i) + codes[i] = 9; + for (i = 256; i <= 279; ++i) + codes[i] = 7; + for (i = 280; i <= 287; ++i) + codes[i] = 8; + if (!elf_zlib_inflate_table (&codes[0], 288, &table[0], &table[0])) + { + fprintf (stderr, "elf_zlib_inflate_table failed\n"); + exit (EXIT_FAILURE); + } + + printf ("static const uint16_t elf_zlib_default_table[%#zx] =\n", + final_next_secondary + 0x100); + printf ("{\n"); + for (i = 0; i < final_next_secondary + 0x100; i += 8) + { + size_t j; + + printf (" "); + for (j = i; j < final_next_secondary + 0x100 && j < i + 8; ++j) + printf (" %#x,", table[j]); + printf ("\n"); + } + printf ("};\n"); + printf ("\n"); + + for (i = 0; i < 32; ++i) + codes[i] = 5; + if (!elf_zlib_inflate_table (&codes[0], 32, &table[0], &table[0])) + { + fprintf (stderr, "elf_zlib_inflate_table failed\n"); + exit (EXIT_FAILURE); + } + + printf ("static const uint16_t elf_zlib_default_dist_table[%#zx] =\n", + final_next_secondary + 0x100); + printf ("{\n"); + for (i = 0; i < final_next_secondary + 0x100; i += 8) + { + size_t j; + + printf (" "); + for (j = i; j < final_next_secondary + 0x100 && j < i + 8; ++j) + printf (" %#x,", table[j]); + printf ("\n"); + } + printf ("};\n"); + + return 0; +} + +#endif + +/* The fixed tables generated by the #ifdef'ed out main function + above. */ + +static const uint16_t elf_zlib_default_table[0x170] = +{ + 0xd00, 0xe50, 0xe10, 0xf18, 0xd10, 0xe70, 0xe30, 0x1230, + 0xd08, 0xe60, 0xe20, 0x1210, 0xe00, 0xe80, 0xe40, 0x1250, + 0xd04, 0xe58, 0xe18, 0x1200, 0xd14, 0xe78, 0xe38, 0x1240, + 0xd0c, 0xe68, 0xe28, 0x1220, 0xe08, 0xe88, 0xe48, 0x1260, + 0xd02, 0xe54, 0xe14, 0xf1c, 0xd12, 0xe74, 0xe34, 0x1238, + 0xd0a, 0xe64, 0xe24, 0x1218, 0xe04, 0xe84, 0xe44, 0x1258, + 0xd06, 0xe5c, 0xe1c, 0x1208, 0xd16, 0xe7c, 0xe3c, 0x1248, + 0xd0e, 0xe6c, 0xe2c, 0x1228, 0xe0c, 0xe8c, 0xe4c, 0x1268, + 0xd01, 0xe52, 0xe12, 0xf1a, 0xd11, 0xe72, 0xe32, 0x1234, + 0xd09, 0xe62, 0xe22, 0x1214, 0xe02, 0xe82, 0xe42, 0x1254, + 0xd05, 0xe5a, 0xe1a, 0x1204, 0xd15, 0xe7a, 0xe3a, 0x1244, + 0xd0d, 0xe6a, 0xe2a, 0x1224, 0xe0a, 0xe8a, 0xe4a, 0x1264, + 0xd03, 0xe56, 0xe16, 0xf1e, 0xd13, 0xe76, 0xe36, 0x123c, + 0xd0b, 0xe66, 0xe26, 0x121c, 0xe06, 0xe86, 0xe46, 0x125c, + 0xd07, 0xe5e, 0xe1e, 0x120c, 0xd17, 0xe7e, 0xe3e, 0x124c, + 0xd0f, 0xe6e, 0xe2e, 0x122c, 0xe0e, 0xe8e, 0xe4e, 0x126c, + 0xd00, 0xe51, 0xe11, 0xf19, 0xd10, 0xe71, 0xe31, 0x1232, + 0xd08, 0xe61, 0xe21, 0x1212, 0xe01, 0xe81, 0xe41, 0x1252, + 0xd04, 0xe59, 0xe19, 0x1202, 0xd14, 0xe79, 0xe39, 0x1242, + 0xd0c, 0xe69, 0xe29, 0x1222, 0xe09, 0xe89, 0xe49, 0x1262, + 0xd02, 0xe55, 0xe15, 0xf1d, 0xd12, 0xe75, 0xe35, 0x123a, + 0xd0a, 0xe65, 0xe25, 0x121a, 0xe05, 0xe85, 0xe45, 0x125a, + 0xd06, 0xe5d, 0xe1d, 0x120a, 0xd16, 0xe7d, 0xe3d, 0x124a, + 0xd0e, 0xe6d, 0xe2d, 0x122a, 0xe0d, 0xe8d, 0xe4d, 0x126a, + 0xd01, 0xe53, 0xe13, 0xf1b, 0xd11, 0xe73, 0xe33, 0x1236, + 0xd09, 0xe63, 0xe23, 0x1216, 0xe03, 0xe83, 0xe43, 0x1256, + 0xd05, 0xe5b, 0xe1b, 0x1206, 0xd15, 0xe7b, 0xe3b, 0x1246, + 0xd0d, 0xe6b, 0xe2b, 0x1226, 0xe0b, 0xe8b, 0xe4b, 0x1266, + 0xd03, 0xe57, 0xe17, 0xf1f, 0xd13, 0xe77, 0xe37, 0x123e, + 0xd0b, 0xe67, 0xe27, 0x121e, 0xe07, 0xe87, 0xe47, 0x125e, + 0xd07, 0xe5f, 0xe1f, 0x120e, 0xd17, 0xe7f, 0xe3f, 0x124e, + 0xd0f, 0xe6f, 0xe2f, 0x122e, 0xe0f, 0xe8f, 0xe4f, 0x126e, + 0x290, 0x291, 0x292, 0x293, 0x294, 0x295, 0x296, 0x297, + 0x298, 0x299, 0x29a, 0x29b, 
0x29c, 0x29d, 0x29e, 0x29f, + 0x2a0, 0x2a1, 0x2a2, 0x2a3, 0x2a4, 0x2a5, 0x2a6, 0x2a7, + 0x2a8, 0x2a9, 0x2aa, 0x2ab, 0x2ac, 0x2ad, 0x2ae, 0x2af, + 0x2b0, 0x2b1, 0x2b2, 0x2b3, 0x2b4, 0x2b5, 0x2b6, 0x2b7, + 0x2b8, 0x2b9, 0x2ba, 0x2bb, 0x2bc, 0x2bd, 0x2be, 0x2bf, + 0x2c0, 0x2c1, 0x2c2, 0x2c3, 0x2c4, 0x2c5, 0x2c6, 0x2c7, + 0x2c8, 0x2c9, 0x2ca, 0x2cb, 0x2cc, 0x2cd, 0x2ce, 0x2cf, + 0x2d0, 0x2d1, 0x2d2, 0x2d3, 0x2d4, 0x2d5, 0x2d6, 0x2d7, + 0x2d8, 0x2d9, 0x2da, 0x2db, 0x2dc, 0x2dd, 0x2de, 0x2df, + 0x2e0, 0x2e1, 0x2e2, 0x2e3, 0x2e4, 0x2e5, 0x2e6, 0x2e7, + 0x2e8, 0x2e9, 0x2ea, 0x2eb, 0x2ec, 0x2ed, 0x2ee, 0x2ef, + 0x2f0, 0x2f1, 0x2f2, 0x2f3, 0x2f4, 0x2f5, 0x2f6, 0x2f7, + 0x2f8, 0x2f9, 0x2fa, 0x2fb, 0x2fc, 0x2fd, 0x2fe, 0x2ff, +}; + +static const uint16_t elf_zlib_default_dist_table[0x100] = +{ + 0x800, 0x810, 0x808, 0x818, 0x804, 0x814, 0x80c, 0x81c, + 0x802, 0x812, 0x80a, 0x81a, 0x806, 0x816, 0x80e, 0x81e, + 0x801, 0x811, 0x809, 0x819, 0x805, 0x815, 0x80d, 0x81d, + 0x803, 0x813, 0x80b, 0x81b, 0x807, 0x817, 0x80f, 0x81f, + 0x800, 0x810, 0x808, 0x818, 0x804, 0x814, 0x80c, 0x81c, + 0x802, 0x812, 0x80a, 0x81a, 0x806, 0x816, 0x80e, 0x81e, + 0x801, 0x811, 0x809, 0x819, 0x805, 0x815, 0x80d, 0x81d, + 0x803, 0x813, 0x80b, 0x81b, 0x807, 0x817, 0x80f, 0x81f, + 0x800, 0x810, 0x808, 0x818, 0x804, 0x814, 0x80c, 0x81c, + 0x802, 0x812, 0x80a, 0x81a, 0x806, 0x816, 0x80e, 0x81e, + 0x801, 0x811, 0x809, 0x819, 0x805, 0x815, 0x80d, 0x81d, + 0x803, 0x813, 0x80b, 0x81b, 0x807, 0x817, 0x80f, 0x81f, + 0x800, 0x810, 0x808, 0x818, 0x804, 0x814, 0x80c, 0x81c, + 0x802, 0x812, 0x80a, 0x81a, 0x806, 0x816, 0x80e, 0x81e, + 0x801, 0x811, 0x809, 0x819, 0x805, 0x815, 0x80d, 0x81d, + 0x803, 0x813, 0x80b, 0x81b, 0x807, 0x817, 0x80f, 0x81f, + 0x800, 0x810, 0x808, 0x818, 0x804, 0x814, 0x80c, 0x81c, + 0x802, 0x812, 0x80a, 0x81a, 0x806, 0x816, 0x80e, 0x81e, + 0x801, 0x811, 0x809, 0x819, 0x805, 0x815, 0x80d, 0x81d, + 0x803, 0x813, 0x80b, 0x81b, 0x807, 0x817, 0x80f, 0x81f, + 0x800, 0x810, 0x808, 0x818, 0x804, 0x814, 0x80c, 0x81c, + 0x802, 0x812, 0x80a, 0x81a, 0x806, 0x816, 0x80e, 0x81e, + 0x801, 0x811, 0x809, 0x819, 0x805, 0x815, 0x80d, 0x81d, + 0x803, 0x813, 0x80b, 0x81b, 0x807, 0x817, 0x80f, 0x81f, + 0x800, 0x810, 0x808, 0x818, 0x804, 0x814, 0x80c, 0x81c, + 0x802, 0x812, 0x80a, 0x81a, 0x806, 0x816, 0x80e, 0x81e, + 0x801, 0x811, 0x809, 0x819, 0x805, 0x815, 0x80d, 0x81d, + 0x803, 0x813, 0x80b, 0x81b, 0x807, 0x817, 0x80f, 0x81f, + 0x800, 0x810, 0x808, 0x818, 0x804, 0x814, 0x80c, 0x81c, + 0x802, 0x812, 0x80a, 0x81a, 0x806, 0x816, 0x80e, 0x81e, + 0x801, 0x811, 0x809, 0x819, 0x805, 0x815, 0x80d, 0x81d, + 0x803, 0x813, 0x80b, 0x81b, 0x807, 0x817, 0x80f, 0x81f, +}; + +/* Inflate a zlib stream from PIN/SIN to POUT/SOUT. Return 1 on + success, 0 on some error parsing the stream. */ + +static int +elf_zlib_inflate (const unsigned char *pin, size_t sin, uint16_t *zdebug_table, + unsigned char *pout, size_t sout) +{ + unsigned char *porigout; + const unsigned char *pinend; + unsigned char *poutend; + + /* We can apparently see multiple zlib streams concatenated + together, so keep going as long as there is something to read. + The last 4 bytes are the checksum. */ + porigout = pout; + pinend = pin + sin; + poutend = pout + sout; + while ((pinend - pin) > 4) + { + uint64_t val; + unsigned int bits; + int last; + + /* Read the two byte zlib header. */ + + if (unlikely ((pin[0] & 0xf) != 8)) /* 8 is zlib encoding. */ + { + /* Unknown compression method. 
*/ + elf_zlib_failed (); + return 0; + } + if (unlikely ((pin[0] >> 4) > 7)) + { + /* Window size too large. Other than this check, we don't + care about the window size. */ + elf_zlib_failed (); + return 0; + } + if (unlikely ((pin[1] & 0x20) != 0)) + { + /* Stream expects a predefined dictionary, but we have no + dictionary. */ + elf_zlib_failed (); + return 0; + } + val = (pin[0] << 8) | pin[1]; + if (unlikely (val % 31 != 0)) + { + /* Header check failure. */ + elf_zlib_failed (); + return 0; + } + pin += 2; + + /* Align PIN to a 32-bit boundary. */ + + val = 0; + bits = 0; + while ((((uintptr_t) pin) & 3) != 0) + { + val |= (uint64_t)*pin << bits; + bits += 8; + ++pin; + } + + /* Read blocks until one is marked last. */ + + last = 0; + + while (!last) + { + unsigned int type; + const uint16_t *tlit; + const uint16_t *tdist; + + if (!elf_zlib_fetch (&pin, pinend, &val, &bits)) + return 0; + + last = val & 1; + type = (val >> 1) & 3; + val >>= 3; + bits -= 3; + + if (unlikely (type == 3)) + { + /* Invalid block type. */ + elf_zlib_failed (); + return 0; + } + + if (type == 0) + { + uint16_t len; + uint16_t lenc; + + /* An uncompressed block. */ + + /* If we've read ahead more than a byte, back up. */ + while (bits > 8) + { + --pin; + bits -= 8; + } + + val = 0; + bits = 0; + if (unlikely ((pinend - pin) < 4)) + { + /* Missing length. */ + elf_zlib_failed (); + return 0; + } + len = pin[0] | (pin[1] << 8); + lenc = pin[2] | (pin[3] << 8); + pin += 4; + lenc = ~lenc; + if (unlikely (len != lenc)) + { + /* Corrupt data. */ + elf_zlib_failed (); + return 0; + } + if (unlikely (len > (unsigned int) (pinend - pin) + || len > (unsigned int) (poutend - pout))) + { + /* Not enough space in buffers. */ + elf_zlib_failed (); + return 0; + } + memcpy (pout, pin, len); + pout += len; + pin += len; + + /* Align PIN. */ + while ((((uintptr_t) pin) & 3) != 0) + { + val |= (uint64_t)*pin << bits; + bits += 8; + ++pin; + } + + /* Go around to read the next block. */ + continue; + } + + if (type == 1) + { + tlit = elf_zlib_default_table; + tdist = elf_zlib_default_dist_table; + } + else + { + unsigned int nlit; + unsigned int ndist; + unsigned int nclen; + unsigned char codebits[19]; + unsigned char *plenbase; + unsigned char *plen; + unsigned char *plenend; + + /* Read a Huffman encoding table. The various magic + numbers here are from RFC 1951. */ + + if (!elf_zlib_fetch (&pin, pinend, &val, &bits)) + return 0; + + nlit = (val & 0x1f) + 257; + val >>= 5; + ndist = (val & 0x1f) + 1; + val >>= 5; + nclen = (val & 0xf) + 4; + val >>= 4; + bits -= 14; + if (unlikely (nlit > 286 || ndist > 30)) + { + /* Values out of range. */ + elf_zlib_failed (); + return 0; + } + + /* Read and build the table used to compress the + literal, length, and distance codes. */ + + memset(&codebits[0], 0, 19); + + /* There are always at least 4 elements in the + table. 
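+   The code length code lengths arrive in the fixed order 16, 17, 18, 0,
+   8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15 given in RFC 1951
+   section 3.2.7, which is why the reads below fill codebits out of
+   numerical order.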
*/ + + if (!elf_zlib_fetch (&pin, pinend, &val, &bits)) + return 0; + + codebits[16] = val & 7; + codebits[17] = (val >> 3) & 7; + codebits[18] = (val >> 6) & 7; + codebits[0] = (val >> 9) & 7; + val >>= 12; + bits -= 12; + + if (nclen == 4) + goto codebitsdone; + + codebits[8] = val & 7; + val >>= 3; + bits -= 3; + + if (nclen == 5) + goto codebitsdone; + + if (!elf_zlib_fetch (&pin, pinend, &val, &bits)) + return 0; + + codebits[7] = val & 7; + val >>= 3; + bits -= 3; + + if (nclen == 6) + goto codebitsdone; + + codebits[9] = val & 7; + val >>= 3; + bits -= 3; + + if (nclen == 7) + goto codebitsdone; + + codebits[6] = val & 7; + val >>= 3; + bits -= 3; + + if (nclen == 8) + goto codebitsdone; + + codebits[10] = val & 7; + val >>= 3; + bits -= 3; + + if (nclen == 9) + goto codebitsdone; + + codebits[5] = val & 7; + val >>= 3; + bits -= 3; + + if (nclen == 10) + goto codebitsdone; + + if (!elf_zlib_fetch (&pin, pinend, &val, &bits)) + return 0; + + codebits[11] = val & 7; + val >>= 3; + bits -= 3; + + if (nclen == 11) + goto codebitsdone; + + codebits[4] = val & 7; + val >>= 3; + bits -= 3; + + if (nclen == 12) + goto codebitsdone; + + codebits[12] = val & 7; + val >>= 3; + bits -= 3; + + if (nclen == 13) + goto codebitsdone; + + codebits[3] = val & 7; + val >>= 3; + bits -= 3; + + if (nclen == 14) + goto codebitsdone; + + codebits[13] = val & 7; + val >>= 3; + bits -= 3; + + if (nclen == 15) + goto codebitsdone; + + if (!elf_zlib_fetch (&pin, pinend, &val, &bits)) + return 0; + + codebits[2] = val & 7; + val >>= 3; + bits -= 3; + + if (nclen == 16) + goto codebitsdone; + + codebits[14] = val & 7; + val >>= 3; + bits -= 3; + + if (nclen == 17) + goto codebitsdone; + + codebits[1] = val & 7; + val >>= 3; + bits -= 3; + + if (nclen == 18) + goto codebitsdone; + + codebits[15] = val & 7; + val >>= 3; + bits -= 3; + + codebitsdone: + + if (!elf_zlib_inflate_table (codebits, 19, zdebug_table, + zdebug_table)) + return 0; + + /* Read the compressed bit lengths of the literal, + length, and distance codes. We have allocated space + at the end of zdebug_table to hold them. */ + + plenbase = (((unsigned char *) zdebug_table) + + ZDEBUG_TABLE_CODELEN_OFFSET); + plen = plenbase; + plenend = plen + nlit + ndist; + while (plen < plenend) + { + uint16_t t; + unsigned int b; + uint16_t v; + + if (!elf_zlib_fetch (&pin, pinend, &val, &bits)) + return 0; + + t = zdebug_table[val & 0xff]; + + /* The compression here uses bit lengths up to 7, so + a secondary table is never necessary. */ + if (unlikely ((t & (1U << HUFFMAN_SECONDARY_SHIFT)) != 0)) + { + elf_zlib_failed (); + return 0; + } + + b = (t >> HUFFMAN_BITS_SHIFT) & HUFFMAN_BITS_MASK; + val >>= b + 1; + bits -= b + 1; + + v = t & HUFFMAN_VALUE_MASK; + if (v < 16) + *plen++ = v; + else if (v == 16) + { + unsigned int c; + unsigned int prev; + + /* Copy previous entry 3 to 6 times. */ + + if (unlikely (plen == plenbase)) + { + elf_zlib_failed (); + return 0; + } + + /* We used up to 7 bits since the last + elf_zlib_fetch, so we have at least 8 bits + available here. */ + + c = 3 + (val & 0x3); + val >>= 2; + bits -= 2; + if (unlikely ((unsigned int) (plenend - plen) < c)) + { + elf_zlib_failed (); + return 0; + } + + prev = plen[-1]; + switch (c) + { + case 6: + *plen++ = prev; + /* fallthrough */ + case 5: + *plen++ = prev; + /* fallthrough */ + case 4: + *plen++ = prev; + } + *plen++ = prev; + *plen++ = prev; + *plen++ = prev; + } + else if (v == 17) + { + unsigned int c; + + /* Store zero 3 to 10 times. 
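+   (This is code length symbol 17: per RFC 1951 section 3.2.7, three
+   extra bits select a repeat count of 3 + 0 through 3 + 7, i.e. between
+   3 and 10 zero lengths.)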
*/ + + /* We used up to 7 bits since the last + elf_zlib_fetch, so we have at least 8 bits + available here. */ + + c = 3 + (val & 0x7); + val >>= 3; + bits -= 3; + if (unlikely ((unsigned int) (plenend - plen) < c)) + { + elf_zlib_failed (); + return 0; + } + + switch (c) + { + case 10: + *plen++ = 0; + /* fallthrough */ + case 9: + *plen++ = 0; + /* fallthrough */ + case 8: + *plen++ = 0; + /* fallthrough */ + case 7: + *plen++ = 0; + /* fallthrough */ + case 6: + *plen++ = 0; + /* fallthrough */ + case 5: + *plen++ = 0; + /* fallthrough */ + case 4: + *plen++ = 0; + } + *plen++ = 0; + *plen++ = 0; + *plen++ = 0; + } + else if (v == 18) + { + unsigned int c; + + /* Store zero 11 to 138 times. */ + + /* We used up to 7 bits since the last + elf_zlib_fetch, so we have at least 8 bits + available here. */ + + c = 11 + (val & 0x7f); + val >>= 7; + bits -= 7; + if (unlikely ((unsigned int) (plenend - plen) < c)) + { + elf_zlib_failed (); + return 0; + } + + memset (plen, 0, c); + plen += c; + } + else + { + elf_zlib_failed (); + return 0; + } + } + + /* Make sure that the stop code can appear. */ + + plen = plenbase; + if (unlikely (plen[256] == 0)) + { + elf_zlib_failed (); + return 0; + } + + /* Build the decompression tables. */ + + if (!elf_zlib_inflate_table (plen, nlit, zdebug_table, + zdebug_table)) + return 0; + if (!elf_zlib_inflate_table (plen + nlit, ndist, zdebug_table, + zdebug_table + HUFFMAN_TABLE_SIZE)) + return 0; + tlit = zdebug_table; + tdist = zdebug_table + HUFFMAN_TABLE_SIZE; + } + + /* Inflate values until the end of the block. This is the + main loop of the inflation code. */ + + while (1) + { + uint16_t t; + unsigned int b; + uint16_t v; + unsigned int lit; + + if (!elf_zlib_fetch (&pin, pinend, &val, &bits)) + return 0; + + t = tlit[val & 0xff]; + b = (t >> HUFFMAN_BITS_SHIFT) & HUFFMAN_BITS_MASK; + v = t & HUFFMAN_VALUE_MASK; + + if ((t & (1U << HUFFMAN_SECONDARY_SHIFT)) == 0) + { + lit = v; + val >>= b + 1; + bits -= b + 1; + } + else + { + t = tlit[v + 0x100 + ((val >> 8) & ((1U << b) - 1))]; + b = (t >> HUFFMAN_BITS_SHIFT) & HUFFMAN_BITS_MASK; + lit = t & HUFFMAN_VALUE_MASK; + val >>= b + 8; + bits -= b + 8; + } + + if (lit < 256) + { + if (unlikely (pout == poutend)) + { + elf_zlib_failed (); + return 0; + } + + *pout++ = lit; + + /* We will need to write the next byte soon. We ask + for high temporal locality because we will write + to the whole cache line soon. */ + __builtin_prefetch (pout, 1, 3); + } + else if (lit == 256) + { + /* The end of the block. */ + break; + } + else + { + unsigned int dist; + unsigned int len; + + /* Convert lit into a length. */ + + if (lit < 265) + len = lit - 257 + 3; + else if (lit == 285) + len = 258; + else if (unlikely (lit > 285)) + { + elf_zlib_failed (); + return 0; + } + else + { + unsigned int extra; + + if (!elf_zlib_fetch (&pin, pinend, &val, &bits)) + return 0; + + /* This is an expression for the table of length + codes in RFC 1951 3.2.5. 
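+   As a worked example, for lit == 270: lit - 265 == 5, so extra == 2
+   and the base length below evaluates to 23; adding the two extra bits
+   read from the stream selects a length of 23 through 26, which matches
+   the RFC table entry for code 270.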
*/ + lit -= 265; + extra = (lit >> 2) + 1; + len = (lit & 3) << extra; + len += 11; + len += ((1U << (extra - 1)) - 1) << 3; + len += val & ((1U << extra) - 1); + val >>= extra; + bits -= extra; + } + + if (!elf_zlib_fetch (&pin, pinend, &val, &bits)) + return 0; + + t = tdist[val & 0xff]; + b = (t >> HUFFMAN_BITS_SHIFT) & HUFFMAN_BITS_MASK; + v = t & HUFFMAN_VALUE_MASK; + + if ((t & (1U << HUFFMAN_SECONDARY_SHIFT)) == 0) + { + dist = v; + val >>= b + 1; + bits -= b + 1; + } + else + { + t = tdist[v + 0x100 + ((val >> 8) & ((1U << b) - 1))]; + b = (t >> HUFFMAN_BITS_SHIFT) & HUFFMAN_BITS_MASK; + dist = t & HUFFMAN_VALUE_MASK; + val >>= b + 8; + bits -= b + 8; + } + + /* Convert dist to a distance. */ + + if (dist == 0) + { + /* A distance of 1. A common case, meaning + repeat the last character LEN times. */ + + if (unlikely (pout == porigout)) + { + elf_zlib_failed (); + return 0; + } + + if (unlikely ((unsigned int) (poutend - pout) < len)) + { + elf_zlib_failed (); + return 0; + } + + memset (pout, pout[-1], len); + pout += len; + } + else if (unlikely (dist > 29)) + { + elf_zlib_failed (); + return 0; + } + else + { + if (dist < 4) + dist = dist + 1; + else + { + unsigned int extra; + + if (!elf_zlib_fetch (&pin, pinend, &val, &bits)) + return 0; + + /* This is an expression for the table of + distance codes in RFC 1951 3.2.5. */ + dist -= 4; + extra = (dist >> 1) + 1; + dist = (dist & 1) << extra; + dist += 5; + dist += ((1U << (extra - 1)) - 1) << 2; + dist += val & ((1U << extra) - 1); + val >>= extra; + bits -= extra; + } + + /* Go back dist bytes, and copy len bytes from + there. */ + + if (unlikely ((unsigned int) (pout - porigout) < dist)) + { + elf_zlib_failed (); + return 0; + } + + if (unlikely ((unsigned int) (poutend - pout) < len)) + { + elf_zlib_failed (); + return 0; + } + + if (dist >= len) + { + memcpy (pout, pout - dist, len); + pout += len; + } + else + { + while (len > 0) + { + unsigned int copy; + + copy = len < dist ? len : dist; + memcpy (pout, pout - dist, copy); + len -= copy; + pout += copy; + } + } + } + } + } + } + } + + /* We should have filled the output buffer. */ + if (unlikely (pout != poutend)) + { + elf_zlib_failed (); + return 0; + } + + return 1; +} + +/* Verify the zlib checksum. The checksum is in the 4 bytes at + CHECKBYTES, and the uncompressed data is at UNCOMPRESSED / + UNCOMPRESSED_SIZE. Returns 1 on success, 0 on failure. */ + +static int +elf_zlib_verify_checksum (const unsigned char *checkbytes, + const unsigned char *uncompressed, + size_t uncompressed_size) +{ + unsigned int i; + unsigned int cksum; + const unsigned char *p; + uint32_t s1; + uint32_t s2; + size_t hsz; + + cksum = 0; + for (i = 0; i < 4; i++) + cksum = (cksum << 8) | checkbytes[i]; + + s1 = 1; + s2 = 0; + + /* Minimize modulo operations. */ + + p = uncompressed; + hsz = uncompressed_size; + while (hsz >= 5552) + { + for (i = 0; i < 5552; i += 16) + { + /* Manually unroll loop 16 times. 
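+   The enclosing bound of 5552 is the usual Adler-32 block size: it is
+   the largest n for which 255*n*(n+1)/2 + (n+1)*65520 stays below 2^32,
+   so s1 and s2 cannot overflow before the modulo reductions that follow
+   this loop.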
*/ + s1 = s1 + *p++; + s2 = s2 + s1; + s1 = s1 + *p++; + s2 = s2 + s1; + s1 = s1 + *p++; + s2 = s2 + s1; + s1 = s1 + *p++; + s2 = s2 + s1; + s1 = s1 + *p++; + s2 = s2 + s1; + s1 = s1 + *p++; + s2 = s2 + s1; + s1 = s1 + *p++; + s2 = s2 + s1; + s1 = s1 + *p++; + s2 = s2 + s1; + s1 = s1 + *p++; + s2 = s2 + s1; + s1 = s1 + *p++; + s2 = s2 + s1; + s1 = s1 + *p++; + s2 = s2 + s1; + s1 = s1 + *p++; + s2 = s2 + s1; + s1 = s1 + *p++; + s2 = s2 + s1; + s1 = s1 + *p++; + s2 = s2 + s1; + s1 = s1 + *p++; + s2 = s2 + s1; + s1 = s1 + *p++; + s2 = s2 + s1; + } + hsz -= 5552; + s1 %= 65521; + s2 %= 65521; + } + + while (hsz >= 16) + { + /* Manually unroll loop 16 times. */ + s1 = s1 + *p++; + s2 = s2 + s1; + s1 = s1 + *p++; + s2 = s2 + s1; + s1 = s1 + *p++; + s2 = s2 + s1; + s1 = s1 + *p++; + s2 = s2 + s1; + s1 = s1 + *p++; + s2 = s2 + s1; + s1 = s1 + *p++; + s2 = s2 + s1; + s1 = s1 + *p++; + s2 = s2 + s1; + s1 = s1 + *p++; + s2 = s2 + s1; + s1 = s1 + *p++; + s2 = s2 + s1; + s1 = s1 + *p++; + s2 = s2 + s1; + s1 = s1 + *p++; + s2 = s2 + s1; + s1 = s1 + *p++; + s2 = s2 + s1; + s1 = s1 + *p++; + s2 = s2 + s1; + s1 = s1 + *p++; + s2 = s2 + s1; + s1 = s1 + *p++; + s2 = s2 + s1; + s1 = s1 + *p++; + s2 = s2 + s1; + + hsz -= 16; + } + + for (i = 0; i < hsz; ++i) + { + s1 = s1 + *p++; + s2 = s2 + s1; + } + + s1 %= 65521; + s2 %= 65521; + + if (unlikely ((s2 << 16) + s1 != cksum)) + { + elf_zlib_failed (); + return 0; + } + + return 1; +} + +/* Inflate a zlib stream from PIN/SIN to POUT/SOUT, and verify the + checksum. Return 1 on success, 0 on error. */ + +static int +elf_zlib_inflate_and_verify (const unsigned char *pin, size_t sin, + uint16_t *zdebug_table, unsigned char *pout, + size_t sout) +{ + if (!elf_zlib_inflate (pin, sin, zdebug_table, pout, sout)) + return 0; + if (!elf_zlib_verify_checksum (pin + sin - 4, pout, sout)) + return 0; + return 1; +} + +/* Uncompress the old compressed debug format, the one emitted by + --compress-debug-sections=zlib-gnu. The compressed data is in + COMPRESSED / COMPRESSED_SIZE, and the function writes to + *UNCOMPRESSED / *UNCOMPRESSED_SIZE. ZDEBUG_TABLE is work space to + hold Huffman tables. Returns 0 on error, 1 on successful + decompression or if something goes wrong. In general we try to + carry on, by returning 1, even if we can't decompress. */ + +static int +elf_uncompress_zdebug (struct backtrace_state *state, + const unsigned char *compressed, size_t compressed_size, + uint16_t *zdebug_table, + backtrace_error_callback error_callback, void *data, + unsigned char **uncompressed, size_t *uncompressed_size) +{ + size_t sz; + size_t i; + unsigned char *po; + + *uncompressed = NULL; + *uncompressed_size = 0; + + /* The format starts with the four bytes ZLIB, followed by the 8 + byte length of the uncompressed data in big-endian order, + followed by a zlib stream. */ + + if (compressed_size < 12 || memcmp (compressed, "ZLIB", 4) != 0) + return 1; + + sz = 0; + for (i = 0; i < 8; i++) + sz = (sz << 8) | compressed[i + 4]; + + if (*uncompressed != NULL && *uncompressed_size >= sz) + po = *uncompressed; + else + { + po = (unsigned char *) backtrace_alloc (state, sz, error_callback, data); + if (po == NULL) + return 0; + } + + if (!elf_zlib_inflate_and_verify (compressed + 12, compressed_size - 12, + zdebug_table, po, sz)) + return 1; + + *uncompressed = po; + *uncompressed_size = sz; + + return 1; +} + +/* Uncompress the new compressed debug format, the official standard + ELF approach emitted by --compress-debug-sections=zlib-gabi. 
The + compressed data is in COMPRESSED / COMPRESSED_SIZE, and the + function writes to *UNCOMPRESSED / *UNCOMPRESSED_SIZE. + ZDEBUG_TABLE is work space as for elf_uncompress_zdebug. Returns 0 + on error, 1 on successful decompression or if something goes wrong. + In general we try to carry on, by returning 1, even if we can't + decompress. */ + +static int +elf_uncompress_chdr (struct backtrace_state *state, + const unsigned char *compressed, size_t compressed_size, + uint16_t *zdebug_table, + backtrace_error_callback error_callback, void *data, + unsigned char **uncompressed, size_t *uncompressed_size) +{ + const b_elf_chdr *chdr; + unsigned char *po; + + *uncompressed = NULL; + *uncompressed_size = 0; + + /* The format starts with an ELF compression header. */ + if (compressed_size < sizeof (b_elf_chdr)) + return 1; + + chdr = (const b_elf_chdr *) compressed; + + if (chdr->ch_type != ELFCOMPRESS_ZLIB) + { + /* Unsupported compression algorithm. */ + return 1; + } + + if (*uncompressed != NULL && *uncompressed_size >= chdr->ch_size) + po = *uncompressed; + else + { + po = (unsigned char *) backtrace_alloc (state, chdr->ch_size, + error_callback, data); + if (po == NULL) + return 0; + } + + if (!elf_zlib_inflate_and_verify (compressed + sizeof (b_elf_chdr), + compressed_size - sizeof (b_elf_chdr), + zdebug_table, po, chdr->ch_size)) + return 1; + + *uncompressed = po; + *uncompressed_size = chdr->ch_size; + + return 1; +} + +/* This function is a hook for testing the zlib support. It is only + used by tests. */ + +int +backtrace_uncompress_zdebug (struct backtrace_state *state, + const unsigned char *compressed, + size_t compressed_size, + backtrace_error_callback error_callback, + void *data, unsigned char **uncompressed, + size_t *uncompressed_size) +{ + uint16_t *zdebug_table; + int ret; + + zdebug_table = ((uint16_t *) backtrace_alloc (state, ZDEBUG_TABLE_SIZE, + error_callback, data)); + if (zdebug_table == NULL) + return 0; + ret = elf_uncompress_zdebug (state, compressed, compressed_size, + zdebug_table, error_callback, data, + uncompressed, uncompressed_size); + backtrace_free (state, zdebug_table, ZDEBUG_TABLE_SIZE, + error_callback, data); + return ret; +} + +/* Add the backtrace data for one ELF file. Returns 1 on success, + 0 on failure (in both cases descriptor is closed) or -1 if exe + is non-zero and the ELF file is ET_DYN, which tells the caller that + elf_add will need to be called on the descriptor again after + base_address is determined. 
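+   (backtrace_initialize arranges for that second call: when elf_add
+   returns -1 for the main executable it keeps the descriptor open and
+   hands it to the dl_iterate_phdr callback, which retries elf_add once
+   the PIE's load address is known.)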
*/ + +static int +elf_add (struct backtrace_state *state, const char *filename, int descriptor, + uintptr_t base_address, backtrace_error_callback error_callback, + void *data, fileline *fileline_fn, int *found_sym, int *found_dwarf, + int exe, int debuginfo) +{ + struct backtrace_view ehdr_view; + b_elf_ehdr ehdr; + off_t shoff; + unsigned int shnum; + unsigned int shstrndx; + struct backtrace_view shdrs_view; + int shdrs_view_valid; + const b_elf_shdr *shdrs; + const b_elf_shdr *shstrhdr; + size_t shstr_size; + off_t shstr_off; + struct backtrace_view names_view; + int names_view_valid; + const char *names; + unsigned int symtab_shndx; + unsigned int dynsym_shndx; + unsigned int i; + struct debug_section_info sections[DEBUG_MAX]; + struct backtrace_view symtab_view; + int symtab_view_valid; + struct backtrace_view strtab_view; + int strtab_view_valid; + struct backtrace_view buildid_view; + int buildid_view_valid; + const char *buildid_data; + uint32_t buildid_size; + struct backtrace_view debuglink_view; + int debuglink_view_valid; + const char *debuglink_name; + uint32_t debuglink_crc; + off_t min_offset; + off_t max_offset; + struct backtrace_view debug_view; + int debug_view_valid; + unsigned int using_debug_view; + uint16_t *zdebug_table; + struct elf_ppc64_opd_data opd_data, *opd; + + if (!debuginfo) + { + *found_sym = 0; + *found_dwarf = 0; + } + + shdrs_view_valid = 0; + names_view_valid = 0; + symtab_view_valid = 0; + strtab_view_valid = 0; + buildid_view_valid = 0; + buildid_data = NULL; + buildid_size = 0; + debuglink_view_valid = 0; + debuglink_name = NULL; + debuglink_crc = 0; + debug_view_valid = 0; + opd = NULL; + + if (!backtrace_get_view (state, descriptor, 0, sizeof ehdr, error_callback, + data, &ehdr_view)) + goto fail; + + memcpy (&ehdr, ehdr_view.data, sizeof ehdr); + + backtrace_release_view (state, &ehdr_view, error_callback, data); + + if (ehdr.e_ident[EI_MAG0] != ELFMAG0 + || ehdr.e_ident[EI_MAG1] != ELFMAG1 + || ehdr.e_ident[EI_MAG2] != ELFMAG2 + || ehdr.e_ident[EI_MAG3] != ELFMAG3) + { + error_callback (data, "executable file is not ELF", 0); + goto fail; + } + if (ehdr.e_ident[EI_VERSION] != EV_CURRENT) + { + error_callback (data, "executable file is unrecognized ELF version", 0); + goto fail; + } + +#if BACKTRACE_ELF_SIZE == 32 +#define BACKTRACE_ELFCLASS ELFCLASS32 +#else +#define BACKTRACE_ELFCLASS ELFCLASS64 +#endif + + if (ehdr.e_ident[EI_CLASS] != BACKTRACE_ELFCLASS) + { + error_callback (data, "executable file is unexpected ELF class", 0); + goto fail; + } + + if (ehdr.e_ident[EI_DATA] != ELFDATA2LSB + && ehdr.e_ident[EI_DATA] != ELFDATA2MSB) + { + error_callback (data, "executable file has unknown endianness", 0); + goto fail; + } + + /* If the executable is ET_DYN, it is either a PIE, or we are running + directly a shared library with .interp. We need to wait for + dl_iterate_phdr in that case to determine the actual base_address. 
*/ + if (exe && ehdr.e_type == ET_DYN) + return -1; + + shoff = ehdr.e_shoff; + shnum = ehdr.e_shnum; + shstrndx = ehdr.e_shstrndx; + + if ((shnum == 0 || shstrndx == SHN_XINDEX) + && shoff != 0) + { + struct backtrace_view shdr_view; + const b_elf_shdr *shdr; + + if (!backtrace_get_view (state, descriptor, shoff, sizeof shdr, + error_callback, data, &shdr_view)) + goto fail; + + shdr = (const b_elf_shdr *) shdr_view.data; + + if (shnum == 0) + shnum = shdr->sh_size; + + if (shstrndx == SHN_XINDEX) + { + shstrndx = shdr->sh_link; + + /* Versions of the GNU binutils between 2.12 and 2.18 did + not handle objects with more than SHN_LORESERVE sections + correctly. All large section indexes were offset by + 0x100. There is more information at + http://sourceware.org/bugzilla/show_bug.cgi?id-5900 . + Fortunately these object files are easy to detect, as the + GNU binutils always put the section header string table + near the end of the list of sections. Thus if the + section header string table index is larger than the + number of sections, then we know we have to subtract + 0x100 to get the real section index. */ + if (shstrndx >= shnum && shstrndx >= SHN_LORESERVE + 0x100) + shstrndx -= 0x100; + } + + backtrace_release_view (state, &shdr_view, error_callback, data); + } + + /* To translate PC to file/line when using DWARF, we need to find + the .debug_info and .debug_line sections. */ + + /* Read the section headers, skipping the first one. */ + + if (!backtrace_get_view (state, descriptor, shoff + sizeof (b_elf_shdr), + (shnum - 1) * sizeof (b_elf_shdr), + error_callback, data, &shdrs_view)) + goto fail; + shdrs_view_valid = 1; + shdrs = (const b_elf_shdr *) shdrs_view.data; + + /* Read the section names. */ + + shstrhdr = &shdrs[shstrndx - 1]; + shstr_size = shstrhdr->sh_size; + shstr_off = shstrhdr->sh_offset; + + if (!backtrace_get_view (state, descriptor, shstr_off, shstr_size, + error_callback, data, &names_view)) + goto fail; + names_view_valid = 1; + names = (const char *) names_view.data; + + symtab_shndx = 0; + dynsym_shndx = 0; + + memset (sections, 0, sizeof sections); + + /* Look for the symbol table. */ + for (i = 1; i < shnum; ++i) + { + const b_elf_shdr *shdr; + unsigned int sh_name; + const char *name; + int j; + + shdr = &shdrs[i - 1]; + + if (shdr->sh_type == SHT_SYMTAB) + symtab_shndx = i; + else if (shdr->sh_type == SHT_DYNSYM) + dynsym_shndx = i; + + sh_name = shdr->sh_name; + if (sh_name >= shstr_size) + { + error_callback (data, "ELF section name out of range", 0); + goto fail; + } + + name = names + sh_name; + + for (j = 0; j < (int) DEBUG_MAX; ++j) + { + if (strcmp (name, debug_section_names[j]) == 0) + { + sections[j].offset = shdr->sh_offset; + sections[j].size = shdr->sh_size; + sections[j].compressed = (shdr->sh_flags & SHF_COMPRESSED) != 0; + break; + } + } + + /* Read the build ID if present. This could check for any + SHT_NOTE section with the right note name and type, but gdb + looks for a specific section name. 
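+   A build ID note consists of a 12-byte header (namesz, descsz, type),
+   the 4-byte name "GNU" padded to a 4-byte boundary, and then descsz
+   bytes of build ID, typically 20 bytes for the default SHA1-style ID.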
*/ + if (!debuginfo + && !buildid_view_valid + && strcmp (name, ".note.gnu.build-id") == 0) + { + const b_elf_note *note; + + if (!backtrace_get_view (state, descriptor, shdr->sh_offset, + shdr->sh_size, error_callback, data, + &buildid_view)) + goto fail; + + buildid_view_valid = 1; + note = (const b_elf_note *) buildid_view.data; + if (note->type == NT_GNU_BUILD_ID + && note->namesz == 4 + && strncmp (note->name, "GNU", 4) == 0 + && shdr->sh_size < 12 + ((note->namesz + 3) & ~ 3) + note->descsz) + { + buildid_data = &note->name[0] + ((note->namesz + 3) & ~ 3); + buildid_size = note->descsz; + } + } + + /* Read the debuglink file if present. */ + if (!debuginfo + && !debuglink_view_valid + && strcmp (name, ".gnu_debuglink") == 0) + { + const char *debuglink_data; + size_t crc_offset; + + if (!backtrace_get_view (state, descriptor, shdr->sh_offset, + shdr->sh_size, error_callback, data, + &debuglink_view)) + goto fail; + + debuglink_view_valid = 1; + debuglink_data = (const char *) debuglink_view.data; + crc_offset = strnlen (debuglink_data, shdr->sh_size); + crc_offset = (crc_offset + 3) & ~3; + if (crc_offset + 4 <= shdr->sh_size) + { + debuglink_name = debuglink_data; + debuglink_crc = *(const uint32_t*)(debuglink_data + crc_offset); + } + } + + /* Read the .opd section on PowerPC64 ELFv1. */ + if (ehdr.e_machine == EM_PPC64 + && (ehdr.e_flags & EF_PPC64_ABI) < 2 + && shdr->sh_type == SHT_PROGBITS + && strcmp (name, ".opd") == 0) + { + if (!backtrace_get_view (state, descriptor, shdr->sh_offset, + shdr->sh_size, error_callback, data, + &opd_data.view)) + goto fail; + + opd = &opd_data; + opd->addr = shdr->sh_addr; + opd->data = (const char *) opd_data.view.data; + opd->size = shdr->sh_size; + } + } + + if (symtab_shndx == 0) + symtab_shndx = dynsym_shndx; + if (symtab_shndx != 0 && !debuginfo) + { + const b_elf_shdr *symtab_shdr; + unsigned int strtab_shndx; + const b_elf_shdr *strtab_shdr; + struct elf_syminfo_data *sdata; + + symtab_shdr = &shdrs[symtab_shndx - 1]; + strtab_shndx = symtab_shdr->sh_link; + if (strtab_shndx >= shnum) + { + error_callback (data, + "ELF symbol table strtab link out of range", 0); + goto fail; + } + strtab_shdr = &shdrs[strtab_shndx - 1]; + + if (!backtrace_get_view (state, descriptor, symtab_shdr->sh_offset, + symtab_shdr->sh_size, error_callback, data, + &symtab_view)) + goto fail; + symtab_view_valid = 1; + + if (!backtrace_get_view (state, descriptor, strtab_shdr->sh_offset, + strtab_shdr->sh_size, error_callback, data, + &strtab_view)) + goto fail; + strtab_view_valid = 1; + + sdata = ((struct elf_syminfo_data *) + backtrace_alloc (state, sizeof *sdata, error_callback, data)); + if (sdata == NULL) + goto fail; + + if (!elf_initialize_syminfo (state, base_address, + symtab_view.data, symtab_shdr->sh_size, + strtab_view.data, strtab_shdr->sh_size, + error_callback, data, sdata, opd)) + { + backtrace_free (state, sdata, sizeof *sdata, error_callback, data); + goto fail; + } + + /* We no longer need the symbol table, but we hold on to the + string table permanently. */ + backtrace_release_view (state, &symtab_view, error_callback, data); + symtab_view_valid = 0; + + *found_sym = 1; + + elf_add_syminfo_data (state, sdata); + } + + backtrace_release_view (state, &shdrs_view, error_callback, data); + shdrs_view_valid = 0; + backtrace_release_view (state, &names_view, error_callback, data); + names_view_valid = 0; + + /* If the debug info is in a separate file, read that one instead.
*/ + + if (buildid_data != NULL) + { + int d; + + d = elf_open_debugfile_by_buildid (state, buildid_data, buildid_size, + error_callback, data); + if (d >= 0) + { + int ret; + + backtrace_release_view (state, &buildid_view, error_callback, data); + if (debuglink_view_valid) + backtrace_release_view (state, &debuglink_view, error_callback, + data); + ret = elf_add (state, NULL, d, base_address, error_callback, data, + fileline_fn, found_sym, found_dwarf, 0, 1); + if (ret < 0) + backtrace_close (d, error_callback, data); + else + backtrace_close (descriptor, error_callback, data); + return ret; + } + } + + if (buildid_view_valid) + { + backtrace_release_view (state, &buildid_view, error_callback, data); + buildid_view_valid = 0; + } + + if (opd) + { + backtrace_release_view (state, &opd->view, error_callback, data); + opd = NULL; + } + + if (debuglink_name != NULL) + { + int d; + + d = elf_open_debugfile_by_debuglink (state, filename, debuglink_name, + debuglink_crc, error_callback, + data); + if (d >= 0) + { + int ret; + + backtrace_release_view (state, &debuglink_view, error_callback, + data); + ret = elf_add (state, NULL, d, base_address, error_callback, data, + fileline_fn, found_sym, found_dwarf, 0, 1); + if (ret < 0) + backtrace_close (d, error_callback, data); + else + backtrace_close(descriptor, error_callback, data); + return ret; + } + } + + if (debuglink_view_valid) + { + backtrace_release_view (state, &debuglink_view, error_callback, data); + debuglink_view_valid = 0; + } + + /* Read all the debug sections in a single view, since they are + probably adjacent in the file. We never release this view. */ + + min_offset = 0; + max_offset = 0; + for (i = 0; i < (int) DEBUG_MAX; ++i) + { + off_t end; + + if (sections[i].size == 0) + continue; + if (min_offset == 0 || sections[i].offset < min_offset) + min_offset = sections[i].offset; + end = sections[i].offset + sections[i].size; + if (end > max_offset) + max_offset = end; + } + if (min_offset == 0 || max_offset == 0) + { + if (!backtrace_close (descriptor, error_callback, data)) + goto fail; + return 1; + } + + if (!backtrace_get_view (state, descriptor, min_offset, + max_offset - min_offset, + error_callback, data, &debug_view)) + goto fail; + debug_view_valid = 1; + + /* We've read all we need from the executable. */ + if (!backtrace_close (descriptor, error_callback, data)) + goto fail; + descriptor = -1; + + using_debug_view = 0; + for (i = 0; i < (int) DEBUG_MAX; ++i) + { + if (sections[i].size == 0) + sections[i].data = NULL; + else + { + sections[i].data = ((const unsigned char *) debug_view.data + + (sections[i].offset - min_offset)); + if (i < ZDEBUG_INFO) + ++using_debug_view; + } + } + + /* Uncompress the old format (--compress-debug-sections=zlib-gnu). 
*/ + + zdebug_table = NULL; + for (i = 0; i < ZDEBUG_INFO; ++i) + { + struct debug_section_info *pz; + + pz = &sections[i + ZDEBUG_INFO - DEBUG_INFO]; + if (sections[i].size == 0 && pz->size > 0) + { + unsigned char *uncompressed_data; + size_t uncompressed_size; + + if (zdebug_table == NULL) + { + zdebug_table = ((uint16_t *) + backtrace_alloc (state, ZDEBUG_TABLE_SIZE, + error_callback, data)); + if (zdebug_table == NULL) + goto fail; + } + + uncompressed_data = NULL; + uncompressed_size = 0; + if (!elf_uncompress_zdebug (state, pz->data, pz->size, zdebug_table, + error_callback, data, + &uncompressed_data, &uncompressed_size)) + goto fail; + sections[i].data = uncompressed_data; + sections[i].size = uncompressed_size; + sections[i].compressed = 0; + } + } + + /* Uncompress the official ELF format + (--compress-debug-sections=zlib-gabi). */ + for (i = 0; i < ZDEBUG_INFO; ++i) + { + unsigned char *uncompressed_data; + size_t uncompressed_size; + + if (sections[i].size == 0 || !sections[i].compressed) + continue; + + if (zdebug_table == NULL) + { + zdebug_table = ((uint16_t *) + backtrace_alloc (state, ZDEBUG_TABLE_SIZE, + error_callback, data)); + if (zdebug_table == NULL) + goto fail; + } + + uncompressed_data = NULL; + uncompressed_size = 0; + if (!elf_uncompress_chdr (state, sections[i].data, sections[i].size, + zdebug_table, error_callback, data, + &uncompressed_data, &uncompressed_size)) + goto fail; + sections[i].data = uncompressed_data; + sections[i].size = uncompressed_size; + sections[i].compressed = 0; + + --using_debug_view; + } + + if (zdebug_table != NULL) + backtrace_free (state, zdebug_table, ZDEBUG_TABLE_SIZE, + error_callback, data); + + if (debug_view_valid && using_debug_view == 0) + { + backtrace_release_view (state, &debug_view, error_callback, data); + debug_view_valid = 0; + } + + if (!backtrace_dwarf_add (state, base_address, + sections[DEBUG_INFO].data, + sections[DEBUG_INFO].size, + sections[DEBUG_LINE].data, + sections[DEBUG_LINE].size, + sections[DEBUG_ABBREV].data, + sections[DEBUG_ABBREV].size, + sections[DEBUG_RANGES].data, + sections[DEBUG_RANGES].size, + sections[DEBUG_STR].data, + sections[DEBUG_STR].size, + ehdr.e_ident[EI_DATA] == ELFDATA2MSB, + error_callback, data, fileline_fn)) + goto fail; + + *found_dwarf = 1; + + return 1; + + fail: + if (shdrs_view_valid) + backtrace_release_view (state, &shdrs_view, error_callback, data); + if (names_view_valid) + backtrace_release_view (state, &names_view, error_callback, data); + if (symtab_view_valid) + backtrace_release_view (state, &symtab_view, error_callback, data); + if (strtab_view_valid) + backtrace_release_view (state, &strtab_view, error_callback, data); + if (debuglink_view_valid) + backtrace_release_view (state, &debuglink_view, error_callback, data); + if (buildid_view_valid) + backtrace_release_view (state, &buildid_view, error_callback, data); + if (debug_view_valid) + backtrace_release_view (state, &debug_view, error_callback, data); + if (opd) + backtrace_release_view (state, &opd->view, error_callback, data); + if (descriptor != -1) + backtrace_close (descriptor, error_callback, data); + return 0; +} + +/* Data passed to phdr_callback. */ + +struct phdr_data +{ + struct backtrace_state *state; + backtrace_error_callback error_callback; + void *data; + fileline *fileline_fn; + int *found_sym; + int *found_dwarf; + const char *exe_filename; + int exe_descriptor; +}; + +/* Callback passed to dl_iterate_phdr. Load debug info from shared + libraries.
*/ + +static int +#ifdef __i386__ +__attribute__ ((__force_align_arg_pointer__)) +#endif +phdr_callback (struct dl_phdr_info *info, size_t size ATTRIBUTE_UNUSED, + void *pdata) +{ + struct phdr_data *pd = (struct phdr_data *) pdata; + const char *filename; + int descriptor; + int does_not_exist; + fileline elf_fileline_fn; + int found_dwarf; + + /* There is not much we can do if we don't have the module name, + unless executable is ET_DYN, where we expect the very first + phdr_callback to be for the PIE. */ + if (info->dlpi_name == NULL || info->dlpi_name[0] == '\0') + { + if (pd->exe_descriptor == -1) + return 0; + filename = pd->exe_filename; + descriptor = pd->exe_descriptor; + pd->exe_descriptor = -1; + } + else + { + if (pd->exe_descriptor != -1) + { + backtrace_close (pd->exe_descriptor, pd->error_callback, pd->data); + pd->exe_descriptor = -1; + } + + filename = info->dlpi_name; + descriptor = backtrace_open (info->dlpi_name, pd->error_callback, + pd->data, &does_not_exist); + if (descriptor < 0) + return 0; + } + + if (elf_add (pd->state, filename, descriptor, info->dlpi_addr, + pd->error_callback, pd->data, &elf_fileline_fn, pd->found_sym, + &found_dwarf, 0, 0)) + { + if (found_dwarf) + { + *pd->found_dwarf = 1; + *pd->fileline_fn = elf_fileline_fn; + } + } + + return 0; +} + +/* Initialize the backtrace data we need from an ELF executable. At + the ELF level, all we need to do is find the debug info + sections. */ + +int +backtrace_initialize (struct backtrace_state *state, const char *filename, + int descriptor, backtrace_error_callback error_callback, + void *data, fileline *fileline_fn) +{ + int ret; + int found_sym; + int found_dwarf; + fileline elf_fileline_fn = elf_nodebug; + struct phdr_data pd; + + ret = elf_add (state, filename, descriptor, 0, error_callback, data, + &elf_fileline_fn, &found_sym, &found_dwarf, 1, 0); + if (!ret) + return 0; + + pd.state = state; + pd.error_callback = error_callback; + pd.data = data; + pd.fileline_fn = &elf_fileline_fn; + pd.found_sym = &found_sym; + pd.found_dwarf = &found_dwarf; + pd.exe_filename = filename; + pd.exe_descriptor = ret < 0 ? descriptor : -1; + + dl_iterate_phdr (phdr_callback, (void *) &pd); + + if (!state->threaded) + { + if (found_sym) + state->syminfo_fn = elf_syminfo; + else if (state->syminfo_fn == NULL) + state->syminfo_fn = elf_nosyms; + } + else + { + if (found_sym) + backtrace_atomic_store_pointer (&state->syminfo_fn, elf_syminfo); + else + (void) __sync_bool_compare_and_swap (&state->syminfo_fn, NULL, + elf_nosyms); + } + + if (!state->threaded) + *fileline_fn = state->fileline_fn; + else + *fileline_fn = backtrace_atomic_load_pointer (&state->fileline_fn); + + if (*fileline_fn == NULL || *fileline_fn == elf_nodebug) + *fileline_fn = elf_fileline_fn; + + return 1; +} diff --git a/backtrace-sys/src/libbacktrace/fileline.c b/backtrace-sys/src/libbacktrace/fileline.c new file mode 100644 index 000000000..e56730683 --- /dev/null +++ b/backtrace-sys/src/libbacktrace/fileline.c @@ -0,0 +1,201 @@ +/* fileline.c -- Get file and line number information in a backtrace. + Copyright (C) 2012-2018 Free Software Foundation, Inc. + Written by Ian Lance Taylor, Google. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + (1) Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. 
+ + (2) Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + + (3) The name of the author may not be used to + endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, +INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, +STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING +IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. */ + +#include "config.h" + +#include +#include +#include +#include +#include +#include + +#include "backtrace.h" +#include "internal.h" + +#ifndef HAVE_GETEXECNAME +#define getexecname() NULL +#endif + +/* Initialize the fileline information from the executable. Returns 1 + on success, 0 on failure. */ + +static int +fileline_initialize (struct backtrace_state *state, + backtrace_error_callback error_callback, void *data) +{ + int failed; + fileline fileline_fn; + int pass; + int called_error_callback; + int descriptor; + const char *filename; + char buf[64]; + + if (!state->threaded) + failed = state->fileline_initialization_failed; + else + failed = backtrace_atomic_load_int (&state->fileline_initialization_failed); + + if (failed) + { + error_callback (data, "failed to read executable information", -1); + return 0; + } + + if (!state->threaded) + fileline_fn = state->fileline_fn; + else + fileline_fn = backtrace_atomic_load_pointer (&state->fileline_fn); + if (fileline_fn != NULL) + return 1; + + /* We have not initialized the information. Do it now. 
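+   We try a series of names for the executable in turn: the name the
+   caller supplied, getexecname () where it exists (Solaris),
+   /proc/self/exe (Linux), /proc/curproc/file (several BSDs), and a
+   /proc/PID/object/a.out path, stopping at the first one that can be
+   opened.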
*/ + + descriptor = -1; + called_error_callback = 0; + for (pass = 0; pass < 5; ++pass) + { + int does_not_exist; + + switch (pass) + { + case 0: + filename = state->filename; + break; + case 1: + filename = getexecname (); + break; + case 2: + filename = "/proc/self/exe"; + break; + case 3: + filename = "/proc/curproc/file"; + break; + case 4: + snprintf (buf, sizeof (buf), "/proc/%ld/object/a.out", + (long) getpid ()); + filename = buf; + break; + default: + abort (); + } + + if (filename == NULL) + continue; + + descriptor = backtrace_open (filename, error_callback, data, + &does_not_exist); + if (descriptor < 0 && !does_not_exist) + { + called_error_callback = 1; + break; + } + if (descriptor >= 0) + break; + } + + if (descriptor < 0) + { + if (!called_error_callback) + { + if (state->filename != NULL) + error_callback (data, state->filename, ENOENT); + else + error_callback (data, + "libbacktrace could not find executable to open", + 0); + } + failed = 1; + } + + if (!failed) + { + if (!backtrace_initialize (state, filename, descriptor, error_callback, + data, &fileline_fn)) + failed = 1; + } + + if (failed) + { + if (!state->threaded) + state->fileline_initialization_failed = 1; + else + backtrace_atomic_store_int (&state->fileline_initialization_failed, 1); + return 0; + } + + if (!state->threaded) + state->fileline_fn = fileline_fn; + else + { + backtrace_atomic_store_pointer (&state->fileline_fn, fileline_fn); + + /* Note that if two threads initialize at once, one of the data + sets may be leaked. */ + } + + return 1; +} + +/* Given a PC, find the file name, line number, and function name. */ + +int +backtrace_pcinfo (struct backtrace_state *state, uintptr_t pc, + backtrace_full_callback callback, + backtrace_error_callback error_callback, void *data) +{ + if (!fileline_initialize (state, error_callback, data)) + return 0; + + if (state->fileline_initialization_failed) + return 0; + + return state->fileline_fn (state, pc, callback, error_callback, data); +} + +/* Given a PC, find the symbol for it, and its value. */ + +int +backtrace_syminfo (struct backtrace_state *state, uintptr_t pc, + backtrace_syminfo_callback callback, + backtrace_error_callback error_callback, void *data) +{ + if (!fileline_initialize (state, error_callback, data)) + return 0; + + if (state->fileline_initialization_failed) + return 0; + + state->syminfo_fn (state, pc, callback, error_callback, data); + return 1; +} diff --git a/backtrace-sys/src/libbacktrace/filenames.h b/backtrace-sys/src/libbacktrace/filenames.h new file mode 100644 index 000000000..2dcd6643a --- /dev/null +++ b/backtrace-sys/src/libbacktrace/filenames.h @@ -0,0 +1,49 @@ +/* btest.c -- Filename header for libbacktrace library + Copyright (C) 2012-2018 Free Software Foundation, Inc. + Written by Ian Lance Taylor, Google. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + (1) Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + (2) Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + + (3) The name of the author may not be used to + endorse or promote products derived from this software without + specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, +INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, +STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING +IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. */ + +#ifndef GCC_VERSION +# define GCC_VERSION (__GNUC__ * 1000 + __GNUC_MINOR__) +#endif + +#if (GCC_VERSION < 2007) +# define __attribute__(x) +#endif + +#ifndef ATTRIBUTE_UNUSED +# define ATTRIBUTE_UNUSED __attribute__ ((__unused__)) +#endif + +#if defined(__MSDOS__) || defined(_WIN32) || defined(__OS2__) || defined (__CYGWIN__) +# define IS_DIR_SEPARATOR(c) ((c) == '/' || (c) == '\\') +#else +# define IS_DIR_SEPARATOR(c) ((c) == '/') +#endif diff --git a/backtrace-sys/src/libbacktrace/filetype.awk b/backtrace-sys/src/libbacktrace/filetype.awk new file mode 100644 index 000000000..a5f6c8cc1 --- /dev/null +++ b/backtrace-sys/src/libbacktrace/filetype.awk @@ -0,0 +1,11 @@ +# An awk script to determine the type of a file. +/\177ELF\001/ { if (NR == 1) { print "elf32"; exit } } +/\177ELF\002/ { if (NR == 1) { print "elf64"; exit } } +/\114\001/ { if (NR == 1) { print "pecoff"; exit } } +/\144\206/ { if (NR == 1) { print "pecoff"; exit } } +/\xFE\xED\xFA\xCE/ { if (NR == 1) { print "macho32"; exit } } +/\xCE\xFA\xED\xFE/ { if (NR == 1) { print "macho32"; exit } } +/\xFE\xED\xFA\xCF/ { if (NR == 1) { print "macho64"; exit } } +/\xCF\xFA\xED\xFE/ { if (NR == 1) { print "macho64"; exit } } +/\xCA\xFE\xBA\xBE/ { if (NR == 1) { print "macho-fat"; exit } } +/\xBE\xBA\xFE\xCA/ { if (NR == 1) { print "macho-fat"; exit } } diff --git a/backtrace-sys/src/libbacktrace/install-sh b/backtrace-sys/src/libbacktrace/install-sh new file mode 100755 index 000000000..a9244eb07 --- /dev/null +++ b/backtrace-sys/src/libbacktrace/install-sh @@ -0,0 +1,527 @@ +#!/bin/sh +# install - install a program, script, or datafile + +scriptversion=2011-01-19.21; # UTC + +# This originates from X11R5 (mit/util/scripts/install.sh), which was +# later released in X11R6 (xc/config/util/install.sh) with the +# following copyright and license. +# +# Copyright (C) 1994 X Consortium +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +# X CONSORTIUM BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN +# AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNEC- +# TION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# +# Except as contained in this notice, the name of the X Consortium shall not +# be used in advertising or otherwise to promote the sale, use or other deal- +# ings in this Software without prior written authorization from the X Consor- +# tium. +# +# +# FSF changes to this file are in the public domain. +# +# Calling this script install-sh is preferred over install.sh, to prevent +# `make' implicit rules from creating a file called install from it +# when there is no Makefile. +# +# This script is compatible with the BSD install script, but was written +# from scratch. + +nl=' +' +IFS=" "" $nl" + +# set DOITPROG to echo to test this script + +# Don't use :- since 4.3BSD and earlier shells don't like it. +doit=${DOITPROG-} +if test -z "$doit"; then + doit_exec=exec +else + doit_exec=$doit +fi + +# Put in absolute file names if you don't have them in your path; +# or use environment vars. + +chgrpprog=${CHGRPPROG-chgrp} +chmodprog=${CHMODPROG-chmod} +chownprog=${CHOWNPROG-chown} +cmpprog=${CMPPROG-cmp} +cpprog=${CPPROG-cp} +mkdirprog=${MKDIRPROG-mkdir} +mvprog=${MVPROG-mv} +rmprog=${RMPROG-rm} +stripprog=${STRIPPROG-strip} + +posix_glob='?' +initialize_posix_glob=' + test "$posix_glob" != "?" || { + if (set -f) 2>/dev/null; then + posix_glob= + else + posix_glob=: + fi + } +' + +posix_mkdir= + +# Desired mode of installed file. +mode=0755 + +chgrpcmd= +chmodcmd=$chmodprog +chowncmd= +mvcmd=$mvprog +rmcmd="$rmprog -f" +stripcmd= + +src= +dst= +dir_arg= +dst_arg= + +copy_on_change=false +no_target_directory= + +usage="\ +Usage: $0 [OPTION]... [-T] SRCFILE DSTFILE + or: $0 [OPTION]... SRCFILES... DIRECTORY + or: $0 [OPTION]... -t DIRECTORY SRCFILES... + or: $0 [OPTION]... -d DIRECTORIES... + +In the 1st form, copy SRCFILE to DSTFILE. +In the 2nd and 3rd, copy all SRCFILES to DIRECTORY. +In the 4th, create DIRECTORIES. + +Options: + --help display this help and exit. + --version display version info and exit. + + -c (ignored) + -C install only if different (preserve the last data modification time) + -d create directories instead of installing files. + -g GROUP $chgrpprog installed files to GROUP. + -m MODE $chmodprog installed files to MODE. + -o USER $chownprog installed files to USER. + -s $stripprog installed files. + -t DIRECTORY install into DIRECTORY. + -T report an error if DSTFILE is a directory. + +Environment variables override the default commands: + CHGRPPROG CHMODPROG CHOWNPROG CMPPROG CPPROG MKDIRPROG MVPROG + RMPROG STRIPPROG +" + +while test $# -ne 0; do + case $1 in + -c) ;; + + -C) copy_on_change=true;; + + -d) dir_arg=true;; + + -g) chgrpcmd="$chgrpprog $2" + shift;; + + --help) echo "$usage"; exit $?;; + + -m) mode=$2 + case $mode in + *' '* | *' '* | *' +'* | *'*'* | *'?'* | *'['*) + echo "$0: invalid mode: $mode" >&2 + exit 1;; + esac + shift;; + + -o) chowncmd="$chownprog $2" + shift;; + + -s) stripcmd=$stripprog;; + + -t) dst_arg=$2 + # Protect names problematic for `test' and other utilities. 
+ case $dst_arg in + -* | [=\(\)!]) dst_arg=./$dst_arg;; + esac + shift;; + + -T) no_target_directory=true;; + + --version) echo "$0 $scriptversion"; exit $?;; + + --) shift + break;; + + -*) echo "$0: invalid option: $1" >&2 + exit 1;; + + *) break;; + esac + shift +done + +if test $# -ne 0 && test -z "$dir_arg$dst_arg"; then + # When -d is used, all remaining arguments are directories to create. + # When -t is used, the destination is already specified. + # Otherwise, the last argument is the destination. Remove it from $@. + for arg + do + if test -n "$dst_arg"; then + # $@ is not empty: it contains at least $arg. + set fnord "$@" "$dst_arg" + shift # fnord + fi + shift # arg + dst_arg=$arg + # Protect names problematic for `test' and other utilities. + case $dst_arg in + -* | [=\(\)!]) dst_arg=./$dst_arg;; + esac + done +fi + +if test $# -eq 0; then + if test -z "$dir_arg"; then + echo "$0: no input file specified." >&2 + exit 1 + fi + # It's OK to call `install-sh -d' without argument. + # This can happen when creating conditional directories. + exit 0 +fi + +if test -z "$dir_arg"; then + do_exit='(exit $ret); exit $ret' + trap "ret=129; $do_exit" 1 + trap "ret=130; $do_exit" 2 + trap "ret=141; $do_exit" 13 + trap "ret=143; $do_exit" 15 + + # Set umask so as not to create temps with too-generous modes. + # However, 'strip' requires both read and write access to temps. + case $mode in + # Optimize common cases. + *644) cp_umask=133;; + *755) cp_umask=22;; + + *[0-7]) + if test -z "$stripcmd"; then + u_plus_rw= + else + u_plus_rw='% 200' + fi + cp_umask=`expr '(' 777 - $mode % 1000 ')' $u_plus_rw`;; + *) + if test -z "$stripcmd"; then + u_plus_rw= + else + u_plus_rw=,u+rw + fi + cp_umask=$mode$u_plus_rw;; + esac +fi + +for src +do + # Protect names problematic for `test' and other utilities. + case $src in + -* | [=\(\)!]) src=./$src;; + esac + + if test -n "$dir_arg"; then + dst=$src + dstdir=$dst + test -d "$dstdir" + dstdir_status=$? + else + + # Waiting for this to be detected by the "$cpprog $src $dsttmp" command + # might cause directories to be created, which would be especially bad + # if $src (and thus $dsttmp) contains '*'. + if test ! -f "$src" && test ! -d "$src"; then + echo "$0: $src does not exist." >&2 + exit 1 + fi + + if test -z "$dst_arg"; then + echo "$0: no destination specified." >&2 + exit 1 + fi + dst=$dst_arg + + # If destination is a directory, append the input filename; won't work + # if double slashes aren't ignored. + if test -d "$dst"; then + if test -n "$no_target_directory"; then + echo "$0: $dst_arg: Is a directory" >&2 + exit 1 + fi + dstdir=$dst + dst=$dstdir/`basename "$src"` + dstdir_status=0 + else + # Prefer dirname, but fall back on a substitute if dirname fails. + dstdir=` + (dirname "$dst") 2>/dev/null || + expr X"$dst" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ + X"$dst" : 'X\(//\)[^/]' \| \ + X"$dst" : 'X\(//\)$' \| \ + X"$dst" : 'X\(/\)' \| . 2>/dev/null || + echo X"$dst" | + sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ + s//\1/ + q + } + /^X\(\/\/\)[^/].*/{ + s//\1/ + q + } + /^X\(\/\/\)$/{ + s//\1/ + q + } + /^X\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q' + ` + + test -d "$dstdir" + dstdir_status=$? + fi + fi + + obsolete_mkdir_used=false + + if test $dstdir_status != 0; then + case $posix_mkdir in + '') + # Create intermediate dirs using mode 755 as modified by the umask. + # This is like FreeBSD 'install' as of 1997-10-28. + umask=`umask` + case $stripcmd.$umask in + # Optimize common cases. 
+ *[2367][2367]) mkdir_umask=$umask;; + .*0[02][02] | .[02][02] | .[02]) mkdir_umask=22;; + + *[0-7]) + mkdir_umask=`expr $umask + 22 \ + - $umask % 100 % 40 + $umask % 20 \ + - $umask % 10 % 4 + $umask % 2 + `;; + *) mkdir_umask=$umask,go-w;; + esac + + # With -d, create the new directory with the user-specified mode. + # Otherwise, rely on $mkdir_umask. + if test -n "$dir_arg"; then + mkdir_mode=-m$mode + else + mkdir_mode= + fi + + posix_mkdir=false + case $umask in + *[123567][0-7][0-7]) + # POSIX mkdir -p sets u+wx bits regardless of umask, which + # is incompatible with FreeBSD 'install' when (umask & 300) != 0. + ;; + *) + tmpdir=${TMPDIR-/tmp}/ins$RANDOM-$$ + trap 'ret=$?; rmdir "$tmpdir/d" "$tmpdir" 2>/dev/null; exit $ret' 0 + + if (umask $mkdir_umask && + exec $mkdirprog $mkdir_mode -p -- "$tmpdir/d") >/dev/null 2>&1 + then + if test -z "$dir_arg" || { + # Check for POSIX incompatibilities with -m. + # HP-UX 11.23 and IRIX 6.5 mkdir -m -p sets group- or + # other-writeable bit of parent directory when it shouldn't. + # FreeBSD 6.1 mkdir -m -p sets mode of existing directory. + ls_ld_tmpdir=`ls -ld "$tmpdir"` + case $ls_ld_tmpdir in + d????-?r-*) different_mode=700;; + d????-?--*) different_mode=755;; + *) false;; + esac && + $mkdirprog -m$different_mode -p -- "$tmpdir" && { + ls_ld_tmpdir_1=`ls -ld "$tmpdir"` + test "$ls_ld_tmpdir" = "$ls_ld_tmpdir_1" + } + } + then posix_mkdir=: + fi + rmdir "$tmpdir/d" "$tmpdir" + else + # Remove any dirs left behind by ancient mkdir implementations. + rmdir ./$mkdir_mode ./-p ./-- 2>/dev/null + fi + trap '' 0;; + esac;; + esac + + if + $posix_mkdir && ( + umask $mkdir_umask && + $doit_exec $mkdirprog $mkdir_mode -p -- "$dstdir" + ) + then : + else + + # The umask is ridiculous, or mkdir does not conform to POSIX, + # or it failed possibly due to a race condition. Create the + # directory the slow way, step by step, checking for races as we go. + + case $dstdir in + /*) prefix='/';; + [-=\(\)!]*) prefix='./';; + *) prefix='';; + esac + + eval "$initialize_posix_glob" + + oIFS=$IFS + IFS=/ + $posix_glob set -f + set fnord $dstdir + shift + $posix_glob set +f + IFS=$oIFS + + prefixes= + + for d + do + test X"$d" = X && continue + + prefix=$prefix$d + if test -d "$prefix"; then + prefixes= + else + if $posix_mkdir; then + (umask=$mkdir_umask && + $doit_exec $mkdirprog $mkdir_mode -p -- "$dstdir") && break + # Don't fail if two instances are running concurrently. + test -d "$prefix" || exit 1 + else + case $prefix in + *\'*) qprefix=`echo "$prefix" | sed "s/'/'\\\\\\\\''/g"`;; + *) qprefix=$prefix;; + esac + prefixes="$prefixes '$qprefix'" + fi + fi + prefix=$prefix/ + done + + if test -n "$prefixes"; then + # Don't fail if two instances are running concurrently. + (umask $mkdir_umask && + eval "\$doit_exec \$mkdirprog $prefixes") || + test -d "$dstdir" || exit 1 + obsolete_mkdir_used=true + fi + fi + fi + + if test -n "$dir_arg"; then + { test -z "$chowncmd" || $doit $chowncmd "$dst"; } && + { test -z "$chgrpcmd" || $doit $chgrpcmd "$dst"; } && + { test "$obsolete_mkdir_used$chowncmd$chgrpcmd" = false || + test -z "$chmodcmd" || $doit $chmodcmd $mode "$dst"; } || exit 1 + else + + # Make a couple of temp file names in the proper directory. + dsttmp=$dstdir/_inst.$$_ + rmtmp=$dstdir/_rm.$$_ + + # Trap to clean up those temp files at exit. + trap 'ret=$?; rm -f "$dsttmp" "$rmtmp" && exit $ret' 0 + + # Copy the file name to the temp name. 
+ (umask $cp_umask && $doit_exec $cpprog "$src" "$dsttmp") && + + # and set any options; do chmod last to preserve setuid bits. + # + # If any of these fail, we abort the whole thing. If we want to + # ignore errors from any of these, just make sure not to ignore + # errors from the above "$doit $cpprog $src $dsttmp" command. + # + { test -z "$chowncmd" || $doit $chowncmd "$dsttmp"; } && + { test -z "$chgrpcmd" || $doit $chgrpcmd "$dsttmp"; } && + { test -z "$stripcmd" || $doit $stripcmd "$dsttmp"; } && + { test -z "$chmodcmd" || $doit $chmodcmd $mode "$dsttmp"; } && + + # If -C, don't bother to copy if it wouldn't change the file. + if $copy_on_change && + old=`LC_ALL=C ls -dlL "$dst" 2>/dev/null` && + new=`LC_ALL=C ls -dlL "$dsttmp" 2>/dev/null` && + + eval "$initialize_posix_glob" && + $posix_glob set -f && + set X $old && old=:$2:$4:$5:$6 && + set X $new && new=:$2:$4:$5:$6 && + $posix_glob set +f && + + test "$old" = "$new" && + $cmpprog "$dst" "$dsttmp" >/dev/null 2>&1 + then + rm -f "$dsttmp" + else + # Rename the file to the real destination. + $doit $mvcmd -f "$dsttmp" "$dst" 2>/dev/null || + + # The rename failed, perhaps because mv can't rename something else + # to itself, or perhaps because mv is so ancient that it does not + # support -f. + { + # Now remove or move aside any old file at destination location. + # We try this two ways since rm can't unlink itself on some + # systems and the destination file might be busy for other + # reasons. In this case, the final cleanup might fail but the new + # file should still install successfully. + { + test ! -f "$dst" || + $doit $rmcmd -f "$dst" 2>/dev/null || + { $doit $mvcmd -f "$dst" "$rmtmp" 2>/dev/null && + { $doit $rmcmd -f "$rmtmp" 2>/dev/null; :; } + } || + { echo "$0: cannot unlink or rename $dst" >&2 + (exit 1); exit 1 + } + } && + + # Now rename the file to the real destination. + $doit $mvcmd "$dsttmp" "$dst" + } + fi || exit 1 + + trap '' 0 + fi +done + +# Local variables: +# eval: (add-hook 'write-file-hooks 'time-stamp) +# time-stamp-start: "scriptversion=" +# time-stamp-format: "%:y-%02m-%02d.%02H" +# time-stamp-time-zone: "UTC" +# time-stamp-end: "; # UTC" +# End: diff --git a/backtrace-sys/src/libbacktrace/internal.h b/backtrace-sys/src/libbacktrace/internal.h new file mode 100644 index 000000000..bff8ed470 --- /dev/null +++ b/backtrace-sys/src/libbacktrace/internal.h @@ -0,0 +1,304 @@ +/* internal.h -- Internal header file for stack backtrace library. + Copyright (C) 2012-2018 Free Software Foundation, Inc. + Written by Ian Lance Taylor, Google. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + (1) Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + (2) Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + + (3) The name of the author may not be used to + endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, +INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, +STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING +IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. */ + +#ifndef BACKTRACE_INTERNAL_H +#define BACKTRACE_INTERNAL_H + +/* We assume that and "backtrace.h" have already been + included. */ + +#ifndef GCC_VERSION +# define GCC_VERSION (__GNUC__ * 1000 + __GNUC_MINOR__) +#endif + +#if (GCC_VERSION < 2007) +# define __attribute__(x) +#endif + +#ifndef ATTRIBUTE_UNUSED +# define ATTRIBUTE_UNUSED __attribute__ ((__unused__)) +#endif + +#ifndef ATTRIBUTE_MALLOC +# if (GCC_VERSION >= 2096) +# define ATTRIBUTE_MALLOC __attribute__ ((__malloc__)) +# else +# define ATTRIBUTE_MALLOC +# endif +#endif + +#ifndef HAVE_SYNC_FUNCTIONS + +/* Define out the sync functions. These should never be called if + they are not available. */ + +#define __sync_bool_compare_and_swap(A, B, C) (abort(), 1) +#define __sync_lock_test_and_set(A, B) (abort(), 0) +#define __sync_lock_release(A) abort() + +#endif /* !defined (HAVE_SYNC_FUNCTIONS) */ + +#ifdef HAVE_ATOMIC_FUNCTIONS + +/* We have the atomic builtin functions. */ + +#define backtrace_atomic_load_pointer(p) \ + __atomic_load_n ((p), __ATOMIC_ACQUIRE) +#define backtrace_atomic_load_int(p) \ + __atomic_load_n ((p), __ATOMIC_ACQUIRE) +#define backtrace_atomic_store_pointer(p, v) \ + __atomic_store_n ((p), (v), __ATOMIC_RELEASE) +#define backtrace_atomic_store_size_t(p, v) \ + __atomic_store_n ((p), (v), __ATOMIC_RELEASE) +#define backtrace_atomic_store_int(p, v) \ + __atomic_store_n ((p), (v), __ATOMIC_RELEASE) + +#else /* !defined (HAVE_ATOMIC_FUNCTIONS) */ +#ifdef HAVE_SYNC_FUNCTIONS + +/* We have the sync functions but not the atomic functions. Define + the atomic ones in terms of the sync ones. */ + +extern void *backtrace_atomic_load_pointer (void *); +extern int backtrace_atomic_load_int (int *); +extern void backtrace_atomic_store_pointer (void *, void *); +extern void backtrace_atomic_store_size_t (size_t *, size_t); +extern void backtrace_atomic_store_int (int *, int); + +#else /* !defined (HAVE_SYNC_FUNCTIONS) */ + +/* We have neither the sync nor the atomic functions. These will + never be called. */ + +#define backtrace_atomic_load_pointer(p) (abort(), (void *) NULL) +#define backtrace_atomic_load_int(p) (abort(), 0) +#define backtrace_atomic_store_pointer(p, v) abort() +#define backtrace_atomic_store_size_t(p, v) abort() +#define backtrace_atomic_store_int(p, v) abort() + +#endif /* !defined (HAVE_SYNC_FUNCTIONS) */ +#endif /* !defined (HAVE_ATOMIC_FUNCTIONS) */ + +/* The type of the function that collects file/line information. This + is like backtrace_pcinfo. */ + +typedef int (*fileline) (struct backtrace_state *state, uintptr_t pc, + backtrace_full_callback callback, + backtrace_error_callback error_callback, void *data); + +/* The type of the function that collects symbol information. This is + like backtrace_syminfo. */ + +typedef void (*syminfo) (struct backtrace_state *state, uintptr_t pc, + backtrace_syminfo_callback callback, + backtrace_error_callback error_callback, void *data); + +/* What the backtrace state pointer points to. */ + +struct backtrace_state +{ + /* The name of the executable. 
*/ + const char *filename; + /* Non-zero if threaded. */ + int threaded; + /* The master lock for fileline_fn, fileline_data, syminfo_fn, + syminfo_data, fileline_initialization_failed and everything the + data pointers point to. */ + void *lock; + /* The function that returns file/line information. */ + fileline fileline_fn; + /* The data to pass to FILELINE_FN. */ + void *fileline_data; + /* The function that returns symbol information. */ + syminfo syminfo_fn; + /* The data to pass to SYMINFO_FN. */ + void *syminfo_data; + /* Whether initializing the file/line information failed. */ + int fileline_initialization_failed; + /* The lock for the freelist. */ + int lock_alloc; + /* The freelist when using mmap. */ + struct backtrace_freelist_struct *freelist; +}; + +/* Open a file for reading. Returns -1 on error. If DOES_NOT_EXIST + is not NULL, *DOES_NOT_EXIST will be set to 0 normally and set to 1 + if the file does not exist. If the file does not exist and + DOES_NOT_EXIST is not NULL, the function will return -1 and will + not call ERROR_CALLBACK. On other errors, or if DOES_NOT_EXIST is + NULL, the function will call ERROR_CALLBACK before returning. */ +extern int backtrace_open (const char *filename, + backtrace_error_callback error_callback, + void *data, + int *does_not_exist); + +/* A view of the contents of a file. This supports mmap when + available. A view will remain in memory even after backtrace_close + is called on the file descriptor from which the view was + obtained. */ + +struct backtrace_view +{ + /* The data that the caller requested. */ + const void *data; + /* The base of the view. */ + void *base; + /* The total length of the view. */ + size_t len; +}; + +/* Create a view of SIZE bytes from DESCRIPTOR at OFFSET. Store the + result in *VIEW. Returns 1 on success, 0 on error. */ +extern int backtrace_get_view (struct backtrace_state *state, int descriptor, + off_t offset, size_t size, + backtrace_error_callback error_callback, + void *data, struct backtrace_view *view); + +/* Release a view created by backtrace_get_view. */ +extern void backtrace_release_view (struct backtrace_state *state, + struct backtrace_view *view, + backtrace_error_callback error_callback, + void *data); + +/* Close a file opened by backtrace_open. Returns 1 on success, 0 on + error. */ + +extern int backtrace_close (int descriptor, + backtrace_error_callback error_callback, + void *data); + +/* Sort without using memory. */ + +extern void backtrace_qsort (void *base, size_t count, size_t size, + int (*compar) (const void *, const void *)); + +/* Allocate memory. This is like malloc. If ERROR_CALLBACK is NULL, + this does not report an error, it just returns NULL. */ + +extern void *backtrace_alloc (struct backtrace_state *state, size_t size, + backtrace_error_callback error_callback, + void *data) ATTRIBUTE_MALLOC; + +/* Free memory allocated by backtrace_alloc. If ERROR_CALLBACK is + NULL, this does not report an error. */ + +extern void backtrace_free (struct backtrace_state *state, void *mem, + size_t size, + backtrace_error_callback error_callback, + void *data); + +/* A growable vector of some struct. This is used for more efficient + allocation when we don't know the final size of some group of data + that we want to represent as an array. */ + +struct backtrace_vector +{ + /* The base of the vector. */ + void *base; + /* The number of bytes in the vector. */ + size_t size; + /* The number of bytes available at the current allocation. */ + size_t alc; +}; + +/* Grow VEC by SIZE bytes. 
Return a pointer to the newly allocated + bytes. Note that this may move the entire vector to a new memory + location. Returns NULL on failure. */ + +extern void *backtrace_vector_grow (struct backtrace_state *state, size_t size, + backtrace_error_callback error_callback, + void *data, + struct backtrace_vector *vec); + +/* Finish the current allocation on VEC. Prepare to start a new + allocation. The finished allocation will never be freed. Returns + a pointer to the base of the finished entries, or NULL on + failure. */ + +extern void* backtrace_vector_finish (struct backtrace_state *state, + struct backtrace_vector *vec, + backtrace_error_callback error_callback, + void *data); + +/* Release any extra space allocated for VEC. This may change + VEC->base. Returns 1 on success, 0 on failure. */ + +extern int backtrace_vector_release (struct backtrace_state *state, + struct backtrace_vector *vec, + backtrace_error_callback error_callback, + void *data); + +/* Read initial debug data from a descriptor, and set the + fileline_data, syminfo_fn, and syminfo_data fields of STATE. + Return the fileln_fn field in *FILELN_FN--this is done this way so + that the synchronization code is only implemented once. This is + called after the descriptor has first been opened. It will close + the descriptor if it is no longer needed. Returns 1 on success, 0 + on error. There will be multiple implementations of this function, + for different file formats. Each system will compile the + appropriate one. */ + +extern int backtrace_initialize (struct backtrace_state *state, + const char *filename, + int descriptor, + backtrace_error_callback error_callback, + void *data, + fileline *fileline_fn); + +/* Add file/line information for a DWARF module. */ + +extern int backtrace_dwarf_add (struct backtrace_state *state, + uintptr_t base_address, + const unsigned char* dwarf_info, + size_t dwarf_info_size, + const unsigned char *dwarf_line, + size_t dwarf_line_size, + const unsigned char *dwarf_abbrev, + size_t dwarf_abbrev_size, + const unsigned char *dwarf_ranges, + size_t dwarf_range_size, + const unsigned char *dwarf_str, + size_t dwarf_str_size, + int is_bigendian, + backtrace_error_callback error_callback, + void *data, fileline *fileline_fn); + +/* A test-only hook for elf_uncompress_zdebug. */ + +extern int backtrace_uncompress_zdebug (struct backtrace_state *, + const unsigned char *compressed, + size_t compressed_size, + backtrace_error_callback, void *data, + unsigned char **uncompressed, + size_t *uncompressed_size); + +#endif diff --git a/backtrace-sys/src/libbacktrace/ltmain.sh b/backtrace-sys/src/libbacktrace/ltmain.sh new file mode 100755 index 000000000..93793bfc2 --- /dev/null +++ b/backtrace-sys/src/libbacktrace/ltmain.sh @@ -0,0 +1,7874 @@ +# Generated from ltmain.m4sh. + +# ltmain.sh (GNU libtool) 2.2.4 +# Written by Gordon Matzigkeit , 1996 + +# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007 2008 Free Software Foundation, Inc. +# This is free software; see the source for copying conditions. There is NO +# warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. + +# GNU Libtool is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. 
+# +# As a special exception to the GNU General Public License, +# if you distribute this file as part of a program or library that +# is built using GNU Libtool, you may include this file under the +# same distribution terms that you use for the rest of that program. +# +# GNU Libtool is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with GNU Libtool; see the file COPYING. If not, a copy +# can be downloaded from http://www.gnu.org/licenses/gpl.html, +# or obtained by writing to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. + +# Usage: $progname [OPTION]... [MODE-ARG]... +# +# Provide generalized library-building support services. +# +# --config show all configuration variables +# --debug enable verbose shell tracing +# -n, --dry-run display commands without modifying any files +# --features display basic configuration information and exit +# --mode=MODE use operation mode MODE +# --preserve-dup-deps don't remove duplicate dependency libraries +# --quiet, --silent don't print informational messages +# --tag=TAG use configuration variables from tag TAG +# -v, --verbose print informational messages (default) +# --version print version information +# -h, --help print short or long help message +# +# MODE must be one of the following: +# +# clean remove files from the build directory +# compile compile a source file into a libtool object +# execute automatically set library path, then run a program +# finish complete the installation of libtool libraries +# install install libraries or executables +# link create a library or an executable +# uninstall remove libraries from an installed directory +# +# MODE-ARGS vary depending on the MODE. +# Try `$progname --help --mode=MODE' for a more detailed description of MODE. +# +# When reporting a bug, please describe a test case to reproduce it and +# include the following information: +# +# host-triplet: $host +# shell: $SHELL +# compiler: $LTCC +# compiler flags: $LTCFLAGS +# linker: $LD (gnu? $with_gnu_ld) +# $progname: (GNU libtool) 2.2.4 +# automake: $automake_version +# autoconf: $autoconf_version +# +# Report bugs to . + +PROGRAM=ltmain.sh +PACKAGE=libtool +VERSION=2.2.4 +TIMESTAMP="" +package_revision=1.2976 + +# Be Bourne compatible +if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then + emulate sh + NULLCMD=: + # Zsh 3.x and 4.x performs word splitting on ${1+"$@"}, which + # is contrary to our usage. Disable this feature. + alias -g '${1+"$@"}'='"$@"' + setopt NO_GLOB_SUBST +else + case `(set -o) 2>/dev/null` in *posix*) set -o posix;; esac +fi +BIN_SH=xpg4; export BIN_SH # for Tru64 +DUALCASE=1; export DUALCASE # for MKS sh + +# NLS nuisances: We save the old values to restore during execute mode. +# Only set LANG and LC_ALL to C if already set. +# These must not be set unconditionally because not all systems understand +# e.g. LANG=C (notably SCO). 
+lt_user_locale= +lt_safe_locale= +for lt_var in LANG LANGUAGE LC_ALL LC_CTYPE LC_COLLATE LC_MESSAGES +do + eval "if test \"\${$lt_var+set}\" = set; then + save_$lt_var=\$$lt_var + $lt_var=C + export $lt_var + lt_user_locale=\"$lt_var=\\\$save_\$lt_var; \$lt_user_locale\" + lt_safe_locale=\"$lt_var=C; \$lt_safe_locale\" + fi" +done + +$lt_unset CDPATH + + + + + +: ${CP="cp -f"} +: ${ECHO="echo"} +: ${EGREP="/usr/bin/grep -E"} +: ${FGREP="/usr/bin/grep -F"} +: ${GREP="/usr/bin/grep"} +: ${LN_S="ln -s"} +: ${MAKE="make"} +: ${MKDIR="mkdir"} +: ${MV="mv -f"} +: ${RM="rm -f"} +: ${SED="/opt/local/bin/gsed"} +: ${SHELL="${CONFIG_SHELL-/bin/sh}"} +: ${Xsed="$SED -e 1s/^X//"} + +# Global variables: +EXIT_SUCCESS=0 +EXIT_FAILURE=1 +EXIT_MISMATCH=63 # $? = 63 is used to indicate version mismatch to missing. +EXIT_SKIP=77 # $? = 77 is used to indicate a skipped test to automake. + +exit_status=$EXIT_SUCCESS + +# Make sure IFS has a sensible default +lt_nl=' +' +IFS=" $lt_nl" + +dirname="s,/[^/]*$,," +basename="s,^.*/,," + +# func_dirname_and_basename file append nondir_replacement +# perform func_basename and func_dirname in a single function +# call: +# dirname: Compute the dirname of FILE. If nonempty, +# add APPEND to the result, otherwise set result +# to NONDIR_REPLACEMENT. +# value returned in "$func_dirname_result" +# basename: Compute filename of FILE. +# value retuned in "$func_basename_result" +# Implementation must be kept synchronized with func_dirname +# and func_basename. For efficiency, we do not delegate to +# those functions but instead duplicate the functionality here. +func_dirname_and_basename () +{ + # Extract subdirectory from the argument. + func_dirname_result=`$ECHO "X${1}" | $Xsed -e "$dirname"` + if test "X$func_dirname_result" = "X${1}"; then + func_dirname_result="${3}" + else + func_dirname_result="$func_dirname_result${2}" + fi + func_basename_result=`$ECHO "X${1}" | $Xsed -e "$basename"` +} + +# Generated shell functions inserted here. + +# Work around backward compatibility issue on IRIX 6.5. On IRIX 6.4+, sh +# is ksh but when the shell is invoked as "sh" and the current value of +# the _XPG environment variable is not equal to 1 (one), the special +# positional parameter $0, within a function call, is the name of the +# function. +progpath="$0" + +# The name of this program: +# In the unlikely event $progname began with a '-', it would play havoc with +# func_echo (imagine progname=-n), so we prepend ./ in that case: +func_dirname_and_basename "$progpath" +progname=$func_basename_result +case $progname in + -*) progname=./$progname ;; +esac + +# Make sure we have an absolute path for reexecution: +case $progpath in + [\\/]*|[A-Za-z]:\\*) ;; + *[\\/]*) + progdir=$func_dirname_result + progdir=`cd "$progdir" && pwd` + progpath="$progdir/$progname" + ;; + *) + save_IFS="$IFS" + IFS=: + for progdir in $PATH; do + IFS="$save_IFS" + test -x "$progdir/$progname" && break + done + IFS="$save_IFS" + test -n "$progdir" || progdir=`pwd` + progpath="$progdir/$progname" + ;; +esac + +# Sed substitution that helps us do robust quoting. It backslashifies +# metacharacters that are still active within double-quoted strings. +Xsed="${SED}"' -e 1s/^X//' +sed_quote_subst='s/\([`"$\\]\)/\\\1/g' + +# Same as above, but do not quote variable references. +double_quote_subst='s/\(["`\\]\)/\\\1/g' + +# Re-`\' parameter expansions in output of double_quote_subst that were +# `\'-ed in input to the same. 
If an odd number of `\' preceded a '$' +# in input to double_quote_subst, that '$' was protected from expansion. +# Since each input `\' is now two `\'s, look for any number of runs of +# four `\'s followed by two `\'s and then a '$'. `\' that '$'. +bs='\\' +bs2='\\\\' +bs4='\\\\\\\\' +dollar='\$' +sed_double_backslash="\ + s/$bs4/&\\ +/g + s/^$bs2$dollar/$bs&/ + s/\\([^$bs]\\)$bs2$dollar/\\1$bs2$bs$dollar/g + s/\n//g" + +# Standard options: +opt_dry_run=false +opt_help=false +opt_quiet=false +opt_verbose=false +opt_warning=: + +# func_echo arg... +# Echo program name prefixed message, along with the current mode +# name if it has been set yet. +func_echo () +{ + $ECHO "$progname${mode+: }$mode: $*" +} + +# func_verbose arg... +# Echo program name prefixed message in verbose mode only. +func_verbose () +{ + $opt_verbose && func_echo ${1+"$@"} + + # A bug in bash halts the script if the last line of a function + # fails when set -e is in force, so we need another command to + # work around that: + : +} + +# func_error arg... +# Echo program name prefixed message to standard error. +func_error () +{ + $ECHO "$progname${mode+: }$mode: "${1+"$@"} 1>&2 +} + +# func_warning arg... +# Echo program name prefixed warning message to standard error. +func_warning () +{ + $opt_warning && $ECHO "$progname${mode+: }$mode: warning: "${1+"$@"} 1>&2 + + # bash bug again: + : +} + +# func_fatal_error arg... +# Echo program name prefixed message to standard error, and exit. +func_fatal_error () +{ + func_error ${1+"$@"} + exit $EXIT_FAILURE +} + +# func_fatal_help arg... +# Echo program name prefixed message to standard error, followed by +# a help hint, and exit. +func_fatal_help () +{ + func_error ${1+"$@"} + func_fatal_error "$help" +} +help="Try \`$progname --help' for more information." ## default + + +# func_grep expression filename +# Check whether EXPRESSION matches any line of FILENAME, without output. +func_grep () +{ + $GREP "$1" "$2" >/dev/null 2>&1 +} + + +# func_mkdir_p directory-path +# Make sure the entire path to DIRECTORY-PATH is available. +func_mkdir_p () +{ + my_directory_path="$1" + my_dir_list= + + if test -n "$my_directory_path" && test "$opt_dry_run" != ":"; then + + # Protect directory names starting with `-' + case $my_directory_path in + -*) my_directory_path="./$my_directory_path" ;; + esac + + # While some portion of DIR does not yet exist... + while test ! -d "$my_directory_path"; do + # ...make a list in topmost first order. Use a colon delimited + # list incase some portion of path contains whitespace. + my_dir_list="$my_directory_path:$my_dir_list" + + # If the last portion added has no slash in it, the list is done + case $my_directory_path in */*) ;; *) break ;; esac + + # ...otherwise throw away the child directory and loop + my_directory_path=`$ECHO "X$my_directory_path" | $Xsed -e "$dirname"` + done + my_dir_list=`$ECHO "X$my_dir_list" | $Xsed -e 's,:*$,,'` + + save_mkdir_p_IFS="$IFS"; IFS=':' + for my_dir in $my_dir_list; do + IFS="$save_mkdir_p_IFS" + # mkdir can fail with a `File exist' error if two processes + # try to create one of the directories concurrently. Don't + # stop in that case! + $MKDIR "$my_dir" 2>/dev/null || : + done + IFS="$save_mkdir_p_IFS" + + # Bail out if we (or some other process) failed to create a directory. 
+ test -d "$my_directory_path" || \ + func_fatal_error "Failed to create \`$1'" + fi +} + + +# func_mktempdir [string] +# Make a temporary directory that won't clash with other running +# libtool processes, and avoids race conditions if possible. If +# given, STRING is the basename for that directory. +func_mktempdir () +{ + my_template="${TMPDIR-/tmp}/${1-$progname}" + + if test "$opt_dry_run" = ":"; then + # Return a directory name, but don't create it in dry-run mode + my_tmpdir="${my_template}-$$" + else + + # If mktemp works, use that first and foremost + my_tmpdir=`mktemp -d "${my_template}-XXXXXXXX" 2>/dev/null` + + if test ! -d "$my_tmpdir"; then + # Failing that, at least try and use $RANDOM to avoid a race + my_tmpdir="${my_template}-${RANDOM-0}$$" + + save_mktempdir_umask=`umask` + umask 0077 + $MKDIR "$my_tmpdir" + umask $save_mktempdir_umask + fi + + # If we're not in dry-run mode, bomb out on failure + test -d "$my_tmpdir" || \ + func_fatal_error "cannot create temporary directory \`$my_tmpdir'" + fi + + $ECHO "X$my_tmpdir" | $Xsed +} + + +# func_quote_for_eval arg +# Aesthetically quote ARG to be evaled later. +# This function returns two values: FUNC_QUOTE_FOR_EVAL_RESULT +# is double-quoted, suitable for a subsequent eval, whereas +# FUNC_QUOTE_FOR_EVAL_UNQUOTED_RESULT has merely all characters +# which are still active within double quotes backslashified. +func_quote_for_eval () +{ + case $1 in + *[\\\`\"\$]*) + func_quote_for_eval_unquoted_result=`$ECHO "X$1" | $Xsed -e "$sed_quote_subst"` ;; + *) + func_quote_for_eval_unquoted_result="$1" ;; + esac + + case $func_quote_for_eval_unquoted_result in + # Double-quote args containing shell metacharacters to delay + # word splitting, command substitution and and variable + # expansion for a subsequent eval. + # Many Bourne shells cannot handle close brackets correctly + # in scan sets, so we specify it separately. + *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") + func_quote_for_eval_result="\"$func_quote_for_eval_unquoted_result\"" + ;; + *) + func_quote_for_eval_result="$func_quote_for_eval_unquoted_result" + esac +} + + +# func_quote_for_expand arg +# Aesthetically quote ARG to be evaled later; same as above, +# but do not quote variable references. +func_quote_for_expand () +{ + case $1 in + *[\\\`\"]*) + my_arg=`$ECHO "X$1" | $Xsed \ + -e "$double_quote_subst" -e "$sed_double_backslash"` ;; + *) + my_arg="$1" ;; + esac + + case $my_arg in + # Double-quote args containing shell metacharacters to delay + # word splitting and command substitution for a subsequent eval. + # Many Bourne shells cannot handle close brackets correctly + # in scan sets, so we specify it separately. + *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") + my_arg="\"$my_arg\"" + ;; + esac + + func_quote_for_expand_result="$my_arg" +} + + +# func_show_eval cmd [fail_exp] +# Unless opt_silent is true, then output CMD. Then, if opt_dryrun is +# not true, evaluate CMD. If the evaluation of CMD fails, and FAIL_EXP +# is given, then evaluate it. +func_show_eval () +{ + my_cmd="$1" + my_fail_exp="${2-:}" + + ${opt_silent-false} || { + func_quote_for_expand "$my_cmd" + eval "func_echo $func_quote_for_expand_result" + } + + if ${opt_dry_run-false}; then :; else + eval "$my_cmd" + my_status=$? + if test "$my_status" -eq 0; then :; else + eval "(exit $my_status); $my_fail_exp" + fi + fi +} + + +# func_show_eval_locale cmd [fail_exp] +# Unless opt_silent is true, then output CMD. Then, if opt_dryrun is +# not true, evaluate CMD. 
If the evaluation of CMD fails, and FAIL_EXP +# is given, then evaluate it. Use the saved locale for evaluation. +func_show_eval_locale () +{ + my_cmd="$1" + my_fail_exp="${2-:}" + + ${opt_silent-false} || { + func_quote_for_expand "$my_cmd" + eval "func_echo $func_quote_for_expand_result" + } + + if ${opt_dry_run-false}; then :; else + eval "$lt_user_locale + $my_cmd" + my_status=$? + eval "$lt_safe_locale" + if test "$my_status" -eq 0; then :; else + eval "(exit $my_status); $my_fail_exp" + fi + fi +} + + + + + +# func_version +# Echo version message to standard output and exit. +func_version () +{ + $SED -n '/^# '$PROGRAM' (GNU /,/# warranty; / { + s/^# // + s/^# *$// + s/\((C)\)[ 0-9,-]*\( [1-9][0-9]*\)/\1\2/ + p + }' < "$progpath" + exit $? +} + +# func_usage +# Echo short help message to standard output and exit. +func_usage () +{ + $SED -n '/^# Usage:/,/# -h/ { + s/^# // + s/^# *$// + s/\$progname/'$progname'/ + p + }' < "$progpath" + $ECHO + $ECHO "run \`$progname --help | more' for full usage" + exit $? +} + +# func_help +# Echo long help message to standard output and exit. +func_help () +{ + $SED -n '/^# Usage:/,/# Report bugs to/ { + s/^# // + s/^# *$// + s*\$progname*'$progname'* + s*\$host*'"$host"'* + s*\$SHELL*'"$SHELL"'* + s*\$LTCC*'"$LTCC"'* + s*\$LTCFLAGS*'"$LTCFLAGS"'* + s*\$LD*'"$LD"'* + s/\$with_gnu_ld/'"$with_gnu_ld"'/ + s/\$automake_version/'"`(automake --version) 2>/dev/null |$SED 1q`"'/ + s/\$autoconf_version/'"`(autoconf --version) 2>/dev/null |$SED 1q`"'/ + p + }' < "$progpath" + exit $? +} + +# func_missing_arg argname +# Echo program name prefixed message to standard error and set global +# exit_cmd. +func_missing_arg () +{ + func_error "missing argument for $1" + exit_cmd=exit +} + +exit_cmd=: + + + + + +# Check that we have a working $ECHO. +if test "X$1" = X--no-reexec; then + # Discard the --no-reexec flag, and continue. + shift +elif test "X$1" = X--fallback-echo; then + # Avoid inline document here, it may be left over + : +elif test "X`{ $ECHO '\t'; } 2>/dev/null`" = 'X\t'; then + # Yippee, $ECHO works! + : +else + # Restart under the correct shell, and then maybe $ECHO will work. + exec $SHELL "$progpath" --no-reexec ${1+"$@"} +fi + +if test "X$1" = X--fallback-echo; then + # used as fallback echo + shift + cat </dev/null 2>&1; then + taglist="$taglist $tagname" + + # Evaluate the configuration. Be careful to quote the path + # and the sed script, to avoid splitting on whitespace, but + # also don't use non-portable quotes within backquotes within + # quotes we have to do it in 2 steps: + extractedcf=`$SED -n -e "$sed_extractcf" < "$progpath"` + eval "$extractedcf" + else + func_error "ignoring unknown tag $tagname" + fi + ;; + esac +} + +# Parse options once, thoroughly. This comes as soon as possible in +# the script to make things like `libtool --version' happen quickly. 
+{ + + # Shorthand for --mode=foo, only valid as the first argument + case $1 in + clean|clea|cle|cl) + shift; set dummy --mode clean ${1+"$@"}; shift + ;; + compile|compil|compi|comp|com|co|c) + shift; set dummy --mode compile ${1+"$@"}; shift + ;; + execute|execut|execu|exec|exe|ex|e) + shift; set dummy --mode execute ${1+"$@"}; shift + ;; + finish|finis|fini|fin|fi|f) + shift; set dummy --mode finish ${1+"$@"}; shift + ;; + install|instal|insta|inst|ins|in|i) + shift; set dummy --mode install ${1+"$@"}; shift + ;; + link|lin|li|l) + shift; set dummy --mode link ${1+"$@"}; shift + ;; + uninstall|uninstal|uninsta|uninst|unins|unin|uni|un|u) + shift; set dummy --mode uninstall ${1+"$@"}; shift + ;; + esac + + # Parse non-mode specific arguments: + while test "$#" -gt 0; do + opt="$1" + shift + + case $opt in + --config) func_config ;; + + --debug) preserve_args="$preserve_args $opt" + func_echo "enabling shell trace mode" + opt_debug='set -x' + $opt_debug + ;; + + -dlopen) test "$#" -eq 0 && func_missing_arg "$opt" && break + execute_dlfiles="$execute_dlfiles $1" + shift + ;; + + --dry-run | -n) opt_dry_run=: ;; + --features) func_features ;; + --finish) mode="finish" ;; + + --mode) test "$#" -eq 0 && func_missing_arg "$opt" && break + case $1 in + # Valid mode arguments: + clean) ;; + compile) ;; + execute) ;; + finish) ;; + install) ;; + link) ;; + relink) ;; + uninstall) ;; + + # Catch anything else as an error + *) func_error "invalid argument for $opt" + exit_cmd=exit + break + ;; + esac + + mode="$1" + shift + ;; + + --preserve-dup-deps) + opt_duplicate_deps=: ;; + + --quiet|--silent) preserve_args="$preserve_args $opt" + opt_silent=: + ;; + + --verbose| -v) preserve_args="$preserve_args $opt" + opt_silent=false + ;; + + --tag) test "$#" -eq 0 && func_missing_arg "$opt" && break + preserve_args="$preserve_args $opt $1" + func_enable_tag "$1" # tagname is set here + shift + ;; + + # Separate optargs to long options: + -dlopen=*|--mode=*|--tag=*) + func_opt_split "$opt" + set dummy "$func_opt_split_opt" "$func_opt_split_arg" ${1+"$@"} + shift + ;; + + -\?|-h) func_usage ;; + --help) opt_help=: ;; + --version) func_version ;; + + -*) func_fatal_help "unrecognized option \`$opt'" ;; + + *) nonopt="$opt" + break + ;; + esac + done + + + case $host in + *cygwin* | *mingw* | *pw32*) + # don't eliminate duplications in $postdeps and $predeps + opt_duplicate_compiler_generated_deps=: + ;; + *) + opt_duplicate_compiler_generated_deps=$opt_duplicate_deps + ;; + esac + + # Having warned about all mis-specified options, bail out if + # anything was wrong. + $exit_cmd $EXIT_FAILURE +} + +# func_check_version_match +# Ensure that we are using m4 macros, and libtool script from the same +# release of libtool. +func_check_version_match () +{ + if test "$package_revision" != "$macro_revision"; then + if test "$VERSION" != "$macro_version"; then + if test -z "$macro_version"; then + cat >&2 <<_LT_EOF +$progname: Version mismatch error. This is $PACKAGE $VERSION, but the +$progname: definition of this LT_INIT comes from an older release. +$progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION +$progname: and run autoconf again. +_LT_EOF + else + cat >&2 <<_LT_EOF +$progname: Version mismatch error. This is $PACKAGE $VERSION, but the +$progname: definition of this LT_INIT comes from $PACKAGE $macro_version. +$progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION +$progname: and run autoconf again. 
+_LT_EOF + fi + else + cat >&2 <<_LT_EOF +$progname: Version mismatch error. This is $PACKAGE $VERSION, revision $package_revision, +$progname: but the definition of this LT_INIT comes from revision $macro_revision. +$progname: You should recreate aclocal.m4 with macros from revision $package_revision +$progname: of $PACKAGE $VERSION and run autoconf again. +_LT_EOF + fi + + exit $EXIT_MISMATCH + fi +} + + +## ----------- ## +## Main. ## +## ----------- ## + +$opt_help || { + # Sanity checks first: + func_check_version_match + + if test "$build_libtool_libs" != yes && test "$build_old_libs" != yes; then + func_fatal_configuration "not configured to build any kind of library" + fi + + test -z "$mode" && func_fatal_error "error: you must specify a MODE." + + + # Darwin sucks + eval std_shrext=\"$shrext_cmds\" + + + # Only execute mode is allowed to have -dlopen flags. + if test -n "$execute_dlfiles" && test "$mode" != execute; then + func_error "unrecognized option \`-dlopen'" + $ECHO "$help" 1>&2 + exit $EXIT_FAILURE + fi + + # Change the help message to a mode-specific one. + generic_help="$help" + help="Try \`$progname --help --mode=$mode' for more information." +} + + +# func_lalib_p file +# True iff FILE is a libtool `.la' library or `.lo' object file. +# This function is only a basic sanity check; it will hardly flush out +# determined imposters. +func_lalib_p () +{ + $SED -e 4q "$1" 2>/dev/null \ + | $GREP "^# Generated by .*$PACKAGE" > /dev/null 2>&1 +} + +# func_lalib_unsafe_p file +# True iff FILE is a libtool `.la' library or `.lo' object file. +# This function implements the same check as func_lalib_p without +# resorting to external programs. To this end, it redirects stdin and +# closes it afterwards, without saving the original file descriptor. +# As a safety measure, use it only where a negative result would be +# fatal anyway. Works if `file' does not exist. +func_lalib_unsafe_p () +{ + lalib_p=no + if test -r "$1" && exec 5<&0 <"$1"; then + for lalib_p_l in 1 2 3 4 + do + read lalib_p_line + case "$lalib_p_line" in + \#\ Generated\ by\ *$PACKAGE* ) lalib_p=yes; break;; + esac + done + exec 0<&5 5<&- + fi + test "$lalib_p" = yes +} + +# func_ltwrapper_script_p file +# True iff FILE is a libtool wrapper script +# This function is only a basic sanity check; it will hardly flush out +# determined imposters. +func_ltwrapper_script_p () +{ + func_lalib_p "$1" +} + +# func_ltwrapper_executable_p file +# True iff FILE is a libtool wrapper executable +# This function is only a basic sanity check; it will hardly flush out +# determined imposters. +func_ltwrapper_executable_p () +{ + func_ltwrapper_exec_suffix= + case $1 in + *.exe) ;; + *) func_ltwrapper_exec_suffix=.exe ;; + esac + $GREP "$magic_exe" "$1$func_ltwrapper_exec_suffix" >/dev/null 2>&1 +} + +# func_ltwrapper_scriptname file +# Assumes file is an ltwrapper_executable +# uses $file to determine the appropriate filename for a +# temporary ltwrapper_script. +func_ltwrapper_scriptname () +{ + func_ltwrapper_scriptname_result="" + if func_ltwrapper_executable_p "$1"; then + func_dirname_and_basename "$1" "" "." + func_stripname '' '.exe' "$func_basename_result" + func_ltwrapper_scriptname_result="$func_dirname_result/$objdir/${func_stripname_result}_ltshwrapper" + fi +} + +# func_ltwrapper_p file +# True iff FILE is a libtool wrapper script or wrapper executable +# This function is only a basic sanity check; it will hardly flush out +# determined imposters. 
+func_ltwrapper_p () +{ + func_ltwrapper_script_p "$1" || func_ltwrapper_executable_p "$1" +} + + +# func_execute_cmds commands fail_cmd +# Execute tilde-delimited COMMANDS. +# If FAIL_CMD is given, eval that upon failure. +# FAIL_CMD may read-access the current command in variable CMD! +func_execute_cmds () +{ + $opt_debug + save_ifs=$IFS; IFS='~' + for cmd in $1; do + IFS=$save_ifs + eval cmd=\"$cmd\" + func_show_eval "$cmd" "${2-:}" + done + IFS=$save_ifs +} + + +# func_source file +# Source FILE, adding directory component if necessary. +# Note that it is not necessary on cygwin/mingw to append a dot to +# FILE even if both FILE and FILE.exe exist: automatic-append-.exe +# behavior happens only for exec(3), not for open(2)! Also, sourcing +# `FILE.' does not work on cygwin managed mounts. +func_source () +{ + $opt_debug + case $1 in + */* | *\\*) . "$1" ;; + *) . "./$1" ;; + esac +} + + +# func_infer_tag arg +# Infer tagged configuration to use if any are available and +# if one wasn't chosen via the "--tag" command line option. +# Only attempt this if the compiler in the base compile +# command doesn't match the default compiler. +# arg is usually of the form 'gcc ...' +func_infer_tag () +{ + $opt_debug + if test -n "$available_tags" && test -z "$tagname"; then + CC_quoted= + for arg in $CC; do + func_quote_for_eval "$arg" + CC_quoted="$CC_quoted $func_quote_for_eval_result" + done + case $@ in + # Blanks in the command may have been stripped by the calling shell, + # but not from the CC environment variable when configure was run. + " $CC "* | "$CC "* | " `$ECHO $CC` "* | "`$ECHO $CC` "* | " $CC_quoted"* | "$CC_quoted "* | " `$ECHO $CC_quoted` "* | "`$ECHO $CC_quoted` "*) ;; + # Blanks at the start of $base_compile will cause this to fail + # if we don't check for them as well. + *) + for z in $available_tags; do + if $GREP "^# ### BEGIN LIBTOOL TAG CONFIG: $z$" < "$progpath" > /dev/null; then + # Evaluate the configuration. + eval "`${SED} -n -e '/^# ### BEGIN LIBTOOL TAG CONFIG: '$z'$/,/^# ### END LIBTOOL TAG CONFIG: '$z'$/p' < $progpath`" + CC_quoted= + for arg in $CC; do + # Double-quote args containing other shell metacharacters. + func_quote_for_eval "$arg" + CC_quoted="$CC_quoted $func_quote_for_eval_result" + done + case "$@ " in + " $CC "* | "$CC "* | " `$ECHO $CC` "* | "`$ECHO $CC` "* | " $CC_quoted"* | "$CC_quoted "* | " `$ECHO $CC_quoted` "* | "`$ECHO $CC_quoted` "*) + # The compiler in the base compile command matches + # the one in the tagged configuration. + # Assume this is the tagged configuration we want. + tagname=$z + break + ;; + esac + fi + done + # If $tagname still isn't set, then no tagged configuration + # was found and let the user know that the "--tag" command + # line option must be used. + if test -z "$tagname"; then + func_echo "unable to infer tagged configuration" + func_fatal_error "specify a tag with \`--tag'" +# else +# func_verbose "using $tagname tagged configuration" + fi + ;; + esac + fi +} + + + +# func_write_libtool_object output_name pic_name nonpic_name +# Create a libtool object file (analogous to a ".la" file), +# but don't create it if we're doing a dry run. 
+func_write_libtool_object () +{ + write_libobj=${1} + if test "$build_libtool_libs" = yes; then + write_lobj=\'${2}\' + else + write_lobj=none + fi + + if test "$build_old_libs" = yes; then + write_oldobj=\'${3}\' + else + write_oldobj=none + fi + + $opt_dry_run || { + cat >${write_libobj}T <?"'"'"' &()|`$[]' \ + && func_warning "libobj name \`$libobj' may not contain shell special characters." + func_dirname_and_basename "$obj" "/" "" + objname="$func_basename_result" + xdir="$func_dirname_result" + lobj=${xdir}$objdir/$objname + + test -z "$base_compile" && \ + func_fatal_help "you must specify a compilation command" + + # Delete any leftover library objects. + if test "$build_old_libs" = yes; then + removelist="$obj $lobj $libobj ${libobj}T" + else + removelist="$lobj $libobj ${libobj}T" + fi + + # On Cygwin there's no "real" PIC flag so we must build both object types + case $host_os in + cygwin* | mingw* | pw32* | os2*) + pic_mode=default + ;; + esac + if test "$pic_mode" = no && test "$deplibs_check_method" != pass_all; then + # non-PIC code in shared libraries is not supported + pic_mode=default + fi + + # Calculate the filename of the output object if compiler does + # not support -o with -c + if test "$compiler_c_o" = no; then + output_obj=`$ECHO "X$srcfile" | $Xsed -e 's%^.*/%%' -e 's%\.[^.]*$%%'`.${objext} + lockfile="$output_obj.lock" + else + output_obj= + need_locks=no + lockfile= + fi + + # Lock this critical section if it is needed + # We use this script file to make the link, it avoids creating a new file + if test "$need_locks" = yes; then + until $opt_dry_run || ln "$progpath" "$lockfile" 2>/dev/null; do + func_echo "Waiting for $lockfile to be removed" + sleep 2 + done + elif test "$need_locks" = warn; then + if test -f "$lockfile"; then + $ECHO "\ +*** ERROR, $lockfile exists and contains: +`cat $lockfile 2>/dev/null` + +This indicates that another process is trying to use the same +temporary object file, and libtool could not work around it because +your compiler does not support \`-c' and \`-o' together. If you +repeat this compilation, it may succeed, by chance, but you had better +avoid parallel builds (make -j) in this platform, or get a better +compiler." + + $opt_dry_run || $RM $removelist + exit $EXIT_FAILURE + fi + removelist="$removelist $output_obj" + $ECHO "$srcfile" > "$lockfile" + fi + + $opt_dry_run || $RM $removelist + removelist="$removelist $lockfile" + trap '$opt_dry_run || $RM $removelist; exit $EXIT_FAILURE' 1 2 15 + + if test -n "$fix_srcfile_path"; then + eval srcfile=\"$fix_srcfile_path\" + fi + func_quote_for_eval "$srcfile" + qsrcfile=$func_quote_for_eval_result + + # Only build a PIC object if we are building libtool libraries. + if test "$build_libtool_libs" = yes; then + # Without this assignment, base_compile gets emptied. 
+ fbsd_hideous_sh_bug=$base_compile + + if test "$pic_mode" != no; then + command="$base_compile $qsrcfile $pic_flag" + else + # Don't build PIC code + command="$base_compile $qsrcfile" + fi + + func_mkdir_p "$xdir$objdir" + + if test -z "$output_obj"; then + # Place PIC objects in $objdir + command="$command -o $lobj" + fi + + func_show_eval_locale "$command" \ + 'test -n "$output_obj" && $RM $removelist; exit $EXIT_FAILURE' + + if test "$need_locks" = warn && + test "X`cat $lockfile 2>/dev/null`" != "X$srcfile"; then + $ECHO "\ +*** ERROR, $lockfile contains: +`cat $lockfile 2>/dev/null` + +but it should contain: +$srcfile + +This indicates that another process is trying to use the same +temporary object file, and libtool could not work around it because +your compiler does not support \`-c' and \`-o' together. If you +repeat this compilation, it may succeed, by chance, but you had better +avoid parallel builds (make -j) in this platform, or get a better +compiler." + + $opt_dry_run || $RM $removelist + exit $EXIT_FAILURE + fi + + # Just move the object if needed, then go on to compile the next one + if test -n "$output_obj" && test "X$output_obj" != "X$lobj"; then + func_show_eval '$MV "$output_obj" "$lobj"' \ + 'error=$?; $opt_dry_run || $RM $removelist; exit $error' + fi + + # Allow error messages only from the first compilation. + if test "$suppress_opt" = yes; then + suppress_output=' >/dev/null 2>&1' + fi + fi + + # Only build a position-dependent object if we build old libraries. + if test "$build_old_libs" = yes; then + if test "$pic_mode" != yes; then + # Don't build PIC code + command="$base_compile $qsrcfile$pie_flag" + else + command="$base_compile $qsrcfile $pic_flag" + fi + if test "$compiler_c_o" = yes; then + command="$command -o $obj" + fi + + # Suppress compiler output if we already did a PIC compilation. + command="$command$suppress_output" + func_show_eval_locale "$command" \ + '$opt_dry_run || $RM $removelist; exit $EXIT_FAILURE' + + if test "$need_locks" = warn && + test "X`cat $lockfile 2>/dev/null`" != "X$srcfile"; then + $ECHO "\ +*** ERROR, $lockfile contains: +`cat $lockfile 2>/dev/null` + +but it should contain: +$srcfile + +This indicates that another process is trying to use the same +temporary object file, and libtool could not work around it because +your compiler does not support \`-c' and \`-o' together. If you +repeat this compilation, it may succeed, by chance, but you had better +avoid parallel builds (make -j) in this platform, or get a better +compiler." + + $opt_dry_run || $RM $removelist + exit $EXIT_FAILURE + fi + + # Just move the object if needed + if test -n "$output_obj" && test "X$output_obj" != "X$obj"; then + func_show_eval '$MV "$output_obj" "$obj"' \ + 'error=$?; $opt_dry_run || $RM $removelist; exit $error' + fi + fi + + $opt_dry_run || { + func_write_libtool_object "$libobj" "$objdir/$objname" "$objname" + + # Unlock the critical section if it was locked + if test "$need_locks" != no; then + removelist=$lockfile + $RM "$lockfile" + fi + } + + exit $EXIT_SUCCESS +} + +$opt_help || { +test "$mode" = compile && func_mode_compile ${1+"$@"} +} + +func_mode_help () +{ + # We need to display help for each of the modes. + case $mode in + "") + # Generic help is extracted from the usage comments + # at the start of this file. + func_help + ;; + + clean) + $ECHO \ +"Usage: $progname [OPTION]... --mode=clean RM [RM-OPTION]... FILE... + +Remove files from the build directory. 
+ +RM is the name of the program to use to delete files associated with each FILE +(typically \`/bin/rm'). RM-OPTIONS are options (such as \`-f') to be passed +to RM. + +If FILE is a libtool library, object or program, all the files associated +with it are deleted. Otherwise, only FILE itself is deleted using RM." + ;; + + compile) + $ECHO \ +"Usage: $progname [OPTION]... --mode=compile COMPILE-COMMAND... SOURCEFILE + +Compile a source file into a libtool library object. + +This mode accepts the following additional options: + + -o OUTPUT-FILE set the output file name to OUTPUT-FILE + -no-suppress do not suppress compiler output for multiple passes + -prefer-pic try to building PIC objects only + -prefer-non-pic try to building non-PIC objects only + -shared do not build a \`.o' file suitable for static linking + -static only build a \`.o' file suitable for static linking + +COMPILE-COMMAND is a command to be used in creating a \`standard' object file +from the given SOURCEFILE. + +The output file name is determined by removing the directory component from +SOURCEFILE, then substituting the C source code suffix \`.c' with the +library object suffix, \`.lo'." + ;; + + execute) + $ECHO \ +"Usage: $progname [OPTION]... --mode=execute COMMAND [ARGS]... + +Automatically set library path, then run a program. + +This mode accepts the following additional options: + + -dlopen FILE add the directory containing FILE to the library path + +This mode sets the library path environment variable according to \`-dlopen' +flags. + +If any of the ARGS are libtool executable wrappers, then they are translated +into their corresponding uninstalled binary, and any of their required library +directories are added to the library path. + +Then, COMMAND is executed, with ARGS as arguments." + ;; + + finish) + $ECHO \ +"Usage: $progname [OPTION]... --mode=finish [LIBDIR]... + +Complete the installation of libtool libraries. + +Each LIBDIR is a directory that contains libtool libraries. + +The commands that this mode executes may require superuser privileges. Use +the \`--dry-run' option if you just want to see what would be executed." + ;; + + install) + $ECHO \ +"Usage: $progname [OPTION]... --mode=install INSTALL-COMMAND... + +Install executables or libraries. + +INSTALL-COMMAND is the installation command. The first component should be +either the \`install' or \`cp' program. + +The following components of INSTALL-COMMAND are treated specially: + + -inst-prefix PREFIX-DIR Use PREFIX-DIR as a staging area for installation + +The rest of the components are interpreted as arguments to that command (only +BSD-compatible install options are recognized)." + ;; + + link) + $ECHO \ +"Usage: $progname [OPTION]... --mode=link LINK-COMMAND... + +Link object files or libraries together to form another library, or to +create an executable program. + +LINK-COMMAND is a command using the C compiler that you would use to create +a program from several object files. 
+ +The following components of LINK-COMMAND are treated specially: + + -all-static do not do any dynamic linking at all + -avoid-version do not add a version suffix if possible + -dlopen FILE \`-dlpreopen' FILE if it cannot be dlopened at runtime + -dlpreopen FILE link in FILE and add its symbols to lt_preloaded_symbols + -export-dynamic allow symbols from OUTPUT-FILE to be resolved with dlsym(3) + -export-symbols SYMFILE + try to export only the symbols listed in SYMFILE + -export-symbols-regex REGEX + try to export only the symbols matching REGEX + -LLIBDIR search LIBDIR for required installed libraries + -lNAME OUTPUT-FILE requires the installed library libNAME + -module build a library that can dlopened + -no-fast-install disable the fast-install mode + -no-install link a not-installable executable + -no-undefined declare that a library does not refer to external symbols + -o OUTPUT-FILE create OUTPUT-FILE from the specified objects + -objectlist FILE Use a list of object files found in FILE to specify objects + -precious-files-regex REGEX + don't remove output files matching REGEX + -release RELEASE specify package release information + -rpath LIBDIR the created library will eventually be installed in LIBDIR + -R[ ]LIBDIR add LIBDIR to the runtime path of programs and libraries + -shared only do dynamic linking of libtool libraries + -shrext SUFFIX override the standard shared library file extension + -static do not do any dynamic linking of uninstalled libtool libraries + -static-libtool-libs + do not do any dynamic linking of libtool libraries + -version-info CURRENT[:REVISION[:AGE]] + specify library version info [each variable defaults to 0] + -weak LIBNAME declare that the target provides the LIBNAME interface + +All other options (arguments beginning with \`-') are ignored. + +Every other argument is treated as a filename. Files ending in \`.la' are +treated as uninstalled libtool libraries, other files are standard or library +object files. + +If the OUTPUT-FILE ends in \`.la', then a libtool library is created, +only library objects (\`.lo' files) may be specified, and \`-rpath' is +required, except when creating a convenience library. + +If OUTPUT-FILE ends in \`.a' or \`.lib', then a standard library is created +using \`ar' and \`ranlib', or on Windows using \`lib'. + +If OUTPUT-FILE ends in \`.lo' or \`.${objext}', then a reloadable object file +is created, otherwise an executable program is created." + ;; + + uninstall) + $ECHO \ +"Usage: $progname [OPTION]... --mode=uninstall RM [RM-OPTION]... FILE... + +Remove libraries from an installation directory. + +RM is the name of the program to use to delete files associated with each FILE +(typically \`/bin/rm'). RM-OPTIONS are options (such as \`-f') to be passed +to RM. + +If FILE is a libtool library, all the files associated with it are deleted. +Otherwise, only FILE itself is deleted using RM." + ;; + + *) + func_fatal_help "invalid operation mode \`$mode'" + ;; + esac + + $ECHO + $ECHO "Try \`$progname --help' for more information about other modes." + + exit $? +} + + # Now that we've collected a possible --mode arg, show help if necessary + $opt_help && func_mode_help + + +# func_mode_execute arg... +func_mode_execute () +{ + $opt_debug + # The first argument is the command name. + cmd="$nonopt" + test -z "$cmd" && \ + func_fatal_help "you must specify a COMMAND" + + # Handle -dlopen flags immediately. 
+ for file in $execute_dlfiles; do + test -f "$file" \ + || func_fatal_help "\`$file' is not a file" + + dir= + case $file in + *.la) + # Check to see that this really is a libtool archive. + func_lalib_unsafe_p "$file" \ + || func_fatal_help "\`$lib' is not a valid libtool archive" + + # Read the libtool library. + dlname= + library_names= + func_source "$file" + + # Skip this library if it cannot be dlopened. + if test -z "$dlname"; then + # Warn if it was a shared library. + test -n "$library_names" && \ + func_warning "\`$file' was not linked with \`-export-dynamic'" + continue + fi + + func_dirname "$file" "" "." + dir="$func_dirname_result" + + if test -f "$dir/$objdir/$dlname"; then + dir="$dir/$objdir" + else + if test ! -f "$dir/$dlname"; then + func_fatal_error "cannot find \`$dlname' in \`$dir' or \`$dir/$objdir'" + fi + fi + ;; + + *.lo) + # Just add the directory containing the .lo file. + func_dirname "$file" "" "." + dir="$func_dirname_result" + ;; + + *) + func_warning "\`-dlopen' is ignored for non-libtool libraries and objects" + continue + ;; + esac + + # Get the absolute pathname. + absdir=`cd "$dir" && pwd` + test -n "$absdir" && dir="$absdir" + + # Now add the directory to shlibpath_var. + if eval "test -z \"\$$shlibpath_var\""; then + eval "$shlibpath_var=\"\$dir\"" + else + eval "$shlibpath_var=\"\$dir:\$$shlibpath_var\"" + fi + done + + # This variable tells wrapper scripts just to set shlibpath_var + # rather than running their programs. + libtool_execute_magic="$magic" + + # Check if any of the arguments is a wrapper script. + args= + for file + do + case $file in + -*) ;; + *) + # Do a test to see if this is really a libtool program. + if func_ltwrapper_script_p "$file"; then + func_source "$file" + # Transform arg to wrapped name. + file="$progdir/$program" + elif func_ltwrapper_executable_p "$file"; then + func_ltwrapper_scriptname "$file" + func_source "$func_ltwrapper_scriptname_result" + # Transform arg to wrapped name. + file="$progdir/$program" + fi + ;; + esac + # Quote arguments (to preserve shell metacharacters). + func_quote_for_eval "$file" + args="$args $func_quote_for_eval_result" + done + + if test "X$opt_dry_run" = Xfalse; then + if test -n "$shlibpath_var"; then + # Export the shlibpath_var. + eval "export $shlibpath_var" + fi + + # Restore saved environment variables + for lt_var in LANG LANGUAGE LC_ALL LC_CTYPE LC_COLLATE LC_MESSAGES + do + eval "if test \"\${save_$lt_var+set}\" = set; then + $lt_var=\$save_$lt_var; export $lt_var + else + $lt_unset $lt_var + fi" + done + + # Now prepare to actually exec the command. + exec_cmd="\$cmd$args" + else + # Display what would be done. + if test -n "$shlibpath_var"; then + eval "\$ECHO \"\$shlibpath_var=\$$shlibpath_var\"" + $ECHO "export $shlibpath_var" + fi + $ECHO "$cmd$args" + exit $EXIT_SUCCESS + fi +} + +test "$mode" = execute && func_mode_execute ${1+"$@"} + + +# func_mode_finish arg... +func_mode_finish () +{ + $opt_debug + libdirs="$nonopt" + admincmds= + + if test -n "$finish_cmds$finish_eval" && test -n "$libdirs"; then + for dir + do + libdirs="$libdirs $dir" + done + + for libdir in $libdirs; do + if test -n "$finish_cmds"; then + # Do each command in the finish commands. + func_execute_cmds "$finish_cmds" 'admincmds="$admincmds +'"$cmd"'"' + fi + if test -n "$finish_eval"; then + # Do the single finish_eval. + eval cmds=\"$finish_eval\" + $opt_dry_run || eval "$cmds" || admincmds="$admincmds + $cmds" + fi + done + fi + + # Exit here if they wanted silent mode. 
+ $opt_silent && exit $EXIT_SUCCESS + + $ECHO "X----------------------------------------------------------------------" | $Xsed + $ECHO "Libraries have been installed in:" + for libdir in $libdirs; do + $ECHO " $libdir" + done + $ECHO + $ECHO "If you ever happen to want to link against installed libraries" + $ECHO "in a given directory, LIBDIR, you must either use libtool, and" + $ECHO "specify the full pathname of the library, or use the \`-LLIBDIR'" + $ECHO "flag during linking and do at least one of the following:" + if test -n "$shlibpath_var"; then + $ECHO " - add LIBDIR to the \`$shlibpath_var' environment variable" + $ECHO " during execution" + fi + if test -n "$runpath_var"; then + $ECHO " - add LIBDIR to the \`$runpath_var' environment variable" + $ECHO " during linking" + fi + if test -n "$hardcode_libdir_flag_spec"; then + libdir=LIBDIR + eval flag=\"$hardcode_libdir_flag_spec\" + + $ECHO " - use the \`$flag' linker flag" + fi + if test -n "$admincmds"; then + $ECHO " - have your system administrator run these commands:$admincmds" + fi + if test -f /etc/ld.so.conf; then + $ECHO " - have your system administrator add LIBDIR to \`/etc/ld.so.conf'" + fi + $ECHO + + $ECHO "See any operating system documentation about shared libraries for" + case $host in + solaris2.[6789]|solaris2.1[0-9]) + $ECHO "more information, such as the ld(1), crle(1) and ld.so(8) manual" + $ECHO "pages." + ;; + *) + $ECHO "more information, such as the ld(1) and ld.so(8) manual pages." + ;; + esac + $ECHO "X----------------------------------------------------------------------" | $Xsed + exit $EXIT_SUCCESS +} + +test "$mode" = finish && func_mode_finish ${1+"$@"} + + +# func_mode_install arg... +func_mode_install () +{ + $opt_debug + # There may be an optional sh(1) argument at the beginning of + # install_prog (especially on Windows NT). + if test "$nonopt" = "$SHELL" || test "$nonopt" = /bin/sh || + # Allow the use of GNU shtool's install command. + $ECHO "X$nonopt" | $GREP shtool >/dev/null; then + # Aesthetically quote it. + func_quote_for_eval "$nonopt" + install_prog="$func_quote_for_eval_result " + arg=$1 + shift + else + install_prog= + arg=$nonopt + fi + + # The real first argument should be the name of the installation program. + # Aesthetically quote it. + func_quote_for_eval "$arg" + install_prog="$install_prog$func_quote_for_eval_result" + + # We need to accept at least all the BSD install flags. + dest= + files= + opts= + prev= + install_type= + isdir=no + stripme= + for arg + do + if test -n "$dest"; then + files="$files $dest" + dest=$arg + continue + fi + + case $arg in + -d) isdir=yes ;; + -f) + case " $install_prog " in + *[\\\ /]cp\ *) ;; + *) prev=$arg ;; + esac + ;; + -g | -m | -o) + prev=$arg + ;; + -s) + stripme=" -s" + continue + ;; + -*) + ;; + *) + # If the previous option needed an argument, then skip it. + if test -n "$prev"; then + prev= + else + dest=$arg + continue + fi + ;; + esac + + # Aesthetically quote the argument. + func_quote_for_eval "$arg" + install_prog="$install_prog $func_quote_for_eval_result" + done + + test -z "$install_prog" && \ + func_fatal_help "you must specify an install program" + + test -n "$prev" && \ + func_fatal_help "the \`$prev' option requires an argument" + + if test -z "$files"; then + if test -z "$dest"; then + func_fatal_help "no file or destination specified" + else + func_fatal_help "you must specify a destination" + fi + fi + + # Strip any trailing slash from the destination. 
+ func_stripname '' '/' "$dest" + dest=$func_stripname_result + + # Check to see that the destination is a directory. + test -d "$dest" && isdir=yes + if test "$isdir" = yes; then + destdir="$dest" + destname= + else + func_dirname_and_basename "$dest" "" "." + destdir="$func_dirname_result" + destname="$func_basename_result" + + # Not a directory, so check to see that there is only one file specified. + set dummy $files; shift + test "$#" -gt 1 && \ + func_fatal_help "\`$dest' is not a directory" + fi + case $destdir in + [\\/]* | [A-Za-z]:[\\/]*) ;; + *) + for file in $files; do + case $file in + *.lo) ;; + *) + func_fatal_help "\`$destdir' must be an absolute directory name" + ;; + esac + done + ;; + esac + + # This variable tells wrapper scripts just to set variables rather + # than running their programs. + libtool_install_magic="$magic" + + staticlibs= + future_libdirs= + current_libdirs= + for file in $files; do + + # Do each installation. + case $file in + *.$libext) + # Do the static libraries later. + staticlibs="$staticlibs $file" + ;; + + *.la) + # Check to see that this really is a libtool archive. + func_lalib_unsafe_p "$file" \ + || func_fatal_help "\`$file' is not a valid libtool archive" + + library_names= + old_library= + relink_command= + func_source "$file" + + # Add the libdir to current_libdirs if it is the destination. + if test "X$destdir" = "X$libdir"; then + case "$current_libdirs " in + *" $libdir "*) ;; + *) current_libdirs="$current_libdirs $libdir" ;; + esac + else + # Note the libdir as a future libdir. + case "$future_libdirs " in + *" $libdir "*) ;; + *) future_libdirs="$future_libdirs $libdir" ;; + esac + fi + + func_dirname "$file" "/" "" + dir="$func_dirname_result" + dir="$dir$objdir" + + if test -n "$relink_command"; then + # Determine the prefix the user has applied to our future dir. + inst_prefix_dir=`$ECHO "X$destdir" | $Xsed -e "s%$libdir\$%%"` + + # Don't allow the user to place us outside of our expected + # location b/c this prevents finding dependent libraries that + # are installed to the same prefix. + # At present, this check doesn't affect windows .dll's that + # are installed into $libdir/../bin (currently, that works fine) + # but it's something to keep an eye on. + test "$inst_prefix_dir" = "$destdir" && \ + func_fatal_error "error: cannot install \`$file' to a directory not ending in $libdir" + + if test -n "$inst_prefix_dir"; then + # Stick the inst_prefix_dir data into the link command. + relink_command=`$ECHO "X$relink_command" | $Xsed -e "s%@inst_prefix_dir@%-inst-prefix-dir $inst_prefix_dir%"` + else + relink_command=`$ECHO "X$relink_command" | $Xsed -e "s%@inst_prefix_dir@%%"` + fi + + func_warning "relinking \`$file'" + func_show_eval "$relink_command" \ + 'func_fatal_error "error: relink \`$file'\'' with the above command before installing it"' + fi + + # See the names of the shared library. + set dummy $library_names; shift + if test -n "$1"; then + realname="$1" + shift + + srcname="$realname" + test -n "$relink_command" && srcname="$realname"T + + # Install the shared library and build the symlinks. + func_show_eval "$install_prog $dir/$srcname $destdir/$realname" \ + 'exit $?' + tstripme="$stripme" + case $host_os in + cygwin* | mingw* | pw32*) + case $realname in + *.dll.a) + tstripme="" + ;; + esac + ;; + esac + if test -n "$tstripme" && test -n "$striplib"; then + func_show_eval "$striplib $destdir/$realname" 'exit $?' + fi + + if test "$#" -gt 0; then + # Delete the old symlinks, and create new ones. 
+ # Try `ln -sf' first, because the `ln' binary might depend on + # the symlink we replace! Solaris /bin/ln does not understand -f, + # so we also need to try rm && ln -s. + for linkname + do + test "$linkname" != "$realname" \ + && func_show_eval "(cd $destdir && { $LN_S -f $realname $linkname || { $RM $linkname && $LN_S $realname $linkname; }; })" + done + fi + + # Do each command in the postinstall commands. + lib="$destdir/$realname" + func_execute_cmds "$postinstall_cmds" 'exit $?' + fi + + # Install the pseudo-library for information purposes. + func_basename "$file" + name="$func_basename_result" + instname="$dir/$name"i + func_show_eval "$install_prog $instname $destdir/$name" 'exit $?' + + # Maybe install the static library, too. + test -n "$old_library" && staticlibs="$staticlibs $dir/$old_library" + ;; + + *.lo) + # Install (i.e. copy) a libtool object. + + # Figure out destination file name, if it wasn't already specified. + if test -n "$destname"; then + destfile="$destdir/$destname" + else + func_basename "$file" + destfile="$func_basename_result" + destfile="$destdir/$destfile" + fi + + # Deduce the name of the destination old-style object file. + case $destfile in + *.lo) + func_lo2o "$destfile" + staticdest=$func_lo2o_result + ;; + *.$objext) + staticdest="$destfile" + destfile= + ;; + *) + func_fatal_help "cannot copy a libtool object to \`$destfile'" + ;; + esac + + # Install the libtool object if requested. + test -n "$destfile" && \ + func_show_eval "$install_prog $file $destfile" 'exit $?' + + # Install the old object if enabled. + if test "$build_old_libs" = yes; then + # Deduce the name of the old-style object file. + func_lo2o "$file" + staticobj=$func_lo2o_result + func_show_eval "$install_prog \$staticobj \$staticdest" 'exit $?' + fi + exit $EXIT_SUCCESS + ;; + + *) + # Figure out destination file name, if it wasn't already specified. + if test -n "$destname"; then + destfile="$destdir/$destname" + else + func_basename "$file" + destfile="$func_basename_result" + destfile="$destdir/$destfile" + fi + + # If the file is missing, and there is a .exe on the end, strip it + # because it is most likely a libtool script we actually want to + # install + stripped_ext="" + case $file in + *.exe) + if test ! -f "$file"; then + func_stripname '' '.exe' "$file" + file=$func_stripname_result + stripped_ext=".exe" + fi + ;; + esac + + # Do a test to see if this is really a libtool program. + case $host in + *cygwin*|*mingw*) + if func_ltwrapper_executable_p "$file"; then + func_ltwrapper_scriptname "$file" + wrapper=$func_ltwrapper_scriptname_result + else + func_stripname '' '.exe' "$file" + wrapper=$func_stripname_result + fi + ;; + *) + wrapper=$file + ;; + esac + if func_ltwrapper_script_p "$wrapper"; then + notinst_deplibs= + relink_command= + + func_source "$wrapper" + + # Check the variables that should have been set. + test -z "$generated_by_libtool_version" && \ + func_fatal_error "invalid libtool wrapper script \`$wrapper'" + + finalize=yes + for lib in $notinst_deplibs; do + # Check to see that each library is installed. + libdir= + if test -f "$lib"; then + func_source "$lib" + fi + libfile="$libdir/"`$ECHO "X$lib" | $Xsed -e 's%^.*/%%g'` ### testsuite: skip nested quoting test + if test -n "$libdir" && test ! 
-f "$libfile"; then + func_warning "\`$lib' has not been installed in \`$libdir'" + finalize=no + fi + done + + relink_command= + func_source "$wrapper" + + outputname= + if test "$fast_install" = no && test -n "$relink_command"; then + $opt_dry_run || { + if test "$finalize" = yes; then + tmpdir=`func_mktempdir` + func_basename "$file$stripped_ext" + file="$func_basename_result" + outputname="$tmpdir/$file" + # Replace the output file specification. + relink_command=`$ECHO "X$relink_command" | $Xsed -e 's%@OUTPUT@%'"$outputname"'%g'` + + $opt_silent || { + func_quote_for_expand "$relink_command" + eval "func_echo $func_quote_for_expand_result" + } + if eval "$relink_command"; then : + else + func_error "error: relink \`$file' with the above command before installing it" + $opt_dry_run || ${RM}r "$tmpdir" + continue + fi + file="$outputname" + else + func_warning "cannot relink \`$file'" + fi + } + else + # Install the binary that we compiled earlier. + file=`$ECHO "X$file$stripped_ext" | $Xsed -e "s%\([^/]*\)$%$objdir/\1%"` + fi + fi + + # remove .exe since cygwin /usr/bin/install will append another + # one anyway + case $install_prog,$host in + */usr/bin/install*,*cygwin*) + case $file:$destfile in + *.exe:*.exe) + # this is ok + ;; + *.exe:*) + destfile=$destfile.exe + ;; + *:*.exe) + func_stripname '' '.exe' "$destfile" + destfile=$func_stripname_result + ;; + esac + ;; + esac + func_show_eval "$install_prog\$stripme \$file \$destfile" 'exit $?' + $opt_dry_run || if test -n "$outputname"; then + ${RM}r "$tmpdir" + fi + ;; + esac + done + + for file in $staticlibs; do + func_basename "$file" + name="$func_basename_result" + + # Set up the ranlib parameters. + oldlib="$destdir/$name" + + func_show_eval "$install_prog \$file \$oldlib" 'exit $?' + + if test -n "$stripme" && test -n "$old_striplib"; then + func_show_eval "$old_striplib $oldlib" 'exit $?' + fi + + # Do each command in the postinstall commands. + func_execute_cmds "$old_postinstall_cmds" 'exit $?' + done + + test -n "$future_libdirs" && \ + func_warning "remember to run \`$progname --finish$future_libdirs'" + + if test -n "$current_libdirs"; then + # Maybe just do a dry run. + $opt_dry_run && current_libdirs=" -n$current_libdirs" + exec_cmd='$SHELL $progpath $preserve_args --finish$current_libdirs' + else + exit $EXIT_SUCCESS + fi +} + +test "$mode" = install && func_mode_install ${1+"$@"} + + +# func_generate_dlsyms outputname originator pic_p +# Extract symbols from dlprefiles and create ${outputname}S.o with +# a dlpreopen symbol table. +func_generate_dlsyms () +{ + $opt_debug + my_outputname="$1" + my_originator="$2" + my_pic_p="${3-no}" + my_prefix=`$ECHO "$my_originator" | sed 's%[^a-zA-Z0-9]%_%g'` + my_dlsyms= + + if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then + if test -n "$NM" && test -n "$global_symbol_pipe"; then + my_dlsyms="${my_outputname}S.c" + else + func_error "not configured to extract global symbols from dlpreopened files" + fi + fi + + if test -n "$my_dlsyms"; then + case $my_dlsyms in + "") ;; + *.c) + # Discover the nlist of each of the dlfiles. + nlist="$output_objdir/${my_outputname}.nm" + + func_show_eval "$RM $nlist ${nlist}S ${nlist}T" + + # Parse the name list into a source file. + func_verbose "creating $output_objdir/$my_dlsyms" + + $opt_dry_run || $ECHO > "$output_objdir/$my_dlsyms" "\ +/* $my_dlsyms - symbol resolution table for \`$my_outputname' dlsym emulation. 
*/ +/* Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION */ + +#ifdef __cplusplus +extern \"C\" { +#endif + +/* External symbol declarations for the compiler. */\ +" + + if test "$dlself" = yes; then + func_verbose "generating symbol list for \`$output'" + + $opt_dry_run || echo ': @PROGRAM@ ' > "$nlist" + + # Add our own program objects to the symbol list. + progfiles=`$ECHO "X$objs$old_deplibs" | $SP2NL | $Xsed -e "$lo2o" | $NL2SP` + for progfile in $progfiles; do + func_verbose "extracting global C symbols from \`$progfile'" + $opt_dry_run || eval "$NM $progfile | $global_symbol_pipe >> '$nlist'" + done + + if test -n "$exclude_expsyms"; then + $opt_dry_run || { + eval '$EGREP -v " ($exclude_expsyms)$" "$nlist" > "$nlist"T' + eval '$MV "$nlist"T "$nlist"' + } + fi + + if test -n "$export_symbols_regex"; then + $opt_dry_run || { + eval '$EGREP -e "$export_symbols_regex" "$nlist" > "$nlist"T' + eval '$MV "$nlist"T "$nlist"' + } + fi + + # Prepare the list of exported symbols + if test -z "$export_symbols"; then + export_symbols="$output_objdir/$outputname.exp" + $opt_dry_run || { + $RM $export_symbols + eval "${SED} -n -e '/^: @PROGRAM@ $/d' -e 's/^.* \(.*\)$/\1/p' "'< "$nlist" > "$export_symbols"' + case $host in + *cygwin* | *mingw* ) + eval "echo EXPORTS "'> "$output_objdir/$outputname.def"' + eval 'cat "$export_symbols" >> "$output_objdir/$outputname.def"' + ;; + esac + } + else + $opt_dry_run || { + eval "${SED} -e 's/\([].[*^$]\)/\\\\\1/g' -e 's/^/ /' -e 's/$/$/'"' < "$export_symbols" > "$output_objdir/$outputname.exp"' + eval '$GREP -f "$output_objdir/$outputname.exp" < "$nlist" > "$nlist"T' + eval '$MV "$nlist"T "$nlist"' + case $host in + *cygwin | *mingw* ) + eval "echo EXPORTS "'> "$output_objdir/$outputname.def"' + eval 'cat "$nlist" >> "$output_objdir/$outputname.def"' + ;; + esac + } + fi + fi + + for dlprefile in $dlprefiles; do + func_verbose "extracting global C symbols from \`$dlprefile'" + func_basename "$dlprefile" + name="$func_basename_result" + $opt_dry_run || { + eval '$ECHO ": $name " >> "$nlist"' + eval "$NM $dlprefile 2>/dev/null | $global_symbol_pipe >> '$nlist'" + } + done + + $opt_dry_run || { + # Make sure we have at least an empty file. + test -f "$nlist" || : > "$nlist" + + if test -n "$exclude_expsyms"; then + $EGREP -v " ($exclude_expsyms)$" "$nlist" > "$nlist"T + $MV "$nlist"T "$nlist" + fi + + # Try sorting and uniquifying the output. + if $GREP -v "^: " < "$nlist" | + if sort -k 3 /dev/null 2>&1; then + sort -k 3 + else + sort +2 + fi | + uniq > "$nlist"S; then + : + else + $GREP -v "^: " < "$nlist" > "$nlist"S + fi + + if test -f "$nlist"S; then + eval "$global_symbol_to_cdecl"' < "$nlist"S >> "$output_objdir/$my_dlsyms"' + else + $ECHO '/* NONE */' >> "$output_objdir/$my_dlsyms" + fi + + $ECHO >> "$output_objdir/$my_dlsyms" "\ + +/* The mapping between symbol names and symbols. */ +typedef struct { + const char *name; + void *address; +} lt_dlsymlist; +" + case $host in + *cygwin* | *mingw* ) + $ECHO >> "$output_objdir/$my_dlsyms" "\ +/* DATA imports from DLLs on WIN32 con't be const, because + runtime relocations are performed -- see ld's documentation + on pseudo-relocs. 
*/" + lt_dlsym_const= ;; + *osf5*) + echo >> "$output_objdir/$my_dlsyms" "\ +/* This system does not cope well with relocations in const data */" + lt_dlsym_const= ;; + *) + lt_dlsym_const=const ;; + esac + + $ECHO >> "$output_objdir/$my_dlsyms" "\ +extern $lt_dlsym_const lt_dlsymlist +lt_${my_prefix}_LTX_preloaded_symbols[]; +$lt_dlsym_const lt_dlsymlist +lt_${my_prefix}_LTX_preloaded_symbols[] = +{\ + { \"$my_originator\", (void *) 0 }," + + case $need_lib_prefix in + no) + eval "$global_symbol_to_c_name_address" < "$nlist" >> "$output_objdir/$my_dlsyms" + ;; + *) + eval "$global_symbol_to_c_name_address_lib_prefix" < "$nlist" >> "$output_objdir/$my_dlsyms" + ;; + esac + $ECHO >> "$output_objdir/$my_dlsyms" "\ + {0, (void *) 0} +}; + +/* This works around a problem in FreeBSD linker */ +#ifdef FREEBSD_WORKAROUND +static const void *lt_preloaded_setup() { + return lt_${my_prefix}_LTX_preloaded_symbols; +} +#endif + +#ifdef __cplusplus +} +#endif\ +" + } # !$opt_dry_run + + pic_flag_for_symtable= + case "$compile_command " in + *" -static "*) ;; + *) + case $host in + # compiling the symbol table file with pic_flag works around + # a FreeBSD bug that causes programs to crash when -lm is + # linked before any other PIC object. But we must not use + # pic_flag when linking with -static. The problem exists in + # FreeBSD 2.2.6 and is fixed in FreeBSD 3.1. + *-*-freebsd2*|*-*-freebsd3.0*|*-*-freebsdelf3.0*) + pic_flag_for_symtable=" $pic_flag -DFREEBSD_WORKAROUND" ;; + *-*-hpux*) + pic_flag_for_symtable=" $pic_flag" ;; + *) + if test "X$my_pic_p" != Xno; then + pic_flag_for_symtable=" $pic_flag" + fi + ;; + esac + ;; + esac + symtab_cflags= + for arg in $LTCFLAGS; do + case $arg in + -pie | -fpie | -fPIE) ;; + *) symtab_cflags="$symtab_cflags $arg" ;; + esac + done + + # Now compile the dynamic symbol file. + func_show_eval '(cd $output_objdir && $LTCC$symtab_cflags -c$no_builtin_flag$pic_flag_for_symtable "$my_dlsyms")' 'exit $?' + + # Clean up the generated files. + func_show_eval '$RM "$output_objdir/$my_dlsyms" "$nlist" "${nlist}S" "${nlist}T"' + + # Transform the symbol file into the correct name. + symfileobj="$output_objdir/${my_outputname}S.$objext" + case $host in + *cygwin* | *mingw* ) + if test -f "$output_objdir/$my_outputname.def"; then + compile_command=`$ECHO "X$compile_command" | $Xsed -e "s%@SYMFILE@%$output_objdir/$my_outputname.def $symfileobj%"` + finalize_command=`$ECHO "X$finalize_command" | $Xsed -e "s%@SYMFILE@%$output_objdir/$my_outputname.def $symfileobj%"` + else + compile_command=`$ECHO "X$compile_command" | $Xsed -e "s%@SYMFILE@%$symfileobj%"` + finalize_command=`$ECHO "X$finalize_command" | $Xsed -e "s%@SYMFILE@%$symfileobj%"` + fi + ;; + *) + compile_command=`$ECHO "X$compile_command" | $Xsed -e "s%@SYMFILE@%$symfileobj%"` + finalize_command=`$ECHO "X$finalize_command" | $Xsed -e "s%@SYMFILE@%$symfileobj%"` + ;; + esac + ;; + *) + func_fatal_error "unknown suffix for \`$my_dlsyms'" + ;; + esac + else + # We keep going just in case the user didn't refer to + # lt_preloaded_symbols. The linker will fail if global_symbol_pipe + # really was required. + + # Nullify the symbol file. 
+ compile_command=`$ECHO "X$compile_command" | $Xsed -e "s% @SYMFILE@%%"` + finalize_command=`$ECHO "X$finalize_command" | $Xsed -e "s% @SYMFILE@%%"` + fi +} + +# func_win32_libid arg +# return the library type of file 'arg' +# +# Need a lot of goo to handle *both* DLLs and import libs +# Has to be a shell function in order to 'eat' the argument +# that is supplied when $file_magic_command is called. +func_win32_libid () +{ + $opt_debug + win32_libid_type="unknown" + win32_fileres=`file -L $1 2>/dev/null` + case $win32_fileres in + *ar\ archive\ import\ library*) # definitely import + win32_libid_type="x86 archive import" + ;; + *ar\ archive*) # could be an import, or static + if eval $OBJDUMP -f $1 | $SED -e '10q' 2>/dev/null | + $EGREP 'file format pe-i386(.*architecture: i386)?' >/dev/null ; then + win32_nmres=`eval $NM -f posix -A $1 | + $SED -n -e ' + 1,100{ + / I /{ + s,.*,import, + p + q + } + }'` + case $win32_nmres in + import*) win32_libid_type="x86 archive import";; + *) win32_libid_type="x86 archive static";; + esac + fi + ;; + *DLL*) + win32_libid_type="x86 DLL" + ;; + *executable*) # but shell scripts are "executable" too... + case $win32_fileres in + *MS\ Windows\ PE\ Intel*) + win32_libid_type="x86 DLL" + ;; + esac + ;; + esac + $ECHO "$win32_libid_type" +} + + + +# func_extract_an_archive dir oldlib +func_extract_an_archive () +{ + $opt_debug + f_ex_an_ar_dir="$1"; shift + f_ex_an_ar_oldlib="$1" + func_show_eval "(cd \$f_ex_an_ar_dir && $AR x \"\$f_ex_an_ar_oldlib\")" 'exit $?' + if ($AR t "$f_ex_an_ar_oldlib" | sort | sort -uc >/dev/null 2>&1); then + : + else + func_fatal_error "object name conflicts in archive: $f_ex_an_ar_dir/$f_ex_an_ar_oldlib" + fi +} + + +# func_extract_archives gentop oldlib ... +func_extract_archives () +{ + $opt_debug + my_gentop="$1"; shift + my_oldlibs=${1+"$@"} + my_oldobjs="" + my_xlib="" + my_xabs="" + my_xdir="" + + for my_xlib in $my_oldlibs; do + # Extract the objects. + case $my_xlib in + [\\/]* | [A-Za-z]:[\\/]*) my_xabs="$my_xlib" ;; + *) my_xabs=`pwd`"/$my_xlib" ;; + esac + func_basename "$my_xlib" + my_xlib="$func_basename_result" + my_xlib_u=$my_xlib + while :; do + case " $extracted_archives " in + *" $my_xlib_u "*) + func_arith $extracted_serial + 1 + extracted_serial=$func_arith_result + my_xlib_u=lt$extracted_serial-$my_xlib ;; + *) break ;; + esac + done + extracted_archives="$extracted_archives $my_xlib_u" + my_xdir="$my_gentop/$my_xlib_u" + + func_mkdir_p "$my_xdir" + + case $host in + *-darwin*) + func_verbose "Extracting $my_xabs" + # Do not bother doing anything if just a dry run + $opt_dry_run || { + darwin_orig_dir=`pwd` + cd $my_xdir || exit $? 
+ darwin_archive=$my_xabs + darwin_curdir=`pwd` + darwin_base_archive=`basename "$darwin_archive"` + darwin_arches=`$LIPO -info "$darwin_archive" 2>/dev/null | $GREP Architectures 2>/dev/null || true` + if test -n "$darwin_arches"; then + darwin_arches=`$ECHO "$darwin_arches" | $SED -e 's/.*are://'` + darwin_arch= + func_verbose "$darwin_base_archive has multiple architectures $darwin_arches" + for darwin_arch in $darwin_arches ; do + func_mkdir_p "unfat-$$/${darwin_base_archive}-${darwin_arch}" + $LIPO -thin $darwin_arch -output "unfat-$$/${darwin_base_archive}-${darwin_arch}/${darwin_base_archive}" "${darwin_archive}" + cd "unfat-$$/${darwin_base_archive}-${darwin_arch}" + func_extract_an_archive "`pwd`" "${darwin_base_archive}" + cd "$darwin_curdir" + $RM "unfat-$$/${darwin_base_archive}-${darwin_arch}/${darwin_base_archive}" + done # $darwin_arches + ## Okay now we've a bunch of thin objects, gotta fatten them up :) + darwin_filelist=`find unfat-$$ -type f -name \*.o -print -o -name \*.lo -print | $SED -e "$basename" | sort -u` + darwin_file= + darwin_files= + for darwin_file in $darwin_filelist; do + darwin_files=`find unfat-$$ -name $darwin_file -print | $NL2SP` + $LIPO -create -output "$darwin_file" $darwin_files + done # $darwin_filelist + $RM -rf unfat-$$ + cd "$darwin_orig_dir" + else + cd $darwin_orig_dir + func_extract_an_archive "$my_xdir" "$my_xabs" + fi # $darwin_arches + } # !$opt_dry_run + ;; + *) + func_extract_an_archive "$my_xdir" "$my_xabs" + ;; + esac + my_oldobjs="$my_oldobjs "`find $my_xdir -name \*.$objext -print -o -name \*.lo -print | $NL2SP` + done + + func_extract_archives_result="$my_oldobjs" +} + + + +# func_emit_wrapper arg +# +# emit a libtool wrapper script on stdout +# don't directly open a file because we may want to +# incorporate the script contents within a cygwin/mingw +# wrapper executable. Must ONLY be called from within +# func_mode_link because it depends on a number of variable +# set therein. +# +# arg is the value that the WRAPPER_SCRIPT_BELONGS_IN_OBJDIR +# variable will take. If 'yes', then the emitted script +# will assume that the directory in which it is stored is +# the '.lib' directory. This is a cygwin/mingw-specific +# behavior. +func_emit_wrapper () +{ + func_emit_wrapper_arg1=no + if test -n "$1" ; then + func_emit_wrapper_arg1=$1 + fi + + $ECHO "\ +#! $SHELL + +# $output - temporary wrapper script for $objdir/$outputname +# Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION +# +# The $output program cannot be directly executed until all the libtool +# libraries that it depends on are installed. +# +# This wrapper script should never be moved out of the build directory. +# If it is, it will not operate correctly. + +# Sed substitution that helps us do robust quoting. It backslashifies +# metacharacters that are still active within double-quoted strings. +Xsed='${SED} -e 1s/^X//' +sed_quote_subst='$sed_quote_subst' + +# Be Bourne compatible +if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then + emulate sh + NULLCMD=: + # Zsh 3.x and 4.x performs word splitting on \${1+\"\$@\"}, which + # is contrary to our usage. Disable this feature. + alias -g '\${1+\"\$@\"}'='\"\$@\"' + setopt NO_GLOB_SUBST +else + case \`(set -o) 2>/dev/null\` in *posix*) set -o posix;; esac +fi +BIN_SH=xpg4; export BIN_SH # for Tru64 +DUALCASE=1; export DUALCASE # for MKS sh + +# The HP-UX ksh and POSIX shell print the target directory to stdout +# if CDPATH is set. 
+(unset CDPATH) >/dev/null 2>&1 && unset CDPATH + +relink_command=\"$relink_command\" + +# This environment variable determines our operation mode. +if test \"\$libtool_install_magic\" = \"$magic\"; then + # install mode needs the following variables: + generated_by_libtool_version='$macro_version' + notinst_deplibs='$notinst_deplibs' +else + # When we are sourced in execute mode, \$file and \$ECHO are already set. + if test \"\$libtool_execute_magic\" != \"$magic\"; then + ECHO=\"$qecho\" + file=\"\$0\" + # Make sure echo works. + if test \"X\$1\" = X--no-reexec; then + # Discard the --no-reexec flag, and continue. + shift + elif test \"X\`{ \$ECHO '\t'; } 2>/dev/null\`\" = 'X\t'; then + # Yippee, \$ECHO works! + : + else + # Restart under the correct shell, and then maybe \$ECHO will work. + exec $SHELL \"\$0\" --no-reexec \${1+\"\$@\"} + fi + fi\ +" + $ECHO "\ + + # Find the directory that this script lives in. + thisdir=\`\$ECHO \"X\$file\" | \$Xsed -e 's%/[^/]*$%%'\` + test \"x\$thisdir\" = \"x\$file\" && thisdir=. + + # Follow symbolic links until we get to the real thisdir. + file=\`ls -ld \"\$file\" | ${SED} -n 's/.*-> //p'\` + while test -n \"\$file\"; do + destdir=\`\$ECHO \"X\$file\" | \$Xsed -e 's%/[^/]*\$%%'\` + + # If there was a directory component, then change thisdir. + if test \"x\$destdir\" != \"x\$file\"; then + case \"\$destdir\" in + [\\\\/]* | [A-Za-z]:[\\\\/]*) thisdir=\"\$destdir\" ;; + *) thisdir=\"\$thisdir/\$destdir\" ;; + esac + fi + + file=\`\$ECHO \"X\$file\" | \$Xsed -e 's%^.*/%%'\` + file=\`ls -ld \"\$thisdir/\$file\" | ${SED} -n 's/.*-> //p'\` + done + + # Usually 'no', except on cygwin/mingw when embedded into + # the cwrapper. + WRAPPER_SCRIPT_BELONGS_IN_OBJDIR=$func_emit_wrapper_arg1 + if test \"\$WRAPPER_SCRIPT_BELONGS_IN_OBJDIR\" = \"yes\"; then + # special case for '.' + if test \"\$thisdir\" = \".\"; then + thisdir=\`pwd\` + fi + # remove .libs from thisdir + case \"\$thisdir\" in + *[\\\\/]$objdir ) thisdir=\`\$ECHO \"X\$thisdir\" | \$Xsed -e 's%[\\\\/][^\\\\/]*$%%'\` ;; + $objdir ) thisdir=. ;; + esac + fi + + # Try to get the absolute directory name. + absdir=\`cd \"\$thisdir\" && pwd\` + test -n \"\$absdir\" && thisdir=\"\$absdir\" +" + + if test "$fast_install" = yes; then + $ECHO "\ + program=lt-'$outputname'$exeext + progdir=\"\$thisdir/$objdir\" + + if test ! -f \"\$progdir/\$program\" || + { file=\`ls -1dt \"\$progdir/\$program\" \"\$progdir/../\$program\" 2>/dev/null | ${SED} 1q\`; \\ + test \"X\$file\" != \"X\$progdir/\$program\"; }; then + + file=\"\$\$-\$program\" + + if test ! -d \"\$progdir\"; then + $MKDIR \"\$progdir\" + else + $RM \"\$progdir/\$file\" + fi" + + $ECHO "\ + + # relink executable if necessary + if test -n \"\$relink_command\"; then + if relink_command_output=\`eval \$relink_command 2>&1\`; then : + else + $ECHO \"\$relink_command_output\" >&2 + $RM \"\$progdir/\$file\" + exit 1 + fi + fi + + $MV \"\$progdir/\$file\" \"\$progdir/\$program\" 2>/dev/null || + { $RM \"\$progdir/\$program\"; + $MV \"\$progdir/\$file\" \"\$progdir/\$program\"; } + $RM \"\$progdir/\$file\" + fi" + else + $ECHO "\ + program='$outputname' + progdir=\"\$thisdir/$objdir\" +" + fi + + $ECHO "\ + + if test -f \"\$progdir/\$program\"; then" + + # Export our shlibpath_var if we have one. 
+ if test "$shlibpath_overrides_runpath" = yes && test -n "$shlibpath_var" && test -n "$temp_rpath"; then + $ECHO "\ + # Add our own library path to $shlibpath_var + $shlibpath_var=\"$temp_rpath\$$shlibpath_var\" + + # Some systems cannot cope with colon-terminated $shlibpath_var + # The second colon is a workaround for a bug in BeOS R4 sed + $shlibpath_var=\`\$ECHO \"X\$$shlibpath_var\" | \$Xsed -e 's/::*\$//'\` + + export $shlibpath_var +" + fi + + # fixup the dll searchpath if we need to. + if test -n "$dllsearchpath"; then + $ECHO "\ + # Add the dll search path components to the executable PATH + PATH=$dllsearchpath:\$PATH +" + fi + + $ECHO "\ + if test \"\$libtool_execute_magic\" != \"$magic\"; then + # Run the actual program with our arguments. +" + case $host in + # Backslashes separate directories on plain windows + *-*-mingw | *-*-os2*) + $ECHO "\ + exec \"\$progdir\\\\\$program\" \${1+\"\$@\"} +" + ;; + + *) + $ECHO "\ + exec \"\$progdir/\$program\" \${1+\"\$@\"} +" + ;; + esac + $ECHO "\ + \$ECHO \"\$0: cannot exec \$program \$*\" 1>&2 + exit 1 + fi + else + # The program doesn't exist. + \$ECHO \"\$0: error: \\\`\$progdir/\$program' does not exist\" 1>&2 + \$ECHO \"This script is just a wrapper for \$program.\" 1>&2 + $ECHO \"See the $PACKAGE documentation for more information.\" 1>&2 + exit 1 + fi +fi\ +" +} +# end: func_emit_wrapper + +# func_emit_cwrapperexe_src +# emit the source code for a wrapper executable on stdout +# Must ONLY be called from within func_mode_link because +# it depends on a number of variable set therein. +func_emit_cwrapperexe_src () +{ + cat < +#include +#ifdef _MSC_VER +# include +# include +# include +# define setmode _setmode +#else +# include +# include +# ifdef __CYGWIN__ +# include +# endif +#endif +#include +#include +#include +#include +#include +#include +#include +#include + +#if defined(PATH_MAX) +# define LT_PATHMAX PATH_MAX +#elif defined(MAXPATHLEN) +# define LT_PATHMAX MAXPATHLEN +#else +# define LT_PATHMAX 1024 +#endif + +#ifndef S_IXOTH +# define S_IXOTH 0 +#endif +#ifndef S_IXGRP +# define S_IXGRP 0 +#endif + +#ifdef _MSC_VER +# define S_IXUSR _S_IEXEC +# define stat _stat +# ifndef _INTPTR_T_DEFINED +# define intptr_t int +# endif +#endif + +#ifndef DIR_SEPARATOR +# define DIR_SEPARATOR '/' +# define PATH_SEPARATOR ':' +#endif + +#if defined (_WIN32) || defined (__MSDOS__) || defined (__DJGPP__) || \ + defined (__OS2__) +# define HAVE_DOS_BASED_FILE_SYSTEM +# define FOPEN_WB "wb" +# ifndef DIR_SEPARATOR_2 +# define DIR_SEPARATOR_2 '\\' +# endif +# ifndef PATH_SEPARATOR_2 +# define PATH_SEPARATOR_2 ';' +# endif +#endif + +#ifndef DIR_SEPARATOR_2 +# define IS_DIR_SEPARATOR(ch) ((ch) == DIR_SEPARATOR) +#else /* DIR_SEPARATOR_2 */ +# define IS_DIR_SEPARATOR(ch) \ + (((ch) == DIR_SEPARATOR) || ((ch) == DIR_SEPARATOR_2)) +#endif /* DIR_SEPARATOR_2 */ + +#ifndef PATH_SEPARATOR_2 +# define IS_PATH_SEPARATOR(ch) ((ch) == PATH_SEPARATOR) +#else /* PATH_SEPARATOR_2 */ +# define IS_PATH_SEPARATOR(ch) ((ch) == PATH_SEPARATOR_2) +#endif /* PATH_SEPARATOR_2 */ + +#ifdef __CYGWIN__ +# define FOPEN_WB "wb" +#endif + +#ifndef FOPEN_WB +# define FOPEN_WB "w" +#endif +#ifndef _O_BINARY +# define _O_BINARY 0 +#endif + +#define XMALLOC(type, num) ((type *) xmalloc ((num) * sizeof(type))) +#define XFREE(stale) do { \ + if (stale) { free ((void *) stale); stale = 0; } \ +} while (0) + +#undef LTWRAPPER_DEBUGPRINTF +#if defined DEBUGWRAPPER +# define LTWRAPPER_DEBUGPRINTF(args) ltwrapper_debugprintf args +static void +ltwrapper_debugprintf (const char *fmt, 
...) +{ + va_list args; + va_start (args, fmt); + (void) vfprintf (stderr, fmt, args); + va_end (args); +} +#else +# define LTWRAPPER_DEBUGPRINTF(args) +#endif + +const char *program_name = NULL; + +void *xmalloc (size_t num); +char *xstrdup (const char *string); +const char *base_name (const char *name); +char *find_executable (const char *wrapper); +char *chase_symlinks (const char *pathspec); +int make_executable (const char *path); +int check_executable (const char *path); +char *strendzap (char *str, const char *pat); +void lt_fatal (const char *message, ...); + +static const char *script_text = +EOF + + func_emit_wrapper yes | + $SED -e 's/\([\\"]\)/\\\1/g' \ + -e 's/^/ "/' -e 's/$/\\n"/' + echo ";" + + cat </dev/null || echo $SHELL` + case $lt_newargv0 in + *.exe | *.EXE) ;; + *) lt_newargv0=$lt_newargv0.exe ;; + esac + ;; + * ) lt_newargv0=$SHELL ;; + esac + fi + + cat <= 0) + && (st.st_mode & (S_IXUSR | S_IXGRP | S_IXOTH))) + return 1; + else + return 0; +} + +int +make_executable (const char *path) +{ + int rval = 0; + struct stat st; + + LTWRAPPER_DEBUGPRINTF (("(make_executable) : %s\n", + path ? (*path ? path : "EMPTY!") : "NULL!")); + if ((!path) || (!*path)) + return 0; + + if (stat (path, &st) >= 0) + { + rval = chmod (path, st.st_mode | S_IXOTH | S_IXGRP | S_IXUSR); + } + return rval; +} + +/* Searches for the full path of the wrapper. Returns + newly allocated full path name if found, NULL otherwise + Does not chase symlinks, even on platforms that support them. +*/ +char * +find_executable (const char *wrapper) +{ + int has_slash = 0; + const char *p; + const char *p_next; + /* static buffer for getcwd */ + char tmp[LT_PATHMAX + 1]; + int tmp_len; + char *concat_name; + + LTWRAPPER_DEBUGPRINTF (("(find_executable) : %s\n", + wrapper ? (*wrapper ? wrapper : "EMPTY!") : "NULL!")); + + if ((wrapper == NULL) || (*wrapper == '\0')) + return NULL; + + /* Absolute path? */ +#if defined (HAVE_DOS_BASED_FILE_SYSTEM) + if (isalpha ((unsigned char) wrapper[0]) && wrapper[1] == ':') + { + concat_name = xstrdup (wrapper); + if (check_executable (concat_name)) + return concat_name; + XFREE (concat_name); + } + else + { +#endif + if (IS_DIR_SEPARATOR (wrapper[0])) + { + concat_name = xstrdup (wrapper); + if (check_executable (concat_name)) + return concat_name; + XFREE (concat_name); + } +#if defined (HAVE_DOS_BASED_FILE_SYSTEM) + } +#endif + + for (p = wrapper; *p; p++) + if (*p == '/') + { + has_slash = 1; + break; + } + if (!has_slash) + { + /* no slashes; search PATH */ + const char *path = getenv ("PATH"); + if (path != NULL) + { + for (p = path; *p; p = p_next) + { + const char *q; + size_t p_len; + for (q = p; *q; q++) + if (IS_PATH_SEPARATOR (*q)) + break; + p_len = q - p; + p_next = (*q == '\0' ? 
q : q + 1); + if (p_len == 0) + { + /* empty path: current directory */ + if (getcwd (tmp, LT_PATHMAX) == NULL) + lt_fatal ("getcwd failed"); + tmp_len = strlen (tmp); + concat_name = + XMALLOC (char, tmp_len + 1 + strlen (wrapper) + 1); + memcpy (concat_name, tmp, tmp_len); + concat_name[tmp_len] = '/'; + strcpy (concat_name + tmp_len + 1, wrapper); + } + else + { + concat_name = + XMALLOC (char, p_len + 1 + strlen (wrapper) + 1); + memcpy (concat_name, p, p_len); + concat_name[p_len] = '/'; + strcpy (concat_name + p_len + 1, wrapper); + } + if (check_executable (concat_name)) + return concat_name; + XFREE (concat_name); + } + } + /* not found in PATH; assume curdir */ + } + /* Relative path | not found in path: prepend cwd */ + if (getcwd (tmp, LT_PATHMAX) == NULL) + lt_fatal ("getcwd failed"); + tmp_len = strlen (tmp); + concat_name = XMALLOC (char, tmp_len + 1 + strlen (wrapper) + 1); + memcpy (concat_name, tmp, tmp_len); + concat_name[tmp_len] = '/'; + strcpy (concat_name + tmp_len + 1, wrapper); + + if (check_executable (concat_name)) + return concat_name; + XFREE (concat_name); + return NULL; +} + +char * +chase_symlinks (const char *pathspec) +{ +#ifndef S_ISLNK + return xstrdup (pathspec); +#else + char buf[LT_PATHMAX]; + struct stat s; + char *tmp_pathspec = xstrdup (pathspec); + char *p; + int has_symlinks = 0; + while (strlen (tmp_pathspec) && !has_symlinks) + { + LTWRAPPER_DEBUGPRINTF (("checking path component for symlinks: %s\n", + tmp_pathspec)); + if (lstat (tmp_pathspec, &s) == 0) + { + if (S_ISLNK (s.st_mode) != 0) + { + has_symlinks = 1; + break; + } + + /* search backwards for last DIR_SEPARATOR */ + p = tmp_pathspec + strlen (tmp_pathspec) - 1; + while ((p > tmp_pathspec) && (!IS_DIR_SEPARATOR (*p))) + p--; + if ((p == tmp_pathspec) && (!IS_DIR_SEPARATOR (*p))) + { + /* no more DIR_SEPARATORS left */ + break; + } + *p = '\0'; + } + else + { + char *errstr = strerror (errno); + lt_fatal ("Error accessing file %s (%s)", tmp_pathspec, errstr); + } + } + XFREE (tmp_pathspec); + + if (!has_symlinks) + { + return xstrdup (pathspec); + } + + tmp_pathspec = realpath (pathspec, buf); + if (tmp_pathspec == 0) + { + lt_fatal ("Could not follow symlinks for %s", pathspec); + } + return xstrdup (tmp_pathspec); +#endif +} + +char * +strendzap (char *str, const char *pat) +{ + size_t len, patlen; + + assert (str != NULL); + assert (pat != NULL); + + len = strlen (str); + patlen = strlen (pat); + + if (patlen <= len) + { + str += len - patlen; + if (strcmp (str, pat) == 0) + *str = '\0'; + } + return str; +} + +static void +lt_error_core (int exit_status, const char *mode, + const char *message, va_list ap) +{ + fprintf (stderr, "%s: %s: ", program_name, mode); + vfprintf (stderr, message, ap); + fprintf (stderr, ".\n"); + + if (exit_status >= 0) + exit (exit_status); +} + +void +lt_fatal (const char *message, ...) +{ + va_list ap; + va_start (ap, message); + lt_error_core (EXIT_FAILURE, "FATAL", message, ap); + va_end (ap); +} +EOF +} +# end: func_emit_cwrapperexe_src + +# func_mode_link arg... +func_mode_link () +{ + $opt_debug + case $host in + *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2*) + # It is impossible to link a dll without this setting, and + # we shouldn't force the makefile maintainer to figure out + # which system we are compiling for in order to pass an extra + # flag for every libtool invocation. 
+ # allow_undefined=no + + # FIXME: Unfortunately, there are problems with the above when trying + # to make a dll which has undefined symbols, in which case not + # even a static library is built. For now, we need to specify + # -no-undefined on the libtool link line when we can be certain + # that all symbols are satisfied, otherwise we get a static library. + allow_undefined=yes + ;; + *) + allow_undefined=yes + ;; + esac + libtool_args=$nonopt + base_compile="$nonopt $@" + compile_command=$nonopt + finalize_command=$nonopt + + compile_rpath= + finalize_rpath= + compile_shlibpath= + finalize_shlibpath= + convenience= + old_convenience= + deplibs= + old_deplibs= + compiler_flags= + linker_flags= + dllsearchpath= + lib_search_path=`pwd` + inst_prefix_dir= + new_inherited_linker_flags= + + avoid_version=no + dlfiles= + dlprefiles= + dlself=no + export_dynamic=no + export_symbols= + export_symbols_regex= + generated= + libobjs= + ltlibs= + module=no + no_install=no + objs= + non_pic_objects= + precious_files_regex= + prefer_static_libs=no + preload=no + prev= + prevarg= + release= + rpath= + xrpath= + perm_rpath= + temp_rpath= + thread_safe=no + vinfo= + vinfo_number=no + weak_libs= + single_module="${wl}-single_module" + func_infer_tag $base_compile + + # We need to know -static, to get the right output filenames. + for arg + do + case $arg in + -shared) + test "$build_libtool_libs" != yes && \ + func_fatal_configuration "can not build a shared library" + build_old_libs=no + break + ;; + -all-static | -static | -static-libtool-libs) + case $arg in + -all-static) + if test "$build_libtool_libs" = yes && test -z "$link_static_flag"; then + func_warning "complete static linking is impossible in this configuration" + fi + if test -n "$link_static_flag"; then + dlopen_self=$dlopen_self_static + fi + prefer_static_libs=yes + ;; + -static) + if test -z "$pic_flag" && test -n "$link_static_flag"; then + dlopen_self=$dlopen_self_static + fi + prefer_static_libs=built + ;; + -static-libtool-libs) + if test -z "$pic_flag" && test -n "$link_static_flag"; then + dlopen_self=$dlopen_self_static + fi + prefer_static_libs=yes + ;; + esac + build_libtool_libs=no + build_old_libs=yes + break + ;; + esac + done + + # See if our shared archives depend on static archives. + test -n "$old_archive_from_new_cmds" && build_old_libs=yes + + # Go through the arguments, transforming them on the way. + while test "$#" -gt 0; do + arg="$1" + shift + func_quote_for_eval "$arg" + qarg=$func_quote_for_eval_unquoted_result + func_append libtool_args " $func_quote_for_eval_result" + + # If the previous option needs an argument, assign it. + if test -n "$prev"; then + case $prev in + output) + func_append compile_command " @OUTPUT@" + func_append finalize_command " @OUTPUT@" + ;; + esac + + case $prev in + dlfiles|dlprefiles) + if test "$preload" = no; then + # Add the symbol object into the linking commands. + func_append compile_command " @SYMFILE@" + func_append finalize_command " @SYMFILE@" + preload=yes + fi + case $arg in + *.la | *.lo) ;; # We handle these cases below. 
+ force) + if test "$dlself" = no; then + dlself=needless + export_dynamic=yes + fi + prev= + continue + ;; + self) + if test "$prev" = dlprefiles; then + dlself=yes + elif test "$prev" = dlfiles && test "$dlopen_self" != yes; then + dlself=yes + else + dlself=needless + export_dynamic=yes + fi + prev= + continue + ;; + *) + if test "$prev" = dlfiles; then + dlfiles="$dlfiles $arg" + else + dlprefiles="$dlprefiles $arg" + fi + prev= + continue + ;; + esac + ;; + expsyms) + export_symbols="$arg" + test -f "$arg" \ + || func_fatal_error "symbol file \`$arg' does not exist" + prev= + continue + ;; + expsyms_regex) + export_symbols_regex="$arg" + prev= + continue + ;; + framework) + case $host in + *-*-darwin*) + case "$deplibs " in + *" $qarg.ltframework "*) ;; + *) deplibs="$deplibs $qarg.ltframework" # this is fixed later + ;; + esac + ;; + esac + prev= + continue + ;; + inst_prefix) + inst_prefix_dir="$arg" + prev= + continue + ;; + objectlist) + if test -f "$arg"; then + save_arg=$arg + moreargs= + for fil in `cat "$save_arg"` + do +# moreargs="$moreargs $fil" + arg=$fil + # A libtool-controlled object. + + # Check to see that this really is a libtool object. + if func_lalib_unsafe_p "$arg"; then + pic_object= + non_pic_object= + + # Read the .lo file + func_source "$arg" + + if test -z "$pic_object" || + test -z "$non_pic_object" || + test "$pic_object" = none && + test "$non_pic_object" = none; then + func_fatal_error "cannot find name of object for \`$arg'" + fi + + # Extract subdirectory from the argument. + func_dirname "$arg" "/" "" + xdir="$func_dirname_result" + + if test "$pic_object" != none; then + # Prepend the subdirectory the object is found in. + pic_object="$xdir$pic_object" + + if test "$prev" = dlfiles; then + if test "$build_libtool_libs" = yes && test "$dlopen_support" = yes; then + dlfiles="$dlfiles $pic_object" + prev= + continue + else + # If libtool objects are unsupported, then we need to preload. + prev=dlprefiles + fi + fi + + # CHECK ME: I think I busted this. -Ossama + if test "$prev" = dlprefiles; then + # Preload the old-style object. + dlprefiles="$dlprefiles $pic_object" + prev= + fi + + # A PIC object. + func_append libobjs " $pic_object" + arg="$pic_object" + fi + + # Non-PIC object. + if test "$non_pic_object" != none; then + # Prepend the subdirectory the object is found in. + non_pic_object="$xdir$non_pic_object" + + # A standard non-PIC object + func_append non_pic_objects " $non_pic_object" + if test -z "$pic_object" || test "$pic_object" = none ; then + arg="$non_pic_object" + fi + else + # If the PIC object exists, use it instead. + # $xdir was prepended to $pic_object above. + non_pic_object="$pic_object" + func_append non_pic_objects " $non_pic_object" + fi + else + # Only an error if not doing a dry-run. + if $opt_dry_run; then + # Extract subdirectory from the argument. + func_dirname "$arg" "/" "" + xdir="$func_dirname_result" + + func_lo2o "$arg" + pic_object=$xdir$objdir/$func_lo2o_result + non_pic_object=$xdir$func_lo2o_result + func_append libobjs " $pic_object" + func_append non_pic_objects " $non_pic_object" + else + func_fatal_error "\`$arg' is not a valid libtool object" + fi + fi + done + else + func_fatal_error "link input file \`$arg' does not exist" + fi + arg=$save_arg + prev= + continue + ;; + precious_regex) + precious_files_regex="$arg" + prev= + continue + ;; + release) + release="-$arg" + prev= + continue + ;; + rpath | xrpath) + # We need an absolute path. 
+ case $arg in + [\\/]* | [A-Za-z]:[\\/]*) ;; + *) + func_fatal_error "only absolute run-paths are allowed" + ;; + esac + if test "$prev" = rpath; then + case "$rpath " in + *" $arg "*) ;; + *) rpath="$rpath $arg" ;; + esac + else + case "$xrpath " in + *" $arg "*) ;; + *) xrpath="$xrpath $arg" ;; + esac + fi + prev= + continue + ;; + shrext) + shrext_cmds="$arg" + prev= + continue + ;; + weak) + weak_libs="$weak_libs $arg" + prev= + continue + ;; + xcclinker) + linker_flags="$linker_flags $qarg" + compiler_flags="$compiler_flags $qarg" + prev= + func_append compile_command " $qarg" + func_append finalize_command " $qarg" + continue + ;; + xcompiler) + compiler_flags="$compiler_flags $qarg" + prev= + func_append compile_command " $qarg" + func_append finalize_command " $qarg" + continue + ;; + xlinker) + linker_flags="$linker_flags $qarg" + compiler_flags="$compiler_flags $wl$qarg" + prev= + func_append compile_command " $wl$qarg" + func_append finalize_command " $wl$qarg" + continue + ;; + *) + eval "$prev=\"\$arg\"" + prev= + continue + ;; + esac + fi # test -n "$prev" + + prevarg="$arg" + + case $arg in + -all-static) + if test -n "$link_static_flag"; then + # See comment for -static flag below, for more details. + func_append compile_command " $link_static_flag" + func_append finalize_command " $link_static_flag" + fi + continue + ;; + + -allow-undefined) + # FIXME: remove this flag sometime in the future. + func_fatal_error "\`-allow-undefined' must not be used because it is the default" + ;; + + -avoid-version) + avoid_version=yes + continue + ;; + + -dlopen) + prev=dlfiles + continue + ;; + + -dlpreopen) + prev=dlprefiles + continue + ;; + + -export-dynamic) + export_dynamic=yes + continue + ;; + + -export-symbols | -export-symbols-regex) + if test -n "$export_symbols" || test -n "$export_symbols_regex"; then + func_fatal_error "more than one -exported-symbols argument is not allowed" + fi + if test "X$arg" = "X-export-symbols"; then + prev=expsyms + else + prev=expsyms_regex + fi + continue + ;; + + -framework) + prev=framework + continue + ;; + + -inst-prefix-dir) + prev=inst_prefix + continue + ;; + + # The native IRIX linker understands -LANG:*, -LIST:* and -LNO:* + # so, if we see these flags be careful not to treat them like -L + -L[A-Z][A-Z]*:*) + case $with_gcc/$host in + no/*-*-irix* | /*-*-irix*) + func_append compile_command " $arg" + func_append finalize_command " $arg" + ;; + esac + continue + ;; + + -L*) + func_stripname '-L' '' "$arg" + dir=$func_stripname_result + # We need an absolute path. 
+ case $dir in + [\\/]* | [A-Za-z]:[\\/]*) ;; + *) + absdir=`cd "$dir" && pwd` + test -z "$absdir" && \ + func_fatal_error "cannot determine absolute directory name of \`$dir'" + dir="$absdir" + ;; + esac + case "$deplibs " in + *" -L$dir "*) ;; + *) + deplibs="$deplibs -L$dir" + lib_search_path="$lib_search_path $dir" + ;; + esac + case $host in + *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2*) + testbindir=`$ECHO "X$dir" | $Xsed -e 's*/lib$*/bin*'` + case :$dllsearchpath: in + *":$dir:"*) ;; + *) dllsearchpath="$dllsearchpath:$dir";; + esac + case :$dllsearchpath: in + *":$testbindir:"*) ;; + *) dllsearchpath="$dllsearchpath:$testbindir";; + esac + ;; + esac + continue + ;; + + -l*) + if test "X$arg" = "X-lc" || test "X$arg" = "X-lm"; then + case $host in + *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-beos*) + # These systems don't actually have a C or math library (as such) + continue + ;; + *-*-os2*) + # These systems don't actually have a C library (as such) + test "X$arg" = "X-lc" && continue + ;; + *-*-openbsd* | *-*-freebsd* | *-*-dragonfly* | *-*-bitrig*) + # Do not include libc due to us having libc/libc_r. + test "X$arg" = "X-lc" && continue + ;; + *-*-rhapsody* | *-*-darwin1.[012]) + # Rhapsody C and math libraries are in the System framework + deplibs="$deplibs System.ltframework" + continue + ;; + *-*-sco3.2v5* | *-*-sco5v6*) + # Causes problems with __ctype + test "X$arg" = "X-lc" && continue + ;; + *-*-sysv4.2uw2* | *-*-sysv5* | *-*-unixware* | *-*-OpenUNIX*) + # Compiler inserts libc in the correct place for threads to work + test "X$arg" = "X-lc" && continue + ;; + esac + elif test "X$arg" = "X-lc_r"; then + case $host in + *-*-openbsd* | *-*-freebsd* | *-*-dragonfly*) + # Do not include libc_r directly, use -pthread flag. + continue + ;; + esac + fi + deplibs="$deplibs $arg" + continue + ;; + + -module) + module=yes + continue + ;; + + # Tru64 UNIX uses -model [arg] to determine the layout of C++ + # classes, name mangling, and exception handling. + # Darwin uses the -arch flag to determine output architecture. + -model|-arch|-isysroot) + compiler_flags="$compiler_flags $arg" + func_append compile_command " $arg" + func_append finalize_command " $arg" + prev=xcompiler + continue + ;; + + -mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe|-threads) + compiler_flags="$compiler_flags $arg" + func_append compile_command " $arg" + func_append finalize_command " $arg" + case "$new_inherited_linker_flags " in + *" $arg "*) ;; + * ) new_inherited_linker_flags="$new_inherited_linker_flags $arg" ;; + esac + continue + ;; + + -multi_module) + single_module="${wl}-multi_module" + continue + ;; + + -no-fast-install) + fast_install=no + continue + ;; + + -no-install) + case $host in + *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-*-darwin*) + # The PATH hackery in wrapper scripts is required on Windows + # and Darwin in order for the loader to find any dlls it needs. + func_warning "\`-no-install' is ignored for $host" + func_warning "assuming \`-no-fast-install' instead" + fast_install=no + ;; + *) no_install=yes ;; + esac + continue + ;; + + -no-undefined) + allow_undefined=no + continue + ;; + + -objectlist) + prev=objectlist + continue + ;; + + -o) prev=output ;; + + -precious-files-regex) + prev=precious_regex + continue + ;; + + -release) + prev=release + continue + ;; + + -rpath) + prev=rpath + continue + ;; + + -R) + prev=xrpath + continue + ;; + + -R*) + func_stripname '-R' '' "$arg" + dir=$func_stripname_result + # We need an absolute path. 
+ case $dir in + [\\/]* | [A-Za-z]:[\\/]*) ;; + *) + func_fatal_error "only absolute run-paths are allowed" + ;; + esac + case "$xrpath " in + *" $dir "*) ;; + *) xrpath="$xrpath $dir" ;; + esac + continue + ;; + + -shared) + # The effects of -shared are defined in a previous loop. + continue + ;; + + -shrext) + prev=shrext + continue + ;; + + -static | -static-libtool-libs) + # The effects of -static are defined in a previous loop. + # We used to do the same as -all-static on platforms that + # didn't have a PIC flag, but the assumption that the effects + # would be equivalent was wrong. It would break on at least + # Digital Unix and AIX. + continue + ;; + + -thread-safe) + thread_safe=yes + continue + ;; + + -version-info) + prev=vinfo + continue + ;; + + -version-number) + prev=vinfo + vinfo_number=yes + continue + ;; + + -weak) + prev=weak + continue + ;; + + -Wc,*) + func_stripname '-Wc,' '' "$arg" + args=$func_stripname_result + arg= + save_ifs="$IFS"; IFS=',' + for flag in $args; do + IFS="$save_ifs" + func_quote_for_eval "$flag" + arg="$arg $wl$func_quote_for_eval_result" + compiler_flags="$compiler_flags $func_quote_for_eval_result" + done + IFS="$save_ifs" + func_stripname ' ' '' "$arg" + arg=$func_stripname_result + ;; + + -Wl,*) + func_stripname '-Wl,' '' "$arg" + args=$func_stripname_result + arg= + save_ifs="$IFS"; IFS=',' + for flag in $args; do + IFS="$save_ifs" + func_quote_for_eval "$flag" + arg="$arg $wl$func_quote_for_eval_result" + compiler_flags="$compiler_flags $wl$func_quote_for_eval_result" + linker_flags="$linker_flags $func_quote_for_eval_result" + done + IFS="$save_ifs" + func_stripname ' ' '' "$arg" + arg=$func_stripname_result + ;; + + -Xcompiler) + prev=xcompiler + continue + ;; + + -Xlinker) + prev=xlinker + continue + ;; + + -XCClinker) + prev=xcclinker + continue + ;; + + # -msg_* for osf cc + -msg_*) + func_quote_for_eval "$arg" + arg="$func_quote_for_eval_result" + ;; + + # -64, -mips[0-9] enable 64-bit mode on the SGI compiler + # -r[0-9][0-9]* specifies the processor on the SGI compiler + # -xarch=*, -xtarget=* enable 64-bit mode on the Sun compiler + # +DA*, +DD* enable 64-bit mode on the HP compiler + # -q* pass through compiler args for the IBM compiler + # -m*, -t[45]*, -txscale* pass through architecture-specific + # compiler args for GCC + # -F/path gives path to uninstalled frameworks, gcc on darwin + # -p, -pg, --coverage, -fprofile-* pass through profiling flag for GCC + # @file GCC response files + -64|-mips[0-9]|-r[0-9][0-9]*|-xarch=*|-xtarget=*|+DA*|+DD*|-q*|-m*| \ + -t[45]*|-txscale*|-p|-pg|--coverage|-fprofile-*|-F*|@*) + func_quote_for_eval "$arg" + arg="$func_quote_for_eval_result" + func_append compile_command " $arg" + func_append finalize_command " $arg" + compiler_flags="$compiler_flags $arg" + continue + ;; + + # Some other compiler flag. + -* | +*) + func_quote_for_eval "$arg" + arg="$func_quote_for_eval_result" + ;; + + *.$objext) + # A standard object. + objs="$objs $arg" + ;; + + *.lo) + # A libtool-controlled object. + + # Check to see that this really is a libtool object. + if func_lalib_unsafe_p "$arg"; then + pic_object= + non_pic_object= + + # Read the .lo file + func_source "$arg" + + if test -z "$pic_object" || + test -z "$non_pic_object" || + test "$pic_object" = none && + test "$non_pic_object" = none; then + func_fatal_error "cannot find name of object for \`$arg'" + fi + + # Extract subdirectory from the argument. 
+ func_dirname "$arg" "/" "" + xdir="$func_dirname_result" + + if test "$pic_object" != none; then + # Prepend the subdirectory the object is found in. + pic_object="$xdir$pic_object" + + if test "$prev" = dlfiles; then + if test "$build_libtool_libs" = yes && test "$dlopen_support" = yes; then + dlfiles="$dlfiles $pic_object" + prev= + continue + else + # If libtool objects are unsupported, then we need to preload. + prev=dlprefiles + fi + fi + + # CHECK ME: I think I busted this. -Ossama + if test "$prev" = dlprefiles; then + # Preload the old-style object. + dlprefiles="$dlprefiles $pic_object" + prev= + fi + + # A PIC object. + func_append libobjs " $pic_object" + arg="$pic_object" + fi + + # Non-PIC object. + if test "$non_pic_object" != none; then + # Prepend the subdirectory the object is found in. + non_pic_object="$xdir$non_pic_object" + + # A standard non-PIC object + func_append non_pic_objects " $non_pic_object" + if test -z "$pic_object" || test "$pic_object" = none ; then + arg="$non_pic_object" + fi + else + # If the PIC object exists, use it instead. + # $xdir was prepended to $pic_object above. + non_pic_object="$pic_object" + func_append non_pic_objects " $non_pic_object" + fi + else + # Only an error if not doing a dry-run. + if $opt_dry_run; then + # Extract subdirectory from the argument. + func_dirname "$arg" "/" "" + xdir="$func_dirname_result" + + func_lo2o "$arg" + pic_object=$xdir$objdir/$func_lo2o_result + non_pic_object=$xdir$func_lo2o_result + func_append libobjs " $pic_object" + func_append non_pic_objects " $non_pic_object" + else + func_fatal_error "\`$arg' is not a valid libtool object" + fi + fi + ;; + + *.$libext) + # An archive. + deplibs="$deplibs $arg" + old_deplibs="$old_deplibs $arg" + continue + ;; + + *.la) + # A libtool-controlled library. + + if test "$prev" = dlfiles; then + # This library was specified with -dlopen. + dlfiles="$dlfiles $arg" + prev= + elif test "$prev" = dlprefiles; then + # The library was specified with -dlpreopen. + dlprefiles="$dlprefiles $arg" + prev= + else + deplibs="$deplibs $arg" + fi + continue + ;; + + # Some other compiler argument. + *) + # Unknown arguments in both finalize_command and compile_command need + # to be aesthetically quoted because they are evaled later. + func_quote_for_eval "$arg" + arg="$func_quote_for_eval_result" + ;; + esac # arg + + # Now actually substitute the argument into the commands. + if test -n "$arg"; then + func_append compile_command " $arg" + func_append finalize_command " $arg" + fi + done # argument parsing loop + + test -n "$prev" && \ + func_fatal_help "the \`$prevarg' option requires an argument" + + if test "$export_dynamic" = yes && test -n "$export_dynamic_flag_spec"; then + eval arg=\"$export_dynamic_flag_spec\" + func_append compile_command " $arg" + func_append finalize_command " $arg" + fi + + oldlibs= + # calculate the name of the file, without its directory + func_basename "$output" + outputname="$func_basename_result" + libobjs_save="$libobjs" + + if test -n "$shlibpath_var"; then + # get the directories listed in $shlibpath_var + eval shlib_search_path=\`\$ECHO \"X\${$shlibpath_var}\" \| \$Xsed -e \'s/:/ /g\'\` + else + shlib_search_path= + fi + eval sys_lib_search_path=\"$sys_lib_search_path_spec\" + eval sys_lib_dlsearch_path=\"$sys_lib_dlsearch_path_spec\" + + func_dirname "$output" "/" "" + output_objdir="$func_dirname_result$objdir" + # Create the object directory. 
+ func_mkdir_p "$output_objdir" + + # Determine the type of output + case $output in + "") + func_fatal_help "you must specify an output file" + ;; + *.$libext) linkmode=oldlib ;; + *.lo | *.$objext) linkmode=obj ;; + *.la) linkmode=lib ;; + *) linkmode=prog ;; # Anything else should be a program. + esac + + specialdeplibs= + + libs= + # Find all interdependent deplibs by searching for libraries + # that are linked more than once (e.g. -la -lb -la) + for deplib in $deplibs; do + if $opt_duplicate_deps ; then + case "$libs " in + *" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;; + esac + fi + libs="$libs $deplib" + done + + if test "$linkmode" = lib; then + libs="$predeps $libs $compiler_lib_search_path $postdeps" + + # Compute libraries that are listed more than once in $predeps + # $postdeps and mark them as special (i.e., whose duplicates are + # not to be eliminated). + pre_post_deps= + if $opt_duplicate_compiler_generated_deps; then + for pre_post_dep in $predeps $postdeps; do + case "$pre_post_deps " in + *" $pre_post_dep "*) specialdeplibs="$specialdeplibs $pre_post_deps" ;; + esac + pre_post_deps="$pre_post_deps $pre_post_dep" + done + fi + pre_post_deps= + fi + + deplibs= + newdependency_libs= + newlib_search_path= + need_relink=no # whether we're linking any uninstalled libtool libraries + notinst_deplibs= # not-installed libtool libraries + notinst_path= # paths that contain not-installed libtool libraries + + case $linkmode in + lib) + passes="conv dlpreopen link" + for file in $dlfiles $dlprefiles; do + case $file in + *.la) ;; + *) + func_fatal_help "libraries can \`-dlopen' only libtool libraries: $file" + ;; + esac + done + ;; + prog) + compile_deplibs= + finalize_deplibs= + alldeplibs=no + newdlfiles= + newdlprefiles= + passes="conv scan dlopen dlpreopen link" + ;; + *) passes="conv" + ;; + esac + + for pass in $passes; do + # The preopen pass in lib mode reverses $deplibs; put it back here + # so that -L comes before libs that need it for instance... 
+ if test "$linkmode,$pass" = "lib,link"; then + ## FIXME: Find the place where the list is rebuilt in the wrong + ## order, and fix it there properly + tmp_deplibs= + for deplib in $deplibs; do + tmp_deplibs="$deplib $tmp_deplibs" + done + deplibs="$tmp_deplibs" + fi + + if test "$linkmode,$pass" = "lib,link" || + test "$linkmode,$pass" = "prog,scan"; then + libs="$deplibs" + deplibs= + fi + if test "$linkmode" = prog; then + case $pass in + dlopen) libs="$dlfiles" ;; + dlpreopen) libs="$dlprefiles" ;; + link) libs="$deplibs %DEPLIBS% $dependency_libs" ;; + esac + fi + if test "$linkmode,$pass" = "lib,dlpreopen"; then + # Collect and forward deplibs of preopened libtool libs + for lib in $dlprefiles; do + # Ignore non-libtool-libs + dependency_libs= + case $lib in + *.la) func_source "$lib" ;; + esac + + # Collect preopened libtool deplibs, except any this library + # has declared as weak libs + for deplib in $dependency_libs; do + deplib_base=`$ECHO "X$deplib" | $Xsed -e "$basename"` + case " $weak_libs " in + *" $deplib_base "*) ;; + *) deplibs="$deplibs $deplib" ;; + esac + done + done + libs="$dlprefiles" + fi + if test "$pass" = dlopen; then + # Collect dlpreopened libraries + save_deplibs="$deplibs" + deplibs= + fi + + for deplib in $libs; do + lib= + found=no + case $deplib in + -mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe|-threads) + if test "$linkmode,$pass" = "prog,link"; then + compile_deplibs="$deplib $compile_deplibs" + finalize_deplibs="$deplib $finalize_deplibs" + else + compiler_flags="$compiler_flags $deplib" + if test "$linkmode" = lib ; then + case "$new_inherited_linker_flags " in + *" $deplib "*) ;; + * ) new_inherited_linker_flags="$new_inherited_linker_flags $deplib" ;; + esac + fi + fi + continue + ;; + -l*) + if test "$linkmode" != lib && test "$linkmode" != prog; then + func_warning "\`-l' is ignored for archives/objects" + continue + fi + func_stripname '-l' '' "$deplib" + name=$func_stripname_result + if test "$linkmode" = lib; then + searchdirs="$newlib_search_path $lib_search_path $compiler_lib_search_dirs $sys_lib_search_path $shlib_search_path" + else + searchdirs="$newlib_search_path $lib_search_path $sys_lib_search_path $shlib_search_path" + fi + for searchdir in $searchdirs; do + for search_ext in .la $std_shrext .so .a; do + # Search the libtool library + lib="$searchdir/lib${name}${search_ext}" + if test -f "$lib"; then + if test "$search_ext" = ".la"; then + found=yes + else + found=no + fi + break 2 + fi + done + done + if test "$found" != yes; then + # deplib doesn't seem to be a libtool library + if test "$linkmode,$pass" = "prog,link"; then + compile_deplibs="$deplib $compile_deplibs" + finalize_deplibs="$deplib $finalize_deplibs" + else + deplibs="$deplib $deplibs" + test "$linkmode" = lib && newdependency_libs="$deplib $newdependency_libs" + fi + continue + else # deplib is a libtool library + # If $allow_libtool_libs_with_static_runtimes && $deplib is a stdlib, + # We need to do some special things here, and not later. + if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then + case " $predeps $postdeps " in + *" $deplib "*) + if func_lalib_p "$lib"; then + library_names= + old_library= + func_source "$lib" + for l in $old_library $library_names; do + ll="$l" + done + if test "X$ll" = "X$old_library" ; then # only static version available + found=no + func_dirname "$lib" "" "." 
+ ladir="$func_dirname_result" + lib=$ladir/$old_library + if test "$linkmode,$pass" = "prog,link"; then + compile_deplibs="$deplib $compile_deplibs" + finalize_deplibs="$deplib $finalize_deplibs" + else + deplibs="$deplib $deplibs" + test "$linkmode" = lib && newdependency_libs="$deplib $newdependency_libs" + fi + continue + fi + fi + ;; + *) ;; + esac + fi + fi + ;; # -l + *.ltframework) + if test "$linkmode,$pass" = "prog,link"; then + compile_deplibs="$deplib $compile_deplibs" + finalize_deplibs="$deplib $finalize_deplibs" + else + deplibs="$deplib $deplibs" + if test "$linkmode" = lib ; then + case "$new_inherited_linker_flags " in + *" $deplib "*) ;; + * ) new_inherited_linker_flags="$new_inherited_linker_flags $deplib" ;; + esac + fi + fi + continue + ;; + -L*) + case $linkmode in + lib) + deplibs="$deplib $deplibs" + test "$pass" = conv && continue + newdependency_libs="$deplib $newdependency_libs" + func_stripname '-L' '' "$deplib" + newlib_search_path="$newlib_search_path $func_stripname_result" + ;; + prog) + if test "$pass" = conv; then + deplibs="$deplib $deplibs" + continue + fi + if test "$pass" = scan; then + deplibs="$deplib $deplibs" + else + compile_deplibs="$deplib $compile_deplibs" + finalize_deplibs="$deplib $finalize_deplibs" + fi + func_stripname '-L' '' "$deplib" + newlib_search_path="$newlib_search_path $func_stripname_result" + ;; + *) + func_warning "\`-L' is ignored for archives/objects" + ;; + esac # linkmode + continue + ;; # -L + -R*) + if test "$pass" = link; then + func_stripname '-R' '' "$deplib" + dir=$func_stripname_result + # Make sure the xrpath contains only unique directories. + case "$xrpath " in + *" $dir "*) ;; + *) xrpath="$xrpath $dir" ;; + esac + fi + deplibs="$deplib $deplibs" + continue + ;; + *.la) lib="$deplib" ;; + *.$libext) + if test "$pass" = conv; then + deplibs="$deplib $deplibs" + continue + fi + case $linkmode in + lib) + # Linking convenience modules into shared libraries is allowed, + # but linking other static libraries is non-portable. + case " $dlpreconveniencelibs " in + *" $deplib "*) ;; + *) + valid_a_lib=no + case $deplibs_check_method in + match_pattern*) + set dummy $deplibs_check_method; shift + match_pattern_regex=`expr "$deplibs_check_method" : "$1 \(.*\)"` + if eval "\$ECHO \"X$deplib\"" 2>/dev/null | $Xsed -e 10q \ + | $EGREP "$match_pattern_regex" > /dev/null; then + valid_a_lib=yes + fi + ;; + pass_all) + valid_a_lib=yes + ;; + esac + if test "$valid_a_lib" != yes; then + $ECHO + $ECHO "*** Warning: Trying to link with static lib archive $deplib." + $ECHO "*** I have the capability to make that library automatically link in when" + $ECHO "*** you link to this library. But I can only do this if you have a" + $ECHO "*** shared version of the library, which you do not appear to have" + $ECHO "*** because the file extensions .$libext of this argument makes me believe" + $ECHO "*** that it is just a static archive that I should not use here." + else + $ECHO + $ECHO "*** Warning: Linking the shared library $output against the" + $ECHO "*** static library $deplib is not portable!" 
+ deplibs="$deplib $deplibs" + fi + ;; + esac + continue + ;; + prog) + if test "$pass" != link; then + deplibs="$deplib $deplibs" + else + compile_deplibs="$deplib $compile_deplibs" + finalize_deplibs="$deplib $finalize_deplibs" + fi + continue + ;; + esac # linkmode + ;; # *.$libext + *.lo | *.$objext) + if test "$pass" = conv; then + deplibs="$deplib $deplibs" + elif test "$linkmode" = prog; then + if test "$pass" = dlpreopen || test "$dlopen_support" != yes || test "$build_libtool_libs" = no; then + # If there is no dlopen support or we're linking statically, + # we need to preload. + newdlprefiles="$newdlprefiles $deplib" + compile_deplibs="$deplib $compile_deplibs" + finalize_deplibs="$deplib $finalize_deplibs" + else + newdlfiles="$newdlfiles $deplib" + fi + fi + continue + ;; + %DEPLIBS%) + alldeplibs=yes + continue + ;; + esac # case $deplib + + if test "$found" = yes || test -f "$lib"; then : + else + func_fatal_error "cannot find the library \`$lib' or unhandled argument \`$deplib'" + fi + + # Check to see that this really is a libtool archive. + func_lalib_unsafe_p "$lib" \ + || func_fatal_error "\`$lib' is not a valid libtool archive" + + func_dirname "$lib" "" "." + ladir="$func_dirname_result" + + dlname= + dlopen= + dlpreopen= + libdir= + library_names= + old_library= + inherited_linker_flags= + # If the library was installed with an old release of libtool, + # it will not redefine variables installed, or shouldnotlink + installed=yes + shouldnotlink=no + avoidtemprpath= + + + # Read the .la file + func_source "$lib" + + # Convert "-framework foo" to "foo.ltframework" + if test -n "$inherited_linker_flags"; then + tmp_inherited_linker_flags=`$ECHO "X$inherited_linker_flags" | $Xsed -e 's/-framework \([^ $]*\)/\1.ltframework/g'` + for tmp_inherited_linker_flag in $tmp_inherited_linker_flags; do + case " $new_inherited_linker_flags " in + *" $tmp_inherited_linker_flag "*) ;; + *) new_inherited_linker_flags="$new_inherited_linker_flags $tmp_inherited_linker_flag";; + esac + done + fi + dependency_libs=`$ECHO "X $dependency_libs" | $Xsed -e 's% \([^ $]*\).ltframework% -framework \1%g'` + if test "$linkmode,$pass" = "lib,link" || + test "$linkmode,$pass" = "prog,scan" || + { test "$linkmode" != prog && test "$linkmode" != lib; }; then + test -n "$dlopen" && dlfiles="$dlfiles $dlopen" + test -n "$dlpreopen" && dlprefiles="$dlprefiles $dlpreopen" + fi + + if test "$pass" = conv; then + # Only check for convenience libraries + deplibs="$lib $deplibs" + if test -z "$libdir"; then + if test -z "$old_library"; then + func_fatal_error "cannot find name of link library for \`$lib'" + fi + # It is a libtool convenience library, so add in its objects. + convenience="$convenience $ladir/$objdir/$old_library" + old_convenience="$old_convenience $ladir/$objdir/$old_library" + elif test "$linkmode" != prog && test "$linkmode" != lib; then + func_fatal_error "\`$lib' is not a convenience library" + fi + tmp_libs= + for deplib in $dependency_libs; do + deplibs="$deplib $deplibs" + if $opt_duplicate_deps ; then + case "$tmp_libs " in + *" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;; + esac + fi + tmp_libs="$tmp_libs $deplib" + done + continue + fi # $pass = conv + + + # Get the name of the library we link against. + linklib= + for l in $old_library $library_names; do + linklib="$l" + done + if test -z "$linklib"; then + func_fatal_error "cannot find name of link library for \`$lib'" + fi + + # This library was specified with -dlopen. 
+ if test "$pass" = dlopen; then + if test -z "$libdir"; then + func_fatal_error "cannot -dlopen a convenience library: \`$lib'" + fi + if test -z "$dlname" || + test "$dlopen_support" != yes || + test "$build_libtool_libs" = no; then + # If there is no dlname, no dlopen support or we're linking + # statically, we need to preload. We also need to preload any + # dependent libraries so libltdl's deplib preloader doesn't + # bomb out in the load deplibs phase. + dlprefiles="$dlprefiles $lib $dependency_libs" + else + newdlfiles="$newdlfiles $lib" + fi + continue + fi # $pass = dlopen + + # We need an absolute path. + case $ladir in + [\\/]* | [A-Za-z]:[\\/]*) abs_ladir="$ladir" ;; + *) + abs_ladir=`cd "$ladir" && pwd` + if test -z "$abs_ladir"; then + func_warning "cannot determine absolute directory name of \`$ladir'" + func_warning "passing it literally to the linker, although it might fail" + abs_ladir="$ladir" + fi + ;; + esac + func_basename "$lib" + laname="$func_basename_result" + + # Find the relevant object directory and library name. + if test "X$installed" = Xyes; then + if test ! -f "$libdir/$linklib" && test -f "$abs_ladir/$linklib"; then + func_warning "library \`$lib' was moved." + dir="$ladir" + absdir="$abs_ladir" + libdir="$abs_ladir" + else + dir="$libdir" + absdir="$libdir" + fi + test "X$hardcode_automatic" = Xyes && avoidtemprpath=yes + else + if test ! -f "$ladir/$objdir/$linklib" && test -f "$abs_ladir/$linklib"; then + dir="$ladir" + absdir="$abs_ladir" + # Remove this search path later + notinst_path="$notinst_path $abs_ladir" + else + dir="$ladir/$objdir" + absdir="$abs_ladir/$objdir" + # Remove this search path later + notinst_path="$notinst_path $abs_ladir" + fi + fi # $installed = yes + func_stripname 'lib' '.la' "$laname" + name=$func_stripname_result + + # This library was specified with -dlpreopen. + if test "$pass" = dlpreopen; then + if test -z "$libdir" && test "$linkmode" = prog; then + func_fatal_error "only libraries may -dlpreopen a convenience library: \`$lib'" + fi + # Prefer using a static library (so that no silly _DYNAMIC symbols + # are required to link). + if test -n "$old_library"; then + newdlprefiles="$newdlprefiles $dir/$old_library" + # Keep a list of preopened convenience libraries to check + # that they are being used correctly in the link pass. + test -z "$libdir" && \ + dlpreconveniencelibs="$dlpreconveniencelibs $dir/$old_library" + # Otherwise, use the dlname, so that lt_dlopen finds it. + elif test -n "$dlname"; then + newdlprefiles="$newdlprefiles $dir/$dlname" + else + newdlprefiles="$newdlprefiles $dir/$linklib" + fi + fi # $pass = dlpreopen + + if test -z "$libdir"; then + # Link the convenience library + if test "$linkmode" = lib; then + deplibs="$dir/$old_library $deplibs" + elif test "$linkmode,$pass" = "prog,link"; then + compile_deplibs="$dir/$old_library $compile_deplibs" + finalize_deplibs="$dir/$old_library $finalize_deplibs" + else + deplibs="$lib $deplibs" # used for prog,scan pass + fi + continue + fi + + + if test "$linkmode" = prog && test "$pass" != link; then + newlib_search_path="$newlib_search_path $ladir" + deplibs="$lib $deplibs" + + linkalldeplibs=no + if test "$link_all_deplibs" != no || test -z "$library_names" || + test "$build_libtool_libs" = no; then + linkalldeplibs=yes + fi + + tmp_libs= + for deplib in $dependency_libs; do + case $deplib in + -L*) func_stripname '-L' '' "$deplib" + newlib_search_path="$newlib_search_path $func_stripname_result" + ;; + esac + # Need to link against all dependency_libs? 
+ if test "$linkalldeplibs" = yes; then + deplibs="$deplib $deplibs" + else + # Need to hardcode shared library paths + # or/and link against static libraries + newdependency_libs="$deplib $newdependency_libs" + fi + if $opt_duplicate_deps ; then + case "$tmp_libs " in + *" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;; + esac + fi + tmp_libs="$tmp_libs $deplib" + done # for deplib + continue + fi # $linkmode = prog... + + if test "$linkmode,$pass" = "prog,link"; then + if test -n "$library_names" && + { { test "$prefer_static_libs" = no || + test "$prefer_static_libs,$installed" = "built,yes"; } || + test -z "$old_library"; }; then + # We need to hardcode the library path + if test -n "$shlibpath_var" && test -z "$avoidtemprpath" ; then + # Make sure the rpath contains only unique directories. + case "$temp_rpath:" in + *"$absdir:"*) ;; + *) temp_rpath="$temp_rpath$absdir:" ;; + esac + fi + + # Hardcode the library path. + # Skip directories that are in the system default run-time + # search path. + case " $sys_lib_dlsearch_path " in + *" $absdir "*) ;; + *) + case "$compile_rpath " in + *" $absdir "*) ;; + *) compile_rpath="$compile_rpath $absdir" + esac + ;; + esac + case " $sys_lib_dlsearch_path " in + *" $libdir "*) ;; + *) + case "$finalize_rpath " in + *" $libdir "*) ;; + *) finalize_rpath="$finalize_rpath $libdir" + esac + ;; + esac + fi # $linkmode,$pass = prog,link... + + if test "$alldeplibs" = yes && + { test "$deplibs_check_method" = pass_all || + { test "$build_libtool_libs" = yes && + test -n "$library_names"; }; }; then + # We only need to search for static libraries + continue + fi + fi + + link_static=no # Whether the deplib will be linked statically + use_static_libs=$prefer_static_libs + if test "$use_static_libs" = built && test "$installed" = yes; then + use_static_libs=no + fi + if test -n "$library_names" && + { test "$use_static_libs" = no || test -z "$old_library"; }; then + case $host in + *cygwin* | *mingw*) + # No point in relinking DLLs because paths are not encoded + notinst_deplibs="$notinst_deplibs $lib" + need_relink=no + ;; + *) + if test "$installed" = no; then + notinst_deplibs="$notinst_deplibs $lib" + need_relink=yes + fi + ;; + esac + # This is a shared library + + # Warn about portability, can't link against -module's on some + # systems (darwin). Don't bleat about dlopened modules though! + dlopenmodule="" + for dlpremoduletest in $dlprefiles; do + if test "X$dlpremoduletest" = "X$lib"; then + dlopenmodule="$dlpremoduletest" + break + fi + done + if test -z "$dlopenmodule" && test "$shouldnotlink" = yes && test "$pass" = link; then + $ECHO + if test "$linkmode" = prog; then + $ECHO "*** Warning: Linking the executable $output against the loadable module" + else + $ECHO "*** Warning: Linking the shared library $output against the loadable module" + fi + $ECHO "*** $linklib is not portable!" + fi + if test "$linkmode" = lib && + test "$hardcode_into_libs" = yes; then + # Hardcode the library path. + # Skip directories that are in the system default run-time + # search path. 
+ case " $sys_lib_dlsearch_path " in + *" $absdir "*) ;; + *) + case "$compile_rpath " in + *" $absdir "*) ;; + *) compile_rpath="$compile_rpath $absdir" + esac + ;; + esac + case " $sys_lib_dlsearch_path " in + *" $libdir "*) ;; + *) + case "$finalize_rpath " in + *" $libdir "*) ;; + *) finalize_rpath="$finalize_rpath $libdir" + esac + ;; + esac + fi + + if test -n "$old_archive_from_expsyms_cmds"; then + # figure out the soname + set dummy $library_names + shift + realname="$1" + shift + libname=`eval "\\$ECHO \"$libname_spec\""` + # use dlname if we got it. it's perfectly good, no? + if test -n "$dlname"; then + soname="$dlname" + elif test -n "$soname_spec"; then + # bleh windows + case $host in + *cygwin* | mingw*) + func_arith $current - $age + major=$func_arith_result + versuffix="-$major" + ;; + esac + eval soname=\"$soname_spec\" + else + soname="$realname" + fi + + # Make a new name for the extract_expsyms_cmds to use + soroot="$soname" + func_basename "$soroot" + soname="$func_basename_result" + func_stripname 'lib' '.dll' "$soname" + newlib=libimp-$func_stripname_result.a + + # If the library has no export list, then create one now + if test -f "$output_objdir/$soname-def"; then : + else + func_verbose "extracting exported symbol list from \`$soname'" + func_execute_cmds "$extract_expsyms_cmds" 'exit $?' + fi + + # Create $newlib + if test -f "$output_objdir/$newlib"; then :; else + func_verbose "generating import library for \`$soname'" + func_execute_cmds "$old_archive_from_expsyms_cmds" 'exit $?' + fi + # make sure the library variables are pointing to the new library + dir=$output_objdir + linklib=$newlib + fi # test -n "$old_archive_from_expsyms_cmds" + + if test "$linkmode" = prog || test "$mode" != relink; then + add_shlibpath= + add_dir= + add= + lib_linked=yes + case $hardcode_action in + immediate | unsupported) + if test "$hardcode_direct" = no; then + add="$dir/$linklib" + case $host in + *-*-sco3.2v5.0.[024]*) add_dir="-L$dir" ;; + *-*-sysv4*uw2*) add_dir="-L$dir" ;; + *-*-sysv5OpenUNIX* | *-*-sysv5UnixWare7.[01].[10]* | \ + *-*-unixware7*) add_dir="-L$dir" ;; + *-*-darwin* ) + # if the lib is a (non-dlopened) module then we can not + # link against it, someone is ignoring the earlier warnings + if /usr/bin/file -L $add 2> /dev/null | + $GREP ": [^:]* bundle" >/dev/null ; then + if test "X$dlopenmodule" != "X$lib"; then + $ECHO "*** Warning: lib $linklib is a module, not a shared library" + if test -z "$old_library" ; then + $ECHO + $ECHO "*** And there doesn't seem to be a static archive available" + $ECHO "*** The link will probably fail, sorry" + else + add="$dir/$old_library" + fi + elif test -n "$old_library"; then + add="$dir/$old_library" + fi + fi + esac + elif test "$hardcode_minus_L" = no; then + case $host in + *-*-sunos*) add_shlibpath="$dir" ;; + esac + add_dir="-L$dir" + add="-l$name" + elif test "$hardcode_shlibpath_var" = no; then + add_shlibpath="$dir" + add="-l$name" + else + lib_linked=no + fi + ;; + relink) + if test "$hardcode_direct" = yes && + test "$hardcode_direct_absolute" = no; then + add="$dir/$linklib" + elif test "$hardcode_minus_L" = yes; then + add_dir="-L$dir" + # Try looking first in the location we're being installed to. 
+ if test -n "$inst_prefix_dir"; then + case $libdir in + [\\/]*) + add_dir="$add_dir -L$inst_prefix_dir$libdir" + ;; + esac + fi + add="-l$name" + elif test "$hardcode_shlibpath_var" = yes; then + add_shlibpath="$dir" + add="-l$name" + else + lib_linked=no + fi + ;; + *) lib_linked=no ;; + esac + + if test "$lib_linked" != yes; then + func_fatal_configuration "unsupported hardcode properties" + fi + + if test -n "$add_shlibpath"; then + case :$compile_shlibpath: in + *":$add_shlibpath:"*) ;; + *) compile_shlibpath="$compile_shlibpath$add_shlibpath:" ;; + esac + fi + if test "$linkmode" = prog; then + test -n "$add_dir" && compile_deplibs="$add_dir $compile_deplibs" + test -n "$add" && compile_deplibs="$add $compile_deplibs" + else + test -n "$add_dir" && deplibs="$add_dir $deplibs" + test -n "$add" && deplibs="$add $deplibs" + if test "$hardcode_direct" != yes && + test "$hardcode_minus_L" != yes && + test "$hardcode_shlibpath_var" = yes; then + case :$finalize_shlibpath: in + *":$libdir:"*) ;; + *) finalize_shlibpath="$finalize_shlibpath$libdir:" ;; + esac + fi + fi + fi + + if test "$linkmode" = prog || test "$mode" = relink; then + add_shlibpath= + add_dir= + add= + # Finalize command for both is simple: just hardcode it. + if test "$hardcode_direct" = yes && + test "$hardcode_direct_absolute" = no; then + add="$libdir/$linklib" + elif test "$hardcode_minus_L" = yes; then + add_dir="-L$libdir" + add="-l$name" + elif test "$hardcode_shlibpath_var" = yes; then + case :$finalize_shlibpath: in + *":$libdir:"*) ;; + *) finalize_shlibpath="$finalize_shlibpath$libdir:" ;; + esac + add="-l$name" + elif test "$hardcode_automatic" = yes; then + if test -n "$inst_prefix_dir" && + test -f "$inst_prefix_dir$libdir/$linklib" ; then + add="$inst_prefix_dir$libdir/$linklib" + else + add="$libdir/$linklib" + fi + else + # We cannot seem to hardcode it, guess we'll fake it. + add_dir="-L$libdir" + # Try looking first in the location we're being installed to. + if test -n "$inst_prefix_dir"; then + case $libdir in + [\\/]*) + add_dir="$add_dir -L$inst_prefix_dir$libdir" + ;; + esac + fi + add="-l$name" + fi + + if test "$linkmode" = prog; then + test -n "$add_dir" && finalize_deplibs="$add_dir $finalize_deplibs" + test -n "$add" && finalize_deplibs="$add $finalize_deplibs" + else + test -n "$add_dir" && deplibs="$add_dir $deplibs" + test -n "$add" && deplibs="$add $deplibs" + fi + fi + elif test "$linkmode" = prog; then + # Here we assume that one of hardcode_direct or hardcode_minus_L + # is not unsupported. This is valid on all known static and + # shared platforms. + if test "$hardcode_direct" != unsupported; then + test -n "$old_library" && linklib="$old_library" + compile_deplibs="$dir/$linklib $compile_deplibs" + finalize_deplibs="$dir/$linklib $finalize_deplibs" + else + compile_deplibs="-l$name -L$dir $compile_deplibs" + finalize_deplibs="-l$name -L$dir $finalize_deplibs" + fi + elif test "$build_libtool_libs" = yes; then + # Not a shared library + if test "$deplibs_check_method" != pass_all; then + # We're trying link a shared library against a static one + # but the system doesn't support it. + + # Just print a warning and add the library to dependency_libs so + # that the program can be linked against the static library. + $ECHO + $ECHO "*** Warning: This system can not link to static lib archive $lib." + $ECHO "*** I have the capability to make that library automatically link in when" + $ECHO "*** you link to this library. 
But I can only do this if you have a" + $ECHO "*** shared version of the library, which you do not appear to have." + if test "$module" = yes; then + $ECHO "*** But as you try to build a module library, libtool will still create " + $ECHO "*** a static module, that should work as long as the dlopening application" + $ECHO "*** is linked with the -dlopen flag to resolve symbols at runtime." + if test -z "$global_symbol_pipe"; then + $ECHO + $ECHO "*** However, this would only work if libtool was able to extract symbol" + $ECHO "*** lists from a program, using \`nm' or equivalent, but libtool could" + $ECHO "*** not find such a program. So, this module is probably useless." + $ECHO "*** \`nm' from GNU binutils and a full rebuild may help." + fi + if test "$build_old_libs" = no; then + build_libtool_libs=module + build_old_libs=yes + else + build_libtool_libs=no + fi + fi + else + deplibs="$dir/$old_library $deplibs" + link_static=yes + fi + fi # link shared/static library? + + if test "$linkmode" = lib; then + if test -n "$dependency_libs" && + { test "$hardcode_into_libs" != yes || + test "$build_old_libs" = yes || + test "$link_static" = yes; }; then + # Extract -R from dependency_libs + temp_deplibs= + for libdir in $dependency_libs; do + case $libdir in + -R*) func_stripname '-R' '' "$libdir" + temp_xrpath=$func_stripname_result + case " $xrpath " in + *" $temp_xrpath "*) ;; + *) xrpath="$xrpath $temp_xrpath";; + esac;; + *) temp_deplibs="$temp_deplibs $libdir";; + esac + done + dependency_libs="$temp_deplibs" + fi + + newlib_search_path="$newlib_search_path $absdir" + # Link against this library + test "$link_static" = no && newdependency_libs="$abs_ladir/$laname $newdependency_libs" + # ... and its dependency_libs + tmp_libs= + for deplib in $dependency_libs; do + newdependency_libs="$deplib $newdependency_libs" + if $opt_duplicate_deps ; then + case "$tmp_libs " in + *" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;; + esac + fi + tmp_libs="$tmp_libs $deplib" + done + + if test "$link_all_deplibs" != no; then + # Add the search paths of all dependency libraries + for deplib in $dependency_libs; do + case $deplib in + -L*) path="$deplib" ;; + *.la) + func_dirname "$deplib" "" "." + dir="$func_dirname_result" + # We need an absolute path. 
+ case $dir in + [\\/]* | [A-Za-z]:[\\/]*) absdir="$dir" ;; + *) + absdir=`cd "$dir" && pwd` + if test -z "$absdir"; then + func_warning "cannot determine absolute directory name of \`$dir'" + absdir="$dir" + fi + ;; + esac + if $GREP "^installed=no" $deplib > /dev/null; then + case $host in + *-*-darwin*) + depdepl= + eval deplibrary_names=`${SED} -n -e 's/^library_names=\(.*\)$/\1/p' $deplib` + if test -n "$deplibrary_names" ; then + for tmp in $deplibrary_names ; do + depdepl=$tmp + done + if test -f "$absdir/$objdir/$depdepl" ; then + depdepl="$absdir/$objdir/$depdepl" + darwin_install_name=`${OTOOL} -L $depdepl | awk '{if (NR == 2) {print $1;exit}}'` + if test -z "$darwin_install_name"; then + darwin_install_name=`${OTOOL64} -L $depdepl | awk '{if (NR == 2) {print $1;exit}}'` + fi + compiler_flags="$compiler_flags ${wl}-dylib_file ${wl}${darwin_install_name}:${depdepl}" + linker_flags="$linker_flags -dylib_file ${darwin_install_name}:${depdepl}" + path= + fi + fi + ;; + *) + path="-L$absdir/$objdir" + ;; + esac + else + eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $deplib` + test -z "$libdir" && \ + func_fatal_error "\`$deplib' is not a valid libtool archive" + test "$absdir" != "$libdir" && \ + func_warning "\`$deplib' seems to be moved" + + path="-L$absdir" + fi + ;; + esac + case " $deplibs " in + *" $path "*) ;; + *) deplibs="$path $deplibs" ;; + esac + done + fi # link_all_deplibs != no + fi # linkmode = lib + done # for deplib in $libs + if test "$pass" = link; then + if test "$linkmode" = "prog"; then + compile_deplibs="$new_inherited_linker_flags $compile_deplibs" + finalize_deplibs="$new_inherited_linker_flags $finalize_deplibs" + else + compiler_flags="$compiler_flags "`$ECHO "X $new_inherited_linker_flags" | $Xsed -e 's% \([^ $]*\).ltframework% -framework \1%g'` + fi + fi + dependency_libs="$newdependency_libs" + if test "$pass" = dlpreopen; then + # Link the dlpreopened libraries before other libraries + for deplib in $save_deplibs; do + deplibs="$deplib $deplibs" + done + fi + if test "$pass" != dlopen; then + if test "$pass" != conv; then + # Make sure lib_search_path contains only unique directories. + lib_search_path= + for dir in $newlib_search_path; do + case "$lib_search_path " in + *" $dir "*) ;; + *) lib_search_path="$lib_search_path $dir" ;; + esac + done + newlib_search_path= + fi + + if test "$linkmode,$pass" != "prog,link"; then + vars="deplibs" + else + vars="compile_deplibs finalize_deplibs" + fi + for var in $vars dependency_libs; do + # Add libraries to $var in reverse order + eval tmp_libs=\"\$$var\" + new_libs= + for deplib in $tmp_libs; do + # FIXME: Pedantically, this is the right thing to do, so + # that some nasty dependency loop isn't accidentally + # broken: + #new_libs="$deplib $new_libs" + # Pragmatically, this seems to cause very few problems in + # practice: + case $deplib in + -L*) new_libs="$deplib $new_libs" ;; + -R*) ;; + *) + # And here is the reason: when a library appears more + # than once as an explicit dependence of a library, or + # is implicitly linked in more than once by the + # compiler, it is considered special, and multiple + # occurrences thereof are not removed. Compare this + # with having the same library being listed as a + # dependency of multiple other libraries: in this case, + # we know (pedantically, we assume) the library does not + # need to be listed more than once, so we keep only the + # last copy. 
This is not always right, but it is rare + # enough that we require users that really mean to play + # such unportable linking tricks to link the library + # using -Wl,-lname, so that libtool does not consider it + # for duplicate removal. + case " $specialdeplibs " in + *" $deplib "*) new_libs="$deplib $new_libs" ;; + *) + case " $new_libs " in + *" $deplib "*) ;; + *) new_libs="$deplib $new_libs" ;; + esac + ;; + esac + ;; + esac + done + tmp_libs= + for deplib in $new_libs; do + case $deplib in + -L*) + case " $tmp_libs " in + *" $deplib "*) ;; + *) tmp_libs="$tmp_libs $deplib" ;; + esac + ;; + *) tmp_libs="$tmp_libs $deplib" ;; + esac + done + eval $var=\"$tmp_libs\" + done # for var + fi + # Last step: remove runtime libs from dependency_libs + # (they stay in deplibs) + tmp_libs= + for i in $dependency_libs ; do + case " $predeps $postdeps $compiler_lib_search_path " in + *" $i "*) + i="" + ;; + esac + if test -n "$i" ; then + tmp_libs="$tmp_libs $i" + fi + done + dependency_libs=$tmp_libs + done # for pass + if test "$linkmode" = prog; then + dlfiles="$newdlfiles" + fi + if test "$linkmode" = prog || test "$linkmode" = lib; then + dlprefiles="$newdlprefiles" + fi + + case $linkmode in + oldlib) + if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then + func_warning "\`-dlopen' is ignored for archives" + fi + + case " $deplibs" in + *\ -l* | *\ -L*) + func_warning "\`-l' and \`-L' are ignored for archives" ;; + esac + + test -n "$rpath" && \ + func_warning "\`-rpath' is ignored for archives" + + test -n "$xrpath" && \ + func_warning "\`-R' is ignored for archives" + + test -n "$vinfo" && \ + func_warning "\`-version-info/-version-number' is ignored for archives" + + test -n "$release" && \ + func_warning "\`-release' is ignored for archives" + + test -n "$export_symbols$export_symbols_regex" && \ + func_warning "\`-export-symbols' is ignored for archives" + + # Now set the variables for building old libraries. + build_libtool_libs=no + oldlibs="$output" + objs="$objs$old_deplibs" + ;; + + lib) + # Make sure we only generate libraries of the form `libNAME.la'. + case $outputname in + lib*) + func_stripname 'lib' '.la' "$outputname" + name=$func_stripname_result + eval shared_ext=\"$shrext_cmds\" + eval libname=\"$libname_spec\" + ;; + *) + test "$module" = no && \ + func_fatal_help "libtool library \`$output' must begin with \`lib'" + + if test "$need_lib_prefix" != no; then + # Add the "lib" prefix for modules if required + func_stripname '' '.la' "$outputname" + name=$func_stripname_result + eval shared_ext=\"$shrext_cmds\" + eval libname=\"$libname_spec\" + else + func_stripname '' '.la' "$outputname" + libname=$func_stripname_result + fi + ;; + esac + + if test -n "$objs"; then + if test "$deplibs_check_method" != pass_all; then + func_fatal_error "cannot build libtool library \`$output' from non-libtool objects on this host:$objs" + else + $ECHO + $ECHO "*** Warning: Linking the shared library $output against the non-libtool" + $ECHO "*** objects $objs is not portable!" + libobjs="$libobjs $objs" + fi + fi + + test "$dlself" != no && \ + func_warning "\`-dlopen self' is ignored for libtool libraries" + + set dummy $rpath + shift + test "$#" -gt 1 && \ + func_warning "ignoring multiple \`-rpath's for a libtool library" + + install_libdir="$1" + + oldlibs= + if test -z "$rpath"; then + if test "$build_libtool_libs" = yes; then + # Building a libtool convenience library. 
+ # Some compilers have problems with a `.al' extension so + # convenience libraries should have the same extension an + # archive normally would. + oldlibs="$output_objdir/$libname.$libext $oldlibs" + build_libtool_libs=convenience + build_old_libs=yes + fi + + test -n "$vinfo" && \ + func_warning "\`-version-info/-version-number' is ignored for convenience libraries" + + test -n "$release" && \ + func_warning "\`-release' is ignored for convenience libraries" + else + + # Parse the version information argument. + save_ifs="$IFS"; IFS=':' + set dummy $vinfo 0 0 0 + shift + IFS="$save_ifs" + + test -n "$7" && \ + func_fatal_help "too many parameters to \`-version-info'" + + # convert absolute version numbers to libtool ages + # this retains compatibility with .la files and attempts + # to make the code below a bit more comprehensible + + case $vinfo_number in + yes) + number_major="$1" + number_minor="$2" + number_revision="$3" + # + # There are really only two kinds -- those that + # use the current revision as the major version + # and those that subtract age and use age as + # a minor version. But, then there is irix + # which has an extra 1 added just for fun + # + case $version_type in + darwin|linux|osf|windows|none) + func_arith $number_major + $number_minor + current=$func_arith_result + age="$number_minor" + revision="$number_revision" + ;; + freebsd-aout|freebsd-elf|sunos) + current="$number_major" + revision="$number_minor" + age="0" + ;; + irix|nonstopux) + func_arith $number_major + $number_minor + current=$func_arith_result + age="$number_minor" + revision="$number_minor" + lt_irix_increment=no + ;; + esac + ;; + no) + current="$1" + revision="$2" + age="$3" + ;; + esac + + # Check that each of the things are valid numbers. + case $current in + 0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;; + *) + func_error "CURRENT \`$current' must be a nonnegative integer" + func_fatal_error "\`$vinfo' is not valid version information" + ;; + esac + + case $revision in + 0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;; + *) + func_error "REVISION \`$revision' must be a nonnegative integer" + func_fatal_error "\`$vinfo' is not valid version information" + ;; + esac + + case $age in + 0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;; + *) + func_error "AGE \`$age' must be a nonnegative integer" + func_fatal_error "\`$vinfo' is not valid version information" + ;; + esac + + if test "$age" -gt "$current"; then + func_error "AGE \`$age' is greater than the current interface number \`$current'" + func_fatal_error "\`$vinfo' is not valid version information" + fi + + # Calculate the version variables. + major= + versuffix= + verstring= + case $version_type in + none) ;; + + darwin) + # Like Linux, but with the current version available in + # verstring for coding it into the library header + func_arith $current - $age + major=.$func_arith_result + versuffix="$major.$age.$revision" + # Darwin ld doesn't like 0 for these options... 
+ func_arith $current + 1 + minor_current=$func_arith_result + xlcverstring="${wl}-compatibility_version ${wl}$minor_current ${wl}-current_version ${wl}$minor_current.$revision" + verstring="-compatibility_version $minor_current -current_version $minor_current.$revision" + ;; + + freebsd-aout) + major=".$current" + versuffix=".$current.$revision"; + ;; + + freebsd-elf) + major=".$current" + versuffix=".$current" + ;; + + irix | nonstopux) + if test "X$lt_irix_increment" = "Xno"; then + func_arith $current - $age + else + func_arith $current - $age + 1 + fi + major=$func_arith_result + + case $version_type in + nonstopux) verstring_prefix=nonstopux ;; + *) verstring_prefix=sgi ;; + esac + verstring="$verstring_prefix$major.$revision" + + # Add in all the interfaces that we are compatible with. + loop=$revision + while test "$loop" -ne 0; do + func_arith $revision - $loop + iface=$func_arith_result + func_arith $loop - 1 + loop=$func_arith_result + verstring="$verstring_prefix$major.$iface:$verstring" + done + + # Before this point, $major must not contain `.'. + major=.$major + versuffix="$major.$revision" + ;; + + linux) + func_arith $current - $age + major=.$func_arith_result + versuffix="$major.$age.$revision" + ;; + + osf) + func_arith $current - $age + major=.$func_arith_result + versuffix=".$current.$age.$revision" + verstring="$current.$age.$revision" + + # Add in all the interfaces that we are compatible with. + loop=$age + while test "$loop" -ne 0; do + func_arith $current - $loop + iface=$func_arith_result + func_arith $loop - 1 + loop=$func_arith_result + verstring="$verstring:${iface}.0" + done + + # Make executables depend on our current version. + verstring="$verstring:${current}.0" + ;; + + qnx) + major=".$current" + versuffix=".$current" + ;; + + sunos) + major=".$current" + versuffix=".$current.$revision" + ;; + + windows) + # Use '-' rather than '.', since we only want one + # extension on DOS 8.3 filesystems. + func_arith $current - $age + major=$func_arith_result + versuffix="-$major" + ;; + + *) + func_fatal_configuration "unknown library version type \`$version_type'" + ;; + esac + + # Clear the version info if we defaulted, and they specified a release. + if test -z "$vinfo" && test -n "$release"; then + major= + case $version_type in + darwin) + # we can't check for "0.0" in archive_cmds due to quoting + # problems, so we reset it completely + verstring= + ;; + *) + verstring="0.0" + ;; + esac + if test "$need_version" = no; then + versuffix= + else + versuffix=".0.0" + fi + fi + + # Remove version info from name if versioning should be avoided + if test "$avoid_version" = yes && test "$need_version" = no; then + major= + versuffix= + verstring="" + fi + + # Check to see if the archive will have undefined symbols. + if test "$allow_undefined" = yes; then + if test "$allow_undefined_flag" = unsupported; then + func_warning "undefined symbols not allowed in $host shared libraries" + build_libtool_libs=no + build_old_libs=yes + fi + else + # Don't allow undefined symbols. + allow_undefined_flag="$no_undefined_flag" + fi + + fi + + func_generate_dlsyms "$libname" "$libname" "yes" + libobjs="$libobjs $symfileobj" + test "X$libobjs" = "X " && libobjs= + + if test "$mode" != relink; then + # Remove our outputs, but don't remove object files since they + # may have been created when compiling PIC objects. 
+ removelist= + tempremovelist=`$ECHO "$output_objdir/*"` + for p in $tempremovelist; do + case $p in + *.$objext) + ;; + $output_objdir/$outputname | $output_objdir/$libname.* | $output_objdir/${libname}${release}.*) + if test "X$precious_files_regex" != "X"; then + if $ECHO "$p" | $EGREP -e "$precious_files_regex" >/dev/null 2>&1 + then + continue + fi + fi + removelist="$removelist $p" + ;; + *) ;; + esac + done + test -n "$removelist" && \ + func_show_eval "${RM}r \$removelist" + fi + + # Now set the variables for building old libraries. + if test "$build_old_libs" = yes && test "$build_libtool_libs" != convenience ; then + oldlibs="$oldlibs $output_objdir/$libname.$libext" + + # Transform .lo files to .o files. + oldobjs="$objs "`$ECHO "X$libobjs" | $SP2NL | $Xsed -e '/\.'${libext}'$/d' -e "$lo2o" | $NL2SP` + fi + + # Eliminate all temporary directories. + #for path in $notinst_path; do + # lib_search_path=`$ECHO "X$lib_search_path " | $Xsed -e "s% $path % %g"` + # deplibs=`$ECHO "X$deplibs " | $Xsed -e "s% -L$path % %g"` + # dependency_libs=`$ECHO "X$dependency_libs " | $Xsed -e "s% -L$path % %g"` + #done + + if test -n "$xrpath"; then + # If the user specified any rpath flags, then add them. + temp_xrpath= + for libdir in $xrpath; do + temp_xrpath="$temp_xrpath -R$libdir" + case "$finalize_rpath " in + *" $libdir "*) ;; + *) finalize_rpath="$finalize_rpath $libdir" ;; + esac + done + if test "$hardcode_into_libs" != yes || test "$build_old_libs" = yes; then + dependency_libs="$temp_xrpath $dependency_libs" + fi + fi + + # Make sure dlfiles contains only unique files that won't be dlpreopened + old_dlfiles="$dlfiles" + dlfiles= + for lib in $old_dlfiles; do + case " $dlprefiles $dlfiles " in + *" $lib "*) ;; + *) dlfiles="$dlfiles $lib" ;; + esac + done + + # Make sure dlprefiles contains only unique files + old_dlprefiles="$dlprefiles" + dlprefiles= + for lib in $old_dlprefiles; do + case "$dlprefiles " in + *" $lib "*) ;; + *) dlprefiles="$dlprefiles $lib" ;; + esac + done + + if test "$build_libtool_libs" = yes; then + if test -n "$rpath"; then + case $host in + *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-*-beos*) + # these systems don't actually have a c library (as such)! + ;; + *-*-rhapsody* | *-*-darwin1.[012]) + # Rhapsody C library is in the System framework + deplibs="$deplibs System.ltframework" + ;; + *-*-netbsd*) + # Don't link with libc until the a.out ld.so is fixed. + ;; + *-*-openbsd* | *-*-freebsd* | *-*-dragonfly*) + # Do not include libc due to us having libc/libc_r. + ;; + *-*-sco3.2v5* | *-*-sco5v6*) + # Causes problems with __ctype + ;; + *-*-sysv4.2uw2* | *-*-sysv5* | *-*-unixware* | *-*-OpenUNIX*) + # Compiler inserts libc in the correct place for threads to work + ;; + *) + # Add libc to deplibs on all other systems if necessary. + if test "$build_libtool_need_lc" = "yes"; then + deplibs="$deplibs -lc" + fi + ;; + esac + fi + + # Transform deplibs into only deplibs that can be linked in shared. + name_save=$name + libname_save=$libname + release_save=$release + versuffix_save=$versuffix + major_save=$major + # I'm not sure if I'm treating the release correctly. I think + # release should show up in the -l (ie -lgmp5) so we don't want to + # add it in twice. Is that correct? + release="" + versuffix="" + major="" + newdeplibs= + droppeddeps=no + case $deplibs_check_method in + pass_all) + # Don't check for shared/static. Everything works. + # This might be a little naive. We might want to check + # whether the library exists or not. 
But this is on + # osf3 & osf4 and I'm not really sure... Just + # implementing what was already the behavior. + newdeplibs=$deplibs + ;; + test_compile) + # This code stresses the "libraries are programs" paradigm to its + # limits. Maybe even breaks it. We compile a program, linking it + # against the deplibs as a proxy for the library. Then we can check + # whether they linked in statically or dynamically with ldd. + $opt_dry_run || $RM conftest.c + cat > conftest.c </dev/null` + for potent_lib in $potential_libs; do + # Follow soft links. + if ls -lLd "$potent_lib" 2>/dev/null | + $GREP " -> " >/dev/null; then + continue + fi + # The statement above tries to avoid entering an + # endless loop below, in case of cyclic links. + # We might still enter an endless loop, since a link + # loop can be closed while we follow links, + # but so what? + potlib="$potent_lib" + while test -h "$potlib" 2>/dev/null; do + potliblink=`ls -ld $potlib | ${SED} 's/.* -> //'` + case $potliblink in + [\\/]* | [A-Za-z]:[\\/]*) potlib="$potliblink";; + *) potlib=`$ECHO "X$potlib" | $Xsed -e 's,[^/]*$,,'`"$potliblink";; + esac + done + if eval $file_magic_cmd \"\$potlib\" 2>/dev/null | + $SED -e 10q | + $EGREP "$file_magic_regex" > /dev/null; then + newdeplibs="$newdeplibs $a_deplib" + a_deplib="" + break 2 + fi + done + done + fi + if test -n "$a_deplib" ; then + droppeddeps=yes + $ECHO + $ECHO "*** Warning: linker path does not have real file for library $a_deplib." + $ECHO "*** I have the capability to make that library automatically link in when" + $ECHO "*** you link to this library. But I can only do this if you have a" + $ECHO "*** shared version of the library, which you do not appear to have" + $ECHO "*** because I did check the linker path looking for a file starting" + if test -z "$potlib" ; then + $ECHO "*** with $libname but no candidates were found. (...for file magic test)" + else + $ECHO "*** with $libname and none of the candidates passed a file format test" + $ECHO "*** using a file magic. Last file checked: $potlib" + fi + fi + ;; + *) + # Add a -L argument. + newdeplibs="$newdeplibs $a_deplib" + ;; + esac + done # Gone through all deplibs. + ;; + match_pattern*) + set dummy $deplibs_check_method; shift + match_pattern_regex=`expr "$deplibs_check_method" : "$1 \(.*\)"` + for a_deplib in $deplibs; do + case $a_deplib in + -l*) + func_stripname -l '' "$a_deplib" + name=$func_stripname_result + if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then + case " $predeps $postdeps " in + *" $a_deplib "*) + newdeplibs="$newdeplibs $a_deplib" + a_deplib="" + ;; + esac + fi + if test -n "$a_deplib" ; then + libname=`eval "\\$ECHO \"$libname_spec\""` + for i in $lib_search_path $sys_lib_search_path $shlib_search_path; do + potential_libs=`ls $i/$libname[.-]* 2>/dev/null` + for potent_lib in $potential_libs; do + potlib="$potent_lib" # see symlink-check above in file_magic test + if eval "\$ECHO \"X$potent_lib\"" 2>/dev/null | $Xsed -e 10q | \ + $EGREP "$match_pattern_regex" > /dev/null; then + newdeplibs="$newdeplibs $a_deplib" + a_deplib="" + break 2 + fi + done + done + fi + if test -n "$a_deplib" ; then + droppeddeps=yes + $ECHO + $ECHO "*** Warning: linker path does not have real file for library $a_deplib." + $ECHO "*** I have the capability to make that library automatically link in when" + $ECHO "*** you link to this library. 
But I can only do this if you have a" + $ECHO "*** shared version of the library, which you do not appear to have" + $ECHO "*** because I did check the linker path looking for a file starting" + if test -z "$potlib" ; then + $ECHO "*** with $libname but no candidates were found. (...for regex pattern test)" + else + $ECHO "*** with $libname and none of the candidates passed a file format test" + $ECHO "*** using a regex pattern. Last file checked: $potlib" + fi + fi + ;; + *) + # Add a -L argument. + newdeplibs="$newdeplibs $a_deplib" + ;; + esac + done # Gone through all deplibs. + ;; + none | unknown | *) + newdeplibs="" + tmp_deplibs=`$ECHO "X $deplibs" | $Xsed \ + -e 's/ -lc$//' -e 's/ -[LR][^ ]*//g'` + if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then + for i in $predeps $postdeps ; do + # can't use Xsed below, because $i might contain '/' + tmp_deplibs=`$ECHO "X $tmp_deplibs" | $Xsed -e "s,$i,,"` + done + fi + if $ECHO "X $tmp_deplibs" | $Xsed -e 's/[ ]//g' | + $GREP . >/dev/null; then + $ECHO + if test "X$deplibs_check_method" = "Xnone"; then + $ECHO "*** Warning: inter-library dependencies are not supported in this platform." + else + $ECHO "*** Warning: inter-library dependencies are not known to be supported." + fi + $ECHO "*** All declared inter-library dependencies are being dropped." + droppeddeps=yes + fi + ;; + esac + versuffix=$versuffix_save + major=$major_save + release=$release_save + libname=$libname_save + name=$name_save + + case $host in + *-*-rhapsody* | *-*-darwin1.[012]) + # On Rhapsody replace the C library with the System framework + newdeplibs=`$ECHO "X $newdeplibs" | $Xsed -e 's/ -lc / System.ltframework /'` + ;; + esac + + if test "$droppeddeps" = yes; then + if test "$module" = yes; then + $ECHO + $ECHO "*** Warning: libtool could not satisfy all declared inter-library" + $ECHO "*** dependencies of module $libname. Therefore, libtool will create" + $ECHO "*** a static module, that should work as long as the dlopening" + $ECHO "*** application is linked with the -dlopen flag." + if test -z "$global_symbol_pipe"; then + $ECHO + $ECHO "*** However, this would only work if libtool was able to extract symbol" + $ECHO "*** lists from a program, using \`nm' or equivalent, but libtool could" + $ECHO "*** not find such a program. So, this module is probably useless." + $ECHO "*** \`nm' from GNU binutils and a full rebuild may help." + fi + if test "$build_old_libs" = no; then + oldlibs="$output_objdir/$libname.$libext" + build_libtool_libs=module + build_old_libs=yes + else + build_libtool_libs=no + fi + else + $ECHO "*** The inter-library dependencies that have been dropped here will be" + $ECHO "*** automatically added whenever a program is linked with this library" + $ECHO "*** or is declared to -dlopen it." + + if test "$allow_undefined" = no; then + $ECHO + $ECHO "*** Since this library must not contain undefined symbols," + $ECHO "*** because either the platform does not support them or" + $ECHO "*** it was explicitly requested with -no-undefined," + $ECHO "*** libtool will only create a static version of it." + if test "$build_old_libs" = no; then + oldlibs="$output_objdir/$libname.$libext" + build_libtool_libs=module + build_old_libs=yes + else + build_libtool_libs=no + fi + fi + fi + fi + # Done checking deplibs! 
+ deplibs=$newdeplibs + fi + # Time to change all our "foo.ltframework" stuff back to "-framework foo" + case $host in + *-*-darwin*) + newdeplibs=`$ECHO "X $newdeplibs" | $Xsed -e 's% \([^ $]*\).ltframework% -framework \1%g'` + new_inherited_linker_flags=`$ECHO "X $new_inherited_linker_flags" | $Xsed -e 's% \([^ $]*\).ltframework% -framework \1%g'` + deplibs=`$ECHO "X $deplibs" | $Xsed -e 's% \([^ $]*\).ltframework% -framework \1%g'` + ;; + esac + + # move library search paths that coincide with paths to not yet + # installed libraries to the beginning of the library search list + new_libs= + for path in $notinst_path; do + case " $new_libs " in + *" -L$path/$objdir "*) ;; + *) + case " $deplibs " in + *" -L$path/$objdir "*) + new_libs="$new_libs -L$path/$objdir" ;; + esac + ;; + esac + done + for deplib in $deplibs; do + case $deplib in + -L*) + case " $new_libs " in + *" $deplib "*) ;; + *) new_libs="$new_libs $deplib" ;; + esac + ;; + *) new_libs="$new_libs $deplib" ;; + esac + done + deplibs="$new_libs" + + # All the library-specific variables (install_libdir is set above). + library_names= + old_library= + dlname= + + # Test again, we may have decided not to build it any more + if test "$build_libtool_libs" = yes; then + if test "$hardcode_into_libs" = yes; then + # Hardcode the library paths + hardcode_libdirs= + dep_rpath= + rpath="$finalize_rpath" + test "$mode" != relink && rpath="$compile_rpath$rpath" + for libdir in $rpath; do + if test -n "$hardcode_libdir_flag_spec"; then + if test -n "$hardcode_libdir_separator"; then + if test -z "$hardcode_libdirs"; then + hardcode_libdirs="$libdir" + else + # Just accumulate the unique libdirs. + case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in + *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*) + ;; + *) + hardcode_libdirs="$hardcode_libdirs$hardcode_libdir_separator$libdir" + ;; + esac + fi + else + eval flag=\"$hardcode_libdir_flag_spec\" + dep_rpath="$dep_rpath $flag" + fi + elif test -n "$runpath_var"; then + case "$perm_rpath " in + *" $libdir "*) ;; + *) perm_rpath="$perm_rpath $libdir" ;; + esac + fi + done + # Substitute the hardcoded libdirs into the rpath. + if test -n "$hardcode_libdir_separator" && + test -n "$hardcode_libdirs"; then + libdir="$hardcode_libdirs" + if test -n "$hardcode_libdir_flag_spec_ld"; then + eval dep_rpath=\"$hardcode_libdir_flag_spec_ld\" + else + eval dep_rpath=\"$hardcode_libdir_flag_spec\" + fi + fi + if test -n "$runpath_var" && test -n "$perm_rpath"; then + # We should set the runpath_var. + rpath= + for dir in $perm_rpath; do + rpath="$rpath$dir:" + done + eval "$runpath_var='$rpath\$$runpath_var'; export $runpath_var" + fi + test -n "$dep_rpath" && deplibs="$dep_rpath $deplibs" + fi + + shlibpath="$finalize_shlibpath" + test "$mode" != relink && shlibpath="$compile_shlibpath$shlibpath" + if test -n "$shlibpath"; then + eval "$shlibpath_var='$shlibpath\$$shlibpath_var'; export $shlibpath_var" + fi + + # Get the real and link names of the library. 
+ eval shared_ext=\"$shrext_cmds\" + eval library_names=\"$library_names_spec\" + set dummy $library_names + shift + realname="$1" + shift + + if test -n "$soname_spec"; then + eval soname=\"$soname_spec\" + else + soname="$realname" + fi + if test -z "$dlname"; then + dlname=$soname + fi + + lib="$output_objdir/$realname" + linknames= + for link + do + linknames="$linknames $link" + done + + # Use standard objects if they are pic + test -z "$pic_flag" && libobjs=`$ECHO "X$libobjs" | $SP2NL | $Xsed -e "$lo2o" | $NL2SP` + test "X$libobjs" = "X " && libobjs= + + delfiles= + if test -n "$export_symbols" && test -n "$include_expsyms"; then + $opt_dry_run || cp "$export_symbols" "$output_objdir/$libname.uexp" + export_symbols="$output_objdir/$libname.uexp" + delfiles="$delfiles $export_symbols" + fi + + orig_export_symbols= + case $host_os in + cygwin* | mingw*) + if test -n "$export_symbols" && test -z "$export_symbols_regex"; then + # exporting using user supplied symfile + if test "x`$SED 1q $export_symbols`" != xEXPORTS; then + # and it's NOT already a .def file. Must figure out + # which of the given symbols are data symbols and tag + # them as such. So, trigger use of export_symbols_cmds. + # export_symbols gets reassigned inside the "prepare + # the list of exported symbols" if statement, so the + # include_expsyms logic still works. + orig_export_symbols="$export_symbols" + export_symbols= + always_export_symbols=yes + fi + fi + ;; + esac + + # Prepare the list of exported symbols + if test -z "$export_symbols"; then + if test "$always_export_symbols" = yes || test -n "$export_symbols_regex"; then + func_verbose "generating symbol list for \`$libname.la'" + export_symbols="$output_objdir/$libname.exp" + $opt_dry_run || $RM $export_symbols + cmds=$export_symbols_cmds + save_ifs="$IFS"; IFS='~' + for cmd in $cmds; do + IFS="$save_ifs" + eval cmd=\"$cmd\" + func_len " $cmd" + len=$func_len_result + if test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then + func_show_eval "$cmd" 'exit $?' + skipped_export=false + else + # The command line is too long to execute in one step. + func_verbose "using reloadable object file for export list..." + skipped_export=: + # Break out early, otherwise skipped_export may be + # set to false by a later but shorter cmd. + break + fi + done + IFS="$save_ifs" + if test -n "$export_symbols_regex" && test "X$skipped_export" != "X:"; then + func_show_eval '$EGREP -e "$export_symbols_regex" "$export_symbols" > "${export_symbols}T"' + func_show_eval '$MV "${export_symbols}T" "$export_symbols"' + fi + fi + fi + + if test -n "$export_symbols" && test -n "$include_expsyms"; then + tmp_export_symbols="$export_symbols" + test -n "$orig_export_symbols" && tmp_export_symbols="$orig_export_symbols" + $opt_dry_run || eval '$ECHO "X$include_expsyms" | $Xsed | $SP2NL >> "$tmp_export_symbols"' + fi + + if test "X$skipped_export" != "X:" && test -n "$orig_export_symbols"; then + # The given exports_symbols file has to be filtered, so filter it. + func_verbose "filter symbol list for \`$libname.la' to tag DATA exports" + # FIXME: $output_objdir/$libname.filter potentially contains lots of + # 's' commands which not all seds can handle. GNU sed should be fine + # though. Also, the filter scales superlinearly with the number of + # global variables. join(1) would be nice here, but unfortunately + # isn't a blessed tool. 
+ $opt_dry_run || $SED -e '/[ ,]DATA/!d;s,\(.*\)\([ \,].*\),s|^\1$|\1\2|,' < $export_symbols > $output_objdir/$libname.filter + delfiles="$delfiles $export_symbols $output_objdir/$libname.filter" + export_symbols=$output_objdir/$libname.def + $opt_dry_run || $SED -f $output_objdir/$libname.filter < $orig_export_symbols > $export_symbols + fi + + tmp_deplibs= + for test_deplib in $deplibs; do + case " $convenience " in + *" $test_deplib "*) ;; + *) + tmp_deplibs="$tmp_deplibs $test_deplib" + ;; + esac + done + deplibs="$tmp_deplibs" + + if test -n "$convenience"; then + if test -n "$whole_archive_flag_spec" && + test "$compiler_needs_object" = yes && + test -z "$libobjs"; then + # extract the archives, so we have objects to list. + # TODO: could optimize this to just extract one archive. + whole_archive_flag_spec= + fi + if test -n "$whole_archive_flag_spec"; then + save_libobjs=$libobjs + eval libobjs=\"\$libobjs $whole_archive_flag_spec\" + test "X$libobjs" = "X " && libobjs= + else + gentop="$output_objdir/${outputname}x" + generated="$generated $gentop" + + func_extract_archives $gentop $convenience + libobjs="$libobjs $func_extract_archives_result" + test "X$libobjs" = "X " && libobjs= + fi + fi + + if test "$thread_safe" = yes && test -n "$thread_safe_flag_spec"; then + eval flag=\"$thread_safe_flag_spec\" + linker_flags="$linker_flags $flag" + fi + + # Make a backup of the uninstalled library when relinking + if test "$mode" = relink; then + $opt_dry_run || eval '(cd $output_objdir && $RM ${realname}U && $MV $realname ${realname}U)' || exit $? + fi + + # Do each of the archive commands. + if test "$module" = yes && test -n "$module_cmds" ; then + if test -n "$export_symbols" && test -n "$module_expsym_cmds"; then + eval test_cmds=\"$module_expsym_cmds\" + cmds=$module_expsym_cmds + else + eval test_cmds=\"$module_cmds\" + cmds=$module_cmds + fi + else + if test -n "$export_symbols" && test -n "$archive_expsym_cmds"; then + eval test_cmds=\"$archive_expsym_cmds\" + cmds=$archive_expsym_cmds + else + eval test_cmds=\"$archive_cmds\" + cmds=$archive_cmds + fi + fi + + if test "X$skipped_export" != "X:" && + func_len " $test_cmds" && + len=$func_len_result && + test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then + : + else + # The command line is too long to link in one step, link piecewise + # or, if using GNU ld and skipped_export is not :, use a linker + # script. + + # Save the value of $output and $libobjs because we want to + # use them later. If we have whole_archive_flag_spec, we + # want to use save_libobjs as it was before + # whole_archive_flag_spec was expanded, because we can't + # assume the linker understands whole_archive_flag_spec. + # This may have to be revisited, in case too many + # convenience libraries get linked in and end up exceeding + # the spec. + if test -z "$convenience" || test -z "$whole_archive_flag_spec"; then + save_libobjs=$libobjs + fi + save_output=$output + output_la=`$ECHO "X$output" | $Xsed -e "$basename"` + + # Clear the reloadable object creation command queue and + # initialize k to one. 
+ test_cmds= + concat_cmds= + objlist= + last_robj= + k=1 + + if test -n "$save_libobjs" && test "X$skipped_export" != "X:" && test "$with_gnu_ld" = yes; then + output=${output_objdir}/${output_la}.lnkscript + func_verbose "creating GNU ld script: $output" + $ECHO 'INPUT (' > $output + for obj in $save_libobjs + do + $ECHO "$obj" >> $output + done + $ECHO ')' >> $output + delfiles="$delfiles $output" + elif test -n "$save_libobjs" && test "X$skipped_export" != "X:" && test "X$file_list_spec" != X; then + output=${output_objdir}/${output_la}.lnk + func_verbose "creating linker input file list: $output" + : > $output + set x $save_libobjs + shift + firstobj= + if test "$compiler_needs_object" = yes; then + firstobj="$1 " + shift + fi + for obj + do + $ECHO "$obj" >> $output + done + delfiles="$delfiles $output" + output=$firstobj\"$file_list_spec$output\" + else + if test -n "$save_libobjs"; then + func_verbose "creating reloadable object files..." + output=$output_objdir/$output_la-${k}.$objext + eval test_cmds=\"$reload_cmds\" + func_len " $test_cmds" + len0=$func_len_result + len=$len0 + + # Loop over the list of objects to be linked. + for obj in $save_libobjs + do + func_len " $obj" + func_arith $len + $func_len_result + len=$func_arith_result + if test "X$objlist" = X || + test "$len" -lt "$max_cmd_len"; then + func_append objlist " $obj" + else + # The command $test_cmds is almost too long, add a + # command to the queue. + if test "$k" -eq 1 ; then + # The first file doesn't have a previous command to add. + eval concat_cmds=\"$reload_cmds $objlist $last_robj\" + else + # All subsequent reloadable object files will link in + # the last one created. + eval concat_cmds=\"\$concat_cmds~$reload_cmds $objlist $last_robj~\$RM $last_robj\" + fi + last_robj=$output_objdir/$output_la-${k}.$objext + func_arith $k + 1 + k=$func_arith_result + output=$output_objdir/$output_la-${k}.$objext + objlist=$obj + func_len " $last_robj" + func_arith $len0 + $func_len_result + len=$func_arith_result + fi + done + # Handle the remaining objects by creating one last + # reloadable object file. All subsequent reloadable object + # files will link in the last one created. + test -z "$concat_cmds" || concat_cmds=$concat_cmds~ + eval concat_cmds=\"\${concat_cmds}$reload_cmds $objlist $last_robj\" + if test -n "$last_robj"; then + eval concat_cmds=\"\${concat_cmds}~\$RM $last_robj\" + fi + delfiles="$delfiles $output" + + else + output= + fi + + if ${skipped_export-false}; then + func_verbose "generating symbol list for \`$libname.la'" + export_symbols="$output_objdir/$libname.exp" + $opt_dry_run || $RM $export_symbols + libobjs=$output + # Append the command to create the export file. + test -z "$concat_cmds" || concat_cmds=$concat_cmds~ + eval concat_cmds=\"\$concat_cmds$export_symbols_cmds\" + if test -n "$last_robj"; then + eval concat_cmds=\"\$concat_cmds~\$RM $last_robj\" + fi + fi + + test -n "$save_libobjs" && + func_verbose "creating a temporary reloadable object file: $output" + + # Loop through the commands generated above and execute them. + save_ifs="$IFS"; IFS='~' + for cmd in $concat_cmds; do + IFS="$save_ifs" + $opt_silent || { + func_quote_for_expand "$cmd" + eval "func_echo $func_quote_for_expand_result" + } + $opt_dry_run || eval "$cmd" || { + lt_exit=$? 
+ + # Restore the uninstalled library and exit + if test "$mode" = relink; then + ( cd "$output_objdir" && \ + $RM "${realname}T" && \ + $MV "${realname}U" "$realname" ) + fi + + exit $lt_exit + } + done + IFS="$save_ifs" + + if test -n "$export_symbols_regex" && ${skipped_export-false}; then + func_show_eval '$EGREP -e "$export_symbols_regex" "$export_symbols" > "${export_symbols}T"' + func_show_eval '$MV "${export_symbols}T" "$export_symbols"' + fi + fi + + if ${skipped_export-false}; then + if test -n "$export_symbols" && test -n "$include_expsyms"; then + tmp_export_symbols="$export_symbols" + test -n "$orig_export_symbols" && tmp_export_symbols="$orig_export_symbols" + $opt_dry_run || eval '$ECHO "X$include_expsyms" | $Xsed | $SP2NL >> "$tmp_export_symbols"' + fi + + if test -n "$orig_export_symbols"; then + # The given exports_symbols file has to be filtered, so filter it. + func_verbose "filter symbol list for \`$libname.la' to tag DATA exports" + # FIXME: $output_objdir/$libname.filter potentially contains lots of + # 's' commands which not all seds can handle. GNU sed should be fine + # though. Also, the filter scales superlinearly with the number of + # global variables. join(1) would be nice here, but unfortunately + # isn't a blessed tool. + $opt_dry_run || $SED -e '/[ ,]DATA/!d;s,\(.*\)\([ \,].*\),s|^\1$|\1\2|,' < $export_symbols > $output_objdir/$libname.filter + delfiles="$delfiles $export_symbols $output_objdir/$libname.filter" + export_symbols=$output_objdir/$libname.def + $opt_dry_run || $SED -f $output_objdir/$libname.filter < $orig_export_symbols > $export_symbols + fi + fi + + libobjs=$output + # Restore the value of output. + output=$save_output + + if test -n "$convenience" && test -n "$whole_archive_flag_spec"; then + eval libobjs=\"\$libobjs $whole_archive_flag_spec\" + test "X$libobjs" = "X " && libobjs= + fi + # Expand the library linking commands again to reset the + # value of $libobjs for piecewise linking. + + # Do each of the archive commands. + if test "$module" = yes && test -n "$module_cmds" ; then + if test -n "$export_symbols" && test -n "$module_expsym_cmds"; then + cmds=$module_expsym_cmds + else + cmds=$module_cmds + fi + else + if test -n "$export_symbols" && test -n "$archive_expsym_cmds"; then + cmds=$archive_expsym_cmds + else + cmds=$archive_cmds + fi + fi + fi + + if test -n "$delfiles"; then + # Append the command to remove temporary files to $cmds. + eval cmds=\"\$cmds~\$RM $delfiles\" + fi + + # Add any objects from preloaded convenience libraries + if test -n "$dlprefiles"; then + gentop="$output_objdir/${outputname}x" + generated="$generated $gentop" + + func_extract_archives $gentop $dlprefiles + libobjs="$libobjs $func_extract_archives_result" + test "X$libobjs" = "X " && libobjs= + fi + + save_ifs="$IFS"; IFS='~' + for cmd in $cmds; do + IFS="$save_ifs" + eval cmd=\"$cmd\" + $opt_silent || { + func_quote_for_expand "$cmd" + eval "func_echo $func_quote_for_expand_result" + } + $opt_dry_run || eval "$cmd" || { + lt_exit=$? + + # Restore the uninstalled library and exit + if test "$mode" = relink; then + ( cd "$output_objdir" && \ + $RM "${realname}T" && \ + $MV "${realname}U" "$realname" ) + fi + + exit $lt_exit + } + done + IFS="$save_ifs" + + # Restore the uninstalled library and exit + if test "$mode" = relink; then + $opt_dry_run || eval '(cd $output_objdir && $RM ${realname}T && $MV $realname ${realname}T && $MV ${realname}U $realname)' || exit $? 
+ + if test -n "$convenience"; then + if test -z "$whole_archive_flag_spec"; then + func_show_eval '${RM}r "$gentop"' + fi + fi + + exit $EXIT_SUCCESS + fi + + # Create links to the real library. + for linkname in $linknames; do + if test "$realname" != "$linkname"; then + func_show_eval '(cd "$output_objdir" && $RM "$linkname" && $LN_S "$realname" "$linkname")' 'exit $?' + fi + done + + # If -module or -export-dynamic was specified, set the dlname. + if test "$module" = yes || test "$export_dynamic" = yes; then + # On all known operating systems, these are identical. + dlname="$soname" + fi + fi + ;; + + obj) + if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then + func_warning "\`-dlopen' is ignored for objects" + fi + + case " $deplibs" in + *\ -l* | *\ -L*) + func_warning "\`-l' and \`-L' are ignored for objects" ;; + esac + + test -n "$rpath" && \ + func_warning "\`-rpath' is ignored for objects" + + test -n "$xrpath" && \ + func_warning "\`-R' is ignored for objects" + + test -n "$vinfo" && \ + func_warning "\`-version-info' is ignored for objects" + + test -n "$release" && \ + func_warning "\`-release' is ignored for objects" + + case $output in + *.lo) + test -n "$objs$old_deplibs" && \ + func_fatal_error "cannot build library object \`$output' from non-libtool objects" + + libobj=$output + func_lo2o "$libobj" + obj=$func_lo2o_result + ;; + *) + libobj= + obj="$output" + ;; + esac + + # Delete the old objects. + $opt_dry_run || $RM $obj $libobj + + # Objects from convenience libraries. This assumes + # single-version convenience libraries. Whenever we create + # different ones for PIC/non-PIC, this we'll have to duplicate + # the extraction. + reload_conv_objs= + gentop= + # reload_cmds runs $LD directly, so let us get rid of + # -Wl from whole_archive_flag_spec and hope we can get by with + # turning comma into space.. + wl= + + if test -n "$convenience"; then + if test -n "$whole_archive_flag_spec"; then + eval tmp_whole_archive_flags=\"$whole_archive_flag_spec\" + reload_conv_objs=$reload_objs\ `$ECHO "X$tmp_whole_archive_flags" | $Xsed -e 's|,| |g'` + else + gentop="$output_objdir/${obj}x" + generated="$generated $gentop" + + func_extract_archives $gentop $convenience + reload_conv_objs="$reload_objs $func_extract_archives_result" + fi + fi + + # Create the old-style object. + reload_objs="$objs$old_deplibs "`$ECHO "X$libobjs" | $SP2NL | $Xsed -e '/\.'${libext}$'/d' -e '/\.lib$/d' -e "$lo2o" | $NL2SP`" $reload_conv_objs" ### testsuite: skip nested quoting test + + output="$obj" + func_execute_cmds "$reload_cmds" 'exit $?' + + # Exit if we aren't doing a library object file. + if test -z "$libobj"; then + if test -n "$gentop"; then + func_show_eval '${RM}r "$gentop"' + fi + + exit $EXIT_SUCCESS + fi + + if test "$build_libtool_libs" != yes; then + if test -n "$gentop"; then + func_show_eval '${RM}r "$gentop"' + fi + + # Create an invalid libtool object if no PIC, so that we don't + # accidentally link it into a program. + # $show "echo timestamp > $libobj" + # $opt_dry_run || eval "echo timestamp > $libobj" || exit $? + exit $EXIT_SUCCESS + fi + + if test -n "$pic_flag" || test "$pic_mode" != default; then + # Only do commands if we really have different PIC objects. + reload_objs="$libobjs $reload_conv_objs" + output="$libobj" + func_execute_cmds "$reload_cmds" 'exit $?' 
+ fi + + if test -n "$gentop"; then + func_show_eval '${RM}r "$gentop"' + fi + + exit $EXIT_SUCCESS + ;; + + prog) + case $host in + *cygwin*) func_stripname '' '.exe' "$output" + output=$func_stripname_result.exe;; + esac + test -n "$vinfo" && \ + func_warning "\`-version-info' is ignored for programs" + + test -n "$release" && \ + func_warning "\`-release' is ignored for programs" + + test "$preload" = yes \ + && test "$dlopen_support" = unknown \ + && test "$dlopen_self" = unknown \ + && test "$dlopen_self_static" = unknown && \ + func_warning "\`LT_INIT([dlopen])' not used. Assuming no dlopen support." + + case $host in + *-*-rhapsody* | *-*-darwin1.[012]) + # On Rhapsody replace the C library is the System framework + compile_deplibs=`$ECHO "X $compile_deplibs" | $Xsed -e 's/ -lc / System.ltframework /'` + finalize_deplibs=`$ECHO "X $finalize_deplibs" | $Xsed -e 's/ -lc / System.ltframework /'` + ;; + esac + + case $host in + *-*-darwin*) + # Don't allow lazy linking, it breaks C++ global constructors + # But is supposedly fixed on 10.4 or later (yay!). + if test "$tagname" = CXX ; then + case ${MACOSX_DEPLOYMENT_TARGET-10.0} in + 10.[0123]) + compile_command="$compile_command ${wl}-bind_at_load" + finalize_command="$finalize_command ${wl}-bind_at_load" + ;; + esac + fi + # Time to change all our "foo.ltframework" stuff back to "-framework foo" + compile_deplibs=`$ECHO "X $compile_deplibs" | $Xsed -e 's% \([^ $]*\).ltframework% -framework \1%g'` + finalize_deplibs=`$ECHO "X $finalize_deplibs" | $Xsed -e 's% \([^ $]*\).ltframework% -framework \1%g'` + ;; + esac + + + # move library search paths that coincide with paths to not yet + # installed libraries to the beginning of the library search list + new_libs= + for path in $notinst_path; do + case " $new_libs " in + *" -L$path/$objdir "*) ;; + *) + case " $compile_deplibs " in + *" -L$path/$objdir "*) + new_libs="$new_libs -L$path/$objdir" ;; + esac + ;; + esac + done + for deplib in $compile_deplibs; do + case $deplib in + -L*) + case " $new_libs " in + *" $deplib "*) ;; + *) new_libs="$new_libs $deplib" ;; + esac + ;; + *) new_libs="$new_libs $deplib" ;; + esac + done + compile_deplibs="$new_libs" + + + compile_command="$compile_command $compile_deplibs" + finalize_command="$finalize_command $finalize_deplibs" + + if test -n "$rpath$xrpath"; then + # If the user specified any rpath flags, then add them. + for libdir in $rpath $xrpath; do + # This is the magic to use -rpath. + case "$finalize_rpath " in + *" $libdir "*) ;; + *) finalize_rpath="$finalize_rpath $libdir" ;; + esac + done + fi + + # Now hardcode the library paths + rpath= + hardcode_libdirs= + for libdir in $compile_rpath $finalize_rpath; do + if test -n "$hardcode_libdir_flag_spec"; then + if test -n "$hardcode_libdir_separator"; then + if test -z "$hardcode_libdirs"; then + hardcode_libdirs="$libdir" + else + # Just accumulate the unique libdirs. 
+ case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in + *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*) + ;; + *) + hardcode_libdirs="$hardcode_libdirs$hardcode_libdir_separator$libdir" + ;; + esac + fi + else + eval flag=\"$hardcode_libdir_flag_spec\" + rpath="$rpath $flag" + fi + elif test -n "$runpath_var"; then + case "$perm_rpath " in + *" $libdir "*) ;; + *) perm_rpath="$perm_rpath $libdir" ;; + esac + fi + case $host in + *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2*) + testbindir=`${ECHO} "$libdir" | ${SED} -e 's*/lib$*/bin*'` + case :$dllsearchpath: in + *":$libdir:"*) ;; + *) dllsearchpath="$dllsearchpath:$libdir";; + esac + case :$dllsearchpath: in + *":$testbindir:"*) ;; + *) dllsearchpath="$dllsearchpath:$testbindir";; + esac + ;; + esac + done + # Substitute the hardcoded libdirs into the rpath. + if test -n "$hardcode_libdir_separator" && + test -n "$hardcode_libdirs"; then + libdir="$hardcode_libdirs" + eval rpath=\" $hardcode_libdir_flag_spec\" + fi + compile_rpath="$rpath" + + rpath= + hardcode_libdirs= + for libdir in $finalize_rpath; do + if test -n "$hardcode_libdir_flag_spec"; then + if test -n "$hardcode_libdir_separator"; then + if test -z "$hardcode_libdirs"; then + hardcode_libdirs="$libdir" + else + # Just accumulate the unique libdirs. + case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in + *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*) + ;; + *) + hardcode_libdirs="$hardcode_libdirs$hardcode_libdir_separator$libdir" + ;; + esac + fi + else + eval flag=\"$hardcode_libdir_flag_spec\" + rpath="$rpath $flag" + fi + elif test -n "$runpath_var"; then + case "$finalize_perm_rpath " in + *" $libdir "*) ;; + *) finalize_perm_rpath="$finalize_perm_rpath $libdir" ;; + esac + fi + done + # Substitute the hardcoded libdirs into the rpath. + if test -n "$hardcode_libdir_separator" && + test -n "$hardcode_libdirs"; then + libdir="$hardcode_libdirs" + eval rpath=\" $hardcode_libdir_flag_spec\" + fi + finalize_rpath="$rpath" + + if test -n "$libobjs" && test "$build_old_libs" = yes; then + # Transform all the library objects into standard objects. + compile_command=`$ECHO "X$compile_command" | $SP2NL | $Xsed -e "$lo2o" | $NL2SP` + finalize_command=`$ECHO "X$finalize_command" | $SP2NL | $Xsed -e "$lo2o" | $NL2SP` + fi + + func_generate_dlsyms "$outputname" "@PROGRAM@" "no" + + # template prelinking step + if test -n "$prelink_cmds"; then + func_execute_cmds "$prelink_cmds" 'exit $?' + fi + + wrappers_required=yes + case $host in + *cygwin* | *mingw* ) + if test "$build_libtool_libs" != yes; then + wrappers_required=no + fi + ;; + *) + if test "$need_relink" = no || test "$build_libtool_libs" != yes; then + wrappers_required=no + fi + ;; + esac + if test "$wrappers_required" = no; then + # Replace the output file specification. + compile_command=`$ECHO "X$compile_command" | $Xsed -e 's%@OUTPUT@%'"$output"'%g'` + link_command="$compile_command$compile_rpath" + + # We have no uninstalled library dependencies, so finalize right now. + exit_status=0 + func_show_eval "$link_command" 'exit_status=$?' + + # Delete the generated files. 
+ if test -f "$output_objdir/${outputname}S.${objext}"; then + func_show_eval '$RM "$output_objdir/${outputname}S.${objext}"' + fi + + exit $exit_status + fi + + if test -n "$compile_shlibpath$finalize_shlibpath"; then + compile_command="$shlibpath_var=\"$compile_shlibpath$finalize_shlibpath\$$shlibpath_var\" $compile_command" + fi + if test -n "$finalize_shlibpath"; then + finalize_command="$shlibpath_var=\"$finalize_shlibpath\$$shlibpath_var\" $finalize_command" + fi + + compile_var= + finalize_var= + if test -n "$runpath_var"; then + if test -n "$perm_rpath"; then + # We should set the runpath_var. + rpath= + for dir in $perm_rpath; do + rpath="$rpath$dir:" + done + compile_var="$runpath_var=\"$rpath\$$runpath_var\" " + fi + if test -n "$finalize_perm_rpath"; then + # We should set the runpath_var. + rpath= + for dir in $finalize_perm_rpath; do + rpath="$rpath$dir:" + done + finalize_var="$runpath_var=\"$rpath\$$runpath_var\" " + fi + fi + + if test "$no_install" = yes; then + # We don't need to create a wrapper script. + link_command="$compile_var$compile_command$compile_rpath" + # Replace the output file specification. + link_command=`$ECHO "X$link_command" | $Xsed -e 's%@OUTPUT@%'"$output"'%g'` + # Delete the old output file. + $opt_dry_run || $RM $output + # Link the executable and exit + func_show_eval "$link_command" 'exit $?' + exit $EXIT_SUCCESS + fi + + if test "$hardcode_action" = relink; then + # Fast installation is not supported + link_command="$compile_var$compile_command$compile_rpath" + relink_command="$finalize_var$finalize_command$finalize_rpath" + + func_warning "this platform does not like uninstalled shared libraries" + func_warning "\`$output' will be relinked during installation" + else + if test "$fast_install" != no; then + link_command="$finalize_var$compile_command$finalize_rpath" + if test "$fast_install" = yes; then + relink_command=`$ECHO "X$compile_var$compile_command$compile_rpath" | $Xsed -e 's%@OUTPUT@%\$progdir/\$file%g'` + else + # fast_install is set to needless + relink_command= + fi + else + link_command="$compile_var$compile_command$compile_rpath" + relink_command="$finalize_var$finalize_command$finalize_rpath" + fi + fi + + # Replace the output file specification. + link_command=`$ECHO "X$link_command" | $Xsed -e 's%@OUTPUT@%'"$output_objdir/$outputname"'%g'` + + # Delete the old output files. + $opt_dry_run || $RM $output $output_objdir/$outputname $output_objdir/lt-$outputname + + func_show_eval "$link_command" 'exit $?' + + # Now create the wrapper script. + func_verbose "creating $output" + + # Quote the relink command for shipping. + if test -n "$relink_command"; then + # Preserve any variables that may affect compiler behavior + for var in $variables_saved_for_relink; do + if eval test -z \"\${$var+set}\"; then + relink_command="{ test -z \"\${$var+set}\" || $lt_unset $var || { $var=; export $var; }; }; $relink_command" + elif eval var_value=\$$var; test -z "$var_value"; then + relink_command="$var=; export $var; $relink_command" + else + func_quote_for_eval "$var_value" + relink_command="$var=$func_quote_for_eval_result; export $var; $relink_command" + fi + done + relink_command="(cd `pwd`; $relink_command)" + relink_command=`$ECHO "X$relink_command" | $Xsed -e "$sed_quote_subst"` + fi + + # Quote $ECHO for shipping. 
+ if test "X$ECHO" = "X$SHELL $progpath --fallback-echo"; then + case $progpath in + [\\/]* | [A-Za-z]:[\\/]*) qecho="$SHELL $progpath --fallback-echo";; + *) qecho="$SHELL `pwd`/$progpath --fallback-echo";; + esac + qecho=`$ECHO "X$qecho" | $Xsed -e "$sed_quote_subst"` + else + qecho=`$ECHO "X$ECHO" | $Xsed -e "$sed_quote_subst"` + fi + + # Only actually do things if not in dry run mode. + $opt_dry_run || { + # win32 will think the script is a binary if it has + # a .exe suffix, so we strip it off here. + case $output in + *.exe) func_stripname '' '.exe' "$output" + output=$func_stripname_result ;; + esac + # test for cygwin because mv fails w/o .exe extensions + case $host in + *cygwin*) + exeext=.exe + func_stripname '' '.exe' "$outputname" + outputname=$func_stripname_result ;; + *) exeext= ;; + esac + case $host in + *cygwin* | *mingw* ) + func_dirname_and_basename "$output" "" "." + output_name=$func_basename_result + output_path=$func_dirname_result + cwrappersource="$output_path/$objdir/lt-$output_name.c" + cwrapper="$output_path/$output_name.exe" + $RM $cwrappersource $cwrapper + trap "$RM $cwrappersource $cwrapper; exit $EXIT_FAILURE" 1 2 15 + + func_emit_cwrapperexe_src > $cwrappersource + + # we should really use a build-platform specific compiler + # here, but OTOH, the wrappers (shell script and this C one) + # are only useful if you want to execute the "real" binary. + # Since the "real" binary is built for $host, then this + # wrapper might as well be built for $host, too. + $opt_dry_run || { + $LTCC $LTCFLAGS -o $cwrapper $cwrappersource + $STRIP $cwrapper + } + + # Now, create the wrapper script for func_source use: + func_ltwrapper_scriptname $cwrapper + $RM $func_ltwrapper_scriptname_result + trap "$RM $func_ltwrapper_scriptname_result; exit $EXIT_FAILURE" 1 2 15 + $opt_dry_run || { + # note: this script will not be executed, so do not chmod. + if test "x$build" = "x$host" ; then + $cwrapper --lt-dump-script > $func_ltwrapper_scriptname_result + else + func_emit_wrapper no > $func_ltwrapper_scriptname_result + fi + } + ;; + * ) + $RM $output + trap "$RM $output; exit $EXIT_FAILURE" 1 2 15 + + func_emit_wrapper no > $output + chmod +x $output + ;; + esac + } + exit $EXIT_SUCCESS + ;; + esac + + # See if we need to build an old-fashioned archive. + for oldlib in $oldlibs; do + + if test "$build_libtool_libs" = convenience; then + oldobjs="$libobjs_save $symfileobj" + addlibs="$convenience" + build_libtool_libs=no + else + if test "$build_libtool_libs" = module; then + oldobjs="$libobjs_save" + build_libtool_libs=no + else + oldobjs="$old_deplibs $non_pic_objects" + if test "$preload" = yes && test -f "$symfileobj"; then + oldobjs="$oldobjs $symfileobj" + fi + fi + addlibs="$old_convenience" + fi + + if test -n "$addlibs"; then + gentop="$output_objdir/${outputname}x" + generated="$generated $gentop" + + func_extract_archives $gentop $addlibs + oldobjs="$oldobjs $func_extract_archives_result" + fi + + # Do each command in the archive commands. + if test -n "$old_archive_from_new_cmds" && test "$build_libtool_libs" = yes; then + cmds=$old_archive_from_new_cmds + else + + # Add any objects from preloaded convenience libraries + if test -n "$dlprefiles"; then + gentop="$output_objdir/${outputname}x" + generated="$generated $gentop" + + func_extract_archives $gentop $dlprefiles + oldobjs="$oldobjs $func_extract_archives_result" + fi + + # POSIX demands no paths to be encoded in archives. 
We have + # to avoid creating archives with duplicate basenames if we + # might have to extract them afterwards, e.g., when creating a + # static archive out of a convenience library, or when linking + # the entirety of a libtool archive into another (currently + # not supported by libtool). + if (for obj in $oldobjs + do + func_basename "$obj" + $ECHO "$func_basename_result" + done | sort | sort -uc >/dev/null 2>&1); then + : + else + $ECHO "copying selected object files to avoid basename conflicts..." + gentop="$output_objdir/${outputname}x" + generated="$generated $gentop" + func_mkdir_p "$gentop" + save_oldobjs=$oldobjs + oldobjs= + counter=1 + for obj in $save_oldobjs + do + func_basename "$obj" + objbase="$func_basename_result" + case " $oldobjs " in + " ") oldobjs=$obj ;; + *[\ /]"$objbase "*) + while :; do + # Make sure we don't pick an alternate name that also + # overlaps. + newobj=lt$counter-$objbase + func_arith $counter + 1 + counter=$func_arith_result + case " $oldobjs " in + *[\ /]"$newobj "*) ;; + *) if test ! -f "$gentop/$newobj"; then break; fi ;; + esac + done + func_show_eval "ln $obj $gentop/$newobj || cp $obj $gentop/$newobj" + oldobjs="$oldobjs $gentop/$newobj" + ;; + *) oldobjs="$oldobjs $obj" ;; + esac + done + fi + eval cmds=\"$old_archive_cmds\" + + func_len " $cmds" + len=$func_len_result + if test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then + cmds=$old_archive_cmds + else + # the command line is too long to link in one step, link in parts + func_verbose "using piecewise archive linking..." + save_RANLIB=$RANLIB + RANLIB=: + objlist= + concat_cmds= + save_oldobjs=$oldobjs + oldobjs= + # Is there a better way of finding the last object in the list? + for obj in $save_oldobjs + do + last_oldobj=$obj + done + eval test_cmds=\"$old_archive_cmds\" + func_len " $test_cmds" + len0=$func_len_result + len=$len0 + for obj in $save_oldobjs + do + func_len " $obj" + func_arith $len + $func_len_result + len=$func_arith_result + func_append objlist " $obj" + if test "$len" -lt "$max_cmd_len"; then + : + else + # the above command should be used before it gets too long + oldobjs=$objlist + if test "$obj" = "$last_oldobj" ; then + RANLIB=$save_RANLIB + fi + test -z "$concat_cmds" || concat_cmds=$concat_cmds~ + eval concat_cmds=\"\${concat_cmds}$old_archive_cmds\" + objlist= + len=$len0 + fi + done + RANLIB=$save_RANLIB + oldobjs=$objlist + if test "X$oldobjs" = "X" ; then + eval cmds=\"\$concat_cmds\" + else + eval cmds=\"\$concat_cmds~\$old_archive_cmds\" + fi + fi + fi + func_execute_cmds "$cmds" 'exit $?' + done + + test -n "$generated" && \ + func_show_eval "${RM}r$generated" + + # Now create the libtool archive. + case $output in + *.la) + old_library= + test "$build_old_libs" = yes && old_library="$libname.$libext" + func_verbose "creating $output" + + # Preserve any variables that may affect compiler behavior + for var in $variables_saved_for_relink; do + if eval test -z \"\${$var+set}\"; then + relink_command="{ test -z \"\${$var+set}\" || $lt_unset $var || { $var=; export $var; }; }; $relink_command" + elif eval var_value=\$$var; test -z "$var_value"; then + relink_command="$var=; export $var; $relink_command" + else + func_quote_for_eval "$var_value" + relink_command="$var=$func_quote_for_eval_result; export $var; $relink_command" + fi + done + # Quote the link command for shipping. 
+ relink_command="(cd `pwd`; $SHELL $progpath $preserve_args --mode=relink $libtool_args @inst_prefix_dir@)" + relink_command=`$ECHO "X$relink_command" | $Xsed -e "$sed_quote_subst"` + if test "$hardcode_automatic" = yes ; then + relink_command= + fi + + # Only create the output if not a dry run. + $opt_dry_run || { + for installed in no yes; do + if test "$installed" = yes; then + if test -z "$install_libdir"; then + break + fi + output="$output_objdir/$outputname"i + # Replace all uninstalled libtool libraries with the installed ones + newdependency_libs= + for deplib in $dependency_libs; do + case $deplib in + *.la) + func_basename "$deplib" + name="$func_basename_result" + eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $deplib` + test -z "$libdir" && \ + func_fatal_error "\`$deplib' is not a valid libtool archive" + newdependency_libs="$newdependency_libs $libdir/$name" + ;; + *) newdependency_libs="$newdependency_libs $deplib" ;; + esac + done + dependency_libs="$newdependency_libs" + newdlfiles= + + for lib in $dlfiles; do + case $lib in + *.la) + func_basename "$lib" + name="$func_basename_result" + eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $lib` + test -z "$libdir" && \ + func_fatal_error "\`$lib' is not a valid libtool archive" + newdlfiles="$newdlfiles $libdir/$name" + ;; + *) newdlfiles="$newdlfiles $lib" ;; + esac + done + dlfiles="$newdlfiles" + newdlprefiles= + for lib in $dlprefiles; do + case $lib in + *.la) + # Only pass preopened files to the pseudo-archive (for + # eventual linking with the app. that links it) if we + # didn't already link the preopened objects directly into + # the library: + func_basename "$lib" + name="$func_basename_result" + eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $lib` + test -z "$libdir" && \ + func_fatal_error "\`$lib' is not a valid libtool archive" + newdlprefiles="$newdlprefiles $libdir/$name" + ;; + esac + done + dlprefiles="$newdlprefiles" + else + newdlfiles= + for lib in $dlfiles; do + case $lib in + [\\/]* | [A-Za-z]:[\\/]*) abs="$lib" ;; + *) abs=`pwd`"/$lib" ;; + esac + newdlfiles="$newdlfiles $abs" + done + dlfiles="$newdlfiles" + newdlprefiles= + for lib in $dlprefiles; do + case $lib in + [\\/]* | [A-Za-z]:[\\/]*) abs="$lib" ;; + *) abs=`pwd`"/$lib" ;; + esac + newdlprefiles="$newdlprefiles $abs" + done + dlprefiles="$newdlprefiles" + fi + $RM $output + # place dlname in correct position for cygwin + tdlname=$dlname + case $host,$output,$installed,$module,$dlname in + *cygwin*,*lai,yes,no,*.dll | *mingw*,*lai,yes,no,*.dll) tdlname=../bin/$dlname ;; + esac + $ECHO > $output "\ +# $outputname - a libtool library file +# Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION +# +# Please DO NOT delete this file! +# It is necessary for linking the library. + +# The name that we can dlopen(3). +dlname='$tdlname' + +# Names of this library. +library_names='$library_names' + +# The name of the static archive. +old_library='$old_library' + +# Linker flags that can not go in dependency_libs. +inherited_linker_flags='$new_inherited_linker_flags' + +# Libraries that this one depends upon. +dependency_libs='$dependency_libs' + +# Names of additional weak libraries provided by this library +weak_library_names='$weak_libs' + +# Version information for $libname. +current=$current +age=$age +revision=$revision + +# Is this an already installed library? +installed=$installed + +# Should we warn about portability when linking against -modules? 
+shouldnotlink=$module + +# Files to dlopen/dlpreopen +dlopen='$dlfiles' +dlpreopen='$dlprefiles' + +# Directory that this library needs to be installed in: +libdir='$install_libdir'" + if test "$installed" = no && test "$need_relink" = yes; then + $ECHO >> $output "\ +relink_command=\"$relink_command\"" + fi + done + } + + # Do a symbolic link so that the libtool archive can be found in + # LD_LIBRARY_PATH before the program is installed. + func_show_eval '( cd "$output_objdir" && $RM "$outputname" && $LN_S "../$outputname" "$outputname" )' 'exit $?' + ;; + esac + exit $EXIT_SUCCESS +} + +{ test "$mode" = link || test "$mode" = relink; } && + func_mode_link ${1+"$@"} + + +# func_mode_uninstall arg... +func_mode_uninstall () +{ + $opt_debug + RM="$nonopt" + files= + rmforce= + exit_status=0 + + # This variable tells wrapper scripts just to set variables rather + # than running their programs. + libtool_install_magic="$magic" + + for arg + do + case $arg in + -f) RM="$RM $arg"; rmforce=yes ;; + -*) RM="$RM $arg" ;; + *) files="$files $arg" ;; + esac + done + + test -z "$RM" && \ + func_fatal_help "you must specify an RM program" + + rmdirs= + + origobjdir="$objdir" + for file in $files; do + func_dirname "$file" "" "." + dir="$func_dirname_result" + if test "X$dir" = X.; then + objdir="$origobjdir" + else + objdir="$dir/$origobjdir" + fi + func_basename "$file" + name="$func_basename_result" + test "$mode" = uninstall && objdir="$dir" + + # Remember objdir for removal later, being careful to avoid duplicates + if test "$mode" = clean; then + case " $rmdirs " in + *" $objdir "*) ;; + *) rmdirs="$rmdirs $objdir" ;; + esac + fi + + # Don't error if the file doesn't exist and rm -f was used. + if { test -L "$file"; } >/dev/null 2>&1 || + { test -h "$file"; } >/dev/null 2>&1 || + test -f "$file"; then + : + elif test -d "$file"; then + exit_status=1 + continue + elif test "$rmforce" = yes; then + continue + fi + + rmfiles="$file" + + case $name in + *.la) + # Possibly a libtool archive, so verify it. + if func_lalib_p "$file"; then + func_source $dir/$name + + # Delete the libtool libraries and symlinks. + for n in $library_names; do + rmfiles="$rmfiles $objdir/$n" + done + test -n "$old_library" && rmfiles="$rmfiles $objdir/$old_library" + + case "$mode" in + clean) + case " $library_names " in + # " " in the beginning catches empty $dlname + *" $dlname "*) ;; + *) rmfiles="$rmfiles $objdir/$dlname" ;; + esac + test -n "$libdir" && rmfiles="$rmfiles $objdir/$name $objdir/${name}i" + ;; + uninstall) + if test -n "$library_names"; then + # Do each command in the postuninstall commands. + func_execute_cmds "$postuninstall_cmds" 'test "$rmforce" = yes || exit_status=1' + fi + + if test -n "$old_library"; then + # Do each command in the old_postuninstall commands. + func_execute_cmds "$old_postuninstall_cmds" 'test "$rmforce" = yes || exit_status=1' + fi + # FIXME: should reinstall the best remaining shared library. + ;; + esac + fi + ;; + + *.lo) + # Possibly a libtool object, so verify it. + if func_lalib_p "$file"; then + + # Read the .lo file + func_source $dir/$name + + # Add PIC object to the list of files to remove. + if test -n "$pic_object" && + test "$pic_object" != none; then + rmfiles="$rmfiles $dir/$pic_object" + fi + + # Add non-PIC object to the list of files to remove. 
+ if test -n "$non_pic_object" && + test "$non_pic_object" != none; then + rmfiles="$rmfiles $dir/$non_pic_object" + fi + fi + ;; + + *) + if test "$mode" = clean ; then + noexename=$name + case $file in + *.exe) + func_stripname '' '.exe' "$file" + file=$func_stripname_result + func_stripname '' '.exe' "$name" + noexename=$func_stripname_result + # $file with .exe has already been added to rmfiles, + # add $file without .exe + rmfiles="$rmfiles $file" + ;; + esac + # Do a test to see if this is a libtool program. + if func_ltwrapper_p "$file"; then + if func_ltwrapper_executable_p "$file"; then + func_ltwrapper_scriptname "$file" + relink_command= + func_source $func_ltwrapper_scriptname_result + rmfiles="$rmfiles $func_ltwrapper_scriptname_result" + else + relink_command= + func_source $dir/$noexename + fi + + # note $name still contains .exe if it was in $file originally + # as does the version of $file that was added into $rmfiles + rmfiles="$rmfiles $objdir/$name $objdir/${name}S.${objext}" + if test "$fast_install" = yes && test -n "$relink_command"; then + rmfiles="$rmfiles $objdir/lt-$name" + fi + if test "X$noexename" != "X$name" ; then + rmfiles="$rmfiles $objdir/lt-${noexename}.c" + fi + fi + fi + ;; + esac + func_show_eval "$RM $rmfiles" 'exit_status=1' + done + objdir="$origobjdir" + + # Try to remove the ${objdir}s in the directories where we deleted files + for dir in $rmdirs; do + if test -d "$dir"; then + func_show_eval "rmdir $dir >/dev/null 2>&1" + fi + done + + exit $exit_status +} + +{ test "$mode" = uninstall || test "$mode" = clean; } && + func_mode_uninstall ${1+"$@"} + +test -z "$mode" && { + help="$generic_help" + func_fatal_help "you must specify a MODE" +} + +test -z "$exec_cmd" && \ + func_fatal_help "invalid operation mode \`$mode'" + +if test -n "$exec_cmd"; then + eval exec "$exec_cmd" + exit $EXIT_FAILURE +fi + +exit $exit_status + + +# The TAGs below are defined such that we never get into a situation +# in which we disable both kinds of libraries. Given conflicting +# choices, we go for a static library, that is the most portable, +# since we can't tell whether shared libraries were disabled because +# the user asked for that or because the platform doesn't support +# them. This is particularly important on AIX, because we don't +# support having both static and shared libraries enabled at the same +# time on that platform, so we default to a shared-only configuration. +# If a disable-shared tag is given, we'll fallback to a static-only +# configuration. But we'll never go from static-only to shared-only. + +# ### BEGIN LIBTOOL TAG CONFIG: disable-shared +build_libtool_libs=no +build_old_libs=yes +# ### END LIBTOOL TAG CONFIG: disable-shared + +# ### BEGIN LIBTOOL TAG CONFIG: disable-static +build_old_libs=`case $build_libtool_libs in yes) echo no;; *) echo yes;; esac` +# ### END LIBTOOL TAG CONFIG: disable-static + +# Local Variables: +# mode:shell-script +# sh-indentation:2 +# End: +# vi:sw=2 + diff --git a/backtrace-sys/src/libbacktrace/macho.c b/backtrace-sys/src/libbacktrace/macho.c new file mode 100644 index 000000000..b43ea22fe --- /dev/null +++ b/backtrace-sys/src/libbacktrace/macho.c @@ -0,0 +1,1418 @@ +/* macho.c -- Get debug data from an Mach-O file for backtraces. + Copyright (C) 2012-2016 Free Software Foundation, Inc. + Written by John Colanduoni. 
+ + Pending upstream pull request: + https://github.com/ianlancetaylor/libbacktrace/pull/2 + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + (1) Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + (2) Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + + (3) The name of the author may not be used to + endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, +INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, +STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING +IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. */ + +#include "config.h" + +/* We can't use autotools to detect the pointer width of our program because + we may be building a fat Mach-O file containing both 32-bit and 64-bit + variants. However Mach-O runs a limited set of platforms so detection + via preprocessor is not difficult. */ + +#if defined(__MACH__) +#if defined(__LP64__) +#define BACKTRACE_BITS 64 +#else +#define BACKTRACE_BITS 32 +#endif +#else +#error Attempting to build Mach-O support on incorrect platform +#endif + +#if defined(__x86_64__) +#define NATIVE_CPU_TYPE CPU_TYPE_X86_64 +#elif defined(__i386__) +#define NATIVE_CPU_TYPE CPU_TYPE_X86 +#elif defined(__aarch64__) +#define NATIVE_CPU_TYPE CPU_TYPE_ARM64 +#elif defined(__arm__) +#define NATIVE_CPU_TYPE CPU_TYPE_ARM +#else +#error Could not detect native Mach-O cpu_type_t +#endif + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "backtrace.h" +#include "internal.h" + +struct macho_commands_view +{ + struct backtrace_view view; + uint32_t commands_count; + uint32_t commands_total_size; + int bytes_swapped; + size_t base_offset; +}; + +enum debug_section +{ + DEBUG_INFO, + DEBUG_LINE, + DEBUG_ABBREV, + DEBUG_RANGES, + DEBUG_STR, + DEBUG_MAX +}; + +static const char *const debug_section_names[DEBUG_MAX] = + { + "__debug_info", + "__debug_line", + "__debug_abbrev", + "__debug_ranges", + "__debug_str" + }; + +struct found_dwarf_section +{ + uint32_t file_offset; + uintptr_t file_size; + const unsigned char *data; +}; + +/* Mach-O symbols don't have a length. As a result we have to infer it + by sorting the symbol addresses for each image and recording the + memory range attributed to each image. 
*/ +struct macho_symbol +{ + uintptr_t addr; + size_t size; + const char *name; +}; + +struct macho_syminfo_data +{ + struct macho_syminfo_data *next; + struct macho_symbol *symbols; + size_t symbol_count; + uintptr_t min_addr; + uintptr_t max_addr; +}; + +uint16_t +macho_file_to_host_u16 (int file_bytes_swapped, uint16_t input) +{ + if (file_bytes_swapped) + return (input >> 8) | (input << 8); + else + return input; +} + +uint32_t +macho_file_to_host_u32 (int file_bytes_swapped, uint32_t input) +{ + if (file_bytes_swapped) + { + return ((input >> 24) & 0x000000FF) + | ((input >> 8) & 0x0000FF00) + | ((input << 8) & 0x00FF0000) + | ((input << 24) & 0xFF000000); + } + else + { + return input; + } +} + +uint64_t +macho_file_to_host_u64 (int file_bytes_swapped, uint64_t input) +{ + if (file_bytes_swapped) + { + return macho_file_to_host_u32 (file_bytes_swapped, + (uint32_t) (input >> 32)) + | (((uint64_t) macho_file_to_host_u32 (file_bytes_swapped, + (uint32_t) input)) << 32); + } + else + { + return input; + } +} + +#if BACKTRACE_BITS == 64 +#define macho_file_to_host_usize macho_file_to_host_u64 +typedef struct mach_header_64 mach_header_native_t; +#define LC_SEGMENT_NATIVE LC_SEGMENT_64 +typedef struct segment_command_64 segment_command_native_t; +typedef struct nlist_64 nlist_native_t; +typedef struct section_64 section_native_t; +#else /* BACKTRACE_BITS == 32 */ +#define macho_file_to_host_usize macho_file_to_host_u32 +typedef struct mach_header mach_header_native_t; +#define LC_SEGMENT_NATIVE LC_SEGMENT +typedef struct segment_command segment_command_native_t; +typedef struct nlist nlist_native_t; +typedef struct section section_native_t; +#endif + +// Gets a view into a Mach-O image, taking any slice offset into account +int +macho_get_view (struct backtrace_state *state, int descriptor, + off_t offset, size_t size, + backtrace_error_callback error_callback, + void *data, struct macho_commands_view *commands_view, + struct backtrace_view *view) +{ + return backtrace_get_view (state, descriptor, + commands_view->base_offset + offset, size, + error_callback, data, view); +} + +int +macho_get_commands (struct backtrace_state *state, int descriptor, + backtrace_error_callback error_callback, + void *data, struct macho_commands_view *commands_view, + int *incompatible) +{ + int ret = 0; + int is_fat = 0; + struct backtrace_view file_header_view; + int file_header_view_valid = 0; + struct backtrace_view fat_archs_view; + int fat_archs_view_valid = 0; + const mach_header_native_t *file_header; + uint64_t commands_offset; + + *incompatible = 0; + + if (!backtrace_get_view (state, descriptor, 0, sizeof (mach_header_native_t), + error_callback, data, &file_header_view)) + goto end; + file_header_view_valid = 1; + + switch (*(uint32_t *) file_header_view.data) + { + case MH_MAGIC: + if (BACKTRACE_BITS == 32) + commands_view->bytes_swapped = 0; + else + { + *incompatible = 1; + goto end; + } + break; + case MH_CIGAM: + if (BACKTRACE_BITS == 32) + commands_view->bytes_swapped = 1; + else + { + *incompatible = 1; + goto end; + } + break; + case MH_MAGIC_64: + if (BACKTRACE_BITS == 64) + commands_view->bytes_swapped = 0; + else + { + *incompatible = 1; + goto end; + } + break; + case MH_CIGAM_64: + if (BACKTRACE_BITS == 64) + commands_view->bytes_swapped = 1; + else + { + *incompatible = 1; + goto end; + } + break; + case FAT_MAGIC: + is_fat = 1; + commands_view->bytes_swapped = 0; + break; + case FAT_CIGAM: + is_fat = 1; + commands_view->bytes_swapped = 1; + break; + default: + goto end; + } + + if 
(is_fat) + { + uint32_t native_slice_offset; + size_t archs_total_size; + uint32_t arch_count; + const struct fat_header *fat_header; + const struct fat_arch *archs; + uint32_t i; + + fat_header = file_header_view.data; + arch_count = + macho_file_to_host_u32 (commands_view->bytes_swapped, + fat_header->nfat_arch); + + archs_total_size = arch_count * sizeof (struct fat_arch); + + if (!backtrace_get_view (state, descriptor, sizeof (struct fat_header), + archs_total_size, error_callback, + data, &fat_archs_view)) + goto end; + fat_archs_view_valid = 1; + + native_slice_offset = 0; + archs = fat_archs_view.data; + for (i = 0; i < arch_count; i++) + { + const struct fat_arch *raw_arch = archs + i; + int cpu_type = + (int) macho_file_to_host_u32 (commands_view->bytes_swapped, + (uint32_t) raw_arch->cputype); + + if (cpu_type == NATIVE_CPU_TYPE) + { + native_slice_offset = + macho_file_to_host_u32 (commands_view->bytes_swapped, + raw_arch->offset); + + break; + } + } + + if (native_slice_offset == 0) + { + *incompatible = 1; + goto end; + } + + backtrace_release_view (state, &file_header_view, error_callback, data); + file_header_view_valid = 0; + if (!backtrace_get_view (state, descriptor, native_slice_offset, + sizeof (mach_header_native_t), error_callback, + data, &file_header_view)) + goto end; + file_header_view_valid = 1; + + // The endianess of the slice may be different than the fat image + switch (*(uint32_t *) file_header_view.data) + { + case MH_MAGIC: + if (BACKTRACE_BITS == 32) + commands_view->bytes_swapped = 0; + else + goto end; + break; + case MH_CIGAM: + if (BACKTRACE_BITS == 32) + commands_view->bytes_swapped = 1; + else + goto end; + break; + case MH_MAGIC_64: + if (BACKTRACE_BITS == 64) + commands_view->bytes_swapped = 0; + else + goto end; + break; + case MH_CIGAM_64: + if (BACKTRACE_BITS == 64) + commands_view->bytes_swapped = 1; + else + goto end; + break; + default: + goto end; + } + + commands_view->base_offset = native_slice_offset; + } + else + commands_view->base_offset = 0; + + file_header = file_header_view.data; + commands_view->commands_count = + macho_file_to_host_u32 (commands_view->bytes_swapped, + file_header->ncmds); + commands_view->commands_total_size = + macho_file_to_host_u32 (commands_view->bytes_swapped, + file_header->sizeofcmds); + commands_offset = + commands_view->base_offset + sizeof (mach_header_native_t); + + if (!backtrace_get_view (state, descriptor, commands_offset, + commands_view->commands_total_size, error_callback, + data, &commands_view->view)) + goto end; + + ret = 1; + +end: + if (file_header_view_valid) + backtrace_release_view (state, &file_header_view, error_callback, data); + if (fat_archs_view_valid) + backtrace_release_view (state, &fat_archs_view, error_callback, data); + return ret; +} + +int +macho_get_uuid (struct backtrace_state *state ATTRIBUTE_UNUSED, + int descriptor ATTRIBUTE_UNUSED, + backtrace_error_callback error_callback, + void *data, struct macho_commands_view *commands_view, + uuid_t *uuid) +{ + size_t offset = 0; + uint32_t i = 0; + + for (i = 0; i < commands_view->commands_count; i++) + { + const struct load_command *raw_command; + struct load_command command; + + if (offset + sizeof (struct load_command) + > commands_view->commands_total_size) + { + error_callback (data, + "executable file contains out of range command offset", + 0); + return 0; + } + + raw_command = + commands_view->view.data + offset; + command.cmd = macho_file_to_host_u32 (commands_view->bytes_swapped, + raw_command->cmd); + command.cmdsize 
= macho_file_to_host_u32 (commands_view->bytes_swapped, + raw_command->cmdsize); + + if (command.cmd == LC_UUID) + { + const struct uuid_command *uuid_command; + + if (offset + sizeof (struct uuid_command) + > commands_view->commands_total_size) + { + error_callback (data, + "executable file contains out of range command offset", + 0); + return 0; + } + + uuid_command = + (struct uuid_command *) raw_command; + memcpy (uuid, uuid_command->uuid, sizeof (uuid_t)); + return 1; + } + + offset += command.cmdsize; + } + + error_callback (data, "executable file is missing an identifying UUID", 0); + return 0; +} + +/* Returns the base address of a Mach-O image, as encoded in the file header. + * WARNING: This does not take ASLR into account, which is ubiquitous on recent + * Darwin platforms. + */ +int +macho_get_addr_range (struct backtrace_state *state ATTRIBUTE_UNUSED, + int descriptor ATTRIBUTE_UNUSED, + backtrace_error_callback error_callback, + void *data, struct macho_commands_view *commands_view, + uintptr_t *base_address, uintptr_t *max_address) +{ + size_t offset = 0; + int found_text = 0; + uint32_t i = 0; + + *max_address = 0; + + for (i = 0; i < commands_view->commands_count; i++) + { + const struct load_command *raw_command; + struct load_command command; + + if (offset + sizeof (struct load_command) + > commands_view->commands_total_size) + { + error_callback (data, + "executable file contains out of range command offset", + 0); + return 0; + } + + raw_command = commands_view->view.data + offset; + command.cmd = macho_file_to_host_u32 (commands_view->bytes_swapped, + raw_command->cmd); + command.cmdsize = macho_file_to_host_u32 (commands_view->bytes_swapped, + raw_command->cmdsize); + + if (command.cmd == LC_SEGMENT_NATIVE) + { + const segment_command_native_t *raw_segment; + uintptr_t segment_vmaddr; + uintptr_t segment_vmsize; + uintptr_t segment_maxaddr; + uintptr_t text_fileoff; + + if (offset + sizeof (segment_command_native_t) + > commands_view->commands_total_size) + { + error_callback (data, + "executable file contains out of range command offset", + 0); + return 0; + } + + raw_segment = (segment_command_native_t *) raw_command; + + segment_vmaddr = macho_file_to_host_usize ( + commands_view->bytes_swapped, raw_segment->vmaddr); + segment_vmsize = macho_file_to_host_usize ( + commands_view->bytes_swapped, raw_segment->vmsize); + segment_maxaddr = segment_vmaddr + segment_vmsize; + + if (strncmp (raw_segment->segname, "__TEXT", + sizeof (raw_segment->segname)) == 0) + { + text_fileoff = macho_file_to_host_usize ( + commands_view->bytes_swapped, raw_segment->fileoff); + *base_address = segment_vmaddr - text_fileoff; + + found_text = 1; + } + + if (segment_maxaddr > *max_address) + *max_address = segment_maxaddr; + } + + offset += command.cmdsize; + } + + if (found_text) + return 1; + else + { + error_callback (data, "executable is missing __TEXT segment", 0); + return 0; + } +} + +static int +macho_symbol_compare_addr (const void *left_raw, const void *right_raw) +{ + const struct macho_symbol *left = left_raw; + const struct macho_symbol *right = right_raw; + + if (left->addr > right->addr) + return 1; + else if (left->addr < right->addr) + return -1; + else + return 0; +} + +int +macho_symbol_type_relevant (uint8_t type) +{ + uint8_t type_field = (uint8_t) (type & N_TYPE); + + return !(type & N_EXT) && + (type_field == N_ABS || type_field == N_SECT); +} + +int +macho_add_symtab (struct backtrace_state *state, + backtrace_error_callback error_callback, + void *data, int 
descriptor, + struct macho_commands_view *commands_view, + uintptr_t base_address, uintptr_t max_image_address, + intptr_t vmslide, int *found_sym) +{ + struct macho_syminfo_data *syminfo_data; + + int ret = 0; + size_t offset = 0; + struct backtrace_view symtab_view; + int symtab_view_valid = 0; + struct backtrace_view strtab_view; + int strtab_view_valid = 0; + size_t syminfo_index = 0; + size_t function_count = 0; + uint32_t i = 0; + uint32_t j = 0; + uint32_t symtab_index = 0; + + *found_sym = 0; + + for (i = 0; i < commands_view->commands_count; i++) + { + const struct load_command *raw_command; + struct load_command command; + + if (offset + sizeof (struct load_command) + > commands_view->commands_total_size) + { + error_callback (data, + "executable file contains out of range command offset", + 0); + return 0; + } + + raw_command = commands_view->view.data + offset; + command.cmd = macho_file_to_host_u32 (commands_view->bytes_swapped, + raw_command->cmd); + command.cmdsize = macho_file_to_host_u32 (commands_view->bytes_swapped, + raw_command->cmdsize); + + if (command.cmd == LC_SYMTAB) + { + const struct symtab_command *symtab_command; + uint32_t symbol_table_offset; + uint32_t symbol_count; + uint32_t string_table_offset; + uint32_t string_table_size; + + if (offset + sizeof (struct symtab_command) + > commands_view->commands_total_size) + { + error_callback (data, + "executable file contains out of range command offset", + 0); + return 0; + } + + symtab_command = (struct symtab_command *) raw_command; + + symbol_table_offset = macho_file_to_host_u32 ( + commands_view->bytes_swapped, symtab_command->symoff); + symbol_count = macho_file_to_host_u32 ( + commands_view->bytes_swapped, symtab_command->nsyms); + string_table_offset = macho_file_to_host_u32 ( + commands_view->bytes_swapped, symtab_command->stroff); + string_table_size = macho_file_to_host_u32 ( + commands_view->bytes_swapped, symtab_command->strsize); + + + if (!macho_get_view (state, descriptor, symbol_table_offset, + symbol_count * sizeof (nlist_native_t), + error_callback, data, commands_view, + &symtab_view)) + goto end; + symtab_view_valid = 1; + + if (!macho_get_view (state, descriptor, string_table_offset, + string_table_size, error_callback, data, + commands_view, &strtab_view)) + goto end; + strtab_view_valid = 1; + + // Count functions first + for (j = 0; j < symbol_count; j++) + { + const nlist_native_t *raw_sym = + ((const nlist_native_t *) symtab_view.data) + j; + + if (macho_symbol_type_relevant (raw_sym->n_type)) + { + function_count += 1; + } + } + + // Allocate space for the: + // (a) macho_syminfo_data for this image + // (b) macho_symbol entries + syminfo_data = + backtrace_alloc (state, + sizeof (struct macho_syminfo_data), + error_callback, data); + if (syminfo_data == NULL) + goto end; + + syminfo_data->symbols = backtrace_alloc ( + state, function_count * sizeof (struct macho_symbol), + error_callback, data); + if (syminfo_data->symbols == NULL) + goto end; + + syminfo_data->symbol_count = function_count; + syminfo_data->next = NULL; + syminfo_data->min_addr = base_address; + syminfo_data->max_addr = max_image_address; + + for (symtab_index = 0; + symtab_index < symbol_count; symtab_index++) + { + const nlist_native_t *raw_sym = + ((const nlist_native_t *) symtab_view.data) + + symtab_index; + + if (macho_symbol_type_relevant (raw_sym->n_type)) + { + size_t strtab_index; + const char *name; + size_t max_len_plus_one; + + syminfo_data->symbols[syminfo_index].addr = + macho_file_to_host_usize 
(commands_view->bytes_swapped, + raw_sym->n_value) + vmslide; + + strtab_index = macho_file_to_host_u32 ( + commands_view->bytes_swapped, + raw_sym->n_un.n_strx); + + // Check the range of the supposed "string" we've been + // given + if (strtab_index >= string_table_size) + { + error_callback ( + data, + "dSYM file contains out of range string table index", + 0); + goto end; + } + + name = strtab_view.data + strtab_index; + max_len_plus_one = string_table_size - strtab_index; + + if (strnlen (name, max_len_plus_one) >= max_len_plus_one) + { + error_callback ( + data, + "dSYM file contains unterminated string", + 0); + goto end; + } + + // Remove underscore prefixes + if (name[0] == '_') + { + name = name + 1; + } + + syminfo_data->symbols[syminfo_index].name = name; + + syminfo_index += 1; + } + } + + backtrace_qsort (syminfo_data->symbols, + syminfo_data->symbol_count, + sizeof (struct macho_symbol), + macho_symbol_compare_addr); + + // Calculate symbol sizes + for (syminfo_index = 0; + syminfo_index < syminfo_data->symbol_count; syminfo_index++) + { + if (syminfo_index + 1 < syminfo_data->symbol_count) + { + syminfo_data->symbols[syminfo_index].size = + syminfo_data->symbols[syminfo_index + 1].addr - + syminfo_data->symbols[syminfo_index].addr; + } + else + { + syminfo_data->symbols[syminfo_index].size = + max_image_address - + syminfo_data->symbols[syminfo_index].addr; + } + } + + if (!state->threaded) + { + struct macho_syminfo_data **pp; + + for (pp = (struct macho_syminfo_data **) (void *) &state->syminfo_data; + *pp != NULL; + pp = &(*pp)->next); + *pp = syminfo_data; + } + else + { + while (1) + { + struct macho_syminfo_data **pp; + + pp = (struct macho_syminfo_data **) (void *) &state->syminfo_data; + + while (1) + { + struct macho_syminfo_data *p; + + p = backtrace_atomic_load_pointer (pp); + + if (p == NULL) + break; + + pp = &p->next; + } + + if (__sync_bool_compare_and_swap (pp, NULL, syminfo_data)) + break; + } + } + + strtab_view_valid = 0; // We need to keep string table around + *found_sym = 1; + ret = 1; + goto end; + } + + offset += command.cmdsize; + } + + // No symbol table here + ret = 1; + goto end; + +end: + if (symtab_view_valid) + backtrace_release_view (state, &symtab_view, error_callback, data); + if (strtab_view_valid) + backtrace_release_view (state, &strtab_view, error_callback, data); + return ret; +} + +int +macho_try_dwarf (struct backtrace_state *state, + backtrace_error_callback error_callback, + void *data, fileline *fileline_fn, uuid_t *executable_uuid, + uintptr_t base_address, uintptr_t max_image_address, + intptr_t vmslide, char *dwarf_filename, int *matched, + int *found_sym, int *found_dwarf) +{ + uuid_t dwarf_uuid; + + int ret = 0; + int dwarf_descriptor; + int dwarf_descriptor_valid = 0; + struct macho_commands_view commands_view; + int commands_view_valid = 0; + struct backtrace_view dwarf_view; + int dwarf_view_valid = 0; + size_t offset = 0; + struct found_dwarf_section dwarf_sections[DEBUG_MAX]; + uintptr_t min_dwarf_offset = 0; + uintptr_t max_dwarf_offset = 0; + uint32_t i = 0; + uint32_t j = 0; + int k = 0; + + *matched = 0; + *found_sym = 0; + *found_dwarf = 0; + + if ((dwarf_descriptor = backtrace_open (dwarf_filename, error_callback, + data, NULL)) == 0) + goto end; + dwarf_descriptor_valid = 1; + + int incompatible; + if (!macho_get_commands (state, dwarf_descriptor, error_callback, data, + &commands_view, &incompatible)) + { + // Failing to read the header here is fine, because this dSYM may be + // for a different architecture + if 
(incompatible) + { + ret = 1; + } + goto end; + } + commands_view_valid = 1; + + // Get dSYM UUID and compare + if (!macho_get_uuid (state, dwarf_descriptor, error_callback, data, + &commands_view, &dwarf_uuid)) + { + error_callback (data, "dSYM file is missing an identifying uuid", 0); + goto end; + } + if (memcmp (executable_uuid, &dwarf_uuid, sizeof (uuid_t)) != 0) + { + // DWARF doesn't belong to desired executable + ret = 1; + goto end; + } + + *matched = 1; + + // Read symbol table + if (!macho_add_symtab (state, error_callback, data, dwarf_descriptor, + &commands_view, base_address, max_image_address, + vmslide, found_sym)) + goto end; + + // Get DWARF sections + + memset (dwarf_sections, 0, sizeof (dwarf_sections)); + offset = 0; + for (i = 0; i < commands_view.commands_count; i++) + { + const struct load_command *raw_command; + struct load_command command; + + if (offset + sizeof (struct load_command) + > commands_view.commands_total_size) + { + error_callback (data, + "dSYM file contains out of range command offset", 0); + goto end; + } + + raw_command = commands_view.view.data + offset; + command.cmd = macho_file_to_host_u32 (commands_view.bytes_swapped, + raw_command->cmd); + command.cmdsize = macho_file_to_host_u32 (commands_view.bytes_swapped, + raw_command->cmdsize); + + if (command.cmd == LC_SEGMENT_NATIVE) + { + uint32_t section_count; + size_t section_offset; + const segment_command_native_t *raw_segment; + + if (offset + sizeof (segment_command_native_t) + > commands_view.commands_total_size) + { + error_callback (data, + "dSYM file contains out of range command offset", + 0); + goto end; + } + + raw_segment = (const segment_command_native_t *) raw_command; + + if (strncmp (raw_segment->segname, "__DWARF", + sizeof (raw_segment->segname)) == 0) + { + section_count = macho_file_to_host_u32 ( + commands_view.bytes_swapped, + raw_segment->nsects); + + section_offset = offset + sizeof (segment_command_native_t); + + // Search sections for relevant DWARF section names + for (j = 0; j < section_count; j++) + { + const section_native_t *raw_section; + + if (section_offset + sizeof (section_native_t) > + commands_view.commands_total_size) + { + error_callback (data, + "dSYM file contains out of range command offset", + 0); + goto end; + } + + raw_section = commands_view.view.data + section_offset; + + for (k = 0; k < DEBUG_MAX; k++) + { + uintptr_t dwarf_section_end; + + if (strncmp (raw_section->sectname, + debug_section_names[k], + sizeof (raw_section->sectname)) == 0) + { + *found_dwarf = 1; + + dwarf_sections[k].file_offset = + macho_file_to_host_u32 ( + commands_view.bytes_swapped, + raw_section->offset); + dwarf_sections[k].file_size = + macho_file_to_host_usize ( + commands_view.bytes_swapped, + raw_section->size); + + if (min_dwarf_offset == 0 || + dwarf_sections[k].file_offset < + min_dwarf_offset) + min_dwarf_offset = dwarf_sections[k].file_offset; + + dwarf_section_end = + dwarf_sections[k].file_offset + + dwarf_sections[k].file_size; + if (dwarf_section_end > max_dwarf_offset) + max_dwarf_offset = dwarf_section_end; + + break; + } + } + + section_offset += sizeof (section_native_t); + } + + break; + } + } + + offset += command.cmdsize; + } + + if (!*found_dwarf) + { + // No DWARF in this file + ret = 1; + goto end; + } + + if (!macho_get_view (state, dwarf_descriptor, (off_t) min_dwarf_offset, + max_dwarf_offset - min_dwarf_offset, error_callback, + data, &commands_view, &dwarf_view)) + goto end; + dwarf_view_valid = 1; + + for (i = 0; i < DEBUG_MAX; i++) + { + if 
(dwarf_sections[i].file_offset == 0) + dwarf_sections[i].data = NULL; + else + dwarf_sections[i].data = + dwarf_view.data + dwarf_sections[i].file_offset - min_dwarf_offset; + } + + if (!backtrace_dwarf_add (state, vmslide, + dwarf_sections[DEBUG_INFO].data, + dwarf_sections[DEBUG_INFO].file_size, + dwarf_sections[DEBUG_LINE].data, + dwarf_sections[DEBUG_LINE].file_size, + dwarf_sections[DEBUG_ABBREV].data, + dwarf_sections[DEBUG_ABBREV].file_size, + dwarf_sections[DEBUG_RANGES].data, + dwarf_sections[DEBUG_RANGES].file_size, + dwarf_sections[DEBUG_STR].data, + dwarf_sections[DEBUG_STR].file_size, + ((__DARWIN_BYTE_ORDER == __DARWIN_BIG_ENDIAN) + ^ commands_view.bytes_swapped), + error_callback, data, fileline_fn)) + goto end; + + // Don't release the DWARF view because it is still in use + dwarf_descriptor_valid = 0; + dwarf_view_valid = 0; + ret = 1; + +end: + if (dwarf_descriptor_valid) + backtrace_close (dwarf_descriptor, error_callback, data); + if (commands_view_valid) + backtrace_release_view (state, &commands_view.view, error_callback, data); + if (dwarf_view_valid) + backtrace_release_view (state, &dwarf_view, error_callback, data); + return ret; +} + +int +macho_try_dsym (struct backtrace_state *state, + backtrace_error_callback error_callback, + void *data, fileline *fileline_fn, uuid_t *executable_uuid, + uintptr_t base_address, uintptr_t max_image_address, + intptr_t vmslide, char *dsym_filename, int *matched, + int *found_sym, int *found_dwarf) +{ + int ret = 0; + char dwarf_image_dir_path[PATH_MAX]; + DIR *dwarf_image_dir; + int dwarf_image_dir_valid = 0; + struct dirent *directory_entry; + char dwarf_filename[PATH_MAX]; + int dwarf_matched; + int dwarf_had_sym; + int dwarf_had_dwarf; + + *matched = 0; + *found_sym = 0; + *found_dwarf = 0; + + strncpy (dwarf_image_dir_path, dsym_filename, PATH_MAX); + strncat (dwarf_image_dir_path, "/Contents/Resources/DWARF", PATH_MAX); + + if (!(dwarf_image_dir = opendir (dwarf_image_dir_path))) + { + error_callback (data, "could not open DWARF directory in dSYM", + 0); + goto end; + } + dwarf_image_dir_valid = 1; + + while ((directory_entry = readdir (dwarf_image_dir))) + { + if (directory_entry->d_type != DT_REG) + continue; + + strncpy (dwarf_filename, dwarf_image_dir_path, PATH_MAX); + strncat (dwarf_filename, "/", PATH_MAX); + strncat (dwarf_filename, directory_entry->d_name, PATH_MAX); + + if (!macho_try_dwarf (state, error_callback, data, fileline_fn, + executable_uuid, base_address, max_image_address, + vmslide, dwarf_filename, + &dwarf_matched, &dwarf_had_sym, &dwarf_had_dwarf)) + goto end; + + if (dwarf_matched) + { + *matched = 1; + *found_sym = dwarf_had_sym; + *found_dwarf = dwarf_had_dwarf; + ret = 1; + goto end; + } + } + + // No matching DWARF in this dSYM + ret = 1; + goto end; + +end: + if (dwarf_image_dir_valid) + closedir (dwarf_image_dir); + return ret; +} + +int +macho_add (struct backtrace_state *state, + backtrace_error_callback error_callback, void *data, int descriptor, + const char *filename, fileline *fileline_fn, intptr_t vmslide, + int *found_sym, int *found_dwarf) +{ + uuid_t image_uuid; + uintptr_t image_file_base_address; + uintptr_t image_file_max_address; + uintptr_t image_actual_base_address = 0; + uintptr_t image_actual_max_address = 0; + + int ret = 0; + struct macho_commands_view commands_view; + int commands_view_valid = 0; + char executable_dirname[PATH_MAX]; + size_t filename_len; + DIR *executable_dir = NULL; + int executable_dir_valid = 0; + struct dirent *directory_entry; + char 
dsym_full_path[PATH_MAX]; + static const char *extension; + size_t extension_len; + ssize_t i; + + *found_sym = 0; + *found_dwarf = 0; + + // Find Mach-O commands list + int incompatible; + if (!macho_get_commands (state, descriptor, error_callback, data, + &commands_view, &incompatible)) + goto end; + commands_view_valid = 1; + + // First we need to get the uuid of our file so we can hunt down the correct + // dSYM + if (!macho_get_uuid (state, descriptor, error_callback, data, &commands_view, + &image_uuid)) + goto end; + + // Now we need to find the in memory base address. Step one is to find out + // what the executable thinks the base address is + if (!macho_get_addr_range (state, descriptor, error_callback, data, + &commands_view, + &image_file_base_address, + &image_file_max_address)) + goto end; + + image_actual_base_address = + image_file_base_address + vmslide; + image_actual_max_address = + image_file_max_address + vmslide; + + if (image_actual_base_address == 0) + { + error_callback (data, "executable file is not loaded", 0); + goto end; + } + + // Look for dSYM in our executable's directory + strncpy (executable_dirname, filename, PATH_MAX); + filename_len = strlen (executable_dirname); + for (i = filename_len - 1; i >= 0; i--) + { + if (executable_dirname[i] == '/') + { + executable_dirname[i] = '\0'; + break; + } + else if (i == 0) + { + executable_dirname[0] = '.'; + executable_dirname[1] = '\0'; + break; + } + } + + if (!(executable_dir = opendir (executable_dirname))) + { + error_callback (data, "could not open directory containing executable", + 0); + goto end; + } + executable_dir_valid = 1; + + extension = ".dSYM"; + extension_len = strlen (extension); + while ((directory_entry = readdir (executable_dir))) + { + if (directory_entry->d_namlen < extension_len) + continue; + if (strncasecmp (directory_entry->d_name + directory_entry->d_namlen + - extension_len, extension, extension_len) == 0) + { + int matched; + int dsym_had_sym; + int dsym_had_dwarf; + + // Found a dSYM + strncpy (dsym_full_path, executable_dirname, PATH_MAX); + strncat (dsym_full_path, "/", PATH_MAX); + strncat (dsym_full_path, directory_entry->d_name, PATH_MAX); + + if (!macho_try_dsym (state, error_callback, data, + fileline_fn, &image_uuid, + image_actual_base_address, + image_actual_max_address, vmslide, + dsym_full_path, + &matched, &dsym_had_sym, &dsym_had_dwarf)) + goto end; + + if (matched) + { + *found_sym = dsym_had_sym; + *found_dwarf = dsym_had_dwarf; + ret = 1; + goto end; + } + } + } + + // No matching dSYM + ret = 1; + goto end; + +end: + if (commands_view_valid) + backtrace_release_view (state, &commands_view.view, error_callback, + data); + if (executable_dir_valid) + closedir (executable_dir); + return ret; +} + +static int +macho_symbol_search (const void *vkey, const void *ventry) +{ + const uintptr_t *key = (const uintptr_t *) vkey; + const struct macho_symbol *entry = (const struct macho_symbol *) ventry; + uintptr_t addr; + + addr = *key; + if (addr < entry->addr) + return -1; + else if (addr >= entry->addr + entry->size) + return 1; + else + return 0; +} + +static void +macho_syminfo (struct backtrace_state *state, + uintptr_t addr, + backtrace_syminfo_callback callback, + backtrace_error_callback error_callback ATTRIBUTE_UNUSED, + void *data) +{ + struct macho_syminfo_data *edata; + struct macho_symbol *sym = NULL; + + if (!state->threaded) + { + for (edata = (struct macho_syminfo_data *) state->syminfo_data; + edata != NULL; + edata = edata->next) + { + if (addr >= 
edata->min_addr && addr <= edata->max_addr) + { + sym = ((struct macho_symbol *) + bsearch (&addr, edata->symbols, edata->symbol_count, + sizeof (struct macho_symbol), macho_symbol_search)); + if (sym != NULL) + break; + } + } + } + else + { + struct macho_syminfo_data **pp; + + pp = (struct macho_syminfo_data **) (void *) &state->syminfo_data; + while (1) + { + edata = backtrace_atomic_load_pointer (pp); + if (edata == NULL) + break; + + if (addr >= edata->min_addr && addr <= edata->max_addr) + { + sym = ((struct macho_symbol *) + bsearch (&addr, edata->symbols, edata->symbol_count, + sizeof (struct macho_symbol), macho_symbol_search)); + if (sym != NULL) + break; + } + + pp = &edata->next; + } + } + + if (sym == NULL) + callback (data, addr, NULL, 0, 0); + else + callback (data, addr, sym->name, sym->addr, sym->size); +} + + +static int +macho_nodebug (struct backtrace_state *state ATTRIBUTE_UNUSED, + uintptr_t pc ATTRIBUTE_UNUSED, + backtrace_full_callback callback ATTRIBUTE_UNUSED, + backtrace_error_callback error_callback, void *data) +{ + error_callback (data, "no debug info in Mach-O executable", -1); + return 0; +} + +static void +macho_nosyms (struct backtrace_state *state ATTRIBUTE_UNUSED, + uintptr_t addr ATTRIBUTE_UNUSED, + backtrace_syminfo_callback callback ATTRIBUTE_UNUSED, + backtrace_error_callback error_callback, void *data) +{ + error_callback (data, "no symbol table in Mach-O executable", -1); +} + +int +backtrace_initialize (struct backtrace_state *state, + const char *filename, + int descriptor, + backtrace_error_callback error_callback, + void *data, fileline *fileline_fn) +{ + int ret; + fileline macho_fileline_fn = macho_nodebug; + int found_sym = 0; + int found_dwarf = 0; + uint32_t i = 0; + uint32_t loaded_image_count; + + // Add all loaded images + loaded_image_count = _dyld_image_count (); + for (i = 0; i < loaded_image_count; i++) + { + int current_found_sym; + int current_found_dwarf; + int current_descriptor; + intptr_t current_vmslide; + const char *current_name; + + current_vmslide = _dyld_get_image_vmaddr_slide (i); + current_name = _dyld_get_image_name (i); + + if (current_name == NULL || (i != 0 && current_vmslide == 0)) + continue; + + if (!(current_descriptor = + backtrace_open (current_name, error_callback, data, NULL))) + { + continue; + } + + if (macho_add (state, error_callback, data, current_descriptor, + current_name, &macho_fileline_fn, current_vmslide, + ¤t_found_sym, ¤t_found_dwarf)) + { + found_sym = found_sym || current_found_sym; + found_dwarf = found_dwarf || current_found_dwarf; + } + + backtrace_close (current_descriptor, error_callback, data); + } + + if (!state->threaded) + { + if (found_sym) + state->syminfo_fn = macho_syminfo; + else if (state->syminfo_fn == NULL) + state->syminfo_fn = macho_nosyms; + } + else + { + if (found_sym) + backtrace_atomic_store_pointer (&state->syminfo_fn, macho_syminfo); + else + (void) __sync_bool_compare_and_swap (&state->syminfo_fn, NULL, + macho_nosyms); + } + + if (!state->threaded) + { + if (state->fileline_fn == NULL || state->fileline_fn == macho_nodebug) + *fileline_fn = macho_fileline_fn; + } + else + { + fileline current_fn; + + current_fn = backtrace_atomic_load_pointer (&state->fileline_fn); + if (current_fn == NULL || current_fn == macho_nodebug) + *fileline_fn = macho_fileline_fn; + } + + return 1; +} + diff --git a/backtrace-sys/src/libbacktrace/missing b/backtrace-sys/src/libbacktrace/missing new file mode 100755 index 000000000..86a8fc31e --- /dev/null +++ 
b/backtrace-sys/src/libbacktrace/missing @@ -0,0 +1,331 @@ +#! /bin/sh +# Common stub for a few missing GNU programs while installing. + +scriptversion=2012-01-06.13; # UTC + +# Copyright (C) 1996, 1997, 1999, 2000, 2002, 2003, 2004, 2005, 2006, +# 2008, 2009, 2010, 2011, 2012 Free Software Foundation, Inc. +# Originally by Fran,cois Pinard , 1996. + +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2, or (at your option) +# any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. + +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + +# As a special exception to the GNU General Public License, if you +# distribute this file as part of a program that contains a +# configuration script generated by Autoconf, you may include it under +# the same distribution terms that you use for the rest of that program. + +if test $# -eq 0; then + echo 1>&2 "Try \`$0 --help' for more information" + exit 1 +fi + +run=: +sed_output='s/.* --output[ =]\([^ ]*\).*/\1/p' +sed_minuso='s/.* -o \([^ ]*\).*/\1/p' + +# In the cases where this matters, `missing' is being run in the +# srcdir already. +if test -f configure.ac; then + configure_ac=configure.ac +else + configure_ac=configure.in +fi + +msg="missing on your system" + +case $1 in +--run) + # Try to run requested program, and just exit if it succeeds. + run= + shift + "$@" && exit 0 + # Exit code 63 means version mismatch. This often happens + # when the user try to use an ancient version of a tool on + # a file that requires a minimum version. In this case we + # we should proceed has if the program had been absent, or + # if --run hadn't been passed. + if test $? = 63; then + run=: + msg="probably too old" + fi + ;; + + -h|--h|--he|--hel|--help) + echo "\ +$0 [OPTION]... PROGRAM [ARGUMENT]... + +Handle \`PROGRAM [ARGUMENT]...' for when PROGRAM is missing, or return an +error status if there is no known handling for PROGRAM. + +Options: + -h, --help display this help and exit + -v, --version output version information and exit + --run try to run the given command, and emulate it if it fails + +Supported PROGRAM values: + aclocal touch file \`aclocal.m4' + autoconf touch file \`configure' + autoheader touch file \`config.h.in' + autom4te touch the output file, or create a stub one + automake touch all \`Makefile.in' files + bison create \`y.tab.[ch]', if possible, from existing .[ch] + flex create \`lex.yy.c', if possible, from existing .c + help2man touch the output file + lex create \`lex.yy.c', if possible, from existing .c + makeinfo touch the output file + yacc create \`y.tab.[ch]', if possible, from existing .[ch] + +Version suffixes to PROGRAM as well as the prefixes \`gnu-', \`gnu', and +\`g' are ignored when checking the name. + +Send bug reports to ." + exit $? + ;; + + -v|--v|--ve|--ver|--vers|--versi|--versio|--version) + echo "missing $scriptversion (GNU Automake)" + exit $? + ;; + + -*) + echo 1>&2 "$0: Unknown \`$1' option" + echo 1>&2 "Try \`$0 --help' for more information" + exit 1 + ;; + +esac + +# normalize program name to check for. 
+program=`echo "$1" | sed ' + s/^gnu-//; t + s/^gnu//; t + s/^g//; t'` + +# Now exit if we have it, but it failed. Also exit now if we +# don't have it and --version was passed (most likely to detect +# the program). This is about non-GNU programs, so use $1 not +# $program. +case $1 in + lex*|yacc*) + # Not GNU programs, they don't have --version. + ;; + + *) + if test -z "$run" && ($1 --version) > /dev/null 2>&1; then + # We have it, but it failed. + exit 1 + elif test "x$2" = "x--version" || test "x$2" = "x--help"; then + # Could not run --version or --help. This is probably someone + # running `$TOOL --version' or `$TOOL --help' to check whether + # $TOOL exists and not knowing $TOOL uses missing. + exit 1 + fi + ;; +esac + +# If it does not exist, or fails to run (possibly an outdated version), +# try to emulate it. +case $program in + aclocal*) + echo 1>&2 "\ +WARNING: \`$1' is $msg. You should only need it if + you modified \`acinclude.m4' or \`${configure_ac}'. You might want + to install the \`Automake' and \`Perl' packages. Grab them from + any GNU archive site." + touch aclocal.m4 + ;; + + autoconf*) + echo 1>&2 "\ +WARNING: \`$1' is $msg. You should only need it if + you modified \`${configure_ac}'. You might want to install the + \`Autoconf' and \`GNU m4' packages. Grab them from any GNU + archive site." + touch configure + ;; + + autoheader*) + echo 1>&2 "\ +WARNING: \`$1' is $msg. You should only need it if + you modified \`acconfig.h' or \`${configure_ac}'. You might want + to install the \`Autoconf' and \`GNU m4' packages. Grab them + from any GNU archive site." + files=`sed -n 's/^[ ]*A[CM]_CONFIG_HEADER(\([^)]*\)).*/\1/p' ${configure_ac}` + test -z "$files" && files="config.h" + touch_files= + for f in $files; do + case $f in + *:*) touch_files="$touch_files "`echo "$f" | + sed -e 's/^[^:]*://' -e 's/:.*//'`;; + *) touch_files="$touch_files $f.in";; + esac + done + touch $touch_files + ;; + + automake*) + echo 1>&2 "\ +WARNING: \`$1' is $msg. You should only need it if + you modified \`Makefile.am', \`acinclude.m4' or \`${configure_ac}'. + You might want to install the \`Automake' and \`Perl' packages. + Grab them from any GNU archive site." + find . -type f -name Makefile.am -print | + sed 's/\.am$/.in/' | + while read f; do touch "$f"; done + ;; + + autom4te*) + echo 1>&2 "\ +WARNING: \`$1' is needed, but is $msg. + You might have modified some files without having the + proper tools for further handling them. + You can get \`$1' as part of \`Autoconf' from any GNU + archive site." + + file=`echo "$*" | sed -n "$sed_output"` + test -z "$file" && file=`echo "$*" | sed -n "$sed_minuso"` + if test -f "$file"; then + touch $file + else + test -z "$file" || exec >$file + echo "#! /bin/sh" + echo "# Created by GNU Automake missing as a replacement of" + echo "# $ $@" + echo "exit 0" + chmod +x $file + exit 1 + fi + ;; + + bison*|yacc*) + echo 1>&2 "\ +WARNING: \`$1' $msg. You should only need it if + you modified a \`.y' file. You may need the \`Bison' package + in order for those modifications to take effect. You can get + \`Bison' from any GNU archive site." + rm -f y.tab.c y.tab.h + if test $# -ne 1; then + eval LASTARG=\${$#} + case $LASTARG in + *.y) + SRCFILE=`echo "$LASTARG" | sed 's/y$/c/'` + if test -f "$SRCFILE"; then + cp "$SRCFILE" y.tab.c + fi + SRCFILE=`echo "$LASTARG" | sed 's/y$/h/'` + if test -f "$SRCFILE"; then + cp "$SRCFILE" y.tab.h + fi + ;; + esac + fi + if test ! -f y.tab.h; then + echo >y.tab.h + fi + if test ! 
-f y.tab.c; then + echo 'main() { return 0; }' >y.tab.c + fi + ;; + + lex*|flex*) + echo 1>&2 "\ +WARNING: \`$1' is $msg. You should only need it if + you modified a \`.l' file. You may need the \`Flex' package + in order for those modifications to take effect. You can get + \`Flex' from any GNU archive site." + rm -f lex.yy.c + if test $# -ne 1; then + eval LASTARG=\${$#} + case $LASTARG in + *.l) + SRCFILE=`echo "$LASTARG" | sed 's/l$/c/'` + if test -f "$SRCFILE"; then + cp "$SRCFILE" lex.yy.c + fi + ;; + esac + fi + if test ! -f lex.yy.c; then + echo 'main() { return 0; }' >lex.yy.c + fi + ;; + + help2man*) + echo 1>&2 "\ +WARNING: \`$1' is $msg. You should only need it if + you modified a dependency of a manual page. You may need the + \`Help2man' package in order for those modifications to take + effect. You can get \`Help2man' from any GNU archive site." + + file=`echo "$*" | sed -n "$sed_output"` + test -z "$file" && file=`echo "$*" | sed -n "$sed_minuso"` + if test -f "$file"; then + touch $file + else + test -z "$file" || exec >$file + echo ".ab help2man is required to generate this page" + exit $? + fi + ;; + + makeinfo*) + echo 1>&2 "\ +WARNING: \`$1' is $msg. You should only need it if + you modified a \`.texi' or \`.texinfo' file, or any other file + indirectly affecting the aspect of the manual. The spurious + call might also be the consequence of using a buggy \`make' (AIX, + DU, IRIX). You might want to install the \`Texinfo' package or + the \`GNU make' package. Grab either from any GNU archive site." + # The file to touch is that specified with -o ... + file=`echo "$*" | sed -n "$sed_output"` + test -z "$file" && file=`echo "$*" | sed -n "$sed_minuso"` + if test -z "$file"; then + # ... or it is the one specified with @setfilename ... + infile=`echo "$*" | sed 's/.* \([^ ]*\) *$/\1/'` + file=`sed -n ' + /^@setfilename/{ + s/.* \([^ ]*\) *$/\1/ + p + q + }' $infile` + # ... or it is derived from the source name (dir/f.texi becomes f.info) + test -z "$file" && file=`echo "$infile" | sed 's,.*/,,;s,.[^.]*$,,'`.info + fi + # If the file does not exist, the user really needs makeinfo; + # let's fail without touching anything. + test -f $file || exit 1 + touch $file + ;; + + *) + echo 1>&2 "\ +WARNING: \`$1' is needed, and is $msg. + You might have modified some files without having the + proper tools for further handling them. Check the \`README' file, + it often tells you about the needed prerequisites for installing + this package. You may also peek at any GNU archive site, in case + some other package would contain this missing \`$1' program." + exit 1 + ;; +esac + +exit 0 + +# Local variables: +# eval: (add-hook 'write-file-hooks 'time-stamp) +# time-stamp-start: "scriptversion=" +# time-stamp-format: "%:y-%02m-%02d.%02H" +# time-stamp-time-zone: "UTC" +# time-stamp-end: "; # UTC" +# End: diff --git a/backtrace-sys/src/libbacktrace/mmap.c b/backtrace-sys/src/libbacktrace/mmap.c new file mode 100644 index 000000000..32fcba623 --- /dev/null +++ b/backtrace-sys/src/libbacktrace/mmap.c @@ -0,0 +1,325 @@ +/* mmap.c -- Memory allocation with mmap. + Copyright (C) 2012-2018 Free Software Foundation, Inc. + Written by Ian Lance Taylor, Google. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + (1) Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. 
+ + (2) Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + + (3) The name of the author may not be used to + endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, +INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, +STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING +IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. */ + +#include "config.h" + +#include +#include +#include +#include +#include +#include + +#include "backtrace.h" +#include "internal.h" + +/* Memory allocation on systems that provide anonymous mmap. This + permits the backtrace functions to be invoked from a signal + handler, assuming that mmap is async-signal safe. */ + +#ifndef MAP_ANONYMOUS +#define MAP_ANONYMOUS MAP_ANON +#endif + +#ifndef MAP_FAILED +#define MAP_FAILED ((void *)-1) +#endif + +/* A list of free memory blocks. */ + +struct backtrace_freelist_struct +{ + /* Next on list. */ + struct backtrace_freelist_struct *next; + /* Size of this block, including this structure. */ + size_t size; +}; + +/* Free memory allocated by backtrace_alloc. */ + +static void +backtrace_free_locked (struct backtrace_state *state, void *addr, size_t size) +{ + /* Just leak small blocks. We don't have to be perfect. Don't put + more than 16 entries on the free list, to avoid wasting time + searching when allocating a block. If we have more than 16 + entries, leak the smallest entry. */ + + if (size >= sizeof (struct backtrace_freelist_struct)) + { + size_t c; + struct backtrace_freelist_struct **ppsmall; + struct backtrace_freelist_struct **pp; + struct backtrace_freelist_struct *p; + + c = 0; + ppsmall = NULL; + for (pp = &state->freelist; *pp != NULL; pp = &(*pp)->next) + { + if (ppsmall == NULL || (*pp)->size < (*ppsmall)->size) + ppsmall = pp; + ++c; + } + if (c >= 16) + { + if (size <= (*ppsmall)->size) + return; + *ppsmall = (*ppsmall)->next; + } + + p = (struct backtrace_freelist_struct *) addr; + p->next = state->freelist; + p->size = size; + state->freelist = p; + } +} + +/* Allocate memory like malloc. If ERROR_CALLBACK is NULL, don't + report an error. */ + +void * +backtrace_alloc (struct backtrace_state *state, + size_t size, backtrace_error_callback error_callback, + void *data) +{ + void *ret; + int locked; + struct backtrace_freelist_struct **pp; + size_t pagesize; + size_t asksize; + void *page; + + ret = NULL; + + /* If we can acquire the lock, then see if there is space on the + free list. If we can't acquire the lock, drop straight into + using mmap. __sync_lock_test_and_set returns the old state of + the lock, so we have acquired it if it returns 0. 
*/ + + if (!state->threaded) + locked = 1; + else + locked = __sync_lock_test_and_set (&state->lock_alloc, 1) == 0; + + if (locked) + { + for (pp = &state->freelist; *pp != NULL; pp = &(*pp)->next) + { + if ((*pp)->size >= size) + { + struct backtrace_freelist_struct *p; + + p = *pp; + *pp = p->next; + + /* Round for alignment; we assume that no type we care about + is more than 8 bytes. */ + size = (size + 7) & ~ (size_t) 7; + if (size < p->size) + backtrace_free_locked (state, (char *) p + size, + p->size - size); + + ret = (void *) p; + + break; + } + } + + if (state->threaded) + __sync_lock_release (&state->lock_alloc); + } + + if (ret == NULL) + { + /* Allocate a new page. */ + + pagesize = getpagesize (); + asksize = (size + pagesize - 1) & ~ (pagesize - 1); + page = mmap (NULL, asksize, PROT_READ | PROT_WRITE, + MAP_PRIVATE | MAP_ANONYMOUS, -1, 0); + if (page == MAP_FAILED) + { + if (error_callback) + error_callback (data, "mmap", errno); + } + else + { + size = (size + 7) & ~ (size_t) 7; + if (size < asksize) + backtrace_free (state, (char *) page + size, asksize - size, + error_callback, data); + + ret = page; + } + } + + return ret; +} + +/* Free memory allocated by backtrace_alloc. */ + +void +backtrace_free (struct backtrace_state *state, void *addr, size_t size, + backtrace_error_callback error_callback ATTRIBUTE_UNUSED, + void *data ATTRIBUTE_UNUSED) +{ + int locked; + + /* If we are freeing a large aligned block, just release it back to + the system. This case arises when growing a vector for a large + binary with lots of debug info. Calling munmap here may cause us + to call mmap again if there is also a large shared library; we + just live with that. */ + if (size >= 16 * 4096) + { + size_t pagesize; + + pagesize = getpagesize (); + if (((uintptr_t) addr & (pagesize - 1)) == 0 + && (size & (pagesize - 1)) == 0) + { + /* If munmap fails for some reason, just add the block to + the freelist. */ + if (munmap (addr, size) == 0) + return; + } + } + + /* If we can acquire the lock, add the new space to the free list. + If we can't acquire the lock, just leak the memory. + __sync_lock_test_and_set returns the old state of the lock, so we + have acquired it if it returns 0. */ + + if (!state->threaded) + locked = 1; + else + locked = __sync_lock_test_and_set (&state->lock_alloc, 1) == 0; + + if (locked) + { + backtrace_free_locked (state, addr, size); + + if (state->threaded) + __sync_lock_release (&state->lock_alloc); + } +} + +/* Grow VEC by SIZE bytes. */ + +void * +backtrace_vector_grow (struct backtrace_state *state,size_t size, + backtrace_error_callback error_callback, + void *data, struct backtrace_vector *vec) +{ + void *ret; + + if (size > vec->alc) + { + size_t pagesize; + size_t alc; + void *base; + + pagesize = getpagesize (); + alc = vec->size + size; + if (vec->size == 0) + alc = 16 * size; + else if (alc < pagesize) + { + alc *= 2; + if (alc > pagesize) + alc = pagesize; + } + else + { + alc *= 2; + alc = (alc + pagesize - 1) & ~ (pagesize - 1); + } + base = backtrace_alloc (state, alc, error_callback, data); + if (base == NULL) + return NULL; + if (vec->base != NULL) + { + memcpy (base, vec->base, vec->size); + backtrace_free (state, vec->base, vec->size + vec->alc, + error_callback, data); + } + vec->base = base; + vec->alc = alc - vec->size; + } + + ret = (char *) vec->base + vec->size; + vec->size += size; + vec->alc -= size; + return ret; +} + +/* Finish the current allocation on VEC. 
*/ + +void * +backtrace_vector_finish ( + struct backtrace_state *state ATTRIBUTE_UNUSED, + struct backtrace_vector *vec, + backtrace_error_callback error_callback ATTRIBUTE_UNUSED, + void *data ATTRIBUTE_UNUSED) +{ + void *ret; + + ret = vec->base; + vec->base = (char *) vec->base + vec->size; + vec->size = 0; + return ret; +} + +/* Release any extra space allocated for VEC. */ + +int +backtrace_vector_release (struct backtrace_state *state, + struct backtrace_vector *vec, + backtrace_error_callback error_callback, + void *data) +{ + size_t size; + size_t alc; + size_t aligned; + + /* Make sure that the block that we free is aligned on an 8-byte + boundary. */ + size = vec->size; + alc = vec->alc; + aligned = (size + 7) & ~ (size_t) 7; + alc -= aligned - size; + + backtrace_free (state, (char *) vec->base + aligned, alc, + error_callback, data); + vec->alc = 0; + return 1; +} diff --git a/backtrace-sys/src/libbacktrace/mmapio.c b/backtrace-sys/src/libbacktrace/mmapio.c new file mode 100644 index 000000000..94e8c9323 --- /dev/null +++ b/backtrace-sys/src/libbacktrace/mmapio.c @@ -0,0 +1,100 @@ +/* mmapio.c -- File views using mmap. + Copyright (C) 2012-2018 Free Software Foundation, Inc. + Written by Ian Lance Taylor, Google. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + (1) Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + (2) Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + + (3) The name of the author may not be used to + endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, +INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, +STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING +IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. */ + +#include "config.h" + +#include +#include +#include +#include + +#include "backtrace.h" +#include "internal.h" + +#ifndef MAP_FAILED +#define MAP_FAILED ((void *)-1) +#endif + +/* This file implements file views and memory allocation when mmap is + available. */ + +/* Create a view of SIZE bytes from DESCRIPTOR at OFFSET. 
*/ + +int +backtrace_get_view (struct backtrace_state *state ATTRIBUTE_UNUSED, + int descriptor, off_t offset, size_t size, + backtrace_error_callback error_callback, + void *data, struct backtrace_view *view) +{ + size_t pagesize; + unsigned int inpage; + off_t pageoff; + void *map; + + pagesize = getpagesize (); + inpage = offset % pagesize; + pageoff = offset - inpage; + + size += inpage; + size = (size + (pagesize - 1)) & ~ (pagesize - 1); + + map = mmap (NULL, size, PROT_READ, MAP_PRIVATE, descriptor, pageoff); + if (map == MAP_FAILED) + { + error_callback (data, "mmap", errno); + return 0; + } + + view->data = (char *) map + inpage; + view->base = map; + view->len = size; + + return 1; +} + +/* Release a view read by backtrace_get_view. */ + +void +backtrace_release_view (struct backtrace_state *state ATTRIBUTE_UNUSED, + struct backtrace_view *view, + backtrace_error_callback error_callback, + void *data) +{ + union { + const void *cv; + void *v; + } const_cast; + + const_cast.cv = view->base; + if (munmap (const_cast.v, view->len) < 0) + error_callback (data, "munmap", errno); +} diff --git a/backtrace-sys/src/libbacktrace/move-if-change b/backtrace-sys/src/libbacktrace/move-if-change new file mode 100644 index 000000000..88d957456 --- /dev/null +++ b/backtrace-sys/src/libbacktrace/move-if-change @@ -0,0 +1,83 @@ +#!/bin/sh +# Like mv $1 $2, but if the files are the same, just delete $1. +# Status is zero if successful, nonzero otherwise. + +VERSION='2012-01-06 07:23'; # UTC +# The definition above must lie within the first 8 lines in order +# for the Emacs time-stamp write hook (at end) to update it. +# If you change this file with Emacs, please let the write hook +# do its job. Otherwise, update this string manually. + +# Copyright (C) 2002-2014 Free Software Foundation, Inc. + +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. + +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + +usage="usage: $0 SOURCE DEST" + +help="$usage + or: $0 OPTION +If SOURCE is different than DEST, then move it to DEST; else remove SOURCE. + + --help display this help and exit + --version output version information and exit + +The variable CMPPROG can be used to specify an alternative to 'cmp'. + +Report bugs to ." + +version=`expr "$VERSION" : '\([^ ]*\)'` +version="move-if-change (gnulib) $version +Copyright (C) 2011 Free Software Foundation, Inc. +License GPLv3+: GNU GPL version 3 or later +This is free software: you are free to change and redistribute it. +There is NO WARRANTY, to the extent permitted by law." 
+ +cmpprog=${CMPPROG-cmp} + +for arg +do + case $arg in + --help | --hel | --he | --h) + exec echo "$help" ;; + --version | --versio | --versi | --vers | --ver | --ve | --v) + exec echo "$version" ;; + --) + shift + break ;; + -*) + echo "$0: invalid option: $arg" >&2 + exit 1 ;; + *) + break ;; + esac +done + +test $# -eq 2 || { echo "$0: $usage" >&2; exit 1; } + +if test -r "$2" && $cmpprog -- "$1" "$2" >/dev/null; then + rm -f -- "$1" +else + if mv -f -- "$1" "$2"; then :; else + # Ignore failure due to a concurrent move-if-change. + test -r "$2" && $cmpprog -- "$1" "$2" >/dev/null && rm -f -- "$1" + fi +fi + +## Local Variables: +## eval: (add-hook 'write-file-hooks 'time-stamp) +## time-stamp-start: "VERSION='" +## time-stamp-format: "%:y-%02m-%02d %02H:%02M" +## time-stamp-time-zone: "UTC" +## time-stamp-end: "'; # UTC" +## End: diff --git a/backtrace-sys/src/libbacktrace/nounwind.c b/backtrace-sys/src/libbacktrace/nounwind.c new file mode 100644 index 000000000..0a046cc29 --- /dev/null +++ b/backtrace-sys/src/libbacktrace/nounwind.c @@ -0,0 +1,66 @@ +/* backtrace.c -- Entry point for stack backtrace library. + Copyright (C) 2012-2018 Free Software Foundation, Inc. + Written by Ian Lance Taylor, Google. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + (1) Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + (2) Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + + (3) The name of the author may not be used to + endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, +INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, +STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING +IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. */ + +#include "config.h" + +#include + +#include "backtrace.h" + +#include "internal.h" + +/* This source file is compiled if the unwind library is not + available. 
*/ + +int +backtrace_full (struct backtrace_state *state ATTRIBUTE_UNUSED, + int skip ATTRIBUTE_UNUSED, + backtrace_full_callback callback ATTRIBUTE_UNUSED, + backtrace_error_callback error_callback, void *data) +{ + error_callback (data, + "no stack trace because unwind library not available", + 0); + return 0; +} + +int +backtrace_simple (struct backtrace_state *state ATTRIBUTE_UNUSED, + int skip ATTRIBUTE_UNUSED, + backtrace_simple_callback callback ATTRIBUTE_UNUSED, + backtrace_error_callback error_callback, void *data) +{ + error_callback (data, + "no stack trace because unwind library not available", + 0); + return 0; +} diff --git a/backtrace-sys/src/libbacktrace/pecoff.c b/backtrace-sys/src/libbacktrace/pecoff.c new file mode 100644 index 000000000..049d7e3b3 --- /dev/null +++ b/backtrace-sys/src/libbacktrace/pecoff.c @@ -0,0 +1,943 @@ +/* pecoff.c -- Get debug data from a PE/COFFF file for backtraces. + Copyright (C) 2015-2018 Free Software Foundation, Inc. + Adapted from elf.c by Tristan Gingold, AdaCore. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + (1) Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + (2) Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + + (3) The name of the author may not be used to + endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, +INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, +STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING +IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. */ + +#include "config.h" + +#include +#include +#include + +#include "backtrace.h" +#include "internal.h" + +/* Coff file header. */ + +typedef struct { + uint16_t machine; + uint16_t number_of_sections; + uint32_t time_date_stamp; + uint32_t pointer_to_symbol_table; + uint32_t number_of_symbols; + uint16_t size_of_optional_header; + uint16_t characteristics; +} b_coff_file_header; + +/* Coff optional header. */ + +typedef struct { + uint16_t magic; + uint8_t major_linker_version; + uint8_t minor_linker_version; + uint32_t size_of_code; + uint32_t size_of_initialized_data; + uint32_t size_of_uninitialized_data; + uint32_t address_of_entry_point; + uint32_t base_of_code; + union { + struct { + uint32_t base_of_data; + uint32_t image_base; + } pe; + struct { + uint64_t image_base; + } pep; + } u; +} b_coff_optional_header; + +/* Values of magic in optional header. */ + +#define PE_MAGIC 0x10b /* PE32 executable. */ +#define PEP_MAGIC 0x20b /* PE32+ executable (for 64bit targets). */ + +/* Coff section header. 
*/ + +typedef struct { + char name[8]; + uint32_t virtual_size; + uint32_t virtual_address; + uint32_t size_of_raw_data; + uint32_t pointer_to_raw_data; + uint32_t pointer_to_relocations; + uint32_t pointer_to_line_numbers; + uint16_t number_of_relocations; + uint16_t number_of_line_numbers; + uint32_t characteristics; +} b_coff_section_header; + +/* Coff symbol name. */ + +typedef union { + char short_name[8]; + struct { + unsigned char zeroes[4]; + unsigned char off[4]; + } long_name; +} b_coff_name; + +/* Coff symbol (external representation which is unaligned). */ + +typedef struct { + b_coff_name name; + unsigned char value[4]; + unsigned char section_number[2]; + unsigned char type[2]; + unsigned char storage_class; + unsigned char number_of_aux_symbols; +} b_coff_external_symbol; + +/* Symbol types. */ + +#define N_TBSHFT 4 /* Shift for the derived type. */ +#define IMAGE_SYM_DTYPE_FUNCTION 2 /* Function derived type. */ + +/* Size of a coff symbol. */ + +#define SYM_SZ 18 + +/* Coff symbol, internal representation (aligned). */ + +typedef struct { + const char *name; + uint32_t value; + int16_t sec; + uint16_t type; + uint16_t sc; +} b_coff_internal_symbol; + +/* An index of sections we care about. */ + +enum debug_section +{ + DEBUG_INFO, + DEBUG_LINE, + DEBUG_ABBREV, + DEBUG_RANGES, + DEBUG_STR, + DEBUG_MAX +}; + +/* Names of sections, indexed by enum debug_section. */ + +static const char * const debug_section_names[DEBUG_MAX] = +{ + ".debug_info", + ".debug_line", + ".debug_abbrev", + ".debug_ranges", + ".debug_str" +}; + +/* Information we gather for the sections we care about. */ + +struct debug_section_info +{ + /* Section file offset. */ + off_t offset; + /* Section size. */ + size_t size; + /* Section contents, after read from file. */ + const unsigned char *data; +}; + +/* Information we keep for an coff symbol. */ + +struct coff_symbol +{ + /* The name of the symbol. */ + const char *name; + /* The address of the symbol. */ + uintptr_t address; +}; + +/* Information to pass to coff_syminfo. */ + +struct coff_syminfo_data +{ + /* Symbols for the next module. */ + struct coff_syminfo_data *next; + /* The COFF symbols, sorted by address. */ + struct coff_symbol *symbols; + /* The number of symbols. */ + size_t count; +}; + +/* A dummy callback function used when we can't find any debug info. */ + +static int +coff_nodebug (struct backtrace_state *state ATTRIBUTE_UNUSED, + uintptr_t pc ATTRIBUTE_UNUSED, + backtrace_full_callback callback ATTRIBUTE_UNUSED, + backtrace_error_callback error_callback, void *data) +{ + error_callback (data, "no debug info in PE/COFF executable", -1); + return 0; +} + +/* A dummy callback function used when we can't find a symbol + table. */ + +static void +coff_nosyms (struct backtrace_state *state ATTRIBUTE_UNUSED, + uintptr_t addr ATTRIBUTE_UNUSED, + backtrace_syminfo_callback callback ATTRIBUTE_UNUSED, + backtrace_error_callback error_callback, void *data) +{ + error_callback (data, "no symbol table in PE/COFF executable", -1); +} + +/* Read a potentially unaligned 4 byte word at P, using native endianness. */ + +static uint32_t +coff_read4 (const unsigned char *p) +{ + uint32_t res; + + memcpy (&res, p, 4); + return res; +} + +/* Read a potentially unaligned 2 byte word at P, using native endianness. + All 2 byte word in symbols are always aligned, but for coherency all + fields are declared as char arrays. 
*/ + +static uint16_t +coff_read2 (const unsigned char *p) +{ + uint16_t res; + + memcpy (&res, p, sizeof (res)); + return res; +} + +/* Return the length (without the trailing 0) of a COFF short name. */ + +static size_t +coff_short_name_len (const char *name) +{ + int i; + + for (i = 0; i < 8; i++) + if (name[i] == 0) + return i; + return 8; +} + +/* Return true iff COFF short name CNAME is the same as NAME (a NUL-terminated + string). */ + +static int +coff_short_name_eq (const char *name, const char *cname) +{ + int i; + + for (i = 0; i < 8; i++) + { + if (name[i] != cname[i]) + return 0; + if (name[i] == 0) + return 1; + } + return name[8] == 0; +} + +/* Return true iff NAME is the same as string at offset OFF. */ + +static int +coff_long_name_eq (const char *name, unsigned int off, + struct backtrace_view *str_view) +{ + if (off >= str_view->len) + return 0; + return strcmp (name, (const char *)str_view->data + off) == 0; +} + +/* Compare struct coff_symbol for qsort. */ + +static int +coff_symbol_compare (const void *v1, const void *v2) +{ + const struct coff_symbol *e1 = (const struct coff_symbol *) v1; + const struct coff_symbol *e2 = (const struct coff_symbol *) v2; + + if (e1->address < e2->address) + return -1; + else if (e1->address > e2->address) + return 1; + else + return 0; +} + +/* Convert SYM to internal (and aligned) format ISYM, using string table + from STRTAB and STRTAB_SIZE, and number of sections SECTS_NUM. + Return -1 in case of error (invalid section number or string index). */ + +static int +coff_expand_symbol (b_coff_internal_symbol *isym, + const b_coff_external_symbol *sym, + uint16_t sects_num, + const unsigned char *strtab, size_t strtab_size) +{ + isym->type = coff_read2 (sym->type); + isym->sec = coff_read2 (sym->section_number); + isym->sc = sym->storage_class; + + if (isym->sec > 0 && (uint16_t) isym->sec > sects_num) + return -1; + if (sym->name.short_name[0] != 0) + isym->name = sym->name.short_name; + else + { + uint32_t off = coff_read4 (sym->name.long_name.off); + + if (off >= strtab_size) + return -1; + isym->name = (const char *) strtab + off; + } + return 0; +} + +/* Return true iff SYM is a defined symbol for a function. Data symbols + aren't considered because they aren't easily identified (same type as + section names, presence of symbols defined by the linker script). */ + +static int +coff_is_function_symbol (const b_coff_internal_symbol *isym) +{ + return (isym->type >> N_TBSHFT) == IMAGE_SYM_DTYPE_FUNCTION + && isym->sec > 0; +} + +/* Initialize the symbol table info for coff_syminfo. */ + +static int +coff_initialize_syminfo (struct backtrace_state *state, + uintptr_t base_address, + const b_coff_section_header *sects, size_t sects_num, + const b_coff_external_symbol *syms, size_t syms_size, + const unsigned char *strtab, size_t strtab_size, + backtrace_error_callback error_callback, + void *data, struct coff_syminfo_data *sdata) +{ + size_t syms_count; + char *coff_symstr; + size_t coff_symstr_len; + size_t coff_symbol_count; + size_t coff_symbol_size; + struct coff_symbol *coff_symbols; + struct coff_symbol *coff_sym; + char *coff_str; + size_t i; + + syms_count = syms_size / SYM_SZ; + + /* We only care about function symbols. Count them. Also count size of + strings for in-symbol names. 
*/ + coff_symbol_count = 0; + coff_symstr_len = 0; + for (i = 0; i < syms_count; ++i) + { + const b_coff_external_symbol *asym = &syms[i]; + b_coff_internal_symbol isym; + + if (coff_expand_symbol (&isym, asym, sects_num, strtab, strtab_size) < 0) + { + error_callback (data, "invalid section or offset in coff symbol", 0); + return 0; + } + if (coff_is_function_symbol (&isym)) + { + ++coff_symbol_count; + if (asym->name.short_name[0] != 0) + coff_symstr_len += coff_short_name_len (asym->name.short_name) + 1; + } + + i += asym->number_of_aux_symbols; + } + + coff_symbol_size = (coff_symbol_count + 1) * sizeof (struct coff_symbol); + coff_symbols = ((struct coff_symbol *) + backtrace_alloc (state, coff_symbol_size, error_callback, + data)); + if (coff_symbols == NULL) + return 0; + + /* Allocate memory for symbols strings. */ + if (coff_symstr_len > 0) + { + coff_symstr = ((char *) + backtrace_alloc (state, coff_symstr_len, error_callback, + data)); + if (coff_symstr == NULL) + { + backtrace_free (state, coff_symbols, coff_symbol_size, + error_callback, data); + return 0; + } + } + else + coff_symstr = NULL; + + /* Copy symbols. */ + coff_sym = coff_symbols; + coff_str = coff_symstr; + for (i = 0; i < syms_count; ++i) + { + const b_coff_external_symbol *asym = &syms[i]; + b_coff_internal_symbol isym; + + if (coff_expand_symbol (&isym, asym, sects_num, strtab, strtab_size)) + { + /* Should not fail, as it was already tested in the previous + loop. */ + abort (); + } + if (coff_is_function_symbol (&isym)) + { + const char *name; + int16_t secnum; + + if (asym->name.short_name[0] != 0) + { + size_t len = coff_short_name_len (isym.name); + name = coff_str; + memcpy (coff_str, isym.name, len); + coff_str[len] = 0; + coff_str += len + 1; + } + else + name = isym.name; + + /* Strip leading '_'. */ + if (name[0] == '_') + name++; + + /* Symbol value is section relative, so we need to read the address + of its section. */ + secnum = coff_read2 (asym->section_number); + + coff_sym->name = name; + coff_sym->address = (coff_read4 (asym->value) + + sects[secnum - 1].virtual_address + + base_address); + coff_sym++; + } + + i += asym->number_of_aux_symbols; + } + + /* End of symbols marker. */ + coff_sym->name = NULL; + coff_sym->address = -1; + + backtrace_qsort (coff_symbols, coff_symbol_count, + sizeof (struct coff_symbol), coff_symbol_compare); + + sdata->next = NULL; + sdata->symbols = coff_symbols; + sdata->count = coff_symbol_count; + + return 1; +} + +/* Add EDATA to the list in STATE. */ + +static void +coff_add_syminfo_data (struct backtrace_state *state, + struct coff_syminfo_data *sdata) +{ + if (!state->threaded) + { + struct coff_syminfo_data **pp; + + for (pp = (struct coff_syminfo_data **) (void *) &state->syminfo_data; + *pp != NULL; + pp = &(*pp)->next) + ; + *pp = sdata; + } + else + { + while (1) + { + struct coff_syminfo_data **pp; + + pp = (struct coff_syminfo_data **) (void *) &state->syminfo_data; + + while (1) + { + struct coff_syminfo_data *p; + + p = backtrace_atomic_load_pointer (pp); + + if (p == NULL) + break; + + pp = &p->next; + } + + if (__sync_bool_compare_and_swap (pp, NULL, sdata)) + break; + } + } +} + +/* Compare an ADDR against an elf_symbol for bsearch. We allocate one + extra entry in the array so that this can look safely at the next + entry. 
*/ + +static int +coff_symbol_search (const void *vkey, const void *ventry) +{ + const uintptr_t *key = (const uintptr_t *) vkey; + const struct coff_symbol *entry = (const struct coff_symbol *) ventry; + uintptr_t addr; + + addr = *key; + if (addr < entry->address) + return -1; + else if (addr >= entry[1].address) + return 1; + else + return 0; +} + +/* Return the symbol name and value for an ADDR. */ + +static void +coff_syminfo (struct backtrace_state *state, uintptr_t addr, + backtrace_syminfo_callback callback, + backtrace_error_callback error_callback ATTRIBUTE_UNUSED, + void *data) +{ + struct coff_syminfo_data *sdata; + struct coff_symbol *sym = NULL; + + if (!state->threaded) + { + for (sdata = (struct coff_syminfo_data *) state->syminfo_data; + sdata != NULL; + sdata = sdata->next) + { + sym = ((struct coff_symbol *) + bsearch (&addr, sdata->symbols, sdata->count, + sizeof (struct coff_symbol), coff_symbol_search)); + if (sym != NULL) + break; + } + } + else + { + struct coff_syminfo_data **pp; + + pp = (struct coff_syminfo_data **) (void *) &state->syminfo_data; + while (1) + { + sdata = backtrace_atomic_load_pointer (pp); + if (sdata == NULL) + break; + + sym = ((struct coff_symbol *) + bsearch (&addr, sdata->symbols, sdata->count, + sizeof (struct coff_symbol), coff_symbol_search)); + if (sym != NULL) + break; + + pp = &sdata->next; + } + } + + if (sym == NULL) + callback (data, addr, NULL, 0, 0); + else + callback (data, addr, sym->name, sym->address, 0); +} + +/* Add the backtrace data for one PE/COFF file. Returns 1 on success, + 0 on failure (in both cases descriptor is closed). */ + +static int +coff_add (struct backtrace_state *state, int descriptor, + backtrace_error_callback error_callback, void *data, + fileline *fileline_fn, int *found_sym, int *found_dwarf) +{ + struct backtrace_view fhdr_view; + off_t fhdr_off; + int magic_ok; + b_coff_file_header fhdr; + off_t opt_sects_off; + size_t opt_sects_size; + unsigned int sects_num; + struct backtrace_view sects_view; + int sects_view_valid; + const b_coff_optional_header *opt_hdr; + const b_coff_section_header *sects; + struct backtrace_view str_view; + int str_view_valid; + uint32_t str_size; + off_t str_off; + // NOTE: upstream doesn't have `{0}`, this is a fix for Rust issue #39468. + // If syms_view is not initialized, then `free(syms_view.base)` may segfault later. + struct backtrace_view syms_view = {0}; + off_t syms_off; + size_t syms_size; + int syms_view_valid; + unsigned int syms_num; + unsigned int i; + struct debug_section_info sections[DEBUG_MAX]; + off_t min_offset; + off_t max_offset; + struct backtrace_view debug_view; + int debug_view_valid; + uintptr_t image_base; + + *found_sym = 0; + *found_dwarf = 0; + + sects_view_valid = 0; + syms_view_valid = 0; + str_view_valid = 0; + debug_view_valid = 0; + + /* Map the MS-DOS stub (if any) and extract file header offset. */ + if (!backtrace_get_view (state, descriptor, 0, 0x40, error_callback, + data, &fhdr_view)) + goto fail; + + { + const unsigned char *vptr = fhdr_view.data; + + if (vptr[0] == 'M' && vptr[1] == 'Z') + fhdr_off = coff_read4 (vptr + 0x3c); + else + fhdr_off = 0; + } + + backtrace_release_view (state, &fhdr_view, error_callback, data); + + /* Map the coff file header. 
*/ + if (!backtrace_get_view (state, descriptor, fhdr_off, + sizeof (b_coff_file_header) + 4, + error_callback, data, &fhdr_view)) + goto fail; + + if (fhdr_off != 0) + { + const char *magic = (const char *) fhdr_view.data; + magic_ok = memcmp (magic, "PE\0", 4) == 0; + fhdr_off += 4; + + memcpy (&fhdr, fhdr_view.data + 4, sizeof fhdr); + } + else + { + memcpy (&fhdr, fhdr_view.data, sizeof fhdr); + /* TODO: test fhdr.machine for coff but non-PE platforms. */ + magic_ok = 0; + } + backtrace_release_view (state, &fhdr_view, error_callback, data); + + if (!magic_ok) + { + error_callback (data, "executable file is not COFF", 0); + goto fail; + } + + sects_num = fhdr.number_of_sections; + syms_num = fhdr.number_of_symbols; + + opt_sects_off = fhdr_off + sizeof (fhdr); + opt_sects_size = (fhdr.size_of_optional_header + + sects_num * sizeof (b_coff_section_header)); + + /* To translate PC to file/line when using DWARF, we need to find + the .debug_info and .debug_line sections. */ + + /* Read the optional header and the section headers. */ + + if (!backtrace_get_view (state, descriptor, opt_sects_off, opt_sects_size, + error_callback, data, §s_view)) + goto fail; + sects_view_valid = 1; + opt_hdr = (const b_coff_optional_header *) sects_view.data; + sects = (const b_coff_section_header *) + (sects_view.data + fhdr.size_of_optional_header); + + if (fhdr.size_of_optional_header > sizeof (*opt_hdr)) + { + if (opt_hdr->magic == PE_MAGIC) + image_base = opt_hdr->u.pe.image_base; + else if (opt_hdr->magic == PEP_MAGIC) + image_base = opt_hdr->u.pep.image_base; + else + { + error_callback (data, "bad magic in PE optional header", 0); + goto fail; + } + } + else + image_base = 0; + + /* Read the symbol table and the string table. */ + + if (fhdr.pointer_to_symbol_table == 0) + { + /* No symbol table, no string table. */ + str_off = 0; + str_size = 0; + syms_num = 0; + syms_size = 0; + } + else + { + /* Symbol table is followed by the string table. The string table + starts with its length (on 4 bytes). + Map the symbol table and the length of the string table. */ + syms_off = fhdr.pointer_to_symbol_table; + syms_size = syms_num * SYM_SZ; + + if (!backtrace_get_view (state, descriptor, syms_off, syms_size + 4, + error_callback, data, &syms_view)) + goto fail; + syms_view_valid = 1; + + str_size = coff_read4 (syms_view.data + syms_size); + + str_off = syms_off + syms_size; + + if (str_size > 4) + { + /* Map string table (including the length word). */ + + if (!backtrace_get_view (state, descriptor, str_off, str_size, + error_callback, data, &str_view)) + goto fail; + str_view_valid = 1; + } + } + + memset (sections, 0, sizeof sections); + + /* Look for the symbol table. */ + for (i = 0; i < sects_num; ++i) + { + const b_coff_section_header *s = sects + i; + unsigned int str_off; + int j; + + if (s->name[0] == '/') + { + /* Extended section name. */ + str_off = atoi (s->name + 1); + } + else + str_off = 0; + + for (j = 0; j < (int) DEBUG_MAX; ++j) + { + const char *dbg_name = debug_section_names[j]; + int match; + + if (str_off != 0) + match = coff_long_name_eq (dbg_name, str_off, &str_view); + else + match = coff_short_name_eq (dbg_name, s->name); + if (match) + { + sections[j].offset = s->pointer_to_raw_data; + sections[j].size = s->virtual_size <= s->size_of_raw_data ? 
+ s->virtual_size : s->size_of_raw_data; + break; + } + } + } + + if (syms_num != 0) + { + struct coff_syminfo_data *sdata; + + sdata = ((struct coff_syminfo_data *) + backtrace_alloc (state, sizeof *sdata, error_callback, data)); + if (sdata == NULL) + goto fail; + + if (!coff_initialize_syminfo (state, image_base, + sects, sects_num, + syms_view.data, syms_size, + str_view.data, str_size, + error_callback, data, sdata)) + { + backtrace_free (state, sdata, sizeof *sdata, error_callback, data); + goto fail; + } + + *found_sym = 1; + + coff_add_syminfo_data (state, sdata); + } + + backtrace_release_view (state, §s_view, error_callback, data); + sects_view_valid = 0; + if (syms_view_valid) + { + backtrace_release_view (state, &syms_view, error_callback, data); + syms_view_valid = 0; + } + + /* Read all the debug sections in a single view, since they are + probably adjacent in the file. We never release this view. */ + + min_offset = 0; + max_offset = 0; + for (i = 0; i < (int) DEBUG_MAX; ++i) + { + off_t end; + + if (sections[i].size == 0) + continue; + if (min_offset == 0 || sections[i].offset < min_offset) + min_offset = sections[i].offset; + end = sections[i].offset + sections[i].size; + if (end > max_offset) + max_offset = end; + } + if (min_offset == 0 || max_offset == 0) + { + if (!backtrace_close (descriptor, error_callback, data)) + goto fail; + *fileline_fn = coff_nodebug; + return 1; + } + + if (!backtrace_get_view (state, descriptor, min_offset, + max_offset - min_offset, + error_callback, data, &debug_view)) + goto fail; + debug_view_valid = 1; + + /* We've read all we need from the executable. */ + if (!backtrace_close (descriptor, error_callback, data)) + goto fail; + descriptor = -1; + + for (i = 0; i < (int) DEBUG_MAX; ++i) + { + if (sections[i].size == 0) + sections[i].data = NULL; + else + sections[i].data = ((const unsigned char *) debug_view.data + + (sections[i].offset - min_offset)); + } + + if (!backtrace_dwarf_add (state, /* base_address */ 0, + sections[DEBUG_INFO].data, + sections[DEBUG_INFO].size, + sections[DEBUG_LINE].data, + sections[DEBUG_LINE].size, + sections[DEBUG_ABBREV].data, + sections[DEBUG_ABBREV].size, + sections[DEBUG_RANGES].data, + sections[DEBUG_RANGES].size, + sections[DEBUG_STR].data, + sections[DEBUG_STR].size, + 0, /* FIXME */ + error_callback, data, fileline_fn)) + goto fail; + + *found_dwarf = 1; + + return 1; + + fail: + if (sects_view_valid) + backtrace_release_view (state, §s_view, error_callback, data); + if (str_view_valid) + backtrace_release_view (state, &str_view, error_callback, data); + if (syms_view_valid) + backtrace_release_view (state, &syms_view, error_callback, data); + if (debug_view_valid) + backtrace_release_view (state, &debug_view, error_callback, data); + if (descriptor != -1) + backtrace_close (descriptor, error_callback, data); + return 0; +} + +/* Initialize the backtrace data we need from an ELF executable. At + the ELF level, all we need to do is find the debug info + sections. 
*/ + +int +backtrace_initialize (struct backtrace_state *state, + const char *filename ATTRIBUTE_UNUSED, int descriptor, + backtrace_error_callback error_callback, + void *data, fileline *fileline_fn) +{ + int ret; + int found_sym; + int found_dwarf; + fileline coff_fileline_fn; + + ret = coff_add (state, descriptor, error_callback, data, + &coff_fileline_fn, &found_sym, &found_dwarf); + if (!ret) + return 0; + + if (!state->threaded) + { + if (found_sym) + state->syminfo_fn = coff_syminfo; + else if (state->syminfo_fn == NULL) + state->syminfo_fn = coff_nosyms; + } + else + { + if (found_sym) + backtrace_atomic_store_pointer (&state->syminfo_fn, coff_syminfo); + else + __sync_bool_compare_and_swap (&state->syminfo_fn, NULL, coff_nosyms); + } + + if (!state->threaded) + { + if (state->fileline_fn == NULL || state->fileline_fn == coff_nodebug) + *fileline_fn = coff_fileline_fn; + } + else + { + fileline current_fn; + + current_fn = backtrace_atomic_load_pointer (&state->fileline_fn); + if (current_fn == NULL || current_fn == coff_nodebug) + *fileline_fn = coff_fileline_fn; + } + + return 1; +} diff --git a/backtrace-sys/src/libbacktrace/posix.c b/backtrace-sys/src/libbacktrace/posix.c new file mode 100644 index 000000000..ce441d98f --- /dev/null +++ b/backtrace-sys/src/libbacktrace/posix.c @@ -0,0 +1,100 @@ +/* posix.c -- POSIX file I/O routines for the backtrace library. + Copyright (C) 2012-2018 Free Software Foundation, Inc. + Written by Ian Lance Taylor, Google. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + (1) Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + (2) Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + + (3) The name of the author may not be used to + endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, +INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, +STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING +IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. */ + +#include "config.h" + +#include +#include +#include +#include +#include + +#include "backtrace.h" +#include "internal.h" + +#ifndef O_BINARY +#define O_BINARY 0 +#endif + +#ifndef O_CLOEXEC +#define O_CLOEXEC 0 +#endif + +#ifndef FD_CLOEXEC +#define FD_CLOEXEC 1 +#endif + +/* Open a file for reading. 
*/ + +int +backtrace_open (const char *filename, backtrace_error_callback error_callback, + void *data, int *does_not_exist) +{ + int descriptor; + + if (does_not_exist != NULL) + *does_not_exist = 0; + + descriptor = open (filename, (int) (O_RDONLY | O_BINARY | O_CLOEXEC)); + if (descriptor < 0) + { + if (does_not_exist != NULL && errno == ENOENT) + *does_not_exist = 1; + else + error_callback (data, filename, errno); + return -1; + } + +#ifdef HAVE_FCNTL + /* Set FD_CLOEXEC just in case the kernel does not support + O_CLOEXEC. It doesn't matter if this fails for some reason. + FIXME: At some point it should be safe to only do this if + O_CLOEXEC == 0. */ + fcntl (descriptor, F_SETFD, FD_CLOEXEC); +#endif + + return descriptor; +} + +/* Close DESCRIPTOR. */ + +int +backtrace_close (int descriptor, backtrace_error_callback error_callback, + void *data) +{ + if (close (descriptor) < 0) + { + error_callback (data, "close", errno); + return 0; + } + return 1; +} diff --git a/backtrace-sys/src/libbacktrace/print.c b/backtrace-sys/src/libbacktrace/print.c new file mode 100644 index 000000000..3c6bad283 --- /dev/null +++ b/backtrace-sys/src/libbacktrace/print.c @@ -0,0 +1,92 @@ +/* print.c -- Print the current backtrace. + Copyright (C) 2012-2018 Free Software Foundation, Inc. + Written by Ian Lance Taylor, Google. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + (1) Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + (2) Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + + (3) The name of the author may not be used to + endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, +INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, +STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING +IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. */ + +#include "config.h" + +#include +#include +#include + +#include "backtrace.h" +#include "internal.h" + +/* Passed to callbacks. */ + +struct print_data +{ + struct backtrace_state *state; + FILE *f; +}; + +/* Print one level of a backtrace. */ + +static int +print_callback (void *data, uintptr_t pc, const char *filename, int lineno, + const char *function) +{ + struct print_data *pdata = (struct print_data *) data; + + fprintf (pdata->f, "0x%lx %s\n\t%s:%d\n", + (unsigned long) pc, + function == NULL ? "???" : function, + filename == NULL ? "???" : filename, + lineno); + return 0; +} + +/* Print errors to stderr. 
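[Illustrative sketch, not part of the vendored sources.] backtrace_open above reports a missing file through *DOES_NOT_EXIST rather than through the error callback, so a caller can probe for an optional file (for example a companion debug-info file) without emitting spurious errors. The helper name below is invented for the example; the behaviour it relies on is exactly what the function above implements.

#include "backtrace.h"
#include "internal.h"

/* Hypothetical helper: open an optional companion file.  Absence is
   normal and stays quiet; any other failure has already been reported
   by backtrace_open through ERROR_CALLBACK.  Returns the descriptor,
   or -1 if the file could not be opened.  */
static int
open_optional_file (const char *filename,
                    backtrace_error_callback error_callback, void *data)
{
  int does_not_exist;
  int descriptor;

  descriptor = backtrace_open (filename, error_callback, data,
                               &does_not_exist);
  if (descriptor < 0 && does_not_exist)
    return -1;   /* Not an error: the file simply is not there.  */
  return descriptor;
}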
*/ + +static void +error_callback (void *data, const char *msg, int errnum) +{ + struct print_data *pdata = (struct print_data *) data; + + if (pdata->state->filename != NULL) + fprintf (stderr, "%s: ", pdata->state->filename); + fprintf (stderr, "libbacktrace: %s", msg); + if (errnum > 0) + fprintf (stderr, ": %s", strerror (errnum)); + fputc ('\n', stderr); +} + +/* Print a backtrace. */ + +void +backtrace_print (struct backtrace_state *state, int skip, FILE *f) +{ + struct print_data data; + + data.state = state; + data.f = f; + backtrace_full (state, skip + 1, print_callback, error_callback, + (void *) &data); +} diff --git a/backtrace-sys/src/libbacktrace/read.c b/backtrace-sys/src/libbacktrace/read.c new file mode 100644 index 000000000..211d6457c --- /dev/null +++ b/backtrace-sys/src/libbacktrace/read.c @@ -0,0 +1,96 @@ +/* read.c -- File views without mmap. + Copyright (C) 2012-2018 Free Software Foundation, Inc. + Written by Ian Lance Taylor, Google. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + (1) Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + (2) Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + + (3) The name of the author may not be used to + endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, +INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, +STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING +IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. */ + +#include "config.h" + +#include +#include +#include +#include + +#include "backtrace.h" +#include "internal.h" + +/* This file implements file views when mmap is not available. */ + +/* Create a view of SIZE bytes from DESCRIPTOR at OFFSET. */ + +int +backtrace_get_view (struct backtrace_state *state, int descriptor, + off_t offset, size_t size, + backtrace_error_callback error_callback, + void *data, struct backtrace_view *view) +{ + ssize_t got; + + if (lseek (descriptor, offset, SEEK_SET) < 0) + { + error_callback (data, "lseek", errno); + return 0; + } + + view->base = backtrace_alloc (state, size, error_callback, data); + if (view->base == NULL) + return 0; + view->data = view->base; + view->len = size; + + got = read (descriptor, view->base, size); + if (got < 0) + { + error_callback (data, "read", errno); + free (view->base); + return 0; + } + + if ((size_t) got < size) + { + error_callback (data, "file too short", 0); + free (view->base); + return 0; + } + + return 1; +} + +/* Release a view read by backtrace_get_view. 
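[Illustrative sketch, not part of the vendored sources.] backtrace_get_view above is how every format reader pulls bytes out of the executable when mmap is unavailable; mmap.c elsewhere in this patch provides the same interface on top of mmap, so callers never care which backend configure selected. The struct and field names in the sketch (example_header, read_example_header) are invented for illustration; the calling pattern mirrors the format readers in this patch.

#include <stdint.h>
#include <string.h>
#include "backtrace.h"
#include "internal.h"

/* Hypothetical 16-byte on-disk header used only for this example.  */
struct example_header
{
  uint32_t magic;
  uint32_t flags;
  uint64_t table_offset;
};

static int
read_example_header (struct backtrace_state *state, int descriptor,
                     backtrace_error_callback error_callback, void *data,
                     struct example_header *hdr)
{
  struct backtrace_view view;

  /* View the first sizeof *hdr bytes of the file.  */
  if (!backtrace_get_view (state, descriptor, 0, sizeof *hdr,
                           error_callback, data, &view))
    return 0;

  /* Copy out what we need before releasing the view; view.data is only
     valid until backtrace_release_view.  */
  memcpy (hdr, view.data, sizeof *hdr);

  backtrace_release_view (state, &view, error_callback, data);
  return 1;
}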
*/ + +void +backtrace_release_view (struct backtrace_state *state, + struct backtrace_view *view, + backtrace_error_callback error_callback, + void *data) +{ + backtrace_free (state, view->base, view->len, error_callback, data); + view->data = NULL; + view->base = NULL; +} diff --git a/backtrace-sys/src/libbacktrace/simple.c b/backtrace-sys/src/libbacktrace/simple.c new file mode 100644 index 000000000..510877c23 --- /dev/null +++ b/backtrace-sys/src/libbacktrace/simple.c @@ -0,0 +1,108 @@ +/* simple.c -- The backtrace_simple function. + Copyright (C) 2012-2018 Free Software Foundation, Inc. + Written by Ian Lance Taylor, Google. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + (1) Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + (2) Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + + (3) The name of the author may not be used to + endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, +INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, +STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING +IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. */ + +#include "config.h" + +#include "unwind.h" +#include "backtrace.h" + +/* The simple_backtrace routine. */ + +/* Data passed through _Unwind_Backtrace. */ + +struct backtrace_simple_data +{ + /* Number of frames to skip. */ + int skip; + /* Library state. */ + struct backtrace_state *state; + /* Callback routine. */ + backtrace_simple_callback callback; + /* Error callback routine. */ + backtrace_error_callback error_callback; + /* Data to pass to callback routine. */ + void *data; + /* Value to return from backtrace. */ + int ret; +}; + +/* Unwind library callback routine. This is passd to + _Unwind_Backtrace. */ + +static _Unwind_Reason_Code +simple_unwind (struct _Unwind_Context *context, void *vdata) +{ + struct backtrace_simple_data *bdata = (struct backtrace_simple_data *) vdata; + uintptr_t pc; + int ip_before_insn = 0; + +#ifdef HAVE_GETIPINFO + pc = _Unwind_GetIPInfo (context, &ip_before_insn); +#else + pc = _Unwind_GetIP (context); +#endif + + if (bdata->skip > 0) + { + --bdata->skip; + return _URC_NO_REASON; + } + + if (!ip_before_insn) + --pc; + + bdata->ret = bdata->callback (bdata->data, pc); + + if (bdata->ret != 0) + return _URC_END_OF_STACK; + + return _URC_NO_REASON; +} + +/* Get a simple stack backtrace. 
*/ + +int +backtrace_simple (struct backtrace_state *state, int skip, + backtrace_simple_callback callback, + backtrace_error_callback error_callback, void *data) +{ + struct backtrace_simple_data bdata; + + bdata.skip = skip + 1; + bdata.state = state; + bdata.callback = callback; + bdata.error_callback = error_callback; + bdata.data = data; + bdata.ret = 0; + _Unwind_Backtrace (simple_unwind, &bdata); + return bdata.ret; +} diff --git a/backtrace-sys/src/libbacktrace/sort.c b/backtrace-sys/src/libbacktrace/sort.c new file mode 100644 index 000000000..9121bcb8d --- /dev/null +++ b/backtrace-sys/src/libbacktrace/sort.c @@ -0,0 +1,108 @@ +/* sort.c -- Sort without allocating memory + Copyright (C) 2012-2018 Free Software Foundation, Inc. + Written by Ian Lance Taylor, Google. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + (1) Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + (2) Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + + (3) The name of the author may not be used to + endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, +INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, +STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING +IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. */ + +#include "config.h" + +#include +#include + +#include "backtrace.h" +#include "internal.h" + +/* The GNU glibc version of qsort allocates memory, which we must not + do if we are invoked by a signal handler. So provide our own + sort. */ + +static void +swap (char *a, char *b, size_t size) +{ + size_t i; + + for (i = 0; i < size; i++, a++, b++) + { + char t; + + t = *a; + *a = *b; + *b = t; + } +} + +void +backtrace_qsort (void *basearg, size_t count, size_t size, + int (*compar) (const void *, const void *)) +{ + char *base = (char *) basearg; + size_t i; + size_t mid; + + tail_recurse: + if (count < 2) + return; + + /* The symbol table and DWARF tables, which is all we use this + routine for, tend to be roughly sorted. Pick the middle element + in the array as our pivot point, so that we are more likely to + cut the array in half for each recursion step. */ + swap (base, base + (count / 2) * size, size); + + mid = 0; + for (i = 1; i < count; i++) + { + if ((*compar) (base, base + i * size) > 0) + { + ++mid; + if (i != mid) + swap (base + mid * size, base + i * size, size); + } + } + + if (mid > 0) + swap (base, base + mid * size, size); + + /* Recurse with the smaller array, loop with the larger one. That + ensures that our maximum stack depth is log count. 
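[Illustrative sketch, not part of the vendored sources.] backtrace_simple above reports only raw PC values and never touches the executable on disk; symbolization is left to backtrace_syminfo or the fileline machinery. A minimal caller that collects the current stack into a fixed buffer; MAX_FRAMES and the callback names are arbitrary choices for the example.

#include <stdint.h>
#include <stdio.h>
#include "backtrace.h"

#define MAX_FRAMES 64

struct pc_buffer
{
  uintptr_t pcs[MAX_FRAMES];
  size_t count;
};

/* Store one PC per frame; returning nonzero stops the unwind early
   once the buffer is full.  */
static int
collect_pc (void *data, uintptr_t pc)
{
  struct pc_buffer *buf = (struct pc_buffer *) data;

  if (buf->count >= MAX_FRAMES)
    return 1;
  buf->pcs[buf->count++] = pc;
  return 0;
}

static void
collect_error (void *data, const char *msg, int errnum)
{
  (void) data;
  fprintf (stderr, "backtrace_simple: %s (%d)\n", msg, errnum);
}

/* Capture the caller's stack, skipping this helper's own frame.  */
static size_t
capture_stack (struct backtrace_state *state, struct pc_buffer *buf)
{
  buf->count = 0;
  backtrace_simple (state, 1, collect_pc, collect_error, buf);
  return buf->count;
}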
*/ + if (2 * mid < count) + { + backtrace_qsort (base, mid, size, compar); + base += (mid + 1) * size; + count -= mid + 1; + goto tail_recurse; + } + else + { + backtrace_qsort (base + (mid + 1) * size, count - (mid + 1), + size, compar); + count = mid; + goto tail_recurse; + } +} diff --git a/backtrace-sys/src/libbacktrace/state.c b/backtrace-sys/src/libbacktrace/state.c new file mode 100644 index 000000000..ad360a6b1 --- /dev/null +++ b/backtrace-sys/src/libbacktrace/state.c @@ -0,0 +1,72 @@ +/* state.c -- Create the backtrace state. + Copyright (C) 2012-2018 Free Software Foundation, Inc. + Written by Ian Lance Taylor, Google. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + (1) Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + (2) Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + + (3) The name of the author may not be used to + endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, +INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, +STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING +IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. */ + +#include "config.h" + +#include +#include + +#include "backtrace.h" +#include "backtrace-supported.h" +#include "internal.h" + +/* Create the backtrace state. This will then be passed to all the + other routines. */ + +struct backtrace_state * +backtrace_create_state (const char *filename, int threaded, + backtrace_error_callback error_callback, + void *data) +{ + struct backtrace_state init_state; + struct backtrace_state *state; + +#ifndef HAVE_SYNC_FUNCTIONS + if (threaded) + { + error_callback (data, "backtrace library does not support threads", 0); + return NULL; + } +#endif + + memset (&init_state, 0, sizeof init_state); + init_state.filename = filename; + init_state.threaded = threaded; + + state = ((struct backtrace_state *) + backtrace_alloc (&init_state, sizeof *state, error_callback, data)); + if (state == NULL) + return NULL; + *state = init_state; + + return state; +} diff --git a/backtrace-sys/src/libbacktrace/stest.c b/backtrace-sys/src/libbacktrace/stest.c new file mode 100644 index 000000000..2eb98808d --- /dev/null +++ b/backtrace-sys/src/libbacktrace/stest.c @@ -0,0 +1,137 @@ +/* stest.c -- Test for libbacktrace internal sort function + Copyright (C) 2012-2018 Free Software Foundation, Inc. + Written by Ian Lance Taylor, Google. 
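[Illustrative sketch, not part of the vendored sources.] backtrace_create_state above plus backtrace_print from print.c earlier in this patch form the smallest useful entry point into the library. A hosted program that prints its own stack might look like the sketch below; the error-callback name is invented, THREADED is 0 because the state is only used from one thread, and a caller can first test BACKTRACE_SUPPORTED from backtrace-supported.h, as ttest.c later in this patch does.

#include <stdio.h>
#include <stdlib.h>
#include "backtrace.h"

/* Illustrative error callback: report the problem and keep going.  */
static void
example_error_cb (void *data, const char *msg, int errnum)
{
  (void) data;
  fprintf (stderr, "backtrace error: %s (%d)\n", msg, errnum);
}

int
main (int argc, char **argv)
{
  struct backtrace_state *state;

  (void) argc;
  /* THREADED = 0: the state will only be used from this thread.  */
  state = backtrace_create_state (argv[0], 0, example_error_cb, NULL);
  if (state == NULL)
    return EXIT_FAILURE;

  /* Print the current stack to stderr, skipping no extra frames
     beyond backtrace_print itself.  */
  backtrace_print (state, 0, stderr);
  return EXIT_SUCCESS;
}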
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + (1) Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + (2) Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + + (3) The name of the author may not be used to + endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, +INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, +STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING +IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. */ + +#include "config.h" + +#include +#include +#include +#include + +#include "backtrace.h" +#include "internal.h" + +/* Test the local qsort implementation. */ + +#define MAX 10 + +struct test +{ + size_t count; + int input[MAX]; + int output[MAX]; +}; + +static struct test tests[] = + { + { + 10, + { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }, + { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 } + }, + { + 9, + { 1, 2, 3, 4, 5, 6, 7, 8, 9 }, + { 1, 2, 3, 4, 5, 6, 7, 8, 9 } + }, + { + 10, + { 10, 9, 8, 7, 6, 5, 4, 3, 2, 1 }, + { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }, + }, + { + 9, + { 9, 8, 7, 6, 5, 4, 3, 2, 1 }, + { 1, 2, 3, 4, 5, 6, 7, 8, 9 }, + }, + { + 10, + { 2, 4, 6, 8, 10, 1, 3, 5, 7, 9 }, + { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }, + }, + { + 5, + { 4, 5, 3, 1, 2 }, + { 1, 2, 3, 4, 5 }, + }, + { + 5, + { 1, 1, 1, 1, 1 }, + { 1, 1, 1, 1, 1 }, + }, + { + 5, + { 1, 1, 2, 1, 1 }, + { 1, 1, 1, 1, 2 }, + }, + { + 5, + { 2, 1, 1, 1, 1 }, + { 1, 1, 1, 1, 2 }, + }, + }; + +static int +compare (const void *a, const void *b) +{ + const int *ai = (const int *) a; + const int *bi = (const int *) b; + + return *ai - *bi; +} + +int +main (int argc ATTRIBUTE_UNUSED, char **argv ATTRIBUTE_UNUSED) +{ + int failures; + size_t i; + int a[MAX]; + + failures = 0; + for (i = 0; i < sizeof tests / sizeof tests[0]; i++) + { + memcpy (a, tests[i].input, tests[i].count * sizeof (int)); + backtrace_qsort (a, tests[i].count, sizeof (int), compare); + if (memcmp (a, tests[i].output, tests[i].count * sizeof (int)) != 0) + { + size_t j; + + fprintf (stderr, "test %d failed:", (int) i); + for (j = 0; j < tests[i].count; j++) + fprintf (stderr, " %d", a[j]); + fprintf (stderr, "\n"); + ++failures; + } + } + + exit (failures > 0 ? EXIT_FAILURE : EXIT_SUCCESS); +} diff --git a/backtrace-sys/src/libbacktrace/testlib.c b/backtrace-sys/src/libbacktrace/testlib.c new file mode 100644 index 000000000..6dbef7c38 --- /dev/null +++ b/backtrace-sys/src/libbacktrace/testlib.c @@ -0,0 +1,234 @@ +/* testlib.c -- test functions for libbacktrace library + Copyright (C) 2012-2018 Free Software Foundation, Inc. + Written by Ian Lance Taylor, Google. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + (1) Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + (2) Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + + (3) The name of the author may not be used to + endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, +INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, +STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING +IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. */ + +#include +#include +#include +#include + +#include "filenames.h" + +#include "backtrace.h" + +#include "testlib.h" + +/* The backtrace state. */ + +void *state; + +/* The number of failures. */ + +int failures; + +/* Return the base name in a path. */ + +const char * +base (const char *p) +{ + const char *last; + const char *s; + + last = NULL; + for (s = p; *s != '\0'; ++s) + { + if (IS_DIR_SEPARATOR (*s)) + last = s + 1; + } + return last != NULL ? last : p; +} + +/* Check an entry in a struct info array. */ + +void +check (const char *name, int index, const struct info *all, int want_lineno, + const char *want_function, const char *want_file, int *failed) +{ + if (*failed) + return; + if (all[index].filename == NULL || all[index].function == NULL) + { + fprintf (stderr, "%s: [%d]: missing file name or function name\n", + name, index); + *failed = 1; + return; + } + if (strcmp (base (all[index].filename), want_file) != 0) + { + fprintf (stderr, "%s: [%d]: got %s expected %s\n", name, index, + all[index].filename, want_file); + *failed = 1; + } + if (all[index].lineno != want_lineno) + { + fprintf (stderr, "%s: [%d]: got %d expected %d\n", name, index, + all[index].lineno, want_lineno); + *failed = 1; + } + if (strcmp (all[index].function, want_function) != 0) + { + fprintf (stderr, "%s: [%d]: got %s expected %s\n", name, index, + all[index].function, want_function); + *failed = 1; + } +} + +/* The backtrace callback function. 
*/ + +int +callback_one (void *vdata, uintptr_t pc ATTRIBUTE_UNUSED, + const char *filename, int lineno, const char *function) +{ + struct bdata *data = (struct bdata *) vdata; + struct info *p; + + if (data->index >= data->max) + { + fprintf (stderr, "callback_one: callback called too many times\n"); + data->failed = 1; + return 1; + } + + p = &data->all[data->index]; + if (filename == NULL) + p->filename = NULL; + else + { + p->filename = strdup (filename); + assert (p->filename != NULL); + } + p->lineno = lineno; + if (function == NULL) + p->function = NULL; + else + { + p->function = strdup (function); + assert (p->function != NULL); + } + ++data->index; + + return 0; +} + +/* An error callback passed to backtrace. */ + +void +error_callback_one (void *vdata, const char *msg, int errnum) +{ + struct bdata *data = (struct bdata *) vdata; + + fprintf (stderr, "%s", msg); + if (errnum > 0) + fprintf (stderr, ": %s", strerror (errnum)); + fprintf (stderr, "\n"); + data->failed = 1; +} + +/* The backtrace_simple callback function. */ + +int +callback_two (void *vdata, uintptr_t pc) +{ + struct sdata *data = (struct sdata *) vdata; + + if (data->index >= data->max) + { + fprintf (stderr, "callback_two: callback called too many times\n"); + data->failed = 1; + return 1; + } + + data->addrs[data->index] = pc; + ++data->index; + + return 0; +} + +/* An error callback passed to backtrace_simple. */ + +void +error_callback_two (void *vdata, const char *msg, int errnum) +{ + struct sdata *data = (struct sdata *) vdata; + + fprintf (stderr, "%s", msg); + if (errnum > 0) + fprintf (stderr, ": %s", strerror (errnum)); + fprintf (stderr, "\n"); + data->failed = 1; +} + +/* The backtrace_syminfo callback function. */ + +void +callback_three (void *vdata, uintptr_t pc ATTRIBUTE_UNUSED, + const char *symname, uintptr_t symval, + uintptr_t symsize) +{ + struct symdata *data = (struct symdata *) vdata; + + if (symname == NULL) + data->name = NULL; + else + { + data->name = strdup (symname); + assert (data->name != NULL); + } + data->val = symval; + data->size = symsize; +} + +/* The backtrace_syminfo error callback function. */ + +void +error_callback_three (void *vdata, const char *msg, int errnum) +{ + struct symdata *data = (struct symdata *) vdata; + + fprintf (stderr, "%s", msg); + if (errnum > 0) + fprintf (stderr, ": %s", strerror (errnum)); + fprintf (stderr, "\n"); + data->failed = 1; +} + +/* The backtrace_create_state error callback function. */ + +void +error_callback_create (void *data ATTRIBUTE_UNUSED, const char *msg, + int errnum) +{ + fprintf (stderr, "%s", msg); + if (errnum > 0) + fprintf (stderr, ": %s", strerror (errnum)); + fprintf (stderr, "\n"); + exit (EXIT_FAILURE); +} diff --git a/backtrace-sys/src/libbacktrace/testlib.h b/backtrace-sys/src/libbacktrace/testlib.h new file mode 100644 index 000000000..5094656f1 --- /dev/null +++ b/backtrace-sys/src/libbacktrace/testlib.h @@ -0,0 +1,110 @@ +/* testlib.h -- Header for test functions for libbacktrace library + Copyright (C) 2012-2018 Free Software Foundation, Inc. + Written by Ian Lance Taylor, Google. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + (1) Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. 
+ + (2) Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + + (3) The name of the author may not be used to + endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, +INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, +STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING +IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. */ + +#ifndef LIBBACKTRACE_TESTLIB_H +#define LIBBACKTRACE_TESTLIB_H + +/* Portable attribute syntax. Actually some of these tests probably + won't work if the attributes are not recognized. */ + +#ifndef GCC_VERSION +# define GCC_VERSION (__GNUC__ * 1000 + __GNUC_MINOR__) +#endif + +#if (GCC_VERSION < 2007) +# define __attribute__(x) +#endif + +#ifndef ATTRIBUTE_UNUSED +# define ATTRIBUTE_UNUSED __attribute__ ((__unused__)) +#endif + +/* Used to collect backtrace info. */ + +struct info +{ + char *filename; + int lineno; + char *function; +}; + +/* Passed to backtrace callback function. */ + +struct bdata +{ + struct info *all; + size_t index; + size_t max; + int failed; +}; + +/* Passed to backtrace_simple callback function. */ + +struct sdata +{ + uintptr_t *addrs; + size_t index; + size_t max; + int failed; +}; + +/* Passed to backtrace_syminfo callback function. */ + +struct symdata +{ + const char *name; + uintptr_t val, size; + int failed; +}; + +/* The backtrace state. */ + +extern void *state; + +/* The number of failures. */ + +extern int failures; + +extern const char *base (const char *p); +extern void check (const char *name, int index, const struct info *all, + int want_lineno, const char *want_function, + const char *want_file, int *failed); +extern int callback_one (void *, uintptr_t, const char *, int, const char *); +extern void error_callback_one (void *, const char *, int); +extern int callback_two (void *, uintptr_t); +extern void error_callback_two (void *, const char *, int); +extern void callback_three (void *, uintptr_t, const char *, uintptr_t, + uintptr_t); +extern void error_callback_three (void *, const char *, int); +extern void error_callback_create (void *, const char *, int); + +#endif /* !defined(LIBBACKTRACE_TESTLIB_H) */ diff --git a/backtrace-sys/src/libbacktrace/ttest.c b/backtrace-sys/src/libbacktrace/ttest.c new file mode 100644 index 000000000..ca55e9b37 --- /dev/null +++ b/backtrace-sys/src/libbacktrace/ttest.c @@ -0,0 +1,161 @@ +/* ttest.c -- Test for libbacktrace library + Copyright (C) 2017-2018 Free Software Foundation, Inc. + Written by Ian Lance Taylor, Google. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + (1) Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. 
+ + (2) Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + + (3) The name of the author may not be used to + endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, +INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, +STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING +IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. */ + +/* Test using the libbacktrace library from multiple threads. */ + +#include +#include +#include +#include + +#include + +#include "filenames.h" + +#include "backtrace.h" +#include "backtrace-supported.h" + +#include "testlib.h" + +static int f2 (int) __attribute__ ((noinline)); +static int f3 (int, int) __attribute__ ((noinline)); + +/* Test that a simple backtrace works. This is called via + pthread_create. It returns the number of failures, as void *. */ + +static void * +test1_thread (void *arg ATTRIBUTE_UNUSED) +{ + /* Returning a value here and elsewhere avoids a tailcall which + would mess up the backtrace. */ + return (void *) (uintptr_t) (f2 (__LINE__) - 2); +} + +static int +f2 (int f1line) +{ + return f3 (f1line, __LINE__) + 2; +} + +static int +f3 (int f1line, int f2line) +{ + struct info all[20]; + struct bdata data; + int f3line; + int i; + + data.all = &all[0]; + data.index = 0; + data.max = 20; + data.failed = 0; + + f3line = __LINE__ + 1; + i = backtrace_full (state, 0, callback_one, error_callback_one, &data); + + if (i != 0) + { + fprintf (stderr, "test1: unexpected return value %d\n", i); + data.failed = 1; + } + + if (data.index < 3) + { + fprintf (stderr, + "test1: not enough frames; got %zu, expected at least 3\n", + data.index); + data.failed = 1; + } + + check ("test1", 0, all, f3line, "f3", "ttest.c", &data.failed); + check ("test1", 1, all, f2line, "f2", "ttest.c", &data.failed); + check ("test1", 2, all, f1line, "test1_thread", "ttest.c", &data.failed); + + return data.failed; +} + +/* Run the test with 10 threads simultaneously. */ + +#define THREAD_COUNT 10 + +static void test1 (void) __attribute__ ((unused)); + +static void +test1 (void) +{ + pthread_t atid[THREAD_COUNT]; + int i; + int errnum; + int this_fail; + void *ret; + + for (i = 0; i < THREAD_COUNT; i++) + { + errnum = pthread_create (&atid[i], NULL, test1_thread, NULL); + if (errnum != 0) + { + fprintf (stderr, "pthread_create %d: %s\n", i, strerror (errnum)); + exit (EXIT_FAILURE); + } + } + + this_fail = 0; + for (i = 0; i < THREAD_COUNT; i++) + { + errnum = pthread_join (atid[i], &ret); + if (errnum != 0) + { + fprintf (stderr, "pthread_join %d: %s\n", i, strerror (errnum)); + exit (EXIT_FAILURE); + } + this_fail += (int) (uintptr_t) ret; + } + + printf ("%s: threaded backtrace_full noinline\n", this_fail > 0 ? 
"FAIL" : "PASS"); + + failures += this_fail; +} + +int +main (int argc ATTRIBUTE_UNUSED, char **argv) +{ + state = backtrace_create_state (argv[0], BACKTRACE_SUPPORTS_THREADS, + error_callback_create, NULL); + +#if BACKTRACE_SUPPORTED +#if BACKTRACE_SUPPORTS_THREADS + test1 (); +#endif +#endif + + exit (failures ? EXIT_FAILURE : EXIT_SUCCESS); +} diff --git a/backtrace-sys/src/libbacktrace/unknown.c b/backtrace-sys/src/libbacktrace/unknown.c new file mode 100644 index 000000000..be521a85c --- /dev/null +++ b/backtrace-sys/src/libbacktrace/unknown.c @@ -0,0 +1,65 @@ +/* unknown.c -- used when backtrace configury does not know file format. + Copyright (C) 2012-2018 Free Software Foundation, Inc. + Written by Ian Lance Taylor, Google. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + (1) Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + (2) Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + + (3) The name of the author may not be used to + endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, +INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, +STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING +IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. */ + +#include "config.h" + +#include + +#include "backtrace.h" +#include "internal.h" + +/* A trivial routine that always fails to find fileline data. */ + +static int +unknown_fileline (struct backtrace_state *state ATTRIBUTE_UNUSED, + uintptr_t pc, backtrace_full_callback callback, + backtrace_error_callback error_callback ATTRIBUTE_UNUSED, + void *data) + +{ + return callback (data, pc, NULL, 0, NULL); +} + +/* Initialize the backtrace data when we don't know how to read the + debug info. */ + +int +backtrace_initialize (struct backtrace_state *state ATTRIBUTE_UNUSED, + const char *filename ATTRIBUTE_UNUSED, + int descriptor ATTRIBUTE_UNUSED, + backtrace_error_callback error_callback ATTRIBUTE_UNUSED, + void *data ATTRIBUTE_UNUSED, fileline *fileline_fn) +{ + state->fileline_data = NULL; + *fileline_fn = unknown_fileline; + return 1; +} diff --git a/backtrace-sys/src/libbacktrace/xcoff.c b/backtrace-sys/src/libbacktrace/xcoff.c new file mode 100644 index 000000000..1ae001dd1 --- /dev/null +++ b/backtrace-sys/src/libbacktrace/xcoff.c @@ -0,0 +1,1642 @@ +/* xcoff.c -- Get debug data from an XCOFF file for backtraces. + Copyright (C) 2012-2018 Free Software Foundation, Inc. + Adapted from elf.c. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + (1) Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + (2) Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + + (3) The name of the author may not be used to + endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, +INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, +STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING +IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. */ + +#include "config.h" + +#include +#include +#include +#include + +#ifdef HAVE_LOADQUERY +#include +#endif + +#include "backtrace.h" +#include "internal.h" + +/* The configure script must tell us whether we are 32-bit or 64-bit + XCOFF. We could make this code test and support either possibility, + but there is no point. This code only works for the currently + running executable, which means that we know the XCOFF mode at + configure time. */ + +#if BACKTRACE_XCOFF_SIZE != 32 && BACKTRACE_XCOFF_SIZE != 64 +#error "Unknown BACKTRACE_XCOFF_SIZE" +#endif + +/* XCOFF file header. */ + +#if BACKTRACE_XCOFF_SIZE == 32 + +typedef struct { + uint16_t f_magic; + uint16_t f_nscns; + uint32_t f_timdat; + uint32_t f_symptr; + uint32_t f_nsyms; + uint16_t f_opthdr; + uint16_t f_flags; +} b_xcoff_filhdr; + +#define XCOFF_MAGIC 0737 + +#else /* BACKTRACE_XCOFF_SIZE != 32 */ + +typedef struct { + uint16_t f_magic; + uint16_t f_nscns; + uint32_t f_timdat; + uint64_t f_symptr; + uint16_t f_opthdr; + uint16_t f_flags; + uint32_t f_nsyms; +} b_xcoff_filhdr; + +#define XCOFF_MAGIC 0767 + +#endif /* BACKTRACE_XCOFF_SIZE != 32 */ + +#define F_SHROBJ 0x2000 /* File is a shared object. */ + +/* XCOFF section header. */ + +#if BACKTRACE_XCOFF_SIZE == 32 + +typedef struct { + char s_name[8]; + uint32_t s_paddr; + uint32_t s_vaddr; + uint32_t s_size; + uint32_t s_scnptr; + uint32_t s_relptr; + uint32_t s_lnnoptr; + uint16_t s_nreloc; + uint16_t s_nlnno; + uint32_t s_flags; +} b_xcoff_scnhdr; + +#define _OVERFLOW_MARKER 65535 + +#else /* BACKTRACE_XCOFF_SIZE != 32 */ + +typedef struct { + char name[8]; + uint64_t s_paddr; + uint64_t s_vaddr; + uint64_t s_size; + uint64_t s_scnptr; + uint64_t s_relptr; + uint64_t s_lnnoptr; + uint32_t s_nreloc; + uint32_t s_nlnno; + uint32_t s_flags; +} b_xcoff_scnhdr; + +#endif /* BACKTRACE_XCOFF_SIZE != 32 */ + +#define STYP_DWARF 0x10 /* DWARF debugging section. */ +#define STYP_TEXT 0x20 /* Executable text (code) section. */ +#define STYP_OVRFLO 0x8000 /* Line-number field overflow section. */ + +#define SSUBTYP_DWINFO 0x10000 /* DWARF info section. */ +#define SSUBTYP_DWLINE 0x20000 /* DWARF line-number section. 
*/ +#define SSUBTYP_DWARNGE 0x50000 /* DWARF aranges section. */ +#define SSUBTYP_DWABREV 0x60000 /* DWARF abbreviation section. */ +#define SSUBTYP_DWSTR 0x70000 /* DWARF strings section. */ + +/* XCOFF symbol. */ + +#define SYMNMLEN 8 + +#if BACKTRACE_XCOFF_SIZE == 32 + +typedef struct { + union { + char _name[SYMNMLEN]; + struct { + uint32_t _zeroes; + uint32_t _offset; + } _s; + } _u; +#define n_name _u._name +#define n_zeroes _u._s._zeroes +#define n_offset_ _u._s._offset + + uint32_t n_value; + int16_t n_scnum; + uint16_t n_type; + uint8_t n_sclass; + uint8_t n_numaux; +} __attribute__ ((packed)) b_xcoff_syment; + +#else /* BACKTRACE_XCOFF_SIZE != 32 */ + +typedef struct { + uint64_t n_value; + uint32_t n_offset_; + int16_t n_scnum; + uint16_t n_type; + uint8_t n_sclass; + uint8_t n_numaux; +} __attribute__ ((packed)) b_xcoff_syment; + +#endif /* BACKTRACE_XCOFF_SIZE != 32 */ + +#define SYMESZ 18 + +#define C_EXT 2 /* External symbol. */ +#define C_FCN 101 /* Beginning or end of function. */ +#define C_FILE 103 /* Source file name. */ +#define C_HIDEXT 107 /* Unnamed external symbol. */ +#define C_BINCL 108 /* Beginning of include file. */ +#define C_EINCL 109 /* End of include file. */ +#define C_WEAKEXT 111 /* Weak external symbol. */ + +#define ISFCN(x) ((x) & 0x0020) + +/* XCOFF AUX entry. */ + +#define AUXESZ 18 +#define FILNMLEN 14 + +typedef union { +#if BACKTRACE_XCOFF_SIZE == 32 + struct { + uint16_t pad; + uint16_t x_lnnohi; + uint16_t x_lnno; + } x_block; +#else + struct { + uint32_t x_lnno; + } x_block; +#endif + union { + char x_fname[FILNMLEN]; + struct { + uint32_t x_zeroes; + uint32_t x_offset; + char pad[FILNMLEN-8]; + uint8_t x_ftype; + } _x; + } x_file; +#if BACKTRACE_XCOFF_SIZE == 32 + struct { + uint32_t x_exptr; + uint32_t x_fsize; + uint32_t x_lnnoptr; + uint32_t x_endndx; + } x_fcn; +#else + struct { + uint64_t x_lnnoptr; + uint32_t x_fsize; + uint32_t x_endndx; + } x_fcn; +#endif + struct { + uint8_t pad[AUXESZ-1]; + uint8_t x_auxtype; + } x_auxtype; +} __attribute__ ((packed)) b_xcoff_auxent; + +/* XCOFF line number entry. */ + +#if BACKTRACE_XCOFF_SIZE == 32 + +typedef struct { + union { + uint32_t l_symndx; + uint32_t l_paddr; + } l_addr; + uint16_t l_lnno; +} b_xcoff_lineno; + +#define LINESZ 6 + +#else /* BACKTRACE_XCOFF_SIZE != 32 */ + +typedef struct { + union { + uint32_t l_symndx; + uint64_t l_paddr; + } l_addr; + uint32_t l_lnno; +} b_xcoff_lineno; + +#define LINESZ 12 + +#endif /* BACKTRACE_XCOFF_SIZE != 32 */ + +#if BACKTRACE_XCOFF_SIZE == 32 +#define XCOFF_AIX_TEXTBASE 0x10000000u +#else +#define XCOFF_AIX_TEXTBASE 0x100000000ul +#endif + +/* AIX big archive fixed-length header. */ + +#define AIAMAGBIG "\n" + +typedef struct { + char fl_magic[8]; /* Archive magic string. */ + char fl_memoff[20]; /* Offset to member table. */ + char fl_gstoff[20]; /* Offset to global symbol table. */ + char fl_gst64off[20]; /* Offset to global symbol table for 64-bit objects. */ + char fl_fstmoff[20]; /* Offset to first archive member. */ + char fl_freeoff[20]; /* Offset to first member on free list. */ +} b_ar_fl_hdr; + +/* AIX big archive file member header. */ + +typedef struct { + char ar_size[20]; /* File member size - decimal. */ + char ar_nxtmem[20]; /* Next member offset - decimal. */ + char ar_prvmem[20]; /* Previous member offset - decimal. */ + char ar_date[12]; /* File member date - decimal. */ + char ar_uid[12]; /* File member userid - decimal. */ + char ar_gid[12]; /* File member group id - decimal. */ + char ar_mode[12]; /* File member mode - octal. 
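[Illustrative fragment, not part of the vendored sources.] The 32-bit b_xcoff_syment above uses the classic COFF naming trick: when n_zeroes is non-zero the eight bytes hold the (possibly unterminated) name inline, otherwise n_offset is an offset into the string table that follows the symbol table. xcoff_symname later in this file implements this; the fragment below only restates the 32-bit convention in isolation, relies on the struct and SYMNMLEN defined above, and uses a caller-supplied buffer instead of strdup to keep the example self-contained.

#include <string.h>

/* Resolve a 32-bit XCOFF symbol name into BUF (size SYMNMLEN + 1).
   SYM is a b_xcoff_syment as defined above; STRTAB/STRTAB_SIZE is the
   string table read from the file.  Returns the name, or NULL if the
   offset falls outside the string table.  */
static const char *
example_symname (const b_xcoff_syment *sym,
                 const unsigned char *strtab, size_t strtab_size,
                 char buf[SYMNMLEN + 1])
{
  if (sym->n_zeroes != 0)
    {
      /* Short name stored inline; it is not NUL-terminated when it
         uses all eight bytes, so copy and terminate it ourselves.  */
      memcpy (buf, sym->n_name, SYMNMLEN);
      buf[SYMNMLEN] = '\0';
      return buf;
    }
  if (sym->n_offset_ >= strtab_size)
    return NULL;
  /* Long name: offset into the string table.  */
  return (const char *) strtab + sym->n_offset_;
}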
*/ + char ar_namlen[4]; /* File member name length - decimal. */ + char ar_name[2]; /* Start of member name. */ +} b_ar_hdr; + + +/* Information we keep for an XCOFF symbol. */ + +struct xcoff_symbol +{ + /* The name of the symbol. */ + const char *name; + /* The address of the symbol. */ + uintptr_t address; + /* The size of the symbol. */ + size_t size; +}; + +/* Information to pass to xcoff_syminfo. */ + +struct xcoff_syminfo_data +{ + /* Symbols for the next module. */ + struct xcoff_syminfo_data *next; + /* The XCOFF symbols, sorted by address. */ + struct xcoff_symbol *symbols; + /* The number of symbols. */ + size_t count; +}; + +/* Information about an include file. */ + +struct xcoff_incl +{ + /* File name. */ + const char *filename; + /* Offset to first line number from the include file. */ + uintptr_t begin; + /* Offset to last line number from the include file. */ + uintptr_t end; +}; + +/* A growable vector of include files information. */ + +struct xcoff_incl_vector +{ + /* Memory. This is an array of struct xcoff_incl. */ + struct backtrace_vector vec; + /* Number of include files. */ + size_t count; +}; + +/* Map a single PC value to a file/function/line. */ + +struct xcoff_line +{ + /* PC. */ + uintptr_t pc; + /* File name. Many entries in the array are expected to point to + the same file name. */ + const char *filename; + /* Function name. */ + const char *function; + /* Line number. */ + int lineno; +}; + +/* A growable vector of line number information. This is used while + reading the line numbers. */ + +struct xcoff_line_vector +{ + /* Memory. This is an array of struct xcoff_line. */ + struct backtrace_vector vec; + /* Number of valid mappings. */ + size_t count; +}; + +/* The information we need to map a PC to a file and line. */ + +struct xcoff_fileline_data +{ + /* The data for the next file we know about. */ + struct xcoff_fileline_data *next; + /* Line number information. */ + struct xcoff_line_vector vec; +}; + +/* An index of DWARF sections we care about. */ + +enum dwarf_section +{ + DWSECT_INFO, + DWSECT_LINE, + DWSECT_ABBREV, + DWSECT_RANGES, + DWSECT_STR, + DWSECT_MAX +}; + +/* Information we gather for the DWARF sections we care about. */ + +struct dwsect_info +{ + /* Section file offset. */ + off_t offset; + /* Section size. */ + size_t size; + /* Section contents, after read from file. */ + const unsigned char *data; +}; + +/* A dummy callback function used when we can't find any debug info. */ + +static int +xcoff_nodebug (struct backtrace_state *state ATTRIBUTE_UNUSED, + uintptr_t pc ATTRIBUTE_UNUSED, + backtrace_full_callback callback ATTRIBUTE_UNUSED, + backtrace_error_callback error_callback, void *data) +{ + error_callback (data, "no debug info in XCOFF executable", -1); + return 0; +} + +/* A dummy callback function used when we can't find a symbol + table. */ + +static void +xcoff_nosyms (struct backtrace_state *state ATTRIBUTE_UNUSED, + uintptr_t addr ATTRIBUTE_UNUSED, + backtrace_syminfo_callback callback ATTRIBUTE_UNUSED, + backtrace_error_callback error_callback, void *data) +{ + error_callback (data, "no symbol table in XCOFF executable", -1); +} + +/* Compare struct xcoff_symbol for qsort. 
*/ + +static int +xcoff_symbol_compare (const void *v1, const void *v2) +{ + const struct xcoff_symbol *e1 = (const struct xcoff_symbol *) v1; + const struct xcoff_symbol *e2 = (const struct xcoff_symbol *) v2; + + if (e1->address < e2->address) + return -1; + else if (e1->address > e2->address) + return 1; + else + return 0; +} + +/* Compare an ADDR against an xcoff_symbol for bsearch. */ + +static int +xcoff_symbol_search (const void *vkey, const void *ventry) +{ + const uintptr_t *key = (const uintptr_t *) vkey; + const struct xcoff_symbol *entry = (const struct xcoff_symbol *) ventry; + uintptr_t addr; + + addr = *key; + if (addr < entry->address) + return -1; + else if ((entry->size == 0 && addr > entry->address) + || (entry->size > 0 && addr >= entry->address + entry->size)) + return 1; + else + return 0; +} + +/* Add XDATA to the list in STATE. */ + +static void +xcoff_add_syminfo_data (struct backtrace_state *state, + struct xcoff_syminfo_data *xdata) +{ + if (!state->threaded) + { + struct xcoff_syminfo_data **pp; + + for (pp = (struct xcoff_syminfo_data **) (void *) &state->syminfo_data; + *pp != NULL; + pp = &(*pp)->next) + ; + *pp = xdata; + } + else + { + while (1) + { + struct xcoff_syminfo_data **pp; + + pp = (struct xcoff_syminfo_data **) (void *) &state->syminfo_data; + + while (1) + { + struct xcoff_syminfo_data *p; + + p = backtrace_atomic_load_pointer (pp); + + if (p == NULL) + break; + + pp = &p->next; + } + + if (__sync_bool_compare_and_swap (pp, NULL, xdata)) + break; + } + } +} + +/* Return the symbol name and value for an ADDR. */ + +static void +xcoff_syminfo (struct backtrace_state *state ATTRIBUTE_UNUSED, uintptr_t addr, + backtrace_syminfo_callback callback, + backtrace_error_callback error_callback ATTRIBUTE_UNUSED, + void *data) +{ + struct xcoff_syminfo_data *edata; + struct xcoff_symbol *sym = NULL; + + if (!state->threaded) + { + for (edata = (struct xcoff_syminfo_data *) state->syminfo_data; + edata != NULL; + edata = edata->next) + { + sym = ((struct xcoff_symbol *) + bsearch (&addr, edata->symbols, edata->count, + sizeof (struct xcoff_symbol), xcoff_symbol_search)); + if (sym != NULL) + break; + } + } + else + { + struct xcoff_syminfo_data **pp; + + pp = (struct xcoff_syminfo_data **) (void *) &state->syminfo_data; + while (1) + { + edata = backtrace_atomic_load_pointer (pp); + if (edata == NULL) + break; + + sym = ((struct xcoff_symbol *) + bsearch (&addr, edata->symbols, edata->count, + sizeof (struct xcoff_symbol), xcoff_symbol_search)); + if (sym != NULL) + break; + + pp = &edata->next; + } + } + + if (sym == NULL) + callback (data, addr, NULL, 0, 0); + else + callback (data, addr, sym->name, sym->address, sym->size); +} + +/* Return the name of an XCOFF symbol. */ + +static const char * +xcoff_symname (const b_xcoff_syment *asym, + const unsigned char *strtab, size_t strtab_size) +{ +#if BACKTRACE_XCOFF_SIZE == 32 + if (asym->n_zeroes != 0) + { + /* Make a copy as we will release the symtab view. */ + char name[SYMNMLEN+1]; + strncpy (name, asym->n_name, SYMNMLEN); + name[SYMNMLEN] = '\0'; + return strdup (name); + } +#endif + if (asym->n_sclass & 0x80) + return NULL; /* .debug */ + if (asym->n_offset_ >= strtab_size) + return NULL; + return (const char *) strtab + asym->n_offset_; +} + +/* Initialize the symbol table info for xcoff_syminfo. 
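[Illustrative sketch, not part of the vendored sources.] xcoff_syminfo above is installed as state->syminfo_fn and is reached through the public backtrace_syminfo entry point declared in backtrace.h (the declared signature is assumed here from that header, not shown in this patch). When no symbol covers the address, the callback still runs with a NULL name, so the caller below checks for that. The struct and function names are invented for the example.

#include <stdint.h>
#include <stdio.h>
#include "backtrace.h"

struct sym_result
{
  const char *name;     /* Symbol name, or NULL if none was found.  */
  uintptr_t value;      /* Symbol start address.  */
  uintptr_t size;       /* Symbol size, 0 if unknown.  */
};

static void
sym_cb (void *data, uintptr_t pc, const char *symname,
        uintptr_t symval, uintptr_t symsize)
{
  struct sym_result *res = (struct sym_result *) data;

  (void) pc;
  res->name = symname;
  res->value = symval;
  res->size = symsize;
}

static void
sym_error_cb (void *data, const char *msg, int errnum)
{
  (void) data;
  fprintf (stderr, "backtrace_syminfo: %s (%d)\n", msg, errnum);
}

/* Look up ADDR in STATE's symbol table and print what we learn.  */
static void
describe_address (struct backtrace_state *state, uintptr_t addr)
{
  struct sym_result res = { NULL, 0, 0 };

  backtrace_syminfo (state, addr, sym_cb, sym_error_cb, &res);
  printf ("0x%lx -> %s\n", (unsigned long) addr,
          res.name != NULL ? res.name : "???");
}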
*/ + +static int +xcoff_initialize_syminfo (struct backtrace_state *state, + uintptr_t base_address, + const b_xcoff_scnhdr *sects, + const b_xcoff_syment *syms, size_t nsyms, + const unsigned char *strtab, size_t strtab_size, + backtrace_error_callback error_callback, void *data, + struct xcoff_syminfo_data *sdata) +{ + size_t xcoff_symbol_count; + size_t xcoff_symbol_size; + struct xcoff_symbol *xcoff_symbols; + size_t i; + unsigned int j; + + /* We only care about function symbols. Count them. */ + xcoff_symbol_count = 0; + for (i = 0; i < nsyms; ++i) + { + const b_xcoff_syment *asym = &syms[i]; + if ((asym->n_sclass == C_EXT || asym->n_sclass == C_HIDEXT + || asym->n_sclass == C_WEAKEXT) + && ISFCN (asym->n_type) && asym->n_numaux > 0 && asym->n_scnum > 0) + ++xcoff_symbol_count; + + i += asym->n_numaux; + } + + xcoff_symbol_size = xcoff_symbol_count * sizeof (struct xcoff_symbol); + xcoff_symbols = ((struct xcoff_symbol *) + backtrace_alloc (state, xcoff_symbol_size, error_callback, + data)); + if (xcoff_symbols == NULL) + return 0; + + j = 0; + for (i = 0; i < nsyms; ++i) + { + const b_xcoff_syment *asym = &syms[i]; + if ((asym->n_sclass == C_EXT || asym->n_sclass == C_HIDEXT + || asym->n_sclass == C_WEAKEXT) + && ISFCN (asym->n_type) && asym->n_numaux > 0 && asym->n_scnum > 0) + { + const b_xcoff_auxent *aux = (const b_xcoff_auxent *) (asym + 1); + xcoff_symbols[j].name = xcoff_symname (asym, strtab, strtab_size); + xcoff_symbols[j].address = base_address + asym->n_value + - sects[asym->n_scnum - 1].s_paddr; + /* x_fsize will be 0 if there is no debug information. */ + xcoff_symbols[j].size = aux->x_fcn.x_fsize; + ++j; + } + + i += asym->n_numaux; + } + + backtrace_qsort (xcoff_symbols, xcoff_symbol_count, + sizeof (struct xcoff_symbol), xcoff_symbol_compare); + + sdata->next = NULL; + sdata->symbols = xcoff_symbols; + sdata->count = xcoff_symbol_count; + + return 1; +} + +/* Compare struct xcoff_line for qsort. */ + +static int +xcoff_line_compare (const void *v1, const void *v2) +{ + const struct xcoff_line *ln1 = (const struct xcoff_line *) v1; + const struct xcoff_line *ln2 = (const struct xcoff_line *) v2; + + if (ln1->pc < ln2->pc) + return -1; + else if (ln1->pc > ln2->pc) + return 1; + else + return 0; +} + +/* Find a PC in a line vector. We always allocate an extra entry at + the end of the lines vector, so that this routine can safely look + at the next entry. */ + +static int +xcoff_line_search (const void *vkey, const void *ventry) +{ + const uintptr_t *key = (const uintptr_t *) vkey; + const struct xcoff_line *entry = (const struct xcoff_line *) ventry; + uintptr_t pc; + + pc = *key; + if (pc < entry->pc) + return -1; + else if ((entry + 1)->pc == (uintptr_t) -1 || pc >= (entry + 1)->pc) + return 1; + else + return 0; +} + +/* Look for a PC in the line vector for one module. On success, + call CALLBACK and return whatever it returns. On error, call + ERROR_CALLBACK and return 0. Sets *FOUND to 1 if the PC is found, + 0 if not. 
*/ + +static int +xcoff_lookup_pc (struct backtrace_state *state ATTRIBUTE_UNUSED, + struct xcoff_fileline_data *fdata, uintptr_t pc, + backtrace_full_callback callback, + backtrace_error_callback error_callback ATTRIBUTE_UNUSED, + void *data, int *found) +{ + const struct xcoff_line *ln; + const char *function; + + *found = 1; + + ln = (struct xcoff_line *) bsearch (&pc, fdata->vec.vec.base, + fdata->vec.count, + sizeof (struct xcoff_line), + xcoff_line_search); + if (ln == NULL) + { + *found = 0; + return 0; + } + + function = ln->function; + /* AIX prepends a '.' to function entry points, remove it. */ + if (*function == '.') + ++function; + return callback (data, pc, ln->filename, ln->lineno, function); +} + +/* Return the file/line information for a PC using the XCOFF lineno + mapping we built earlier. */ + +static int +xcoff_fileline (struct backtrace_state *state, uintptr_t pc, + backtrace_full_callback callback, + backtrace_error_callback error_callback, void *data) + +{ + struct xcoff_fileline_data *fdata; + int found; + int ret; + + if (!state->threaded) + { + for (fdata = (struct xcoff_fileline_data *) state->fileline_data; + fdata != NULL; + fdata = fdata->next) + { + ret = xcoff_lookup_pc (state, fdata, pc, callback, error_callback, + data, &found); + if (ret != 0 || found) + return ret; + } + } + else + { + struct xcoff_fileline_data **pp; + + pp = (struct xcoff_fileline_data **) (void *) &state->fileline_data; + while (1) + { + fdata = backtrace_atomic_load_pointer (pp); + if (fdata == NULL) + break; + + ret = xcoff_lookup_pc (state, fdata, pc, callback, error_callback, + data, &found); + if (ret != 0 || found) + return ret; + + pp = &fdata->next; + } + } + + /* FIXME: See if any libraries have been dlopen'ed. */ + + return callback (data, pc, NULL, 0, NULL); +} + +/* Compare struct xcoff_incl for qsort. */ + +static int +xcoff_incl_compare (const void *v1, const void *v2) +{ + const struct xcoff_incl *in1 = (const struct xcoff_incl *) v1; + const struct xcoff_incl *in2 = (const struct xcoff_incl *) v2; + + if (in1->begin < in2->begin) + return -1; + else if (in1->begin > in2->begin) + return 1; + else + return 0; +} + +/* Find a lnnoptr in an include file. */ + +static int +xcoff_incl_search (const void *vkey, const void *ventry) +{ + const uintptr_t *key = (const uintptr_t *) vkey; + const struct xcoff_incl *entry = (const struct xcoff_incl *) ventry; + uintptr_t lnno; + + lnno = *key; + if (lnno < entry->begin) + return -1; + else if (lnno > entry->end) + return 1; + else + return 0; +} + +/* Add a new mapping to the vector of line mappings that we are + building. Returns 1 on success, 0 on failure. */ + +static int +xcoff_add_line (struct backtrace_state *state, uintptr_t pc, + const char *filename, const char *function, uint32_t lnno, + backtrace_error_callback error_callback, void *data, + struct xcoff_line_vector *vec) +{ + struct xcoff_line *ln; + + ln = ((struct xcoff_line *) + backtrace_vector_grow (state, sizeof (struct xcoff_line), + error_callback, data, &vec->vec)); + if (ln == NULL) + return 0; + + ln->pc = pc; + ln->filename = filename; + ln->function = function; + ln->lineno = lnno; + + ++vec->count; + + return 1; +} + +/* Add the line number entries for a function to the line vector. 
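[Illustrative sketch, not part of the vendored sources.] xcoff_fileline above, like the unknown.c fallback earlier in this patch, reports frames for which no line data was found by invoking the callback with a NULL file name, line 0 and a NULL function. A user-side backtrace_full callback therefore has to tolerate missing data, much as print_callback in print.c does with "???". The function names below are invented for the example.

#include <stdint.h>
#include <stdio.h>
#include "backtrace.h"

/* A backtrace_full callback that copes with frames lacking debug
   info: FILENAME and FUNCTION may be NULL and LINENO may be 0.  */
static int
tolerant_frame_cb (void *data, uintptr_t pc, const char *filename,
                   int lineno, const char *function)
{
  FILE *out = (FILE *) data;

  fprintf (out, "0x%lx %s %s:%d\n",
           (unsigned long) pc,
           function != NULL ? function : "???",
           filename != NULL ? filename : "???",
           lineno);
  return 0;   /* Keep unwinding.  */
}

static void
tolerant_error_cb (void *data, const char *msg, int errnum)
{
  (void) data;
  fprintf (stderr, "backtrace_full: %s (%d)\n", msg, errnum);
}

/* Print the current stack to stderr, skipping this wrapper's frame.  */
static void
dump_stack (struct backtrace_state *state)
{
  backtrace_full (state, 1, tolerant_frame_cb, tolerant_error_cb, stderr);
}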
*/ + +static int +xcoff_process_linenos (struct backtrace_state *state, uintptr_t base_address, + const b_xcoff_syment *fsym, const char *filename, + const b_xcoff_scnhdr *sects, + const unsigned char *strtab, size_t strtab_size, + uint32_t fcn_lnno, struct xcoff_incl_vector *vec, + struct xcoff_line_vector *lvec, + const unsigned char *linenos, size_t linenos_size, + uintptr_t lnnoptr0, + backtrace_error_callback error_callback, void *data) +{ + const b_xcoff_auxent *aux; + const b_xcoff_lineno *lineno; + const unsigned char *lineptr; + const char *function; + struct xcoff_incl *incl = NULL; + uintptr_t lnnoptr; + uintptr_t pc; + uint32_t lnno; + int begincl; + + aux = (const b_xcoff_auxent *) (fsym + 1); + lnnoptr = aux->x_fcn.x_lnnoptr; + + if (lnnoptr < lnnoptr0 || lnnoptr + LINESZ > lnnoptr0 + linenos_size) + return 0; + + function = xcoff_symname (fsym, strtab, strtab_size); + if (function == NULL) + return 0; + + /* Skip first entry that points to symtab. */ + + lnnoptr += LINESZ; + + lineptr = linenos + (lnnoptr - lnnoptr0); + + begincl = -1; + while (lineptr + LINESZ <= linenos + linenos_size) + { + lineno = (const b_xcoff_lineno *) lineptr; + + lnno = lineno->l_lnno; + if (lnno == 0) + break; + + /* If part of a function other than the beginning comes from an + include file, the line numbers are absolute, rather than + relative to the beginning of the function. */ + incl = (struct xcoff_incl *) bsearch (&lnnoptr, vec->vec.base, + vec->count, + sizeof (struct xcoff_incl), + xcoff_incl_search); + if (begincl == -1) + begincl = incl != NULL; + if (incl != NULL) + { + filename = incl->filename; + if (begincl == 1) + lnno += fcn_lnno - 1; + } + else + lnno += fcn_lnno - 1; + + pc = base_address + lineno->l_addr.l_paddr + - sects[fsym->n_scnum - 1].s_paddr; + xcoff_add_line (state, pc, filename, function, lnno, error_callback, + data, lvec); + + lnnoptr += LINESZ; + lineptr += LINESZ; + } + + return 1; +} + +/* Initialize the line vector info for xcoff_fileline. */ + +static int +xcoff_initialize_fileline (struct backtrace_state *state, + uintptr_t base_address, + const b_xcoff_scnhdr *sects, + const b_xcoff_syment *syms, size_t nsyms, + const unsigned char *strtab, size_t strtab_size, + const unsigned char *linenos, size_t linenos_size, + uint64_t lnnoptr0, + backtrace_error_callback error_callback, void *data) +{ + struct xcoff_fileline_data *fdata; + struct xcoff_incl_vector vec; + struct xcoff_line *ln; + const b_xcoff_syment *fsym; + const b_xcoff_auxent *aux; + const char *filename; + const char *name; + struct xcoff_incl *incl; + uintptr_t begin, end; + uintptr_t lnno; + size_t i; + + fdata = ((struct xcoff_fileline_data *) + backtrace_alloc (state, sizeof (struct xcoff_fileline_data), + error_callback, data)); + if (fdata == NULL) + return 0; + + memset (fdata, 0, sizeof *fdata); + memset (&vec, 0, sizeof vec); + + /* Process include files first. 
*/ + + begin = 0; + for (i = 0; i < nsyms; ++i) + { + const b_xcoff_syment *asym = &syms[i]; + + switch (asym->n_sclass) + { + case C_BINCL: + begin = asym->n_value; + break; + + case C_EINCL: + if (begin == 0) + break; + end = asym->n_value; + incl = ((struct xcoff_incl *) + backtrace_vector_grow (state, sizeof (struct xcoff_incl), + error_callback, data, &vec.vec)); + if (incl != NULL) + { + incl->filename = xcoff_symname (asym, strtab, strtab_size); + incl->begin = begin; + incl->end = end; + ++vec.count; + } + begin = 0; + break; + } + + i += asym->n_numaux; + } + + backtrace_qsort (vec.vec.base, vec.count, + sizeof (struct xcoff_incl), xcoff_incl_compare); + + filename = NULL; + fsym = NULL; + for (i = 0; i < nsyms; ++i) + { + const b_xcoff_syment *asym = &syms[i]; + + switch (asym->n_sclass) + { + case C_FILE: + filename = xcoff_symname (asym, strtab, strtab_size); + if (filename == NULL) + break; + + /* If the file auxiliary entry is not used, the symbol name is + the name of the source file. If the file auxiliary entry is + used, then the symbol name should be .file, and the first + file auxiliary entry (by convention) contains the source + file name. */ + + if (asym->n_numaux > 0 && !strcmp (filename, ".file")) + { + aux = (const b_xcoff_auxent *) (asym + 1); + if (aux->x_file._x.x_zeroes != 0) + { + /* Make a copy as we will release the symtab view. */ + char name[FILNMLEN+1]; + strncpy (name, aux->x_file.x_fname, FILNMLEN); + name[FILNMLEN] = '\0'; + filename = strdup (name); + } + else if (aux->x_file._x.x_offset < strtab_size) + filename = (const char *) strtab + aux->x_file._x.x_offset; + else + filename = NULL; + } + break; + + case C_EXT: + case C_HIDEXT: + case C_WEAKEXT: + fsym = NULL; + if (!ISFCN (asym->n_type) || asym->n_numaux == 0) + break; + if (filename == NULL) + break; + fsym = asym; + break; + + case C_FCN: + if (asym->n_numaux == 0) + break; + if (fsym == NULL) + break; + name = xcoff_symname (asym, strtab, strtab_size); + if (name == NULL) + break; + aux = (const b_xcoff_auxent *) (asym + 1); +#if BACKTRACE_XCOFF_SIZE == 32 + lnno = (uint32_t) aux->x_block.x_lnnohi << 16 + | aux->x_block.x_lnno; +#else + lnno = aux->x_block.x_lnno; +#endif + if (!strcmp (name, ".bf")) + { + xcoff_process_linenos (state, base_address, fsym, filename, + sects, strtab, strtab_size, lnno, &vec, + &fdata->vec, linenos, linenos_size, + lnnoptr0, error_callback, data); + } + else if (!strcmp (name, ".ef")) + { + fsym = NULL; + } + break; + } + + i += asym->n_numaux; + } + + /* Allocate one extra entry at the end. 
*/ + ln = ((struct xcoff_line *) + backtrace_vector_grow (state, sizeof (struct xcoff_line), + error_callback, data, &fdata->vec.vec)); + if (ln == NULL) + goto fail; + ln->pc = (uintptr_t) -1; + ln->filename = NULL; + ln->function = NULL; + ln->lineno = 0; + + if (!backtrace_vector_release (state, &fdata->vec.vec, error_callback, data)) + goto fail; + + backtrace_qsort (fdata->vec.vec.base, fdata->vec.count, + sizeof (struct xcoff_line), xcoff_line_compare); + + if (!state->threaded) + { + struct xcoff_fileline_data **pp; + + for (pp = (struct xcoff_fileline_data **) (void *) &state->fileline_data; + *pp != NULL; + pp = &(*pp)->next) + ; + *pp = fdata; + } + else + { + while (1) + { + struct xcoff_fileline_data **pp; + + pp = (struct xcoff_fileline_data **) (void *) &state->fileline_data; + + while (1) + { + struct xcoff_fileline_data *p; + + p = backtrace_atomic_load_pointer (pp); + + if (p == NULL) + break; + + pp = &p->next; + } + + if (__sync_bool_compare_and_swap (pp, NULL, fdata)) + break; + } + } + + return 1; + +fail: + return 0; +} + +/* Add the backtrace data for one XCOFF file. Returns 1 on success, + 0 on failure (in both cases descriptor is closed). */ + +static int +xcoff_add (struct backtrace_state *state, int descriptor, off_t offset, + uintptr_t base_address, backtrace_error_callback error_callback, + void *data, fileline *fileline_fn, int *found_sym, int exe) +{ + struct backtrace_view fhdr_view; + struct backtrace_view sects_view; + struct backtrace_view linenos_view; + struct backtrace_view syms_view; + struct backtrace_view str_view; + struct backtrace_view dwarf_view; + b_xcoff_filhdr fhdr; + const b_xcoff_scnhdr *sects; + const b_xcoff_scnhdr *stext; + uint64_t lnnoptr; + uint32_t nlnno; + off_t str_off; + off_t min_offset; + off_t max_offset; + struct dwsect_info dwsect[DWSECT_MAX]; + size_t sects_size; + size_t syms_size; + int32_t str_size; + int sects_view_valid; + int linenos_view_valid; + int syms_view_valid; + int str_view_valid; + int dwarf_view_valid; + int magic_ok; + int i; + + *found_sym = 0; + + sects_view_valid = 0; + linenos_view_valid = 0; + syms_view_valid = 0; + str_view_valid = 0; + dwarf_view_valid = 0; + + str_size = 0; + + /* Map the XCOFF file header. */ + if (!backtrace_get_view (state, descriptor, offset, sizeof (b_xcoff_filhdr), + error_callback, data, &fhdr_view)) + goto fail; + + memcpy (&fhdr, fhdr_view.data, sizeof fhdr); + magic_ok = (fhdr.f_magic == XCOFF_MAGIC); + + backtrace_release_view (state, &fhdr_view, error_callback, data); + + if (!magic_ok) + { + if (exe) + error_callback (data, "executable file is not XCOFF", 0); + goto fail; + } + + /* Verify object is of expected type. */ + if ((exe && (fhdr.f_flags & F_SHROBJ)) + || (!exe && !(fhdr.f_flags & F_SHROBJ))) + goto fail; + + /* Read the section headers. */ + + sects_size = fhdr.f_nscns * sizeof (b_xcoff_scnhdr); + + if (!backtrace_get_view (state, descriptor, + offset + sizeof (fhdr) + fhdr.f_opthdr, + sects_size, error_callback, data, &sects_view)) + goto fail; + sects_view_valid = 1; + sects = (const b_xcoff_scnhdr *) sects_view.data; + + /* FIXME: assumes only one .text section. */ + for (i = 0; i < fhdr.f_nscns; ++i) + if ((sects[i].s_flags & 0xffff) == STYP_TEXT) + break; + if (i == fhdr.f_nscns) + goto fail; + + stext = &sects[i]; + + /* AIX ldinfo_textorg includes the XCOFF headers. */ + base_address = (exe ?
XCOFF_AIX_TEXTBASE : base_address) + stext->s_scnptr; + + lnnoptr = stext->s_lnnoptr; + nlnno = stext->s_nlnno; + +#if BACKTRACE_XCOFF_SIZE == 32 + if (nlnno == _OVERFLOW_MARKER) + { + int sntext = i + 1; + /* Find the matching .ovrflo section. */ + for (i = 0; i < fhdr.f_nscns; ++i) + { + if (((sects[i].s_flags & 0xffff) == STYP_OVRFLO) + && sects[i].s_nlnno == sntext) + { + nlnno = sects[i].s_vaddr; + break; + } + } + } +#endif + + /* Read the symbol table and the string table. */ + + if (fhdr.f_symptr != 0) + { + struct xcoff_syminfo_data *sdata; + + /* Symbol table is followed by the string table. The string table + starts with its length (on 4 bytes). + Map the symbol table and the length of the string table. */ + syms_size = fhdr.f_nsyms * sizeof (b_xcoff_syment); + + if (!backtrace_get_view (state, descriptor, offset + fhdr.f_symptr, + syms_size + 4, error_callback, data, + &syms_view)) + goto fail; + syms_view_valid = 1; + + memcpy (&str_size, syms_view.data + syms_size, 4); + + str_off = fhdr.f_symptr + syms_size; + + if (str_size > 4) + { + /* Map string table (including the length word). */ + + if (!backtrace_get_view (state, descriptor, offset + str_off, + str_size, error_callback, data, &str_view)) + goto fail; + str_view_valid = 1; + } + + sdata = ((struct xcoff_syminfo_data *) + backtrace_alloc (state, sizeof *sdata, error_callback, data)); + if (sdata == NULL) + goto fail; + + if (!xcoff_initialize_syminfo (state, base_address, sects, + syms_view.data, fhdr.f_nsyms, + str_view.data, str_size, + error_callback, data, sdata)) + { + backtrace_free (state, sdata, sizeof *sdata, error_callback, data); + goto fail; + } + + *found_sym = 1; + + xcoff_add_syminfo_data (state, sdata); + } + + /* Read all the DWARF sections in a single view, since they are + probably adjacent in the file. We never release this view. */ + + min_offset = 0; + max_offset = 0; + memset (dwsect, 0, sizeof dwsect); + for (i = 0; i < fhdr.f_nscns; ++i) + { + off_t end; + int idx; + + if ((sects[i].s_flags & 0xffff) != STYP_DWARF + || sects[i].s_size == 0) + continue; + /* Map DWARF section to array index. 
*/ + switch (sects[i].s_flags & 0xffff0000) + { + case SSUBTYP_DWINFO: + idx = DWSECT_INFO; + break; + case SSUBTYP_DWLINE: + idx = DWSECT_LINE; + break; + case SSUBTYP_DWABREV: + idx = DWSECT_ABBREV; + break; + case SSUBTYP_DWARNGE: + idx = DWSECT_RANGES; + break; + case SSUBTYP_DWSTR: + idx = DWSECT_STR; + break; + default: + continue; + } + if (min_offset == 0 || (off_t) sects[i].s_scnptr < min_offset) + min_offset = sects[i].s_scnptr; + end = sects[i].s_scnptr + sects[i].s_size; + if (end > max_offset) + max_offset = end; + dwsect[idx].offset = sects[i].s_scnptr; + dwsect[idx].size = sects[i].s_size; + } + if (min_offset != 0 && max_offset != 0) + { + if (!backtrace_get_view (state, descriptor, offset + min_offset, + max_offset - min_offset, + error_callback, data, &dwarf_view)) + goto fail; + dwarf_view_valid = 1; + + for (i = 0; i < (int) DWSECT_MAX; ++i) + { + if (dwsect[i].offset == 0) + dwsect[i].data = NULL; + else + dwsect[i].data = ((const unsigned char *) dwarf_view.data + + (dwsect[i].offset - min_offset)); + } + + if (!backtrace_dwarf_add (state, 0, + dwsect[DWSECT_INFO].data, + dwsect[DWSECT_INFO].size, +#if BACKTRACE_XCOFF_SIZE == 32 + /* XXX workaround for broken lineoff */ + dwsect[DWSECT_LINE].data - 4, +#else + /* XXX workaround for broken lineoff */ + dwsect[DWSECT_LINE].data - 12, +#endif + dwsect[DWSECT_LINE].size, + dwsect[DWSECT_ABBREV].data, + dwsect[DWSECT_ABBREV].size, + dwsect[DWSECT_RANGES].data, + dwsect[DWSECT_RANGES].size, + dwsect[DWSECT_STR].data, + dwsect[DWSECT_STR].size, + 1, /* big endian */ + error_callback, data, fileline_fn)) + goto fail; + } + + /* Read the XCOFF line number entries if DWARF sections not found. */ + + if (!dwarf_view_valid && fhdr.f_symptr != 0 && lnnoptr != 0) + { + size_t linenos_size = (size_t) nlnno * LINESZ; + + if (!backtrace_get_view (state, descriptor, offset + lnnoptr, + linenos_size, + error_callback, data, &linenos_view)) + goto fail; + linenos_view_valid = 1; + + if (xcoff_initialize_fileline (state, base_address, sects, + syms_view.data, fhdr.f_nsyms, + str_view.data, str_size, + linenos_view.data, linenos_size, + lnnoptr, error_callback, data)) + *fileline_fn = xcoff_fileline; + + backtrace_release_view (state, &linenos_view, error_callback, data); + linenos_view_valid = 0; + } + + backtrace_release_view (state, &sects_view, error_callback, data); + sects_view_valid = 0; + if (syms_view_valid) + backtrace_release_view (state, &syms_view, error_callback, data); + syms_view_valid = 0; + + /* We've read all we need from the executable. */ + if (!backtrace_close (descriptor, error_callback, data)) + goto fail; + descriptor = -1; + + return 1; + + fail: + if (sects_view_valid) + backtrace_release_view (state, &sects_view, error_callback, data); + if (str_view_valid) + backtrace_release_view (state, &str_view, error_callback, data); + if (syms_view_valid) + backtrace_release_view (state, &syms_view, error_callback, data); + if (linenos_view_valid) + backtrace_release_view (state, &linenos_view, error_callback, data); + if (dwarf_view_valid) + backtrace_release_view (state, &dwarf_view, error_callback, data); + if (descriptor != -1 && offset == 0) + backtrace_close (descriptor, error_callback, data); + return 0; +} + +#ifdef HAVE_LOADQUERY + +/* Read an integer value in human-readable format from an AIX + big archive fixed-length or member header.
*/ + +static int +xcoff_parse_decimal (const char *buf, size_t size, off_t *off) +{ + char str[32]; + char *end; + + if (size >= sizeof str) + return 0; + memcpy (str, buf, size); + str[size] = '\0'; + *off = strtol (str, &end, 10); + if (*end != '\0' && *end != ' ') + return 0; + + return 1; +} + +/* Add the backtrace data for a member of an AIX big archive. + Returns 1 on success, 0 on failure. */ + +static int +xcoff_armem_add (struct backtrace_state *state, int descriptor, + uintptr_t base_address, const char *member, + backtrace_error_callback error_callback, void *data, + fileline *fileline_fn, int *found_sym) +{ + struct backtrace_view view; + b_ar_fl_hdr fl_hdr; + const b_ar_hdr *ar_hdr; + off_t off; + off_t len; + int memlen; + + *found_sym = 0; + + /* Map archive fixed-length header. */ + + if (!backtrace_get_view (state, descriptor, 0, sizeof (b_ar_fl_hdr), + error_callback, data, &view)) + goto fail; + + memcpy (&fl_hdr, view.data, sizeof (b_ar_fl_hdr)); + + backtrace_release_view (state, &view, error_callback, data); + + if (memcmp (fl_hdr.fl_magic, AIAMAGBIG, 8) != 0) + goto fail; + + memlen = strlen (member); + + /* Read offset of first archive member. */ + if (!xcoff_parse_decimal (fl_hdr.fl_fstmoff, sizeof fl_hdr.fl_fstmoff, &off)) + goto fail; + while (off != 0) + { + /* Map archive member header and member name. */ + + if (!backtrace_get_view (state, descriptor, off, + sizeof (b_ar_hdr) + memlen, + error_callback, data, &view)) + break; + + ar_hdr = (const b_ar_hdr *) view.data; + + /* Read archive member name length. */ + if (!xcoff_parse_decimal (ar_hdr->ar_namlen, sizeof ar_hdr->ar_namlen, + &len)) + { + backtrace_release_view (state, &view, error_callback, data); + break; + } + if (len == memlen && !memcmp (ar_hdr->ar_name, member, memlen)) + { + off = (off + sizeof (b_ar_hdr) + memlen + 1) & ~1; + + /* The archive can contain several members with the same name + (e.g. 32-bit and 64-bit), so continue if not ok. */ + + if (xcoff_add (state, descriptor, off, base_address, error_callback, + data, fileline_fn, found_sym, 0)) + { + backtrace_release_view (state, &view, error_callback, data); + return 1; + } + } + + /* Read offset of next archive member. */ + if (!xcoff_parse_decimal (ar_hdr->ar_nxtmem, sizeof ar_hdr->ar_nxtmem, + &off)) + { + backtrace_release_view (state, &view, error_callback, data); + break; + } + backtrace_release_view (state, &view, error_callback, data); + } + + fail: + /* No matching member found. */ + backtrace_close (descriptor, error_callback, data); + return 0; +} + +/* Add the backtrace data for dynamically loaded libraries. */ + +static void +xcoff_add_shared_libs (struct backtrace_state *state, + backtrace_error_callback error_callback, + void *data, fileline *fileline_fn, int *found_sym) +{ + const struct ld_info *ldinfo; + void *buf; + unsigned int buflen; + const char *member; + int descriptor; + int does_not_exist; + int lib_found_sym; + int ret; + + /* Retrieve the list of loaded libraries. 
*/ + + buf = NULL; + buflen = 512; + do + { + buf = realloc (buf, buflen); + if (buf == NULL) + { + ret = -1; + break; + } + ret = loadquery (L_GETINFO, buf, buflen); + if (ret == 0) + break; + buflen *= 2; + } + while (ret == -1 && errno == ENOMEM); + if (ret != 0) + { + free (buf); + return; + } + + ldinfo = (const struct ld_info *) buf; + while ((const char *) ldinfo < (const char *) buf + buflen) + { + if (*ldinfo->ldinfo_filename != '/') + goto next; + + descriptor = backtrace_open (ldinfo->ldinfo_filename, error_callback, + data, &does_not_exist); + if (descriptor < 0) + goto next; + + /* Check if it is an archive (member name not empty). */ + + member = ldinfo->ldinfo_filename + strlen (ldinfo->ldinfo_filename) + 1; + if (*member) + { + xcoff_armem_add (state, descriptor, + (uintptr_t) ldinfo->ldinfo_textorg, member, + error_callback, data, fileline_fn, &lib_found_sym); + } + else + { + xcoff_add (state, descriptor, 0, (uintptr_t) ldinfo->ldinfo_textorg, + error_callback, data, fileline_fn, &lib_found_sym, 0); + } + if (lib_found_sym) + *found_sym = 1; + + next: + if (ldinfo->ldinfo_next == 0) + break; + ldinfo = (const struct ld_info *) ((const char *) ldinfo + + ldinfo->ldinfo_next); + } + + free (buf); +} +#endif /* HAVE_LOADQUERY */ + +/* Initialize the backtrace data we need from an XCOFF executable. + Returns 1 on success, 0 on failure. */ + +int +backtrace_initialize (struct backtrace_state *state, + const char *filename ATTRIBUTE_UNUSED, int descriptor, + backtrace_error_callback error_callback, + void *data, fileline *fileline_fn) +{ + int ret; + int found_sym; + fileline xcoff_fileline_fn = xcoff_nodebug; + + ret = xcoff_add (state, descriptor, 0, 0, error_callback, data, + &xcoff_fileline_fn, &found_sym, 1); + if (!ret) + return 0; + +#ifdef HAVE_LOADQUERY + xcoff_add_shared_libs (state, error_callback, data, &xcoff_fileline_fn, + &found_sym); +#endif + + if (!state->threaded) + { + if (found_sym) + state->syminfo_fn = xcoff_syminfo; + else if (state->syminfo_fn == NULL) + state->syminfo_fn = xcoff_nosyms; + } + else + { + if (found_sym) + backtrace_atomic_store_pointer (&state->syminfo_fn, xcoff_syminfo); + else + __sync_bool_compare_and_swap (&state->syminfo_fn, NULL, xcoff_nosyms); + } + + if (!state->threaded) + { + if (state->fileline_fn == NULL || state->fileline_fn == xcoff_nodebug) + *fileline_fn = xcoff_fileline_fn; + } + else + { + fileline current_fn; + + current_fn = backtrace_atomic_load_pointer (&state->fileline_fn); + if (current_fn == NULL || current_fn == xcoff_nodebug) + *fileline_fn = xcoff_fileline_fn; + } + + return 1; +} diff --git a/backtrace-sys/src/libbacktrace/ztest.c b/backtrace-sys/src/libbacktrace/ztest.c new file mode 100644 index 000000000..9cd712a1e --- /dev/null +++ b/backtrace-sys/src/libbacktrace/ztest.c @@ -0,0 +1,537 @@ +/* ztest.c -- Test for libbacktrace inflate code. + Copyright (C) 2017-2018 Free Software Foundation, Inc. + Written by Ian Lance Taylor, Google. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + (1) Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + (2) Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. 
+ + (3) The name of the author may not be used to + endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, +INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, +STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING +IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. */ + +#include "config.h" + +#include <errno.h> +#include <stdio.h> +#include <stdlib.h> +#include <string.h> +#include <sys/types.h> +#include <sys/stat.h> +#include <time.h> + +#ifdef HAVE_ZLIB +#include <zlib.h> +#endif + +#include "backtrace.h" +#include "backtrace-supported.h" + +#include "internal.h" +#include "testlib.h" + +#ifndef HAVE_CLOCK_GETTIME + +typedef int xclockid_t; + +static int +xclock_gettime (xclockid_t id ATTRIBUTE_UNUSED, + struct timespec *ts ATTRIBUTE_UNUSED) +{ + errno = EINVAL; + return -1; +} + +#define clockid_t xclockid_t +#define clock_gettime xclock_gettime +#undef CLOCK_REALTIME +#define CLOCK_REALTIME 0 + +#endif /* !defined(HAVE_CLOCK_GETTIME) */ + +#ifdef CLOCK_PROCESS_CPUTIME_ID +#define ZLIB_CLOCK_GETTIME_ARG CLOCK_PROCESS_CPUTIME_ID +#else +#define ZLIB_CLOCK_GETTIME_ARG CLOCK_REALTIME +#endif + +/* Some tests for the local zlib inflation code. */ + +struct zlib_test +{ + const char *name; + const char *uncompressed; + size_t uncompressed_len; + const char *compressed; + size_t compressed_len; +}; + +/* Error callback.
*/ + +static void +error_callback_compress (void *vdata, const char *msg, int errnum) +{ + fprintf (stderr, "%s", msg); + if (errnum > 0) + fprintf (stderr, ": %s", strerror (errnum)); + fprintf (stderr, "\n"); + exit (EXIT_FAILURE); +} + +static const struct zlib_test tests[] = +{ + { + "empty", + "", + 0, + "\x78\x9c\x03\x00\x00\x00\x00\x01", + 8, + }, + { + "hello", + "hello, world\n", + 0, + ("\x78\x9c\xca\x48\xcd\xc9\xc9\xd7\x51\x28\xcf" + "\x2f\xca\x49\xe1\x02\x04\x00\x00\xff\xff\x21\xe7\x04\x93"), + 25, + }, + { + "goodbye", + "goodbye, world", + 0, + ("\x78\x9c\x4b\xcf\xcf\x4f\x49\xaa" + "\x4c\xd5\x51\x28\xcf\x2f\xca\x49" + "\x01\x00\x28\xa5\x05\x5e"), + 22, + }, + { + "ranges", + ("\xcc\x11\x00\x00\x00\x00\x00\x00\xd5\x13\x00\x00\x00\x00\x00\x00" + "\x1c\x14\x00\x00\x00\x00\x00\x00\x72\x14\x00\x00\x00\x00\x00\x00" + "\x9d\x14\x00\x00\x00\x00\x00\x00\xd5\x14\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\xfb\x12\x00\x00\x00\x00\x00\x00\x09\x13\x00\x00\x00\x00\x00\x00" + "\x0c\x13\x00\x00\x00\x00\x00\x00\xcb\x13\x00\x00\x00\x00\x00\x00" + "\x29\x14\x00\x00\x00\x00\x00\x00\x4e\x14\x00\x00\x00\x00\x00\x00" + "\x9d\x14\x00\x00\x00\x00\x00\x00\xd5\x14\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\xfb\x12\x00\x00\x00\x00\x00\x00\x09\x13\x00\x00\x00\x00\x00\x00" + "\x67\x13\x00\x00\x00\x00\x00\x00\xcb\x13\x00\x00\x00\x00\x00\x00" + "\x9d\x14\x00\x00\x00\x00\x00\x00\xd5\x14\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x5f\x0b\x00\x00\x00\x00\x00\x00\x6c\x0b\x00\x00\x00\x00\x00\x00" + "\x7d\x0b\x00\x00\x00\x00\x00\x00\x7e\x0c\x00\x00\x00\x00\x00\x00" + "\x38\x0f\x00\x00\x00\x00\x00\x00\x5c\x0f\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x83\x0c\x00\x00\x00\x00\x00\x00\xfa\x0c\x00\x00\x00\x00\x00\x00" + "\xfd\x0d\x00\x00\x00\x00\x00\x00\xef\x0e\x00\x00\x00\x00\x00\x00" + "\x14\x0f\x00\x00\x00\x00\x00\x00\x38\x0f\x00\x00\x00\x00\x00\x00" + "\x9f\x0f\x00\x00\x00\x00\x00\x00\xac\x0f\x00\x00\x00\x00\x00\x00" + "\xdb\x0f\x00\x00\x00\x00\x00\x00\xff\x0f\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\xfd\x0d\x00\x00\x00\x00\x00\x00\xd8\x0e\x00\x00\x00\x00\x00\x00" + "\x9f\x0f\x00\x00\x00\x00\x00\x00\xac\x0f\x00\x00\x00\x00\x00\x00" + "\xdb\x0f\x00\x00\x00\x00\x00\x00\xff\x0f\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\xfa\x0c\x00\x00\x00\x00\x00\x00\xea\x0d\x00\x00\x00\x00\x00\x00" + "\xef\x0e\x00\x00\x00\x00\x00\x00\x14\x0f\x00\x00\x00\x00\x00\x00" + "\x5c\x0f\x00\x00\x00\x00\x00\x00\x9f\x0f\x00\x00\x00\x00\x00\x00" + "\xac\x0f\x00\x00\x00\x00\x00\x00\xdb\x0f\x00\x00\x00\x00\x00\x00" + "\xff\x0f\x00\x00\x00\x00\x00\x00\x2c\x10\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x60\x11\x00\x00\x00\x00\x00\x00\xd1\x16\x00\x00\x00\x00\x00\x00" + "\x40\x0b\x00\x00\x00\x00\x00\x00\x2c\x10\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x7a\x00\x00\x00\x00\x00\x00\x00\xb6\x00\x00\x00\x00\x00\x00\x00" + "\x9f\x01\x00\x00\x00\x00\x00\x00\xa7\x01\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x7a\x00\x00\x00\x00\x00\x00\x00\xa9\x00\x00\x00\x00\x00\x00\x00" + "\x9f\x01\x00\x00\x00\x00\x00\x00\xa7\x01\x00\x00\x00\x00\x00\x00" + 
"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"), + 672, + ("\x78\x9c\x3b\x23\xc8\x00\x06\x57\x85\x21\xb4\x8c\x08\x84\x2e\x82" + "\xd2\x73\xa1\xf4\x55\x28\x8d\x0e\x7e\x0b\x41\x68\x4e\xa8\x7e\x1e" + "\x28\x7d\x1a\x4a\x6b\x42\xf5\xf9\x91\x69\x5e\x3a\x9a\x79\x84\xf4" + "\xc7\x73\x43\xe8\x1c\x28\x5d\x0b\xa5\xeb\x78\x20\xb4\x05\x3f\x84" + "\x8e\xe1\xc7\xae\xbf\x19\xaa\xee\x17\x94\xfe\xcb\x0b\xa1\xdf\xf3" + "\x41\x68\x11\x7e\x54\x73\xe6\x43\xe9\x35\x50\xfa\x36\x94\xfe\x8f" + "\xc3\x7c\x98\x79\x37\xf8\xc8\xd3\x0f\x73\xd7\x2b\x1c\xee\x8a\x21" + "\xd2\x5d\x3a\x02\xd8\xcd\x4f\x80\xa6\x87\x8b\x62\x10\xda\x81\x1b" + "\xbf\xfa\x2a\x28\xbd\x0d\x4a\xcf\x67\x84\xd0\xcb\x19\xf1\xab\x5f" + "\x49\xa4\x7a\x00\x48\x97\x29\xd4"), + 152, + } +}; + +/* Test the hand coded samples. */ + +static void +test_samples (struct backtrace_state *state) +{ + size_t i; + + for (i = 0; i < sizeof tests / sizeof tests[0]; ++i) + { + char *p; + size_t v; + size_t j; + unsigned char *uncompressed; + size_t uncompressed_len; + + p = malloc (12 + tests[i].compressed_len); + memcpy (p, "ZLIB", 4); + v = tests[i].uncompressed_len; + if (v == 0) + v = strlen (tests[i].uncompressed); + for (j = 0; j < 8; ++j) + p[j + 4] = (v >> ((7 - j) * 8)) & 0xff; + memcpy (p + 12, tests[i].compressed, tests[i].compressed_len); + uncompressed = NULL; + uncompressed_len = 0; + if (!backtrace_uncompress_zdebug (state, (unsigned char *) p, + tests[i].compressed_len + 12, + error_callback_compress, NULL, + &uncompressed, &uncompressed_len)) + { + fprintf (stderr, "test %s: uncompress failed\n", tests[i].name); + ++failures; + } + else + { + if (uncompressed_len != v) + { + fprintf (stderr, + "test %s: got uncompressed length %zu, want %zu\n", + tests[i].name, uncompressed_len, v); + ++failures; + } + else if (memcmp (tests[i].uncompressed, uncompressed, v) != 0) + { + size_t j; + + fprintf (stderr, "test %s: uncompressed data mismatch\n", + tests[i].name); + for (j = 0; j < v; ++j) + if (tests[i].uncompressed[j] != uncompressed[j]) + fprintf (stderr, " %zu: got %#x want %#x\n", j, + uncompressed[j], tests[i].uncompressed[j]); + ++failures; + } + else + printf ("PASS: inflate %s\n", tests[i].name); + + backtrace_free (state, uncompressed, uncompressed_len, + error_callback_compress, NULL); + } + } +} + +#ifdef HAVE_ZLIB + +/* Given a set of TRIALS timings, discard the lowest and highest + values and return the mean average of the rest. */ + +static size_t +average_time (const size_t *times, size_t trials) +{ + size_t imax; + size_t max; + size_t imin; + size_t min; + size_t i; + size_t sum; + + imin = 0; + imax = 0; + min = times[0]; + max = times[0]; + for (i = 1; i < trials; ++i) + { + if (times[i] < min) + { + imin = i; + min = times[i]; + } + if (times[i] > max) + { + imax = i; + max = times[i]; + } + } + + sum = 0; + for (i = 0; i < trials; ++i) + { + if (i != imax && i != imin) + sum += times[i]; + } + return sum / (trials - 2); +} + +#endif + +/* Test a larger text, if available. 
*/ + +static void +test_large (struct backtrace_state *state) +{ +#ifdef HAVE_ZLIB + unsigned char *orig_buf; + size_t orig_bufsize; + size_t i; + char *compressed_buf; + size_t compressed_bufsize; + unsigned long compress_sizearg; + unsigned char *uncompressed_buf; + size_t uncompressed_bufsize; + int r; + clockid_t cid; + struct timespec ts1; + struct timespec ts2; + size_t ctime; + size_t ztime; + const size_t trials = 16; + size_t ctimes[16]; + size_t ztimes[16]; + static const char * const names[] = { + "Mark.Twain-Tom.Sawyer.txt", + "../libgo/go/compress/testdata/Mark.Twain-Tom.Sawyer.txt" + }; + + orig_buf = NULL; + orig_bufsize = 0; + uncompressed_buf = NULL; + compressed_buf = NULL; + + for (i = 0; i < sizeof names / sizeof names[0]; ++i) + { + size_t len; + char *namebuf; + FILE *e; + struct stat st; + char *rbuf; + size_t got; + + len = strlen (SRCDIR) + strlen (names[i]) + 2; + namebuf = malloc (len); + if (namebuf == NULL) + { + perror ("malloc"); + goto fail; + } + snprintf (namebuf, len, "%s/%s", SRCDIR, names[i]); + e = fopen (namebuf, "r"); + free (namebuf); + if (e == NULL) + continue; + if (fstat (fileno (e), &st) < 0) + { + perror ("fstat"); + fclose (e); + continue; + } + rbuf = malloc (st.st_size); + if (rbuf == NULL) + { + perror ("malloc"); + goto fail; + } + got = fread (rbuf, 1, st.st_size, e); + fclose (e); + if (got > 0) + { + orig_buf = rbuf; + orig_bufsize = got; + break; + } + free (rbuf); + } + + if (orig_buf == NULL) + { + /* We couldn't find an input file. */ + printf ("UNSUPPORTED: inflate large\n"); + return; + } + + compressed_bufsize = compressBound (orig_bufsize) + 12; + compressed_buf = malloc (compressed_bufsize); + if (compressed_buf == NULL) + { + perror ("malloc"); + goto fail; + } + + compress_sizearg = compressed_bufsize - 12; + r = compress (compressed_buf + 12, &compress_sizearg, + orig_buf, orig_bufsize); + if (r != Z_OK) + { + fprintf (stderr, "zlib compress failed: %d\n", r); + goto fail; + } + + compressed_bufsize = compress_sizearg + 12; + + /* Prepare the header that our library expects. 
*/ + memcpy (compressed_buf, "ZLIB", 4); + for (i = 0; i < 8; ++i) + compressed_buf[i + 4] = (orig_bufsize >> ((7 - i) * 8)) & 0xff; + + uncompressed_buf = malloc (orig_bufsize); + if (uncompressed_buf == NULL) + { + perror ("malloc"); + goto fail; + } + uncompressed_bufsize = orig_bufsize; + + if (!backtrace_uncompress_zdebug (state, compressed_buf, compressed_bufsize, + error_callback_compress, NULL, + &uncompressed_buf, &uncompressed_bufsize)) + { + fprintf (stderr, "inflate large: backtrace_uncompress_zdebug failed\n"); + goto fail; + } + + if (uncompressed_bufsize != orig_bufsize) + { + fprintf (stderr, + "inflate large: got uncompressed length %zu, want %zu\n", + uncompressed_bufsize, orig_bufsize); + goto fail; + } + + if (memcmp (uncompressed_buf, orig_buf, uncompressed_bufsize) != 0) + { + fprintf (stderr, "inflate large: uncompressed data mismatch\n"); + goto fail; + } + + printf ("PASS: inflate large\n"); + + for (i = 0; i < trials; ++i) + { + unsigned long uncompress_sizearg; + + cid = ZLIB_CLOCK_GETTIME_ARG; + if (clock_gettime (cid, &ts1) < 0) + { + if (errno == EINVAL) + return; + perror ("clock_gettime"); + return; + } + + if (!backtrace_uncompress_zdebug (state, compressed_buf, + compressed_bufsize, + error_callback_compress, NULL, + &uncompressed_buf, + &uncompressed_bufsize)) + { + fprintf (stderr, + ("inflate large: " + "benchmark backtrace_uncompress_zdebug failed\n")); + return; + } + + if (clock_gettime (cid, &ts2) < 0) + { + perror ("clock_gettime"); + return; + } + + ctime = (ts2.tv_sec - ts1.tv_sec) * 1000000000; + ctime += ts2.tv_nsec - ts1.tv_nsec; + ctimes[i] = ctime; + + if (clock_gettime (cid, &ts1) < 0) + { + perror("clock_gettime"); + return; + } + + uncompress_sizearg = uncompressed_bufsize; + r = uncompress (uncompressed_buf, &uncompress_sizearg, + compressed_buf + 12, compressed_bufsize - 12); + + if (clock_gettime (cid, &ts2) < 0) + { + perror ("clock_gettime"); + return; + } + + if (r != Z_OK) + { + fprintf (stderr, + "inflate large: benchmark zlib uncompress failed: %d\n", + r); + return; + } + + ztime = (ts2.tv_sec - ts1.tv_sec) * 1000000000; + ztime += ts2.tv_nsec - ts1.tv_nsec; + ztimes[i] = ztime; + } + + /* Toss the highest and lowest times and average the rest. */ + ctime = average_time (ctimes, trials); + ztime = average_time (ztimes, trials); + + printf ("backtrace: %zu ns\n", ctime); + printf ("zlib : %zu ns\n", ztime); + printf ("ratio : %g\n", (double) ztime / (double) ctime); + + return; + + fail: + printf ("FAIL: inflate large\n"); + ++failures; + + if (orig_buf != NULL) + free (orig_buf); + if (compressed_buf != NULL) + free (compressed_buf); + if (uncompressed_buf != NULL) + free (uncompressed_buf); + +#else /* !HAVE_ZLIB */ + + printf ("UNSUPPORTED: inflate large\n"); + +#endif /* !HAVE_ZLIB */ +} + +int +main (int argc ATTRIBUTE_UNUSED, char **argv) +{ + struct backtrace_state *state; + + state = backtrace_create_state (argv[0], BACKTRACE_SUPPORTS_THREADS, + error_callback_create, NULL); + + test_samples (state); + test_large (state); + + exit (failures != 0 ? 
EXIT_FAILURE : EXIT_SUCCESS); +} diff --git a/backtrace-sys/symbol-map b/backtrace-sys/symbol-map new file mode 100644 index 000000000..6a9ec7dfc --- /dev/null +++ b/backtrace-sys/symbol-map @@ -0,0 +1,19 @@ +backtrace_full __rbt_backtrace_full +backtrace_dwarf_add __rbt_backtrace_dwarf_add +backtrace_initialize __rbt_backtrace_initialize +backtrace_pcinfo __rbt_backtrace_pcinfo +backtrace_syminfo __rbt_backtrace_syminfo +backtrace_get_view __rbt_backtrace_get_view +backtrace_release_view __rbt_backtrace_release_view +backtrace_alloc __rbt_backtrace_alloc +backtrace_free __rbt_backtrace_free +backtrace_vector_finish __rbt_backtrace_vector_finish +backtrace_vector_grow __rbt_backtrace_vector_grow +backtrace_vector_release __rbt_backtrace_vector_release +backtrace_close __rbt_backtrace_close +backtrace_open __rbt_backtrace_open +backtrace_print __rbt_backtrace_print +backtrace_simple __rbt_backtrace_simple +backtrace_qsort __rbt_backtrace_qsort +backtrace_create_state __rbt_backtrace_create_state +backtrace_uncompress_zdebug __rbt_backtrace_uncompress_zdebug diff --git a/backtrace/.cargo-checksum.json b/backtrace/.cargo-checksum.json new file mode 100644 index 000000000..bba99f40c --- /dev/null +++ b/backtrace/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{},"package":"89a47830402e9981c5c41223151efcced65a0510c13097c769cede7efb34782a"} \ No newline at end of file diff --git a/backtrace/Cargo.toml b/backtrace/Cargo.toml new file mode 100644 index 000000000..f229dd5b4 --- /dev/null +++ b/backtrace/Cargo.toml @@ -0,0 +1,86 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. 
If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "backtrace" +version = "0.3.9" +authors = ["Alex Crichton ", "The Rust Project Developers"] +description = "A library to acquire a stack trace (backtrace) at runtime in a Rust program.\n" +homepage = "https://github.com/alexcrichton/backtrace-rs" +documentation = "https://docs.rs/backtrace" +readme = "README.md" +license = "MIT/Apache-2.0" +repository = "https://github.com/alexcrichton/backtrace-rs" +[dependencies.addr2line] +version = "0.6.0" +optional = true + +[dependencies.cfg-if] +version = "0.1" + +[dependencies.cpp_demangle] +version = "0.2.3" +optional = true +default-features = false + +[dependencies.findshlibs] +version = "0.3.3" +optional = true + +[dependencies.gimli] +version = "0.15.0" +optional = true + +[dependencies.memmap] +version = "0.6.2" +optional = true + +[dependencies.object] +version = "0.7.0" +optional = true + +[dependencies.rustc-demangle] +version = "0.1.4" + +[dependencies.rustc-serialize] +version = "0.3" +optional = true + +[dependencies.serde] +version = "1.0" +optional = true + +[dependencies.serde_derive] +version = "1.0" +optional = true + +[features] +coresymbolication = [] +dbghelp = ["winapi"] +default = ["libunwind", "libbacktrace", "coresymbolication", "dladdr", "dbghelp"] +dladdr = [] +gimli-symbolize = ["addr2line", "findshlibs", "gimli", "memmap", "object"] +kernel32 = [] +libbacktrace = ["backtrace-sys"] +libunwind = [] +serialize-rustc = ["rustc-serialize"] +serialize-serde = ["serde", "serde_derive"] +unix-backtrace = [] +[target."cfg(all(unix, not(target_os = \"fuchsia\"), not(target_os = \"emscripten\"), not(target_os = \"macos\"), not(target_os = \"ios\")))".dependencies.backtrace-sys] +version = "0.1.17" +optional = true +[target."cfg(unix)".dependencies.libc] +version = "0.2" +[target."cfg(windows)".dependencies.winapi] +version = "0.3.3" +features = ["std", "dbghelp", "processthreadsapi", "winnt", "minwindef"] +optional = true diff --git a/backtrace/LICENSE-APACHE b/backtrace/LICENSE-APACHE new file mode 100644 index 000000000..16fe87b06 --- /dev/null +++ b/backtrace/LICENSE-APACHE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/backtrace/LICENSE-MIT b/backtrace/LICENSE-MIT new file mode 100644 index 000000000..39e0ed660 --- /dev/null +++ b/backtrace/LICENSE-MIT @@ -0,0 +1,25 @@ +Copyright (c) 2014 Alex Crichton + +Permission is hereby granted, free of charge, to any +person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without +limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software +is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions +of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/backtrace/README.md b/backtrace/README.md new file mode 100644 index 000000000..e25d66ef7 --- /dev/null +++ b/backtrace/README.md @@ -0,0 +1,91 @@ +# backtrace-rs + +[![Build Status](https://travis-ci.org/alexcrichton/backtrace-rs.svg?branch=master)](https://travis-ci.org/alexcrichton/backtrace-rs) +[![Build status](https://ci.appveyor.com/api/projects/status/v4l9oj4aqbbgyx44?svg=true)](https://ci.appveyor.com/project/alexcrichton/backtrace-rs) + +[Documentation](https://docs.rs/backtrace) + +A library for acquiring backtraces at runtime for Rust. This library aims to +enhance the support given by the standard library at `std::rt` by providing a +more stable and programmatic interface. + +## Install + +```toml +[dependencies] +backtrace = "0.3" +``` + +```rust +extern crate backtrace; +``` + +Note that this crate requires `make`, `objcopy`, and `ar` to be present on Linux +systems. + +## Usage + +To simply capture a backtrace and defer dealing with it until a later time, +you can use the top-level `Backtrace` type. + +```rust +extern crate backtrace; + +use backtrace::Backtrace; + +fn main() { + let bt = Backtrace::new(); + + // do_some_work(); + + println!("{:?}", bt); +} +``` + +If, however, you'd like more raw access to the actual tracing functionality, you +can use the `trace` and `resolve` functions directly. + +```rust +extern crate backtrace; + +fn main() { + backtrace::trace(|frame| { + let ip = frame.ip(); + let symbol_address = frame.symbol_address(); + + // Resolve this instruction pointer to a symbol name + backtrace::resolve(ip, |symbol| { + if let Some(name) = symbol.name() { + // ... + } + if let Some(filename) = symbol.filename() { + // ... + } + }); + + true // keep going to the next frame + }); +} +``` + +## Platform Support + +This library currently supports OSX, Linux, and Windows. Support for other +platforms is always welcome! + +# License + +This project is licensed under either of + + * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or + http://www.apache.org/licenses/LICENSE-2.0) + * MIT license ([LICENSE-MIT](LICENSE-MIT) or + http://opensource.org/licenses/MIT) + +at your option. + +### Contribution + +Unless you explicitly state otherwise, any contribution intentionally submitted +for inclusion in backtrace-rs by you, as defined in the Apache-2.0 license, shall be +dual licensed as above, without any additional terms or conditions. 
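As a complement to the usage examples in the README above, the sketch below (not part of the vendored crate) shows one way to combine the two styles with the 0.3-series API: capture the raw frames cheaply with `Backtrace::new_unresolved`, then pay for symbolication with `resolve` only when the trace is actually reported. The `capture_trace` helper name is made up for illustration.

```rust
extern crate backtrace;

use backtrace::Backtrace;

// Hypothetical helper: grab raw instruction pointers only; no symbol
// lookup happens here, so this is cheap enough for hot error paths.
fn capture_trace() -> Backtrace {
    Backtrace::new_unresolved()
}

fn main() {
    let mut bt = capture_trace();

    // ... later, once we decide the trace is worth reporting ...
    bt.resolve(); // fill in names, file names and line numbers

    for (i, frame) in bt.frames().iter().enumerate() {
        for symbol in frame.symbols() {
            if let Some(name) = symbol.name() {
                println!("frame {:2}: {}", i, name);
            }
        }
    }
}
```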
diff --git a/backtrace/appveyor.yml b/backtrace/appveyor.yml new file mode 100644 index 000000000..a1b2bc18b --- /dev/null +++ b/backtrace/appveyor.yml @@ -0,0 +1,20 @@ +environment: + matrix: + - TARGET: x86_64-pc-windows-gnu + MSYS_BITS: 64 + - TARGET: i686-pc-windows-gnu + MSYS_BITS: 32 + - TARGET: x86_64-pc-windows-msvc + - TARGET: i686-pc-windows-msvc +install: + - ps: Start-FileDownload "https://static.rust-lang.org/dist/rust-nightly-${env:TARGET}.exe" + - rust-nightly-%TARGET%.exe /VERYSILENT /NORESTART /DIR="C:\Program Files (x86)\Rust" + - set PATH=%PATH%;C:\Program Files (x86)\Rust\bin + - if defined MSYS_BITS set PATH=%PATH%;C:\msys64\mingw%MSYS_BITS%\bin + - rustc -V + - cargo -V + +build: false + +test_script: + - cargo test --target %TARGET% diff --git a/backtrace/ci/android-ndk.sh b/backtrace/ci/android-ndk.sh new file mode 100755 index 000000000..b5df62b6f --- /dev/null +++ b/backtrace/ci/android-ndk.sh @@ -0,0 +1,23 @@ +set -ex + +ANDROID_ARCH=$1 +ANDROID_SDK_VERSION=4333796 + +mkdir /tmp/android +cd /tmp/android + +curl -o android-sdk.zip \ + "https://dl.google.com/android/repository/sdk-tools-linux-${ANDROID_SDK_VERSION}.zip" +unzip -q android-sdk.zip + +yes | ./tools/bin/sdkmanager --licenses > /dev/null +./tools/bin/sdkmanager ndk-bundle > /dev/null + +./ndk-bundle/build/tools/make_standalone_toolchain.py \ + --arch $ANDROID_ARCH \ + --stl=libc++ \ + --api 21 \ + --install-dir /android-toolchain + +cd /tmp +rm -rf android diff --git a/backtrace/ci/docker/aarch64-linux-android/Dockerfile b/backtrace/ci/docker/aarch64-linux-android/Dockerfile new file mode 100644 index 000000000..b9026c06c --- /dev/null +++ b/backtrace/ci/docker/aarch64-linux-android/Dockerfile @@ -0,0 +1,18 @@ +FROM ubuntu:18.04 + +RUN apt-get update && apt-get install -y --no-install-recommends \ + curl \ + ca-certificates \ + unzip \ + openjdk-8-jre \ + python \ + gcc \ + libc6-dev + +COPY android-ndk.sh / +RUN /android-ndk.sh arm64 +ENV PATH=$PATH:/android-toolchain/bin + +# TODO: run tests in an emulator eventually +ENV CARGO_TARGET_AARCH64_LINUX_ANDROID_LINKER=aarch64-linux-android-gcc \ + CARGO_TARGET_AARCH64_LINUX_ANDROID_RUNNER="true" diff --git a/backtrace/ci/docker/aarch64-unknown-linux-gnu/Dockerfile b/backtrace/ci/docker/aarch64-unknown-linux-gnu/Dockerfile new file mode 100644 index 000000000..d691aa46e --- /dev/null +++ b/backtrace/ci/docker/aarch64-unknown-linux-gnu/Dockerfile @@ -0,0 +1,11 @@ +FROM ubuntu:18.04 +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + ca-certificates \ + libc6-dev \ + gcc-aarch64-linux-gnu \ + libc6-dev-arm64-cross \ + qemu-user + +ENV CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER=aarch64-linux-gnu-gcc \ + CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_RUNNER="qemu-aarch64 -L /usr/aarch64-linux-gnu" diff --git a/backtrace/ci/docker/arm-linux-androideabi/Dockerfile b/backtrace/ci/docker/arm-linux-androideabi/Dockerfile new file mode 100644 index 000000000..10799974e --- /dev/null +++ b/backtrace/ci/docker/arm-linux-androideabi/Dockerfile @@ -0,0 +1,18 @@ +FROM ubuntu:18.04 + +RUN apt-get update && apt-get install -y --no-install-recommends \ + curl \ + ca-certificates \ + unzip \ + openjdk-8-jre \ + python \ + gcc \ + libc6-dev + +COPY android-ndk.sh / +RUN /android-ndk.sh arm +ENV PATH=$PATH:/android-toolchain/bin + +# TODO: run tests in an emulator eventually +ENV CARGO_TARGET_ARM_LINUX_ANDROIDEABI_LINKER=arm-linux-androideabi-gcc \ + CARGO_TARGET_ARM_LINUX_ANDROIDEABI_RUNNER="true" diff --git 
a/backtrace/ci/docker/arm-unknown-linux-gnueabihf/Dockerfile b/backtrace/ci/docker/arm-unknown-linux-gnueabihf/Dockerfile new file mode 100644 index 000000000..32095e98f --- /dev/null +++ b/backtrace/ci/docker/arm-unknown-linux-gnueabihf/Dockerfile @@ -0,0 +1,10 @@ +FROM ubuntu:18.04 +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + ca-certificates \ + libc6-dev \ + gcc-arm-linux-gnueabihf \ + libc6-dev-armhf-cross \ + qemu-user +ENV CARGO_TARGET_ARM_UNKNOWN_LINUX_GNUEABIHF_LINKER=arm-linux-gnueabihf-gcc \ + CARGO_TARGET_ARM_UNKNOWN_LINUX_GNUEABIHF_RUNNER="qemu-arm -L /usr/arm-linux-gnueabihf" diff --git a/backtrace/ci/docker/armv7-linux-androideabi/Dockerfile b/backtrace/ci/docker/armv7-linux-androideabi/Dockerfile new file mode 100644 index 000000000..9696677ed --- /dev/null +++ b/backtrace/ci/docker/armv7-linux-androideabi/Dockerfile @@ -0,0 +1,18 @@ +FROM ubuntu:18.04 + +RUN apt-get update && apt-get install -y --no-install-recommends \ + curl \ + ca-certificates \ + unzip \ + openjdk-8-jre \ + python \ + gcc \ + libc6-dev + +COPY android-ndk.sh / +RUN /android-ndk.sh arm +ENV PATH=$PATH:/android-toolchain/bin + +# TODO: run tests in an emulator eventually +ENV CARGO_TARGET_ARMV7_LINUX_ANDROIDEABI_LINKER=arm-linux-androideabi-gcc \ + CARGO_TARGET_ARMV7_LINUX_ANDROIDEABI_RUNNER="true" diff --git a/backtrace/ci/docker/armv7-unknown-linux-gnueabihf/Dockerfile b/backtrace/ci/docker/armv7-unknown-linux-gnueabihf/Dockerfile new file mode 100644 index 000000000..a55fb2792 --- /dev/null +++ b/backtrace/ci/docker/armv7-unknown-linux-gnueabihf/Dockerfile @@ -0,0 +1,10 @@ +FROM ubuntu:18.04 +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + ca-certificates \ + libc6-dev \ + gcc-arm-linux-gnueabihf \ + libc6-dev-armhf-cross \ + qemu-user +ENV CARGO_TARGET_ARMV7_UNKNOWN_LINUX_GNUEABIHF_LINKER=arm-linux-gnueabihf-gcc \ + CARGO_TARGET_ARMV7_UNKNOWN_LINUX_GNUEABIHF_RUNNER="qemu-arm -L /usr/arm-linux-gnueabihf" diff --git a/backtrace/ci/docker/i586-unknown-linux-gnu/Dockerfile b/backtrace/ci/docker/i586-unknown-linux-gnu/Dockerfile new file mode 100644 index 000000000..d22209295 --- /dev/null +++ b/backtrace/ci/docker/i586-unknown-linux-gnu/Dockerfile @@ -0,0 +1,5 @@ +FROM ubuntu:18.04 +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc-multilib \ + libc6-dev \ + ca-certificates diff --git a/backtrace/ci/docker/i686-linux-android/Dockerfile b/backtrace/ci/docker/i686-linux-android/Dockerfile new file mode 100644 index 000000000..c4946ad1d --- /dev/null +++ b/backtrace/ci/docker/i686-linux-android/Dockerfile @@ -0,0 +1,18 @@ +FROM ubuntu:18.04 + +RUN apt-get update && apt-get install -y --no-install-recommends \ + curl \ + ca-certificates \ + unzip \ + openjdk-8-jre \ + python \ + gcc \ + libc6-dev + +COPY android-ndk.sh / +RUN /android-ndk.sh x86 +ENV PATH=$PATH:/android-toolchain/bin + +# TODO: run tests in an emulator eventually +ENV CARGO_TARGET_I686_LINUX_ANDROID_LINKER=i686-linux-android-gcc \ + CARGO_TARGET_I686_LINUX_ANDROID_RUNNER="true" diff --git a/backtrace/ci/docker/i686-unknown-linux-gnu/Dockerfile b/backtrace/ci/docker/i686-unknown-linux-gnu/Dockerfile new file mode 100644 index 000000000..d22209295 --- /dev/null +++ b/backtrace/ci/docker/i686-unknown-linux-gnu/Dockerfile @@ -0,0 +1,5 @@ +FROM ubuntu:18.04 +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc-multilib \ + libc6-dev \ + ca-certificates diff --git a/backtrace/ci/docker/powerpc-unknown-linux-gnu/Dockerfile 
b/backtrace/ci/docker/powerpc-unknown-linux-gnu/Dockerfile new file mode 100644 index 000000000..7323e8591 --- /dev/null +++ b/backtrace/ci/docker/powerpc-unknown-linux-gnu/Dockerfile @@ -0,0 +1,9 @@ +FROM ubuntu:18.04 + +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc libc6-dev qemu-user ca-certificates \ + gcc-powerpc-linux-gnu libc6-dev-powerpc-cross \ + qemu-system-ppc + +ENV CARGO_TARGET_POWERPC_UNKNOWN_LINUX_GNU_LINKER=powerpc-linux-gnu-gcc \ + CARGO_TARGET_POWERPC_UNKNOWN_LINUX_GNU_RUNNER="qemu-ppc -cpu Vger -L /usr/powerpc-linux-gnu" diff --git a/backtrace/ci/docker/powerpc64-unknown-linux-gnu/Dockerfile b/backtrace/ci/docker/powerpc64-unknown-linux-gnu/Dockerfile new file mode 100644 index 000000000..5e22a9f7c --- /dev/null +++ b/backtrace/ci/docker/powerpc64-unknown-linux-gnu/Dockerfile @@ -0,0 +1,16 @@ +FROM ubuntu:18.04 + +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + ca-certificates \ + libc6-dev \ + gcc-powerpc64-linux-gnu \ + libc6-dev-ppc64-cross \ + qemu-user \ + qemu-system-ppc + +ENV CARGO_TARGET_POWERPC64_UNKNOWN_LINUX_GNU_LINKER=powerpc64-linux-gnu-gcc \ + # TODO: should actually run these tests + #CARGO_TARGET_POWERPC64_UNKNOWN_LINUX_GNU_RUNNER="qemu-ppc64 -L /usr/powerpc64-linux-gnu" \ + CARGO_TARGET_POWERPC64_UNKNOWN_LINUX_GNU_RUNNER=true \ + CC=powerpc64-linux-gnu-gcc diff --git a/backtrace/ci/docker/x86_64-linux-android/Dockerfile b/backtrace/ci/docker/x86_64-linux-android/Dockerfile new file mode 100644 index 000000000..6ed4a65db --- /dev/null +++ b/backtrace/ci/docker/x86_64-linux-android/Dockerfile @@ -0,0 +1,18 @@ +FROM ubuntu:18.04 + +RUN apt-get update && apt-get install -y --no-install-recommends \ + curl \ + ca-certificates \ + unzip \ + openjdk-8-jre \ + python \ + gcc \ + libc6-dev + +COPY android-ndk.sh / +RUN /android-ndk.sh x86_64 +ENV PATH=$PATH:/android-toolchain/bin + +# TODO: run tests in an emulator eventually +ENV CARGO_TARGET_X86_64_LINUX_ANDROID_LINKER=x86_64-linux-android-gcc \ + CARGO_TARGET_X86_64_LINUX_ANDROID_RUNNER="true" diff --git a/backtrace/ci/docker/x86_64-pc-windows-gnu/Dockerfile b/backtrace/ci/docker/x86_64-pc-windows-gnu/Dockerfile new file mode 100644 index 000000000..48db21980 --- /dev/null +++ b/backtrace/ci/docker/x86_64-pc-windows-gnu/Dockerfile @@ -0,0 +1,10 @@ +FROM ubuntu:18.04 +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + libc6-dev \ + ca-certificates \ + gcc-mingw-w64-x86-64 + +# No need to run tests, we're just testing that it compiles +ENV CARGO_TARGET_X86_64_PC_WINDOWS_GNU_RUNNER=true \ + CARGO_TARGET_X86_64_PC_WINDOWS_GNU_LINKER=x86_64-w64-mingw32-gcc diff --git a/backtrace/ci/docker/x86_64-unknown-linux-gnu/Dockerfile b/backtrace/ci/docker/x86_64-unknown-linux-gnu/Dockerfile new file mode 100644 index 000000000..864d72e62 --- /dev/null +++ b/backtrace/ci/docker/x86_64-unknown-linux-gnu/Dockerfile @@ -0,0 +1,5 @@ +FROM ubuntu:18.04 +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + libc6-dev \ + ca-certificates diff --git a/backtrace/ci/docker/x86_64-unknown-linux-musl/Dockerfile b/backtrace/ci/docker/x86_64-unknown-linux-musl/Dockerfile new file mode 100644 index 000000000..6984dc217 --- /dev/null +++ b/backtrace/ci/docker/x86_64-unknown-linux-musl/Dockerfile @@ -0,0 +1,6 @@ +FROM ubuntu:18.04 +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + libc6-dev \ + ca-certificates \ + musl-tools diff --git a/backtrace/ci/run-docker.sh b/backtrace/ci/run-docker.sh new file mode 
100755 index 000000000..abce99e70 --- /dev/null +++ b/backtrace/ci/run-docker.sh @@ -0,0 +1,32 @@ +# Small script to run tests for a target (or all targets) inside all the +# respective docker images. + +set -ex + +run() { + docker build -t backtrace -f ci/docker/$1/Dockerfile ci + mkdir -p target + docker run \ + --user `id -u`:`id -g` \ + --rm \ + --init \ + --volume $HOME/.cargo:/cargo \ + --env CARGO_HOME=/cargo \ + --volume `rustc --print sysroot`:/rust:ro \ + --env TARGET=$1 \ + --volume `pwd`:/checkout:ro \ + --volume `pwd`/target:/checkout/target \ + --workdir /checkout \ + --privileged \ + backtrace \ + bash \ + -c 'PATH=$PATH:/rust/bin exec ci/run.sh' +} + +if [ -z "$1" ]; then + for d in `ls ci/docker/`; do + run $d + done +else + run $1 +fi diff --git a/backtrace/ci/run.sh b/backtrace/ci/run.sh new file mode 100755 index 000000000..5cc151507 --- /dev/null +++ b/backtrace/ci/run.sh @@ -0,0 +1,5 @@ +#!/bin/sh + +set -ex + +cargo test --target $TARGET diff --git a/backtrace/examples/backtrace.rs b/backtrace/examples/backtrace.rs new file mode 100644 index 000000000..7f9042ed7 --- /dev/null +++ b/backtrace/examples/backtrace.rs @@ -0,0 +1,7 @@ +extern crate backtrace; + +use backtrace::Backtrace; + +fn main() { + println!("{:?}", Backtrace::new()); +} diff --git a/backtrace/examples/raw.rs b/backtrace/examples/raw.rs new file mode 100644 index 000000000..40a6b120a --- /dev/null +++ b/backtrace/examples/raw.rs @@ -0,0 +1,48 @@ +extern crate backtrace; + +fn main() { + foo(); +} + +fn foo() { bar() } +fn bar() { baz() } +fn baz() { print() } + +#[cfg(target_pointer_width = "32")] const HEX_WIDTH: usize = 10; +#[cfg(target_pointer_width = "64")] const HEX_WIDTH: usize = 20; + +fn print() { + let mut cnt = 0; + backtrace::trace(|frame| { + let ip = frame.ip(); + print!("frame #{:<2} - {:#02$x}", cnt, ip as usize, HEX_WIDTH); + cnt += 1; + + let mut resolved = false; + backtrace::resolve(frame.ip(), |symbol| { + if !resolved { + resolved = true; + } else { + print!("{}", vec![" "; 7 + 2 + 3 + HEX_WIDTH].join("")); + } + + if let Some(name) = symbol.name() { + print!(" - {}", name); + } else { + print!(" - "); + } + if let Some(file) = symbol.filename() { + if let Some(l) = symbol.lineno() { + print!("\n{:13}{:4$}@ {}:{}", "", "", file.display(), l, + HEX_WIDTH); + } + } + println!(""); + + }); + if !resolved { + println!(" - "); + } + true // keep going + }); +} diff --git a/backtrace/src/backtrace/dbghelp.rs b/backtrace/src/backtrace/dbghelp.rs new file mode 100644 index 000000000..26b395a69 --- /dev/null +++ b/backtrace/src/backtrace/dbghelp.rs @@ -0,0 +1,103 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
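// --- Editor's illustrative sketch (not part of the vendored upstream file) ---
// Whichever backend is compiled in below (dbghelp on Windows, libunwind or
// backtrace(3) elsewhere), callers reach it through the same public
// `backtrace::trace` closure API. A minimal, self-contained use, assuming a
// crate that depends on `backtrace`:

extern crate backtrace;

/// Counts the number of active stack frames at the call site.
fn count_frames() -> usize {
    let mut n = 0;
    backtrace::trace(|_frame| {
        n += 1;
        true // keep walking toward the bottom of the stack
    });
    n
}
// --- end of editor's sketch ---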
+ +#![allow(bad_style)] + +use std::mem; +use winapi::ctypes::*; +use winapi::shared::minwindef::*; +use winapi::um::processthreadsapi; +use winapi::um::winnt::{self, CONTEXT}; +use winapi::um::dbghelp; +use winapi::um::dbghelp::*; + +pub struct Frame { + inner: STACKFRAME64, +} + +impl Frame { + pub fn ip(&self) -> *mut c_void { + self.inner.AddrPC.Offset as *mut _ + } + + pub fn symbol_address(&self) -> *mut c_void { + self.ip() + } +} + +#[inline(always)] +pub fn trace(cb: &mut FnMut(&super::Frame) -> bool) { + // According to windows documentation, all dbghelp functions are + // single-threaded. + let _g = ::lock::lock(); + + unsafe { + // Allocate necessary structures for doing the stack walk + let process = processthreadsapi::GetCurrentProcess(); + let thread = processthreadsapi::GetCurrentThread(); + + // The CONTEXT structure needs to be aligned on a 16-byte boundary for + // 64-bit Windows, but currently we don't have a way to express that in + // Rust. Allocations are generally aligned to 16-bytes, though, so we + // box this up. + let mut context = Box::new(mem::zeroed::()); + winnt::RtlCaptureContext(&mut *context); + let mut frame = super::Frame { + inner: Frame { inner: mem::zeroed() }, + }; + let image = init_frame(&mut frame.inner.inner, &context); + + // Initialize this process's symbols + let _c = ::dbghelp_init(); + + // And now that we're done with all the setup, do the stack walking! + while dbghelp::StackWalk64(image as DWORD, + process, + thread, + &mut frame.inner.inner, + &mut *context as *mut _ as *mut _, + None, + Some(dbghelp::SymFunctionTableAccess64), + Some(dbghelp::SymGetModuleBase64), + None) == TRUE { + if frame.inner.inner.AddrPC.Offset == frame.inner.inner.AddrReturn.Offset || + frame.inner.inner.AddrPC.Offset == 0 || + frame.inner.inner.AddrReturn.Offset == 0 { + break + } + + if !cb(&frame) { + break + } + } + } +} + +#[cfg(target_arch = "x86_64")] +fn init_frame(frame: &mut STACKFRAME64, ctx: &CONTEXT) -> WORD { + frame.AddrPC.Offset = ctx.Rip as u64; + frame.AddrPC.Mode = AddrModeFlat; + frame.AddrStack.Offset = ctx.Rsp as u64; + frame.AddrStack.Mode = AddrModeFlat; + frame.AddrFrame.Offset = ctx.Rbp as u64; + frame.AddrFrame.Mode = AddrModeFlat; + winnt::IMAGE_FILE_MACHINE_AMD64 +} + +#[cfg(target_arch = "x86")] +fn init_frame(frame: &mut STACKFRAME64, ctx: &CONTEXT) -> WORD { + frame.AddrPC.Offset = ctx.Eip as u64; + frame.AddrPC.Mode = AddrModeFlat; + frame.AddrStack.Offset = ctx.Esp as u64; + frame.AddrStack.Mode = AddrModeFlat; + frame.AddrFrame.Offset = ctx.Ebp as u64; + frame.AddrFrame.Mode = AddrModeFlat; + winnt::IMAGE_FILE_MACHINE_I386 +} diff --git a/backtrace/src/backtrace/libunwind.rs b/backtrace/src/backtrace/libunwind.rs new file mode 100644 index 000000000..4d7aef248 --- /dev/null +++ b/backtrace/src/backtrace/libunwind.rs @@ -0,0 +1,200 @@ +// Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
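// --- Editor's illustrative sketch (not part of the vendored upstream file) ---
// The unwinder bindings below expose two addresses per frame: `ip`, the point
// of execution, and `symbol_address`, the enclosing function's start (usable
// as a fallback input to `backtrace::resolve`). A small sketch of collecting
// both, assuming a crate that depends on `backtrace`:

extern crate backtrace;

/// Returns (instruction pointer, symbol address) pairs for every frame.
fn frame_addresses() -> Vec<(usize, usize)> {
    let mut pairs = Vec::new();
    backtrace::trace(|frame| {
        pairs.push((frame.ip() as usize, frame.symbol_address() as usize));
        true // visit every frame
    });
    pairs
}
// --- end of editor's sketch ---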
+ +use std::os::raw::c_void; + +pub struct Frame { + ctx: *mut uw::_Unwind_Context, +} + +impl Frame { + pub fn ip(&self) -> *mut c_void { + let mut ip_before_insn = 0; + let mut ip = unsafe { + uw::_Unwind_GetIPInfo(self.ctx, &mut ip_before_insn) as *mut c_void + }; + if !ip.is_null() && ip_before_insn == 0 { + // this is a non-signaling frame, so `ip` refers to the address + // after the calling instruction. account for that. + ip = (ip as usize - 1) as *mut _; + } + return ip + } + + pub fn symbol_address(&self) -> *mut c_void { + // dladdr() on osx gets whiny when we use FindEnclosingFunction, and + // it appears to work fine without it, so we only use + // FindEnclosingFunction on non-osx platforms. In doing so, we get a + // slightly more accurate stack trace in the process. + // + // This is often because panic involves the last instruction of a + // function being "call std::rt::begin_unwind", with no ret + // instructions after it. This means that the return instruction + // pointer points *outside* of the calling function, and by + // unwinding it we go back to the original function. + if cfg!(target_os = "macos") || cfg!(target_os = "ios") { + self.ip() + } else { + unsafe { uw::_Unwind_FindEnclosingFunction(self.ip()) } + } + } +} + +#[inline(always)] +pub fn trace(mut cb: &mut FnMut(&super::Frame) -> bool) { + unsafe { + uw::_Unwind_Backtrace(trace_fn, &mut cb as *mut _ as *mut _); + } + + extern fn trace_fn(ctx: *mut uw::_Unwind_Context, + arg: *mut c_void) -> uw::_Unwind_Reason_Code { + let cb = unsafe { &mut *(arg as *mut &mut FnMut(&super::Frame) -> bool) }; + let cx = super::Frame { + inner: Frame { ctx: ctx }, + }; + + let mut bomb = ::Bomb { enabled: true }; + let keep_going = cb(&cx); + bomb.enabled = false; + + if keep_going { + uw::_URC_NO_REASON + } else { + uw::_URC_FAILURE + } + } +} + +/// Unwind library interface used for backtraces +/// +/// Note that dead code is allowed as here are just bindings +/// iOS doesn't use all of them it but adding more +/// platform-specific configs pollutes the code too much +#[allow(non_camel_case_types)] +#[allow(non_snake_case)] +#[allow(dead_code)] +mod uw { + pub use self::_Unwind_Reason_Code::*; + + use libc; + use std::os::raw::{c_int, c_void}; + + #[repr(C)] + pub enum _Unwind_Reason_Code { + _URC_NO_REASON = 0, + _URC_FOREIGN_EXCEPTION_CAUGHT = 1, + _URC_FATAL_PHASE2_ERROR = 2, + _URC_FATAL_PHASE1_ERROR = 3, + _URC_NORMAL_STOP = 4, + _URC_END_OF_STACK = 5, + _URC_HANDLER_FOUND = 6, + _URC_INSTALL_CONTEXT = 7, + _URC_CONTINUE_UNWIND = 8, + _URC_FAILURE = 9, // used only by ARM EABI + } + + pub enum _Unwind_Context {} + + pub type _Unwind_Trace_Fn = + extern fn(ctx: *mut _Unwind_Context, + arg: *mut c_void) -> _Unwind_Reason_Code; + + extern { + // No native _Unwind_Backtrace on iOS + #[cfg(not(all(target_os = "ios", target_arch = "arm")))] + pub fn _Unwind_Backtrace(trace: _Unwind_Trace_Fn, + trace_argument: *mut c_void) + -> _Unwind_Reason_Code; + + // available since GCC 4.2.0, should be fine for our purpose + #[cfg(all(not(all(target_os = "android", target_arch = "arm")), + not(all(target_os = "linux", target_arch = "arm"))))] + pub fn _Unwind_GetIPInfo(ctx: *mut _Unwind_Context, + ip_before_insn: *mut c_int) + -> libc::uintptr_t; + + #[cfg(all(not(target_os = "android"), + not(all(target_os = "linux", target_arch = "arm"))))] + pub fn _Unwind_FindEnclosingFunction(pc: *mut c_void) + -> *mut c_void; + } + + // On android, the function _Unwind_GetIP is a macro, and this is the + // expansion of the macro. 
This is all copy/pasted directly from the + // header file with the definition of _Unwind_GetIP. + #[cfg(any(all(target_os = "android", target_arch = "arm"), + all(target_os = "linux", target_arch = "arm")))] + pub unsafe fn _Unwind_GetIP(ctx: *mut _Unwind_Context) -> libc::uintptr_t { + #[repr(C)] + enum _Unwind_VRS_Result { + _UVRSR_OK = 0, + _UVRSR_NOT_IMPLEMENTED = 1, + _UVRSR_FAILED = 2, + } + #[repr(C)] + enum _Unwind_VRS_RegClass { + _UVRSC_CORE = 0, + _UVRSC_VFP = 1, + _UVRSC_FPA = 2, + _UVRSC_WMMXD = 3, + _UVRSC_WMMXC = 4, + } + #[repr(C)] + enum _Unwind_VRS_DataRepresentation { + _UVRSD_UINT32 = 0, + _UVRSD_VFPX = 1, + _UVRSD_FPAX = 2, + _UVRSD_UINT64 = 3, + _UVRSD_FLOAT = 4, + _UVRSD_DOUBLE = 5, + } + + type _Unwind_Word = libc::c_uint; + extern { + fn _Unwind_VRS_Get(ctx: *mut _Unwind_Context, + klass: _Unwind_VRS_RegClass, + word: _Unwind_Word, + repr: _Unwind_VRS_DataRepresentation, + data: *mut c_void) + -> _Unwind_VRS_Result; + } + + let mut val: _Unwind_Word = 0; + let ptr = &mut val as *mut _Unwind_Word; + let _ = _Unwind_VRS_Get(ctx, _Unwind_VRS_RegClass::_UVRSC_CORE, 15, + _Unwind_VRS_DataRepresentation::_UVRSD_UINT32, + ptr as *mut c_void); + (val & !1) as libc::uintptr_t + } + + // This function doesn't exist on Android or ARM/Linux, so make it same + // to _Unwind_GetIP + #[cfg(any(all(target_os = "android", target_arch = "arm"), + all(target_os = "linux", target_arch = "arm")))] + pub unsafe fn _Unwind_GetIPInfo(ctx: *mut _Unwind_Context, + ip_before_insn: *mut c_int) + -> libc::uintptr_t + { + *ip_before_insn = 0; + _Unwind_GetIP(ctx) + } + + // This function also doesn't exist on Android or ARM/Linux, so make it + // a no-op + #[cfg(any(target_os = "android", + all(target_os = "linux", target_arch = "arm")))] + pub unsafe fn _Unwind_FindEnclosingFunction(pc: *mut c_void) + -> *mut c_void + { + pc + } +} + + diff --git a/backtrace/src/backtrace/mod.rs b/backtrace/src/backtrace/mod.rs new file mode 100644 index 000000000..53fe5b416 --- /dev/null +++ b/backtrace/src/backtrace/mod.rs @@ -0,0 +1,113 @@ +use std::fmt; + +use std::os::raw::c_void; + +/// Inspects the current call-stack, passing all active frames into the closure +/// provided to calculate a stack trace. +/// +/// This function is the workhorse of this library in calculating the stack +/// traces for a program. The given closure `cb` is yielded instances of a +/// `Frame` which represent information about that call frame on the stack. The +/// closure is yielded frames in a top-down fashion (most recently called +/// functions first). +/// +/// The closure's return value is an indication of whether the backtrace should +/// continue. A return value of `false` will terminate the backtrace and return +/// immediately. +/// +/// Once a `Frame` is acquired you will likely want to call `backtrace::resolve` +/// to convert the `ip` (instruction pointer) or symbol address to a `Symbol` +/// through which the name and/or filename/line number can be learned. +/// +/// Note that this is a relatively low-level function and if you'd like to, for +/// example, capture a backtrace to be inspected later, then the `Backtrace` +/// type may be more appropriate. +/// +/// # Example +/// +/// ``` +/// extern crate backtrace; +/// +/// fn main() { +/// backtrace::trace(|frame| { +/// // ... 
+/// +/// true // continue the backtrace +/// }); +/// } +/// ``` +#[inline(never)] // if this is never inlined then the first frame can be known + // to be skipped +pub fn trace bool>(mut cb: F) { + trace_imp(&mut cb) +} + +/// A trait representing one frame of a backtrace, yielded to the `trace` +/// function of this crate. +/// +/// The tracing function's closure will be yielded frames, and the frame is +/// virtually dispatched as the underlying implementation is not always known +/// until runtime. +pub struct Frame { + inner: FrameImp, +} + +impl Frame { + /// Returns the current instruction pointer of this frame. + /// + /// This is normally the next instruction to execute in the frame, but not + /// all implementations list this with 100% accuracy (but it's generally + /// pretty close). + /// + /// It is recommended to pass this value to `backtrace::resolve` to turn it + /// into a symbol name. + pub fn ip(&self) -> *mut c_void { + self.inner.ip() + } + + /// Returns the starting symbol address of the frame of this function. + /// + /// This will attempt to rewind the instruction pointer returned by `ip` to + /// the start of the function, returning that value. In some cases, however, + /// backends will just return `ip` from this function. + /// + /// The returned value can sometimes be used if `backtrace::resolve` failed + /// on the `ip` given above. + pub fn symbol_address(&self) -> *mut c_void { + self.inner.symbol_address() + } +} + +impl fmt::Debug for Frame { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_struct("Frame") + .field("ip", &self.ip()) + .field("symbol_address", &self.symbol_address()) + .finish() + } +} + +cfg_if! { + if #[cfg(all(unix, + not(target_os = "emscripten"), + not(all(target_os = "ios", target_arch = "arm")), + feature = "libunwind"))] { + mod libunwind; + use self::libunwind::trace as trace_imp; + use self::libunwind::Frame as FrameImp; + } else if #[cfg(all(unix, + not(target_os = "emscripten"), + feature = "unix-backtrace"))] { + mod unix_backtrace; + use self::unix_backtrace::trace as trace_imp; + use self::unix_backtrace::Frame as FrameImp; + } else if #[cfg(all(windows, feature = "dbghelp"))] { + mod dbghelp; + use self::dbghelp::trace as trace_imp; + use self::dbghelp::Frame as FrameImp; + } else { + mod noop; + use self::noop::trace as trace_imp; + use self::noop::Frame as FrameImp; + } +} diff --git a/backtrace/src/backtrace/noop.rs b/backtrace/src/backtrace/noop.rs new file mode 100644 index 000000000..8b8f8766e --- /dev/null +++ b/backtrace/src/backtrace/noop.rs @@ -0,0 +1,16 @@ +use std::os::raw::c_void; + +#[inline(always)] +pub fn trace(_cb: &mut FnMut(&super::Frame) -> bool) {} + +pub struct Frame; + +impl Frame { + pub fn ip(&self) -> *mut c_void { + 0 as *mut _ + } + + pub fn symbol_address(&self) -> *mut c_void { + 0 as *mut _ + } +} diff --git a/backtrace/src/backtrace/unix_backtrace.rs b/backtrace/src/backtrace/unix_backtrace.rs new file mode 100644 index 000000000..061bba9ef --- /dev/null +++ b/backtrace/src/backtrace/unix_backtrace.rs @@ -0,0 +1,46 @@ +// Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
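// --- Editor's illustrative sketch (not part of the vendored upstream file) ---
// The closure passed to `trace` returns a bool; the fallback backend below,
// built on the libc backtrace(3) call, honors it by leaving its loop as soon
// as the callback yields `false`. Sketch of capping a walk at a fixed depth,
// assuming a crate that depends on `backtrace`:

extern crate backtrace;

/// Collects at most `limit` instruction pointers from the current stack.
fn top_frames(limit: usize) -> Vec<usize> {
    let mut ips = Vec::new();
    backtrace::trace(|frame| {
        ips.push(frame.ip() as usize);
        ips.len() < limit // returning false stops the walk early
    });
    ips
}
// --- end of editor's sketch ---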
+ +use std::mem; +use std::os::raw::{c_void, c_int}; + +pub struct Frame { + addr: *mut c_void, +} + +impl Frame { + pub fn ip(&self) -> *mut c_void { self.addr } + pub fn symbol_address(&self) -> *mut c_void { self.addr } +} + +extern { + fn backtrace(buf: *mut *mut c_void, sz: c_int) -> c_int; +} + +#[inline(always)] +pub fn trace(cb: &mut FnMut(&super::Frame) -> bool) { + const SIZE: usize = 100; + + let mut buf: [*mut c_void; SIZE]; + let cnt; + unsafe { + buf = mem::zeroed(); + cnt = backtrace(buf.as_mut_ptr(), SIZE as c_int); + } + + for addr in buf[..cnt as usize].iter() { + let cx = super::Frame { + inner: Frame { addr: *addr }, + }; + if !cb(&cx) { + return + } + } +} diff --git a/backtrace/src/capture.rs b/backtrace/src/capture.rs new file mode 100644 index 000000000..570b117b0 --- /dev/null +++ b/backtrace/src/capture.rs @@ -0,0 +1,237 @@ +use std::fmt; +use std::mem; +use std::os::raw::c_void; +use std::path::{Path, PathBuf}; + +use {trace, resolve, SymbolName}; + +/// Representation of an owned and self-contained backtrace. +/// +/// This structure can be used to capture a backtrace at various points in a +/// program and later used to inspect what the backtrace was at that time. +#[derive(Clone)] +#[cfg_attr(feature = "serialize-rustc", derive(RustcDecodable, RustcEncodable))] +#[cfg_attr(feature = "serialize-serde", derive(Deserialize, Serialize))] +pub struct Backtrace { + frames: Vec, +} + +/// Captured version of a frame in a backtrace. +/// +/// This type is returned as a list from `Backtrace::frames` and represents one +/// stack frame in a captured backtrace. +#[derive(Clone)] +#[cfg_attr(feature = "serialize-rustc", derive(RustcDecodable, RustcEncodable))] +#[cfg_attr(feature = "serialize-serde", derive(Deserialize, Serialize))] +pub struct BacktraceFrame { + ip: usize, + symbol_address: usize, + symbols: Option>, +} + +/// Captured version of a symbol in a backtrace. +/// +/// This type is returned as a list from `BacktraceFrame::symbols` and +/// represents the metadata for a symbol in a backtrace. +#[derive(Clone)] +#[cfg_attr(feature = "serialize-rustc", derive(RustcDecodable, RustcEncodable))] +#[cfg_attr(feature = "serialize-serde", derive(Deserialize, Serialize))] +pub struct BacktraceSymbol { + name: Option>, + addr: Option, + filename: Option, + lineno: Option, +} + +impl Backtrace { + /// Captures a backtrace at the callsite of this function, returning an + /// owned representation. + /// + /// This function is useful for representing a backtrace as an object in + /// Rust. This returned value can be sent across threads and printed + /// elsewhere, and the purpose of this value is to be entirely self + /// contained. + /// + /// # Examples + /// + /// ``` + /// use backtrace::Backtrace; + /// + /// let current_backtrace = Backtrace::new(); + /// ``` + pub fn new() -> Backtrace { + let mut bt = Backtrace::new_unresolved(); + bt.resolve(); + return bt + } + + /// Similar to `new` except that this does not resolve any symbols, this + /// simply captures the backtrace as a list of addresses. + /// + /// At a later time the `resolve` function can be called to resolve this + /// backtrace's symbols into readable names. This function exists because + /// the resolution process can sometimes take a significant amount of time + /// whereas any one backtrace may only be rarely printed. 
+ /// + /// # Examples + /// + /// ``` + /// use backtrace::Backtrace; + /// + /// let mut current_backtrace = Backtrace::new_unresolved(); + /// println!("{:?}", current_backtrace); // no symbol names + /// current_backtrace.resolve(); + /// println!("{:?}", current_backtrace); // symbol names now present + /// ``` + pub fn new_unresolved() -> Backtrace { + let mut frames = Vec::new(); + trace(|frame| { + frames.push(BacktraceFrame { + ip: frame.ip() as usize, + symbol_address: frame.symbol_address() as usize, + symbols: None, + }); + true + }); + + Backtrace { frames: frames } + } + + /// Returns the frames from when this backtrace was captured. + /// + /// The first entry of this slice is likely the function `Backtrace::new`, + /// and the last frame is likely something about how this thread or the main + /// function started. + pub fn frames(&self) -> &[BacktraceFrame] { + &self.frames + } + + /// If this backtrace was created from `new_unresolved` then this function + /// will resolve all addresses in the backtrace to their symbolic names. + /// + /// If this backtrace has been previously resolved or was created through + /// `new`, this function does nothing. + pub fn resolve(&mut self) { + for frame in self.frames.iter_mut().filter(|f| f.symbols.is_none()) { + let mut symbols = Vec::new(); + resolve(frame.ip as *mut _, |symbol| { + symbols.push(BacktraceSymbol { + name: symbol.name().map(|m| m.as_bytes().to_vec()), + addr: symbol.addr().map(|a| a as usize), + filename: symbol.filename().map(|m| m.to_path_buf()), + lineno: symbol.lineno(), + }); + }); + frame.symbols = Some(symbols); + } + } +} + +impl From> for Backtrace { + fn from(frames: Vec) -> Self { + Backtrace { + frames: frames + } + } +} + +impl Into> for Backtrace { + fn into(self) -> Vec { + self.frames + } +} + +impl BacktraceFrame { + /// Same as `Frame::ip` + pub fn ip(&self) -> *mut c_void { + self.ip as *mut c_void + } + + /// Same as `Frame::symbol_address` + pub fn symbol_address(&self) -> *mut c_void { + self.symbol_address as *mut c_void + } + + /// Returns the list of symbols that this frame corresponds to. + /// + /// Normally there is only one symbol per frame, but sometimes if a number + /// of functions are inlined into one frame then multiple symbols will be + /// returned. The first symbol listed is the "innermost function", whereas + /// the last symbol is the outermost (last caller). + /// + /// Note that if this frame came from an unresolved backtrace then this will + /// return an empty list. 
+ pub fn symbols(&self) -> &[BacktraceSymbol] { + self.symbols.as_ref().map(|s| &s[..]).unwrap_or(&[]) + } +} + +impl BacktraceSymbol { + /// Same as `Symbol::name` + pub fn name(&self) -> Option { + self.name.as_ref().map(|s| SymbolName::new(s)) + } + + /// Same as `Symbol::addr` + pub fn addr(&self) -> Option<*mut c_void> { + self.addr.map(|s| s as *mut c_void) + } + + /// Same as `Symbol::filename` + pub fn filename(&self) -> Option<&Path> { + self.filename.as_ref().map(|p| &**p) + } + + /// Same as `Symbol::lineno` + pub fn lineno(&self) -> Option { + self.lineno + } +} + +impl fmt::Debug for Backtrace { + fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { + let hex_width = mem::size_of::() * 2 + 2; + + try!(write!(fmt, "stack backtrace:")); + + for (idx, frame) in self.frames().iter().enumerate() { + let ip = frame.ip(); + try!(write!(fmt, "\n{:4}: {:2$?}", idx, ip, hex_width)); + + let symbols = match frame.symbols { + Some(ref s) => s, + None => { + try!(write!(fmt, " - ")); + continue + } + }; + if symbols.len() == 0 { + try!(write!(fmt, " - ")); + } + + for (idx, symbol) in symbols.iter().enumerate() { + if idx != 0 { + try!(write!(fmt, "\n {:1$}", "", hex_width)); + } + + if let Some(name) = symbol.name() { + try!(write!(fmt, " - {}", name)); + } else { + try!(write!(fmt, " - ")); + } + + if let (Some(file), Some(line)) = (symbol.filename(), symbol.lineno()) { + try!(write!(fmt, "\n {:3$}at {}:{}", "", file.display(), line, hex_width)); + } + } + } + + Ok(()) + } +} + +impl Default for Backtrace { + fn default() -> Backtrace { + Backtrace::new() + } +} diff --git a/backtrace/src/dylib.rs b/backtrace/src/dylib.rs new file mode 100644 index 000000000..34fb0bf16 --- /dev/null +++ b/backtrace/src/dylib.rs @@ -0,0 +1,70 @@ +use std::ffi::CString; +use std::marker; +use std::mem; +use std::sync::atomic::{AtomicUsize, Ordering}; + +use libc::{self, c_char, c_void}; + +pub struct Dylib { + pub init: AtomicUsize, +} + +pub struct Symbol { + pub name: &'static str, + pub addr: AtomicUsize, + pub _marker: marker::PhantomData, +} + +impl Dylib { + pub unsafe fn get<'a, T>(&self, sym: &'a Symbol) -> Option<&'a T> { + self.load().and_then(|handle| { + sym.get(handle) + }) + } + + pub unsafe fn init(&self, path: &str) -> bool { + if self.init.load(Ordering::SeqCst) != 0 { + return true + } + let name = CString::new(path).unwrap(); + let ptr = libc::dlopen(name.as_ptr() as *const c_char, libc::RTLD_LAZY); + if ptr.is_null() { + return false + } + match self.init.compare_and_swap(0, ptr as usize, Ordering::SeqCst) { + 0 => {} + _ => { libc::dlclose(ptr); } + } + return true + } + + unsafe fn load(&self) -> Option<*mut c_void> { + match self.init.load(Ordering::SeqCst) { + 0 => None, + n => Some(n as *mut c_void), + } + } +} + +impl Symbol { + unsafe fn get(&self, handle: *mut c_void) -> Option<&T> { + assert_eq!(mem::size_of::(), mem::size_of_val(&self.addr)); + if self.addr.load(Ordering::SeqCst) == 0 { + self.addr.store(fetch(handle, self.name.as_ptr()), Ordering::SeqCst) + } + if self.addr.load(Ordering::SeqCst) == 1 { + None + } else { + mem::transmute::<&AtomicUsize, Option<&T>>(&self.addr) + } + } +} + +unsafe fn fetch(handle: *mut c_void, name: *const u8) -> usize { + let ptr = libc::dlsym(handle, name as *const _); + if ptr.is_null() { + 1 + } else { + ptr as usize + } +} diff --git a/backtrace/src/lib.rs b/backtrace/src/lib.rs new file mode 100644 index 000000000..533500c5f --- /dev/null +++ b/backtrace/src/lib.rs @@ -0,0 +1,178 @@ +//! 
A library for acquiring a backtrace at runtime +//! +//! This library is meant to supplement the `RUST_BACKTRACE=1` support of the +//! standard library by allowing an acquisition of a backtrace at runtime +//! programmatically. The backtraces generated by this library do not need to be +//! parsed, for example, and expose the functionality of multiple backend +//! implementations. +//! +//! # Implementation +//! +//! This library makes use of a number of strategies for actually acquiring a +//! backtrace. For example unix uses libgcc's libunwind bindings by default to +//! acquire a backtrace, but coresymbolication or dladdr is used on OSX to +//! acquire symbol names while linux uses gcc's libbacktrace. +//! +//! When using the default feature set of this library the "most reasonable" set +//! of defaults is chosen for the current platform, but the features activated +//! can also be controlled at a finer granularity. +//! +//! # Platform Support +//! +//! Currently this library is verified to work on Linux, OSX, and Windows, but +//! it may work on other platforms as well. Note that the quality of the +//! backtrace may vary across platforms. +//! +//! # API Principles +//! +//! This library attempts to be as flexible as possible to accommodate different +//! backend implementations of acquiring a backtrace. Consequently the currently +//! exported functions are closure-based as opposed to the likely expected +//! iterator-based versions. This is done due to limitations of the underlying +//! APIs used from the system. +//! +//! # Usage +//! +//! First, add this to your Cargo.toml +//! +//! ```toml +//! [dependencies] +//! backtrace = "0.2" +//! ``` +//! +//! Next: +//! +//! ``` +//! extern crate backtrace; +//! +//! fn main() { +//! backtrace::trace(|frame| { +//! let ip = frame.ip(); +//! let symbol_address = frame.symbol_address(); +//! +//! // Resolve this instruction pointer to a symbol name +//! backtrace::resolve(ip, |symbol| { +//! if let Some(name) = symbol.name() { +//! // ... +//! } +//! if let Some(filename) = symbol.filename() { +//! // ... +//! } +//! }); +//! +//! true // keep going to the next frame +//! }); +//! } +//! ``` + +#![doc(html_root_url = "https://docs.rs/backtrace")] +#![deny(missing_docs)] +#![deny(warnings)] + +#[cfg(unix)] +extern crate libc; +#[cfg(all(windows, feature = "winapi"))] extern crate winapi; + +#[cfg(feature = "serde_derive")] +#[cfg_attr(feature = "serde_derive", macro_use)] +extern crate serde_derive; + +#[cfg(feature = "rustc-serialize")] +extern crate rustc_serialize; + +#[macro_use] +extern crate cfg_if; + +extern crate rustc_demangle; + +#[cfg(feature = "cpp_demangle")] +extern crate cpp_demangle; + +cfg_if! 
{ + if #[cfg(all(feature = "gimli-symbolize", unix, target_os = "linux"))] { + extern crate addr2line; + extern crate findshlibs; + extern crate gimli; + extern crate memmap; + extern crate object; + } +} + +#[allow(dead_code)] // not used everywhere +#[cfg(unix)] +#[macro_use] +mod dylib; + +pub use backtrace::{trace, Frame}; +mod backtrace; + +pub use symbolize::{resolve, Symbol, SymbolName}; +mod symbolize; + +pub use capture::{Backtrace, BacktraceFrame, BacktraceSymbol}; +mod capture; + +#[allow(dead_code)] +struct Bomb { + enabled: bool, +} + +#[allow(dead_code)] +impl Drop for Bomb { + fn drop(&mut self) { + if self.enabled { + panic!("cannot panic during the backtrace function"); + } + } +} + +#[allow(dead_code)] +mod lock { + use std::cell::Cell; + use std::mem; + use std::sync::{Once, Mutex, MutexGuard, ONCE_INIT}; + + pub struct LockGuard(MutexGuard<'static, ()>); + + static mut LOCK: *mut Mutex<()> = 0 as *mut _; + static INIT: Once = ONCE_INIT; + thread_local!(static LOCK_HELD: Cell = Cell::new(false)); + + impl Drop for LockGuard { + fn drop(&mut self) { + LOCK_HELD.with(|slot| { + assert!(slot.get()); + slot.set(false); + }); + } + } + + pub fn lock() -> Option { + if LOCK_HELD.with(|l| l.get()) { + return None + } + LOCK_HELD.with(|s| s.set(true)); + unsafe { + INIT.call_once(|| { + LOCK = mem::transmute(Box::new(Mutex::new(()))); + }); + Some(LockGuard((*LOCK).lock().unwrap())) + } + } +} + +// requires external synchronization +#[cfg(all(windows, feature = "dbghelp"))] +unsafe fn dbghelp_init() { + use winapi::shared::minwindef; + use winapi::um::{dbghelp, processthreadsapi}; + + static mut INITIALIZED: bool = false; + + if !INITIALIZED { + dbghelp::SymInitializeW(processthreadsapi::GetCurrentProcess(), + 0 as *mut _, + minwindef::TRUE); + INITIALIZED = true; + } +} diff --git a/backtrace/src/symbolize/coresymbolication.rs b/backtrace/src/symbolize/coresymbolication.rs new file mode 100644 index 000000000..e8337d555 --- /dev/null +++ b/backtrace/src/symbolize/coresymbolication.rs @@ -0,0 +1,192 @@ +// Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![allow(bad_style)] + +use std::ffi::{CStr, OsStr}; +use std::mem; +use std::os::raw::{c_void, c_char, c_int}; +use std::os::unix::prelude::*; +use std::path::Path; +use std::ptr; +use std::sync::atomic::ATOMIC_USIZE_INIT; + +use libc::{self, Dl_info}; + +use SymbolName; +use dylib::Dylib; +use dylib::Symbol as DylibSymbol; + +#[repr(C)] +#[derive(Copy, Clone, PartialEq)] +pub struct CSTypeRef { + cpp_data: *const c_void, + cpp_obj: *const c_void +} + +const CS_NOW: u64 = 0x80000000; +const CSREF_NULL: CSTypeRef = CSTypeRef { + cpp_data: 0 as *const c_void, + cpp_obj: 0 as *const c_void, +}; + +pub enum Symbol { + Core { + path: *const c_char, + lineno: u32, + name: *const c_char, + addr: *mut c_void, + }, + Dladdr(Dl_info), +} + +impl Symbol { + pub fn name(&self) -> Option { + let name = match *self { + Symbol::Core { name, .. } => name, + Symbol::Dladdr(ref info) => info.dli_sname, + }; + if name.is_null() { + None + } else { + Some(SymbolName::new(unsafe { + CStr::from_ptr(name).to_bytes() + })) + } + } + + pub fn addr(&self) -> Option<*mut c_void> { + match *self { + Symbol::Core { addr, .. 
} => Some(addr), + Symbol::Dladdr(ref info) => Some(info.dli_saddr as *mut _), + } + } + + pub fn filename(&self) -> Option<&Path> { + match *self { + Symbol::Core { path, .. } => { + if path.is_null() { + None + } else { + Some(Path::new(OsStr::from_bytes(unsafe { + CStr::from_ptr(path).to_bytes() + }))) + } + } + Symbol::Dladdr(_) => None, + } + } + + pub fn lineno(&self) -> Option { + match *self { + Symbol::Core { lineno: 0, .. } => None, + Symbol::Core { lineno, .. } => Some(lineno), + Symbol::Dladdr(_) => None, + } + } +} + +static CORESYMBOLICATION: Dylib = Dylib { init: ATOMIC_USIZE_INIT }; + +macro_rules! dlsym { + (extern { + $(fn $name:ident($($arg:ident: $t:ty),*) -> $ret:ty;)* + }) => ($( + static $name: ::dylib::Symbol $ret> = + ::dylib::Symbol { + name: concat!(stringify!($name), "\0"), + addr: ::std::sync::atomic::ATOMIC_USIZE_INIT, + _marker: ::std::marker::PhantomData, + }; + )*) +} + +dlsym! { + extern { + fn CSSymbolicatorCreateWithPid(pid: c_int) -> CSTypeRef; + fn CSRelease(rf: CSTypeRef) -> c_void; + fn CSSymbolicatorGetSymbolWithAddressAtTime( + cs: CSTypeRef, addr: *const c_void, time: u64) -> CSTypeRef; + fn CSSymbolicatorGetSourceInfoWithAddressAtTime( + cs: CSTypeRef, addr: *const c_void, time: u64) -> CSTypeRef; + fn CSSourceInfoGetLineNumber(info: CSTypeRef) -> c_int; + fn CSSourceInfoGetPath(info: CSTypeRef) -> *const c_char; + fn CSSourceInfoGetSymbol(info: CSTypeRef) -> CSTypeRef; + fn CSSymbolGetMangledName(sym: CSTypeRef) -> *const c_char; + fn CSSymbolGetSymbolOwner(sym: CSTypeRef) -> CSTypeRef; + fn CSSymbolOwnerGetBaseAddress(symowner: CSTypeRef) -> *mut c_void; + } +} + +unsafe fn get(sym: &DylibSymbol) -> &T { + CORESYMBOLICATION.get(sym).unwrap() +} + +unsafe fn try_resolve(addr: *mut c_void, cb: &mut FnMut(&super::Symbol)) -> bool { + let path = "/System/Library/PrivateFrameworks/CoreSymbolication.framework\ + /Versions/A/CoreSymbolication"; + if !CORESYMBOLICATION.init(path) { + return false; + } + + let cs = get(&CSSymbolicatorCreateWithPid)(libc::getpid()); + if cs == CSREF_NULL { + return false + } + + let info = get(&CSSymbolicatorGetSourceInfoWithAddressAtTime)( + cs, addr, CS_NOW); + let sym = if info == CSREF_NULL { + get(&CSSymbolicatorGetSymbolWithAddressAtTime)(cs, addr, CS_NOW) + } else { + get(&CSSourceInfoGetSymbol)(info) + }; + + let mut rv = false; + if sym != CSREF_NULL { + let owner = get(&CSSymbolGetSymbolOwner)(sym); + if owner != CSREF_NULL { + cb(&super::Symbol { + inner: Symbol::Core { + path: if info != CSREF_NULL { + get(&CSSourceInfoGetPath)(info) + } else { + ptr::null() + }, + lineno: if info != CSREF_NULL { + get(&CSSourceInfoGetLineNumber)(info) as u32 + } else { + 0 + }, + name: get(&CSSymbolGetMangledName)(sym), + addr: get(&CSSymbolOwnerGetBaseAddress)(owner), + }, + }); + rv = true; + } + } + get(&CSRelease)(cs); + + rv +} + +pub fn resolve(addr: *mut c_void, cb: &mut FnMut(&super::Symbol)) { + unsafe { + if try_resolve(addr, cb) { + return + } + let mut info: Dl_info = mem::zeroed(); + if libc::dladdr(addr as *mut _, &mut info) != 0 { + cb(&super::Symbol { + inner: Symbol::Dladdr(info), + }); + } + } +} diff --git a/backtrace/src/symbolize/dbghelp.rs b/backtrace/src/symbolize/dbghelp.rs new file mode 100644 index 000000000..a2c990612 --- /dev/null +++ b/backtrace/src/symbolize/dbghelp.rs @@ -0,0 +1,118 @@ +// Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. 
+// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![allow(bad_style)] + +use std::ffi::OsString; +use std::mem; +use std::path::Path; +use std::os::windows::prelude::*; +use std::slice; +use winapi::ctypes::*; +use winapi::shared::basetsd::*; +use winapi::shared::minwindef::*; +use winapi::um::processthreadsapi; +use winapi::um::dbghelp; +use winapi::um::dbghelp::*; + +use SymbolName; + +pub struct Symbol { + name: OsString, + addr: *mut c_void, + line: Option, + filename: Option, +} + +impl Symbol { + pub fn name(&self) -> Option { + self.name.to_str().map(|s| SymbolName::new(s.as_bytes())) + } + + pub fn addr(&self) -> Option<*mut c_void> { + Some(self.addr as *mut _) + } + + pub fn filename(&self) -> Option<&Path> { + self.filename.as_ref().map(Path::new) + } + + pub fn lineno(&self) -> Option { + self.line + } +} + +pub fn resolve(addr: *mut c_void, cb: &mut FnMut(&super::Symbol)) { + // According to windows documentation, all dbghelp functions are + // single-threaded. + let _g = ::lock::lock(); + + unsafe { + let size = 2 * MAX_SYM_NAME + mem::size_of::(); + let mut data = vec![0u8; size]; + let info = &mut *(data.as_mut_ptr() as *mut SYMBOL_INFOW); + info.MaxNameLen = MAX_SYM_NAME as ULONG; + // the struct size in C. the value is different to + // `size_of::() - MAX_SYM_NAME + 1` (== 81) + // due to struct alignment. + info.SizeOfStruct = 88; + + let _c = ::dbghelp_init(); + + let mut displacement = 0u64; + let ret = dbghelp::SymFromAddrW(processthreadsapi::GetCurrentProcess(), + addr as DWORD64, + &mut displacement, + info); + if ret != TRUE { + return + } + + // If the symbol name is greater than MaxNameLen, SymFromAddrW will + // give a buffer of (MaxNameLen - 1) characters and set NameLen to + // the real value. + let name_len = ::std::cmp::min(info.NameLen as usize, + info.MaxNameLen as usize - 1); + + let name = slice::from_raw_parts(info.Name.as_ptr() as *const u16, + name_len); + let name = OsString::from_wide(name); + + let mut line = mem::zeroed::(); + line.SizeOfStruct = mem::size_of::() as DWORD; + let mut displacement = 0; + let ret = dbghelp::SymGetLineFromAddrW64(processthreadsapi::GetCurrentProcess(), + addr as DWORD64, + &mut displacement, + &mut line); + let mut filename = None; + let mut lineno = None; + if ret == TRUE { + lineno = Some(line.LineNumber as u32); + + let base = line.FileName; + let mut len = 0; + while *base.offset(len) != 0 { + len += 1; + } + let name = slice::from_raw_parts(base, len as usize); + filename = Some(OsString::from_wide(name)); + } + + cb(&super::Symbol { + inner: Symbol { + name: name, + addr: info.Address as *mut _, + line: lineno, + filename: filename, + }, + }) + } +} diff --git a/backtrace/src/symbolize/dladdr.rs b/backtrace/src/symbolize/dladdr.rs new file mode 100644 index 000000000..d7cdfbb8b --- /dev/null +++ b/backtrace/src/symbolize/dladdr.rs @@ -0,0 +1,59 @@ +// Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
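// --- Editor's illustrative sketch (not part of the vendored upstream file) ---
// Symbolization backends such as the dladdr-based one below are reached
// through the public `backtrace::resolve` API, which may invoke the callback
// zero or more times per address. Sketch, assuming a crate that depends on
// `backtrace`:

extern crate backtrace;

use std::os::raw::c_void;

/// Prints the demangled names (if any) resolved for `addr`.
fn print_names(addr: *mut c_void) {
    backtrace::resolve(addr, |symbol| {
        if let Some(name) = symbol.name() {
            // `SymbolName`'s Display impl prints the demangled form.
            println!("{}", name);
        }
    });
}
// --- end of editor's sketch ---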
+ +use std::ffi::CStr; +use std::mem; +use std::os::raw::c_void; +use std::path::Path; + +use libc::{self, Dl_info}; + +use SymbolName; + +pub struct Symbol { + inner: Dl_info, +} + +impl Symbol { + pub fn name(&self) -> Option { + if self.inner.dli_sname.is_null() { + None + } else { + Some(SymbolName::new(unsafe { + CStr::from_ptr(self.inner.dli_sname).to_bytes() + })) + } + } + + pub fn addr(&self) -> Option<*mut c_void> { + Some(self.inner.dli_saddr as *mut _) + } + + pub fn filename(&self) -> Option<&Path> { + None + } + + pub fn lineno(&self) -> Option { + None + } +} + +pub fn resolve(addr: *mut c_void, cb: &mut FnMut(&super::Symbol)) { + unsafe { + let mut info: super::Symbol = super::Symbol { + inner: Symbol { + inner: mem::zeroed(), + }, + }; + if libc::dladdr(addr as *mut _, &mut info.inner.inner) != 0 { + cb(&info) + } + } +} diff --git a/backtrace/src/symbolize/gimli.rs b/backtrace/src/symbolize/gimli.rs new file mode 100644 index 000000000..2da584116 --- /dev/null +++ b/backtrace/src/symbolize/gimli.rs @@ -0,0 +1,224 @@ +use addr2line; +use findshlibs::{self, Segment, SharedLibrary}; +use gimli; +use memmap::Mmap; +use object::{self, Object}; +use std::cell::RefCell; +use std::env; +use std::fs::File; +use std::mem; +use std::os::raw::c_void; +use std::path::{Path, PathBuf}; +use std::u32; + +use SymbolName; + +const MAPPINGS_CACHE_SIZE: usize = 4; + +type Dwarf<'map> = addr2line::Context>; +type Symbols<'map> = object::SymbolMap<'map>; + +struct Mapping { + // 'static lifetime is a lie to hack around lack of support for self-referential structs. + dwarf: Dwarf<'static>, + symbols: Symbols<'static>, + _map: Mmap, +} + +impl Mapping { + fn new(path: &PathBuf) -> Option { + let file = File::open(path).ok()?; + // TODO: not completely safe, see https://github.com/danburkert/memmap-rs/issues/25 + let map = unsafe { Mmap::map(&file).ok()? }; + let (dwarf, symbols) = { + let object = object::File::parse(&*map).ok()?; + let dwarf = addr2line::Context::new(&object).ok()?; + let symbols = object.symbol_map(); + // Convert to 'static lifetimes. + unsafe { (mem::transmute(dwarf), mem::transmute(symbols)) } + }; + Some(Mapping { + dwarf, + symbols, + _map: map, + }) + } + + // Ensure the 'static lifetimes don't leak. + fn rent(&self, mut f: F) + where + F: FnMut(&Dwarf, &Symbols), + { + f(&self.dwarf, &self.symbols) + } +} + +thread_local! { + // A very small, very simple LRU cache for debug info mappings. + // + // The hit rate should be very high, since the typical stack doesn't cross + // between many shared libraries. + // + // The `addr2line::Context` structures are pretty expensive to create. Its + // cost is expected to be amortized by subsequent `locate` queries, which + // leverage the structures built when constructing `addr2line::Context`s to + // get nice speedups. If we didn't have this cache, that amortization would + // never happen, and symbolicating backtraces would be ssssllllooooowwww. + static MAPPINGS_CACHE: RefCell> + = RefCell::new(Vec::with_capacity(MAPPINGS_CACHE_SIZE)); +} + +fn with_mapping_for_path(path: PathBuf, f: F) +where + F: FnMut(&Dwarf, &Symbols), +{ + MAPPINGS_CACHE.with(|cache| { + let mut cache = cache.borrow_mut(); + + let idx = cache.iter().position(|&(ref p, _)| p == &path); + + // Invariant: after this conditional completes without early returning + // from an error, the cache entry for this path is at index 0. + + if let Some(idx) = idx { + // When the mapping is already in the cache, move it to the front. 
+ if idx != 0 { + let entry = cache.remove(idx); + cache.insert(0, entry); + } + } else { + // When the mapping is not in the cache, create a new mapping, + // insert it into the front of the cache, and evict the oldest cache + // entry if necessary. + let mapping = match Mapping::new(&path) { + None => return, + Some(m) => m, + }; + + if cache.len() == MAPPINGS_CACHE_SIZE { + cache.pop(); + } + + cache.insert(0, (path, mapping)); + } + + cache[0].1.rent(f); + }); +} + +pub fn resolve(addr: *mut c_void, cb: &mut FnMut(&super::Symbol)) { + // First, find the file containing the segment that the given AVMA (after + // relocation) address falls within. Use the containing segment to compute + // the SVMA (before relocation) address. + // + // Note that the OS APIs that `SharedLibrary::each` is implemented with hold + // a lock for the duration of the `each` call, so we want to keep this + // section as short as possible to avoid contention with other threads + // capturing backtraces. + let addr = findshlibs::Avma(addr as *mut u8 as *const u8); + let mut so_info = None; + findshlibs::TargetSharedLibrary::each(|so| { + use findshlibs::IterationControl::*; + + for segment in so.segments() { + if segment.contains_avma(so, addr) { + let addr = so.avma_to_svma(addr); + let path = so.name().to_string_lossy(); + so_info = Some((addr, path.to_string())); + return Break; + } + } + + Continue + }); + let (addr, path) = match so_info { + None => return, + Some((a, p)) => (a, p), + }; + + // Second, fixup the path. Empty path means that this address falls within + // the main executable, not a shared library. + let path = if path.is_empty() { + match env::current_exe() { + Err(_) => return, + Ok(p) => p, + } + } else { + PathBuf::from(path) + }; + + // Finally, get a cached mapping or create a new mapping for this file, and + // evaluate the DWARF info to find the file/line/name for this address. + with_mapping_for_path(path, |dwarf, symbols| { + let mut found_sym = false; + if let Ok(mut frames) = dwarf.find_frames(addr.0 as u64) { + while let Ok(Some(frame)) = frames.next() { + let (file, line) = frame + .location + .map(|l| (l.file, l.line)) + .unwrap_or((None, None)); + let name = frame + .function + .and_then(|f| f.raw_name().ok().map(|f| f.to_string())); + let sym = super::Symbol { + inner: Symbol::new(addr.0 as usize, file, line, name), + }; + cb(&sym); + found_sym = true; + } + } + + // No DWARF info found, so fallback to the symbol table. 
+ if !found_sym { + if let Some(name) = symbols.get(addr.0 as u64).and_then(|x| x.name()) { + let sym = super::Symbol { + inner: Symbol::new(addr.0 as usize, None, None, Some(name.to_string())), + }; + cb(&sym); + } + } + }); +} + +pub struct Symbol { + addr: usize, + file: Option, + line: Option, + name: Option, +} + +impl Symbol { + fn new(addr: usize, + file: Option, + line: Option, + name: Option) + -> Symbol { + Symbol { + addr, + file, + line, + name, + } + } + + pub fn name(&self) -> Option { + self.name.as_ref().map(|s| SymbolName::new(s.as_bytes())) + } + + pub fn addr(&self) -> Option<*mut c_void> { + Some(self.addr as *mut c_void) + } + + pub fn filename(&self) -> Option<&Path> { + self.file.as_ref().map(|f| f.as_ref()) + } + + pub fn lineno(&self) -> Option { + self.line + .and_then(|l| if l > (u32::MAX as u64) { + None + } else { + Some(l as u32) + }) + } +} diff --git a/backtrace/src/symbolize/libbacktrace.rs b/backtrace/src/symbolize/libbacktrace.rs new file mode 100644 index 000000000..865f69181 --- /dev/null +++ b/backtrace/src/symbolize/libbacktrace.rs @@ -0,0 +1,180 @@ +// Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![allow(bad_style)] + +extern crate backtrace_sys as bt; + +use libc::uintptr_t; +use std::ffi::{CStr, OsStr}; +use std::os::raw::{c_void, c_char, c_int}; +use std::os::unix::prelude::*; +use std::path::Path; +use std::ptr; +use std::sync::{ONCE_INIT, Once}; + +use SymbolName; + +pub enum Symbol { + Syminfo { + pc: uintptr_t, + symname: *const c_char, + }, + Pcinfo { + pc: uintptr_t, + filename: *const c_char, + lineno: c_int, + function: *const c_char, + }, +} + +impl Symbol { + pub fn name(&self) -> Option { + let ptr = match *self { + Symbol::Syminfo { symname, .. } => symname, + Symbol::Pcinfo { function, .. } => function, + }; + if ptr.is_null() { + None + } else { + Some(SymbolName::new(unsafe { CStr::from_ptr(ptr).to_bytes() })) + } + } + + pub fn addr(&self) -> Option<*mut c_void> { + let pc = match *self { + Symbol::Syminfo { pc, .. } => pc, + Symbol::Pcinfo { pc, .. } => pc, + }; + if pc == 0 {None} else {Some(pc as *mut _)} + } + + pub fn filename(&self) -> Option<&Path> { + match *self { + Symbol::Syminfo { .. } => None, + Symbol::Pcinfo { filename, .. } => { + Some(Path::new(OsStr::from_bytes(unsafe { + CStr::from_ptr(filename).to_bytes() + }))) + } + } + } + + pub fn lineno(&self) -> Option { + match *self { + Symbol::Syminfo { .. } => None, + Symbol::Pcinfo { lineno, .. 
} => Some(lineno as u32), + } + } +} + +extern fn error_cb(_data: *mut c_void, _msg: *const c_char, + _errnum: c_int) { + // do nothing for now +} + +extern fn syminfo_cb(data: *mut c_void, + pc: uintptr_t, + symname: *const c_char, + _symval: uintptr_t, + _symsize: uintptr_t) { + unsafe { + call(data, &super::Symbol { + inner: Symbol::Syminfo { + pc: pc, + symname: symname, + }, + }); + } +} + +extern fn pcinfo_cb(data: *mut c_void, + pc: uintptr_t, + filename: *const c_char, + lineno: c_int, + function: *const c_char) -> c_int { + unsafe { + if filename.is_null() || function.is_null() { + return -1 + } + call(data, &super::Symbol { + inner: Symbol::Pcinfo { + pc: pc, + filename: filename, + lineno: lineno, + function: function, + }, + }); + return 0 + } +} + +unsafe fn call(data: *mut c_void, sym: &super::Symbol) { + let cb = data as *mut &mut FnMut(&super::Symbol); + let mut bomb = ::Bomb { enabled: true }; + (*cb)(sym); + bomb.enabled = false; +} + +// The libbacktrace API supports creating a state, but it does not +// support destroying a state. I personally take this to mean that a +// state is meant to be created and then live forever. +// +// I would love to register an at_exit() handler which cleans up this +// state, but libbacktrace provides no way to do so. +// +// With these constraints, this function has a statically cached state +// that is calculated the first time this is requested. Remember that +// backtracing all happens serially (one global lock). +// +// Things don't work so well on not-Linux since libbacktrace can't track down +// that executable this is. We at one point used env::current_exe but it turns +// out that there are some serious security issues with that approach. +// +// Specifically, on certain platforms like BSDs, a malicious actor can cause an +// arbitrary file to be placed at the path returned by current_exe. libbacktrace +// does not behave defensively in the presence of ill-formed DWARF information, +// and has been demonstrated to segfault in at least one case. There is no +// evidence at the moment to suggest that a more carefully constructed file +// can't cause arbitrary code execution. As a result of all of this, we don't +// hint libbacktrace with the path to the current process. +unsafe fn init_state() -> *mut bt::backtrace_state { + static mut STATE: *mut bt::backtrace_state = 0 as *mut _; + static INIT: Once = ONCE_INIT; + INIT.call_once(|| { + // Our libbacktrace may not have multithreading support, so + // set `threaded = 0` and synchronize ourselves. 
+ STATE = bt::backtrace_create_state(ptr::null(), 0, error_cb, + ptr::null_mut()); + }); + + STATE +} + +pub fn resolve(symaddr: *mut c_void, mut cb: &mut FnMut(&super::Symbol)) { + let _guard = ::lock::lock(); + + // backtrace errors are currently swept under the rug + unsafe { + let state = init_state(); + if state.is_null() { + return + } + + let ret = bt::backtrace_pcinfo(state, symaddr as uintptr_t, + pcinfo_cb, error_cb, + &mut cb as *mut _ as *mut _); + if ret != 0 { + bt::backtrace_syminfo(state, symaddr as uintptr_t, + syminfo_cb, error_cb, + &mut cb as *mut _ as *mut _); + } + } +} diff --git a/backtrace/src/symbolize/mod.rs b/backtrace/src/symbolize/mod.rs new file mode 100644 index 000000000..cb5bb45f6 --- /dev/null +++ b/backtrace/src/symbolize/mod.rs @@ -0,0 +1,294 @@ +use std::fmt; +#[cfg(not(feature = "cpp_demangle"))] +use std::marker::PhantomData; +use std::os::raw::c_void; +use std::path::Path; +use std::str; +use rustc_demangle::{try_demangle, Demangle}; + +/// Resolve an address to a symbol, passing the symbol to the specified +/// closure. +/// +/// This function will look up the given address in areas such as the local +/// symbol table, dynamic symbol table, or DWARF debug info (depending on the +/// activated implementation) to find symbols to yield. +/// +/// The closure may not be called if resolution could not be performed, and it +/// also may be called more than once in the case of inlined functions. +/// +/// Symbols yielded represent the execution at the specified `addr`, returning +/// file/line pairs for that address (if available). +/// +/// # Example +/// +/// ``` +/// extern crate backtrace; +/// +/// fn main() { +/// backtrace::trace(|frame| { +/// let ip = frame.ip(); +/// +/// backtrace::resolve(ip, |symbol| { +/// // ... +/// }); +/// +/// false // only look at the top frame +/// }); +/// } +/// ``` +pub fn resolve(addr: *mut c_void, mut cb: F) { + resolve_imp(addr, &mut cb) +} + +/// A trait representing the resolution of a symbol in a file. +/// +/// This trait is yielded as a trait object to the closure given to the +/// `backtrace::resolve` function, and it is virtually dispatched as it's +/// unknown which implementation is behind it. +/// +/// A symbol can give contextual information about a function, for example the +/// name, filename, line number, precise address, etc. Not all information is +/// always available in a symbol, however, so all methods return an `Option`. +pub struct Symbol { + inner: SymbolImp, +} + +impl Symbol { + /// Returns the name of this function. + /// + /// The returned structure can be used to query various properties about the + /// symbol name: + /// + /// * The `Display` implementation will print out the demangled symbol. + /// * The raw `str` value of the symbol can be accessed (if it's valid + /// utf-8). + /// * The raw bytes for the symbol name can be accessed. + pub fn name(&self) -> Option { + self.inner.name() + } + + /// Returns the starting address of this function. + pub fn addr(&self) -> Option<*mut c_void> { + self.inner.addr() + } + + /// Returns the file name where this function was defined. + /// + /// This is currently only available when libbacktrace is being used (e.g. + /// unix platforms other than OSX) and when a binary is compiled with + /// debuginfo. If neither of these conditions is met then this will likely + /// return `None`. + pub fn filename(&self) -> Option<&Path> { + self.inner.filename() + } + + /// Returns the line number for where this symbol is currently executing. 
+ /// + /// This return value is typically `Some` if `filename` returns `Some`, and + /// is consequently subject to similar caveats. + pub fn lineno(&self) -> Option { + self.inner.lineno() + } +} + +impl fmt::Debug for Symbol { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let mut d = f.debug_struct("Symbol"); + if let Some(name) = self.name() { + d.field("name", &name); + } + if let Some(addr) = self.addr() { + d.field("addr", &addr); + } + if let Some(filename) = self.filename() { + d.field("filename", &filename); + } + if let Some(lineno) = self.lineno() { + d.field("lineno", &lineno); + } + d.finish() + } +} + + +cfg_if! { + if #[cfg(feature = "cpp_demangle")] { + // Maybe a parsed C++ symbol, if parsing the mangled symbol as Rust + // failed. + struct OptionCppSymbol<'a>(Option<::cpp_demangle::BorrowedSymbol<'a>>); + + impl<'a> OptionCppSymbol<'a> { + fn parse(input: &'a [u8]) -> OptionCppSymbol<'a> { + OptionCppSymbol(::cpp_demangle::BorrowedSymbol::new(input).ok()) + } + + fn none() -> OptionCppSymbol<'a> { + OptionCppSymbol(None) + } + } + } else { + // Make sure to keep this zero-sized, so that the `cpp_demangle` feature + // has no cost when disabled. + struct OptionCppSymbol<'a>(PhantomData<&'a ()>); + + impl<'a> OptionCppSymbol<'a> { + fn parse(_: &'a [u8]) -> OptionCppSymbol<'a> { + OptionCppSymbol(PhantomData) + } + + fn none() -> OptionCppSymbol<'a> { + OptionCppSymbol(PhantomData) + } + } + } +} + +/// A wrapper around a symbol name to provide ergonomic accessors to the +/// demangled name, the raw bytes, the raw string, etc. +// Allow dead code for when the `cpp_demangle` feature is not enabled. +#[allow(dead_code)] +pub struct SymbolName<'a> { + bytes: &'a [u8], + demangled: Option>, + cpp_demangled: OptionCppSymbol<'a>, +} + +impl<'a> SymbolName<'a> { + /// Creates a new symbol name from the raw underlying bytes. + pub fn new(bytes: &'a [u8]) -> SymbolName<'a> { + let str_bytes = str::from_utf8(bytes).ok(); + let demangled = str_bytes.and_then(|s| try_demangle(s).ok()); + + let cpp = if demangled.is_none() { + OptionCppSymbol::parse(bytes) + } else { + OptionCppSymbol::none() + }; + + SymbolName { + bytes: bytes, + demangled: demangled, + cpp_demangled: cpp, + } + } + + /// Returns the raw symbol name as a `str` if the symbols is valid utf-8. + pub fn as_str(&self) -> Option<&'a str> { + self.demangled + .as_ref() + .map(|s| s.as_str()) + .or_else(|| { + str::from_utf8(self.bytes).ok() + }) + } + + /// Returns the raw symbol name as a list of bytes + pub fn as_bytes(&self) -> &'a [u8] { + self.bytes + } +} + +cfg_if! { + if #[cfg(feature = "cpp_demangle")] { + impl<'a> fmt::Display for SymbolName<'a> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + if let Some(ref s) = self.demangled { + s.fmt(f) + } else if let Some(ref cpp) = self.cpp_demangled.0 { + cpp.fmt(f) + } else { + String::from_utf8_lossy(self.bytes).fmt(f) + } + } + } + } else { + impl<'a> fmt::Display for SymbolName<'a> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + if let Some(ref s) = self.demangled { + s.fmt(f) + } else { + String::from_utf8_lossy(self.bytes).fmt(f) + } + } + } + } +} + +cfg_if! 
{ + if #[cfg(feature = "cpp_demangle")] { + impl<'a> fmt::Debug for SymbolName<'a> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + use std::fmt::Write; + + if let Some(ref s) = self.demangled { + return s.fmt(f) + } + + // This may to print if the demangled symbol isn't actually + // valid, so handle the error here gracefully by not propagating + // it outwards. + if let Some(ref cpp) = self.cpp_demangled.0 { + let mut s = String::new(); + if write!(s, "{}", cpp).is_ok() { + return s.fmt(f) + } + } + + String::from_utf8_lossy(self.bytes).fmt(f) + } + } + } else { + impl<'a> fmt::Debug for SymbolName<'a> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + if let Some(ref s) = self.demangled { + s.fmt(f) + } else { + String::from_utf8_lossy(self.bytes).fmt(f) + } + } + } + } +} + +cfg_if! { + if #[cfg(all(windows, feature = "dbghelp"))] { + mod dbghelp; + use self::dbghelp::resolve as resolve_imp; + use self::dbghelp::Symbol as SymbolImp; + } else if #[cfg(all(feature = "gimli-symbolize", + unix, + target_os = "linux"))] { + mod gimli; + use self::gimli::resolve as resolve_imp; + use self::gimli::Symbol as SymbolImp; + } else if #[cfg(all(feature = "libbacktrace", + unix, + not(target_os = "fuchsia"), + not(target_os = "emscripten"), + not(target_os = "macos"), + not(target_os = "ios")))] { + mod libbacktrace; + use self::libbacktrace::resolve as resolve_imp; + use self::libbacktrace::Symbol as SymbolImp; + + // Note that we only enable coresymbolication on iOS when debug assertions + // are enabled because it's helpful in debug mode but it looks like apps get + // rejected from the app store if they use this API, see #92 for more info + } else if #[cfg(all(feature = "coresymbolication", + any(target_os = "macos", + all(target_os = "ios", debug_assertions))))] { + mod coresymbolication; + use self::coresymbolication::resolve as resolve_imp; + use self::coresymbolication::Symbol as SymbolImp; + } else if #[cfg(all(unix, + not(target_os = "emscripten"), + feature = "dladdr"))] { + mod dladdr; + use self::dladdr::resolve as resolve_imp; + use self::dladdr::Symbol as SymbolImp; + } else { + mod noop; + use self::noop::resolve as resolve_imp; + use self::noop::Symbol as SymbolImp; + } +} diff --git a/backtrace/src/symbolize/noop.rs b/backtrace/src/symbolize/noop.rs new file mode 100644 index 000000000..78b2a63f8 --- /dev/null +++ b/backtrace/src/symbolize/noop.rs @@ -0,0 +1,27 @@ +use std::path::Path; +use std::os::raw::c_void; + +use SymbolName; + +pub fn resolve(_addr: *mut c_void, _cb: &mut FnMut(&super::Symbol)) { +} + +pub struct Symbol; + +impl Symbol { + pub fn name(&self) -> Option { + None + } + + pub fn addr(&self) -> Option<*mut c_void> { + None + } + + pub fn filename(&self) -> Option<&Path> { + None + } + + pub fn lineno(&self) -> Option { + None + } +} diff --git a/backtrace/tests/long_fn_name.rs b/backtrace/tests/long_fn_name.rs new file mode 100644 index 000000000..ce93671a6 --- /dev/null +++ b/backtrace/tests/long_fn_name.rs @@ -0,0 +1,57 @@ +extern crate backtrace; + +#[cfg(all(windows, feature = "dbghelp"))] +extern crate winapi; + +use backtrace::Backtrace; + +// 50-character module name +mod _234567890_234567890_234567890_234567890_234567890 { + // 50-character struct name + #[allow(non_camel_case_types)] + pub struct _234567890_234567890_234567890_234567890_234567890(T); + impl _234567890_234567890_234567890_234567890_234567890 { + #[allow(dead_code)] + pub fn new() -> ::Backtrace { + ::Backtrace::new() + } + } +} + +// Long function names must be 
truncated to (MAX_SYM_NAME - 1) characters. +// Only run this test for msvc, since gnu prints "" for all frames. +#[test] +#[cfg(all(windows, feature = "dbghelp", target_env = "msvc"))] +fn test_long_fn_name() { + use winapi::um::dbghelp; + use _234567890_234567890_234567890_234567890_234567890:: + _234567890_234567890_234567890_234567890_234567890 as S; + + // 10 repetitions of struct name, so fully qualified function name is + // atleast 10 * (50 + 50) * 2 = 2000 characters long. + // It's actually longer since it also includes `::`, `<>` and the + // name of the current module + let bt = S::>>>>>>>>>::new(); + println!("{:?}", bt); + + let mut found_long_name_frame = false; + + for frame in bt.frames() { + let symbols = frame.symbols(); + if symbols.is_empty() { + continue; + } + + if let Some(function_name) = symbols[0].name() { + let function_name = function_name.as_str().unwrap(); + if function_name.contains( + "::_234567890_234567890_234567890_234567890_234567890") + { + found_long_name_frame = true; + assert_eq!(function_name.len(), dbghelp::MAX_SYM_NAME - 1); + } + } + } + + assert!(found_long_name_frame); +} diff --git a/backtrace/tests/smoke.rs b/backtrace/tests/smoke.rs new file mode 100644 index 000000000..17cca7be3 --- /dev/null +++ b/backtrace/tests/smoke.rs @@ -0,0 +1,173 @@ +extern crate backtrace; + +use std::os::raw::c_void; +use std::thread; + +static LIBUNWIND: bool = cfg!(all(unix, feature = "libunwind")); +static UNIX_BACKTRACE: bool = cfg!(all(unix, feature = "unix-backtrace")); +static LIBBACKTRACE: bool = cfg!(all(unix, feature = "libbacktrace")) && + !cfg!(target_os = "fuchsia") && !cfg!(target_os = "macos") && + !cfg!(target_os = "ios"); +static CORESYMBOLICATION: bool = cfg!(all(any(target_os = "macos", target_os = "ios"), + feature = "coresymbolication")); +static DLADDR: bool = cfg!(all(unix, feature = "dladdr")) && !cfg!(target_os = "fuchsia"); +static DBGHELP: bool = cfg!(all(windows, feature = "dbghelp")); +static MSVC: bool = cfg!(target_env = "msvc"); +static GIMLI_SYMBOLIZE: bool = cfg!(all(feature = "gimli-symbolize", + unix, + target_os = "linux")); + +#[test] +// FIXME: shouldn't ignore this test on i686-msvc, unsure why it's failing +#[cfg_attr(all(target_arch = "x86", target_env = "msvc"), ignore)] +fn smoke_test_frames() { + frame_1(line!()); + #[inline(never)] fn frame_1(start_line: u32) { frame_2(start_line) } + #[inline(never)] fn frame_2(start_line: u32) { frame_3(start_line) } + #[inline(never)] fn frame_3(start_line: u32) { frame_4(start_line) } + #[inline(never)] fn frame_4(start_line: u32) { + let mut v = Vec::new(); + backtrace::trace(|cx| { + v.push((cx.ip(), cx.symbol_address())); + true + }); + + if v.len() < 5 { + assert!(!LIBUNWIND); + assert!(!UNIX_BACKTRACE); + assert!(!DBGHELP); + return + } + + // On 32-bit windows apparently the first frame isn't our backtrace + // frame but it's actually this frame. I'm not entirely sure why, but at + // least it seems consistent? 
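+        // The `o` offset below shifts every expected frame index down by one in that case,
+        // so the assertions still line up without the usual `trace` frame at index 0.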
+ let o = if cfg!(all(windows, target_pointer_width = "32")) {1} else {0}; + // frame offset 0 is the `backtrace::trace` function, but that's generic + assert_frame(&v, o, 1, frame_4 as usize, "frame_4", + "tests/smoke.rs", start_line + 6); + assert_frame(&v, o, 2, frame_3 as usize, "frame_3", "tests/smoke.rs", + start_line + 3); + assert_frame(&v, o, 3, frame_2 as usize, "frame_2", "tests/smoke.rs", + start_line + 2); + assert_frame(&v, o, 4, frame_1 as usize, "frame_1", "tests/smoke.rs", + start_line + 1); + assert_frame(&v, o, 5, smoke_test_frames as usize, + "smoke_test_frames", "", 0); + } + + fn assert_frame(syms: &[(*mut c_void, *mut c_void)], + offset: usize, + idx: usize, + actual_fn_pointer: usize, + expected_name: &str, + expected_file: &str, + expected_line: u32) { + if offset > idx { return } + let (ip, sym) = syms[idx - offset]; + let ip = ip as usize; + let sym = sym as usize; + assert!(ip >= sym); + assert!(sym >= actual_fn_pointer); + + // windows dbghelp is *quite* liberal (and wrong) in many of its reports + // right now... + if !DBGHELP { + assert!(sym - actual_fn_pointer < 1024); + } + + let mut resolved = 0; + let can_resolve = DLADDR || LIBBACKTRACE || CORESYMBOLICATION || DBGHELP || GIMLI_SYMBOLIZE; + + let mut name = None; + let mut addr = None; + let mut line = None; + let mut file = None; + backtrace::resolve(ip as *mut c_void, |sym| { + resolved += 1; + name = sym.name().map(|v| v.to_string()); + addr = sym.addr(); + line = sym.lineno(); + file = sym.filename().map(|v| v.to_path_buf()); + }); + + // dbghelp doesn't always resolve symbols right now + match resolved { + 0 => return assert!(!can_resolve || DBGHELP), + _ => {} + } + + // * linux dladdr doesn't work (only consults local symbol table) + // * windows dbghelp isn't great for GNU + if can_resolve && + !(cfg!(target_os = "linux") && DLADDR) && + !(DBGHELP && !MSVC) + { + let name = name.expect("didn't find a name"); + assert!(name.contains(expected_name), + "didn't find `{}` in `{}`", expected_name, name); + } + + if can_resolve { + addr.expect("didn't find a symbol"); + } + + if (LIBBACKTRACE || CORESYMBOLICATION || (DBGHELP && MSVC)) && cfg!(debug_assertions) { + let line = line.expect("didn't find a line number"); + let file = file.expect("didn't find a line number"); + if !expected_file.is_empty() { + assert!(file.ends_with(expected_file), + "{:?} didn't end with {:?}", file, expected_file); + } + if expected_line != 0 { + assert!(line == expected_line, + "bad line number on frame for `{}`: {} != {}", + expected_name, line, expected_line); + } + } + } +} + +#[test] +fn many_threads() { + let threads = (0..16).map(|_| { + thread::spawn(|| { + for _ in 0..16 { + backtrace::trace(|frame| { + backtrace::resolve(frame.ip(), |symbol| { + let _s = symbol.name().map(|s| s.to_string()); + }); + true + }); + } + }) + }).collect::>(); + + for t in threads { + t.join().unwrap() + } +} + +#[test] +#[cfg(feature = "rustc-serialize")] +fn is_rustc_serialize() { + extern crate rustc_serialize; + + fn is_encode() {} + fn is_decode() {} + + is_encode::(); + is_decode::(); +} + +#[test] +#[cfg(feature = "serde")] +fn is_serde() { + extern crate serde; + + fn is_serialize() {} + fn is_deserialize() {} + + is_serialize::(); + is_deserialize::(); +} diff --git a/bitflags/.cargo-checksum.json b/bitflags/.cargo-checksum.json new file mode 100644 index 000000000..fd339c87a --- /dev/null +++ b/bitflags/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{},"package":"228047a76f468627ca71776ecdebd732a3423081fcf5125585bcd7c49886ce12"} 
\ No newline at end of file diff --git a/bitflags/CHANGELOG.md b/bitflags/CHANGELOG.md new file mode 100644 index 000000000..1f5b4252c --- /dev/null +++ b/bitflags/CHANGELOG.md @@ -0,0 +1,108 @@ +# 1.0.3 + +- Improve zero value flag handling and documentation ([#157]) + +[#157]: https://github.com/rust-lang-nursery/bitflags/pull/157 + +# 1.0.2 + +- 30% improvement in compile time of bitflags crate ([#156]) + +- Documentation improvements ([#153]) + +- Implementation cleanup ([#149]) + +[#156]: https://github.com/rust-lang-nursery/bitflags/pull/156 +[#153]: https://github.com/rust-lang-nursery/bitflags/pull/153 +[#149]: https://github.com/rust-lang-nursery/bitflags/pull/149 + +# 1.0.1 +- Add support for `pub(restricted)` specifier on the bitflags struct ([#135]) +- Optimize performance of `all()` when called from a separate crate ([#136]) + +[#135]: https://github.com/rust-lang-nursery/bitflags/pull/135 +[#136]: https://github.com/rust-lang-nursery/bitflags/pull/136 + +# 1.0.0 +- **[breaking change]** Macro now generates [associated constants](https://doc.rust-lang.org/reference/items.html#associated-constants) ([#24]) + +- **[breaking change]** Minimum supported version is Rust **1.20**, due to usage of associated constants + +- After being broken in 0.9, the `#[deprecated]` attribute is now supported again ([#112]) + +- Other improvements to unit tests and documentation ([#106] and [#115]) + +[#24]: https://github.com/rust-lang-nursery/bitflags/pull/24 +[#106]: https://github.com/rust-lang-nursery/bitflags/pull/106 +[#112]: https://github.com/rust-lang-nursery/bitflags/pull/112 +[#115]: https://github.com/rust-lang-nursery/bitflags/pull/115 + +## How to update your code to use associated constants +Assuming the following structure definition: +```rust +bitflags! 
{ + struct Something: u8 { + const FOO = 0b01, + const BAR = 0b10 + } +} +``` +In 0.9 and older you could do: +```rust +let x = FOO.bits | BAR.bits; +``` +Now you must use: +```rust +let x = Something::FOO.bits | Something::BAR.bits; +``` + +# 0.9.1 +- Fix the implementation of `Formatting` traits when other formatting traits were present in scope ([#105]) + +[#105]: https://github.com/rust-lang-nursery/bitflags/pull/105 + +# 0.9.0 +- **[breaking change]** Use struct keyword instead of flags to define bitflag types ([#84]) + +- **[breaking change]** Terminate const items with semicolons instead of commas ([#87]) + +- Implement the `Hex`, `Octal`, and `Binary` formatting traits ([#86]) + +- Printing an empty flag value with the `Debug` trait now prints "(empty)" instead of nothing ([#85]) + +- The `bitflags!` macro can now be used inside of a fn body, to define a type local to that function ([#74]) + +[#74]: https://github.com/rust-lang-nursery/bitflags/pull/74 +[#84]: https://github.com/rust-lang-nursery/bitflags/pull/84 +[#85]: https://github.com/rust-lang-nursery/bitflags/pull/85 +[#86]: https://github.com/rust-lang-nursery/bitflags/pull/86 +[#87]: https://github.com/rust-lang-nursery/bitflags/pull/87 + +# 0.8.2 +- Update feature flag used when building bitflags as a dependency of the Rust toolchain + +# 0.8.1 +- Allow bitflags to be used as a dependency of the Rust toolchain + +# 0.8.0 +- Add support for the experimental `i128` and `u128` integer types ([#57]) +- Add set method: `flags.set(SOME_FLAG, true)` or `flags.set(SOME_FLAG, false)` ([#55]) + This may break code that defines its own set method + +[#55]: https://github.com/rust-lang-nursery/bitflags/pull/55 +[#57]: https://github.com/rust-lang-nursery/bitflags/pull/57 + +# 0.7.1 +*(yanked)* + +# 0.7.0 +- Implement the Extend trait ([#49]) +- Allow definitions inside the `bitflags!` macro to refer to items imported from other modules ([#51]) + +[#49]: https://github.com/rust-lang-nursery/bitflags/pull/49 +[#51]: https://github.com/rust-lang-nursery/bitflags/pull/51 + +# 0.6.0 +- The `no_std` feature was removed as it is now the default +- The `assignment_operators` feature was remove as it is now enabled by default +- Some clippy suggestions have been applied diff --git a/bitflags/CODE_OF_CONDUCT.md b/bitflags/CODE_OF_CONDUCT.md new file mode 100644 index 000000000..f7add90ae --- /dev/null +++ b/bitflags/CODE_OF_CONDUCT.md @@ -0,0 +1,73 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of experience, +education, socio-economic status, nationality, personal appearance, race, +religion, or sexual identity and orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or +reject comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct, or to ban temporarily or +permanently any contributor for other behaviors that they deem inappropriate, +threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported by contacting the project team at coc@senaite.org. All +complaints will be reviewed and investigated and will result in a response that +is deemed necessary and appropriate to the circumstances. The project team is +obligated to maintain confidentiality with regard to the reporter of an incident. +Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good +faith may face temporary or permanent repercussions as determined by other +members of the project's leadership. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, +available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html + +[homepage]: https://www.contributor-covenant.org \ No newline at end of file diff --git a/bitflags/Cargo.toml b/bitflags/Cargo.toml new file mode 100644 index 000000000..956e9e976 --- /dev/null +++ b/bitflags/Cargo.toml @@ -0,0 +1,33 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. 
If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "bitflags" +version = "1.0.4" +authors = ["The Rust Project Developers"] +exclude = [".travis.yml", "appveyor.yml", "bors.toml"] +description = "A macro to generate structures which behave like bitflags.\n" +homepage = "https://github.com/bitflags/bitflags" +documentation = "https://docs.rs/bitflags" +readme = "README.md" +keywords = ["bit", "bitmask", "bitflags", "flags"] +categories = ["no-std"] +license = "MIT/Apache-2.0" +repository = "https://github.com/bitflags/bitflags" +[package.metadata.docs.rs] +features = ["example_generated"] + +[features] +default = [] +example_generated = [] +[badges.travis-ci] +repository = "bitflags/bitflags" diff --git a/bitflags/LICENSE-APACHE b/bitflags/LICENSE-APACHE new file mode 100644 index 000000000..16fe87b06 --- /dev/null +++ b/bitflags/LICENSE-APACHE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/bitflags/LICENSE-MIT b/bitflags/LICENSE-MIT new file mode 100644 index 000000000..39d4bdb5a --- /dev/null +++ b/bitflags/LICENSE-MIT @@ -0,0 +1,25 @@ +Copyright (c) 2014 The Rust Project Developers + +Permission is hereby granted, free of charge, to any +person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without +limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software +is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions +of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. 
diff --git a/bitflags/README.md b/bitflags/README.md new file mode 100644 index 000000000..df12934c3 --- /dev/null +++ b/bitflags/README.md @@ -0,0 +1,34 @@ +bitflags +======== + +[![Build Status](https://travis-ci.com/bitflags/bitflags.svg?branch=master)](https://travis-ci.com/bitflags/bitflags) +[![Join the chat at https://gitter.im/bitflags/Lobby](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/bitflags/Lobby?utm_source=badge&utm_medium=badge&utm_content=badge) +[![Latest version](https://img.shields.io/crates/v/bitflags.svg)](https://crates.io/crates/bitflags) +[![Documentation](https://docs.rs/bitflags/badge.svg)](https://docs.rs/bitflags) +![Minimum rustc version](https://img.shields.io/badge/rustc-1.20+-yellow.svg) +![License](https://img.shields.io/crates/l/bitflags.svg) + +A Rust macro to generate structures which behave like a set of bitflags + +- [Documentation](https://docs.rs/bitflags) +- [Release notes](https://github.com/bitflags/bitflags/releases) + +## Usage + +Add this to your `Cargo.toml`: + +```toml +[dependencies] +bitflags = "1.0" +``` + +and this to your crate root: + +```rust +#[macro_use] +extern crate bitflags; +``` + +## Rust Version Support + +The minimum supported Rust version is 1.20 due to use of associated constants. diff --git a/bitflags/src/example_generated.rs b/bitflags/src/example_generated.rs new file mode 100644 index 000000000..cf188d99c --- /dev/null +++ b/bitflags/src/example_generated.rs @@ -0,0 +1,14 @@ +//! This module shows an example of code generated by the macro. **IT MUST NOT BE USED OUTSIDE THIS +//! CRATE**. + +bitflags! { + /// This is the same `Flags` struct defined in the [crate level example](../index.html#example). + /// Note that this struct is just for documentation purposes only, it must not be used outside + /// this crate. + pub struct Flags: u32 { + const A = 0b00000001; + const B = 0b00000010; + const C = 0b00000100; + const ABC = Self::A.bits | Self::B.bits | Self::C.bits; + } +} diff --git a/bitflags/src/lib.rs b/bitflags/src/lib.rs new file mode 100644 index 000000000..9e1bcfd6c --- /dev/null +++ b/bitflags/src/lib.rs @@ -0,0 +1,1229 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! A typesafe bitmask flag generator useful for sets of C-style bitmask flags. +//! It can be used for creating typesafe wrappers around C APIs. +//! +//! The `bitflags!` macro generates a `struct` that manages a set of flags. The +//! flags should only be defined for integer types, otherwise unexpected type +//! errors may occur at compile time. +//! +//! # Example +//! +//! ``` +//! #[macro_use] +//! extern crate bitflags; +//! +//! bitflags! { +//! struct Flags: u32 { +//! const A = 0b00000001; +//! const B = 0b00000010; +//! const C = 0b00000100; +//! const ABC = Self::A.bits | Self::B.bits | Self::C.bits; +//! } +//! } +//! +//! fn main() { +//! let e1 = Flags::A | Flags::C; +//! let e2 = Flags::B | Flags::C; +//! assert_eq!((e1 | e2), Flags::ABC); // union +//! assert_eq!((e1 & e2), Flags::C); // intersection +//! assert_eq!((e1 - e2), Flags::A); // set difference +//! assert_eq!(!e2, Flags::A); // set complement +//! } +//! ``` +//! +//! 
See [`example_generated::Flags`](./example_generated/struct.Flags.html) for documentation of code +//! generated by the above `bitflags!` expansion. +//! +//! The generated `struct`s can also be extended with type and trait +//! implementations: +//! +//! ``` +//! #[macro_use] +//! extern crate bitflags; +//! +//! use std::fmt; +//! +//! bitflags! { +//! struct Flags: u32 { +//! const A = 0b00000001; +//! const B = 0b00000010; +//! } +//! } +//! +//! impl Flags { +//! pub fn clear(&mut self) { +//! self.bits = 0; // The `bits` field can be accessed from within the +//! // same module where the `bitflags!` macro was invoked. +//! } +//! } +//! +//! impl fmt::Display for Flags { +//! fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { +//! write!(f, "hi!") +//! } +//! } +//! +//! fn main() { +//! let mut flags = Flags::A | Flags::B; +//! flags.clear(); +//! assert!(flags.is_empty()); +//! assert_eq!(format!("{}", flags), "hi!"); +//! assert_eq!(format!("{:?}", Flags::A | Flags::B), "A | B"); +//! assert_eq!(format!("{:?}", Flags::B), "B"); +//! } +//! ``` +//! +//! # Visibility +//! +//! The generated struct and its associated flag constants are not exported +//! out of the current module by default. A definition can be exported out of +//! the current module by adding `pub` before `flags`: +//! +//! ``` +//! #[macro_use] +//! extern crate bitflags; +//! +//! mod example { +//! bitflags! { +//! pub struct Flags1: u32 { +//! const A = 0b00000001; +//! } +//! } +//! bitflags! { +//! # pub +//! struct Flags2: u32 { +//! const B = 0b00000010; +//! } +//! } +//! } +//! +//! fn main() { +//! let flag1 = example::Flags1::A; +//! let flag2 = example::Flags2::B; // error: const `B` is private +//! } +//! ``` +//! +//! # Attributes +//! +//! Attributes can be attached to the generated `struct` by placing them +//! before the `flags` keyword. +//! +//! # Trait implementations +//! +//! The `Copy`, `Clone`, `PartialEq`, `Eq`, `PartialOrd`, `Ord` and `Hash` +//! traits automatically derived for the `struct` using the `derive` attribute. +//! Additional traits can be derived by providing an explicit `derive` +//! attribute on `flags`. +//! +//! The `Extend` and `FromIterator` traits are implemented for the `struct`, +//! too: `Extend` adds the union of the instances of the `struct` iterated over, +//! while `FromIterator` calculates the union. +//! +//! The `Binary`, `Debug`, `LowerExp`, `Octal` and `UpperExp` trait is also +//! implemented by displaying the bits value of the internal struct. +//! +//! ## Operators +//! +//! The following operator traits are implemented for the generated `struct`: +//! +//! - `BitOr` and `BitOrAssign`: union +//! - `BitAnd` and `BitAndAssign`: intersection +//! - `BitXor` and `BitXorAssign`: toggle +//! - `Sub` and `SubAssign`: set difference +//! - `Not`: set complement +//! +//! # Methods +//! +//! The following methods are defined for the generated `struct`: +//! +//! - `empty`: an empty set of flags +//! - `all`: the set of all flags +//! - `bits`: the raw value of the flags currently stored +//! - `from_bits`: convert from underlying bit representation, unless that +//! representation contains bits that do not correspond to a flag +//! - `from_bits_truncate`: convert from underlying bit representation, dropping +//! any bits that do not correspond to flags +//! - `is_empty`: `true` if no flags are currently stored +//! - `is_all`: `true` if all flags are currently set +//! - `intersects`: `true` if there are flags common to both `self` and `other` +//! 
- `contains`: `true` all of the flags in `other` are contained within `self` +//! - `insert`: inserts the specified flags in-place +//! - `remove`: removes the specified flags in-place +//! - `toggle`: the specified flags will be inserted if not present, and removed +//! if they are. +//! - `set`: inserts or removes the specified flags depending on the passed value +//! +//! ## Default +//! +//! The `Default` trait is not automatically implemented for the generated struct. +//! +//! If your default value is equal to `0` (which is the same value as calling `empty()` +//! on the generated struct), you can simply derive `Default`: +//! +//! ``` +//! #[macro_use] +//! extern crate bitflags; +//! +//! bitflags! { +//! // Results in default value with bits: 0 +//! #[derive(Default)] +//! struct Flags: u32 { +//! const A = 0b00000001; +//! const B = 0b00000010; +//! const C = 0b00000100; +//! } +//! } +//! +//! fn main() { +//! let derived_default: Flags = Default::default(); +//! assert_eq!(derived_default.bits(), 0); +//! } +//! ``` +//! +//! If your default value is not equal to `0` you need to implement `Default` yourself: +//! +//! ``` +//! #[macro_use] +//! extern crate bitflags; +//! +//! bitflags! { +//! struct Flags: u32 { +//! const A = 0b00000001; +//! const B = 0b00000010; +//! const C = 0b00000100; +//! } +//! } +//! +//! // explicit `Default` implementation +//! impl Default for Flags { +//! fn default() -> Flags { +//! Flags::A | Flags::C +//! } +//! } +//! +//! fn main() { +//! let implemented_default: Flags = Default::default(); +//! assert_eq!(implemented_default, (Flags::A | Flags::C)); +//! } +//! ``` +//! +//! # Zero Flags +//! +//! Flags with a value equal to zero will have some strange behavior that one should be aware of. +//! +//! ``` +//! #[macro_use] +//! extern crate bitflags; +//! +//! bitflags! { +//! struct Flags: u32 { +//! const NONE = 0b00000000; +//! const SOME = 0b00000001; +//! } +//! } +//! +//! fn main() { +//! let empty = Flags::empty(); +//! let none = Flags::NONE; +//! let some = Flags::SOME; +//! +//! // Zero flags are treated as always present +//! assert!(empty.contains(Flags::NONE)); +//! assert!(none.contains(Flags::NONE)); +//! assert!(some.contains(Flags::NONE)); +//! +//! // Zero flags will be ignored when testing for emptiness +//! assert!(none.is_empty()); +//! } +//! ``` + +#![no_std] +#![doc(html_root_url = "https://docs.rs/bitflags/1.0.4")] + +#[cfg(test)] +#[macro_use] +extern crate std; + +// Re-export libcore using an alias so that the macros can work without +// requiring `extern crate core` downstream. +#[doc(hidden)] +pub extern crate core as _core; + +/// The macro used to generate the flag structure. +/// +/// See the [crate level docs](../bitflags/index.html) for complete documentation. +/// +/// # Example +/// +/// ``` +/// #[macro_use] +/// extern crate bitflags; +/// +/// bitflags! 
{ +/// struct Flags: u32 { +/// const A = 0b00000001; +/// const B = 0b00000010; +/// const C = 0b00000100; +/// const ABC = Self::A.bits | Self::B.bits | Self::C.bits; +/// } +/// } +/// +/// fn main() { +/// let e1 = Flags::A | Flags::C; +/// let e2 = Flags::B | Flags::C; +/// assert_eq!((e1 | e2), Flags::ABC); // union +/// assert_eq!((e1 & e2), Flags::C); // intersection +/// assert_eq!((e1 - e2), Flags::A); // set difference +/// assert_eq!(!e2, Flags::A); // set complement +/// } +/// ``` +/// +/// The generated `struct`s can also be extended with type and trait +/// implementations: +/// +/// ``` +/// #[macro_use] +/// extern crate bitflags; +/// +/// use std::fmt; +/// +/// bitflags! { +/// struct Flags: u32 { +/// const A = 0b00000001; +/// const B = 0b00000010; +/// } +/// } +/// +/// impl Flags { +/// pub fn clear(&mut self) { +/// self.bits = 0; // The `bits` field can be accessed from within the +/// // same module where the `bitflags!` macro was invoked. +/// } +/// } +/// +/// impl fmt::Display for Flags { +/// fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { +/// write!(f, "hi!") +/// } +/// } +/// +/// fn main() { +/// let mut flags = Flags::A | Flags::B; +/// flags.clear(); +/// assert!(flags.is_empty()); +/// assert_eq!(format!("{}", flags), "hi!"); +/// assert_eq!(format!("{:?}", Flags::A | Flags::B), "A | B"); +/// assert_eq!(format!("{:?}", Flags::B), "B"); +/// } +/// ``` +#[macro_export(local_inner_macros)] +macro_rules! bitflags { + ( + $(#[$outer:meta])* + pub struct $BitFlags:ident: $T:ty { + $( + $(#[$inner:ident $($args:tt)*])* + const $Flag:ident = $value:expr; + )+ + } + ) => { + __bitflags! { + $(#[$outer])* + (pub) $BitFlags: $T { + $( + $(#[$inner $($args)*])* + $Flag = $value; + )+ + } + } + }; + ( + $(#[$outer:meta])* + struct $BitFlags:ident: $T:ty { + $( + $(#[$inner:ident $($args:tt)*])* + const $Flag:ident = $value:expr; + )+ + } + ) => { + __bitflags! { + $(#[$outer])* + () $BitFlags: $T { + $( + $(#[$inner $($args)*])* + $Flag = $value; + )+ + } + } + }; + ( + $(#[$outer:meta])* + pub ($($vis:tt)+) struct $BitFlags:ident: $T:ty { + $( + $(#[$inner:ident $($args:tt)*])* + const $Flag:ident = $value:expr; + )+ + } + ) => { + __bitflags! { + $(#[$outer])* + (pub ($($vis)+)) $BitFlags: $T { + $( + $(#[$inner $($args)*])* + $Flag = $value; + )+ + } + } + }; +} + +#[macro_export(local_inner_macros)] +#[doc(hidden)] +macro_rules! __bitflags { + ( + $(#[$outer:meta])* + ($($vis:tt)*) $BitFlags:ident: $T:ty { + $( + $(#[$inner:ident $($args:tt)*])* + $Flag:ident = $value:expr; + )+ + } + ) => { + #[derive(Copy, PartialEq, Eq, Clone, PartialOrd, Ord, Hash)] + $(#[$outer])* + $($vis)* struct $BitFlags { + bits: $T, + } + + __impl_bitflags! { + $BitFlags: $T { + $( + $(#[$inner $($args)*])* + $Flag = $value; + )+ + } + } + }; +} + +#[macro_export(local_inner_macros)] +#[doc(hidden)] +macro_rules! __impl_bitflags { + ( + $BitFlags:ident: $T:ty { + $( + $(#[$attr:ident $($args:tt)*])* + $Flag:ident = $value:expr; + )+ + } + ) => { + impl $crate::_core::fmt::Debug for $BitFlags { + fn fmt(&self, f: &mut $crate::_core::fmt::Formatter) -> $crate::_core::fmt::Result { + // This convoluted approach is to handle #[cfg]-based flag + // omission correctly. For example it needs to support: + // + // #[cfg(unix)] const A: Flag = /* ... */; + // #[cfg(windows)] const B: Flag = /* ... */; + + // Unconditionally define a check for every flag, even disabled + // ones. 
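+            // The default methods below all return `false`; real checks are only provided
+            // further down for flags whose #[cfg] attributes are active.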
+ #[allow(non_snake_case)] + trait __BitFlags { + $( + #[inline] + fn $Flag(&self) -> bool { false } + )+ + } + + // Conditionally override the check for just those flags that + // are not #[cfg]ed away. + impl __BitFlags for $BitFlags { + $( + __impl_bitflags! { + #[allow(deprecated)] + #[inline] + $(? #[$attr $($args)*])* + fn $Flag(&self) -> bool { + if Self::$Flag.bits == 0 && self.bits != 0 { + false + } else { + self.bits & Self::$Flag.bits == Self::$Flag.bits + } + } + } + )+ + } + + let mut first = true; + $( + if <$BitFlags as __BitFlags>::$Flag(self) { + if !first { + f.write_str(" | ")?; + } + first = false; + f.write_str(__bitflags_stringify!($Flag))?; + } + )+ + if first { + f.write_str("(empty)")?; + } + Ok(()) + } + } + impl $crate::_core::fmt::Binary for $BitFlags { + fn fmt(&self, f: &mut $crate::_core::fmt::Formatter) -> $crate::_core::fmt::Result { + $crate::_core::fmt::Binary::fmt(&self.bits, f) + } + } + impl $crate::_core::fmt::Octal for $BitFlags { + fn fmt(&self, f: &mut $crate::_core::fmt::Formatter) -> $crate::_core::fmt::Result { + $crate::_core::fmt::Octal::fmt(&self.bits, f) + } + } + impl $crate::_core::fmt::LowerHex for $BitFlags { + fn fmt(&self, f: &mut $crate::_core::fmt::Formatter) -> $crate::_core::fmt::Result { + $crate::_core::fmt::LowerHex::fmt(&self.bits, f) + } + } + impl $crate::_core::fmt::UpperHex for $BitFlags { + fn fmt(&self, f: &mut $crate::_core::fmt::Formatter) -> $crate::_core::fmt::Result { + $crate::_core::fmt::UpperHex::fmt(&self.bits, f) + } + } + + #[allow(dead_code)] + impl $BitFlags { + $( + $(#[$attr $($args)*])* + pub const $Flag: $BitFlags = $BitFlags { bits: $value }; + )+ + + /// Returns an empty set of flags. + #[inline] + pub fn empty() -> $BitFlags { + $BitFlags { bits: 0 } + } + + /// Returns the set containing all flags. + #[inline] + pub fn all() -> $BitFlags { + // See `Debug::fmt` for why this approach is taken. + #[allow(non_snake_case)] + trait __BitFlags { + $( + #[inline] + fn $Flag() -> $T { 0 } + )+ + } + impl __BitFlags for $BitFlags { + $( + __impl_bitflags! { + #[allow(deprecated)] + #[inline] + $(? #[$attr $($args)*])* + fn $Flag() -> $T { Self::$Flag.bits } + } + )+ + } + $BitFlags { bits: $(<$BitFlags as __BitFlags>::$Flag())|+ } + } + + /// Returns the raw value of the flags currently stored. + #[inline] + pub fn bits(&self) -> $T { + self.bits + } + + /// Convert from underlying bit representation, unless that + /// representation contains bits that do not correspond to a flag. + #[inline] + pub fn from_bits(bits: $T) -> $crate::_core::option::Option<$BitFlags> { + if (bits & !$BitFlags::all().bits()) == 0 { + $crate::_core::option::Option::Some($BitFlags { bits }) + } else { + $crate::_core::option::Option::None + } + } + + /// Convert from underlying bit representation, dropping any bits + /// that do not correspond to flags. + #[inline] + pub fn from_bits_truncate(bits: $T) -> $BitFlags { + $BitFlags { bits } & $BitFlags::all() + } + + /// Returns `true` if no flags are currently stored. + #[inline] + pub fn is_empty(&self) -> bool { + *self == $BitFlags::empty() + } + + /// Returns `true` if all flags are currently set. + #[inline] + pub fn is_all(&self) -> bool { + *self == $BitFlags::all() + } + + /// Returns `true` if there are flags common to both `self` and `other`. + #[inline] + pub fn intersects(&self, other: $BitFlags) -> bool { + !(*self & other).is_empty() + } + + /// Returns `true` all of the flags in `other` are contained within `self`. 
+ #[inline] + pub fn contains(&self, other: $BitFlags) -> bool { + (*self & other) == other + } + + /// Inserts the specified flags in-place. + #[inline] + pub fn insert(&mut self, other: $BitFlags) { + self.bits |= other.bits; + } + + /// Removes the specified flags in-place. + #[inline] + pub fn remove(&mut self, other: $BitFlags) { + self.bits &= !other.bits; + } + + /// Toggles the specified flags in-place. + #[inline] + pub fn toggle(&mut self, other: $BitFlags) { + self.bits ^= other.bits; + } + + /// Inserts or removes the specified flags depending on the passed value. + #[inline] + pub fn set(&mut self, other: $BitFlags, value: bool) { + if value { + self.insert(other); + } else { + self.remove(other); + } + } + } + + impl $crate::_core::ops::BitOr for $BitFlags { + type Output = $BitFlags; + + /// Returns the union of the two sets of flags. + #[inline] + fn bitor(self, other: $BitFlags) -> $BitFlags { + $BitFlags { bits: self.bits | other.bits } + } + } + + impl $crate::_core::ops::BitOrAssign for $BitFlags { + + /// Adds the set of flags. + #[inline] + fn bitor_assign(&mut self, other: $BitFlags) { + self.bits |= other.bits; + } + } + + impl $crate::_core::ops::BitXor for $BitFlags { + type Output = $BitFlags; + + /// Returns the left flags, but with all the right flags toggled. + #[inline] + fn bitxor(self, other: $BitFlags) -> $BitFlags { + $BitFlags { bits: self.bits ^ other.bits } + } + } + + impl $crate::_core::ops::BitXorAssign for $BitFlags { + + /// Toggles the set of flags. + #[inline] + fn bitxor_assign(&mut self, other: $BitFlags) { + self.bits ^= other.bits; + } + } + + impl $crate::_core::ops::BitAnd for $BitFlags { + type Output = $BitFlags; + + /// Returns the intersection between the two sets of flags. + #[inline] + fn bitand(self, other: $BitFlags) -> $BitFlags { + $BitFlags { bits: self.bits & other.bits } + } + } + + impl $crate::_core::ops::BitAndAssign for $BitFlags { + + /// Disables all flags disabled in the set. + #[inline] + fn bitand_assign(&mut self, other: $BitFlags) { + self.bits &= other.bits; + } + } + + impl $crate::_core::ops::Sub for $BitFlags { + type Output = $BitFlags; + + /// Returns the set difference of the two sets of flags. + #[inline] + fn sub(self, other: $BitFlags) -> $BitFlags { + $BitFlags { bits: self.bits & !other.bits } + } + } + + impl $crate::_core::ops::SubAssign for $BitFlags { + + /// Disables all flags enabled in the set. + #[inline] + fn sub_assign(&mut self, other: $BitFlags) { + self.bits &= !other.bits; + } + } + + impl $crate::_core::ops::Not for $BitFlags { + type Output = $BitFlags; + + /// Returns the complement of this set of flags. + #[inline] + fn not(self) -> $BitFlags { + $BitFlags { bits: !self.bits } & $BitFlags::all() + } + } + + impl $crate::_core::iter::Extend<$BitFlags> for $BitFlags { + fn extend>(&mut self, iterator: T) { + for item in iterator { + self.insert(item) + } + } + } + + impl $crate::_core::iter::FromIterator<$BitFlags> for $BitFlags { + fn from_iter>(iterator: T) -> $BitFlags { + let mut result = Self::empty(); + result.extend(iterator); + result + } + } + }; + + // Every attribute that the user writes on a const is applied to the + // corresponding const that we generate, but within the implementation of + // Debug and all() we want to ignore everything but #[cfg] attributes. In + // particular, including a #[deprecated] attribute on those items would fail + // to compile. + // https://github.com/bitflags/bitflags/issues/109 + // + // Input: + // + // ? #[cfg(feature = "advanced")] + // ? 
#[deprecated(note = "Use somthing else.")] + // ? #[doc = r"High quality documentation."] + // fn f() -> i32 { /* ... */ } + // + // Output: + // + // #[cfg(feature = "advanced")] + // fn f() -> i32 { /* ... */ } + ( + $(#[$filtered:meta])* + ? #[cfg $($cfgargs:tt)*] + $(? #[$rest:ident $($restargs:tt)*])* + fn $($item:tt)* + ) => { + __impl_bitflags! { + $(#[$filtered])* + #[cfg $($cfgargs)*] + $(? #[$rest $($restargs)*])* + fn $($item)* + } + }; + ( + $(#[$filtered:meta])* + // $next != `cfg` + ? #[$next:ident $($nextargs:tt)*] + $(? #[$rest:ident $($restargs:tt)*])* + fn $($item:tt)* + ) => { + __impl_bitflags! { + $(#[$filtered])* + // $next filtered out + $(? #[$rest $($restargs)*])* + fn $($item)* + } + }; + ( + $(#[$filtered:meta])* + fn $($item:tt)* + ) => { + $(#[$filtered])* + fn $($item)* + }; +} + +// Same as std::stringify but callable from __impl_bitflags, which needs to use +// local_inner_macros so can only directly call macros from this crate. +#[macro_export] +#[doc(hidden)] +macro_rules! __bitflags_stringify { + ($s:ident) => { + stringify!($s) + }; +} + +#[cfg(feature = "example_generated")] +pub mod example_generated; + +#[cfg(test)] +mod tests { + use std::collections::hash_map::DefaultHasher; + use std::hash::{Hash, Hasher}; + + bitflags! { + #[doc = "> The first principle is that you must not fool yourself — and"] + #[doc = "> you are the easiest person to fool."] + #[doc = "> "] + #[doc = "> - Richard Feynman"] + struct Flags: u32 { + const A = 0b00000001; + #[doc = " macros are way better at generating code than trans is"] + const B = 0b00000010; + const C = 0b00000100; + #[doc = "* cmr bed"] + #[doc = "* strcat table"] + #[doc = " wait what?"] + const ABC = Self::A.bits | Self::B.bits | Self::C.bits; + } + } + + bitflags! { + struct _CfgFlags: u32 { + #[cfg(windows)] + const _CFG_A = 0b01; + #[cfg(unix)] + const _CFG_B = 0b01; + #[cfg(windows)] + const _CFG_C = _CFG_A.bits | 0b10; + } + } + + bitflags! { + struct AnotherSetOfFlags: i8 { + const ANOTHER_FLAG = -1_i8; + } + } + + bitflags! 
{ + struct LongFlags: u32 { + const LONG_A = 0b1111111111111111; + } + } + + #[test] + fn test_bits() { + assert_eq!(Flags::empty().bits(), 0b00000000); + assert_eq!(Flags::A.bits(), 0b00000001); + assert_eq!(Flags::ABC.bits(), 0b00000111); + + assert_eq!(AnotherSetOfFlags::empty().bits(), 0b00); + assert_eq!(AnotherSetOfFlags::ANOTHER_FLAG.bits(), !0_i8); + } + + #[test] + fn test_from_bits() { + assert_eq!(Flags::from_bits(0), Some(Flags::empty())); + assert_eq!(Flags::from_bits(0b1), Some(Flags::A)); + assert_eq!(Flags::from_bits(0b10), Some(Flags::B)); + assert_eq!(Flags::from_bits(0b11), Some(Flags::A | Flags::B)); + assert_eq!(Flags::from_bits(0b1000), None); + + assert_eq!( + AnotherSetOfFlags::from_bits(!0_i8), + Some(AnotherSetOfFlags::ANOTHER_FLAG) + ); + } + + #[test] + fn test_from_bits_truncate() { + assert_eq!(Flags::from_bits_truncate(0), Flags::empty()); + assert_eq!(Flags::from_bits_truncate(0b1), Flags::A); + assert_eq!(Flags::from_bits_truncate(0b10), Flags::B); + assert_eq!(Flags::from_bits_truncate(0b11), (Flags::A | Flags::B)); + assert_eq!(Flags::from_bits_truncate(0b1000), Flags::empty()); + assert_eq!(Flags::from_bits_truncate(0b1001), Flags::A); + + assert_eq!( + AnotherSetOfFlags::from_bits_truncate(0_i8), + AnotherSetOfFlags::empty() + ); + } + + #[test] + fn test_is_empty() { + assert!(Flags::empty().is_empty()); + assert!(!Flags::A.is_empty()); + assert!(!Flags::ABC.is_empty()); + + assert!(!AnotherSetOfFlags::ANOTHER_FLAG.is_empty()); + } + + #[test] + fn test_is_all() { + assert!(Flags::all().is_all()); + assert!(!Flags::A.is_all()); + assert!(Flags::ABC.is_all()); + + assert!(AnotherSetOfFlags::ANOTHER_FLAG.is_all()); + } + + #[test] + fn test_two_empties_do_not_intersect() { + let e1 = Flags::empty(); + let e2 = Flags::empty(); + assert!(!e1.intersects(e2)); + + assert!(AnotherSetOfFlags::ANOTHER_FLAG.intersects(AnotherSetOfFlags::ANOTHER_FLAG)); + } + + #[test] + fn test_empty_does_not_intersect_with_full() { + let e1 = Flags::empty(); + let e2 = Flags::ABC; + assert!(!e1.intersects(e2)); + } + + #[test] + fn test_disjoint_intersects() { + let e1 = Flags::A; + let e2 = Flags::B; + assert!(!e1.intersects(e2)); + } + + #[test] + fn test_overlapping_intersects() { + let e1 = Flags::A; + let e2 = Flags::A | Flags::B; + assert!(e1.intersects(e2)); + } + + #[test] + fn test_contains() { + let e1 = Flags::A; + let e2 = Flags::A | Flags::B; + assert!(!e1.contains(e2)); + assert!(e2.contains(e1)); + assert!(Flags::ABC.contains(e2)); + + assert!(AnotherSetOfFlags::ANOTHER_FLAG.contains(AnotherSetOfFlags::ANOTHER_FLAG)); + } + + #[test] + fn test_insert() { + let mut e1 = Flags::A; + let e2 = Flags::A | Flags::B; + e1.insert(e2); + assert_eq!(e1, e2); + + let mut e3 = AnotherSetOfFlags::empty(); + e3.insert(AnotherSetOfFlags::ANOTHER_FLAG); + assert_eq!(e3, AnotherSetOfFlags::ANOTHER_FLAG); + } + + #[test] + fn test_remove() { + let mut e1 = Flags::A | Flags::B; + let e2 = Flags::A | Flags::C; + e1.remove(e2); + assert_eq!(e1, Flags::B); + + let mut e3 = AnotherSetOfFlags::ANOTHER_FLAG; + e3.remove(AnotherSetOfFlags::ANOTHER_FLAG); + assert_eq!(e3, AnotherSetOfFlags::empty()); + } + + #[test] + fn test_operators() { + let e1 = Flags::A | Flags::C; + let e2 = Flags::B | Flags::C; + assert_eq!((e1 | e2), Flags::ABC); // union + assert_eq!((e1 & e2), Flags::C); // intersection + assert_eq!((e1 - e2), Flags::A); // set difference + assert_eq!(!e2, Flags::A); // set complement + assert_eq!(e1 ^ e2, Flags::A | Flags::B); // toggle + let mut e3 = e1; + e3.toggle(e2); + 
assert_eq!(e3, Flags::A | Flags::B); + + let mut m4 = AnotherSetOfFlags::empty(); + m4.toggle(AnotherSetOfFlags::empty()); + assert_eq!(m4, AnotherSetOfFlags::empty()); + } + + #[test] + fn test_set() { + let mut e1 = Flags::A | Flags::C; + e1.set(Flags::B, true); + e1.set(Flags::C, false); + + assert_eq!(e1, Flags::A | Flags::B); + } + + #[test] + fn test_assignment_operators() { + let mut m1 = Flags::empty(); + let e1 = Flags::A | Flags::C; + // union + m1 |= Flags::A; + assert_eq!(m1, Flags::A); + // intersection + m1 &= e1; + assert_eq!(m1, Flags::A); + // set difference + m1 -= m1; + assert_eq!(m1, Flags::empty()); + // toggle + m1 ^= e1; + assert_eq!(m1, e1); + } + + #[test] + fn test_extend() { + let mut flags; + + flags = Flags::empty(); + flags.extend([].iter().cloned()); + assert_eq!(flags, Flags::empty()); + + flags = Flags::empty(); + flags.extend([Flags::A, Flags::B].iter().cloned()); + assert_eq!(flags, Flags::A | Flags::B); + + flags = Flags::A; + flags.extend([Flags::A, Flags::B].iter().cloned()); + assert_eq!(flags, Flags::A | Flags::B); + + flags = Flags::B; + flags.extend([Flags::A, Flags::ABC].iter().cloned()); + assert_eq!(flags, Flags::ABC); + } + + #[test] + fn test_from_iterator() { + assert_eq!([].iter().cloned().collect::<Flags>(), Flags::empty()); + assert_eq!( + [Flags::A, Flags::B].iter().cloned().collect::<Flags>(), + Flags::A | Flags::B + ); + assert_eq!( + [Flags::A, Flags::ABC].iter().cloned().collect::<Flags>(), + Flags::ABC + ); + } + + #[test] + fn test_lt() { + let mut a = Flags::empty(); + let mut b = Flags::empty(); + + assert!(!(a < b) && !(b < a)); + b = Flags::B; + assert!(a < b); + a = Flags::C; + assert!(!(a < b) && b < a); + b = Flags::C | Flags::B; + assert!(a < b); + } + + #[test] + fn test_ord() { + let mut a = Flags::empty(); + let mut b = Flags::empty(); + + assert!(a <= b && a >= b); + a = Flags::A; + assert!(a > b && a >= b); + assert!(b < a && b <= a); + b = Flags::B; + assert!(b > a && b >= a); + assert!(a < b && a <= b); + } + + fn hash<T: Hash>(t: &T) -> u64 { + let mut s = DefaultHasher::new(); + t.hash(&mut s); + s.finish() + } + + #[test] + fn test_hash() { + let mut x = Flags::empty(); + let mut y = Flags::empty(); + assert_eq!(hash(&x), hash(&y)); + x = Flags::all(); + y = Flags::ABC; + assert_eq!(hash(&x), hash(&y)); + } + + #[test] + fn test_debug() { + assert_eq!(format!("{:?}", Flags::A | Flags::B), "A | B"); + assert_eq!(format!("{:?}", Flags::empty()), "(empty)"); + assert_eq!(format!("{:?}", Flags::ABC), "A | B | C | ABC"); + } + + #[test] + fn test_binary() { + assert_eq!(format!("{:b}", Flags::ABC), "111"); + assert_eq!(format!("{:#b}", Flags::ABC), "0b111"); + } + + #[test] + fn test_octal() { + assert_eq!(format!("{:o}", LongFlags::LONG_A), "177777"); + assert_eq!(format!("{:#o}", LongFlags::LONG_A), "0o177777"); + } + + #[test] + fn test_lowerhex() { + assert_eq!(format!("{:x}", LongFlags::LONG_A), "ffff"); + assert_eq!(format!("{:#x}", LongFlags::LONG_A), "0xffff"); + } + + #[test] + fn test_upperhex() { + assert_eq!(format!("{:X}", LongFlags::LONG_A), "FFFF"); + assert_eq!(format!("{:#X}", LongFlags::LONG_A), "0xFFFF"); + } + + mod submodule { + bitflags! { + pub struct PublicFlags: i8 { + const X = 0; + } + } + bitflags! { + struct PrivateFlags: i8 { + const Y = 0; + } + } + + #[test] + fn test_private() { + let _ = PrivateFlags::Y; + } + } + + #[test] + fn test_public() { + let _ = submodule::PublicFlags::X; + } + + mod t1 { + mod foo { + pub type Bar = i32; + } + + bitflags! 
{ + /// baz + struct Flags: foo::Bar { + const A = 0b00000001; + #[cfg(foo)] + const B = 0b00000010; + #[cfg(foo)] + const C = 0b00000010; + } + } + } + + #[test] + fn test_in_function() { + bitflags! { + struct Flags: u8 { + const A = 1; + #[cfg(any())] // false + const B = 2; + } + } + assert_eq!(Flags::all(), Flags::A); + assert_eq!(format!("{:?}", Flags::A), "A"); + } + + #[test] + fn test_deprecated() { + bitflags! { + pub struct TestFlags: u32 { + #[deprecated(note = "Use something else.")] + const ONE = 1; + } + } + } + + #[test] + fn test_pub_crate() { + mod module { + bitflags! { + pub (crate) struct Test: u8 { + const FOO = 1; + } + } + } + + assert_eq!(module::Test::FOO.bits(), 1); + } + + #[test] + fn test_pub_in_module() { + mod module { + mod submodule { + bitflags! { + // `pub (in super)` means only the module `module` will + // be able to access this. + pub (in super) struct Test: u8 { + const FOO = 1; + } + } + } + + mod test { + // Note: due to `pub (in super)`, + // this cannot be accessed directly by the testing code. + pub(super) fn value() -> u8 { + super::submodule::Test::FOO.bits() + } + } + + pub fn value() -> u8 { + test::value() + } + } + + assert_eq!(module::value(), 1) + } + + #[test] + fn test_zero_value_flags() { + bitflags! { + struct Flags: u32 { + const NONE = 0b0; + const SOME = 0b1; + } + } + + assert!(Flags::empty().contains(Flags::NONE)); + assert!(Flags::SOME.contains(Flags::NONE)); + assert!(Flags::NONE.is_empty()); + + assert_eq!(format!("{:?}", Flags::empty()), "NONE"); + assert_eq!(format!("{:?}", Flags::SOME), "SOME"); + } +} diff --git a/bufstream/.cargo-checksum.json b/bufstream/.cargo-checksum.json new file mode 100644 index 000000000..15d5b4014 --- /dev/null +++ b/bufstream/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{},"package":"40e38929add23cdf8a366df9b0e088953150724bcbe5fc330b0d8eb3b328eec8"} \ No newline at end of file diff --git a/bufstream/Cargo.toml b/bufstream/Cargo.toml new file mode 100644 index 000000000..31fdded48 --- /dev/null +++ b/bufstream/Cargo.toml @@ -0,0 +1,32 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "bufstream" +version = "0.1.4" +authors = ["The Rust Project Developers"] +description = "Buffered I/O for streams where each read/write half is separately buffered\n" +homepage = "https://github.com/alexcrichton/bufstream" +documentation = "https://docs.rs/bufstream" +license = "MIT/Apache-2.0" +repository = "https://github.com/alexcrichton/bufstream" +[dependencies.futures] +version = "0.1.13" +optional = true + +[dependencies.tokio-io] +version = "0.1.1" +optional = true + +[features] +default = [] +tokio = ["futures", "tokio-io"] diff --git a/bufstream/LICENSE-APACHE b/bufstream/LICENSE-APACHE new file mode 100644 index 000000000..16fe87b06 --- /dev/null +++ b/bufstream/LICENSE-APACHE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
diff --git a/bufstream/LICENSE-MIT b/bufstream/LICENSE-MIT new file mode 100644 index 000000000..39d4bdb5a --- /dev/null +++ b/bufstream/LICENSE-MIT @@ -0,0 +1,25 @@ +Copyright (c) 2014 The Rust Project Developers + +Permission is hereby granted, free of charge, to any +person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without +limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software +is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions +of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/bufstream/README.md b/bufstream/README.md new file mode 100644 index 000000000..d3cf0eb47 --- /dev/null +++ b/bufstream/README.md @@ -0,0 +1,22 @@ +bufstream +========= + +Buffered I/O streams for reading/writing + +[![Build Status](https://travis-ci.org/alexcrichton/bufstream.svg?branch=master)](https://travis-ci.org/alexcrichton/bufstream) + +[Documentation](https://docs.rs/bufstream/) + +## Usage + +```toml +[dependencies] +bufstream = "0.1" +``` + +## Tokio + +There is support for tokio's `AsyncRead` + `AsyncWrite` traits through the `tokio` +feature. When using this crate with asynchronous IO, make sure to properly flush +the stream before dropping it since IO during drop may cause panics. For the same +reason you should stay away from `BufStream::into_inner`. diff --git a/bufstream/src/lib.rs b/bufstream/src/lib.rs new file mode 100644 index 000000000..8e2e6c516 --- /dev/null +++ b/bufstream/src/lib.rs @@ -0,0 +1,262 @@ +// Copyright 2013 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! A crate for separately buffered streams. +//! +//! This crate provides a `BufStream` type which provides buffering of both the +//! reading and writing halves of a `Read + Write` type. Each half is completely +//! independently buffered of the other, which may not always be desired. For +//! example `BufStream` may have surprising semantics. +//! +//! # Usage +//! +//! ```toml +//! [dependencies] +//! bufstream = "0.1" +//! ``` +//! +//! ```no_run +//! use std::io::prelude::*; +//! use std::net::TcpStream; +//! use bufstream::BufStream; +//! +//! +//! let stream = TcpStream::connect("localhost:4000").unwrap(); +//! let mut buf = BufStream::new(stream); +//! buf.read(&mut [0; 1024]).unwrap(); +//! buf.write(&[0; 1024]).unwrap(); +//! ``` +//! +//! # Async I/O +//! +//! This crate optionally can support async I/O streams with the [Tokio stack] via +//! the `tokio` feature of this crate: +//! +//! [Tokio stack]: https://tokio.rs/ +//! +//! ```toml +//! 
bufstream = { version = "0.2", features = ["tokio"] } +//! ``` +//! +//! All methods are internally capable of working with streams that may return +//! [`ErrorKind::WouldBlock`] when they're not ready to perform the particular +//! operation. +//! +//! [`ErrorKind::WouldBlock`]: https://doc.rust-lang.org/std/io/enum.ErrorKind.html +//! +//! Note that care needs to be taken when using these objects, however. The +//! Tokio runtime, in particular, requires that data is fully flushed before +//! dropping streams. For compatibility with blocking streams all streams are +//! flushed/written when they are dropped, and this is not always a suitable +//! time to perform I/O. If I/O streams are flushed before drop, however, then +//! these operations will be a noop. + +#[cfg(feature = "tokio")] extern crate futures; +#[cfg(feature = "tokio")] #[macro_use] extern crate tokio_io; + +use std::fmt; +use std::io::prelude::*; +use std::io::{self, BufReader, BufWriter}; +use std::error; + +#[cfg(feature = "tokio")] use futures::Poll; +#[cfg(feature = "tokio")] use tokio_io::{AsyncRead, AsyncWrite}; + +const DEFAULT_BUF_SIZE: usize = 8 * 1024; + +/// Wraps a Stream and buffers input and output to and from it. +/// +/// It can be excessively inefficient to work directly with a `Read+Write`. For +/// example, every call to `read` or `write` on `TcpStream` results in a system +/// call. A `BufStream` keeps in memory buffers of data, making large, +/// infrequent calls to `read` and `write` on the underlying `Read+Write`. +/// +/// The output buffer will be written out when this stream is dropped. +#[derive(Debug)] +pub struct BufStream { + inner: BufReader> +} + +/// An error returned by `into_inner` which combines an error that +/// happened while writing out the buffer, and the buffered writer object +/// which may be used to recover from the condition. +#[derive(Debug)] +pub struct IntoInnerError(W, io::Error); + +impl IntoInnerError { + /// Returns the error which caused the call to `into_inner()` to fail. + /// + /// This error was returned when attempting to write the internal buffer. + pub fn error(&self) -> &io::Error { &self.1 } + /// Returns the buffered writer instance which generated the error. + /// + /// The returned object can be used for error recovery, such as + /// re-inspecting the buffer. + pub fn into_inner(self) -> W { self.0 } +} + +impl From> for io::Error { + fn from(iie: IntoInnerError) -> io::Error { iie.1 } +} + +impl error::Error for IntoInnerError { + fn description(&self) -> &str { + error::Error::description(self.error()) + } +} + +impl fmt::Display for IntoInnerError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + self.error().fmt(f) + } +} + +struct InternalBufWriter(Option>); + +impl InternalBufWriter { + fn get_ref(&self) -> &BufWriter { + self.0.as_ref().unwrap() + } + + fn get_mut(&mut self) -> &mut BufWriter { + self.0.as_mut().unwrap() + } +} + +impl Read for InternalBufWriter { + fn read(&mut self, buf: &mut [u8]) -> io::Result { + self.get_mut().get_mut().read(buf) + } +} + +impl fmt::Debug for InternalBufWriter { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + self.get_ref().fmt(f) + } +} + +impl BufStream { + /// Creates a new buffered stream with explicitly listed capacities for the + /// reader/writer buffer. 
+ pub fn with_capacities(reader_cap: usize, writer_cap: usize, inner: S) + -> BufStream { + let writer = BufWriter::with_capacity(writer_cap, inner); + let internal_writer = InternalBufWriter(Some(writer)); + let reader = BufReader::with_capacity(reader_cap, internal_writer); + BufStream { inner: reader } + } + + /// Creates a new buffered stream with the default reader/writer buffer + /// capacities. + pub fn new(inner: S) -> BufStream { + BufStream::with_capacities(DEFAULT_BUF_SIZE, DEFAULT_BUF_SIZE, inner) + } + + /// Gets a reference to the underlying stream. + pub fn get_ref(&self) -> &S { + self.inner.get_ref().get_ref().get_ref() + } + + /// Gets a mutable reference to the underlying stream. + /// + /// # Warning + /// + /// It is inadvisable to read directly from or write directly to the + /// underlying stream. + pub fn get_mut(&mut self) -> &mut S { + self.inner.get_mut().get_mut().get_mut() + } + + /// Unwraps this `BufStream`, returning the underlying stream. + /// + /// The internal write buffer is written out before returning the stream. + /// Any leftover data in the read buffer is lost. + pub fn into_inner(mut self) -> Result>> { + let e = { + let InternalBufWriter(ref mut w) = *self.inner.get_mut(); + let (e, w2) = match w.take().unwrap().into_inner() { + Ok(s) => return Ok(s), + Err(err) => { + (io::Error::new(err.error().kind(), err.error().to_string()), + err.into_inner()) + } + }; + *w = Some(w2); + e + }; + Err(IntoInnerError(self, e)) + } +} + +impl BufRead for BufStream { + fn fill_buf(&mut self) -> io::Result<&[u8]> { self.inner.fill_buf() } + fn consume(&mut self, amt: usize) { self.inner.consume(amt) } + fn read_until(&mut self, byte: u8, buf: &mut Vec) -> io::Result { + self.inner.read_until(byte, buf) + } + fn read_line(&mut self, string: &mut String) -> io::Result { + self.inner.read_line(string) + } +} + +impl Read for BufStream { + fn read(&mut self, buf: &mut [u8]) -> io::Result { + self.inner.read(buf) + } +} + +impl Write for BufStream { + fn write(&mut self, buf: &[u8]) -> io::Result { + self.inner.get_mut().0.as_mut().unwrap().write(buf) + } + fn flush(&mut self) -> io::Result<()> { + self.inner.get_mut().0.as_mut().unwrap().flush() + } +} + +#[cfg(feature = "tokio")] +impl AsyncRead for BufStream {} + +#[cfg(feature = "tokio")] +impl AsyncWrite for BufStream { + fn shutdown(&mut self) -> Poll<(), io::Error> { + try_nb!(self.flush()); + let mut inner = self.inner.get_mut().0.as_mut().unwrap(); + inner.shutdown() + } +} + +#[cfg(test)] +mod tests { + use std::io::prelude::*; + use std::io; + + use super::BufStream; + // This is just here to make sure that we don't infinite loop in the + // newtype struct autoderef weirdness + #[test] + fn test_buffered_stream() { + struct S; + + impl Write for S { + fn write(&mut self, b: &[u8]) -> io::Result { Ok(b.len()) } + fn flush(&mut self) -> io::Result<()> { Ok(()) } + } + + impl Read for S { + fn read(&mut self, _: &mut [u8]) -> io::Result { Ok(0) } + } + + let mut stream = BufStream::new(S); + assert_eq!(stream.read(&mut [0; 10]).unwrap(), 0); + stream.write(&[0; 10]).unwrap(); + stream.flush().unwrap(); + } +} diff --git a/cc/.cargo-checksum.json b/cc/.cargo-checksum.json new file mode 100644 index 000000000..dad57a659 --- /dev/null +++ b/cc/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{},"package":"f159dfd43363c4d08055a07703eb7a3406b0dac4d0584d96965a3262db3c9d16"} \ No newline at end of file diff --git a/cc/Cargo.toml b/cc/Cargo.toml new file mode 100644 index 000000000..f034c7c17 --- /dev/null +++ 
b/cc/Cargo.toml @@ -0,0 +1,37 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "cc" +version = "1.0.25" +authors = ["Alex Crichton "] +description = "A build-time dependency for Cargo build scripts to assist in invoking the native\nC compiler to compile native C code into a static archive to be linked into Rust\ncode.\n" +homepage = "https://github.com/alexcrichton/cc-rs" +documentation = "https://docs.rs/cc" +readme = "README.md" +keywords = ["build-dependencies"] +categories = ["development-tools"] +license = "MIT/Apache-2.0" +repository = "https://github.com/alexcrichton/cc-rs" +[dependencies.rayon] +version = "1.0" +optional = true +[dev-dependencies.tempdir] +version = "0.3" + +[features] +parallel = ["rayon"] +[badges.appveyor] +repository = "alexcrichton/cc-rs" + +[badges.travis-ci] +repository = "alexcrichton/cc-rs" diff --git a/cc/LICENSE-APACHE b/cc/LICENSE-APACHE new file mode 100644 index 000000000..16fe87b06 --- /dev/null +++ b/cc/LICENSE-APACHE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/cc/LICENSE-MIT b/cc/LICENSE-MIT new file mode 100644 index 000000000..39e0ed660 --- /dev/null +++ b/cc/LICENSE-MIT @@ -0,0 +1,25 @@ +Copyright (c) 2014 Alex Crichton + +Permission is hereby granted, free of charge, to any +person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without +limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software +is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions +of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/cc/README.md b/cc/README.md new file mode 100644 index 000000000..f65d0c058 --- /dev/null +++ b/cc/README.md @@ -0,0 +1,202 @@ +# cc-rs + +A library to compile C/C++/assembly into a Rust library/application. + +[![Build Status](https://travis-ci.org/alexcrichton/cc-rs.svg?branch=master)](https://travis-ci.org/alexcrichton/cc-rs) +[![Build status](https://ci.appveyor.com/api/projects/status/onu270iw98h81nwv?svg=true)](https://ci.appveyor.com/project/alexcrichton/cc-rs) + +[Documentation](https://docs.rs/cc) + +A simple library meant to be used as a build dependency with Cargo packages in +order to build a set of C/C++ files into a static archive. This crate calls out +to the most relevant compiler for a platform, for example using `cl` on MSVC. + +> **Note**: this crate was recently renamed from the `gcc` crate, so if you're +> looking for the `gcc` crate you're in the right spot! + +## Using cc-rs + +First, you'll want to both add a build script for your crate (`build.rs`) and +also add this crate to your `Cargo.toml` via: + +```toml +[build-dependencies] +cc = "1.0" +``` + +Next up, you'll want to write a build script like so: + +```rust,no_run +// build.rs + +extern crate cc; + +fn main() { + cc::Build::new() + .file("foo.c") + .file("bar.c") + .compile("foo"); +} +``` + +And that's it! Running `cargo build` should take care of the rest and your Rust +application will now have the C files `foo.c` and `bar.c` compiled into a file +named libfoo.a. You can call the functions in Rust by declaring functions in +your Rust code like so: + +``` +extern { + fn foo_function(); + fn bar_function(); +} + +pub fn call() { + unsafe { + foo_function(); + bar_function(); + } +} + +fn main() { + // ... +} +``` + +## External configuration via environment variables + +To control the programs and flags used for building, the builder can set a +number of different environment variables. + +* `CFLAGS` - a series of space separated flags passed to compilers. Note that + individual flags cannot currently contain spaces, so doing + something like: "-L=foo\ bar" is not possible. +* `CC` - the actual C compiler used. Note that this is used as an exact + executable name, so (for example) no extra flags can be passed inside + this variable, and the builder must ensure that there aren't any + trailing spaces. This compiler must understand the `-c` flag. For + certain `TARGET`s, it also is assumed to know about other flags (most + common is `-fPIC`). +* `AR` - the `ar` (archiver) executable to use to build the static library. + +Each of these variables can also be supplied with certain prefixes and suffixes, +in the following prioritized order: + +1. `<var>_<target>` - for example, `CC_x86_64-unknown-linux-gnu` +2. `<var>_<target_with_underscores>` - for example, `CC_x86_64_unknown_linux_gnu` +3. `<build-kind>_<var>` - for example, `HOST_CC` or `TARGET_CFLAGS` +4. `<var>` - a plain `CC`, `AR` as above. + +If none of these variables exist, cc-rs uses built-in defaults. + +In addition to the above optional environment variables, `cc-rs` has some +functions with hard requirements on some of the variables supplied by [cargo's +build-script driver][cargo]; specifically, the `TARGET`, `OUT_DIR`, `OPT_LEVEL`, +and `HOST` variables must be set.
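+
+As a rough sketch (assuming the `compiler`, `flag`, and `archiver` builder
+methods of the `cc` 1.0 API; the tool names below are placeholders), the same
+kind of configuration can also be set programmatically from the build script
+rather than through the environment:
+
+```rust,no_run
+// build.rs - sketch only; "clang" and "llvm-ar" are placeholder tool names.
+extern crate cc;
+
+fn main() {
+    cc::Build::new()
+        // Plays the role of the `CC` environment variable.
+        .compiler("clang")
+        // One flag that `CFLAGS` could otherwise supply.
+        .flag("-fomit-frame-pointer")
+        // Plays the role of the `AR` environment variable.
+        .archiver("llvm-ar")
+        .file("src/foo.c")
+        .compile("foo");
+}
+```
+
+Builder calls like these can be combined with the environment variables above.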
+ +[cargo]: http://doc.crates.io/build-script.html#inputs-to-the-build-script + +## Optional features + +### Parallel + +Currently cc-rs supports parallel compilation (think `make -jN`) but this +feature is turned off by default. To enable cc-rs to compile C/C++ in parallel, +you can change your dependency to: + +```toml +[build-dependencies] +cc = { version = "1.0", features = ["parallel"] } +``` + +By default cc-rs will limit parallelism to `$NUM_JOBS`, or if not present it +will limit it to the number of cpus on the machine. If you are using cargo, +use `-jN` option of `build`, `test` and `run` commands as `$NUM_JOBS` +is supplied by cargo. + +## Compile-time Requirements + +To work properly this crate needs access to a C compiler when the build script +is being run. This crate does not ship a C compiler with it. The compiler +required varies per platform, but there are three broad categories: + +* Unix platforms require `cc` to be the C compiler. This can be found by + installing cc/clang on Linux distributions and Xcode on OSX, for example. +* Windows platforms targeting MSVC (e.g. your target triple ends in `-msvc`) + require `cl.exe` to be available and in `PATH`. This is typically found in + standard Visual Studio installations and the `PATH` can be set up by running + the appropriate developer tools shell. +* Windows platforms targeting MinGW (e.g. your target triple ends in `-gnu`) + require `cc` to be available in `PATH`. We recommend the + [MinGW-w64](http://mingw-w64.org) distribution, which is using the + [Win-builds](http://win-builds.org) installation system. + You may also acquire it via + [MSYS2](http://msys2.github.io), as explained [here][msys2-help]. Make sure + to install the appropriate architecture corresponding to your installation of + rustc. GCC from older [MinGW](http://www.mingw.org) project is compatible + only with 32-bit rust compiler. + +[msys2-help]: http://github.com/rust-lang/rust#building-on-windows + +## C++ support + +`cc-rs` supports C++ libraries compilation by using the `cpp` method on +`Build`: + +```rust,no_run +extern crate cc; + +fn main() { + cc::Build::new() + .cpp(true) // Switch to C++ library compilation. + .file("foo.cpp") + .compile("libfoo.a"); +} +``` + +When using C++ library compilation switch, the `CXX` and `CXXFLAGS` env +variables are used instead of `CC` and `CFLAGS` and the C++ standard library is +linked to the crate target. + +## CUDA C++ support + +`cc-rs` also supports compiling CUDA C++ libraries by using the `cuda` method +on `Build` (currently for GNU/Clang toolchains only): + +```rust,no_run +extern crate cc; + +fn main() { + cc::Build::new() + // Switch to CUDA C++ library compilation using NVCC. + .cuda(true) + // Generate code for Maxwell (GTX 970, 980, 980 Ti, Titan X). + .flag("-gencode").flag("arch=compute_52,code=sm_52") + // Generate code for Maxwell (Jetson TX1). + .flag("-gencode").flag("arch=compute_53,code=sm_53") + // Generate code for Pascal (GTX 1070, 1080, 1080 Ti, Titan Xp). + .flag("-gencode").flag("arch=compute_61,code=sm_61") + // Generate code for Pascal (Tesla P100). + .flag("-gencode").flag("arch=compute_60,code=sm_60") + // Generate code for Pascal (Jetson TX2). 
+ .flag("-gencode").flag("arch=compute_62,code=sm_62") + .file("bar.cu") + .compile("libbar.a"); +} +``` + +## License + +This project is licensed under either of + + * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or + http://www.apache.org/licenses/LICENSE-2.0) + * MIT license ([LICENSE-MIT](LICENSE-MIT) or + http://opensource.org/licenses/MIT) + +at your option. + +### Contribution + +Unless you explicitly state otherwise, any contribution intentionally submitted +for inclusion in cc-rs by you, as defined in the Apache-2.0 license, shall be +dual licensed as above, without any additional terms or conditions. diff --git a/cc/appveyor.yml b/cc/appveyor.yml new file mode 100644 index 000000000..5e563db1c --- /dev/null +++ b/cc/appveyor.yml @@ -0,0 +1,55 @@ +environment: + + # At the time this was added AppVeyor was having troubles with checking + # revocation of SSL certificates of sites like static.rust-lang.org and what + # we think is crates.io. The libcurl HTTP client by default checks for + # revocation on Windows and according to a mailing list [1] this can be + # disabled. + # + # The `CARGO_HTTP_CHECK_REVOKE` env var here tells cargo to disable SSL + # revocation checking on Windows in libcurl. Note, though, that rustup, which + # we're using to download Rust here, also uses libcurl as the default backend. + # Unlike Cargo, however, rustup doesn't have a mechanism to disable revocation + # checking. To get rustup working we set `RUSTUP_USE_HYPER` which forces it to + # use the Hyper instead of libcurl backend. Both Hyper and libcurl use + # schannel on Windows but it appears that Hyper configures it slightly + # differently such that revocation checking isn't turned on by default. + # + # [1]: https://curl.haxx.se/mail/lib-2016-03/0202.html + RUSTUP_USE_HYPER: 1 + CARGO_HTTP_CHECK_REVOKE: false + + matrix: + - TARGET: x86_64-pc-windows-msvc + ARCH: amd64 + VS: C:\Program Files (x86)\Microsoft Visual Studio 12.0\VC\vcvarsall.bat + - TARGET: x86_64-pc-windows-msvc + ARCH: amd64 + VS: C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat + - TARGET: i686-pc-windows-msvc + ARCH: x86 + VS: C:\Program Files (x86)\Microsoft Visual Studio 12.0\VC\vcvarsall.bat + - TARGET: i686-pc-windows-msvc + ARCH: x86 + VS: C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat + - TARGET: x86_64-pc-windows-gnu + MSYS_BITS: 64 + - TARGET: i686-pc-windows-gnu + MSYS_BITS: 32 +install: + - ps: Start-FileDownload "https://static.rust-lang.org/dist/rust-nightly-${env:TARGET}.exe" + - rust-nightly-%TARGET%.exe /VERYSILENT /NORESTART /DIR="C:\Program Files (x86)\Rust" + - if defined VS call "%VS%" %ARCH% + - set PATH=%PATH%;C:\Program Files (x86)\Rust\bin + - if defined MSYS_BITS set PATH=%PATH%;C:\msys64\mingw%MSYS_BITS%\bin + - rustc -V + - cargo -V + +build: false + +test_script: + - cargo test --target %TARGET% + - cargo test --features parallel --target %TARGET% + - cargo test --manifest-path cc-test/Cargo.toml --target %TARGET% + - cargo test --manifest-path cc-test/Cargo.toml --features parallel --target %TARGET% + - cargo test --manifest-path cc-test/Cargo.toml --release --target %TARGET% diff --git a/cc/src/bin/gcc-shim.rs b/cc/src/bin/gcc-shim.rs new file mode 100644 index 000000000..7fd0ea8fa --- /dev/null +++ b/cc/src/bin/gcc-shim.rs @@ -0,0 +1,23 @@ +#![cfg_attr(test, allow(dead_code))] + +use std::env; +use std::fs::File; +use std::io::prelude::*; +use std::path::PathBuf; + +fn main() { + let out_dir = 
PathBuf::from(env::var_os("GCCTEST_OUT_DIR").unwrap()); + for i in 0.. { + let candidate = out_dir.join(format!("out{}", i)); + if candidate.exists() { + continue; + } + let mut f = File::create(candidate).unwrap(); + for arg in env::args().skip(1) { + writeln!(f, "{}", arg).unwrap(); + } + + File::create(out_dir.join("libfoo.a")).unwrap(); + break; + } +} diff --git a/cc/src/com.rs b/cc/src/com.rs new file mode 100644 index 000000000..2b16475a0 --- /dev/null +++ b/cc/src/com.rs @@ -0,0 +1,155 @@ +// Copyright © 2017 winapi-rs developers +// Licensed under the Apache License, Version 2.0 +// or the MIT license +// , at your option. +// All files in the project carrying such notice may not be copied, modified, or distributed +// except according to those terms. + +#![allow(unused)] + +use std::ffi::{OsStr, OsString}; +use std::mem::forget; +use std::ops::Deref; +use std::os::windows::ffi::{OsStrExt, OsStringExt}; +use std::ptr::null_mut; +use std::slice::from_raw_parts; +use winapi::Interface; +use winapi::BSTR; +use winapi::CoInitializeEx; +use winapi::COINIT_MULTITHREADED; +use winapi::{SysFreeString, SysStringLen}; +use winapi::IUnknown; +use winapi::{HRESULT, S_FALSE, S_OK}; + +pub fn initialize() -> Result<(), HRESULT> { + let err = unsafe { CoInitializeEx(null_mut(), COINIT_MULTITHREADED) }; + if err != S_OK && err != S_FALSE { + // S_FALSE just means COM is already initialized + return Err(err); + } + Ok(()) +} + +pub struct ComPtr(*mut T) +where + T: Interface; +impl ComPtr +where + T: Interface, +{ + /// Creates a `ComPtr` to wrap a raw pointer. + /// It takes ownership over the pointer which means it does __not__ call `AddRef`. + /// `T` __must__ be a COM interface that inherits from `IUnknown`. + pub unsafe fn from_raw(ptr: *mut T) -> ComPtr { + assert!(!ptr.is_null()); + ComPtr(ptr) + } + /// Casts up the inheritance chain + pub fn up(self) -> ComPtr + where + T: Deref, + U: Interface, + { + ComPtr(self.into_raw() as *mut U) + } + /// Extracts the raw pointer. + /// You are now responsible for releasing it yourself. + pub fn into_raw(self) -> *mut T { + let p = self.0; + forget(self); + p + } + /// For internal use only. + fn as_unknown(&self) -> &IUnknown { + unsafe { &*(self.0 as *mut IUnknown) } + } + /// Performs QueryInterface fun. 
+ pub fn cast(&self) -> Result, i32> + where + U: Interface, + { + let mut obj = null_mut(); + let err = unsafe { self.as_unknown().QueryInterface(&U::uuidof(), &mut obj) }; + if err < 0 { + return Err(err); + } + Ok(unsafe { ComPtr::from_raw(obj as *mut U) }) + } +} +impl Deref for ComPtr +where + T: Interface, +{ + type Target = T; + fn deref(&self) -> &T { + unsafe { &*self.0 } + } +} +impl Clone for ComPtr +where + T: Interface, +{ + fn clone(&self) -> Self { + unsafe { + self.as_unknown().AddRef(); + ComPtr::from_raw(self.0) + } + } +} +impl Drop for ComPtr +where + T: Interface, +{ + fn drop(&mut self) { + unsafe { + self.as_unknown().Release(); + } + } +} +pub struct BStr(BSTR); +impl BStr { + pub unsafe fn from_raw(s: BSTR) -> BStr { + BStr(s) + } + pub fn to_osstring(&self) -> OsString { + let len = unsafe { SysStringLen(self.0) }; + let slice = unsafe { from_raw_parts(self.0, len as usize) }; + OsStringExt::from_wide(slice) + } +} +impl Drop for BStr { + fn drop(&mut self) { + unsafe { SysFreeString(self.0) }; + } +} + +pub trait ToWide { + fn to_wide(&self) -> Vec; + fn to_wide_null(&self) -> Vec; +} +impl ToWide for T +where + T: AsRef, +{ + fn to_wide(&self) -> Vec { + self.as_ref().encode_wide().collect() + } + fn to_wide_null(&self) -> Vec { + self.as_ref().encode_wide().chain(Some(0)).collect() + } +} +pub trait FromWide +where + Self: Sized, +{ + fn from_wide(wide: &[u16]) -> Self; + fn from_wide_null(wide: &[u16]) -> Self { + let len = wide.iter().take_while(|&&c| c != 0).count(); + Self::from_wide(&wide[..len]) + } +} +impl FromWide for OsString { + fn from_wide(wide: &[u16]) -> OsString { + OsStringExt::from_wide(wide) + } +} diff --git a/cc/src/lib.rs b/cc/src/lib.rs new file mode 100644 index 000000000..7672cf441 --- /dev/null +++ b/cc/src/lib.rs @@ -0,0 +1,2275 @@ +//! A library for build scripts to compile custom C code +//! +//! This library is intended to be used as a `build-dependencies` entry in +//! `Cargo.toml`: +//! +//! ```toml +//! [build-dependencies] +//! cc = "1.0" +//! ``` +//! +//! The purpose of this crate is to provide the utility functions necessary to +//! compile C code into a static archive which is then linked into a Rust crate. +//! Configuration is available through the `Build` struct. +//! +//! This crate will automatically detect situations such as cross compilation or +//! other environment variables set by Cargo and will build code appropriately. +//! +//! The crate is not limited to C code, it can accept any source code that can +//! be passed to a C or C++ compiler. As such, assembly files with extensions +//! `.s` (gcc/clang) and `.asm` (MSVC) can also be compiled. +//! +//! [`Build`]: struct.Build.html +//! +//! # Parallelism +//! +//! To parallelize computation, enable the `parallel` feature for the crate. +//! +//! ```toml +//! [build-dependencies] +//! cc = { version = "1.0", features = ["parallel"] } +//! ``` +//! To specify the max number of concurrent compilation jobs, set the `NUM_JOBS` +//! environment variable to the desired amount. +//! +//! Cargo will also set this environment variable when executed with the `-jN` flag. +//! +//! If `NUM_JOBS` is not set, the `RAYON_NUM_THREADS` environment variable can +//! also specify the build paralellism. +//! +//! # Examples +//! +//! Use the `Build` struct to compile `src/foo.c`: +//! +//! ```no_run +//! extern crate cc; +//! +//! fn main() { +//! cc::Build::new() +//! .file("src/foo.c") +//! .define("FOO", Some("bar")) +//! .include("src") +//! .compile("foo"); +//! } +//! 
``` + +#![doc(html_root_url = "https://docs.rs/cc/1.0")] +#![cfg_attr(test, deny(warnings))] +#![deny(missing_docs)] + +#[cfg(feature = "parallel")] +extern crate rayon; + +use std::env; +use std::ffi::{OsStr, OsString}; +use std::fs; +use std::path::{Path, PathBuf}; +use std::process::{Child, Command, Stdio}; +use std::io::{self, BufRead, BufReader, Read, Write}; +use std::thread::{self, JoinHandle}; +use std::collections::HashMap; +use std::sync::{Arc, Mutex}; + +// These modules are all glue to support reading the MSVC version from +// the registry and from COM interfaces +#[cfg(windows)] +mod registry; +#[cfg(windows)] +#[macro_use] +mod winapi; +#[cfg(windows)] +mod com; +#[cfg(windows)] +mod setup_config; + +pub mod windows_registry; + +/// A builder for compilation of a native static library. +/// +/// A `Build` is the main type of the `cc` crate and is used to control all the +/// various configuration options and such of a compile. You'll find more +/// documentation on each method itself. +#[derive(Clone, Debug)] +pub struct Build { + include_directories: Vec, + definitions: Vec<(String, Option)>, + objects: Vec, + flags: Vec, + flags_supported: Vec, + known_flag_support_status: Arc>>, + files: Vec, + cpp: bool, + cpp_link_stdlib: Option>, + cpp_set_stdlib: Option, + cuda: bool, + target: Option, + host: Option, + out_dir: Option, + opt_level: Option, + debug: Option, + env: Vec<(OsString, OsString)>, + compiler: Option, + archiver: Option, + cargo_metadata: bool, + pic: Option, + static_crt: Option, + shared_flag: Option, + static_flag: Option, + warnings_into_errors: bool, + warnings: Option, + extra_warnings: Option, + env_cache: Arc>>>, +} + +/// Represents the types of errors that may occur while using cc-rs. +#[derive(Clone, Debug)] +enum ErrorKind { + /// Error occurred while performing I/O. + IOError, + /// Invalid architecture supplied. + ArchitectureInvalid, + /// Environment variable not found, with the var in question as extra info. + EnvVarNotFound, + /// Error occurred while using external tools (ie: invocation of compiler). + ToolExecError, + /// Error occurred due to missing external tools. + ToolNotFound, +} + +/// Represents an internal error that occurred, with an explanation. +#[derive(Clone, Debug)] +pub struct Error { + /// Describes the kind of error that occurred. + kind: ErrorKind, + /// More explanation of error that occurred. + message: String, +} + +impl Error { + fn new(kind: ErrorKind, message: &str) -> Error { + Error { + kind: kind, + message: message.to_owned(), + } + } +} + +impl From for Error { + fn from(e: io::Error) -> Error { + Error::new(ErrorKind::IOError, &format!("{}", e)) + } +} + +/// Configuration used to represent an invocation of a C compiler. +/// +/// This can be used to figure out what compiler is in use, what the arguments +/// to it are, and what the environment variables look like for the compiler. +/// This can be used to further configure other build systems (e.g. forward +/// along CC and/or CFLAGS) or the `to_command` method can be used to run the +/// compiler itself. +#[derive(Clone, Debug)] +pub struct Tool { + path: PathBuf, + cc_wrapper_path: Option, + cc_wrapper_args: Vec, + args: Vec, + env: Vec<(OsString, OsString)>, + family: ToolFamily, + cuda: bool, + removed_args: Vec, +} + +/// Represents the family of tools this tool belongs to. +/// +/// Each family of tools differs in how and what arguments they accept. +/// +/// Detection of a family is done on best-effort basis and may not accurately reflect the tool. 
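+///
+/// Roughly, detection keys off the executable file name (see `Tool::with_features`
+/// further down): a name containing `clang-cl` maps to `Msvc { clang_cl: true }`,
+/// a name containing `cl` (but not `clang`, `cloudabi`, or `uclibc`) maps to plain
+/// MSVC, a name containing `clang` maps to `Clang`, and anything else falls back
+/// to `Gnu`.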
+#[derive(Copy, Clone, Debug, PartialEq)] +enum ToolFamily { + /// Tool is GNU Compiler Collection-like. + Gnu, + /// Tool is Clang-like. It differs from the GCC in a sense that it accepts superset of flags + /// and its cross-compilation approach is different. + Clang, + /// Tool is the MSVC cl.exe. + Msvc { clang_cl: bool }, +} + +impl ToolFamily { + /// What the flag to request debug info for this family of tools look like + fn add_debug_flags(&self, cmd: &mut Tool) { + match *self { + ToolFamily::Msvc { .. } => { + cmd.push_cc_arg("/Z7".into()); + } + ToolFamily::Gnu | ToolFamily::Clang => { + cmd.push_cc_arg("-g".into()); + cmd.push_cc_arg("-fno-omit-frame-pointer".into()); + } + } + } + + /// What the flag to include directories into header search path looks like + fn include_flag(&self) -> &'static str { + match *self { + ToolFamily::Msvc { .. } => "/I", + ToolFamily::Gnu | ToolFamily::Clang => "-I", + } + } + + /// What the flag to request macro-expanded source output looks like + fn expand_flag(&self) -> &'static str { + match *self { + ToolFamily::Msvc { .. } => "/E", + ToolFamily::Gnu | ToolFamily::Clang => "-E", + } + } + + /// What the flags to enable all warnings + fn warnings_flags(&self) -> &'static str { + match *self { + ToolFamily::Msvc { .. } => "/W4", + ToolFamily::Gnu | ToolFamily::Clang => "-Wall", + } + } + + /// What the flags to enable extra warnings + fn extra_warnings_flags(&self) -> Option<&'static str> { + match *self { + ToolFamily::Msvc { .. } => None, + ToolFamily::Gnu | ToolFamily::Clang => Some("-Wextra"), + } + } + + /// What the flag to turn warning into errors + fn warnings_to_errors_flag(&self) -> &'static str { + match *self { + ToolFamily::Msvc { .. } => "/WX", + ToolFamily::Gnu | ToolFamily::Clang => "-Werror", + } + } + + /// NVCC-specific. Device code debug info flag. This is separate from the + /// debug info flag passed to the C++ compiler. + fn nvcc_debug_flag(&self) -> &'static str { + match *self { + ToolFamily::Msvc { .. } => unimplemented!(), + ToolFamily::Gnu | ToolFamily::Clang => "-G", + } + } + + /// NVCC-specific. Redirect the following flag to the underlying C++ + /// compiler. + fn nvcc_redirect_flag(&self) -> &'static str { + match *self { + ToolFamily::Msvc { .. } => unimplemented!(), + ToolFamily::Gnu | ToolFamily::Clang => "-Xcompiler", + } + } + + fn verbose_stderr(&self) -> bool { + *self == ToolFamily::Clang + } +} + +/// Represents an object. +/// +/// This is a source file -> object file pair. +#[derive(Clone, Debug)] +struct Object { + src: PathBuf, + dst: PathBuf, +} + +impl Object { + /// Create a new source file -> object file pair. + fn new(src: PathBuf, dst: PathBuf) -> Object { + Object { src: src, dst: dst } + } +} + +impl Build { + /// Construct a new instance of a blank set of configuration. + /// + /// This builder is finished with the [`compile`] function. 
+ /// + /// [`compile`]: struct.Build.html#method.compile + pub fn new() -> Build { + Build { + include_directories: Vec::new(), + definitions: Vec::new(), + objects: Vec::new(), + flags: Vec::new(), + flags_supported: Vec::new(), + known_flag_support_status: Arc::new(Mutex::new(HashMap::new())), + files: Vec::new(), + shared_flag: None, + static_flag: None, + cpp: false, + cpp_link_stdlib: None, + cpp_set_stdlib: None, + cuda: false, + target: None, + host: None, + out_dir: None, + opt_level: None, + debug: None, + env: Vec::new(), + compiler: None, + archiver: None, + cargo_metadata: true, + pic: None, + static_crt: None, + warnings: None, + extra_warnings: None, + warnings_into_errors: false, + env_cache: Arc::new(Mutex::new(HashMap::new())), + } + } + + /// Add a directory to the `-I` or include path for headers + /// + /// # Example + /// + /// ```no_run + /// use std::path::Path; + /// + /// let library_path = Path::new("/path/to/library"); + /// + /// cc::Build::new() + /// .file("src/foo.c") + /// .include(library_path) + /// .include("src") + /// .compile("foo"); + /// ``` + pub fn include>(&mut self, dir: P) -> &mut Build { + self.include_directories.push(dir.as_ref().to_path_buf()); + self + } + + /// Specify a `-D` variable with an optional value. + /// + /// # Example + /// + /// ```no_run + /// cc::Build::new() + /// .file("src/foo.c") + /// .define("FOO", "BAR") + /// .define("BAZ", None) + /// .compile("foo"); + /// ``` + pub fn define<'a, V: Into>>(&mut self, var: &str, val: V) -> &mut Build { + self.definitions + .push((var.to_string(), val.into().map(|s| s.to_string()))); + self + } + + /// Add an arbitrary object file to link in + pub fn object>(&mut self, obj: P) -> &mut Build { + self.objects.push(obj.as_ref().to_path_buf()); + self + } + + /// Add an arbitrary flag to the invocation of the compiler + /// + /// # Example + /// + /// ```no_run + /// cc::Build::new() + /// .file("src/foo.c") + /// .flag("-ffunction-sections") + /// .compile("foo"); + /// ``` + pub fn flag(&mut self, flag: &str) -> &mut Build { + self.flags.push(flag.to_string()); + self + } + + fn ensure_check_file(&self) -> Result { + let out_dir = self.get_out_dir()?; + let src = if self.cuda { + assert!(self.cpp); + out_dir.join("flag_check.cu") + } else if self.cpp { + out_dir.join("flag_check.cpp") + } else { + out_dir.join("flag_check.c") + }; + + if !src.exists() { + let mut f = fs::File::create(&src)?; + write!(f, "int main(void) {{ return 0; }}")?; + } + + Ok(src) + } + + /// Run the compiler to test if it accepts the given flag. + /// + /// For a convenience method for setting flags conditionally, + /// see `flag_if_supported()`. + /// + /// It may return error if it's unable to run the compilier with a test file + /// (e.g. the compiler is missing or a write to the `out_dir` failed). + /// + /// Note: Once computed, the result of this call is stored in the + /// `known_flag_support` field. If `is_flag_supported(flag)` + /// is called again, the result will be read from the hash table. 
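+    ///
+    /// # Example
+    ///
+    /// A minimal sketch (the probed flag is only an illustration); in a build
+    /// script the `TARGET`, `HOST`, `OPT_LEVEL` and `OUT_DIR` variables this
+    /// check relies on are provided by Cargo.
+    ///
+    /// ```no_run
+    /// let supported = cc::Build::new()
+    ///     .is_flag_supported("-Wduplicated-cond")
+    ///     .unwrap_or(false);
+    /// ```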
+ pub fn is_flag_supported(&self, flag: &str) -> Result { + let mut known_status = self.known_flag_support_status.lock().unwrap(); + if let Some(is_supported) = known_status.get(flag).cloned() { + return Ok(is_supported); + } + + let out_dir = self.get_out_dir()?; + let src = self.ensure_check_file()?; + let obj = out_dir.join("flag_check"); + let target = self.get_target()?; + let host = self.get_host()?; + let mut cfg = Build::new(); + cfg.flag(flag) + .target(&target) + .opt_level(0) + .host(&host) + .debug(false) + .cpp(self.cpp) + .cuda(self.cuda); + let mut compiler = cfg.try_get_compiler()?; + + // Clang uses stderr for verbose output, which yields a false positive + // result if the CFLAGS/CXXFLAGS include -v to aid in debugging. + if compiler.family.verbose_stderr() { + compiler.remove_arg("-v".into()); + } + + let mut cmd = compiler.to_command(); + let is_arm = target.contains("aarch64") || target.contains("arm"); + command_add_output_file(&mut cmd, &obj, target.contains("msvc"), false, is_arm); + + // We need to explicitly tell msvc not to link and create an exe + // in the root directory of the crate + if target.contains("msvc") { + cmd.arg("/c"); + } + + cmd.arg(&src); + + let output = cmd.output()?; + let is_supported = output.stderr.is_empty(); + + known_status.insert(flag.to_owned(), is_supported); + Ok(is_supported) + } + + /// Add an arbitrary flag to the invocation of the compiler if it supports it + /// + /// # Example + /// + /// ```no_run + /// cc::Build::new() + /// .file("src/foo.c") + /// .flag_if_supported("-Wlogical-op") // only supported by GCC + /// .flag_if_supported("-Wunreachable-code") // only supported by clang + /// .compile("foo"); + /// ``` + pub fn flag_if_supported(&mut self, flag: &str) -> &mut Build { + self.flags_supported.push(flag.to_string()); + self + } + + /// Set the `-shared` flag. + /// + /// When enabled, the compiler will produce a shared object which can + /// then be linked with other objects to form an executable. + /// + /// # Example + /// + /// ```no_run + /// cc::Build::new() + /// .file("src/foo.c") + /// .shared_flag(true) + /// .compile("libfoo.so"); + /// ``` + + pub fn shared_flag(&mut self, shared_flag: bool) -> &mut Build { + self.shared_flag = Some(shared_flag); + self + } + + /// Set the `-static` flag. + /// + /// When enabled on systems that support dynamic linking, this prevents + /// linking with the shared libraries. + /// + /// # Example + /// + /// ```no_run + /// cc::Build::new() + /// .file("src/foo.c") + /// .shared_flag(true) + /// .static_flag(true) + /// .compile("foo"); + /// ``` + pub fn static_flag(&mut self, static_flag: bool) -> &mut Build { + self.static_flag = Some(static_flag); + self + } + + /// Add a file which will be compiled + pub fn file>(&mut self, p: P) -> &mut Build { + self.files.push(p.as_ref().to_path_buf()); + self + } + + /// Add files which will be compiled + pub fn files
<P>
(&mut self, p: P) -> &mut Build + where + P: IntoIterator, + P::Item: AsRef, + { + for file in p.into_iter() { + self.file(file); + } + self + } + + /// Set C++ support. + /// + /// The other `cpp_*` options will only become active if this is set to + /// `true`. + pub fn cpp(&mut self, cpp: bool) -> &mut Build { + self.cpp = cpp; + self + } + + /// Set CUDA C++ support. + /// + /// Enabling CUDA will pass the detected C/C++ toolchain as an argument to + /// the CUDA compiler, NVCC. NVCC itself accepts some limited GNU-like args; + /// any other arguments for the C/C++ toolchain will be redirected using + /// "-Xcompiler" flags. + /// + /// If enabled, this also implicitly enables C++ support. + pub fn cuda(&mut self, cuda: bool) -> &mut Build { + self.cuda = cuda; + if cuda { + self.cpp = true; + } + self + } + + /// Set warnings into errors flag. + /// + /// Disabled by default. + /// + /// Warning: turning warnings into errors only make sense + /// if you are a developer of the crate using cc-rs. + /// Some warnings only appear on some architecture or + /// specific version of the compiler. Any user of this crate, + /// or any other crate depending on it, could fail during + /// compile time. + /// + /// # Example + /// + /// ```no_run + /// cc::Build::new() + /// .file("src/foo.c") + /// .warnings_into_errors(true) + /// .compile("libfoo.a"); + /// ``` + pub fn warnings_into_errors(&mut self, warnings_into_errors: bool) -> &mut Build { + self.warnings_into_errors = warnings_into_errors; + self + } + + /// Set warnings flags. + /// + /// Adds some flags: + /// - "/Wall" for MSVC. + /// - "-Wall", "-Wextra" for GNU and Clang. + /// + /// Enabled by default. + /// + /// # Example + /// + /// ```no_run + /// cc::Build::new() + /// .file("src/foo.c") + /// .warnings(false) + /// .compile("libfoo.a"); + /// ``` + pub fn warnings(&mut self, warnings: bool) -> &mut Build { + self.warnings = Some(warnings); + self.extra_warnings = Some(warnings); + self + } + + /// Set extra warnings flags. + /// + /// Adds some flags: + /// - nothing for MSVC. + /// - "-Wextra" for GNU and Clang. + /// + /// Enabled by default. + /// + /// # Example + /// + /// ```no_run + /// // Disables -Wextra, -Wall remains enabled: + /// cc::Build::new() + /// .file("src/foo.c") + /// .extra_warnings(false) + /// .compile("libfoo.a"); + /// ``` + pub fn extra_warnings(&mut self, warnings: bool) -> &mut Build { + self.extra_warnings = Some(warnings); + self + } + + /// Set the standard library to link against when compiling with C++ + /// support. + /// + /// The default value of this property depends on the current target: On + /// OS X `Some("c++")` is used, when compiling for a Visual Studio based + /// target `None` is used and for other targets `Some("stdc++")` is used. + /// If the `CXXSTDLIB` environment variable is set, its value will + /// override the default value. + /// + /// A value of `None` indicates that no automatic linking should happen, + /// otherwise cargo will link against the specified library. + /// + /// The given library name must not contain the `lib` prefix. 
+ /// + /// Common values: + /// - `stdc++` for GNU + /// - `c++` for Clang + /// + /// # Example + /// + /// ```no_run + /// cc::Build::new() + /// .file("src/foo.c") + /// .shared_flag(true) + /// .cpp_link_stdlib("stdc++") + /// .compile("libfoo.so"); + /// ``` + pub fn cpp_link_stdlib<'a, V: Into>>( + &mut self, + cpp_link_stdlib: V, + ) -> &mut Build { + self.cpp_link_stdlib = Some(cpp_link_stdlib.into().map(|s| s.into())); + self + } + + /// Force the C++ compiler to use the specified standard library. + /// + /// Setting this option will automatically set `cpp_link_stdlib` to the same + /// value. + /// + /// The default value of this option is always `None`. + /// + /// This option has no effect when compiling for a Visual Studio based + /// target. + /// + /// This option sets the `-stdlib` flag, which is only supported by some + /// compilers (clang, icc) but not by others (gcc). The library will not + /// detect which compiler is used, as such it is the responsibility of the + /// caller to ensure that this option is only used in conjuction with a + /// compiler which supports the `-stdlib` flag. + /// + /// A value of `None` indicates that no specific C++ standard library should + /// be used, otherwise `-stdlib` is added to the compile invocation. + /// + /// The given library name must not contain the `lib` prefix. + /// + /// Common values: + /// - `stdc++` for GNU + /// - `c++` for Clang + /// + /// # Example + /// + /// ```no_run + /// cc::Build::new() + /// .file("src/foo.c") + /// .cpp_set_stdlib("c++") + /// .compile("libfoo.a"); + /// ``` + pub fn cpp_set_stdlib<'a, V: Into>>( + &mut self, + cpp_set_stdlib: V, + ) -> &mut Build { + let cpp_set_stdlib = cpp_set_stdlib.into(); + self.cpp_set_stdlib = cpp_set_stdlib.map(|s| s.into()); + self.cpp_link_stdlib(cpp_set_stdlib); + self + } + + /// Configures the target this configuration will be compiling for. + /// + /// This option is automatically scraped from the `TARGET` environment + /// variable by build scripts, so it's not required to call this function. + /// + /// # Example + /// + /// ```no_run + /// cc::Build::new() + /// .file("src/foo.c") + /// .target("aarch64-linux-android") + /// .compile("foo"); + /// ``` + pub fn target(&mut self, target: &str) -> &mut Build { + self.target = Some(target.to_string()); + self + } + + /// Configures the host assumed by this configuration. + /// + /// This option is automatically scraped from the `HOST` environment + /// variable by build scripts, so it's not required to call this function. + /// + /// # Example + /// + /// ```no_run + /// cc::Build::new() + /// .file("src/foo.c") + /// .host("arm-linux-gnueabihf") + /// .compile("foo"); + /// ``` + pub fn host(&mut self, host: &str) -> &mut Build { + self.host = Some(host.to_string()); + self + } + + /// Configures the optimization level of the generated object files. + /// + /// This option is automatically scraped from the `OPT_LEVEL` environment + /// variable by build scripts, so it's not required to call this function. + pub fn opt_level(&mut self, opt_level: u32) -> &mut Build { + self.opt_level = Some(opt_level.to_string()); + self + } + + /// Configures the optimization level of the generated object files. + /// + /// This option is automatically scraped from the `OPT_LEVEL` environment + /// variable by build scripts, so it's not required to call this function. 
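+    ///
+    /// # Example
+    ///
+    /// A small sketch (the file name is illustrative); `"s"` requests a
+    /// size-oriented optimization level (`-Os` on GCC/Clang, `/O1` on MSVC).
+    ///
+    /// ```no_run
+    /// cc::Build::new()
+    ///     .file("src/foo.c")
+    ///     .opt_level_str("s")
+    ///     .compile("foo");
+    /// ```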
+ pub fn opt_level_str(&mut self, opt_level: &str) -> &mut Build { + self.opt_level = Some(opt_level.to_string()); + self + } + + /// Configures whether the compiler will emit debug information when + /// generating object files. + /// + /// This option is automatically scraped from the `PROFILE` environment + /// variable by build scripts (only enabled when the profile is "debug"), so + /// it's not required to call this function. + pub fn debug(&mut self, debug: bool) -> &mut Build { + self.debug = Some(debug); + self + } + + /// Configures the output directory where all object files and static + /// libraries will be located. + /// + /// This option is automatically scraped from the `OUT_DIR` environment + /// variable by build scripts, so it's not required to call this function. + pub fn out_dir>(&mut self, out_dir: P) -> &mut Build { + self.out_dir = Some(out_dir.as_ref().to_owned()); + self + } + + /// Configures the compiler to be used to produce output. + /// + /// This option is automatically determined from the target platform or a + /// number of environment variables, so it's not required to call this + /// function. + pub fn compiler>(&mut self, compiler: P) -> &mut Build { + self.compiler = Some(compiler.as_ref().to_owned()); + self + } + + /// Configures the tool used to assemble archives. + /// + /// This option is automatically determined from the target platform or a + /// number of environment variables, so it's not required to call this + /// function. + pub fn archiver>(&mut self, archiver: P) -> &mut Build { + self.archiver = Some(archiver.as_ref().to_owned()); + self + } + /// Define whether metadata should be emitted for cargo allowing it to + /// automatically link the binary. Defaults to `true`. + /// + /// The emitted metadata is: + /// + /// - `rustc-link-lib=static=`*compiled lib* + /// - `rustc-link-search=native=`*target folder* + /// - When target is MSVC, the ATL-MFC libs are added via `rustc-link-search=native=` + /// - When C++ is enabled, the C++ stdlib is added via `rustc-link-lib` + /// + pub fn cargo_metadata(&mut self, cargo_metadata: bool) -> &mut Build { + self.cargo_metadata = cargo_metadata; + self + } + + /// Configures whether the compiler will emit position independent code. + /// + /// This option defaults to `false` for `windows-gnu` targets and + /// to `true` for all other targets. + pub fn pic(&mut self, pic: bool) -> &mut Build { + self.pic = Some(pic); + self + } + + /// Configures whether the /MT flag or the /MD flag will be passed to msvc build tools. + /// + /// This option defaults to `false`, and affect only msvc targets. + pub fn static_crt(&mut self, static_crt: bool) -> &mut Build { + self.static_crt = Some(static_crt); + self + } + + #[doc(hidden)] + pub fn __set_env(&mut self, a: A, b: B) -> &mut Build + where + A: AsRef, + B: AsRef, + { + self.env + .push((a.as_ref().to_owned(), b.as_ref().to_owned())); + self + } + + /// Run the compiler, generating the file `output` + /// + /// This will return a result instead of panicing; see compile() for the complete description. 
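+    ///
+    /// # Example
+    ///
+    /// A minimal sketch of handling the error instead of panicking (file and
+    /// library names are illustrative):
+    ///
+    /// ```no_run
+    /// let result = cc::Build::new().file("src/foo.c").try_compile("foo");
+    /// if let Err(error) = result {
+    ///     println!("cargo:warning=failed to build foo: {:?}", error);
+    /// }
+    /// ```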
+ pub fn try_compile(&self, output: &str) -> Result<(), Error> { + let (lib_name, gnu_lib_name) = if output.starts_with("lib") && output.ends_with(".a") { + (&output[3..output.len() - 2], output.to_owned()) + } else { + let mut gnu = String::with_capacity(5 + output.len()); + gnu.push_str("lib"); + gnu.push_str(&output); + gnu.push_str(".a"); + (output, gnu) + }; + let dst = self.get_out_dir()?; + + let mut objects = Vec::new(); + for file in self.files.iter() { + let obj = dst.join(file).with_extension("o"); + let obj = if !obj.starts_with(&dst) { + dst.join(obj.file_name().ok_or_else(|| { + Error::new(ErrorKind::IOError, "Getting object file details failed.") + })?) + } else { + obj + }; + + match obj.parent() { + Some(s) => fs::create_dir_all(s)?, + None => { + return Err(Error::new( + ErrorKind::IOError, + "Getting object file details failed.", + )) + } + }; + + objects.push(Object::new(file.to_path_buf(), obj)); + } + self.compile_objects(&objects)?; + self.assemble(lib_name, &dst.join(gnu_lib_name), &objects)?; + + if self.get_target()?.contains("msvc") { + let compiler = self.get_base_compiler()?; + let atlmfc_lib = compiler + .env() + .iter() + .find(|&&(ref var, _)| var.as_os_str() == OsStr::new("LIB")) + .and_then(|&(_, ref lib_paths)| { + env::split_paths(lib_paths).find(|path| { + let sub = Path::new("atlmfc/lib"); + path.ends_with(sub) || path.parent().map_or(false, |p| p.ends_with(sub)) + }) + }); + + if let Some(atlmfc_lib) = atlmfc_lib { + self.print(&format!( + "cargo:rustc-link-search=native={}", + atlmfc_lib.display() + )); + } + } + + self.print(&format!("cargo:rustc-link-lib=static={}", lib_name)); + self.print(&format!("cargo:rustc-link-search=native={}", dst.display())); + + // Add specific C++ libraries, if enabled. + if self.cpp { + if let Some(stdlib) = self.get_cpp_link_stdlib()? { + self.print(&format!("cargo:rustc-link-lib={}", stdlib)); + } + } + + Ok(()) + } + + /// Run the compiler, generating the file `output` + /// + /// The name `output` should be the name of the library. For backwards compatibility, + /// the `output` may start with `lib` and end with `.a`. The Rust compilier will create + /// the assembly with the lib prefix and .a extension. MSVC will create a file without prefix, + /// ending with `.lib`. + /// + /// # Panics + /// + /// Panics if `output` is not formatted correctly or if one of the underlying + /// compiler commands fails. It can also panic if it fails reading file names + /// or creating directories. + pub fn compile(&self, output: &str) { + if let Err(e) = self.try_compile(output) { + fail(&e.message); + } + } + + #[cfg(feature = "parallel")] + fn compile_objects(&self, objs: &[Object]) -> Result<(), Error> { + use self::rayon::prelude::*; + + if let Ok(amt) = env::var("NUM_JOBS") { + if let Ok(amt) = amt.parse() { + let _ = rayon::ThreadPoolBuilder::new() + .num_threads(amt) + .build_global(); + } + } + + // Check for any errors and return the first one found. + objs.par_iter() + .with_max_len(1) + .map(|obj| self.compile_object(obj)) + .collect() + } + + #[cfg(not(feature = "parallel"))] + fn compile_objects(&self, objs: &[Object]) -> Result<(), Error> { + for obj in objs { + self.compile_object(obj)?; + } + Ok(()) + } + + fn compile_object(&self, obj: &Object) -> Result<(), Error> { + let is_asm = obj.src.extension().and_then(|s| s.to_str()) == Some("asm"); + let target = self.get_target()?; + let msvc = target.contains("msvc"); + let (mut cmd, name) = if msvc && is_asm { + self.msvc_macro_assembler()? 
+ } else { + let compiler = self.try_get_compiler()?; + let mut cmd = compiler.to_command(); + for &(ref a, ref b) in self.env.iter() { + cmd.env(a, b); + } + ( + cmd, + compiler + .path + .file_name() + .ok_or_else(|| Error::new(ErrorKind::IOError, "Failed to get compiler path."))? + .to_string_lossy() + .into_owned(), + ) + }; + let is_arm = target.contains("aarch64") || target.contains("arm"); + command_add_output_file(&mut cmd, &obj.dst, msvc, is_asm, is_arm); + // armasm and armasm64 don't requrie -c option + if !msvc || !is_asm || !is_arm { + cmd.arg(if msvc { "/c" } else { "-c" }); + } + cmd.arg(&obj.src); + + run(&mut cmd, &name)?; + Ok(()) + } + + /// This will return a result instead of panicing; see expand() for the complete description. + pub fn try_expand(&self) -> Result, Error> { + let compiler = self.try_get_compiler()?; + let mut cmd = compiler.to_command(); + for &(ref a, ref b) in self.env.iter() { + cmd.env(a, b); + } + cmd.arg(compiler.family.expand_flag()); + + assert!( + self.files.len() <= 1, + "Expand may only be called for a single file" + ); + + for file in self.files.iter() { + cmd.arg(file); + } + + let name = compiler + .path + .file_name() + .ok_or_else(|| Error::new(ErrorKind::IOError, "Failed to get compiler path."))? + .to_string_lossy() + .into_owned(); + + Ok(run_output(&mut cmd, &name)?) + } + + /// Run the compiler, returning the macro-expanded version of the input files. + /// + /// This is only relevant for C and C++ files. + /// + /// # Panics + /// Panics if more than one file is present in the config, or if compiler + /// path has an invalid file name. + /// + /// # Example + /// ```no_run + /// let out = cc::Build::new().file("src/foo.c").expand(); + /// ``` + pub fn expand(&self) -> Vec { + match self.try_expand() { + Err(e) => fail(&e.message), + Ok(v) => v, + } + } + + /// Get the compiler that's in use for this configuration. + /// + /// This function will return a `Tool` which represents the culmination + /// of this configuration at a snapshot in time. The returned compiler can + /// be inspected (e.g. the path, arguments, environment) to forward along to + /// other tools, or the `to_command` method can be used to invoke the + /// compiler itself. + /// + /// This method will take into account all configuration such as debug + /// information, optimization level, include directories, defines, etc. + /// Additionally, the compiler binary in use follows the standard + /// conventions for this path, e.g. looking at the explicitly set compiler, + /// environment variables (a number of which are inspected here), and then + /// falling back to the default configuration. + /// + /// # Panics + /// + /// Panics if an error occurred while determining the architecture. + pub fn get_compiler(&self) -> Tool { + match self.try_get_compiler() { + Ok(tool) => tool, + Err(e) => fail(&e.message), + } + } + + /// Get the compiler that's in use for this configuration. + /// + /// This will return a result instead of panicing; see get_compiler() for the complete description. + pub fn try_get_compiler(&self) -> Result { + let opt_level = self.get_opt_level()?; + let target = self.get_target()?; + + let mut cmd = self.get_base_compiler()?; + + // Non-target flags + // If the flag is not conditioned on target variable, it belongs here :) + match cmd.family { + ToolFamily::Msvc { .. } => { + assert!(!self.cuda, + "CUDA C++ compilation not supported for MSVC, yet... 
but you are welcome to implement it :)"); + + cmd.args.push("/nologo".into()); + + let crt_flag = match self.static_crt { + Some(true) => "/MT", + Some(false) => "/MD", + None => { + let features = + env::var("CARGO_CFG_TARGET_FEATURE").unwrap_or(String::new()); + if features.contains("crt-static") { + "/MT" + } else { + "/MD" + } + } + }; + cmd.args.push(crt_flag.into()); + + match &opt_level[..] { + // Msvc uses /O1 to enable all optimizations that minimize code size. + "z" | "s" | "1" => cmd.args.push("/O1".into()), + // -O3 is a valid value for gcc and clang compilers, but not msvc. Cap to /O2. + "2" | "3" => cmd.args.push("/O2".into()), + _ => {} + } + } + ToolFamily::Gnu | ToolFamily::Clang => { + // arm-linux-androideabi-gcc 4.8 shipped with Android NDK does + // not support '-Oz' + if opt_level == "z" && cmd.family != ToolFamily::Clang { + cmd.args.push("-Os".into()); + } else { + cmd.args.push(format!("-O{}", opt_level).into()); + } + + if !target.contains("-ios") { + cmd.push_cc_arg("-ffunction-sections".into()); + cmd.push_cc_arg("-fdata-sections".into()); + } + if self.pic.unwrap_or(!target.contains("windows-gnu")) { + cmd.push_cc_arg("-fPIC".into()); + } + } + } + for arg in self.envflags(if self.cpp { "CXXFLAGS" } else { "CFLAGS" }) { + cmd.args.push(arg.into()); + } + + if self.get_debug() { + if self.cuda { + let nvcc_debug_flag = cmd.family.nvcc_debug_flag().into(); + cmd.args.push(nvcc_debug_flag); + } + let family = cmd.family; + family.add_debug_flags(&mut cmd); + } + + // Target flags + match cmd.family { + ToolFamily::Clang => { + cmd.args.push(format!("--target={}", target).into()); + } + ToolFamily::Msvc { clang_cl } => { + if clang_cl { + if target.contains("x86_64") { + cmd.args.push("-m64".into()); + } else if target.contains("i586") { + cmd.args.push("-m32".into()); + cmd.args.push("/arch:IA32".into()); + } else { + cmd.args.push("-m32".into()); + } + } else { + if target.contains("i586") { + cmd.args.push("/ARCH:IA32".into()); + } + } + } + ToolFamily::Gnu => { + if target.contains("i686") || target.contains("i586") { + cmd.args.push("-m32".into()); + } else if target == "x86_64-unknown-linux-gnux32" { + cmd.args.push("-mx32".into()); + } else if target.contains("x86_64") || target.contains("powerpc64") { + cmd.args.push("-m64".into()); + } + + if self.static_flag.is_none() { + let features = env::var("CARGO_CFG_TARGET_FEATURE").unwrap_or(String::new()); + if features.contains("crt-static") { + cmd.args.push("-static".into()); + } + } + + // armv7 targets get to use armv7 instructions + if (target.starts_with("armv7") || target.starts_with("thumbv7")) && target.contains("-linux-") { + cmd.args.push("-march=armv7-a".into()); + } + + // (x86 Android doesn't say "eabi") + if target.contains("-androideabi") && target.contains("v7") { + // -march=armv7-a handled above + cmd.args.push("-mthumb".into()); + if !target.contains("neon") { + // On android we can guarantee some extra float instructions + // (specified in the android spec online) + // NEON guarantees even more; see below. 
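+                    // VFPv3-D16 is the baseline FPU guaranteed for armeabi-v7a;
+                    // the `neon` branch below selects a richer FPU instead.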
+ cmd.args.push("-mfpu=vfpv3-d16".into()); + } + cmd.args.push("-mfloat-abi=softfp".into()); + } + + if target.contains("neon") { + cmd.args.push("-mfpu=neon-vfpv4".into()); + } + + if target.starts_with("armv4t-unknown-linux-") { + cmd.args.push("-march=armv4t".into()); + cmd.args.push("-marm".into()); + cmd.args.push("-mfloat-abi=soft".into()); + } + + if target.starts_with("armv5te-unknown-linux-") { + cmd.args.push("-march=armv5te".into()); + cmd.args.push("-marm".into()); + cmd.args.push("-mfloat-abi=soft".into()); + } + + // For us arm == armv6 by default + if target.starts_with("arm-unknown-linux-") { + cmd.args.push("-march=armv6".into()); + cmd.args.push("-marm".into()); + } + + // We can guarantee some settings for FRC + if target.starts_with("arm-frc-") { + cmd.args.push("-march=armv7-a".into()); + cmd.args.push("-mcpu=cortex-a9".into()); + cmd.args.push("-mfpu=vfpv3".into()); + cmd.args.push("-mfloat-abi=softfp".into()); + cmd.args.push("-marm".into()); + } + + // Turn codegen down on i586 to avoid some instructions. + if target.starts_with("i586-unknown-linux-") { + cmd.args.push("-march=pentium".into()); + } + + // Set codegen level for i686 correctly + if target.starts_with("i686-unknown-linux-") { + cmd.args.push("-march=i686".into()); + } + + // Looks like `musl-gcc` makes is hard for `-m32` to make its way + // all the way to the linker, so we need to actually instruct the + // linker that we're generating 32-bit executables as well. This'll + // typically only be used for build scripts which transitively use + // these flags that try to compile executables. + if target == "i686-unknown-linux-musl" || target == "i586-unknown-linux-musl" { + cmd.args.push("-Wl,-melf_i386".into()); + } + + if target.starts_with("thumb") { + cmd.args.push("-mthumb".into()); + + if target.ends_with("eabihf") { + cmd.args.push("-mfloat-abi=hard".into()) + } + } + if target.starts_with("thumbv6m") { + cmd.args.push("-march=armv6s-m".into()); + } + if target.starts_with("thumbv7em") { + cmd.args.push("-march=armv7e-m".into()); + + if target.ends_with("eabihf") { + cmd.args.push("-mfpu=fpv4-sp-d16".into()) + } + } + if target.starts_with("thumbv7m") { + cmd.args.push("-march=armv7-m".into()); + } + if target.starts_with("armebv7r") | target.starts_with("armv7r") { + if target.starts_with("armeb") { + cmd.args.push("-mbig-endian".into()); + } else { + cmd.args.push("-mlittle-endian".into()); + } + + // ARM mode + cmd.args.push("-marm".into()); + + // R Profile + cmd.args.push("-march=armv7-r".into()); + + if target.ends_with("eabihf") { + // Calling convention + cmd.args.push("-mfloat-abi=hard".into()); + + // lowest common denominator FPU + // (see Cortex-R4 technical reference manual) + cmd.args.push("-mfpu=vfpv3-d16".into()) + } else { + // Calling convention + cmd.args.push("-mfloat-abi=soft".into()); + } + } + } + } + + if target.contains("-ios") { + // FIXME: potential bug. iOS is always compiled with Clang, but Gcc compiler may be + // detected instead. 
+ self.ios_flags(&mut cmd)?; + } + + if self.static_flag.unwrap_or(false) { + cmd.args.push("-static".into()); + } + if self.shared_flag.unwrap_or(false) { + cmd.args.push("-shared".into()); + } + + if self.cpp { + match (self.cpp_set_stdlib.as_ref(), cmd.family) { + (None, _) => {} + (Some(stdlib), ToolFamily::Gnu) | (Some(stdlib), ToolFamily::Clang) => { + cmd.push_cc_arg(format!("-stdlib=lib{}", stdlib).into()); + } + _ => { + println!( + "cargo:warning=cpp_set_stdlib is specified, but the {:?} compiler \ + does not support this option, ignored", + cmd.family + ); + } + } + } + + for directory in self.include_directories.iter() { + cmd.args.push(cmd.family.include_flag().into()); + cmd.args.push(directory.into()); + } + + // If warnings and/or extra_warnings haven't been explicitly set, + // then we set them only if the environment doesn't already have + // CFLAGS/CXXFLAGS, since those variables presumably already contain + // the desired set of warnings flags. + + if self.warnings.unwrap_or(if self.has_flags() { false } else { true }) { + let wflags = cmd.family.warnings_flags().into(); + cmd.push_cc_arg(wflags); + } + + if self.extra_warnings.unwrap_or(if self.has_flags() { false } else { true }) { + if let Some(wflags) = cmd.family.extra_warnings_flags() { + cmd.push_cc_arg(wflags.into()); + } + } + + for flag in self.flags.iter() { + cmd.args.push(flag.into()); + } + + for flag in self.flags_supported.iter() { + if self.is_flag_supported(flag).unwrap_or(false) { + cmd.push_cc_arg(flag.into()); + } + } + + for &(ref key, ref value) in self.definitions.iter() { + let lead = if let ToolFamily::Msvc { .. } = cmd.family { + "/" + } else { + "-" + }; + if let Some(ref value) = *value { + cmd.args.push(format!("{}D{}={}", lead, key, value).into()); + } else { + cmd.args.push(format!("{}D{}", lead, key).into()); + } + } + + if self.warnings_into_errors { + let warnings_to_errors_flag = cmd.family.warnings_to_errors_flag().into(); + cmd.push_cc_arg(warnings_to_errors_flag); + } + + Ok(cmd) + } + + fn has_flags(&self) -> bool { + let flags_env_var_name = if self.cpp { "CXXFLAGS" } else { "CFLAGS" }; + let flags_env_var_value = self.get_var(flags_env_var_name); + if let Ok(_) = flags_env_var_value { true } else { false } + } + + fn msvc_macro_assembler(&self) -> Result<(Command, String), Error> { + let target = self.get_target()?; + let tool = if target.contains("x86_64") { + "ml64.exe" + } else if target.contains("arm") { + "armasm.exe" + } else if target.contains("aarch64") { + "armasm64.exe" + } else { + "ml.exe" + }; + let mut cmd = windows_registry::find(&target, tool).unwrap_or_else(|| self.cmd(tool)); + for directory in self.include_directories.iter() { + cmd.arg("/I").arg(directory); + } + for &(ref key, ref value) in self.definitions.iter() { + if let Some(ref value) = *value { + cmd.arg(&format!("/D{}={}", key, value)); + } else { + cmd.arg(&format!("/D{}", key)); + } + } + + if target.contains("i686") || target.contains("i586") { + cmd.arg("/safeseh"); + } + for flag in self.flags.iter() { + cmd.arg(flag); + } + + Ok((cmd, tool.to_string())) + } + + fn assemble(&self, lib_name: &str, dst: &Path, objs: &[Object]) -> Result<(), Error> { + // Delete the destination if it exists as the `ar` tool at least on Unix + // appends to it, which we don't want. 
+ let _ = fs::remove_file(&dst); + + let objects: Vec<_> = objs.iter().map(|obj| obj.dst.clone()).collect(); + let target = self.get_target()?; + if target.contains("msvc") { + let mut cmd = match self.archiver { + Some(ref s) => self.cmd(s), + None => windows_registry::find(&target, "lib.exe") + .unwrap_or_else(|| self.cmd("lib.exe")), + }; + + let mut out = OsString::from("/OUT:"); + out.push(dst); + cmd.arg(out).arg("/nologo"); + + // Similar to https://github.com/rust-lang/rust/pull/47507 + // and https://github.com/rust-lang/rust/pull/48548 + let estimated_command_line_len = objects + .iter() + .chain(&self.objects) + .map(|a| a.as_os_str().len()) + .sum::(); + if estimated_command_line_len > 1024 * 6 { + let mut args = String::from("\u{FEFF}"); // BOM + for arg in objects.iter().chain(&self.objects) { + args.push('"'); + for c in arg.to_str().unwrap().chars() { + if c == '"' { + args.push('\\') + } + args.push(c) + } + args.push('"'); + args.push('\n'); + } + + let mut utf16le = Vec::new(); + for code_unit in args.encode_utf16() { + utf16le.push(code_unit as u8); + utf16le.push((code_unit >> 8) as u8); + } + + let mut args_file = OsString::from(dst); + args_file.push(".args"); + fs::File::create(&args_file) + .unwrap() + .write_all(&utf16le) + .unwrap(); + + let mut args_file_arg = OsString::from("@"); + args_file_arg.push(args_file); + cmd.arg(args_file_arg); + } else { + cmd.args(&objects).args(&self.objects); + } + run(&mut cmd, "lib.exe")?; + + // The Rust compiler will look for libfoo.a and foo.lib, but the + // MSVC linker will also be passed foo.lib, so be sure that both + // exist for now. + let lib_dst = dst.with_file_name(format!("{}.lib", lib_name)); + let _ = fs::remove_file(&lib_dst); + match fs::hard_link(&dst, &lib_dst).or_else(|_| { + // if hard-link fails, just copy (ignoring the number of bytes written) + fs::copy(&dst, &lib_dst).map(|_| ()) + }) { + Ok(_) => (), + Err(_) => { + return Err(Error::new( + ErrorKind::IOError, + "Could not copy or create a hard-link to the generated lib file.", + )) + } + }; + } else { + let (mut ar, cmd) = self.get_ar()?; + run( + ar.arg("crs").arg(dst).args(&objects).args(&self.objects), + &cmd, + )?; + } + + Ok(()) + } + + fn ios_flags(&self, cmd: &mut Tool) -> Result<(), Error> { + enum ArchSpec { + Device(&'static str), + Simulator(&'static str), + } + + let target = self.get_target()?; + let arch = target.split('-').nth(0).ok_or_else(|| { + Error::new( + ErrorKind::ArchitectureInvalid, + "Unknown architecture for iOS target.", + ) + })?; + let arch = match arch { + "arm" | "armv7" | "thumbv7" => ArchSpec::Device("armv7"), + "armv7s" | "thumbv7s" => ArchSpec::Device("armv7s"), + "arm64" | "aarch64" => ArchSpec::Device("arm64"), + "i386" | "i686" => ArchSpec::Simulator("-m32"), + "x86_64" => ArchSpec::Simulator("-m64"), + _ => { + return Err(Error::new( + ErrorKind::ArchitectureInvalid, + "Unknown architecture for iOS target.", + )) + } + }; + + let sdk = match arch { + ArchSpec::Device(arch) => { + cmd.args.push("-arch".into()); + cmd.args.push(arch.into()); + cmd.args.push("-miphoneos-version-min=7.0".into()); + "iphoneos" + } + ArchSpec::Simulator(arch) => { + cmd.args.push(arch.into()); + cmd.args.push("-mios-simulator-version-min=7.0".into()); + "iphonesimulator" + } + }; + + self.print(&format!("Detecting iOS SDK path for {}", sdk)); + let sdk_path = self.cmd("xcrun") + .arg("--show-sdk-path") + .arg("--sdk") + .arg(sdk) + .stderr(Stdio::inherit()) + .output()? 
+ .stdout; + + let sdk_path = match String::from_utf8(sdk_path) { + Ok(p) => p, + Err(_) => { + return Err(Error::new( + ErrorKind::IOError, + "Unable to determine iOS SDK path.", + )) + } + }; + + cmd.args.push("-isysroot".into()); + cmd.args.push(sdk_path.trim().into()); + cmd.args.push("-fembed-bitcode".into()); + /* + * TODO we probably ultimatedly want the -fembed-bitcode-marker flag + * but can't have it now because of an issue in LLVM: + * https://github.com/alexcrichton/cc-rs/issues/301 + * https://github.com/rust-lang/rust/pull/48896#comment-372192660 + */ + /* + if self.get_opt_level()? == "0" { + cmd.args.push("-fembed-bitcode-marker".into()); + } + */ + + Ok(()) + } + + fn cmd>(&self, prog: P) -> Command { + let mut cmd = Command::new(prog); + for &(ref a, ref b) in self.env.iter() { + cmd.env(a, b); + } + cmd + } + + fn get_base_compiler(&self) -> Result { + if let Some(ref c) = self.compiler { + return Ok(Tool::new(c.clone())); + } + let host = self.get_host()?; + let target = self.get_target()?; + let (env, msvc, gnu, traditional, clang) = if self.cpp { + ("CXX", "cl.exe", "g++", "c++", "clang++") + } else { + ("CC", "cl.exe", "gcc", "cc", "clang") + }; + + // On Solaris, c++/cc unlikely to exist or be correct. + let default = if host.contains("solaris") { + gnu + } else { + traditional + }; + + let cl_exe = windows_registry::find_tool(&target, "cl.exe"); + + let tool_opt: Option = self.env_tool(env) + .map(|(tool, cc, args)| { + // chop off leading/trailing whitespace to work around + // semi-buggy build scripts which are shared in + // makefiles/configure scripts (where spaces are far more + // lenient) + let mut t = Tool::new(PathBuf::from(tool.trim())); + if let Some(cc) = cc { + t.cc_wrapper_path = Some(PathBuf::from(cc)); + } + for arg in args { + t.cc_wrapper_args.push(arg.into()); + } + t + }) + .or_else(|| { + if target.contains("emscripten") { + let tool = if self.cpp { "em++" } else { "emcc" }; + // Windows uses bat file so we have to be a bit more specific + if cfg!(windows) { + let mut t = Tool::new(PathBuf::from("cmd")); + t.args.push("/c".into()); + t.args.push(format!("{}.bat", tool).into()); + Some(t) + } else { + Some(Tool::new(PathBuf::from(tool))) + } + } else { + None + } + }) + .or_else(|| cl_exe.clone()); + + let tool = match tool_opt { + Some(t) => t, + None => { + let compiler = if host.contains("windows") && target.contains("windows") { + if target.contains("msvc") { + msvc.to_string() + } else { + format!("{}.exe", gnu) + } + } else if target.contains("android") { + let target = target + .replace("armv7neon", "arm") + .replace("armv7", "arm") + .replace("thumbv7neon", "arm") + .replace("thumbv7", "arm"); + let gnu_compiler = format!("{}-{}", target, gnu); + let clang_compiler = format!("{}-{}", target, clang); + // Check if gnu compiler is present + // if not, use clang + if Command::new(&gnu_compiler).spawn().is_ok() { + gnu_compiler + } else { + clang_compiler + } + } else if target.contains("cloudabi") { + format!("{}-{}", target, traditional) + } else if self.get_host()? != target { + // CROSS_COMPILE is of the form: "arm-linux-gnueabi-" + let cc_env = self.getenv("CROSS_COMPILE"); + let cross_compile = cc_env.as_ref().map(|s| s.trim_right_matches('-')); + let prefix = cross_compile.or(match &target[..] 
{ + "aarch64-unknown-linux-gnu" => Some("aarch64-linux-gnu"), + "aarch64-unknown-linux-musl" => Some("aarch64-linux-musl"), + "aarch64-unknown-netbsd" => Some("aarch64--netbsd"), + "arm-unknown-linux-gnueabi" => Some("arm-linux-gnueabi"), + "armv4t-unknown-linux-gnueabi" => Some("arm-linux-gnueabi"), + "armv5te-unknown-linux-gnueabi" => Some("arm-linux-gnueabi"), + "arm-frc-linux-gnueabi" => Some("arm-frc-linux-gnueabi"), + "arm-unknown-linux-gnueabihf" => Some("arm-linux-gnueabihf"), + "arm-unknown-linux-musleabi" => Some("arm-linux-musleabi"), + "arm-unknown-linux-musleabihf" => Some("arm-linux-musleabihf"), + "arm-unknown-netbsd-eabi" => Some("arm--netbsdelf-eabi"), + "armv6-unknown-netbsd-eabihf" => Some("armv6--netbsdelf-eabihf"), + "armv7-unknown-linux-gnueabihf" => Some("arm-linux-gnueabihf"), + "armv7-unknown-linux-musleabihf" => Some("arm-linux-musleabihf"), + "armv7neon-unknown-linux-gnueabihf" => Some("arm-linux-gnueabihf"), + "armv7neon-unknown-linux-musleabihf" => Some("arm-linux-musleabihf"), + "thumbv7-unknown-linux-gnueabihf" => Some("arm-linux-gnueabihf"), + "thumbv7-unknown-linux-musleabihf" => Some("arm-linux-musleabihf"), + "thumbv7neon-unknown-linux-gnueabihf" => Some("arm-linux-gnueabihf"), + "thumbv7neon-unknown-linux-musleabihf" => Some("arm-linux-musleabihf"), + "armv7-unknown-netbsd-eabihf" => Some("armv7--netbsdelf-eabihf"), + "i586-unknown-linux-musl" => Some("musl"), + "i686-pc-windows-gnu" => Some("i686-w64-mingw32"), + "i686-unknown-linux-musl" => Some("musl"), + "i686-unknown-netbsd" => Some("i486--netbsdelf"), + "mips-unknown-linux-gnu" => Some("mips-linux-gnu"), + "mipsel-unknown-linux-gnu" => Some("mipsel-linux-gnu"), + "mips64-unknown-linux-gnuabi64" => Some("mips64-linux-gnuabi64"), + "mips64el-unknown-linux-gnuabi64" => Some("mips64el-linux-gnuabi64"), + "powerpc-unknown-linux-gnu" => Some("powerpc-linux-gnu"), + "powerpc-unknown-linux-gnuspe" => Some("powerpc-linux-gnuspe"), + "powerpc-unknown-netbsd" => Some("powerpc--netbsd"), + "powerpc64-unknown-linux-gnu" => Some("powerpc-linux-gnu"), + "powerpc64le-unknown-linux-gnu" => Some("powerpc64le-linux-gnu"), + "s390x-unknown-linux-gnu" => Some("s390x-linux-gnu"), + "sparc-unknown-linux-gnu" => Some("sparc-linux-gnu"), + "sparc64-unknown-linux-gnu" => Some("sparc64-linux-gnu"), + "sparc64-unknown-netbsd" => Some("sparc64--netbsd"), + "sparcv9-sun-solaris" => Some("sparcv9-sun-solaris"), + "armebv7r-none-eabi" => Some("arm-none-eabi"), + "armebv7r-none-eabihf" => Some("arm-none-eabi"), + "armv7r-none-eabi" => Some("arm-none-eabi"), + "armv7r-none-eabihf" => Some("arm-none-eabi"), + "thumbv6m-none-eabi" => Some("arm-none-eabi"), + "thumbv7em-none-eabi" => Some("arm-none-eabi"), + "thumbv7em-none-eabihf" => Some("arm-none-eabi"), + "thumbv7m-none-eabi" => Some("arm-none-eabi"), + "x86_64-pc-windows-gnu" => Some("x86_64-w64-mingw32"), + "x86_64-rumprun-netbsd" => Some("x86_64-rumprun-netbsd"), + "x86_64-unknown-linux-musl" => Some("musl"), + "x86_64-unknown-netbsd" => Some("x86_64--netbsd"), + _ => None, + }); + match prefix { + Some(prefix) => format!("{}-{}", prefix, gnu), + None => default.to_string(), + } + } else { + default.to_string() + }; + Tool::new(PathBuf::from(compiler)) + } + }; + + let mut tool = if self.cuda { + assert!( + tool.args.is_empty(), + "CUDA compilation currently assumes empty pre-existing args" + ); + let nvcc = match self.get_var("NVCC") { + Err(_) => "nvcc".into(), + Ok(nvcc) => nvcc, + }; + let mut nvcc_tool = Tool::with_features(PathBuf::from(nvcc), self.cuda); + nvcc_tool + 
.args + .push(format!("-ccbin={}", tool.path.display()).into()); + nvcc_tool + } else { + tool + }; + + // If we found `cl.exe` in our environment, the tool we're returning is + // an MSVC-like tool, *and* no env vars were set then set env vars for + // the tool that we're returning. + // + // Env vars are needed for things like `link.exe` being put into PATH as + // well as header include paths sometimes. These paths are automatically + // included by default but if the `CC` or `CXX` env vars are set these + // won't be used. This'll ensure that when the env vars are used to + // configure for invocations like `clang-cl` we still get a "works out + // of the box" experience. + if let Some(cl_exe) = cl_exe { + if tool.family == (ToolFamily::Msvc { clang_cl: true }) && + tool.env.len() == 0 && + target.contains("msvc") + { + for &(ref k, ref v) in cl_exe.env.iter() { + tool.env.push((k.to_owned(), v.to_owned())); + } + } + } + + Ok(tool) + } + + fn get_var(&self, var_base: &str) -> Result { + let target = self.get_target()?; + let host = self.get_host()?; + let kind = if host == target { "HOST" } else { "TARGET" }; + let target_u = target.replace("-", "_"); + let res = self.getenv(&format!("{}_{}", var_base, target)) + .or_else(|| self.getenv(&format!("{}_{}", var_base, target_u))) + .or_else(|| self.getenv(&format!("{}_{}", kind, var_base))) + .or_else(|| self.getenv(var_base)); + + match res { + Some(res) => Ok(res), + None => Err(Error::new( + ErrorKind::EnvVarNotFound, + &format!("Could not find environment variable {}.", var_base), + )), + } + } + + fn envflags(&self, name: &str) -> Vec { + self.get_var(name) + .unwrap_or(String::new()) + .split(|c: char| c.is_whitespace()) + .filter(|s| !s.is_empty()) + .map(|s| s.to_string()) + .collect() + } + + /// Returns compiler path, optional modifier name from whitelist, and arguments vec + fn env_tool(&self, name: &str) -> Option<(String, Option, Vec)> { + let tool = match self.get_var(name) { + Ok(tool) => tool, + Err(_) => return None, + }; + + // If this is an exact path on the filesystem we don't want to do any + // interpretation at all, just pass it on through. This'll hopefully get + // us to support spaces-in-paths. + if Path::new(&tool).exists() { + return Some((tool, None, Vec::new())); + } + + // Ok now we want to handle a couple of scenarios. We'll assume from + // here on out that spaces are splitting separate arguments. Two major + // features we want to support are: + // + // CC='sccache cc' + // + // aka using `sccache` or any other wrapper/caching-like-thing for + // compilations. We want to know what the actual compiler is still, + // though, because our `Tool` API support introspection of it to see + // what compiler is in use. + // + // additionally we want to support + // + // CC='cc -flag' + // + // where the CC env var is used to also pass default flags to the C + // compiler. + // + // It's true that everything here is a bit of a pain, but apparently if + // you're not literally make or bash then you get a lot of bug reports. 
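+        //
+        // For example, CC="sccache clang -m32" is split into the wrapper
+        // "sccache", the compiler "clang", and the extra argument "-m32".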
+ let known_wrappers = ["ccache", "distcc", "sccache", "icecc"]; + + let mut parts = tool.split_whitespace(); + let maybe_wrapper = match parts.next() { + Some(s) => s, + None => return None, + }; + + let file_stem = Path::new(maybe_wrapper) + .file_stem() + .unwrap() + .to_str() + .unwrap(); + if known_wrappers.contains(&file_stem) { + if let Some(compiler) = parts.next() { + return Some(( + compiler.to_string(), + Some(maybe_wrapper.to_string()), + parts.map(|s| s.to_string()).collect(), + )); + } + } + + Some(( + maybe_wrapper.to_string(), + None, + parts.map(|s| s.to_string()).collect(), + )) + } + + /// Returns the default C++ standard library for the current target: `libc++` + /// for OS X and `libstdc++` for anything else. + fn get_cpp_link_stdlib(&self) -> Result, Error> { + match self.cpp_link_stdlib.clone() { + Some(s) => Ok(s), + None => { + if let Ok(stdlib) = self.get_var("CXXSTDLIB") { + if stdlib.is_empty() { + Ok(None) + } else { + Ok(Some(stdlib)) + } + } else { + let target = self.get_target()?; + if target.contains("msvc") { + Ok(None) + } else if target.contains("apple") { + Ok(Some("c++".to_string())) + } else if target.contains("freebsd") { + Ok(Some("c++".to_string())) + } else if target.contains("openbsd") { + Ok(Some("c++".to_string())) + } else { + Ok(Some("stdc++".to_string())) + } + } + } + } + } + + fn get_ar(&self) -> Result<(Command, String), Error> { + if let Some(ref p) = self.archiver { + let name = p.file_name().and_then(|s| s.to_str()).unwrap_or("ar"); + return Ok((self.cmd(p), name.to_string())); + } + if let Ok(p) = self.get_var("AR") { + return Ok((self.cmd(&p), p)); + } + let program = if self.get_target()?.contains("android") { + format!("{}-ar", self.get_target()?.replace("armv7", "arm")) + } else if self.get_target()?.contains("emscripten") { + // Windows use bat files so we have to be a bit more specific + if cfg!(windows) { + let mut cmd = self.cmd("cmd"); + cmd.arg("/c").arg("emar.bat"); + return Ok((cmd, "emar.bat".to_string())); + } + + "emar".to_string() + } else { + "ar".to_string() + }; + Ok((self.cmd(&program), program)) + } + + fn get_target(&self) -> Result { + match self.target.clone() { + Some(t) => Ok(t), + None => Ok(self.getenv_unwrap("TARGET")?), + } + } + + fn get_host(&self) -> Result { + match self.host.clone() { + Some(h) => Ok(h), + None => Ok(self.getenv_unwrap("HOST")?), + } + } + + fn get_opt_level(&self) -> Result { + match self.opt_level.as_ref().cloned() { + Some(ol) => Ok(ol), + None => Ok(self.getenv_unwrap("OPT_LEVEL")?), + } + } + + fn get_debug(&self) -> bool { + self.debug.unwrap_or_else(|| match self.getenv("DEBUG") { + Some(s) => s != "false", + None => false, + }) + } + + fn get_out_dir(&self) -> Result { + match self.out_dir.clone() { + Some(p) => Ok(p), + None => Ok(env::var_os("OUT_DIR").map(PathBuf::from).ok_or_else(|| { + Error::new( + ErrorKind::EnvVarNotFound, + "Environment variable OUT_DIR not defined.", + ) + })?), + } + } + + fn getenv(&self, v: &str) -> Option { + let mut cache = self.env_cache.lock().unwrap(); + if let Some(val) = cache.get(v) { + return val.clone() + } + let r = env::var(v).ok(); + self.print(&format!("{} = {:?}", v, r)); + cache.insert(v.to_string(), r.clone()); + r + } + + fn getenv_unwrap(&self, v: &str) -> Result { + match self.getenv(v) { + Some(s) => Ok(s), + None => Err(Error::new( + ErrorKind::EnvVarNotFound, + &format!("Environment variable {} not defined.", v.to_string()), + )), + } + } + + fn print(&self, s: &str) { + if self.cargo_metadata { + println!("{}", s); + } + 
} +} + +impl Default for Build { + fn default() -> Build { + Build::new() + } +} + +impl Tool { + fn new(path: PathBuf) -> Tool { + Tool::with_features(path, false) + } + + fn with_features(path: PathBuf, cuda: bool) -> Tool { + // Try to detect family of the tool from its name, falling back to Gnu. + let family = if let Some(fname) = path.file_name().and_then(|p| p.to_str()) { + if fname.contains("clang-cl") { + ToolFamily::Msvc { clang_cl: true } + } else if fname.contains("cl") && + !fname.contains("cloudabi") && + !fname.contains("uclibc") && + !fname.contains("clang") { + ToolFamily::Msvc { clang_cl: false } + } else if fname.contains("clang") { + ToolFamily::Clang + } else { + ToolFamily::Gnu + } + } else { + ToolFamily::Gnu + }; + Tool { + path: path, + cc_wrapper_path: None, + cc_wrapper_args: Vec::new(), + args: Vec::new(), + env: Vec::new(), + family: family, + cuda: cuda, + removed_args: Vec::new(), + } + } + + /// Add an argument to be stripped from the final command arguments. + fn remove_arg(&mut self, flag: OsString) { + self.removed_args.push(flag); + } + + /// Add a flag, and optionally prepend the NVCC wrapper flag "-Xcompiler". + /// + /// Currently this is only used for compiling CUDA sources, since NVCC only + /// accepts a limited set of GNU-like flags, and the rest must be prefixed + /// with a "-Xcompiler" flag to get passed to the underlying C++ compiler. + fn push_cc_arg(&mut self, flag: OsString) { + if self.cuda { + self.args.push(self.family.nvcc_redirect_flag().into()); + } + self.args.push(flag); + } + + /// Converts this compiler into a `Command` that's ready to be run. + /// + /// This is useful for when the compiler needs to be executed and the + /// command returned will already have the initial arguments and environment + /// variables configured. + pub fn to_command(&self) -> Command { + let mut cmd = match self.cc_wrapper_path { + Some(ref cc_wrapper_path) => { + let mut cmd = Command::new(&cc_wrapper_path); + cmd.arg(&self.path); + cmd + } + None => Command::new(&self.path), + }; + cmd.args(&self.cc_wrapper_args); + + let value = self.args.iter().filter(|a| !self.removed_args.contains(a)).collect::>(); + cmd.args(&value); + + for &(ref k, ref v) in self.env.iter() { + cmd.env(k, v); + } + cmd + } + + /// Returns the path for this compiler. + /// + /// Note that this may not be a path to a file on the filesystem, e.g. "cc", + /// but rather something which will be resolved when a process is spawned. + pub fn path(&self) -> &Path { + &self.path + } + + /// Returns the default set of arguments to the compiler needed to produce + /// executables for the target this compiler generates. + pub fn args(&self) -> &[OsString] { + &self.args + } + + /// Returns the set of environment variables needed for this compiler to + /// operate. + /// + /// This is typically only used for MSVC compilers currently. + pub fn env(&self) -> &[(OsString, OsString)] { + &self.env + } + + /// Returns the compiler command in format of CC environment variable. 
+ /// Or empty string if CC env was not present + /// + /// This is typically used by configure script + pub fn cc_env(&self) -> OsString { + match self.cc_wrapper_path { + Some(ref cc_wrapper_path) => { + let mut cc_env = cc_wrapper_path.as_os_str().to_owned(); + cc_env.push(" "); + cc_env.push(self.path.to_path_buf().into_os_string()); + for arg in self.cc_wrapper_args.iter() { + cc_env.push(" "); + cc_env.push(arg); + } + cc_env + } + None => OsString::from(""), + } + } + + /// Returns the compiler flags in format of CFLAGS environment variable. + /// Important here - this will not be CFLAGS from env, its internal gcc's flags to use as CFLAGS + /// This is typically used by configure script + pub fn cflags_env(&self) -> OsString { + let mut flags = OsString::new(); + for (i, arg) in self.args.iter().enumerate() { + if i > 0 { + flags.push(" "); + } + flags.push(arg); + } + flags + } + + /// Whether the tool is GNU Compiler Collection-like. + pub fn is_like_gnu(&self) -> bool { + self.family == ToolFamily::Gnu + } + + /// Whether the tool is Clang-like. + pub fn is_like_clang(&self) -> bool { + self.family == ToolFamily::Clang + } + + /// Whether the tool is MSVC-like. + pub fn is_like_msvc(&self) -> bool { + match self.family { + ToolFamily::Msvc { .. } => true, + _ => false, + } + } +} + +fn run(cmd: &mut Command, program: &str) -> Result<(), Error> { + let (mut child, print) = spawn(cmd, program)?; + let status = match child.wait() { + Ok(s) => s, + Err(_) => { + return Err(Error::new( + ErrorKind::ToolExecError, + &format!( + "Failed to wait on spawned child process, command {:?} with args {:?}.", + cmd, program + ), + )) + } + }; + print.join().unwrap(); + println!("{}", status); + + if status.success() { + Ok(()) + } else { + Err(Error::new( + ErrorKind::ToolExecError, + &format!( + "Command {:?} with args {:?} did not execute successfully (status code {}).", + cmd, program, status + ), + )) + } +} + +fn run_output(cmd: &mut Command, program: &str) -> Result, Error> { + cmd.stdout(Stdio::piped()); + let (mut child, print) = spawn(cmd, program)?; + let mut stdout = vec![]; + child + .stdout + .take() + .unwrap() + .read_to_end(&mut stdout) + .unwrap(); + let status = match child.wait() { + Ok(s) => s, + Err(_) => { + return Err(Error::new( + ErrorKind::ToolExecError, + &format!( + "Failed to wait on spawned child process, command {:?} with args {:?}.", + cmd, program + ), + )) + } + }; + print.join().unwrap(); + println!("{}", status); + + if status.success() { + Ok(stdout) + } else { + Err(Error::new( + ErrorKind::ToolExecError, + &format!( + "Command {:?} with args {:?} did not execute successfully (status code {}).", + cmd, program, status + ), + )) + } +} + +fn spawn(cmd: &mut Command, program: &str) -> Result<(Child, JoinHandle<()>), Error> { + println!("running: {:?}", cmd); + + // Capture the standard error coming from these programs, and write it out + // with cargo:warning= prefixes. Note that this is a bit wonky to avoid + // requiring the output to be UTF-8, we instead just ship bytes from one + // location to another. 
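The comment above explains that the child's stderr is captured and re-emitted as `cargo:warning=` lines, shipping raw bytes so the output does not have to be UTF-8. A self-contained sketch of that pattern, assuming `cc --version` only as a placeholder command:

```rust
use std::io::{BufRead, BufReader, Write};
use std::process::{Command, Stdio};
use std::thread;

// Minimal sketch of the stderr-forwarding pattern used by `spawn` below/above:
// every line the child writes to stderr is re-emitted as a `cargo:warning=`
// line so Cargo surfaces it during the build.
fn main() -> std::io::Result<()> {
    let mut child = Command::new("cc")
        .arg("--version")
        .stderr(Stdio::piped())
        .spawn()?;

    let stderr = BufReader::new(child.stderr.take().unwrap());
    let printer = thread::spawn(move || {
        // Split on raw b'\n' so non-UTF-8 output passes through untouched.
        for line in stderr.split(b'\n').filter_map(|l| l.ok()) {
            print!("cargo:warning=");
            std::io::stdout().write_all(&line).unwrap();
            println!();
        }
    });

    let status = child.wait()?;
    printer.join().unwrap();
    assert!(status.success());
    Ok(())
}
```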
+ match cmd.stderr(Stdio::piped()).spawn() { + Ok(mut child) => { + let stderr = BufReader::new(child.stderr.take().unwrap()); + let print = thread::spawn(move || { + for line in stderr.split(b'\n').filter_map(|l| l.ok()) { + print!("cargo:warning="); + std::io::stdout().write_all(&line).unwrap(); + println!(""); + } + }); + Ok((child, print)) + } + Err(ref e) if e.kind() == io::ErrorKind::NotFound => { + let extra = if cfg!(windows) { + " (see https://github.com/alexcrichton/cc-rs#compile-time-requirements \ + for help)" + } else { + "" + }; + Err(Error::new( + ErrorKind::ToolNotFound, + &format!("Failed to find tool. Is `{}` installed?{}", program, extra), + )) + } + Err(_) => Err(Error::new( + ErrorKind::ToolExecError, + &format!("Command {:?} with args {:?} failed to start.", cmd, program), + )), + } +} + +fn fail(s: &str) -> ! { + panic!("\n\nInternal error occurred: {}\n\n", s) +} + +fn command_add_output_file(cmd: &mut Command, dst: &Path, msvc: bool, is_asm: bool, is_arm: bool) { + if msvc && is_asm && is_arm { + cmd.arg("-o").arg(&dst); + } else if msvc && is_asm { + cmd.arg("/Fo").arg(dst); + } else if msvc { + let mut s = OsString::from("/Fo"); + s.push(&dst); + cmd.arg(s); + } else { + cmd.arg("-o").arg(&dst); + } +} diff --git a/cc/src/registry.rs b/cc/src/registry.rs new file mode 100644 index 000000000..2ac2fa63b --- /dev/null +++ b/cc/src/registry.rs @@ -0,0 +1,204 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use std::ffi::{OsStr, OsString}; +use std::io; +use std::ops::RangeFrom; +use std::os::raw; +use std::os::windows::prelude::*; + +pub struct RegistryKey(Repr); + +type HKEY = *mut u8; +type DWORD = u32; +type LPDWORD = *mut DWORD; +type LPCWSTR = *const u16; +type LPWSTR = *mut u16; +type LONG = raw::c_long; +type PHKEY = *mut HKEY; +type PFILETIME = *mut u8; +type LPBYTE = *mut u8; +type REGSAM = u32; + +const ERROR_SUCCESS: DWORD = 0; +const ERROR_NO_MORE_ITEMS: DWORD = 259; +const HKEY_LOCAL_MACHINE: HKEY = 0x80000002 as HKEY; +const REG_SZ: DWORD = 1; +const KEY_READ: DWORD = 0x20019; +const KEY_WOW64_32KEY: DWORD = 0x200; + +#[link(name = "advapi32")] +extern "system" { + fn RegOpenKeyExW( + key: HKEY, + lpSubKey: LPCWSTR, + ulOptions: DWORD, + samDesired: REGSAM, + phkResult: PHKEY, + ) -> LONG; + fn RegEnumKeyExW( + key: HKEY, + dwIndex: DWORD, + lpName: LPWSTR, + lpcName: LPDWORD, + lpReserved: LPDWORD, + lpClass: LPWSTR, + lpcClass: LPDWORD, + lpftLastWriteTime: PFILETIME, + ) -> LONG; + fn RegQueryValueExW( + hKey: HKEY, + lpValueName: LPCWSTR, + lpReserved: LPDWORD, + lpType: LPDWORD, + lpData: LPBYTE, + lpcbData: LPDWORD, + ) -> LONG; + fn RegCloseKey(hKey: HKEY) -> LONG; +} + +struct OwnedKey(HKEY); + +enum Repr { + Const(HKEY), + Owned(OwnedKey), +} + +pub struct Iter<'a> { + idx: RangeFrom, + key: &'a RegistryKey, +} + +unsafe impl Sync for Repr {} +unsafe impl Send for Repr {} + +pub static LOCAL_MACHINE: RegistryKey = RegistryKey(Repr::Const(HKEY_LOCAL_MACHINE)); + +impl RegistryKey { + fn raw(&self) -> HKEY { + match self.0 { + Repr::Const(val) => val, + Repr::Owned(ref val) => val.0, + } + } + + pub fn open(&self, key: &OsStr) -> io::Result { + let key = key.encode_wide().chain(Some(0)).collect::>(); + let mut ret = 0 as *mut _; + let err 
= unsafe { + RegOpenKeyExW( + self.raw(), + key.as_ptr(), + 0, + KEY_READ | KEY_WOW64_32KEY, + &mut ret, + ) + }; + if err == ERROR_SUCCESS as LONG { + Ok(RegistryKey(Repr::Owned(OwnedKey(ret)))) + } else { + Err(io::Error::from_raw_os_error(err as i32)) + } + } + + pub fn iter(&self) -> Iter { + Iter { + idx: 0.., + key: self, + } + } + + pub fn query_str(&self, name: &str) -> io::Result { + let name: &OsStr = name.as_ref(); + let name = name.encode_wide().chain(Some(0)).collect::>(); + let mut len = 0; + let mut kind = 0; + unsafe { + let err = RegQueryValueExW( + self.raw(), + name.as_ptr(), + 0 as *mut _, + &mut kind, + 0 as *mut _, + &mut len, + ); + if err != ERROR_SUCCESS as LONG { + return Err(io::Error::from_raw_os_error(err as i32)); + } + if kind != REG_SZ { + return Err(io::Error::new( + io::ErrorKind::Other, + "registry key wasn't a string", + )); + } + + // The length here is the length in bytes, but we're using wide + // characters so we need to be sure to halve it for the capacity + // passed in. + let mut v = Vec::with_capacity(len as usize / 2); + let err = RegQueryValueExW( + self.raw(), + name.as_ptr(), + 0 as *mut _, + 0 as *mut _, + v.as_mut_ptr() as *mut _, + &mut len, + ); + if err != ERROR_SUCCESS as LONG { + return Err(io::Error::from_raw_os_error(err as i32)); + } + v.set_len(len as usize / 2); + + // Some registry keys may have a terminating nul character, but + // we're not interested in that, so chop it off if it's there. + if v[v.len() - 1] == 0 { + v.pop(); + } + Ok(OsString::from_wide(&v)) + } + } +} + +impl Drop for OwnedKey { + fn drop(&mut self) { + unsafe { + RegCloseKey(self.0); + } + } +} + +impl<'a> Iterator for Iter<'a> { + type Item = io::Result; + + fn next(&mut self) -> Option> { + self.idx.next().and_then(|i| unsafe { + let mut v = Vec::with_capacity(256); + let mut len = v.capacity() as DWORD; + let ret = RegEnumKeyExW( + self.key.raw(), + i, + v.as_mut_ptr(), + &mut len, + 0 as *mut _, + 0 as *mut _, + 0 as *mut _, + 0 as *mut _, + ); + if ret == ERROR_NO_MORE_ITEMS as LONG { + None + } else if ret != ERROR_SUCCESS as LONG { + Some(Err(io::Error::from_raw_os_error(ret as i32))) + } else { + v.set_len(len as usize); + Some(Ok(OsString::from_wide(&v))) + } + }) + } +} diff --git a/cc/src/setup_config.rs b/cc/src/setup_config.rs new file mode 100644 index 000000000..7c03d388f --- /dev/null +++ b/cc/src/setup_config.rs @@ -0,0 +1,283 @@ +// Copyright © 2017 winapi-rs developers +// Licensed under the Apache License, Version 2.0 +// or the MIT license +// , at your option. +// All files in the project carrying such notice may not be copied, modified, or distributed +// except according to those terms. 
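A Windows-only usage sketch of the `RegistryKey` wrapper from `cc/src/registry.rs` above: open a subkey of `LOCAL_MACHINE`, read a `REG_SZ` value, and enumerate child keys. The registry path and value name are examples only, and the snippet assumes it lives next to (or imports) that module:

```rust
use std::ffi::OsStr;

// Assumes the `RegistryKey` / `LOCAL_MACHINE` items defined above are in scope.
#[cfg(windows)]
fn dump_installed_roots() -> std::io::Result<()> {
    let key = LOCAL_MACHINE.open(OsStr::new(
        r"SOFTWARE\Microsoft\Windows Kits\Installed Roots",
    ))?;

    // REG_SZ values come back as OsString; query_str rejects other value kinds.
    let root = key.query_str("KitsRoot10")?;
    println!("KitsRoot10 = {:?}", root);

    // Each child key name is yielded as io::Result<OsString>.
    for subkey in key.iter() {
        println!("subkey: {:?}", subkey?);
    }
    Ok(())
}
```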
+ +#![allow(bad_style)] +#![allow(unused)] + +use std::ffi::OsString; +use std::ptr::null_mut; +use winapi::Interface; +use winapi::{LPFILETIME, ULONG}; +use winapi::S_FALSE; +use winapi::BSTR; +use winapi::LPCOLESTR; +use winapi::{CoCreateInstance, CLSCTX_ALL}; +use winapi::LPSAFEARRAY; +use winapi::{IUnknown, IUnknownVtbl}; +use winapi::{HRESULT, LCID, LPCWSTR, PULONGLONG}; + +use com::{BStr, ComPtr}; + +// Bindings to the Setup.Configuration stuff +pub type InstanceState = u32; + +pub const eNone: InstanceState = 0; +pub const eLocal: InstanceState = 1; +pub const eRegistered: InstanceState = 2; +pub const eNoRebootRequired: InstanceState = 4; +pub const eComplete: InstanceState = -1i32 as u32; + +RIDL!{#[uuid(0xb41463c3, 0x8866, 0x43b5, 0xbc, 0x33, 0x2b, 0x06, 0x76, 0xf7, 0xf4, 0x2e)] +interface ISetupInstance(ISetupInstanceVtbl): IUnknown(IUnknownVtbl) { + fn GetInstanceId( + pbstrInstanceId: *mut BSTR, + ) -> HRESULT, + fn GetInstallDate( + pInstallDate: LPFILETIME, + ) -> HRESULT, + fn GetInstallationName( + pbstrInstallationName: *mut BSTR, + ) -> HRESULT, + fn GetInstallationPath( + pbstrInstallationPath: *mut BSTR, + ) -> HRESULT, + fn GetInstallationVersion( + pbstrInstallationVersion: *mut BSTR, + ) -> HRESULT, + fn GetDisplayName( + lcid: LCID, + pbstrDisplayName: *mut BSTR, + ) -> HRESULT, + fn GetDescription( + lcid: LCID, + pbstrDescription: *mut BSTR, + ) -> HRESULT, + fn ResolvePath( + pwszRelativePath: LPCOLESTR, + pbstrAbsolutePath: *mut BSTR, + ) -> HRESULT, +}} + +RIDL!{#[uuid(0x89143c9a, 0x05af, 0x49b0, 0xb7, 0x17, 0x72, 0xe2, 0x18, 0xa2, 0x18, 0x5c)] +interface ISetupInstance2(ISetupInstance2Vtbl): ISetupInstance(ISetupInstanceVtbl) { + fn GetState( + pState: *mut InstanceState, + ) -> HRESULT, + fn GetPackages( + ppsaPackages: *mut LPSAFEARRAY, + ) -> HRESULT, + fn GetProduct( + ppPackage: *mut *mut ISetupPackageReference, + ) -> HRESULT, + fn GetProductPath( + pbstrProductPath: *mut BSTR, + ) -> HRESULT, +}} + +RIDL!{#[uuid(0x6380bcff, 0x41d3, 0x4b2e, 0x8b, 0x2e, 0xbf, 0x8a, 0x68, 0x10, 0xc8, 0x48)] +interface IEnumSetupInstances(IEnumSetupInstancesVtbl): IUnknown(IUnknownVtbl) { + fn Next( + celt: ULONG, + rgelt: *mut *mut ISetupInstance, + pceltFetched: *mut ULONG, + ) -> HRESULT, + fn Skip( + celt: ULONG, + ) -> HRESULT, + fn Reset() -> HRESULT, + fn Clone( + ppenum: *mut *mut IEnumSetupInstances, + ) -> HRESULT, +}} + +RIDL!{#[uuid(0x42843719, 0xdb4c, 0x46c2, 0x8e, 0x7c, 0x64, 0xf1, 0x81, 0x6e, 0xfd, 0x5b)] +interface ISetupConfiguration(ISetupConfigurationVtbl): IUnknown(IUnknownVtbl) { + fn EnumInstances( + ppEnumInstances: *mut *mut IEnumSetupInstances, + ) -> HRESULT, + fn GetInstanceForCurrentProcess( + ppInstance: *mut *mut ISetupInstance, + ) -> HRESULT, + fn GetInstanceForPath( + wzPath: LPCWSTR, + ppInstance: *mut *mut ISetupInstance, + ) -> HRESULT, +}} + +RIDL!{#[uuid(0x26aab78c, 0x4a60, 0x49d6, 0xaf, 0x3b, 0x3c, 0x35, 0xbc, 0x93, 0x36, 0x5d)] +interface ISetupConfiguration2(ISetupConfiguration2Vtbl): + ISetupConfiguration(ISetupConfigurationVtbl) { + fn EnumAllInstances( + ppEnumInstances: *mut *mut IEnumSetupInstances, + ) -> HRESULT, +}} + +RIDL!{#[uuid(0xda8d8a16, 0xb2b6, 0x4487, 0xa2, 0xf1, 0x59, 0x4c, 0xcc, 0xcd, 0x6b, 0xf5)] +interface ISetupPackageReference(ISetupPackageReferenceVtbl): IUnknown(IUnknownVtbl) { + fn GetId( + pbstrId: *mut BSTR, + ) -> HRESULT, + fn GetVersion( + pbstrVersion: *mut BSTR, + ) -> HRESULT, + fn GetChip( + pbstrChip: *mut BSTR, + ) -> HRESULT, + fn GetLanguage( + pbstrLanguage: *mut BSTR, + ) -> HRESULT, + fn 
GetBranch( + pbstrBranch: *mut BSTR, + ) -> HRESULT, + fn GetType( + pbstrType: *mut BSTR, + ) -> HRESULT, + fn GetUniqueId( + pbstrUniqueId: *mut BSTR, + ) -> HRESULT, +}} + +RIDL!{#[uuid(0x42b21b78, 0x6192, 0x463e, 0x87, 0xbf, 0xd5, 0x77, 0x83, 0x8f, 0x1d, 0x5c)] +interface ISetupHelper(ISetupHelperVtbl): IUnknown(IUnknownVtbl) { + fn ParseVersion( + pwszVersion: LPCOLESTR, + pullVersion: PULONGLONG, + ) -> HRESULT, + fn ParseVersionRange( + pwszVersionRange: LPCOLESTR, + pullMinVersion: PULONGLONG, + pullMaxVersion: PULONGLONG, + ) -> HRESULT, +}} + +DEFINE_GUID!{CLSID_SetupConfiguration, +0x177f0c4a, 0x1cd3, 0x4de7, 0xa3, 0x2c, 0x71, 0xdb, 0xbb, 0x9f, 0xa3, 0x6d} + +// Safe wrapper around the COM interfaces +pub struct SetupConfiguration(ComPtr); + +impl SetupConfiguration { + pub fn new() -> Result { + let mut obj = null_mut(); + let err = unsafe { + CoCreateInstance( + &CLSID_SetupConfiguration, + null_mut(), + CLSCTX_ALL, + &ISetupConfiguration::uuidof(), + &mut obj, + ) + }; + if err < 0 { + return Err(err); + } + let obj = unsafe { ComPtr::from_raw(obj as *mut ISetupConfiguration) }; + Ok(SetupConfiguration(obj)) + } + pub fn get_instance_for_current_process(&self) -> Result { + let mut obj = null_mut(); + let err = unsafe { self.0.GetInstanceForCurrentProcess(&mut obj) }; + if err < 0 { + return Err(err); + } + Ok(unsafe { SetupInstance::from_raw(obj) }) + } + pub fn enum_instances(&self) -> Result { + let mut obj = null_mut(); + let err = unsafe { self.0.EnumInstances(&mut obj) }; + if err < 0 { + return Err(err); + } + Ok(unsafe { EnumSetupInstances::from_raw(obj) }) + } + pub fn enum_all_instances(&self) -> Result { + let mut obj = null_mut(); + let this = try!(self.0.cast::()); + let err = unsafe { this.EnumAllInstances(&mut obj) }; + if err < 0 { + return Err(err); + } + Ok(unsafe { EnumSetupInstances::from_raw(obj) }) + } +} + +pub struct SetupInstance(ComPtr); + +impl SetupInstance { + pub unsafe fn from_raw(obj: *mut ISetupInstance) -> SetupInstance { + SetupInstance(ComPtr::from_raw(obj)) + } + pub fn instance_id(&self) -> Result { + let mut s = null_mut(); + let err = unsafe { self.0.GetInstanceId(&mut s) }; + let bstr = unsafe { BStr::from_raw(s) }; + if err < 0 { + return Err(err); + } + Ok(bstr.to_osstring()) + } + pub fn installation_name(&self) -> Result { + let mut s = null_mut(); + let err = unsafe { self.0.GetInstallationName(&mut s) }; + let bstr = unsafe { BStr::from_raw(s) }; + if err < 0 { + return Err(err); + } + Ok(bstr.to_osstring()) + } + pub fn installation_path(&self) -> Result { + let mut s = null_mut(); + let err = unsafe { self.0.GetInstallationPath(&mut s) }; + let bstr = unsafe { BStr::from_raw(s) }; + if err < 0 { + return Err(err); + } + Ok(bstr.to_osstring()) + } + pub fn installation_version(&self) -> Result { + let mut s = null_mut(); + let err = unsafe { self.0.GetInstallationVersion(&mut s) }; + let bstr = unsafe { BStr::from_raw(s) }; + if err < 0 { + return Err(err); + } + Ok(bstr.to_osstring()) + } + pub fn product_path(&self) -> Result { + let mut s = null_mut(); + let this = try!(self.0.cast::()); + let err = unsafe { this.GetProductPath(&mut s) }; + let bstr = unsafe { BStr::from_raw(s) }; + if err < 0 { + return Err(err); + } + Ok(bstr.to_osstring()) + } +} + +pub struct EnumSetupInstances(ComPtr); + +impl EnumSetupInstances { + pub unsafe fn from_raw(obj: *mut IEnumSetupInstances) -> EnumSetupInstances { + EnumSetupInstances(ComPtr::from_raw(obj)) + } +} + +impl Iterator for EnumSetupInstances { + type Item = Result; + fn next(&mut 
self) -> Option> { + let mut obj = null_mut(); + let err = unsafe { self.0.Next(1, &mut obj, null_mut()) }; + if err < 0 { + return Some(Err(err)); + } + if err == S_FALSE { + return None; + } + Some(Ok(unsafe { SetupInstance::from_raw(obj) })) + } +} diff --git a/cc/src/winapi.rs b/cc/src/winapi.rs new file mode 100644 index 000000000..cc83963b6 --- /dev/null +++ b/cc/src/winapi.rs @@ -0,0 +1,218 @@ +// Copyright © 2015-2017 winapi-rs developers +// Licensed under the Apache License, Version 2.0 +// or the MIT license +// , at your option. +// All files in the project carrying such notice may not be copied, modified, or distributed +// except according to those terms. + +#![allow(bad_style)] + +use std::os::raw; + +pub type wchar_t = u16; + +pub type UINT = raw::c_uint; +pub type LPUNKNOWN = *mut IUnknown; +pub type REFIID = *const IID; +pub type IID = GUID; +pub type REFCLSID = *const IID; +pub type PVOID = *mut raw::c_void; +pub type USHORT = raw::c_ushort; +pub type ULONG = raw::c_ulong; +pub type LONG = raw::c_long; +pub type DWORD = u32; +pub type LPVOID = *mut raw::c_void; +pub type HRESULT = raw::c_long; +pub type LPFILETIME = *mut FILETIME; +pub type BSTR = *mut OLECHAR; +pub type OLECHAR = WCHAR; +pub type WCHAR = wchar_t; +pub type LPCOLESTR = *const OLECHAR; +pub type LCID = DWORD; +pub type LPCWSTR = *const WCHAR; +pub type PULONGLONG = *mut ULONGLONG; +pub type ULONGLONG = u64; + +pub const S_OK: HRESULT = 0; +pub const S_FALSE: HRESULT = 1; +pub const COINIT_MULTITHREADED: u32 = 0x0; + +pub type CLSCTX = u32; + +pub const CLSCTX_INPROC_SERVER: CLSCTX = 0x1; +pub const CLSCTX_INPROC_HANDLER: CLSCTX = 0x2; +pub const CLSCTX_LOCAL_SERVER: CLSCTX = 0x4; +pub const CLSCTX_REMOTE_SERVER: CLSCTX = 0x10; + +pub const CLSCTX_ALL: CLSCTX = + CLSCTX_INPROC_SERVER | CLSCTX_INPROC_HANDLER | CLSCTX_LOCAL_SERVER | CLSCTX_REMOTE_SERVER; + +#[repr(C)] +#[derive(Copy, Clone)] +pub struct GUID { + pub Data1: raw::c_ulong, + pub Data2: raw::c_ushort, + pub Data3: raw::c_ushort, + pub Data4: [raw::c_uchar; 8], +} + +#[repr(C)] +#[derive(Copy, Clone)] +pub struct FILETIME { + pub dwLowDateTime: DWORD, + pub dwHighDateTime: DWORD, +} + +pub trait Interface { + fn uuidof() -> GUID; +} + +#[link(name = "ole32")] +#[link(name = "oleaut32")] +extern "C" {} + +extern "system" { + pub fn CoInitializeEx(pvReserved: LPVOID, dwCoInit: DWORD) -> HRESULT; + pub fn CoCreateInstance( + rclsid: REFCLSID, + pUnkOuter: LPUNKNOWN, + dwClsContext: DWORD, + riid: REFIID, + ppv: *mut LPVOID, + ) -> HRESULT; + pub fn SysFreeString(bstrString: BSTR); + pub fn SysStringLen(pbstr: BSTR) -> UINT; +} + +#[repr(C)] +#[derive(Copy, Clone)] +pub struct SAFEARRAYBOUND { + pub cElements: ULONG, + pub lLbound: LONG, +} + +#[repr(C)] +#[derive(Copy, Clone)] +pub struct SAFEARRAY { + pub cDims: USHORT, + pub fFeatures: USHORT, + pub cbElements: ULONG, + pub cLocks: ULONG, + pub pvData: PVOID, + pub rgsabound: [SAFEARRAYBOUND; 1], +} + +pub type LPSAFEARRAY = *mut SAFEARRAY; + +macro_rules! DEFINE_GUID { + ( + $name:ident, $l:expr, $w1:expr, $w2:expr, + $b1:expr, $b2:expr, $b3:expr, $b4:expr, $b5:expr, $b6:expr, $b7:expr, $b8:expr + ) => { + pub const $name: $crate::winapi::GUID = $crate::winapi::GUID { + Data1: $l, + Data2: $w1, + Data3: $w2, + Data4: [$b1, $b2, $b3, $b4, $b5, $b6, $b7, $b8], + }; + } +} + +macro_rules! 
RIDL { + (#[uuid($($uuid:expr),+)] + interface $interface:ident ($vtbl:ident) {$( + fn $method:ident($($p:ident : $t:ty,)*) -> $rtr:ty, + )+}) => ( + #[repr(C)] + pub struct $vtbl { + $(pub $method: unsafe extern "system" fn( + This: *mut $interface, + $($p: $t),* + ) -> $rtr,)+ + } + #[repr(C)] + pub struct $interface { + pub lpVtbl: *const $vtbl, + } + RIDL!{@impl $interface {$(fn $method($($p: $t,)*) -> $rtr,)+}} + RIDL!{@uuid $interface $($uuid),+} + ); + (#[uuid($($uuid:expr),+)] + interface $interface:ident ($vtbl:ident) : $pinterface:ident ($pvtbl:ident) { + }) => ( + #[repr(C)] + pub struct $vtbl { + pub parent: $pvtbl, + } + #[repr(C)] + pub struct $interface { + pub lpVtbl: *const $vtbl, + } + RIDL!{@deref $interface $pinterface} + RIDL!{@uuid $interface $($uuid),+} + ); + (#[uuid($($uuid:expr),+)] + interface $interface:ident ($vtbl:ident) : $pinterface:ident ($pvtbl:ident) {$( + fn $method:ident($($p:ident : $t:ty,)*) -> $rtr:ty, + )+}) => ( + #[repr(C)] + pub struct $vtbl { + pub parent: $pvtbl, + $(pub $method: unsafe extern "system" fn( + This: *mut $interface, + $($p: $t,)* + ) -> $rtr,)+ + } + #[repr(C)] + pub struct $interface { + pub lpVtbl: *const $vtbl, + } + RIDL!{@impl $interface {$(fn $method($($p: $t,)*) -> $rtr,)+}} + RIDL!{@deref $interface $pinterface} + RIDL!{@uuid $interface $($uuid),+} + ); + (@deref $interface:ident $pinterface:ident) => ( + impl ::std::ops::Deref for $interface { + type Target = $pinterface; + #[inline] + fn deref(&self) -> &$pinterface { + unsafe { &*(self as *const $interface as *const $pinterface) } + } + } + ); + (@impl $interface:ident {$( + fn $method:ident($($p:ident : $t:ty,)*) -> $rtr:ty, + )+}) => ( + impl $interface { + $(#[inline] pub unsafe fn $method(&self, $($p: $t,)*) -> $rtr { + ((*self.lpVtbl).$method)(self as *const _ as *mut _, $($p,)*) + })+ + } + ); + (@uuid $interface:ident + $l:expr, $w1:expr, $w2:expr, + $b1:expr, $b2:expr, $b3:expr, $b4:expr, $b5:expr, $b6:expr, $b7:expr, $b8:expr + ) => ( + impl $crate::winapi::Interface for $interface { + #[inline] + fn uuidof() -> $crate::winapi::GUID { + $crate::winapi::GUID { + Data1: $l, + Data2: $w1, + Data3: $w2, + Data4: [$b1, $b2, $b3, $b4, $b5, $b6, $b7, $b8], + } + } + } + ); +} + +RIDL!{#[uuid(0x00000000, 0x0000, 0x0000, 0xc0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x46)] +interface IUnknown(IUnknownVtbl) { + fn QueryInterface( + riid: REFIID, + ppvObject: *mut *mut raw::c_void, + ) -> HRESULT, + fn AddRef() -> ULONG, + fn Release() -> ULONG, +}} diff --git a/cc/src/windows_registry.rs b/cc/src/windows_registry.rs new file mode 100644 index 000000000..bbcbb09aa --- /dev/null +++ b/cc/src/windows_registry.rs @@ -0,0 +1,696 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! A helper module to probe the Windows Registry when looking for +//! windows-specific tools. + +use std::process::Command; + +use Tool; + +#[cfg(windows)] +macro_rules! otry { + ($expr:expr) => (match $expr { + Some(val) => val, + None => return None, + }) +} + +/// Attempts to find a tool within an MSVC installation using the Windows +/// registry as a point to search from. +/// +/// The `target` argument is the target that the tool should work for (e.g. 
+/// compile or link for) and the `tool` argument is the tool to find (e.g. +/// `cl.exe` or `link.exe`). +/// +/// This function will return `None` if the tool could not be found, or it will +/// return `Some(cmd)` which represents a command that's ready to execute the +/// tool with the appropriate environment variables set. +/// +/// Note that this function always returns `None` for non-MSVC targets. +pub fn find(target: &str, tool: &str) -> Option { + find_tool(target, tool).map(|c| c.to_command()) +} + +/// Similar to the `find` function above, this function will attempt the same +/// operation (finding a MSVC tool in a local install) but instead returns a +/// `Tool` which may be introspected. +#[cfg(not(windows))] +pub fn find_tool(_target: &str, _tool: &str) -> Option { + None +} + +/// Documented above. +#[cfg(windows)] +pub fn find_tool(target: &str, tool: &str) -> Option { + use std::env; + + // This logic is all tailored for MSVC, if we're not that then bail out + // early. + if !target.contains("msvc") { + return None; + } + + // Looks like msbuild isn't located in the same location as other tools like + // cl.exe and lib.exe. To handle this we probe for it manually with + // dedicated registry keys. + if tool.contains("msbuild") { + return impl_::find_msbuild(target); + } + + if tool.contains("devenv") { + return impl_::find_devenv(target); + } + + // If VCINSTALLDIR is set, then someone's probably already run vcvars and we + // should just find whatever that indicates. + if env::var_os("VCINSTALLDIR").is_some() { + return env::var_os("PATH") + .and_then(|path| { + env::split_paths(&path) + .map(|p| p.join(tool)) + .find(|p| p.exists()) + }) + .map(|path| Tool::new(path.into())); + } + + // Ok, if we're here, now comes the fun part of the probing. Default shells + // or shells like MSYS aren't really configured to execute `cl.exe` and the + // various compiler tools shipped as part of Visual Studio. Here we try to + // first find the relevant tool, then we also have to be sure to fill in + // environment variables like `LIB`, `INCLUDE`, and `PATH` to ensure that + // the tool is actually usable. + + return impl_::find_msvc_15(tool, target) + .or_else(|| impl_::find_msvc_14(tool, target)) + .or_else(|| impl_::find_msvc_12(tool, target)) + .or_else(|| impl_::find_msvc_11(tool, target)); +} + +/// A version of Visual Studio +#[derive(Debug, PartialEq, Eq, Copy, Clone)] +pub enum VsVers { + /// Visual Studio 12 (2013) + Vs12, + /// Visual Studio 14 (2015) + Vs14, + /// Visual Studio 15 (2017) + Vs15, + + /// Hidden variant that should not be matched on. Callers that want to + /// handle an enumeration of `VsVers` instances should always have a default + /// case meaning that it's a VS version they don't understand. + #[doc(hidden)] + #[allow(bad_style)] + __Nonexhaustive_do_not_match_this_or_your_code_will_break, +} + +/// Find the most recent installed version of Visual Studio +/// +/// This is used by the cmake crate to figure out the correct +/// generator. +#[cfg(not(windows))] +pub fn find_vs_version() -> Result { + Err(format!("not windows")) +} + +/// Documented above +#[cfg(windows)] +pub fn find_vs_version() -> Result { + use std::env; + + match env::var("VisualStudioVersion") { + Ok(version) => match &version[..] 
{ + "15.0" => Ok(VsVers::Vs15), + "14.0" => Ok(VsVers::Vs14), + "12.0" => Ok(VsVers::Vs12), + vers => Err(format!( + "\n\n\ + unsupported or unknown VisualStudio version: {}\n\ + if another version is installed consider running \ + the appropriate vcvars script before building this \ + crate\n\ + ", + vers + )), + }, + _ => { + // Check for the presense of a specific registry key + // that indicates visual studio is installed. + if impl_::has_msbuild_version("15.0") { + Ok(VsVers::Vs15) + } else if impl_::has_msbuild_version("14.0") { + Ok(VsVers::Vs14) + } else if impl_::has_msbuild_version("12.0") { + Ok(VsVers::Vs12) + } else { + Err(format!( + "\n\n\ + couldn't determine visual studio generator\n\ + if VisualStudio is installed, however, consider \ + running the appropriate vcvars script before building \ + this crate\n\ + " + )) + } + } + } +} + +#[cfg(windows)] +mod impl_ { + use std::env; + use std::ffi::OsString; + use std::mem; + use std::path::{Path, PathBuf}; + use std::fs::File; + use std::io::Read; + use registry::{RegistryKey, LOCAL_MACHINE}; + use com; + use setup_config::{SetupConfiguration, SetupInstance}; + + use Tool; + + struct MsvcTool { + tool: PathBuf, + libs: Vec, + path: Vec, + include: Vec, + } + + impl MsvcTool { + fn new(tool: PathBuf) -> MsvcTool { + MsvcTool { + tool: tool, + libs: Vec::new(), + path: Vec::new(), + include: Vec::new(), + } + } + + fn into_tool(self) -> Tool { + let MsvcTool { + tool, + libs, + path, + include, + } = self; + let mut tool = Tool::new(tool.into()); + add_env(&mut tool, "LIB", libs); + add_env(&mut tool, "PATH", path); + add_env(&mut tool, "INCLUDE", include); + tool + } + } + + // In MSVC 15 (2017) MS once again changed the scheme for locating + // the tooling. Now we must go through some COM interfaces, which + // is super fun for Rust. + // + // Note that much of this logic can be found [online] wrt paths, COM, etc. 
+ // + // [online]: https://blogs.msdn.microsoft.com/vcblog/2017/03/06/finding-the-visual-c-compiler-tools-in-visual-studio-2017/ + pub fn find_msvc_15(tool: &str, target: &str) -> Option { + otry!(com::initialize().ok()); + + let config = otry!(SetupConfiguration::new().ok()); + let iter = otry!(config.enum_all_instances().ok()); + for instance in iter { + let instance = otry!(instance.ok()); + let tool = tool_from_vs15_instance(tool, target, &instance); + if tool.is_some() { + return tool; + } + } + + None + } + + fn tool_from_vs15_instance(tool: &str, target: &str, instance: &SetupInstance) -> Option { + let (bin_path, host_dylib_path, lib_path, include_path) = + otry!(vs15_vc_paths(target, instance)); + let tool_path = bin_path.join(tool); + if !tool_path.exists() { + return None; + }; + + let mut tool = MsvcTool::new(tool_path); + tool.path.push(host_dylib_path); + tool.libs.push(lib_path); + tool.include.push(include_path); + + if let Some((atl_lib_path, atl_include_path)) = atl_paths(target, &bin_path) { + tool.libs.push(atl_lib_path); + tool.include.push(atl_include_path); + } + + otry!(add_sdks(&mut tool, target)); + + Some(tool.into_tool()) + } + + fn vs15_vc_paths( + target: &str, + instance: &SetupInstance, + ) -> Option<(PathBuf, PathBuf, PathBuf, PathBuf)> { + let instance_path: PathBuf = otry!(instance.installation_path().ok()).into(); + let version_path = + instance_path.join(r"VC\Auxiliary\Build\Microsoft.VCToolsVersion.default.txt"); + let mut version_file = otry!(File::open(version_path).ok()); + let mut version = String::new(); + otry!(version_file.read_to_string(&mut version).ok()); + let version = version.trim(); + let host = match host_arch() { + X86 => "X86", + X86_64 => "X64", + _ => return None, + }; + let target = otry!(lib_subdir(target)); + // The directory layout here is MSVC/bin/Host$host/$target/ + let path = instance_path.join(r"VC\Tools\MSVC").join(version); + // This is the path to the toolchain for a particular target, running + // on a given host + let bin_path = path.join("bin") + .join(&format!("Host{}", host)) + .join(&target); + // But! we also need PATH to contain the target directory for the host + // architecture, because it contains dlls like mspdb140.dll compiled for + // the host architecture. + let host_dylib_path = path.join("bin") + .join(&format!("Host{}", host)) + .join(&host.to_lowercase()); + let lib_path = path.join("lib").join(&target); + let include_path = path.join("include"); + Some((bin_path, host_dylib_path, lib_path, include_path)) + } + + fn atl_paths(target: &str, path: &Path) -> Option<(PathBuf, PathBuf)> { + let atl_path = path.join("atlfmc"); + let sub = otry!(lib_subdir(target)); + if atl_path.exists() { + Some((atl_path.join("lib").join(sub), atl_path.join("include"))) + } else { + None + } + } + + // For MSVC 14 we need to find the Universal CRT as well as either + // the Windows 10 SDK or Windows 8.1 SDK. 
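`vs15_vc_paths` above derives the toolchain directories from the VS 2017 layout `VC\Tools\MSVC\<version>\bin\Host<host>\<target>`. A small sketch of just that path arithmetic, with the instance path, tools version, host, and target passed in as plain strings; the concrete values in `main` are made up for illustration:

```rust
use std::path::{Path, PathBuf};

// Sketch of the VS 2017 directory layout used above:
//   <instance>\VC\Tools\MSVC\<version>\bin\Host<HOST>\<target>  (tool binaries)
//   <instance>\VC\Tools\MSVC\<version>\bin\Host<HOST>\<host>    (host DLLs, e.g. mspdb140.dll)
//   <instance>\VC\Tools\MSVC\<version>\lib\<target>             (libraries)
//   <instance>\VC\Tools\MSVC\<version>\include                  (headers)
fn vs2017_paths(
    instance: &Path,
    version: &str,
    host: &str,   // "X86" or "X64"
    target: &str, // "x86", "x64", "arm", "arm64"
) -> (PathBuf, PathBuf, PathBuf, PathBuf) {
    let base = instance.join(r"VC\Tools\MSVC").join(version);
    let bin = base.join("bin").join(format!("Host{}", host)).join(target);
    let host_dylib = base
        .join("bin")
        .join(format!("Host{}", host))
        .join(host.to_lowercase());
    let lib = base.join("lib").join(target);
    let include = base.join("include");
    (bin, host_dylib, lib, include)
}

fn main() {
    // Hypothetical instance path and tools version, for illustration only.
    let (bin, _dylib, lib, include) = vs2017_paths(
        Path::new(r"C:\Program Files (x86)\Microsoft Visual Studio\2017\Community"),
        "14.16.27023",
        "X64",
        "x64",
    );
    println!("cl.exe dir: {}", bin.display());
    println!("lib dir:    {}", lib.display());
    println!("includes:   {}", include.display());
}
```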
+ pub fn find_msvc_14(tool: &str, target: &str) -> Option { + let vcdir = otry!(get_vc_dir("14.0")); + let mut tool = otry!(get_tool(tool, &vcdir, target)); + otry!(add_sdks(&mut tool, target)); + Some(tool.into_tool()) + } + + fn add_sdks(tool: &mut MsvcTool, target: &str) -> Option<()> { + let sub = otry!(lib_subdir(target)); + let (ucrt, ucrt_version) = otry!(get_ucrt_dir()); + + tool.path + .push(ucrt.join("bin").join(&ucrt_version).join(sub)); + + let ucrt_include = ucrt.join("include").join(&ucrt_version); + tool.include.push(ucrt_include.join("ucrt")); + + let ucrt_lib = ucrt.join("lib").join(&ucrt_version); + tool.libs.push(ucrt_lib.join("ucrt").join(sub)); + + if let Some((sdk, version)) = get_sdk10_dir() { + tool.path.push(sdk.join("bin").join(sub)); + let sdk_lib = sdk.join("lib").join(&version); + tool.libs.push(sdk_lib.join("um").join(sub)); + let sdk_include = sdk.join("include").join(&version); + tool.include.push(sdk_include.join("um")); + tool.include.push(sdk_include.join("cppwinrt")); + tool.include.push(sdk_include.join("winrt")); + tool.include.push(sdk_include.join("shared")); + } else if let Some(sdk) = get_sdk81_dir() { + tool.path.push(sdk.join("bin").join(sub)); + let sdk_lib = sdk.join("lib").join("winv6.3"); + tool.libs.push(sdk_lib.join("um").join(sub)); + let sdk_include = sdk.join("include"); + tool.include.push(sdk_include.join("um")); + tool.include.push(sdk_include.join("winrt")); + tool.include.push(sdk_include.join("shared")); + } + + Some(()) + } + + // For MSVC 12 we need to find the Windows 8.1 SDK. + pub fn find_msvc_12(tool: &str, target: &str) -> Option { + let vcdir = otry!(get_vc_dir("12.0")); + let mut tool = otry!(get_tool(tool, &vcdir, target)); + let sub = otry!(lib_subdir(target)); + let sdk81 = otry!(get_sdk81_dir()); + tool.path.push(sdk81.join("bin").join(sub)); + let sdk_lib = sdk81.join("lib").join("winv6.3"); + tool.libs.push(sdk_lib.join("um").join(sub)); + let sdk_include = sdk81.join("include"); + tool.include.push(sdk_include.join("shared")); + tool.include.push(sdk_include.join("um")); + tool.include.push(sdk_include.join("winrt")); + Some(tool.into_tool()) + } + + // For MSVC 11 we need to find the Windows 8 SDK. + pub fn find_msvc_11(tool: &str, target: &str) -> Option { + let vcdir = otry!(get_vc_dir("11.0")); + let mut tool = otry!(get_tool(tool, &vcdir, target)); + let sub = otry!(lib_subdir(target)); + let sdk8 = otry!(get_sdk8_dir()); + tool.path.push(sdk8.join("bin").join(sub)); + let sdk_lib = sdk8.join("lib").join("win8"); + tool.libs.push(sdk_lib.join("um").join(sub)); + let sdk_include = sdk8.join("include"); + tool.include.push(sdk_include.join("shared")); + tool.include.push(sdk_include.join("um")); + tool.include.push(sdk_include.join("winrt")); + Some(tool.into_tool()) + } + + fn add_env(tool: &mut Tool, env: &str, paths: Vec) { + let prev = env::var_os(env).unwrap_or(OsString::new()); + let prev = env::split_paths(&prev); + let new = paths.into_iter().chain(prev); + tool.env + .push((env.to_string().into(), env::join_paths(new).unwrap())); + } + + // Given a possible MSVC installation directory, we look for the linker and + // then add the MSVC library path. 
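`add_env` above prepends the newly discovered directories to whatever `LIB`, `PATH`, or `INCLUDE` already contain, using `env::split_paths`/`env::join_paths`. A standalone sketch of that prepend, with made-up directories:

```rust
use std::env;
use std::ffi::OsString;
use std::path::PathBuf;

// Sketch of the "prepend these directories to an existing path-list variable"
// pattern used by `add_env` above. Returns the value that would be set.
fn prepend_paths(var: &str, new_dirs: Vec<PathBuf>) -> OsString {
    let prev = env::var_os(var).unwrap_or_else(OsString::new);
    let merged = new_dirs.into_iter().chain(env::split_paths(&prev));
    env::join_paths(merged).expect("path entries must not contain the separator")
}

fn main() {
    // Hypothetical SDK directories, for illustration only.
    let value = prepend_paths(
        "LIB",
        vec![
            PathBuf::from(r"C:\VS\VC\Tools\MSVC\14.16.27023\lib\x64"),
            PathBuf::from(r"C:\Kits\10\Lib\10.0.17763.0\ucrt\x64"),
        ],
    );
    println!("LIB={:?}", value);
}
```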
+ fn get_tool(tool: &str, path: &Path, target: &str) -> Option { + bin_subdir(target) + .into_iter() + .map(|(sub, host)| { + ( + path.join("bin").join(sub).join(tool), + path.join("bin").join(host), + ) + }) + .filter(|&(ref path, _)| path.is_file()) + .map(|(path, host)| { + let mut tool = MsvcTool::new(path); + tool.path.push(host); + tool + }) + .filter_map(|mut tool| { + let sub = otry!(vc_lib_subdir(target)); + tool.libs.push(path.join("lib").join(sub)); + tool.include.push(path.join("include")); + let atlmfc_path = path.join("atlmfc"); + if atlmfc_path.exists() { + tool.libs.push(atlmfc_path.join("lib").join(sub)); + tool.include.push(atlmfc_path.join("include")); + } + Some(tool) + }) + .next() + } + + // To find MSVC we look in a specific registry key for the version we are + // trying to find. + fn get_vc_dir(ver: &str) -> Option { + let key = r"SOFTWARE\Microsoft\VisualStudio\SxS\VC7"; + let key = otry!(LOCAL_MACHINE.open(key.as_ref()).ok()); + let path = otry!(key.query_str(ver).ok()); + Some(path.into()) + } + + // To find the Universal CRT we look in a specific registry key for where + // all the Universal CRTs are located and then sort them asciibetically to + // find the newest version. While this sort of sorting isn't ideal, it is + // what vcvars does so that's good enough for us. + // + // Returns a pair of (root, version) for the ucrt dir if found + fn get_ucrt_dir() -> Option<(PathBuf, String)> { + let key = r"SOFTWARE\Microsoft\Windows Kits\Installed Roots"; + let key = otry!(LOCAL_MACHINE.open(key.as_ref()).ok()); + let root = otry!(key.query_str("KitsRoot10").ok()); + let readdir = otry!(Path::new(&root).join("lib").read_dir().ok()); + let max_libdir = otry!( + readdir + .filter_map(|dir| dir.ok()) + .map(|dir| dir.path()) + .filter(|dir| dir.components() + .last() + .and_then(|c| c.as_os_str().to_str()) + .map(|c| c.starts_with("10.") && dir.join("ucrt").is_dir()) + .unwrap_or(false)) + .max() + ); + let version = max_libdir.components().last().unwrap(); + let version = version.as_os_str().to_str().unwrap().to_string(); + Some((root.into(), version)) + } + + // Vcvars finds the correct version of the Windows 10 SDK by looking + // for the include `um\Windows.h` because sometimes a given version will + // only have UCRT bits without the rest of the SDK. Since we only care about + // libraries and not includes, we instead look for `um\x64\kernel32.lib`. + // Since the 32-bit and 64-bit libraries are always installed together we + // only need to bother checking x64, making this code a tiny bit simpler. + // Like we do for the Universal CRT, we sort the possibilities + // asciibetically to find the newest one as that is what vcvars does. + fn get_sdk10_dir() -> Option<(PathBuf, String)> { + let key = r"SOFTWARE\Microsoft\Microsoft SDKs\Windows\v10.0"; + let key = otry!(LOCAL_MACHINE.open(key.as_ref()).ok()); + let root = otry!(key.query_str("InstallationFolder").ok()); + let readdir = otry!(Path::new(&root).join("lib").read_dir().ok()); + let mut dirs = readdir + .filter_map(|dir| dir.ok()) + .map(|dir| dir.path()) + .collect::>(); + dirs.sort(); + let dir = otry!( + dirs.into_iter() + .rev() + .filter(|dir| dir.join("um").join("x64").join("kernel32.lib").is_file()) + .next() + ); + let version = dir.components().last().unwrap(); + let version = version.as_os_str().to_str().unwrap().to_string(); + Some((root.into(), version)) + } + + // Interestingly there are several subdirectories, `win7` `win8` and + // `winv6.3`. 
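Both `get_ucrt_dir` and `get_sdk10_dir` above pick the newest installed version by sorting directory names asciibetically, as vcvars does. A sketch of that selection over a plain list of names (the versions shown are hypothetical):

```rust
// Sketch of the "asciibetic max" selection described above: the candidate
// names are sorted as strings and the last one wins, which matches vcvars
// even though it is not a true numeric version comparison.
fn newest_version(mut candidates: Vec<String>) -> Option<String> {
    candidates.sort();
    candidates.pop()
}

fn main() {
    // Hypothetical Windows 10 SDK versions, for illustration only.
    let dirs = vec![
        "10.0.16299.0".to_string(),
        "10.0.17134.0".to_string(),
        "10.0.17763.0".to_string(),
    ];
    assert_eq!(newest_version(dirs).as_deref(), Some("10.0.17763.0"));
}
```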
Vcvars seems to only care about `winv6.3` though, so the same + // applies to us. Note that if we were targetting kernel mode drivers + // instead of user mode applications, we would care. + fn get_sdk81_dir() -> Option { + let key = r"SOFTWARE\Microsoft\Microsoft SDKs\Windows\v8.1"; + let key = otry!(LOCAL_MACHINE.open(key.as_ref()).ok()); + let root = otry!(key.query_str("InstallationFolder").ok()); + Some(root.into()) + } + + fn get_sdk8_dir() -> Option { + let key = r"SOFTWARE\Microsoft\Microsoft SDKs\Windows\v8.0"; + let key = otry!(LOCAL_MACHINE.open(key.as_ref()).ok()); + let root = otry!(key.query_str("InstallationFolder").ok()); + Some(root.into()) + } + + const PROCESSOR_ARCHITECTURE_INTEL: u16 = 0; + const PROCESSOR_ARCHITECTURE_AMD64: u16 = 9; + const X86: u16 = PROCESSOR_ARCHITECTURE_INTEL; + const X86_64: u16 = PROCESSOR_ARCHITECTURE_AMD64; + + // When choosing the tool to use, we have to choose the one which matches + // the target architecture. Otherwise we end up in situations where someone + // on 32-bit Windows is trying to cross compile to 64-bit and it tries to + // invoke the native 64-bit compiler which won't work. + // + // For the return value of this function, the first member of the tuple is + // the folder of the tool we will be invoking, while the second member is + // the folder of the host toolchain for that tool which is essential when + // using a cross linker. We return a Vec since on x64 there are often two + // linkers that can target the architecture we desire. The 64-bit host + // linker is preferred, and hence first, due to 64-bit allowing it more + // address space to work with and potentially being faster. + fn bin_subdir(target: &str) -> Vec<(&'static str, &'static str)> { + let arch = target.split('-').next().unwrap(); + match (arch, host_arch()) { + ("i586", X86) | ("i686", X86) => vec![("", "")], + ("i586", X86_64) | ("i686", X86_64) => vec![("amd64_x86", "amd64"), ("", "")], + ("x86_64", X86) => vec![("x86_amd64", "")], + ("x86_64", X86_64) => vec![("amd64", "amd64"), ("x86_amd64", "")], + ("arm", X86) | ("thumbv7a", X86) => vec![("x86_arm", "")], + ("arm", X86_64) | ("thumbv7a", X86_64) => vec![("amd64_arm", "amd64"), ("x86_arm", "")], + _ => vec![], + } + } + + fn lib_subdir(target: &str) -> Option<&'static str> { + let arch = target.split('-').next().unwrap(); + match arch { + "i586" | "i686" => Some("x86"), + "x86_64" => Some("x64"), + "arm" | "thumbv7a" => Some("arm"), + "aarch64" => Some("arm64"), + _ => None, + } + } + + // MSVC's x86 libraries are not in a subfolder + fn vc_lib_subdir(target: &str) -> Option<&'static str> { + let arch = target.split('-').next().unwrap(); + match arch { + "i586" | "i686" => Some(""), + "x86_64" => Some("amd64"), + "arm" | "thumbv7a" => Some("arm"), + "aarch64" => Some("arm64"), + _ => None, + } + } + + #[allow(bad_style)] + fn host_arch() -> u16 { + type DWORD = u32; + type WORD = u16; + type LPVOID = *mut u8; + type DWORD_PTR = usize; + + #[repr(C)] + struct SYSTEM_INFO { + wProcessorArchitecture: WORD, + _wReserved: WORD, + _dwPageSize: DWORD, + _lpMinimumApplicationAddress: LPVOID, + _lpMaximumApplicationAddress: LPVOID, + _dwActiveProcessorMask: DWORD_PTR, + _dwNumberOfProcessors: DWORD, + _dwProcessorType: DWORD, + _dwAllocationGranularity: DWORD, + _wProcessorLevel: WORD, + _wProcessorRevision: WORD, + } + + extern "system" { + fn GetNativeSystemInfo(lpSystemInfo: *mut SYSTEM_INFO); + } + + unsafe { + let mut info = mem::zeroed(); + GetNativeSystemInfo(&mut info); + info.wProcessorArchitecture + } 
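`lib_subdir` and `vc_lib_subdir` above map the architecture prefix of a target triple onto the library subdirectory names MSVC uses. A compact sketch mirroring `lib_subdir`:

```rust
// Sketch mirroring `lib_subdir` above: the architecture is the first
// dash-separated component of the target triple.
fn msvc_lib_subdir(target: &str) -> Option<&'static str> {
    match target.split('-').next()? {
        "i586" | "i686" => Some("x86"),
        "x86_64" => Some("x64"),
        "arm" | "thumbv7a" => Some("arm"),
        "aarch64" => Some("arm64"),
        _ => None,
    }
}

fn main() {
    assert_eq!(msvc_lib_subdir("x86_64-pc-windows-msvc"), Some("x64"));
    assert_eq!(msvc_lib_subdir("i686-pc-windows-msvc"), Some("x86"));
    assert_eq!(msvc_lib_subdir("aarch64-pc-windows-msvc"), Some("arm64"));
    assert_eq!(msvc_lib_subdir("mips-unknown-linux-gnu"), None);
}
```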
+ } + + // Given a registry key, look at all the sub keys and find the one which has + // the maximal numeric value. + // + // Returns the name of the maximal key as well as the opened maximal key. + fn max_version(key: &RegistryKey) -> Option<(OsString, RegistryKey)> { + let mut max_vers = 0; + let mut max_key = None; + for subkey in key.iter().filter_map(|k| k.ok()) { + let val = subkey + .to_str() + .and_then(|s| s.trim_left_matches("v").replace(".", "").parse().ok()); + let val = match val { + Some(s) => s, + None => continue, + }; + if val > max_vers { + if let Ok(k) = key.open(&subkey) { + max_vers = val; + max_key = Some((subkey, k)); + } + } + } + max_key + } + + pub fn has_msbuild_version(version: &str) -> bool { + match version { + "15.0" => { + find_msbuild_vs15("x86_64-pc-windows-msvc").is_some() + || find_msbuild_vs15("i686-pc-windows-msvc").is_some() + } + "12.0" | "14.0" => LOCAL_MACHINE + .open(&OsString::from(format!( + "SOFTWARE\\Microsoft\\MSBuild\\ToolsVersions\\{}", + version + ))) + .is_ok(), + _ => false, + } + } + + pub fn find_devenv(target: &str) -> Option { + find_devenv_vs15(&target) + } + + fn find_devenv_vs15(target: &str) -> Option { + let key = r"SOFTWARE\WOW6432Node\Microsoft\VisualStudio\SxS\VS7"; + LOCAL_MACHINE + .open(key.as_ref()) + .ok() + .and_then(|key| key.query_str("15.0").ok()) + .map(|path| { + let path = PathBuf::from(path).join(r"Common7\IDE\devenv.exe"); + let mut tool = Tool::new(path); + if target.contains("x86_64") { + tool.env.push(("Platform".into(), "X64".into())); + } + tool + }) + } + + // see http://stackoverflow.com/questions/328017/path-to-msbuild + pub fn find_msbuild(target: &str) -> Option { + // VS 15 (2017) changed how to locate msbuild + if let Some(r) = find_msbuild_vs15(target) { + return Some(r); + } else { + find_old_msbuild(target) + } + } + + fn find_msbuild_vs15(target: &str) -> Option { + // Seems like this could also go through SetupConfiguration, + // or that find_msvc_15 could just use this registry key + // instead of the COM interface. 
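`max_version` above turns registry subkey names like `v4.0` into comparable integers by stripping the leading `v`, removing dots, and parsing. A tiny sketch of that normalization, using the modern `trim_start_matches`, which behaves the same as the `trim_left_matches` call above:

```rust
// Sketch of the key-name normalization used by `max_version` above:
// "v4.0" -> 40, "14.0" -> 140, non-numeric names are skipped.
fn version_key_value(name: &str) -> Option<u32> {
    name.trim_start_matches('v').replace(".", "").parse().ok()
}

fn main() {
    assert_eq!(version_key_value("v4.0"), Some(40));
    assert_eq!(version_key_value("14.0"), Some(140));
    assert_eq!(version_key_value("not-a-version"), None);
}
```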
+ let key = r"SOFTWARE\WOW6432Node\Microsoft\VisualStudio\SxS\VS7"; + LOCAL_MACHINE + .open(key.as_ref()) + .ok() + .and_then(|key| key.query_str("15.0").ok()) + .map(|path| { + let path = PathBuf::from(path).join(r"MSBuild\15.0\Bin\MSBuild.exe"); + let mut tool = Tool::new(path); + if target.contains("x86_64") { + tool.env.push(("Platform".into(), "X64".into())); + } + tool + }) + } + + fn find_old_msbuild(target: &str) -> Option { + let key = r"SOFTWARE\Microsoft\MSBuild\ToolsVersions"; + LOCAL_MACHINE + .open(key.as_ref()) + .ok() + .and_then(|key| { + max_version(&key).and_then(|(_vers, key)| key.query_str("MSBuildToolsPath").ok()) + }) + .map(|path| { + let mut path = PathBuf::from(path); + path.push("MSBuild.exe"); + let mut tool = Tool::new(path); + if target.contains("x86_64") { + tool.env.push(("Platform".into(), "X64".into())); + } + tool + }) + } +} diff --git a/cc/tests/cc_env.rs b/cc/tests/cc_env.rs new file mode 100644 index 000000000..f9386d7f5 --- /dev/null +++ b/cc/tests/cc_env.rs @@ -0,0 +1,119 @@ +extern crate cc; +extern crate tempdir; + +use std::env; +use std::path::Path; +use std::ffi::OsString; + +mod support; +use support::Test; + +#[test] +fn main() { + ccache(); + distcc(); + ccache_spaces(); + ccache_env_flags(); + leading_spaces(); + extra_flags(); + path_to_ccache(); + more_spaces(); +} + +fn ccache() { + let test = Test::gnu(); + + env::set_var("CC", "ccache cc"); + let compiler = test.gcc().file("foo.c").get_compiler(); + + assert_eq!(compiler.path(), Path::new("cc")); +} + +fn ccache_spaces() { + let test = Test::gnu(); + test.shim("ccache"); + + env::set_var("CC", "ccache cc"); + let compiler = test.gcc().file("foo.c").get_compiler(); + assert_eq!(compiler.path(), Path::new("cc")); +} + +fn distcc() { + let test = Test::gnu(); + test.shim("distcc"); + + env::set_var("CC", "distcc cc"); + let compiler = test.gcc().file("foo.c").get_compiler(); + assert_eq!(compiler.path(), Path::new("cc")); +} + +fn ccache_env_flags() { + let test = Test::gnu(); + test.shim("ccache"); + + env::set_var("CC", "ccache lol-this-is-not-a-compiler"); + let compiler = test.gcc().file("foo.c").get_compiler(); + assert_eq!(compiler.path(), Path::new("lol-this-is-not-a-compiler")); + assert_eq!( + compiler.cc_env(), + OsString::from("ccache lol-this-is-not-a-compiler") + ); + assert!( + compiler + .cflags_env() + .into_string() + .unwrap() + .contains("ccache") == false + ); + assert!( + compiler + .cflags_env() + .into_string() + .unwrap() + .contains(" lol-this-is-not-a-compiler") == false + ); + + env::set_var("CC", ""); +} + +fn leading_spaces() { + let test = Test::gnu(); + test.shim("ccache"); + + env::set_var("CC", " test "); + let compiler = test.gcc().file("foo.c").get_compiler(); + assert_eq!(compiler.path(), Path::new("test")); + + env::set_var("CC", ""); +} + +fn extra_flags() { + let test = Test::gnu(); + test.shim("ccache"); + + env::set_var("CC", "ccache cc -m32"); + let compiler = test.gcc().file("foo.c").get_compiler(); + assert_eq!(compiler.path(), Path::new("cc")); +} + +fn path_to_ccache() { + let test = Test::gnu(); + test.shim("ccache"); + + env::set_var("CC", "/path/to/ccache.exe cc -m32"); + let compiler = test.gcc().file("foo.c").get_compiler(); + assert_eq!(compiler.path(), Path::new("cc")); + assert_eq!( + compiler.cc_env(), + OsString::from("/path/to/ccache.exe cc -m32"), + ); +} + +fn more_spaces() { + let test = Test::gnu(); + test.shim("ccache"); + + env::set_var("CC", "cc -m32"); + let compiler = test.gcc().file("foo.c").get_compiler(); + 
assert_eq!(compiler.path(), Path::new("cc")); +} diff --git a/cc/tests/support/mod.rs b/cc/tests/support/mod.rs new file mode 100644 index 000000000..cae81513b --- /dev/null +++ b/cc/tests/support/mod.rs @@ -0,0 +1,131 @@ +#![allow(dead_code)] + +use std::env; +use std::ffi::OsStr; +use std::fs::{self, File}; +use std::io::prelude::*; +use std::path::PathBuf; + +use cc; +use tempdir::TempDir; + +pub struct Test { + pub td: TempDir, + pub gcc: PathBuf, + pub msvc: bool, +} + +pub struct Execution { + args: Vec, +} + +impl Test { + pub fn new() -> Test { + let mut gcc = PathBuf::from(env::current_exe().unwrap()); + gcc.pop(); + if gcc.ends_with("deps") { + gcc.pop(); + } + gcc.push(format!("gcc-shim{}", env::consts::EXE_SUFFIX)); + Test { + td: TempDir::new("gcc-test").unwrap(), + gcc: gcc, + msvc: false, + } + } + + pub fn gnu() -> Test { + let t = Test::new(); + t.shim("cc").shim("c++").shim("ar"); + t + } + + pub fn msvc() -> Test { + let mut t = Test::new(); + t.shim("cl").shim("lib.exe"); + t.msvc = true; + t + } + + pub fn shim(&self, name: &str) -> &Test { + let fname = format!("{}{}", name, env::consts::EXE_SUFFIX); + fs::hard_link(&self.gcc, self.td.path().join(&fname)) + .or_else(|_| fs::copy(&self.gcc, self.td.path().join(&fname)).map(|_| ())) + .unwrap(); + self + } + + pub fn gcc(&self) -> cc::Build { + let mut cfg = cc::Build::new(); + let mut path = env::split_paths(&env::var_os("PATH").unwrap()).collect::>(); + path.insert(0, self.td.path().to_owned()); + let target = if self.msvc { + "x86_64-pc-windows-msvc" + } else { + "x86_64-unknown-linux-gnu" + }; + + cfg.target(target) + .host(target) + .opt_level(2) + .debug(false) + .out_dir(self.td.path()) + .__set_env("PATH", env::join_paths(path).unwrap()) + .__set_env("GCCTEST_OUT_DIR", self.td.path()); + if self.msvc { + cfg.compiler(self.td.path().join("cl")); + cfg.archiver(self.td.path().join("lib.exe")); + } + cfg + } + + pub fn cmd(&self, i: u32) -> Execution { + let mut s = String::new(); + File::open(self.td.path().join(format!("out{}", i))) + .unwrap() + .read_to_string(&mut s) + .unwrap(); + Execution { + args: s.lines().map(|s| s.to_string()).collect(), + } + } +} + +impl Execution { + pub fn must_have>(&self, p: P) -> &Execution { + if !self.has(p.as_ref()) { + panic!("didn't find {:?} in {:?}", p.as_ref(), self.args); + } else { + self + } + } + + pub fn must_not_have>(&self, p: P) -> &Execution { + if self.has(p.as_ref()) { + panic!("found {:?}", p.as_ref()); + } else { + self + } + } + + pub fn has(&self, p: &OsStr) -> bool { + self.args.iter().any(|arg| OsStr::new(arg) == p) + } + + pub fn must_have_in_order(&self, before: &str, after: &str) -> &Execution { + let before_position = self.args + .iter() + .rposition(|x| OsStr::new(x) == OsStr::new(before)); + let after_position = self.args + .iter() + .rposition(|x| OsStr::new(x) == OsStr::new(after)); + match (before_position, after_position) { + (Some(b), Some(a)) if b < a => {} + (b, a) => panic!( + "{:?} (last position: {:?}) did not appear before {:?} (last position: {:?})", + before, b, after, a + ), + }; + self + } +} diff --git a/cc/tests/test.rs b/cc/tests/test.rs new file mode 100644 index 000000000..820072f68 --- /dev/null +++ b/cc/tests/test.rs @@ -0,0 +1,361 @@ +extern crate cc; +extern crate tempdir; + +use std::env; +use support::Test; + +mod support; + +#[test] +fn gnu_smoke() { + let test = Test::gnu(); + test.gcc().file("foo.c").compile("foo"); + + test.cmd(0) + .must_have("-O2") + .must_have("foo.c") + .must_not_have("-g") + .must_have("-c") + 
.must_have("-ffunction-sections") + .must_have("-fdata-sections"); + test.cmd(1).must_have(test.td.path().join("foo.o")); +} + +#[test] +fn gnu_opt_level_1() { + let test = Test::gnu(); + test.gcc().opt_level(1).file("foo.c").compile("foo"); + + test.cmd(0).must_have("-O1").must_not_have("-O2"); +} + +#[test] +fn gnu_opt_level_s() { + let test = Test::gnu(); + test.gcc().opt_level_str("s").file("foo.c").compile("foo"); + + test.cmd(0) + .must_have("-Os") + .must_not_have("-O1") + .must_not_have("-O2") + .must_not_have("-O3") + .must_not_have("-Oz"); +} + +#[test] +fn gnu_debug() { + let test = Test::gnu(); + test.gcc().debug(true).file("foo.c").compile("foo"); + test.cmd(0).must_have("-g"); +} + +#[test] +fn gnu_warnings_into_errors() { + let test = Test::gnu(); + test.gcc() + .warnings_into_errors(true) + .file("foo.c") + .compile("foo"); + + test.cmd(0).must_have("-Werror"); +} + +#[test] +fn gnu_warnings() { + let test = Test::gnu(); + test.gcc() + .warnings(true) + .flag("-Wno-missing-field-initializers") + .file("foo.c") + .compile("foo"); + + test.cmd(0).must_have("-Wall").must_have("-Wextra"); +} + +#[test] +fn gnu_extra_warnings0() { + let test = Test::gnu(); + test.gcc() + .warnings(true) + .extra_warnings(false) + .flag("-Wno-missing-field-initializers") + .file("foo.c") + .compile("foo"); + + test.cmd(0).must_have("-Wall").must_not_have("-Wextra"); +} + +#[test] +fn gnu_extra_warnings1() { + let test = Test::gnu(); + test.gcc() + .warnings(false) + .extra_warnings(true) + .flag("-Wno-missing-field-initializers") + .file("foo.c") + .compile("foo"); + + test.cmd(0).must_not_have("-Wall").must_have("-Wextra"); +} + +#[test] +fn gnu_warnings_overridable() { + let test = Test::gnu(); + test.gcc() + .warnings(true) + .flag("-Wno-missing-field-initializers") + .file("foo.c") + .compile("foo"); + + test.cmd(0) + .must_have_in_order("-Wall", "-Wno-missing-field-initializers"); +} + +#[test] +fn gnu_no_warnings_if_cflags() { + env::set_var("CFLAGS", "-Wflag-does-not-exist"); + let test = Test::gnu(); + test.gcc() + .file("foo.c") + .compile("foo"); + + test.cmd(0).must_not_have("-Wall").must_not_have("-Wextra"); + env::set_var("CFLAGS", ""); +} + +#[test] +fn gnu_no_warnings_if_cxxflags() { + env::set_var("CXXFLAGS", "-Wflag-does-not-exist"); + let test = Test::gnu(); + test.gcc() + .file("foo.c") + .compile("foo"); + + test.cmd(0).must_not_have("-Wall").must_not_have("-Wextra"); + env::set_var("CXXFLAGS", ""); +} + +#[test] +fn gnu_x86_64() { + for vendor in &["unknown-linux-gnu", "apple-darwin"] { + let target = format!("x86_64-{}", vendor); + let test = Test::gnu(); + test.gcc() + .target(&target) + .host(&target) + .file("foo.c") + .compile("foo"); + + test.cmd(0).must_have("-fPIC").must_have("-m64"); + } +} + +#[test] +fn gnu_x86_64_no_pic() { + for vendor in &["unknown-linux-gnu", "apple-darwin"] { + let target = format!("x86_64-{}", vendor); + let test = Test::gnu(); + test.gcc() + .pic(false) + .target(&target) + .host(&target) + .file("foo.c") + .compile("foo"); + + test.cmd(0).must_not_have("-fPIC"); + } +} + +#[test] +fn gnu_i686() { + for vendor in &["unknown-linux-gnu", "apple-darwin"] { + let target = format!("i686-{}", vendor); + let test = Test::gnu(); + test.gcc() + .target(&target) + .host(&target) + .file("foo.c") + .compile("foo"); + + test.cmd(0).must_have("-m32"); + } +} + +#[test] +fn gnu_i686_pic() { + for vendor in &["unknown-linux-gnu", "apple-darwin"] { + let target = format!("i686-{}", vendor); + let test = Test::gnu(); + test.gcc() + .pic(true) + 
.target(&target) + .host(&target) + .file("foo.c") + .compile("foo"); + + test.cmd(0).must_have("-fPIC"); + } +} + +#[test] +fn gnu_set_stdlib() { + let test = Test::gnu(); + test.gcc() + .cpp_set_stdlib(Some("foo")) + .file("foo.c") + .compile("foo"); + + test.cmd(0).must_not_have("-stdlib=foo"); +} + +#[test] +fn gnu_include() { + let test = Test::gnu(); + test.gcc().include("foo/bar").file("foo.c").compile("foo"); + + test.cmd(0).must_have("-I").must_have("foo/bar"); +} + +#[test] +fn gnu_define() { + let test = Test::gnu(); + test.gcc() + .define("FOO", "bar") + .define("BAR", None) + .file("foo.c") + .compile("foo"); + + test.cmd(0).must_have("-DFOO=bar").must_have("-DBAR"); +} + +#[test] +fn gnu_compile_assembly() { + let test = Test::gnu(); + test.gcc().file("foo.S").compile("foo"); + test.cmd(0).must_have("foo.S"); +} + +#[test] +fn gnu_shared() { + let test = Test::gnu(); + test.gcc() + .file("foo.c") + .shared_flag(true) + .static_flag(false) + .compile("foo"); + + test.cmd(0).must_have("-shared").must_not_have("-static"); +} + +#[test] +fn gnu_flag_if_supported() { + if cfg!(windows) { + return; + } + let test = Test::gnu(); + test.gcc() + .file("foo.c") + .flag("-v") + .flag_if_supported("-Wall") + .flag_if_supported("-Wflag-does-not-exist") + .flag_if_supported("-std=c++11") + .compile("foo"); + + test.cmd(0) + .must_have("-v") + .must_have("-Wall") + .must_not_have("-Wflag-does-not-exist") + .must_not_have("-std=c++11"); +} + +#[test] +fn gnu_flag_if_supported_cpp() { + if cfg!(windows) { + return; + } + let test = Test::gnu(); + test.gcc() + .cpp(true) + .file("foo.cpp") + .flag_if_supported("-std=c++11") + .compile("foo"); + + test.cmd(0).must_have("-std=c++11"); +} + +#[test] +fn gnu_static() { + let test = Test::gnu(); + test.gcc() + .file("foo.c") + .shared_flag(false) + .static_flag(true) + .compile("foo"); + + test.cmd(0).must_have("-static").must_not_have("-shared"); +} + +#[test] +fn msvc_smoke() { + let test = Test::msvc(); + test.gcc().file("foo.c").compile("foo"); + + test.cmd(0) + .must_have("/O2") + .must_have("foo.c") + .must_not_have("/Z7") + .must_have("/c") + .must_have("/MD"); + test.cmd(1).must_have(test.td.path().join("foo.o")); +} + +#[test] +fn msvc_opt_level_0() { + let test = Test::msvc(); + test.gcc().opt_level(0).file("foo.c").compile("foo"); + + test.cmd(0).must_not_have("/O2"); +} + +#[test] +fn msvc_debug() { + let test = Test::msvc(); + test.gcc().debug(true).file("foo.c").compile("foo"); + test.cmd(0).must_have("/Z7"); +} + +#[test] +fn msvc_include() { + let test = Test::msvc(); + test.gcc().include("foo/bar").file("foo.c").compile("foo"); + + test.cmd(0).must_have("/I").must_have("foo/bar"); +} + +#[test] +fn msvc_define() { + let test = Test::msvc(); + test.gcc() + .define("FOO", "bar") + .define("BAR", None) + .file("foo.c") + .compile("foo"); + + test.cmd(0).must_have("/DFOO=bar").must_have("/DBAR"); +} + +#[test] +fn msvc_static_crt() { + let test = Test::msvc(); + test.gcc().static_crt(true).file("foo.c").compile("foo"); + + test.cmd(0).must_have("/MT"); +} + +#[test] +fn msvc_no_static_crt() { + let test = Test::msvc(); + test.gcc().static_crt(false).file("foo.c").compile("foo"); + + test.cmd(0).must_have("/MD"); +} diff --git a/cfg-if/.cargo-checksum.json b/cfg-if/.cargo-checksum.json new file mode 100644 index 000000000..803f56d7e --- /dev/null +++ b/cfg-if/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{},"package":"082bb9b28e00d3c9d39cc03e64ce4cea0f1bb9b3fde493f0cbc008472d22bdf4"} \ No newline at end of file diff --git 
a/cfg-if/Cargo.toml b/cfg-if/Cargo.toml new file mode 100644 index 000000000..84c4fc783 --- /dev/null +++ b/cfg-if/Cargo.toml @@ -0,0 +1,24 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "cfg-if" +version = "0.1.6" +authors = ["Alex Crichton "] +description = "A macro to ergonomically define an item depending on a large number of #[cfg]\nparameters. Structured like an if-else chain, the first matching branch is the\nitem that gets emitted.\n" +homepage = "https://github.com/alexcrichton/cfg-if" +documentation = "https://docs.rs/cfg-if" +readme = "README.md" +license = "MIT/Apache-2.0" +repository = "https://github.com/alexcrichton/cfg-if" +[badges.travis-ci] +repository = "alexcrichton/cfg-if" diff --git a/cfg-if/LICENSE-APACHE b/cfg-if/LICENSE-APACHE new file mode 100644 index 000000000..16fe87b06 --- /dev/null +++ b/cfg-if/LICENSE-APACHE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/cfg-if/LICENSE-MIT b/cfg-if/LICENSE-MIT new file mode 100644 index 000000000..39e0ed660 --- /dev/null +++ b/cfg-if/LICENSE-MIT @@ -0,0 +1,25 @@ +Copyright (c) 2014 Alex Crichton + +Permission is hereby granted, free of charge, to any +person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without +limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software +is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions +of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/cfg-if/README.md b/cfg-if/README.md new file mode 100644 index 000000000..344a946c0 --- /dev/null +++ b/cfg-if/README.md @@ -0,0 +1,52 @@ +# cfg-if + +[![Build Status](https://travis-ci.org/alexcrichton/cfg-if.svg?branch=master)](https://travis-ci.org/alexcrichton/cfg-if) + +[Documentation](https://docs.rs/cfg-if) + +A macro to ergonomically define an item depending on a large number of #[cfg] +parameters. Structured like an if-else chain, the first matching branch is the +item that gets emitted. + +```toml +[dependencies] +cfg-if = "0.1" +``` + +## Example + +```rust +#[macro_use] +extern crate cfg_if; + +cfg_if! { + if #[cfg(unix)] { + fn foo() { /* unix specific functionality */ } + } else if #[cfg(target_pointer_width = "32")] { + fn foo() { /* non-unix, 32-bit functionality */ } + } else { + fn foo() { /* fallback implementation */ } + } +} + +fn main() { + foo(); +} +``` + +# License + +This project is licensed under either of + + * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or + http://www.apache.org/licenses/LICENSE-2.0) + * MIT license ([LICENSE-MIT](LICENSE-MIT) or + http://opensource.org/licenses/MIT) + +at your option. + +### Contribution + +Unless you explicitly state otherwise, any contribution intentionally submitted +for inclusion in Serde by you, as defined in the Apache-2.0 license, shall be +dual licensed as above, without any additional terms or conditions. diff --git a/cfg-if/src/lib.rs b/cfg-if/src/lib.rs new file mode 100644 index 000000000..ff144f69f --- /dev/null +++ b/cfg-if/src/lib.rs @@ -0,0 +1,142 @@ +//! A macro for defining `#[cfg]` if-else statements. +//! +//! The macro provided by this crate, `cfg_if`, is similar to the `if/elif` C +//! preprocessor macro by allowing definition of a cascade of `#[cfg]` cases, +//! emitting the implementation which matches first. +//! +//! This allows you to conveniently provide a long list `#[cfg]`'d blocks of code +//! without having to rewrite each clause multiple times. +//! +//! # Example +//! +//! ``` +//! #[macro_use] +//! extern crate cfg_if; +//! +//! cfg_if! { +//! if #[cfg(unix)] { +//! fn foo() { /* unix specific functionality */ } +//! } else if #[cfg(target_pointer_width = "32")] { +//! fn foo() { /* non-unix, 32-bit functionality */ } +//! } else { +//! fn foo() { /* fallback implementation */ } +//! } +//! } +//! +//! # fn main() {} +//! ``` + +#![no_std] + +#![doc(html_root_url = "https://docs.rs/cfg-if")] +#![deny(missing_docs)] +#![cfg_attr(test, deny(warnings))] + +#[macro_export(local_inner_macros)] +macro_rules! cfg_if { + // match if/else chains with a final `else` + ($( + if #[cfg($($meta:meta),*)] { $($it:item)* } + ) else * else { + $($it2:item)* + }) => { + cfg_if! { + @__items + () ; + $( ( ($($meta),*) ($($it)*) ), )* + ( () ($($it2)*) ), + } + }; + + // match if/else chains lacking a final `else` + ( + if #[cfg($($i_met:meta),*)] { $($i_it:item)* } + $( + else if #[cfg($($e_met:meta),*)] { $($e_it:item)* } + )* + ) => { + cfg_if! 
{ + @__items + () ; + ( ($($i_met),*) ($($i_it)*) ), + $( ( ($($e_met),*) ($($e_it)*) ), )* + ( () () ), + } + }; + + // Internal and recursive macro to emit all the items + // + // Collects all the negated cfgs in a list at the beginning and after the + // semicolon is all the remaining items + (@__items ($($not:meta,)*) ; ) => {}; + (@__items ($($not:meta,)*) ; ( ($($m:meta),*) ($($it:item)*) ), $($rest:tt)*) => { + // Emit all items within one block, applying an approprate #[cfg]. The + // #[cfg] will require all `$m` matchers specified and must also negate + // all previous matchers. + cfg_if! { @__apply cfg(all($($m,)* not(any($($not),*)))), $($it)* } + + // Recurse to emit all other items in `$rest`, and when we do so add all + // our `$m` matchers to the list of `$not` matchers as future emissions + // will have to negate everything we just matched as well. + cfg_if! { @__items ($($not,)* $($m,)*) ; $($rest)* } + }; + + // Internal macro to Apply a cfg attribute to a list of items + (@__apply $m:meta, $($it:item)*) => { + $(#[$m] $it)* + }; +} + +#[cfg(test)] +mod tests { + cfg_if! { + if #[cfg(test)] { + use core::option::Option as Option2; + fn works1() -> Option2 { Some(1) } + } else { + fn works1() -> Option { None } + } + } + + cfg_if! { + if #[cfg(foo)] { + fn works2() -> bool { false } + } else if #[cfg(test)] { + fn works2() -> bool { true } + } else { + fn works2() -> bool { false } + } + } + + cfg_if! { + if #[cfg(foo)] { + fn works3() -> bool { false } + } else { + fn works3() -> bool { true } + } + } + + cfg_if! { + if #[cfg(test)] { + use core::option::Option as Option3; + fn works4() -> Option3 { Some(1) } + } + } + + cfg_if! { + if #[cfg(foo)] { + fn works5() -> bool { false } + } else if #[cfg(test)] { + fn works5() -> bool { true } + } + } + + #[test] + fn it_works() { + assert!(works1().is_some()); + assert!(works2()); + assert!(works3()); + assert!(works4().is_some()); + assert!(works5()); + } +} diff --git a/cfg-if/tests/xcrate.rs b/cfg-if/tests/xcrate.rs new file mode 100644 index 000000000..f42b87767 --- /dev/null +++ b/cfg-if/tests/xcrate.rs @@ -0,0 +1,17 @@ +#[macro_use] +extern crate cfg_if; + +cfg_if! 
{ + if #[cfg(foo)] { + fn works() -> bool { false } + } else if #[cfg(test)] { + fn works() -> bool { true } + } else { + fn works() -> bool { false } + } +} + +#[test] +fn smoke() { + assert!(works()); +} diff --git a/clap/.cargo-checksum.json b/clap/.cargo-checksum.json new file mode 100644 index 000000000..1041e4444 --- /dev/null +++ b/clap/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{},"package":"b957d88f4b6a63b9d70d5f454ac8011819c6efa7727858f458ab71c756ce2d3e"} \ No newline at end of file diff --git a/clap/.pc/.quilt_patches b/clap/.pc/.quilt_patches new file mode 100644 index 000000000..6857a8d44 --- /dev/null +++ b/clap/.pc/.quilt_patches @@ -0,0 +1 @@ +debian/patches diff --git a/clap/.pc/.quilt_series b/clap/.pc/.quilt_series new file mode 100644 index 000000000..c2067066a --- /dev/null +++ b/clap/.pc/.quilt_series @@ -0,0 +1 @@ +series diff --git a/clap/.pc/.version b/clap/.pc/.version new file mode 100644 index 000000000..0cfbf0888 --- /dev/null +++ b/clap/.pc/.version @@ -0,0 +1 @@ +2 diff --git a/clap/.pc/applied-patches b/clap/.pc/applied-patches new file mode 100644 index 000000000..bce906376 --- /dev/null +++ b/clap/.pc/applied-patches @@ -0,0 +1 @@ +no-clippy.patch diff --git a/clap/.pc/no-clippy.patch/.timestamp b/clap/.pc/no-clippy.patch/.timestamp new file mode 100644 index 000000000..e69de29bb diff --git a/clap/.pc/no-clippy.patch/Cargo.toml b/clap/.pc/no-clippy.patch/Cargo.toml new file mode 100644 index 000000000..ba5c78e62 --- /dev/null +++ b/clap/.pc/no-clippy.patch/Cargo.toml @@ -0,0 +1,140 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "clap" +version = "2.32.0" +authors = ["Kevin K. 
"] +exclude = ["examples/*", "clap-test/*", "tests/*", "benches/*", "*.png", "clap-perf/*", "*.dot"] +description = "A simple to use, efficient, and full featured Command Line Argument Parser\n" +homepage = "https://clap.rs/" +documentation = "https://docs.rs/clap/" +readme = "README.md" +keywords = ["argument", "cli", "arg", "parser", "parse"] +categories = ["command-line-interface"] +license = "MIT" +repository = "https://github.com/kbknapp/clap-rs" +[package.metadata.docs.rs] +features = ["doc"] +[profile.test] +opt-level = 1 +lto = false +codegen-units = 4 +debug = true +debug-assertions = true +rpath = false + +[profile.doc] +opt-level = 0 +lto = false +codegen-units = 4 +debug = true +debug-assertions = true +rpath = false + +[profile.bench] +opt-level = 3 +lto = true +codegen-units = 1 +debug = false +debug-assertions = false +rpath = false + +[profile.dev] +opt-level = 0 +lto = false +codegen-units = 4 +debug = true +debug-assertions = true +rpath = false + +[profile.release] +opt-level = 3 +lto = true +codegen-units = 1 +debug = false +debug-assertions = false +rpath = false +[dependencies.atty] +version = "0.2.2" +optional = true + +[dependencies.bitflags] +version = "1.0" + +[dependencies.clippy] +version = "~0.0.166" +optional = true + +[dependencies.strsim] +version = "0.7.0" +optional = true + +[dependencies.term_size] +version = "0.3.0" +optional = true + +[dependencies.textwrap] +version = "0.10.0" + +[dependencies.unicode-width] +version = "0.1.4" + +[dependencies.vec_map] +version = "0.8" +optional = true + +[dependencies.yaml-rust] +version = "0.3.5" +optional = true +[dev-dependencies.lazy_static] +version = "1" + +[dev-dependencies.regex] +version = "1" + +[dev-dependencies.version-sync] +version = "0.5" + +[features] +color = ["ansi_term", "atty"] +debug = [] +default = ["suggestions", "color", "vec_map"] +doc = ["yaml"] +lints = ["clippy"] +nightly = [] +no_cargo = [] +suggestions = ["strsim"] +unstable = [] +wrap_help = ["term_size", "textwrap/term_size"] +yaml = ["yaml-rust"] +[target."cfg(not(windows))".dependencies.ansi_term] +version = "0.11" +optional = true +[badges.appveyor] +repository = "kbknapp/clap-rs" + +[badges.coveralls] +branch = "master" +repository = "kbknapp/clap-rs" + +[badges.is-it-maintained-issue-resolution] +repository = "kbknapp/clap-rs" + +[badges.is-it-maintained-open-issues] +repository = "kbknapp/clap-rs" + +[badges.maintenance] +status = "actively-developed" + +[badges.travis-ci] +repository = "kbknapp/clap-rs" diff --git a/clap/CHANGELOG.md b/clap/CHANGELOG.md new file mode 100644 index 000000000..cbb06a37d --- /dev/null +++ b/clap/CHANGELOG.md @@ -0,0 +1,2814 @@ + +## v2.32.0 (2018-06-26) + +#### Minimum Required Rust + +* As of this release, `clap` requires `rustc 1.21.0` or greater. + + +#### Features + +* **Completions:** adds completion support for Elvish. 
([e9d0562a](https://github.com/kbknapp/clap-rs/commit/e9d0562a1dc5dfe731ed7c767e6cee0af08f0cf9)) + +#### Improvements + +* **macros:** Support shorthand syntax for ArgGroups ([df9095e7](https://github.com/kbknapp/clap-rs/commit/df9095e75bb1e7896415251d0d4ffd8a0ebcd559)) + +#### Bug Fixes + +* **OsValues:** Add `ExactSizeIterator` implementation ([356c69e5](https://github.com/kbknapp/clap-rs/commit/356c69e508fd25a9f0ea2d27bf80ae1d9a8d88f4)) +* **arg_enum!:** Invalid expansions of some trailing-comma patterns ([7023184f](https://github.com/kbknapp/clap-rs/commit/7023184fca04e852c270341548d6a16207d13862)) +* **help message:** Unconditionally uses long description for subcommands ([6acc8b6a](https://github.com/kbknapp/clap-rs/commit/6acc8b6a621a765cbf513450188000d943676a30), closes [#897](https://github.com/kbknapp/clap-rs/issues/897)) + +#### Documentation + +* Refer to macOS rather than OSX. ([ab0d767f](https://github.com/kbknapp/clap-rs/commit/ab0d767f3a5a57e2bbb97d0183c2ef63c8c77a6c)) + + + + +### v2.31.2 (2018-03-19) + +#### Bug Fixes + +* **Fish Completions:** fixes a bug that only allowed a single completion in in Fish Shell ([e8774a8](https://github.com/kbknapp/clap-rs/pull/1214/commits/e8774a84ee4a319c888036e7c595ab46451d8e48), closes [#1212](https://github.com/kbknapp/clap-rs/issues/1212)) +* **AllowExternalSubcommands**: fixes a bug where external subcommands would be blocked by a similarly named subcommand (suggestions were getting in the way). ([a410e85](https://github.com/kbknapp/clap-rs/pull/1215/commits/a410e855bcd82b05f9efa73fa8b9774dc8842c6b)) + +#### Documentation + +* Fixes some typos in the `README.md` ([c8e685d7](https://github.com/kbknapp/clap-rs/commit/c8e685d76adee2a3cc06cac6952ffcf6f9548089)) + + +### v2.31.1 (2018-03-06) + + +#### Improvements + +* **AllowMissingPositional:** improves the ability of AllowMissingPositional to allow 'skipping' to the last positional arg with '--' ([df20e6e2](https://github.com/kbknapp/clap-rs/commit/df20e6e24b4e782be0b423b484b9798e3e2efe2f)) + + + +## v2.31.0 (2018-03-04) + + +#### Features + +* **Arg Indices:** adds the ability to query argument value indices ([f58d0576](https://github.com/kbknapp/clap-rs/commit/f58d05767ec8133c8eb2de117cb642b9ae29ccbc)) +* **Indices:** implements an Indices iterator ([1e67be44](https://github.com/kbknapp/clap-rs/commit/1e67be44f0ccf161cc84c4e6082382072e89c302)) +* **Raw Args** adds a convenience function to `Arg` that allows implying all of `Arg::last` `Arg::allow_hyphen_values` and `Arg::multiple(true)` ([66a78f29](https://github.com/kbknapp/clap-rs/commit/66a78f2972786f5fe7c07937a1ac23da2542afd2)) + +#### Documentation + +* Fix some typos and markdown issues. 
([935ba0dd](https://github.com/kbknapp/clap-rs/commit/935ba0dd547a69c3f636c5486795012019408794)) +* **Arg Indices:** adds the documentation for the arg index querying methods ([50bc0047](https://github.com/kbknapp/clap-rs/commit/50bc00477afa64dc6cdc5de161d3de3ba1d105a7)) +* **CONTRIBUTING.md:** fix url to clippy upstream repo to point to https://github.com/rust-lang-nursery/rust-clippy instead of https://github.com/Manishearth/rust-clippy ([42407d7f](https://github.com/kbknapp/clap-rs/commit/42407d7f21d794103cda61f49d2615aae0a4bcd9)) +* **Values:** improves the docs example of the Values iterator ([74075d65](https://github.com/kbknapp/clap-rs/commit/74075d65e8db1ddb5e2a4558009a5729d749d1b6)) +* Updates readme to hint that the `wrap_help` feature is a thing ([fc7ab227](https://github.com/kbknapp/clap-rs/commit/66a78f2972786f5fe7c07937a1ac23da2542afd2)) + +### Improvements + +* Cargo.toml: use codegen-units = 1 in release and bench profiles ([19f425ea](https://github.com/kbknapp/clap-rs/commit/66a78f2972786f5fe7c07937a1ac23da2542afd2)) +* Adds WASM support (clap now compiles on WASM!) ([689949e5](https://github.com/kbknapp/clap-rs/commit/689949e57d390bb61bc69f3ed91f60a2105738d0)) +* Uses the short help tool-tip for PowerShell completion scripts ([ecda22ce](https://github.com/kbknapp/clap-rs/commit/ecda22ce7210ce56d7b2d1a5445dd1b8a2959656)) + + + +## v2.30.0 (2018-02-13) + +#### Bug Fixes + +* **YAML:** Adds a missing conversion from `Arg::last` when instantiating from a YAML file ([aab77c81a5](https://github.com/kbknapp/clap-rs/pull/1175/commits/aab77c81a519b045f95946ae0dd3e850f9b93070), closes [#1160](https://github.com/kbknapp/clap-rs/issues/1173)) + +#### Improvements + +* **Bash Completions:** instead of completing a generic option name, all bash completions fall back to file completions UNLESS `Arg::possible_values` was used ([872f02ae](https://github.com/kbknapp/clap-rs/commit/872f02aea900ffa376850a279eb164645e1234fa)) +* **Deps:** No longer needlessly compiles `ansi_term` on Windows since its not used ([b57ee946](https://github.com/kbknapp/clap-rs/commit/b57ee94609da3ddc897286cfba968f26ff961491), closes [#1155](https://github.com/kbknapp/clap-rs/issues/1155)) +* **Help Message:** changes the `[values: foo bar baz]` array to `[possible values: foo bar baz]` for consistency with the API ([414707e4e97](https://github.com/kbknapp/clap-rs/pull/1176/commits/414707e4e979d07bfe555247e5d130c546673708), closes [#1160](https://github.com/kbknapp/clap-rs/issues/1160)) + + + +### v2.29.4 (2018-02-06) + + +#### Bug Fixes + +* **Overrides Self:** fixes a bug where options with multiple values couldnt ever have multiple values ([d95907cf](https://github.com/kbknapp/clap-rs/commit/d95907cff6d011a901fe35fa00b0f4e18547a1fb)) + + + + +### v2.29.3 (2018-02-05) + + +#### Improvements + +* **Overrides:** clap now supports arguments which override with themselves ([6c7a0010](https://github.com/kbknapp/clap-rs/commit/6c7a001023ca1eac1cc6ffe6c936b4c4a2aa3c45), closes [#976](https://github.com/kbknapp/clap-rs/issues/976)) + +#### Bug Fixes + +* **Requirements:** fixes an issue where conflicting args would still show up as required ([e06cefac](https://github.com/kbknapp/clap-rs/commit/e06cefac97083838c0a4e1444dcad02a5c3f911e), closes [#1158](https://github.com/kbknapp/clap-rs/issues/1158)) +* Fixes a bug which disallows proper nesting of `--` ([73993fe](https://github.com/kbknapp/clap-rs/commit/73993fe30d135f682e763ec93dcb0814ed518011), closes [#1161](https://github.com/kbknapp/clap-rs/issues/1161)) + +#### New 
Settings + +* **AllArgsOverrideSelf:** adds a new convenience setting to allow all args to override themselves ([4670325d](https://github.com/kbknapp/clap-rs/commit/4670325d1bf0369addec2ae2bcb56f1be054c924)) + + + + +### v2.29.2 (2018-01-16) + + +#### Features + +* **completions/zsh.rs:** + * Escape possible values for options ([25561dec](https://github.com/kbknapp/clap-rs/commit/25561decf147d329b64634a14d9695673c2fc78f)) + * Implement postional argument possible values completion ([f3b0afd2](https://github.com/kbknapp/clap-rs/commit/f3b0afd2bef8b7be97162f8a7802ddf7603dff36)) + * Complete positional arguments properly ([e39aeab8](https://github.com/kbknapp/clap-rs/commit/e39aeab8487596046fbdbc6a226e5c8820585245)) + +#### Bug Fixes + +* **completions/zsh.rs:** + * Add missing autoload for is-at-least ([a6522607](https://github.com/kbknapp/clap-rs/commit/a652260795d1519f6ec2a7a09ccc1258499cad7b)) + * Don't pass -S to _arguments if Zsh is too old ([16b4f143](https://github.com/kbknapp/clap-rs/commit/16b4f143ff466b7ef18a267bc44ade0f9639109b)) + * Maybe fix completions with mixed positionals and subcommands ([1146f0da](https://github.com/kbknapp/clap-rs/commit/1146f0da154d6796fbfcb09db8efa3593cb0d898)) +* **completions/zsh.zsh:** Remove redundant code from output ([0e185b92](https://github.com/kbknapp/clap-rs/commit/0e185b922ed1e0fd653de00b4cd8d567d72ff68e), closes [#1142](https://github.com/kbknapp/clap-rs/issues/1142)) + + + + +### 2.29.1 (2018-01-09) + + +#### Documentation + +* fixes broken links. ([56e734b8](https://github.com/kbknapp/clap-rs/commit/56e734b839303d733d2e5baf7dac39bd7b97b8e4)) +* updates contributors list ([e1313a5a](https://github.com/kbknapp/clap-rs/commit/e1313a5a0f69d8f4016f73b860a63af8318a6676)) + +#### Performance + +* further debloating by removing generics from error cases ([eb8d919e](https://github.com/kbknapp/clap-rs/commit/eb8d919e6f3443db279ba0c902f15d76676c02dc)) +* debloats clap by deduplicating logic and refactors ([03e413d7](https://github.com/kbknapp/clap-rs/commit/03e413d7175d35827cd7d8908d47dbae15a849a3)) + +#### Bug Fixes + +* fixes the ripgrep benchmark by adding a value to a flag that expects it ([d26ab2b9](https://github.com/kbknapp/clap-rs/commit/d26ab2b97cf9c0ea675b440b7b0eaf6ac3ad01f4)) +* **bash completion:** Change the bash completion script code generation to support hyphens. ([ba7f1d18](https://github.com/kbknapp/clap-rs/commit/ba7f1d18eba7a07ce7f57e0981986f66c994b639)) +* **completions/zsh.rs:** Fix completion of long option values ([46365cf8](https://github.com/kbknapp/clap-rs/commit/46365cf8be5331ba04c895eb183e2f230b5aad51)) + + + +## 2.29.0 (2017-12-02) + + +#### API Additions + +* **Arg:** adds Arg::hide_env_values(bool) which allows one to hide any current env values and display only the key in help messages ([fb41d062](https://github.com/kbknapp/clap-rs/commit/fb41d062eedf37cb4f805c90adca29909bd197d7)) + + + + +## 2.28.0 (2017-11-28) + +The minimum required Rust is now 1.20. This was done to start using bitflags 1.0 and having >1.0 deps is a *very good* thing! 
+ +#### Documentation + +* changes the demo version to 2.28 to stay in sync ([ce6ca492](https://github.com/kbknapp/clap-rs/commit/ce6ca492c7510ab6474075806360b96081b021a9)) +* Fix URL path to github hosted files ([ce72aada](https://github.com/kbknapp/clap-rs/commit/ce72aada56a9581d4a6cb4bf9bdb861c3906f8df), closes [#1106](https://github.com/kbknapp/clap-rs/issues/1106)) +* fix typo ([002b07fc](https://github.com/kbknapp/clap-rs/commit/002b07fc98a1c85acb66296b1eec0b2aba906125)) +* **README.md:** updates the readme and pulls out some redundant sections ([db6caf86](https://github.com/kbknapp/clap-rs/commit/db6caf8663747e679d2f4ed3bd127f33476754aa)) + +#### Improvements + +* adds '[SUBCOMMAND]' to usage strings with only AppSettings::AllowExternalSubcommands is used with no other subcommands ([e78bb757](https://github.com/kbknapp/clap-rs/commit/e78bb757a3df16e82d539e450c06767a6bfcf859), closes [#1093](https://github.com/kbknapp/clap-rs/issues/1093)) + +#### API Additions + +* Adds Arg::case_insensitive(bool) which allows matching Arg::possible_values without worrying about ASCII case ([1fec268e](https://github.com/kbknapp/clap-rs/commit/1fec268e51736602e38e67c76266f439e2e0ef12), closes [#1118](https://github.com/kbknapp/clap-rs/issues/1118)) +* Adds the traits to be used with the clap-derive crate to be able to use Custom Derive ([6f4c3412](https://github.com/kbknapp/clap-rs/commit/6f4c3412415e882f5ca2cc3fbd6d4dce79440828)) + +#### Bug Fixes + +* Fixes a regression where --help couldn't be overridden ([a283d69f](https://github.com/kbknapp/clap-rs/commit/a283d69fc08aa016ae1bf9ba010012abecc7ba69), closes [#1112](https://github.com/kbknapp/clap-rs/issues/1112)) +* fixes a bug that allowed options to pass parsing when no value was provided ([2fb75821](https://github.com/kbknapp/clap-rs/commit/2fb758219c7a60d639da67692e100b855a8165ac), closes [#1105](https://github.com/kbknapp/clap-rs/issues/1105)) +* ignore PropagateGlobalValuesDown deprecation warning ([f61ce3f5](https://github.com/kbknapp/clap-rs/commit/f61ce3f55fe65e16b3db0bd4facdc4575de22767), closes [#1086](https://github.com/kbknapp/clap-rs/issues/1086)) + +#### Deps + +* Updates `bitflags` to 1.0 + + + + +## v2.27.1 (2017-10-24) + + +#### Bug Fixes + +* Adds `term_size` as an optional dependency (with feature `wrap_help`) to fix compile bug + + +## v2.27.0 (2017-10-24) + +** This release raises the minimum required version of Rust to 1.18 ** + +** This release also contains a very minor breaking change to fix a bug ** + +The only CLIs affected will be those using unrestrained multiple values and subcommands where the +subcommand name can coincide with one of the multiple values. + +See the commit [0c223f54](https://github.com/kbknapp/clap-rs/commit/0c223f54ed46da406bc8b43a5806e0b227863b31) for full details. + + +#### Bug Fixes + +* Values from global args are now propagated UP and DOWN! 
+* fixes a bug where using AppSettings::AllowHyphenValues would allow invalid arguments even when there is no way for them to be valid ([77ed4684](https://github.com/kbknapp/clap-rs/commit/77ed46841fc0263d7aa32fcc5cc49ef703b37c04), closes [#1066](https://github.com/kbknapp/clap-rs/issues/1066)) +* when an argument requires a value and that value happens to match a subcommand name, its parsed as a value ([0c223f54](https://github.com/kbknapp/clap-rs/commit/0c223f54ed46da406bc8b43a5806e0b227863b31), closes [#1031](https://github.com/kbknapp/clap-rs/issues/1031), breaks [#](https://github.com/kbknapp/clap-rs/issues/), [#](https://github.com/kbknapp/clap-rs/issues/)) +* fixes a bug that prevented number_of_values and default_values to be used together ([5eb342a9](https://github.com/kbknapp/clap-rs/commit/5eb342a99dde07b0f011048efde3e283bc1110fc), closes [#1050](https://github.com/kbknapp/clap-rs/issues/1050), [#1056](https://github.com/kbknapp/clap-rs/issues/1056)) +* fixes a bug that didn't allow args with default values to have conflicts ([58b5b4be](https://github.com/kbknapp/clap-rs/commit/58b5b4be315280888d50d9b15119b91a9028f050), closes [#1071](https://github.com/kbknapp/clap-rs/issues/1071)) +* fixes a panic when using global args and calling App::get_matches_from_safe_borrow multiple times ([d86ec797](https://github.com/kbknapp/clap-rs/commit/d86ec79742c77eb3f663fb30e225954515cf25bb), closes [#1076](https://github.com/kbknapp/clap-rs/issues/1076)) +* fixes issues and potential regressions with global args values not being propagated properly or at all ([a43f9dd4](https://github.com/kbknapp/clap-rs/commit/a43f9dd4aaf1864dd14a3c28dec89ccdd70c61e5), closes [#1010](https://github.com/kbknapp/clap-rs/issues/1010), [#1061](https://github.com/kbknapp/clap-rs/issues/1061), [#978](https://github.com/kbknapp/clap-rs/issues/978)) +* fixes a bug where default values are not applied if the option supports zero values ([9c248cbf](https://github.com/kbknapp/clap-rs/commit/9c248cbf7d8a825119bc387c23e9a1d1989682b0), closes [#1047](https://github.com/kbknapp/clap-rs/issues/1047)) + +#### Documentation + +* adds addtional blurbs about using multiples with subcommands ([03455b77](https://github.com/kbknapp/clap-rs/commit/03455b7751a757e7b2f6ffaf2d16168539c99661)) +* updates the docs to reflect changes to global args and that global args values can now be propagated back up the stack ([ead076f0](https://github.com/kbknapp/clap-rs/commit/ead076f03ada4c322bf3e34203925561ec496d87)) +* add html_root_url attribute ([e67a061b](https://github.com/kbknapp/clap-rs/commit/e67a061bcf567c6518d6c2f58852e01f02764b22)) +* sync README version numbers with crate version ([5536361b](https://github.com/kbknapp/clap-rs/commit/5536361bcda29887ed86bb68e43d0b603cbc423f)) + +#### Improvements + +* args that have require_delimiter(true) is now reflected in help and usage strings ([dce61699](https://github.com/kbknapp/clap-rs/commit/dce616998ed9bd95e8ed3bec1f09a4883da47b85), closes [#1052](https://github.com/kbknapp/clap-rs/issues/1052)) +* if all subcommands are hidden, the subcommands section of the help message is no longer displayed ([4ae7b046](https://github.com/kbknapp/clap-rs/commit/4ae7b0464750bc07ec80ece38e43f003fdd1b8ae), closes [#1046](https://github.com/kbknapp/clap-rs/issues/1046)) + +#### Breaking Changes + +* when an argument requires a value and that value happens to match a subcommand name, its parsed as a value ([0c223f54](https://github.com/kbknapp/clap-rs/commit/0c223f54ed46da406bc8b43a5806e0b227863b31), closes 
[#1031](https://github.com/kbknapp/clap-rs/issues/1031), breaks [#](https://github.com/kbknapp/clap-rs/issues/), [#](https://github.com/kbknapp/clap-rs/issues/)) + +#### Deprecations + +* **AppSettings::PropagateGlobalValuesDown:** this setting is no longer required to propagate values down or up ([2bb5ddce](https://github.com/kbknapp/clap-rs/commit/2bb5ddcee61c791ca1aaca494fbeb4bd5e277488)) + + + + +### v2.26.2 (2017-09-14) + + +#### Improvements + +* if all subcommands are hidden, the subcommands section of the help message is no longer displayed ([4ae7b046](https://github.com/kbknapp/clap-rs/commit/4ae7b0464750bc07ec80ece38e43f003fdd1b8ae), closes [#1046](https://github.com/kbknapp/clap-rs/issues/1046)) + +#### Bug Fixes + +* fixes a bug where default values are not applied if the option supports zero values ([9c248cbf](https://github.com/kbknapp/clap-rs/commit/9c248cbf7d8a825119bc387c23e9a1d1989682b0), closes [#1047](https://github.com/kbknapp/clap-rs/issues/1047)) + + + + +### v2.26.1 (2017-09-14) + + +#### Bug Fixes + +* fixes using require_equals(true) and min_values(0) together ([10ae208f](https://github.com/kbknapp/clap-rs/commit/10ae208f68518eff6e98166724065745f4083174), closes [#1044](https://github.com/kbknapp/clap-rs/issues/1044)) +* escape special characters in zsh and fish completions ([87e019fc](https://github.com/kbknapp/clap-rs/commit/87e019fc84ba6193a8c4ddc26c61eb99efffcd25)) +* avoid panic generating default help msg if term width set to 0 due to bug in textwrap 0.7.0 ([b3eadb0d](https://github.com/kbknapp/clap-rs/commit/b3eadb0de516106db4e08f078ad32e8f6d6e7a57)) +* Change `who's` -> `whose` ([53c1ffe8](https://github.com/kbknapp/clap-rs/commit/53c1ffe87f38b05d8804a0f7832412a952845349)) +* adds a debug assertion to ensure all args added to groups actually exist ([7ad123e2](https://github.com/kbknapp/clap-rs/commit/7ad123e2c02577e3ca30f7e205181e896b157d11), closes [#917](https://github.com/kbknapp/clap-rs/issues/917)) +* fixes a bug where args that allow values to start with a hyphen couldnt contain a double hyphen -- as a value ([ab2f4c9e](https://github.com/kbknapp/clap-rs/commit/ab2f4c9e563e36ec739a4b55d5a5b76fdb9e9fa4), closes [#960](https://github.com/kbknapp/clap-rs/issues/960)) +* fixes a bug where positional argument help text is misaligned ([54c16836](https://github.com/kbknapp/clap-rs/commit/54c16836dea4651806a2cfad53146a83fa3abf21)) +* **Help Message:** fixes long_about not being usable ([a8257ea0](https://github.com/kbknapp/clap-rs/commit/a8257ea0ffb812e552aca256c4a3d2aebfd8065b), closes [#1043](https://github.com/kbknapp/clap-rs/issues/1043)) +* **Suggestions:** output for flag after subcommand ([434ea5ba](https://github.com/kbknapp/clap-rs/commit/434ea5ba71395d8c1afcf88e69f0b0d8339b01a1)) + + + + +## v2.26.0 (2017-07-29) + +Minimum version of Rust is now v1.13.0 (Stable) + + +#### Improvements + +* bumps unicode-segmentation to v1.2 ([cd7b40a2](https://github.com/kbknapp/clap-rs/commit/cd7b40a21c77bae17ba453c5512cb82b7d1ce474)) + + +#### Performance + +* update textwrap to version 0.7.0 ([c2d4e637](https://github.com/kbknapp/clap-rs/commit/c2d4e63756a6f070e38c16dff846e9b0a53d6f93)) + + + + + +### v2.25.1 (2017-07-21) + +#### Improvements + +* impl Default for Values + OsValues for any lifetime. 
([fb7d6231f1](https://github.com/kbknapp/clap-rs/commit/fb7d6231f13a2f79f411e62dca210b7dc9994c18)) + +#### Documentation + +* Various documentation typos and grammar fixes + + +### v2.25.0 (2017-06-20) + + +#### Features + +* use textwrap crate for wrapping help texts ([b93870c1](https://github.com/kbknapp/clap-rs/commit/b93870c10ae3bd90d233c586a33e086803117285)) + +#### Improvements + +* **Suggestions:** suggests to use flag after subcommand when applicable ([2671ca72](https://github.com/kbknapp/clap-rs/commit/2671ca7260119d4311d21c4075466aafdd9da734)) +* Bumps bitflags crate to v0.9 + +#### Documentation + +* Change `who's` -> `whose` ([53c1ffe8](https://github.com/kbknapp/clap-rs/commit/53c1ffe87f38b05d8804a0f7832412a952845349)) + +#### Documentation + +* **App::template:** adds details about the necessity to use AppSettings::UnifiedHelpMessage when using {unified} tags in the help template ([cbea3d5a](https://github.com/kbknapp/clap-rs/commit/cbea3d5acf3271a7a734498c4d99c709941c331e), closes [#949](https://github.com/kbknapp/clap-rs/issues/949)) +* **Arg::allow_hyphen_values:** updates the docs to include warnings for allow_hyphen_values and multiple(true) used together ([f9b0d657](https://github.com/kbknapp/clap-rs/commit/f9b0d657835d3f517f313d70962177dc30acf4a7)) +* **README.md:** + * added a warning about using ~ deps ([821929b5](https://github.com/kbknapp/clap-rs/commit/821929b51bd60213955705900a436c9a64fcb79f), closes [#964](https://github.com/kbknapp/clap-rs/issues/964)) +* **clap_app!:** adds using the @group specifier to the macro docs ([826048cb](https://github.com/kbknapp/clap-rs/commit/826048cb3cbc0280169303f1498ff0a2b7395883), closes [#932](https://github.com/kbknapp/clap-rs/issues/932)) + + + + +### v2.24.2 (2017-05-15) + + +#### Bug Fixes + +* adds a debug assertion to ensure all args added to groups actually exist ([14f6b8f3](https://github.com/kbknapp/clap-rs/commit/14f6b8f3a2f6df73aeeec9c54a54909b1acfc158), closes [#917](https://github.com/kbknapp/clap-rs/issues/917)) +* fixes a bug where args that allow values to start with a hyphen couldnt contain a double hyphen -- as a value ([ebf73a09](https://github.com/kbknapp/clap-rs/commit/ebf73a09db6f3c03c19cdd76b1ba6113930e1643), closes [#960](https://github.com/kbknapp/clap-rs/issues/960)) +* fixes a bug where positional argument help text is misaligned ([54c16836](https://github.com/kbknapp/clap-rs/commit/54c16836dea4651806a2cfad53146a83fa3abf21)) + +#### Documentation + +* **App::template:** adds details about the necessity to use AppSettings::UnifiedHelpMessage when using {unified} tags in the help template ([cf569438](https://github.com/kbknapp/clap-rs/commit/cf569438f309c199800bb8e46c9f140187de69d7), closes [#949](https://github.com/kbknapp/clap-rs/issues/949)) +* **Arg::allow_hyphen_values:** updates the docs to include warnings for allow_hyphen_values and multiple(true) used together ([ded5a2f1](https://github.com/kbknapp/clap-rs/commit/ded5a2f15474d4a5bd46a67b130ccb8b6781bd01)) +* **clap_app!:** adds using the @group specifier to the macro docs ([fe85fcb1](https://github.com/kbknapp/clap-rs/commit/fe85fcb1772b61f13b20b7ea5290e2437a76190c), closes [#932](https://github.com/kbknapp/clap-rs/issues/932)) + + + + +### v2.24.0 (2017-05-07) + + +#### Bug Fixes + +* fixes a bug where args with last(true) and required(true) set were not being printed in the usage string ([3ac533fe](https://github.com/kbknapp/clap-rs/commit/3ac533fedabf713943eedf006f830a5a486bbe80), closes 
[#944](https://github.com/kbknapp/clap-rs/issues/944)) +* fixes a bug that was printing the arg name, instead of value name when Arg::last(true) was used ([e1fe8ac3](https://github.com/kbknapp/clap-rs/commit/e1fe8ac3bc1f9cf4e36df0d881f8419755f1787b), closes [#940](https://github.com/kbknapp/clap-rs/issues/940)) +* fixes a bug where flags were parsed as flags AND positional values when specific combinations of settings were used ([20f83292](https://github.com/kbknapp/clap-rs/commit/20f83292d070038b8cee2a6b47e91f6b0a2f7871), closes [#946](https://github.com/kbknapp/clap-rs/issues/946)) + + + + +## v2.24.0 (2017-05-05) + + +#### Documentation + +* **README.md:** fix some typos ([fa34deac](https://github.com/kbknapp/clap-rs/commit/fa34deac079f334c3af97bb7fb151880ba8887f8)) + +#### API Additions + +* **Arg:** add `default_value_os` ([d5ef8955](https://github.com/kbknapp/clap-rs/commit/d5ef8955414b1587060f7218385256105b639c88)) +* **arg_matches.rs:** Added a Default implementation for Values and OsValues iterators. ([0a4384e3](https://github.com/kbknapp/clap-rs/commit/0a4384e350eed74c2a4dc8964c203f21ac64897f)) + + + +### v2.23.2 (2017-04-19) + + +#### Bug Fixes + +* **PowerShell Completions:** fixes a bug where powershells completions cant be used if no subcommands are defined ([a8bce558](https://github.com/kbknapp/clap-rs/commit/a8bce55837dc4e0fb187dc93180884a40ae09c6f), closes [#931](https://github.com/kbknapp/clap-rs/issues/931)) + +#### Improvements + +* bumps term_size to take advantage of better terminal dimension handling ([e05100b7](https://github.com/kbknapp/clap-rs/commit/e05100b73d74066a90876bf38f952adf5e8ee422)) +* **PowerShell Completions:** massively dedups subcommand names in the generate script to make smaller scripts that are still functionally equiv ([85b0e1cc](https://github.com/kbknapp/clap-rs/commit/85b0e1cc4b9755dda75a93d898d79bc38631552b)) + +#### Documentation + +* Fix a typo the minimum rust version required ([71dabba3](https://github.com/kbknapp/clap-rs/commit/71dabba3ea0a17c88b0e2199c9d99f0acbf3bc17)) + + +### v2.23.1 (2017-04-05) + + +#### Bug Fixes + +* fixes a missing newline character in the autogenerated help and version messages in some instances ([5ae9007d](https://github.com/kbknapp/clap-rs/commit/5ae9007d984ae94ae2752df51bcbaeb0ec89bc15)) + + + +## v2.23.0 (2017-04-05) + + +#### API Additions + +* `App::long_about` +* `App::long_version` +* `App::print_long_help` +* `App::write_long_help` +* `App::print_long_version` +* `App::write_long_version` +* `Arg::long_help` + +#### Features + +* allows distinguishing between short and long version messages (-V/short or --version/long) ([59272b06](https://github.com/kbknapp/clap-rs/commit/59272b06cc213289dc604dbc694cb95d383a5d68)) +* allows distinguishing between short and long help with subcommands in the same manner as args ([6b371891](https://github.com/kbknapp/clap-rs/commit/6b371891a1702173a849d1e95f9fecb168bf6fc4)) +* allows specifying a short help vs a long help (i.e. 
varying levels of detail depending on if -h or --help was used) ([ef1b24c3](https://github.com/kbknapp/clap-rs/commit/ef1b24c3a0dff2f58c5e2e90880fbc2b69df20ee)) +* **clap_app!:** adds support for arg names with hyphens similar to longs with hyphens ([f7a88779](https://github.com/kbknapp/clap-rs/commit/f7a8877978c8f90e6543d4f0d9600c086cf92cd7), closes [#869](https://github.com/kbknapp/clap-rs/issues/869)) + +#### Bug Fixes + +* fixes a bug that wasn't allowing help and version to be properly overridden ([8b2ceb83](https://github.com/kbknapp/clap-rs/commit/8b2ceb8368bcb70689fadf1c7f4b9549184926c1), closes [#922](https://github.com/kbknapp/clap-rs/issues/922)) + +#### Documentation + +* **clap_app!:** documents the `--("some-arg")` method for using args with hyphens inside them ([bc08ef3e](https://github.com/kbknapp/clap-rs/commit/bc08ef3e185393073d969d301989b6319c616c1f), closes [#919](https://github.com/kbknapp/clap-rs/issues/919)) + + + + +### v2.22.2 (2017-03-30) + + +#### Bug Fixes + +* **Custom Usage Strings:** fixes the usage string regression when using help templates ([0e4fd96d](https://github.com/kbknapp/clap-rs/commit/0e4fd96d74280d306d09e60ac44f938a82321769)) + + + + +### v2.22.1 (2017-03-24) + + +#### Bug Fixes + +* **usage:** fixes a big regression with custom usage strings ([2c41caba](https://github.com/kbknapp/clap-rs/commit/2c41caba3c7d723a2894e315d04da796b0e97759)) + + +## v2.22.0 (2017-03-23) + +#### API Additions + +* **App::name:** adds the ability to change the name of the App instance after creation ([d49e8292](https://github.com/kbknapp/clap-rs/commit/d49e8292b026b06e2b70447cd9f08299f4fcba76), closes [#908](https://github.com/kbknapp/clap-rs/issues/908)) +* **Arg::hide_default_value:** adds ability to hide the default value of an argument from the help string ([89e6ea86](https://github.com/kbknapp/clap-rs/commit/89e6ea861e16a1ad56757ca12f6b32d02253e44a), closes [#902](https://github.com/kbknapp/clap-rs/issues/902)) + + + +### v2.21.3 (2017-03-23) + +#### Bug Fixes + +* **yaml:** adds support for loading author info from yaml ([e04c390c](https://github.com/kbknapp/clap-rs/commit/e04c390c597a55fa27e724050342f16c42f1c5c9)) + + + +### v2.21.2 (2017-03-17) + + +#### Improvements + +* add fish subcommand help support ([f8f68cf8](https://github.com/kbknapp/clap-rs/commit/f8f68cf8251669aef4539a25a7c1166f0ac81ea6)) +* options that use `require_equals(true)` now display the equals sign in help messages, usage strings, and errors" ([c8eb0384](https://github.com/kbknapp/clap-rs/commit/c8eb0384d394d2900ccdc1593099c97808a3fa05), closes [#903](https://github.com/kbknapp/clap-rs/issues/903)) + + +#### Bug Fixes + +* setting the max term width now correctly propagates down through child subcommands + + + + +### v2.21.1 (2017-03-12) + + +#### Bug Fixes + +* **ArgRequiredElseHelp:** fixes the precedence of this error to prioritize over other error messages ([74b751ff](https://github.com/kbknapp/clap-rs/commit/74b751ff2e3631e337b7946347c1119829a41c53), closes [#895](https://github.com/kbknapp/clap-rs/issues/895)) +* **Positionals:** fixes some regression bugs resulting from old asserts in debug mode. 
([9a3bc98e](https://github.com/kbknapp/clap-rs/commit/9a3bc98e9b55e7514b74b73374c5ac8b6e5e0508), closes [#896](https://github.com/kbknapp/clap-rs/issues/896)) + + + + +## v2.21.0 (2017-03-09) + +#### Performance + +* doesn't run `arg_post_processing` on multiple values anymore ([ec516182](https://github.com/kbknapp/clap-rs/commit/ec5161828729f6a53f0fccec8648f71697f01f78)) +* changes internal use of `VecMap` to `Vec` for matched values of `Arg`s ([22bf137a](https://github.com/kbknapp/clap-rs/commit/22bf137ac581684c6ed460d2c3c640c503d62621)) +* vastly reduces the amount of cloning when adding non-global args minus when they're added from `App::args` which is forced to clone ([8da0303b](https://github.com/kbknapp/clap-rs/commit/8da0303bc02db5fe047cfc0631a9da41d9dc60f7)) +* refactor to remove unneeded vectors and allocations and checks for significant performance increases ([0efa4119](https://github.com/kbknapp/clap-rs/commit/0efa4119632f134fc5b8b9695b007dd94b76735d)) + +#### Documentation + +* Fix examples link in CONTRIBUTING.md ([60cf875d](https://github.com/kbknapp/clap-rs/commit/60cf875d67a252e19bb85054be57696fac2c57a1)) + +#### Improvements + +* when `AppSettings::SubcommandsNegateReqs` and `ArgsNegateSubcommands` are used, a new more accurate double line usage string is shown ([50f02300](https://github.com/kbknapp/clap-rs/commit/50f02300d81788817acefef0697e157e01b6ca32), closes [#871](https://github.com/kbknapp/clap-rs/issues/871)) + +#### API Additions + +* **Arg::last:** adds the ability to mark a positional argument as 'last' which means it should be used with `--` syntax and can be accessed early ([6a7aea90](https://github.com/kbknapp/clap-rs/commit/6a7aea9043b83badd9ab038b4ecc4c787716147e), closes [#888](https://github.com/kbknapp/clap-rs/issues/888)) +* provides `default_value_os` and `default_value_if[s]_os` ([0f2a3782](https://github.com/kbknapp/clap-rs/commit/0f2a378219a6930748d178ba350fe5925be5dad5), closes [#849](https://github.com/kbknapp/clap-rs/issues/849)) +* provides `App::help_message` and `App::version_message` which allows one to override the auto-generated help/version flag associated help ([389c413](https://github.com/kbknapp/clap-rs/commit/389c413b7023dccab8c76aa00577ea1d048e7a99), closes [#889](https://github.com/kbknapp/clap-rs/issues/889)) + +#### New Settings + +* **InferSubcommands:** adds a setting to allow one to infer shortened subcommands or aliases (i.e. 
for subcommmand "test", "t", "te", or "tes" would be allowed assuming no other ambiguities) ([11602032](https://github.com/kbknapp/clap-rs/commit/11602032f6ff05881e3adf130356e37d5e66e8f9), closes [#863](https://github.com/kbknapp/clap-rs/issues/863)) + +#### Bug Fixes + +* doesn't print the argument sections in the help message if all args in that section are hidden ([ce5ee5f5](https://github.com/kbknapp/clap-rs/commit/ce5ee5f5a76f838104aeddd01c8ec956dd347f50)) +* doesn't include the various [ARGS] [FLAGS] or [OPTIONS] if the only ones available are hidden ([7b4000af](https://github.com/kbknapp/clap-rs/commit/7b4000af97637703645c5fb2ac8bb65bd546b95b), closes [#882](https://github.com/kbknapp/clap-rs/issues/882)) +* now correctly shows subcommand as required in the usage string when AppSettings::SubcommandRequiredElseHelp is used ([8f0884c1](https://github.com/kbknapp/clap-rs/commit/8f0884c1764983a49b45de52a1eddf8d721564d8)) +* fixes some memory leaks when an error is detected and clap exits ([8c2dd287](https://github.com/kbknapp/clap-rs/commit/8c2dd28718262ace4ae0db98563809548e02a86b)) +* fixes a trait that's marked private accidentlly, but should be crate internal public ([1ae21108](https://github.com/kbknapp/clap-rs/commit/1ae21108015cea87e5360402e1747025116c7878)) +* **Completions:** fixes a bug that tried to propogate global args multiple times when generating multiple completion scripts ([5e9b9cf4](https://github.com/kbknapp/clap-rs/commit/5e9b9cf4dd80fa66a624374fd04e6545635c1f94), closes [#846](https://github.com/kbknapp/clap-rs/issues/846)) + +#### Features + +* **Options:** adds the ability to require the equals syntax with options --opt=val ([f002693d](https://github.com/kbknapp/clap-rs/commit/f002693dec6a6959c4e9590cb7b7bfffd6d6e5bc), closes [#833](https://github.com/kbknapp/clap-rs/issues/833)) + + + + +### v2.20.5 (2017-02-18) + + +#### Bug Fixes + +* **clap_app!:** fixes a critical bug of a missing fragment specifier when using `!property` style tags. 
([5635c1f94](https://github.com/kbknapp/clap-rs/commit/5e9b9cf4dd80fa66a624374fd04e6545635c1f94)) + + + +### v2.20.4 (2017-02-15) + + +#### Bug Fixes + +* **Completions:** fixes a bug that tried to propogate global args multiple times when generating multiple completion scripts ([5e9b9cf4](https://github.com/kbknapp/clap-rs/commit/5e9b9cf4dd80fa66a624374fd04e6545635c1f94), closes [#846](https://github.com/kbknapp/clap-rs/issues/846)) + +#### Documentation + +* Fix examples link in CONTRIBUTING.md ([60cf875d](https://github.com/kbknapp/clap-rs/commit/60cf875d67a252e19bb85054be57696fac2c57a1)) + + + +### v2.20.3 (2017-02-03) + + +#### Documentation + +* **Macros:** adds a warning about changing values in Cargo.toml not triggering a rebuild automatically ([112aea3e](https://github.com/kbknapp/clap-rs/commit/112aea3e42ae9e0c0a2d33ebad89496dbdd95e5d), closes [#838](https://github.com/kbknapp/clap-rs/issues/838)) + +#### Bug Fixes + +* fixes a println->debugln typo ([279aa62e](https://github.com/kbknapp/clap-rs/commit/279aa62eaf08f56ce090ba16b937bc763cbb45be)) +* fixes bash completions for commands that have an underscore in the name ([7f5cfa72](https://github.com/kbknapp/clap-rs/commit/7f5cfa724f0ac4e098f5fe466c903febddb2d994), closes [#581](https://github.com/kbknapp/clap-rs/issues/581)) +* fixes a bug where ZSH completions would panic if the binary name had an underscore in it ([891a2a00](https://github.com/kbknapp/clap-rs/commit/891a2a006f775e92c556dda48bb32fac9807c4fb), closes [#581](https://github.com/kbknapp/clap-rs/issues/581)) +* allow final word to be wrapped in wrap_help ([564c5f0f](https://github.com/kbknapp/clap-rs/commit/564c5f0f1730f4a2c1cdd128664f1a981c31dcd4), closes [#828](https://github.com/kbknapp/clap-rs/issues/828)) +* fixes a bug where global args weren't included in the generated completion scripts ([9a1e006e](https://github.com/kbknapp/clap-rs/commit/9a1e006eb75ad5a6057ebd119aa90f7e06c0ace8), closes [#841](https://github.com/kbknapp/clap-rs/issues/841)) + + + + +### v2.20.2 (2017-02-03) + +#### Bug Fixes + +* fixes a critical bug where subcommand settings were being propogated too far ([74648c94](https://github.com/kbknapp/clap-rs/commit/74648c94b893df542bfa5bb595e68c7bb8167e36), closes [#832](https://github.com/kbknapp/clap-rs/issues/832)) + + +#### Improvements + +* adds ArgGroup::multiple to the supported YAML fields for building ArgGroups from YAML ([d8590037](https://github.com/kbknapp/clap-rs/commit/d8590037ce07dafd8cd5b26928aa4a9fd3018288), closes [#840](https://github.com/kbknapp/clap-rs/issues/840)) + + +### v2.20.1 (2017-01-31) + +#### Bug Fixes + +* allow final word to be wrapped in wrap_help ([564c5f0f](https://github.com/kbknapp/clap-rs/commit/564c5f0f1730f4a2c1cdd128664f1a981c31dcd4), closes [#828](https://github.com/kbknapp/clap-rs/issues/828)) +* actually show character in debug output ([84d8c547](https://github.com/kbknapp/clap-rs/commit/84d8c5476de95b7f37d61888bc4f13688b712434)) +* include final character in line lenght ([aff4ba18](https://github.com/kbknapp/clap-rs/commit/aff4ba18da8147e1259b04b0bfbc1fcb5c78a3c0)) + +#### Improvements + +* updates libc and term_size deps for the libc version conflict ([6802ac4a](https://github.com/kbknapp/clap-rs/commit/6802ac4a59c142cda9ec55ca0c45ae5cb9a6ab55)) + +#### Documentation + +* fix link from app_from_crate! to crate_authors! 
(#822) ([5b29be9b](https://github.com/kbknapp/clap-rs/commit/5b29be9b073330ab1f7227cdd19fe4aab39d5dcb)) +* fix spelling of "guaranteed" ([4f30a65b](https://github.com/kbknapp/clap-rs/commit/4f30a65b9c03eb09607eb91a929a6396637dc105)) + + + +#### New Settings + +* **ArgsNegateSubcommands:** disables args being allowed between subcommands ([5e2af8c9](https://github.com/kbknapp/clap-rs/commit/5e2af8c96adb5ab75fa2d1536237ebcb41869494), closes [#793](https://github.com/kbknapp/clap-rs/issues/793)) +* **DontCollapseArgsInUsage:** disables the collapsing of positional args into `[ARGS]` in the usage string ([c2978afc](https://github.com/kbknapp/clap-rs/commit/c2978afc61fb46d5263ab3b2d87ecde1c9ce1553), closes [#769](https://github.com/kbknapp/clap-rs/issues/769)) +* **DisableHelpSubcommand:** disables building the `help` subcommand ([a10fc859](https://github.com/kbknapp/clap-rs/commit/a10fc859ee20159fbd9ff4337be59b76467a64f2)) +* **AllowMissingPositional:** allows one to implement `$ prog [optional] ` style CLIs where the second positional argument is required, but the first is optional ([1110fdc7](https://github.com/kbknapp/clap-rs/commit/1110fdc7a345c108820dc45783a9bf893fa4c214), closes [#636](https://github.com/kbknapp/clap-rs/issues/636)) +* **PropagateGlobalValuesDown:** automatically propagates global arg's values down through *used* subcommands ([985536c8](https://github.com/kbknapp/clap-rs/commit/985536c8ebcc09af98aac835f42a8072ad58c262), closes [#694](https://github.com/kbknapp/clap-rs/issues/694)) + +#### API Additions + +##### Arg + +* **Arg::value_terminator:** adds the ability to terminate multiple values with a given string or char ([be64ce0c](https://github.com/kbknapp/clap-rs/commit/be64ce0c373efc106384baca3f487ea99fe7b8cf), closes [#782](https://github.com/kbknapp/clap-rs/issues/782)) +* **Arg::default_value_if[s]:** adds new methods for *conditional* default values (such as when a particular value from another argument was used) ([eb4010e7](https://github.com/kbknapp/clap-rs/commit/eb4010e7b21724447ef837db11ac441915728f22)) +* **Arg::requires_if[s]:** adds the ability to *conditionally* require additional args (such as if a particular value was used) ([198449d6](https://github.com/kbknapp/clap-rs/commit/198449d64393c265f0bc327aaeac23ec4bb97226)) +* **Arg::required_if[s]:** adds the ability for an arg to be *conditionally* required (i.e.
"arg X is only required if arg Y was used with value Z") ([ee9cfddf](https://github.com/kbknapp/clap-rs/commit/ee9cfddf345a6b5ae2af42ba72aa5c89e2ca7f59)) +* **Arg::validator_os:** adds ability to validate values which may contain invalid UTF-8 ([47232498](https://github.com/kbknapp/clap-rs/commit/47232498a813db4f3366ccd3e9faf0bff56433a4)) + +##### Macros + +* **crate_description!:** Uses the `Cargo.toml` description field to fill in the `App::about` method at compile time ([4d9a82db](https://github.com/kbknapp/clap-rs/commit/4d9a82db8e875e9b64a9c2a5c6e22c25afc1279d), closes [#778](https://github.com/kbknapp/clap-rs/issues/778)) +* **crate_name!:** Uses the `Cargo.toml` name field to fill in the `App::new` method at compile time ([4d9a82db](https://github.com/kbknapp/clap-rs/commit/4d9a82db8e875e9b64a9c2a5c6e22c25afc1279d), closes [#778](https://github.com/kbknapp/clap-rs/issues/778)) +* **app_from_crate!:** Combines `crate_version!`, `crate_name!`, `crate_description!`, and `crate_authors!` into a single macro call to build a default `App` instance from the `Cargo.toml` fields ([4d9a82db](https://github.com/kbknapp/clap-rs/commit/4d9a82db8e875e9b64a9c2a5c6e22c25afc1279d), closes [#778](https://github.com/kbknapp/clap-rs/issues/778)) + + +#### Features + +* **no_cargo:** adds a `no_cargo` feature to disable Cargo-env-var-dependent macros for those *not* using `cargo` to build their crates (#786) ([6fdd2f9d](https://github.com/kbknapp/clap-rs/commit/6fdd2f9d693aaf1118fc61bd362273950703f43d)) + +#### Bug Fixes + +* **Options:** fixes a critical bug where options weren't forced to have a value ([5a5f2b1e](https://github.com/kbknapp/clap-rs/commit/5a5f2b1e9f598a0d0280ef3e98abbbba2bc41132), closes [#665](https://github.com/kbknapp/clap-rs/issues/665)) +* fixes a bug where calling the help of a subcommand wasn't ignoring required args of parent commands ([d3d34a2b](https://github.com/kbknapp/clap-rs/commit/d3d34a2b51ef31004055b0ab574f766d801c3adf), closes [#789](https://github.com/kbknapp/clap-rs/issues/789)) +* **Help Subcommand:** fixes a bug where the help subcommand couldn't be overriden ([d34ec3e0](https://github.com/kbknapp/clap-rs/commit/d34ec3e032d03e402d8e87af9b2942fe2819b2da), closes [#787](https://github.com/kbknapp/clap-rs/issues/787)) +* **Low Index Multiples:** fixes a bug which caused combinations of LowIndexMultiples and `Arg::allow_hyphen_values` to fail parsing ([26c670ca](https://github.com/kbknapp/clap-rs/commit/26c670ca16d2c80dc26d5c1ce83380ace6357318)) + +#### Improvements + +* **Default Values:** improves the error message when default values are involved ([1f33de54](https://github.com/kbknapp/clap-rs/commit/1f33de545036e7fd2f80faba251fca009bd519b8), closes [#774](https://github.com/kbknapp/clap-rs/issues/774)) +* **YAML:** adds conditional requirements and conditional default values to YAML ([9a4df327](https://github.com/kbknapp/clap-rs/commit/9a4df327893486adb5558ffefba790c634ccdc6e), closes [#764](https://github.com/kbknapp/clap-rs/issues/764)) +* Support `--("some-arg-name")` syntax for defining long arg names when using `clap_app!` macro ([f41ec962](https://github.com/kbknapp/clap-rs/commit/f41ec962c243a5ffff8b1be1ae2ad63970d3d1d4)) +* Support `("some app name")` syntax for defining app names when using `clap_app!` macro ([9895b671](https://github.com/kbknapp/clap-rs/commit/9895b671cff784f35cf56abcd8270f7c2ba09699), closes [#759](https://github.com/kbknapp/clap-rs/issues/759)) +* **Help Wrapping:** long app names (with spaces), authors, and descriptions are now wrapped 
appropriately ([ad4691b7](https://github.com/kbknapp/clap-rs/commit/ad4691b71a63e951ace346318238d8834e04ad8a), closes [#777](https://github.com/kbknapp/clap-rs/issues/777)) + + +#### Documentation + +* **Conditional Default Values:** fixes the failing doc tests of Arg::default_value_ifs ([4ef09101](https://github.com/kbknapp/clap-rs/commit/4ef091019c083b4db1a0c13f1c1e95ac363259f2)) +* **Conditional Requirements:** adds docs for Arg::requires_ifs ([7f296e29](https://github.com/kbknapp/clap-rs/commit/7f296e29db7d9036e76e5dbcc9c8b20dfe7b25bd)) +* **README.md:** fix some typos ([f22c21b4](https://github.com/kbknapp/clap-rs/commit/f22c21b422d5b287d1a1ac183a379ee02eebf54f)) +* **src/app/mod.rs:** fix some typos ([5c9b0d47](https://github.com/kbknapp/clap-rs/commit/5c9b0d47ca78dea285c5b9dec79063d24c3e451a)) + + +### v2.19.3 (2016-12-28) + + +#### Bug Fixes + +* fixes a bug where calling the help of a subcommand wasn't ignoring required args of parent commands ([a0ee4993](https://github.com/kbknapp/clap-rs/commit/a0ee4993015ea97b06b5bc9f378d8bcb18f1c51c), closes [#789](https://github.com/kbknapp/clap-rs/issues/789)) + + + + +### v2.19.2 (2016-12-08) + +#### Bug Fixes + +* **ZSH Completions:** escapes square brackets in ZSH completions ([7e17d5a3](https://github.com/kbknapp/clap-rs/commit/7e17d5a36b2cc2cc77e7b15796b14d639ed3cbf7), closes [#771](https://github.com/kbknapp/clap-rs/issues/771)) + +#### Documentation + +* **Examples:** adds subcommand examples ([0e0f3354](https://github.com/kbknapp/clap-rs/commit/0e0f33547a6901425afc1d9fbe19f7ae3832d9a4), closes [#766](https://github.com/kbknapp/clap-rs/issues/766)) +* **README.md:** adds guidance on when to use ~ in version pinning, and clarifies breaking change policy ([591eaefc](https://github.com/kbknapp/clap-rs/commit/591eaefc7319142ba921130e502bb0729feed907), closes [#765](https://github.com/kbknapp/clap-rs/issues/765)) + + + + +### v2.19.1 (2016-12-01) + + +#### Bug Fixes + +* **Help Messages:** fixes help message alignment when specific settings are used on options ([cd94b318](https://github.com/kbknapp/clap-rs/commit/cd94b3188d63b63295a319e90e826bca46befcd2), closes [#760](https://github.com/kbknapp/clap-rs/issues/760)) + +#### Improvements + +* **Bash Completion:** allows bash completion to fall back to traditional bash completion when there is no matching completion function ([b1b16d56](https://github.com/kbknapp/clap-rs/commit/b1b16d56d8fddf819bdbe24b3724bb6a9f3fa613)) + + + +## v2.19.0 (2016-11-21) + +#### Features + +* allows specifying AllowLeadingHyphen style values, but only for specific args rather than command-wide ([c0d70feb](https://github.com/kbknapp/clap-rs/commit/c0d70febad9996a77a54107054daf1914c50d4ef), closes [#742](https://github.com/kbknapp/clap-rs/issues/742)) + +#### Bug Fixes + +* **Required Unless:** fixes a bug where having required_unless set doesn't work when conflicts are also set ([d20331b6](https://github.com/kbknapp/clap-rs/commit/d20331b6f7940ac3a4e919999f8bb4780875125d), closes [#753](https://github.com/kbknapp/clap-rs/issues/753)) +* **ZSH Completions:** fixes an issue where zsh completions caused panics if there were no subcommands ([49e7cdab](https://github.com/kbknapp/clap-rs/commit/49e7cdab76dd1ccc07221e360f07808ec62648aa), closes [#754](https://github.com/kbknapp/clap-rs/issues/754)) + +#### Improvements + +* **Validators:** improves the error messages for validators ([65eb3385](https://github.com/kbknapp/clap-rs/commit/65eb33859d3ff53e7d3277f02a9d3fd9038a9dfb), closes
[#744](https://github.com/kbknapp/clap-rs/issues/744)) + +#### Documentation + +* updates the docs landing page ([01e1e33f](https://github.com/kbknapp/clap-rs/commit/01e1e33f377934099a4a725fab5cd6c5ff50eaa2)) +* adds the macro version back to the readme ([45eb9bf1](https://github.com/kbknapp/clap-rs/commit/45eb9bf130329c3f3853aba0342c2fe3c64ff80f)) +* fix broken docs links ([808e7cee](https://github.com/kbknapp/clap-rs/commit/808e7ceeb86d4a319bdc270f51c23a64621dbfb3)) +* **Compatibility Policy:** adds an official compatibility policy to ([760d66dc](https://github.com/kbknapp/clap-rs/commit/760d66dc17310b357f257776624151da933cd25d), closes [#740](https://github.com/kbknapp/clap-rs/issues/740)) +* **Contributing:** updates the readme to improve the readability and contributing sections ([eb51316c](https://github.com/kbknapp/clap-rs/commit/eb51316cdfdc7258d287ba13b67ef2f42bd2b8f6)) + + +## v2.18.0 (2016-11-05) + + +#### Features + +* **Completions:** adds completion support for PowerShell. ([cff82c88](https://github.com/kbknapp/clap-rs/commit/cff82c880e21064fca63351507b80350df6caadf), closes [#729](https://github.com/kbknapp/clap-rs/issues/729)) + + + + +### v2.17.1 (2016-11-02) + + +#### Bug Fixes + +* **Low Index Multiples:** fixes a bug where using low index multiples was propagated to subcommands ([33924e88](https://github.com/kbknapp/clap-rs/commit/33924e884461983c4e6b5ea1330fecc769a4ade7), closes [#725](https://github.com/kbknapp/clap-rs/issues/725)) + + + + +## v2.17.0 (2016-11-01) + + +#### Features + +* **Positional Args:** allows specifying the second to last positional argument as multiple(true) ([1ced2a74](https://github.com/kbknapp/clap-rs/commit/1ced2a7433ea8937a1b260ea65d708f32ca7c95e), closes [#725](https://github.com/kbknapp/clap-rs/issues/725)) + + + + +### v2.16.4 (2016-10-31) + + +#### Improvements + +* **Error Output:** conflicting errors are now symmetrical, meaning more consistent and less confusing ([3d37001d](https://github.com/kbknapp/clap-rs/commit/3d37001d1dc647d73cc597ff172f1072d4beb80d), closes [#718](https://github.com/kbknapp/clap-rs/issues/718)) + +#### Documentation + +* Fix typo in example `13a_enum_values_automatic` ([c22fbc07](https://github.com/kbknapp/clap-rs/commit/c22fbc07356e556ffb5d1a79ec04597d149b915e)) +* **README.md:** fixes failing yaml example (#715) ([21fba9e6](https://github.com/kbknapp/clap-rs/commit/21fba9e6cd8c163012999cd0ce271ec8780c5695)) + +#### Bug Fixes + +* **ZSH Completions:** fixes bug that caused panic on subcommands with aliases ([5c70e1a0](https://github.com/kbknapp/clap-rs/commit/5c70e1a01bc977e44c10015d18bb8e215c32dfc8), closes [#714](https://github.com/kbknapp/clap-rs/issues/714)) +* **debug:** fixes the debug feature (#716) ([6c11ccf4](https://github.com/kbknapp/clap-rs/commit/6c11ccf443d46258d51f7cda33fbcc81e7fe8e90)) + + + + +### v2.16.3 (2016-10-28) + + +#### Bug Fixes + +* Derive display order after propagation ([9cb6facf](https://github.com/kbknapp/clap-rs/commit/9cb6facf507aff7cddd124b8c29714d2b0e7bd13), closes [#706](https://github.com/kbknapp/clap-rs/issues/706)) +* **yaml-example:** inconsistent args ([847f7199](https://github.com/kbknapp/clap-rs/commit/847f7199219ead5065561d91d64780d99ae4b587)) + + + + +### v2.16.2 (2016-10-25) + + +#### Bug Fixes + +* **Fish Completions:** fixes a bug where single quotes are not escaped ([780b4a18](https://github.com/kbknapp/clap-rs/commit/780b4a18281b6f7f7071e1b9db2290fae653c406), closes [#704](https://github.com/kbknapp/clap-rs/issues/704)) + + + +### v2.16.1 (2016-10-24) + + 
+#### Bug Fixes + +* **Help Message:** fixes a regression bug where args with multiple(true) threw off alignment ([ebddac79](https://github.com/kbknapp/clap-rs/commit/ebddac791f3ceac193d5ad833b4b734b9643a7af), closes [#702](https://github.com/kbknapp/clap-rs/issues/702)) + + + + +## v2.16.0 (2016-10-23) + + +#### Features + +* **Completions:** adds ZSH completion support ([3e36b0ba](https://github.com/kbknapp/clap-rs/commit/3e36b0bac491d3f6194aee14604caf7be26b3d56), closes [#699](https://github.com/kbknapp/clap-rs/issues/699)) + + + + +## v2.15.0 (2016-10-21) + + +#### Features + +* **AppSettings:** adds new setting `AppSettings::AllowNegativeNumbers` ([ab064546](https://github.com/kbknapp/clap-rs/commit/ab06454677fb6aa9b9f804644fcca2168b1eaee3), closes [#696](https://github.com/kbknapp/clap-rs/issues/696)) + +#### Documentation + +* **app/settings.rs:** moves variants to roughly alphabetical order ([9ed4d4d7](https://github.com/kbknapp/clap-rs/commit/9ed4d4d7957a23357aef60081e45639ab9e3905f)) + + + +### v2.14.1 (2016-10-20) + + +#### Documentation + +* Improve documentation around features ([4ee85b95](https://github.com/kbknapp/clap-rs/commit/4ee85b95d2d16708a016a3ba4e6e2c93b89b7fad)) +* reword docs for ErrorKind and app::Settings ([3ccde7a4](https://github.com/kbknapp/clap-rs/commit/3ccde7a4b8f7a2ea8b916a5415c04a8ff4b5cb7a)) +* fix tests that fail when the "suggestions" feature is disabled ([996fc381](https://github.com/kbknapp/clap-rs/commit/996fc381763a48d125c7ea8a58fed057fd0b4ac6)) +* fix the OsString-using doc-tests ([af9e1a39](https://github.com/kbknapp/clap-rs/commit/af9e1a393ce6cdda46a03c8a4f48df222b015a24)) +* tag non-rust code blocks as such instead of ignoring them ([0ba9f4b1](https://github.com/kbknapp/clap-rs/commit/0ba9f4b123f281952581b6dec948f7e51dd22890)) +* **ErrorKind:** improve some errors about subcommands ([9f6217a4](https://github.com/kbknapp/clap-rs/commit/9f6217a424da823343d7b801b9c350dee3cd1906)) +* **yaml:** make sure the doc-tests don't fail before "missing file" ([8c0f5551](https://github.com/kbknapp/clap-rs/commit/8c0f55516f4910c78c9f8a2bdbd822729574f95b)) + +#### Improvements + +* Stabilize clap_app! ([cd516006](https://github.com/kbknapp/clap-rs/commit/cd516006e35c37b005f329338560a0a53d1f3e00)) +* **with_defaults:** Deprecate App::with_defaults() ([26085409](https://github.com/kbknapp/clap-rs/commit/2608540940c8bb66e517b65706bc7dea55510682), closes [#638](https://github.com/kbknapp/clap-rs/issues/638)) + +#### Bug Fixes + +* fixes a bug that made determining when to auto-wrap long help messages inconsistent ([468baadb](https://github.com/kbknapp/clap-rs/commit/468baadb8398fc1d37897b0c49374aef4cf97dca), closes [#688](https://github.com/kbknapp/clap-rs/issues/688)) +* **Completions:** fish completions for nested subcommands ([a61eaf8a](https://github.com/kbknapp/clap-rs/commit/a61eaf8aade76cfe90ccc0f7125751ebf60e3254)) +* **features:** Make lints not enable other nightly-requiring features ([835f75e3](https://github.com/kbknapp/clap-rs/commit/835f75e3ba20999117363ed9f916464d777f36ef)) + + + + +## v2.14.0 (2016-10-05) + + +#### Features + +* **arg_aliases:** Ability to alias arguments ([33b5f6ef](https://github.com/kbknapp/clap-rs/commit/33b5f6ef2c9612ecabb31f96b824793e46bfd3dd), closes [#669](https://github.com/kbknapp/clap-rs/issues/669)) +* **flag_aliases:** Ability to alias flags ([40d6dac9](https://github.com/kbknapp/clap-rs/commit/40d6dac973927dded6ab423481634ef47ee7bfd7)) + +#### Bug Fixes + +* **UsageParser:** Handle non-ascii names / options. 
([1d6a7c6e](https://github.com/kbknapp/clap-rs/commit/1d6a7c6e7e6aadc527346aa822f19d8587f714f3), closes [#664](https://github.com/kbknapp/clap-rs/issues/664)) + +#### Documentation + +* typo ([bac417fa](https://github.com/kbknapp/clap-rs/commit/bac417fa1cea3d32308334c7cccfcf54546cd9d8)) + + + +## v2.13.0 (2016-09-18) + + +#### Documentation + +* updates README.md with new website information and updated video tutorials info ([0c19c580](https://github.com/kbknapp/clap-rs/commit/0c19c580cf50f1b82ff32f70b36708ae2bcac132)) +* updates the docs about removing implicit value_delimiter(true) ([c81bc722](https://github.com/kbknapp/clap-rs/commit/c81bc722ebb8a86d22be89b5aec98df9fe222a08)) +* **Default Values:** adds better examples on using default values ([57a8d9ab](https://github.com/kbknapp/clap-rs/commit/57a8d9abb2f973c235a8a14f8fc031673d7a7460), closes [#418](https://github.com/kbknapp/clap-rs/issues/418)) + +#### Bug Fixes + +* **Value Delimiters:** fixes the confusion around implicitly setting value delimiters. (default is now `false`) ([09d4d0a9](https://github.com/kbknapp/clap-rs/commit/09d4d0a9038d7ce2df55c2aec95e16f36189fcee), closes [#666](https://github.com/kbknapp/clap-rs/issues/666)) + + + + +### v2.12.1 (2016-09-13) + + +#### Bug Fixes + +* **Help Wrapping:** fixes a regression-bug where the old {n} newline char stopped working ([92ac353b](https://github.com/kbknapp/clap-rs/commit/92ac353b48b7caa2511ad2a046d94da93c236cf6), closes [#661](https://github.com/kbknapp/clap-rs/issues/661)) + + + + +## v2.12.0 (2016-09-13) + + +#### Features + +* **Help:** adds ability to hide the possible values on a per argument basis ([9151ef73](https://github.com/kbknapp/clap-rs/commit/9151ef739871f2e74910c342299c0de196b95dec), closes [#640](https://github.com/kbknapp/clap-rs/issues/640)) +* **help:** allow for limiting detected terminal width ([a43e28af](https://github.com/kbknapp/clap-rs/commit/a43e28af85c9a9deaedd5ef735f4f13008daab29), closes [#653](https://github.com/kbknapp/clap-rs/issues/653)) + +#### Documentation + +* **Help Wrapping:** removes the verbiage about using `'{n}'` to insert newlines in help text ([c5a2b352](https://github.com/kbknapp/clap-rs/commit/c5a2b352ca600f5b802290ad945731066cd53611)) +* **Value Delimiters:** updates the docs for the Arg::multiple method WRT value delimiters and default settings ([f9d17a06](https://github.com/kbknapp/clap-rs/commit/f9d17a060aa53f10d0a6e1a7eed5d989d1a59533)) +* **appsettings:** Document AppSetting::DisableVersion ([94501965](https://github.com/kbknapp/clap-rs/commit/945019654d2ca67eb2b1d6014fdf80b84d528d30), closes [#589](https://github.com/kbknapp/clap-rs/issues/589)) + +#### Bug Fixes + +* **AllowLeadingHyphen:** fixes a bug where valid args aren't recognized with this setting ([a9699e4d](https://github.com/kbknapp/clap-rs/commit/a9699e4d7cdc9a06e73b845933ff1fe6d76f016a), closes [#588](https://github.com/kbknapp/clap-rs/issues/588)) + +#### Improvements + +* **Help Wrapping:** + * clap now ignores hard newlines in help messages and properly re-aligns text, but still wraps if the term width is too small ([c7678523](https://github.com/kbknapp/clap-rs/commit/c76785239fd42adc8ca04f9202b6fec615aa9f14), closes [#617](https://github.com/kbknapp/clap-rs/issues/617)) + * makes some minor changes to when next line help is automatically used ([01cae799](https://github.com/kbknapp/clap-rs/commit/01cae7990a33167ac35103fb36c811b4fe6eb98f)) +* **Value Delimiters:** changes the default value delimiter rules 
([f9e69254](https://github.com/kbknapp/clap-rs/commit/f9e692548e8c94de15f909432de301407d6bb834), closes [#655](https://github.com/kbknapp/clap-rs/issues/655)) +* **YAML:** supports setting Arg::require_delimiter from YAML ([b9b55a39](https://github.com/kbknapp/clap-rs/commit/b9b55a39dfebcdbdc05dca2692927e503db50816)) + +#### Performance + +* **help:** fix redundant contains() checks ([a8afed74](https://github.com/kbknapp/clap-rs/commit/a8afed7428bf0733f8e93bb11ad6c00d9e970fcc)) + + + + +### v2.11.3 (2016-09-07) + + +#### Documentation + +* **Help Wrapping:** removes the verbiage about using `'{n}'` to insert newlines in help text ([c5a2b352](https://github.com/kbknapp/clap-rs/commit/c5a2b352ca600f5b802290ad945731066cd53611)) + +#### Improvements + +* **Help Wrapping:** + * clap now ignores hard newlines in help messages and properly re-aligns text, but still wraps if the term width is too small ([c7678523](https://github.com/kbknapp/clap-rs/commit/c76785239fd42adc8ca04f9202b6fec615aa9f14), closes [#617](https://github.com/kbknapp/clap-rs/issues/617)) + * makes some minor changes to when next line help is automatically used ([01cae799](https://github.com/kbknapp/clap-rs/commit/01cae7990a33167ac35103fb36c811b4fe6eb98f)) +* **YAML:** supports setting Arg::require_delimiter from YAML ([b9b55a39](https://github.com/kbknapp/clap-rs/commit/b9b55a39dfebcdbdc05dca2692927e503db50816)) + + + + + +### v2.11.2 (2016-09-06) + +#### Improvements + +* **Help Wrapping:** makes some minor changes to when next line help is automatically used ([5658b117](https://github.com/kbknapp/clap-rs/commit/5658b117aec3e03adff9c8c52a4c4bc1fcb4e1ff)) + + + +### v2.11.1 (2016-09-05) + + +#### Bug Fixes + +* **Settings:** fixes an issue where settings weren't propagated down through grand-child subcommands ([b3efc107](https://github.com/kbknapp/clap-rs/commit/b3efc107515d78517b20798ff3890b8a2b04498e), closes [#638](https://github.com/kbknapp/clap-rs/issues/638)) + +#### Features + +* **Errors:** Errors with custom description ([58512f2f](https://github.com/kbknapp/clap-rs/commit/58512f2fcb430745f1ee6ee8f1c67f62dc216c73)) + +#### Improvements + +* **help:** use term_size instead of home-grown solution ([fc7327e9](https://github.com/kbknapp/clap-rs/commit/fc7327e9dcf4258ef2baebf0a8714d9c0622855b)) + + + + +### v2.11.0 (2016-08-28) + + +#### Bug Fixes + +* **Groups:** fixes some usage strings that contain both args in groups and ones that conflict with each other ([3d782def](https://github.com/kbknapp/clap-rs/commit/3d782def57725e2de26ca5a5bc5cc2e40ddebefb), closes [#616](https://github.com/kbknapp/clap-rs/issues/616)) + +#### Documentation + +* moves docs to docs.rs ([03209d5e](https://github.com/kbknapp/clap-rs/commit/03209d5e1300906f00bafec1869c2047a92e5071), closes [#634](https://github.com/kbknapp/clap-rs/issues/634)) + +#### Improvements + +* **Completions:** uses standard conventions for bash completion files, namely '{bin}.bash-completion' ([27f5bbfb](https://github.com/kbknapp/clap-rs/commit/27f5bbfbcc9474c2f57c2b92b1feb898ae46ee70), closes [#567](https://github.com/kbknapp/clap-rs/issues/567)) +* **Help:** automatically moves help text to the next line and wraps when term width is determined to be too small, or help text is too long ([150964c4](https://github.com/kbknapp/clap-rs/commit/150964c4e7124d54476c9d9b4b3f2406f0fd00e5), closes [#597](https://github.com/kbknapp/clap-rs/issues/597)) +* **YAML Errors:** vastly improves error messages when using YAML
([f43b7c65](https://github.com/kbknapp/clap-rs/commit/f43b7c65941c53adc0616b8646a21dc255862eb2), closes [#574](https://github.com/kbknapp/clap-rs/issues/574)) + +#### Features + +* adds App::with_defaults to automatically use crate_authors! and crate_version! macros ([5520bb01](https://github.com/kbknapp/clap-rs/commit/5520bb012c127dfd299fd55699443c744d8dcd5b), closes [#600](https://github.com/kbknapp/clap-rs/issues/600)) + + + + +### v2.10.4 (2016-08-25) + + +#### Bug Fixes + +* **Help Wrapping:** fixes a bug where help is wrapped incorrectly, causing a panic with some non-English characters ([d0b442c7](https://github.com/kbknapp/clap-rs/commit/d0b442c7beeecac9764406bc3bd171ced0b8825e), closes [#626](https://github.com/kbknapp/clap-rs/issues/626)) + + + + +### v2.10.3 (2016-08-25) + +#### Features + +* **Help:** adds a new shorthand way to use source formatting and ignore term width in help messages ([7dfdaf20](https://github.com/kbknapp/clap-rs/commit/7dfdaf200ebb5c431351a045b48f5e0f0d3f31db), closes [#625](https://github.com/kbknapp/clap-rs/issues/625)) + +#### Documentation + +* **Term Width:** adds details about set_term_width(0) ([00b8205d](https://github.com/kbknapp/clap-rs/commit/00b8205d22639d1b54b9c453c55c785aace52cb2)) + +#### Bug Fixes + +* **Unicode:** fixes two bugs where non-English characters were stripped or caused a panic with help wrapping ([763a5c92](https://github.com/kbknapp/clap-rs/commit/763a5c920e23efc74d190af0cb8b5dd714b2d67a), closes [#626](https://github.com/kbknapp/clap-rs/issues/626)) + + + + +### v2.10.2 (2016-08-22) + + +#### Bug Fixes + +* fixes a bug where the help is printed twice ([a643fb28](https://github.com/kbknapp/clap-rs/commit/a643fb283acd9905dc727c4579c5c9fa2ceaa7e7), closes [#623](https://github.com/kbknapp/clap-rs/issues/623)) + + + + +### v2.10.1 (2016-08-21) + + +#### Bug Fixes + +* **Help Subcommand:** fixes misleading usage string when using multi-level subcommands ([e203515e](https://github.com/kbknapp/clap-rs/commit/e203515e3ac495b405dbba4f78fb6af148fd282e), closes [#618](https://github.com/kbknapp/clap-rs/issues/618)) + +#### Features + +* **YAML:** allows using lists or single values with arg declarations ([9ade2cd4](https://github.com/kbknapp/clap-rs/commit/9ade2cd4b268d6d7fe828319ce6a523c641b9c38), closes [#614](https://github.com/kbknapp/clap-rs/issues/614), [#613](https://github.com/kbknapp/clap-rs/issues/613)) + + + + +## v2.10.0 (2016-07-29) + + +#### Features + +* **Completions:** one can generate a basic fish completions script at compile time ([1979d2f2](https://github.com/kbknapp/clap-rs/commit/1979d2f2f3216e57d02a97e624a8a8f6cf867ed9)) + +#### Bug Fixes + +* **parser:** preserve external subcommand name ([875df243](https://github.com/kbknapp/clap-rs/commit/875df24316c266920a073c13bbefbf546bc1f635)) + +#### Breaking Changes + +* **parser:** preserve external subcommand name ([875df243](https://github.com/kbknapp/clap-rs/commit/875df24316c266920a073c13bbefbf546bc1f635)) + +#### Documentation + +* **YAML:** fixes example 17's incorrect reference to arg_groups instead of groups ([b6c99e13](https://github.com/kbknapp/clap-rs/commit/b6c99e1377f918e78c16c8faced70a71607da931), closes [#601](https://github.com/kbknapp/clap-rs/issues/601)) + + + + +### 2.9.3 (2016-07-24) + + +#### Bug Fixes + +* fixes bug where only the first arg in a list of required_unless_one is recognized ([1fc3b55b](https://github.com/kbknapp/clap-rs/commit/1fc3b55bd6c8653b02e7c4253749c6b77737d2ac), closes [#575](https://github.com/kbknapp/clap-rs/issues/575)) +* 
**Settings:** fixes typo subcommandsrequired->subcommandrequired ([fc72cdf5](https://github.com/kbknapp/clap-rs/commit/fc72cdf591d30f5d9375d0b5cc2a2ff3e812f9f6), closes [#593](https://github.com/kbknapp/clap-rs/issues/593)) + +#### Features + +* **Completions:** adds the ability to generate completions to io::Write object ([9f62cf73](https://github.com/kbknapp/clap-rs/commit/9f62cf7378ba5acb5ce8c5bac89b4aa60c30755f)) +* **Settings:** Add unset_setting and unset_settings fns to App (#598) ([0ceba231](https://github.com/kbknapp/clap-rs/commit/0ceba231c6767cd6d88fdb1feeeea41deadf77ff), closes [#590](https://github.com/kbknapp/clap-rs/issues/590)) + + + +### 2.9.2 (2016-07-03) + + +#### Documentation + +* **Completions:** fixes the formatting of the Cargo.toml excerpt in the completions example ([722f2607](https://github.com/kbknapp/clap-rs/commit/722f2607beaef56b6a0e433db5fd09492d9f028c)) + +#### Bug Fixes + +* **Completions:** fixes bug where --help and --version short weren't added to the completion list ([e9f2438e](https://github.com/kbknapp/clap-rs/commit/e9f2438e2ce99af0ae570a2eaf541fc7f55b771b), closes [#536](https://github.com/kbknapp/clap-rs/issues/536)) + + + + +### 2.9.1 (2016-07-02) + + +#### Improvements + +* **Completions:** allows multiple completions to be built by namespacing with bin name ([57484b2d](https://github.com/kbknapp/clap-rs/commit/57484b2daeaac01c1026e8c84efc8bf099e0eb31)) + + + +## v2.9.0 (2016-07-01) + + +#### Documentation + +* **Completions:** + * fixes some errors in the completion docs ([9b359bf0](https://github.com/kbknapp/clap-rs/commit/9b359bf06255d3dad8f489308044b60a9d1e6a87)) + * adds documentation for completion scripts ([c6c519e4](https://github.com/kbknapp/clap-rs/commit/c6c519e40efd6c4533a9ef5efe8e74fd150391b7)) + +#### Features + +* **Completions:** + * one can now generate a bash completions script at compile time! 
([e75b6c7b](https://github.com/kbknapp/clap-rs/commit/e75b6c7b75f729afb9eb1d2a2faf61dca7674634), closes [#376](https://github.com/kbknapp/clap-rs/issues/376)) + * completions now include aliases to subcommands, including all subcommand options ([0ab9f840](https://github.com/kbknapp/clap-rs/commit/0ab9f84052a8cf65b5551657f46c0c270841e634), closes [#556](https://github.com/kbknapp/clap-rs/issues/556)) + * completions now continue completing even after first completion ([18fc2e5b](https://github.com/kbknapp/clap-rs/commit/18fc2e5b5af63bf54a94b72cec5e1223d49f4806)) + * allows matching on possible values in options ([89cc2026](https://github.com/kbknapp/clap-rs/commit/89cc2026ba9ac69cf44c5254360bbf99236d4f89), closes [#557](https://github.com/kbknapp/clap-rs/issues/557)) + +#### Bug Fixes + +* **AllowLeadingHyphen:** fixes an issue where isn't ignored like it should be with this setting ([96c24c9a](https://github.com/kbknapp/clap-rs/commit/96c24c9a8fa1f85e06138d3cdd133e51659e19d2), closes [#558](https://github.com/kbknapp/clap-rs/issues/558)) + + +## v2.8.0 (2016-06-30) + + +#### Features + +* **Arg:** adds new setting `Arg::require_delimiter` which requires val delimiter to parse multiple values ([920b5595](https://github.com/kbknapp/clap-rs/commit/920b5595ed72abfb501ce054ab536067d8df2a66)) + +#### Bug Fixes + +* Declare term::Winsize as repr(C) ([5d663d90](https://github.com/kbknapp/clap-rs/commit/5d663d905c9829ce6e7a164f1f0896cdd70236dd)) + +#### Documentation + +* **Arg:** adds docs for ([49af4e38](https://github.com/kbknapp/clap-rs/commit/49af4e38a5dae2ab0a7fc3b4147e2c053d532484)) + + + + +### v2.7.1 (2016-06-29) + + +#### Bug Fixes + +* **Options:** + * options with multiple values and using delimiters no longer parse additional values after a trailing space ([cdc500bd](https://github.com/kbknapp/clap-rs/commit/cdc500bdde6abe238c36ade406ddafc2bafff583)) + * using options with multiple values and with an = no longer parse args after the trailing space as values ([290f61d0](https://github.com/kbknapp/clap-rs/commit/290f61d07177413cf082ada55526d83405f6d011)) + + + + +## v2.7.0 (2016-06-28) + + +#### Documentation + +* fix typos ([43b3d40b](https://github.com/kbknapp/clap-rs/commit/43b3d40b8c38b1571da75af86b5088be96cccec2)) +* **ArgGroup:** vastly improves ArgGroup docs by adding better examples ([9e5f4f5d](https://github.com/kbknapp/clap-rs/commit/9e5f4f5d734d630bca5535c3a0aa4fd4f9db3e39), closes [#534](https://github.com/kbknapp/clap-rs/issues/534)) + +#### Features + +* **ArgGroup:** one can now specify groups which require AT LEAST one of the args ([33689acc](https://github.com/kbknapp/clap-rs/commit/33689acc689b217a8c0ee439f1b1225590c38355), closes [#533](https://github.com/kbknapp/clap-rs/issues/533)) + +#### Bug Fixes + +* **App:** using `App::print_help` now prints the same as would have been printed by `--help` or the like ([e84cc018](https://github.com/kbknapp/clap-rs/commit/e84cc01836bbe0527e97de6db9889bd9e0fd6ba1), closes [#536](https://github.com/kbknapp/clap-rs/issues/536)) +* **Help:** + * prevents invoking help help and displaying incorrect help message ([e3d2893f](https://github.com/kbknapp/clap-rs/commit/e3d2893f377942a2d4cf3c6ff04524d0346e6fdb), closes [#538](https://github.com/kbknapp/clap-rs/issues/538)) + * subcommand help messages requested via help now correctly match --help ([08ad1cff](https://github.com/kbknapp/clap-rs/commit/08ad1cff4fec57224ea957a2891a057b323c01bc), closes [#539](https://github.com/kbknapp/clap-rs/issues/539)) + +#### Improvements + +* 
**ArgGroup:** Add multiple ArgGroups per Arg ([902e182f](https://github.com/kbknapp/clap-rs/commit/902e182f7a58aff11ff01e0a452abcdbdb2262aa), closes [#426](https://github.com/kbknapp/clap-rs/issues/426)) +* **Usage Strings:** `[FLAGS]` and `[ARGS]` are no longer blindly added to usage strings ([9b2e45b1](https://github.com/kbknapp/clap-rs/commit/9b2e45b170aff567b038d8b3368880b6046c10c6), closes [#537](https://github.com/kbknapp/clap-rs/issues/537)) +* **arg_enum!:** allows using meta items like repr(C) with arg_enum!s ([edf9b233](https://github.com/kbknapp/clap-rs/commit/edf9b2331c17a2cbcc13f961add4c55c2778e773), closes [#543](https://github.com/kbknapp/clap-rs/issues/543)) + + + + +## v2.6.0 (2016-06-14) + + +#### Improvements + +* removes extra newline from help output ([86e61d19](https://github.com/kbknapp/clap-rs/commit/86e61d19a748fb9870fcf1175308984e51ca1115)) +* allows printing version to any io::Write object ([921f5f79](https://github.com/kbknapp/clap-rs/commit/921f5f7916597f1d028cd4a65bfe76a01c801724)) +* removes extra newline when printing version ([7e2e2cbb](https://github.com/kbknapp/clap-rs/commit/7e2e2cbb4a8a0f050bb8072a376f742fc54b8589)) +* **Aliases:** improves readability of aliases in help messages ([ca511de7](https://github.com/kbknapp/clap-rs/commit/ca511de71f5b8c2ac419f1b188658e8c63b67846), closes [#526](https://github.com/kbknapp/clap-rs/issues/526), [#529](https://github.com/kbknapp/clap-rs/issues/529)) +* **Usage Strings:** improves the default usage string when only a single positional arg is present ([ec86f2da](https://github.com/kbknapp/clap-rs/commit/ec86f2dada1545a63fc72355e22fcdc4c466c215), closes [#518](https://github.com/kbknapp/clap-rs/issues/518)) + +#### Features + +* **Help:** allows wrapping at specified term width (Even on Windows!) 
([1761dc0d](https://github.com/kbknapp/clap-rs/commit/1761dc0d27d0d621229d792be40c36fbf65c3014), closes [#451](https://github.com/kbknapp/clap-rs/issues/451)) +* **Settings:** + * adds new setting to stop delimiting values with -- or TrailingVarArg ([fc3e0f5a](https://github.com/kbknapp/clap-rs/commit/fc3e0f5afda6d24cdb3c4676614beebe13e1e870), closes [#511](https://github.com/kbknapp/clap-rs/issues/511)) + * one can now set an AppSetting which is propagated down through child subcommands ([e2341835](https://github.com/kbknapp/clap-rs/commit/e23418351a3b98bf08dfd7744bc14377c70d59ee), closes [#519](https://github.com/kbknapp/clap-rs/issues/519)) +* **Subcommands:** adds support for visible aliases ([7b10e7f8](https://github.com/kbknapp/clap-rs/commit/7b10e7f8937a07fdb8d16a6d8df79ce78d080cd3), closes [#522](https://github.com/kbknapp/clap-rs/issues/522)) + +#### Bug Fixes + +* fixes bug where args are printed out of order with templates ([05abb534](https://github.com/kbknapp/clap-rs/commit/05abb534864764102031a0d402e64ac65867aa87)) +* fixes bug where one can't override version or help flags ([90d7d6a2](https://github.com/kbknapp/clap-rs/commit/90d7d6a2ea8240122dd9bf8d82d3c4f5ebb5c703), closes [#514](https://github.com/kbknapp/clap-rs/issues/514)) +* fixes issue where before_help wasn't printed ([b3faff60](https://github.com/kbknapp/clap-rs/commit/b3faff6030f76a23f26afcfa6a90169002ed7106)) +* **Help:** `App::before_help` and `App::after_help` now correctly wrap ([1f4da767](https://github.com/kbknapp/clap-rs/commit/1f4da7676e6e71aa8dda799f3eeefad105a47819), closes [#516](https://github.com/kbknapp/clap-rs/issues/516)) +* **Settings:** fixes bug where new color settings couldn't be converted from strs ([706a7c11](https://github.com/kbknapp/clap-rs/commit/706a7c11b0900be594de6d5a3121938eff197602)) +* **Subcommands:** subcommands with aliases now display help of the aliased subcommand ([5354d14b](https://github.com/kbknapp/clap-rs/commit/5354d14b51f189885ba110e01e6b76cca3752992), closes [#521](https://github.com/kbknapp/clap-rs/issues/521)) +* **Windows:** fixes a failing windows build ([01e7dfd6](https://github.com/kbknapp/clap-rs/commit/01e7dfd6c07228c0be6695b3c7bf9370d82860d4)) +* **YAML:** adds missing YAML methods for App and Arg ([e468faf3](https://github.com/kbknapp/clap-rs/commit/e468faf3f05950fd9f72d84b69aa2061e91c6c64), closes [#528](https://github.com/kbknapp/clap-rs/issues/528)) + + + + +### v2.5.2 (2016-05-31) + + +#### Improvements + +* removes extra newline from help output ([86e61d19](https://github.com/kbknapp/clap-rs/commit/86e61d19a748fb9870fcf1175308984e51ca1115)) +* allows printing version to any io::Write object ([921f5f79](https://github.com/kbknapp/clap-rs/commit/921f5f7916597f1d028cd4a65bfe76a01c801724)) +* removes extra newline when printing version ([7e2e2cbb](https://github.com/kbknapp/clap-rs/commit/7e2e2cbb4a8a0f050bb8072a376f742fc54b8589)) + +#### Bug Fixes + +* fixes bug where args are printed out of order with templates ([3935431d](https://github.com/kbknapp/clap-rs/commit/3935431d5633f577c0826ae2142794b301f4b8ca)) +* fixes bug where one can't override version or help flags ([90d7d6a2](https://github.com/kbknapp/clap-rs/commit/90d7d6a2ea8240122dd9bf8d82d3c4f5ebb5c703), closes [#514](https://github.com/kbknapp/clap-rs/issues/514)) +* fixes issue where before_help wasn't printed ([b3faff60](https://github.com/kbknapp/clap-rs/commit/b3faff6030f76a23f26afcfa6a90169002ed7106)) + +#### Documentation + +* inter-links all types and pages 
([3312893d](https://github.com/kbknapp/clap-rs/commit/3312893ddaef3f44d68d8d26ed3d08010be50d97), closes [#505](https://github.com/kbknapp/clap-rs/issues/505)) +* makes all publicly available types viewable in docs ([52ca6505](https://github.com/kbknapp/clap-rs/commit/52ca6505b4fec7b5c2d53d160c072d395eb21da6)) + + +### v2.5.1 (2016-05-11) + + +#### Bug Fixes + +* **Subcommand Aliases**: fixes lifetime issue when setting multiple aliases at once ([ac42f6cf0](https://github.com/kbknapp/clap-rs/commit/ac42f6cf0de6c4920f703807d63061803930b18d)) + + +## v2.5.0 (2016-05-10) + + +#### Improvements + +* **SubCommand Aliases:** adds feature to yaml configs too ([69592195](https://github.com/kbknapp/clap-rs/commit/695921954dde46dfd483399dcdef482c9dd7f34a)) + +#### Features + +* **SubCommands:** adds support for subcommand aliases ([66b4dea6](https://github.com/kbknapp/clap-rs/commit/66b4dea65c44d8f77ff522238a9237aed1bcab6d), closes [#469](https://github.com/kbknapp/clap-rs/issues/469)) + + + +### v2.4.3 (2016-05-10) + + +#### Bug Fixes + +* **Usage Strings:** + * now properly dedups args that are also in groups ([3ca0947c](https://github.com/kbknapp/clap-rs/commit/3ca0947c166b4f8525752255e3a4fa6565eb9689), closes [#498](https://github.com/kbknapp/clap-rs/issues/498)) + * removes duplicate groups from usage strings ([f574fb8a](https://github.com/kbknapp/clap-rs/commit/f574fb8a7cde4d4a2fa4c4481d59be2d0f135427)) + +#### Improvements + +* **Groups:** formats positional args in groups in a better way ([fef11154](https://github.com/kbknapp/clap-rs/commit/fef11154fb7430d1cbf04a672aabb366e456a368)) +* **Help:** + * moves positionals to standard <> formatting ([03dfe5ce](https://github.com/kbknapp/clap-rs/commit/03dfe5ceff1d63f172788ff688567ddad9fe119b)) + * default help subcommand string has been shortened ([5b7fe8e4](https://github.com/kbknapp/clap-rs/commit/5b7fe8e4161e43ab19e2e5fcf55fbe46791134e9), closes [#494](https://github.com/kbknapp/clap-rs/issues/494)) + + +### v2.4.3 (2016-05-10) + +* Ghost Release + + +### v2.4.3 (2016-05-10) + +* Ghost Release + + +## v2.4.0 (2016-05-02) + + +#### Features + +* **Help:** adds support for displaying info before help message ([29fbfa3b](https://github.com/kbknapp/clap-rs/commit/29fbfa3b963f2f3ca7704bf5d3e1201531baa373)) +* **Required:** adds support for args that are required unless certain args are present ([af1f7916](https://github.com/kbknapp/clap-rs/commit/af1f79168390ea7da4074d0d9777de458ea64971)) + +#### Documentation + +* hides formatting from docs ([cb708093](https://github.com/kbknapp/clap-rs/commit/cb708093a7cd057f08c98b7bd1ed54c2db86ae7e)) +* **required_unless:** adds docs and examples for required_unless ([ca727b52](https://github.com/kbknapp/clap-rs/commit/ca727b52423b9883acd88b2f227b2711bc144573)) + +#### Bug Fixes + +* **Required Args:** fixes issue where missing required args are sometimes duplicated in error messages ([3beebd81](https://github.com/kbknapp/clap-rs/commit/3beebd81e7bc2faa4115ac109cf570e512c5477f), closes [#492](https://github.com/kbknapp/clap-rs/issues/492)) + + + +## v2.3.0 (2016-04-18) + + +#### Improvements + +* **macros.rs:** Added write_nspaces macro (a new version of write_spaces) ([9d757e86](https://github.com/kbknapp/clap-rs/commit/9d757e8678e334e5a740ac750c76a9ed4e785cba)) +* **parser.rs:** + * Provide a way to create a usage string without the USAGE: title ([a91d378b](https://github.com/kbknapp/clap-rs/commit/a91d378ba0c91b5796457f8c6e881b13226ab735)) + * Make Parser's create_usage public allowing to have function outside the 
parser to generate the help ([d51945f8](https://github.com/kbknapp/clap-rs/commit/d51945f8b82ebb0963f4f40b384a9e8335783091)) + * Expose Parser's flags, opts and positionals argument as iterators ([9b23e7ee](https://github.com/kbknapp/clap-rs/commit/9b23e7ee40e51f7a823644c4496be955dc6c9d3a)) +* **src/args:** Exposes argument display order by introducing a new Trait ([1321630e](https://github.com/kbknapp/clap-rs/commit/1321630ef56955f152c73376d4d85cceb0bb4a12)) +* **srs/args:** Added longest_filter to AnyArg trait ([65b3f667](https://github.com/kbknapp/clap-rs/commit/65b3f667532685f854c699ddd264d326599cf7e5)) + +#### Features + +* **Authors Macro:** adds a crate_authors macro ([38fb59ab](https://github.com/kbknapp/clap-rs/commit/38fb59abf480eb2b6feca269097412f8b00b5b54), closes [#447](https://github.com/kbknapp/clap-rs/issues/447)) +* **HELP:** + * implements optional colored help messages ([abc8f669](https://github.com/kbknapp/clap-rs/commit/abc8f669c3c8193ffc3a3b0ac6c3ac2198794d4f), closes [#483](https://github.com/kbknapp/clap-rs/issues/483)) + * Add a Templated Help system. ([81e121ed](https://github.com/kbknapp/clap-rs/commit/81e121edd616f7285593f11120c63bcccae0d23e)) + +#### Bug Fixes + +* **HELP:** Adjust Help to semantic changes introduced in 6933b84 ([8d23806b](https://github.com/kbknapp/clap-rs/commit/8d23806bd67530ad412c34a1dcdcb1435555573d)) + + +### v2.2.6 (2016-04-11) + +#### Bug Fixes + +* **Arg Groups**: fixes bug where arg name isn't printed properly ([3019a685](https://github.com/kbknapp/clap-rs/commit/3019a685eee747ccbe6be09ad5dddce0b1d1d4db), closes [#476](https://github.com/kbknapp/clap-rs/issues/476)) + + + +### v2.2.5 (2016-04-03) + + +#### Bug Fixes + +* **Empty Values:** fixes bug where empty values weren't stored ([885d166f](https://github.com/kbknapp/clap-rs/commit/885d166f04eb3fb581898ae5818c6c8032e5a686), closes [#470](https://github.com/kbknapp/clap-rs/issues/470)) +* **Help Message:** fixes bug where arg name is printed twice ([71acf1d5](https://github.com/kbknapp/clap-rs/commit/71acf1d576946658b8bbdb5ae79e6716c43a030f), closes [#472](https://github.com/kbknapp/clap-rs/issues/472)) + + + +### v2.2.4 (2016-03-30) + + +#### Bug Fixes + +* fixes compiling with debug cargo feature ([d4b55450](https://github.com/kbknapp/clap-rs/commit/d4b554509928031ac0808076178075bb21f8c1da)) +* **Empty Values:** fixes bug where empty values weren't stored ([885d166f](https://github.com/kbknapp/clap-rs/commit/885d166f04eb3fb581898ae5818c6c8032e5a686), closes [#470](https://github.com/kbknapp/clap-rs/issues/470)) + + + + +### v2.2.3 (2016-03-28) + + +#### Bug Fixes + +* **Help Subcommand:** fixes issue where help and version flags weren't properly displayed ([205b07bf](https://github.com/kbknapp/clap-rs/commit/205b07bf2e6547851f1290f8cd6b169145e144f1), closes [#466](https://github.com/kbknapp/clap-rs/issues/466)) + + +### v2.2.2 (2016-03-27) + + +#### Bug Fixes + +* **Help Message:** fixes bug with wrapping in the middle of a unicode sequence ([05365ddc](https://github.com/kbknapp/clap-rs/commit/05365ddcc252e4b49e7a75e199d6001a430bd84d), closes [#456](https://github.com/kbknapp/clap-rs/issues/456)) +* **Usage Strings:** fixes small bug where -- would appear needlessly in usage strings ([6933b849](https://github.com/kbknapp/clap-rs/commit/6933b8491c2a7e28cdb61b47dcf10caf33c2f78a), closes [#461](https://github.com/kbknapp/clap-rs/issues/461)) + + + +### 2.2.1 (2016-03-16) + + +#### Features + +* **Help Message:** wraps and aligns the help message of subcommands 
([813d75d0](https://github.com/kbknapp/clap-rs/commit/813d75d06fbf077c65762608c0fa5e941cfc393c), closes [#452](https://github.com/kbknapp/clap-rs/issues/452)) + +#### Bug Fixes + +* **Help Message:** fixes a bug where small terminal sizes caused a loop ([1d73b035](https://github.com/kbknapp/clap-rs/commit/1d73b0355236923aeaf6799abc759762ded7e1d0), closes [#453](https://github.com/kbknapp/clap-rs/issues/453)) + + + +## v2.2.0 (2016-03-15) + + +#### Features + +* **Help Message:** can auto wrap and align help text to term width ([e36af026](https://github.com/kbknapp/clap-rs/commit/e36af0266635f23e85e951b9088d561e9a5d1bf6), closes [#428](https://github.com/kbknapp/clap-rs/issues/428)) +* **Help Subcommand:** adds support for passing additional subcommands to help subcommand ([2c12757b](https://github.com/kbknapp/clap-rs/commit/2c12757bbdf34ce481f3446c074e24c09c2e60fd), closes [#416](https://github.com/kbknapp/clap-rs/issues/416)) +* **Opts and Flags:** adds support for custom ordering in help messages ([9803b51e](https://github.com/kbknapp/clap-rs/commit/9803b51e799904c0befaac457418ee766ccc1ab9)) +* **Settings:** adds support for automatically deriving custom display order of args ([ad86e433](https://github.com/kbknapp/clap-rs/commit/ad86e43334c4f70e86909689a088fb87e26ff95a), closes [#444](https://github.com/kbknapp/clap-rs/issues/444)) +* **Subcommands:** adds support for custom ordering in help messages ([7d2a2ed4](https://github.com/kbknapp/clap-rs/commit/7d2a2ed413f5517d45988eef0765cdcd663b6372), closes [#442](https://github.com/kbknapp/clap-rs/issues/442)) + +#### Bug Fixes + +* **From Usage:** fixes a bug where added empty lines weren't ignored ([c5c58c86](https://github.com/kbknapp/clap-rs/commit/c5c58c86b9c503d8de19da356a5a5cffb59fbe84)) + +#### Documentation + +* **Groups:** explains required ArgGroups better ([4ff0205b](https://github.com/kbknapp/clap-rs/commit/4ff0205b85a45151b59bbaf090a89df13438380f), closes [#439](https://github.com/kbknapp/clap-rs/issues/439)) + + +### v2.1.2 (2016-02-24) + +#### Bug Fixes + +* **Nightly:** fixes failing nightly build ([d752c170](https://github.com/kbknapp/clap-rs/commit/d752c17029598b19037710f204b7943f0830ae75), closes [#434](https://github.com/kbknapp/clap-rs/issues/434)) + + + +### v2.1.1 (2016-02-19) + + +#### Documentation + +* **AppSettings:** clarifies that AppSettings do not propagate ([3c8db0e9](https://github.com/kbknapp/clap-rs/commit/3c8db0e9be1d24edaad364359513cbb02abb4186), closes [#429](https://github.com/kbknapp/clap-rs/issues/429)) +* **Arg Examples:** adds better examples ([1e79cccc](https://github.com/kbknapp/clap-rs/commit/1e79cccc12937bc0e7cd2aad8e404410798e9fff)) + +#### Improvements + +* **Help:** adds setting for next line help by arg ([066df748](https://github.com/kbknapp/clap-rs/commit/066df7486e684cf50a8479a356a12ba972c34ce1), closes [#427](https://github.com/kbknapp/clap-rs/issues/427)) + + + +## v2.1.0 (2016-02-10) + + +#### Features + +* **Default Values:** adds support for default values in args ([73211952](https://github.com/kbknapp/clap-rs/commit/73211952964a79d97b434dd567e6d7d34be7feb5), closes [#418](https://github.com/kbknapp/clap-rs/issues/418)) + +#### Documentation + +* **Default Values:** adds better examples and notes for default values ([9facd74f](https://github.com/kbknapp/clap-rs/commit/9facd74f843ef3807c5d35259558a344e6c25905)) + + + +### v2.0.6 (2016-02-09) + + +#### Improvements + +* **Positional Arguments:** now displays value name if appropriate 
([f0a99916](https://github.com/kbknapp/clap-rs/commit/f0a99916c59ce675515c6dcdfe9a40b130510908), closes [#420](https://github.com/kbknapp/clap-rs/issues/420)) + + + +### v2.0.5 (2016-02-05) + + +#### Bug Fixes + +* **Multiple Values:** fixes bug where number_of_values wasn't respected ([72c387da](https://github.com/kbknapp/clap-rs/commit/72c387da0bb8a6f526f863770f08bb8ca0d3de03)) + + + +### v2.0.4 (2016-02-04) + + +#### Bug Fixes + +* adds support for building ArgGroups from standalone YAML ([fcbc7e12](https://github.com/kbknapp/clap-rs/commit/fcbc7e12f5d7b023b8f30cba8cad28a01cf6cd26)) +* Stop lonely hyphens from causing panic ([85b11468](https://github.com/kbknapp/clap-rs/commit/85b11468b0189d5cc15f1cfac5db40d17a0077dc), closes [#410](https://github.com/kbknapp/clap-rs/issues/410)) +* **AppSettings:** fixes bug where subcmds didn't receive parent ver ([a62e4527](https://github.com/kbknapp/clap-rs/commit/a62e452754b3b0e3ac9a15aa8b5330636229ead1)) + + +### v2.0.3 (2016-02-02) + + +#### Improvements + +* **values:** adds support for up to u64::max values per arg ([c7abf7d7](https://github.com/kbknapp/clap-rs/commit/c7abf7d7611e317b0d31d97632e3d2e13570947c)) +* **occurrences:** Allow for more than 256 occurrences of an argument. ([3731ddb3](https://github.com/kbknapp/clap-rs/commit/3731ddb361163f3d6b86844362871e48c80fa530)) + +#### Features + +* **AppSettings:** adds HidePossibleValuesInHelp to skip writing those values ([cdee7a0e](https://github.com/kbknapp/clap-rs/commit/cdee7a0eb2beeec723cb98acfacf03bf629c1da3)) + +#### Bug Fixes + +* **value_t_or_exit:** fixes typo which causes value_t_or_exit to return a Result ([ee96baff](https://github.com/kbknapp/clap-rs/commit/ee96baffd306cb8d20ddc5575cf739bb1a6354e8)) + + + +### v2.0.2 (2016-01-31) + + +#### Improvements + +* **arg_enum:** enum declared with arg_enum returns [&'static str; #] instead of Vec ([9c4b8a1a](https://github.com/kbknapp/clap-rs/commit/9c4b8a1a6b12949222f17d1074578ad7676b9c0d)) + +#### Bug Fixes + +* clap_app! 
should be gated by unstable, not nightly feature ([0c8b84af](https://github.com/kbknapp/clap-rs/commit/0c8b84af6161d5baf683688eafc00874846f83fa)) +* **SubCommands:** fixed a bug where subcmds weren't recognized after mult args ([c19c17a8](https://github.com/kbknapp/clap-rs/commit/c19c17a8850602990e24347aeb4427cf43316223), closes [#405](https://github.com/kbknapp/clap-rs/issues/405)) +* **Usage Parser:** fixes a bug where literal single quotes weren't allowed in help strings ([0bcc7120](https://github.com/kbknapp/clap-rs/commit/0bcc71206478074769e311479b34a9f74fe80f5c), closes [#406](https://github.com/kbknapp/clap-rs/issues/406)) + + + +### v2.0.1 (2016-01-30) + + +#### Bug Fixes + +* fixes cargo features to NOT require nightly with unstable features ([dcbcc60c](https://github.com/kbknapp/clap-rs/commit/dcbcc60c9ba17894be636472ea4b07a82d86a9db), closes [#402](https://github.com/kbknapp/clap-rs/issues/402)) + + + +## v2.0.0 (2016-01-28) + + +#### Improvements + +* **From Usage:** vastly improves the usage parser ([fa3a2f86](https://github.com/kbknapp/clap-rs/commit/fa3a2f86bd674c5eb07128c95098fab7d1437247), closes [#350](https://github.com/kbknapp/clap-rs/issues/350)) + +#### Features + +* adds support for external subcommands ([177fe5cc](https://github.com/kbknapp/clap-rs/commit/177fe5cce745c2164a8e38c23be4c4460d2d7211), closes [#372](https://github.com/kbknapp/clap-rs/issues/372)) +* adds support for values with a leading hyphen ([e4d429b9](https://github.com/kbknapp/clap-rs/commit/e4d429b9d52e95197bd0b572d59efacecf305a59), closes [#385](https://github.com/kbknapp/clap-rs/issues/385)) +* adds support for turning off the value delimiter ([508db850](https://github.com/kbknapp/clap-rs/commit/508db850a87c2e251cf6b6ddead9ad56b29f9e57), closes [#352](https://github.com/kbknapp/clap-rs/issues/352)) +* adds support for changing the value delimiter ([dafeae8a](https://github.com/kbknapp/clap-rs/commit/dafeae8a526162640f6a68da434370c64d190889), closes [#353](https://github.com/kbknapp/clap-rs/issues/353)) +* adds support for comma separated values ([e69da6af](https://github.com/kbknapp/clap-rs/commit/e69da6afcd2fe48a3c458ca031db40997f860eda), closes [#348](https://github.com/kbknapp/clap-rs/issues/348)) +* adds support for options with optional values ([4555736c](https://github.com/kbknapp/clap-rs/commit/4555736cad01441dcde4ea84a285227e0844c16e), closes [#367](https://github.com/kbknapp/clap-rs/issues/367)) +* **UTF-8:** adds support for invalid utf8 in values ([c5c59dec](https://github.com/kbknapp/clap-rs/commit/c5c59dec0bc33b86b2e99d30741336f17ec84282), closes [#269](https://github.com/kbknapp/clap-rs/issues/269)) +* **v2:** implementing the base of 2.x ([a3536054](https://github.com/kbknapp/clap-rs/commit/a3536054512ba833533dc56615ce3663d884381c)) + +#### Bug Fixes + +* fixes nightly build with new lints ([17599195](https://github.com/kbknapp/clap-rs/commit/175991956c37dc83ba9c49396e927a1cb65c5b11)) +* fixes Windows build for 2x release ([674c9b48](https://github.com/kbknapp/clap-rs/commit/674c9b48c7c92079cb180cc650a9e39f34781c32), closes [#392](https://github.com/kbknapp/clap-rs/issues/392)) +* fixes yaml build for 2x base ([adceae64](https://github.com/kbknapp/clap-rs/commit/adceae64c8556d00ab715677377b216f9f468ad7)) + +#### Documentation + +* updates examples for 2x release ([1303b360](https://github.com/kbknapp/clap-rs/commit/1303b3607468f362ab1b452d5614c1a064dc69b4), closes [#394](https://github.com/kbknapp/clap-rs/issues/394)) +* updates examples for 2x release 
([0a011f31](https://github.com/kbknapp/clap-rs/commit/0a011f3142aec338d388a6c8bfe22fa7036021bb), closes [#394](https://github.com/kbknapp/clap-rs/issues/394)) +* updates documentation for v2 release ([8d51724e](https://github.com/kbknapp/clap-rs/commit/8d51724ef73dfde5bb94fb9466bc5463a1cc1502)) +* updating docs for 2x release ([576d0e0e](https://github.com/kbknapp/clap-rs/commit/576d0e0e2c7b8f386589179bbf7419b93abacf1c)) +* **README.md:** + * updates readme for v2 release ([acaba01a](https://github.com/kbknapp/clap-rs/commit/acaba01a353c12144b9cd9a3ce447400691849b0), closes [#393](https://github.com/kbknapp/clap-rs/issues/393)) + * fix typo and make documentation conspicuous ([07b9f614](https://github.com/kbknapp/clap-rs/commit/07b9f61495d927f69f7abe6c0d85253f0f4e6107)) + +#### BREAKING CHANGES + +* **Fewer lifetimes! Yay!** + * `App<'a, 'b, 'c, 'd, 'e, 'f>` => `App<'a, 'b>` + * `Arg<'a, 'b, 'c, 'd, 'e, 'f>` => `Arg<'a, 'b>` + * `ArgMatches<'a, 'b>` => `ArgMatches<'a>` +* **Simply Renamed** + * `App::arg_group` => `App::group` + * `App::arg_groups` => `App::groups` + * `ArgGroup::add` => `ArgGroup::arg` + * `ArgGroup::add_all` => `ArgGroup::args` + * `ClapError` => `Error` + * struct field `ClapError::error_type` => `Error::kind` + * `ClapResult` => `Result` + * `ClapErrorType` => `ErrorKind` +* **Removed Deprecated Functions and Methods** + * `App::subcommands_negate_reqs` + * `App::subcommand_required` + * `App::arg_required_else_help` + * `App::global_version(bool)` + * `App::versionless_subcommands` + * `App::unified_help_messages` + * `App::wait_on_error` + * `App::subcommand_required_else_help` + * `SubCommand::new` + * `App::error_on_no_subcommand` + * `Arg::new` + * `Arg::mutually_excludes` + * `Arg::mutually_excludes_all` + * `Arg::mutually_overrides_with` + * `simple_enum!` +* **Renamed Error Variants** + * `InvalidUnicode` => `InvalidUtf8` + * `InvalidArgument` => `UnknownArgument` +* **Usage Parser** + * Value names can now be specified inline, i.e. `-o, --option 'some option which takes two files'` + * **There is now a priority of order to determine the name** - This is perhaps the biggest breaking change. See the documentation for full details. Prior to this change, the value name took precedence. **Ensure your args are using the proper names (i.e. 
typically the long or short and NOT the value name) throughout the code** +* `ArgMatches::values_of` returns a `Values` now which implements `Iterator` (should not break any code) +* `crate_version!` returns `&'static str` instead of `String` +* Using the `clap_app!` macro requires compiling with the `unstable` feature because the syntax could change slightly in the future + + + +### v1.5.5 (2016-01-04) + + +#### Bug Fixes + +* fixes an issue where invalid short args didn't cause an error ([c9bf7e44](https://github.com/kbknapp/clap-rs/commit/c9bf7e4440bd2f9b524ea955311d433c40a7d1e0)) +* prints the name in version and help instead of binary name ([8f3817f6](https://github.com/kbknapp/clap-rs/commit/8f3817f665c0cab6726bc16c56a53b6a61e44448), closes [#368](https://github.com/kbknapp/clap-rs/issues/368)) +* fixes an intentional panic issue discovered via clippy ([ea83a3d4](https://github.com/kbknapp/clap-rs/commit/ea83a3d421ea8856d4cac763942834d108b71406)) + + + +### v1.5.4 (2015-12-18) + + +#### Examples + +* **17_yaml:** conditionally compile 17_yaml example ([575de089](https://github.com/kbknapp/clap-rs/commit/575de089a3e240c398cb10e6cf5a5c6b68662c01)) + +#### Improvements + +* clippy improvements ([99cdebc2](https://github.com/kbknapp/clap-rs/commit/99cdebc23da3a45a165f14b27bebeb2ed828a2ce)) + +#### Bug Fixes + + +* **errors:** return correct error type in WrongNumValues error builder ([5ba8ba9d](https://github.com/kbknapp/clap-rs/commit/5ba8ba9dcccdfa74dd1c44260e64b359bbb36be6)) +* ArgRequiredElseHelp setting now takes precedence over missing required args ([faad83fb](https://github.com/kbknapp/clap-rs/commit/faad83fbef6752f3093b6e98fca09a9449b830f4), closes [#362](https://github.com/kbknapp/clap-rs/issues/362)) + + + +### v1.5.3 (2015-11-20) + + +#### Bug Fixes + +* **Errors:** fixes some instances when errors are missing a final newline ([c4d2b171](https://github.com/kbknapp/clap-rs/commit/c4d2b1711994479ad64ee52b6b49d2ceccbf2118)) + + + + + +### v1.5.2 (2015-11-14) + + +#### Bug Fixes + +* **Errors:** fixes a compiling bug when built on Windows or without the color feature ([a35f7634](https://github.com/kbknapp/clap-rs/commit/a35f76346fe6ecc88dda6a1eb13627186e7ce185)) + + + + +### v1.5.1 (2015-11-13) + + +#### Bug Fixes + +* **Required Args:** fixes a bug where required args are not correctly accounted for ([f03b88a9](https://github.com/kbknapp/clap-rs/commit/f03b88a9766b331a63879bcd747687f2e5a2661b), closes [#343](https://github.com/kbknapp/clap-rs/issues/343)) + + + + +## v1.5.0 (2015-11-13) + + +#### Bug Fixes + +* fixes a bug with required positional args in usage strings ([c6858f78](https://github.com/kbknapp/clap-rs/commit/c6858f78755f8e860204323c828c8355a066dc83)) + +#### Documentation + +* **FAQ:** updates readme with slight changes to FAQ ([a4ef0fab](https://github.com/kbknapp/clap-rs/commit/a4ef0fab73c8dc68f1b138965d1340459c113398)) + +#### Improvements + +* massive errors overhaul ([cdc29175](https://github.com/kbknapp/clap-rs/commit/cdc29175bc9c53e5b4aec86cbc04c1743154dae6)) +* **ArgMatcher:** huge refactor and deduplication of code ([8988853f](https://github.com/kbknapp/clap-rs/commit/8988853fb8825e8f841fde349834cc12cdbad081)) +* **Errors:** errors have been vastly improved ([e59bc0c1](https://github.com/kbknapp/clap-rs/commit/e59bc0c16046db156a88ba71a037db05028e995c)) +* **Traits:** refactoring some configuration into traits ([5800cdec](https://github.com/kbknapp/clap-rs/commit/5800cdec6dce3def4242b9f7bd136308afb19685)) + +#### Performance + +* **App:** + * more 
BTreeMap->Vec, Opts and SubCmds ([bc4495b3](https://github.com/kbknapp/clap-rs/commit/bc4495b32ec752b6c4b29719e831c043ef2a26ce)) + * changes flags BTreeMap->Vec ([d357640f](https://github.com/kbknapp/clap-rs/commit/d357640fab55e5964fe83efc3c771e53aa3222fd)) + * removed unneeded BTreeMap ([78971fd6](https://github.com/kbknapp/clap-rs/commit/78971fd68d7dc5c8e6811b4520cdc54e4188f733)) + * changes BTreeMap to VecMap in some instances ([64b921d0](https://github.com/kbknapp/clap-rs/commit/64b921d087fdd03775c95ba0bcf65d3f5d36f812)) + * removed excess clones ([ec0089d4](https://github.com/kbknapp/clap-rs/commit/ec0089d42ed715d293fb668d3a90b0db0aa3ec39)) + + + + +### v1.4.7 (2015-11-03) + + +#### Documentation + +* Clarify behavior of Arg::multiple with options. ([434f497a](https://github.com/kbknapp/clap-rs/commit/434f497ab6d831f8145cf09278c97ca6ee6c6fe7)) +* Fix typos and improve grammar. ([c1f66b5d](https://github.com/kbknapp/clap-rs/commit/c1f66b5de7b5269fbf8760a005ef8c645edd3229)) + +#### Bug Fixes + +* **Error Status:** fixes bug where --help and --version return non-zero exit code ([89b51fdf](https://github.com/kbknapp/clap-rs/commit/89b51fdf8b1ab67607567344e2317ff1a757cb12)) + + + + +### v1.4.6 (2015-10-29) + + +#### Features + +* allows parsing without a binary name for daemons and interactive CLIs ([aff89d57](https://github.com/kbknapp/clap-rs/commit/aff89d579b5b85c3dc81b64f16d5865299ec39a2), closes [#318](https://github.com/kbknapp/clap-rs/issues/318)) + +#### Bug Fixes + +* **Errors:** tones down quoting in some error messages ([34ce59ed](https://github.com/kbknapp/clap-rs/commit/34ce59ede53bfa2eef722c74881cdba7419fd9c7), closes [#309](https://github.com/kbknapp/clap-rs/issues/309)) +* **Help and Version:** only builds help and version once ([e3be87cf](https://github.com/kbknapp/clap-rs/commit/e3be87cfc095fc41c9811adcdc6d2b079f237d5e)) +* **Option Args:** fixes bug with args and multiple values ([c9a9548a](https://github.com/kbknapp/clap-rs/commit/c9a9548a8f96cef8a3dd9a980948325fbbc1b91b), closes [#323](https://github.com/kbknapp/clap-rs/issues/323)) +* **POSIX Overrides:** fixes bug where required args are overridden ([40ed2b50](https://github.com/kbknapp/clap-rs/commit/40ed2b50c3a9fe88bfdbaa43cef9fd6493ecaa8e)) +* **Safe Matches:** using 'safe' forms of the get_matches family no longer exit the process ([c47025dc](https://github.com/kbknapp/clap-rs/commit/c47025dca2b3305dea0a0acfdd741b09af0c0d05), closes [#256](https://github.com/kbknapp/clap-rs/issues/256)) +* **Versionless SubCommands:** fixes a bug where the -V flag was needlessly built ([27df8b9d](https://github.com/kbknapp/clap-rs/commit/27df8b9d98d13709dad3929a009f40ebff089a1a), closes [#329](https://github.com/kbknapp/clap-rs/issues/329)) + +#### Documentation + +* adds comparison in readme ([1a8bf31e](https://github.com/kbknapp/clap-rs/commit/1a8bf31e7a6b87ce48a66af2cde1645b2dd5bc95), closes [#325](https://github.com/kbknapp/clap-rs/issues/325)) + + + + +### v1.4.5 (2015-10-06) + + +#### Bug Fixes + +* fixes crash on invalid arg error ([c78ce128](https://github.com/kbknapp/clap-rs/commit/c78ce128ebbe7b8f730815f8176c29d76f4ade8c)) + + + + +### v1.4.4 (2015-10-06) + + +#### Documentation + +* clean up some formatting ([b7df92d7](https://github.com/kbknapp/clap-rs/commit/b7df92d7ea25835701dd22ddff984b9749f48a00)) +* move the crate-level docs to top of the lib.rs file ([d7233bf1](https://github.com/kbknapp/clap-rs/commit/d7233bf122dbf80ba8fc79e5641be2df8af10e7a)) +* changes doc comments to rustdoc comments 
([34b601be](https://github.com/kbknapp/clap-rs/commit/34b601be5fdde76c1a0859385b359b96d66b8732)) +* fixes panic in 14_groups example ([945b00a0](https://github.com/kbknapp/clap-rs/commit/945b00a0c27714b63bdca48d003fe205fcfdc578), closes [#295](https://github.com/kbknapp/clap-rs/issues/295)) +* avoid suggesting star dependencies. ([d33228f4](https://github.com/kbknapp/clap-rs/commit/d33228f40b5fefb84cf3dd51546bfb340dcd9f5a)) +* **Rustdoc:** adds portions of the readme to main rustdoc page ([6f9ee181](https://github.com/kbknapp/clap-rs/commit/6f9ee181e69d90bd4206290e59d6f3f1e8f0cbb2), closes [#293](https://github.com/kbknapp/clap-rs/issues/293)) + +#### Bug Fixes + +* grammar error in some conflicting option errors ([e73b07e1](https://github.com/kbknapp/clap-rs/commit/e73b07e19474323ad2260da66abbf6a6d4ecbd4f)) +* **Unified Help:** sorts both flags and options as a unified category ([2a223dad](https://github.com/kbknapp/clap-rs/commit/2a223dad82901fa2e74baad3bfc4c7b94509300f)) +* **Usage:** fixes a bug where required args aren't filtered properly ([72b453dc](https://github.com/kbknapp/clap-rs/commit/72b453dc170af3050bb123d35364f6da77fc06d7), closes [#277](https://github.com/kbknapp/clap-rs/issues/277)) +* **Usage Strings:** fixes a bug ordering of elements in usage strings ([aaf0d6fe](https://github.com/kbknapp/clap-rs/commit/aaf0d6fe7aa2403e76096c16204d254a9ee61ee2), closes [#298](https://github.com/kbknapp/clap-rs/issues/298)) + +#### Features + +* supports -aValue style options ([0e3733e4](https://github.com/kbknapp/clap-rs/commit/0e3733e4fec2015c2d566a51432dcd92cb69cad3)) +* **Trailing VarArg:** adds opt-in setting for final arg being vararg ([27018b18](https://github.com/kbknapp/clap-rs/commit/27018b1821a4bcd5235cfe92abe71b3c99efc24d), closes [#278](https://github.com/kbknapp/clap-rs/issues/278)) + + + + +### v1.4.3 (2015-09-30) + + +#### Features + +* allows accessing arg values by group name ([c92a4b9e](https://github.com/kbknapp/clap-rs/commit/c92a4b9eff2d679957f61c0c41ff404b40d38a91)) + +#### Documentation + +* use links to examples instead of plain text ([bb4fe237](https://github.com/kbknapp/clap-rs/commit/bb4fe237858535627271465147add537e4556b43)) + +#### Bug Fixes + +* **Help Message:** required args no longer double list in usage ([1412e639](https://github.com/kbknapp/clap-rs/commit/1412e639e0a79df84936d1101a837f90077d1c83), closes [#277](https://github.com/kbknapp/clap-rs/issues/277)) +* **Possible Values:** possible value validation is restored ([f121ae74](https://github.com/kbknapp/clap-rs/commit/f121ae749f8f4bfe754ef2e8a6dfc286504b5b75), closes [#287](https://github.com/kbknapp/clap-rs/issues/287)) + + + + +### v1.4.2 (2015-09-23) + + +#### Bug Fixes + +* **Conflicts:** fixes bug with conflicts not removing required args ([e17fcec5](https://github.com/kbknapp/clap-rs/commit/e17fcec53b3216ad047a13dddc6f740473fad1a1), closes [#271](https://github.com/kbknapp/clap-rs/issues/271)) + + + + +### v1.4.1 (2015-09-22) + + +#### Examples + +* add clap_app quick example ([4ba6249c](https://github.com/kbknapp/clap-rs/commit/4ba6249c3cf4d2e083370d1fe4dcc7025282c28a)) + +#### Features + +* **Unicode:** allows non-panicing on invalid unicode characters ([c5bf7ddc](https://github.com/kbknapp/clap-rs/commit/c5bf7ddc8cfb876ec928a5aaf5591232bbb32e5d)) + +#### Documentation + +* properly names Examples section for rustdoc ([87ba5445](https://github.com/kbknapp/clap-rs/commit/87ba54451d7ec7b1c9b9ef134f90bbe39e6fac69)) +* fixes various typos and spelling 
([f85640f9](https://github.com/kbknapp/clap-rs/commit/f85640f9f6d8fd3821a40e9b8b7a34fabb789d02)) +* **Arg:** unhides fields of the Arg struct ([931aea88](https://github.com/kbknapp/clap-rs/commit/931aea88427edf43a3da90d5a500c1ff2b2c3614)) + +#### Bug Fixes + +* flush the buffer in App::print_version() ([cbc42a37](https://github.com/kbknapp/clap-rs/commit/cbc42a37d212d84d22b1777d08e584ff191934e7)) +* Macro benchmarks ([13712da1](https://github.com/kbknapp/clap-rs/commit/13712da1d36dc7614eec3a10ad488257ba615751)) + + + + +## v1.4.0 (2015-09-09) + + +#### Features + +* allows printing help message by library consumers ([56b95f32](https://github.com/kbknapp/clap-rs/commit/56b95f320875c62dda82cb91b29059671e120ed1)) +* allows defining hidden args and subcmds ([2cab4d03](https://github.com/kbknapp/clap-rs/commit/2cab4d0334ea3c2439a1d4bfca5bf9905c7ea9ac), closes [#231](https://github.com/kbknapp/clap-rs/issues/231)) +* Builder macro to assist with App/Arg/Group/SubCommand building ([443841b0](https://github.com/kbknapp/clap-rs/commit/443841b012a8d795cd5c2bd69ae6e23ef9b16477)) +* **Errors:** allows consumers to write to stderr and exit on error ([1e6403b6](https://github.com/kbknapp/clap-rs/commit/1e6403b6a863574fa3cb6946b1fb58f034e8664c)) + + + + +### v1.3.2 (2015-09-08) + + +#### Documentation + +* fixed ErrorKind docs ([dd057843](https://github.com/kbknapp/clap-rs/commit/dd05784327fa070eb6ce5ce89a8507e011d8db94)) +* **ErrorKind:** changed examples content ([b9ca2616](https://github.com/kbknapp/clap-rs/commit/b9ca261634b89613bbf3d98fd74d55cefbb31a8c)) + +#### Bug Fixes + +* fixes a bug where the help subcommand wasn't overridable ([94003db4](https://github.com/kbknapp/clap-rs/commit/94003db4b5eebe552ca337521c1c001295822745)) + +#### Features + +* adds abiltiy not consume self when parsing matches and/or exit on help ([94003db4](https://github.com/kbknapp/clap-rs/commit/94003db4b5eebe552ca337521c1c001295822745)) +* **App:** Added ability for users to handle errors themselves ([934e6fbb](https://github.com/kbknapp/clap-rs/commit/934e6fbb643b2385efc23444fe6fce31494dc288)) + + + + +### v1.3.1 (2015-09-04) + + +#### Examples + +* **17_yaml:** fixed example ([9b848622](https://github.com/kbknapp/clap-rs/commit/9b848622296c8c5c7b9a39b93ddd41f51df790b5)) + +#### Performance + +* changes ArgGroup HashSets to Vec ([3cb4a48e](https://github.com/kbknapp/clap-rs/commit/3cb4a48ebd15c20692f4f3a2a924284dc7fd5e10)) +* changes BTreeSet for Vec in some instances ([baab2e3f](https://github.com/kbknapp/clap-rs/commit/baab2e3f4060e811abee14b1654cbcd5cf3b5fea)) + + + + +## v1.3.0 (2015-09-01) + + +#### Features + +* **YAML:** allows building a CLI from YAML files ([86cf4c45](https://github.com/kbknapp/clap-rs/commit/86cf4c45626a36b8115446952f9069f73c1debc3)) +* **ArgGroups:** adds support for building ArgGroups from yaml ([ecf88665](https://github.com/kbknapp/clap-rs/commit/ecf88665cbff367018b29161a1b75d44a212707d)) +* **Subcommands:** adds support for subcommands from yaml ([e415cf78](https://github.com/kbknapp/clap-rs/commit/e415cf78ba916052d118a8648deba2b9c16b1530)) + +#### Documentation + +* **YAML:** adds examples for using YAML to build a CLI ([ab41d7f3](https://github.com/kbknapp/clap-rs/commit/ab41d7f38219544750e6e1426076dc498073191b)) +* **Args from YAML:** fixes doc examples ([19b348a1](https://github.com/kbknapp/clap-rs/commit/19b348a10050404cd93888dbbbe4f396681b67d0)) +* **Examples:** adds better usage examples instead of having unused variables 
([8cbacd88](https://github.com/kbknapp/clap-rs/commit/8cbacd8883004fe71a8ea036ec4391c7dd8efe94)) + +#### Examples + +* Add AppSettings example ([12705079](https://github.com/kbknapp/clap-rs/commit/12705079ca96a709b4dd94f7ddd20a833b26838c)) + +#### Bug Fixes + +* **Unified Help Messages:** fixes a crash from this setting and no opts ([169ffec1](https://github.com/kbknapp/clap-rs/commit/169ffec1003d58d105d7ef2585b3425e57980000), closes [#210](https://github.com/kbknapp/clap-rs/issues/210)) + + + + +### v1.2.5 (2015-08-27) + + +#### Examples + +* add custom validator example ([b9997d1f](https://github.com/kbknapp/clap-rs/commit/b9997d1fca74d4d8f93971f2a01bdf9798f913d5)) +* fix indentation ([d4f1b740](https://github.com/kbknapp/clap-rs/commit/d4f1b740ede410fd2528b9ecd89592c2fd8b1e20)) + +#### Features + +* **Args:** allows opts and args to define a name for help and usage msgs ([ad962ec4](https://github.com/kbknapp/clap-rs/commit/ad962ec478da999c7dba0afdb84c266f4d09b1bd)) + + + + +### v1.2.4 (2015-08-26) + + +#### Bug Fixes + +* **Possible Values:** fixes a bug where suggestions arent made when using --long=value format ([3d5e9a6c](https://github.com/kbknapp/clap-rs/commit/3d5e9a6cedb26668839b481c9978e2fbbab8be6f), closes [#192](https://github.com/kbknapp/clap-rs/issues/192)) + + + + +### v1.2.3 (2015-08-24) + + +#### Bug Fixes + +* **App, Args:** fixed subcommand reqs negation ([b41afa8c](https://github.com/kbknapp/clap-rs/commit/b41afa8c3ded3d1be12f7a2f8ea06cc44afc9458), closes [#188](https://github.com/kbknapp/clap-rs/issues/188)) + + + + +### v1.2.2 (2015-08-23) + + +#### Bug Fixes + +* fixed confusing error message, also added test for it ([fc7a31a7](https://github.com/kbknapp/clap-rs/commit/fc7a31a745efbf1768ee2c62cd3bb72bfe30c708)) +* **App:** fixed requirmets overriding ([9c135eb7](https://github.com/kbknapp/clap-rs/commit/9c135eb790fa16183e5bdb2009ddc3cf9e25f99f)) + + + + +### v1.2.1 (2015-08-20) + + +#### Documentation + +* **README.md:** updates for new features ([16cf9245](https://github.com/kbknapp/clap-rs/commit/16cf9245fb5fc4cf6face898e358368bf9961cbb)) + +#### Features + +* implements posix compatible conflicts for long args ([8c2d48ac](https://github.com/kbknapp/clap-rs/commit/8c2d48acf5473feebd721a9049a9c9b7051e70f9)) +* added overrides to support conflicts in POSIX compatible manner ([0b916a00](https://github.com/kbknapp/clap-rs/commit/0b916a00de26f6941538f6bc5f3365fa302083c1)) +* **Args:** allows defining POSIX compatible argument conflicts ([d715646e](https://github.com/kbknapp/clap-rs/commit/d715646e69759ccd95e01f49b04f489827ecf502)) + +#### Bug Fixes + +* fixed links in cargo and license buttons ([6d9837ad](https://github.com/kbknapp/clap-rs/commit/6d9837ad9a9e006117cd7372fdc60f9a3889c7e2)) + +#### Performance + +* **Args and Apps:** changes HashSet->Vec in some instances for increased performance ([d0c3b379](https://github.com/kbknapp/clap-rs/commit/d0c3b379700757e0a9b0c40af709f8af1f5b4949)) + + + + +### v1.2.0 (2015-08-15) + + +#### Bug Fixes + +* fixed misspell and enum name ([7df170d7](https://github.com/kbknapp/clap-rs/commit/7df170d7f4ecff06608317655d1e0c4298f62076)) +* fixed use for clap crate ([dc3ada73](https://github.com/kbknapp/clap-rs/commit/dc3ada738667d4b689678f79d14251ee82004ece)) + +#### Documentation + +* updates docs for new features ([03496547](https://github.com/kbknapp/clap-rs/commit/034965471782d872ca495045b58d34b31807c5b1)) +* fixed docs for previous changes 
([ade36778](https://github.com/kbknapp/clap-rs/commit/ade367780c366425de462506d256e0f554ed3b9c)) + +#### Improvements + +* **AppSettings:** adds ability to add multiple settings at once ([4a00e251](https://github.com/kbknapp/clap-rs/commit/4a00e2510d0ca8d095d5257d51691ba3b61c1374)) + +#### Features + +* Replace application level settings with enum variants ([618dc4e2](https://github.com/kbknapp/clap-rs/commit/618dc4e2c205bf26bc43146164e65eb1f6b920ed)) +* **Args:** allows for custom argument value validations to be defined ([84ae2ddb](https://github.com/kbknapp/clap-rs/commit/84ae2ddbceda34b5cbda98a6959edaa52fde2e1a), closes [#170](https://github.com/kbknapp/clap-rs/issues/170)) + + + + +### v1.1.6 (2015-08-01) + + +#### Bug Fixes + +* fixes two bugs in App when printing newlines in help and subcommands required error ([d63c0136](https://github.com/kbknapp/clap-rs/commit/d63c0136310db9dd2b1c7b4745938311601d8938)) + + + + +### v1.1.5 (2015-07-29) + +#### Performance + +* removes some unneeded allocations ([93e915df](https://github.com/kbknapp/clap-rs/commit/93e915dfe300f7b7d6209ca93323c6a46f89a8c1)) + + +### v1.1.4 (2015-07-20) + + +#### Improvements + +* **Usage Strings** displays a [--] when it may be helpful ([86c3be85](https://github.com/kbknapp/clap-rs/commit/86c3be85fb6f77f83b5a6d2df40ae60937486984)) + +#### Bug Fixes + +* **Macros** fixes a typo in a macro generated error message ([c9195c5f](https://github.com/kbknapp/clap-rs/commit/c9195c5f92abb8cd6a37b4f4fbb2f1fee2a8e368)) +* **Type Errors** fixes formatting of error output when failed type parsing ([fe5d95c6](https://github.com/kbknapp/clap-rs/commit/fe5d95c64f3296e6eddcbec0cb8b86659800145f)) + + + + +### v1.1.3 (2015-07-18) + + +#### Documentation + +* updates README.md to include lack of color support on Windows ([52f81e17](https://github.com/kbknapp/clap-rs/commit/52f81e17377b18d2bd0f34693b642b7f358998ee)) + +#### Bug Fixes + +* fixes formatting bug which prevented compiling on windows ([9cb5dceb](https://github.com/kbknapp/clap-rs/commit/9cb5dceb3e5fe5e0e7b24619ff77e5040672b723), closes [#163](https://github.com/kbknapp/clap-rs/issues/163)) + + + + +### v1.1.2 (2015-07-17) + + +#### Bug Fixes + +* fixes a bug when parsing multiple {n} newlines inside help strings ([6d214b54](https://github.com/kbknapp/clap-rs/commit/6d214b549a9b7e189a94e5fa2b7c92cc333ca637)) + + + + +## v1.1.1 (2015-07-17) + + +#### Bug Fixes + +* fixes a logic bug and allows setting Arg::number_of_values() < 2 ([42b6d1fc](https://github.com/kbknapp/clap-rs/commit/42b6d1fc3c519c92dfb3af15276e7d3b635e6cfe), closes [#161](https://github.com/kbknapp/clap-rs/issues/161)) + + + + +## v1.1.0 (2015-07-16) + + +#### Features + +* allows creating unified help messages, a la docopt or getopts ([52bcd892](https://github.com/kbknapp/clap-rs/commit/52bcd892ea51564ce463bc5865acd64f8fe91cb1), closes [#158](https://github.com/kbknapp/clap-rs/issues/158)) +* allows stating all subcommands should *not* have --version flags ([336c476f](https://github.com/kbknapp/clap-rs/commit/336c476f631d512b54ac56fdca6f29ebdc2c00c5), closes [#156](https://github.com/kbknapp/clap-rs/issues/156)) +* allows setting version number to auto-propagate through subcommands ([bc66d3c6](https://github.com/kbknapp/clap-rs/commit/bc66d3c6deedeca62463fff95369ab1cfcdd366b), closes [#157](https://github.com/kbknapp/clap-rs/issues/157)) + +#### Improvements + +* **Help Strings** properly aligns and handles newlines in long help strings 
([f9800a29](https://github.com/kbknapp/clap-rs/commit/f9800a29696dd2cc0b0284bf693b3011831e556f), closes [#145](https://github.com/kbknapp/clap-rs/issues/145)) + + +#### Performance + +* **Help Messages** big performance improvements when printing help messages ([52bcd892](https://github.com/kbknapp/clap-rs/commit/52bcd892ea51564ce463bc5865acd64f8fe91cb1)) + +#### Documentation + +* updates readme with new features ([8232f7bb](https://github.com/kbknapp/clap-rs/commit/8232f7bb52e88862bc13c3d4f99ee4f56cfe4bc0)) +* fix incorrect code example for `App::subcommand_required` ([8889689d](https://github.com/kbknapp/clap-rs/commit/8889689dc6336ccc45b2c9f2cf8e2e483a639e93)) + + + +### v1.0.3 (2015-07-11) + + +#### Improvements + +* **Errors** writes errors to stderr ([cc76ab8c](https://github.com/kbknapp/clap-rs/commit/cc76ab8c2b77c67b42f4717ded530df7806142cf), closes [#154](https://github.com/kbknapp/clap-rs/issues/154)) + +#### Documentation + +* **README.md** updates example help message to new format ([0aca29bd](https://github.com/kbknapp/clap-rs/commit/0aca29bd5d6d1a4e9971bdc88d946ffa58606efa)) + + + + +### v1.0.2 (2015-07-09) + + +#### Improvements + +* **Usage** re-orders optional arguments and required to natural standard ([dc7e1fce](https://github.com/kbknapp/clap-rs/commit/dc7e1fcea5c85d317018fb201d2a9262249131b4), closes [#147](https://github.com/kbknapp/clap-rs/issues/147)) + + + + +### v1.0.1 (2015-07-08) + + +#### Bug Fixes + +* allows empty values when using --long='' syntax ([083f82d3](https://github.com/kbknapp/clap-rs/commit/083f82d333b69720a6ef30074875310921d964d1), closes [#151](https://github.com/kbknapp/clap-rs/issues/151)) + + + + +## v1.0.0 (2015-07-08) + + +#### Documentation + +* **README.md** adds new features to what's new list ([938f7f01](https://github.com/kbknapp/clap-rs/commit/938f7f01340f521969376cf4e2e3d9436bca21f7)) +* **README.md** use with_name for subcommands ([28b7e316](https://github.com/kbknapp/clap-rs/commit/28b7e3161fb772e5309042648fe8c3a420645bac)) + +#### Features + +* args can now be parsed from arbitrary locations, not just std::env::args() ([75312528](https://github.com/kbknapp/clap-rs/commit/753125282b1b9bfff875f1557ce27610edcc59e1)) + + + + +## v1.0.0-beta (2015-06-30) + + +#### Features + +* allows waiting for user input on error ([d0da3bdd](https://github.com/kbknapp/clap-rs/commit/d0da3bdd9d1871541907ea9c645322a74d260e07), closes [#140](https://github.com/kbknapp/clap-rs/issues/140)) +* **Help** allows one to fully override the auto-generated help message ([26d5ae3e](https://github.com/kbknapp/clap-rs/commit/26d5ae3e330d1e150811d5b60b2b01a8f8df854e), closes [#141](https://github.com/kbknapp/clap-rs/issues/141)) + +#### Documentation + +* adds "whats new" section to readme ([ff149a29](https://github.com/kbknapp/clap-rs/commit/ff149a29dd9e179865e6d577cd7dc87c54f8f95c)) + +#### Improvements + +* removes deprecated functions in prep for 1.0 ([274484df](https://github.com/kbknapp/clap-rs/commit/274484dfd08fff4859cefd7e9bef3b73d3a9cb5f)) + + + + +## v0.11.0 (2015-06-17) - BREAKING CHANGE + + +#### Documentation + +* updates docs to new version flag defaults ([ebf442eb](https://github.com/kbknapp/clap-rs/commit/ebf442ebebbcd2ec6bfe2c06566c9d362bccb112)) + +#### Features + +* **Help and Version** default short for version is now `-V` but can be overridden (only breaks manual documentation) (**BREAKING CHANGE** [eb1d9320](https://github.com/kbknapp/clap-rs/commit/eb1d9320c509c1e4e57d7c7959da82bcfe06ada0)) + + + + +### v0.10.5 (2015-06-06) + + +#### Bug 
Fixes + +* **Global Args** global arguments propogate fully now ([1f377960](https://github.com/kbknapp/clap-rs/commit/1f377960a48c82f54ca5f39eb56bcb393140b046), closes [#137](https://github.com/kbknapp/clap-rs/issues/137)) + + + + +### v0.10.4 (2015-06-06) + + +#### Bug Fixes + +* **Global Args** global arguments propogate fully now ([8f2c0160](https://github.com/kbknapp/clap-rs/commit/8f2c0160c8d844daef375a33dbaec7d89de00a00), closes [#137](https://github.com/kbknapp/clap-rs/issues/137)) + + + + +### v0.10.3 (2015-05-31) + + +#### Bug Fixes + +* **Global Args** fixes a bug where globals only transfer to one subcommand ([a37842ee](https://github.com/kbknapp/clap-rs/commit/a37842eec1ee3162b86fdbda23420b221cdb1e3b), closes [#135](https://github.com/kbknapp/clap-rs/issues/135)) + + + + +### v0.10.2 (2015-05-30) + + +#### Improvements + +* **Binary Names** allows users to override the system determined bin name ([2191fe94](https://github.com/kbknapp/clap-rs/commit/2191fe94bda35771383b52872fb7f5421b178be1), closes [#134](https://github.com/kbknapp/clap-rs/issues/134)) + +#### Documentation + +* adds contributing guidelines ([6f76bd0a](https://github.com/kbknapp/clap-rs/commit/6f76bd0a07e8b7419b391243ab2d6687cd8a9c5f)) + + + + +### v0.10.1 (2015-05-26) + + +#### Features + +* can now specify that an app or subcommand should display help on no args or subcommands ([29ca7b2f](https://github.com/kbknapp/clap-rs/commit/29ca7b2f74376ca0cdb9d8ee3bfa99f7640cc404), closes [#133](https://github.com/kbknapp/clap-rs/issues/133)) + + + + +## v0.10.0 (2015-05-23) + + +#### Features + +* **Global Args** allows args that propagate down to child commands ([2bcc6137](https://github.com/kbknapp/clap-rs/commit/2bcc6137a83cb07757771a0afea953e68e692f0b), closes [#131](https://github.com/kbknapp/clap-rs/issues/131)) + +#### Improvements + +* **Colors** implements more structured colored output ([d6c3ed54](https://github.com/kbknapp/clap-rs/commit/d6c3ed54d21cf7b40d9f130d4280ff5448522fc5), closes [#129](https://github.com/kbknapp/clap-rs/issues/129)) + +#### Deprecations + +* **SubCommand/App** several methods and functions for stable release ([28b73855](https://github.com/kbknapp/clap-rs/commit/28b73855523ad170544afdb20665db98702fbe70)) + +#### Documentation + +* updates for deprecations and new features ([743eefe8](https://github.com/kbknapp/clap-rs/commit/743eefe8dd40c1260065ce086d572e9e9358bc4c)) + + + + +## v0.9.2 (2015-05-20) + + +#### Bug Fixes + +* **help** allows parent requirements to be ignored with help and version ([52218cc1](https://github.com/kbknapp/clap-rs/commit/52218cc1fdb06a42456c964d98cc2c7ac3432412), closes [#124](https://github.com/kbknapp/clap-rs/issues/124)) + + + + +## v0.9.1 (2015-05-18) + + +#### Bug Fixes + +* **help** fixes a bug where requirements are included as program name in help and version ([08ba3f25](https://github.com/kbknapp/clap-rs/commit/08ba3f25cf38b149229ba8b9cb37a5804fe6b789)) + + + + +## v0.9.0 (2015-05-17) + + +#### Improvements + +* **usage** usage strings now include parent command requirements ([dd8f21c7](https://github.com/kbknapp/clap-rs/commit/dd8f21c7c15cde348fdcf44fa7c205f0e98d2e4a), closes [#125](https://github.com/kbknapp/clap-rs/issues/125)) +* **args** allows consumer of clap to decide if empty values are allowed or not ([ab4ec609](https://github.com/kbknapp/clap-rs/commit/ab4ec609ccf692b9b72cccef5c9f74f5577e360d), closes [#122](https://github.com/kbknapp/clap-rs/issues/122)) + +#### Features + +* **subcommands** + * allows optionally specifying that no 
subcommand is an error ([7554f238](https://github.com/kbknapp/clap-rs/commit/7554f238fd3afdd60b7e4dcf00ff4a9eccf842c1), closes [#126](https://github.com/kbknapp/clap-rs/issues/126)) + * subcommands can optionally negate parent requirements ([4a4229f5](https://github.com/kbknapp/clap-rs/commit/4a4229f500e21c350e1ef78dd09ef27559653288), closes [#123](https://github.com/kbknapp/clap-rs/issues/123)) + + + + +## v0.8.6 (2015-05-17) + + +#### Bug Fixes + +* **args** `-` can now be parsed as a value for an argument ([bc12e78e](https://github.com/kbknapp/clap-rs/commit/bc12e78eadd7eaf9d008a8469fdd2dfd7990cb5d), closes [#121](https://github.com/kbknapp/clap-rs/issues/121)) + + + + +## v0.8.5 (2015-05-15) + + +#### Bug Fixes + +* **macros** makes macro errors consistent with others ([0c264a8c](https://github.com/kbknapp/clap-rs/commit/0c264a8ca57ec1cfdcb74dae79145d766cdc9b97), closes [#118](https://github.com/kbknapp/clap-rs/issues/118)) + +#### Features + +* **macros** + * arg_enum! and simple_enum! provide a Vec<&str> of variant names ([30fa87ba](https://github.com/kbknapp/clap-rs/commit/30fa87ba4e0f3189351d8f4f78b72e616a30d0bd), closes [#119](https://github.com/kbknapp/clap-rs/issues/119)) + * arg_enum! and simple_enum! auto-implement Display ([d1219f0d](https://github.com/kbknapp/clap-rs/commit/d1219f0d1371d872061bd0718057eca4ef47b739), closes [#120](https://github.com/kbknapp/clap-rs/issues/120)) + + + + +## v0.8.4 (2015-05-12) + + +#### Bug Fixes + +* **suggestions** --help and --version now get suggestions ([d2b3b1fa](https://github.com/kbknapp/clap-rs/commit/d2b3b1faa0bdc1c5d2350cc4635aba81e02e9d96), closes [#116](https://github.com/kbknapp/clap-rs/issues/116)) + + + + +## v0.8.3 (2015-05-10) + + +#### Bug Fixes + +* **usage** groups unfold their members in usage strings ([55d15582](https://github.com/kbknapp/clap-rs/commit/55d155827ea4a6b077a83669701e797ce1ad68f4), closes [#114](https://github.com/kbknapp/clap-rs/issues/114)) + +#### Performance + +* **usage** removes unneeded allocations ([fd53cd18](https://github.com/kbknapp/clap-rs/commit/fd53cd188555f5c3dc8bc341c5d7eb04b761a70f)) + + + + +## v0.8.2 (2015-05-08) + + +#### Bug Fixes + +* **usage strings** positional arguments are presented in index order ([eb0e374e](https://github.com/kbknapp/clap-rs/commit/eb0e374ecf952f1eefbc73113f21e0705936e40b), closes [#112](https://github.com/kbknapp/clap-rs/issues/112)) + + + + +## v0.8.1 (2015-05-06) + + +#### Bug Fixes + +* **subcommands** stops parsing multiple values when subcommands are found ([fc79017e](https://github.com/kbknapp/clap-rs/commit/fc79017eced04fd41cc1801331e5054df41fac17), closes [#109](https://github.com/kbknapp/clap-rs/issues/109)) + +#### Improvements + +* **color** reduces color in error messages ([aab44cca](https://github.com/kbknapp/clap-rs/commit/aab44cca6352f47e280c296e50c535f5d752dd46), closes [#110](https://github.com/kbknapp/clap-rs/issues/110)) +* **suggestions** adds suggested arguments to usage strings ([99447414](https://github.com/kbknapp/clap-rs/commit/994474146e9fb8b701af773a52da71553d74d4b7)) + + + + +## v0.8.0 (2015-05-06) + + +#### Bug Fixes + +* **did-you-mean** for review ([0535cfb0](https://github.com/kbknapp/clap-rs/commit/0535cfb0c711331568b4de8080eeef80bd254b68)) +* **Positional** positionals were ignored if they matched a subcmd, even after '--' ([90e7b081](https://github.com/kbknapp/clap-rs/commit/90e7b0818741668b47cbe3becd029bab588e3553)) +* **help** fixes bug where space between arg and help is too long 
([632fb115](https://github.com/kbknapp/clap-rs/commit/632fb11514c504999ea86bdce47cdd34f8ebf646)) + +#### Features + +* **from_usage** adds ability to add value names or num of vals in usage string ([3d581976](https://github.com/kbknapp/clap-rs/commit/3d58197674ed7886ca315efb76e411608a327501), closes [#98](https://github.com/kbknapp/clap-rs/issues/98)) +* **did-you-mean** + * gate it behind 'suggestions' ([c0e38351](https://github.com/kbknapp/clap-rs/commit/c0e383515d01bdd5ca459af9c2f7e2cf49e2488b)) + * for possible values ([1cc2deb2](https://github.com/kbknapp/clap-rs/commit/1cc2deb29158e0e4e8b434e4ce26b3d819301a7d)) + * for long flags (i.e. --long) ([52a0b850](https://github.com/kbknapp/clap-rs/commit/52a0b8505c99354bdf5fd1cd256cf41197ac2d81)) + * for subcommands ([06e869b5](https://github.com/kbknapp/clap-rs/commit/06e869b5180258047ed3c60ba099de818dd25fff)) +* **Flags** adds sugestions functionality ([8745071c](https://github.com/kbknapp/clap-rs/commit/8745071c3257dd327c497013516f12a823df9530)) +* **errors** colorizes output red on error ([f8b26b13](https://github.com/kbknapp/clap-rs/commit/f8b26b13da82ba3ba9a932d3d1ab4ea45d1ab036)) + +#### Improvements + +* **arg_enum** allows ascii case insensitivity for enum variants ([b249f965](https://github.com/kbknapp/clap-rs/commit/b249f9657c6921c004764bd80d13ebca81585eec), closes [#104](https://github.com/kbknapp/clap-rs/issues/104)) +* **clap-test** simplified `make test` invocation ([d17dcb29](https://github.com/kbknapp/clap-rs/commit/d17dcb2920637a1f58c61c596b7bd362fd53047c)) + +#### Documentation + +* **README** adds details about optional and new features ([960389de](https://github.com/kbknapp/clap-rs/commit/960389de02c9872aaee9adabe86987f71f986e39)) +* **clap** fix typos caught by codespell ([8891d929](https://github.com/kbknapp/clap-rs/commit/8891d92917aa1a069cca67272be41b99e548356e)) +* **from_usage** explains new usage strings with multiple values ([05476fc6](https://github.com/kbknapp/clap-rs/commit/05476fc61cd1e5f4a4e750d258c878732a3a9c64)) + + + + +## v0.7.6 (2015-05-05) + + +#### Improvements + +* **Options** adds number of values to options in help/usage ([c1c993c4](https://github.com/kbknapp/clap-rs/commit/c1c993c419d18e35c443785053d8de9a2ef88073)) + +#### Features + +* **from_usage** adds ability to add value names or num of vals in usage string ([ad55748c](https://github.com/kbknapp/clap-rs/commit/ad55748c265cf27935c7b210307d2040b6a09125), closes [#98](https://github.com/kbknapp/clap-rs/issues/98)) + +#### Bug Fixes + +* **MultipleValues** properly distinguishes between multiple values and multiple occurrences ([dd2a7564](https://github.com/kbknapp/clap-rs/commit/dd2a75640ca68a91b973faad15f04df891356cef), closes [#99](https://github.com/kbknapp/clap-rs/issues/99)) +* **help** fixes tab alignment with multiple values ([847001ff](https://github.com/kbknapp/clap-rs/commit/847001ff6d8f4d9518e810fefb8edf746dd0f31e)) + +#### Documentation + +* **from_usage** explains new usage strings with multiple values ([5a3a42df](https://github.com/kbknapp/clap-rs/commit/5a3a42dfa3a783537f88dedc0fd5f0edcb8ea372)) + + + + +## v0.7.5 (2015-05-04) + + +#### Bug Fixes + +* **Options** fixes bug where options with no value don't error out ([a1fb94be](https://github.com/kbknapp/clap-rs/commit/a1fb94be53141572ffd97aad037295d4ffec82d0)) + + + + +## v0.7.4 (2015-05-03) + + +#### Bug Fixes + +* **Options** fixes a bug where option arguments in succession get their values skipped 
([f66334d0](https://github.com/kbknapp/clap-rs/commit/f66334d0ce984e2b56e5c19abb1dd536fae9342a)) + + + + +## v0.7.3 (2015-05-03) + + +#### Bug Fixes + +* **RequiredValues** fixes a bug where missing values are parsed as missing arguments ([93c4a723](https://github.com/kbknapp/clap-rs/commit/93c4a7231ba1a08152648598f7aa4503ea82e4de)) + +#### Improvements + +* **ErrorMessages** improves error messages and corrections ([a29c3983](https://github.com/kbknapp/clap-rs/commit/a29c3983c4229906655a29146ec15a0e46dd942d)) +* **ArgGroups** improves requirement and confliction support for groups ([c236dc5f](https://github.com/kbknapp/clap-rs/commit/c236dc5ff475110d2a1b80e62903f80296163ad3)) + + + + +## v0.7.2 (2015-05-03) + + +#### Bug Fixes + +* **RequiredArgs** fixes bug where required-by-default arguments are not listed in usage ([12aea961](https://github.com/kbknapp/clap-rs/commit/12aea9612d290845ba86515c240aeeb0a21198db), closes [#96](https://github.com/kbknapp/clap-rs/issues/96)) + + + + +## v0.7.1 (2015-05-01) + + +#### Bug Fixes + +* **MultipleValues** stops evaluating values if the max or exact number of values was reached ([86d92c9f](https://github.com/kbknapp/clap-rs/commit/86d92c9fdbf9f422442e9562977bbaf268dbbae1)) + + + + +## v0.7.0 (2015-04-30) - BREAKING CHANGE + + +#### Bug Fixes + +* **from_usage** removes bug where usage strings have no help text ([ad4e5451](https://github.com/kbknapp/clap-rs/commit/ad4e54510739aeabf75f0da3278fb0952db531b3), closes [#83](https://github.com/kbknapp/clap-rs/issues/83)) + +#### Features + +* **MultipleValues** + * add support for minimum and maximum number of values ([53f6b8c9](https://github.com/kbknapp/clap-rs/commit/53f6b8c9d8dc408b4fa9f833fc3a63683873c42f)) + * adds support limited number and named values ([ae09f05e](https://github.com/kbknapp/clap-rs/commit/ae09f05e92251c1b39a83d372736fcc7b504e432)) + * implement shorthand for options with multiple values ([6669f0a9](https://github.com/kbknapp/clap-rs/commit/6669f0a9687d4f668523145d7bd5c007d1eb59a8)) +* **arg** allow other types besides Vec for multiple value settings (**BREAKING CHANGE** [0cc2f698](https://github.com/kbknapp/clap-rs/commit/0cc2f69839b9b1db5d06330771b494783049a88e), closes [#87](https://github.com/kbknapp/clap-rs/issues/87)) +* **usage** implement smart usage strings on errors ([d77048ef](https://github.com/kbknapp/clap-rs/commit/d77048efb1e595ffe831f1a2bea2f2700db53b9f), closes [#88](https://github.com/kbknapp/clap-rs/issues/88)) + + + + +## v0.6.9 (2015-04-29) + + +#### Bug Fixes + +* **from_usage** removes bug where usage strings have no help text ([ad4e5451](https://github.com/kbknapp/clap-rs/commit/ad4e54510739aeabf75f0da3278fb0952db531b3), closes [#83](https://github.com/kbknapp/clap-rs/issues/83)) + + + + +## 0.6.8 (2015-04-27) + + +#### Bug Fixes + +* **help** change long help --long=long -> --long ([1e25abfc](https://github.com/kbknapp/clap-rs/commit/1e25abfc36679ab89eae71bf98ced4de81992d00)) +* **RequiredArgs** required by default args should no longer be required when their exclusions are present ([4bb4c3cc](https://github.com/kbknapp/clap-rs/commit/4bb4c3cc076b49e86720e882bf8c489877199f2d)) + +#### Features + +* **ArgGroups** add ability to create arg groups ([09eb4d98](https://github.com/kbknapp/clap-rs/commit/09eb4d9893af40c347e50e2b717e1adef552357d)) + + + + +## v0.6.7 (2015-04-22) + + +#### Bug Fixes + +* **from_usage** fix bug causing args to not be required ([b76129e9](https://github.com/kbknapp/clap-rs/commit/b76129e9b71a63365d5c77a7f57b58dbd1e94d49)) + +#### 
Features + +* **apps** add ability to display additional help info after auto-gen'ed help msg ([65cc259e](https://github.com/kbknapp/clap-rs/commit/65cc259e4559cbe3653c865ec0c4b1e42a389b07)) + + + + +## v0.6.6 (2015-04-19) + + +#### Bug Fixes + +* **from_usage** tabs and spaces should be treated equally ([4fd44181](https://github.com/kbknapp/clap-rs/commit/4fd44181d55d8eb88caab1e625231cfa3129e347)) + +#### Features + +* **macros.rs** add macro to get version from Cargo.toml ([c630969a](https://github.com/kbknapp/clap-rs/commit/c630969aa3bbd386379219cae27ba1305b117f3e)) + + + + +## v0.6.5 (2015-04-19) + + +#### Bug Fixes + +* **macros.rs** fix use statements for trait impls ([86e4075e](https://github.com/kbknapp/clap-rs/commit/86e4075eb111937c8a7bdb344e866e350429f042)) + + + + +## v0.6.4 (2015-04-17) + + +#### Features + +* **macros** add ability to create enums pub or priv with derives ([2c499f80](https://github.com/kbknapp/clap-rs/commit/2c499f8015a199827cdf1fa3ec4f6f171722f8c7)) + + + + +## v0.6.3 (2015-04-16) + + +#### Features + +* **macros** add macro to create custom enums to use as types ([fb672aff](https://github.com/kbknapp/clap-rs/commit/fb672aff561c29db2e343d6c607138f141aca8b6)) + + + + +## v0.6.2 (2015-04-14) + + +#### Features + +* **macros** + * add ability to get multiple typed values or exit ([0b87251f](https://github.com/kbknapp/clap-rs/commit/0b87251fc088234bee51c323c2b652d7254f7a59)) + * add ability to get a typed multiple values ([e243fe38](https://github.com/kbknapp/clap-rs/commit/e243fe38ddbbf845a46c0b9baebaac3778c80927)) + * add convenience macro to get a typed value or exit ([4b7cd3ea](https://github.com/kbknapp/clap-rs/commit/4b7cd3ea4947780d9daa39f3e1ddab53ad4c7fef)) + * add convenience macro to get a typed value ([8752700f](https://github.com/kbknapp/clap-rs/commit/8752700fbb30e89ee68adbce24489ae9a24d33a9)) + + + + +## v0.6.1 (2015-04-13) + + +#### Bug Fixes + +* **from_usage** trim all whitespace before parsing ([91d29045](https://github.com/kbknapp/clap-rs/commit/91d2904599bd602deef2e515dfc65dc2863bdea0)) + + + + +## v0.6.0 (2015-04-13) + + +#### Bug Fixes + +* **tests** fix failing doc tests ([3710cd69](https://github.com/kbknapp/clap-rs/commit/3710cd69162f87221a62464f63437c1ce843ad3c)) + +#### Features + +* **app** add support for building args from usage strings ([d5d48bcf](https://github.com/kbknapp/clap-rs/commit/d5d48bcf463a4e494ef758836bd69a4c220bbbb5)) +* **args** add ability to create basic arguments from a usage string ([ab409a8f](https://github.com/kbknapp/clap-rs/commit/ab409a8f1db9e37cc70200f6f4a84a162692e618)) + + + + +## v0.5.14 (2015-04-10) + + +#### Bug Fixes + +* **usage** + * remove unneeded space ([51372789](https://github.com/kbknapp/clap-rs/commit/5137278942121bc2593ce6e5dc224ec2682549e6)) + * remove warning about unused variables ([ba817b9d](https://github.com/kbknapp/clap-rs/commit/ba817b9d815e37320650973f1bea0e7af3030fd7)) + +#### Features + +* **usage** add ability to get usage string for subcommands too ([3636afc4](https://github.com/kbknapp/clap-rs/commit/3636afc401c2caa966efb5b1869ef4f1ed3384aa)) + + + + +## v0.5.13 (2015-04-09) + + +#### Features + +* **SubCommands** add method to get name and subcommand matches together ([64e53928](https://github.com/kbknapp/clap-rs/commit/64e539280e23e567cf5de393b346eb0ca20e7eb5)) +* **ArgMatches** add method to get default usage string ([02462150](https://github.com/kbknapp/clap-rs/commit/02462150ca750bdc7012627d7e8d96379d494d7f)) + + + + +## v0.5.12 (2015-04-08) + + +#### Features + +* **help** 
sort arguments by name so as to not display a random order ([f4b2bf57](https://github.com/kbknapp/clap-rs/commit/f4b2bf5767386013069fb74862e6e938dacf44d2)) + + + + +## v0.5.11 (2015-04-08) + + +#### Bug Fixes + +* **flags** fix bug not allowing users to specify -v or -h ([90e72cff](https://github.com/kbknapp/clap-rs/commit/90e72cffdee321b79eea7a2207119533540062b4)) + + + + +## v0.5.10 (2015-04-08) + + +#### Bug Fixes + +* **help** fix spacing when option argument has not long version ([ca17fa49](https://github.com/kbknapp/clap-rs/commit/ca17fa494b68e92da83ee364bf64b0687006824b)) + + + + +## v0.5.9 (2015-04-08) + + +#### Bug Fixes + +* **positional args** all previous positional args become required when a latter one is required ([c14c3f31](https://github.com/kbknapp/clap-rs/commit/c14c3f31fd557c165570b60911d8ee483d89d6eb), closes [#50](https://github.com/kbknapp/clap-rs/issues/50)) +* **clap** remove unstable features for Rust 1.0 ([9abdb438](https://github.com/kbknapp/clap-rs/commit/9abdb438e36e364d41550e7f5d44ebcaa8ee6b10)) +* **args** improve error messages for arguments with mutual exclusions ([18dbcf37](https://github.com/kbknapp/clap-rs/commit/18dbcf37024daf2b76ca099a6f118b53827aa339), closes [#51](https://github.com/kbknapp/clap-rs/issues/51)) + + + + +## v0.5.8 (2015-04-08) + + +#### Bug Fixes + +* **option args** fix bug in getting the wrong number of occurrences for options ([82ad6ad7](https://github.com/kbknapp/clap-rs/commit/82ad6ad77539cf9f9a03b78db466f575ebd972cc)) +* **help** fix formatting for option arguments with no long ([e8691004](https://github.com/kbknapp/clap-rs/commit/e869100423d93fa3acff03c4620cbcc0d0e790a1)) +* **flags** add assertion to catch flags with specific value sets ([a0a2a40f](https://github.com/kbknapp/clap-rs/commit/a0a2a40fed57f7c5ad9d68970d090e9856306c7d), closes [#52](https://github.com/kbknapp/clap-rs/issues/52)) +* **args** improve error messages for arguments with mutual exclusions ([bff945fc](https://github.com/kbknapp/clap-rs/commit/bff945fc5d03bba4266533340adcffb002508d1b), closes [#51](https://github.com/kbknapp/clap-rs/issues/51)) +* **tests** add missing .takes_value(true) to option2 ([bdb0e88f](https://github.com/kbknapp/clap-rs/commit/bdb0e88f696c8595c3def3bfb0e52d538c7be085)) +* **positional args** all previous positional args become required when a latter one is required ([343d47dc](https://github.com/kbknapp/clap-rs/commit/343d47dcbf83786a45c0d0f01b27fd9dd76725de), closes [#50](https://github.com/kbknapp/clap-rs/issues/50)) + + + + +## v0.5.7 (2015-04-08) + + +#### Bug Fixes + +* **args** fix bug in arguments who are required and mutually exclusive ([6ceb88a5](https://github.com/kbknapp/clap-rs/commit/6ceb88a594caae825605abc1cdad95204996bf29)) + + + + +## v0.5.6 (2015-04-08) + + +#### Bug Fixes + +* **help** fix formatting of help and usage ([28691b52](https://github.com/kbknapp/clap-rs/commit/28691b52f67e65c599e10e4ea2a0f6f9765a06b8)) + + + + +## v0.5.5 (2015-04-08) + + +#### Bug Fixes + +* **help** fix formatting of help for flags and options ([6ec10115](https://github.com/kbknapp/clap-rs/commit/6ec1011563a746f0578a93b76d45e63878e0f9a8)) + + + + +## v0.5.4 (2015-04-08) + + +#### Features + +* **help** add '...' 
to indicate multiple values supported ([297ddba7](https://github.com/kbknapp/clap-rs/commit/297ddba77000e2228762ab0eca50b480f7467386)) + + + + +## v0.5.3 (2015-04-08) + + +#### Features + +* **positionals** + * add assertions for positional args with multiple vals ([b7fa72d4](https://github.com/kbknapp/clap-rs/commit/b7fa72d40f18806ec2042dd67a518401c2cf5681)) + * add support for multiple values ([80784009](https://github.com/kbknapp/clap-rs/commit/807840094109fbf90b348039ae22669ef27889ba)) + + + + +## v0.5.2 (2015-04-08) + + +#### Bug Fixes + +* **apps** allow use of hyphens in application and subcommand names ([da549dcb](https://github.com/kbknapp/clap-rs/commit/da549dcb6c7e0d773044ab17829744483a8b0f7f)) + + + + +## v0.5.1 (2015-04-08) + + +#### Bug Fixes + +* **args** determine if the only arguments allowed are also required ([0a09eb36](https://github.com/kbknapp/clap-rs/commit/0a09eb365ced9a03faf8ed24f083ef730acc90e8)) + + + + +## v0.5.0 (2015-04-08) + + +#### Features + +* **args** add support for a specific set of allowed values on options or positional arguments ([270eb889](https://github.com/kbknapp/clap-rs/commit/270eb88925b6dc2881bff1f31ee344f085d31809)) + + + + +## v0.4.18 (2015-04-08) + + +#### Bug Fixes + +* **usage** display required args in usage, even if only required by others ([1b7316d4](https://github.com/kbknapp/clap-rs/commit/1b7316d4a8df70b0aa584ccbfd33f68966ad2a54)) + +#### Features + +* **subcommands** properly list subcommands in help and usage ([4ee02344](https://github.com/kbknapp/clap-rs/commit/4ee023442abc3dba54b68138006a52b714adf331)) + + + + +## v0.4.17 (2015-04-08) + + +#### Bug Fixes + +* **tests** remove cargo test from claptests makefile ([1cf73817](https://github.com/kbknapp/clap-rs/commit/1cf73817d6fb1dccb5b6a23b46c2efa8b567ad62)) + + + + +## v0.4.16 (2015-04-08) + + +#### Bug Fixes + +* **option** fix bug with option occurrence values ([9af52e93](https://github.com/kbknapp/clap-rs/commit/9af52e93cef9e17ac9974963f132013d0b97b946)) +* **tests** fix testing script bug and formatting ([d8f03a55](https://github.com/kbknapp/clap-rs/commit/d8f03a55c4f74d126710ee06aad5a667246a8001)) + +#### Features + +* **arg** allow lifetimes other than 'static in arguments ([9e8c1fb9](https://github.com/kbknapp/clap-rs/commit/9e8c1fb9406f8448873ca58bab07fe905f1551e5)) diff --git a/clap/CONTRIBUTORS.md b/clap/CONTRIBUTORS.md new file mode 100644 index 000000000..f0fd77724 --- /dev/null +++ b/clap/CONTRIBUTORS.md @@ -0,0 +1,91 @@ +the following is a list of contributors: + + +[kbknapp](https://github.com/kbknapp) |[homu](https://github.com/homu) |[Vinatorul](https://github.com/Vinatorul) |[tormol](https://github.com/tormol) |[willmurphyscode](https://github.com/willmurphyscode) |[little-dude](https://github.com/little-dude) | +:---: |:---: |:---: |:---: |:---: |:---: | +[kbknapp](https://github.com/kbknapp) |[homu](https://github.com/homu) |[Vinatorul](https://github.com/Vinatorul) |[tormol](https://github.com/tormol) |[willmurphyscode](https://github.com/willmurphyscode) |[little-dude](https://github.com/little-dude) | + +[sru](https://github.com/sru) |[mgeisler](https://github.com/mgeisler) |[nabijaczleweli](https://github.com/nabijaczleweli) |[Byron](https://github.com/Byron) |[hgrecco](https://github.com/hgrecco) |[bluejekyll](https://github.com/bluejekyll) | +:---: |:---: |:---: |:---: |:---: |:---: | +[sru](https://github.com/sru) |[mgeisler](https://github.com/mgeisler) |[nabijaczleweli](https://github.com/nabijaczleweli) |[Byron](https://github.com/Byron) 
|[hgrecco](https://github.com/hgrecco) |[bluejekyll](https://github.com/bluejekyll) | + +[segevfiner](https://github.com/segevfiner) |[ignatenkobrain](https://github.com/ignatenkobrain) |[james-darkfox](https://github.com/james-darkfox) |[H2CO3](https://github.com/H2CO3) |[nateozem](https://github.com/nateozem) |[glowing-chemist](https://github.com/glowing-chemist) | +:---: |:---: |:---: |:---: |:---: |:---: | +[segevfiner](https://github.com/segevfiner) |[ignatenkobrain](https://github.com/ignatenkobrain) |[james-darkfox](https://github.com/james-darkfox) |[H2CO3](https://github.com/H2CO3) |[nateozem](https://github.com/nateozem) |[glowing-chemist](https://github.com/glowing-chemist) | + +[discosultan](https://github.com/discosultan) |[rtaycher](https://github.com/rtaycher) |[Arnavion](https://github.com/Arnavion) |[japaric](https://github.com/japaric) |[untitaker](https://github.com/untitaker) |[afiune](https://github.com/afiune) | +:---: |:---: |:---: |:---: |:---: |:---: | +[discosultan](https://github.com/discosultan) |[rtaycher](https://github.com/rtaycher) |[Arnavion](https://github.com/Arnavion) |[japaric](https://github.com/japaric) |[untitaker](https://github.com/untitaker) |[afiune](https://github.com/afiune) | + +[crazymerlyn](https://github.com/crazymerlyn) |[SuperFluffy](https://github.com/SuperFluffy) |[matthiasbeyer](https://github.com/matthiasbeyer) |[malbarbo](https://github.com/malbarbo) |[tshepang](https://github.com/tshepang) |[golem131](https://github.com/golem131) | +:---: |:---: |:---: |:---: |:---: |:---: | +[crazymerlyn](https://github.com/crazymerlyn) |[SuperFluffy](https://github.com/SuperFluffy) |[matthiasbeyer](https://github.com/matthiasbeyer) |[malbarbo](https://github.com/malbarbo) |[tshepang](https://github.com/tshepang) |[golem131](https://github.com/golem131) | + +[jimmycuadra](https://github.com/jimmycuadra) |[Nemo157](https://github.com/Nemo157) |[severen](https://github.com/severen) |[Eijebong](https://github.com/Eijebong) |[cstorey](https://github.com/cstorey) |[wdv4758h](https://github.com/wdv4758h) | +:---: |:---: |:---: |:---: |:---: |:---: | +[jimmycuadra](https://github.com/jimmycuadra) |[Nemo157](https://github.com/Nemo157) |[severen](https://github.com/severen) |[Eijebong](https://github.com/Eijebong) |[cstorey](https://github.com/cstorey) |[wdv4758h](https://github.com/wdv4758h) | + +[frewsxcv](https://github.com/frewsxcv) |[hoodie](https://github.com/hoodie) |[huonw](https://github.com/huonw) |[GrappigPanda](https://github.com/GrappigPanda) |[shepmaster](https://github.com/shepmaster) |[starkat99](https://github.com/starkat99) | +:---: |:---: |:---: |:---: |:---: |:---: | +[frewsxcv](https://github.com/frewsxcv) |[hoodie](https://github.com/hoodie) |[huonw](https://github.com/huonw) |[GrappigPanda](https://github.com/GrappigPanda) |[shepmaster](https://github.com/shepmaster) |[starkat99](https://github.com/starkat99) | + +[porglezomp](https://github.com/porglezomp) |[kraai](https://github.com/kraai) |[musoke](https://github.com/musoke) |[nelsonjchen](https://github.com/nelsonjchen) |[pkgw](https://github.com/pkgw) |[Deedasmi](https://github.com/Deedasmi) | +:---: |:---: |:---: |:---: |:---: |:---: | +[porglezomp](https://github.com/porglezomp) |[kraai](https://github.com/kraai) |[musoke](https://github.com/musoke) |[nelsonjchen](https://github.com/nelsonjchen) |[pkgw](https://github.com/pkgw) |[Deedasmi](https://github.com/Deedasmi) | + +[vmchale](https://github.com/vmchale) |[etopiei](https://github.com/etopiei) 
|[messense](https://github.com/messense) |[Keats](https://github.com/Keats) |[kieraneglin](https://github.com/kieraneglin) |[durka](https://github.com/durka) | +:---: |:---: |:---: |:---: |:---: |:---: | +[vmchale](https://github.com/vmchale) |[etopiei](https://github.com/etopiei) |[messense](https://github.com/messense) |[Keats](https://github.com/Keats) |[kieraneglin](https://github.com/kieraneglin) |[durka](https://github.com/durka) | + +[alex-gulyas](https://github.com/alex-gulyas) |[cite-reader](https://github.com/cite-reader) |[alexbool](https://github.com/alexbool) |[AluisioASG](https://github.com/AluisioASG) |[BurntSushi](https://github.com/BurntSushi) |[AndrewGaspar](https://github.com/AndrewGaspar) | +:---: |:---: |:---: |:---: |:---: |:---: | +[alex-gulyas](https://github.com/alex-gulyas) |[cite-reader](https://github.com/cite-reader) |[alexbool](https://github.com/alexbool) |[AluisioASG](https://github.com/AluisioASG) |[BurntSushi](https://github.com/BurntSushi) |[AndrewGaspar](https://github.com/AndrewGaspar) | + +[nox](https://github.com/nox) |[mitsuhiko](https://github.com/mitsuhiko) |[pixelistik](https://github.com/pixelistik) |[ogham](https://github.com/ogham) |[Bilalh](https://github.com/Bilalh) |[dotdash](https://github.com/dotdash) | +:---: |:---: |:---: |:---: |:---: |:---: | +[nox](https://github.com/nox) |[mitsuhiko](https://github.com/mitsuhiko) |[pixelistik](https://github.com/pixelistik) |[ogham](https://github.com/ogham) |[Bilalh](https://github.com/Bilalh) |[dotdash](https://github.com/dotdash) | + +[bradurani](https://github.com/bradurani) |[Seeker14491](https://github.com/Seeker14491) |[brianp](https://github.com/brianp) |[cldershem](https://github.com/cldershem) |[casey](https://github.com/casey) |[volks73](https://github.com/volks73) | +:---: |:---: |:---: |:---: |:---: |:---: | +[bradurani](https://github.com/bradurani) |[Seeker14491](https://github.com/Seeker14491) |[brianp](https://github.com/brianp) |[cldershem](https://github.com/cldershem) |[casey](https://github.com/casey) |[volks73](https://github.com/volks73) | + +[daboross](https://github.com/daboross) |[da-x](https://github.com/da-x) |[mernen](https://github.com/mernen) |[dguo](https://github.com/dguo) |[davidszotten](https://github.com/davidszotten) |[drusellers](https://github.com/drusellers) | +:---: |:---: |:---: |:---: |:---: |:---: | +[daboross](https://github.com/daboross) |[da-x](https://github.com/da-x) |[mernen](https://github.com/mernen) |[dguo](https://github.com/dguo) |[davidszotten](https://github.com/davidszotten) |[drusellers](https://github.com/drusellers) | + +[eddyb](https://github.com/eddyb) |[Enet4](https://github.com/Enet4) |[Fraser999](https://github.com/Fraser999) |[birkenfeld](https://github.com/birkenfeld) |[guanqun](https://github.com/guanqun) |[tanakh](https://github.com/tanakh) | +:---: |:---: |:---: |:---: |:---: |:---: | +[eddyb](https://github.com/eddyb) |[Enet4](https://github.com/Enet4) |[Fraser999](https://github.com/Fraser999) |[birkenfeld](https://github.com/birkenfeld) |[guanqun](https://github.com/guanqun) |[tanakh](https://github.com/tanakh) | + +[SirVer](https://github.com/SirVer) |[idmit](https://github.com/idmit) |[archer884](https://github.com/archer884) |[jacobmischka](https://github.com/jacobmischka) |[jespino](https://github.com/jespino) |[jfrankenau](https://github.com/jfrankenau) | +:---: |:---: |:---: |:---: |:---: |:---: | +[SirVer](https://github.com/SirVer) |[idmit](https://github.com/idmit) |[archer884](https://github.com/archer884) 
|[jacobmischka](https://github.com/jacobmischka) |[jespino](https://github.com/jespino) |[jfrankenau](https://github.com/jfrankenau) | + +[jtdowney](https://github.com/jtdowney) |[andete](https://github.com/andete) |[joshtriplett](https://github.com/joshtriplett) |[Kalwyn](https://github.com/Kalwyn) |[manuel-rhdt](https://github.com/manuel-rhdt) |[Marwes](https://github.com/Marwes) | +:---: |:---: |:---: |:---: |:---: |:---: | +[jtdowney](https://github.com/jtdowney) |[andete](https://github.com/andete) |[joshtriplett](https://github.com/joshtriplett) |[Kalwyn](https://github.com/Kalwyn) |[manuel-rhdt](https://github.com/manuel-rhdt) |[Marwes](https://github.com/Marwes) | + +[mdaffin](https://github.com/mdaffin) |[iliekturtles](https://github.com/iliekturtles) |[nicompte](https://github.com/nicompte) |[NickeZ](https://github.com/NickeZ) |[nvzqz](https://github.com/nvzqz) |[nuew](https://github.com/nuew) | +:---: |:---: |:---: |:---: |:---: |:---: | +[mdaffin](https://github.com/mdaffin) |[iliekturtles](https://github.com/iliekturtles) |[nicompte](https://github.com/nicompte) |[NickeZ](https://github.com/NickeZ) |[nvzqz](https://github.com/nvzqz) |[nuew](https://github.com/nuew) | + +[Geogi](https://github.com/Geogi) |[focusaurus](https://github.com/focusaurus) |[flying-sheep](https://github.com/flying-sheep) |[Phlosioneer](https://github.com/Phlosioneer) |[peppsac](https://github.com/peppsac) |[golddranks](https://github.com/golddranks) | +:---: |:---: |:---: |:---: |:---: |:---: | +[Geogi](https://github.com/Geogi) |[focusaurus](https://github.com/focusaurus) |[flying-sheep](https://github.com/flying-sheep) |[Phlosioneer](https://github.com/Phlosioneer) |[peppsac](https://github.com/peppsac) |[golddranks](https://github.com/golddranks) | + +[hexjelly](https://github.com/hexjelly) |[rom1v](https://github.com/rom1v) |[rnelson](https://github.com/rnelson) |[swatteau](https://github.com/swatteau) |[tchajed](https://github.com/tchajed) |[tspiteri](https://github.com/tspiteri) | +:---: |:---: |:---: |:---: |:---: |:---: | +[hexjelly](https://github.com/hexjelly) |[rom1v](https://github.com/rom1v) |[rnelson](https://github.com/rnelson) |[swatteau](https://github.com/swatteau) |[tchajed](https://github.com/tchajed) |[tspiteri](https://github.com/tspiteri) | + +[siiptuo](https://github.com/siiptuo) |[vks](https://github.com/vks) |[vsupalov](https://github.com/vsupalov) |[mineo](https://github.com/mineo) |[wabain](https://github.com/wabain) |[grossws](https://github.com/grossws) | +:---: |:---: |:---: |:---: |:---: |:---: | +[siiptuo](https://github.com/siiptuo) |[vks](https://github.com/vks) |[vsupalov](https://github.com/vsupalov) |[mineo](https://github.com/mineo) |[wabain](https://github.com/wabain) |[grossws](https://github.com/grossws) | + +[kennytm](https://github.com/kennytm) |[king6cong](https://github.com/king6cong) |[mvaude](https://github.com/mvaude) |[panicbit](https://github.com/panicbit) |[brennie](https://github.com/brennie) | +:---: |:---: |:---: |:---: |:---: | +[kennytm](https://github.com/kennytm) |[king6cong](https://github.com/king6cong) |[mvaude](https://github.com/mvaude) |[panicbit](https://github.com/panicbit) |[brennie](https://github.com/brennie) | + + + + +This list was generated by [mgechev/github-contributors-list](https://github.com/mgechev/github-contributors-list) diff --git a/clap/Cargo.toml b/clap/Cargo.toml new file mode 100644 index 000000000..52715f19f --- /dev/null +++ b/clap/Cargo.toml @@ -0,0 +1,135 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# 
When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "clap" +version = "2.32.0" +authors = ["Kevin K. "] +exclude = ["examples/*", "clap-test/*", "tests/*", "benches/*", "*.png", "clap-perf/*", "*.dot"] +description = "A simple to use, efficient, and full featured Command Line Argument Parser\n" +homepage = "https://clap.rs/" +documentation = "https://docs.rs/clap/" +readme = "README.md" +keywords = ["argument", "cli", "arg", "parser", "parse"] +categories = ["command-line-interface"] +license = "MIT" +repository = "https://github.com/kbknapp/clap-rs" +[package.metadata.docs.rs] +features = ["doc"] +[profile.test] +opt-level = 1 +lto = false +codegen-units = 4 +debug = true +debug-assertions = true +rpath = false + +[profile.doc] +opt-level = 0 +lto = false +codegen-units = 4 +debug = true +debug-assertions = true +rpath = false + +[profile.bench] +opt-level = 3 +lto = true +codegen-units = 1 +debug = false +debug-assertions = false +rpath = false + +[profile.dev] +opt-level = 0 +lto = false +codegen-units = 4 +debug = true +debug-assertions = true +rpath = false + +[profile.release] +opt-level = 3 +lto = true +codegen-units = 1 +debug = false +debug-assertions = false +rpath = false +[dependencies.atty] +version = "0.2.2" +optional = true + +[dependencies.bitflags] +version = "1.0" + +[dependencies.strsim] +version = "0.7.0" +optional = true + +[dependencies.term_size] +version = "0.3.0" +optional = true + +[dependencies.textwrap] +version = "0.10.0" + +[dependencies.unicode-width] +version = "0.1.4" + +[dependencies.vec_map] +version = "0.8" +optional = true + +[dependencies.yaml-rust] +version = "0.3.5" +optional = true +[dev-dependencies.lazy_static] +version = "1" + +[dev-dependencies.regex] +version = "1" + +[dev-dependencies.version-sync] +version = "0.5" + +[features] +color = ["ansi_term", "atty"] +debug = [] +default = ["suggestions", "color", "vec_map"] +doc = ["yaml"] +nightly = [] +no_cargo = [] +suggestions = ["strsim"] +unstable = [] +wrap_help = ["term_size", "textwrap/term_size"] +yaml = ["yaml-rust"] +[target."cfg(not(windows))".dependencies.ansi_term] +version = "0.11" +optional = true +[badges.appveyor] +repository = "kbknapp/clap-rs" + +[badges.coveralls] +branch = "master" +repository = "kbknapp/clap-rs" + +[badges.is-it-maintained-issue-resolution] +repository = "kbknapp/clap-rs" + +[badges.is-it-maintained-open-issues] +repository = "kbknapp/clap-rs" + +[badges.maintenance] +status = "actively-developed" + +[badges.travis-ci] +repository = "kbknapp/clap-rs" diff --git a/clap/LICENSE-MIT b/clap/LICENSE-MIT new file mode 100644 index 000000000..5acedf041 --- /dev/null +++ b/clap/LICENSE-MIT @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015-2016 Kevin B. 
Knapp + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/clap/README.md b/clap/README.md new file mode 100644 index 000000000..a9305db39 --- /dev/null +++ b/clap/README.md @@ -0,0 +1,575 @@ +clap +==== + +[![Crates.io](https://img.shields.io/crates/v/clap.svg)](https://crates.io/crates/clap) [![Crates.io](https://img.shields.io/crates/d/clap.svg)](https://crates.io/crates/clap) [![license](http://img.shields.io/badge/license-MIT-blue.svg)](https://github.com/kbknapp/clap-rs/blob/master/LICENSE-MIT) [![Coverage Status](https://coveralls.io/repos/kbknapp/clap-rs/badge.svg?branch=master&service=github)](https://coveralls.io/github/kbknapp/clap-rs?branch=master) [![Join the chat at https://gitter.im/kbknapp/clap-rs](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/kbknapp/clap-rs?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) + +Linux: [![Build Status](https://travis-ci.org/kbknapp/clap-rs.svg?branch=master)](https://travis-ci.org/kbknapp/clap-rs) +Windows: [![Build status](https://ci.appveyor.com/api/projects/status/ejg8c33dn31nhv36/branch/master?svg=true)](https://ci.appveyor.com/project/kbknapp/clap-rs/branch/master) + +Command Line Argument Parser for Rust + +It is a simple-to-use, efficient, and full-featured library for parsing command line arguments and subcommands when writing console/terminal applications. + +* [documentation](https://docs.rs/clap/) +* [website](https://clap.rs/) +* [video tutorials](https://www.youtube.com/playlist?list=PLza5oFLQGTl2Z5T8g1pRkIynR3E0_pc7U) + +Table of Contents +================= + +* [What's New](#whats-new) +* [About](#about) +* [FAQ](#faq) +* [Features](#features) +* [Quick Example](#quick-example) +* [Try it!](#try-it) + * [Pre-Built Test](#pre-built-test) + * [BYOB (Build Your Own Binary)](#byob-build-your-own-binary) +* [Usage](#usage) + * [Optional Dependencies / Features](#optional-dependencies--features) + * [Dependencies Tree](#dependencies-tree) + * [More Information](#more-information) + * [Video Tutorials](#video-tutorials) +* [How to Contribute](#how-to-contribute) + * [Compatibility Policy](#compatibility-policy) + * [Minimum Version of Rust](#minimum-version-of-rust) +* [Related Crates](#related-crates) +* [License](#license) +* [Recent Breaking Changes](#recent-breaking-changes) + * [Deprecations](#deprecations) + +Created by [gh-md-toc](https://github.com/ekalinin/github-markdown-toc) + +## What's New + +Here's whats new in 2.32.0: + +* `clap` requires `rustc 1.21.0` or greater. 
+* **Completions:** adds completion support for Elvish. ([e9d0562a](https://github.com/kbknapp/clap-rs/commit/e9d0562a1dc5dfe731ed7c767e6cee0af08f0cf9)) +* **ArgGroup and macros:** Support shorthand syntax for ArgGroups ([df9095e7](https://github.com/kbknapp/clap-rs/commit/df9095e75bb1e7896415251d0d4ffd8a0ebcd559)) +* **OsValues:** Add `ExactSizeIterator` implementation ([356c69e5](https://github.com/kbknapp/clap-rs/commit/356c69e508fd25a9f0ea2d27bf80ae1d9a8d88f4)) +* **arg_enum!:** Invalid expansions of some trailing-comma patterns ([7023184f](https://github.com/kbknapp/clap-rs/commit/7023184fca04e852c270341548d6a16207d13862)) +* **help messages:** Unconditionally uses long description for subcommands ([6acc8b6a](https://github.com/kbknapp/clap-rs/commit/6acc8b6a621a765cbf513450188000d943676a30), closes [#897](https://github.com/kbknapp/clap-rs/issues/897)) +* **Docs:** Refer to macOS rather than OSX. ([ab0d767f](https://github.com/kbknapp/clap-rs/commit/ab0d767f3a5a57e2bbb97d0183c2ef63c8c77a6c)) + +Here's whats new in 2.31.x: + +* **Fish Completions:** fixes a bug that only allowed a single completion in in Fish Shell +* **AllowExternalSubcommands**: fixes a bug where external subcommands would be blocked by a similarly named subcommand +* Fixes some typos in the `README.md` +* **AllowMissingPositional:** improves the ability of `AppSetting::AllowMissingPositional` to allow "skipping" to the last positional arg with the `--` operator +* **Arg Indices:** adds the ability to query argument value indices + * implements an `Indices` iterator + * adds the documentation for the arg index querying methods +* **Improves PowerShell completions** - Uses the short help tool-tip for PowerShell completion scripts +* Adds WASM support (clap now compiles on WASM!) +* **Raw Args** adds a convenience function to `Arg` that allows implying all of `Arg::last` `Arg::allow_hyphen_values` and `Arg::multiple(true)` +* **CONTRIBUTING.md:** fix url to clippy upstream repo +* **Values Documentation:** improves the docs example of the Values iterator +* Updates README.md to hint that the `wrap_help` feature is a thing +* Use `codegen-units = 1` in release and bench profiles to improve bench performance +* Fix some typos and markdown issues in the docs + +For full details, see [CHANGELOG.md](https://github.com/kbknapp/clap-rs/blob/master/CHANGELOG.md) + +## About + +`clap` is used to parse *and validate* the string of command line arguments provided by a user at runtime. You provide the list of valid possibilities, and `clap` handles the rest. This means you focus on your *applications* functionality, and less on the parsing and validating of arguments. + +`clap` provides many things 'for free' (with no configuration) including the traditional version and help switches (or flags) along with associated messages. If you are using subcommands, `clap` will also auto-generate a `help` subcommand and separate associated help messages. + +Once `clap` parses the user provided string of arguments, it returns the matches along with any applicable values. If the user made an error or typo, `clap` informs them with a friendly message and exits gracefully (or returns a `Result` type and allows you to perform any clean up prior to exit). Because of this, you can make reasonable assumptions in your code about the validity of the arguments prior to your applications main execution. 
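+
+The sketch below is not part of the upstream README; it illustrates the last point of the paragraph above using the `clap` 2.x API (the program and argument names are only examples): calling the fallible `get_matches_safe()` instead of `get_matches()` lets the application run its own clean-up before the friendly error message is printed and the process exits.
+
+```rust
+extern crate clap;
+use clap::{App, Arg};
+
+fn main() {
+    let result = App::new("myprog")
+        .arg(Arg::with_name("config")
+            .short("c")
+            .long("config")
+            .takes_value(true)
+            .help("Sets a custom config file"))
+        .get_matches_safe();
+
+    match result {
+        // Parsing succeeded; the values have already been validated by clap.
+        Ok(matches) => println!("config = {:?}", matches.value_of("config")),
+        // Parsing failed; perform any clean-up here, then let clap print its
+        // friendly message and exit with the appropriate status code.
+        Err(e) => e.exit(),
+    }
+}
+```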
+ +## FAQ + +For a full FAQ and more in depth details, see [the wiki page](https://github.com/kbknapp/clap-rs/wiki/FAQ) + +### Comparisons + +First, let me say that these comparisons are highly subjective, and not meant in a critical or harsh manner. All the argument parsing libraries out there (to include `clap`) have their own strengths and weaknesses. Sometimes it just comes down to personal taste when all other factors are equal. When in doubt, try them all and pick one that you enjoy :) There's plenty of room in the Rust community for multiple implementations! + +#### How does `clap` compare to [getopts](https://github.com/rust-lang-nursery/getopts)? + +`getopts` is a very basic, fairly minimalist argument parsing library. This isn't a bad thing, sometimes you don't need tons of features, you just want to parse some simple arguments, and have some help text generated for you based on valid arguments you specify. The downside to this approach is that you must manually implement most of the common features (such as checking to display help messages, usage strings, etc.). If you want a highly custom argument parser, and don't mind writing the majority of the functionality yourself, `getopts` is an excellent base. + +`getopts` also doesn't allocate much, or at all. This gives it a very small performance boost. Although, as you start implementing additional features, that boost quickly disappears. + +Personally, I find many, many uses of `getopts` are manually implementing features that `clap` provides by default. Using `clap` simplifies your codebase allowing you to focus on your application, and not argument parsing. + +#### How does `clap` compare to [docopt.rs](https://github.com/docopt/docopt.rs)? + +I first want to say I'm a big a fan of BurntSushi's work, the creator of `Docopt.rs`. I aspire to produce the quality of libraries that this man does! When it comes to comparing these two libraries they are very different. `docopt` tasks you with writing a help message, and then it parsers that message for you to determine all valid arguments and their use. Some people LOVE this approach, others do not. If you're willing to write a detailed help message, it's nice that you can stick that in your program and have `docopt` do the rest. On the downside, it's far less flexible. + +`docopt` is also excellent at translating arguments into Rust types automatically. There is even a syntax extension which will do all this for you, if you're willing to use a nightly compiler (use of a stable compiler requires you to somewhat manually translate from arguments to Rust types). To use BurntSushi's words, `docopt` is also a sort of black box. You get what you get, and it's hard to tweak implementation or customize the experience for your use case. + +Because `docopt` is doing a ton of work to parse your help messages and determine what you were trying to communicate as valid arguments, it's also one of the more heavy weight parsers performance-wise. For most applications this isn't a concern and this isn't to say `docopt` is slow, in fact far from it. This is just something to keep in mind while comparing. + +#### All else being equal, what are some reasons to use `clap`? (The Pitch) + +`clap` is as fast, and as lightweight as possible while still giving all the features you'd expect from a modern argument parser. In fact, for the amount and type of features `clap` offers it remains about as fast as `getopts`. 
If you use `clap` when just need some simple arguments parsed, you'll find it's a walk in the park. `clap` also makes it possible to represent extremely complex, and advanced requirements, without too much thought. `clap` aims to be intuitive, easy to use, and fully capable for wide variety use cases and needs. + +#### All else being equal, what are some reasons *not* to use `clap`? (The Anti Pitch) + +Depending on the style in which you choose to define the valid arguments, `clap` can be very verbose. `clap` also offers so many finetuning knobs and dials, that learning everything can seem overwhelming. I strive to keep the simple cases simple, but when turning all those custom dials it can get complex. `clap` is also opinionated about parsing. Even though so much can be tweaked and tuned with `clap` (and I'm adding more all the time), there are still certain features which `clap` implements in specific ways which may be contrary to some users use-cases. Finally, `clap` is "stringly typed" when referring to arguments which can cause typos in code. This particular paper-cut is being actively worked on, and should be gone in v3.x. + +## Features + +Below are a few of the features which `clap` supports, full descriptions and usage can be found in the [documentation](https://docs.rs/clap/) and [examples/](examples) directory + +* **Auto-generated Help, Version, and Usage information** + - Can optionally be fully, or partially overridden if you want a custom help, version, or usage statements +* **Auto-generated completion scripts at compile time (Bash, Zsh, Fish, and PowerShell)** + - Even works through many multiple levels of subcommands + - Works with options which only accept certain values + - Works with subcommand aliases +* **Flags / Switches** (i.e. bool fields) + - Both short and long versions supported (i.e. `-f` and `--flag` respectively) + - Supports combining short versions (i.e. `-fBgoZ` is the same as `-f -B -g -o -Z`) + - Supports multiple occurrences (i.e. `-vvv` or `-v -v -v`) +* **Positional Arguments** (i.e. those which are based off an index from the program name) + - Supports multiple values (i.e. `myprog ...` such as `myprog file1.txt file2.txt` being two values for the same "file" argument) + - Supports Specific Value Sets (See below) + - Can set value parameters (such as the minimum number of values, the maximum number of values, or the exact number of values) + - Can set custom validations on values to extend the argument parsing capability to truly custom domains +* **Option Arguments** (i.e. those that take values) + - Both short and long versions supported (i.e. `-o value`, `-ovalue`, `-o=value` and `--option value` or `--option=value` respectively) + - Supports multiple values (i.e. `-o -o ` or `-o `) + - Supports delimited values (i.e. `-o=val1,val2,val3`, can also change the delimiter) + - Supports Specific Value Sets (See below) + - Supports named values so that the usage/help info appears as `-o ` etc. for when you require specific multiple values + - Can set value parameters (such as the minimum number of values, the maximum number of values, or the exact number of values) + - Can set custom validations on values to extend the argument parsing capability to truly custom domains +* **Sub-Commands** (i.e. 
`git add ` where `add` is a sub-command of `git`) + - Support their own sub-arguments, and sub-sub-commands independent of the parent + - Get their own auto-generated Help, Version, and Usage independent of parent +* **Support for building CLIs from YAML** - This keeps your Rust source nice and tidy and makes supporting localized translation very simple! +* **Requirement Rules**: Arguments can define the following types of requirement rules + - Can be required by default + - Can be required only if certain arguments are present + - Can require other arguments to be present + - Can be required only if certain values of other arguments are used +* **Confliction Rules**: Arguments can optionally define the following types of exclusion rules + - Can be disallowed when certain arguments are present + - Can disallow use of other arguments when present +* **Groups**: Arguments can be made part of a group + - Fully compatible with other relational rules (requirements, conflicts, and overrides) which allows things like requiring the use of any arg in a group, or denying the use of an entire group conditionally +* **Specific Value Sets**: Positional or Option Arguments can define a specific set of allowed values (i.e. imagine a `--mode` option which may *only* have one of two values `fast` or `slow` such as `--mode fast` or `--mode slow`) +* **Default Values** + - Also supports conditional default values (i.e. a default which only applies if specific arguments are used, or specific values of those arguments) +* **Automatic Version from Cargo.toml**: `clap` is fully compatible with Rust's `env!()` macro for automatically setting the version of your application to the version in your Cargo.toml. See [09_auto_version example](examples/09_auto_version.rs) for how to do this (Thanks to [jhelwig](https://github.com/jhelwig) for pointing this out) +* **Typed Values**: You can use several convenience macros provided by `clap` to get typed values (i.e. `i32`, `u8`, etc.) from positional or option arguments so long as the type you request implements `std::str::FromStr` See the [12_typed_values example](examples/12_typed_values.rs). You can also use `clap`s `arg_enum!` macro to create an enum with variants that automatically implement `std::str::FromStr`. See [13a_enum_values_automatic example](examples/13a_enum_values_automatic.rs) for details +* **Suggestions**: Suggests corrections when the user enters a typo. For example, if you defined a `--myoption` argument, and the user mistakenly typed `--moyption` (notice `y` and `o` transposed), they would receive a `Did you mean '--myoption'?` error and exit gracefully. This also works for subcommands and flags. (Thanks to [Byron](https://github.com/Byron) for the implementation) (This feature can optionally be disabled, see 'Optional Dependencies / Features') +* **Colorized Errors (Non Windows OS only)**: Error message are printed in in colored text (this feature can optionally be disabled, see 'Optional Dependencies / Features'). +* **Global Arguments**: Arguments can optionally be defined once, and be available to all child subcommands. There values will also be propagated up/down throughout all subcommands. +* **Custom Validations**: You can define a function to use as a validator of argument values. Imagine defining a function to validate IP addresses, or fail parsing upon error. This means your application logic can be solely focused on *using* values. 
+* **POSIX Compatible Conflicts/Overrides** - In POSIX args can be conflicting, but not fail parsing because whichever arg comes *last* "wins" so to speak. This allows things such as aliases (i.e. `alias ls='ls -l'` but then using `ls -C` in your terminal which ends up passing `ls -l -C` as the final arguments. Since `-l` and `-C` aren't compatible, this effectively runs `ls -C` in `clap` if you choose...`clap` also supports hard conflicts that fail parsing). (Thanks to [Vinatorul](https://github.com/Vinatorul)!) +* Supports the Unix `--` meaning, only positional arguments follow + +## Quick Example + +The following examples show a quick example of some of the very basic functionality of `clap`. For more advanced usage, such as requirements, conflicts, groups, multiple values and occurrences see the [documentation](https://docs.rs/clap/), [examples/](examples) directory of this repository or the [video tutorials](https://www.youtube.com/playlist?list=PLza5oFLQGTl2Z5T8g1pRkIynR3E0_pc7U). + + **NOTE:** All of these examples are functionally the same, but show different styles in which to use `clap`. These different styles are purely a matter of personal preference. + +The first example shows a method using the 'Builder Pattern' which allows more advanced configuration options (not shown in this small example), or even dynamically generating arguments when desired. + +```rust +// (Full example with detailed comments in examples/01b_quick_example.rs) +// +// This example demonstrates clap's full 'builder pattern' style of creating arguments which is +// more verbose, but allows easier editing, and at times more advanced options, or the possibility +// to generate arguments dynamically. +extern crate clap; +use clap::{Arg, App, SubCommand}; + +fn main() { + let matches = App::new("My Super Program") + .version("1.0") + .author("Kevin K. ") + .about("Does awesome things") + .arg(Arg::with_name("config") + .short("c") + .long("config") + .value_name("FILE") + .help("Sets a custom config file") + .takes_value(true)) + .arg(Arg::with_name("INPUT") + .help("Sets the input file to use") + .required(true) + .index(1)) + .arg(Arg::with_name("v") + .short("v") + .multiple(true) + .help("Sets the level of verbosity")) + .subcommand(SubCommand::with_name("test") + .about("controls testing features") + .version("1.3") + .author("Someone E. ") + .arg(Arg::with_name("debug") + .short("d") + .help("print debug information verbosely"))) + .get_matches(); + + // Gets a value for config if supplied by user, or defaults to "default.conf" + let config = matches.value_of("config").unwrap_or("default.conf"); + println!("Value for config: {}", config); + + // Calling .unwrap() is safe here because "INPUT" is required (if "INPUT" wasn't + // required we could have used an 'if let' to conditionally get the value) + println!("Using input file: {}", matches.value_of("INPUT").unwrap()); + + // Vary the output based on how many times the user used the "verbose" flag + // (i.e. 
'myprog -v -v -v' or 'myprog -vvv' vs 'myprog -v' + match matches.occurrences_of("v") { + 0 => println!("No verbose info"), + 1 => println!("Some verbose info"), + 2 => println!("Tons of verbose info"), + 3 | _ => println!("Don't be crazy"), + } + + // You can handle information about subcommands by requesting their matches by name + // (as below), requesting just the name used, or both at the same time + if let Some(matches) = matches.subcommand_matches("test") { + if matches.is_present("debug") { + println!("Printing debug info..."); + } else { + println!("Printing normally..."); + } + } + + // more program logic goes here... +} +``` + +One could also optionally decleare their CLI in YAML format and keep your Rust source tidy +or support multiple localized translations by having different YAML files for each localization. + +First, create the `cli.yml` file to hold your CLI options, but it could be called anything we like: + +```yaml +name: myapp +version: "1.0" +author: Kevin K. +about: Does awesome things +args: + - config: + short: c + long: config + value_name: FILE + help: Sets a custom config file + takes_value: true + - INPUT: + help: Sets the input file to use + required: true + index: 1 + - verbose: + short: v + multiple: true + help: Sets the level of verbosity +subcommands: + - test: + about: controls testing features + version: "1.3" + author: Someone E. + args: + - debug: + short: d + help: print debug information +``` + +Since this feature requires additional dependencies that not everyone may want, it is *not* compiled in by default and we need to enable a feature flag in Cargo.toml: + +Simply change your `clap = "2.32"` to `clap = {version = "2.32", features = ["yaml"]}`. + +Finally we create our `main.rs` file just like we would have with the previous two examples: + +```rust +// (Full example with detailed comments in examples/17_yaml.rs) +// +// This example demonstrates clap's building from YAML style of creating arguments which is far +// more clean, but takes a very small performance hit compared to the other two methods. +#[macro_use] +extern crate clap; +use clap::App; + +fn main() { + // The YAML file is found relative to the current file, similar to how modules are found + let yaml = load_yaml!("cli.yml"); + let matches = App::from_yaml(yaml).get_matches(); + + // Same as previous examples... +} +``` + +If you were to compile any of the above programs and run them with the flag `--help` or `-h` (or `help` subcommand, since we defined `test` as a subcommand) the following would be output + +```sh +$ myprog --help +My Super Program 1.0 +Kevin K. +Does awesome things + +USAGE: + MyApp [FLAGS] [OPTIONS] [SUBCOMMAND] + +FLAGS: + -h, --help Prints help information + -v Sets the level of verbosity + -V, --version Prints version information + +OPTIONS: + -c, --config Sets a custom config file + +ARGS: + INPUT The input file to use + +SUBCOMMANDS: + help Prints this message or the help of the given subcommand(s) + test Controls testing features +``` + +**NOTE:** You could also run `myapp test --help` or `myapp help test` to see the help message for the `test` subcommand. + +There are also two other methods to create CLIs. Which style you choose is largely a matter of personal preference. The two other methods are: + +* Using [usage strings (examples/01a_quick_example.rs)](examples/01a_quick_example.rs) similar to (but not exact) docopt style usage statements. This is far less verbose than the above methods, but incurs a slight runtime penalty. 
+* Using [a macro (examples/01c_quick_example.rs)](examples/01c_quick_example.rs) which is like a hybrid of the builder and usage string style. It's less verbose, but doesn't incur the runtime penalty of the usage string style. The downside is that it's harder to debug, and more opaque. + +Examples of each method can be found in the [examples/](examples) directory of this repository. + +## Try it! + +### Pre-Built Test + +To try out the pre-built examples, use the following steps: + +* Clone the repository `$ git clone https://github.com/kbknapp/clap-rs && cd clap-rs/` +* Compile the example `$ cargo build --example ` +* Run the help info `$ ./target/debug/examples/ --help` +* Play with the arguments! +* You can also do a onetime run via `$ cargo run --example -- [args to example] + +### BYOB (Build Your Own Binary) + +To test out `clap`'s default auto-generated help/version follow these steps: +* Create a new cargo project `$ cargo new fake --bin && cd fake` +* Add `clap` to your `Cargo.toml` +* +```toml +[dependencies] +clap = "2" +``` + +* Add the following to your `src/main.rs` + +```rust +extern crate clap; +use clap::App; + +fn main() { + App::new("fake").version("v1.0-beta").get_matches(); +} +``` + +* Build your program `$ cargo build --release` +* Run with help or version `$ ./target/release/fake --help` or `$ ./target/release/fake --version` + +## Usage + +For full usage, add `clap` as a dependency in your `Cargo.toml` () to use from crates.io: + +```toml +[dependencies] +clap = "~2.32" +``` + +(**note**: If you are concerned with supporting a minimum version of Rust that is *older* than the current stable Rust minus 2 stable releases, it's recommended to use the `~major.minor.patch` style versions in your `Cargo.toml` which will only update the patch version automatically. For more information see the [Compatibility Policy](#compatibility-policy)) + +Then add `extern crate clap;` to your crate root. + +Define a list of valid arguments for your program (see the [documentation](https://docs.rs/clap/) or [examples/](examples) directory of this repo) + +Then run `cargo build` or `cargo update && cargo build` for your project. + +### Optional Dependencies / Features + +#### Features enabled by default + +* **"suggestions"**: Turns on the `Did you mean '--myoption'?` feature for when users make typos. (builds dependency `strsim`) +* **"color"**: Turns on colored error messages. This feature only works on non-Windows OSs. (builds dependency `ansi-term` only on non-Windows targets) +* **"vec_map"**: Use [`VecMap`](https://crates.io/crates/vec_map) internally instead of a [`BTreeMap`](https://doc.rust-lang.org/stable/std/collections/struct.BTreeMap.html). This feature provides a _slight_ performance improvement. (builds dependency `vec_map`) + +To disable these, add this to your `Cargo.toml`: + +```toml +[dependencies.clap] +version = "2.32" +default-features = false +``` + +You can also selectively enable only the features you'd like to include, by adding: + +```toml +[dependencies.clap] +version = "2.32" +default-features = false + +# Cherry-pick the features you'd like to use +features = [ "suggestions", "color" ] +``` + +#### Opt-in features + +* **"yaml"**: Enables building CLIs from YAML documents. (builds dependency `yaml-rust`) +* **"unstable"**: Enables unstable `clap` features that may change from release to release +* **"wrap_help"**: Turns on the help text wrapping feature, based on the terminal size. 
(builds dependency `term-size`) + +### Dependencies Tree + +The following graphic depicts `clap`s dependency graph (generated using [cargo-graph](https://github.com/kbknapp/cargo-graph)). + + * **Dashed** Line: Optional dependency + * **Red** Color: **NOT** included by default (must use cargo `features` to enable) + * **Blue** Color: Dev dependency, only used while developing. + +![clap dependencies](clap_dep_graph.png) + +### More Information + +You can find complete documentation on the [docs.rs](https://docs.rs/clap/) for this project. + +You can also find usage examples in the [examples/](examples) directory of this repo. + +#### Video Tutorials + +There's also the video tutorial series [Argument Parsing with Rust v2](https://www.youtube.com/playlist?list=PLza5oFLQGTl2Z5T8g1pRkIynR3E0_pc7U). + +These videos slowly trickle out as I finish them and currently a work in progress. + +## How to Contribute + +Details on how to contribute can be found in the [CONTRIBUTING.md](.github/CONTRIBUTING.md) file. + +### Compatibility Policy + +Because `clap` takes SemVer and compatibility seriously, this is the official policy regarding breaking changes and minimum required versions of Rust. + +`clap` will pin the minimum required version of Rust to the CI builds. Bumping the minimum version of Rust is considered a minor breaking change, meaning *at a minimum* the minor version of `clap` will be bumped. + +In order to keep from being surprised of breaking changes, it is **highly** recommended to use the `~major.minor.patch` style in your `Cargo.toml` only if you wish to target a version of Rust that is *older* than current stable minus two releases: + +```toml +[dependencies] +clap = "~2.32" +``` + +This will cause *only* the patch version to be updated upon a `cargo update` call, and therefore cannot break due to new features, or bumped minimum versions of Rust. + +#### Warning about '~' Dependencies + +Using `~` can cause issues in certain circumstances. + +From @alexcrichton: + +Right now Cargo's version resolution is pretty naive, it's just a brute-force search of the solution space, returning the first resolvable graph. This also means that it currently won't terminate until it proves there is not possible resolvable graph. This leads to situations where workspaces with multiple binaries, for example, have two different dependencies such as: + +```toml,no_sync + +# In one Cargo.toml +[dependencies] +clap = "~2.32.0" + +# In another Cargo.toml +[dependencies] +clap = "2.31.0" +``` + +This is inherently an unresolvable crate graph in Cargo right now. Cargo requires there's only one major version of a crate, and being in the same workspace these two crates must share a version. This is impossible in this location, though, as these version constraints cannot be met. + +#### Minimum Version of Rust + +`clap` will officially support current stable Rust, minus two releases, but may work with prior releases as well. For example, current stable Rust at the time of this writing is 1.21.0, meaning `clap` is guaranteed to compile with 1.19.0 and beyond. + +At the 1.22.0 stable release, `clap` will be guaranteed to compile with 1.20.0 and beyond, etc. 
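+
+Because minor releases of `clap` may raise the minimum supported Rust version as described above, the tilde requirement from the Compatibility Policy is what shields users who target an older compiler. As a concrete sketch (not from the upstream README; the comments paraphrase Cargo's default caret and tilde semantics):
+
+```toml
+[dependencies]
+# Caret-style (the default): allows any semver-compatible 2.x release, so a
+# `cargo update` may pull in a newer minor release with a higher minimum
+# supported Rust version.
+clap = "2.32"
+
+# Tilde-style: allows only patch releases (2.32.x), so the minimum supported
+# Rust version cannot change out from under you.
+# clap = "~2.32"
+```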
+ +Upon bumping the minimum version of Rust (assuming it's within the stable-2 range), it *must* be clearly annotated in the `CHANGELOG.md` + +#### Breaking Changes + +`clap` takes a similar policy to Rust and will bump the major version number upon breaking changes with only the following exceptions: + + * The breaking change is to fix a security concern + * The breaking change is to be fixing a bug (i.e. relying on a bug as a feature) + * The breaking change is a feature isn't used in the wild, or all users of said feature have given approval *prior* to the change + +#### Compatibility with Wasm + +A best effort is made to ensure that `clap` will work on projects targeting `wasm32-unknown-unknown`. However there is no dedicated CI build +covering this specific target. + +## License + +`clap` is licensed under the MIT license. Please read the [LICENSE-MIT](LICENSE-MIT) file in this repository for more information. + +## Related Crates + +There are several excellent crates which can be used with `clap`, I recommend checking them all out! If you've got a crate that would be a good fit to be used with `clap` open an issue and let me know, I'd love to add it! + +* [`structopt`](https://github.com/TeXitoi/structopt) - This crate allows you to define a struct, and build a CLI from it! No more "stringly typed" and it uses `clap` behind the scenes! (*Note*: There is work underway to pull this crate into mainline `clap`). +* [`assert_cli`](https://github.com/killercup/assert_cli) - This crate allows you test your CLIs in a very intuitive and functional way! + +## Recent Breaking Changes + +`clap` follows semantic versioning, so breaking changes should only happen upon major version bumps. The only exception to this rule is breaking changes that happen due to implementation that was deemed to be a bug, security concerns, or it can be reasonably proved to affect no code. For the full details, see [CHANGELOG.md](./CHANGELOG.md). + +As of 2.27.0: + +* Argument values now take precedence over subcommand names. This only arises by using unrestrained multiple values and subcommands together where the subcommand name can coincide with one of the multiple values. Such as `$ prog ... `. The fix is to place restraints on number of values, or disallow the use of `$ prog ` structure. + +As of 2.0.0 (From 1.x) + +* **Fewer lifetimes! Yay!** + * `App<'a, 'b, 'c, 'd, 'e, 'f>` => `App<'a, 'b>` + * `Arg<'a, 'b, 'c, 'd, 'e, 'f>` => `Arg<'a, 'b>` + * `ArgMatches<'a, 'b>` => `ArgMatches<'a>` +* **Simply Renamed** + * `App::arg_group` => `App::group` + * `App::arg_groups` => `App::groups` + * `ArgGroup::add` => `ArgGroup::arg` + * `ArgGroup::add_all` => `ArgGroup::args` + * `ClapError` => `Error` + * struct field `ClapError::error_type` => `Error::kind` + * `ClapResult` => `Result` + * `ClapErrorType` => `ErrorKind` +* **Removed Deprecated Functions and Methods** + * `App::subcommands_negate_reqs` + * `App::subcommand_required` + * `App::arg_required_else_help` + * `App::global_version(bool)` + * `App::versionless_subcommands` + * `App::unified_help_messages` + * `App::wait_on_error` + * `App::subcommand_required_else_help` + * `SubCommand::new` + * `App::error_on_no_subcommand` + * `Arg::new` + * `Arg::mutually_excludes` + * `Arg::mutually_excludes_all` + * `Arg::mutually_overrides_with` + * `simple_enum!` +* **Renamed Error Variants** + * `InvalidUnicode` => `InvalidUtf8` + * `InvalidArgument` => `UnknownArgument` +* **Usage Parser** + * Value names can now be specified inline, i.e. 
`-o, --option 'some option which takes two files'` + * **There is now a priority of order to determine the name** - This is perhaps the biggest breaking change. See the documentation for full details. Prior to this change, the value name took precedence. **Ensure your args are using the proper names (i.e. typically the long or short and NOT the value name) throughout the code** +* `ArgMatches::values_of` returns an `Values` now which implements `Iterator` (should not break any code) +* `crate_version!` returns `&'static str` instead of `String` + +### Deprecations + +Old method names will be left around for several minor version bumps, or one major version bump. + +As of 2.27.0: + +* **AppSettings::PropagateGlobalValuesDown:** this setting deprecated and is no longer required to propagate values down or up diff --git a/clap/SPONSORS.md b/clap/SPONSORS.md new file mode 100644 index 000000000..d55b9ca5c --- /dev/null +++ b/clap/SPONSORS.md @@ -0,0 +1,10 @@ +The following is a list of [sponsors](https://clap.rs/sponsorship/) for the clap-rs project: + +[Noelia Seva-Gonzalez](https://noeliasg.com/about/) +Noelia Seva-Gonzalez + +[Rob Tsuk](https://github.com/rtsuk) +Rob Tsuk + +[messense](https://github.com/messense) +Messense diff --git a/clap/clap-test.rs b/clap/clap-test.rs new file mode 100644 index 000000000..7d57ac4f3 --- /dev/null +++ b/clap/clap-test.rs @@ -0,0 +1,86 @@ +#[allow(unused_imports, dead_code)] +mod test { + use std::str; + use std::io::{Cursor, Write}; + + use regex::Regex; + + use clap::{App, Arg, SubCommand, ArgGroup}; + + fn compare(l: S, r: S2) -> bool + where S: AsRef, + S2: AsRef + { + let re = Regex::new("\x1b[^m]*m").unwrap(); + // Strip out any mismatching \r character on windows that might sneak in on either side + let ls = l.as_ref().trim().replace("\r", ""); + let rs = r.as_ref().trim().replace("\r", ""); + let left = re.replace_all(&*ls, ""); + let right = re.replace_all(&*rs, ""); + let b = left == right; + if !b { + println!(); + println!("--> left"); + println!("{}", left); + println!("--> right"); + println!("{}", right); + println!("--") + } + b + } + + pub fn compare_output(l: App, args: &str, right: &str, stderr: bool) -> bool { + let mut buf = Cursor::new(Vec::with_capacity(50)); + let res = l.get_matches_from_safe(args.split(' ').collect::>()); + let err = res.unwrap_err(); + err.write_to(&mut buf).unwrap(); + let content = buf.into_inner(); + let left = String::from_utf8(content).unwrap(); + assert_eq!(stderr, err.use_stderr()); + compare(left, right) + } + pub fn compare_output2(l: App, args: &str, right1: &str, right2: &str, stderr: bool) -> bool { + let mut buf = Cursor::new(Vec::with_capacity(50)); + let res = l.get_matches_from_safe(args.split(' ').collect::>()); + let err = res.unwrap_err(); + err.write_to(&mut buf).unwrap(); + let content = buf.into_inner(); + let left = String::from_utf8(content).unwrap(); + assert_eq!(stderr, err.use_stderr()); + compare(&*left, right1) || compare(&*left, right2) + } + + // Legacy tests from the Python script days + + pub fn complex_app() -> App<'static, 'static> { + let args = "-o --option=[opt]... 'tests options' + [positional] 'tests positionals'"; + let opt3_vals = ["fast", "slow"]; + let pos3_vals = ["vi", "emacs"]; + App::new("clap-test") + .version("v1.4.8") + .about("tests clap library") + .author("Kevin K. ") + .args_from_usage(args) + .arg(Arg::from_usage("-f --flag... 
'tests flags'") + .global(true)) + .args(&[ + Arg::from_usage("[flag2] -F 'tests flags with exclusions'").conflicts_with("flag").requires("long-option-2"), + Arg::from_usage("--long-option-2 [option2] 'tests long options with exclusions'").conflicts_with("option").requires("positional2"), + Arg::from_usage("[positional2] 'tests positionals with exclusions'"), + Arg::from_usage("-O --Option [option3] 'specific vals'").possible_values(&opt3_vals), + Arg::from_usage("[positional3]... 'tests specific values'").possible_values(&pos3_vals), + Arg::from_usage("--multvals [one] [two] 'Tests multiple values, not mult occs'"), + Arg::from_usage("--multvalsmo... [one] [two] 'Tests multiple values, and mult occs'"), + Arg::from_usage("--minvals2 [minvals]... 'Tests 2 min vals'").min_values(2), + Arg::from_usage("--maxvals3 [maxvals]... 'Tests 3 max vals'").max_values(3) + ]) + .subcommand(SubCommand::with_name("subcmd") + .about("tests subcommands") + .version("0.1") + .author("Kevin K. ") + .arg_from_usage("-o --option [scoption]... 'tests options'") + .arg_from_usage("-s --subcmdarg [subcmdarg] 'tests other args'") + .arg_from_usage("[scpositional] 'tests positionals'")) + } +} diff --git a/clap/debian/patches/no-clippy.patch b/clap/debian/patches/no-clippy.patch new file mode 100644 index 000000000..2c50a12c2 --- /dev/null +++ b/clap/debian/patches/no-clippy.patch @@ -0,0 +1,21 @@ +--- a/Cargo.toml 2018-09-07 20:46:25.639402810 -0700 ++++ b/Cargo.toml 2018-09-07 20:46:37.055559250 -0700 +@@ -71,10 +71,6 @@ + [dependencies.bitflags] + version = "1.0" + +-[dependencies.clippy] +-version = "~0.0.166" +-optional = true +- + [dependencies.strsim] + version = "0.7.0" + optional = true +@@ -110,7 +106,6 @@ + debug = [] + default = ["suggestions", "color", "vec_map"] + doc = ["yaml"] +-lints = ["clippy"] + nightly = [] + no_cargo = [] + suggestions = ["strsim"] diff --git a/clap/debian/patches/no-clippy.patch~ b/clap/debian/patches/no-clippy.patch~ new file mode 100644 index 000000000..2c50a12c2 --- /dev/null +++ b/clap/debian/patches/no-clippy.patch~ @@ -0,0 +1,21 @@ +--- a/Cargo.toml 2018-09-07 20:46:25.639402810 -0700 ++++ b/Cargo.toml 2018-09-07 20:46:37.055559250 -0700 +@@ -71,10 +71,6 @@ + [dependencies.bitflags] + version = "1.0" + +-[dependencies.clippy] +-version = "~0.0.166" +-optional = true +- + [dependencies.strsim] + version = "0.7.0" + optional = true +@@ -110,7 +106,6 @@ + debug = [] + default = ["suggestions", "color", "vec_map"] + doc = ["yaml"] +-lints = ["clippy"] + nightly = [] + no_cargo = [] + suggestions = ["strsim"] diff --git a/clap/debian/patches/series b/clap/debian/patches/series new file mode 100644 index 000000000..bce906376 --- /dev/null +++ b/clap/debian/patches/series @@ -0,0 +1 @@ +no-clippy.patch diff --git a/clap/index.html b/clap/index.html new file mode 100644 index 000000000..600dff932 --- /dev/null +++ b/clap/index.html @@ -0,0 +1 @@ + diff --git a/clap/justfile b/clap/justfile new file mode 100644 index 000000000..e9385c6bf --- /dev/null +++ b/clap/justfile @@ -0,0 +1,39 @@ +@update-contributors: + echo 'Removing old CONTRIBUTORS.md' + mv CONTRIBUTORS.md CONTRIBUTORS.md.bak + echo 'Downloading a list of new contributors' + echo "the following is a list of contributors:" > CONTRIBUTORS.md + echo "" >> CONTRIBUTORS.md + echo "" >> CONTRIBUTORS.md + githubcontrib --owner kbknapp --repo clap-rs --sha master --cols 6 --format md --showlogin true --sortBy contributions --sortOrder desc >> CONTRIBUTORS.md + echo "" >> CONTRIBUTORS.md + echo "" >> CONTRIBUTORS.md + echo 
"This list was generated by [mgechev/github-contributors-list](https://github.com/mgechev/github-contributors-list)" >> CONTRIBUTORS.md + rm CONTRIBUTORS.md.bak + +run-test TEST: + cargo test --test {{TEST}} + +debug TEST: + cargo test --test {{TEST}} --features debug + +run-tests: + cargo test --features "yaml unstable" + +@bench: nightly + cargo bench && just remove-nightly + +nightly: + rustup override add nightly + +remove-nightly: + rustup override remove + +@lint: nightly + cargo build --features lints && just remove-nightly + +clean: + cargo clean + find . -type f -name "*.orig" -exec rm {} \; + find . -type f -name "*.bk" -exec rm {} \; + find . -type f -name ".*~" -exec rm {} \; diff --git a/clap/rustfmt.toml b/clap/rustfmt.toml new file mode 100644 index 000000000..0136d86e3 --- /dev/null +++ b/clap/rustfmt.toml @@ -0,0 +1,4 @@ +format_strings = false +chain_overflow_last = false +same_line_if_else = true +fn_single_line = true diff --git a/clap/src/app/help.rs b/clap/src/app/help.rs new file mode 100644 index 000000000..34f97ac32 --- /dev/null +++ b/clap/src/app/help.rs @@ -0,0 +1,1028 @@ +// Std +use std::borrow::Cow; +use std::cmp; +use std::collections::BTreeMap; +use std::fmt::Display; +use std::io::{self, Cursor, Read, Write}; +use std::usize; + +// Internal +use app::parser::Parser; +use app::usage; +use app::{App, AppSettings}; +use args::{AnyArg, ArgSettings, DispOrder}; +use errors::{Error, Result as ClapResult}; +use fmt::{Colorizer, ColorizerOption, Format}; +use map::VecMap; +use INTERNAL_ERROR_MSG; + +// Third Party +#[cfg(feature = "wrap_help")] +use term_size; +use textwrap; +use unicode_width::UnicodeWidthStr; + +#[cfg(not(feature = "wrap_help"))] +mod term_size { + pub fn dimensions() -> Option<(usize, usize)> { + None + } +} + +fn str_width(s: &str) -> usize { + UnicodeWidthStr::width(s) +} + +const TAB: &'static str = " "; + +// These are just convenient traits to make the code easier to read. +trait ArgWithDisplay<'b, 'c>: AnyArg<'b, 'c> + Display {} +impl<'b, 'c, T> ArgWithDisplay<'b, 'c> for T +where + T: AnyArg<'b, 'c> + Display, +{ +} + +trait ArgWithOrder<'b, 'c>: ArgWithDisplay<'b, 'c> + DispOrder { + fn as_base(&self) -> &ArgWithDisplay<'b, 'c>; +} +impl<'b, 'c, T> ArgWithOrder<'b, 'c> for T +where + T: ArgWithDisplay<'b, 'c> + DispOrder, +{ + fn as_base(&self) -> &ArgWithDisplay<'b, 'c> { + self + } +} + +fn as_arg_trait<'a, 'b, T: ArgWithOrder<'a, 'b>>(x: &T) -> &ArgWithOrder<'a, 'b> { + x +} + +impl<'b, 'c> DispOrder for App<'b, 'c> { + fn disp_ord(&self) -> usize { + 999 + } +} + +macro_rules! color { + ($_self:ident, $s:expr, $c:ident) => { + if $_self.color { + write!($_self.writer, "{}", $_self.cizer.$c($s)) + } else { + write!($_self.writer, "{}", $s) + } + }; + ($_self:ident, $fmt_s:expr, $v:expr, $c:ident) => { + if $_self.color { + write!($_self.writer, "{}", $_self.cizer.$c(format!($fmt_s, $v))) + } else { + write!($_self.writer, $fmt_s, $v) + } + }; +} + +/// `clap` Help Writer. +/// +/// Wraps a writer stream providing different methods to generate help for `clap` objects. +pub struct Help<'a> { + writer: &'a mut Write, + next_line_help: bool, + hide_pv: bool, + term_w: usize, + color: bool, + cizer: Colorizer, + longest: usize, + force_next_line: bool, + use_long: bool, +} + +// Public Functions +impl<'a> Help<'a> { + /// Create a new `Help` instance. 
+ #[cfg_attr(feature = "cargo-clippy", allow(too_many_arguments))] + pub fn new( + w: &'a mut Write, + next_line_help: bool, + hide_pv: bool, + color: bool, + cizer: Colorizer, + term_w: Option, + max_w: Option, + use_long: bool, + ) -> Self { + debugln!("Help::new;"); + Help { + writer: w, + next_line_help: next_line_help, + hide_pv: hide_pv, + term_w: match term_w { + Some(width) => if width == 0 { + usize::MAX + } else { + width + }, + None => cmp::min( + term_size::dimensions().map_or(120, |(w, _)| w), + match max_w { + None | Some(0) => usize::MAX, + Some(mw) => mw, + }, + ), + }, + color: color, + cizer: cizer, + longest: 0, + force_next_line: false, + use_long: use_long, + } + } + + /// Reads help settings from an App + /// and write its help to the wrapped stream. + pub fn write_app_help(w: &'a mut Write, app: &App, use_long: bool) -> ClapResult<()> { + debugln!("Help::write_app_help;"); + Self::write_parser_help(w, &app.p, use_long) + } + + /// Reads help settings from a Parser + /// and write its help to the wrapped stream. + pub fn write_parser_help(w: &'a mut Write, parser: &Parser, use_long: bool) -> ClapResult<()> { + debugln!("Help::write_parser_help;"); + Self::_write_parser_help(w, parser, false, use_long) + } + + /// Reads help settings from a Parser + /// and write its help to the wrapped stream which will be stderr. This method prevents + /// formatting when required. + pub fn write_parser_help_to_stderr(w: &'a mut Write, parser: &Parser) -> ClapResult<()> { + debugln!("Help::write_parser_help;"); + Self::_write_parser_help(w, parser, true, false) + } + + #[doc(hidden)] + pub fn _write_parser_help( + w: &'a mut Write, + parser: &Parser, + stderr: bool, + use_long: bool, + ) -> ClapResult<()> { + debugln!("Help::write_parser_help;"); + let nlh = parser.is_set(AppSettings::NextLineHelp); + let hide_v = parser.is_set(AppSettings::HidePossibleValuesInHelp); + let color = parser.is_set(AppSettings::ColoredHelp); + let cizer = Colorizer::new(ColorizerOption { + use_stderr: stderr, + when: parser.color(), + }); + Self::new( + w, + nlh, + hide_v, + color, + cizer, + parser.meta.term_w, + parser.meta.max_w, + use_long, + ).write_help(parser) + } + + /// Writes the parser help to the wrapped stream. + pub fn write_help(&mut self, parser: &Parser) -> ClapResult<()> { + debugln!("Help::write_help;"); + if let Some(h) = parser.meta.help_str { + write!(self.writer, "{}", h).map_err(Error::from)?; + } else if let Some(tmpl) = parser.meta.template { + self.write_templated_help(parser, tmpl)?; + } else { + self.write_default_help(parser)?; + } + Ok(()) + } +} + +// Methods to write AnyArg help. +impl<'a> Help<'a> { + /// Writes help for each argument in the order they were declared to the wrapped stream. + fn write_args_unsorted<'b: 'd, 'c: 'd, 'd, I: 'd>(&mut self, args: I) -> io::Result<()> + where + I: Iterator>, + { + debugln!("Help::write_args_unsorted;"); + // The shortest an arg can legally be is 2 (i.e. '-x') + self.longest = 2; + let mut arg_v = Vec::with_capacity(10); + let use_long = self.use_long; + for arg in args.filter(|arg| should_show_arg(use_long, *arg)) { + if arg.longest_filter() { + self.longest = cmp::max(self.longest, str_width(arg.to_string().as_str())); + } + arg_v.push(arg) + } + let mut first = true; + for arg in arg_v { + if first { + first = false; + } else { + self.writer.write_all(b"\n")?; + } + self.write_arg(arg.as_base())?; + } + Ok(()) + } + + /// Sorts arguments by length and display order and write their help to the wrapped stream. 
+ fn write_args<'b: 'd, 'c: 'd, 'd, I: 'd>(&mut self, args: I) -> io::Result<()> + where + I: Iterator>, + { + debugln!("Help::write_args;"); + // The shortest an arg can legally be is 2 (i.e. '-x') + self.longest = 2; + let mut ord_m = VecMap::new(); + let use_long = self.use_long; + // Determine the longest + for arg in args.filter(|arg| { + // If it's NextLineHelp, but we don't care to compute how long because it may be + // NextLineHelp on purpose *because* it's so long and would throw off all other + // args alignment + should_show_arg(use_long, *arg) + }) { + if arg.longest_filter() { + debugln!("Help::write_args: Current Longest...{}", self.longest); + self.longest = cmp::max(self.longest, str_width(arg.to_string().as_str())); + debugln!("Help::write_args: New Longest...{}", self.longest); + } + let btm = ord_m.entry(arg.disp_ord()).or_insert(BTreeMap::new()); + btm.insert(arg.name(), arg); + } + let mut first = true; + for btm in ord_m.values() { + for arg in btm.values() { + if first { + first = false; + } else { + self.writer.write_all(b"\n")?; + } + self.write_arg(arg.as_base())?; + } + } + Ok(()) + } + + /// Writes help for an argument to the wrapped stream. + fn write_arg<'b, 'c>(&mut self, arg: &ArgWithDisplay<'b, 'c>) -> io::Result<()> { + debugln!("Help::write_arg;"); + self.short(arg)?; + self.long(arg)?; + let spec_vals = self.val(arg)?; + self.help(arg, &*spec_vals)?; + Ok(()) + } + + /// Writes argument's short command to the wrapped stream. + fn short<'b, 'c>(&mut self, arg: &ArgWithDisplay<'b, 'c>) -> io::Result<()> { + debugln!("Help::short;"); + write!(self.writer, "{}", TAB)?; + if let Some(s) = arg.short() { + color!(self, "-{}", s, good) + } else if arg.has_switch() { + write!(self.writer, "{}", TAB) + } else { + Ok(()) + } + } + + /// Writes argument's long command to the wrapped stream. + fn long<'b, 'c>(&mut self, arg: &ArgWithDisplay<'b, 'c>) -> io::Result<()> { + debugln!("Help::long;"); + if !arg.has_switch() { + return Ok(()); + } + if arg.takes_value() { + if let Some(l) = arg.long() { + if arg.short().is_some() { + write!(self.writer, ", ")?; + } + color!(self, "--{}", l, good)? + } + + let sep = if arg.is_set(ArgSettings::RequireEquals) { + "=" + } else { + " " + }; + write!(self.writer, "{}", sep)?; + } else if let Some(l) = arg.long() { + if arg.short().is_some() { + write!(self.writer, ", ")?; + } + color!(self, "--{}", l, good)?; + } + Ok(()) + } + + /// Writes argument's possible values to the wrapped stream. 
+ fn val<'b, 'c>(&mut self, arg: &ArgWithDisplay<'b, 'c>) -> Result { + debugln!("Help::val: arg={}", arg); + if arg.takes_value() { + let delim = if arg.is_set(ArgSettings::RequireDelimiter) { + arg.val_delim().expect(INTERNAL_ERROR_MSG) + } else { + ' ' + }; + if let Some(vec) = arg.val_names() { + let mut it = vec.iter().peekable(); + while let Some((_, val)) = it.next() { + color!(self, "<{}>", val, good)?; + if it.peek().is_some() { + write!(self.writer, "{}", delim)?; + } + } + let num = vec.len(); + if arg.is_set(ArgSettings::Multiple) && num == 1 { + color!(self, "...", good)?; + } + } else if let Some(num) = arg.num_vals() { + let mut it = (0..num).peekable(); + while let Some(_) = it.next() { + color!(self, "<{}>", arg.name(), good)?; + if it.peek().is_some() { + write!(self.writer, "{}", delim)?; + } + } + if arg.is_set(ArgSettings::Multiple) && num == 1 { + color!(self, "...", good)?; + } + } else if arg.has_switch() { + color!(self, "<{}>", arg.name(), good)?; + if arg.is_set(ArgSettings::Multiple) { + color!(self, "...", good)?; + } + } else { + color!(self, "{}", arg, good)?; + } + } + + let spec_vals = self.spec_vals(arg); + let h = arg.help().unwrap_or(""); + let h_w = str_width(h) + str_width(&*spec_vals); + let nlh = self.next_line_help || arg.is_set(ArgSettings::NextLineHelp); + let taken = self.longest + 12; + self.force_next_line = !nlh && self.term_w >= taken + && (taken as f32 / self.term_w as f32) > 0.40 + && h_w > (self.term_w - taken); + + debug!("Help::val: Has switch..."); + if arg.has_switch() { + sdebugln!("Yes"); + debugln!("Help::val: force_next_line...{:?}", self.force_next_line); + debugln!("Help::val: nlh...{:?}", nlh); + debugln!("Help::val: taken...{}", taken); + debugln!( + "Help::val: help_width > (width - taken)...{} > ({} - {})", + h_w, + self.term_w, + taken + ); + debugln!("Help::val: longest...{}", self.longest); + debug!("Help::val: next_line..."); + if !(nlh || self.force_next_line) { + sdebugln!("No"); + let self_len = str_width(arg.to_string().as_str()); + // subtract ourself + let mut spcs = self.longest - self_len; + // Since we're writing spaces from the tab point we first need to know if we + // had a long and short, or just short + if arg.long().is_some() { + // Only account 4 after the val + spcs += 4; + } else { + // Only account for ', --' + 4 after the val + spcs += 8; + } + + write_nspaces!(self.writer, spcs); + } else { + sdebugln!("Yes"); + } + } else if !(nlh || self.force_next_line) { + sdebugln!("No, and not next_line"); + write_nspaces!( + self.writer, + self.longest + 4 - (str_width(arg.to_string().as_str())) + ); + } else { + sdebugln!("No"); + } + Ok(spec_vals) + } + + fn write_before_after_help(&mut self, h: &str) -> io::Result<()> { + debugln!("Help::write_before_after_help;"); + let mut help = String::from(h); + // determine if our help fits or needs to wrap + debugln!( + "Help::write_before_after_help: Term width...{}", + self.term_w + ); + let too_long = str_width(h) >= self.term_w; + + debug!("Help::write_before_after_help: Too long..."); + if too_long || h.contains("{n}") { + sdebugln!("Yes"); + debugln!("Help::write_before_after_help: help: {}", help); + debugln!( + "Help::write_before_after_help: help width: {}", + str_width(&*help) + ); + // Determine how many newlines we need to insert + debugln!( + "Help::write_before_after_help: Usable space: {}", + self.term_w + ); + help = wrap_help(&help.replace("{n}", "\n"), self.term_w); + } else { + sdebugln!("No"); + } + write!(self.writer, "{}", help)?; + Ok(()) + } + 
+ /// Writes argument's help to the wrapped stream. + fn help<'b, 'c>(&mut self, arg: &ArgWithDisplay<'b, 'c>, spec_vals: &str) -> io::Result<()> { + debugln!("Help::help;"); + let h = if self.use_long && arg.name() != "" { + arg.long_help().unwrap_or_else(|| arg.help().unwrap_or("")) + } else { + arg.help().unwrap_or_else(|| arg.long_help().unwrap_or("")) + }; + let mut help = String::from(h) + spec_vals; + let nlh = self.next_line_help || arg.is_set(ArgSettings::NextLineHelp) || (self.use_long && arg.name() != ""); + debugln!("Help::help: Next Line...{:?}", nlh); + + let spcs = if nlh || self.force_next_line { + 12 // "tab" * 3 + } else { + self.longest + 12 + }; + + let too_long = spcs + str_width(h) + str_width(&*spec_vals) >= self.term_w; + + // Is help on next line, if so then indent + if nlh || self.force_next_line { + write!(self.writer, "\n{}{}{}", TAB, TAB, TAB)?; + } + + debug!("Help::help: Too long..."); + if too_long && spcs <= self.term_w || h.contains("{n}") { + sdebugln!("Yes"); + debugln!("Help::help: help...{}", help); + debugln!("Help::help: help width...{}", str_width(&*help)); + // Determine how many newlines we need to insert + let avail_chars = self.term_w - spcs; + debugln!("Help::help: Usable space...{}", avail_chars); + help = wrap_help(&help.replace("{n}", "\n"), avail_chars); + } else { + sdebugln!("No"); + } + if let Some(part) = help.lines().next() { + write!(self.writer, "{}", part)?; + } + for part in help.lines().skip(1) { + write!(self.writer, "\n")?; + if nlh || self.force_next_line { + write!(self.writer, "{}{}{}", TAB, TAB, TAB)?; + } else if arg.has_switch() { + write_nspaces!(self.writer, self.longest + 12); + } else { + write_nspaces!(self.writer, self.longest + 8); + } + write!(self.writer, "{}", part)?; + } + if !help.contains('\n') && (nlh || self.force_next_line) { + write!(self.writer, "\n")?; + } + Ok(()) + } + + fn spec_vals(&self, a: &ArgWithDisplay) -> String { + debugln!("Help::spec_vals: a={}", a); + let mut spec_vals = vec![]; + if let Some(ref env) = a.env() { + debugln!( + "Help::spec_vals: Found environment variable...[{:?}:{:?}]", + env.0, + env.1 + ); + let env_val = if !a.is_set(ArgSettings::HideEnvValues) { + format!( + "={}", + env.1.map_or(Cow::Borrowed(""), |val| val.to_string_lossy()) + ) + } else { + String::new() + }; + let env_info = format!(" [env: {}{}]", env.0.to_string_lossy(), env_val); + spec_vals.push(env_info); + } + if !a.is_set(ArgSettings::HideDefaultValue) { + if let Some(pv) = a.default_val() { + debugln!("Help::spec_vals: Found default value...[{:?}]", pv); + spec_vals.push(format!( + " [default: {}]", + if self.color { + self.cizer.good(pv.to_string_lossy()) + } else { + Format::None(pv.to_string_lossy()) + } + )); + } + } + if let Some(ref aliases) = a.aliases() { + debugln!("Help::spec_vals: Found aliases...{:?}", aliases); + spec_vals.push(format!( + " [aliases: {}]", + if self.color { + aliases + .iter() + .map(|v| format!("{}", self.cizer.good(v))) + .collect::>() + .join(", ") + } else { + aliases.join(", ") + } + )); + } + if !self.hide_pv && !a.is_set(ArgSettings::HidePossibleValues) { + if let Some(pv) = a.possible_vals() { + debugln!("Help::spec_vals: Found possible vals...{:?}", pv); + spec_vals.push(if self.color { + format!( + " [possible values: {}]", + pv.iter() + .map(|v| format!("{}", self.cizer.good(v))) + .collect::>() + .join(", ") + ) + } else { + format!(" [possible values: {}]", pv.join(", ")) + }); + } + } + spec_vals.join(" ") + } +} + +fn should_show_arg(use_long: bool, arg: 
&ArgWithOrder) -> bool { + if arg.is_set(ArgSettings::Hidden) { + return false; + } + + (!arg.is_set(ArgSettings::HiddenLongHelp) && use_long) + || (!arg.is_set(ArgSettings::HiddenShortHelp) && !use_long) + || arg.is_set(ArgSettings::NextLineHelp) +} + +// Methods to write Parser help. +impl<'a> Help<'a> { + /// Writes help for all arguments (options, flags, args, subcommands) + /// including titles of a Parser Object to the wrapped stream. + #[cfg_attr(feature = "lints", allow(useless_let_if_seq))] + #[cfg_attr(feature = "cargo-clippy", allow(useless_let_if_seq))] + pub fn write_all_args(&mut self, parser: &Parser) -> ClapResult<()> { + debugln!("Help::write_all_args;"); + let flags = parser.has_flags(); + let pos = parser + .positionals() + .filter(|arg| !arg.is_set(ArgSettings::Hidden)) + .count() > 0; + let opts = parser.has_opts(); + let subcmds = parser.has_visible_subcommands(); + + let unified_help = parser.is_set(AppSettings::UnifiedHelpMessage); + + let mut first = true; + + if unified_help && (flags || opts) { + let opts_flags = parser + .flags() + .map(as_arg_trait) + .chain(parser.opts().map(as_arg_trait)); + color!(self, "OPTIONS:\n", warning)?; + self.write_args(opts_flags)?; + first = false; + } else { + if flags { + color!(self, "FLAGS:\n", warning)?; + self.write_args(parser.flags().map(as_arg_trait))?; + first = false; + } + if opts { + if !first { + self.writer.write_all(b"\n\n")?; + } + color!(self, "OPTIONS:\n", warning)?; + self.write_args(parser.opts().map(as_arg_trait))?; + first = false; + } + } + + if pos { + if !first { + self.writer.write_all(b"\n\n")?; + } + color!(self, "ARGS:\n", warning)?; + self.write_args_unsorted(parser.positionals().map(as_arg_trait))?; + first = false; + } + + if subcmds { + if !first { + self.writer.write_all(b"\n\n")?; + } + color!(self, "SUBCOMMANDS:\n", warning)?; + self.write_subcommands(parser)?; + } + + Ok(()) + } + + /// Writes help for subcommands of a Parser Object to the wrapped stream. + fn write_subcommands(&mut self, parser: &Parser) -> io::Result<()> { + debugln!("Help::write_subcommands;"); + // The shortest an arg can legally be is 2 (i.e. '-x') + self.longest = 2; + let mut ord_m = VecMap::new(); + for sc in parser + .subcommands + .iter() + .filter(|s| !s.p.is_set(AppSettings::Hidden)) + { + let btm = ord_m.entry(sc.p.meta.disp_ord).or_insert(BTreeMap::new()); + self.longest = cmp::max(self.longest, str_width(sc.p.meta.name.as_str())); + //self.longest = cmp::max(self.longest, sc.p.meta.name.len()); + btm.insert(sc.p.meta.name.clone(), sc.clone()); + } + + let mut first = true; + for btm in ord_m.values() { + for sc in btm.values() { + if first { + first = false; + } else { + self.writer.write_all(b"\n")?; + } + self.write_arg(sc)?; + } + } + Ok(()) + } + + /// Writes version of a Parser Object to the wrapped stream. + fn write_version(&mut self, parser: &Parser) -> io::Result<()> { + debugln!("Help::write_version;"); + write!(self.writer, "{}", parser.meta.version.unwrap_or(""))?; + Ok(()) + } + + /// Writes binary name of a Parser Object to the wrapped stream. + fn write_bin_name(&mut self, parser: &Parser) -> io::Result<()> { + debugln!("Help::write_bin_name;"); + macro_rules! write_name { + () => {{ + let mut name = parser.meta.name.clone(); + name = name.replace("{n}", "\n"); + color!(self, wrap_help(&name, self.term_w), good)?; + }}; + } + if let Some(bn) = parser.meta.bin_name.as_ref() { + if bn.contains(' ') { + // Incase we're dealing with subcommands i.e. 
git mv is translated to git-mv + color!(self, bn.replace(" ", "-"), good)? + } else { + write_name!(); + } + } else { + write_name!(); + } + Ok(()) + } + + /// Writes default help for a Parser Object to the wrapped stream. + pub fn write_default_help(&mut self, parser: &Parser) -> ClapResult<()> { + debugln!("Help::write_default_help;"); + if let Some(h) = parser.meta.pre_help { + self.write_before_after_help(h)?; + self.writer.write_all(b"\n\n")?; + } + + macro_rules! write_thing { + ($thing:expr) => {{ + let mut owned_thing = $thing.to_owned(); + owned_thing = owned_thing.replace("{n}", "\n"); + write!(self.writer, "{}\n", wrap_help(&owned_thing, self.term_w))? + }}; + } + // Print the version + self.write_bin_name(parser)?; + self.writer.write_all(b" ")?; + self.write_version(parser)?; + self.writer.write_all(b"\n")?; + if let Some(author) = parser.meta.author { + write_thing!(author) + } + // if self.use_long { + // if let Some(about) = parser.meta.long_about { + // debugln!("Help::write_default_help: writing long about"); + // write_thing!(about) + // } else if let Some(about) = parser.meta.about { + // debugln!("Help::write_default_help: writing about"); + // write_thing!(about) + // } + // } else + if let Some(about) = parser.meta.long_about { + debugln!("Help::write_default_help: writing long about"); + write_thing!(about) + } else if let Some(about) = parser.meta.about { + debugln!("Help::write_default_help: writing about"); + write_thing!(about) + } + + color!(self, "\nUSAGE:", warning)?; + write!( + self.writer, + "\n{}{}\n\n", + TAB, + usage::create_usage_no_title(parser, &[]) + )?; + + let flags = parser.has_flags(); + let pos = parser.has_positionals(); + let opts = parser.has_opts(); + let subcmds = parser.has_subcommands(); + + if flags || opts || pos || subcmds { + self.write_all_args(parser)?; + } + + if let Some(h) = parser.meta.more_help { + if flags || opts || pos || subcmds { + self.writer.write_all(b"\n\n")?; + } + self.write_before_after_help(h)?; + } + + self.writer.flush().map_err(Error::from) + } +} + +/// Possible results for a copying function that stops when a given +/// byte was found. +enum CopyUntilResult { + DelimiterFound(usize), + DelimiterNotFound(usize), + ReaderEmpty, + ReadError(io::Error), + WriteError(io::Error), +} + +/// Copies the contents of a reader into a writer until a delimiter byte is found. +/// On success, the total number of bytes that were +/// copied from reader to writer is returned. +fn copy_until(r: &mut R, w: &mut W, delimiter_byte: u8) -> CopyUntilResult { + debugln!("copy_until;"); + + let mut count = 0; + for wb in r.bytes() { + match wb { + Ok(b) => { + if b == delimiter_byte { + return CopyUntilResult::DelimiterFound(count); + } + match w.write(&[b]) { + Ok(c) => count += c, + Err(e) => return CopyUntilResult::WriteError(e), + } + } + Err(e) => return CopyUntilResult::ReadError(e), + } + } + if count > 0 { + CopyUntilResult::DelimiterNotFound(count) + } else { + CopyUntilResult::ReaderEmpty + } +} + +/// Copies the contents of a reader into a writer until a {tag} is found, +/// copying the tag content to a buffer and returning its size. +/// In addition to errors, there are three possible outputs: +/// - `None`: The reader was consumed. +/// - `Some(Ok(0))`: No tag was captured but the reader still contains data. +/// - `Some(Ok(length>0))`: a tag with `length` was captured to the `tag_buffer`. 
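+// Illustrative walk-through (an assumed input, not upstream code): with a reader positioned at
+// "pre {bin} post", `copy_until` first copies "pre " to the writer and reports
+// DelimiterFound(4); the function below then captures "bin" into `tag_buffer` and returns
+// Some(Ok(3)).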
+fn copy_and_capture<R: Read, W: Write>(
+    r: &mut R,
+    w: &mut W,
+    tag_buffer: &mut Cursor<Vec<u8>>,
+) -> Option<io::Result<usize>> {
+    use self::CopyUntilResult::*;
+    debugln!("copy_and_capture;");
+
+    // Find the opening byte.
+    match copy_until(r, w, b'{') {
+        // The end of the reader was reached without finding the opening tag.
+        // (either with or without having copied data to the writer)
+        // Return None indicating that we are done.
+        ReaderEmpty | DelimiterNotFound(_) => None,
+
+        // Something went wrong.
+        ReadError(e) | WriteError(e) => Some(Err(e)),
+
+        // The opening byte was found.
+        // (either with or without having copied data to the writer)
+        DelimiterFound(_) => {
+            // Let's reset the buffer first and find out how long it is.
+            tag_buffer.set_position(0);
+            let buffer_size = tag_buffer.get_ref().len();
+
+            // Find the closing byte, limiting the reader to the length of the buffer.
+            let mut rb = r.take(buffer_size as u64);
+            match copy_until(&mut rb, tag_buffer, b'}') {
+                // We were already at the end of the reader.
+                // Return None indicating that we are done.
+                ReaderEmpty => None,
+
+                // The closing tag was found.
+                // Return the tag_length.
+                DelimiterFound(tag_length) => Some(Ok(tag_length)),
+
+                // The end of the reader was found without finding the closing tag.
+                // Write the opening byte and captured text to the writer.
+                // Return 0 indicating that nothing was captured but the reader still contains data.
+                DelimiterNotFound(not_tag_length) => match w.write(b"{") {
+                    Err(e) => Some(Err(e)),
+                    _ => match w.write(&tag_buffer.get_ref()[0..not_tag_length]) {
+                        Err(e) => Some(Err(e)),
+                        _ => Some(Ok(0)),
+                    },
+                },
+
+                ReadError(e) | WriteError(e) => Some(Err(e)),
+            }
+        }
+    }
+}
+
+// Methods to write Parser help using templates.
+impl<'a> Help<'a> {
+    /// Write help to stream for the parser in the format defined by the template.
+    ///
+    /// Tags are given inside curly brackets. Valid tags are:
+    /// * `{bin}` - Binary name.
+    /// * `{version}` - Version number.
+    /// * `{author}` - Author information.
+    /// * `{usage}` - Automatically generated or given usage string.
+    /// * `{all-args}` - Help for all arguments (options, flags, positional arguments,
+    ///   and subcommands) including titles.
+    /// * `{unified}` - Unified help for options and flags.
+    /// * `{flags}` - Help for flags.
+    /// * `{options}` - Help for options.
+    /// * `{positionals}` - Help for positional arguments.
+    /// * `{subcommands}` - Help for subcommands.
+    /// * `{after-help}` - Info to be displayed after the help message.
+    /// * `{before-help}` - Info to be displayed before the help message.
+    ///
+    /// The template system is, on purpose, very simple. Therefore the tags have to be written
+    /// in lowercase and without spacing.
+    fn write_templated_help(&mut self, parser: &Parser, template: &str) -> ClapResult<()> {
+        debugln!("Help::write_templated_help;");
+        let mut tmplr = Cursor::new(&template);
+        let mut tag_buf = Cursor::new(vec![0u8; 15]);
+
+        // The strategy is to copy the template from the reader to the wrapped stream
+        // until a tag is found. Depending on its value, the appropriate content is copied
+        // to the wrapped stream.
+        // The copy from the template is then resumed, repeating this sequence until reading
+        // the complete template.
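+        // For example, given the template "{bin} {version}\n{about}", the loop below expands
+        // `bin`, copies the literal " ", expands `version`, copies "\n", and expands `about`
+        // before the reader is exhausted (illustrative walk-through of an assumed template).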
+ + loop { + let tag_length = match copy_and_capture(&mut tmplr, &mut self.writer, &mut tag_buf) { + None => return Ok(()), + Some(Err(e)) => return Err(Error::from(e)), + Some(Ok(val)) if val > 0 => val, + _ => continue, + }; + + debugln!("Help::write_template_help:iter: tag_buf={};", unsafe { + String::from_utf8_unchecked( + tag_buf.get_ref()[0..tag_length] + .iter() + .map(|&i| i) + .collect::>(), + ) + }); + match &tag_buf.get_ref()[0..tag_length] { + b"?" => { + self.writer.write_all(b"Could not decode tag name")?; + } + b"bin" => { + self.write_bin_name(parser)?; + } + b"version" => { + write!( + self.writer, + "{}", + parser.meta.version.unwrap_or("unknown version") + )?; + } + b"author" => { + write!( + self.writer, + "{}", + parser.meta.author.unwrap_or("unknown author") + )?; + } + b"about" => { + write!( + self.writer, + "{}", + parser.meta.about.unwrap_or("unknown about") + )?; + } + b"long-about" => { + write!( + self.writer, + "{}", + parser.meta.long_about.unwrap_or("unknown about") + )?; + } + b"usage" => { + write!(self.writer, "{}", usage::create_usage_no_title(parser, &[]))?; + } + b"all-args" => { + self.write_all_args(parser)?; + } + b"unified" => { + let opts_flags = parser + .flags() + .map(as_arg_trait) + .chain(parser.opts().map(as_arg_trait)); + self.write_args(opts_flags)?; + } + b"flags" => { + self.write_args(parser.flags().map(as_arg_trait))?; + } + b"options" => { + self.write_args(parser.opts().map(as_arg_trait))?; + } + b"positionals" => { + self.write_args(parser.positionals().map(as_arg_trait))?; + } + b"subcommands" => { + self.write_subcommands(parser)?; + } + b"after-help" => { + write!( + self.writer, + "{}", + parser.meta.more_help.unwrap_or("unknown after-help") + )?; + } + b"before-help" => { + write!( + self.writer, + "{}", + parser.meta.pre_help.unwrap_or("unknown before-help") + )?; + } + // Unknown tag, write it back. 
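+                // (For instance, an unrecognized "{foo}" in a user-supplied template is echoed
+                // back verbatim as "{foo}" rather than reported as an error.)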
+                r => {
+                    self.writer.write_all(b"{")?;
+                    self.writer.write_all(r)?;
+                    self.writer.write_all(b"}")?;
+                }
+            }
+        }
+    }
+}
+
+fn wrap_help(help: &str, avail_chars: usize) -> String {
+    let wrapper = textwrap::Wrapper::new(avail_chars).break_words(false);
+    help.lines()
+        .map(|line| wrapper.fill(line))
+        .collect::<Vec<String>>()
+        .join("\n")
+}
+
+#[cfg(test)]
+mod test {
+    use super::wrap_help;
+
+    #[test]
+    fn wrap_help_last_word() {
+        let help = String::from("foo bar baz");
+        assert_eq!(wrap_help(&help, 5), "foo\nbar\nbaz");
+    }
+}
diff --git a/clap/src/app/meta.rs b/clap/src/app/meta.rs
new file mode 100644
index 000000000..c7f128fe5
--- /dev/null
+++ b/clap/src/app/meta.rs
@@ -0,0 +1,33 @@
+#[doc(hidden)]
+#[allow(missing_debug_implementations)]
+#[derive(Default, Clone)]
+pub struct AppMeta<'b> {
+    pub name: String,
+    pub bin_name: Option<String>,
+    pub author: Option<&'b str>,
+    pub version: Option<&'b str>,
+    pub long_version: Option<&'b str>,
+    pub about: Option<&'b str>,
+    pub long_about: Option<&'b str>,
+    pub more_help: Option<&'b str>,
+    pub pre_help: Option<&'b str>,
+    pub aliases: Option<Vec<(&'b str, bool)>>, // (name, visible)
+    pub usage_str: Option<&'b str>,
+    pub usage: Option<String>,
+    pub help_str: Option<&'b str>,
+    pub disp_ord: usize,
+    pub term_w: Option<usize>,
+    pub max_w: Option<usize>,
+    pub template: Option<&'b str>,
+}
+
+impl<'b> AppMeta<'b> {
+    pub fn new() -> Self { Default::default() }
+    pub fn with_name(s: String) -> Self {
+        AppMeta {
+            name: s,
+            disp_ord: 999,
+            ..Default::default()
+        }
+    }
+}
diff --git a/clap/src/app/mod.rs b/clap/src/app/mod.rs
new file mode 100644
index 000000000..f2d9d52bc
--- /dev/null
+++ b/clap/src/app/mod.rs
@@ -0,0 +1,1842 @@
+mod settings;
+pub mod parser;
+mod meta;
+mod help;
+mod validator;
+mod usage;
+
+// Std
+use std::env;
+use std::ffi::{OsStr, OsString};
+use std::fmt;
+use std::io::{self, BufRead, BufWriter, Write};
+use std::path::Path;
+use std::process;
+use std::rc::Rc;
+use std::result::Result as StdResult;
+
+// Third Party
+#[cfg(feature = "yaml")]
+use yaml_rust::Yaml;
+
+// Internal
+use app::help::Help;
+use app::parser::Parser;
+use args::{AnyArg, Arg, ArgGroup, ArgMatcher, ArgMatches, ArgSettings};
+use errors::Result as ClapResult;
+pub use self::settings::AppSettings;
+use completions::Shell;
+use map::{self, VecMap};
+
+/// Used to create a representation of a command line program and all possible command line
+/// arguments. Application settings are set using the "builder pattern" with the
+/// [`App::get_matches`] family of methods being the terminal methods that start the
+/// runtime-parsing process. These methods then return information about the user supplied
+/// arguments (or lack thereof).
+///
+/// **NOTE:** There aren't any mandatory "options" that one must set. The "options" may
+/// also appear in any order (so long as one of the [`App::get_matches`] methods is the last method
+/// called).
+///
+/// # Examples
+///
+/// ```no_run
+/// # use clap::{App, Arg};
+/// let m = App::new("My Program")
+///     .author("Me, me@mail.com")
+///     .version("1.0.2")
+///     .about("Explains in brief what the program does")
+///     .arg(
+///         Arg::with_name("in_file").index(1)
+///     )
+///     .after_help("Longer explanation to appear after the options when \
+///                  displaying the help information from --help or -h")
+///     .get_matches();
+///
+/// // Your program logic starts here...
+/// ``` +/// [`App::get_matches`]: ./struct.App.html#method.get_matches +#[allow(missing_debug_implementations)] +pub struct App<'a, 'b> +where + 'a: 'b, +{ + #[doc(hidden)] pub p: Parser<'a, 'b>, +} + + +impl<'a, 'b> App<'a, 'b> { + /// Creates a new instance of an application requiring a name. The name may be, but doesn't + /// have to be same as the binary. The name will be displayed to the user when they request to + /// print version or help and usage information. + /// + /// # Examples + /// + /// ```no_run + /// # use clap::{App, Arg}; + /// let prog = App::new("My Program") + /// # ; + /// ``` + pub fn new>(n: S) -> Self { + App { + p: Parser::with_name(n.into()), + } + } + + /// Get the name of the app + pub fn get_name(&self) -> &str { &self.p.meta.name } + + /// Get the name of the binary + pub fn get_bin_name(&self) -> Option<&str> { self.p.meta.bin_name.as_ref().map(|s| s.as_str()) } + + /// Creates a new instance of an application requiring a name, but uses the [`crate_authors!`] + /// and [`crate_version!`] macros to fill in the [`App::author`] and [`App::version`] fields. + /// + /// # Examples + /// + /// ```no_run + /// # use clap::{App, Arg}; + /// let prog = App::with_defaults("My Program") + /// # ; + /// ``` + /// [`crate_authors!`]: ./macro.crate_authors!.html + /// [`crate_version!`]: ./macro.crate_version!.html + /// [`App::author`]: ./struct.App.html#method.author + /// [`App::version`]: ./struct.App.html#method.author + #[deprecated(since="2.14.1", note="Can never work; use explicit App::author() and App::version() calls instead")] + pub fn with_defaults>(n: S) -> Self { + let mut a = App { + p: Parser::with_name(n.into()), + }; + a.p.meta.author = Some("Kevin K. "); + a.p.meta.version = Some("2.19.2"); + a + } + + /// Creates a new instance of [`App`] from a .yml (YAML) file. A full example of supported YAML + /// objects can be found in [`examples/17_yaml.rs`] and [`examples/17_yaml.yml`]. One great use + /// for using YAML is when supporting multiple languages and dialects, as each language could + /// be a distinct YAML file and determined at compiletime via `cargo` "features" in your + /// `Cargo.toml` + /// + /// In order to use this function you must compile `clap` with the `features = ["yaml"]` in + /// your settings for the `[dependencies.clap]` table of your `Cargo.toml` + /// + /// **NOTE:** Due to how the YAML objects are built there is a convenience macro for loading + /// the YAML file at compile time (relative to the current file, like modules work). That YAML + /// object can then be passed to this function. + /// + /// # Panics + /// + /// The YAML file must be properly formatted or this function will [`panic!`]. A good way to + /// ensure this doesn't happen is to run your program with the `--help` switch. If this passes + /// without error, you needn't worry because the YAML is properly formatted. + /// + /// # Examples + /// + /// The following example shows how to load a properly formatted YAML file to build an instance + /// of an [`App`] struct. + /// + /// ```ignore + /// # #[macro_use] + /// # extern crate clap; + /// # use clap::App; + /// # fn main() { + /// let yml = load_yaml!("app.yml"); + /// let app = App::from_yaml(yml); + /// + /// // continued logic goes here, such as `app.get_matches()` etc. 
+ /// # } + /// ``` + /// [`App`]: ./struct.App.html + /// [`examples/17_yaml.rs`]: https://github.com/kbknapp/clap-rs/blob/master/examples/17_yaml.rs + /// [`examples/17_yaml.yml`]: https://github.com/kbknapp/clap-rs/blob/master/examples/17_yaml.yml + /// [`panic!`]: https://doc.rust-lang.org/std/macro.panic!.html + #[cfg(feature = "yaml")] + pub fn from_yaml(yaml: &'a Yaml) -> App<'a, 'a> { App::from(yaml) } + + /// Sets a string of author(s) that will be displayed to the user when they + /// request the help information with `--help` or `-h`. + /// + /// **Pro-tip:** Use `clap`s convenience macro [`crate_authors!`] to automatically set your + /// application's author(s) to the same thing as your crate at compile time. See the [`examples/`] + /// directory for more information + /// + /// See the [`examples/`] + /// directory for more information + /// + /// # Examples + /// + /// ```no_run + /// # use clap::{App, Arg}; + /// App::new("myprog") + /// .author("Me, me@mymain.com") + /// # ; + /// ``` + /// [`crate_authors!`]: ./macro.crate_authors!.html + /// [`examples/`]: https://github.com/kbknapp/clap-rs/tree/master/examples + pub fn author>(mut self, author: S) -> Self { + self.p.meta.author = Some(author.into()); + self + } + + /// Overrides the system-determined binary name. This should only be used when absolutely + /// necessary, such as when the binary name for your application is misleading, or perhaps + /// *not* how the user should invoke your program. + /// + /// **Pro-tip:** When building things such as third party `cargo` subcommands, this setting + /// **should** be used! + /// + /// **NOTE:** This command **should not** be used for [`SubCommand`]s. + /// + /// # Examples + /// + /// ```no_run + /// # use clap::{App, Arg}; + /// App::new("My Program") + /// .bin_name("my_binary") + /// # ; + /// ``` + /// [`SubCommand`]: ./struct.SubCommand.html + pub fn bin_name>(mut self, name: S) -> Self { + self.p.meta.bin_name = Some(name.into()); + self + } + + /// Sets a string describing what the program does. This will be displayed when displaying help + /// information with `-h`. + /// + /// **NOTE:** If only `about` is provided, and not [`App::long_about`] but the user requests + /// `--help` clap will still display the contents of `about` appropriately + /// + /// **NOTE:** Only [`App::about`] is used in completion script generation in order to be + /// concise + /// + /// # Examples + /// + /// ```no_run + /// # use clap::{App, Arg}; + /// App::new("myprog") + /// .about("Does really amazing things to great people") + /// # ; + /// ``` + /// [`App::long_about`]: ./struct.App.html#method.long_about + pub fn about>(mut self, about: S) -> Self { + self.p.meta.about = Some(about.into()); + self + } + + /// Sets a string describing what the program does. This will be displayed when displaying help + /// information. + /// + /// **NOTE:** If only `long_about` is provided, and not [`App::about`] but the user requests + /// `-h` clap will still display the contents of `long_about` appropriately + /// + /// **NOTE:** Only [`App::about`] is used in completion script generation in order to be + /// concise + /// + /// # Examples + /// + /// ```no_run + /// # use clap::{App, Arg}; + /// App::new("myprog") + /// .long_about( + /// "Does really amazing things to great people. Now let's talk a little + /// more in depth about how this subcommand really works. 
It may take about + /// a few lines of text, but that's ok!") + /// # ; + /// ``` + /// [`App::about`]: ./struct.App.html#method.about + pub fn long_about>(mut self, about: S) -> Self { + self.p.meta.long_about = Some(about.into()); + self + } + + /// Sets the program's name. This will be displayed when displaying help information. + /// + /// **Pro-top:** This function is particularly useful when configuring a program via + /// [`App::from_yaml`] in conjunction with the [`crate_name!`] macro to derive the program's + /// name from its `Cargo.toml`. + /// + /// # Examples + /// ```ignore + /// # #[macro_use] + /// # extern crate clap; + /// # use clap::App; + /// # fn main() { + /// let yml = load_yaml!("app.yml"); + /// let app = App::from_yaml(yml) + /// .name(crate_name!()); + /// + /// // continued logic goes here, such as `app.get_matches()` etc. + /// # } + /// ``` + /// + /// [`App::from_yaml`]: ./struct.App.html#method.from_yaml + /// [`crate_name!`]: ./macro.crate_name.html + pub fn name>(mut self, name: S) -> Self { + self.p.meta.name = name.into(); + self + } + + /// Adds additional help information to be displayed in addition to auto-generated help. This + /// information is displayed **after** the auto-generated help information. This is often used + /// to describe how to use the arguments, or caveats to be noted. + /// + /// # Examples + /// + /// ```no_run + /// # use clap::App; + /// App::new("myprog") + /// .after_help("Does really amazing things to great people...but be careful with -R") + /// # ; + /// ``` + pub fn after_help>(mut self, help: S) -> Self { + self.p.meta.more_help = Some(help.into()); + self + } + + /// Adds additional help information to be displayed in addition to auto-generated help. This + /// information is displayed **before** the auto-generated help information. This is often used + /// for header information. + /// + /// # Examples + /// + /// ```no_run + /// # use clap::App; + /// App::new("myprog") + /// .before_help("Some info I'd like to appear before the help info") + /// # ; + /// ``` + pub fn before_help>(mut self, help: S) -> Self { + self.p.meta.pre_help = Some(help.into()); + self + } + + /// Sets a string of the version number to be displayed when displaying version or help + /// information with `-V`. + /// + /// **NOTE:** If only `version` is provided, and not [`App::long_version`] but the user + /// requests `--version` clap will still display the contents of `version` appropriately + /// + /// **Pro-tip:** Use `clap`s convenience macro [`crate_version!`] to automatically set your + /// application's version to the same thing as your crate at compile time. See the [`examples/`] + /// directory for more information + /// + /// # Examples + /// + /// ```no_run + /// # use clap::{App, Arg}; + /// App::new("myprog") + /// .version("v0.1.24") + /// # ; + /// ``` + /// [`crate_version!`]: ./macro.crate_version!.html + /// [`examples/`]: https://github.com/kbknapp/clap-rs/tree/master/examples + /// [`App::long_version`]: ./struct.App.html#method.long_version + pub fn version>(mut self, ver: S) -> Self { + self.p.meta.version = Some(ver.into()); + self + } + + /// Sets a string of the version number to be displayed when displaying version or help + /// information with `--version`. 
+ /// + /// **NOTE:** If only `long_version` is provided, and not [`App::version`] but the user + /// requests `-V` clap will still display the contents of `long_version` appropriately + /// + /// **Pro-tip:** Use `clap`s convenience macro [`crate_version!`] to automatically set your + /// application's version to the same thing as your crate at compile time. See the [`examples/`] + /// directory for more information + /// + /// # Examples + /// + /// ```no_run + /// # use clap::{App, Arg}; + /// App::new("myprog") + /// .long_version( + /// "v0.1.24 + /// commit: abcdef89726d + /// revision: 123 + /// release: 2 + /// binary: myprog") + /// # ; + /// ``` + /// [`crate_version!`]: ./macro.crate_version!.html + /// [`examples/`]: https://github.com/kbknapp/clap-rs/tree/master/examples + /// [`App::version`]: ./struct.App.html#method.version + pub fn long_version>(mut self, ver: S) -> Self { + self.p.meta.long_version = Some(ver.into()); + self + } + + /// Sets a custom usage string to override the auto-generated usage string. + /// + /// This will be displayed to the user when errors are found in argument parsing, or when you + /// call [`ArgMatches::usage`] + /// + /// **CAUTION:** Using this setting disables `clap`s "context-aware" usage strings. After this + /// setting is set, this will be the only usage string displayed to the user! + /// + /// **NOTE:** You do not need to specify the "USAGE: \n\t" portion, as that will + /// still be applied by `clap`, you only need to specify the portion starting + /// with the binary name. + /// + /// **NOTE:** This will not replace the entire help message, *only* the portion + /// showing the usage. + /// + /// # Examples + /// + /// ```no_run + /// # use clap::{App, Arg}; + /// App::new("myprog") + /// .usage("myapp [-clDas] ") + /// # ; + /// ``` + /// [`ArgMatches::usage`]: ./struct.ArgMatches.html#method.usage + pub fn usage>(mut self, usage: S) -> Self { + self.p.meta.usage_str = Some(usage.into()); + self + } + + /// Sets a custom help message and overrides the auto-generated one. This should only be used + /// when the auto-generated message does not suffice. + /// + /// This will be displayed to the user when they use `--help` or `-h` + /// + /// **NOTE:** This replaces the **entire** help message, so nothing will be auto-generated. + /// + /// **NOTE:** This **only** replaces the help message for the current command, meaning if you + /// are using subcommands, those help messages will still be auto-generated unless you + /// specify a [`Arg::help`] for them as well. + /// + /// # Examples + /// + /// ```no_run + /// # use clap::{App, Arg}; + /// App::new("myapp") + /// .help("myapp v1.0\n\ + /// Does awesome things\n\ + /// (C) me@mail.com\n\n\ + /// + /// USAGE: myapp \n\n\ + /// + /// Options:\n\ + /// -h, --help Display this message\n\ + /// -V, --version Display version info\n\ + /// -s Do something with stuff\n\ + /// -v Be verbose\n\n\ + /// + /// Commmands:\n\ + /// help Prints this message\n\ + /// work Do some work") + /// # ; + /// ``` + /// [`Arg::help`]: ./struct.Arg.html#method.help + pub fn help>(mut self, help: S) -> Self { + self.p.meta.help_str = Some(help.into()); + self + } + + /// Sets the [`short`] for the auto-generated `help` argument. + /// + /// By default `clap` automatically assigns `h`, but this can be overridden if you have a + /// different argument which you'd prefer to use the `-h` short with. This can be done by + /// defining your own argument with a lowercase `h` as the [`short`]. 
+ /// + /// `clap` lazily generates these `help` arguments **after** you've defined any arguments of + /// your own. + /// + /// **NOTE:** Any leading `-` characters will be stripped, and only the first + /// non `-` character will be used as the [`short`] version + /// + /// # Examples + /// + /// ```no_run + /// # use clap::{App, Arg}; + /// App::new("myprog") + /// .help_short("H") // Using an uppercase `H` instead of the default lowercase `h` + /// # ; + /// ``` + /// [`short`]: ./struct.Arg.html#method.short + pub fn help_short + 'b>(mut self, s: S) -> Self { + self.p.help_short(s.as_ref()); + self + } + + /// Sets the [`short`] for the auto-generated `version` argument. + /// + /// By default `clap` automatically assigns `V`, but this can be overridden if you have a + /// different argument which you'd prefer to use the `-V` short with. This can be done by + /// defining your own argument with an uppercase `V` as the [`short`]. + /// + /// `clap` lazily generates these `version` arguments **after** you've defined any arguments of + /// your own. + /// + /// **NOTE:** Any leading `-` characters will be stripped, and only the first + /// non `-` character will be used as the `short` version + /// + /// # Examples + /// + /// ```no_run + /// # use clap::{App, Arg}; + /// App::new("myprog") + /// .version_short("v") // Using a lowercase `v` instead of the default capital `V` + /// # ; + /// ``` + /// [`short`]: ./struct.Arg.html#method.short + pub fn version_short>(mut self, s: S) -> Self { + self.p.version_short(s.as_ref()); + self + } + + /// Sets the help text for the auto-generated `help` argument. + /// + /// By default `clap` sets this to `"Prints help information"`, but if you're using a + /// different convention for your help messages and would prefer a different phrasing you can + /// override it. + /// + /// # Examples + /// + /// ```no_run + /// # use clap::{App, Arg}; + /// App::new("myprog") + /// .help_message("Print help information") // Perhaps you want imperative help messages + /// + /// # ; + /// ``` + pub fn help_message>(mut self, s: S) -> Self { + self.p.help_message = Some(s.into()); + self + } + + /// Sets the help text for the auto-generated `version` argument. + /// + /// By default `clap` sets this to `"Prints version information"`, but if you're using a + /// different convention for your help messages and would prefer a different phrasing then you + /// can change it. + /// + /// # Examples + /// ```no_run + /// # use clap::{App, Arg}; + /// App::new("myprog") + /// .version_message("Print version information") // Perhaps you want imperative help messages + /// # ; + /// ``` + pub fn version_message>(mut self, s: S) -> Self { + self.p.version_message = Some(s.into()); + self + } + + /// Sets the help template to be used, overriding the default format. + /// + /// Tags arg given inside curly brackets. + /// + /// Valid tags are: + /// + /// * `{bin}` - Binary name. + /// * `{version}` - Version number. + /// * `{author}` - Author information. + /// * `{about}` - General description (from [`App::about`]) + /// * `{usage}` - Automatically generated or given usage string. + /// * `{all-args}` - Help for all arguments (options, flags, positionals arguments, + /// and subcommands) including titles. + /// * `{unified}` - Unified help for options and flags. Note, you must *also* set + /// [`AppSettings::UnifiedHelpMessage`] to fully merge both options and + /// flags, otherwise the ordering is "best effort" + /// * `{flags}` - Help for flags. 
+ /// * `{options}` - Help for options. + /// * `{positionals}` - Help for positionals arguments. + /// * `{subcommands}` - Help for subcommands. + /// * `{after-help}` - Help from [`App::after_help`] + /// * `{before-help}` - Help from [`App::before_help`] + /// + /// # Examples + /// + /// ```no_run + /// # use clap::{App, Arg}; + /// App::new("myprog") + /// .version("1.0") + /// .template("{bin} ({version}) - {usage}") + /// # ; + /// ``` + /// **NOTE:**The template system is, on purpose, very simple. Therefore the tags have to written + /// in the lowercase and without spacing. + /// [`App::about`]: ./struct.App.html#method.about + /// [`App::after_help`]: ./struct.App.html#method.after_help + /// [`App::before_help`]: ./struct.App.html#method.before_help + /// [`AppSettings::UnifiedHelpMessage`]: ./enum.AppSettings.html#variant.UnifiedHelpMessage + pub fn template>(mut self, s: S) -> Self { + self.p.meta.template = Some(s.into()); + self + } + + /// Enables a single command, or [`SubCommand`], level settings. + /// + /// See [`AppSettings`] for a full list of possibilities and examples. + /// + /// # Examples + /// + /// ```no_run + /// # use clap::{App, Arg, AppSettings}; + /// App::new("myprog") + /// .setting(AppSettings::SubcommandRequired) + /// .setting(AppSettings::WaitOnError) + /// # ; + /// ``` + /// [`SubCommand`]: ./struct.SubCommand.html + /// [`AppSettings`]: ./enum.AppSettings.html + pub fn setting(mut self, setting: AppSettings) -> Self { + self.p.set(setting); + self + } + + /// Enables multiple command, or [`SubCommand`], level settings + /// + /// See [`AppSettings`] for a full list of possibilities and examples. + /// + /// # Examples + /// + /// ```no_run + /// # use clap::{App, Arg, AppSettings}; + /// App::new("myprog") + /// .settings(&[AppSettings::SubcommandRequired, + /// AppSettings::WaitOnError]) + /// # ; + /// ``` + /// [`SubCommand`]: ./struct.SubCommand.html + /// [`AppSettings`]: ./enum.AppSettings.html + pub fn settings(mut self, settings: &[AppSettings]) -> Self { + for s in settings { + self.p.set(*s); + } + self + } + + /// Enables a single setting that is propagated down through all child [`SubCommand`]s. + /// + /// See [`AppSettings`] for a full list of possibilities and examples. + /// + /// **NOTE**: The setting is *only* propagated *down* and not up through parent commands. + /// + /// # Examples + /// + /// ```no_run + /// # use clap::{App, Arg, AppSettings}; + /// App::new("myprog") + /// .global_setting(AppSettings::SubcommandRequired) + /// # ; + /// ``` + /// [`SubCommand`]: ./struct.SubCommand.html + /// [`AppSettings`]: ./enum.AppSettings.html + pub fn global_setting(mut self, setting: AppSettings) -> Self { + self.p.set(setting); + self.p.g_settings.set(setting); + self + } + + /// Enables multiple settings which are propagated *down* through all child [`SubCommand`]s. + /// + /// See [`AppSettings`] for a full list of possibilities and examples. + /// + /// **NOTE**: The setting is *only* propagated *down* and not up through parent commands. 
+ /// + /// # Examples + /// + /// ```no_run + /// # use clap::{App, Arg, AppSettings}; + /// App::new("myprog") + /// .global_settings(&[AppSettings::SubcommandRequired, + /// AppSettings::ColoredHelp]) + /// # ; + /// ``` + /// [`SubCommand`]: ./struct.SubCommand.html + /// [`AppSettings`]: ./enum.AppSettings.html + pub fn global_settings(mut self, settings: &[AppSettings]) -> Self { + for s in settings { + self.p.set(*s); + self.p.g_settings.set(*s) + } + self + } + + /// Disables a single command, or [`SubCommand`], level setting. + /// + /// See [`AppSettings`] for a full list of possibilities and examples. + /// + /// # Examples + /// + /// ```no_run + /// # use clap::{App, AppSettings}; + /// App::new("myprog") + /// .unset_setting(AppSettings::ColorAuto) + /// # ; + /// ``` + /// [`SubCommand`]: ./struct.SubCommand.html + /// [`AppSettings`]: ./enum.AppSettings.html + pub fn unset_setting(mut self, setting: AppSettings) -> Self { + self.p.unset(setting); + self + } + + /// Disables multiple command, or [`SubCommand`], level settings. + /// + /// See [`AppSettings`] for a full list of possibilities and examples. + /// + /// # Examples + /// + /// ```no_run + /// # use clap::{App, AppSettings}; + /// App::new("myprog") + /// .unset_settings(&[AppSettings::ColorAuto, + /// AppSettings::AllowInvalidUtf8]) + /// # ; + /// ``` + /// [`SubCommand`]: ./struct.SubCommand.html + /// [`AppSettings`]: ./enum.AppSettings.html + pub fn unset_settings(mut self, settings: &[AppSettings]) -> Self { + for s in settings { + self.p.unset(*s); + } + self + } + + /// Sets the terminal width at which to wrap help messages. Defaults to `120`. Using `0` will + /// ignore terminal widths and use source formatting. + /// + /// `clap` automatically tries to determine the terminal width on Unix, Linux, macOS and Windows + /// if the `wrap_help` cargo "feature" has been used while compiling. If the terminal width + /// cannot be determined, `clap` defaults to `120`. + /// + /// **NOTE:** This setting applies globally and *not* on a per-command basis. + /// + /// **NOTE:** This setting must be set **before** any subcommands are added! + /// + /// # Platform Specific + /// + /// Only Unix, Linux, macOS and Windows support automatic determination of terminal width. + /// Even on those platforms, this setting is useful if for any reason the terminal width + /// cannot be determined. + /// + /// # Examples + /// + /// ```no_run + /// # use clap::App; + /// App::new("myprog") + /// .set_term_width(80) + /// # ; + /// ``` + pub fn set_term_width(mut self, width: usize) -> Self { + self.p.meta.term_w = Some(width); + self + } + + /// Sets the max terminal width at which to wrap help messages. Using `0` will ignore terminal + /// widths and use source formatting. + /// + /// `clap` automatically tries to determine the terminal width on Unix, Linux, macOS and Windows + /// if the `wrap_help` cargo "feature" has been used while compiling, but one might want to + /// limit the size (e.g. when the terminal is running fullscreen). + /// + /// **NOTE:** This setting applies globally and *not* on a per-command basis. + /// + /// **NOTE:** This setting must be set **before** any subcommands are added! + /// + /// # Platform Specific + /// + /// Only Unix, Linux, macOS and Windows support automatic determination of terminal width. 
+ /// + /// # Examples + /// + /// ```no_run + /// # use clap::App; + /// App::new("myprog") + /// .max_term_width(100) + /// # ; + /// ``` + pub fn max_term_width(mut self, w: usize) -> Self { + self.p.meta.max_w = Some(w); + self + } + + /// Adds an [argument] to the list of valid possibilities. + /// + /// # Examples + /// + /// ```no_run + /// # use clap::{App, Arg}; + /// App::new("myprog") + /// // Adding a single "flag" argument with a short and help text, using Arg::with_name() + /// .arg( + /// Arg::with_name("debug") + /// .short("d") + /// .help("turns on debugging mode") + /// ) + /// // Adding a single "option" argument with a short, a long, and help text using the less + /// // verbose Arg::from_usage() + /// .arg( + /// Arg::from_usage("-c --config=[CONFIG] 'Optionally sets a config file to use'") + /// ) + /// # ; + /// ``` + /// [argument]: ./struct.Arg.html + pub fn arg>>(mut self, a: A) -> Self { + self.p.add_arg(a.into()); + self + } + + /// Adds multiple [arguments] to the list of valid possibilities + /// + /// # Examples + /// + /// ```no_run + /// # use clap::{App, Arg}; + /// App::new("myprog") + /// .args( + /// &[Arg::from_usage("[debug] -d 'turns on debugging info'"), + /// Arg::with_name("input").index(1).help("the input file to use")] + /// ) + /// # ; + /// ``` + /// [arguments]: ./struct.Arg.html + pub fn args(mut self, args: &[Arg<'a, 'b>]) -> Self { + for arg in args { + self.p.add_arg_ref(arg); + } + self + } + + /// A convenience method for adding a single [argument] from a usage type string. The string + /// used follows the same rules and syntax as [`Arg::from_usage`] + /// + /// **NOTE:** The downside to using this method is that you can not set any additional + /// properties of the [`Arg`] other than what [`Arg::from_usage`] supports. + /// + /// # Examples + /// + /// ```no_run + /// # use clap::{App, Arg}; + /// App::new("myprog") + /// .arg_from_usage("-c --config= 'Sets a configuration file to use'") + /// # ; + /// ``` + /// [arguments]: ./struct.Arg.html + /// [`Arg`]: ./struct.Arg.html + /// [`Arg::from_usage`]: ./struct.Arg.html#method.from_usage + pub fn arg_from_usage(mut self, usage: &'a str) -> Self { + self.p.add_arg(Arg::from_usage(usage)); + self + } + + /// Adds multiple [arguments] at once from a usage string, one per line. See + /// [`Arg::from_usage`] for details on the syntax and rules supported. + /// + /// **NOTE:** Like [`App::arg_from_usage`] the downside is you only set properties for the + /// [`Arg`]s which [`Arg::from_usage`] supports. + /// + /// # Examples + /// + /// ```no_run + /// # use clap::{App, Arg}; + /// App::new("myprog") + /// .args_from_usage( + /// "-c --config=[FILE] 'Sets a configuration file to use' + /// [debug]... -d 'Sets the debugging level' + /// 'The input file to use'" + /// ) + /// # ; + /// ``` + /// [arguments]: ./struct.Arg.html + /// [`Arg::from_usage`]: ./struct.Arg.html#method.from_usage + /// [`App::arg_from_usage`]: ./struct.App.html#method.arg_from_usage + /// [`Arg`]: ./struct.Arg.html + pub fn args_from_usage(mut self, usage: &'a str) -> Self { + for line in usage.lines() { + let l = line.trim(); + if l.is_empty() { + continue; + } + self.p.add_arg(Arg::from_usage(l)); + } + self + } + + /// Allows adding a [`SubCommand`] alias, which function as "hidden" subcommands that + /// automatically dispatch as if this subcommand was used. 
This is more efficient, and easier + /// than creating multiple hidden subcommands as one only needs to check for the existence of + /// this command, and not all variants. + /// + /// # Examples + /// + /// ```no_run + /// # use clap::{App, Arg, SubCommand}; + /// let m = App::new("myprog") + /// .subcommand(SubCommand::with_name("test") + /// .alias("do-stuff")) + /// .get_matches_from(vec!["myprog", "do-stuff"]); + /// assert_eq!(m.subcommand_name(), Some("test")); + /// ``` + /// [`SubCommand`]: ./struct.SubCommand.html + pub fn alias>(mut self, name: S) -> Self { + if let Some(ref mut als) = self.p.meta.aliases { + als.push((name.into(), false)); + } else { + self.p.meta.aliases = Some(vec![(name.into(), false)]); + } + self + } + + /// Allows adding [`SubCommand`] aliases, which function as "hidden" subcommands that + /// automatically dispatch as if this subcommand was used. This is more efficient, and easier + /// than creating multiple hidden subcommands as one only needs to check for the existence of + /// this command, and not all variants. + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg, SubCommand}; + /// let m = App::new("myprog") + /// .subcommand(SubCommand::with_name("test") + /// .aliases(&["do-stuff", "do-tests", "tests"])) + /// .arg(Arg::with_name("input") + /// .help("the file to add") + /// .index(1) + /// .required(false)) + /// .get_matches_from(vec!["myprog", "do-tests"]); + /// assert_eq!(m.subcommand_name(), Some("test")); + /// ``` + /// [`SubCommand`]: ./struct.SubCommand.html + pub fn aliases(mut self, names: &[&'b str]) -> Self { + if let Some(ref mut als) = self.p.meta.aliases { + for n in names { + als.push((n, false)); + } + } else { + self.p.meta.aliases = Some(names.iter().map(|n| (*n, false)).collect::>()); + } + self + } + + /// Allows adding a [`SubCommand`] alias that functions exactly like those defined with + /// [`App::alias`], except that they are visible inside the help message. + /// + /// # Examples + /// + /// ```no_run + /// # use clap::{App, Arg, SubCommand}; + /// let m = App::new("myprog") + /// .subcommand(SubCommand::with_name("test") + /// .visible_alias("do-stuff")) + /// .get_matches_from(vec!["myprog", "do-stuff"]); + /// assert_eq!(m.subcommand_name(), Some("test")); + /// ``` + /// [`SubCommand`]: ./struct.SubCommand.html + /// [`App::alias`]: ./struct.App.html#method.alias + pub fn visible_alias>(mut self, name: S) -> Self { + if let Some(ref mut als) = self.p.meta.aliases { + als.push((name.into(), true)); + } else { + self.p.meta.aliases = Some(vec![(name.into(), true)]); + } + self + } + + /// Allows adding multiple [`SubCommand`] aliases that functions exactly like those defined + /// with [`App::aliases`], except that they are visible inside the help message. + /// + /// # Examples + /// + /// ```no_run + /// # use clap::{App, Arg, SubCommand}; + /// let m = App::new("myprog") + /// .subcommand(SubCommand::with_name("test") + /// .visible_aliases(&["do-stuff", "tests"])) + /// .get_matches_from(vec!["myprog", "do-stuff"]); + /// assert_eq!(m.subcommand_name(), Some("test")); + /// ``` + /// [`SubCommand`]: ./struct.SubCommand.html + /// [`App::aliases`]: ./struct.App.html#method.aliases + pub fn visible_aliases(mut self, names: &[&'b str]) -> Self { + if let Some(ref mut als) = self.p.meta.aliases { + for n in names { + als.push((n, true)); + } + } else { + self.p.meta.aliases = Some(names.iter().map(|n| (*n, true)).collect::>()); + } + self + } + + /// Adds an [`ArgGroup`] to the application. 
[`ArgGroup`]s are a family of related arguments. + /// By placing them in a logical group, you can build easier requirement and exclusion rules. + /// For instance, you can make an entire [`ArgGroup`] required, meaning that one (and *only* + /// one) argument from that group must be present at runtime. + /// + /// You can also do things such as name an [`ArgGroup`] as a conflict to another argument. + /// Meaning any of the arguments that belong to that group will cause a failure if present with + /// the conflicting argument. + /// + /// Another added benefit of [`ArgGroup`]s is that you can extract a value from a group instead + /// of determining exactly which argument was used. + /// + /// Finally, using [`ArgGroup`]s to ensure exclusion between arguments is another very common + /// use + /// + /// # Examples + /// + /// The following example demonstrates using an [`ArgGroup`] to ensure that one, and only one, + /// of the arguments from the specified group is present at runtime. + /// + /// ```no_run + /// # use clap::{App, ArgGroup}; + /// App::new("app") + /// .args_from_usage( + /// "--set-ver [ver] 'set the version manually' + /// --major 'auto increase major' + /// --minor 'auto increase minor' + /// --patch 'auto increase patch'") + /// .group(ArgGroup::with_name("vers") + /// .args(&["set-ver", "major", "minor","patch"]) + /// .required(true)) + /// # ; + /// ``` + /// [`ArgGroup`]: ./struct.ArgGroup.html + pub fn group(mut self, group: ArgGroup<'a>) -> Self { + self.p.add_group(group); + self + } + + /// Adds multiple [`ArgGroup`]s to the [`App`] at once. + /// + /// # Examples + /// + /// ```no_run + /// # use clap::{App, ArgGroup}; + /// App::new("app") + /// .args_from_usage( + /// "--set-ver [ver] 'set the version manually' + /// --major 'auto increase major' + /// --minor 'auto increase minor' + /// --patch 'auto increase patch' + /// -c [FILE] 'a config file' + /// -i [IFACE] 'an interface'") + /// .groups(&[ + /// ArgGroup::with_name("vers") + /// .args(&["set-ver", "major", "minor","patch"]) + /// .required(true), + /// ArgGroup::with_name("input") + /// .args(&["c", "i"]) + /// ]) + /// # ; + /// ``` + /// [`ArgGroup`]: ./struct.ArgGroup.html + /// [`App`]: ./struct.App.html + pub fn groups(mut self, groups: &[ArgGroup<'a>]) -> Self { + for g in groups { + self = self.group(g.into()); + } + self + } + + /// Adds a [`SubCommand`] to the list of valid possibilities. Subcommands are effectively + /// sub-[`App`]s, because they can contain their own arguments, subcommands, version, usage, + /// etc. They also function just like [`App`]s, in that they get their own auto generated help, + /// version, and usage. 
+ /// + /// # Examples + /// + /// ```no_run + /// # use clap::{App, Arg, SubCommand}; + /// App::new("myprog") + /// .subcommand(SubCommand::with_name("config") + /// .about("Controls configuration features") + /// .arg_from_usage(" 'Required configuration file to use'")) + /// # ; + /// ``` + /// [`SubCommand`]: ./struct.SubCommand.html + /// [`App`]: ./struct.App.html + pub fn subcommand(mut self, subcmd: App<'a, 'b>) -> Self { + self.p.add_subcommand(subcmd); + self + } + + /// Adds multiple subcommands to the list of valid possibilities by iterating over an + /// [`IntoIterator`] of [`SubCommand`]s + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg, SubCommand}; + /// # App::new("myprog") + /// .subcommands( vec![ + /// SubCommand::with_name("config").about("Controls configuration functionality") + /// .arg(Arg::with_name("config_file").index(1)), + /// SubCommand::with_name("debug").about("Controls debug functionality")]) + /// # ; + /// ``` + /// [`SubCommand`]: ./struct.SubCommand.html + /// [`IntoIterator`]: https://doc.rust-lang.org/std/iter/trait.IntoIterator.html + pub fn subcommands(mut self, subcmds: I) -> Self + where + I: IntoIterator>, + { + for subcmd in subcmds { + self.p.add_subcommand(subcmd); + } + self + } + + /// Allows custom ordering of [`SubCommand`]s within the help message. Subcommands with a lower + /// value will be displayed first in the help message. This is helpful when one would like to + /// emphasise frequently used subcommands, or prioritize those towards the top of the list. + /// Duplicate values **are** allowed. Subcommands with duplicate display orders will be + /// displayed in alphabetical order. + /// + /// **NOTE:** The default is 999 for all subcommands. + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, SubCommand}; + /// let m = App::new("cust-ord") + /// .subcommand(SubCommand::with_name("alpha") // typically subcommands are grouped + /// // alphabetically by name. Subcommands + /// // without a display_order have a value of + /// // 999 and are displayed alphabetically with + /// // all other 999 subcommands + /// .about("Some help and text")) + /// .subcommand(SubCommand::with_name("beta") + /// .display_order(1) // In order to force this subcommand to appear *first* + /// // all we have to do is give it a value lower than 999. + /// // Any other subcommands with a value of 1 will be displayed + /// // alphabetically with this one...then 2 values, then 3, etc. + /// .about("I should be first!")) + /// .get_matches_from(vec![ + /// "cust-ord", "--help" + /// ]); + /// ``` + /// + /// The above example displays the following help message + /// + /// ```text + /// cust-ord + /// + /// USAGE: + /// cust-ord [FLAGS] [OPTIONS] + /// + /// FLAGS: + /// -h, --help Prints help information + /// -V, --version Prints version information + /// + /// SUBCOMMANDS: + /// beta I should be first! 
+ /// alpha Some help and text + /// ``` + /// [`SubCommand`]: ./struct.SubCommand.html + pub fn display_order(mut self, ord: usize) -> Self { + self.p.meta.disp_ord = ord; + self + } + + /// Prints the full help message to [`io::stdout()`] using a [`BufWriter`] using the same + /// method as if someone ran `-h` to request the help message + /// + /// **NOTE:** clap has the ability to distinguish between "short" and "long" help messages + /// depending on if the user ran [`-h` (short)] or [`--help` (long)] + /// + /// # Examples + /// + /// ```rust + /// # use clap::App; + /// let mut app = App::new("myprog"); + /// app.print_help(); + /// ``` + /// [`io::stdout()`]: https://doc.rust-lang.org/std/io/fn.stdout.html + /// [`BufWriter`]: https://doc.rust-lang.org/std/io/struct.BufWriter.html + /// [`-h` (short)]: ./struct.Arg.html#method.help + /// [`--help` (long)]: ./struct.Arg.html#method.long_help + pub fn print_help(&mut self) -> ClapResult<()> { + // If there are global arguments, or settings we need to propagate them down to subcommands + // before parsing incase we run into a subcommand + self.p.propagate_globals(); + self.p.propagate_settings(); + self.p.derive_display_order(); + + self.p.create_help_and_version(); + let out = io::stdout(); + let mut buf_w = BufWriter::new(out.lock()); + self.write_help(&mut buf_w) + } + + /// Prints the full help message to [`io::stdout()`] using a [`BufWriter`] using the same + /// method as if someone ran `--help` to request the help message + /// + /// **NOTE:** clap has the ability to distinguish between "short" and "long" help messages + /// depending on if the user ran [`-h` (short)] or [`--help` (long)] + /// + /// # Examples + /// + /// ```rust + /// # use clap::App; + /// let mut app = App::new("myprog"); + /// app.print_long_help(); + /// ``` + /// [`io::stdout()`]: https://doc.rust-lang.org/std/io/fn.stdout.html + /// [`BufWriter`]: https://doc.rust-lang.org/std/io/struct.BufWriter.html + /// [`-h` (short)]: ./struct.Arg.html#method.help + /// [`--help` (long)]: ./struct.Arg.html#method.long_help + pub fn print_long_help(&mut self) -> ClapResult<()> { + // If there are global arguments, or settings we need to propagate them down to subcommands + // before parsing incase we run into a subcommand + self.p.propagate_globals(); + self.p.propagate_settings(); + self.p.derive_display_order(); + + self.p.create_help_and_version(); + let out = io::stdout(); + let mut buf_w = BufWriter::new(out.lock()); + self.write_long_help(&mut buf_w) + } + + /// Writes the full help message to the user to a [`io::Write`] object in the same method as if + /// the user ran `-h` + /// + /// **NOTE:** clap has the ability to distinguish between "short" and "long" help messages + /// depending on if the user ran [`-h` (short)] or [`--help` (long)] + /// + /// **NOTE:** There is a known bug where this method does not write propagated global arguments + /// or autogenerated arguments (i.e. the default help/version args). Prefer + /// [`App::write_long_help`] instead if possible! 
+ /// + /// # Examples + /// + /// ```rust + /// # use clap::App; + /// use std::io; + /// let mut app = App::new("myprog"); + /// let mut out = io::stdout(); + /// app.write_help(&mut out).expect("failed to write to stdout"); + /// ``` + /// [`io::Write`]: https://doc.rust-lang.org/std/io/trait.Write.html + /// [`-h` (short)]: ./struct.Arg.html#method.help + /// [`--help` (long)]: ./struct.Arg.html#method.long_help + pub fn write_help(&self, w: &mut W) -> ClapResult<()> { + // PENDING ISSUE: 808 + // https://github.com/kbknapp/clap-rs/issues/808 + // If there are global arguments, or settings we need to propagate them down to subcommands + // before parsing incase we run into a subcommand + // self.p.propagate_globals(); + // self.p.propagate_settings(); + // self.p.derive_display_order(); + // self.p.create_help_and_version(); + + Help::write_app_help(w, self, false) + } + + /// Writes the full help message to the user to a [`io::Write`] object in the same method as if + /// the user ran `--help` + /// + /// **NOTE:** clap has the ability to distinguish between "short" and "long" help messages + /// depending on if the user ran [`-h` (short)] or [`--help` (long)] + /// + /// # Examples + /// + /// ```rust + /// # use clap::App; + /// use std::io; + /// let mut app = App::new("myprog"); + /// let mut out = io::stdout(); + /// app.write_long_help(&mut out).expect("failed to write to stdout"); + /// ``` + /// [`io::Write`]: https://doc.rust-lang.org/std/io/trait.Write.html + /// [`-h` (short)]: ./struct.Arg.html#method.help + /// [`--help` (long)]: ./struct.Arg.html#method.long_help + pub fn write_long_help(&mut self, w: &mut W) -> ClapResult<()> { + self.p.propagate_globals(); + self.p.propagate_settings(); + self.p.derive_display_order(); + self.p.create_help_and_version(); + + Help::write_app_help(w, self, true) + } + + /// Writes the version message to the user to a [`io::Write`] object as if the user ran `-V`. 
+ /// + /// **NOTE:** clap has the ability to distinguish between "short" and "long" version messages + /// depending on if the user ran [`-V` (short)] or [`--version` (long)] + /// + /// # Examples + /// + /// ```rust + /// # use clap::App; + /// use std::io; + /// let mut app = App::new("myprog"); + /// let mut out = io::stdout(); + /// app.write_version(&mut out).expect("failed to write to stdout"); + /// ``` + /// [`io::Write`]: https://doc.rust-lang.org/std/io/trait.Write.html + /// [`-V` (short)]: ./struct.App.html#method.version + /// [`--version` (long)]: ./struct.App.html#method.long_version + pub fn write_version(&self, w: &mut W) -> ClapResult<()> { + self.p.write_version(w, false).map_err(From::from) + } + + /// Writes the version message to the user to a [`io::Write`] object + /// + /// **NOTE:** clap has the ability to distinguish between "short" and "long" version messages + /// depending on if the user ran [`-V` (short)] or [`--version` (long)] + /// + /// # Examples + /// + /// ```rust + /// # use clap::App; + /// use std::io; + /// let mut app = App::new("myprog"); + /// let mut out = io::stdout(); + /// app.write_long_version(&mut out).expect("failed to write to stdout"); + /// ``` + /// [`io::Write`]: https://doc.rust-lang.org/std/io/trait.Write.html + /// [`-V` (short)]: ./struct.App.html#method.version + /// [`--version` (long)]: ./struct.App.html#method.long_version + pub fn write_long_version(&self, w: &mut W) -> ClapResult<()> { + self.p.write_version(w, true).map_err(From::from) + } + + /// Generate a completions file for a specified shell at compile time. + /// + /// **NOTE:** to generate the file at compile time you must use a `build.rs` "Build Script" + /// + /// # Examples + /// + /// The following example generates a bash completion script via a `build.rs` script. In this + /// simple example, we'll demo a very small application with only a single subcommand and two + /// args. Real applications could be many multiple levels deep in subcommands, and have tens or + /// potentially hundreds of arguments. + /// + /// First, it helps if we separate out our `App` definition into a separate file. Whether you + /// do this as a function, or bare App definition is a matter of personal preference. + /// + /// ``` + /// // src/cli.rs + /// + /// use clap::{App, Arg, SubCommand}; + /// + /// pub fn build_cli() -> App<'static, 'static> { + /// App::new("compl") + /// .about("Tests completions") + /// .arg(Arg::with_name("file") + /// .help("some input file")) + /// .subcommand(SubCommand::with_name("test") + /// .about("tests things") + /// .arg(Arg::with_name("case") + /// .long("case") + /// .takes_value(true) + /// .help("the case to test"))) + /// } + /// ``` + /// + /// In our regular code, we can simply call this `build_cli()` function, then call + /// `get_matches()`, or any of the other normal methods directly after. For example: + /// + /// ```ignore + /// // src/main.rs + /// + /// mod cli; + /// + /// fn main() { + /// let m = cli::build_cli().get_matches(); + /// + /// // normal logic continues... + /// } + /// ``` + /// + /// Next, we set up our `Cargo.toml` to use a `build.rs` build script. + /// + /// ```toml + /// # Cargo.toml + /// build = "build.rs" + /// + /// [build-dependencies] + /// clap = "2.23" + /// ``` + /// + /// Next, we place a `build.rs` in our project root. 
+ /// + /// ```ignore + /// extern crate clap; + /// + /// use clap::Shell; + /// + /// include!("src/cli.rs"); + /// + /// fn main() { + /// let outdir = match env::var_os("OUT_DIR") { + /// None => return, + /// Some(outdir) => outdir, + /// }; + /// let mut app = build_cli(); + /// app.gen_completions("myapp", // We need to specify the bin name manually + /// Shell::Bash, // Then say which shell to build completions for + /// outdir); // Then say where write the completions to + /// } + /// ``` + /// Now, once we compile there will be a `{bin_name}.bash` file in the directory. + /// Assuming we compiled with debug mode, it would be somewhere similar to + /// `/target/debug/build/myapp-/out/myapp.bash`. + /// + /// Fish shell completions will use the file format `{bin_name}.fish` + pub fn gen_completions, S: Into>( + &mut self, + bin_name: S, + for_shell: Shell, + out_dir: T, + ) { + self.p.meta.bin_name = Some(bin_name.into()); + self.p.gen_completions(for_shell, out_dir.into()); + } + + + /// Generate a completions file for a specified shell at runtime. Until `cargo install` can + /// install extra files like a completion script, this may be used e.g. in a command that + /// outputs the contents of the completion script, to be redirected into a file by the user. + /// + /// # Examples + /// + /// Assuming a separate `cli.rs` like the [example above](./struct.App.html#method.gen_completions), + /// we can let users generate a completion script using a command: + /// + /// ```ignore + /// // src/main.rs + /// + /// mod cli; + /// use std::io; + /// + /// fn main() { + /// let matches = cli::build_cli().get_matches(); + /// + /// if matches.is_present("generate-bash-completions") { + /// cli::build_cli().gen_completions_to("myapp", Shell::Bash, &mut io::stdout()); + /// } + /// + /// // normal logic continues... + /// } + /// + /// ``` + /// + /// Usage: + /// + /// ```shell + /// $ myapp generate-bash-completions > /usr/share/bash-completion/completions/myapp.bash + /// ``` + pub fn gen_completions_to>( + &mut self, + bin_name: S, + for_shell: Shell, + buf: &mut W, + ) { + self.p.meta.bin_name = Some(bin_name.into()); + self.p.gen_completions_to(for_shell, buf); + } + + /// Starts the parsing process, upon a failed parse an error will be displayed to the user and + /// the process will exit with the appropriate error code. By default this method gets all user + /// provided arguments from [`env::args_os`] in order to allow for invalid UTF-8 code points, + /// which are legal on many platforms. + /// + /// # Examples + /// + /// ```no_run + /// # use clap::{App, Arg}; + /// let matches = App::new("myprog") + /// // Args and options go here... + /// .get_matches(); + /// ``` + /// [`env::args_os`]: https://doc.rust-lang.org/std/env/fn.args_os.html + pub fn get_matches(self) -> ArgMatches<'a> { self.get_matches_from(&mut env::args_os()) } + + /// Starts the parsing process. This method will return a [`clap::Result`] type instead of exiting + /// the process on failed parse. By default this method gets matches from [`env::args_os`] + /// + /// **NOTE:** This method WILL NOT exit when `--help` or `--version` (or short versions) are + /// used. It will return a [`clap::Error`], where the [`kind`] is a + /// [`ErrorKind::HelpDisplayed`] or [`ErrorKind::VersionDisplayed`] respectively. You must call + /// [`Error::exit`] or perform a [`std::process::exit`]. 
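+ ///
+ /// One way to honor that contract is to treat those two kinds as success. A short
+ /// sketch, where the match arms shown are merely illustrative:
+ ///
+ /// ```no_run
+ /// # use clap::{App, ErrorKind};
+ /// match App::new("myprog").get_matches_safe() {
+ ///     Ok(_matches) => { /* run the program */ }
+ ///     Err(ref e) if e.kind == ErrorKind::HelpDisplayed
+ ///         || e.kind == ErrorKind::VersionDisplayed => e.exit(),
+ ///     Err(e) => e.exit(),
+ /// }
+ /// ```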
+ /// + /// # Examples + /// + /// ```no_run + /// # use clap::{App, Arg}; + /// let matches = App::new("myprog") + /// // Args and options go here... + /// .get_matches_safe() + /// .unwrap_or_else( |e| e.exit() ); + /// ``` + /// [`env::args_os`]: https://doc.rust-lang.org/std/env/fn.args_os.html + /// [`ErrorKind::HelpDisplayed`]: ./enum.ErrorKind.html#variant.HelpDisplayed + /// [`ErrorKind::VersionDisplayed`]: ./enum.ErrorKind.html#variant.VersionDisplayed + /// [`Error::exit`]: ./struct.Error.html#method.exit + /// [`std::process::exit`]: https://doc.rust-lang.org/std/process/fn.exit.html + /// [`clap::Result`]: ./type.Result.html + /// [`clap::Error`]: ./struct.Error.html + /// [`kind`]: ./struct.Error.html + pub fn get_matches_safe(self) -> ClapResult> { + // Start the parsing + self.get_matches_from_safe(&mut env::args_os()) + } + + /// Starts the parsing process. Like [`App::get_matches`] this method does not return a [`clap::Result`] + /// and will automatically exit with an error message. This method, however, lets you specify + /// what iterator to use when performing matches, such as a [`Vec`] of your making. + /// + /// **NOTE:** The first argument will be parsed as the binary name unless + /// [`AppSettings::NoBinaryName`] is used + /// + /// # Examples + /// + /// ```no_run + /// # use clap::{App, Arg}; + /// let arg_vec = vec!["my_prog", "some", "args", "to", "parse"]; + /// + /// let matches = App::new("myprog") + /// // Args and options go here... + /// .get_matches_from(arg_vec); + /// ``` + /// [`App::get_matches`]: ./struct.App.html#method.get_matches + /// [`clap::Result`]: ./type.Result.html + /// [`Vec`]: https://doc.rust-lang.org/std/vec/struct.Vec.html + /// [`AppSettings::NoBinaryName`]: ./enum.AppSettings.html#variant.NoBinaryName + pub fn get_matches_from(mut self, itr: I) -> ArgMatches<'a> + where + I: IntoIterator, + T: Into + Clone, + { + self.get_matches_from_safe_borrow(itr).unwrap_or_else(|e| { + // Otherwise, write to stderr and exit + if e.use_stderr() { + wlnerr!("{}", e.message); + if self.p.is_set(AppSettings::WaitOnError) { + wlnerr!("\nPress [ENTER] / [RETURN] to continue..."); + let mut s = String::new(); + let i = io::stdin(); + i.lock().read_line(&mut s).unwrap(); + } + drop(self); + drop(e); + process::exit(1); + } + + drop(self); + e.exit() + }) + } + + /// Starts the parsing process. A combination of [`App::get_matches_from`], and + /// [`App::get_matches_safe`] + /// + /// **NOTE:** This method WILL NOT exit when `--help` or `--version` (or short versions) are + /// used. It will return a [`clap::Error`], where the [`kind`] is a [`ErrorKind::HelpDisplayed`] + /// or [`ErrorKind::VersionDisplayed`] respectively. You must call [`Error::exit`] or + /// perform a [`std::process::exit`] yourself. + /// + /// **NOTE:** The first argument will be parsed as the binary name unless + /// [`AppSettings::NoBinaryName`] is used + /// + /// # Examples + /// + /// ```no_run + /// # use clap::{App, Arg}; + /// let arg_vec = vec!["my_prog", "some", "args", "to", "parse"]; + /// + /// let matches = App::new("myprog") + /// // Args and options go here... 
+ /// .get_matches_from_safe(arg_vec) + /// .unwrap_or_else( |e| { panic!("An error occurs: {}", e) }); + /// ``` + /// [`App::get_matches_from`]: ./struct.App.html#method.get_matches_from + /// [`App::get_matches_safe`]: ./struct.App.html#method.get_matches_safe + /// [`ErrorKind::HelpDisplayed`]: ./enum.ErrorKind.html#variant.HelpDisplayed + /// [`ErrorKind::VersionDisplayed`]: ./enum.ErrorKind.html#variant.VersionDisplayed + /// [`Error::exit`]: ./struct.Error.html#method.exit + /// [`std::process::exit`]: https://doc.rust-lang.org/std/process/fn.exit.html + /// [`clap::Error`]: ./struct.Error.html + /// [`Error::exit`]: ./struct.Error.html#method.exit + /// [`kind`]: ./struct.Error.html + /// [`AppSettings::NoBinaryName`]: ./enum.AppSettings.html#variant.NoBinaryName + pub fn get_matches_from_safe(mut self, itr: I) -> ClapResult> + where + I: IntoIterator, + T: Into + Clone, + { + self.get_matches_from_safe_borrow(itr) + } + + /// Starts the parsing process without consuming the [`App`] struct `self`. This is normally not + /// the desired functionality, instead prefer [`App::get_matches_from_safe`] which *does* + /// consume `self`. + /// + /// **NOTE:** The first argument will be parsed as the binary name unless + /// [`AppSettings::NoBinaryName`] is used + /// + /// # Examples + /// + /// ```no_run + /// # use clap::{App, Arg}; + /// let arg_vec = vec!["my_prog", "some", "args", "to", "parse"]; + /// + /// let mut app = App::new("myprog"); + /// // Args and options go here... + /// let matches = app.get_matches_from_safe_borrow(arg_vec) + /// .unwrap_or_else( |e| { panic!("An error occurs: {}", e) }); + /// ``` + /// [`App`]: ./struct.App.html + /// [`App::get_matches_from_safe`]: ./struct.App.html#method.get_matches_from_safe + /// [`AppSettings::NoBinaryName`]: ./enum.AppSettings.html#variant.NoBinaryName + pub fn get_matches_from_safe_borrow(&mut self, itr: I) -> ClapResult> + where + I: IntoIterator, + T: Into + Clone, + { + // If there are global arguments, or settings we need to propagate them down to subcommands + // before parsing incase we run into a subcommand + if !self.p.is_set(AppSettings::Propagated) { + self.p.propagate_globals(); + self.p.propagate_settings(); + self.p.derive_display_order(); + self.p.set(AppSettings::Propagated); + } + + let mut matcher = ArgMatcher::new(); + + let mut it = itr.into_iter(); + // Get the name of the program (argument 1 of env::args()) and determine the + // actual file + // that was used to execute the program. This is because a program called + // ./target/release/my_prog -a + // will have two arguments, './target/release/my_prog', '-a' but we don't want + // to display + // the full path when displaying help messages and such + if !self.p.is_set(AppSettings::NoBinaryName) { + if let Some(name) = it.next() { + let bn_os = name.into(); + let p = Path::new(&*bn_os); + if let Some(f) = p.file_name() { + if let Some(s) = f.to_os_string().to_str() { + if self.p.meta.bin_name.is_none() { + self.p.meta.bin_name = Some(s.to_owned()); + } + } + } + } + } + + // do the real parsing + if let Err(e) = self.p.get_matches_with(&mut matcher, &mut it.peekable()) { + return Err(e); + } + + let global_arg_vec: Vec<&str> = (&self).p.global_args.iter().map(|ga| ga.b.name).collect(); + matcher.propagate_globals(&global_arg_vec); + + Ok(matcher.into()) + } +} + +#[cfg(feature = "yaml")] +impl<'a> From<&'a Yaml> for App<'a, 'a> { + fn from(mut yaml: &'a Yaml) -> Self { + use args::SubCommand; + // We WANT this to panic on error...so expect() is good. 
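+ // For orientation, a hedged sketch of the kind of YAML document this impl consumes.
+ // The keys mirror the ones handled below (`name`, `version`, `about`, `args`,
+ // `subcommands`); the concrete values are purely illustrative:
+ //
+ //     name: myapp
+ //     version: "1.0"
+ //     about: does awesome things
+ //     args:
+ //         - file:
+ //             help: some input file
+ //     subcommands:
+ //         - test:
+ //             about: tests things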
+ let mut is_sc = None; + let mut a = if let Some(name) = yaml["name"].as_str() { + App::new(name) + } else { + let yaml_hash = yaml.as_hash().unwrap(); + let sc_key = yaml_hash.keys().nth(0).unwrap(); + is_sc = Some(yaml_hash.get(sc_key).unwrap()); + App::new(sc_key.as_str().unwrap()) + }; + yaml = if let Some(sc) = is_sc { sc } else { yaml }; + + macro_rules! yaml_str { + ($a:ident, $y:ident, $i:ident) => { + if let Some(v) = $y[stringify!($i)].as_str() { + $a = $a.$i(v); + } else if $y[stringify!($i)] != Yaml::BadValue { + panic!("Failed to convert YAML value {:?} to a string", $y[stringify!($i)]); + } + }; + } + + yaml_str!(a, yaml, version); + yaml_str!(a, yaml, author); + yaml_str!(a, yaml, bin_name); + yaml_str!(a, yaml, about); + yaml_str!(a, yaml, long_about); + yaml_str!(a, yaml, before_help); + yaml_str!(a, yaml, after_help); + yaml_str!(a, yaml, template); + yaml_str!(a, yaml, usage); + yaml_str!(a, yaml, help); + yaml_str!(a, yaml, help_short); + yaml_str!(a, yaml, version_short); + yaml_str!(a, yaml, help_message); + yaml_str!(a, yaml, version_message); + yaml_str!(a, yaml, alias); + yaml_str!(a, yaml, visible_alias); + + if let Some(v) = yaml["display_order"].as_i64() { + a = a.display_order(v as usize); + } else if yaml["display_order"] != Yaml::BadValue { + panic!( + "Failed to convert YAML value {:?} to a u64", + yaml["display_order"] + ); + } + if let Some(v) = yaml["setting"].as_str() { + a = a.setting(v.parse().expect("unknown AppSetting found in YAML file")); + } else if yaml["setting"] != Yaml::BadValue { + panic!( + "Failed to convert YAML value {:?} to an AppSetting", + yaml["setting"] + ); + } + if let Some(v) = yaml["settings"].as_vec() { + for ys in v { + if let Some(s) = ys.as_str() { + a = a.setting(s.parse().expect("unknown AppSetting found in YAML file")); + } + } + } else if let Some(v) = yaml["settings"].as_str() { + a = a.setting(v.parse().expect("unknown AppSetting found in YAML file")); + } else if yaml["settings"] != Yaml::BadValue { + panic!( + "Failed to convert YAML value {:?} to a string", + yaml["settings"] + ); + } + if let Some(v) = yaml["global_setting"].as_str() { + a = a.setting(v.parse().expect("unknown AppSetting found in YAML file")); + } else if yaml["global_setting"] != Yaml::BadValue { + panic!( + "Failed to convert YAML value {:?} to an AppSetting", + yaml["setting"] + ); + } + if let Some(v) = yaml["global_settings"].as_vec() { + for ys in v { + if let Some(s) = ys.as_str() { + a = a.global_setting(s.parse().expect("unknown AppSetting found in YAML file")); + } + } + } else if let Some(v) = yaml["global_settings"].as_str() { + a = a.global_setting(v.parse().expect("unknown AppSetting found in YAML file")); + } else if yaml["global_settings"] != Yaml::BadValue { + panic!( + "Failed to convert YAML value {:?} to a string", + yaml["global_settings"] + ); + } + + macro_rules! 
vec_or_str { + ($a:ident, $y:ident, $as_vec:ident, $as_single:ident) => {{ + let maybe_vec = $y[stringify!($as_vec)].as_vec(); + if let Some(vec) = maybe_vec { + for ys in vec { + if let Some(s) = ys.as_str() { + $a = $a.$as_single(s); + } else { + panic!("Failed to convert YAML value {:?} to a string", ys); + } + } + } else { + if let Some(s) = $y[stringify!($as_vec)].as_str() { + $a = $a.$as_single(s); + } else if $y[stringify!($as_vec)] != Yaml::BadValue { + panic!("Failed to convert YAML value {:?} to either a vec or string", $y[stringify!($as_vec)]); + } + } + $a + } + }; + } + + a = vec_or_str!(a, yaml, aliases, alias); + a = vec_or_str!(a, yaml, visible_aliases, visible_alias); + + if let Some(v) = yaml["args"].as_vec() { + for arg_yaml in v { + a = a.arg(Arg::from_yaml(arg_yaml.as_hash().unwrap())); + } + } + if let Some(v) = yaml["subcommands"].as_vec() { + for sc_yaml in v { + a = a.subcommand(SubCommand::from_yaml(sc_yaml)); + } + } + if let Some(v) = yaml["groups"].as_vec() { + for ag_yaml in v { + a = a.group(ArgGroup::from(ag_yaml.as_hash().unwrap())); + } + } + + a + } +} + +impl<'a, 'b> Clone for App<'a, 'b> { + fn clone(&self) -> Self { App { p: self.p.clone() } } +} + +impl<'n, 'e> AnyArg<'n, 'e> for App<'n, 'e> { + fn name(&self) -> &'n str { + "" + } + fn overrides(&self) -> Option<&[&'e str]> { None } + fn requires(&self) -> Option<&[(Option<&'e str>, &'n str)]> { None } + fn blacklist(&self) -> Option<&[&'e str]> { None } + fn required_unless(&self) -> Option<&[&'e str]> { None } + fn val_names(&self) -> Option<&VecMap<&'e str>> { None } + fn is_set(&self, _: ArgSettings) -> bool { false } + fn val_terminator(&self) -> Option<&'e str> { None } + fn set(&mut self, _: ArgSettings) { + unreachable!("App struct does not support AnyArg::set, this is a bug!") + } + fn has_switch(&self) -> bool { false } + fn max_vals(&self) -> Option { None } + fn num_vals(&self) -> Option { None } + fn possible_vals(&self) -> Option<&[&'e str]> { None } + fn validator(&self) -> Option<&Rc StdResult<(), String>>> { None } + fn validator_os(&self) -> Option<&Rc StdResult<(), OsString>>> { None } + fn min_vals(&self) -> Option { None } + fn short(&self) -> Option { None } + fn long(&self) -> Option<&'e str> { None } + fn val_delim(&self) -> Option { None } + fn takes_value(&self) -> bool { true } + fn help(&self) -> Option<&'e str> { self.p.meta.about } + fn long_help(&self) -> Option<&'e str> { self.p.meta.long_about } + fn default_val(&self) -> Option<&'e OsStr> { None } + fn default_vals_ifs(&self) -> Option, &'e OsStr)>> { + None + } + fn env<'s>(&'s self) -> Option<(&'n OsStr, Option<&'s OsString>)> { None } + fn longest_filter(&self) -> bool { true } + fn aliases(&self) -> Option> { + if let Some(ref aliases) = self.p.meta.aliases { + let vis_aliases: Vec<_> = aliases + .iter() + .filter_map(|&(n, v)| if v { Some(n) } else { None }) + .collect(); + if vis_aliases.is_empty() { + None + } else { + Some(vis_aliases) + } + } else { + None + } + } +} + +impl<'n, 'e> fmt::Display for App<'n, 'e> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.p.meta.name) } +} diff --git a/clap/src/app/parser.rs b/clap/src/app/parser.rs new file mode 100644 index 000000000..0f1e0efe5 --- /dev/null +++ b/clap/src/app/parser.rs @@ -0,0 +1,2150 @@ +// Std +use std::ffi::{OsStr, OsString}; +use std::fmt::Display; +use std::fs::File; +use std::io::{self, BufWriter, Write}; +#[cfg(all(feature = "debug", not(any(target_os = "windows", target_arch = "wasm32"))))] +use 
std::os::unix::ffi::OsStrExt; +#[cfg(all(feature = "debug", any(target_os = "windows", target_arch = "wasm32")))] +use osstringext::OsStrExt3; +use std::path::PathBuf; +use std::slice::Iter; +use std::iter::Peekable; +use std::cell::Cell; + +// Internal +use INTERNAL_ERROR_MSG; +use INVALID_UTF8; +use SubCommand; +use app::App; +use app::help::Help; +use app::meta::AppMeta; +use app::settings::AppFlags; +use args::{AnyArg, Arg, ArgGroup, ArgMatcher, Base, FlagBuilder, OptBuilder, PosBuilder, Switched}; +use args::settings::ArgSettings; +use completions::ComplGen; +use errors::{Error, ErrorKind}; +use errors::Result as ClapResult; +use fmt::ColorWhen; +use osstringext::OsStrExt2; +use completions::Shell; +use suggestions; +use app::settings::AppSettings as AS; +use app::validator::Validator; +use app::usage; +use map::{self, VecMap}; + +#[derive(Debug, PartialEq, Copy, Clone)] +#[doc(hidden)] +pub enum ParseResult<'a> { + Flag, + Opt(&'a str), + Pos(&'a str), + MaybeHyphenValue, + MaybeNegNum, + NotFound, + ValuesDone, +} + +#[allow(missing_debug_implementations)] +#[doc(hidden)] +#[derive(Clone, Default)] +pub struct Parser<'a, 'b> +where + 'a: 'b, +{ + pub meta: AppMeta<'b>, + settings: AppFlags, + pub g_settings: AppFlags, + pub flags: Vec>, + pub opts: Vec>, + pub positionals: VecMap>, + pub subcommands: Vec>, + pub groups: Vec>, + pub global_args: Vec>, + pub required: Vec<&'a str>, + pub r_ifs: Vec<(&'a str, &'b str, &'a str)>, + pub overrides: Vec<(&'b str, &'a str)>, + help_short: Option, + version_short: Option, + cache: Option<&'a str>, + pub help_message: Option<&'a str>, + pub version_message: Option<&'a str>, + cur_idx: Cell, +} + +impl<'a, 'b> Parser<'a, 'b> +where + 'a: 'b, +{ + pub fn with_name(n: String) -> Self { + Parser { + meta: AppMeta::with_name(n), + g_settings: AppFlags::zeroed(), + cur_idx: Cell::new(0), + ..Default::default() + } + } + + pub fn help_short(&mut self, s: &str) { + let c = s.trim_left_matches(|c| c == '-') + .chars() + .nth(0) + .unwrap_or('h'); + self.help_short = Some(c); + } + + pub fn version_short(&mut self, s: &str) { + let c = s.trim_left_matches(|c| c == '-') + .chars() + .nth(0) + .unwrap_or('V'); + self.version_short = Some(c); + } + + pub fn gen_completions_to(&mut self, for_shell: Shell, buf: &mut W) { + if !self.is_set(AS::Propagated) { + self.propagate_help_version(); + self.build_bin_names(); + self.propagate_globals(); + self.propagate_settings(); + self.set(AS::Propagated); + } + + ComplGen::new(self).generate(for_shell, buf) + } + + pub fn gen_completions(&mut self, for_shell: Shell, od: OsString) { + use std::error::Error; + + let out_dir = PathBuf::from(od); + let name = &*self.meta.bin_name.as_ref().unwrap().clone(); + let file_name = match for_shell { + Shell::Bash => format!("{}.bash", name), + Shell::Fish => format!("{}.fish", name), + Shell::Zsh => format!("_{}", name), + Shell::PowerShell => format!("_{}.ps1", name), + Shell::Elvish => format!("{}.elv", name), + }; + + let mut file = match File::create(out_dir.join(file_name)) { + Err(why) => panic!("couldn't create completion file: {}", why.description()), + Ok(file) => file, + }; + self.gen_completions_to(for_shell, &mut file) + } + + #[inline] + fn app_debug_asserts(&mut self) -> bool { + assert!(self.verify_positionals()); + let should_err = self.groups.iter().all(|g| { + g.args.iter().all(|arg| { + (self.flags.iter().any(|f| &f.b.name == arg) + || self.opts.iter().any(|o| &o.b.name == arg) + || self.positionals.values().any(|p| &p.b.name == arg) + || 
self.groups.iter().any(|g| &g.name == arg)) + }) + }); + let g = self.groups.iter().find(|g| { + g.args.iter().any(|arg| { + !(self.flags.iter().any(|f| &f.b.name == arg) + || self.opts.iter().any(|o| &o.b.name == arg) + || self.positionals.values().any(|p| &p.b.name == arg) + || self.groups.iter().any(|g| &g.name == arg)) + }) + }); + assert!( + should_err, + "The group '{}' contains the arg '{}' that doesn't actually exist.", + g.unwrap().name, + g.unwrap() + .args + .iter() + .find(|arg| !(self.flags.iter().any(|f| &&f.b.name == arg) + || self.opts.iter().any(|o| &&o.b.name == arg) + || self.positionals.values().any(|p| &&p.b.name == arg) + || self.groups.iter().any(|g| &&g.name == arg))) + .unwrap() + ); + true + } + + #[inline] + fn debug_asserts(&self, a: &Arg) -> bool { + assert!( + !arg_names!(self).any(|name| name == a.b.name), + format!("Non-unique argument name: {} is already in use", a.b.name) + ); + if let Some(l) = a.s.long { + assert!( + !self.contains_long(l), + "Argument long must be unique\n\n\t--{} is already in use", + l + ); + } + if let Some(s) = a.s.short { + assert!( + !self.contains_short(s), + "Argument short must be unique\n\n\t-{} is already in use", + s + ); + } + let i = if a.index.is_none() { + (self.positionals.len() + 1) + } else { + a.index.unwrap() as usize + }; + assert!( + !self.positionals.contains_key(i), + "Argument \"{}\" has the same index as another positional \ + argument\n\n\tPerhaps try .multiple(true) to allow one positional argument \ + to take multiple values", + a.b.name + ); + assert!( + !(a.is_set(ArgSettings::Required) && a.is_set(ArgSettings::Global)), + "Global arguments cannot be required.\n\n\t'{}' is marked as \ + global and required", + a.b.name + ); + if a.b.is_set(ArgSettings::Last) { + assert!( + !self.positionals + .values() + .any(|p| p.b.is_set(ArgSettings::Last)), + "Only one positional argument may have last(true) set. Found two." + ); + assert!(a.s.long.is_none(), + "Flags or Options may not have last(true) set. {} has both a long and last(true) set.", + a.b.name); + assert!(a.s.short.is_none(), + "Flags or Options may not have last(true) set. {} has both a short and last(true) set.", + a.b.name); + } + true + } + + #[inline] + fn add_conditional_reqs(&mut self, a: &Arg<'a, 'b>) { + if let Some(ref r_ifs) = a.r_ifs { + for &(arg, val) in r_ifs { + self.r_ifs.push((arg, val, a.b.name)); + } + } + } + + #[inline] + fn add_arg_groups(&mut self, a: &Arg<'a, 'b>) { + if let Some(ref grps) = a.b.groups { + for g in grps { + let mut found = false; + if let Some(ref mut ag) = self.groups.iter_mut().find(|grp| &grp.name == g) { + ag.args.push(a.b.name); + found = true; + } + if !found { + let mut ag = ArgGroup::with_name(g); + ag.args.push(a.b.name); + self.groups.push(ag); + } + } + } + } + + #[inline] + fn add_reqs(&mut self, a: &Arg<'a, 'b>) { + if a.is_set(ArgSettings::Required) { + // If the arg is required, add all it's requirements to master required list + self.required.push(a.b.name); + if let Some(ref areqs) = a.b.requires { + for name in areqs + .iter() + .filter(|&&(val, _)| val.is_none()) + .map(|&(_, name)| name) + { + self.required.push(name); + } + } + } + } + + #[inline] + fn implied_settings(&mut self, a: &Arg<'a, 'b>) { + if a.is_set(ArgSettings::Last) { + // if an arg has `Last` set, we need to imply DontCollapseArgsInUsage so that args + // in the usage string don't get confused or left out. 
+ self.set(AS::DontCollapseArgsInUsage); + self.set(AS::ContainsLast); + } + if let Some(l) = a.s.long { + if l == "version" { + self.unset(AS::NeedsLongVersion); + } else if l == "help" { + self.unset(AS::NeedsLongHelp); + } + } + } + + // actually adds the arguments + pub fn add_arg(&mut self, a: Arg<'a, 'b>) { + // if it's global we have to clone anyways + if a.is_set(ArgSettings::Global) { + return self.add_arg_ref(&a); + } + debug_assert!(self.debug_asserts(&a)); + self.add_conditional_reqs(&a); + self.add_arg_groups(&a); + self.add_reqs(&a); + self.implied_settings(&a); + if a.index.is_some() || (a.s.short.is_none() && a.s.long.is_none()) { + let i = if a.index.is_none() { + (self.positionals.len() + 1) + } else { + a.index.unwrap() as usize + }; + self.positionals + .insert(i, PosBuilder::from_arg(a, i as u64)); + } else if a.is_set(ArgSettings::TakesValue) { + let mut ob = OptBuilder::from(a); + ob.s.unified_ord = self.flags.len() + self.opts.len(); + self.opts.push(ob); + } else { + let mut fb = FlagBuilder::from(a); + fb.s.unified_ord = self.flags.len() + self.opts.len(); + self.flags.push(fb); + } + } + // actually adds the arguments but from a borrow (which means we have to do some cloning) + pub fn add_arg_ref(&mut self, a: &Arg<'a, 'b>) { + debug_assert!(self.debug_asserts(a)); + self.add_conditional_reqs(a); + self.add_arg_groups(a); + self.add_reqs(a); + self.implied_settings(a); + if a.index.is_some() || (a.s.short.is_none() && a.s.long.is_none()) { + let i = if a.index.is_none() { + (self.positionals.len() + 1) + } else { + a.index.unwrap() as usize + }; + let pb = PosBuilder::from_arg_ref(a, i as u64); + self.positionals.insert(i, pb); + } else if a.is_set(ArgSettings::TakesValue) { + let mut ob = OptBuilder::from(a); + ob.s.unified_ord = self.flags.len() + self.opts.len(); + self.opts.push(ob); + } else { + let mut fb = FlagBuilder::from(a); + fb.s.unified_ord = self.flags.len() + self.opts.len(); + self.flags.push(fb); + } + if a.is_set(ArgSettings::Global) { + self.global_args.push(a.into()); + } + } + + pub fn add_group(&mut self, group: ArgGroup<'a>) { + if group.required { + self.required.push(group.name); + if let Some(ref reqs) = group.requires { + self.required.extend_from_slice(reqs); + } + // if let Some(ref bl) = group.conflicts { + // self.blacklist.extend_from_slice(bl); + // } + } + if self.groups.iter().any(|g| g.name == group.name) { + let grp = self.groups + .iter_mut() + .find(|g| g.name == group.name) + .expect(INTERNAL_ERROR_MSG); + grp.args.extend_from_slice(&group.args); + grp.requires = group.requires.clone(); + grp.conflicts = group.conflicts.clone(); + grp.required = group.required; + } else { + self.groups.push(group); + } + } + + pub fn add_subcommand(&mut self, mut subcmd: App<'a, 'b>) { + debugln!( + "Parser::add_subcommand: term_w={:?}, name={}", + self.meta.term_w, + subcmd.p.meta.name + ); + subcmd.p.meta.term_w = self.meta.term_w; + if subcmd.p.meta.name == "help" { + self.unset(AS::NeedsSubcommandHelp); + } + + self.subcommands.push(subcmd); + } + + pub fn propagate_settings(&mut self) { + debugln!( + "Parser::propagate_settings: self={}, g_settings={:#?}", + self.meta.name, + self.g_settings + ); + for sc in &mut self.subcommands { + debugln!( + "Parser::propagate_settings: sc={}, settings={:#?}, g_settings={:#?}", + sc.p.meta.name, + sc.p.settings, + sc.p.g_settings + ); + // We have to create a new scope in order to tell rustc the borrow of `sc` is + // done and to recursively call this method + { + let vsc = 
self.settings.is_set(AS::VersionlessSubcommands); + let gv = self.settings.is_set(AS::GlobalVersion); + + if vsc { + sc.p.set(AS::DisableVersion); + } + if gv && sc.p.meta.version.is_none() && self.meta.version.is_some() { + sc.p.set(AS::GlobalVersion); + sc.p.meta.version = Some(self.meta.version.unwrap()); + } + sc.p.settings = sc.p.settings | self.g_settings; + sc.p.g_settings = sc.p.g_settings | self.g_settings; + sc.p.meta.term_w = self.meta.term_w; + sc.p.meta.max_w = self.meta.max_w; + } + sc.p.propagate_settings(); + } + } + + #[cfg_attr(feature = "lints", allow(needless_borrow))] + pub fn derive_display_order(&mut self) { + if self.is_set(AS::DeriveDisplayOrder) { + let unified = self.is_set(AS::UnifiedHelpMessage); + for (i, o) in self.opts + .iter_mut() + .enumerate() + .filter(|&(_, ref o)| o.s.disp_ord == 999) + { + o.s.disp_ord = if unified { o.s.unified_ord } else { i }; + } + for (i, f) in self.flags + .iter_mut() + .enumerate() + .filter(|&(_, ref f)| f.s.disp_ord == 999) + { + f.s.disp_ord = if unified { f.s.unified_ord } else { i }; + } + for (i, sc) in &mut self.subcommands + .iter_mut() + .enumerate() + .filter(|&(_, ref sc)| sc.p.meta.disp_ord == 999) + { + sc.p.meta.disp_ord = i; + } + } + for sc in &mut self.subcommands { + sc.p.derive_display_order(); + } + } + + pub fn required(&self) -> Iter<&str> { self.required.iter() } + + #[cfg_attr(feature = "lints", allow(needless_borrow))] + #[inline] + pub fn has_args(&self) -> bool { + !(self.flags.is_empty() && self.opts.is_empty() && self.positionals.is_empty()) + } + + #[inline] + pub fn has_opts(&self) -> bool { !self.opts.is_empty() } + + #[inline] + pub fn has_flags(&self) -> bool { !self.flags.is_empty() } + + #[inline] + pub fn has_positionals(&self) -> bool { !self.positionals.is_empty() } + + #[inline] + pub fn has_subcommands(&self) -> bool { !self.subcommands.is_empty() } + + #[inline] + pub fn has_visible_opts(&self) -> bool { + if self.opts.is_empty() { + return false; + } + self.opts.iter().any(|o| !o.is_set(ArgSettings::Hidden)) + } + + #[inline] + pub fn has_visible_flags(&self) -> bool { + if self.flags.is_empty() { + return false; + } + self.flags.iter().any(|f| !f.is_set(ArgSettings::Hidden)) + } + + #[inline] + pub fn has_visible_positionals(&self) -> bool { + if self.positionals.is_empty() { + return false; + } + self.positionals + .values() + .any(|p| !p.is_set(ArgSettings::Hidden)) + } + + #[inline] + pub fn has_visible_subcommands(&self) -> bool { + self.has_subcommands() + && self.subcommands + .iter() + .filter(|sc| sc.p.meta.name != "help") + .any(|sc| !sc.p.is_set(AS::Hidden)) + } + + #[inline] + pub fn is_set(&self, s: AS) -> bool { self.settings.is_set(s) } + + #[inline] + pub fn set(&mut self, s: AS) { self.settings.set(s) } + + #[inline] + pub fn unset(&mut self, s: AS) { self.settings.unset(s) } + + #[cfg_attr(feature = "lints", allow(block_in_if_condition_stmt))] + pub fn verify_positionals(&mut self) -> bool { + // Because you must wait until all arguments have been supplied, this is the first chance + // to make assertions on positional argument indexes + // + // Firt we verify that the index highest supplied index, is equal to the number of + // positional arguments to verify there are no gaps (i.e. 
supplying an index of 1 and 3 + // but no 2) + if let Some((idx, p)) = self.positionals.iter().rev().next() { + assert!( + !(idx != self.positionals.len()), + "Found positional argument \"{}\" whose index is {} but there \ + are only {} positional arguments defined", + p.b.name, + idx, + self.positionals.len() + ); + } + + // Next we verify that only the highest index has a .multiple(true) (if any) + if self.positionals.values().any(|a| { + a.b.is_set(ArgSettings::Multiple) && (a.index as usize != self.positionals.len()) + }) { + let mut it = self.positionals.values().rev(); + let last = it.next().unwrap(); + let second_to_last = it.next().unwrap(); + // Either the final positional is required + // Or the second to last has a terminator or .last(true) set + let ok = last.is_set(ArgSettings::Required) + || (second_to_last.v.terminator.is_some() + || second_to_last.b.is_set(ArgSettings::Last)) + || last.is_set(ArgSettings::Last); + assert!( + ok, + "When using a positional argument with .multiple(true) that is *not the \ + last* positional argument, the last positional argument (i.e the one \ + with the highest index) *must* have .required(true) or .last(true) set." + ); + let ok = second_to_last.is_set(ArgSettings::Multiple) || last.is_set(ArgSettings::Last); + assert!( + ok, + "Only the last positional argument, or second to last positional \ + argument may be set to .multiple(true)" + ); + + let count = self.positionals + .values() + .filter(|p| p.b.settings.is_set(ArgSettings::Multiple) && p.v.num_vals.is_none()) + .count(); + let ok = count <= 1 + || (last.is_set(ArgSettings::Last) && last.is_set(ArgSettings::Multiple) + && second_to_last.is_set(ArgSettings::Multiple) + && count == 2); + assert!( + ok, + "Only one positional argument with .multiple(true) set is allowed per \ + command, unless the second one also has .last(true) set" + ); + } + + if self.is_set(AS::AllowMissingPositional) { + // Check that if a required positional argument is found, all positions with a lower + // index are also required. 
+ let mut found = false; + let mut foundx2 = false; + for p in self.positionals.values().rev() { + if foundx2 && !p.b.settings.is_set(ArgSettings::Required) { + assert!( + p.b.is_set(ArgSettings::Required), + "Found positional argument which is not required with a lower \ + index than a required positional argument by two or more: {:?} \ + index {}", + p.b.name, + p.index + ); + } else if p.b.is_set(ArgSettings::Required) && !p.b.is_set(ArgSettings::Last) { + // Args that .last(true) don't count since they can be required and have + // positionals with a lower index that aren't required + // Imagine: prog [opt1] -- + // Both of these are valid invocations: + // $ prog r1 -- r2 + // $ prog r1 o1 -- r2 + if found { + foundx2 = true; + continue; + } + found = true; + continue; + } else { + found = false; + } + } + } else { + // Check that if a required positional argument is found, all positions with a lower + // index are also required + let mut found = false; + for p in self.positionals.values().rev() { + if found { + assert!( + p.b.is_set(ArgSettings::Required), + "Found positional argument which is not required with a lower \ + index than a required positional argument: {:?} index {}", + p.b.name, + p.index + ); + } else if p.b.is_set(ArgSettings::Required) && !p.b.is_set(ArgSettings::Last) { + // Args that .last(true) don't count since they can be required and have + // positionals with a lower index that aren't required + // Imagine: prog [opt1] -- + // Both of these are valid invocations: + // $ prog r1 -- r2 + // $ prog r1 o1 -- r2 + found = true; + continue; + } + } + } + if self.positionals + .values() + .any(|p| p.b.is_set(ArgSettings::Last) && p.b.is_set(ArgSettings::Required)) + && self.has_subcommands() && !self.is_set(AS::SubcommandsNegateReqs) + { + panic!( + "Having a required positional argument with .last(true) set *and* child \ + subcommands without setting SubcommandsNegateReqs isn't compatible." 
+ ); + } + + true + } + + pub fn propagate_globals(&mut self) { + for sc in &mut self.subcommands { + // We have to create a new scope in order to tell rustc the borrow of `sc` is + // done and to recursively call this method + { + for a in &self.global_args { + sc.p.add_arg_ref(a); + } + } + sc.p.propagate_globals(); + } + } + + // Checks if the arg matches a subcommand name, or any of it's aliases (if defined) + fn possible_subcommand(&self, arg_os: &OsStr) -> (bool, Option<&str>) { + #[cfg(not(any(target_os = "windows", target_arch = "wasm32")))] + use std::os::unix::ffi::OsStrExt; + #[cfg(any(target_os = "windows", target_arch = "wasm32"))] + use osstringext::OsStrExt3; + debugln!("Parser::possible_subcommand: arg={:?}", arg_os); + fn starts(h: &str, n: &OsStr) -> bool { + let n_bytes = n.as_bytes(); + let h_bytes = OsStr::new(h).as_bytes(); + + h_bytes.starts_with(n_bytes) + } + + if self.is_set(AS::ArgsNegateSubcommands) && self.is_set(AS::ValidArgFound) { + return (false, None); + } + if !self.is_set(AS::InferSubcommands) { + if let Some(sc) = find_subcmd!(self, arg_os) { + return (true, Some(&sc.p.meta.name)); + } + } else { + let v = self.subcommands + .iter() + .filter(|s| { + starts(&s.p.meta.name[..], &*arg_os) + || (s.p.meta.aliases.is_some() + && s.p + .meta + .aliases + .as_ref() + .unwrap() + .iter() + .filter(|&&(a, _)| starts(a, &*arg_os)) + .count() == 1) + }) + .map(|sc| &sc.p.meta.name) + .collect::>(); + + for sc in &v { + if OsStr::new(sc) == arg_os { + return (true, Some(sc)); + } + } + + if v.len() == 1 { + return (true, Some(v[0])); + } + } + (false, None) + } + + fn parse_help_subcommand(&self, it: &mut I) -> ClapResult> + where + I: Iterator, + T: Into, + { + debugln!("Parser::parse_help_subcommand;"); + let cmds: Vec = it.map(|c| c.into()).collect(); + let mut help_help = false; + let mut bin_name = self.meta + .bin_name + .as_ref() + .unwrap_or(&self.meta.name) + .clone(); + let mut sc = { + let mut sc: &Parser = self; + for (i, cmd) in cmds.iter().enumerate() { + if &*cmd.to_string_lossy() == "help" { + // cmd help help + help_help = true; + } + if let Some(c) = sc.subcommands + .iter() + .find(|s| &*s.p.meta.name == cmd) + .map(|sc| &sc.p) + { + sc = c; + if i == cmds.len() - 1 { + break; + } + } else if let Some(c) = sc.subcommands + .iter() + .find(|s| { + if let Some(ref als) = s.p.meta.aliases { + als.iter().any(|&(a, _)| a == &*cmd.to_string_lossy()) + } else { + false + } + }) + .map(|sc| &sc.p) + { + sc = c; + if i == cmds.len() - 1 { + break; + } + } else { + return Err(Error::unrecognized_subcommand( + cmd.to_string_lossy().into_owned(), + self.meta.bin_name.as_ref().unwrap_or(&self.meta.name), + self.color(), + )); + } + bin_name = format!("{} {}", bin_name, &*sc.meta.name); + } + sc.clone() + }; + if help_help { + let mut pb = PosBuilder::new("subcommand", 1); + pb.b.help = Some("The subcommand whose help message to display"); + pb.set(ArgSettings::Multiple); + sc.positionals.insert(1, pb); + sc.settings = sc.settings | self.g_settings; + } else { + sc.create_help_and_version(); + } + if sc.meta.bin_name != self.meta.bin_name { + sc.meta.bin_name = Some(format!("{} {}", bin_name, sc.meta.name)); + } + Err(sc._help(false)) + } + + // allow wrong self convention due to self.valid_neg_num = true and it's a private method + #[cfg_attr(feature = "lints", allow(wrong_self_convention))] + fn is_new_arg(&mut self, arg_os: &OsStr, needs_val_of: ParseResult) -> bool { + debugln!("Parser::is_new_arg:{:?}:{:?}", arg_os, needs_val_of); + let app_wide_settings 
= if self.is_set(AS::AllowLeadingHyphen) { + true + } else if self.is_set(AS::AllowNegativeNumbers) { + let a = arg_os.to_string_lossy(); + if a.parse::().is_ok() || a.parse::().is_ok() { + self.set(AS::ValidNegNumFound); + true + } else { + false + } + } else { + false + }; + let arg_allows_tac = match needs_val_of { + ParseResult::Opt(name) => { + let o = self.opts + .iter() + .find(|o| o.b.name == name) + .expect(INTERNAL_ERROR_MSG); + (o.is_set(ArgSettings::AllowLeadingHyphen) || app_wide_settings) + } + ParseResult::Pos(name) => { + let p = self.positionals + .values() + .find(|p| p.b.name == name) + .expect(INTERNAL_ERROR_MSG); + (p.is_set(ArgSettings::AllowLeadingHyphen) || app_wide_settings) + } + ParseResult::ValuesDone => return true, + _ => false, + }; + debugln!("Parser::is_new_arg: arg_allows_tac={:?}", arg_allows_tac); + + // Is this a new argument, or values from a previous option? + let mut ret = if arg_os.starts_with(b"--") { + debugln!("Parser::is_new_arg: -- found"); + if arg_os.len() == 2 && !arg_allows_tac { + return true; // We have to return true so override everything else + } else if arg_allows_tac { + return false; + } + true + } else if arg_os.starts_with(b"-") { + debugln!("Parser::is_new_arg: - found"); + // a singe '-' by itself is a value and typically means "stdin" on unix systems + !(arg_os.len() == 1) + } else { + debugln!("Parser::is_new_arg: probably value"); + false + }; + + ret = ret && !arg_allows_tac; + + debugln!("Parser::is_new_arg: starts_new_arg={:?}", ret); + ret + } + + // The actual parsing function + #[cfg_attr(feature = "lints", allow(while_let_on_iterator, collapsible_if))] + pub fn get_matches_with( + &mut self, + matcher: &mut ArgMatcher<'a>, + it: &mut Peekable, + ) -> ClapResult<()> + where + I: Iterator, + T: Into + Clone, + { + debugln!("Parser::get_matches_with;"); + // Verify all positional assertions pass + debug_assert!(self.app_debug_asserts()); + if self.positionals.values().any(|a| { + a.b.is_set(ArgSettings::Multiple) && (a.index as usize != self.positionals.len()) + }) + && self.positionals + .values() + .last() + .map_or(false, |p| !p.is_set(ArgSettings::Last)) + { + self.settings.set(AS::LowIndexMultiplePositional); + } + let has_args = self.has_args(); + + // Next we create the `--help` and `--version` arguments and add them if + // necessary + self.create_help_and_version(); + + let mut subcmd_name: Option = None; + let mut needs_val_of: ParseResult<'a> = ParseResult::NotFound; + let mut pos_counter = 1; + while let Some(arg) = it.next() { + let arg_os = arg.into(); + debugln!( + "Parser::get_matches_with: Begin parsing '{:?}' ({:?})", + arg_os, + &*arg_os.as_bytes() + ); + + self.unset(AS::ValidNegNumFound); + // Is this a new argument, or values from a previous option? + let starts_new_arg = self.is_new_arg(&arg_os, needs_val_of); + if !self.is_set(AS::TrailingValues) && arg_os.starts_with(b"--") && arg_os.len() == 2 + && starts_new_arg + { + debugln!("Parser::get_matches_with: setting TrailingVals=true"); + self.set(AS::TrailingValues); + continue; + } + + // Has the user already passed '--'? 
Meaning only positional args follow + if !self.is_set(AS::TrailingValues) { + // Does the arg match a subcommand name, or any of it's aliases (if defined) + { + match needs_val_of { + ParseResult::Opt(_) | ParseResult::Pos(_) => (), + _ => { + let (is_match, sc_name) = self.possible_subcommand(&arg_os); + debugln!( + "Parser::get_matches_with: possible_sc={:?}, sc={:?}", + is_match, + sc_name + ); + if is_match { + let sc_name = sc_name.expect(INTERNAL_ERROR_MSG); + if sc_name == "help" && self.is_set(AS::NeedsSubcommandHelp) { + self.parse_help_subcommand(it)?; + } + subcmd_name = Some(sc_name.to_owned()); + break; + } + } + } + } + + if starts_new_arg { + let check_all = self.is_set(AS::AllArgsOverrideSelf); + { + let any_arg = find_any_by_name!(self, self.cache.unwrap_or("")); + matcher.process_arg_overrides( + any_arg, + &mut self.overrides, + &mut self.required, + check_all, + ); + } + + if arg_os.starts_with(b"--") { + needs_val_of = self.parse_long_arg(matcher, &arg_os)?; + debugln!( + "Parser:get_matches_with: After parse_long_arg {:?}", + needs_val_of + ); + match needs_val_of { + ParseResult::Flag | ParseResult::Opt(..) | ParseResult::ValuesDone => { + continue + } + _ => (), + } + } else if arg_os.starts_with(b"-") && arg_os.len() != 1 { + // Try to parse short args like normal, if AllowLeadingHyphen or + // AllowNegativeNumbers is set, parse_short_arg will *not* throw + // an error, and instead return Ok(None) + needs_val_of = self.parse_short_arg(matcher, &arg_os)?; + // If it's None, we then check if one of those two AppSettings was set + debugln!( + "Parser:get_matches_with: After parse_short_arg {:?}", + needs_val_of + ); + match needs_val_of { + ParseResult::MaybeNegNum => { + if !(arg_os.to_string_lossy().parse::().is_ok() + || arg_os.to_string_lossy().parse::().is_ok()) + { + return Err(Error::unknown_argument( + &*arg_os.to_string_lossy(), + "", + &*usage::create_error_usage(self, matcher, None), + self.color(), + )); + } + } + ParseResult::Opt(..) 
| ParseResult::Flag | ParseResult::ValuesDone => { + continue + } + _ => (), + } + } + } else { + if let ParseResult::Opt(name) = needs_val_of { + // Check to see if parsing a value from a previous arg + let arg = self.opts + .iter() + .find(|o| o.b.name == name) + .expect(INTERNAL_ERROR_MSG); + // get the OptBuilder so we can check the settings + needs_val_of = self.add_val_to_arg(arg, &arg_os, matcher)?; + // get the next value from the iterator + continue; + } + } + } + + if !(self.is_set(AS::ArgsNegateSubcommands) && self.is_set(AS::ValidArgFound)) + && !self.is_set(AS::InferSubcommands) && !self.is_set(AS::AllowExternalSubcommands) + { + if let Some(cdate) = + suggestions::did_you_mean(&*arg_os.to_string_lossy(), sc_names!(self)) + { + return Err(Error::invalid_subcommand( + arg_os.to_string_lossy().into_owned(), + cdate, + self.meta.bin_name.as_ref().unwrap_or(&self.meta.name), + &*usage::create_error_usage(self, matcher, None), + self.color(), + )); + } + } + + let low_index_mults = self.is_set(AS::LowIndexMultiplePositional) + && pos_counter == (self.positionals.len() - 1); + let missing_pos = self.is_set(AS::AllowMissingPositional) + && (pos_counter == (self.positionals.len() - 1) + && !self.is_set(AS::TrailingValues)); + debugln!( + "Parser::get_matches_with: Positional counter...{}", + pos_counter + ); + debugln!( + "Parser::get_matches_with: Low index multiples...{:?}", + low_index_mults + ); + if low_index_mults || missing_pos { + if let Some(na) = it.peek() { + let n = (*na).clone().into(); + needs_val_of = if needs_val_of != ParseResult::ValuesDone { + if let Some(p) = self.positionals.get(pos_counter) { + ParseResult::Pos(p.b.name) + } else { + ParseResult::ValuesDone + } + } else { + ParseResult::ValuesDone + }; + let sc_match = { self.possible_subcommand(&n).0 }; + if self.is_new_arg(&n, needs_val_of) || sc_match + || suggestions::did_you_mean(&n.to_string_lossy(), sc_names!(self)) + .is_some() + { + debugln!("Parser::get_matches_with: Bumping the positional counter..."); + pos_counter += 1; + } + } else { + debugln!("Parser::get_matches_with: Bumping the positional counter..."); + pos_counter += 1; + } + } else if (self.is_set(AS::AllowMissingPositional) && self.is_set(AS::TrailingValues)) + || (self.is_set(AS::ContainsLast) && self.is_set(AS::TrailingValues)) + { + // Came to -- and one postional has .last(true) set, so we go immediately + // to the last (highest index) positional + debugln!("Parser::get_matches_with: .last(true) and --, setting last pos"); + pos_counter = self.positionals.len(); + } + if let Some(p) = self.positionals.get(pos_counter) { + if p.is_set(ArgSettings::Last) && !self.is_set(AS::TrailingValues) { + return Err(Error::unknown_argument( + &*arg_os.to_string_lossy(), + "", + &*usage::create_error_usage(self, matcher, None), + self.color(), + )); + } + if !self.is_set(AS::TrailingValues) + && (self.is_set(AS::TrailingVarArg) && pos_counter == self.positionals.len()) + { + self.settings.set(AS::TrailingValues); + } + if self.cache.map_or(true, |name| name != p.b.name) { + let check_all = self.is_set(AS::AllArgsOverrideSelf); + { + let any_arg = find_any_by_name!(self, self.cache.unwrap_or("")); + matcher.process_arg_overrides( + any_arg, + &mut self.overrides, + &mut self.required, + check_all, + ); + } + self.cache = Some(p.b.name); + } + let _ = self.add_val_to_arg(p, &arg_os, matcher)?; + + matcher.inc_occurrence_of(p.b.name); + let _ = self.groups_for_arg(p.b.name) + .and_then(|vec| Some(matcher.inc_occurrences_of(&*vec))); + + 
self.settings.set(AS::ValidArgFound); + // Only increment the positional counter if it doesn't allow multiples + if !p.b.settings.is_set(ArgSettings::Multiple) { + pos_counter += 1; + } + self.settings.set(AS::ValidArgFound); + } else if self.is_set(AS::AllowExternalSubcommands) { + // Get external subcommand name + let sc_name = match arg_os.to_str() { + Some(s) => s.to_string(), + None => { + if !self.is_set(AS::StrictUtf8) { + return Err(Error::invalid_utf8( + &*usage::create_error_usage(self, matcher, None), + self.color(), + )); + } + arg_os.to_string_lossy().into_owned() + } + }; + + // Collect the external subcommand args + let mut sc_m = ArgMatcher::new(); + while let Some(v) = it.next() { + let a = v.into(); + if a.to_str().is_none() && !self.is_set(AS::StrictUtf8) { + return Err(Error::invalid_utf8( + &*usage::create_error_usage(self, matcher, None), + self.color(), + )); + } + sc_m.add_val_to("", &a); + } + + matcher.subcommand(SubCommand { + name: sc_name, + matches: sc_m.into(), + }); + } else if !((self.is_set(AS::AllowLeadingHyphen) + || self.is_set(AS::AllowNegativeNumbers)) + && arg_os.starts_with(b"-")) + && !self.is_set(AS::InferSubcommands) + { + return Err(Error::unknown_argument( + &*arg_os.to_string_lossy(), + "", + &*usage::create_error_usage(self, matcher, None), + self.color(), + )); + } else if !has_args || self.is_set(AS::InferSubcommands) && self.has_subcommands() { + if let Some(cdate) = + suggestions::did_you_mean(&*arg_os.to_string_lossy(), sc_names!(self)) + { + return Err(Error::invalid_subcommand( + arg_os.to_string_lossy().into_owned(), + cdate, + self.meta.bin_name.as_ref().unwrap_or(&self.meta.name), + &*usage::create_error_usage(self, matcher, None), + self.color(), + )); + } else { + return Err(Error::unrecognized_subcommand( + arg_os.to_string_lossy().into_owned(), + self.meta.bin_name.as_ref().unwrap_or(&self.meta.name), + self.color(), + )); + } + } else { + return Err(Error::unknown_argument( + &*arg_os.to_string_lossy(), + "", + &*usage::create_error_usage(self, matcher, None), + self.color(), + )); + } + } + + if let Some(ref pos_sc_name) = subcmd_name { + let sc_name = { + find_subcmd!(self, pos_sc_name) + .expect(INTERNAL_ERROR_MSG) + .p + .meta + .name + .clone() + }; + self.parse_subcommand(&*sc_name, matcher, it)?; + } else if self.is_set(AS::SubcommandRequired) { + let bn = self.meta.bin_name.as_ref().unwrap_or(&self.meta.name); + return Err(Error::missing_subcommand( + bn, + &usage::create_error_usage(self, matcher, None), + self.color(), + )); + } else if self.is_set(AS::SubcommandRequiredElseHelp) { + debugln!("Parser::get_matches_with: SubcommandRequiredElseHelp=true"); + let mut out = vec![]; + self.write_help_err(&mut out)?; + return Err(Error { + message: String::from_utf8_lossy(&*out).into_owned(), + kind: ErrorKind::MissingArgumentOrSubcommand, + info: None, + }); + } + + // In case the last arg was new, we need to process it's overrides + let check_all = self.is_set(AS::AllArgsOverrideSelf); + { + let any_arg = find_any_by_name!(self, self.cache.unwrap_or("")); + matcher.process_arg_overrides( + any_arg, + &mut self.overrides, + &mut self.required, + check_all, + ); + } + + self.remove_overrides(matcher); + + Validator::new(self).validate(needs_val_of, subcmd_name, matcher) + } + + fn remove_overrides(&mut self, matcher: &mut ArgMatcher) { + debugln!("Parser::remove_overrides:{:?};", self.overrides); + for &(overr, name) in &self.overrides { + debugln!("Parser::remove_overrides:iter:({},{});", overr, name); + if 
matcher.is_present(overr) { + debugln!( + "Parser::remove_overrides:iter:({},{}): removing {};", + overr, + name, + name + ); + matcher.remove(name); + for i in (0..self.required.len()).rev() { + debugln!( + "Parser::remove_overrides:iter:({},{}): removing required {};", + overr, + name, + name + ); + if self.required[i] == name { + self.required.swap_remove(i); + break; + } + } + } + } + } + + fn propagate_help_version(&mut self) { + debugln!("Parser::propagate_help_version;"); + self.create_help_and_version(); + for sc in &mut self.subcommands { + sc.p.propagate_help_version(); + } + } + + fn build_bin_names(&mut self) { + debugln!("Parser::build_bin_names;"); + for sc in &mut self.subcommands { + debug!("Parser::build_bin_names:iter: bin_name set..."); + if sc.p.meta.bin_name.is_none() { + sdebugln!("No"); + let bin_name = format!( + "{}{}{}", + self.meta + .bin_name + .as_ref() + .unwrap_or(&self.meta.name.clone()), + if self.meta.bin_name.is_some() { + " " + } else { + "" + }, + &*sc.p.meta.name + ); + debugln!( + "Parser::build_bin_names:iter: Setting bin_name of {} to {}", + self.meta.name, + bin_name + ); + sc.p.meta.bin_name = Some(bin_name); + } else { + sdebugln!("yes ({:?})", sc.p.meta.bin_name); + } + debugln!( + "Parser::build_bin_names:iter: Calling build_bin_names from...{}", + sc.p.meta.name + ); + sc.p.build_bin_names(); + } + } + + fn parse_subcommand( + &mut self, + sc_name: &str, + matcher: &mut ArgMatcher<'a>, + it: &mut Peekable, + ) -> ClapResult<()> + where + I: Iterator, + T: Into + Clone, + { + use std::fmt::Write; + debugln!("Parser::parse_subcommand;"); + let mut mid_string = String::new(); + if !self.is_set(AS::SubcommandsNegateReqs) { + let mut hs: Vec<&str> = self.required.iter().map(|n| &**n).collect(); + for k in matcher.arg_names() { + hs.push(k); + } + let reqs = usage::get_required_usage_from(self, &hs, Some(matcher), None, false); + + for s in &reqs { + write!(&mut mid_string, " {}", s).expect(INTERNAL_ERROR_MSG); + } + } + mid_string.push_str(" "); + if let Some(ref mut sc) = self.subcommands + .iter_mut() + .find(|s| s.p.meta.name == sc_name) + { + let mut sc_matcher = ArgMatcher::new(); + // bin_name should be parent's bin_name + [] + the sc's name separated by + // a space + sc.p.meta.usage = Some(format!( + "{}{}{}", + self.meta.bin_name.as_ref().unwrap_or(&String::new()), + if self.meta.bin_name.is_some() { + &*mid_string + } else { + "" + }, + &*sc.p.meta.name + )); + sc.p.meta.bin_name = Some(format!( + "{}{}{}", + self.meta.bin_name.as_ref().unwrap_or(&String::new()), + if self.meta.bin_name.is_some() { + " " + } else { + "" + }, + &*sc.p.meta.name + )); + debugln!( + "Parser::parse_subcommand: About to parse sc={}", + sc.p.meta.name + ); + debugln!("Parser::parse_subcommand: sc settings={:#?}", sc.p.settings); + sc.p.get_matches_with(&mut sc_matcher, it)?; + matcher.subcommand(SubCommand { + name: sc.p.meta.name.clone(), + matches: sc_matcher.into(), + }); + } + Ok(()) + } + + pub fn groups_for_arg(&self, name: &str) -> Option> { + debugln!("Parser::groups_for_arg: name={}", name); + + if self.groups.is_empty() { + debugln!("Parser::groups_for_arg: No groups defined"); + return None; + } + let mut res = vec![]; + debugln!("Parser::groups_for_arg: Searching through groups..."); + for grp in &self.groups { + for a in &grp.args { + if a == &name { + sdebugln!("\tFound '{}'", grp.name); + res.push(&*grp.name); + } + } + } + if res.is_empty() { + return None; + } + + Some(res) + } + + pub fn args_in_group(&self, group: &str) -> Vec { + let mut 
g_vec = vec![]; + let mut args = vec![]; + + for n in &self.groups + .iter() + .find(|g| g.name == group) + .expect(INTERNAL_ERROR_MSG) + .args + { + if let Some(f) = self.flags.iter().find(|f| &f.b.name == n) { + args.push(f.to_string()); + } else if let Some(f) = self.opts.iter().find(|o| &o.b.name == n) { + args.push(f.to_string()); + } else if let Some(p) = self.positionals.values().find(|p| &p.b.name == n) { + args.push(p.b.name.to_owned()); + } else { + g_vec.push(*n); + } + } + + for av in g_vec.iter().map(|g| self.args_in_group(g)) { + args.extend(av); + } + args.dedup(); + args.iter().map(ToOwned::to_owned).collect() + } + + pub fn arg_names_in_group(&self, group: &str) -> Vec<&'a str> { + let mut g_vec = vec![]; + let mut args = vec![]; + + for n in &self.groups + .iter() + .find(|g| g.name == group) + .expect(INTERNAL_ERROR_MSG) + .args + { + if self.groups.iter().any(|g| g.name == *n) { + args.extend(self.arg_names_in_group(n)); + g_vec.push(*n); + } else if !args.contains(n) { + args.push(*n); + } + } + + args.iter().map(|s| *s).collect() + } + + pub fn create_help_and_version(&mut self) { + debugln!("Parser::create_help_and_version;"); + // name is "hclap_help" because flags are sorted by name + if !self.contains_long("help") { + debugln!("Parser::create_help_and_version: Building --help"); + if self.help_short.is_none() && !self.contains_short('h') { + self.help_short = Some('h'); + } + let arg = FlagBuilder { + b: Base { + name: "hclap_help", + help: self.help_message.or(Some("Prints help information")), + ..Default::default() + }, + s: Switched { + short: self.help_short, + long: Some("help"), + ..Default::default() + }, + }; + self.flags.push(arg); + } + if !self.is_set(AS::DisableVersion) && !self.contains_long("version") { + debugln!("Parser::create_help_and_version: Building --version"); + if self.version_short.is_none() && !self.contains_short('V') { + self.version_short = Some('V'); + } + // name is "vclap_version" because flags are sorted by name + let arg = FlagBuilder { + b: Base { + name: "vclap_version", + help: self.version_message.or(Some("Prints version information")), + ..Default::default() + }, + s: Switched { + short: self.version_short, + long: Some("version"), + ..Default::default() + }, + }; + self.flags.push(arg); + } + if !self.subcommands.is_empty() && !self.is_set(AS::DisableHelpSubcommand) + && self.is_set(AS::NeedsSubcommandHelp) + { + debugln!("Parser::create_help_and_version: Building help"); + self.subcommands.push( + App::new("help") + .about("Prints this message or the help of the given subcommand(s)"), + ); + } + } + + // Retrieves the names of all args the user has supplied thus far, except required ones + // because those will be listed in self.required + fn check_for_help_and_version_str(&self, arg: &OsStr) -> ClapResult<()> { + debugln!("Parser::check_for_help_and_version_str;"); + debug!( + "Parser::check_for_help_and_version_str: Checking if --{} is help or version...", + arg.to_str().unwrap() + ); + if arg == "help" && self.is_set(AS::NeedsLongHelp) { + sdebugln!("Help"); + return Err(self._help(true)); + } + if arg == "version" && self.is_set(AS::NeedsLongVersion) { + sdebugln!("Version"); + return Err(self._version(true)); + } + sdebugln!("Neither"); + + Ok(()) + } + + fn check_for_help_and_version_char(&self, arg: char) -> ClapResult<()> { + debugln!("Parser::check_for_help_and_version_char;"); + debug!( + "Parser::check_for_help_and_version_char: Checking if -{} is help or version...", + arg + ); + if let Some(h) = self.help_short 
{ + if arg == h && self.is_set(AS::NeedsLongHelp) { + sdebugln!("Help"); + return Err(self._help(false)); + } + } + if let Some(v) = self.version_short { + if arg == v && self.is_set(AS::NeedsLongVersion) { + sdebugln!("Version"); + return Err(self._version(false)); + } + } + sdebugln!("Neither"); + Ok(()) + } + + fn use_long_help(&self) -> bool { + // In this case, both must be checked. This allows the retention of + // original formatting, but also ensures that the actual -h or --help + // specified by the user is sent through. If HiddenShortHelp is not included, + // then items specified with hidden_short_help will also be hidden. + let should_long = |v: &Base| { + v.long_help.is_some() || + v.is_set(ArgSettings::HiddenLongHelp) || + v.is_set(ArgSettings::HiddenShortHelp) + }; + + self.meta.long_about.is_some() + || self.flags.iter().any(|f| should_long(&f.b)) + || self.opts.iter().any(|o| should_long(&o.b)) + || self.positionals.values().any(|p| should_long(&p.b)) + || self.subcommands + .iter() + .any(|s| s.p.meta.long_about.is_some()) + } + + fn _help(&self, mut use_long: bool) -> Error { + debugln!("Parser::_help: use_long={:?}", use_long); + use_long = use_long && self.use_long_help(); + let mut buf = vec![]; + match Help::write_parser_help(&mut buf, self, use_long) { + Err(e) => e, + _ => Error { + message: String::from_utf8(buf).unwrap_or_default(), + kind: ErrorKind::HelpDisplayed, + info: None, + }, + } + } + + fn _version(&self, use_long: bool) -> Error { + debugln!("Parser::_version: "); + let out = io::stdout(); + let mut buf_w = BufWriter::new(out.lock()); + match self.print_version(&mut buf_w, use_long) { + Err(e) => e, + _ => Error { + message: String::new(), + kind: ErrorKind::VersionDisplayed, + info: None, + }, + } + } + + fn parse_long_arg( + &mut self, + matcher: &mut ArgMatcher<'a>, + full_arg: &OsStr, + ) -> ClapResult> { + // maybe here lifetime should be 'a + debugln!("Parser::parse_long_arg;"); + + // Update the current index + self.cur_idx.set(self.cur_idx.get() + 1); + + let mut val = None; + debug!("Parser::parse_long_arg: Does it contain '='..."); + let arg = if full_arg.contains_byte(b'=') { + let (p0, p1) = full_arg.trim_left_matches(b'-').split_at_byte(b'='); + sdebugln!("Yes '{:?}'", p1); + val = Some(p1); + p0 + } else { + sdebugln!("No"); + full_arg.trim_left_matches(b'-') + }; + + if let Some(opt) = find_opt_by_long!(@os self, arg) { + debugln!( + "Parser::parse_long_arg: Found valid opt '{}'", + opt.to_string() + ); + self.settings.set(AS::ValidArgFound); + let ret = self.parse_opt(val, opt, val.is_some(), matcher)?; + if self.cache.map_or(true, |name| name != opt.b.name) { + self.cache = Some(opt.b.name); + } + + return Ok(ret); + } else if let Some(flag) = find_flag_by_long!(@os self, arg) { + debugln!( + "Parser::parse_long_arg: Found valid flag '{}'", + flag.to_string() + ); + self.settings.set(AS::ValidArgFound); + // Only flags could be help or version, and we need to check the raw long + // so this is the first point to check + self.check_for_help_and_version_str(arg)?; + + self.parse_flag(flag, matcher)?; + + // Handle conflicts, requirements, etc. 
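+ // e.g. `--verbose` reaches this point with its occurrence already recorded by
+ // parse_flag above; only the cache of the most recently parsed arg is updated below.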
+ if self.cache.map_or(true, |name| name != flag.b.name) { + self.cache = Some(flag.b.name); + } + + return Ok(ParseResult::Flag); + } else if self.is_set(AS::AllowLeadingHyphen) { + return Ok(ParseResult::MaybeHyphenValue); + } else if self.is_set(AS::ValidNegNumFound) { + return Ok(ParseResult::MaybeNegNum); + } + + debugln!("Parser::parse_long_arg: Didn't match anything"); + self.did_you_mean_error(arg.to_str().expect(INVALID_UTF8), matcher) + .map(|_| ParseResult::NotFound) + } + + #[cfg_attr(feature = "lints", allow(len_zero))] + fn parse_short_arg( + &mut self, + matcher: &mut ArgMatcher<'a>, + full_arg: &OsStr, + ) -> ClapResult> { + debugln!("Parser::parse_short_arg: full_arg={:?}", full_arg); + let arg_os = full_arg.trim_left_matches(b'-'); + let arg = arg_os.to_string_lossy(); + + // If AllowLeadingHyphen is set, we want to ensure `-val` gets parsed as `-val` and not + // `-v` `-a` `-l` assuming `v` `a` and `l` are all, or mostly, valid shorts. + if self.is_set(AS::AllowLeadingHyphen) { + if arg.chars().any(|c| !self.contains_short(c)) { + debugln!( + "Parser::parse_short_arg: LeadingHyphenAllowed yet -{} isn't valid", + arg + ); + return Ok(ParseResult::MaybeHyphenValue); + } + } else if self.is_set(AS::ValidNegNumFound) { + // TODO: Add docs about having AllowNegativeNumbers and `-2` as a valid short + // May be better to move this to *after* not finding a valid flag/opt? + debugln!("Parser::parse_short_arg: Valid negative num..."); + return Ok(ParseResult::MaybeNegNum); + } + + let mut ret = ParseResult::NotFound; + for c in arg.chars() { + debugln!("Parser::parse_short_arg:iter:{}", c); + + // update each index because `-abcd` is four indices to clap + self.cur_idx.set(self.cur_idx.get() + 1); + + // Check for matching short options, and return the name if there is no trailing + // concatenated value: -oval + // Option: -o + // Value: val + if let Some(opt) = find_opt_by_short!(self, c) { + debugln!("Parser::parse_short_arg:iter:{}: Found valid opt", c); + self.settings.set(AS::ValidArgFound); + // Check for trailing concatenated value + let p: Vec<_> = arg.splitn(2, c).collect(); + debugln!( + "Parser::parse_short_arg:iter:{}: p[0]={:?}, p[1]={:?}", + c, + p[0].as_bytes(), + p[1].as_bytes() + ); + let i = p[0].as_bytes().len() + 1; + let val = if p[1].as_bytes().len() > 0 { + debugln!( + "Parser::parse_short_arg:iter:{}: val={:?} (bytes), val={:?} (ascii)", + c, + arg_os.split_at(i).1.as_bytes(), + arg_os.split_at(i).1 + ); + Some(arg_os.split_at(i).1) + } else { + None + }; + + // Default to "we're expecting a value later" + let ret = self.parse_opt(val, opt, false, matcher)?; + + if self.cache.map_or(true, |name| name != opt.b.name) { + self.cache = Some(opt.b.name); + } + + return Ok(ret); + } else if let Some(flag) = find_flag_by_short!(self, c) { + debugln!("Parser::parse_short_arg:iter:{}: Found valid flag", c); + self.settings.set(AS::ValidArgFound); + // Only flags can be help or version + self.check_for_help_and_version_char(c)?; + ret = self.parse_flag(flag, matcher)?; + + // Handle conflicts, requirements, overrides, etc. 
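+ // e.g. `-vvv` passes through this branch three times, once per character, while
+ // `-oval` exits earlier via the opt branch with `val` captured as the option's value.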
+ // Must be called here due to mutabililty + if self.cache.map_or(true, |name| name != flag.b.name) { + self.cache = Some(flag.b.name); + } + } else { + let arg = format!("-{}", c); + return Err(Error::unknown_argument( + &*arg, + "", + &*usage::create_error_usage(self, matcher, None), + self.color(), + )); + } + } + Ok(ret) + } + + fn parse_opt( + &self, + val: Option<&OsStr>, + opt: &OptBuilder<'a, 'b>, + had_eq: bool, + matcher: &mut ArgMatcher<'a>, + ) -> ClapResult> { + debugln!("Parser::parse_opt; opt={}, val={:?}", opt.b.name, val); + debugln!("Parser::parse_opt; opt.settings={:?}", opt.b.settings); + let mut has_eq = false; + let no_val = val.is_none(); + let empty_vals = opt.is_set(ArgSettings::EmptyValues); + let min_vals_zero = opt.v.min_vals.unwrap_or(1) == 0; + let needs_eq = opt.is_set(ArgSettings::RequireEquals); + + debug!("Parser::parse_opt; Checking for val..."); + if let Some(fv) = val { + has_eq = fv.starts_with(&[b'=']) || had_eq; + let v = fv.trim_left_matches(b'='); + if !empty_vals && (v.len() == 0 || (needs_eq && !has_eq)) { + sdebugln!("Found Empty - Error"); + return Err(Error::empty_value( + opt, + &*usage::create_error_usage(self, matcher, None), + self.color(), + )); + } + sdebugln!("Found - {:?}, len: {}", v, v.len()); + debugln!( + "Parser::parse_opt: {:?} contains '='...{:?}", + fv, + fv.starts_with(&[b'=']) + ); + self.add_val_to_arg(opt, v, matcher)?; + } else if needs_eq && !(empty_vals || min_vals_zero) { + sdebugln!("None, but requires equals...Error"); + return Err(Error::empty_value( + opt, + &*usage::create_error_usage(self, matcher, None), + self.color(), + )); + } else { + sdebugln!("None"); + } + + matcher.inc_occurrence_of(opt.b.name); + // Increment or create the group "args" + self.groups_for_arg(opt.b.name) + .and_then(|vec| Some(matcher.inc_occurrences_of(&*vec))); + + let needs_delim = opt.is_set(ArgSettings::RequireDelimiter); + let mult = opt.is_set(ArgSettings::Multiple); + if no_val && min_vals_zero && !has_eq && needs_eq { + debugln!("Parser::parse_opt: More arg vals not required..."); + return Ok(ParseResult::ValuesDone); + } else if no_val || (mult && !needs_delim) && !has_eq && matcher.needs_more_vals(opt) { + debugln!("Parser::parse_opt: More arg vals required..."); + return Ok(ParseResult::Opt(opt.b.name)); + } + debugln!("Parser::parse_opt: More arg vals not required..."); + Ok(ParseResult::ValuesDone) + } + + fn add_val_to_arg( + &self, + arg: &A, + val: &OsStr, + matcher: &mut ArgMatcher<'a>, + ) -> ClapResult> + where + A: AnyArg<'a, 'b> + Display, + { + debugln!("Parser::add_val_to_arg; arg={}, val={:?}", arg.name(), val); + debugln!( + "Parser::add_val_to_arg; trailing_vals={:?}, DontDelimTrailingVals={:?}", + self.is_set(AS::TrailingValues), + self.is_set(AS::DontDelimitTrailingValues) + ); + if !(self.is_set(AS::TrailingValues) && self.is_set(AS::DontDelimitTrailingValues)) { + if let Some(delim) = arg.val_delim() { + if val.is_empty() { + Ok(self.add_single_val_to_arg(arg, val, matcher)?) 
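+ // an empty value is stored as a single (empty) value rather than split on the delimiter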
+ } else { + let mut iret = ParseResult::ValuesDone; + for v in val.split(delim as u32 as u8) { + iret = self.add_single_val_to_arg(arg, v, matcher)?; + } + // If there was a delimiter used, we're not looking for more values + if val.contains_byte(delim as u32 as u8) + || arg.is_set(ArgSettings::RequireDelimiter) + { + iret = ParseResult::ValuesDone; + } + Ok(iret) + } + } else { + self.add_single_val_to_arg(arg, val, matcher) + } + } else { + self.add_single_val_to_arg(arg, val, matcher) + } + } + + fn add_single_val_to_arg( + &self, + arg: &A, + v: &OsStr, + matcher: &mut ArgMatcher<'a>, + ) -> ClapResult> + where + A: AnyArg<'a, 'b> + Display, + { + debugln!("Parser::add_single_val_to_arg;"); + debugln!("Parser::add_single_val_to_arg: adding val...{:?}", v); + + // update the current index because each value is a distinct index to clap + self.cur_idx.set(self.cur_idx.get() + 1); + + // @TODO @docs @p4: docs for indices should probably note that a terminator isn't a value + // and therefore not reported in indices + if let Some(t) = arg.val_terminator() { + if t == v { + return Ok(ParseResult::ValuesDone); + } + } + + matcher.add_val_to(arg.name(), v); + matcher.add_index_to(arg.name(), self.cur_idx.get()); + + // Increment or create the group "args" + if let Some(grps) = self.groups_for_arg(arg.name()) { + for grp in grps { + matcher.add_val_to(&*grp, v); + } + } + + if matcher.needs_more_vals(arg) { + return Ok(ParseResult::Opt(arg.name())); + } + Ok(ParseResult::ValuesDone) + } + + fn parse_flag( + &self, + flag: &FlagBuilder<'a, 'b>, + matcher: &mut ArgMatcher<'a>, + ) -> ClapResult> { + debugln!("Parser::parse_flag;"); + + matcher.inc_occurrence_of(flag.b.name); + matcher.add_index_to(flag.b.name, self.cur_idx.get()); + + // Increment or create the group "args" + self.groups_for_arg(flag.b.name) + .and_then(|vec| Some(matcher.inc_occurrences_of(&*vec))); + + Ok(ParseResult::Flag) + } + + fn did_you_mean_error(&self, arg: &str, matcher: &mut ArgMatcher<'a>) -> ClapResult<()> { + // Didn't match a flag or option + let suffix = suggestions::did_you_mean_flag_suffix(arg, longs!(self), &self.subcommands); + + // Add the arg to the matches to build a proper usage string + if let Some(name) = suffix.1 { + if let Some(opt) = find_opt_by_long!(self, name) { + self.groups_for_arg(&*opt.b.name) + .and_then(|grps| Some(matcher.inc_occurrences_of(&*grps))); + matcher.insert(&*opt.b.name); + } else if let Some(flg) = find_flag_by_long!(self, name) { + self.groups_for_arg(&*flg.b.name) + .and_then(|grps| Some(matcher.inc_occurrences_of(&*grps))); + matcher.insert(&*flg.b.name); + } + } + + let used_arg = format!("--{}", arg); + Err(Error::unknown_argument( + &*used_arg, + &*suffix.0, + &*usage::create_error_usage(self, matcher, None), + self.color(), + )) + } + + // Prints the version to the user and exits if quit=true + fn print_version(&self, w: &mut W, use_long: bool) -> ClapResult<()> { + self.write_version(w, use_long)?; + w.flush().map_err(Error::from) + } + + pub fn write_version(&self, w: &mut W, use_long: bool) -> io::Result<()> { + let ver = if use_long { + self.meta + .long_version + .unwrap_or_else(|| self.meta.version.unwrap_or("")) + } else { + self.meta + .version + .unwrap_or_else(|| self.meta.long_version.unwrap_or("")) + }; + if let Some(bn) = self.meta.bin_name.as_ref() { + if bn.contains(' ') { + // Incase we're dealing with subcommands i.e. 
git mv is translated to git-mv + write!(w, "{} {}", bn.replace(" ", "-"), ver) + } else { + write!(w, "{} {}", &self.meta.name[..], ver) + } + } else { + write!(w, "{} {}", &self.meta.name[..], ver) + } + } + + pub fn print_help(&self) -> ClapResult<()> { + let out = io::stdout(); + let mut buf_w = BufWriter::new(out.lock()); + self.write_help(&mut buf_w) + } + + pub fn write_help(&self, w: &mut W) -> ClapResult<()> { + Help::write_parser_help(w, self, false) + } + + pub fn write_long_help(&self, w: &mut W) -> ClapResult<()> { + Help::write_parser_help(w, self, true) + } + + pub fn write_help_err(&self, w: &mut W) -> ClapResult<()> { + Help::write_parser_help_to_stderr(w, self) + } + + pub fn add_defaults(&mut self, matcher: &mut ArgMatcher<'a>) -> ClapResult<()> { + debugln!("Parser::add_defaults;"); + macro_rules! add_val { + (@default $_self:ident, $a:ident, $m:ident) => { + if let Some(ref val) = $a.v.default_val { + debugln!("Parser::add_defaults:iter:{}: has default vals", $a.b.name); + if $m.get($a.b.name).map(|ma| ma.vals.len()).map(|len| len == 0).unwrap_or(false) { + debugln!("Parser::add_defaults:iter:{}: has no user defined vals", $a.b.name); + $_self.add_val_to_arg($a, OsStr::new(val), $m)?; + + if $_self.cache.map_or(true, |name| name != $a.name()) { + $_self.cache = Some($a.name()); + } + } else if $m.get($a.b.name).is_some() { + debugln!("Parser::add_defaults:iter:{}: has user defined vals", $a.b.name); + } else { + debugln!("Parser::add_defaults:iter:{}: wasn't used", $a.b.name); + + $_self.add_val_to_arg($a, OsStr::new(val), $m)?; + + if $_self.cache.map_or(true, |name| name != $a.name()) { + $_self.cache = Some($a.name()); + } + } + } else { + debugln!("Parser::add_defaults:iter:{}: doesn't have default vals", $a.b.name); + } + }; + ($_self:ident, $a:ident, $m:ident) => { + if let Some(ref vm) = $a.v.default_vals_ifs { + sdebugln!(" has conditional defaults"); + let mut done = false; + if $m.get($a.b.name).is_none() { + for &(arg, val, default) in vm.values() { + let add = if let Some(a) = $m.get(arg) { + if let Some(v) = val { + a.vals.iter().any(|value| v == value) + } else { + true + } + } else { + false + }; + if add { + $_self.add_val_to_arg($a, OsStr::new(default), $m)?; + if $_self.cache.map_or(true, |name| name != $a.name()) { + $_self.cache = Some($a.name()); + } + done = true; + break; + } + } + } + + if done { + continue; // outer loop (outside macro) + } + } else { + sdebugln!(" doesn't have conditional defaults"); + } + add_val!(@default $_self, $a, $m) + }; + } + + for o in &self.opts { + debug!("Parser::add_defaults:iter:{}:", o.b.name); + add_val!(self, o, matcher); + } + for p in self.positionals.values() { + debug!("Parser::add_defaults:iter:{}:", p.b.name); + add_val!(self, p, matcher); + } + Ok(()) + } + + pub fn add_env(&mut self, matcher: &mut ArgMatcher<'a>) -> ClapResult<()> { + macro_rules! 
add_val { + ($_self:ident, $a:ident, $m:ident) => { + if let Some(ref val) = $a.v.env { + if $m.get($a.b.name).map(|ma| ma.vals.len()).map(|len| len == 0).unwrap_or(false) { + if let Some(ref val) = val.1 { + $_self.add_val_to_arg($a, OsStr::new(val), $m)?; + + if $_self.cache.map_or(true, |name| name != $a.name()) { + $_self.cache = Some($a.name()); + } + } + } else { + if let Some(ref val) = val.1 { + $_self.add_val_to_arg($a, OsStr::new(val), $m)?; + + if $_self.cache.map_or(true, |name| name != $a.name()) { + $_self.cache = Some($a.name()); + } + } + } + } + }; + } + + for o in &self.opts { + add_val!(self, o, matcher); + } + for p in self.positionals.values() { + add_val!(self, p, matcher); + } + Ok(()) + } + + pub fn flags(&self) -> Iter> { self.flags.iter() } + + pub fn opts(&self) -> Iter> { self.opts.iter() } + + pub fn positionals(&self) -> map::Values> { self.positionals.values() } + + pub fn subcommands(&self) -> Iter { self.subcommands.iter() } + + // Should we color the output? None=determined by output location, true=yes, false=no + #[doc(hidden)] + pub fn color(&self) -> ColorWhen { + debugln!("Parser::color;"); + debug!("Parser::color: Color setting..."); + if self.is_set(AS::ColorNever) { + sdebugln!("Never"); + ColorWhen::Never + } else if self.is_set(AS::ColorAlways) { + sdebugln!("Always"); + ColorWhen::Always + } else { + sdebugln!("Auto"); + ColorWhen::Auto + } + } + + pub fn find_any_arg(&self, name: &str) -> Option<&AnyArg<'a, 'b>> { + if let Some(f) = find_by_name!(self, name, flags, iter) { + return Some(f); + } + if let Some(o) = find_by_name!(self, name, opts, iter) { + return Some(o); + } + if let Some(p) = find_by_name!(self, name, positionals, values) { + return Some(p); + } + None + } + + /// Check is a given string matches the binary name for this parser + fn is_bin_name(&self, value: &str) -> bool { + self.meta + .bin_name + .as_ref() + .and_then(|name| Some(value == name)) + .unwrap_or(false) + } + + /// Check is a given string is an alias for this parser + fn is_alias(&self, value: &str) -> bool { + self.meta + .aliases + .as_ref() + .and_then(|aliases| { + for alias in aliases { + if alias.0 == value { + return Some(true); + } + } + Some(false) + }) + .unwrap_or(false) + } + + // Only used for completion scripts due to bin_name messiness + #[cfg_attr(feature = "lints", allow(block_in_if_condition_stmt))] + pub fn find_subcommand(&'b self, sc: &str) -> Option<&'b App<'a, 'b>> { + debugln!("Parser::find_subcommand: sc={}", sc); + debugln!( + "Parser::find_subcommand: Currently in Parser...{}", + self.meta.bin_name.as_ref().unwrap() + ); + for s in &self.subcommands { + if s.p.is_bin_name(sc) { + return Some(s); + } + // XXX: why do we split here? + // isn't `sc` supposed to be single word already? + let last = sc.split(' ').rev().next().expect(INTERNAL_ERROR_MSG); + if s.p.is_alias(last) { + return Some(s); + } + + if let Some(app) = s.p.find_subcommand(sc) { + return Some(app); + } + } + None + } + + #[inline] + fn contains_long(&self, l: &str) -> bool { longs!(self).any(|al| al == &l) } + + #[inline] + fn contains_short(&self, s: char) -> bool { shorts!(self).any(|arg_s| arg_s == &s) } +} diff --git a/clap/src/app/settings.rs b/clap/src/app/settings.rs new file mode 100644 index 000000000..4aa64d337 --- /dev/null +++ b/clap/src/app/settings.rs @@ -0,0 +1,1134 @@ +// Std +#[allow(deprecated, unused_imports)] +use std::ascii::AsciiExt; +use std::str::FromStr; +use std::ops::BitOr; + +bitflags! 
{ + struct Flags: u64 { + const SC_NEGATE_REQS = 1; + const SC_REQUIRED = 1 << 1; + const A_REQUIRED_ELSE_HELP = 1 << 2; + const GLOBAL_VERSION = 1 << 3; + const VERSIONLESS_SC = 1 << 4; + const UNIFIED_HELP = 1 << 5; + const WAIT_ON_ERROR = 1 << 6; + const SC_REQUIRED_ELSE_HELP= 1 << 7; + const NEEDS_LONG_HELP = 1 << 8; + const NEEDS_LONG_VERSION = 1 << 9; + const NEEDS_SC_HELP = 1 << 10; + const DISABLE_VERSION = 1 << 11; + const HIDDEN = 1 << 12; + const TRAILING_VARARG = 1 << 13; + const NO_BIN_NAME = 1 << 14; + const ALLOW_UNK_SC = 1 << 15; + const UTF8_STRICT = 1 << 16; + const UTF8_NONE = 1 << 17; + const LEADING_HYPHEN = 1 << 18; + const NO_POS_VALUES = 1 << 19; + const NEXT_LINE_HELP = 1 << 20; + const DERIVE_DISP_ORDER = 1 << 21; + const COLORED_HELP = 1 << 22; + const COLOR_ALWAYS = 1 << 23; + const COLOR_AUTO = 1 << 24; + const COLOR_NEVER = 1 << 25; + const DONT_DELIM_TRAIL = 1 << 26; + const ALLOW_NEG_NUMS = 1 << 27; + const LOW_INDEX_MUL_POS = 1 << 28; + const DISABLE_HELP_SC = 1 << 29; + const DONT_COLLAPSE_ARGS = 1 << 30; + const ARGS_NEGATE_SCS = 1 << 31; + const PROPAGATE_VALS_DOWN = 1 << 32; + const ALLOW_MISSING_POS = 1 << 33; + const TRAILING_VALUES = 1 << 34; + const VALID_NEG_NUM_FOUND = 1 << 35; + const PROPAGATED = 1 << 36; + const VALID_ARG_FOUND = 1 << 37; + const INFER_SUBCOMMANDS = 1 << 38; + const CONTAINS_LAST = 1 << 39; + const ARGS_OVERRIDE_SELF = 1 << 40; + } +} + +#[doc(hidden)] +#[derive(Debug, Copy, Clone, PartialEq)] +pub struct AppFlags(Flags); + +impl BitOr for AppFlags { + type Output = Self; + fn bitor(self, rhs: Self) -> Self { AppFlags(self.0 | rhs.0) } +} + +impl Default for AppFlags { + fn default() -> Self { + AppFlags( + Flags::NEEDS_LONG_VERSION | Flags::NEEDS_LONG_HELP | Flags::NEEDS_SC_HELP + | Flags::UTF8_NONE | Flags::COLOR_AUTO, + ) + } +} + +#[allow(deprecated)] +impl AppFlags { + pub fn new() -> Self { AppFlags::default() } + pub fn zeroed() -> Self { AppFlags(Flags::empty()) } + + impl_settings! 
{ AppSettings, + ArgRequiredElseHelp => Flags::A_REQUIRED_ELSE_HELP, + ArgsNegateSubcommands => Flags::ARGS_NEGATE_SCS, + AllArgsOverrideSelf => Flags::ARGS_OVERRIDE_SELF, + AllowExternalSubcommands => Flags::ALLOW_UNK_SC, + AllowInvalidUtf8 => Flags::UTF8_NONE, + AllowLeadingHyphen => Flags::LEADING_HYPHEN, + AllowNegativeNumbers => Flags::ALLOW_NEG_NUMS, + AllowMissingPositional => Flags::ALLOW_MISSING_POS, + ColoredHelp => Flags::COLORED_HELP, + ColorAlways => Flags::COLOR_ALWAYS, + ColorAuto => Flags::COLOR_AUTO, + ColorNever => Flags::COLOR_NEVER, + DontDelimitTrailingValues => Flags::DONT_DELIM_TRAIL, + DontCollapseArgsInUsage => Flags::DONT_COLLAPSE_ARGS, + DeriveDisplayOrder => Flags::DERIVE_DISP_ORDER, + DisableHelpSubcommand => Flags::DISABLE_HELP_SC, + DisableVersion => Flags::DISABLE_VERSION, + GlobalVersion => Flags::GLOBAL_VERSION, + HidePossibleValuesInHelp => Flags::NO_POS_VALUES, + Hidden => Flags::HIDDEN, + LowIndexMultiplePositional => Flags::LOW_INDEX_MUL_POS, + NeedsLongHelp => Flags::NEEDS_LONG_HELP, + NeedsLongVersion => Flags::NEEDS_LONG_VERSION, + NeedsSubcommandHelp => Flags::NEEDS_SC_HELP, + NoBinaryName => Flags::NO_BIN_NAME, + PropagateGlobalValuesDown=> Flags::PROPAGATE_VALS_DOWN, + StrictUtf8 => Flags::UTF8_STRICT, + SubcommandsNegateReqs => Flags::SC_NEGATE_REQS, + SubcommandRequired => Flags::SC_REQUIRED, + SubcommandRequiredElseHelp => Flags::SC_REQUIRED_ELSE_HELP, + TrailingVarArg => Flags::TRAILING_VARARG, + UnifiedHelpMessage => Flags::UNIFIED_HELP, + NextLineHelp => Flags::NEXT_LINE_HELP, + VersionlessSubcommands => Flags::VERSIONLESS_SC, + WaitOnError => Flags::WAIT_ON_ERROR, + TrailingValues => Flags::TRAILING_VALUES, + ValidNegNumFound => Flags::VALID_NEG_NUM_FOUND, + Propagated => Flags::PROPAGATED, + ValidArgFound => Flags::VALID_ARG_FOUND, + InferSubcommands => Flags::INFER_SUBCOMMANDS, + ContainsLast => Flags::CONTAINS_LAST + } +} + +/// Application level settings, which affect how [`App`] operates +/// +/// **NOTE:** When these settings are used, they apply only to current command, and are *not* +/// propagated down or up through child or parent subcommands +/// +/// [`App`]: ./struct.App.html +#[derive(Debug, PartialEq, Copy, Clone)] +pub enum AppSettings { + /// Specifies that any invalid UTF-8 code points should *not* be treated as an error. + /// This is the default behavior of `clap`. + /// + /// **NOTE:** Using argument values with invalid UTF-8 code points requires using + /// [`ArgMatches::os_value_of`], [`ArgMatches::os_values_of`], [`ArgMatches::lossy_value_of`], + /// or [`ArgMatches::lossy_values_of`] for those particular arguments which may contain invalid + /// UTF-8 values + /// + /// **NOTE:** This rule only applies to argument values, as flags, options, and + /// [`SubCommand`]s themselves only allow valid UTF-8 code points. 
+ /// + /// # Platform Specific + /// + /// Non Windows systems only + /// + /// # Examples + /// + #[cfg_attr(not(unix), doc = " ```ignore")] + #[cfg_attr(unix, doc = " ```")] + /// # use clap::{App, AppSettings}; + /// use std::ffi::OsString; + /// use std::os::unix::ffi::{OsStrExt,OsStringExt}; + /// + /// let r = App::new("myprog") + /// //.setting(AppSettings::AllowInvalidUtf8) + /// .arg_from_usage(" 'some positional arg'") + /// .get_matches_from_safe( + /// vec![ + /// OsString::from("myprog"), + /// OsString::from_vec(vec![0xe9])]); + /// + /// assert!(r.is_ok()); + /// let m = r.unwrap(); + /// assert_eq!(m.value_of_os("arg").unwrap().as_bytes(), &[0xe9]); + /// ``` + /// [`ArgMatches::os_value_of`]: ./struct.ArgMatches.html#method.os_value_of + /// [`ArgMatches::os_values_of`]: ./struct.ArgMatches.html#method.os_values_of + /// [`ArgMatches::lossy_value_of`]: ./struct.ArgMatches.html#method.lossy_value_of + /// [`ArgMatches::lossy_values_of`]: ./struct.ArgMatches.html#method.lossy_values_of + AllowInvalidUtf8, + + /// Essentially sets [`Arg::overrides_with("itself")`] for all arguments. + /// + /// **WARNING:** Positional arguments cannot override themselves (or we would never be able + /// to advance to the next positional). This setting ignores positional arguments. + /// [`Arg::overrides_with("itself")`]: ./struct.Arg.html#method.overrides_with + AllArgsOverrideSelf, + + /// Specifies that leading hyphens are allowed in argument *values*, such as negative numbers + /// like `-10`. (which would otherwise be parsed as another flag or option) + /// + /// **NOTE:** Use this setting with caution as it silences certain circumstances which would + /// otherwise be an error (such as accidentally forgetting to specify a value for leading + /// option). It is preferred to set this on a per argument basis, via [`Arg::allow_hyphen_values`] + /// + /// # Examples + /// + /// ```rust + /// # use clap::{Arg, App, AppSettings}; + /// // Imagine you needed to represent negative numbers as well, such as -10 + /// let m = App::new("nums") + /// .setting(AppSettings::AllowLeadingHyphen) + /// .arg(Arg::with_name("neg").index(1)) + /// .get_matches_from(vec![ + /// "nums", "-20" + /// ]); + /// + /// assert_eq!(m.value_of("neg"), Some("-20")); + /// # ; + /// ``` + /// [`Arg::allow_hyphen_values`]: ./struct.Arg.html#method.allow_hyphen_values + AllowLeadingHyphen, + + /// Allows negative numbers to pass as values. This is similar to + /// `AllowLeadingHyphen` except that it only allows numbers, all + /// other undefined leading hyphens will fail to parse. + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg, AppSettings}; + /// let res = App::new("myprog") + /// .version("v1.1") + /// .setting(AppSettings::AllowNegativeNumbers) + /// .arg(Arg::with_name("num")) + /// .get_matches_from_safe(vec![ + /// "myprog", "-20" + /// ]); + /// assert!(res.is_ok()); + /// let m = res.unwrap(); + /// assert_eq!(m.value_of("num").unwrap(), "-20"); + /// ``` + /// [`AllowLeadingHyphen`]: ./enum.AppSettings.html#variant.AllowLeadingHyphen + AllowNegativeNumbers, + + /// Allows one to implement two styles of CLIs where positionals can be used out of order. + /// + /// The first example is a CLI where the second to last positional argument is optional, but + /// the final positional argument is required. Such as `$ prog [optional] ` where one + /// of the two following usages is allowed: + /// + /// * `$ prog [optional] ` + /// * `$ prog ` + /// + /// This would otherwise not be allowed. 
This is useful when `[optional]` has a default value. + /// + /// **Note:** when using this style of "missing positionals" the final positional *must* be + /// [required] if `--` will not be used to skip to the final positional argument. + /// + /// **Note:** This style also only allows a single positional argument to be "skipped" without + /// the use of `--`. To skip more than one, see the second example. + /// + /// The second example is when one wants to skip multiple optional positional arguments, and use + /// of the `--` operator is OK (but not required if all arguments will be specified anyways). + /// + /// For example, imagine a CLI which has three positional arguments `[foo] [bar] [baz]...` where + /// `baz` accepts multiple values (similar to man `ARGS...` style training arguments). + /// + /// With this setting the following invocations are possible: + /// + /// * `$ prog foo bar baz1 baz2 baz3` + /// * `$ prog foo -- baz1 baz2 baz3` + /// * `$ prog -- baz1 baz2 baz3` + /// + /// # Examples + /// + /// Style number one from above: + /// + /// ```rust + /// # use clap::{App, Arg, AppSettings}; + /// // Assume there is an external subcommand named "subcmd" + /// let m = App::new("myprog") + /// .setting(AppSettings::AllowMissingPositional) + /// .arg(Arg::with_name("arg1")) + /// .arg(Arg::with_name("arg2") + /// .required(true)) + /// .get_matches_from(vec![ + /// "prog", "other" + /// ]); + /// + /// assert_eq!(m.value_of("arg1"), None); + /// assert_eq!(m.value_of("arg2"), Some("other")); + /// ``` + /// + /// Now the same example, but using a default value for the first optional positional argument + /// + /// ```rust + /// # use clap::{App, Arg, AppSettings}; + /// // Assume there is an external subcommand named "subcmd" + /// let m = App::new("myprog") + /// .setting(AppSettings::AllowMissingPositional) + /// .arg(Arg::with_name("arg1") + /// .default_value("something")) + /// .arg(Arg::with_name("arg2") + /// .required(true)) + /// .get_matches_from(vec![ + /// "prog", "other" + /// ]); + /// + /// assert_eq!(m.value_of("arg1"), Some("something")); + /// assert_eq!(m.value_of("arg2"), Some("other")); + /// ``` + /// Style number two from above: + /// + /// ```rust + /// # use clap::{App, Arg, AppSettings}; + /// // Assume there is an external subcommand named "subcmd" + /// let m = App::new("myprog") + /// .setting(AppSettings::AllowMissingPositional) + /// .arg(Arg::with_name("foo")) + /// .arg(Arg::with_name("bar")) + /// .arg(Arg::with_name("baz").multiple(true)) + /// .get_matches_from(vec![ + /// "prog", "foo", "bar", "baz1", "baz2", "baz3" + /// ]); + /// + /// assert_eq!(m.value_of("foo"), Some("foo")); + /// assert_eq!(m.value_of("bar"), Some("bar")); + /// assert_eq!(m.values_of("baz").unwrap().collect::>(), &["baz1", "baz2", "baz3"]); + /// ``` + /// + /// Now notice if we don't specify `foo` or `baz` but use the `--` operator. 
+ /// + /// ```rust + /// # use clap::{App, Arg, AppSettings}; + /// // Assume there is an external subcommand named "subcmd" + /// let m = App::new("myprog") + /// .setting(AppSettings::AllowMissingPositional) + /// .arg(Arg::with_name("foo")) + /// .arg(Arg::with_name("bar")) + /// .arg(Arg::with_name("baz").multiple(true)) + /// .get_matches_from(vec![ + /// "prog", "--", "baz1", "baz2", "baz3" + /// ]); + /// + /// assert_eq!(m.value_of("foo"), None); + /// assert_eq!(m.value_of("bar"), None); + /// assert_eq!(m.values_of("baz").unwrap().collect::>(), &["baz1", "baz2", "baz3"]); + /// ``` + /// [required]: ./struct.Arg.html#method.required + AllowMissingPositional, + + /// Specifies that an unexpected positional argument, + /// which would otherwise cause a [`ErrorKind::UnknownArgument`] error, + /// should instead be treated as a [`SubCommand`] within the [`ArgMatches`] struct. + /// + /// **NOTE:** Use this setting with caution, + /// as a truly unexpected argument (i.e. one that is *NOT* an external subcommand) + /// will **not** cause an error and instead be treated as a potential subcommand. + /// One should check for such cases manually and inform the user appropriately. + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, AppSettings}; + /// // Assume there is an external subcommand named "subcmd" + /// let m = App::new("myprog") + /// .setting(AppSettings::AllowExternalSubcommands) + /// .get_matches_from(vec![ + /// "myprog", "subcmd", "--option", "value", "-fff", "--flag" + /// ]); + /// + /// // All trailing arguments will be stored under the subcommand's sub-matches using an empty + /// // string argument name + /// match m.subcommand() { + /// (external, Some(ext_m)) => { + /// let ext_args: Vec<&str> = ext_m.values_of("").unwrap().collect(); + /// assert_eq!(external, "subcmd"); + /// assert_eq!(ext_args, ["--option", "value", "-fff", "--flag"]); + /// }, + /// _ => {}, + /// } + /// ``` + /// [`ErrorKind::UnknownArgument`]: ./enum.ErrorKind.html#variant.UnknownArgument + /// [`SubCommand`]: ./struct.SubCommand.html + /// [`ArgMatches`]: ./struct.ArgMatches.html + AllowExternalSubcommands, + + /// Specifies that use of a valid [argument] negates [subcommands] being used after. By default + /// `clap` allows arguments between subcommands such as + /// ` [cmd_args] [cmd2_args] [cmd3_args]`. This setting disables that + /// functionality and says that arguments can only follow the *final* subcommand. For instance + /// using this setting makes only the following invocations possible: + /// + /// * ` [cmd3_args]` + /// * ` [cmd2_args]` + /// * ` [cmd_args]` + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, AppSettings}; + /// App::new("myprog") + /// .setting(AppSettings::ArgsNegateSubcommands) + /// # ; + /// ``` + /// [subcommands]: ./struct.SubCommand.html + /// [argument]: ./struct.Arg.html + ArgsNegateSubcommands, + + /// Specifies that the help text should be displayed (and then exit gracefully), + /// if no arguments are present at runtime (i.e. an empty run such as, `$ myprog`. + /// + /// **NOTE:** [`SubCommand`]s count as arguments + /// + /// **NOTE:** Setting [`Arg::default_value`] effectively disables this option as it will + /// ensure that some argument is always present. 
+ /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, AppSettings}; + /// App::new("myprog") + /// .setting(AppSettings::ArgRequiredElseHelp) + /// # ; + /// ``` + /// [`SubCommand`]: ./struct.SubCommand.html + /// [`Arg::default_value`]: ./struct.Arg.html#method.default_value + ArgRequiredElseHelp, + + /// Uses colorized help messages. + /// + /// **NOTE:** Must be compiled with the `color` cargo feature + /// + /// # Platform Specific + /// + /// This setting only applies to Unix, Linux, and macOS (i.e. non-Windows platforms) + /// + /// # Examples + /// + /// ```no_run + /// # use clap::{App, Arg, SubCommand, AppSettings}; + /// App::new("myprog") + /// .setting(AppSettings::ColoredHelp) + /// .get_matches(); + /// ``` + ColoredHelp, + + /// Enables colored output only when the output is going to a terminal or TTY. + /// + /// **NOTE:** This is the default behavior of `clap`. + /// + /// **NOTE:** Must be compiled with the `color` cargo feature. + /// + /// # Platform Specific + /// + /// This setting only applies to Unix, Linux, and macOS (i.e. non-Windows platforms). + /// + /// # Examples + /// + /// ```no_run + /// # use clap::{App, Arg, SubCommand, AppSettings}; + /// App::new("myprog") + /// .setting(AppSettings::ColorAuto) + /// .get_matches(); + /// ``` + ColorAuto, + + /// Enables colored output regardless of whether or not the output is going to a terminal/TTY. + /// + /// **NOTE:** Must be compiled with the `color` cargo feature. + /// + /// # Platform Specific + /// + /// This setting only applies to Unix, Linux, and macOS (i.e. non-Windows platforms). + /// + /// # Examples + /// + /// ```no_run + /// # use clap::{App, Arg, SubCommand, AppSettings}; + /// App::new("myprog") + /// .setting(AppSettings::ColorAlways) + /// .get_matches(); + /// ``` + ColorAlways, + + /// Disables colored output no matter if the output is going to a terminal/TTY, or not. + /// + /// **NOTE:** Must be compiled with the `color` cargo feature + /// + /// # Platform Specific + /// + /// This setting only applies to Unix, Linux, and macOS (i.e. non-Windows platforms) + /// + /// # Examples + /// + /// ```no_run + /// # use clap::{App, Arg, SubCommand, AppSettings}; + /// App::new("myprog") + /// .setting(AppSettings::ColorNever) + /// .get_matches(); + /// ``` + ColorNever, + + /// Disables the automatic collapsing of positional args into `[ARGS]` inside the usage string + /// + /// # Examples + /// + /// ```no_run + /// # use clap::{App, Arg, SubCommand, AppSettings}; + /// App::new("myprog") + /// .setting(AppSettings::DontCollapseArgsInUsage) + /// .get_matches(); + /// ``` + DontCollapseArgsInUsage, + + /// Disables the automatic delimiting of values when `--` or [`AppSettings::TrailingVarArg`] + /// was used. + /// + /// **NOTE:** The same thing can be done manually by setting the final positional argument to + /// [`Arg::use_delimiter(false)`]. Using this setting is safer, because it's easier to locate + /// when making changes. 
+ /// + /// # Examples + /// + /// ```no_run + /// # use clap::{App, Arg, SubCommand, AppSettings}; + /// App::new("myprog") + /// .setting(AppSettings::DontDelimitTrailingValues) + /// .get_matches(); + /// ``` + /// [`AppSettings::TrailingVarArg`]: ./enum.AppSettings.html#variant.TrailingVarArg + /// [`Arg::use_delimiter(false)`]: ./struct.Arg.html#method.use_delimiter + DontDelimitTrailingValues, + + /// Disables the `help` subcommand + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, AppSettings, ErrorKind, SubCommand}; + /// let res = App::new("myprog") + /// .version("v1.1") + /// .setting(AppSettings::DisableHelpSubcommand) + /// // Normally, creating a subcommand causes a `help` subcommand to automatically + /// // be generated as well + /// .subcommand(SubCommand::with_name("test")) + /// .get_matches_from_safe(vec![ + /// "myprog", "help" + /// ]); + /// assert!(res.is_err()); + /// assert_eq!(res.unwrap_err().kind, ErrorKind::UnknownArgument); + /// ``` + /// [`SubCommand`]: ./struct.SubCommand.html + DisableHelpSubcommand, + + /// Disables `-V` and `--version` [`App`] without affecting any of the [`SubCommand`]s + /// (Defaults to `false`; application *does* have a version flag) + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, AppSettings, ErrorKind}; + /// let res = App::new("myprog") + /// .version("v1.1") + /// .setting(AppSettings::DisableVersion) + /// .get_matches_from_safe(vec![ + /// "myprog", "-V" + /// ]); + /// assert!(res.is_err()); + /// assert_eq!(res.unwrap_err().kind, ErrorKind::UnknownArgument); + /// ``` + /// + /// ```rust + /// # use clap::{App, SubCommand, AppSettings, ErrorKind}; + /// let res = App::new("myprog") + /// .version("v1.1") + /// .setting(AppSettings::DisableVersion) + /// .subcommand(SubCommand::with_name("test")) + /// .get_matches_from_safe(vec![ + /// "myprog", "test", "-V" + /// ]); + /// assert!(res.is_err()); + /// assert_eq!(res.unwrap_err().kind, ErrorKind::VersionDisplayed); + /// ``` + /// [`SubCommand`]: ./struct.SubCommand.html + DisableVersion, + + /// Displays the arguments and [`SubCommand`]s in the help message in the order that they were + /// declared in, and not alphabetically which is the default. + /// + /// # Examples + /// + /// ```no_run + /// # use clap::{App, Arg, SubCommand, AppSettings}; + /// App::new("myprog") + /// .setting(AppSettings::DeriveDisplayOrder) + /// .get_matches(); + /// ``` + /// [`SubCommand`]: ./struct.SubCommand.html + DeriveDisplayOrder, + + /// Specifies to use the version of the current command for all child [`SubCommand`]s. + /// (Defaults to `false`; subcommands have independent version strings from their parents.) 
+ /// + /// **NOTE:** The version for the current command **and** this setting must be set **prior** to + /// adding any child subcommands + /// + /// # Examples + /// + /// ```no_run + /// # use clap::{App, Arg, SubCommand, AppSettings}; + /// App::new("myprog") + /// .version("v1.1") + /// .setting(AppSettings::GlobalVersion) + /// .subcommand(SubCommand::with_name("test")) + /// .get_matches(); + /// // running `$ myprog test --version` will display + /// // "myprog-test v1.1" + /// ``` + /// [`SubCommand`]: ./struct.SubCommand.html + GlobalVersion, + + /// Specifies that this [`SubCommand`] should be hidden from help messages + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg, AppSettings, SubCommand}; + /// App::new("myprog") + /// .subcommand(SubCommand::with_name("test") + /// .setting(AppSettings::Hidden)) + /// # ; + /// ``` + /// [`SubCommand`]: ./struct.SubCommand.html + Hidden, + + /// Tells `clap` *not* to print possible values when displaying help information. + /// This can be useful if there are many values, or they are explained elsewhere. + HidePossibleValuesInHelp, + + /// Tries to match unknown args to partial [`subcommands`] or their [aliases]. For example to + /// match a subcommand named `test`, one could use `t`, `te`, `tes`, and `test`. + /// + /// **NOTE:** The match *must not* be ambiguous at all in order to succeed. i.e. to match `te` + /// to `test` there could not also be a subcommand or alias `temp` because both start with `te` + /// + /// **CAUTION:** This setting can interfere with [positional/free arguments], take care when + /// designing CLIs which allow inferred subcommands and have potential positional/free + /// arguments whose values could start with the same characters as subcommands. If this is the + /// case, it's recommended to use settings such as [`AppSeettings::ArgsNegateSubcommands`] in + /// conjunction with this setting. + /// + /// # Examples + /// + /// ```no_run + /// # use clap::{App, Arg, SubCommand, AppSettings}; + /// let m = App::new("prog") + /// .setting(AppSettings::InferSubcommands) + /// .subcommand(SubCommand::with_name("test")) + /// .get_matches_from(vec![ + /// "prog", "te" + /// ]); + /// assert_eq!(m.subcommand_name(), Some("test")); + /// ``` + /// [`subcommands`]: ./struct.SubCommand.html + /// [positional/free arguments]: ./struct.Arg.html#method.index + /// [aliases]: ./struct.App.html#method.alias + /// [`AppSeettings::ArgsNegateSubcommands`]: ./enum.AppSettings.html#variant.ArgsNegateSubcommands + InferSubcommands, + + /// Specifies that the parser should not assume the first argument passed is the binary name. + /// This is normally the case when using a "daemon" style mode, or an interactive CLI where one + /// one would not normally type the binary or program name for each command. + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg, AppSettings}; + /// let m = App::new("myprog") + /// .setting(AppSettings::NoBinaryName) + /// .arg(Arg::from_usage("... 'commands to run'")) + /// .get_matches_from(vec!["command", "set"]); + /// + /// let cmds: Vec<&str> = m.values_of("cmd").unwrap().collect(); + /// assert_eq!(cmds, ["command", "set"]); + /// ``` + NoBinaryName, + + /// Places the help string for all arguments on the line after the argument. 
+ /// + /// # Examples + /// + /// ```no_run + /// # use clap::{App, Arg, SubCommand, AppSettings}; + /// App::new("myprog") + /// .setting(AppSettings::NextLineHelp) + /// .get_matches(); + /// ``` + NextLineHelp, + + /// **DEPRECATED**: This setting is no longer required in order to propagate values up or down + /// + /// Specifies that the parser should propagate global arg's values down or up through any *used* + /// child subcommands. Meaning, if a subcommand wasn't used, the values won't be propagated to + /// said subcommand. + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg, AppSettings, SubCommand}; + /// let m = App::new("myprog") + /// .arg(Arg::from_usage("[cmd] 'command to run'") + /// .global(true)) + /// .subcommand(SubCommand::with_name("foo")) + /// .get_matches_from(vec!["myprog", "set", "foo"]); + /// + /// assert_eq!(m.value_of("cmd"), Some("set")); + /// + /// let sub_m = m.subcommand_matches("foo").unwrap(); + /// assert_eq!(sub_m.value_of("cmd"), Some("set")); + /// ``` + /// Now doing the same thing, but *not* using any subcommands will result in the value not being + /// propagated down. + /// + /// ```rust + /// # use clap::{App, Arg, AppSettings, SubCommand}; + /// let m = App::new("myprog") + /// .arg(Arg::from_usage("[cmd] 'command to run'") + /// .global(true)) + /// .subcommand(SubCommand::with_name("foo")) + /// .get_matches_from(vec!["myprog", "set"]); + /// + /// assert_eq!(m.value_of("cmd"), Some("set")); + /// + /// assert!(m.subcommand_matches("foo").is_none()); + /// ``` + #[deprecated(since = "2.27.0", note = "No longer required to propagate values")] + PropagateGlobalValuesDown, + + /// Allows [`SubCommand`]s to override all requirements of the parent command. + /// For example if you had a subcommand or top level application with a required argument + /// that is only required as long as there is no subcommand present, + /// using this setting would allow you to set those arguments to [`Arg::required(true)`] + /// and yet receive no error so long as the user uses a valid subcommand instead. + /// + /// **NOTE:** This defaults to false (using subcommand does *not* negate requirements) + /// + /// # Examples + /// + /// This first example shows that it is an error to not use a required argument + /// + /// ```rust + /// # use clap::{App, Arg, AppSettings, SubCommand, ErrorKind}; + /// let err = App::new("myprog") + /// .setting(AppSettings::SubcommandsNegateReqs) + /// .arg(Arg::with_name("opt").required(true)) + /// .subcommand(SubCommand::with_name("test")) + /// .get_matches_from_safe(vec![ + /// "myprog" + /// ]); + /// assert!(err.is_err()); + /// assert_eq!(err.unwrap_err().kind, ErrorKind::MissingRequiredArgument); + /// # ; + /// ``` + /// + /// This next example shows that it is no longer error to not use a required argument if a + /// valid subcommand is used. + /// + /// ```rust + /// # use clap::{App, Arg, AppSettings, SubCommand, ErrorKind}; + /// let noerr = App::new("myprog") + /// .setting(AppSettings::SubcommandsNegateReqs) + /// .arg(Arg::with_name("opt").required(true)) + /// .subcommand(SubCommand::with_name("test")) + /// .get_matches_from_safe(vec![ + /// "myprog", "test" + /// ]); + /// assert!(noerr.is_ok()); + /// # ; + /// ``` + /// [`Arg::required(true)`]: ./struct.Arg.html#method.required + /// [`SubCommand`]: ./struct.SubCommand.html + SubcommandsNegateReqs, + + /// Specifies that the help text should be displayed (before exiting gracefully) if no + /// [`SubCommand`]s are present at runtime (i.e. 
an empty run such as `$ myprog`). + /// + /// **NOTE:** This should *not* be used with [`AppSettings::SubcommandRequired`] as they do + /// nearly same thing; this prints the help text, and the other prints an error. + /// + /// **NOTE:** If the user specifies arguments at runtime, but no subcommand the help text will + /// still be displayed and exit. If this is *not* the desired result, consider using + /// [`AppSettings::ArgRequiredElseHelp`] instead. + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg, AppSettings}; + /// App::new("myprog") + /// .setting(AppSettings::SubcommandRequiredElseHelp) + /// # ; + /// ``` + /// [`SubCommand`]: ./struct.SubCommand.html + /// [`AppSettings::SubcommandRequired`]: ./enum.AppSettings.html#variant.SubcommandRequired + /// [`AppSettings::ArgRequiredElseHelp`]: ./enum.AppSettings.html#variant.ArgRequiredElseHelp + SubcommandRequiredElseHelp, + + /// Specifies that any invalid UTF-8 code points should be treated as an error and fail + /// with a [`ErrorKind::InvalidUtf8`] error. + /// + /// **NOTE:** This rule only applies to argument values; Things such as flags, options, and + /// [`SubCommand`]s themselves only allow valid UTF-8 code points. + /// + /// # Platform Specific + /// + /// Non Windows systems only + /// + /// # Examples + /// + #[cfg_attr(not(unix), doc = " ```ignore")] + #[cfg_attr(unix, doc = " ```")] + /// # use clap::{App, AppSettings, ErrorKind}; + /// use std::ffi::OsString; + /// use std::os::unix::ffi::OsStringExt; + /// + /// let m = App::new("myprog") + /// .setting(AppSettings::StrictUtf8) + /// .arg_from_usage(" 'some positional arg'") + /// .get_matches_from_safe( + /// vec![ + /// OsString::from("myprog"), + /// OsString::from_vec(vec![0xe9])]); + /// + /// assert!(m.is_err()); + /// assert_eq!(m.unwrap_err().kind, ErrorKind::InvalidUtf8); + /// ``` + /// [`SubCommand`]: ./struct.SubCommand.html + /// [`ErrorKind::InvalidUtf8`]: ./enum.ErrorKind.html#variant.InvalidUtf8 + StrictUtf8, + + /// Allows specifying that if no [`SubCommand`] is present at runtime, + /// error and exit gracefully. + /// + /// **NOTE:** This defaults to `false` (subcommands do *not* need to be present) + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, AppSettings, SubCommand, ErrorKind}; + /// let err = App::new("myprog") + /// .setting(AppSettings::SubcommandRequired) + /// .subcommand(SubCommand::with_name("test")) + /// .get_matches_from_safe(vec![ + /// "myprog", + /// ]); + /// assert!(err.is_err()); + /// assert_eq!(err.unwrap_err().kind, ErrorKind::MissingSubcommand); + /// # ; + /// ``` + /// [`SubCommand`]: ./struct.SubCommand.html + SubcommandRequired, + + /// Specifies that the final positional argument is a "VarArg" and that `clap` should not + /// attempt to parse any further args. + /// + /// The values of the trailing positional argument will contain all args from itself on. + /// + /// **NOTE:** The final positional argument **must** have [`Arg::multiple(true)`] or the usage + /// string equivalent. + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg, AppSettings}; + /// let m = App::new("myprog") + /// .setting(AppSettings::TrailingVarArg) + /// .arg(Arg::from_usage("... 
'commands to run'")) + /// .get_matches_from(vec!["myprog", "arg1", "-r", "val1"]); + /// + /// let trail: Vec<&str> = m.values_of("cmd").unwrap().collect(); + /// assert_eq!(trail, ["arg1", "-r", "val1"]); + /// ``` + /// [`Arg::multiple(true)`]: ./struct.Arg.html#method.multiple + TrailingVarArg, + + /// Groups flags and options together, presenting a more unified help message + /// (a la `getopts` or `docopt` style). + /// + /// The default is that the auto-generated help message will group flags, and options + /// separately. + /// + /// **NOTE:** This setting is cosmetic only and does not affect any functionality. + /// + /// # Examples + /// + /// ```no_run + /// # use clap::{App, Arg, SubCommand, AppSettings}; + /// App::new("myprog") + /// .setting(AppSettings::UnifiedHelpMessage) + /// .get_matches(); + /// // running `myprog --help` will display a unified "docopt" or "getopts" style help message + /// ``` + UnifiedHelpMessage, + + /// Disables `-V` and `--version` for all [`SubCommand`]s + /// (Defaults to `false`; subcommands *do* have version flags.) + /// + /// **NOTE:** This setting must be set **prior** adding any subcommands + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, SubCommand, AppSettings, ErrorKind}; + /// let res = App::new("myprog") + /// .version("v1.1") + /// .setting(AppSettings::VersionlessSubcommands) + /// .subcommand(SubCommand::with_name("test")) + /// .get_matches_from_safe(vec![ + /// "myprog", "test", "-V" + /// ]); + /// assert!(res.is_err()); + /// assert_eq!(res.unwrap_err().kind, ErrorKind::UnknownArgument); + /// ``` + /// [`SubCommand`]: ./struct.SubCommand.html + VersionlessSubcommands, + + /// Will display a message "Press [ENTER]/[RETURN] to continue..." and wait for user before + /// exiting + /// + /// This is most useful when writing an application which is run from a GUI shortcut, or on + /// Windows where a user tries to open the binary by double-clicking instead of using the + /// command line. 
+ /// + /// **NOTE:** This setting is **not** recursive with [`SubCommand`]s, meaning if you wish this + /// behavior for all subcommands, you must set this on each command (needing this is extremely + /// rare) + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg, AppSettings}; + /// App::new("myprog") + /// .setting(AppSettings::WaitOnError) + /// # ; + /// ``` + /// [`SubCommand`]: ./struct.SubCommand.html + WaitOnError, + + #[doc(hidden)] NeedsLongVersion, + + #[doc(hidden)] NeedsLongHelp, + + #[doc(hidden)] NeedsSubcommandHelp, + + #[doc(hidden)] LowIndexMultiplePositional, + + #[doc(hidden)] TrailingValues, + + #[doc(hidden)] ValidNegNumFound, + + #[doc(hidden)] Propagated, + + #[doc(hidden)] ValidArgFound, + + #[doc(hidden)] ContainsLast, +} + +impl FromStr for AppSettings { + type Err = String; + fn from_str(s: &str) -> Result::Err> { + match &*s.to_ascii_lowercase() { + "argrequiredelsehelp" => Ok(AppSettings::ArgRequiredElseHelp), + "argsnegatesubcommands" => Ok(AppSettings::ArgsNegateSubcommands), + "allowinvalidutf8" => Ok(AppSettings::AllowInvalidUtf8), + "allowleadinghyphen" => Ok(AppSettings::AllowLeadingHyphen), + "allowexternalsubcommands" => Ok(AppSettings::AllowExternalSubcommands), + "allownegativenumbers" => Ok(AppSettings::AllowNegativeNumbers), + "colorauto" => Ok(AppSettings::ColorAuto), + "coloralways" => Ok(AppSettings::ColorAlways), + "colornever" => Ok(AppSettings::ColorNever), + "coloredhelp" => Ok(AppSettings::ColoredHelp), + "derivedisplayorder" => Ok(AppSettings::DeriveDisplayOrder), + "dontcollapseargsinusage" => Ok(AppSettings::DontCollapseArgsInUsage), + "dontdelimittrailingvalues" => Ok(AppSettings::DontDelimitTrailingValues), + "disablehelpsubcommand" => Ok(AppSettings::DisableHelpSubcommand), + "disableversion" => Ok(AppSettings::DisableVersion), + "globalversion" => Ok(AppSettings::GlobalVersion), + "hidden" => Ok(AppSettings::Hidden), + "hidepossiblevaluesinhelp" => Ok(AppSettings::HidePossibleValuesInHelp), + "infersubcommands" => Ok(AppSettings::InferSubcommands), + "lowindexmultiplepositional" => Ok(AppSettings::LowIndexMultiplePositional), + "nobinaryname" => Ok(AppSettings::NoBinaryName), + "nextlinehelp" => Ok(AppSettings::NextLineHelp), + "strictutf8" => Ok(AppSettings::StrictUtf8), + "subcommandsnegatereqs" => Ok(AppSettings::SubcommandsNegateReqs), + "subcommandrequired" => Ok(AppSettings::SubcommandRequired), + "subcommandrequiredelsehelp" => Ok(AppSettings::SubcommandRequiredElseHelp), + "trailingvararg" => Ok(AppSettings::TrailingVarArg), + "unifiedhelpmessage" => Ok(AppSettings::UnifiedHelpMessage), + "versionlesssubcommands" => Ok(AppSettings::VersionlessSubcommands), + "waitonerror" => Ok(AppSettings::WaitOnError), + "validnegnumfound" => Ok(AppSettings::ValidNegNumFound), + "validargfound" => Ok(AppSettings::ValidArgFound), + "propagated" => Ok(AppSettings::Propagated), + "trailingvalues" => Ok(AppSettings::TrailingValues), + _ => Err("unknown AppSetting, cannot convert from str".to_owned()), + } + } +} + +#[cfg(test)] +mod test { + use super::AppSettings; + + #[test] + fn app_settings_fromstr() { + assert_eq!( + "argsnegatesubcommands".parse::().unwrap(), + AppSettings::ArgsNegateSubcommands + ); + assert_eq!( + "argrequiredelsehelp".parse::().unwrap(), + AppSettings::ArgRequiredElseHelp + ); + assert_eq!( + "allowexternalsubcommands".parse::().unwrap(), + AppSettings::AllowExternalSubcommands + ); + assert_eq!( + "allowinvalidutf8".parse::().unwrap(), + AppSettings::AllowInvalidUtf8 + ); + assert_eq!( + 
"allowleadinghyphen".parse::().unwrap(), + AppSettings::AllowLeadingHyphen + ); + assert_eq!( + "allownegativenumbers".parse::().unwrap(), + AppSettings::AllowNegativeNumbers + ); + assert_eq!( + "coloredhelp".parse::().unwrap(), + AppSettings::ColoredHelp + ); + assert_eq!( + "colorauto".parse::().unwrap(), + AppSettings::ColorAuto + ); + assert_eq!( + "coloralways".parse::().unwrap(), + AppSettings::ColorAlways + ); + assert_eq!( + "colornever".parse::().unwrap(), + AppSettings::ColorNever + ); + assert_eq!( + "disablehelpsubcommand".parse::().unwrap(), + AppSettings::DisableHelpSubcommand + ); + assert_eq!( + "disableversion".parse::().unwrap(), + AppSettings::DisableVersion + ); + assert_eq!( + "dontcollapseargsinusage".parse::().unwrap(), + AppSettings::DontCollapseArgsInUsage + ); + assert_eq!( + "dontdelimittrailingvalues".parse::().unwrap(), + AppSettings::DontDelimitTrailingValues + ); + assert_eq!( + "derivedisplayorder".parse::().unwrap(), + AppSettings::DeriveDisplayOrder + ); + assert_eq!( + "globalversion".parse::().unwrap(), + AppSettings::GlobalVersion + ); + assert_eq!( + "hidden".parse::().unwrap(), + AppSettings::Hidden + ); + assert_eq!( + "hidepossiblevaluesinhelp".parse::().unwrap(), + AppSettings::HidePossibleValuesInHelp + ); + assert_eq!( + "lowindexmultiplePositional".parse::().unwrap(), + AppSettings::LowIndexMultiplePositional + ); + assert_eq!( + "nobinaryname".parse::().unwrap(), + AppSettings::NoBinaryName + ); + assert_eq!( + "nextlinehelp".parse::().unwrap(), + AppSettings::NextLineHelp + ); + assert_eq!( + "subcommandsnegatereqs".parse::().unwrap(), + AppSettings::SubcommandsNegateReqs + ); + assert_eq!( + "subcommandrequired".parse::().unwrap(), + AppSettings::SubcommandRequired + ); + assert_eq!( + "subcommandrequiredelsehelp".parse::().unwrap(), + AppSettings::SubcommandRequiredElseHelp + ); + assert_eq!( + "strictutf8".parse::().unwrap(), + AppSettings::StrictUtf8 + ); + assert_eq!( + "trailingvararg".parse::().unwrap(), + AppSettings::TrailingVarArg + ); + assert_eq!( + "unifiedhelpmessage".parse::().unwrap(), + AppSettings::UnifiedHelpMessage + ); + assert_eq!( + "versionlesssubcommands".parse::().unwrap(), + AppSettings::VersionlessSubcommands + ); + assert_eq!( + "waitonerror".parse::().unwrap(), + AppSettings::WaitOnError + ); + assert_eq!( + "validnegnumfound".parse::().unwrap(), + AppSettings::ValidNegNumFound + ); + assert_eq!( + "validargfound".parse::().unwrap(), + AppSettings::ValidArgFound + ); + assert_eq!( + "propagated".parse::().unwrap(), + AppSettings::Propagated + ); + assert_eq!( + "trailingvalues".parse::().unwrap(), + AppSettings::TrailingValues + ); + assert_eq!( + "infersubcommands".parse::().unwrap(), + AppSettings::InferSubcommands + ); + assert!("hahahaha".parse::().is_err()); + } +} diff --git a/clap/src/app/usage.rs b/clap/src/app/usage.rs new file mode 100644 index 000000000..609058843 --- /dev/null +++ b/clap/src/app/usage.rs @@ -0,0 +1,479 @@ +// std +use std::collections::{BTreeMap, VecDeque}; + +// Internal +use INTERNAL_ERROR_MSG; +use args::{AnyArg, ArgMatcher, PosBuilder}; +use args::settings::ArgSettings; +use app::settings::AppSettings as AS; +use app::parser::Parser; + +// Creates a usage string for display. 
This happens just after all arguments were parsed, but before +// any subcommands have been parsed (so as to give subcommands their own usage recursively) +pub fn create_usage_with_title(p: &Parser, used: &[&str]) -> String { + debugln!("usage::create_usage_with_title;"); + let mut usage = String::with_capacity(75); + usage.push_str("USAGE:\n "); + usage.push_str(&*create_usage_no_title(p, used)); + usage +} + +// Creates a usage string to be used in error message (i.e. one with currently used args) +pub fn create_error_usage<'a, 'b>( + p: &Parser<'a, 'b>, + matcher: &'b ArgMatcher<'a>, + extra: Option<&str>, +) -> String { + let mut args: Vec<_> = matcher + .arg_names() + .iter() + .filter(|n| { + if let Some(o) = find_by_name!(p, **n, opts, iter) { + !o.b.is_set(ArgSettings::Required) && !o.b.is_set(ArgSettings::Hidden) + } else if let Some(p) = find_by_name!(p, **n, positionals, values) { + !p.b.is_set(ArgSettings::Required) && p.b.is_set(ArgSettings::Hidden) + } else { + true // flags can't be required, so they're always true + } + }) + .map(|&n| n) + .collect(); + if let Some(r) = extra { + args.push(r); + } + create_usage_with_title(p, &*args) +} + +// Creates a usage string (*without title*) if one was not provided by the user manually. +pub fn create_usage_no_title(p: &Parser, used: &[&str]) -> String { + debugln!("usage::create_usage_no_title;"); + if let Some(u) = p.meta.usage_str { + String::from(&*u) + } else if used.is_empty() { + create_help_usage(p, true) + } else { + create_smart_usage(p, used) + } +} + +// Creates a usage string for display in help messages (i.e. not for errors) +pub fn create_help_usage(p: &Parser, incl_reqs: bool) -> String { + let mut usage = String::with_capacity(75); + let name = p.meta + .usage + .as_ref() + .unwrap_or_else(|| p.meta.bin_name.as_ref().unwrap_or(&p.meta.name)); + usage.push_str(&*name); + let req_string = if incl_reqs { + let mut reqs: Vec<&str> = p.required().map(|r| &**r).collect(); + reqs.sort(); + reqs.dedup(); + get_required_usage_from(p, &reqs, None, None, false) + .iter() + .fold(String::new(), |a, s| a + &format!(" {}", s)[..]) + } else { + String::new() + }; + + let flags = needs_flags_tag(p); + if flags && !p.is_set(AS::UnifiedHelpMessage) { + usage.push_str(" [FLAGS]"); + } else if flags { + usage.push_str(" [OPTIONS]"); + } + if !p.is_set(AS::UnifiedHelpMessage) && p.opts.iter().any(|o| { + !o.is_set(ArgSettings::Required) && !o.is_set(ArgSettings::Hidden) + }) { + usage.push_str(" [OPTIONS]"); + } + + usage.push_str(&req_string[..]); + + let has_last = p.positionals.values().any(|p| p.is_set(ArgSettings::Last)); + // places a '--' in the usage string if there are args and options + // supporting multiple values + if p.opts.iter().any(|o| o.is_set(ArgSettings::Multiple)) + && p.positionals + .values() + .any(|p| !p.is_set(ArgSettings::Required)) + && !(p.has_visible_subcommands() || p.is_set(AS::AllowExternalSubcommands)) + && !has_last + { + usage.push_str(" [--]"); + } + let not_req_or_hidden = |p: &PosBuilder| { + (!p.is_set(ArgSettings::Required) || p.is_set(ArgSettings::Last)) + && !p.is_set(ArgSettings::Hidden) + }; + if p.has_positionals() && p.positionals.values().any(not_req_or_hidden) { + if let Some(args_tag) = get_args_tag(p, incl_reqs) { + usage.push_str(&*args_tag); + } else { + usage.push_str(" [ARGS]"); + } + if has_last && incl_reqs { + let pos = p.positionals + .values() + .find(|p| p.b.is_set(ArgSettings::Last)) + .expect(INTERNAL_ERROR_MSG); + debugln!("usage::create_help_usage: '{}' has .last(true)", 
pos.name()); + let req = pos.is_set(ArgSettings::Required); + if req + && p.positionals + .values() + .any(|p| !p.is_set(ArgSettings::Required)) + { + usage.push_str(" -- <"); + } else if req { + usage.push_str(" [--] <"); + } else { + usage.push_str(" [-- <"); + } + usage.push_str(&*pos.name_no_brackets()); + usage.push_str(">"); + usage.push_str(pos.multiple_str()); + if !req { + usage.push_str("]"); + } + } + } + + // incl_reqs is only false when this function is called recursively + if p.has_visible_subcommands() && incl_reqs || p.is_set(AS::AllowExternalSubcommands) { + if p.is_set(AS::SubcommandsNegateReqs) || p.is_set(AS::ArgsNegateSubcommands) { + if !p.is_set(AS::ArgsNegateSubcommands) { + usage.push_str("\n "); + usage.push_str(&*create_help_usage(p, false)); + usage.push_str(" "); + } else { + usage.push_str("\n "); + usage.push_str(&*name); + usage.push_str(" "); + } + } else if p.is_set(AS::SubcommandRequired) || p.is_set(AS::SubcommandRequiredElseHelp) { + usage.push_str(" "); + } else { + usage.push_str(" [SUBCOMMAND]"); + } + } + usage.shrink_to_fit(); + debugln!("usage::create_help_usage: usage={}", usage); + usage +} + +// Creates a context aware usage string, or "smart usage" from currently used +// args, and requirements +fn create_smart_usage(p: &Parser, used: &[&str]) -> String { + debugln!("usage::smart_usage;"); + let mut usage = String::with_capacity(75); + let mut hs: Vec<&str> = p.required().map(|s| &**s).collect(); + hs.extend_from_slice(used); + + let r_string = get_required_usage_from(p, &hs, None, None, false) + .iter() + .fold(String::new(), |acc, s| acc + &format!(" {}", s)[..]); + + usage.push_str( + &p.meta + .usage + .as_ref() + .unwrap_or_else(|| p.meta.bin_name.as_ref().unwrap_or(&p.meta.name))[..], + ); + usage.push_str(&*r_string); + if p.is_set(AS::SubcommandRequired) { + usage.push_str(" "); + } + usage.shrink_to_fit(); + usage +} + +// Gets the `[ARGS]` tag for the usage string +fn get_args_tag(p: &Parser, incl_reqs: bool) -> Option { + debugln!("usage::get_args_tag;"); + let mut count = 0; + 'outer: for pos in p.positionals + .values() + .filter(|pos| !pos.is_set(ArgSettings::Required)) + .filter(|pos| !pos.is_set(ArgSettings::Hidden)) + .filter(|pos| !pos.is_set(ArgSettings::Last)) + { + debugln!("usage::get_args_tag:iter:{}:", pos.b.name); + if let Some(g_vec) = p.groups_for_arg(pos.b.name) { + for grp_s in &g_vec { + debugln!("usage::get_args_tag:iter:{}:iter:{};", pos.b.name, grp_s); + // if it's part of a required group we don't want to count it + if p.groups.iter().any(|g| g.required && (&g.name == grp_s)) { + continue 'outer; + } + } + } + count += 1; + debugln!( + "usage::get_args_tag:iter: {} Args not required or hidden", + count + ); + } + if !p.is_set(AS::DontCollapseArgsInUsage) && count > 1 { + debugln!("usage::get_args_tag:iter: More than one, returning [ARGS]"); + return None; // [ARGS] + } else if count == 1 && incl_reqs { + let pos = p.positionals + .values() + .find(|pos| { + !pos.is_set(ArgSettings::Required) && !pos.is_set(ArgSettings::Hidden) + && !pos.is_set(ArgSettings::Last) + }) + .expect(INTERNAL_ERROR_MSG); + debugln!( + "usage::get_args_tag:iter: Exactly one, returning '{}'", + pos.name() + ); + return Some(format!( + " [{}]{}", + pos.name_no_brackets(), + pos.multiple_str() + )); + } else if p.is_set(AS::DontCollapseArgsInUsage) && !p.positionals.is_empty() && incl_reqs { + debugln!("usage::get_args_tag:iter: Don't collapse returning all"); + return Some( + p.positionals + .values() + .filter(|pos| 
!pos.is_set(ArgSettings::Required)) + .filter(|pos| !pos.is_set(ArgSettings::Hidden)) + .filter(|pos| !pos.is_set(ArgSettings::Last)) + .map(|pos| { + format!(" [{}]{}", pos.name_no_brackets(), pos.multiple_str()) + }) + .collect::>() + .join(""), + ); + } else if !incl_reqs { + debugln!("usage::get_args_tag:iter: incl_reqs=false, building secondary usage string"); + let highest_req_pos = p.positionals + .iter() + .filter_map(|(idx, pos)| { + if pos.b.is_set(ArgSettings::Required) && !pos.b.is_set(ArgSettings::Last) { + Some(idx) + } else { + None + } + }) + .max() + .unwrap_or_else(|| p.positionals.len()); + return Some( + p.positionals + .iter() + .filter_map(|(idx, pos)| { + if idx <= highest_req_pos { + Some(pos) + } else { + None + } + }) + .filter(|pos| !pos.is_set(ArgSettings::Required)) + .filter(|pos| !pos.is_set(ArgSettings::Hidden)) + .filter(|pos| !pos.is_set(ArgSettings::Last)) + .map(|pos| { + format!(" [{}]{}", pos.name_no_brackets(), pos.multiple_str()) + }) + .collect::>() + .join(""), + ); + } + Some("".into()) +} + +// Determines if we need the `[FLAGS]` tag in the usage string +fn needs_flags_tag(p: &Parser) -> bool { + debugln!("usage::needs_flags_tag;"); + 'outer: for f in &p.flags { + debugln!("usage::needs_flags_tag:iter: f={};", f.b.name); + if let Some(l) = f.s.long { + if l == "help" || l == "version" { + // Don't print `[FLAGS]` just for help or version + continue; + } + } + if let Some(g_vec) = p.groups_for_arg(f.b.name) { + for grp_s in &g_vec { + debugln!("usage::needs_flags_tag:iter:iter: grp_s={};", grp_s); + if p.groups.iter().any(|g| &g.name == grp_s && g.required) { + debugln!("usage::needs_flags_tag:iter:iter: Group is required"); + continue 'outer; + } + } + } + if f.is_set(ArgSettings::Hidden) { + continue; + } + debugln!("usage::needs_flags_tag:iter: [FLAGS] required"); + return true; + } + + debugln!("usage::needs_flags_tag: [FLAGS] not required"); + false +} + +// Returns the required args in usage string form by fully unrolling all groups +pub fn get_required_usage_from<'a, 'b>( + p: &Parser<'a, 'b>, + reqs: &[&'a str], + matcher: Option<&ArgMatcher<'a>>, + extra: Option<&str>, + incl_last: bool, +) -> VecDeque { + debugln!( + "usage::get_required_usage_from: reqs={:?}, extra={:?}", + reqs, + extra + ); + let mut desc_reqs: Vec<&str> = vec![]; + desc_reqs.extend(extra); + let mut new_reqs: Vec<&str> = vec![]; + macro_rules! 
get_requires { + (@group $a: ident, $v:ident, $p:ident) => {{ + if let Some(rl) = p.groups.iter() + .filter(|g| g.requires.is_some()) + .find(|g| &g.name == $a) + .map(|g| g.requires.as_ref().unwrap()) { + for r in rl { + if !$p.contains(&r) { + debugln!("usage::get_required_usage_from:iter:{}: adding group req={:?}", + $a, r); + $v.push(r); + } + } + } + }}; + ($a:ident, $what:ident, $how:ident, $v:ident, $p:ident) => {{ + if let Some(rl) = p.$what.$how() + .filter(|a| a.b.requires.is_some()) + .find(|arg| &arg.b.name == $a) + .map(|a| a.b.requires.as_ref().unwrap()) { + for &(_, r) in rl.iter() { + if !$p.contains(&r) { + debugln!("usage::get_required_usage_from:iter:{}: adding arg req={:?}", + $a, r); + $v.push(r); + } + } + } + }}; + } + // initialize new_reqs + for a in reqs { + get_requires!(a, flags, iter, new_reqs, reqs); + get_requires!(a, opts, iter, new_reqs, reqs); + get_requires!(a, positionals, values, new_reqs, reqs); + get_requires!(@group a, new_reqs, reqs); + } + desc_reqs.extend_from_slice(&*new_reqs); + debugln!( + "usage::get_required_usage_from: after init desc_reqs={:?}", + desc_reqs + ); + loop { + let mut tmp = vec![]; + for a in &new_reqs { + get_requires!(a, flags, iter, tmp, desc_reqs); + get_requires!(a, opts, iter, tmp, desc_reqs); + get_requires!(a, positionals, values, tmp, desc_reqs); + get_requires!(@group a, tmp, desc_reqs); + } + if tmp.is_empty() { + debugln!("usage::get_required_usage_from: no more children"); + break; + } else { + debugln!("usage::get_required_usage_from: after iter tmp={:?}", tmp); + debugln!( + "usage::get_required_usage_from: after iter new_reqs={:?}", + new_reqs + ); + desc_reqs.extend_from_slice(&*new_reqs); + new_reqs.clear(); + new_reqs.extend_from_slice(&*tmp); + debugln!( + "usage::get_required_usage_from: after iter desc_reqs={:?}", + desc_reqs + ); + } + } + desc_reqs.extend_from_slice(reqs); + desc_reqs.sort(); + desc_reqs.dedup(); + debugln!( + "usage::get_required_usage_from: final desc_reqs={:?}", + desc_reqs + ); + let mut ret_val = VecDeque::new(); + let args_in_groups = p.groups + .iter() + .filter(|gn| desc_reqs.contains(&gn.name)) + .flat_map(|g| p.arg_names_in_group(g.name)) + .collect::>(); + + let pmap = if let Some(m) = matcher { + desc_reqs + .iter() + .filter(|a| p.positionals.values().any(|p| &&p.b.name == a)) + .filter(|&pos| !m.contains(pos)) + .filter_map(|pos| p.positionals.values().find(|x| &x.b.name == pos)) + .filter(|&pos| incl_last || !pos.is_set(ArgSettings::Last)) + .filter(|pos| !args_in_groups.contains(&pos.b.name)) + .map(|pos| (pos.index, pos)) + .collect::>() // sort by index + } else { + desc_reqs + .iter() + .filter(|a| p.positionals.values().any(|pos| &&pos.b.name == a)) + .filter_map(|pos| p.positionals.values().find(|x| &x.b.name == pos)) + .filter(|&pos| incl_last || !pos.is_set(ArgSettings::Last)) + .filter(|pos| !args_in_groups.contains(&pos.b.name)) + .map(|pos| (pos.index, pos)) + .collect::>() // sort by index + }; + debugln!( + "usage::get_required_usage_from: args_in_groups={:?}", + args_in_groups + ); + for &p in pmap.values() { + let s = p.to_string(); + if args_in_groups.is_empty() || !args_in_groups.contains(&&*s) { + ret_val.push_back(s); + } + } + for a in desc_reqs + .iter() + .filter(|name| !p.positionals.values().any(|p| &&p.b.name == name)) + .filter(|name| !p.groups.iter().any(|g| &&g.name == name)) + .filter(|name| !args_in_groups.contains(name)) + .filter(|name| { + !(matcher.is_some() && matcher.as_ref().unwrap().contains(name)) + }) { + 
debugln!("usage::get_required_usage_from:iter:{}:", a); + let arg = find_by_name!(p, *a, flags, iter) + .map(|f| f.to_string()) + .unwrap_or_else(|| { + find_by_name!(p, *a, opts, iter) + .map(|o| o.to_string()) + .expect(INTERNAL_ERROR_MSG) + }); + ret_val.push_back(arg); + } + let mut g_vec: Vec = vec![]; + for g in desc_reqs + .iter() + .filter(|n| p.groups.iter().any(|g| &&g.name == n)) + { + let g_string = p.args_in_group(g).join("|"); + let elem = format!("<{}>", &g_string[..g_string.len()]); + if !g_vec.contains(&elem) { + g_vec.push(elem); + } + } + for g in g_vec { + ret_val.push_back(g); + } + + ret_val +} diff --git a/clap/src/app/validator.rs b/clap/src/app/validator.rs new file mode 100644 index 000000000..fce4cf296 --- /dev/null +++ b/clap/src/app/validator.rs @@ -0,0 +1,573 @@ +// std +use std::fmt::Display; +#[allow(deprecated, unused_imports)] +use std::ascii::AsciiExt; + +// Internal +use INTERNAL_ERROR_MSG; +use INVALID_UTF8; +use args::{AnyArg, ArgMatcher, MatchedArg}; +use args::settings::ArgSettings; +use errors::{Error, ErrorKind}; +use errors::Result as ClapResult; +use app::settings::AppSettings as AS; +use app::parser::{ParseResult, Parser}; +use fmt::{Colorizer, ColorizerOption}; +use app::usage; + +pub struct Validator<'a, 'b, 'z>(&'z mut Parser<'a, 'b>) +where + 'a: 'b, + 'b: 'z; + +impl<'a, 'b, 'z> Validator<'a, 'b, 'z> { + pub fn new(p: &'z mut Parser<'a, 'b>) -> Self { Validator(p) } + + pub fn validate( + &mut self, + needs_val_of: ParseResult<'a>, + subcmd_name: Option, + matcher: &mut ArgMatcher<'a>, + ) -> ClapResult<()> { + debugln!("Validator::validate;"); + let mut reqs_validated = false; + self.0.add_env(matcher)?; + self.0.add_defaults(matcher)?; + if let ParseResult::Opt(a) = needs_val_of { + debugln!("Validator::validate: needs_val_of={:?}", a); + let o = { + self.0 + .opts + .iter() + .find(|o| o.b.name == a) + .expect(INTERNAL_ERROR_MSG) + .clone() + }; + self.validate_required(matcher)?; + reqs_validated = true; + let should_err = if let Some(v) = matcher.0.args.get(&*o.b.name) { + v.vals.is_empty() && !(o.v.min_vals.is_some() && o.v.min_vals.unwrap() == 0) + } else { + true + }; + if should_err { + return Err(Error::empty_value( + &o, + &*usage::create_error_usage(self.0, matcher, None), + self.0.color(), + )); + } + } + + if matcher.is_empty() && matcher.subcommand_name().is_none() + && self.0.is_set(AS::ArgRequiredElseHelp) + { + let mut out = vec![]; + self.0.write_help_err(&mut out)?; + return Err(Error { + message: String::from_utf8_lossy(&*out).into_owned(), + kind: ErrorKind::MissingArgumentOrSubcommand, + info: None, + }); + } + self.validate_blacklist(matcher)?; + if !(self.0.is_set(AS::SubcommandsNegateReqs) && subcmd_name.is_some()) && !reqs_validated { + self.validate_required(matcher)?; + } + self.validate_matched_args(matcher)?; + matcher.usage(usage::create_usage_with_title(self.0, &[])); + + Ok(()) + } + + fn validate_arg_values( + &self, + arg: &A, + ma: &MatchedArg, + matcher: &ArgMatcher<'a>, + ) -> ClapResult<()> + where + A: AnyArg<'a, 'b> + Display, + { + debugln!("Validator::validate_arg_values: arg={:?}", arg.name()); + for val in &ma.vals { + if self.0.is_set(AS::StrictUtf8) && val.to_str().is_none() { + debugln!( + "Validator::validate_arg_values: invalid UTF-8 found in val {:?}", + val + ); + return Err(Error::invalid_utf8( + &*usage::create_error_usage(self.0, matcher, None), + self.0.color(), + )); + } + if let Some(p_vals) = arg.possible_vals() { + debugln!("Validator::validate_arg_values: possible_vals={:?}", 
p_vals); + let val_str = val.to_string_lossy(); + let ok = if arg.is_set(ArgSettings::CaseInsensitive) { + p_vals.iter().any(|pv| pv.eq_ignore_ascii_case(&*val_str)) + } else { + p_vals.contains(&&*val_str) + }; + if !ok { + return Err(Error::invalid_value( + val_str, + p_vals, + arg, + &*usage::create_error_usage(self.0, matcher, None), + self.0.color(), + )); + } + } + if !arg.is_set(ArgSettings::EmptyValues) && val.is_empty() + && matcher.contains(&*arg.name()) + { + debugln!("Validator::validate_arg_values: illegal empty val found"); + return Err(Error::empty_value( + arg, + &*usage::create_error_usage(self.0, matcher, None), + self.0.color(), + )); + } + if let Some(vtor) = arg.validator() { + debug!("Validator::validate_arg_values: checking validator..."); + if let Err(e) = vtor(val.to_string_lossy().into_owned()) { + sdebugln!("error"); + return Err(Error::value_validation(Some(arg), e, self.0.color())); + } else { + sdebugln!("good"); + } + } + if let Some(vtor) = arg.validator_os() { + debug!("Validator::validate_arg_values: checking validator_os..."); + if let Err(e) = vtor(val) { + sdebugln!("error"); + return Err(Error::value_validation( + Some(arg), + (*e).to_string_lossy().to_string(), + self.0.color(), + )); + } else { + sdebugln!("good"); + } + } + } + Ok(()) + } + + fn build_err(&self, name: &str, matcher: &ArgMatcher) -> ClapResult<()> { + debugln!("build_err!: name={}", name); + let mut c_with = find_from!(self.0, &name, blacklist, matcher); + c_with = c_with.or( + self.0.find_any_arg(name).map_or(None, |aa| aa.blacklist()) + .map_or(None, + |bl| bl.iter().find(|arg| matcher.contains(arg))) + .map_or(None, |an| self.0.find_any_arg(an)) + .map_or(None, |aa| Some(format!("{}", aa))) + ); + debugln!("build_err!: '{:?}' conflicts with '{}'", c_with, &name); +// matcher.remove(&name); + let usg = usage::create_error_usage(self.0, matcher, None); + if let Some(f) = find_by_name!(self.0, name, flags, iter) { + debugln!("build_err!: It was a flag..."); + Err(Error::argument_conflict(f, c_with, &*usg, self.0.color())) + } else if let Some(o) = find_by_name!(self.0, name, opts, iter) { + debugln!("build_err!: It was an option..."); + Err(Error::argument_conflict(o, c_with, &*usg, self.0.color())) + } else { + match find_by_name!(self.0, name, positionals, values) { + Some(p) => { + debugln!("build_err!: It was a positional..."); + Err(Error::argument_conflict(p, c_with, &*usg, self.0.color())) + }, + None => panic!(INTERNAL_ERROR_MSG) + } + } + } + + fn validate_blacklist(&self, matcher: &mut ArgMatcher) -> ClapResult<()> { + debugln!("Validator::validate_blacklist;"); + let mut conflicts: Vec<&str> = vec![]; + for (&name, _) in matcher.iter() { + debugln!("Validator::validate_blacklist:iter:{};", name); + if let Some(grps) = self.0.groups_for_arg(name) { + for grp in &grps { + if let Some(g) = self.0.groups.iter().find(|g| &g.name == grp) { + if !g.multiple { + for arg in &g.args { + if arg == &name { + continue; + } + conflicts.push(arg); + } + } + if let Some(ref gc) = g.conflicts { + conflicts.extend(&*gc); + } + } + } + } + if let Some(arg) = find_any_by_name!(self.0, name) { + if let Some(bl) = arg.blacklist() { + for conf in bl { + if matcher.get(conf).is_some() { + conflicts.push(conf); + } + } + } + } else { + debugln!("Validator::validate_blacklist:iter:{}:group;", name); + let args = self.0.arg_names_in_group(name); + for arg in &args { + debugln!("Validator::validate_blacklist:iter:{}:group:iter:{};", name, arg); + if let Some(bl) = find_any_by_name!(self.0, 
*arg).unwrap().blacklist() { + for conf in bl { + if matcher.get(conf).is_some() { + conflicts.push(conf); + } + } + } + } + } + } + + for name in &conflicts { + debugln!( + "Validator::validate_blacklist:iter:{}: Checking blacklisted arg", + name + ); + let mut should_err = false; + if self.0.groups.iter().any(|g| &g.name == name) { + debugln!( + "Validator::validate_blacklist:iter:{}: groups contains it...", + name + ); + for n in self.0.arg_names_in_group(name) { + debugln!( + "Validator::validate_blacklist:iter:{}:iter:{}: looking in group...", + name, + n + ); + if matcher.contains(n) { + debugln!( + "Validator::validate_blacklist:iter:{}:iter:{}: matcher contains it...", + name, + n + ); + return self.build_err(n, matcher); + } + } + } else if let Some(ma) = matcher.get(name) { + debugln!( + "Validator::validate_blacklist:iter:{}: matcher contains it...", + name + ); + should_err = ma.occurs > 0; + } + if should_err { + return self.build_err(*name, matcher); + } + } + Ok(()) + } + + fn validate_matched_args(&self, matcher: &mut ArgMatcher<'a>) -> ClapResult<()> { + debugln!("Validator::validate_matched_args;"); + for (name, ma) in matcher.iter() { + debugln!( + "Validator::validate_matched_args:iter:{}: vals={:#?}", + name, + ma.vals + ); + if let Some(opt) = find_by_name!(self.0, *name, opts, iter) { + self.validate_arg_num_vals(opt, ma, matcher)?; + self.validate_arg_values(opt, ma, matcher)?; + self.validate_arg_requires(opt, ma, matcher)?; + self.validate_arg_num_occurs(opt, ma, matcher)?; + } else if let Some(flag) = find_by_name!(self.0, *name, flags, iter) { + self.validate_arg_requires(flag, ma, matcher)?; + self.validate_arg_num_occurs(flag, ma, matcher)?; + } else if let Some(pos) = find_by_name!(self.0, *name, positionals, values) { + self.validate_arg_num_vals(pos, ma, matcher)?; + self.validate_arg_num_occurs(pos, ma, matcher)?; + self.validate_arg_values(pos, ma, matcher)?; + self.validate_arg_requires(pos, ma, matcher)?; + } else { + let grp = self.0 + .groups + .iter() + .find(|g| &g.name == name) + .expect(INTERNAL_ERROR_MSG); + if let Some(ref g_reqs) = grp.requires { + if g_reqs.iter().any(|&n| !matcher.contains(n)) { + return self.missing_required_error(matcher, None); + } + } + } + } + Ok(()) + } + + fn validate_arg_num_occurs( + &self, + a: &A, + ma: &MatchedArg, + matcher: &ArgMatcher, + ) -> ClapResult<()> + where + A: AnyArg<'a, 'b> + Display, + { + debugln!("Validator::validate_arg_num_occurs: a={};", a.name()); + if ma.occurs > 1 && !a.is_set(ArgSettings::Multiple) { + // Not the first time, and we don't allow multiples + return Err(Error::unexpected_multiple_usage( + a, + &*usage::create_error_usage(self.0, matcher, None), + self.0.color(), + )); + } + Ok(()) + } + + fn validate_arg_num_vals( + &self, + a: &A, + ma: &MatchedArg, + matcher: &ArgMatcher, + ) -> ClapResult<()> + where + A: AnyArg<'a, 'b> + Display, + { + debugln!("Validator::validate_arg_num_vals:{}", a.name()); + if let Some(num) = a.num_vals() { + debugln!("Validator::validate_arg_num_vals: num_vals set...{}", num); + let should_err = if a.is_set(ArgSettings::Multiple) { + ((ma.vals.len() as u64) % num) != 0 + } else { + num != (ma.vals.len() as u64) + }; + if should_err { + debugln!("Validator::validate_arg_num_vals: Sending error WrongNumberOfValues"); + return Err(Error::wrong_number_of_values( + a, + num, + if a.is_set(ArgSettings::Multiple) { + (ma.vals.len() % num as usize) + } else { + ma.vals.len() + }, + if ma.vals.len() == 1 + || (a.is_set(ArgSettings::Multiple) && (ma.vals.len() 
% num as usize) == 1) + { + "as" + } else { + "ere" + }, + &*usage::create_error_usage(self.0, matcher, None), + self.0.color(), + )); + } + } + if let Some(num) = a.max_vals() { + debugln!("Validator::validate_arg_num_vals: max_vals set...{}", num); + if (ma.vals.len() as u64) > num { + debugln!("Validator::validate_arg_num_vals: Sending error TooManyValues"); + return Err(Error::too_many_values( + ma.vals + .iter() + .last() + .expect(INTERNAL_ERROR_MSG) + .to_str() + .expect(INVALID_UTF8), + a, + &*usage::create_error_usage(self.0, matcher, None), + self.0.color(), + )); + } + } + let min_vals_zero = if let Some(num) = a.min_vals() { + debugln!("Validator::validate_arg_num_vals: min_vals set: {}", num); + if (ma.vals.len() as u64) < num && num != 0 { + debugln!("Validator::validate_arg_num_vals: Sending error TooFewValues"); + return Err(Error::too_few_values( + a, + num, + ma.vals.len(), + &*usage::create_error_usage(self.0, matcher, None), + self.0.color(), + )); + } + num == 0 + } else { + false + }; + // Issue 665 (https://github.com/kbknapp/clap-rs/issues/665) + // Issue 1105 (https://github.com/kbknapp/clap-rs/issues/1105) + if a.takes_value() && !min_vals_zero && ma.vals.is_empty() { + return Err(Error::empty_value( + a, + &*usage::create_error_usage(self.0, matcher, None), + self.0.color(), + )); + } + Ok(()) + } + + fn validate_arg_requires( + &self, + a: &A, + ma: &MatchedArg, + matcher: &ArgMatcher, + ) -> ClapResult<()> + where + A: AnyArg<'a, 'b> + Display, + { + debugln!("Validator::validate_arg_requires:{};", a.name()); + if let Some(a_reqs) = a.requires() { + for &(val, name) in a_reqs.iter().filter(|&&(val, _)| val.is_some()) { + let missing_req = + |v| v == val.expect(INTERNAL_ERROR_MSG) && !matcher.contains(name); + if ma.vals.iter().any(missing_req) { + return self.missing_required_error(matcher, None); + } + } + for &(_, name) in a_reqs.iter().filter(|&&(val, _)| val.is_none()) { + if !matcher.contains(name) { + return self.missing_required_error(matcher, Some(name)); + } + } + } + Ok(()) + } + + fn validate_required(&mut self, matcher: &ArgMatcher) -> ClapResult<()> { + debugln!( + "Validator::validate_required: required={:?};", + self.0.required + ); + + let mut should_err = false; + let mut to_rem = Vec::new(); + for name in &self.0.required { + debugln!("Validator::validate_required:iter:{}:", name); + if matcher.contains(name) { + continue; + } + if to_rem.contains(name) { + continue; + } else if let Some(a) = find_any_by_name!(self.0, *name) { + if self.is_missing_required_ok(a, matcher) { + to_rem.push(a.name()); + if let Some(reqs) = a.requires() { + for r in reqs + .iter() + .filter(|&&(val, _)| val.is_none()) + .map(|&(_, name)| name) + { + to_rem.push(r); + } + } + continue; + } + } + should_err = true; + break; + } + if should_err { + for r in &to_rem { + 'inner: for i in (0 .. 
self.0.required.len()).rev() { + if &self.0.required[i] == r { + self.0.required.swap_remove(i); + break 'inner; + } + } + } + return self.missing_required_error(matcher, None); + } + + // Validate the conditionally required args + for &(a, v, r) in &self.0.r_ifs { + if let Some(ma) = matcher.get(a) { + if matcher.get(r).is_none() && ma.vals.iter().any(|val| val == v) { + return self.missing_required_error(matcher, Some(r)); + } + } + } + Ok(()) + } + + fn validate_arg_conflicts(&self, a: &AnyArg, matcher: &ArgMatcher) -> Option { + debugln!("Validator::validate_arg_conflicts: a={:?};", a.name()); + a.blacklist().map(|bl| { + bl.iter().any(|conf| { + matcher.contains(conf) + || self.0 + .groups + .iter() + .find(|g| &g.name == conf) + .map_or(false, |g| g.args.iter().any(|arg| matcher.contains(arg))) + }) + }) + } + + fn validate_required_unless(&self, a: &AnyArg, matcher: &ArgMatcher) -> Option { + debugln!("Validator::validate_required_unless: a={:?};", a.name()); + macro_rules! check { + ($how:ident, $_self:expr, $a:ident, $m:ident) => {{ + $a.required_unless().map(|ru| { + ru.iter().$how(|n| { + $m.contains(n) || { + if let Some(grp) = $_self.groups.iter().find(|g| &g.name == n) { + grp.args.iter().any(|arg| $m.contains(arg)) + } else { + false + } + } + }) + }) + }}; + } + if a.is_set(ArgSettings::RequiredUnlessAll) { + check!(all, self.0, a, matcher) + } else { + check!(any, self.0, a, matcher) + } + } + + fn missing_required_error(&self, matcher: &ArgMatcher, extra: Option<&str>) -> ClapResult<()> { + debugln!("Validator::missing_required_error: extra={:?}", extra); + let c = Colorizer::new(ColorizerOption { + use_stderr: true, + when: self.0.color(), + }); + let mut reqs = self.0.required.iter().map(|&r| &*r).collect::>(); + if let Some(r) = extra { + reqs.push(r); + } + reqs.retain(|n| !matcher.contains(n)); + reqs.dedup(); + debugln!("Validator::missing_required_error: reqs={:#?}", reqs); + let req_args = + usage::get_required_usage_from(self.0, &reqs[..], Some(matcher), extra, true) + .iter() + .fold(String::new(), |acc, s| { + acc + &format!("\n {}", c.error(s))[..] 
+ }); + debugln!( + "Validator::missing_required_error: req_args={:#?}", + req_args + ); + Err(Error::missing_required_argument( + &*req_args, + &*usage::create_error_usage(self.0, matcher, extra), + self.0.color(), + )) + } + + #[inline] + fn is_missing_required_ok(&self, a: &AnyArg, matcher: &ArgMatcher) -> bool { + debugln!("Validator::is_missing_required_ok: a={}", a.name()); + self.validate_arg_conflicts(a, matcher).unwrap_or(false) + || self.validate_required_unless(a, matcher).unwrap_or(false) + } +} diff --git a/clap/src/args/any_arg.rs b/clap/src/args/any_arg.rs new file mode 100644 index 000000000..eee522833 --- /dev/null +++ b/clap/src/args/any_arg.rs @@ -0,0 +1,74 @@ +// Std +use std::rc::Rc; +use std::fmt as std_fmt; +use std::ffi::{OsStr, OsString}; + +// Internal +use args::settings::ArgSettings; +use map::{self, VecMap}; +use INTERNAL_ERROR_MSG; + +#[doc(hidden)] +pub trait AnyArg<'n, 'e>: std_fmt::Display { + fn name(&self) -> &'n str; + fn overrides(&self) -> Option<&[&'e str]>; + fn aliases(&self) -> Option>; + fn requires(&self) -> Option<&[(Option<&'e str>, &'n str)]>; + fn blacklist(&self) -> Option<&[&'e str]>; + fn required_unless(&self) -> Option<&[&'e str]>; + fn is_set(&self, ArgSettings) -> bool; + fn set(&mut self, ArgSettings); + fn has_switch(&self) -> bool; + fn max_vals(&self) -> Option; + fn min_vals(&self) -> Option; + fn num_vals(&self) -> Option; + fn possible_vals(&self) -> Option<&[&'e str]>; + fn validator(&self) -> Option<&Rc Result<(), String>>>; + fn validator_os(&self) -> Option<&Rc Result<(), OsString>>>; + fn short(&self) -> Option; + fn long(&self) -> Option<&'e str>; + fn val_delim(&self) -> Option; + fn takes_value(&self) -> bool; + fn val_names(&self) -> Option<&VecMap<&'e str>>; + fn help(&self) -> Option<&'e str>; + fn long_help(&self) -> Option<&'e str>; + fn default_val(&self) -> Option<&'e OsStr>; + fn default_vals_ifs(&self) -> Option, &'e OsStr)>>; + fn env<'s>(&'s self) -> Option<(&'n OsStr, Option<&'s OsString>)>; + fn longest_filter(&self) -> bool; + fn val_terminator(&self) -> Option<&'e str>; +} + +pub trait DispOrder { + fn disp_ord(&self) -> usize; +} + +impl<'n, 'e, 'z, T: ?Sized> AnyArg<'n, 'e> for &'z T where T: AnyArg<'n, 'e> + 'z { + fn name(&self) -> &'n str { (*self).name() } + fn overrides(&self) -> Option<&[&'e str]> { (*self).overrides() } + fn aliases(&self) -> Option> { (*self).aliases() } + fn requires(&self) -> Option<&[(Option<&'e str>, &'n str)]> { (*self).requires() } + fn blacklist(&self) -> Option<&[&'e str]> { (*self).blacklist() } + fn required_unless(&self) -> Option<&[&'e str]> { (*self).required_unless() } + fn is_set(&self, a: ArgSettings) -> bool { (*self).is_set(a) } + fn set(&mut self, _: ArgSettings) { panic!(INTERNAL_ERROR_MSG) } + fn has_switch(&self) -> bool { (*self).has_switch() } + fn max_vals(&self) -> Option { (*self).max_vals() } + fn min_vals(&self) -> Option { (*self).min_vals() } + fn num_vals(&self) -> Option { (*self).num_vals() } + fn possible_vals(&self) -> Option<&[&'e str]> { (*self).possible_vals() } + fn validator(&self) -> Option<&Rc Result<(), String>>> { (*self).validator() } + fn validator_os(&self) -> Option<&Rc Result<(), OsString>>> { (*self).validator_os() } + fn short(&self) -> Option { (*self).short() } + fn long(&self) -> Option<&'e str> { (*self).long() } + fn val_delim(&self) -> Option { (*self).val_delim() } + fn takes_value(&self) -> bool { (*self).takes_value() } + fn val_names(&self) -> Option<&VecMap<&'e str>> { (*self).val_names() } + fn help(&self) -> 
Option<&'e str> { (*self).help() } + fn long_help(&self) -> Option<&'e str> { (*self).long_help() } + fn default_val(&self) -> Option<&'e OsStr> { (*self).default_val() } + fn default_vals_ifs(&self) -> Option, &'e OsStr)>> { (*self).default_vals_ifs() } + fn env<'s>(&'s self) -> Option<(&'n OsStr, Option<&'s OsString>)> { (*self).env() } + fn longest_filter(&self) -> bool { (*self).longest_filter() } + fn val_terminator(&self) -> Option<&'e str> { (*self).val_terminator() } +} diff --git a/clap/src/args/arg.rs b/clap/src/args/arg.rs new file mode 100644 index 000000000..7c020bf13 --- /dev/null +++ b/clap/src/args/arg.rs @@ -0,0 +1,3944 @@ +#[cfg(feature = "yaml")] +use std::collections::BTreeMap; +use std::rc::Rc; +use std::ffi::{OsStr, OsString}; +#[cfg(any(target_os = "windows", target_arch = "wasm32"))] +use osstringext::OsStrExt3; +#[cfg(not(any(target_os = "windows", target_arch = "wasm32")))] +use std::os::unix::ffi::OsStrExt; +use std::env; + +#[cfg(feature = "yaml")] +use yaml_rust::Yaml; +use map::VecMap; + +use usage_parser::UsageParser; +use args::settings::ArgSettings; +use args::arg_builder::{Base, Switched, Valued}; + +/// The abstract representation of a command line argument. Used to set all the options and +/// relationships that define a valid argument for the program. +/// +/// There are two methods for constructing [`Arg`]s, using the builder pattern and setting options +/// manually, or using a usage string which is far less verbose but has fewer options. You can also +/// use a combination of the two methods to achieve the best of both worlds. +/// +/// # Examples +/// +/// ```rust +/// # use clap::Arg; +/// // Using the traditional builder pattern and setting each option manually +/// let cfg = Arg::with_name("config") +/// .short("c") +/// .long("config") +/// .takes_value(true) +/// .value_name("FILE") +/// .help("Provides a config file to myprog"); +/// // Using a usage string (setting a similar argument to the one above) +/// let input = Arg::from_usage("-i, --input=[FILE] 'Provides an input file to the program'"); +/// ``` +/// [`Arg`]: ./struct.Arg.html +#[allow(missing_debug_implementations)] +#[derive(Default, Clone)] +pub struct Arg<'a, 'b> +where + 'a: 'b, +{ + #[doc(hidden)] pub b: Base<'a, 'b>, + #[doc(hidden)] pub s: Switched<'b>, + #[doc(hidden)] pub v: Valued<'a, 'b>, + #[doc(hidden)] pub index: Option, + #[doc(hidden)] pub r_ifs: Option>, +} + +impl<'a, 'b> Arg<'a, 'b> { + /// Creates a new instance of [`Arg`] using a unique string name. The name will be used to get + /// information about whether or not the argument was used at runtime, get values, set + /// relationships with other args, etc.. + /// + /// **NOTE:** In the case of arguments that take values (i.e. [`Arg::takes_value(true)`]) + /// and positional arguments (i.e. those without a preceding `-` or `--`) the name will also + /// be displayed when the user prints the usage/help information of the program. + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg}; + /// Arg::with_name("config") + /// # ; + /// ``` + /// [`Arg::takes_value(true)`]: ./struct.Arg.html#method.takes_value + /// [`Arg`]: ./struct.Arg.html + pub fn with_name(n: &'a str) -> Self { + Arg { + b: Base::new(n), + ..Default::default() + } + } + + /// Creates a new instance of [`Arg`] from a .yml (YAML) file. 
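+    ///
+    /// The YAML mapping is expected to use the argument's name as its single key, with the
+    /// settings given as a nested hash whose keys mirror the builder methods (`short`, `long`,
+    /// `help`, `takes_value`, and so on); unrecognized setting keys will panic. As a rough,
+    /// purely illustrative sketch, an entry in such a file might look like:
+    ///
+    /// ```notrust
+    /// config:
+    ///     short: c
+    ///     long: config
+    ///     help: Sets a custom config file
+    ///     takes_value: true
+    /// ```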
+ /// + /// # Examples + /// + /// ```ignore + /// # #[macro_use] + /// # extern crate clap; + /// # use clap::Arg; + /// # fn main() { + /// let yml = load_yaml!("arg.yml"); + /// let arg = Arg::from_yaml(yml); + /// # } + /// ``` + /// [`Arg`]: ./struct.Arg.html + #[cfg(feature = "yaml")] + pub fn from_yaml(y: &BTreeMap) -> Arg { + // We WANT this to panic on error...so expect() is good. + let name_yml = y.keys().nth(0).unwrap(); + let name_str = name_yml.as_str().unwrap(); + let mut a = Arg::with_name(name_str); + let arg_settings = y.get(name_yml).unwrap().as_hash().unwrap(); + + for (k, v) in arg_settings.iter() { + a = match k.as_str().unwrap() { + "short" => yaml_to_str!(a, v, short), + "long" => yaml_to_str!(a, v, long), + "aliases" => yaml_vec_or_str!(v, a, alias), + "help" => yaml_to_str!(a, v, help), + "long_help" => yaml_to_str!(a, v, long_help), + "required" => yaml_to_bool!(a, v, required), + "required_if" => yaml_tuple2!(a, v, required_if), + "required_ifs" => yaml_tuple2!(a, v, required_if), + "takes_value" => yaml_to_bool!(a, v, takes_value), + "index" => yaml_to_u64!(a, v, index), + "global" => yaml_to_bool!(a, v, global), + "multiple" => yaml_to_bool!(a, v, multiple), + "hidden" => yaml_to_bool!(a, v, hidden), + "next_line_help" => yaml_to_bool!(a, v, next_line_help), + "empty_values" => yaml_to_bool!(a, v, empty_values), + "group" => yaml_to_str!(a, v, group), + "number_of_values" => yaml_to_u64!(a, v, number_of_values), + "max_values" => yaml_to_u64!(a, v, max_values), + "min_values" => yaml_to_u64!(a, v, min_values), + "value_name" => yaml_to_str!(a, v, value_name), + "use_delimiter" => yaml_to_bool!(a, v, use_delimiter), + "allow_hyphen_values" => yaml_to_bool!(a, v, allow_hyphen_values), + "last" => yaml_to_bool!(a, v, last), + "require_delimiter" => yaml_to_bool!(a, v, require_delimiter), + "value_delimiter" => yaml_to_str!(a, v, value_delimiter), + "required_unless" => yaml_to_str!(a, v, required_unless), + "display_order" => yaml_to_usize!(a, v, display_order), + "default_value" => yaml_to_str!(a, v, default_value), + "default_value_if" => yaml_tuple3!(a, v, default_value_if), + "default_value_ifs" => yaml_tuple3!(a, v, default_value_if), + "env" => yaml_to_str!(a, v, env), + "value_names" => yaml_vec_or_str!(v, a, value_name), + "groups" => yaml_vec_or_str!(v, a, group), + "requires" => yaml_vec_or_str!(v, a, requires), + "requires_if" => yaml_tuple2!(a, v, requires_if), + "requires_ifs" => yaml_tuple2!(a, v, requires_if), + "conflicts_with" => yaml_vec_or_str!(v, a, conflicts_with), + "overrides_with" => yaml_vec_or_str!(v, a, overrides_with), + "possible_values" => yaml_vec_or_str!(v, a, possible_value), + "required_unless_one" => yaml_vec_or_str!(v, a, required_unless), + "required_unless_all" => { + a = yaml_vec_or_str!(v, a, required_unless); + a.setb(ArgSettings::RequiredUnlessAll); + a + } + s => panic!( + "Unknown Arg setting '{}' in YAML file for arg '{}'", + s, name_str + ), + } + } + + a + } + + /// Creates a new instance of [`Arg`] from a usage string. Allows creation of basic settings + /// for the [`Arg`]. The syntax is flexible, but there are some rules to follow. + /// + /// **NOTE**: Not all settings may be set using the usage string method. Some properties are + /// only available via the builder pattern. + /// + /// **NOTE**: Only ASCII values are officially supported in [`Arg::from_usage`] strings. Some + /// UTF-8 codepoints may work just fine, but this is not guaranteed. 
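+    ///
+    /// As a rough sketch of the correspondence (the argument shown here is purely illustrative),
+    /// the following two forms build a similar option:
+    ///
+    /// ```rust
+    /// # use clap::Arg;
+    /// // Using the builder pattern
+    /// let from_builder = Arg::with_name("config")
+    ///     .short("c")
+    ///     .long("config")
+    ///     .takes_value(true)
+    ///     .value_name("FILE")
+    ///     .help("Sets a custom config file");
+    /// // Using a usage string
+    /// let from_usage = Arg::from_usage("-c, --config=[FILE] 'Sets a custom config file'");
+    /// ```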
+ /// + /// # Syntax + /// + /// Usage strings typically following the form: + /// + /// ```notrust + /// [explicit name] [short] [long] [value names] [help string] + /// ``` + /// + /// This is not a hard rule as the attributes can appear in other orders. There are also + /// several additional sigils which denote additional settings. Below are the details of each + /// portion of the string. + /// + /// ### Explicit Name + /// + /// This is an optional field, if it's omitted the argument will use one of the additional + /// fields as the name using the following priority order: + /// + /// * Explicit Name (This always takes precedence when present) + /// * Long + /// * Short + /// * Value Name + /// + /// `clap` determines explicit names as the first string of characters between either `[]` or + /// `<>` where `[]` has the dual notation of meaning the argument is optional, and `<>` meaning + /// the argument is required. + /// + /// Explicit names may be followed by: + /// * The multiple denotation `...` + /// + /// Example explicit names as follows (`ename` for an optional argument, and `rname` for a + /// required argument): + /// + /// ```notrust + /// [ename] -s, --long 'some flag' + /// -r, --longer 'some other flag' + /// ``` + /// + /// ### Short + /// + /// This is set by placing a single character after a leading `-`. + /// + /// Shorts may be followed by + /// * The multiple denotation `...` + /// * An optional comma `,` which is cosmetic only + /// * Value notation + /// + /// Example shorts are as follows (`-s`, and `-r`): + /// + /// ```notrust + /// -s, --long 'some flag' + /// -r [val], --longer 'some option' + /// ``` + /// + /// ### Long + /// + /// This is set by placing a word (no spaces) after a leading `--`. + /// + /// Shorts may be followed by + /// * The multiple denotation `...` + /// * Value notation + /// + /// Example longs are as follows (`--some`, and `--rapid`): + /// + /// ```notrust + /// -s, --some 'some flag' + /// --rapid=[FILE] 'some option' + /// ``` + /// + /// ### Values (Value Notation) + /// + /// This is set by placing a word(s) between `[]` or `<>` optionally after `=` (although this + /// is cosmetic only and does not affect functionality). If an explicit name has **not** been + /// set, using `<>` will denote a required argument, and `[]` will denote an optional argument + /// + /// Values may be followed by + /// * The multiple denotation `...` + /// * More Value notation + /// + /// More than one value will also implicitly set the arguments number of values, i.e. having + /// two values, `--option [val1] [val2]` specifies that in order for option to be satisified it + /// must receive exactly two values + /// + /// Example values are as follows (`FILE`, and `SPEED`): + /// + /// ```notrust + /// -s, --some [FILE] 'some option' + /// --rapid=... 'some required multiple option' + /// ``` + /// + /// ### Help String + /// + /// The help string is denoted between a pair of single quotes `''` and may contain any + /// characters. + /// + /// Example help strings are as follows: + /// + /// ```notrust + /// -s, --some [FILE] 'some option' + /// --rapid=... 'some required multiple option' + /// ``` + /// + /// ### Additional Sigils + /// + /// Multiple notation `...` (three consecutive dots/periods) specifies that this argument may + /// be used multiple times. Do not confuse multiple occurrences (`...`) with multiple values. + /// `--option val1 val2` is a single occurrence with multiple values. 
`--flag --flag` is + /// multiple occurrences (and then you can obviously have instances of both as well) + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg}; + /// App::new("prog") + /// .args(&[ + /// Arg::from_usage("--config 'a required file for the configuration and no short'"), + /// Arg::from_usage("-d, --debug... 'turns on debugging information and allows multiples'"), + /// Arg::from_usage("[input] 'an optional input file to use'") + /// ]) + /// # ; + /// ``` + /// [`Arg`]: ./struct.Arg.html + /// [`Arg::from_usage`]: ./struct.Arg.html#method.from_usage + pub fn from_usage(u: &'a str) -> Self { + let parser = UsageParser::from_usage(u); + parser.parse() + } + + /// Sets the short version of the argument without the preceding `-`. + /// + /// By default `clap` automatically assigns `V` and `h` to the auto-generated `version` and + /// `help` arguments respectively. You may use the uppercase `V` or lowercase `h` for your own + /// arguments, in which case `clap` simply will not assign those to the auto-generated + /// `version` or `help` arguments. + /// + /// **NOTE:** Any leading `-` characters will be stripped, and only the first + /// non `-` character will be used as the [`short`] version + /// + /// # Examples + /// + /// To set [`short`] use a single valid UTF-8 code point. If you supply a leading `-` such as + /// `-c`, the `-` will be stripped. + /// + /// ```rust + /// # use clap::{App, Arg}; + /// Arg::with_name("config") + /// .short("c") + /// # ; + /// ``` + /// + /// Setting [`short`] allows using the argument via a single hyphen (`-`) such as `-c` + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("prog") + /// .arg(Arg::with_name("config") + /// .short("c")) + /// .get_matches_from(vec![ + /// "prog", "-c" + /// ]); + /// + /// assert!(m.is_present("config")); + /// ``` + /// [`short`]: ./struct.Arg.html#method.short + pub fn short>(mut self, s: S) -> Self { + self.s.short = s.as_ref().trim_left_matches(|c| c == '-').chars().nth(0); + self + } + + /// Sets the long version of the argument without the preceding `--`. + /// + /// By default `clap` automatically assigns `version` and `help` to the auto-generated + /// `version` and `help` arguments respectively. You may use the word `version` or `help` for + /// the long form of your own arguments, in which case `clap` simply will not assign those to + /// the auto-generated `version` or `help` arguments. + /// + /// **NOTE:** Any leading `-` characters will be stripped + /// + /// # Examples + /// + /// To set `long` use a word containing valid UTF-8 codepoints. If you supply a double leading + /// `--` such as `--config` they will be stripped. Hyphens in the middle of the word, however, + /// will *not* be stripped (i.e. `config-file` is allowed) + /// + /// ```rust + /// # use clap::{App, Arg}; + /// Arg::with_name("cfg") + /// .long("config") + /// # ; + /// ``` + /// + /// Setting `long` allows using the argument via a double hyphen (`--`) such as `--config` + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("prog") + /// .arg(Arg::with_name("cfg") + /// .long("config")) + /// .get_matches_from(vec![ + /// "prog", "--config" + /// ]); + /// + /// assert!(m.is_present("cfg")); + /// ``` + pub fn long(mut self, l: &'b str) -> Self { + self.s.long = Some(l.trim_left_matches(|c| c == '-')); + self + } + + /// Allows adding a [`Arg`] alias, which function as "hidden" arguments that + /// automatically dispatch as if this argument was used. 
This is more efficient, and easier + /// than creating multiple hidden arguments as one only needs to check for the existence of + /// this command, and not all variants. + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("prog") + /// .arg(Arg::with_name("test") + /// .long("test") + /// .alias("alias") + /// .takes_value(true)) + /// .get_matches_from(vec![ + /// "prog", "--alias", "cool" + /// ]); + /// assert!(m.is_present("test")); + /// assert_eq!(m.value_of("test"), Some("cool")); + /// ``` + /// [`Arg`]: ./struct.Arg.html + pub fn alias>(mut self, name: S) -> Self { + if let Some(ref mut als) = self.s.aliases { + als.push((name.into(), false)); + } else { + self.s.aliases = Some(vec![(name.into(), false)]); + } + self + } + + /// Allows adding [`Arg`] aliases, which function as "hidden" arguments that + /// automatically dispatch as if this argument was used. This is more efficient, and easier + /// than creating multiple hidden subcommands as one only needs to check for the existence of + /// this command, and not all variants. + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("prog") + /// .arg(Arg::with_name("test") + /// .long("test") + /// .aliases(&["do-stuff", "do-tests", "tests"]) + /// .help("the file to add") + /// .required(false)) + /// .get_matches_from(vec![ + /// "prog", "--do-tests" + /// ]); + /// assert!(m.is_present("test")); + /// ``` + /// [`Arg`]: ./struct.Arg.html + pub fn aliases(mut self, names: &[&'b str]) -> Self { + if let Some(ref mut als) = self.s.aliases { + for n in names { + als.push((n, false)); + } + } else { + self.s.aliases = Some(names.iter().map(|n| (*n, false)).collect::>()); + } + self + } + + /// Allows adding a [`Arg`] alias that functions exactly like those defined with + /// [`Arg::alias`], except that they are visible inside the help message. + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("prog") + /// .arg(Arg::with_name("test") + /// .visible_alias("something-awesome") + /// .long("test") + /// .takes_value(true)) + /// .get_matches_from(vec![ + /// "prog", "--something-awesome", "coffee" + /// ]); + /// assert!(m.is_present("test")); + /// assert_eq!(m.value_of("test"), Some("coffee")); + /// ``` + /// [`Arg`]: ./struct.Arg.html + /// [`App::alias`]: ./struct.Arg.html#method.alias + pub fn visible_alias>(mut self, name: S) -> Self { + if let Some(ref mut als) = self.s.aliases { + als.push((name.into(), true)); + } else { + self.s.aliases = Some(vec![(name.into(), true)]); + } + self + } + + /// Allows adding multiple [`Arg`] aliases that functions exactly like those defined + /// with [`Arg::aliases`], except that they are visible inside the help message. 
+ /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("prog") + /// .arg(Arg::with_name("test") + /// .long("test") + /// .visible_aliases(&["something", "awesome", "cool"])) + /// .get_matches_from(vec![ + /// "prog", "--awesome" + /// ]); + /// assert!(m.is_present("test")); + /// ``` + /// [`Arg`]: ./struct.Arg.html + /// [`App::aliases`]: ./struct.Arg.html#method.aliases + pub fn visible_aliases(mut self, names: &[&'b str]) -> Self { + if let Some(ref mut als) = self.s.aliases { + for n in names { + als.push((n, true)); + } + } else { + self.s.aliases = Some(names.iter().map(|n| (*n, true)).collect::>()); + } + self + } + + /// Sets the short help text of the argument that will be displayed to the user when they print + /// the help information with `-h`. Typically, this is a short (one line) description of the + /// arg. + /// + /// **NOTE:** If only `Arg::help` is provided, and not [`Arg::long_help`] but the user requests + /// `--help` clap will still display the contents of `help` appropriately + /// + /// **NOTE:** Only `Arg::help` is used in completion script generation in order to be concise + /// + /// # Examples + /// + /// Any valid UTF-8 is allowed in the help text. The one exception is when one wishes to + /// include a newline in the help text and have the following text be properly aligned with all + /// the other help text. + /// + /// ```rust + /// # use clap::{App, Arg}; + /// Arg::with_name("config") + /// .help("The config file used by the myprog") + /// # ; + /// ``` + /// + /// Setting `help` displays a short message to the side of the argument when the user passes + /// `-h` or `--help` (by default). + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("prog") + /// .arg(Arg::with_name("cfg") + /// .long("config") + /// .help("Some help text describing the --config arg")) + /// .get_matches_from(vec![ + /// "prog", "--help" + /// ]); + /// ``` + /// + /// The above example displays + /// + /// ```notrust + /// helptest + /// + /// USAGE: + /// helptest [FLAGS] + /// + /// FLAGS: + /// --config Some help text describing the --config arg + /// -h, --help Prints help information + /// -V, --version Prints version information + /// ``` + /// [`Arg::long_help`]: ./struct.Arg.html#method.long_help + pub fn help(mut self, h: &'b str) -> Self { + self.b.help = Some(h); + self + } + + /// Sets the long help text of the argument that will be displayed to the user when they print + /// the help information with `--help`. Typically this a more detailed (multi-line) message + /// that describes the arg. + /// + /// **NOTE:** If only `long_help` is provided, and not [`Arg::help`] but the user requests `-h` + /// clap will still display the contents of `long_help` appropriately + /// + /// **NOTE:** Only [`Arg::help`] is used in completion script generation in order to be concise + /// + /// # Examples + /// + /// Any valid UTF-8 is allowed in the help text. The one exception is when one wishes to + /// include a newline in the help text and have the following text be properly aligned with all + /// the other help text. + /// + /// ```rust + /// # use clap::{App, Arg}; + /// Arg::with_name("config") + /// .long_help( + /// "The config file used by the myprog must be in JSON format + /// with only valid keys and may not contain other nonsense + /// that cannot be read by this program. 
Obviously I'm going on + /// and on, so I'll stop now.") + /// # ; + /// ``` + /// + /// Setting `help` displays a short message to the side of the argument when the user passes + /// `-h` or `--help` (by default). + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("prog") + /// .arg(Arg::with_name("cfg") + /// .long("config") + /// .long_help( + /// "The config file used by the myprog must be in JSON format + /// with only valid keys and may not contain other nonsense + /// that cannot be read by this program. Obviously I'm going on + /// and on, so I'll stop now.")) + /// .get_matches_from(vec![ + /// "prog", "--help" + /// ]); + /// ``` + /// + /// The above example displays + /// + /// ```notrust + /// helptest + /// + /// USAGE: + /// helptest [FLAGS] + /// + /// FLAGS: + /// --config + /// The config file used by the myprog must be in JSON format + /// with only valid keys and may not contain other nonsense + /// that cannot be read by this program. Obviously I'm going on + /// and on, so I'll stop now. + /// + /// -h, --help + /// Prints help information + /// + /// -V, --version + /// Prints version information + /// ``` + /// [`Arg::help`]: ./struct.Arg.html#method.help + pub fn long_help(mut self, h: &'b str) -> Self { + self.b.long_help = Some(h); + self + } + + /// Specifies that this arg is the last, or final, positional argument (i.e. has the highest + /// index) and is *only* able to be accessed via the `--` syntax (i.e. `$ prog args -- + /// last_arg`). Even, if no other arguments are left to parse, if the user omits the `--` syntax + /// they will receive an [`UnknownArgument`] error. Setting an argument to `.last(true)` also + /// allows one to access this arg early using the `--` syntax. Accessing an arg early, even with + /// the `--` syntax is otherwise not possible. + /// + /// **NOTE:** This will change the usage string to look like `$ prog [FLAGS] [-- ]` if + /// `ARG` is marked as `.last(true)`. + /// + /// **NOTE:** This setting will imply [`AppSettings::DontCollapseArgsInUsage`] because failing + /// to set this can make the usage string very confusing. + /// + /// **NOTE**: This setting only applies to positional arguments, and has no affect on FLAGS / + /// OPTIONS + /// + /// **CAUTION:** Setting an argument to `.last(true)` *and* having child subcommands is not + /// recommended with the exception of *also* using [`AppSettings::ArgsNegateSubcommands`] + /// (or [`AppSettings::SubcommandsNegateReqs`] if the argument marked `.last(true)` is also + /// marked [`.required(true)`]) + /// + /// # Examples + /// + /// ```rust + /// # use clap::Arg; + /// Arg::with_name("args") + /// .last(true) + /// # ; + /// ``` + /// + /// Setting [`Arg::last(true)`] ensures the arg has the highest [index] of all positional args + /// and requires that the `--` syntax be used to access it early. + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let res = App::new("prog") + /// .arg(Arg::with_name("first")) + /// .arg(Arg::with_name("second")) + /// .arg(Arg::with_name("third").last(true)) + /// .get_matches_from_safe(vec![ + /// "prog", "one", "--", "three" + /// ]); + /// + /// assert!(res.is_ok()); + /// let m = res.unwrap(); + /// assert_eq!(m.value_of("third"), Some("three")); + /// assert!(m.value_of("second").is_none()); + /// ``` + /// + /// Even if the positional argument marked `.last(true)` is the only argument left to parse, + /// failing to use the `--` syntax results in an error. 
+ /// + /// ```rust + /// # use clap::{App, Arg, ErrorKind}; + /// let res = App::new("prog") + /// .arg(Arg::with_name("first")) + /// .arg(Arg::with_name("second")) + /// .arg(Arg::with_name("third").last(true)) + /// .get_matches_from_safe(vec![ + /// "prog", "one", "two", "three" + /// ]); + /// + /// assert!(res.is_err()); + /// assert_eq!(res.unwrap_err().kind, ErrorKind::UnknownArgument); + /// ``` + /// [`Arg::last(true)`]: ./struct.Arg.html#method.last + /// [index]: ./struct.Arg.html#method.index + /// [`AppSettings::DontCollapseArgsInUsage`]: ./enum.AppSettings.html#variant.DontCollapseArgsInUsage + /// [`AppSettings::ArgsNegateSubcommands`]: ./enum.AppSettings.html#variant.ArgsNegateSubcommands + /// [`AppSettings::SubcommandsNegateReqs`]: ./enum.AppSettings.html#variant.SubcommandsNegateReqs + /// [`.required(true)`]: ./struct.Arg.html#method.required + /// [`UnknownArgument`]: ./enum.ErrorKind.html#variant.UnknownArgument + pub fn last(self, l: bool) -> Self { + if l { + self.set(ArgSettings::Last) + } else { + self.unset(ArgSettings::Last) + } + } + + /// Sets whether or not the argument is required by default. Required by default means it is + /// required, when no other conflicting rules have been evaluated. Conflicting rules take + /// precedence over being required. **Default:** `false` + /// + /// **NOTE:** Flags (i.e. not positional, or arguments that take values) cannot be required by + /// default. This is simply because if a flag should be required, it should simply be implied + /// as no additional information is required from user. Flags by their very nature are simply + /// yes/no, or true/false. + /// + /// # Examples + /// + /// ```rust + /// # use clap::Arg; + /// Arg::with_name("config") + /// .required(true) + /// # ; + /// ``` + /// + /// Setting [`Arg::required(true)`] requires that the argument be used at runtime. + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let res = App::new("prog") + /// .arg(Arg::with_name("cfg") + /// .required(true) + /// .takes_value(true) + /// .long("config")) + /// .get_matches_from_safe(vec![ + /// "prog", "--config", "file.conf" + /// ]); + /// + /// assert!(res.is_ok()); + /// ``` + /// + /// Setting [`Arg::required(true)`] and *not* supplying that argument is an error. + /// + /// ```rust + /// # use clap::{App, Arg, ErrorKind}; + /// let res = App::new("prog") + /// .arg(Arg::with_name("cfg") + /// .required(true) + /// .takes_value(true) + /// .long("config")) + /// .get_matches_from_safe(vec![ + /// "prog" + /// ]); + /// + /// assert!(res.is_err()); + /// assert_eq!(res.unwrap_err().kind, ErrorKind::MissingRequiredArgument); + /// ``` + /// [`Arg::required(true)`]: ./struct.Arg.html#method.required + pub fn required(self, r: bool) -> Self { + if r { + self.set(ArgSettings::Required) + } else { + self.unset(ArgSettings::Required) + } + } + + /// Requires that options use the `--option=val` syntax (i.e. an equals between the option and + /// associated value) **Default:** `false` + /// + /// **NOTE:** This setting also removes the default of allowing empty values and implies + /// [`Arg::empty_values(false)`]. + /// + /// # Examples + /// + /// ```rust + /// # use clap::Arg; + /// Arg::with_name("config") + /// .long("config") + /// .takes_value(true) + /// .require_equals(true) + /// # ; + /// ``` + /// + /// Setting [`Arg::require_equals(true)`] requires that the option have an equals sign between + /// it and the associated value. 
+ /// + /// ```rust + /// # use clap::{App, Arg}; + /// let res = App::new("prog") + /// .arg(Arg::with_name("cfg") + /// .require_equals(true) + /// .takes_value(true) + /// .long("config")) + /// .get_matches_from_safe(vec![ + /// "prog", "--config=file.conf" + /// ]); + /// + /// assert!(res.is_ok()); + /// ``` + /// + /// Setting [`Arg::require_equals(true)`] and *not* supplying the equals will cause an error + /// unless [`Arg::empty_values(true)`] is set. + /// + /// ```rust + /// # use clap::{App, Arg, ErrorKind}; + /// let res = App::new("prog") + /// .arg(Arg::with_name("cfg") + /// .require_equals(true) + /// .takes_value(true) + /// .long("config")) + /// .get_matches_from_safe(vec![ + /// "prog", "--config", "file.conf" + /// ]); + /// + /// assert!(res.is_err()); + /// assert_eq!(res.unwrap_err().kind, ErrorKind::EmptyValue); + /// ``` + /// [`Arg::require_equals(true)`]: ./struct.Arg.html#method.require_equals + /// [`Arg::empty_values(true)`]: ./struct.Arg.html#method.empty_values + /// [`Arg::empty_values(false)`]: ./struct.Arg.html#method.empty_values + pub fn require_equals(mut self, r: bool) -> Self { + if r { + self.unsetb(ArgSettings::EmptyValues); + self.set(ArgSettings::RequireEquals) + } else { + self.unset(ArgSettings::RequireEquals) + } + } + + /// Allows values which start with a leading hyphen (`-`) + /// + /// **WARNING**: Take caution when using this setting combined with [`Arg::multiple(true)`], as + /// this becomes ambiguous `$ prog --arg -- -- val`. All three `--, --, val` will be values + /// when the user may have thought the second `--` would constitute the normal, "Only + /// positional args follow" idiom. To fix this, consider using [`Arg::number_of_values(1)`] + /// + /// **WARNING**: When building your CLIs, consider the effects of allowing leading hyphens and + /// the user passing in a value that matches a valid short. For example `prog -opt -F` where + /// `-F` is supposed to be a value, yet `-F` is *also* a valid short for another arg. Care should + /// should be taken when designing these args. This is compounded by the ability to "stack" + /// short args. I.e. if `-val` is supposed to be a value, but `-v`, `-a`, and `-l` are all valid + /// shorts. + /// + /// # Examples + /// + /// ```rust + /// # use clap::Arg; + /// Arg::with_name("pattern") + /// .allow_hyphen_values(true) + /// # ; + /// ``` + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("prog") + /// .arg(Arg::with_name("pat") + /// .allow_hyphen_values(true) + /// .takes_value(true) + /// .long("pattern")) + /// .get_matches_from(vec![ + /// "prog", "--pattern", "-file" + /// ]); + /// + /// assert_eq!(m.value_of("pat"), Some("-file")); + /// ``` + /// + /// Not setting [`Arg::allow_hyphen_values(true)`] and supplying a value which starts with a + /// hyphen is an error. 
+ /// + /// ```rust + /// # use clap::{App, Arg, ErrorKind}; + /// let res = App::new("prog") + /// .arg(Arg::with_name("pat") + /// .takes_value(true) + /// .long("pattern")) + /// .get_matches_from_safe(vec![ + /// "prog", "--pattern", "-file" + /// ]); + /// + /// assert!(res.is_err()); + /// assert_eq!(res.unwrap_err().kind, ErrorKind::UnknownArgument); + /// ``` + /// [`Arg::allow_hyphen_values(true)`]: ./struct.Arg.html#method.allow_hyphen_values + /// [`Arg::multiple(true)`]: ./struct.Arg.html#method.multiple + /// [`Arg::number_of_values(1)`]: ./struct.Arg.html#method.number_of_values + pub fn allow_hyphen_values(self, a: bool) -> Self { + if a { + self.set(ArgSettings::AllowLeadingHyphen) + } else { + self.unset(ArgSettings::AllowLeadingHyphen) + } + } + /// Sets an arg that override this arg's required setting. (i.e. this arg will be required + /// unless this other argument is present). + /// + /// **Pro Tip:** Using [`Arg::required_unless`] implies [`Arg::required`] and is therefore not + /// mandatory to also set. + /// + /// # Examples + /// + /// ```rust + /// # use clap::Arg; + /// Arg::with_name("config") + /// .required_unless("debug") + /// # ; + /// ``` + /// + /// Setting [`Arg::required_unless(name)`] requires that the argument be used at runtime + /// *unless* `name` is present. In the following example, the required argument is *not* + /// provided, but it's not an error because the `unless` arg has been supplied. + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let res = App::new("prog") + /// .arg(Arg::with_name("cfg") + /// .required_unless("dbg") + /// .takes_value(true) + /// .long("config")) + /// .arg(Arg::with_name("dbg") + /// .long("debug")) + /// .get_matches_from_safe(vec![ + /// "prog", "--debug" + /// ]); + /// + /// assert!(res.is_ok()); + /// ``` + /// + /// Setting [`Arg::required_unless(name)`] and *not* supplying `name` or this arg is an error. + /// + /// ```rust + /// # use clap::{App, Arg, ErrorKind}; + /// let res = App::new("prog") + /// .arg(Arg::with_name("cfg") + /// .required_unless("dbg") + /// .takes_value(true) + /// .long("config")) + /// .arg(Arg::with_name("dbg") + /// .long("debug")) + /// .get_matches_from_safe(vec![ + /// "prog" + /// ]); + /// + /// assert!(res.is_err()); + /// assert_eq!(res.unwrap_err().kind, ErrorKind::MissingRequiredArgument); + /// ``` + /// [`Arg::required_unless`]: ./struct.Arg.html#method.required_unless + /// [`Arg::required`]: ./struct.Arg.html#method.required + /// [`Arg::required_unless(name)`]: ./struct.Arg.html#method.required_unless + pub fn required_unless(mut self, name: &'a str) -> Self { + if let Some(ref mut vec) = self.b.r_unless { + vec.push(name); + } else { + self.b.r_unless = Some(vec![name]); + } + self.required(true) + } + + /// Sets args that override this arg's required setting. (i.e. this arg will be required unless + /// all these other arguments are present). + /// + /// **NOTE:** If you wish for this argument to only be required if *one of* these args are + /// present see [`Arg::required_unless_one`] + /// + /// # Examples + /// + /// ```rust + /// # use clap::Arg; + /// Arg::with_name("config") + /// .required_unless_all(&["cfg", "dbg"]) + /// # ; + /// ``` + /// + /// Setting [`Arg::required_unless_all(names)`] requires that the argument be used at runtime + /// *unless* *all* the args in `names` are present. In the following example, the required + /// argument is *not* provided, but it's not an error because all the `unless` args have been + /// supplied. 
+ /// + /// ```rust + /// # use clap::{App, Arg}; + /// let res = App::new("prog") + /// .arg(Arg::with_name("cfg") + /// .required_unless_all(&["dbg", "infile"]) + /// .takes_value(true) + /// .long("config")) + /// .arg(Arg::with_name("dbg") + /// .long("debug")) + /// .arg(Arg::with_name("infile") + /// .short("i") + /// .takes_value(true)) + /// .get_matches_from_safe(vec![ + /// "prog", "--debug", "-i", "file" + /// ]); + /// + /// assert!(res.is_ok()); + /// ``` + /// + /// Setting [`Arg::required_unless_all(names)`] and *not* supplying *all* of `names` or this + /// arg is an error. + /// + /// ```rust + /// # use clap::{App, Arg, ErrorKind}; + /// let res = App::new("prog") + /// .arg(Arg::with_name("cfg") + /// .required_unless_all(&["dbg", "infile"]) + /// .takes_value(true) + /// .long("config")) + /// .arg(Arg::with_name("dbg") + /// .long("debug")) + /// .arg(Arg::with_name("infile") + /// .short("i") + /// .takes_value(true)) + /// .get_matches_from_safe(vec![ + /// "prog" + /// ]); + /// + /// assert!(res.is_err()); + /// assert_eq!(res.unwrap_err().kind, ErrorKind::MissingRequiredArgument); + /// ``` + /// [`Arg::required_unless_one`]: ./struct.Arg.html#method.required_unless_one + /// [`Arg::required_unless_all(names)`]: ./struct.Arg.html#method.required_unless_all + pub fn required_unless_all(mut self, names: &[&'a str]) -> Self { + if let Some(ref mut vec) = self.b.r_unless { + for s in names { + vec.push(s); + } + } else { + self.b.r_unless = Some(names.iter().map(|s| *s).collect::>()); + } + self.setb(ArgSettings::RequiredUnlessAll); + self.required(true) + } + + /// Sets args that override this arg's [required] setting. (i.e. this arg will be required + /// unless *at least one of* these other arguments are present). + /// + /// **NOTE:** If you wish for this argument to only be required if *all of* these args are + /// present see [`Arg::required_unless_all`] + /// + /// # Examples + /// + /// ```rust + /// # use clap::Arg; + /// Arg::with_name("config") + /// .required_unless_all(&["cfg", "dbg"]) + /// # ; + /// ``` + /// + /// Setting [`Arg::required_unless_one(names)`] requires that the argument be used at runtime + /// *unless* *at least one of* the args in `names` are present. In the following example, the + /// required argument is *not* provided, but it's not an error because one the `unless` args + /// have been supplied. + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let res = App::new("prog") + /// .arg(Arg::with_name("cfg") + /// .required_unless_one(&["dbg", "infile"]) + /// .takes_value(true) + /// .long("config")) + /// .arg(Arg::with_name("dbg") + /// .long("debug")) + /// .arg(Arg::with_name("infile") + /// .short("i") + /// .takes_value(true)) + /// .get_matches_from_safe(vec![ + /// "prog", "--debug" + /// ]); + /// + /// assert!(res.is_ok()); + /// ``` + /// + /// Setting [`Arg::required_unless_one(names)`] and *not* supplying *at least one of* `names` + /// or this arg is an error. 
+ /// + /// ```rust + /// # use clap::{App, Arg, ErrorKind}; + /// let res = App::new("prog") + /// .arg(Arg::with_name("cfg") + /// .required_unless_one(&["dbg", "infile"]) + /// .takes_value(true) + /// .long("config")) + /// .arg(Arg::with_name("dbg") + /// .long("debug")) + /// .arg(Arg::with_name("infile") + /// .short("i") + /// .takes_value(true)) + /// .get_matches_from_safe(vec![ + /// "prog" + /// ]); + /// + /// assert!(res.is_err()); + /// assert_eq!(res.unwrap_err().kind, ErrorKind::MissingRequiredArgument); + /// ``` + /// [required]: ./struct.Arg.html#method.required + /// [`Arg::required_unless_one(names)`]: ./struct.Arg.html#method.required_unless_one + /// [`Arg::required_unless_all`]: ./struct.Arg.html#method.required_unless_all + pub fn required_unless_one(mut self, names: &[&'a str]) -> Self { + if let Some(ref mut vec) = self.b.r_unless { + for s in names { + vec.push(s); + } + } else { + self.b.r_unless = Some(names.iter().map(|s| *s).collect::<Vec<_>>()); + } + self.required(true) + } + + /// Sets a conflicting argument by name. I.e. when using this argument, + /// the following argument can't be present and vice versa. + /// + /// **NOTE:** Conflicting rules take precedence over being required by default. Conflict rules + /// only need to be set for one of the two arguments; they do not need to be set for each. + /// + /// **NOTE:** Defining a conflict is two-way, but does *not* need to be defined for both arguments + /// (i.e. if A conflicts with B, defining A.conflicts_with(B) is sufficient. You do not need + /// to also do B.conflicts_with(A)) + /// + /// # Examples + /// + /// ```rust + /// # use clap::Arg; + /// Arg::with_name("config") + /// .conflicts_with("debug") + /// # ; + /// ``` + /// + /// Setting a conflicting argument, and having both arguments present at runtime, is an error. + /// + /// ```rust + /// # use clap::{App, Arg, ErrorKind}; + /// let res = App::new("prog") + /// .arg(Arg::with_name("cfg") + /// .takes_value(true) + /// .conflicts_with("debug") + /// .long("config")) + /// .arg(Arg::with_name("debug") + /// .long("debug")) + /// .get_matches_from_safe(vec![ + /// "prog", "--debug", "--config", "file.conf" + /// ]); + /// + /// assert!(res.is_err()); + /// assert_eq!(res.unwrap_err().kind, ErrorKind::ArgumentConflict); + /// ``` + pub fn conflicts_with(mut self, name: &'a str) -> Self { + if let Some(ref mut vec) = self.b.blacklist { + vec.push(name); + } else { + self.b.blacklist = Some(vec![name]); + } + self + } + + /// The same as [`Arg::conflicts_with`] but allows specifying multiple two-way conflicts per + /// argument. + /// + /// **NOTE:** Conflicting rules take precedence over being required by default. Conflict rules + /// only need to be set for one of the two arguments; they do not need to be set for each. + /// + /// **NOTE:** Defining a conflict is two-way, but does *not* need to be defined for both arguments + /// (i.e. if A conflicts with B, defining A.conflicts_with(B) is sufficient. You do not need + /// to also do B.conflicts_with(A)) + /// + /// # Examples + /// + /// ```rust + /// # use clap::Arg; + /// Arg::with_name("config") + /// .conflicts_with_all(&["debug", "input"]) + /// # ; + /// ``` + /// + /// Setting conflicting arguments, and having any of the arguments present at runtime with a + /// conflicting argument, is an error.
+ /// + /// ```rust + /// # use clap::{App, Arg, ErrorKind}; + /// let res = App::new("prog") + /// .arg(Arg::with_name("cfg") + /// .takes_value(true) + /// .conflicts_with_all(&["debug", "input"]) + /// .long("config")) + /// .arg(Arg::with_name("debug") + /// .long("debug")) + /// .arg(Arg::with_name("input") + /// .index(1)) + /// .get_matches_from_safe(vec![ + /// "prog", "--config", "file.conf", "file.txt" + /// ]); + /// + /// assert!(res.is_err()); + /// assert_eq!(res.unwrap_err().kind, ErrorKind::ArgumentConflict); + /// ``` + /// [`Arg::conflicts_with`]: ./struct.Arg.html#method.conflicts_with + pub fn conflicts_with_all(mut self, names: &[&'a str]) -> Self { + if let Some(ref mut vec) = self.b.blacklist { + for s in names { + vec.push(s); + } + } else { + self.b.blacklist = Some(names.iter().map(|s| *s).collect::<Vec<_>>()); + } + self + } + + /// Sets an overridable argument by name. I.e. this argument and the following argument + /// will override each other in POSIX style (whichever argument was specified at runtime + /// **last** "wins") + /// + /// **NOTE:** When an argument is overridden it is essentially as if it never was used; any + /// conflicts, requirements, etc. are evaluated **after** all "overrides" have been removed + /// + /// **WARNING:** Positional arguments cannot override themselves (or we would never be able + /// to advance to the next positional). If a positional argument lists itself as an override, + /// it is simply ignored. + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("prog") + /// .arg(Arg::from_usage("-f, --flag 'some flag'") + /// .conflicts_with("debug")) + /// .arg(Arg::from_usage("-d, --debug 'other flag'")) + /// .arg(Arg::from_usage("-c, --color 'third flag'") + /// .overrides_with("flag")) + /// .get_matches_from(vec![ + /// "prog", "-f", "-d", "-c"]); + /// // ^~~~~~~~~~~~^~~~~ flag is overridden by color + /// + /// assert!(m.is_present("color")); + /// assert!(m.is_present("debug")); // even though flag conflicts with debug, it's as if flag + /// // was never used because it was overridden with color + /// assert!(!m.is_present("flag")); + /// ``` + /// Care must be taken when using this setting, and having an arg override itself. This + /// is common practice when supporting things like shell aliases, config files, etc. + /// However, when combined with multiple values, it can get dicey. + /// Here is how clap handles such situations: + /// + /// When a flag overrides itself, it's as if the flag was only ever used once (essentially + /// preventing an "Unexpected multiple usage" error): + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("posix") + /// .arg(Arg::from_usage("--flag 'some flag'").overrides_with("flag")) + /// .get_matches_from(vec!["posix", "--flag", "--flag"]); + /// assert!(m.is_present("flag")); + /// assert_eq!(m.occurrences_of("flag"), 1); + /// ``` + /// Making an arg `multiple(true)` and having it override itself is essentially meaningless. Therefore + /// clap ignores an override of self if it's a flag and it already accepts multiple occurrences. + /// + /// ``` + /// # use clap::{App, Arg}; + /// let m = App::new("posix") + /// .arg(Arg::from_usage("--flag...
'some flag'").overrides_with("flag")) + /// .get_matches_from(vec!["", "--flag", "--flag", "--flag", "--flag"]); + /// assert!(m.is_present("flag")); + /// assert_eq!(m.occurrences_of("flag"), 4); + /// ``` + /// Now notice with options (which *do not* set `multiple(true)`), it's as if only the last + /// occurrence happened. + /// + /// ``` + /// # use clap::{App, Arg}; + /// let m = App::new("posix") + /// .arg(Arg::from_usage("--opt [val] 'some option'").overrides_with("opt")) + /// .get_matches_from(vec!["", "--opt=some", "--opt=other"]); + /// assert!(m.is_present("opt")); + /// assert_eq!(m.occurrences_of("opt"), 1); + /// assert_eq!(m.value_of("opt"), Some("other")); + /// ``` + /// + /// Just like flags, options with `multiple(true)` set will ignore the "override self" setting. + /// + /// ``` + /// # use clap::{App, Arg}; + /// let m = App::new("posix") + /// .arg(Arg::from_usage("--opt [val]... 'some option'") + /// .overrides_with("opt")) + /// .get_matches_from(vec!["", "--opt", "first", "over", "--opt", "other", "val"]); + /// assert!(m.is_present("opt")); + /// assert_eq!(m.occurrences_of("opt"), 2); + /// assert_eq!(m.values_of("opt").unwrap().collect::<Vec<_>>(), &["first", "over", "other", "val"]); + /// ``` + /// + /// A safe thing to do if you'd like to support an option which supports multiple values, but + /// also is "overridable" by itself, is to use `use_delimiter(false)` and *not* use + /// `multiple(true)` while telling users to separate values with a comma (i.e. `val1,val2`) + /// + /// ``` + /// # use clap::{App, Arg}; + /// let m = App::new("posix") + /// .arg(Arg::from_usage("--opt [val] 'some option'") + /// .overrides_with("opt") + /// .use_delimiter(false)) + /// .get_matches_from(vec!["", "--opt=some,other", "--opt=one,two"]); + /// assert!(m.is_present("opt")); + /// assert_eq!(m.occurrences_of("opt"), 1); + /// assert_eq!(m.values_of("opt").unwrap().collect::<Vec<_>>(), &["one,two"]); + /// ``` + pub fn overrides_with(mut self, name: &'a str) -> Self { + if let Some(ref mut vec) = self.b.overrides { + vec.push(name); + } else { + self.b.overrides = Some(vec![name]); + } + self + } + + /// Sets multiple mutually overridable arguments by name. I.e. this argument and the following + /// argument will override each other in POSIX style (whichever argument was specified at + /// runtime **last** "wins") + /// + /// **NOTE:** When an argument is overridden it is essentially as if it never was used; any + /// conflicts, requirements, etc.
are evaluated **after** all "overrides" have been removed + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("prog") + /// .arg(Arg::from_usage("-f, --flag 'some flag'") + /// .conflicts_with("color")) + /// .arg(Arg::from_usage("-d, --debug 'other flag'")) + /// .arg(Arg::from_usage("-c, --color 'third flag'") + /// .overrides_with_all(&["flag", "debug"])) + /// .get_matches_from(vec![ + /// "prog", "-f", "-d", "-c"]); + /// // ^~~~~~^~~~~~~~~ flag and debug are overridden by color + /// + /// assert!(m.is_present("color")); // even though flag conflicts with color, it's as if flag + /// // and debug were never used because they were overridden + /// // with color + /// assert!(!m.is_present("debug")); + /// assert!(!m.is_present("flag")); + /// ``` + pub fn overrides_with_all(mut self, names: &[&'a str]) -> Self { + if let Some(ref mut vec) = self.b.overrides { + for s in names { + vec.push(s); + } + } else { + self.b.overrides = Some(names.iter().map(|s| *s).collect::>()); + } + self + } + + /// Sets an argument by name that is required when this one is present I.e. when + /// using this argument, the following argument *must* be present. + /// + /// **NOTE:** [Conflicting] rules and [override] rules take precedence over being required + /// + /// # Examples + /// + /// ```rust + /// # use clap::Arg; + /// Arg::with_name("config") + /// .requires("input") + /// # ; + /// ``` + /// + /// Setting [`Arg::requires(name)`] requires that the argument be used at runtime if the + /// defining argument is used. If the defining argument isn't used, the other argument isn't + /// required + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let res = App::new("prog") + /// .arg(Arg::with_name("cfg") + /// .takes_value(true) + /// .requires("input") + /// .long("config")) + /// .arg(Arg::with_name("input") + /// .index(1)) + /// .get_matches_from_safe(vec![ + /// "prog" + /// ]); + /// + /// assert!(res.is_ok()); // We didn't use cfg, so input wasn't required + /// ``` + /// + /// Setting [`Arg::requires(name)`] and *not* supplying that argument is an error. + /// + /// ```rust + /// # use clap::{App, Arg, ErrorKind}; + /// let res = App::new("prog") + /// .arg(Arg::with_name("cfg") + /// .takes_value(true) + /// .requires("input") + /// .long("config")) + /// .arg(Arg::with_name("input") + /// .index(1)) + /// .get_matches_from_safe(vec![ + /// "prog", "--config", "file.conf" + /// ]); + /// + /// assert!(res.is_err()); + /// assert_eq!(res.unwrap_err().kind, ErrorKind::MissingRequiredArgument); + /// ``` + /// [`Arg::requires(name)`]: ./struct.Arg.html#method.requires + /// [Conflicting]: ./struct.Arg.html#method.conflicts_with + /// [override]: ./struct.Arg.html#method.overrides_with + pub fn requires(mut self, name: &'a str) -> Self { + if let Some(ref mut vec) = self.b.requires { + vec.push((None, name)); + } else { + let mut vec = vec![]; + vec.push((None, name)); + self.b.requires = Some(vec); + } + self + } + + /// Allows a conditional requirement. The requirement will only become valid if this arg's value + /// equals `val`. 
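+ ///
+ /// For a quick illustrative sketch (the `format` and `profile` argument names below are made
+ /// up for this example and are not defined elsewhere in these docs), this requires the
+ /// `profile` argument only when `--format` is given the value `custom`:
+ ///
+ /// ```rust
+ /// # use clap::Arg;
+ /// // `format` and `profile` are illustrative names only
+ /// Arg::with_name("format")
+ ///     .long("format")
+ ///     .takes_value(true)
+ ///     .requires_if("custom", "profile")
+ /// # ;
+ /// ```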
+ /// + /// **NOTE:** If using YAML the values should be laid out as follows + /// + /// ```yaml + /// requires_if: + /// - [val, arg] + /// ``` + /// + /// # Examples + /// + /// ```rust + /// # use clap::Arg; + /// Arg::with_name("config") + /// .requires_if("val", "arg") + /// # ; + /// ``` + /// + /// Setting [`Arg::requires_if(val, arg)`] requires that the `arg` be used at runtime if the + /// defining argument's value is equal to `val`. If the defining argument is anything other than + /// `val`, the other argument isn't required. + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let res = App::new("prog") + /// .arg(Arg::with_name("cfg") + /// .takes_value(true) + /// .requires_if("my.cfg", "other") + /// .long("config")) + /// .arg(Arg::with_name("other")) + /// .get_matches_from_safe(vec![ + /// "prog", "--config", "some.cfg" + /// ]); + /// + /// assert!(res.is_ok()); // We didn't use --config=my.cfg, so other wasn't required + /// ``` + /// + /// Setting [`Arg::requires_if(val, arg)`] and setting the value to `val` but *not* supplying + /// `arg` is an error. + /// + /// ```rust + /// # use clap::{App, Arg, ErrorKind}; + /// let res = App::new("prog") + /// .arg(Arg::with_name("cfg") + /// .takes_value(true) + /// .requires_if("my.cfg", "input") + /// .long("config")) + /// .arg(Arg::with_name("input")) + /// .get_matches_from_safe(vec![ + /// "prog", "--config", "my.cfg" + /// ]); + /// + /// assert!(res.is_err()); + /// assert_eq!(res.unwrap_err().kind, ErrorKind::MissingRequiredArgument); + /// ``` + /// [`Arg::requires(name)`]: ./struct.Arg.html#method.requires + /// [Conflicting]: ./struct.Arg.html#method.conflicts_with + /// [override]: ./struct.Arg.html#method.overrides_with + pub fn requires_if(mut self, val: &'b str, arg: &'a str) -> Self { + if let Some(ref mut vec) = self.b.requires { + vec.push((Some(val), arg)); + } else { + self.b.requires = Some(vec![(Some(val), arg)]); + } + self + } + + /// Allows multiple conditional requirements. The requirement will only become valid if this arg's value + /// equals `val`. + /// + /// **NOTE:** If using YAML the values should be laid out as follows + /// + /// ```yaml + /// requires_if: + /// - [val, arg] + /// - [val2, arg2] + /// ``` + /// + /// # Examples + /// + /// ```rust + /// # use clap::Arg; + /// Arg::with_name("config") + /// .requires_ifs(&[ + /// ("val", "arg"), + /// ("other_val", "arg2"), + /// ]) + /// # ; + /// ``` + /// + /// Setting [`Arg::requires_ifs(&["val", "arg"])`] requires that the `arg` be used at runtime if the + /// defining argument's value is equal to `val`. If the defining argument's value is anything other + /// than `val`, `arg` isn't required. 
+ /// + /// ```rust + /// # use clap::{App, Arg, ErrorKind}; + /// let res = App::new("prog") + /// .arg(Arg::with_name("cfg") + /// .takes_value(true) + /// .requires_ifs(&[ + /// ("special.conf", "opt"), + /// ("other.conf", "other"), + /// ]) + /// .long("config")) + /// .arg(Arg::with_name("opt") + /// .long("option") + /// .takes_value(true)) + /// .arg(Arg::with_name("other")) + /// .get_matches_from_safe(vec![ + /// "prog", "--config", "special.conf" + /// ]); + /// + /// assert!(res.is_err()); // We used --config=special.conf so --option is required + /// assert_eq!(res.unwrap_err().kind, ErrorKind::MissingRequiredArgument); + /// ``` + /// [`Arg::requires(name)`]: ./struct.Arg.html#method.requires + /// [Conflicting]: ./struct.Arg.html#method.conflicts_with + /// [override]: ./struct.Arg.html#method.overrides_with + pub fn requires_ifs(mut self, ifs: &[(&'b str, &'a str)]) -> Self { + if let Some(ref mut vec) = self.b.requires { + for &(val, arg) in ifs { + vec.push((Some(val), arg)); + } + } else { + let mut vec = vec![]; + for &(val, arg) in ifs { + vec.push((Some(val), arg)); + } + self.b.requires = Some(vec); + } + self + } + + /// Allows specifying that an argument is [required] conditionally. The requirement will only + /// become valid if the specified `arg`'s value equals `val`. + /// + /// **NOTE:** If using YAML the values should be laid out as follows + /// + /// ```yaml + /// required_if: + /// - [arg, val] + /// ``` + /// + /// # Examples + /// + /// ```rust + /// # use clap::Arg; + /// Arg::with_name("config") + /// .required_if("other_arg", "value") + /// # ; + /// ``` + /// + /// Setting [`Arg::required_if(arg, val)`] makes this arg required if the `arg` is used at + /// runtime and it's value is equal to `val`. If the `arg`'s value is anything other than `val`, + /// this argument isn't required. + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let res = App::new("prog") + /// .arg(Arg::with_name("cfg") + /// .takes_value(true) + /// .required_if("other", "special") + /// .long("config")) + /// .arg(Arg::with_name("other") + /// .long("other") + /// .takes_value(true)) + /// .get_matches_from_safe(vec![ + /// "prog", "--other", "not-special" + /// ]); + /// + /// assert!(res.is_ok()); // We didn't use --other=special, so "cfg" wasn't required + /// ``` + /// + /// Setting [`Arg::required_if(arg, val)`] and having `arg` used with a value of `val` but *not* + /// using this arg is an error. + /// + /// ```rust + /// # use clap::{App, Arg, ErrorKind}; + /// let res = App::new("prog") + /// .arg(Arg::with_name("cfg") + /// .takes_value(true) + /// .required_if("other", "special") + /// .long("config")) + /// .arg(Arg::with_name("other") + /// .long("other") + /// .takes_value(true)) + /// .get_matches_from_safe(vec![ + /// "prog", "--other", "special" + /// ]); + /// + /// assert!(res.is_err()); + /// assert_eq!(res.unwrap_err().kind, ErrorKind::MissingRequiredArgument); + /// ``` + /// [`Arg::requires(name)`]: ./struct.Arg.html#method.requires + /// [Conflicting]: ./struct.Arg.html#method.conflicts_with + /// [required]: ./struct.Arg.html#method.required + pub fn required_if(mut self, arg: &'a str, val: &'b str) -> Self { + if let Some(ref mut vec) = self.r_ifs { + vec.push((arg, val)); + } else { + self.r_ifs = Some(vec![(arg, val)]); + } + self + } + + /// Allows specifying that an argument is [required] based on multiple conditions. The + /// conditions are set up in a `(arg, val)` style tuple. 
The requirement will only become valid + /// if one of the specified `arg`'s value equals it's corresponding `val`. + /// + /// **NOTE:** If using YAML the values should be laid out as follows + /// + /// ```yaml + /// required_if: + /// - [arg, val] + /// - [arg2, val2] + /// ``` + /// + /// # Examples + /// + /// ```rust + /// # use clap::Arg; + /// Arg::with_name("config") + /// .required_ifs(&[ + /// ("extra", "val"), + /// ("option", "spec") + /// ]) + /// # ; + /// ``` + /// + /// Setting [`Arg::required_ifs(&[(arg, val)])`] makes this arg required if any of the `arg`s + /// are used at runtime and it's corresponding value is equal to `val`. If the `arg`'s value is + /// anything other than `val`, this argument isn't required. + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let res = App::new("prog") + /// .arg(Arg::with_name("cfg") + /// .required_ifs(&[ + /// ("extra", "val"), + /// ("option", "spec") + /// ]) + /// .takes_value(true) + /// .long("config")) + /// .arg(Arg::with_name("extra") + /// .takes_value(true) + /// .long("extra")) + /// .arg(Arg::with_name("option") + /// .takes_value(true) + /// .long("option")) + /// .get_matches_from_safe(vec![ + /// "prog", "--option", "other" + /// ]); + /// + /// assert!(res.is_ok()); // We didn't use --option=spec, or --extra=val so "cfg" isn't required + /// ``` + /// + /// Setting [`Arg::required_ifs(&[(arg, val)])`] and having any of the `arg`s used with it's + /// value of `val` but *not* using this arg is an error. + /// + /// ```rust + /// # use clap::{App, Arg, ErrorKind}; + /// let res = App::new("prog") + /// .arg(Arg::with_name("cfg") + /// .required_ifs(&[ + /// ("extra", "val"), + /// ("option", "spec") + /// ]) + /// .takes_value(true) + /// .long("config")) + /// .arg(Arg::with_name("extra") + /// .takes_value(true) + /// .long("extra")) + /// .arg(Arg::with_name("option") + /// .takes_value(true) + /// .long("option")) + /// .get_matches_from_safe(vec![ + /// "prog", "--option", "spec" + /// ]); + /// + /// assert!(res.is_err()); + /// assert_eq!(res.unwrap_err().kind, ErrorKind::MissingRequiredArgument); + /// ``` + /// [`Arg::requires(name)`]: ./struct.Arg.html#method.requires + /// [Conflicting]: ./struct.Arg.html#method.conflicts_with + /// [required]: ./struct.Arg.html#method.required + pub fn required_ifs(mut self, ifs: &[(&'a str, &'b str)]) -> Self { + if let Some(ref mut vec) = self.r_ifs { + for r_if in ifs { + vec.push((r_if.0, r_if.1)); + } + } else { + let mut vec = vec![]; + for r_if in ifs { + vec.push((r_if.0, r_if.1)); + } + self.r_ifs = Some(vec); + } + self + } + + /// Sets multiple arguments by names that are required when this one is present I.e. when + /// using this argument, the following arguments *must* be present. + /// + /// **NOTE:** [Conflicting] rules and [override] rules take precedence over being required + /// by default. + /// + /// # Examples + /// + /// ```rust + /// # use clap::Arg; + /// Arg::with_name("config") + /// .requires_all(&["input", "output"]) + /// # ; + /// ``` + /// + /// Setting [`Arg::requires_all(&[arg, arg2])`] requires that all the arguments be used at + /// runtime if the defining argument is used. 
If the defining argument isn't used, the other + /// argument isn't required + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let res = App::new("prog") + /// .arg(Arg::with_name("cfg") + /// .takes_value(true) + /// .requires("input") + /// .long("config")) + /// .arg(Arg::with_name("input") + /// .index(1)) + /// .arg(Arg::with_name("output") + /// .index(2)) + /// .get_matches_from_safe(vec![ + /// "prog" + /// ]); + /// + /// assert!(res.is_ok()); // We didn't use cfg, so input and output weren't required + /// ``` + /// + /// Setting [`Arg::requires_all(&[arg, arg2])`] and *not* supplying all the arguments is an + /// error. + /// + /// ```rust + /// # use clap::{App, Arg, ErrorKind}; + /// let res = App::new("prog") + /// .arg(Arg::with_name("cfg") + /// .takes_value(true) + /// .requires_all(&["input", "output"]) + /// .long("config")) + /// .arg(Arg::with_name("input") + /// .index(1)) + /// .arg(Arg::with_name("output") + /// .index(2)) + /// .get_matches_from_safe(vec![ + /// "prog", "--config", "file.conf", "in.txt" + /// ]); + /// + /// assert!(res.is_err()); + /// // We didn't use output + /// assert_eq!(res.unwrap_err().kind, ErrorKind::MissingRequiredArgument); + /// ``` + /// [Conflicting]: ./struct.Arg.html#method.conflicts_with + /// [override]: ./struct.Arg.html#method.overrides_with + /// [`Arg::requires_all(&[arg, arg2])`]: ./struct.Arg.html#method.requires_all + pub fn requires_all(mut self, names: &[&'a str]) -> Self { + if let Some(ref mut vec) = self.b.requires { + for s in names { + vec.push((None, s)); + } + } else { + let mut vec = vec![]; + for s in names { + vec.push((None, *s)); + } + self.b.requires = Some(vec); + } + self + } + + /// Specifies that the argument takes a value at run time. + /// + /// **NOTE:** values for arguments may be specified in any of the following methods + /// + /// * Using a space such as `-o value` or `--option value` + /// * Using an equals and no space such as `-o=value` or `--option=value` + /// * Use a short and no space such as `-ovalue` + /// + /// **NOTE:** By default, args which allow [multiple values] are delimited by commas, meaning + /// `--option=val1,val2,val3` is three values for the `--option` argument. If you wish to + /// change the delimiter to another character you can use [`Arg::value_delimiter(char)`], + /// alternatively you can turn delimiting values **OFF** by using [`Arg::use_delimiter(false)`] + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg}; + /// Arg::with_name("config") + /// .takes_value(true) + /// # ; + /// ``` + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("prog") + /// .arg(Arg::with_name("mode") + /// .long("mode") + /// .takes_value(true)) + /// .get_matches_from(vec![ + /// "prog", "--mode", "fast" + /// ]); + /// + /// assert!(m.is_present("mode")); + /// assert_eq!(m.value_of("mode"), Some("fast")); + /// ``` + /// [`Arg::value_delimiter(char)`]: ./struct.Arg.html#method.value_delimiter + /// [`Arg::use_delimiter(false)`]: ./struct.Arg.html#method.use_delimiter + /// [multiple values]: ./struct.Arg.html#method.multiple + pub fn takes_value(self, tv: bool) -> Self { + if tv { + self.set(ArgSettings::TakesValue) + } else { + self.unset(ArgSettings::TakesValue) + } + } + + /// Specifies if the possible values of an argument should be displayed in the help text or + /// not. Defaults to `false` (i.e. show possible values) + /// + /// This is useful for args with many values, or ones which are explained elsewhere in the + /// help text. 
+ /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg}; + /// Arg::with_name("config") + /// .hide_possible_values(true) + /// # ; + /// ``` + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("prog") + /// .arg(Arg::with_name("mode") + /// .long("mode") + /// .possible_values(&["fast", "slow"]) + /// .takes_value(true) + /// .hide_possible_values(true)); + /// + /// ``` + /// + /// If we were to run the above program with `--help` the `[values: fast, slow]` portion of + /// the help text would be omitted. + pub fn hide_possible_values(self, hide: bool) -> Self { + if hide { + self.set(ArgSettings::HidePossibleValues) + } else { + self.unset(ArgSettings::HidePossibleValues) + } + } + + /// Specifies if the default value of an argument should be displayed in the help text or + /// not. Defaults to `false` (i.e. show default value) + /// + /// This is useful when default behavior of an arg is explained elsewhere in the help text. + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg}; + /// Arg::with_name("config") + /// .hide_default_value(true) + /// # ; + /// ``` + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("connect") + /// .arg(Arg::with_name("host") + /// .long("host") + /// .default_value("localhost") + /// .hide_default_value(true)); + /// + /// ``` + /// + /// If we were to run the above program with `--help` the `[default: localhost]` portion of + /// the help text would be omitted. + pub fn hide_default_value(self, hide: bool) -> Self { + if hide { + self.set(ArgSettings::HideDefaultValue) + } else { + self.unset(ArgSettings::HideDefaultValue) + } + } + + /// Specifies the index of a positional argument **starting at** 1. + /// + /// **NOTE:** The index refers to position according to **other positional argument**. It does + /// not define position in the argument list as a whole. + /// + /// **NOTE:** If no [`Arg::short`], or [`Arg::long`] have been defined, you can optionally + /// leave off the `index` method, and the index will be assigned in order of evaluation. + /// Utilizing the `index` method allows for setting indexes out of order + /// + /// **NOTE:** When utilized with [`Arg::multiple(true)`], only the **last** positional argument + /// may be defined as multiple (i.e. 
with the highest index) + /// + /// # Panics + /// + /// Although not in this method directly, [`App`] will [`panic!`] if indexes are skipped (such + /// as defining `index(1)` and `index(3)` but not `index(2)`, or a positional argument is + /// defined as multiple and is not the highest index + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg}; + /// Arg::with_name("config") + /// .index(1) + /// # ; + /// ``` + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("prog") + /// .arg(Arg::with_name("mode") + /// .index(1)) + /// .arg(Arg::with_name("debug") + /// .long("debug")) + /// .get_matches_from(vec![ + /// "prog", "--debug", "fast" + /// ]); + /// + /// assert!(m.is_present("mode")); + /// assert_eq!(m.value_of("mode"), Some("fast")); // notice index(1) means "first positional" + /// // *not* first argument + /// ``` + /// [`Arg::short`]: ./struct.Arg.html#method.short + /// [`Arg::long`]: ./struct.Arg.html#method.long + /// [`Arg::multiple(true)`]: ./struct.Arg.html#method.multiple + /// [`App`]: ./struct.App.html + /// [`panic!`]: https://doc.rust-lang.org/std/macro.panic!.html + pub fn index(mut self, idx: u64) -> Self { + self.index = Some(idx); + self + } + + /// Specifies that the argument may appear more than once. For flags, this results + /// in the number of occurrences of the flag being recorded. For example `-ddd` or `-d -d -d` + /// would count as three occurrences. For options there is a distinct difference in multiple + /// occurrences vs multiple values. + /// + /// For example, `--opt val1 val2` is one occurrence, but two values. Whereas + /// `--opt val1 --opt val2` is two occurrences. + /// + /// **WARNING:** + /// + /// Setting `multiple(true)` for an [option] with no other details, allows multiple values + /// **and** multiple occurrences because it isn't possible to have more occurrences than values + /// for options. Because multiple values are allowed, `--option val1 val2 val3` is perfectly + /// valid, be careful when designing a CLI where positional arguments are expected after a + /// option which accepts multiple values, as `clap` will continue parsing *values* until it + /// reaches the max or specific number of values defined, or another flag or option. + /// + /// **Pro Tip**: + /// + /// It's possible to define an option which allows multiple occurrences, but only one value per + /// occurrence. To do this use [`Arg::number_of_values(1)`] in coordination with + /// [`Arg::multiple(true)`]. + /// + /// **WARNING:** + /// + /// When using args with `multiple(true)` on [options] or [positionals] (i.e. those args that + /// accept values) and [subcommands], one needs to consider the possibility of an argument value + /// being the same as a valid subcommand. By default `clap` will parse the argument in question + /// as a value *only if* a value is possible at that moment. Otherwise it will be parsed as a + /// subcommand. In effect, this means using `multiple(true)` with no additional parameters and + /// a possible value that coincides with a subcommand name, the subcommand cannot be called + /// unless another argument is passed first. + /// + /// As an example, consider a CLI with an option `--ui-paths=...` and subcommand `signer` + /// + /// The following would be parsed as values to `--ui-paths`. + /// + /// ```notrust + /// $ program --ui-paths path1 path2 signer + /// ``` + /// + /// This is because `--ui-paths` accepts multiple values. 
`clap` will continue parsing values + /// until another argument is reached and it knows `--ui-paths` is done. + /// + /// By adding additional parameters to `--ui-paths` we can solve this issue. Consider adding + /// [`Arg::number_of_values(1)`] as discussed above. The following are all valid, and `signer` + /// is parsed as both a subcommand and a value in the second case. + /// + /// ```notrust + /// $ program --ui-paths path1 signer + /// $ program --ui-paths path1 --ui-paths signer signer + /// ``` + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg}; + /// Arg::with_name("debug") + /// .short("d") + /// .multiple(true) + /// # ; + /// ``` + /// An example with flags + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("prog") + /// .arg(Arg::with_name("verbose") + /// .multiple(true) + /// .short("v")) + /// .get_matches_from(vec![ + /// "prog", "-v", "-v", "-v" // note, -vvv would have same result + /// ]); + /// + /// assert!(m.is_present("verbose")); + /// assert_eq!(m.occurrences_of("verbose"), 3); + /// ``` + /// + /// An example with options + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("prog") + /// .arg(Arg::with_name("file") + /// .multiple(true) + /// .takes_value(true) + /// .short("F")) + /// .get_matches_from(vec![ + /// "prog", "-F", "file1", "file2", "file3" + /// ]); + /// + /// assert!(m.is_present("file")); + /// assert_eq!(m.occurrences_of("file"), 1); // notice only one occurrence + /// let files: Vec<_> = m.values_of("file").unwrap().collect(); + /// assert_eq!(files, ["file1", "file2", "file3"]); + /// ``` + /// This is functionally equivalent to the example above + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("prog") + /// .arg(Arg::with_name("file") + /// .multiple(true) + /// .takes_value(true) + /// .short("F")) + /// .get_matches_from(vec![ + /// "prog", "-F", "file1", "-F", "file2", "-F", "file3" + /// ]); + /// let files: Vec<_> = m.values_of("file").unwrap().collect(); + /// assert_eq!(files, ["file1", "file2", "file3"]); + /// + /// assert!(m.is_present("file")); + /// assert_eq!(m.occurrences_of("file"), 3); // Notice 3 occurrences + /// let files: Vec<_> = m.values_of("file").unwrap().collect(); + /// assert_eq!(files, ["file1", "file2", "file3"]); + /// ``` + /// + /// A common mistake is to define an option which allows multiples, and a positional argument + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("prog") + /// .arg(Arg::with_name("file") + /// .multiple(true) + /// .takes_value(true) + /// .short("F")) + /// .arg(Arg::with_name("word") + /// .index(1)) + /// .get_matches_from(vec![ + /// "prog", "-F", "file1", "file2", "file3", "word" + /// ]); + /// + /// assert!(m.is_present("file")); + /// let files: Vec<_> = m.values_of("file").unwrap().collect(); + /// assert_eq!(files, ["file1", "file2", "file3", "word"]); // wait...what?! + /// assert!(!m.is_present("word")); // but we clearly used word! + /// ``` + /// The problem is clap doesn't know when to stop parsing values for "files". This is further + /// compounded by if we'd said `word -F file1 file2` it would have worked fine, so it would + /// appear to only fail sometimes...not good! 
+ /// + /// A solution for the example above is to specify that `-F` only accepts one value, but is + /// allowed to appear multiple times + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("prog") + /// .arg(Arg::with_name("file") + /// .multiple(true) + /// .takes_value(true) + /// .number_of_values(1) + /// .short("F")) + /// .arg(Arg::with_name("word") + /// .index(1)) + /// .get_matches_from(vec![ + /// "prog", "-F", "file1", "-F", "file2", "-F", "file3", "word" + /// ]); + /// + /// assert!(m.is_present("file")); + /// let files: Vec<_> = m.values_of("file").unwrap().collect(); + /// assert_eq!(files, ["file1", "file2", "file3"]); + /// assert!(m.is_present("word")); + /// assert_eq!(m.value_of("word"), Some("word")); + /// ``` + /// As a final example, notice if we define [`Arg::number_of_values(1)`] and try to run the + /// problem example above, it would have been a runtime error with a pretty message to the + /// user :) + /// + /// ```rust + /// # use clap::{App, Arg, ErrorKind}; + /// let res = App::new("prog") + /// .arg(Arg::with_name("file") + /// .multiple(true) + /// .takes_value(true) + /// .number_of_values(1) + /// .short("F")) + /// .arg(Arg::with_name("word") + /// .index(1)) + /// .get_matches_from_safe(vec![ + /// "prog", "-F", "file1", "file2", "file3", "word" + /// ]); + /// + /// assert!(res.is_err()); + /// assert_eq!(res.unwrap_err().kind, ErrorKind::UnknownArgument); + /// ``` + /// [option]: ./struct.Arg.html#method.takes_value + /// [options]: ./struct.Arg.html#method.takes_value + /// [subcommands]: ./struct.SubCommand.html + /// [positionals]: ./struct.Arg.html#method.index + /// [`Arg::number_of_values(1)`]: ./struct.Arg.html#method.number_of_values + /// [`Arg::multiple(true)`]: ./struct.Arg.html#method.multiple + pub fn multiple(self, multi: bool) -> Self { + if multi { + self.set(ArgSettings::Multiple) + } else { + self.unset(ArgSettings::Multiple) + } + } + + /// Specifies a value that *stops* parsing multiple values of a give argument. By default when + /// one sets [`multiple(true)`] on an argument, clap will continue parsing values for that + /// argument until it reaches another valid argument, or one of the other more specific settings + /// for multiple values is used (such as [`min_values`], [`max_values`] or + /// [`number_of_values`]). 
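+ ///
+ /// As a rough illustrative sketch (the `files` and `dest` argument names below are made up
+ /// for this example), the terminator lets a multi-valued option stop before a trailing
+ /// positional argument:
+ ///
+ /// ```rust
+ /// # use clap::{App, Arg};
+ /// // `files` and `dest` are illustrative argument names only
+ /// let m = App::new("prog")
+ ///     .arg(Arg::with_name("files")
+ ///         .long("files")
+ ///         .takes_value(true)
+ ///         .multiple(true)
+ ///         .value_terminator(";"))
+ ///     .arg(Arg::with_name("dest"))
+ ///     .get_matches_from(vec![
+ ///         "prog", "--files", "a.txt", "b.txt", ";", "out"
+ ///     ]);
+ /// let files: Vec<_> = m.values_of("files").unwrap().collect();
+ /// assert_eq!(files, ["a.txt", "b.txt"]);
+ /// assert_eq!(m.value_of("dest"), Some("out"));
+ /// ```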
+ /// + /// **NOTE:** This setting only applies to [options] and [positional arguments] + /// + /// **NOTE:** When the terminator is passed in on the command line, it is **not** stored as one + /// of the values + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg}; + /// Arg::with_name("vals") + /// .takes_value(true) + /// .multiple(true) + /// .value_terminator(";") + /// # ; + /// ``` + /// The following example uses two arguments, a sequence of commands, and the location in which + /// to perform them + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("prog") + /// .arg(Arg::with_name("cmds") + /// .multiple(true) + /// .allow_hyphen_values(true) + /// .value_terminator(";")) + /// .arg(Arg::with_name("location")) + /// .get_matches_from(vec![ + /// "prog", "find", "-type", "f", "-name", "special", ";", "/home/clap" + /// ]); + /// let cmds: Vec<_> = m.values_of("cmds").unwrap().collect(); + /// assert_eq!(&cmds, &["find", "-type", "f", "-name", "special"]); + /// assert_eq!(m.value_of("location"), Some("/home/clap")); + /// ``` + /// [options]: ./struct.Arg.html#method.takes_value + /// [positional arguments]: ./struct.Arg.html#method.index + /// [`multiple(true)`]: ./struct.Arg.html#method.multiple + /// [`min_values`]: ./struct.Arg.html#method.min_values + /// [`number_of_values`]: ./struct.Arg.html#method.number_of_values + /// [`max_values`]: ./struct.Arg.html#method.max_values + pub fn value_terminator(mut self, term: &'b str) -> Self { + self.setb(ArgSettings::TakesValue); + self.v.terminator = Some(term); + self + } + + /// Specifies that an argument can be matched to all child [`SubCommand`]s. + /// + /// **NOTE:** Global arguments *only* propagate down, **not** up (to parent commands), however + /// their values once a user uses them will be propagated back up to parents. In effect, this + /// means one should *define* all global arguments at the top level, however it doesn't matter + /// where the user *uses* the global argument. + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg}; + /// Arg::with_name("debug") + /// .short("d") + /// .global(true) + /// # ; + /// ``` + /// + /// For example, assume an application with two subcommands, and you'd like to define a + /// `--verbose` flag that can be called on any of the subcommands and parent, but you don't + /// want to clutter the source with three duplicate [`Arg`] definitions. + /// + /// ```rust + /// # use clap::{App, Arg, SubCommand}; + /// let m = App::new("prog") + /// .arg(Arg::with_name("verb") + /// .long("verbose") + /// .short("v") + /// .global(true)) + /// .subcommand(SubCommand::with_name("test")) + /// .subcommand(SubCommand::with_name("do-stuff")) + /// .get_matches_from(vec![ + /// "prog", "do-stuff", "--verbose" + /// ]); + /// + /// assert_eq!(m.subcommand_name(), Some("do-stuff")); + /// let sub_m = m.subcommand_matches("do-stuff").unwrap(); + /// assert!(sub_m.is_present("verb")); + /// ``` + /// [`SubCommand`]: ./struct.SubCommand.html + /// [required]: ./struct.Arg.html#method.required + /// [`ArgMatches`]: ./struct.ArgMatches.html + /// [`ArgMatches::is_present("flag")`]: ./struct.ArgMatches.html#method.is_present + /// [`Arg`]: ./struct.Arg.html + pub fn global(self, g: bool) -> Self { + if g { + self.set(ArgSettings::Global) + } else { + self.unset(ArgSettings::Global) + } + } + + /// Allows an argument to accept explicitly empty values. 
An empty value must be specified at + /// the command line with an explicit `""`, or `''` + /// + /// **NOTE:** Defaults to `true` (Explicitly empty values are allowed) + /// + /// **NOTE:** Implicitly sets [`Arg::takes_value(true)`] when set to `false` + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg}; + /// Arg::with_name("file") + /// .long("file") + /// .empty_values(false) + /// # ; + /// ``` + /// The default is to allow empty values, such as `--option ""` would be an empty value. But + /// we can change to make empty values become an error. + /// + /// ```rust + /// # use clap::{App, Arg, ErrorKind}; + /// let res = App::new("prog") + /// .arg(Arg::with_name("cfg") + /// .long("config") + /// .short("v") + /// .empty_values(false)) + /// .get_matches_from_safe(vec![ + /// "prog", "--config=" + /// ]); + /// + /// assert!(res.is_err()); + /// assert_eq!(res.unwrap_err().kind, ErrorKind::EmptyValue); + /// ``` + /// [`Arg::takes_value(true)`]: ./struct.Arg.html#method.takes_value + pub fn empty_values(mut self, ev: bool) -> Self { + if ev { + self.set(ArgSettings::EmptyValues) + } else { + self = self.set(ArgSettings::TakesValue); + self.unset(ArgSettings::EmptyValues) + } + } + + /// Hides an argument from help message output. + /// + /// **NOTE:** Implicitly sets [`Arg::hidden_short_help(true)`] and [`Arg::hidden_long_help(true)`] + /// when set to true + /// + /// **NOTE:** This does **not** hide the argument from usage strings on error + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg}; + /// Arg::with_name("debug") + /// .hidden(true) + /// # ; + /// ``` + /// Setting `hidden(true)` will hide the argument when displaying help text + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("prog") + /// .arg(Arg::with_name("cfg") + /// .long("config") + /// .hidden(true) + /// .help("Some help text describing the --config arg")) + /// .get_matches_from(vec![ + /// "prog", "--help" + /// ]); + /// ``` + /// + /// The above example displays + /// + /// ```notrust + /// helptest + /// + /// USAGE: + /// helptest [FLAGS] + /// + /// FLAGS: + /// -h, --help Prints help information + /// -V, --version Prints version information + /// ``` + /// [`Arg::hidden_short_help(true)`]: ./struct.Arg.html#method.hidden_short_help + /// [`Arg::hidden_long_help(true)`]: ./struct.Arg.html#method.hidden_long_help + pub fn hidden(self, h: bool) -> Self { + if h { + self.set(ArgSettings::Hidden) + } else { + self.unset(ArgSettings::Hidden) + } + } + + /// Specifies a list of possible values for this argument. At runtime, `clap` verifies that + /// only one of the specified values was used, or fails with an error message. + /// + /// **NOTE:** This setting only applies to [options] and [positional arguments] + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg}; + /// Arg::with_name("mode") + /// .takes_value(true) + /// .possible_values(&["fast", "slow", "medium"]) + /// # ; + /// ``` + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("prog") + /// .arg(Arg::with_name("mode") + /// .long("mode") + /// .takes_value(true) + /// .possible_values(&["fast", "slow", "medium"])) + /// .get_matches_from(vec![ + /// "prog", "--mode", "fast" + /// ]); + /// assert!(m.is_present("mode")); + /// assert_eq!(m.value_of("mode"), Some("fast")); + /// ``` + /// + /// The next example shows a failed parse from using a value which wasn't defined as one of the + /// possible values. 
+ /// + /// ```rust + /// # use clap::{App, Arg, ErrorKind}; + /// let res = App::new("prog") + /// .arg(Arg::with_name("mode") + /// .long("mode") + /// .takes_value(true) + /// .possible_values(&["fast", "slow", "medium"])) + /// .get_matches_from_safe(vec![ + /// "prog", "--mode", "wrong" + /// ]); + /// assert!(res.is_err()); + /// assert_eq!(res.unwrap_err().kind, ErrorKind::InvalidValue); + /// ``` + /// [options]: ./struct.Arg.html#method.takes_value + /// [positional arguments]: ./struct.Arg.html#method.index + pub fn possible_values(mut self, names: &[&'b str]) -> Self { + if let Some(ref mut vec) = self.v.possible_vals { + for s in names { + vec.push(s); + } + } else { + self.v.possible_vals = Some(names.iter().map(|s| *s).collect::>()); + } + self + } + + /// Specifies a possible value for this argument, one at a time. At runtime, `clap` verifies + /// that only one of the specified values was used, or fails with error message. + /// + /// **NOTE:** This setting only applies to [options] and [positional arguments] + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg}; + /// Arg::with_name("mode") + /// .takes_value(true) + /// .possible_value("fast") + /// .possible_value("slow") + /// .possible_value("medium") + /// # ; + /// ``` + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("prog") + /// .arg(Arg::with_name("mode") + /// .long("mode") + /// .takes_value(true) + /// .possible_value("fast") + /// .possible_value("slow") + /// .possible_value("medium")) + /// .get_matches_from(vec![ + /// "prog", "--mode", "fast" + /// ]); + /// assert!(m.is_present("mode")); + /// assert_eq!(m.value_of("mode"), Some("fast")); + /// ``` + /// + /// The next example shows a failed parse from using a value which wasn't defined as one of the + /// possible values. + /// + /// ```rust + /// # use clap::{App, Arg, ErrorKind}; + /// let res = App::new("prog") + /// .arg(Arg::with_name("mode") + /// .long("mode") + /// .takes_value(true) + /// .possible_value("fast") + /// .possible_value("slow") + /// .possible_value("medium")) + /// .get_matches_from_safe(vec![ + /// "prog", "--mode", "wrong" + /// ]); + /// assert!(res.is_err()); + /// assert_eq!(res.unwrap_err().kind, ErrorKind::InvalidValue); + /// ``` + /// [options]: ./struct.Arg.html#method.takes_value + /// [positional arguments]: ./struct.Arg.html#method.index + pub fn possible_value(mut self, name: &'b str) -> Self { + if let Some(ref mut vec) = self.v.possible_vals { + vec.push(name); + } else { + self.v.possible_vals = Some(vec![name]); + } + self + } + + /// When used with [`Arg::possible_values`] it allows the argument value to pass validation even if + /// the case differs from that of the specified `possible_value`. 
+ /// + /// **Pro Tip:** Use this setting with [`arg_enum!`] + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg}; + /// # use std::ascii::AsciiExt; + /// let m = App::new("pv") + /// .arg(Arg::with_name("option") + /// .long("--option") + /// .takes_value(true) + /// .possible_value("test123") + /// .case_insensitive(true)) + /// .get_matches_from(vec![ + /// "pv", "--option", "TeSt123", + /// ]); + /// + /// assert!(m.value_of("option").unwrap().eq_ignore_ascii_case("test123")); + /// ``` + /// + /// This setting also works when multiple values can be defined: + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("pv") + /// .arg(Arg::with_name("option") + /// .short("-o") + /// .long("--option") + /// .takes_value(true) + /// .possible_value("test123") + /// .possible_value("test321") + /// .multiple(true) + /// .case_insensitive(true)) + /// .get_matches_from(vec![ + /// "pv", "--option", "TeSt123", "teST123", "tESt321" + /// ]); + /// + /// let matched_vals = m.values_of("option").unwrap().collect::>(); + /// assert_eq!(&*matched_vals, &["TeSt123", "teST123", "tESt321"]); + /// ``` + /// [`Arg::case_insensitive(true)`]: ./struct.Arg.html#method.possible_values + /// [`arg_enum!`]: ./macro.arg_enum.html + pub fn case_insensitive(self, ci: bool) -> Self { + if ci { + self.set(ArgSettings::CaseInsensitive) + } else { + self.unset(ArgSettings::CaseInsensitive) + } + } + + /// Specifies the name of the [`ArgGroup`] the argument belongs to. + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg}; + /// Arg::with_name("debug") + /// .long("debug") + /// .group("mode") + /// # ; + /// ``` + /// + /// Multiple arguments can be a member of a single group and then the group checked as if it + /// was one of said arguments. + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("prog") + /// .arg(Arg::with_name("debug") + /// .long("debug") + /// .group("mode")) + /// .arg(Arg::with_name("verbose") + /// .long("verbose") + /// .group("mode")) + /// .get_matches_from(vec![ + /// "prog", "--debug" + /// ]); + /// assert!(m.is_present("mode")); + /// ``` + /// [`ArgGroup`]: ./struct.ArgGroup.html + pub fn group(mut self, name: &'a str) -> Self { + if let Some(ref mut vec) = self.b.groups { + vec.push(name); + } else { + self.b.groups = Some(vec![name]); + } + self + } + + /// Specifies the names of multiple [`ArgGroup`]'s the argument belongs to. + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg}; + /// Arg::with_name("debug") + /// .long("debug") + /// .groups(&["mode", "verbosity"]) + /// # ; + /// ``` + /// + /// Arguments can be members of multiple groups and then the group checked as if it + /// was one of said arguments. + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("prog") + /// .arg(Arg::with_name("debug") + /// .long("debug") + /// .groups(&["mode", "verbosity"])) + /// .arg(Arg::with_name("verbose") + /// .long("verbose") + /// .groups(&["mode", "verbosity"])) + /// .get_matches_from(vec![ + /// "prog", "--debug" + /// ]); + /// assert!(m.is_present("mode")); + /// assert!(m.is_present("verbosity")); + /// ``` + /// [`ArgGroup`]: ./struct.ArgGroup.html + pub fn groups(mut self, names: &[&'a str]) -> Self { + if let Some(ref mut vec) = self.b.groups { + for s in names { + vec.push(s); + } + } else { + self.b.groups = Some(names.into_iter().map(|s| *s).collect::>()); + } + self + } + + /// Specifies how many values are required to satisfy this argument. 
For example, if you had a + /// `-f ` argument where you wanted exactly 3 'files' you would set + /// `.number_of_values(3)`, and this argument wouldn't be satisfied unless the user provided + /// 3 and only 3 values. + /// + /// **NOTE:** Does *not* require [`Arg::multiple(true)`] to be set. Setting + /// [`Arg::multiple(true)`] would allow `-f -f ` where + /// as *not* setting [`Arg::multiple(true)`] would only allow one occurrence of this argument. + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg}; + /// Arg::with_name("file") + /// .short("f") + /// .number_of_values(3) + /// # ; + /// ``` + /// + /// Not supplying the correct number of values is an error + /// + /// ```rust + /// # use clap::{App, Arg, ErrorKind}; + /// let res = App::new("prog") + /// .arg(Arg::with_name("file") + /// .takes_value(true) + /// .number_of_values(2) + /// .short("F")) + /// .get_matches_from_safe(vec![ + /// "prog", "-F", "file1" + /// ]); + /// + /// assert!(res.is_err()); + /// assert_eq!(res.unwrap_err().kind, ErrorKind::WrongNumberOfValues); + /// ``` + /// [`Arg::multiple(true)`]: ./struct.Arg.html#method.multiple + pub fn number_of_values(mut self, qty: u64) -> Self { + self.setb(ArgSettings::TakesValue); + self.v.num_vals = Some(qty); + self + } + + /// Allows one to perform a custom validation on the argument value. You provide a closure + /// which accepts a [`String`] value, and return a [`Result`] where the [`Err(String)`] is a + /// message displayed to the user. + /// + /// **NOTE:** The error message does *not* need to contain the `error:` portion, only the + /// message as all errors will appear as + /// `error: Invalid value for '': ` where `` is replaced by the actual + /// arg, and `` is the `String` you return as the error. + /// + /// **NOTE:** There is a small performance hit for using validators, as they are implemented + /// with [`Rc`] pointers. And the value to be checked will be allocated an extra time in order + /// to to be passed to the closure. This performance hit is extremely minimal in the grand + /// scheme of things. + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg}; + /// fn has_at(v: String) -> Result<(), String> { + /// if v.contains("@") { return Ok(()); } + /// Err(String::from("The value did not contain the required @ sigil")) + /// } + /// let res = App::new("prog") + /// .arg(Arg::with_name("file") + /// .index(1) + /// .validator(has_at)) + /// .get_matches_from_safe(vec![ + /// "prog", "some@file" + /// ]); + /// assert!(res.is_ok()); + /// assert_eq!(res.unwrap().value_of("file"), Some("some@file")); + /// ``` + /// [`String`]: https://doc.rust-lang.org/std/string/struct.String.html + /// [`Result`]: https://doc.rust-lang.org/std/result/enum.Result.html + /// [`Err(String)`]: https://doc.rust-lang.org/std/result/enum.Result.html#variant.Err + /// [`Rc`]: https://doc.rust-lang.org/std/rc/struct.Rc.html + pub fn validator(mut self, f: F) -> Self + where + F: Fn(String) -> Result<(), String> + 'static, + { + self.v.validator = Some(Rc::new(f)); + self + } + + /// Works identically to Validator but is intended to be used with values that could + /// contain non UTF-8 formatted strings. 
+ /// + /// # Examples + /// + #[cfg_attr(not(unix), doc = " ```ignore")] + #[cfg_attr(unix, doc = " ```rust")] + /// # use clap::{App, Arg}; + /// # use std::ffi::{OsStr, OsString}; + /// # use std::os::unix::ffi::OsStrExt; + /// fn has_ampersand(v: &OsStr) -> Result<(), OsString> { + /// if v.as_bytes().iter().any(|b| *b == b'&') { return Ok(()); } + /// Err(OsString::from("The value did not contain the required & sigil")) + /// } + /// let res = App::new("prog") + /// .arg(Arg::with_name("file") + /// .index(1) + /// .validator_os(has_ampersand)) + /// .get_matches_from_safe(vec![ + /// "prog", "Fish & chips" + /// ]); + /// assert!(res.is_ok()); + /// assert_eq!(res.unwrap().value_of("file"), Some("Fish & chips")); + /// ``` + /// [`String`]: https://doc.rust-lang.org/std/string/struct.String.html + /// [`OsStr`]: https://doc.rust-lang.org/std/ffi/struct.OsStr.html + /// [`OsString`]: https://doc.rust-lang.org/std/ffi/struct.OsString.html + /// [`Result`]: https://doc.rust-lang.org/std/result/enum.Result.html + /// [`Err(String)`]: https://doc.rust-lang.org/std/result/enum.Result.html#variant.Err + /// [`Rc`]: https://doc.rust-lang.org/std/rc/struct.Rc.html + pub fn validator_os(mut self, f: F) -> Self + where + F: Fn(&OsStr) -> Result<(), OsString> + 'static, + { + self.v.validator_os = Some(Rc::new(f)); + self + } + + /// Specifies the *maximum* number of values are for this argument. For example, if you had a + /// `-f ` argument where you wanted up to 3 'files' you would set `.max_values(3)`, and + /// this argument would be satisfied if the user provided, 1, 2, or 3 values. + /// + /// **NOTE:** This does *not* implicitly set [`Arg::multiple(true)`]. This is because + /// `-o val -o val` is multiple occurrences but a single value and `-o val1 val2` is a single + /// occurrence with multiple values. For positional arguments this **does** set + /// [`Arg::multiple(true)`] because there is no way to determine the difference between multiple + /// occurrences and multiple values. + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg}; + /// Arg::with_name("file") + /// .short("f") + /// .max_values(3) + /// # ; + /// ``` + /// + /// Supplying less than the maximum number of values is allowed + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let res = App::new("prog") + /// .arg(Arg::with_name("file") + /// .takes_value(true) + /// .max_values(3) + /// .short("F")) + /// .get_matches_from_safe(vec![ + /// "prog", "-F", "file1", "file2" + /// ]); + /// + /// assert!(res.is_ok()); + /// let m = res.unwrap(); + /// let files: Vec<_> = m.values_of("file").unwrap().collect(); + /// assert_eq!(files, ["file1", "file2"]); + /// ``` + /// + /// Supplying more than the maximum number of values is an error + /// + /// ```rust + /// # use clap::{App, Arg, ErrorKind}; + /// let res = App::new("prog") + /// .arg(Arg::with_name("file") + /// .takes_value(true) + /// .max_values(2) + /// .short("F")) + /// .get_matches_from_safe(vec![ + /// "prog", "-F", "file1", "file2", "file3" + /// ]); + /// + /// assert!(res.is_err()); + /// assert_eq!(res.unwrap_err().kind, ErrorKind::TooManyValues); + /// ``` + /// [`Arg::multiple(true)`]: ./struct.Arg.html#method.multiple + pub fn max_values(mut self, qty: u64) -> Self { + self.setb(ArgSettings::TakesValue); + self.v.max_vals = Some(qty); + self + } + + /// Specifies the *minimum* number of values for this argument. 
For example, if you had a + /// `-f ` argument where you wanted at least 2 'files' you would set + /// `.min_values(2)`, and this argument would be satisfied if the user provided, 2 or more + /// values. + /// + /// **NOTE:** This does not implicitly set [`Arg::multiple(true)`]. This is because + /// `-o val -o val` is multiple occurrences but a single value and `-o val1 val2` is a single + /// occurrence with multiple values. For positional arguments this **does** set + /// [`Arg::multiple(true)`] because there is no way to determine the difference between multiple + /// occurrences and multiple values. + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg}; + /// Arg::with_name("file") + /// .short("f") + /// .min_values(3) + /// # ; + /// ``` + /// + /// Supplying more than the minimum number of values is allowed + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let res = App::new("prog") + /// .arg(Arg::with_name("file") + /// .takes_value(true) + /// .min_values(2) + /// .short("F")) + /// .get_matches_from_safe(vec![ + /// "prog", "-F", "file1", "file2", "file3" + /// ]); + /// + /// assert!(res.is_ok()); + /// let m = res.unwrap(); + /// let files: Vec<_> = m.values_of("file").unwrap().collect(); + /// assert_eq!(files, ["file1", "file2", "file3"]); + /// ``` + /// + /// Supplying less than the minimum number of values is an error + /// + /// ```rust + /// # use clap::{App, Arg, ErrorKind}; + /// let res = App::new("prog") + /// .arg(Arg::with_name("file") + /// .takes_value(true) + /// .min_values(2) + /// .short("F")) + /// .get_matches_from_safe(vec![ + /// "prog", "-F", "file1" + /// ]); + /// + /// assert!(res.is_err()); + /// assert_eq!(res.unwrap_err().kind, ErrorKind::TooFewValues); + /// ``` + /// [`Arg::multiple(true)`]: ./struct.Arg.html#method.multiple + pub fn min_values(mut self, qty: u64) -> Self { + self.v.min_vals = Some(qty); + self.set(ArgSettings::TakesValue) + } + + /// Specifies whether or not an argument should allow grouping of multiple values via a + /// delimiter. I.e. should `--option=val1,val2,val3` be parsed as three values (`val1`, `val2`, + /// and `val3`) or as a single value (`val1,val2,val3`). Defaults to using `,` (comma) as the + /// value delimiter for all arguments that accept values (options and positional arguments) + /// + /// **NOTE:** The default is `false`. When set to `true` the default [`Arg::value_delimiter`] + /// is the comma `,`. + /// + /// # Examples + /// + /// The following example shows the default behavior. + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let delims = App::new("prog") + /// .arg(Arg::with_name("option") + /// .long("option") + /// .use_delimiter(true) + /// .takes_value(true)) + /// .get_matches_from(vec![ + /// "prog", "--option=val1,val2,val3", + /// ]); + /// + /// assert!(delims.is_present("option")); + /// assert_eq!(delims.occurrences_of("option"), 1); + /// assert_eq!(delims.values_of("option").unwrap().collect::>(), ["val1", "val2", "val3"]); + /// ``` + /// The next example shows the difference when turning delimiters off. 
This is the default + /// behavior + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let nodelims = App::new("prog") + /// .arg(Arg::with_name("option") + /// .long("option") + /// .use_delimiter(false) + /// .takes_value(true)) + /// .get_matches_from(vec![ + /// "prog", "--option=val1,val2,val3", + /// ]); + /// + /// assert!(nodelims.is_present("option")); + /// assert_eq!(nodelims.occurrences_of("option"), 1); + /// assert_eq!(nodelims.value_of("option").unwrap(), "val1,val2,val3"); + /// ``` + /// [`Arg::value_delimiter`]: ./struct.Arg.html#method.value_delimiter + pub fn use_delimiter(mut self, d: bool) -> Self { + if d { + if self.v.val_delim.is_none() { + self.v.val_delim = Some(','); + } + self.setb(ArgSettings::TakesValue); + self.setb(ArgSettings::UseValueDelimiter); + self.unset(ArgSettings::ValueDelimiterNotSet) + } else { + self.v.val_delim = None; + self.unsetb(ArgSettings::UseValueDelimiter); + self.unset(ArgSettings::ValueDelimiterNotSet) + } + } + + /// Specifies that *multiple values* may only be set using the delimiter. This means if an + /// if an option is encountered, and no delimiter is found, it automatically assumed that no + /// additional values for that option follow. This is unlike the default, where it is generally + /// assumed that more values will follow regardless of whether or not a delimiter is used. + /// + /// **NOTE:** The default is `false`. + /// + /// **NOTE:** Setting this to true implies [`Arg::use_delimiter(true)`] + /// + /// **NOTE:** It's a good idea to inform the user that use of a delimiter is required, either + /// through help text or other means. + /// + /// # Examples + /// + /// These examples demonstrate what happens when `require_delimiter(true)` is used. Notice + /// everything works in this first example, as we use a delimiter, as expected. + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let delims = App::new("prog") + /// .arg(Arg::with_name("opt") + /// .short("o") + /// .takes_value(true) + /// .multiple(true) + /// .require_delimiter(true)) + /// .get_matches_from(vec![ + /// "prog", "-o", "val1,val2,val3", + /// ]); + /// + /// assert!(delims.is_present("opt")); + /// assert_eq!(delims.values_of("opt").unwrap().collect::>(), ["val1", "val2", "val3"]); + /// ``` + /// In this next example, we will *not* use a delimiter. Notice it's now an error. + /// + /// ```rust + /// # use clap::{App, Arg, ErrorKind}; + /// let res = App::new("prog") + /// .arg(Arg::with_name("opt") + /// .short("o") + /// .takes_value(true) + /// .multiple(true) + /// .require_delimiter(true)) + /// .get_matches_from_safe(vec![ + /// "prog", "-o", "val1", "val2", "val3", + /// ]); + /// + /// assert!(res.is_err()); + /// let err = res.unwrap_err(); + /// assert_eq!(err.kind, ErrorKind::UnknownArgument); + /// ``` + /// What's happening is `-o` is getting `val1`, and because delimiters are required yet none + /// were present, it stops parsing `-o`. At this point it reaches `val2` and because no + /// positional arguments have been defined, it's an error of an unexpected argument. + /// + /// In this final example, we contrast the above with `clap`'s default behavior where the above + /// is *not* an error. 
+ /// + /// ```rust + /// # use clap::{App, Arg}; + /// let delims = App::new("prog") + /// .arg(Arg::with_name("opt") + /// .short("o") + /// .takes_value(true) + /// .multiple(true)) + /// .get_matches_from(vec![ + /// "prog", "-o", "val1", "val2", "val3", + /// ]); + /// + /// assert!(delims.is_present("opt")); + /// assert_eq!(delims.values_of("opt").unwrap().collect::>(), ["val1", "val2", "val3"]); + /// ``` + /// [`Arg::use_delimiter(true)`]: ./struct.Arg.html#method.use_delimiter + pub fn require_delimiter(mut self, d: bool) -> Self { + if d { + self = self.use_delimiter(true); + self.unsetb(ArgSettings::ValueDelimiterNotSet); + self.setb(ArgSettings::UseValueDelimiter); + self.set(ArgSettings::RequireDelimiter) + } else { + self = self.use_delimiter(false); + self.unsetb(ArgSettings::UseValueDelimiter); + self.unset(ArgSettings::RequireDelimiter) + } + } + + /// Specifies the separator to use when values are clumped together, defaults to `,` (comma). + /// + /// **NOTE:** implicitly sets [`Arg::use_delimiter(true)`] + /// + /// **NOTE:** implicitly sets [`Arg::takes_value(true)`] + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("prog") + /// .arg(Arg::with_name("config") + /// .short("c") + /// .long("config") + /// .value_delimiter(";")) + /// .get_matches_from(vec![ + /// "prog", "--config=val1;val2;val3" + /// ]); + /// + /// assert_eq!(m.values_of("config").unwrap().collect::>(), ["val1", "val2", "val3"]) + /// ``` + /// [`Arg::use_delimiter(true)`]: ./struct.Arg.html#method.use_delimiter + /// [`Arg::takes_value(true)`]: ./struct.Arg.html#method.takes_value + pub fn value_delimiter(mut self, d: &str) -> Self { + self.unsetb(ArgSettings::ValueDelimiterNotSet); + self.setb(ArgSettings::TakesValue); + self.setb(ArgSettings::UseValueDelimiter); + self.v.val_delim = Some( + d.chars() + .nth(0) + .expect("Failed to get value_delimiter from arg"), + ); + self + } + + /// Specify multiple names for values of option arguments. These names are cosmetic only, used + /// for help and usage strings only. The names are **not** used to access arguments. The values + /// of the arguments are accessed in numeric order (i.e. if you specify two names `one` and + /// `two` `one` will be the first matched value, `two` will be the second). + /// + /// This setting can be very helpful when describing the type of input the user should be + /// using, such as `FILE`, `INTERFACE`, etc. Although not required, it's somewhat convention to + /// use all capital letters for the value name. + /// + /// **Pro Tip:** It may help to use [`Arg::next_line_help(true)`] if there are long, or + /// multiple value names in order to not throw off the help text alignment of all options. + /// + /// **NOTE:** This implicitly sets [`Arg::number_of_values`] if the number of value names is + /// greater than one. I.e. be aware that the number of "names" you set for the values, will be + /// the *exact* number of values required to satisfy this argument + /// + /// **NOTE:** implicitly sets [`Arg::takes_value(true)`] + /// + /// **NOTE:** Does *not* require or imply [`Arg::multiple(true)`]. 
+ /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg}; + /// Arg::with_name("speed") + /// .short("s") + /// .value_names(&["fast", "slow"]) + /// # ; + /// ``` + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("prog") + /// .arg(Arg::with_name("io") + /// .long("io-files") + /// .value_names(&["INFILE", "OUTFILE"])) + /// .get_matches_from(vec![ + /// "prog", "--help" + /// ]); + /// ``` + /// Running the above program produces the following output + /// + /// ```notrust + /// valnames + /// + /// USAGE: + /// valnames [FLAGS] [OPTIONS] + /// + /// FLAGS: + /// -h, --help Prints help information + /// -V, --version Prints version information + /// + /// OPTIONS: + /// --io-files Some help text + /// ``` + /// [`Arg::next_line_help(true)`]: ./struct.Arg.html#method.next_line_help + /// [`Arg::number_of_values`]: ./struct.Arg.html#method.number_of_values + /// [`Arg::takes_value(true)`]: ./struct.Arg.html#method.takes_value + /// [`Arg::multiple(true)`]: ./struct.Arg.html#method.multiple + pub fn value_names(mut self, names: &[&'b str]) -> Self { + self.setb(ArgSettings::TakesValue); + if self.is_set(ArgSettings::ValueDelimiterNotSet) { + self.unsetb(ArgSettings::ValueDelimiterNotSet); + self.setb(ArgSettings::UseValueDelimiter); + } + if let Some(ref mut vals) = self.v.val_names { + let mut l = vals.len(); + for s in names { + vals.insert(l, s); + l += 1; + } + } else { + let mut vm = VecMap::new(); + for (i, n) in names.iter().enumerate() { + vm.insert(i, *n); + } + self.v.val_names = Some(vm); + } + self + } + + /// Specifies the name for value of [option] or [positional] arguments inside of help + /// documentation. This name is cosmetic only, the name is **not** used to access arguments. + /// This setting can be very helpful when describing the type of input the user should be + /// using, such as `FILE`, `INTERFACE`, etc. Although not required, it's somewhat convention to + /// use all capital letters for the value name. + /// + /// **NOTE:** implicitly sets [`Arg::takes_value(true)`] + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg}; + /// Arg::with_name("cfg") + /// .long("config") + /// .value_name("FILE") + /// # ; + /// ``` + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("prog") + /// .arg(Arg::with_name("config") + /// .long("config") + /// .value_name("FILE")) + /// .get_matches_from(vec![ + /// "prog", "--help" + /// ]); + /// ``` + /// Running the above program produces the following output + /// + /// ```notrust + /// valnames + /// + /// USAGE: + /// valnames [FLAGS] [OPTIONS] + /// + /// FLAGS: + /// -h, --help Prints help information + /// -V, --version Prints version information + /// + /// OPTIONS: + /// --config Some help text + /// ``` + /// [option]: ./struct.Arg.html#method.takes_value + /// [positional]: ./struct.Arg.html#method.index + /// [`Arg::takes_value(true)`]: ./struct.Arg.html#method.takes_value + pub fn value_name(mut self, name: &'b str) -> Self { + self.setb(ArgSettings::TakesValue); + if let Some(ref mut vals) = self.v.val_names { + let l = vals.len(); + vals.insert(l, name); + } else { + let mut vm = VecMap::new(); + vm.insert(0, name); + self.v.val_names = Some(vm); + } + self + } + + /// Specifies the value of the argument when *not* specified at runtime. + /// + /// **NOTE:** If the user *does not* use this argument at runtime, [`ArgMatches::occurrences_of`] + /// will return `0` even though the [`ArgMatches::value_of`] will return the default specified. 
+    ///
+    /// **NOTE:** If the user *does not* use this argument at runtime [`ArgMatches::is_present`] will
+    /// still return `true`. If you wish to determine whether the argument was used at runtime or
+    /// not, consider [`ArgMatches::occurrences_of`] which will return `0` if the argument was *not*
+    /// used at runtime.
+    ///
+    /// **NOTE:** This setting is perfectly compatible with [`Arg::default_value_if`] but slightly
+    /// different. `Arg::default_value` *only* takes effect when the user has not provided this arg
+    /// at runtime. `Arg::default_value_if` however only takes effect when the user has not provided
+    /// a value at runtime **and** these other conditions are met as well. If you have set
+    /// `Arg::default_value` and `Arg::default_value_if`, and the user **did not** provide this
+    /// arg at runtime, nor were the conditions met for `Arg::default_value_if`, the
+    /// `Arg::default_value` will be applied.
+    ///
+    /// **NOTE:** This implicitly sets [`Arg::takes_value(true)`].
+    ///
+    /// **NOTE:** This setting effectively disables `AppSettings::ArgRequiredElseHelp` if used in
+    /// conjunction as it ensures that some argument will always be present.
+    ///
+    /// # Examples
+    ///
+    /// First we use the default value without providing any value at runtime.
+    ///
+    /// ```rust
+    /// # use clap::{App, Arg};
+    /// let m = App::new("prog")
+    ///     .arg(Arg::with_name("opt")
+    ///         .long("myopt")
+    ///         .default_value("myval"))
+    ///     .get_matches_from(vec![
+    ///         "prog"
+    ///     ]);
+    ///
+    /// assert_eq!(m.value_of("opt"), Some("myval"));
+    /// assert!(m.is_present("opt"));
+    /// assert_eq!(m.occurrences_of("opt"), 0);
+    /// ```
+    ///
+    /// Next we provide a value at runtime to override the default.
+    ///
+    /// ```rust
+    /// # use clap::{App, Arg};
+    /// let m = App::new("prog")
+    ///     .arg(Arg::with_name("opt")
+    ///         .long("myopt")
+    ///         .default_value("myval"))
+    ///     .get_matches_from(vec![
+    ///         "prog", "--myopt=non_default"
+    ///     ]);
+    ///
+    /// assert_eq!(m.value_of("opt"), Some("non_default"));
+    /// assert!(m.is_present("opt"));
+    /// assert_eq!(m.occurrences_of("opt"), 1);
+    /// ```
+    /// [`ArgMatches::occurrences_of`]: ./struct.ArgMatches.html#method.occurrences_of
+    /// [`ArgMatches::value_of`]: ./struct.ArgMatches.html#method.value_of
+    /// [`Arg::takes_value(true)`]: ./struct.Arg.html#method.takes_value
+    /// [`ArgMatches::is_present`]: ./struct.ArgMatches.html#method.is_present
+    /// [`Arg::default_value_if`]: ./struct.Arg.html#method.default_value_if
+    pub fn default_value(self, val: &'a str) -> Self {
+        self.default_value_os(OsStr::from_bytes(val.as_bytes()))
+    }
+
+    /// Provides a default value in the exact same manner as [`Arg::default_value`]
+    /// only using [`OsStr`]s instead.
+    /// [`Arg::default_value`]: ./struct.Arg.html#method.default_value
+    /// [`OsStr`]: https://doc.rust-lang.org/std/ffi/struct.OsStr.html
+    pub fn default_value_os(mut self, val: &'a OsStr) -> Self {
+        self.setb(ArgSettings::TakesValue);
+        self.v.default_val = Some(val);
+        self
+    }
+
+    /// Specifies the value of the argument if `arg` has been used at runtime. If `val` is set to
+    /// `None`, `arg` only needs to be present. If `val` is set to `"some-val"` then `arg` must be
+    /// present at runtime **and** have the value `val`.
+    ///
+    /// **NOTE:** This setting is perfectly compatible with [`Arg::default_value`] but slightly
+    /// different. `Arg::default_value` *only* takes effect when the user has not provided this arg
+    /// at runtime.
This setting however only takes effect when the user has not provided a value at
+    /// runtime **and** these other conditions are met as well. If you have set `Arg::default_value`
+    /// and `Arg::default_value_if`, and the user **did not** provide this arg at runtime, nor
+    /// were the conditions met for `Arg::default_value_if`, the `Arg::default_value` will be
+    /// applied.
+    ///
+    /// **NOTE:** This implicitly sets [`Arg::takes_value(true)`].
+    ///
+    /// **NOTE:** If using YAML the values should be laid out as follows (`None` can be represented
+    /// as `null` in YAML)
+    ///
+    /// ```yaml
+    /// default_value_if:
+    ///     - [arg, val, default]
+    /// ```
+    ///
+    /// # Examples
+    ///
+    /// First we use the default value only if another arg is present at runtime.
+    ///
+    /// ```rust
+    /// # use clap::{App, Arg};
+    /// let m = App::new("prog")
+    ///     .arg(Arg::with_name("flag")
+    ///         .long("flag"))
+    ///     .arg(Arg::with_name("other")
+    ///         .long("other")
+    ///         .default_value_if("flag", None, "default"))
+    ///     .get_matches_from(vec![
+    ///         "prog", "--flag"
+    ///     ]);
+    ///
+    /// assert_eq!(m.value_of("other"), Some("default"));
+    /// ```
+    ///
+    /// Next we run the same test, but without providing `--flag`.
+    ///
+    /// ```rust
+    /// # use clap::{App, Arg};
+    /// let m = App::new("prog")
+    ///     .arg(Arg::with_name("flag")
+    ///         .long("flag"))
+    ///     .arg(Arg::with_name("other")
+    ///         .long("other")
+    ///         .default_value_if("flag", None, "default"))
+    ///     .get_matches_from(vec![
+    ///         "prog"
+    ///     ]);
+    ///
+    /// assert_eq!(m.value_of("other"), None);
+    /// ```
+    ///
+    /// Now let's only use the default value if `--opt` contains the value `special`.
+    ///
+    /// ```rust
+    /// # use clap::{App, Arg};
+    /// let m = App::new("prog")
+    ///     .arg(Arg::with_name("opt")
+    ///         .takes_value(true)
+    ///         .long("opt"))
+    ///     .arg(Arg::with_name("other")
+    ///         .long("other")
+    ///         .default_value_if("opt", Some("special"), "default"))
+    ///     .get_matches_from(vec![
+    ///         "prog", "--opt", "special"
+    ///     ]);
+    ///
+    /// assert_eq!(m.value_of("other"), Some("default"));
+    /// ```
+    ///
+    /// We can run the same test and provide any value *other than* `special` and we won't get a
+    /// default value.
+    ///
+    /// ```rust
+    /// # use clap::{App, Arg};
+    /// let m = App::new("prog")
+    ///     .arg(Arg::with_name("opt")
+    ///         .takes_value(true)
+    ///         .long("opt"))
+    ///     .arg(Arg::with_name("other")
+    ///         .long("other")
+    ///         .default_value_if("opt", Some("special"), "default"))
+    ///     .get_matches_from(vec![
+    ///         "prog", "--opt", "hahaha"
+    ///     ]);
+    ///
+    /// assert_eq!(m.value_of("other"), None);
+    /// ```
+    /// [`Arg::takes_value(true)`]: ./struct.Arg.html#method.takes_value
+    /// [`Arg::default_value`]: ./struct.Arg.html#method.default_value
+    pub fn default_value_if(self, arg: &'a str, val: Option<&'b str>, default: &'b str) -> Self {
+        self.default_value_if_os(
+            arg,
+            val.map(str::as_bytes).map(OsStr::from_bytes),
+            OsStr::from_bytes(default.as_bytes()),
+        )
+    }
+
+    /// Provides a conditional default value in the exact same manner as [`Arg::default_value_if`]
+    /// only using [`OsStr`]s instead.
+ /// [`Arg::default_value_if`]: ./struct.Arg.html#method.default_value_if + /// [`OsStr`]: https://doc.rust-lang.org/std/ffi/struct.OsStr.html + pub fn default_value_if_os( + mut self, + arg: &'a str, + val: Option<&'b OsStr>, + default: &'b OsStr, + ) -> Self { + self.setb(ArgSettings::TakesValue); + if let Some(ref mut vm) = self.v.default_vals_ifs { + let l = vm.len(); + vm.insert(l, (arg, val, default)); + } else { + let mut vm = VecMap::new(); + vm.insert(0, (arg, val, default)); + self.v.default_vals_ifs = Some(vm); + } + self + } + + /// Specifies multiple values and conditions in the same manner as [`Arg::default_value_if`]. + /// The method takes a slice of tuples in the `(arg, Option, default)` format. + /// + /// **NOTE**: The conditions are stored in order and evaluated in the same order. I.e. the first + /// if multiple conditions are true, the first one found will be applied and the ultimate value. + /// + /// **NOTE:** If using YAML the values should be laid out as follows + /// + /// ```yaml + /// default_value_if: + /// - [arg, val, default] + /// - [arg2, null, default2] + /// ``` + /// + /// # Examples + /// + /// First we use the default value only if another arg is present at runtime. + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("prog") + /// .arg(Arg::with_name("flag") + /// .long("flag")) + /// .arg(Arg::with_name("opt") + /// .long("opt") + /// .takes_value(true)) + /// .arg(Arg::with_name("other") + /// .long("other") + /// .default_value_ifs(&[ + /// ("flag", None, "default"), + /// ("opt", Some("channal"), "chan"), + /// ])) + /// .get_matches_from(vec![ + /// "prog", "--opt", "channal" + /// ]); + /// + /// assert_eq!(m.value_of("other"), Some("chan")); + /// ``` + /// + /// Next we run the same test, but without providing `--flag`. + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("prog") + /// .arg(Arg::with_name("flag") + /// .long("flag")) + /// .arg(Arg::with_name("other") + /// .long("other") + /// .default_value_ifs(&[ + /// ("flag", None, "default"), + /// ("opt", Some("channal"), "chan"), + /// ])) + /// .get_matches_from(vec![ + /// "prog" + /// ]); + /// + /// assert_eq!(m.value_of("other"), None); + /// ``` + /// + /// We can also see that these values are applied in order, and if more than one condition is + /// true, only the first evaluated "wins" + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("prog") + /// .arg(Arg::with_name("flag") + /// .long("flag")) + /// .arg(Arg::with_name("opt") + /// .long("opt") + /// .takes_value(true)) + /// .arg(Arg::with_name("other") + /// .long("other") + /// .default_value_ifs(&[ + /// ("flag", None, "default"), + /// ("opt", Some("channal"), "chan"), + /// ])) + /// .get_matches_from(vec![ + /// "prog", "--opt", "channal", "--flag" + /// ]); + /// + /// assert_eq!(m.value_of("other"), Some("default")); + /// ``` + /// [`Arg::takes_value(true)`]: ./struct.Arg.html#method.takes_value + /// [`Arg::default_value`]: ./struct.Arg.html#method.default_value + pub fn default_value_ifs(mut self, ifs: &[(&'a str, Option<&'b str>, &'b str)]) -> Self { + for &(arg, val, default) in ifs { + self = self.default_value_if_os( + arg, + val.map(str::as_bytes).map(OsStr::from_bytes), + OsStr::from_bytes(default.as_bytes()), + ); + } + self + } + + /// Provides multiple conditional default values in the exact same manner as + /// [`Arg::default_value_ifs`] only using [`OsStr`]s instead. 
+ /// [`Arg::default_value_ifs`]: ./struct.Arg.html#method.default_value_ifs + /// [`OsStr`]: https://doc.rust-lang.org/std/ffi/struct.OsStr.html + #[cfg_attr(feature = "lints", allow(explicit_counter_loop))] + pub fn default_value_ifs_os(mut self, ifs: &[(&'a str, Option<&'b OsStr>, &'b OsStr)]) -> Self { + for &(arg, val, default) in ifs { + self = self.default_value_if_os(arg, val, default); + } + self + } + + /// Specifies that if the value is not passed in as an argument, that it should be retrieved + /// from the environment, if available. If it is not present in the environment, then default + /// rules will apply. + /// + /// **NOTE:** If the user *does not* use this argument at runtime, [`ArgMatches::occurrences_of`] + /// will return `0` even though the [`ArgMatches::value_of`] will return the default specified. + /// + /// **NOTE:** If the user *does not* use this argument at runtime [`ArgMatches::is_present`] will + /// return `true` if the variable is present in the environment . If you wish to determine whether + /// the argument was used at runtime or not, consider [`ArgMatches::occurrences_of`] which will + /// return `0` if the argument was *not* used at runtime. + /// + /// **NOTE:** This implicitly sets [`Arg::takes_value(true)`]. + /// + /// **NOTE:** If [`Arg::multiple(true)`] is set then [`Arg::use_delimiter(true)`] should also be + /// set. Otherwise, only a single argument will be returned from the environment variable. The + /// default delimiter is `,` and follows all the other delimiter rules. + /// + /// # Examples + /// + /// In this example, we show the variable coming from the environment: + /// + /// ```rust + /// # use std::env; + /// # use clap::{App, Arg}; + /// + /// env::set_var("MY_FLAG", "env"); + /// + /// let m = App::new("prog") + /// .arg(Arg::with_name("flag") + /// .long("flag") + /// .env("MY_FLAG")) + /// .get_matches_from(vec![ + /// "prog" + /// ]); + /// + /// assert_eq!(m.value_of("flag"), Some("env")); + /// ``` + /// + /// In this example, we show the variable coming from an option on the CLI: + /// + /// ```rust + /// # use std::env; + /// # use clap::{App, Arg}; + /// + /// env::set_var("MY_FLAG", "env"); + /// + /// let m = App::new("prog") + /// .arg(Arg::with_name("flag") + /// .long("flag") + /// .env("MY_FLAG")) + /// .get_matches_from(vec![ + /// "prog", "--flag", "opt" + /// ]); + /// + /// assert_eq!(m.value_of("flag"), Some("opt")); + /// ``` + /// + /// In this example, we show the variable coming from the environment even with the + /// presence of a default: + /// + /// ```rust + /// # use std::env; + /// # use clap::{App, Arg}; + /// + /// env::set_var("MY_FLAG", "env"); + /// + /// let m = App::new("prog") + /// .arg(Arg::with_name("flag") + /// .long("flag") + /// .env("MY_FLAG") + /// .default_value("default")) + /// .get_matches_from(vec![ + /// "prog" + /// ]); + /// + /// assert_eq!(m.value_of("flag"), Some("env")); + /// ``` + /// + /// In this example, we show the use of multiple values in a single environment variable: + /// + /// ```rust + /// # use std::env; + /// # use clap::{App, Arg}; + /// + /// env::set_var("MY_FLAG_MULTI", "env1,env2"); + /// + /// let m = App::new("prog") + /// .arg(Arg::with_name("flag") + /// .long("flag") + /// .env("MY_FLAG_MULTI") + /// .multiple(true) + /// .use_delimiter(true)) + /// .get_matches_from(vec![ + /// "prog" + /// ]); + /// + /// assert_eq!(m.values_of("flag").unwrap().collect::>(), vec!["env1", "env2"]); + /// ``` + pub fn env(self, name: &'a str) -> Self { + 
self.env_os(OsStr::new(name)) + } + + /// Specifies that if the value is not passed in as an argument, that it should be retrieved + /// from the environment if available in the exact same manner as [`Arg::env`] only using + /// [`OsStr`]s instead. + pub fn env_os(mut self, name: &'a OsStr) -> Self { + self.setb(ArgSettings::TakesValue); + + self.v.env = Some((name, env::var_os(name))); + self + } + + /// @TODO @p2 @docs @release: write docs + pub fn hide_env_values(self, hide: bool) -> Self { + if hide { + self.set(ArgSettings::HideEnvValues) + } else { + self.unset(ArgSettings::HideEnvValues) + } + } + + /// When set to `true` the help string will be displayed on the line after the argument and + /// indented once. This can be helpful for arguments with very long or complex help messages. + /// This can also be helpful for arguments with very long flag names, or many/long value names. + /// + /// **NOTE:** To apply this setting to all arguments consider using + /// [`AppSettings::NextLineHelp`] + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("prog") + /// .arg(Arg::with_name("opt") + /// .long("long-option-flag") + /// .short("o") + /// .takes_value(true) + /// .value_names(&["value1", "value2"]) + /// .help("Some really long help and complex\n\ + /// help that makes more sense to be\n\ + /// on a line after the option") + /// .next_line_help(true)) + /// .get_matches_from(vec![ + /// "prog", "--help" + /// ]); + /// ``` + /// + /// The above example displays the following help message + /// + /// ```notrust + /// nlh + /// + /// USAGE: + /// nlh [FLAGS] [OPTIONS] + /// + /// FLAGS: + /// -h, --help Prints help information + /// -V, --version Prints version information + /// + /// OPTIONS: + /// -o, --long-option-flag + /// Some really long help and complex + /// help that makes more sense to be + /// on a line after the option + /// ``` + /// [`AppSettings::NextLineHelp`]: ./enum.AppSettings.html#variant.NextLineHelp + pub fn next_line_help(mut self, nlh: bool) -> Self { + if nlh { + self.setb(ArgSettings::NextLineHelp); + } else { + self.unsetb(ArgSettings::NextLineHelp); + } + self + } + + /// Allows custom ordering of args within the help message. Args with a lower value will be + /// displayed first in the help message. This is helpful when one would like to emphasise + /// frequently used args, or prioritize those towards the top of the list. Duplicate values + /// **are** allowed. Args with duplicate display orders will be displayed in alphabetical + /// order. + /// + /// **NOTE:** The default is 999 for all arguments. + /// + /// **NOTE:** This setting is ignored for [positional arguments] which are always displayed in + /// [index] order. + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("prog") + /// .arg(Arg::with_name("a") // Typically args are grouped alphabetically by name. + /// // Args without a display_order have a value of 999 and are + /// // displayed alphabetically with all other 999 valued args. + /// .long("long-option") + /// .short("o") + /// .takes_value(true) + /// .help("Some help and text")) + /// .arg(Arg::with_name("b") + /// .long("other-option") + /// .short("O") + /// .takes_value(true) + /// .display_order(1) // In order to force this arg to appear *first* + /// // all we have to do is give it a value lower than 999. + /// // Any other args with a value of 1 will be displayed + /// // alphabetically with this one...then 2 values, then 3, etc. 
+ /// .help("I should be first!")) + /// .get_matches_from(vec![ + /// "prog", "--help" + /// ]); + /// ``` + /// + /// The above example displays the following help message + /// + /// ```notrust + /// cust-ord + /// + /// USAGE: + /// cust-ord [FLAGS] [OPTIONS] + /// + /// FLAGS: + /// -h, --help Prints help information + /// -V, --version Prints version information + /// + /// OPTIONS: + /// -O, --other-option I should be first! + /// -o, --long-option Some help and text + /// ``` + /// [positional arguments]: ./struct.Arg.html#method.index + /// [index]: ./struct.Arg.html#method.index + pub fn display_order(mut self, ord: usize) -> Self { + self.s.disp_ord = ord; + self + } + + /// Indicates that all parameters passed after this should not be parsed + /// individually, but rather passed in their entirety. It is worth noting + /// that setting this requires all values to come after a `--` to indicate they + /// should all be captured. For example: + /// + /// ```notrust + /// --foo something -- -v -v -v -b -b -b --baz -q -u -x + /// ``` + /// Will result in everything after `--` to be considered one raw argument. This behavior + /// may not be exactly what you are expecting and using [`AppSettings::TrailingVarArg`] + /// may be more appropriate. + /// + /// **NOTE:** Implicitly sets [`Arg::multiple(true)`], [`Arg::allow_hyphen_values(true)`], and + /// [`Arg::last(true)`] when set to `true` + /// + /// [`Arg::multiple(true)`]: ./struct.Arg.html#method.multiple + /// [`Arg::allow_hyphen_values(true)`]: ./struct.Arg.html#method.allow_hyphen_values + /// [`Arg::last(true)`]: ./struct.Arg.html#method.last + /// [`AppSettings::TrailingVarArg`]: ./enum.AppSettings.html#variant.TrailingVarArg + pub fn raw(self, raw: bool) -> Self { + self.multiple(raw).allow_hyphen_values(raw).last(raw) + } + + /// Hides an argument from short help message output. + /// + /// **NOTE:** This does **not** hide the argument from usage strings on error + /// + /// **NOTE:** Setting this option will cause next-line-help output style to be used + /// when long help (`--help`) is called. 
+ /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg}; + /// Arg::with_name("debug") + /// .hidden_short_help(true) + /// # ; + /// ``` + /// Setting `hidden_short_help(true)` will hide the argument when displaying short help text + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("prog") + /// .arg(Arg::with_name("cfg") + /// .long("config") + /// .hidden_short_help(true) + /// .help("Some help text describing the --config arg")) + /// .get_matches_from(vec![ + /// "prog", "-h" + /// ]); + /// ``` + /// + /// The above example displays + /// + /// ```notrust + /// helptest + /// + /// USAGE: + /// helptest [FLAGS] + /// + /// FLAGS: + /// -h, --help Prints help information + /// -V, --version Prints version information + /// ``` + /// + /// However, when --help is called + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("prog") + /// .arg(Arg::with_name("cfg") + /// .long("config") + /// .hidden_short_help(true) + /// .help("Some help text describing the --config arg")) + /// .get_matches_from(vec![ + /// "prog", "--help" + /// ]); + /// ``` + /// + /// Then the following would be displayed + /// + /// ```notrust + /// helptest + /// + /// USAGE: + /// helptest [FLAGS] + /// + /// FLAGS: + /// --config Some help text describing the --config arg + /// -h, --help Prints help information + /// -V, --version Prints version information + /// ``` + pub fn hidden_short_help(self, hide: bool) -> Self { + if hide { + self.set(ArgSettings::HiddenShortHelp) + } else { + self.unset(ArgSettings::HiddenShortHelp) + } + } + + /// Hides an argument from long help message output. + /// + /// **NOTE:** This does **not** hide the argument from usage strings on error + /// + /// **NOTE:** Setting this option will cause next-line-help output style to be used + /// when long help (`--help`) is called. 
+ /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg}; + /// Arg::with_name("debug") + /// .hidden_long_help(true) + /// # ; + /// ``` + /// Setting `hidden_long_help(true)` will hide the argument when displaying long help text + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("prog") + /// .arg(Arg::with_name("cfg") + /// .long("config") + /// .hidden_long_help(true) + /// .help("Some help text describing the --config arg")) + /// .get_matches_from(vec![ + /// "prog", "--help" + /// ]); + /// ``` + /// + /// The above example displays + /// + /// ```notrust + /// helptest + /// + /// USAGE: + /// helptest [FLAGS] + /// + /// FLAGS: + /// -h, --help Prints help information + /// -V, --version Prints version information + /// ``` + /// + /// However, when -h is called + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("prog") + /// .arg(Arg::with_name("cfg") + /// .long("config") + /// .hidden_long_help(true) + /// .help("Some help text describing the --config arg")) + /// .get_matches_from(vec![ + /// "prog", "-h" + /// ]); + /// ``` + /// + /// Then the following would be displayed + /// + /// ```notrust + /// helptest + /// + /// USAGE: + /// helptest [FLAGS] + /// + /// FLAGS: + /// --config Some help text describing the --config arg + /// -h, --help Prints help information + /// -V, --version Prints version information + /// ``` + pub fn hidden_long_help(self, hide: bool) -> Self { + if hide { + self.set(ArgSettings::HiddenLongHelp) + } else { + self.unset(ArgSettings::HiddenLongHelp) + } + } + + /// Checks if one of the [`ArgSettings`] settings is set for the argument + /// [`ArgSettings`]: ./enum.ArgSettings.html + pub fn is_set(&self, s: ArgSettings) -> bool { + self.b.is_set(s) + } + + /// Sets one of the [`ArgSettings`] settings for the argument + /// [`ArgSettings`]: ./enum.ArgSettings.html + pub fn set(mut self, s: ArgSettings) -> Self { + self.setb(s); + self + } + + /// Unsets one of the [`ArgSettings`] settings for the argument + /// [`ArgSettings`]: ./enum.ArgSettings.html + pub fn unset(mut self, s: ArgSettings) -> Self { + self.unsetb(s); + self + } + + #[doc(hidden)] + pub fn setb(&mut self, s: ArgSettings) { + self.b.set(s); + } + + #[doc(hidden)] + pub fn unsetb(&mut self, s: ArgSettings) { + self.b.unset(s); + } +} + +impl<'a, 'b, 'z> From<&'z Arg<'a, 'b>> for Arg<'a, 'b> { + fn from(a: &'z Arg<'a, 'b>) -> Self { + Arg { + b: a.b.clone(), + v: a.v.clone(), + s: a.s.clone(), + index: a.index, + r_ifs: a.r_ifs.clone(), + } + } +} + +impl<'n, 'e> PartialEq for Arg<'n, 'e> { + fn eq(&self, other: &Arg<'n, 'e>) -> bool { + self.b == other.b + } +} diff --git a/clap/src/args/arg_builder/base.rs b/clap/src/args/arg_builder/base.rs new file mode 100644 index 000000000..fef9d8ab9 --- /dev/null +++ b/clap/src/args/arg_builder/base.rs @@ -0,0 +1,38 @@ +use args::{Arg, ArgFlags, ArgSettings}; + +#[derive(Debug, Clone, Default)] +pub struct Base<'a, 'b> +where + 'a: 'b, +{ + pub name: &'a str, + pub help: Option<&'b str>, + pub long_help: Option<&'b str>, + pub blacklist: Option>, + pub settings: ArgFlags, + pub r_unless: Option>, + pub overrides: Option>, + pub groups: Option>, + pub requires: Option, &'a str)>>, +} + +impl<'n, 'e> Base<'n, 'e> { + pub fn new(name: &'n str) -> Self { + Base { + name: name, + ..Default::default() + } + } + + pub fn set(&mut self, s: ArgSettings) { self.settings.set(s); } + pub fn unset(&mut self, s: ArgSettings) { self.settings.unset(s); } + pub fn is_set(&self, s: ArgSettings) -> bool 
{ self.settings.is_set(s) }
+}
+
+impl<'n, 'e, 'z> From<&'z Arg<'n, 'e>> for Base<'n, 'e> {
+    fn from(a: &'z Arg<'n, 'e>) -> Self { a.b.clone() }
+}
+
+impl<'n, 'e> PartialEq for Base<'n, 'e> {
+    fn eq(&self, other: &Base<'n, 'e>) -> bool { self.name == other.name }
+}
diff --git a/clap/src/args/arg_builder/flag.rs b/clap/src/args/arg_builder/flag.rs
new file mode 100644
index 000000000..641e7777e
--- /dev/null
+++ b/clap/src/args/arg_builder/flag.rs
@@ -0,0 +1,159 @@
+// Std
+use std::convert::From;
+use std::fmt::{Display, Formatter, Result};
+use std::rc::Rc;
+use std::result::Result as StdResult;
+use std::ffi::{OsStr, OsString};
+use std::mem;
+
+// Internal
+use Arg;
+use args::{AnyArg, ArgSettings, Base, DispOrder, Switched};
+use map::{self, VecMap};
+
+#[derive(Default, Clone, Debug)]
+#[doc(hidden)]
+pub struct FlagBuilder<'n, 'e>
+where
+    'n: 'e,
+{
+    pub b: Base<'n, 'e>,
+    pub s: Switched<'e>,
+}
+
+impl<'n, 'e> FlagBuilder<'n, 'e> {
+    pub fn new(name: &'n str) -> Self {
+        FlagBuilder {
+            b: Base::new(name),
+            ..Default::default()
+        }
+    }
+}
+
+impl<'a, 'b, 'z> From<&'z Arg<'a, 'b>> for FlagBuilder<'a, 'b> {
+    fn from(a: &'z Arg<'a, 'b>) -> Self {
+        FlagBuilder {
+            b: Base::from(a),
+            s: Switched::from(a),
+        }
+    }
+}
+
+impl<'a, 'b> From<Arg<'a, 'b>> for FlagBuilder<'a, 'b> {
+    fn from(mut a: Arg<'a, 'b>) -> Self {
+        FlagBuilder {
+            b: mem::replace(&mut a.b, Base::default()),
+            s: mem::replace(&mut a.s, Switched::default()),
+        }
+    }
+}
+
+impl<'n, 'e> Display for FlagBuilder<'n, 'e> {
+    fn fmt(&self, f: &mut Formatter) -> Result {
+        if let Some(l) = self.s.long {
+            write!(f, "--{}", l)?;
+        } else {
+            write!(f, "-{}", self.s.short.unwrap())?;
+        }
+
+        Ok(())
+    }
+}
+
+impl<'n, 'e> AnyArg<'n, 'e> for FlagBuilder<'n, 'e> {
+    fn name(&self) -> &'n str { self.b.name }
+    fn overrides(&self) -> Option<&[&'e str]> { self.b.overrides.as_ref().map(|o| &o[..]) }
+    fn requires(&self) -> Option<&[(Option<&'e str>, &'n str)]> {
+        self.b.requires.as_ref().map(|o| &o[..])
+    }
+    fn blacklist(&self) -> Option<&[&'e str]> { self.b.blacklist.as_ref().map(|o| &o[..]) }
+    fn required_unless(&self) -> Option<&[&'e str]> { self.b.r_unless.as_ref().map(|o| &o[..]) }
+    fn is_set(&self, s: ArgSettings) -> bool { self.b.settings.is_set(s) }
+    fn has_switch(&self) -> bool { true }
+    fn takes_value(&self) -> bool { false }
+    fn set(&mut self, s: ArgSettings) { self.b.settings.set(s) }
+    fn max_vals(&self) -> Option<u64> { None }
+    fn val_names(&self) -> Option<&VecMap<&'e str>> { None }
+    fn num_vals(&self) -> Option<u64> { None }
+    fn possible_vals(&self) -> Option<&[&'e str]> { None }
+    fn validator(&self) -> Option<&Rc<Fn(String) -> StdResult<(), String>>> { None }
+    fn validator_os(&self) -> Option<&Rc<Fn(&OsStr) -> StdResult<(), OsString>>> { None }
+    fn min_vals(&self) -> Option<u64> { None }
+    fn short(&self) -> Option<char> { self.s.short }
+    fn long(&self) -> Option<&'e str> { self.s.long }
+    fn val_delim(&self) -> Option<char> { None }
+    fn help(&self) -> Option<&'e str> { self.b.help }
+    fn long_help(&self) -> Option<&'e str> { self.b.long_help }
+    fn val_terminator(&self) -> Option<&'e str> { None }
+    fn default_val(&self) -> Option<&'e OsStr> { None }
+    fn default_vals_ifs(&self) -> Option<map::Values<(&'n str, Option<&'e OsStr>, &'e OsStr)>> {
+        None
+    }
+    fn env<'s>(&'s self) -> Option<(&'n OsStr, Option<&'s OsString>)> { None }
+    fn longest_filter(&self) -> bool { self.s.long.is_some() }
+    fn aliases(&self) -> Option<Vec<&'e str>> {
+        if let Some(ref aliases) = self.s.aliases {
+            let vis_aliases: Vec<_> = aliases
+                .iter()
+                .filter_map(|&(n, v)| if v { Some(n) } else { None })
+                .collect();
+            if
vis_aliases.is_empty() { + None + } else { + Some(vis_aliases) + } + } else { + None + } + } +} + +impl<'n, 'e> DispOrder for FlagBuilder<'n, 'e> { + fn disp_ord(&self) -> usize { self.s.disp_ord } +} + +impl<'n, 'e> PartialEq for FlagBuilder<'n, 'e> { + fn eq(&self, other: &FlagBuilder<'n, 'e>) -> bool { self.b == other.b } +} + +#[cfg(test)] +mod test { + use args::settings::ArgSettings; + use super::FlagBuilder; + + #[test] + fn flagbuilder_display() { + let mut f = FlagBuilder::new("flg"); + f.b.settings.set(ArgSettings::Multiple); + f.s.long = Some("flag"); + + assert_eq!(&*format!("{}", f), "--flag"); + + let mut f2 = FlagBuilder::new("flg"); + f2.s.short = Some('f'); + + assert_eq!(&*format!("{}", f2), "-f"); + } + + #[test] + fn flagbuilder_display_single_alias() { + let mut f = FlagBuilder::new("flg"); + f.s.long = Some("flag"); + f.s.aliases = Some(vec![("als", true)]); + + assert_eq!(&*format!("{}", f), "--flag"); + } + + #[test] + fn flagbuilder_display_multiple_aliases() { + let mut f = FlagBuilder::new("flg"); + f.s.short = Some('f'); + f.s.aliases = Some(vec![ + ("alias_not_visible", false), + ("f2", true), + ("f3", true), + ("f4", true), + ]); + assert_eq!(&*format!("{}", f), "-f"); + } +} diff --git a/clap/src/args/arg_builder/mod.rs b/clap/src/args/arg_builder/mod.rs new file mode 100644 index 000000000..d1a7a6608 --- /dev/null +++ b/clap/src/args/arg_builder/mod.rs @@ -0,0 +1,13 @@ +pub use self::flag::FlagBuilder; +pub use self::option::OptBuilder; +pub use self::positional::PosBuilder; +pub use self::base::Base; +pub use self::switched::Switched; +pub use self::valued::Valued; + +mod flag; +mod positional; +mod option; +mod base; +mod valued; +mod switched; diff --git a/clap/src/args/arg_builder/option.rs b/clap/src/args/arg_builder/option.rs new file mode 100644 index 000000000..4bb147a7d --- /dev/null +++ b/clap/src/args/arg_builder/option.rs @@ -0,0 +1,244 @@ +// Std +use std::fmt::{Display, Formatter, Result}; +use std::rc::Rc; +use std::result::Result as StdResult; +use std::ffi::{OsStr, OsString}; +use std::mem; + +// Internal +use args::{AnyArg, Arg, ArgSettings, Base, DispOrder, Switched, Valued}; +use map::{self, VecMap}; +use INTERNAL_ERROR_MSG; + +#[allow(missing_debug_implementations)] +#[doc(hidden)] +#[derive(Default, Clone)] +pub struct OptBuilder<'n, 'e> +where + 'n: 'e, +{ + pub b: Base<'n, 'e>, + pub s: Switched<'e>, + pub v: Valued<'n, 'e>, +} + +impl<'n, 'e> OptBuilder<'n, 'e> { + pub fn new(name: &'n str) -> Self { + OptBuilder { + b: Base::new(name), + ..Default::default() + } + } +} + +impl<'n, 'e, 'z> From<&'z Arg<'n, 'e>> for OptBuilder<'n, 'e> { + fn from(a: &'z Arg<'n, 'e>) -> Self { + OptBuilder { + b: Base::from(a), + s: Switched::from(a), + v: Valued::from(a), + } + } +} + +impl<'n, 'e> From> for OptBuilder<'n, 'e> { + fn from(mut a: Arg<'n, 'e>) -> Self { + a.v.fill_in(); + OptBuilder { + b: mem::replace(&mut a.b, Base::default()), + s: mem::replace(&mut a.s, Switched::default()), + v: mem::replace(&mut a.v, Valued::default()), + } + } +} + +impl<'n, 'e> Display for OptBuilder<'n, 'e> { + fn fmt(&self, f: &mut Formatter) -> Result { + debugln!("OptBuilder::fmt:{}", self.b.name); + let sep = if self.b.is_set(ArgSettings::RequireEquals) { + "=" + } else { + " " + }; + // Write the name such --long or -l + if let Some(l) = self.s.long { + write!(f, "--{}{}", l, sep)?; + } else { + write!(f, "-{}{}", self.s.short.unwrap(), sep)?; + } + let delim = if self.is_set(ArgSettings::RequireDelimiter) { + self.v.val_delim.expect(INTERNAL_ERROR_MSG) + 
} else { + ' ' + }; + + // Write the values such as + if let Some(ref vec) = self.v.val_names { + let mut it = vec.iter().peekable(); + while let Some((_, val)) = it.next() { + write!(f, "<{}>", val)?; + if it.peek().is_some() { + write!(f, "{}", delim)?; + } + } + let num = vec.len(); + if self.is_set(ArgSettings::Multiple) && num == 1 { + write!(f, "...")?; + } + } else if let Some(num) = self.v.num_vals { + let mut it = (0..num).peekable(); + while let Some(_) = it.next() { + write!(f, "<{}>", self.b.name)?; + if it.peek().is_some() { + write!(f, "{}", delim)?; + } + } + if self.is_set(ArgSettings::Multiple) && num == 1 { + write!(f, "...")?; + } + } else { + write!( + f, + "<{}>{}", + self.b.name, + if self.is_set(ArgSettings::Multiple) { + "..." + } else { + "" + } + )?; + } + + Ok(()) + } +} + +impl<'n, 'e> AnyArg<'n, 'e> for OptBuilder<'n, 'e> { + fn name(&self) -> &'n str { self.b.name } + fn overrides(&self) -> Option<&[&'e str]> { self.b.overrides.as_ref().map(|o| &o[..]) } + fn requires(&self) -> Option<&[(Option<&'e str>, &'n str)]> { + self.b.requires.as_ref().map(|o| &o[..]) + } + fn blacklist(&self) -> Option<&[&'e str]> { self.b.blacklist.as_ref().map(|o| &o[..]) } + fn required_unless(&self) -> Option<&[&'e str]> { self.b.r_unless.as_ref().map(|o| &o[..]) } + fn val_names(&self) -> Option<&VecMap<&'e str>> { self.v.val_names.as_ref() } + fn is_set(&self, s: ArgSettings) -> bool { self.b.settings.is_set(s) } + fn has_switch(&self) -> bool { true } + fn set(&mut self, s: ArgSettings) { self.b.settings.set(s) } + fn max_vals(&self) -> Option { self.v.max_vals } + fn val_terminator(&self) -> Option<&'e str> { self.v.terminator } + fn num_vals(&self) -> Option { self.v.num_vals } + fn possible_vals(&self) -> Option<&[&'e str]> { self.v.possible_vals.as_ref().map(|o| &o[..]) } + fn validator(&self) -> Option<&Rc StdResult<(), String>>> { + self.v.validator.as_ref() + } + fn validator_os(&self) -> Option<&Rc StdResult<(), OsString>>> { + self.v.validator_os.as_ref() + } + fn min_vals(&self) -> Option { self.v.min_vals } + fn short(&self) -> Option { self.s.short } + fn long(&self) -> Option<&'e str> { self.s.long } + fn val_delim(&self) -> Option { self.v.val_delim } + fn takes_value(&self) -> bool { true } + fn help(&self) -> Option<&'e str> { self.b.help } + fn long_help(&self) -> Option<&'e str> { self.b.long_help } + fn default_val(&self) -> Option<&'e OsStr> { self.v.default_val } + fn default_vals_ifs(&self) -> Option, &'e OsStr)>> { + self.v.default_vals_ifs.as_ref().map(|vm| vm.values()) + } + fn env<'s>(&'s self) -> Option<(&'n OsStr, Option<&'s OsString>)> { + self.v + .env + .as_ref() + .map(|&(key, ref value)| (key, value.as_ref())) + } + fn longest_filter(&self) -> bool { true } + fn aliases(&self) -> Option> { + if let Some(ref aliases) = self.s.aliases { + let vis_aliases: Vec<_> = aliases + .iter() + .filter_map(|&(n, v)| if v { Some(n) } else { None }) + .collect(); + if vis_aliases.is_empty() { + None + } else { + Some(vis_aliases) + } + } else { + None + } + } +} + +impl<'n, 'e> DispOrder for OptBuilder<'n, 'e> { + fn disp_ord(&self) -> usize { self.s.disp_ord } +} + +impl<'n, 'e> PartialEq for OptBuilder<'n, 'e> { + fn eq(&self, other: &OptBuilder<'n, 'e>) -> bool { self.b == other.b } +} + +#[cfg(test)] +mod test { + use args::settings::ArgSettings; + use super::OptBuilder; + use map::VecMap; + + #[test] + fn optbuilder_display1() { + let mut o = OptBuilder::new("opt"); + o.s.long = Some("option"); + o.b.settings.set(ArgSettings::Multiple); + + 
assert_eq!(&*format!("{}", o), "--option ..."); + } + + #[test] + fn optbuilder_display2() { + let mut v_names = VecMap::new(); + v_names.insert(0, "file"); + v_names.insert(1, "name"); + + let mut o2 = OptBuilder::new("opt"); + o2.s.short = Some('o'); + o2.v.val_names = Some(v_names); + + assert_eq!(&*format!("{}", o2), "-o "); + } + + #[test] + fn optbuilder_display3() { + let mut v_names = VecMap::new(); + v_names.insert(0, "file"); + v_names.insert(1, "name"); + + let mut o2 = OptBuilder::new("opt"); + o2.s.short = Some('o'); + o2.v.val_names = Some(v_names); + o2.b.settings.set(ArgSettings::Multiple); + + assert_eq!(&*format!("{}", o2), "-o "); + } + + #[test] + fn optbuilder_display_single_alias() { + let mut o = OptBuilder::new("opt"); + o.s.long = Some("option"); + o.s.aliases = Some(vec![("als", true)]); + + assert_eq!(&*format!("{}", o), "--option "); + } + + #[test] + fn optbuilder_display_multiple_aliases() { + let mut o = OptBuilder::new("opt"); + o.s.long = Some("option"); + o.s.aliases = Some(vec![ + ("als_not_visible", false), + ("als2", true), + ("als3", true), + ("als4", true), + ]); + assert_eq!(&*format!("{}", o), "--option "); + } +} diff --git a/clap/src/args/arg_builder/positional.rs b/clap/src/args/arg_builder/positional.rs new file mode 100644 index 000000000..43fdca4c5 --- /dev/null +++ b/clap/src/args/arg_builder/positional.rs @@ -0,0 +1,229 @@ +// Std +use std::borrow::Cow; +use std::fmt::{Display, Formatter, Result}; +use std::rc::Rc; +use std::result::Result as StdResult; +use std::ffi::{OsStr, OsString}; +use std::mem; + +// Internal +use Arg; +use args::{AnyArg, ArgSettings, Base, DispOrder, Valued}; +use INTERNAL_ERROR_MSG; +use map::{self, VecMap}; + +#[allow(missing_debug_implementations)] +#[doc(hidden)] +#[derive(Clone, Default)] +pub struct PosBuilder<'n, 'e> +where + 'n: 'e, +{ + pub b: Base<'n, 'e>, + pub v: Valued<'n, 'e>, + pub index: u64, +} + +impl<'n, 'e> PosBuilder<'n, 'e> { + pub fn new(name: &'n str, idx: u64) -> Self { + PosBuilder { + b: Base::new(name), + index: idx, + ..Default::default() + } + } + + pub fn from_arg_ref(a: &Arg<'n, 'e>, idx: u64) -> Self { + let mut pb = PosBuilder { + b: Base::from(a), + v: Valued::from(a), + index: idx, + }; + if a.v.max_vals.is_some() || a.v.min_vals.is_some() + || (a.v.num_vals.is_some() && a.v.num_vals.unwrap() > 1) + { + pb.b.settings.set(ArgSettings::Multiple); + } + pb + } + + pub fn from_arg(mut a: Arg<'n, 'e>, idx: u64) -> Self { + if a.v.max_vals.is_some() || a.v.min_vals.is_some() + || (a.v.num_vals.is_some() && a.v.num_vals.unwrap() > 1) + { + a.b.settings.set(ArgSettings::Multiple); + } + PosBuilder { + b: mem::replace(&mut a.b, Base::default()), + v: mem::replace(&mut a.v, Valued::default()), + index: idx, + } + } + + pub fn multiple_str(&self) -> &str { + let mult_vals = self.v + .val_names + .as_ref() + .map_or(true, |names| names.len() < 2); + if self.is_set(ArgSettings::Multiple) && mult_vals { + "..." 
+ } else { + "" + } + } + + pub fn name_no_brackets(&self) -> Cow { + debugln!("PosBuilder::name_no_brackets;"); + let mut delim = String::new(); + delim.push(if self.is_set(ArgSettings::RequireDelimiter) { + self.v.val_delim.expect(INTERNAL_ERROR_MSG) + } else { + ' ' + }); + if let Some(ref names) = self.v.val_names { + debugln!("PosBuilder:name_no_brackets: val_names={:#?}", names); + if names.len() > 1 { + Cow::Owned( + names + .values() + .map(|n| format!("<{}>", n)) + .collect::>() + .join(&*delim), + ) + } else { + Cow::Borrowed(names.values().next().expect(INTERNAL_ERROR_MSG)) + } + } else { + debugln!("PosBuilder:name_no_brackets: just name"); + Cow::Borrowed(self.b.name) + } + } +} + +impl<'n, 'e> Display for PosBuilder<'n, 'e> { + fn fmt(&self, f: &mut Formatter) -> Result { + let mut delim = String::new(); + delim.push(if self.is_set(ArgSettings::RequireDelimiter) { + self.v.val_delim.expect(INTERNAL_ERROR_MSG) + } else { + ' ' + }); + if let Some(ref names) = self.v.val_names { + write!( + f, + "{}", + names + .values() + .map(|n| format!("<{}>", n)) + .collect::>() + .join(&*delim) + )?; + } else { + write!(f, "<{}>", self.b.name)?; + } + if self.b.settings.is_set(ArgSettings::Multiple) + && (self.v.val_names.is_none() || self.v.val_names.as_ref().unwrap().len() == 1) + { + write!(f, "...")?; + } + + Ok(()) + } +} + +impl<'n, 'e> AnyArg<'n, 'e> for PosBuilder<'n, 'e> { + fn name(&self) -> &'n str { self.b.name } + fn overrides(&self) -> Option<&[&'e str]> { self.b.overrides.as_ref().map(|o| &o[..]) } + fn requires(&self) -> Option<&[(Option<&'e str>, &'n str)]> { + self.b.requires.as_ref().map(|o| &o[..]) + } + fn blacklist(&self) -> Option<&[&'e str]> { self.b.blacklist.as_ref().map(|o| &o[..]) } + fn required_unless(&self) -> Option<&[&'e str]> { self.b.r_unless.as_ref().map(|o| &o[..]) } + fn val_names(&self) -> Option<&VecMap<&'e str>> { self.v.val_names.as_ref() } + fn is_set(&self, s: ArgSettings) -> bool { self.b.settings.is_set(s) } + fn set(&mut self, s: ArgSettings) { self.b.settings.set(s) } + fn has_switch(&self) -> bool { false } + fn max_vals(&self) -> Option { self.v.max_vals } + fn val_terminator(&self) -> Option<&'e str> { self.v.terminator } + fn num_vals(&self) -> Option { self.v.num_vals } + fn possible_vals(&self) -> Option<&[&'e str]> { self.v.possible_vals.as_ref().map(|o| &o[..]) } + fn validator(&self) -> Option<&Rc StdResult<(), String>>> { + self.v.validator.as_ref() + } + fn validator_os(&self) -> Option<&Rc StdResult<(), OsString>>> { + self.v.validator_os.as_ref() + } + fn min_vals(&self) -> Option { self.v.min_vals } + fn short(&self) -> Option { None } + fn long(&self) -> Option<&'e str> { None } + fn val_delim(&self) -> Option { self.v.val_delim } + fn takes_value(&self) -> bool { true } + fn help(&self) -> Option<&'e str> { self.b.help } + fn long_help(&self) -> Option<&'e str> { self.b.long_help } + fn default_vals_ifs(&self) -> Option, &'e OsStr)>> { + self.v.default_vals_ifs.as_ref().map(|vm| vm.values()) + } + fn default_val(&self) -> Option<&'e OsStr> { self.v.default_val } + fn env<'s>(&'s self) -> Option<(&'n OsStr, Option<&'s OsString>)> { + self.v + .env + .as_ref() + .map(|&(key, ref value)| (key, value.as_ref())) + } + fn longest_filter(&self) -> bool { true } + fn aliases(&self) -> Option> { None } +} + +impl<'n, 'e> DispOrder for PosBuilder<'n, 'e> { + fn disp_ord(&self) -> usize { self.index as usize } +} + +impl<'n, 'e> PartialEq for PosBuilder<'n, 'e> { + fn eq(&self, other: &PosBuilder<'n, 'e>) -> bool { self.b == other.b } 
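+    // Note: equality here considers only the Base data (`self.b`); the Valued data and
+    // the positional index are not part of the comparison.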
+}
+
+#[cfg(test)]
+mod test {
+    use args::settings::ArgSettings;
+    use super::PosBuilder;
+    use map::VecMap;
+
+    #[test]
+    fn display_mult() {
+        let mut p = PosBuilder::new("pos", 1);
+        p.b.settings.set(ArgSettings::Multiple);
+
+        assert_eq!(&*format!("{}", p), "<pos>...");
+    }
+
+    #[test]
+    fn display_required() {
+        let mut p2 = PosBuilder::new("pos", 1);
+        p2.b.settings.set(ArgSettings::Required);
+
+        assert_eq!(&*format!("{}", p2), "<pos>");
+    }
+
+    #[test]
+    fn display_val_names() {
+        let mut p2 = PosBuilder::new("pos", 1);
+        let mut vm = VecMap::new();
+        vm.insert(0, "file1");
+        vm.insert(1, "file2");
+        p2.v.val_names = Some(vm);
+
+        assert_eq!(&*format!("{}", p2), "<file1> <file2>");
+    }
+
+    #[test]
+    fn display_val_names_req() {
+        let mut p2 = PosBuilder::new("pos", 1);
+        p2.b.settings.set(ArgSettings::Required);
+        let mut vm = VecMap::new();
+        vm.insert(0, "file1");
+        vm.insert(1, "file2");
+        p2.v.val_names = Some(vm);
+
+        assert_eq!(&*format!("{}", p2), "<file1> <file2>");
+    }
+}
diff --git a/clap/src/args/arg_builder/switched.rs b/clap/src/args/arg_builder/switched.rs
new file mode 100644
index 000000000..224b2f2b2
--- /dev/null
+++ b/clap/src/args/arg_builder/switched.rs
@@ -0,0 +1,38 @@
+use Arg;
+
+#[derive(Debug)]
+pub struct Switched<'b> {
+    pub short: Option<char>,
+    pub long: Option<&'b str>,
+    pub aliases: Option<Vec<(&'b str, bool)>>, // (name, visible)
+    pub disp_ord: usize,
+    pub unified_ord: usize,
+}
+
+impl<'e> Default for Switched<'e> {
+    fn default() -> Self {
+        Switched {
+            short: None,
+            long: None,
+            aliases: None,
+            disp_ord: 999,
+            unified_ord: 999,
+        }
+    }
+}
+
+impl<'n, 'e, 'z> From<&'z Arg<'n, 'e>> for Switched<'e> {
+    fn from(a: &'z Arg<'n, 'e>) -> Self { a.s.clone() }
+}
+
+impl<'e> Clone for Switched<'e> {
+    fn clone(&self) -> Self {
+        Switched {
+            short: self.short,
+            long: self.long,
+            aliases: self.aliases.clone(),
+            disp_ord: self.disp_ord,
+            unified_ord: self.unified_ord,
+        }
+    }
+}
diff --git a/clap/src/args/arg_builder/valued.rs b/clap/src/args/arg_builder/valued.rs
new file mode 100644
index 000000000..d70854dc8
--- /dev/null
+++ b/clap/src/args/arg_builder/valued.rs
@@ -0,0 +1,67 @@
+use std::rc::Rc;
+use std::ffi::{OsStr, OsString};
+
+use map::VecMap;
+
+use Arg;
+
+#[allow(missing_debug_implementations)]
+#[derive(Clone)]
+pub struct Valued<'a, 'b>
+where
+    'a: 'b,
+{
+    pub possible_vals: Option<Vec<&'b str>>,
+    pub val_names: Option<VecMap<&'b str>>,
+    pub num_vals: Option<u64>,
+    pub max_vals: Option<u64>,
+    pub min_vals: Option<u64>,
+    pub validator: Option<Rc<Fn(String) -> Result<(), String>>>,
+    pub validator_os: Option<Rc<Fn(&OsStr) -> Result<(), OsString>>>,
+    pub val_delim: Option<char>,
+    pub default_val: Option<&'b OsStr>,
+    pub default_vals_ifs: Option<VecMap<(&'a str, Option<&'b OsStr>, &'b OsStr)>>,
+    pub env: Option<(&'a OsStr, Option<OsString>)>,
+    pub terminator: Option<&'b str>,
+}
+
+impl<'n, 'e> Default for Valued<'n, 'e> {
+    fn default() -> Self {
+        Valued {
+            possible_vals: None,
+            num_vals: None,
+            min_vals: None,
+            max_vals: None,
+            val_names: None,
+            validator: None,
+            validator_os: None,
+            val_delim: None,
+            default_val: None,
+            default_vals_ifs: None,
+            env: None,
+            terminator: None,
+        }
+    }
+}
+
+impl<'n, 'e> Valued<'n, 'e> {
+    pub fn fill_in(&mut self) {
+        if let Some(ref vec) = self.val_names {
+            if vec.len() > 1 {
+                self.num_vals = Some(vec.len() as u64);
+            }
+        }
+    }
+}
+
+impl<'n, 'e, 'z> From<&'z Arg<'n, 'e>> for Valued<'n, 'e> {
+    fn from(a: &'z Arg<'n, 'e>) -> Self {
+        let mut v = a.v.clone();
+        if let Some(ref vec) = a.v.val_names {
+            if vec.len() > 1 {
+                v.num_vals = Some(vec.len() as u64);
+            }
+        }
+        v
+    }
+}
diff --git a/clap/src/args/arg_matcher.rs b/clap/src/args/arg_matcher.rs new file mode
100644 index 000000000..e1d8067e6 --- /dev/null +++ b/clap/src/args/arg_matcher.rs @@ -0,0 +1,218 @@ +// Std +use std::collections::hash_map::{Entry, Iter}; +use std::collections::HashMap; +use std::ffi::OsStr; +use std::ops::Deref; +use std::mem; + +// Internal +use args::{ArgMatches, MatchedArg, SubCommand}; +use args::AnyArg; +use args::settings::ArgSettings; + +#[doc(hidden)] +#[allow(missing_debug_implementations)] +pub struct ArgMatcher<'a>(pub ArgMatches<'a>); + +impl<'a> Default for ArgMatcher<'a> { + fn default() -> Self { ArgMatcher(ArgMatches::default()) } +} + +impl<'a> ArgMatcher<'a> { + pub fn new() -> Self { ArgMatcher::default() } + + pub fn process_arg_overrides<'b>(&mut self, a: Option<&AnyArg<'a, 'b>>, overrides: &mut Vec<(&'b str, &'a str)>, required: &mut Vec<&'a str>, check_all: bool) { + debugln!("ArgMatcher::process_arg_overrides:{:?};", a.map_or(None, |a| Some(a.name()))); + if let Some(aa) = a { + let mut self_done = false; + if let Some(a_overrides) = aa.overrides() { + for overr in a_overrides { + debugln!("ArgMatcher::process_arg_overrides:iter:{};", overr); + if overr == &aa.name() { + self_done = true; + self.handle_self_overrides(a); + } else if self.is_present(overr) { + debugln!("ArgMatcher::process_arg_overrides:iter:{}: removing from matches;", overr); + self.remove(overr); + for i in (0 .. required.len()).rev() { + if &required[i] == overr { + debugln!("ArgMatcher::process_arg_overrides:iter:{}: removing required;", overr); + required.swap_remove(i); + break; + } + } + overrides.push((overr, aa.name())); + } else { + overrides.push((overr, aa.name())); + } + } + } + if check_all && !self_done { + self.handle_self_overrides(a); + } + } + } + + pub fn handle_self_overrides<'b>(&mut self, a: Option<&AnyArg<'a, 'b>>) { + debugln!("ArgMatcher::handle_self_overrides:{:?};", a.map_or(None, |a| Some(a.name()))); + if let Some(aa) = a { + if !aa.has_switch() || aa.is_set(ArgSettings::Multiple) { + // positional args can't override self or else we would never advance to the next + + // Also flags with --multiple set are ignored otherwise we could never have more + // than one + return; + } + if let Some(ma) = self.get_mut(aa.name()) { + if ma.vals.len() > 1 { + // swap_remove(0) would be O(1) but does not preserve order, which + // we need + ma.vals.remove(0); + ma.occurs = 1; + } else if !aa.takes_value() && ma.occurs > 1 { + ma.occurs = 1; + } + } + } + } + + pub fn is_present(&self, name: &str) -> bool { + self.0.is_present(name) + } + + pub fn propagate_globals(&mut self, global_arg_vec: &[&'a str]) { + debugln!( "ArgMatcher::get_global_values: global_arg_vec={:?}", global_arg_vec ); + let mut vals_map = HashMap::new(); + self.fill_in_global_values(global_arg_vec, &mut vals_map); + } + + fn fill_in_global_values( + &mut self, + global_arg_vec: &[&'a str], + vals_map: &mut HashMap<&'a str, MatchedArg>, + ) { + for global_arg in global_arg_vec { + if let Some(ma) = self.get(global_arg) { + // We have to check if the parent's global arg wasn't used but still exists + // such as from a default value. + // + // For example, `myprog subcommand --global-arg=value` where --global-arg defines + // a default value of `other` myprog would have an existing MatchedArg for + // --global-arg where the value is `other`, however the occurs will be 0. 
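+ // In short (summary of the branch below): take the parent's MatchedArg only when the
+ // parent actually used the arg (occurs > 0) while this level did not (occurs == 0,
+ // e.g. only a default value); otherwise keep the current MatchedArg.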
+ let to_update = if let Some(parent_ma) = vals_map.get(global_arg) { + if parent_ma.occurs > 0 && ma.occurs == 0 { + parent_ma.clone() + } else { + ma.clone() + } + } else { + ma.clone() + }; + vals_map.insert(global_arg, to_update); + } + } + if let Some(ref mut sc) = self.0.subcommand { + let mut am = ArgMatcher(mem::replace(&mut sc.matches, ArgMatches::new())); + am.fill_in_global_values(global_arg_vec, vals_map); + mem::swap(&mut am.0, &mut sc.matches); + } + + for (name, matched_arg) in vals_map.into_iter() { + self.0.args.insert(name, matched_arg.clone()); + } + } + + pub fn get_mut(&mut self, arg: &str) -> Option<&mut MatchedArg> { self.0.args.get_mut(arg) } + + pub fn get(&self, arg: &str) -> Option<&MatchedArg> { self.0.args.get(arg) } + + pub fn remove(&mut self, arg: &str) { self.0.args.remove(arg); } + + pub fn remove_all(&mut self, args: &[&str]) { + for &arg in args { + self.0.args.remove(arg); + } + } + + pub fn insert(&mut self, name: &'a str) { self.0.args.insert(name, MatchedArg::new()); } + + pub fn contains(&self, arg: &str) -> bool { self.0.args.contains_key(arg) } + + pub fn is_empty(&self) -> bool { self.0.args.is_empty() } + + pub fn usage(&mut self, usage: String) { self.0.usage = Some(usage); } + + pub fn arg_names(&'a self) -> Vec<&'a str> { self.0.args.keys().map(Deref::deref).collect() } + + pub fn entry(&mut self, arg: &'a str) -> Entry<&'a str, MatchedArg> { self.0.args.entry(arg) } + + pub fn subcommand(&mut self, sc: SubCommand<'a>) { self.0.subcommand = Some(Box::new(sc)); } + + pub fn subcommand_name(&self) -> Option<&str> { self.0.subcommand_name() } + + pub fn iter(&self) -> Iter<&str, MatchedArg> { self.0.args.iter() } + + pub fn inc_occurrence_of(&mut self, arg: &'a str) { + debugln!("ArgMatcher::inc_occurrence_of: arg={}", arg); + if let Some(a) = self.get_mut(arg) { + a.occurs += 1; + return; + } + debugln!("ArgMatcher::inc_occurrence_of: first instance"); + self.insert(arg); + } + + pub fn inc_occurrences_of(&mut self, args: &[&'a str]) { + debugln!("ArgMatcher::inc_occurrences_of: args={:?}", args); + for arg in args { + self.inc_occurrence_of(arg); + } + } + + pub fn add_val_to(&mut self, arg: &'a str, val: &OsStr) { + let ma = self.entry(arg).or_insert(MatchedArg { + occurs: 0, + indices: Vec::with_capacity(1), + vals: Vec::with_capacity(1), + }); + ma.vals.push(val.to_owned()); + } + + pub fn add_index_to(&mut self, arg: &'a str, idx: usize) { + let ma = self.entry(arg).or_insert(MatchedArg { + occurs: 0, + indices: Vec::with_capacity(1), + vals: Vec::new(), + }); + ma.indices.push(idx); + } + + pub fn needs_more_vals<'b, A>(&self, o: &A) -> bool + where + A: AnyArg<'a, 'b>, + { + debugln!("ArgMatcher::needs_more_vals: o={}", o.name()); + if let Some(ma) = self.get(o.name()) { + if let Some(num) = o.num_vals() { + debugln!("ArgMatcher::needs_more_vals: num_vals...{}", num); + return if o.is_set(ArgSettings::Multiple) { + ((ma.vals.len() as u64) % num) != 0 + } else { + num != (ma.vals.len() as u64) + }; + } else if let Some(num) = o.max_vals() { + debugln!("ArgMatcher::needs_more_vals: max_vals...{}", num); + return !((ma.vals.len() as u64) > num); + } else if o.min_vals().is_some() { + debugln!("ArgMatcher::needs_more_vals: min_vals...true"); + return true; + } + return o.is_set(ArgSettings::Multiple); + } + true + } +} + +impl<'a> Into> for ArgMatcher<'a> { + fn into(self) -> ArgMatches<'a> { self.0 } +} diff --git a/clap/src/args/arg_matches.rs b/clap/src/args/arg_matches.rs new file mode 100644 index 000000000..7ac112877 --- /dev/null +++ 
b/clap/src/args/arg_matches.rs @@ -0,0 +1,966 @@ +// Std +use std::borrow::Cow; +use std::collections::HashMap; +use std::ffi::{OsStr, OsString}; +use std::iter::Map; +use std::slice::Iter; + +// Internal +use INVALID_UTF8; +use args::MatchedArg; +use args::SubCommand; + +/// Used to get information about the arguments that where supplied to the program at runtime by +/// the user. New instances of this struct are obtained by using the [`App::get_matches`] family of +/// methods. +/// +/// # Examples +/// +/// ```no_run +/// # use clap::{App, Arg}; +/// let matches = App::new("MyApp") +/// .arg(Arg::with_name("out") +/// .long("output") +/// .required(true) +/// .takes_value(true)) +/// .arg(Arg::with_name("debug") +/// .short("d") +/// .multiple(true)) +/// .arg(Arg::with_name("cfg") +/// .short("c") +/// .takes_value(true)) +/// .get_matches(); // builds the instance of ArgMatches +/// +/// // to get information about the "cfg" argument we created, such as the value supplied we use +/// // various ArgMatches methods, such as ArgMatches::value_of +/// if let Some(c) = matches.value_of("cfg") { +/// println!("Value for -c: {}", c); +/// } +/// +/// // The ArgMatches::value_of method returns an Option because the user may not have supplied +/// // that argument at runtime. But if we specified that the argument was "required" as we did +/// // with the "out" argument, we can safely unwrap because `clap` verifies that was actually +/// // used at runtime. +/// println!("Value for --output: {}", matches.value_of("out").unwrap()); +/// +/// // You can check the presence of an argument +/// if matches.is_present("out") { +/// // Another way to check if an argument was present, or if it occurred multiple times is to +/// // use occurrences_of() which returns 0 if an argument isn't found at runtime, or the +/// // number of times that it occurred, if it was. To allow an argument to appear more than +/// // once, you must use the .multiple(true) method, otherwise it will only return 1 or 0. +/// if matches.occurrences_of("debug") > 2 { +/// println!("Debug mode is REALLY on, don't be crazy"); +/// } else { +/// println!("Debug mode kind of on"); +/// } +/// } +/// ``` +/// [`App::get_matches`]: ./struct.App.html#method.get_matches +#[derive(Debug, Clone)] +pub struct ArgMatches<'a> { + #[doc(hidden)] pub args: HashMap<&'a str, MatchedArg>, + #[doc(hidden)] pub subcommand: Option>>, + #[doc(hidden)] pub usage: Option, +} + +impl<'a> Default for ArgMatches<'a> { + fn default() -> Self { + ArgMatches { + args: HashMap::new(), + subcommand: None, + usage: None, + } + } +} + +impl<'a> ArgMatches<'a> { + #[doc(hidden)] + pub fn new() -> Self { + ArgMatches { + ..Default::default() + } + } + + /// Gets the value of a specific [option] or [positional] argument (i.e. an argument that takes + /// an additional value at runtime). If the option wasn't present at runtime + /// it returns `None`. + /// + /// *NOTE:* If getting a value for an option or positional argument that allows multiples, + /// prefer [`ArgMatches::values_of`] as `ArgMatches::value_of` will only return the *first* + /// value. + /// + /// # Panics + /// + /// This method will [`panic!`] if the value contains invalid UTF-8 code points. 
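+ ///
+ /// *NOTE:* If the value may not be valid UTF-8, prefer `ArgMatches::value_of_os` or
+ /// `ArgMatches::value_of_lossy`, which are documented further down in this file.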
+ /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("myapp") + /// .arg(Arg::with_name("output") + /// .takes_value(true)) + /// .get_matches_from(vec!["myapp", "something"]); + /// + /// assert_eq!(m.value_of("output"), Some("something")); + /// ``` + /// [option]: ./struct.Arg.html#method.takes_value + /// [positional]: ./struct.Arg.html#method.index + /// [`ArgMatches::values_of`]: ./struct.ArgMatches.html#method.values_of + /// [`panic!`]: https://doc.rust-lang.org/std/macro.panic!.html + pub fn value_of>(&self, name: S) -> Option<&str> { + if let Some(arg) = self.args.get(name.as_ref()) { + if let Some(v) = arg.vals.get(0) { + return Some(v.to_str().expect(INVALID_UTF8)); + } + } + None + } + + /// Gets the lossy value of a specific argument. If the argument wasn't present at runtime + /// it returns `None`. A lossy value is one which contains invalid UTF-8 code points, those + /// invalid points will be replaced with `\u{FFFD}` + /// + /// *NOTE:* If getting a value for an option or positional argument that allows multiples, + /// prefer [`Arg::values_of_lossy`] as `value_of_lossy()` will only return the *first* value. + /// + /// # Examples + /// + #[cfg_attr(not(unix), doc = " ```ignore")] + #[cfg_attr(unix, doc = " ```")] + /// # use clap::{App, Arg}; + /// use std::ffi::OsString; + /// use std::os::unix::ffi::{OsStrExt,OsStringExt}; + /// + /// let m = App::new("utf8") + /// .arg(Arg::from_usage(" 'some arg'")) + /// .get_matches_from(vec![OsString::from("myprog"), + /// // "Hi {0xe9}!" + /// OsString::from_vec(vec![b'H', b'i', b' ', 0xe9, b'!'])]); + /// assert_eq!(&*m.value_of_lossy("arg").unwrap(), "Hi \u{FFFD}!"); + /// ``` + /// [`Arg::values_of_lossy`]: ./struct.ArgMatches.html#method.values_of_lossy + pub fn value_of_lossy>(&'a self, name: S) -> Option> { + if let Some(arg) = self.args.get(name.as_ref()) { + if let Some(v) = arg.vals.get(0) { + return Some(v.to_string_lossy()); + } + } + None + } + + /// Gets the OS version of a string value of a specific argument. If the option wasn't present + /// at runtime it returns `None`. An OS value on Unix-like systems is any series of bytes, + /// regardless of whether or not they contain valid UTF-8 code points. Since [`String`]s in + /// Rust are guaranteed to be valid UTF-8, a valid filename on a Unix system as an argument + /// value may contain invalid UTF-8 code points. + /// + /// *NOTE:* If getting a value for an option or positional argument that allows multiples, + /// prefer [`ArgMatches::values_of_os`] as `Arg::value_of_os` will only return the *first* + /// value. + /// + /// # Examples + /// + #[cfg_attr(not(unix), doc = " ```ignore")] + #[cfg_attr(unix, doc = " ```")] + /// # use clap::{App, Arg}; + /// use std::ffi::OsString; + /// use std::os::unix::ffi::{OsStrExt,OsStringExt}; + /// + /// let m = App::new("utf8") + /// .arg(Arg::from_usage(" 'some arg'")) + /// .get_matches_from(vec![OsString::from("myprog"), + /// // "Hi {0xe9}!" 
+ /// OsString::from_vec(vec![b'H', b'i', b' ', 0xe9, b'!'])]); + /// assert_eq!(&*m.value_of_os("arg").unwrap().as_bytes(), [b'H', b'i', b' ', 0xe9, b'!']); + /// ``` + /// [`String`]: https://doc.rust-lang.org/std/string/struct.String.html + /// [`ArgMatches::values_of_os`]: ./struct.ArgMatches.html#method.values_of_os + pub fn value_of_os>(&self, name: S) -> Option<&OsStr> { + self.args + .get(name.as_ref()) + .and_then(|arg| arg.vals.get(0).map(|v| v.as_os_str())) + } + + /// Gets a [`Values`] struct which implements [`Iterator`] for values of a specific argument + /// (i.e. an argument that takes multiple values at runtime). If the option wasn't present at + /// runtime it returns `None` + /// + /// # Panics + /// + /// This method will panic if any of the values contain invalid UTF-8 code points. + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("myprog") + /// .arg(Arg::with_name("output") + /// .multiple(true) + /// .short("o") + /// .takes_value(true)) + /// .get_matches_from(vec![ + /// "myprog", "-o", "val1", "val2", "val3" + /// ]); + /// let vals: Vec<&str> = m.values_of("output").unwrap().collect(); + /// assert_eq!(vals, ["val1", "val2", "val3"]); + /// ``` + /// [`Values`]: ./struct.Values.html + /// [`Iterator`]: https://doc.rust-lang.org/std/iter/trait.Iterator.html + pub fn values_of>(&'a self, name: S) -> Option> { + if let Some(arg) = self.args.get(name.as_ref()) { + fn to_str_slice(o: &OsString) -> &str { o.to_str().expect(INVALID_UTF8) } + let to_str_slice: fn(&OsString) -> &str = to_str_slice; // coerce to fn pointer + return Some(Values { + iter: arg.vals.iter().map(to_str_slice), + }); + } + None + } + + /// Gets the lossy values of a specific argument. If the option wasn't present at runtime + /// it returns `None`. A lossy value is one where if it contains invalid UTF-8 code points, + /// those invalid points will be replaced with `\u{FFFD}` + /// + /// # Examples + /// + #[cfg_attr(not(unix), doc = " ```ignore")] + #[cfg_attr(unix, doc = " ```")] + /// # use clap::{App, Arg}; + /// use std::ffi::OsString; + /// use std::os::unix::ffi::OsStringExt; + /// + /// let m = App::new("utf8") + /// .arg(Arg::from_usage("... 'some arg'")) + /// .get_matches_from(vec![OsString::from("myprog"), + /// // "Hi" + /// OsString::from_vec(vec![b'H', b'i']), + /// // "{0xe9}!" + /// OsString::from_vec(vec![0xe9, b'!'])]); + /// let mut itr = m.values_of_lossy("arg").unwrap().into_iter(); + /// assert_eq!(&itr.next().unwrap()[..], "Hi"); + /// assert_eq!(&itr.next().unwrap()[..], "\u{FFFD}!"); + /// assert_eq!(itr.next(), None); + /// ``` + pub fn values_of_lossy>(&'a self, name: S) -> Option> { + if let Some(arg) = self.args.get(name.as_ref()) { + return Some( + arg.vals + .iter() + .map(|v| v.to_string_lossy().into_owned()) + .collect(), + ); + } + None + } + + /// Gets a [`OsValues`] struct which is implements [`Iterator`] for [`OsString`] values of a + /// specific argument. If the option wasn't present at runtime it returns `None`. An OS value + /// on Unix-like systems is any series of bytes, regardless of whether or not they contain + /// valid UTF-8 code points. Since [`String`]s in Rust are guaranteed to be valid UTF-8, a valid + /// filename as an argument value on Linux (for example) may contain invalid UTF-8 code points. 
+ /// + /// # Examples + /// + #[cfg_attr(not(unix), doc = " ```ignore")] + #[cfg_attr(unix, doc = " ```")] + /// # use clap::{App, Arg}; + /// use std::ffi::{OsStr,OsString}; + /// use std::os::unix::ffi::{OsStrExt,OsStringExt}; + /// + /// let m = App::new("utf8") + /// .arg(Arg::from_usage("... 'some arg'")) + /// .get_matches_from(vec![OsString::from("myprog"), + /// // "Hi" + /// OsString::from_vec(vec![b'H', b'i']), + /// // "{0xe9}!" + /// OsString::from_vec(vec![0xe9, b'!'])]); + /// + /// let mut itr = m.values_of_os("arg").unwrap().into_iter(); + /// assert_eq!(itr.next(), Some(OsStr::new("Hi"))); + /// assert_eq!(itr.next(), Some(OsStr::from_bytes(&[0xe9, b'!']))); + /// assert_eq!(itr.next(), None); + /// ``` + /// [`OsValues`]: ./struct.OsValues.html + /// [`Iterator`]: https://doc.rust-lang.org/std/iter/trait.Iterator.html + /// [`OsString`]: https://doc.rust-lang.org/std/ffi/struct.OsString.html + /// [`String`]: https://doc.rust-lang.org/std/string/struct.String.html + pub fn values_of_os>(&'a self, name: S) -> Option> { + fn to_str_slice(o: &OsString) -> &OsStr { &*o } + let to_str_slice: fn(&'a OsString) -> &'a OsStr = to_str_slice; // coerce to fn pointer + if let Some(arg) = self.args.get(name.as_ref()) { + return Some(OsValues { + iter: arg.vals.iter().map(to_str_slice), + }); + } + None + } + + /// Returns `true` if an argument was present at runtime, otherwise `false`. + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("myprog") + /// .arg(Arg::with_name("debug") + /// .short("d")) + /// .get_matches_from(vec![ + /// "myprog", "-d" + /// ]); + /// + /// assert!(m.is_present("debug")); + /// ``` + pub fn is_present>(&self, name: S) -> bool { + if let Some(ref sc) = self.subcommand { + if sc.name == name.as_ref() { + return true; + } + } + self.args.contains_key(name.as_ref()) + } + + /// Returns the number of times an argument was used at runtime. If an argument isn't present + /// it will return `0`. + /// + /// **NOTE:** This returns the number of times the argument was used, *not* the number of + /// values. For example, `-o val1 val2 val3 -o val4` would return `2` (2 occurrences, but 4 + /// values). + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("myprog") + /// .arg(Arg::with_name("debug") + /// .short("d") + /// .multiple(true)) + /// .get_matches_from(vec![ + /// "myprog", "-d", "-d", "-d" + /// ]); + /// + /// assert_eq!(m.occurrences_of("debug"), 3); + /// ``` + /// + /// This next example shows that counts actual uses of the argument, not just `-`'s + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("myprog") + /// .arg(Arg::with_name("debug") + /// .short("d") + /// .multiple(true)) + /// .arg(Arg::with_name("flag") + /// .short("f")) + /// .get_matches_from(vec![ + /// "myprog", "-ddfd" + /// ]); + /// + /// assert_eq!(m.occurrences_of("debug"), 3); + /// assert_eq!(m.occurrences_of("flag"), 1); + /// ``` + pub fn occurrences_of>(&self, name: S) -> u64 { + self.args.get(name.as_ref()).map_or(0, |a| a.occurs) + } + + /// Gets the starting index of the argument in respect to all other arguments. Indices are + /// similar to argv indices, but are not exactly 1:1. + /// + /// For flags (i.e. those arguments which don't have an associated value), indices refer + /// to occurrence of the switch, such as `-f`, or `--flag`. 
However, for options the indices + /// refer to the *values* `-o val` would therefore not represent two distinct indices, only the + /// index for `val` would be recorded. This is by design. + /// + /// Besides the flag/option descrepancy, the primary difference between an argv index and clap + /// index, is that clap continues counting once all arguments have properly seperated, whereas + /// an argv index does not. + /// + /// The examples should clear this up. + /// + /// *NOTE:* If an argument is allowed multiple times, this method will only give the *first* + /// index. + /// + /// # Examples + /// + /// The argv indices are listed in the comments below. See how they correspond to the clap + /// indices. Note that if it's not listed in a clap index, this is becuase it's not saved in + /// in an `ArgMatches` struct for querying. + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("myapp") + /// .arg(Arg::with_name("flag") + /// .short("f")) + /// .arg(Arg::with_name("option") + /// .short("o") + /// .takes_value(true)) + /// .get_matches_from(vec!["myapp", "-f", "-o", "val"]); + /// // ARGV idices: ^0 ^1 ^2 ^3 + /// // clap idices: ^1 ^3 + /// + /// assert_eq!(m.index_of("flag"), Some(1)); + /// assert_eq!(m.index_of("option"), Some(3)); + /// ``` + /// + /// Now notice, if we use one of the other styles of options: + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("myapp") + /// .arg(Arg::with_name("flag") + /// .short("f")) + /// .arg(Arg::with_name("option") + /// .short("o") + /// .takes_value(true)) + /// .get_matches_from(vec!["myapp", "-f", "-o=val"]); + /// // ARGV idices: ^0 ^1 ^2 + /// // clap idices: ^1 ^3 + /// + /// assert_eq!(m.index_of("flag"), Some(1)); + /// assert_eq!(m.index_of("option"), Some(3)); + /// ``` + /// + /// Things become much more complicated, or clear if we look at a more complex combination of + /// flags. Let's also throw in the final option style for good measure. 
+ /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("myapp") + /// .arg(Arg::with_name("flag") + /// .short("f")) + /// .arg(Arg::with_name("flag2") + /// .short("F")) + /// .arg(Arg::with_name("flag3") + /// .short("z")) + /// .arg(Arg::with_name("option") + /// .short("o") + /// .takes_value(true)) + /// .get_matches_from(vec!["myapp", "-fzF", "-oval"]); + /// // ARGV idices: ^0 ^1 ^2 + /// // clap idices: ^1,2,3 ^5 + /// // + /// // clap sees the above as 'myapp -f -z -F -o val' + /// // ^0 ^1 ^2 ^3 ^4 ^5 + /// assert_eq!(m.index_of("flag"), Some(1)); + /// assert_eq!(m.index_of("flag2"), Some(3)); + /// assert_eq!(m.index_of("flag3"), Some(2)); + /// assert_eq!(m.index_of("option"), Some(5)); + /// ``` + /// + /// One final combination of flags/options to see how they combine: + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("myapp") + /// .arg(Arg::with_name("flag") + /// .short("f")) + /// .arg(Arg::with_name("flag2") + /// .short("F")) + /// .arg(Arg::with_name("flag3") + /// .short("z")) + /// .arg(Arg::with_name("option") + /// .short("o") + /// .takes_value(true) + /// .multiple(true)) + /// .get_matches_from(vec!["myapp", "-fzFoval"]); + /// // ARGV idices: ^0 ^1 + /// // clap idices: ^1,2,3^5 + /// // + /// // clap sees the above as 'myapp -f -z -F -o val' + /// // ^0 ^1 ^2 ^3 ^4 ^5 + /// assert_eq!(m.index_of("flag"), Some(1)); + /// assert_eq!(m.index_of("flag2"), Some(3)); + /// assert_eq!(m.index_of("flag3"), Some(2)); + /// assert_eq!(m.index_of("option"), Some(5)); + /// ``` + /// + /// The last part to mention is when values are sent in multiple groups with a [delimiter]. + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("myapp") + /// .arg(Arg::with_name("option") + /// .short("o") + /// .takes_value(true) + /// .multiple(true)) + /// .get_matches_from(vec!["myapp", "-o=val1,val2,val3"]); + /// // ARGV idices: ^0 ^1 + /// // clap idices: ^2 ^3 ^4 + /// // + /// // clap sees the above as 'myapp -o val1 val2 val3' + /// // ^0 ^1 ^2 ^3 ^4 + /// assert_eq!(m.index_of("option"), Some(2)); + /// ``` + /// [`ArgMatches`]: ./struct.ArgMatches.html + /// [delimiter]: ./struct.Arg.html#method.value_delimiter + pub fn index_of>(&self, name: S) -> Option { + if let Some(arg) = self.args.get(name.as_ref()) { + if let Some(i) = arg.indices.get(0) { + return Some(*i); + } + } + None + } + + /// Gets all indices of the argument in respect to all other arguments. Indices are + /// similar to argv indices, but are not exactly 1:1. + /// + /// For flags (i.e. those arguments which don't have an associated value), indices refer + /// to occurrence of the switch, such as `-f`, or `--flag`. However, for options the indices + /// refer to the *values* `-o val` would therefore not represent two distinct indices, only the + /// index for `val` would be recorded. This is by design. 
+ /// + /// *NOTE:* For more information about how clap indices compare to argv indices, see + /// [`ArgMatches::index_of`] + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("myapp") + /// .arg(Arg::with_name("option") + /// .short("o") + /// .takes_value(true) + /// .use_delimiter(true) + /// .multiple(true)) + /// .get_matches_from(vec!["myapp", "-o=val1,val2,val3"]); + /// // ARGV idices: ^0 ^1 + /// // clap idices: ^2 ^3 ^4 + /// // + /// // clap sees the above as 'myapp -o val1 val2 val3' + /// // ^0 ^1 ^2 ^3 ^4 + /// assert_eq!(m.indices_of("option").unwrap().collect::>(), &[2, 3, 4]); + /// ``` + /// + /// Another quick example is when flags and options are used together + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("myapp") + /// .arg(Arg::with_name("option") + /// .short("o") + /// .takes_value(true) + /// .multiple(true)) + /// .arg(Arg::with_name("flag") + /// .short("f") + /// .multiple(true)) + /// .get_matches_from(vec!["myapp", "-o", "val1", "-f", "-o", "val2", "-f"]); + /// // ARGV idices: ^0 ^1 ^2 ^3 ^4 ^5 ^6 + /// // clap idices: ^2 ^3 ^5 ^6 + /// + /// assert_eq!(m.indices_of("option").unwrap().collect::>(), &[2, 5]); + /// assert_eq!(m.indices_of("flag").unwrap().collect::>(), &[3, 6]); + /// ``` + /// + /// One final example, which is an odd case; if we *don't* use value delimiter as we did with + /// the first example above instead of `val1`, `val2` and `val3` all being distinc values, they + /// would all be a single value of `val1,val2,val3`, in which case case they'd only receive a + /// single index. + /// + /// ```rust + /// # use clap::{App, Arg}; + /// let m = App::new("myapp") + /// .arg(Arg::with_name("option") + /// .short("o") + /// .takes_value(true) + /// .multiple(true)) + /// .get_matches_from(vec!["myapp", "-o=val1,val2,val3"]); + /// // ARGV idices: ^0 ^1 + /// // clap idices: ^2 + /// // + /// // clap sees the above as 'myapp -o "val1,val2,val3"' + /// // ^0 ^1 ^2 + /// assert_eq!(m.indices_of("option").unwrap().collect::>(), &[2]); + /// ``` + /// [`ArgMatches`]: ./struct.ArgMatches.html + /// [`ArgMatches::index_of`]: ./struct.ArgMatches.html#method.index_of + /// [delimiter]: ./struct.Arg.html#method.value_delimiter + pub fn indices_of>(&'a self, name: S) -> Option> { + if let Some(arg) = self.args.get(name.as_ref()) { + fn to_usize(i: &usize) -> usize { *i } + let to_usize: fn(&usize) -> usize = to_usize; // coerce to fn pointer + return Some(Indices { + iter: arg.indices.iter().map(to_usize), + }); + } + None + } + + /// Because [`Subcommand`]s are essentially "sub-[`App`]s" they have their own [`ArgMatches`] + /// as well. This method returns the [`ArgMatches`] for a particular subcommand or `None` if + /// the subcommand wasn't present at runtime. 
+ /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg, SubCommand}; + /// let app_m = App::new("myprog") + /// .arg(Arg::with_name("debug") + /// .short("d")) + /// .subcommand(SubCommand::with_name("test") + /// .arg(Arg::with_name("opt") + /// .long("option") + /// .takes_value(true))) + /// .get_matches_from(vec![ + /// "myprog", "-d", "test", "--option", "val" + /// ]); + /// + /// // Both parent commands, and child subcommands can have arguments present at the same times + /// assert!(app_m.is_present("debug")); + /// + /// // Get the subcommand's ArgMatches instance + /// if let Some(sub_m) = app_m.subcommand_matches("test") { + /// // Use the struct like normal + /// assert_eq!(sub_m.value_of("opt"), Some("val")); + /// } + /// ``` + /// [`Subcommand`]: ./struct.SubCommand.html + /// [`App`]: ./struct.App.html + /// [`ArgMatches`]: ./struct.ArgMatches.html + pub fn subcommand_matches>(&self, name: S) -> Option<&ArgMatches<'a>> { + if let Some(ref s) = self.subcommand { + if s.name == name.as_ref() { + return Some(&s.matches); + } + } + None + } + + /// Because [`Subcommand`]s are essentially "sub-[`App`]s" they have their own [`ArgMatches`] + /// as well.But simply getting the sub-[`ArgMatches`] doesn't help much if we don't also know + /// which subcommand was actually used. This method returns the name of the subcommand that was + /// used at runtime, or `None` if one wasn't. + /// + /// *NOTE*: Subcommands form a hierarchy, where multiple subcommands can be used at runtime, + /// but only a single subcommand from any group of sibling commands may used at once. + /// + /// An ASCII art depiction may help explain this better...Using a fictional version of `git` as + /// the demo subject. Imagine the following are all subcommands of `git` (note, the author is + /// aware these aren't actually all subcommands in the real `git` interface, but it makes + /// explanation easier) + /// + /// ```notrust + /// Top Level App (git) TOP + /// | + /// ----------------------------------------- + /// / | \ \ + /// clone push add commit LEVEL 1 + /// | / \ / \ | + /// url origin remote ref name message LEVEL 2 + /// / /\ + /// path remote local LEVEL 3 + /// ``` + /// + /// Given the above fictional subcommand hierarchy, valid runtime uses would be (not an all + /// inclusive list, and not including argument options per command for brevity and clarity): + /// + /// ```sh + /// $ git clone url + /// $ git push origin path + /// $ git add ref local + /// $ git commit message + /// ``` + /// + /// Notice only one command per "level" may be used. You could not, for example, do `$ git + /// clone url push origin path` + /// + /// # Examples + /// + /// ```no_run + /// # use clap::{App, Arg, SubCommand}; + /// let app_m = App::new("git") + /// .subcommand(SubCommand::with_name("clone")) + /// .subcommand(SubCommand::with_name("push")) + /// .subcommand(SubCommand::with_name("commit")) + /// .get_matches(); + /// + /// match app_m.subcommand_name() { + /// Some("clone") => {}, // clone was used + /// Some("push") => {}, // push was used + /// Some("commit") => {}, // commit was used + /// _ => {}, // Either no subcommand or one not tested for... 
+ /// } + /// ``` + /// [`Subcommand`]: ./struct.SubCommand.html + /// [`App`]: ./struct.App.html + /// [`ArgMatches`]: ./struct.ArgMatches.html + pub fn subcommand_name(&self) -> Option<&str> { + self.subcommand.as_ref().map(|sc| &sc.name[..]) + } + + /// This brings together [`ArgMatches::subcommand_matches`] and [`ArgMatches::subcommand_name`] + /// by returning a tuple with both pieces of information. + /// + /// # Examples + /// + /// ```no_run + /// # use clap::{App, Arg, SubCommand}; + /// let app_m = App::new("git") + /// .subcommand(SubCommand::with_name("clone")) + /// .subcommand(SubCommand::with_name("push")) + /// .subcommand(SubCommand::with_name("commit")) + /// .get_matches(); + /// + /// match app_m.subcommand() { + /// ("clone", Some(sub_m)) => {}, // clone was used + /// ("push", Some(sub_m)) => {}, // push was used + /// ("commit", Some(sub_m)) => {}, // commit was used + /// _ => {}, // Either no subcommand or one not tested for... + /// } + /// ``` + /// + /// Another useful scenario is when you want to support third party, or external, subcommands. + /// In these cases you can't know the subcommand name ahead of time, so use a variable instead + /// with pattern matching! + /// + /// ```rust + /// # use clap::{App, AppSettings}; + /// // Assume there is an external subcommand named "subcmd" + /// let app_m = App::new("myprog") + /// .setting(AppSettings::AllowExternalSubcommands) + /// .get_matches_from(vec![ + /// "myprog", "subcmd", "--option", "value", "-fff", "--flag" + /// ]); + /// + /// // All trailing arguments will be stored under the subcommand's sub-matches using an empty + /// // string argument name + /// match app_m.subcommand() { + /// (external, Some(sub_m)) => { + /// let ext_args: Vec<&str> = sub_m.values_of("").unwrap().collect(); + /// assert_eq!(external, "subcmd"); + /// assert_eq!(ext_args, ["--option", "value", "-fff", "--flag"]); + /// }, + /// _ => {}, + /// } + /// ``` + /// [`ArgMatches::subcommand_matches`]: ./struct.ArgMatches.html#method.subcommand_matches + /// [`ArgMatches::subcommand_name`]: ./struct.ArgMatches.html#method.subcommand_name + pub fn subcommand(&self) -> (&str, Option<&ArgMatches<'a>>) { + self.subcommand + .as_ref() + .map_or(("", None), |sc| (&sc.name[..], Some(&sc.matches))) + } + + /// Returns a string slice of the usage statement for the [`App`] or [`SubCommand`] + /// + /// # Examples + /// + /// ```no_run + /// # use clap::{App, Arg, SubCommand}; + /// let app_m = App::new("myprog") + /// .subcommand(SubCommand::with_name("test")) + /// .get_matches(); + /// + /// println!("{}", app_m.usage()); + /// ``` + /// [`Subcommand`]: ./struct.SubCommand.html + /// [`App`]: ./struct.App.html + pub fn usage(&self) -> &str { self.usage.as_ref().map_or("", |u| &u[..]) } +} + + +// The following were taken and adapated from vec_map source +// repo: https://github.com/contain-rs/vec-map +// commit: be5e1fa3c26e351761b33010ddbdaf5f05dbcc33 +// license: MIT - Copyright (c) 2015 The Rust Project Developers + +/// An iterator for getting multiple values out of an argument via the [`ArgMatches::values_of`] +/// method. 
+/// +/// # Examples +/// +/// ```rust +/// # use clap::{App, Arg}; +/// let m = App::new("myapp") +/// .arg(Arg::with_name("output") +/// .short("o") +/// .multiple(true) +/// .takes_value(true)) +/// .get_matches_from(vec!["myapp", "-o", "val1", "val2"]); +/// +/// let mut values = m.values_of("output").unwrap(); +/// +/// assert_eq!(values.next(), Some("val1")); +/// assert_eq!(values.next(), Some("val2")); +/// assert_eq!(values.next(), None); +/// ``` +/// [`ArgMatches::values_of`]: ./struct.ArgMatches.html#method.values_of +#[derive(Clone)] +#[allow(missing_debug_implementations)] +pub struct Values<'a> { + iter: Map, fn(&'a OsString) -> &'a str>, +} + +impl<'a> Iterator for Values<'a> { + type Item = &'a str; + + fn next(&mut self) -> Option<&'a str> { self.iter.next() } + fn size_hint(&self) -> (usize, Option) { self.iter.size_hint() } +} + +impl<'a> DoubleEndedIterator for Values<'a> { + fn next_back(&mut self) -> Option<&'a str> { self.iter.next_back() } +} + +impl<'a> ExactSizeIterator for Values<'a> {} + +/// Creates an empty iterator. +impl<'a> Default for Values<'a> { + fn default() -> Self { + static EMPTY: [OsString; 0] = []; + // This is never called because the iterator is empty: + fn to_str_slice(_: &OsString) -> &str { unreachable!() }; + Values { + iter: EMPTY[..].iter().map(to_str_slice), + } + } +} + +/// An iterator for getting multiple values out of an argument via the [`ArgMatches::values_of_os`] +/// method. Usage of this iterator allows values which contain invalid UTF-8 code points unlike +/// [`Values`]. +/// +/// # Examples +/// +#[cfg_attr(not(unix), doc = " ```ignore")] +#[cfg_attr(unix, doc = " ```")] +/// # use clap::{App, Arg}; +/// use std::ffi::OsString; +/// use std::os::unix::ffi::{OsStrExt,OsStringExt}; +/// +/// let m = App::new("utf8") +/// .arg(Arg::from_usage(" 'some arg'")) +/// .get_matches_from(vec![OsString::from("myprog"), +/// // "Hi {0xe9}!" +/// OsString::from_vec(vec![b'H', b'i', b' ', 0xe9, b'!'])]); +/// assert_eq!(&*m.value_of_os("arg").unwrap().as_bytes(), [b'H', b'i', b' ', 0xe9, b'!']); +/// ``` +/// [`ArgMatches::values_of_os`]: ./struct.ArgMatches.html#method.values_of_os +/// [`Values`]: ./struct.Values.html +#[derive(Clone)] +#[allow(missing_debug_implementations)] +pub struct OsValues<'a> { + iter: Map, fn(&'a OsString) -> &'a OsStr>, +} + +impl<'a> Iterator for OsValues<'a> { + type Item = &'a OsStr; + + fn next(&mut self) -> Option<&'a OsStr> { self.iter.next() } + fn size_hint(&self) -> (usize, Option) { self.iter.size_hint() } +} + +impl<'a> DoubleEndedIterator for OsValues<'a> { + fn next_back(&mut self) -> Option<&'a OsStr> { self.iter.next_back() } +} + +impl<'a> ExactSizeIterator for OsValues<'a> {} + +/// Creates an empty iterator. +impl<'a> Default for OsValues<'a> { + fn default() -> Self { + static EMPTY: [OsString; 0] = []; + // This is never called because the iterator is empty: + fn to_str_slice(_: &OsString) -> &OsStr { unreachable!() }; + OsValues { + iter: EMPTY[..].iter().map(to_str_slice), + } + } +} + +/// An iterator for getting multiple indices out of an argument via the [`ArgMatches::indices_of`] +/// method. 
+/// +/// # Examples +/// +/// ```rust +/// # use clap::{App, Arg}; +/// let m = App::new("myapp") +/// .arg(Arg::with_name("output") +/// .short("o") +/// .multiple(true) +/// .takes_value(true)) +/// .get_matches_from(vec!["myapp", "-o", "val1", "val2"]); +/// +/// let mut indices = m.indices_of("output").unwrap(); +/// +/// assert_eq!(indices.next(), Some(2)); +/// assert_eq!(indices.next(), Some(3)); +/// assert_eq!(indices.next(), None); +/// ``` +/// [`ArgMatches::indices_of`]: ./struct.ArgMatches.html#method.indices_of +#[derive(Clone)] +#[allow(missing_debug_implementations)] +pub struct Indices<'a> { // would rather use '_, but: https://github.com/rust-lang/rust/issues/48469 + iter: Map, fn(&'a usize) -> usize>, +} + +impl<'a> Iterator for Indices<'a> { + type Item = usize; + + fn next(&mut self) -> Option { self.iter.next() } + fn size_hint(&self) -> (usize, Option) { self.iter.size_hint() } +} + +impl<'a> DoubleEndedIterator for Indices<'a> { + fn next_back(&mut self) -> Option { self.iter.next_back() } +} + +impl<'a> ExactSizeIterator for Indices<'a> {} + +/// Creates an empty iterator. +impl<'a> Default for Indices<'a> { + fn default() -> Self { + static EMPTY: [usize; 0] = []; + // This is never called because the iterator is empty: + fn to_usize(_: &usize) -> usize { unreachable!() }; + Indices { + iter: EMPTY[..].iter().map(to_usize), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_default_values() { + let mut values: Values = Values::default(); + assert_eq!(values.next(), None); + } + + #[test] + fn test_default_values_with_shorter_lifetime() { + let matches = ArgMatches::new(); + let mut values = matches.values_of("").unwrap_or_default(); + assert_eq!(values.next(), None); + } + + #[test] + fn test_default_osvalues() { + let mut values: OsValues = OsValues::default(); + assert_eq!(values.next(), None); + } + + #[test] + fn test_default_osvalues_with_shorter_lifetime() { + let matches = ArgMatches::new(); + let mut values = matches.values_of_os("").unwrap_or_default(); + assert_eq!(values.next(), None); + } + + #[test] + fn test_default_indices() { + let mut indices: Indices = Indices::default(); + assert_eq!(indices.next(), None); + } + + #[test] + fn test_default_indices_with_shorter_lifetime() { + let matches = ArgMatches::new(); + let mut indices = matches.indices_of("").unwrap_or_default(); + assert_eq!(indices.next(), None); + } +} diff --git a/clap/src/args/group.rs b/clap/src/args/group.rs new file mode 100644 index 000000000..e082b69f4 --- /dev/null +++ b/clap/src/args/group.rs @@ -0,0 +1,635 @@ +#[cfg(feature = "yaml")] +use std::collections::BTreeMap; +use std::fmt::{Debug, Formatter, Result}; + +#[cfg(feature = "yaml")] +use yaml_rust::Yaml; + +/// `ArgGroup`s are a family of related [arguments] and way for you to express, "Any of these +/// arguments". By placing arguments in a logical group, you can create easier requirement and +/// exclusion rules instead of having to list each argument individually, or when you want a rule +/// to apply "any but not all" arguments. +/// +/// For instance, you can make an entire `ArgGroup` required. If [`ArgGroup::multiple(true)`] is +/// set, this means that at least one argument from that group must be present. If +/// [`ArgGroup::multiple(false)`] is set (the default), one and *only* one must be present. 
+/// +/// You can also do things such as name an entire `ArgGroup` as a [conflict] or [requirement] for +/// another argument, meaning any of the arguments that belong to that group will cause a failure +/// if present, or must present respectively. +/// +/// Perhaps the most common use of `ArgGroup`s is to require one and *only* one argument to be +/// present out of a given set. Imagine that you had multiple arguments, and you want one of them +/// to be required, but making all of them required isn't feasible because perhaps they conflict +/// with each other. For example, lets say that you were building an application where one could +/// set a given version number by supplying a string with an option argument, i.e. +/// `--set-ver v1.2.3`, you also wanted to support automatically using a previous version number +/// and simply incrementing one of the three numbers. So you create three flags `--major`, +/// `--minor`, and `--patch`. All of these arguments shouldn't be used at one time but you want to +/// specify that *at least one* of them is used. For this, you can create a group. +/// +/// Finally, you may use `ArgGroup`s to pull a value from a group of arguments when you don't care +/// exactly which argument was actually used at runtime. +/// +/// # Examples +/// +/// The following example demonstrates using an `ArgGroup` to ensure that one, and only one, of +/// the arguments from the specified group is present at runtime. +/// +/// ```rust +/// # use clap::{App, ArgGroup, ErrorKind}; +/// let result = App::new("app") +/// .args_from_usage( +/// "--set-ver [ver] 'set the version manually' +/// --major 'auto increase major' +/// --minor 'auto increase minor' +/// --patch 'auto increase patch'") +/// .group(ArgGroup::with_name("vers") +/// .args(&["set-ver", "major", "minor","patch"]) +/// .required(true)) +/// .get_matches_from_safe(vec!["app", "--major", "--patch"]); +/// // Because we used two args in the group it's an error +/// assert!(result.is_err()); +/// let err = result.unwrap_err(); +/// assert_eq!(err.kind, ErrorKind::ArgumentConflict); +/// ``` +/// This next example shows a passing parse of the same scenario +/// +/// ```rust +/// # use clap::{App, ArgGroup}; +/// let result = App::new("app") +/// .args_from_usage( +/// "--set-ver [ver] 'set the version manually' +/// --major 'auto increase major' +/// --minor 'auto increase minor' +/// --patch 'auto increase patch'") +/// .group(ArgGroup::with_name("vers") +/// .args(&["set-ver", "major", "minor","patch"]) +/// .required(true)) +/// .get_matches_from_safe(vec!["app", "--major"]); +/// assert!(result.is_ok()); +/// let matches = result.unwrap(); +/// // We may not know which of the args was used, so we can test for the group... +/// assert!(matches.is_present("vers")); +/// // we could also alternatively check each arg individually (not shown here) +/// ``` +/// [`ArgGroup::multiple(true)`]: ./struct.ArgGroup.html#method.multiple +/// [arguments]: ./struct.Arg.html +/// [conflict]: ./struct.Arg.html#method.conflicts_with +/// [requirement]: ./struct.Arg.html#method.requires +#[derive(Default)] +pub struct ArgGroup<'a> { + #[doc(hidden)] pub name: &'a str, + #[doc(hidden)] pub args: Vec<&'a str>, + #[doc(hidden)] pub required: bool, + #[doc(hidden)] pub requires: Option>, + #[doc(hidden)] pub conflicts: Option>, + #[doc(hidden)] pub multiple: bool, +} + +impl<'a> ArgGroup<'a> { + /// Creates a new instance of `ArgGroup` using a unique string name. 
The name will be used to + /// get values from the group or refer to the group inside of conflict and requirement rules. + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, ArgGroup}; + /// ArgGroup::with_name("config") + /// # ; + /// ``` + pub fn with_name(n: &'a str) -> Self { + ArgGroup { + name: n, + required: false, + args: vec![], + requires: None, + conflicts: None, + multiple: false, + } + } + + /// Creates a new instance of `ArgGroup` from a .yml (YAML) file. + /// + /// # Examples + /// + /// ```ignore + /// # #[macro_use] + /// # extern crate clap; + /// # use clap::ArgGroup; + /// # fn main() { + /// let yml = load_yaml!("group.yml"); + /// let ag = ArgGroup::from_yaml(yml); + /// # } + /// ``` + #[cfg(feature = "yaml")] + pub fn from_yaml(y: &'a Yaml) -> ArgGroup<'a> { ArgGroup::from(y.as_hash().unwrap()) } + + /// Adds an [argument] to this group by name + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg, ArgGroup}; + /// let m = App::new("myprog") + /// .arg(Arg::with_name("flag") + /// .short("f")) + /// .arg(Arg::with_name("color") + /// .short("c")) + /// .group(ArgGroup::with_name("req_flags") + /// .arg("flag") + /// .arg("color")) + /// .get_matches_from(vec!["myprog", "-f"]); + /// // maybe we don't know which of the two flags was used... + /// assert!(m.is_present("req_flags")); + /// // but we can also check individually if needed + /// assert!(m.is_present("flag")); + /// ``` + /// [argument]: ./struct.Arg.html + #[cfg_attr(feature = "lints", allow(should_assert_eq))] + pub fn arg(mut self, n: &'a str) -> Self { + assert!( + self.name != n, + "ArgGroup '{}' can not have same name as arg inside it", + &*self.name + ); + self.args.push(n); + self + } + + /// Adds multiple [arguments] to this group by name + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg, ArgGroup}; + /// let m = App::new("myprog") + /// .arg(Arg::with_name("flag") + /// .short("f")) + /// .arg(Arg::with_name("color") + /// .short("c")) + /// .group(ArgGroup::with_name("req_flags") + /// .args(&["flag", "color"])) + /// .get_matches_from(vec!["myprog", "-f"]); + /// // maybe we don't know which of the two flags was used... + /// assert!(m.is_present("req_flags")); + /// // but we can also check individually if needed + /// assert!(m.is_present("flag")); + /// ``` + /// [arguments]: ./struct.Arg.html + pub fn args(mut self, ns: &[&'a str]) -> Self { + for n in ns { + self = self.arg(n); + } + self + } + + /// Allows more than one of the ['Arg']s in this group to be used. (Default: `false`) + /// + /// # Examples + /// + /// Notice in this example we use *both* the `-f` and `-c` flags which are both part of the + /// group + /// + /// ```rust + /// # use clap::{App, Arg, ArgGroup}; + /// let m = App::new("myprog") + /// .arg(Arg::with_name("flag") + /// .short("f")) + /// .arg(Arg::with_name("color") + /// .short("c")) + /// .group(ArgGroup::with_name("req_flags") + /// .args(&["flag", "color"]) + /// .multiple(true)) + /// .get_matches_from(vec!["myprog", "-f", "-c"]); + /// // maybe we don't know which of the two flags was used... + /// assert!(m.is_present("req_flags")); + /// ``` + /// In this next example, we show the default behavior (i.e. `multiple(false)) which will throw + /// an error if more than one of the args in the group was used. 
+ /// + /// ```rust + /// # use clap::{App, Arg, ArgGroup, ErrorKind}; + /// let result = App::new("myprog") + /// .arg(Arg::with_name("flag") + /// .short("f")) + /// .arg(Arg::with_name("color") + /// .short("c")) + /// .group(ArgGroup::with_name("req_flags") + /// .args(&["flag", "color"])) + /// .get_matches_from_safe(vec!["myprog", "-f", "-c"]); + /// // Because we used both args in the group it's an error + /// assert!(result.is_err()); + /// let err = result.unwrap_err(); + /// assert_eq!(err.kind, ErrorKind::ArgumentConflict); + /// ``` + /// ['Arg']: ./struct.Arg.html + pub fn multiple(mut self, m: bool) -> Self { + self.multiple = m; + self + } + + /// Sets the group as required or not. A required group will be displayed in the usage string + /// of the application in the format ``. A required `ArgGroup` simply states + /// that one argument from this group *must* be present at runtime (unless + /// conflicting with another argument). + /// + /// **NOTE:** This setting only applies to the current [`App`] / [`SubCommand`], and not + /// globally. + /// + /// **NOTE:** By default, [`ArgGroup::multiple`] is set to `false` which when combined with + /// `ArgGroup::required(true)` states, "One and *only one* arg must be used from this group. + /// Use of more than one arg is an error." Vice setting `ArgGroup::multiple(true)` which + /// states, '*At least* one arg from this group must be used. Using multiple is OK." + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg, ArgGroup, ErrorKind}; + /// let result = App::new("myprog") + /// .arg(Arg::with_name("flag") + /// .short("f")) + /// .arg(Arg::with_name("color") + /// .short("c")) + /// .group(ArgGroup::with_name("req_flags") + /// .args(&["flag", "color"]) + /// .required(true)) + /// .get_matches_from_safe(vec!["myprog"]); + /// // Because we didn't use any of the args in the group, it's an error + /// assert!(result.is_err()); + /// let err = result.unwrap_err(); + /// assert_eq!(err.kind, ErrorKind::MissingRequiredArgument); + /// ``` + /// [`App`]: ./struct.App.html + /// [`SubCommand`]: ./struct.SubCommand.html + /// [`ArgGroup::multiple`]: ./struct.ArgGroup.html#method.multiple + pub fn required(mut self, r: bool) -> Self { + self.required = r; + self + } + + /// Sets the requirement rules of this group. This is not to be confused with a + /// [required group]. Requirement rules function just like [argument requirement rules], you + /// can name other arguments or groups that must be present when any one of the arguments from + /// this group is used. 
+ /// + /// **NOTE:** The name provided may be an argument, or group name + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg, ArgGroup, ErrorKind}; + /// let result = App::new("myprog") + /// .arg(Arg::with_name("flag") + /// .short("f")) + /// .arg(Arg::with_name("color") + /// .short("c")) + /// .arg(Arg::with_name("debug") + /// .short("d")) + /// .group(ArgGroup::with_name("req_flags") + /// .args(&["flag", "color"]) + /// .requires("debug")) + /// .get_matches_from_safe(vec!["myprog", "-c"]); + /// // because we used an arg from the group, and the group requires "-d" to be used, it's an + /// // error + /// assert!(result.is_err()); + /// let err = result.unwrap_err(); + /// assert_eq!(err.kind, ErrorKind::MissingRequiredArgument); + /// ``` + /// [required group]: ./struct.ArgGroup.html#method.required + /// [argument requirement rules]: ./struct.Arg.html#method.requires + pub fn requires(mut self, n: &'a str) -> Self { + if let Some(ref mut reqs) = self.requires { + reqs.push(n); + } else { + self.requires = Some(vec![n]); + } + self + } + + /// Sets the requirement rules of this group. This is not to be confused with a + /// [required group]. Requirement rules function just like [argument requirement rules], you + /// can name other arguments or groups that must be present when one of the arguments from this + /// group is used. + /// + /// **NOTE:** The names provided may be an argument, or group name + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg, ArgGroup, ErrorKind}; + /// let result = App::new("myprog") + /// .arg(Arg::with_name("flag") + /// .short("f")) + /// .arg(Arg::with_name("color") + /// .short("c")) + /// .arg(Arg::with_name("debug") + /// .short("d")) + /// .arg(Arg::with_name("verb") + /// .short("v")) + /// .group(ArgGroup::with_name("req_flags") + /// .args(&["flag", "color"]) + /// .requires_all(&["debug", "verb"])) + /// .get_matches_from_safe(vec!["myprog", "-c", "-d"]); + /// // because we used an arg from the group, and the group requires "-d" and "-v" to be used, + /// // yet we only used "-d" it's an error + /// assert!(result.is_err()); + /// let err = result.unwrap_err(); + /// assert_eq!(err.kind, ErrorKind::MissingRequiredArgument); + /// ``` + /// [required group]: ./struct.ArgGroup.html#method.required + /// [argument requirement rules]: ./struct.Arg.html#method.requires_all + pub fn requires_all(mut self, ns: &[&'a str]) -> Self { + for n in ns { + self = self.requires(n); + } + self + } + + /// Sets the exclusion rules of this group. Exclusion (aka conflict) rules function just like + /// [argument exclusion rules], you can name other arguments or groups that must *not* be + /// present when one of the arguments from this group are used. 
+ /// + /// **NOTE:** The name provided may be an argument, or group name + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg, ArgGroup, ErrorKind}; + /// let result = App::new("myprog") + /// .arg(Arg::with_name("flag") + /// .short("f")) + /// .arg(Arg::with_name("color") + /// .short("c")) + /// .arg(Arg::with_name("debug") + /// .short("d")) + /// .group(ArgGroup::with_name("req_flags") + /// .args(&["flag", "color"]) + /// .conflicts_with("debug")) + /// .get_matches_from_safe(vec!["myprog", "-c", "-d"]); + /// // because we used an arg from the group, and the group conflicts with "-d", it's an error + /// assert!(result.is_err()); + /// let err = result.unwrap_err(); + /// assert_eq!(err.kind, ErrorKind::ArgumentConflict); + /// ``` + /// [argument exclusion rules]: ./struct.Arg.html#method.conflicts_with + pub fn conflicts_with(mut self, n: &'a str) -> Self { + if let Some(ref mut confs) = self.conflicts { + confs.push(n); + } else { + self.conflicts = Some(vec![n]); + } + self + } + + /// Sets the exclusion rules of this group. Exclusion rules function just like + /// [argument exclusion rules], you can name other arguments or groups that must *not* be + /// present when one of the arguments from this group are used. + /// + /// **NOTE:** The names provided may be an argument, or group name + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg, ArgGroup, ErrorKind}; + /// let result = App::new("myprog") + /// .arg(Arg::with_name("flag") + /// .short("f")) + /// .arg(Arg::with_name("color") + /// .short("c")) + /// .arg(Arg::with_name("debug") + /// .short("d")) + /// .arg(Arg::with_name("verb") + /// .short("v")) + /// .group(ArgGroup::with_name("req_flags") + /// .args(&["flag", "color"]) + /// .conflicts_with_all(&["debug", "verb"])) + /// .get_matches_from_safe(vec!["myprog", "-c", "-v"]); + /// // because we used an arg from the group, and the group conflicts with either "-v" or "-d" + /// // it's an error + /// assert!(result.is_err()); + /// let err = result.unwrap_err(); + /// assert_eq!(err.kind, ErrorKind::ArgumentConflict); + /// ``` + /// [argument exclusion rules]: ./struct.Arg.html#method.conflicts_with_all + pub fn conflicts_with_all(mut self, ns: &[&'a str]) -> Self { + for n in ns { + self = self.conflicts_with(n); + } + self + } +} + +impl<'a> Debug for ArgGroup<'a> { + fn fmt(&self, f: &mut Formatter) -> Result { + write!( + f, + "{{\n\ + \tname: {:?},\n\ + \targs: {:?},\n\ + \trequired: {:?},\n\ + \trequires: {:?},\n\ + \tconflicts: {:?},\n\ + }}", + self.name, + self.args, + self.required, + self.requires, + self.conflicts + ) + } +} + +impl<'a, 'z> From<&'z ArgGroup<'a>> for ArgGroup<'a> { + fn from(g: &'z ArgGroup<'a>) -> Self { + ArgGroup { + name: g.name, + required: g.required, + args: g.args.clone(), + requires: g.requires.clone(), + conflicts: g.conflicts.clone(), + multiple: g.multiple, + } + } +} + +#[cfg(feature = "yaml")] +impl<'a> From<&'a BTreeMap> for ArgGroup<'a> { + fn from(b: &'a BTreeMap) -> Self { + // We WANT this to panic on error...so expect() is good. 
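+        //
+        // To make the branch below easier to follow, here is a hedged sketch of the
+        // two YAML shapes this constructor accepts; the group and argument names are
+        // illustrative only, not taken from clap's documentation:
+        //
+        //     req_flags:              # single-key form: the key becomes the group name
+        //         args: [flag, color]
+        //         required: true
+        //
+        //     name: req_flags         # flat form: the name is read from a "name" entry
+        //     args: [flag, color]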
+ let mut a = ArgGroup::default(); + let group_settings = if b.len() == 1 { + let name_yml = b.keys().nth(0).expect("failed to get name"); + let name_str = name_yml + .as_str() + .expect("failed to convert arg YAML name to str"); + a.name = name_str; + b.get(name_yml) + .expect("failed to get name_str") + .as_hash() + .expect("failed to convert to a hash") + } else { + b + }; + + for (k, v) in group_settings { + a = match k.as_str().unwrap() { + "required" => a.required(v.as_bool().unwrap()), + "multiple" => a.multiple(v.as_bool().unwrap()), + "args" => yaml_vec_or_str!(v, a, arg), + "arg" => { + if let Some(ys) = v.as_str() { + a = a.arg(ys); + } + a + } + "requires" => yaml_vec_or_str!(v, a, requires), + "conflicts_with" => yaml_vec_or_str!(v, a, conflicts_with), + "name" => { + if let Some(ys) = v.as_str() { + a.name = ys; + } + a + } + s => panic!( + "Unknown ArgGroup setting '{}' in YAML file for \ + ArgGroup '{}'", + s, + a.name + ), + } + } + + a + } +} + +#[cfg(test)] +mod test { + use super::ArgGroup; + #[cfg(feature = "yaml")] + use yaml_rust::YamlLoader; + + #[test] + fn groups() { + let g = ArgGroup::with_name("test") + .arg("a1") + .arg("a4") + .args(&["a2", "a3"]) + .required(true) + .conflicts_with("c1") + .conflicts_with_all(&["c2", "c3"]) + .conflicts_with("c4") + .requires("r1") + .requires_all(&["r2", "r3"]) + .requires("r4"); + + let args = vec!["a1", "a4", "a2", "a3"]; + let reqs = vec!["r1", "r2", "r3", "r4"]; + let confs = vec!["c1", "c2", "c3", "c4"]; + + assert_eq!(g.args, args); + assert_eq!(g.requires, Some(reqs)); + assert_eq!(g.conflicts, Some(confs)); + } + + #[test] + fn test_debug() { + let g = ArgGroup::with_name("test") + .arg("a1") + .arg("a4") + .args(&["a2", "a3"]) + .required(true) + .conflicts_with("c1") + .conflicts_with_all(&["c2", "c3"]) + .conflicts_with("c4") + .requires("r1") + .requires_all(&["r2", "r3"]) + .requires("r4"); + + let args = vec!["a1", "a4", "a2", "a3"]; + let reqs = vec!["r1", "r2", "r3", "r4"]; + let confs = vec!["c1", "c2", "c3", "c4"]; + + let debug_str = format!( + "{{\n\ + \tname: \"test\",\n\ + \targs: {:?},\n\ + \trequired: {:?},\n\ + \trequires: {:?},\n\ + \tconflicts: {:?},\n\ + }}", + args, + true, + Some(reqs), + Some(confs) + ); + assert_eq!(&*format!("{:?}", g), &*debug_str); + } + + #[test] + fn test_from() { + let g = ArgGroup::with_name("test") + .arg("a1") + .arg("a4") + .args(&["a2", "a3"]) + .required(true) + .conflicts_with("c1") + .conflicts_with_all(&["c2", "c3"]) + .conflicts_with("c4") + .requires("r1") + .requires_all(&["r2", "r3"]) + .requires("r4"); + + let args = vec!["a1", "a4", "a2", "a3"]; + let reqs = vec!["r1", "r2", "r3", "r4"]; + let confs = vec!["c1", "c2", "c3", "c4"]; + + let g2 = ArgGroup::from(&g); + assert_eq!(g2.args, args); + assert_eq!(g2.requires, Some(reqs)); + assert_eq!(g2.conflicts, Some(confs)); + } + + #[cfg(feature = "yaml")] + #[cfg_attr(feature = "yaml", test)] + fn test_yaml() { + let g_yaml = "name: test +args: +- a1 +- a4 +- a2 +- a3 +conflicts_with: +- c1 +- c2 +- c3 +- c4 +requires: +- r1 +- r2 +- r3 +- r4"; + let yml = &YamlLoader::load_from_str(g_yaml).expect("failed to load YAML file")[0]; + let g = ArgGroup::from_yaml(yml); + let args = vec!["a1", "a4", "a2", "a3"]; + let reqs = vec!["r1", "r2", "r3", "r4"]; + let confs = vec!["c1", "c2", "c3", "c4"]; + assert_eq!(g.args, args); + assert_eq!(g.requires, Some(reqs)); + assert_eq!(g.conflicts, Some(confs)); + } +} + +impl<'a> Clone for ArgGroup<'a> { + fn clone(&self) -> Self { + ArgGroup { + name: self.name, + 
required: self.required, + args: self.args.clone(), + requires: self.requires.clone(), + conflicts: self.conflicts.clone(), + multiple: self.multiple, + } + } +} diff --git a/clap/src/args/macros.rs b/clap/src/args/macros.rs new file mode 100644 index 000000000..1de12f4ec --- /dev/null +++ b/clap/src/args/macros.rs @@ -0,0 +1,109 @@ +#[cfg(feature = "yaml")] +macro_rules! yaml_tuple2 { + ($a:ident, $v:ident, $c:ident) => {{ + if let Some(vec) = $v.as_vec() { + for ys in vec { + if let Some(tup) = ys.as_vec() { + debug_assert_eq!(2, tup.len()); + $a = $a.$c(yaml_str!(tup[0]), yaml_str!(tup[1])); + } else { + panic!("Failed to convert YAML value to vec"); + } + } + } else { + panic!("Failed to convert YAML value to vec"); + } + $a + } + }; +} + +#[cfg(feature = "yaml")] +macro_rules! yaml_tuple3 { + ($a:ident, $v:ident, $c:ident) => {{ + if let Some(vec) = $v.as_vec() { + for ys in vec { + if let Some(tup) = ys.as_vec() { + debug_assert_eq!(3, tup.len()); + $a = $a.$c(yaml_str!(tup[0]), yaml_opt_str!(tup[1]), yaml_str!(tup[2])); + } else { + panic!("Failed to convert YAML value to vec"); + } + } + } else { + panic!("Failed to convert YAML value to vec"); + } + $a + } + }; +} + +#[cfg(feature = "yaml")] +macro_rules! yaml_vec_or_str { + ($v:ident, $a:ident, $c:ident) => {{ + let maybe_vec = $v.as_vec(); + if let Some(vec) = maybe_vec { + for ys in vec { + if let Some(s) = ys.as_str() { + $a = $a.$c(s); + } else { + panic!("Failed to convert YAML value {:?} to a string", ys); + } + } + } else { + if let Some(s) = $v.as_str() { + $a = $a.$c(s); + } else { + panic!("Failed to convert YAML value {:?} to either a vec or string", $v); + } + } + $a + } + }; +} + +#[cfg(feature = "yaml")] +macro_rules! yaml_opt_str { + ($v:expr) => {{ + if $v.is_null() { + Some($v.as_str().unwrap_or_else(|| panic!("failed to convert YAML {:?} value to a string", $v))) + } else { + None + } + }}; +} + +#[cfg(feature = "yaml")] +macro_rules! yaml_str { + ($v:expr) => {{ + $v.as_str().unwrap_or_else(|| panic!("failed to convert YAML {:?} value to a string", $v)) + }}; +} + +#[cfg(feature = "yaml")] +macro_rules! yaml_to_str { + ($a:ident, $v:ident, $c:ident) => {{ + $a.$c(yaml_str!($v)) + }}; +} + +#[cfg(feature = "yaml")] +macro_rules! yaml_to_bool { + ($a:ident, $v:ident, $c:ident) => {{ + $a.$c($v.as_bool().unwrap_or_else(|| panic!("failed to convert YAML {:?} value to a string", $v))) + }}; +} + +#[cfg(feature = "yaml")] +macro_rules! yaml_to_u64 { + ($a:ident, $v:ident, $c:ident) => {{ + $a.$c($v.as_i64().unwrap_or_else(|| panic!("failed to convert YAML {:?} value to a string", $v)) as u64) + }}; +} + +#[cfg(feature = "yaml")] +macro_rules! 
yaml_to_usize {
+    ($a:ident, $v:ident, $c:ident) => {{
+        $a.$c($v.as_i64().unwrap_or_else(|| panic!("failed to convert YAML {:?} value to a string", $v)) as usize)
+    }};
+}
diff --git a/clap/src/args/matched_arg.rs b/clap/src/args/matched_arg.rs
new file mode 100644
index 000000000..eeda2611c
--- /dev/null
+++ b/clap/src/args/matched_arg.rs
@@ -0,0 +1,24 @@
+// Std
+use std::ffi::OsString;
+
+#[doc(hidden)]
+#[derive(Debug, Clone)]
+pub struct MatchedArg {
+    #[doc(hidden)] pub occurs: u64,
+    #[doc(hidden)] pub indices: Vec<usize>,
+    #[doc(hidden)] pub vals: Vec<OsString>,
+}
+
+impl Default for MatchedArg {
+    fn default() -> Self {
+        MatchedArg {
+            occurs: 1,
+            indices: Vec::new(),
+            vals: Vec::new(),
+        }
+    }
+}
+
+impl MatchedArg {
+    pub fn new() -> Self { MatchedArg::default() }
+}
diff --git a/clap/src/args/mod.rs b/clap/src/args/mod.rs
new file mode 100644
index 000000000..21f9b850d
--- /dev/null
+++ b/clap/src/args/mod.rs
@@ -0,0 +1,21 @@
+pub use self::any_arg::{AnyArg, DispOrder};
+pub use self::arg::Arg;
+pub use self::arg_builder::{Base, FlagBuilder, OptBuilder, PosBuilder, Switched, Valued};
+pub use self::arg_matcher::ArgMatcher;
+pub use self::arg_matches::{ArgMatches, OsValues, Values};
+pub use self::group::ArgGroup;
+pub use self::matched_arg::MatchedArg;
+pub use self::settings::{ArgFlags, ArgSettings};
+pub use self::subcommand::SubCommand;
+
+#[macro_use]
+mod macros;
+mod arg;
+pub mod any_arg;
+mod arg_matches;
+mod arg_matcher;
+mod subcommand;
+mod arg_builder;
+mod matched_arg;
+mod group;
+pub mod settings;
diff --git a/clap/src/args/settings.rs b/clap/src/args/settings.rs
new file mode 100644
index 000000000..864dd9030
--- /dev/null
+++ b/clap/src/args/settings.rs
@@ -0,0 +1,230 @@
+// Std
+#[allow(deprecated, unused_imports)]
+use std::ascii::AsciiExt;
+use std::str::FromStr;
+
+bitflags!
{ + struct Flags: u32 { + const REQUIRED = 1; + const MULTIPLE = 1 << 1; + const EMPTY_VALS = 1 << 2; + const GLOBAL = 1 << 3; + const HIDDEN = 1 << 4; + const TAKES_VAL = 1 << 5; + const USE_DELIM = 1 << 6; + const NEXT_LINE_HELP = 1 << 7; + const R_UNLESS_ALL = 1 << 8; + const REQ_DELIM = 1 << 9; + const DELIM_NOT_SET = 1 << 10; + const HIDE_POS_VALS = 1 << 11; + const ALLOW_TAC_VALS = 1 << 12; + const REQUIRE_EQUALS = 1 << 13; + const LAST = 1 << 14; + const HIDE_DEFAULT_VAL = 1 << 15; + const CASE_INSENSITIVE = 1 << 16; + const HIDE_ENV_VALS = 1 << 17; + const HIDDEN_SHORT_H = 1 << 18; + const HIDDEN_LONG_H = 1 << 19; + } +} + +#[doc(hidden)] +#[derive(Debug, Clone, Copy)] +pub struct ArgFlags(Flags); + +impl ArgFlags { + pub fn new() -> Self { ArgFlags::default() } + + impl_settings!{ArgSettings, + Required => Flags::REQUIRED, + Multiple => Flags::MULTIPLE, + EmptyValues => Flags::EMPTY_VALS, + Global => Flags::GLOBAL, + Hidden => Flags::HIDDEN, + TakesValue => Flags::TAKES_VAL, + UseValueDelimiter => Flags::USE_DELIM, + NextLineHelp => Flags::NEXT_LINE_HELP, + RequiredUnlessAll => Flags::R_UNLESS_ALL, + RequireDelimiter => Flags::REQ_DELIM, + ValueDelimiterNotSet => Flags::DELIM_NOT_SET, + HidePossibleValues => Flags::HIDE_POS_VALS, + AllowLeadingHyphen => Flags::ALLOW_TAC_VALS, + RequireEquals => Flags::REQUIRE_EQUALS, + Last => Flags::LAST, + CaseInsensitive => Flags::CASE_INSENSITIVE, + HideEnvValues => Flags::HIDE_ENV_VALS, + HideDefaultValue => Flags::HIDE_DEFAULT_VAL, + HiddenShortHelp => Flags::HIDDEN_SHORT_H, + HiddenLongHelp => Flags::HIDDEN_LONG_H + } +} + +impl Default for ArgFlags { + fn default() -> Self { ArgFlags(Flags::EMPTY_VALS | Flags::DELIM_NOT_SET) } +} + +/// Various settings that apply to arguments and may be set, unset, and checked via getter/setter +/// methods [`Arg::set`], [`Arg::unset`], and [`Arg::is_set`] +/// +/// [`Arg::set`]: ./struct.Arg.html#method.set +/// [`Arg::unset`]: ./struct.Arg.html#method.unset +/// [`Arg::is_set`]: ./struct.Arg.html#method.is_set +#[derive(Debug, PartialEq, Copy, Clone)] +pub enum ArgSettings { + /// The argument must be used + Required, + /// The argument may be used multiple times such as `--flag --flag` + Multiple, + /// The argument allows empty values such as `--option ""` + EmptyValues, + /// The argument should be propagated down through all child [`SubCommands`] + /// [`SubCommand`]: ./struct.SubCommand.html + Global, + /// The argument should **not** be shown in help text + Hidden, + /// The argument accepts a value, such as `--option ` + TakesValue, + /// Determines if the argument allows values to be grouped via a delimiter + UseValueDelimiter, + /// Prints the help text on the line after the argument + NextLineHelp, + /// Requires the use of a value delimiter for all multiple values + RequireDelimiter, + /// Hides the possible values from the help string + HidePossibleValues, + /// Allows vals that start with a '-' + AllowLeadingHyphen, + /// Require options use `--option=val` syntax + RequireEquals, + /// Specifies that the arg is the last positional argument and may be accessed early via `--` + /// syntax + Last, + /// Hides the default value from the help string + HideDefaultValue, + /// Makes `Arg::possible_values` case insensitive + CaseInsensitive, + /// Hides ENV values in the help message + HideEnvValues, + /// The argument should **not** be shown in short help text + HiddenShortHelp, + /// The argument should **not** be shown in long help text + HiddenLongHelp, + #[doc(hidden)] RequiredUnlessAll, + 
#[doc(hidden)] ValueDelimiterNotSet,
+}
+
+impl FromStr for ArgSettings {
+    type Err = String;
+    fn from_str(s: &str) -> Result<Self, <Self as FromStr>::Err> {
+        match &*s.to_ascii_lowercase() {
+            "required" => Ok(ArgSettings::Required),
+            "multiple" => Ok(ArgSettings::Multiple),
+            "global" => Ok(ArgSettings::Global),
+            "emptyvalues" => Ok(ArgSettings::EmptyValues),
+            "hidden" => Ok(ArgSettings::Hidden),
+            "takesvalue" => Ok(ArgSettings::TakesValue),
+            "usevaluedelimiter" => Ok(ArgSettings::UseValueDelimiter),
+            "nextlinehelp" => Ok(ArgSettings::NextLineHelp),
+            "requiredunlessall" => Ok(ArgSettings::RequiredUnlessAll),
+            "requiredelimiter" => Ok(ArgSettings::RequireDelimiter),
+            "valuedelimiternotset" => Ok(ArgSettings::ValueDelimiterNotSet),
+            "hidepossiblevalues" => Ok(ArgSettings::HidePossibleValues),
+            "allowleadinghyphen" => Ok(ArgSettings::AllowLeadingHyphen),
+            "requireequals" => Ok(ArgSettings::RequireEquals),
+            "last" => Ok(ArgSettings::Last),
+            "hidedefaultvalue" => Ok(ArgSettings::HideDefaultValue),
+            "caseinsensitive" => Ok(ArgSettings::CaseInsensitive),
+            "hideenvvalues" => Ok(ArgSettings::HideEnvValues),
+            "hiddenshorthelp" => Ok(ArgSettings::HiddenShortHelp),
+            "hiddenlonghelp" => Ok(ArgSettings::HiddenLongHelp),
+            _ => Err("unknown ArgSetting, cannot convert from str".to_owned()),
+        }
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use super::ArgSettings;
+
+    #[test]
+    fn arg_settings_fromstr() {
+        assert_eq!(
+            "allowleadinghyphen".parse::<ArgSettings>().unwrap(),
+            ArgSettings::AllowLeadingHyphen
+        );
+        assert_eq!(
+            "emptyvalues".parse::<ArgSettings>().unwrap(),
+            ArgSettings::EmptyValues
+        );
+        assert_eq!(
+            "global".parse::<ArgSettings>().unwrap(),
+            ArgSettings::Global
+        );
+        assert_eq!(
+            "hidepossiblevalues".parse::<ArgSettings>().unwrap(),
+            ArgSettings::HidePossibleValues
+        );
+        assert_eq!(
+            "hidden".parse::<ArgSettings>().unwrap(),
+            ArgSettings::Hidden
+        );
+        assert_eq!(
+            "multiple".parse::<ArgSettings>().unwrap(),
+            ArgSettings::Multiple
+        );
+        assert_eq!(
+            "nextlinehelp".parse::<ArgSettings>().unwrap(),
+            ArgSettings::NextLineHelp
+        );
+        assert_eq!(
+            "requiredunlessall".parse::<ArgSettings>().unwrap(),
+            ArgSettings::RequiredUnlessAll
+        );
+        assert_eq!(
+            "requiredelimiter".parse::<ArgSettings>().unwrap(),
+            ArgSettings::RequireDelimiter
+        );
+        assert_eq!(
+            "required".parse::<ArgSettings>().unwrap(),
+            ArgSettings::Required
+        );
+        assert_eq!(
+            "takesvalue".parse::<ArgSettings>().unwrap(),
+            ArgSettings::TakesValue
+        );
+        assert_eq!(
+            "usevaluedelimiter".parse::<ArgSettings>().unwrap(),
+            ArgSettings::UseValueDelimiter
+        );
+        assert_eq!(
+            "valuedelimiternotset".parse::<ArgSettings>().unwrap(),
+            ArgSettings::ValueDelimiterNotSet
+        );
+        assert_eq!(
+            "requireequals".parse::<ArgSettings>().unwrap(),
+            ArgSettings::RequireEquals
+        );
+        assert_eq!("last".parse::<ArgSettings>().unwrap(), ArgSettings::Last);
+        assert_eq!(
+            "hidedefaultvalue".parse::<ArgSettings>().unwrap(),
+            ArgSettings::HideDefaultValue
+        );
+        assert_eq!(
+            "caseinsensitive".parse::<ArgSettings>().unwrap(),
+            ArgSettings::CaseInsensitive
+        );
+        assert_eq!(
+            "hideenvvalues".parse::<ArgSettings>().unwrap(),
+            ArgSettings::HideEnvValues
+        );
+        assert_eq!(
+            "hiddenshorthelp".parse::<ArgSettings>().unwrap(),
+            ArgSettings::HiddenShortHelp
+        );
+        assert_eq!(
+            "hiddenlonghelp".parse::<ArgSettings>().unwrap(),
+            ArgSettings::HiddenLongHelp
+        );
+        assert!("hahahaha".parse::<ArgSettings>().is_err());
+    }
+}
diff --git a/clap/src/args/subcommand.rs b/clap/src/args/subcommand.rs
new file mode 100644
index 000000000..eebbf827a
--- /dev/null
+++ b/clap/src/args/subcommand.rs
@@ -0,0 +1,66 @@
+// Third Party
+#[cfg(feature = "yaml")]
+use yaml_rust::Yaml;
+
+// Internal
+use App;
+use ArgMatches;
+
+/// The abstract representation of a command line subcommand.
+/// +/// This struct describes all the valid options of the subcommand for the program. Subcommands are +/// essentially "sub-[`App`]s" and contain all the same possibilities (such as their own +/// [arguments], subcommands, and settings). +/// +/// # Examples +/// +/// ```rust +/// # use clap::{App, Arg, SubCommand}; +/// App::new("myprog") +/// .subcommand( +/// SubCommand::with_name("config") +/// .about("Used for configuration") +/// .arg(Arg::with_name("config_file") +/// .help("The configuration file to use") +/// .index(1))) +/// # ; +/// ``` +/// [`App`]: ./struct.App.html +/// [arguments]: ./struct.Arg.html +#[derive(Debug, Clone)] +pub struct SubCommand<'a> { + #[doc(hidden)] pub name: String, + #[doc(hidden)] pub matches: ArgMatches<'a>, +} + +impl<'a> SubCommand<'a> { + /// Creates a new instance of a subcommand requiring a name. The name will be displayed + /// to the user when they print version or help and usage information. + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg, SubCommand}; + /// App::new("myprog") + /// .subcommand( + /// SubCommand::with_name("config")) + /// # ; + /// ``` + pub fn with_name<'b>(name: &str) -> App<'a, 'b> { App::new(name) } + + /// Creates a new instance of a subcommand from a YAML (.yml) document + /// + /// # Examples + /// + /// ```ignore + /// # #[macro_use] + /// # extern crate clap; + /// # use clap::Subcommand; + /// # fn main() { + /// let sc_yaml = load_yaml!("test_subcommand.yml"); + /// let sc = SubCommand::from_yaml(sc_yaml); + /// # } + /// ``` + #[cfg(feature = "yaml")] + pub fn from_yaml(yaml: &Yaml) -> App { App::from_yaml(yaml) } +} diff --git a/clap/src/completions/bash.rs b/clap/src/completions/bash.rs new file mode 100644 index 000000000..d042e2ea1 --- /dev/null +++ b/clap/src/completions/bash.rs @@ -0,0 +1,219 @@ +// Std +use std::io::Write; + +// Internal +use app::parser::Parser; +use args::OptBuilder; +use completions; + +pub struct BashGen<'a, 'b> +where + 'a: 'b, +{ + p: &'b Parser<'a, 'b>, +} + +impl<'a, 'b> BashGen<'a, 'b> { + pub fn new(p: &'b Parser<'a, 'b>) -> Self { BashGen { p: p } } + + pub fn generate_to(&self, buf: &mut W) { + w!( + buf, + format!( + "_{name}() {{ + local i cur prev opts cmds + COMPREPLY=() + cur=\"${{COMP_WORDS[COMP_CWORD]}}\" + prev=\"${{COMP_WORDS[COMP_CWORD-1]}}\" + cmd=\"\" + opts=\"\" + + for i in ${{COMP_WORDS[@]}} + do + case \"${{i}}\" in + {name}) + cmd=\"{name}\" + ;; + {subcmds} + *) + ;; + esac + done + + case \"${{cmd}}\" in + {name}) + opts=\"{name_opts}\" + if [[ ${{cur}} == -* || ${{COMP_CWORD}} -eq 1 ]] ; then + COMPREPLY=( $(compgen -W \"${{opts}}\" -- ${{cur}}) ) + return 0 + fi + case \"${{prev}}\" in + {name_opts_details} + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W \"${{opts}}\" -- ${{cur}}) ) + return 0 + ;; + {subcmd_details} + esac +}} + +complete -F _{name} -o bashdefault -o default {name} +", + name = self.p.meta.bin_name.as_ref().unwrap(), + name_opts = self.all_options_for_path(self.p.meta.bin_name.as_ref().unwrap()), + name_opts_details = + self.option_details_for_path(self.p.meta.bin_name.as_ref().unwrap()), + subcmds = self.all_subcommands(), + subcmd_details = self.subcommand_details() + ).as_bytes() + ); + } + + fn all_subcommands(&self) -> String { + debugln!("BashGen::all_subcommands;"); + let mut subcmds = String::new(); + let scs = completions::all_subcommand_names(self.p); + + for sc in &scs { + subcmds = format!( + "{} + {name}) + cmd+=\"__{fn_name}\" + ;;", + subcmds, + name = sc, + fn_name = sc.replace("-", "__") 
+ ); + } + + subcmds + } + + fn subcommand_details(&self) -> String { + debugln!("BashGen::subcommand_details;"); + let mut subcmd_dets = String::new(); + let mut scs = completions::get_all_subcommand_paths(self.p, true); + scs.sort(); + scs.dedup(); + + for sc in &scs { + subcmd_dets = format!( + "{} + {subcmd}) + opts=\"{sc_opts}\" + if [[ ${{cur}} == -* || ${{COMP_CWORD}} -eq {level} ]] ; then + COMPREPLY=( $(compgen -W \"${{opts}}\" -- ${{cur}}) ) + return 0 + fi + case \"${{prev}}\" in + {opts_details} + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W \"${{opts}}\" -- ${{cur}}) ) + return 0 + ;;", + subcmd_dets, + subcmd = sc.replace("-", "__"), + sc_opts = self.all_options_for_path(&*sc), + level = sc.split("__").map(|_| 1).fold(0, |acc, n| acc + n), + opts_details = self.option_details_for_path(&*sc) + ); + } + + subcmd_dets + } + + fn option_details_for_path(&self, path: &str) -> String { + debugln!("BashGen::option_details_for_path: path={}", path); + let mut p = self.p; + for sc in path.split("__").skip(1) { + debugln!("BashGen::option_details_for_path:iter: sc={}", sc); + p = &find_subcmd!(p, sc).unwrap().p; + } + let mut opts = String::new(); + for o in p.opts() { + if let Some(l) = o.s.long { + opts = format!( + "{} + --{}) + COMPREPLY=({}) + return 0 + ;;", + opts, + l, + self.vals_for(o) + ); + } + if let Some(s) = o.s.short { + opts = format!( + "{} + -{}) + COMPREPLY=({}) + return 0 + ;;", + opts, + s, + self.vals_for(o) + ); + } + } + opts + } + + fn vals_for(&self, o: &OptBuilder) -> String { + debugln!("BashGen::vals_for: o={}", o.b.name); + use args::AnyArg; + if let Some(vals) = o.possible_vals() { + format!("$(compgen -W \"{}\" -- ${{cur}})", vals.join(" ")) + } else { + String::from("$(compgen -f ${cur})") + } + } + + fn all_options_for_path(&self, path: &str) -> String { + debugln!("BashGen::all_options_for_path: path={}", path); + let mut p = self.p; + for sc in path.split("__").skip(1) { + debugln!("BashGen::all_options_for_path:iter: sc={}", sc); + p = &find_subcmd!(p, sc).unwrap().p; + } + let mut opts = shorts!(p).fold(String::new(), |acc, s| format!("{} -{}", acc, s)); + opts = format!( + "{} {}", + opts, + longs!(p).fold(String::new(), |acc, l| format!("{} --{}", acc, l)) + ); + opts = format!( + "{} {}", + opts, + p.positionals + .values() + .fold(String::new(), |acc, p| format!("{} {}", acc, p)) + ); + opts = format!( + "{} {}", + opts, + p.subcommands + .iter() + .fold(String::new(), |acc, s| format!("{} {}", acc, s.p.meta.name)) + ); + for sc in &p.subcommands { + if let Some(ref aliases) = sc.p.meta.aliases { + opts = format!( + "{} {}", + opts, + aliases + .iter() + .map(|&(n, _)| n) + .fold(String::new(), |acc, a| format!("{} {}", acc, a)) + ); + } + } + opts + } +} diff --git a/clap/src/completions/elvish.rs b/clap/src/completions/elvish.rs new file mode 100644 index 000000000..9a5f21a38 --- /dev/null +++ b/clap/src/completions/elvish.rs @@ -0,0 +1,126 @@ +// Std +use std::io::Write; + +// Internal +use app::parser::Parser; +use INTERNAL_ERROR_MSG; + +pub struct ElvishGen<'a, 'b> +where + 'a: 'b, +{ + p: &'b Parser<'a, 'b>, +} + +impl<'a, 'b> ElvishGen<'a, 'b> { + pub fn new(p: &'b Parser<'a, 'b>) -> Self { ElvishGen { p: p } } + + pub fn generate_to(&self, buf: &mut W) { + let bin_name = self.p.meta.bin_name.as_ref().unwrap(); + + let mut names = vec![]; + let subcommands_cases = + generate_inner(self.p, "", &mut names); + + let result = format!(r#" +edit:completion:arg-completer[{bin_name}] = [@words]{{ + fn spaces [n]{{ + repeat $n ' ' | 
joins '' + }} + fn cand [text desc]{{ + edit:complex-candidate $text &display-suffix=' '(spaces (- 14 (wcswidth $text)))$desc + }} + command = '{bin_name}' + for word $words[1:-1] {{ + if (has-prefix $word '-') {{ + break + }} + command = $command';'$word + }} + completions = [{subcommands_cases} + ] + $completions[$command] +}} +"#, + bin_name = bin_name, + subcommands_cases = subcommands_cases + ); + + w!(buf, result.as_bytes()); + } +} + +// Escape string inside single quotes +fn escape_string(string: &str) -> String { string.replace("'", "''") } + +fn get_tooltip(help: Option<&str>, data: T) -> String { + match help { + Some(help) => escape_string(help), + _ => data.to_string() + } +} + +fn generate_inner<'a, 'b, 'p>( + p: &'p Parser<'a, 'b>, + previous_command_name: &str, + names: &mut Vec<&'p str>, +) -> String { + debugln!("ElvishGen::generate_inner;"); + let command_name = if previous_command_name.is_empty() { + p.meta.bin_name.as_ref().expect(INTERNAL_ERROR_MSG).clone() + } else { + format!("{};{}", previous_command_name, &p.meta.name) + }; + + let mut completions = String::new(); + let preamble = String::from("\n cand "); + + for option in p.opts() { + if let Some(data) = option.s.short { + let tooltip = get_tooltip(option.b.help, data); + completions.push_str(&preamble); + completions.push_str(format!("-{} '{}'", data, tooltip).as_str()); + } + if let Some(data) = option.s.long { + let tooltip = get_tooltip(option.b.help, data); + completions.push_str(&preamble); + completions.push_str(format!("--{} '{}'", data, tooltip).as_str()); + } + } + + for flag in p.flags() { + if let Some(data) = flag.s.short { + let tooltip = get_tooltip(flag.b.help, data); + completions.push_str(&preamble); + completions.push_str(format!("-{} '{}'", data, tooltip).as_str()); + } + if let Some(data) = flag.s.long { + let tooltip = get_tooltip(flag.b.help, data); + completions.push_str(&preamble); + completions.push_str(format!("--{} '{}'", data, tooltip).as_str()); + } + } + + for subcommand in &p.subcommands { + let data = &subcommand.p.meta.name; + let tooltip = get_tooltip(subcommand.p.meta.about, data); + completions.push_str(&preamble); + completions.push_str(format!("{} '{}'", data, tooltip).as_str()); + } + + let mut subcommands_cases = format!( + r" + &'{}'= {{{} + }}", + &command_name, + completions + ); + + for subcommand in &p.subcommands { + let subcommand_subcommands_cases = + generate_inner(&subcommand.p, &command_name, names); + subcommands_cases.push_str(&subcommand_subcommands_cases); + } + + subcommands_cases +} diff --git a/clap/src/completions/fish.rs b/clap/src/completions/fish.rs new file mode 100644 index 000000000..c2c5a5e70 --- /dev/null +++ b/clap/src/completions/fish.rs @@ -0,0 +1,99 @@ +// Std +use std::io::Write; + +// Internal +use app::parser::Parser; + +pub struct FishGen<'a, 'b> +where + 'a: 'b, +{ + p: &'b Parser<'a, 'b>, +} + +impl<'a, 'b> FishGen<'a, 'b> { + pub fn new(p: &'b Parser<'a, 'b>) -> Self { FishGen { p: p } } + + pub fn generate_to(&self, buf: &mut W) { + let command = self.p.meta.bin_name.as_ref().unwrap(); + let mut buffer = String::new(); + gen_fish_inner(command, self, command, &mut buffer); + w!(buf, buffer.as_bytes()); + } +} + +// Escape string inside single quotes +fn escape_string(string: &str) -> String { string.replace("\\", "\\\\").replace("'", "\\'") } + +fn gen_fish_inner(root_command: &str, comp_gen: &FishGen, subcommand: &str, buffer: &mut String) { + debugln!("FishGen::gen_fish_inner;"); + // example : + // + // complete + // -c {command} 
+ // -d "{description}" + // -s {short} + // -l {long} + // -a "{possible_arguments}" + // -r # if require parameter + // -f # don't use file completion + // -n "__fish_use_subcommand" # complete for command "myprog" + // -n "__fish_seen_subcommand_from subcmd1" # complete for command "myprog subcmd1" + + let mut basic_template = format!("complete -c {} -n ", root_command); + if root_command == subcommand { + basic_template.push_str("\"__fish_use_subcommand\""); + } else { + basic_template.push_str(format!("\"__fish_seen_subcommand_from {}\"", subcommand).as_str()); + } + + for option in comp_gen.p.opts() { + let mut template = basic_template.clone(); + if let Some(data) = option.s.short { + template.push_str(format!(" -s {}", data).as_str()); + } + if let Some(data) = option.s.long { + template.push_str(format!(" -l {}", data).as_str()); + } + if let Some(data) = option.b.help { + template.push_str(format!(" -d '{}'", escape_string(data)).as_str()); + } + if let Some(ref data) = option.v.possible_vals { + template.push_str(format!(" -r -f -a \"{}\"", data.join(" ")).as_str()); + } + buffer.push_str(template.as_str()); + buffer.push_str("\n"); + } + + for flag in comp_gen.p.flags() { + let mut template = basic_template.clone(); + if let Some(data) = flag.s.short { + template.push_str(format!(" -s {}", data).as_str()); + } + if let Some(data) = flag.s.long { + template.push_str(format!(" -l {}", data).as_str()); + } + if let Some(data) = flag.b.help { + template.push_str(format!(" -d '{}'", escape_string(data)).as_str()); + } + buffer.push_str(template.as_str()); + buffer.push_str("\n"); + } + + for subcommand in &comp_gen.p.subcommands { + let mut template = basic_template.clone(); + template.push_str(" -f"); + template.push_str(format!(" -a \"{}\"", &subcommand.p.meta.name).as_str()); + if let Some(data) = subcommand.p.meta.about { + template.push_str(format!(" -d '{}'", escape_string(data)).as_str()) + } + buffer.push_str(template.as_str()); + buffer.push_str("\n"); + } + + // generate options of subcommands + for subcommand in &comp_gen.p.subcommands { + let sub_comp_gen = FishGen::new(&subcommand.p); + gen_fish_inner(root_command, &sub_comp_gen, &subcommand.to_string(), buffer); + } +} diff --git a/clap/src/completions/macros.rs b/clap/src/completions/macros.rs new file mode 100644 index 000000000..653c72c48 --- /dev/null +++ b/clap/src/completions/macros.rs @@ -0,0 +1,28 @@ +macro_rules! w { + ($buf:expr, $to_w:expr) => { + match $buf.write_all($to_w) { + Ok(..) => (), + Err(..) => panic!("Failed to write to completions file"), + } + }; +} + +macro_rules! 
get_zsh_arg_conflicts { + ($p:ident, $arg:ident, $msg:ident) => { + if let Some(conf_vec) = $arg.blacklist() { + let mut v = vec![]; + for arg_name in conf_vec { + let arg = $p.find_any_arg(arg_name).expect($msg); + if let Some(s) = arg.short() { + v.push(format!("-{}", s)); + } + if let Some(l) = arg.long() { + v.push(format!("--{}", l)); + } + } + v.join(" ") + } else { + String::new() + } + } +} diff --git a/clap/src/completions/mod.rs b/clap/src/completions/mod.rs new file mode 100644 index 000000000..a3306d72a --- /dev/null +++ b/clap/src/completions/mod.rs @@ -0,0 +1,179 @@ +#[macro_use] +mod macros; +mod bash; +mod fish; +mod zsh; +mod powershell; +mod elvish; +mod shell; + +// Std +use std::io::Write; + +// Internal +use app::parser::Parser; +use self::bash::BashGen; +use self::fish::FishGen; +use self::zsh::ZshGen; +use self::powershell::PowerShellGen; +use self::elvish::ElvishGen; +pub use self::shell::Shell; + +pub struct ComplGen<'a, 'b> +where + 'a: 'b, +{ + p: &'b Parser<'a, 'b>, +} + +impl<'a, 'b> ComplGen<'a, 'b> { + pub fn new(p: &'b Parser<'a, 'b>) -> Self { ComplGen { p: p } } + + pub fn generate(&self, for_shell: Shell, buf: &mut W) { + match for_shell { + Shell::Bash => BashGen::new(self.p).generate_to(buf), + Shell::Fish => FishGen::new(self.p).generate_to(buf), + Shell::Zsh => ZshGen::new(self.p).generate_to(buf), + Shell::PowerShell => PowerShellGen::new(self.p).generate_to(buf), + Shell::Elvish => ElvishGen::new(self.p).generate_to(buf), + } + } +} + +// Gets all subcommands including child subcommands in the form of 'name' where the name +// is a single word (i.e. "install") of the path to said subcommand (i.e. +// "rustup toolchain install") +// +// Also note, aliases are treated as their own subcommands but duplicates of whatever they're +// aliasing. +pub fn all_subcommand_names(p: &Parser) -> Vec { + debugln!("all_subcommand_names;"); + let mut subcmds: Vec<_> = subcommands_of(p) + .iter() + .map(|&(ref n, _)| n.clone()) + .collect(); + for sc_v in p.subcommands.iter().map(|s| all_subcommand_names(&s.p)) { + subcmds.extend(sc_v); + } + subcmds.sort(); + subcmds.dedup(); + subcmds +} + +// Gets all subcommands including child subcommands in the form of ('name', 'bin_name') where the name +// is a single word (i.e. "install") of the path and full bin_name of said subcommand (i.e. +// "rustup toolchain install") +// +// Also note, aliases are treated as their own subcommands but duplicates of whatever they're +// aliasing. +pub fn all_subcommands(p: &Parser) -> Vec<(String, String)> { + debugln!("all_subcommands;"); + let mut subcmds: Vec<_> = subcommands_of(p); + for sc_v in p.subcommands.iter().map(|s| all_subcommands(&s.p)) { + subcmds.extend(sc_v); + } + subcmds +} + +// Gets all subcommands excluding child subcommands in the form of (name, bin_name) where the name +// is a single word (i.e. "install") and the bin_name is a space delineated list of the path to said +// subcommand (i.e. "rustup toolchain install") +// +// Also note, aliases are treated as their own subcommands but duplicates of whatever they're +// aliasing. 
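+//
+// For example (names are illustrative, not from the crate's docs): for a "prog"
+// parser with a "build" subcommand, this yields ("build", "prog build"); any
+// visible or hidden aliases of "build" each get their own (alias, bin_name)
+// entry as well, built by substituting the alias into the bin_name below.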
+pub fn subcommands_of(p: &Parser) -> Vec<(String, String)> { + debugln!( + "subcommands_of: name={}, bin_name={}", + p.meta.name, + p.meta.bin_name.as_ref().unwrap() + ); + let mut subcmds = vec![]; + + debugln!( + "subcommands_of: Has subcommands...{:?}", + p.has_subcommands() + ); + if !p.has_subcommands() { + let mut ret = vec![]; + debugln!("subcommands_of: Looking for aliases..."); + if let Some(ref aliases) = p.meta.aliases { + for &(n, _) in aliases { + debugln!("subcommands_of:iter:iter: Found alias...{}", n); + let mut als_bin_name: Vec<_> = + p.meta.bin_name.as_ref().unwrap().split(' ').collect(); + als_bin_name.push(n); + let old = als_bin_name.len() - 2; + als_bin_name.swap_remove(old); + ret.push((n.to_owned(), als_bin_name.join(" "))); + } + } + return ret; + } + for sc in &p.subcommands { + debugln!( + "subcommands_of:iter: name={}, bin_name={}", + sc.p.meta.name, + sc.p.meta.bin_name.as_ref().unwrap() + ); + + debugln!("subcommands_of:iter: Looking for aliases..."); + if let Some(ref aliases) = sc.p.meta.aliases { + for &(n, _) in aliases { + debugln!("subcommands_of:iter:iter: Found alias...{}", n); + let mut als_bin_name: Vec<_> = + p.meta.bin_name.as_ref().unwrap().split(' ').collect(); + als_bin_name.push(n); + let old = als_bin_name.len() - 2; + als_bin_name.swap_remove(old); + subcmds.push((n.to_owned(), als_bin_name.join(" "))); + } + } + subcmds.push(( + sc.p.meta.name.clone(), + sc.p.meta.bin_name.as_ref().unwrap().clone(), + )); + } + subcmds +} + +pub fn get_all_subcommand_paths(p: &Parser, first: bool) -> Vec { + debugln!("get_all_subcommand_paths;"); + let mut subcmds = vec![]; + if !p.has_subcommands() { + if !first { + let name = &*p.meta.name; + let path = p.meta.bin_name.as_ref().unwrap().clone().replace(" ", "__"); + let mut ret = vec![path.clone()]; + if let Some(ref aliases) = p.meta.aliases { + for &(n, _) in aliases { + ret.push(path.replace(name, n)); + } + } + return ret; + } + return vec![]; + } + for sc in &p.subcommands { + let name = &*sc.p.meta.name; + let path = sc.p + .meta + .bin_name + .as_ref() + .unwrap() + .clone() + .replace(" ", "__"); + subcmds.push(path.clone()); + if let Some(ref aliases) = sc.p.meta.aliases { + for &(n, _) in aliases { + subcmds.push(path.replace(name, n)); + } + } + } + for sc_v in p.subcommands + .iter() + .map(|s| get_all_subcommand_paths(&s.p, false)) + { + subcmds.extend(sc_v); + } + subcmds +} diff --git a/clap/src/completions/powershell.rs b/clap/src/completions/powershell.rs new file mode 100644 index 000000000..9fc77c72a --- /dev/null +++ b/clap/src/completions/powershell.rs @@ -0,0 +1,139 @@ +// Std +use std::io::Write; + +// Internal +use app::parser::Parser; +use INTERNAL_ERROR_MSG; + +pub struct PowerShellGen<'a, 'b> +where + 'a: 'b, +{ + p: &'b Parser<'a, 'b>, +} + +impl<'a, 'b> PowerShellGen<'a, 'b> { + pub fn new(p: &'b Parser<'a, 'b>) -> Self { PowerShellGen { p: p } } + + pub fn generate_to(&self, buf: &mut W) { + let bin_name = self.p.meta.bin_name.as_ref().unwrap(); + + let mut names = vec![]; + let subcommands_cases = + generate_inner(self.p, "", &mut names); + + let result = format!(r#" +using namespace System.Management.Automation +using namespace System.Management.Automation.Language + +Register-ArgumentCompleter -Native -CommandName '{bin_name}' -ScriptBlock {{ + param($wordToComplete, $commandAst, $cursorPosition) + + $commandElements = $commandAst.CommandElements + $command = @( + '{bin_name}' + for ($i = 1; $i -lt $commandElements.Count; $i++) {{ + $element = $commandElements[$i] + if 
($element -isnot [StringConstantExpressionAst] -or + $element.StringConstantType -ne [StringConstantType]::BareWord -or + $element.Value.StartsWith('-')) {{ + break + }} + $element.Value + }}) -join ';' + + $completions = @(switch ($command) {{{subcommands_cases} + }}) + + $completions.Where{{ $_.CompletionText -like "$wordToComplete*" }} | + Sort-Object -Property ListItemText +}} +"#, + bin_name = bin_name, + subcommands_cases = subcommands_cases + ); + + w!(buf, result.as_bytes()); + } +} + +// Escape string inside single quotes +fn escape_string(string: &str) -> String { string.replace("'", "''") } + +fn get_tooltip(help: Option<&str>, data: T) -> String { + match help { + Some(help) => escape_string(help), + _ => data.to_string() + } +} + +fn generate_inner<'a, 'b, 'p>( + p: &'p Parser<'a, 'b>, + previous_command_name: &str, + names: &mut Vec<&'p str>, +) -> String { + debugln!("PowerShellGen::generate_inner;"); + let command_name = if previous_command_name.is_empty() { + p.meta.bin_name.as_ref().expect(INTERNAL_ERROR_MSG).clone() + } else { + format!("{};{}", previous_command_name, &p.meta.name) + }; + + let mut completions = String::new(); + let preamble = String::from("\n [CompletionResult]::new("); + + for option in p.opts() { + if let Some(data) = option.s.short { + let tooltip = get_tooltip(option.b.help, data); + completions.push_str(&preamble); + completions.push_str(format!("'-{}', '{}', {}, '{}')", + data, data, "[CompletionResultType]::ParameterName", tooltip).as_str()); + } + if let Some(data) = option.s.long { + let tooltip = get_tooltip(option.b.help, data); + completions.push_str(&preamble); + completions.push_str(format!("'--{}', '{}', {}, '{}')", + data, data, "[CompletionResultType]::ParameterName", tooltip).as_str()); + } + } + + for flag in p.flags() { + if let Some(data) = flag.s.short { + let tooltip = get_tooltip(flag.b.help, data); + completions.push_str(&preamble); + completions.push_str(format!("'-{}', '{}', {}, '{}')", + data, data, "[CompletionResultType]::ParameterName", tooltip).as_str()); + } + if let Some(data) = flag.s.long { + let tooltip = get_tooltip(flag.b.help, data); + completions.push_str(&preamble); + completions.push_str(format!("'--{}', '{}', {}, '{}')", + data, data, "[CompletionResultType]::ParameterName", tooltip).as_str()); + } + } + + for subcommand in &p.subcommands { + let data = &subcommand.p.meta.name; + let tooltip = get_tooltip(subcommand.p.meta.about, data); + completions.push_str(&preamble); + completions.push_str(format!("'{}', '{}', {}, '{}')", + data, data, "[CompletionResultType]::ParameterValue", tooltip).as_str()); + } + + let mut subcommands_cases = format!( + r" + '{}' {{{} + break + }}", + &command_name, + completions + ); + + for subcommand in &p.subcommands { + let subcommand_subcommands_cases = + generate_inner(&subcommand.p, &command_name, names); + subcommands_cases.push_str(&subcommand_subcommands_cases); + } + + subcommands_cases +} diff --git a/clap/src/completions/shell.rs b/clap/src/completions/shell.rs new file mode 100644 index 000000000..19aab8694 --- /dev/null +++ b/clap/src/completions/shell.rs @@ -0,0 +1,52 @@ +#[allow(deprecated, unused_imports)] +use std::ascii::AsciiExt; +use std::str::FromStr; +use std::fmt; + +/// Describes which shell to produce a completions file for +#[cfg_attr(feature = "lints", allow(enum_variant_names))] +#[derive(Debug, Copy, Clone)] +pub enum Shell { + /// Generates a .bash completion file for the Bourne Again SHell (BASH) + Bash, + /// Generates a .fish completion file for 
the Friendly Interactive SHell (fish) + Fish, + /// Generates a completion file for the Z SHell (ZSH) + Zsh, + /// Generates a completion file for PowerShell + PowerShell, + /// Generates a completion file for Elvish + Elvish, +} + +impl Shell { + /// A list of possible variants in `&'static str` form + pub fn variants() -> [&'static str; 5] { ["zsh", "bash", "fish", "powershell", "elvish"] } +} + +impl FromStr for Shell { + type Err = String; + + fn from_str(s: &str) -> Result { + match s { + "ZSH" | _ if s.eq_ignore_ascii_case("zsh") => Ok(Shell::Zsh), + "FISH" | _ if s.eq_ignore_ascii_case("fish") => Ok(Shell::Fish), + "BASH" | _ if s.eq_ignore_ascii_case("bash") => Ok(Shell::Bash), + "POWERSHELL" | _ if s.eq_ignore_ascii_case("powershell") => Ok(Shell::PowerShell), + "ELVISH" | _ if s.eq_ignore_ascii_case("elvish") => Ok(Shell::Elvish), + _ => Err(String::from("[valid values: bash, fish, zsh, powershell, elvish]")), + } + } +} + +impl fmt::Display for Shell { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match *self { + Shell::Bash => write!(f, "BASH"), + Shell::Fish => write!(f, "FISH"), + Shell::Zsh => write!(f, "ZSH"), + Shell::PowerShell => write!(f, "POWERSHELL"), + Shell::Elvish => write!(f, "ELVISH"), + } + } +} diff --git a/clap/src/completions/zsh.rs b/clap/src/completions/zsh.rs new file mode 100644 index 000000000..5d23fd284 --- /dev/null +++ b/clap/src/completions/zsh.rs @@ -0,0 +1,472 @@ +// Std +use std::io::Write; +#[allow(deprecated, unused_imports)] +use std::ascii::AsciiExt; + +// Internal +use app::App; +use app::parser::Parser; +use args::{AnyArg, ArgSettings}; +use completions; +use INTERNAL_ERROR_MSG; + +pub struct ZshGen<'a, 'b> +where + 'a: 'b, +{ + p: &'b Parser<'a, 'b>, +} + +impl<'a, 'b> ZshGen<'a, 'b> { + pub fn new(p: &'b Parser<'a, 'b>) -> Self { + debugln!("ZshGen::new;"); + ZshGen { p: p } + } + + pub fn generate_to(&self, buf: &mut W) { + debugln!("ZshGen::generate_to;"); + w!( + buf, + format!( + "\ +#compdef {name} + +autoload -U is-at-least + +_{name}() {{ + typeset -A opt_args + typeset -a _arguments_options + local ret=1 + + if is-at-least 5.2; then + _arguments_options=(-s -S -C) + else + _arguments_options=(-s -C) + fi + + local context curcontext=\"$curcontext\" state line + {initial_args} + {subcommands} +}} + +{subcommand_details} + +_{name} \"$@\"", + name = self.p.meta.bin_name.as_ref().unwrap(), + initial_args = get_args_of(self.p), + subcommands = get_subcommands_of(self.p), + subcommand_details = subcommand_details(self.p) + ).as_bytes() + ); + } +} + +// Displays the commands of a subcommand +// (( $+functions[_[bin_name_underscore]_commands] )) || +// _[bin_name_underscore]_commands() { +// local commands; commands=( +// '[arg_name]:[arg_help]' +// ) +// _describe -t commands '[bin_name] commands' commands "$@" +// +// Where the following variables are present: +// [bin_name_underscore]: The full space delineated bin_name, where spaces have been replaced by +// underscore characters +// [arg_name]: The name of the subcommand +// [arg_help]: The help message of the subcommand +// [bin_name]: The full space delineated bin_name +// +// Here's a snippet from rustup: +// +// (( $+functions[_rustup_commands] )) || +// _rustup_commands() { +// local commands; commands=( +// 'show:Show the active and installed toolchains' +// 'update:Update Rust toolchains' +// # ... 
snip for brevity +// 'help:Prints this message or the help of the given subcommand(s)' +// ) +// _describe -t commands 'rustup commands' commands "$@" +// +fn subcommand_details(p: &Parser) -> String { + debugln!("ZshGen::subcommand_details;"); + // First we do ourself + let mut ret = vec![ + format!( + "\ +(( $+functions[_{bin_name_underscore}_commands] )) || +_{bin_name_underscore}_commands() {{ + local commands; commands=( + {subcommands_and_args} + ) + _describe -t commands '{bin_name} commands' commands \"$@\" +}}", + bin_name_underscore = p.meta.bin_name.as_ref().unwrap().replace(" ", "__"), + bin_name = p.meta.bin_name.as_ref().unwrap(), + subcommands_and_args = subcommands_of(p) + ), + ]; + + // Next we start looping through all the children, grandchildren, etc. + let mut all_subcommands = completions::all_subcommands(p); + all_subcommands.sort(); + all_subcommands.dedup(); + for &(_, ref bin_name) in &all_subcommands { + debugln!("ZshGen::subcommand_details:iter: bin_name={}", bin_name); + ret.push(format!( + "\ +(( $+functions[_{bin_name_underscore}_commands] )) || +_{bin_name_underscore}_commands() {{ + local commands; commands=( + {subcommands_and_args} + ) + _describe -t commands '{bin_name} commands' commands \"$@\" +}}", + bin_name_underscore = bin_name.replace(" ", "__"), + bin_name = bin_name, + subcommands_and_args = subcommands_of(parser_of(p, bin_name)) + )); + } + + ret.join("\n") +} + +// Generates subcommand completions in form of +// +// '[arg_name]:[arg_help]' +// +// Where: +// [arg_name]: the subcommand's name +// [arg_help]: the help message of the subcommand +// +// A snippet from rustup: +// 'show:Show the active and installed toolchains' +// 'update:Update Rust toolchains' +fn subcommands_of(p: &Parser) -> String { + debugln!("ZshGen::subcommands_of;"); + let mut ret = vec![]; + fn add_sc(sc: &App, n: &str, ret: &mut Vec) { + debugln!("ZshGen::add_sc;"); + let s = format!( + "\"{name}:{help}\" \\", + name = n, + help = sc.p + .meta + .about + .unwrap_or("") + .replace("[", "\\[") + .replace("]", "\\]") + ); + if !s.is_empty() { + ret.push(s); + } + } + + // The subcommands + for sc in p.subcommands() { + debugln!( + "ZshGen::subcommands_of:iter: subcommand={}", + sc.p.meta.name + ); + add_sc(sc, &sc.p.meta.name, &mut ret); + if let Some(ref v) = sc.p.meta.aliases { + for alias in v.iter().filter(|&&(_, vis)| vis).map(|&(n, _)| n) { + add_sc(sc, alias, &mut ret); + } + } + } + + ret.join("\n") +} + +// Get's the subcommand section of a completion file +// This looks roughly like: +// +// case $state in +// ([bin_name]_args) +// curcontext=\"${curcontext%:*:*}:[name_hyphen]-command-$words[1]:\" +// case $line[1] in +// +// ([name]) +// _arguments -C -s -S \ +// [subcommand_args] +// && ret=0 +// +// [RECURSIVE_CALLS] +// +// ;;", +// +// [repeat] +// +// esac +// ;; +// esac", +// +// Where the following variables are present: +// [name] = The subcommand name in the form of "install" for "rustup toolchain install" +// [bin_name] = The full space delineated bin_name such as "rustup toolchain install" +// [name_hyphen] = The full space delineated bin_name, but replace spaces with hyphens +// [repeat] = From the same recursive calls, but for all subcommands +// [subcommand_args] = The same as zsh::get_args_of +fn get_subcommands_of(p: &Parser) -> String { + debugln!("get_subcommands_of;"); + + debugln!( + "get_subcommands_of: Has subcommands...{:?}", + p.has_subcommands() + ); + if !p.has_subcommands() { + return String::new(); + } + + let sc_names = 
completions::subcommands_of(p); + + let mut subcmds = vec![]; + for &(ref name, ref bin_name) in &sc_names { + let mut v = vec![format!("({})", name)]; + let subcommand_args = get_args_of(parser_of(p, &*bin_name)); + if !subcommand_args.is_empty() { + v.push(subcommand_args); + } + let subcommands = get_subcommands_of(parser_of(p, &*bin_name)); + if !subcommands.is_empty() { + v.push(subcommands); + } + v.push(String::from(";;")); + subcmds.push(v.join("\n")); + } + + format!( + "case $state in + ({name}) + words=($line[{pos}] \"${{words[@]}}\") + (( CURRENT += 1 )) + curcontext=\"${{curcontext%:*:*}}:{name_hyphen}-command-$line[{pos}]:\" + case $line[{pos}] in + {subcommands} + esac + ;; +esac", + name = p.meta.name, + name_hyphen = p.meta.bin_name.as_ref().unwrap().replace(" ", "-"), + subcommands = subcmds.join("\n"), + pos = p.positionals().len() + 1 + ) +} + +fn parser_of<'a, 'b>(p: &'b Parser<'a, 'b>, sc: &str) -> &'b Parser<'a, 'b> { + debugln!("parser_of: sc={}", sc); + if sc == p.meta.bin_name.as_ref().unwrap_or(&String::new()) { + return p; + } + &p.find_subcommand(sc).expect(INTERNAL_ERROR_MSG).p +} + +// Writes out the args section, which ends up being the flags, opts and postionals, and a jump to +// another ZSH function if there are subcommands. +// The structer works like this: +// ([conflicting_args]) [multiple] arg [takes_value] [[help]] [: :(possible_values)] +// ^-- list '-v -h' ^--'*' ^--'+' ^-- list 'one two three' +// +// An example from the rustup command: +// +// _arguments -C -s -S \ +// '(-h --help --verbose)-v[Enable verbose output]' \ +// '(-V -v --version --verbose --help)-h[Prints help information]' \ +// # ... snip for brevity +// ':: :_rustup_commands' \ # <-- displays subcommands +// '*::: :->rustup' \ # <-- displays subcommand args and child subcommands +// && ret=0 +// +// The args used for _arguments are as follows: +// -C: modify the $context internal variable +// -s: Allow stacking of short args (i.e. 
-a -b -c => -abc) +// -S: Do not complete anything after '--' and treat those as argument values +fn get_args_of(p: &Parser) -> String { + debugln!("get_args_of;"); + let mut ret = vec![String::from("_arguments \"${_arguments_options[@]}\" \\")]; + let opts = write_opts_of(p); + let flags = write_flags_of(p); + let positionals = write_positionals_of(p); + let sc_or_a = if p.has_subcommands() { + format!( + "\":: :_{name}_commands\" \\", + name = p.meta.bin_name.as_ref().unwrap().replace(" ", "__") + ) + } else { + String::new() + }; + let sc = if p.has_subcommands() { + format!("\"*::: :->{name}\" \\", name = p.meta.name) + } else { + String::new() + }; + + if !opts.is_empty() { + ret.push(opts); + } + if !flags.is_empty() { + ret.push(flags); + } + if !positionals.is_empty() { + ret.push(positionals); + } + if !sc_or_a.is_empty() { + ret.push(sc_or_a); + } + if !sc.is_empty() { + ret.push(sc); + } + ret.push(String::from("&& ret=0")); + + ret.join("\n") +} + +// Escape help string inside single quotes and brackets +fn escape_help(string: &str) -> String { + string + .replace("\\", "\\\\") + .replace("'", "'\\''") + .replace("[", "\\[") + .replace("]", "\\]") +} + +// Escape value string inside single quotes and parentheses +fn escape_value(string: &str) -> String { + string + .replace("\\", "\\\\") + .replace("'", "'\\''") + .replace("(", "\\(") + .replace(")", "\\)") + .replace(" ", "\\ ") +} + +fn write_opts_of(p: &Parser) -> String { + debugln!("write_opts_of;"); + let mut ret = vec![]; + for o in p.opts() { + debugln!("write_opts_of:iter: o={}", o.name()); + let help = o.help().map_or(String::new(), escape_help); + let mut conflicts = get_zsh_arg_conflicts!(p, o, INTERNAL_ERROR_MSG); + conflicts = if conflicts.is_empty() { + String::new() + } else { + format!("({})", conflicts) + }; + + let multiple = if o.is_set(ArgSettings::Multiple) { + "*" + } else { + "" + }; + let pv = if let Some(pv_vec) = o.possible_vals() { + format!(": :({})", pv_vec.iter().map( + |v| escape_value(*v)).collect::>().join(" ")) + } else { + String::new() + }; + if let Some(short) = o.short() { + let s = format!( + "'{conflicts}{multiple}-{arg}+[{help}]{possible_values}' \\", + conflicts = conflicts, + multiple = multiple, + arg = short, + possible_values = pv, + help = help + ); + + debugln!("write_opts_of:iter: Wrote...{}", &*s); + ret.push(s); + } + if let Some(long) = o.long() { + let l = format!( + "'{conflicts}{multiple}--{arg}=[{help}]{possible_values}' \\", + conflicts = conflicts, + multiple = multiple, + arg = long, + possible_values = pv, + help = help + ); + + debugln!("write_opts_of:iter: Wrote...{}", &*l); + ret.push(l); + } + } + + ret.join("\n") +} + +fn write_flags_of(p: &Parser) -> String { + debugln!("write_flags_of;"); + let mut ret = vec![]; + for f in p.flags() { + debugln!("write_flags_of:iter: f={}", f.name()); + let help = f.help().map_or(String::new(), escape_help); + let mut conflicts = get_zsh_arg_conflicts!(p, f, INTERNAL_ERROR_MSG); + conflicts = if conflicts.is_empty() { + String::new() + } else { + format!("({})", conflicts) + }; + + let multiple = if f.is_set(ArgSettings::Multiple) { + "*" + } else { + "" + }; + if let Some(short) = f.short() { + let s = format!( + "'{conflicts}{multiple}-{arg}[{help}]' \\", + multiple = multiple, + conflicts = conflicts, + arg = short, + help = help + ); + + debugln!("write_flags_of:iter: Wrote...{}", &*s); + ret.push(s); + } + + if let Some(long) = f.long() { + let l = format!( + "'{conflicts}{multiple}--{arg}[{help}]' \\", + conflicts = 
conflicts, + multiple = multiple, + arg = long, + help = help + ); + + debugln!("write_flags_of:iter: Wrote...{}", &*l); + ret.push(l); + } + } + + ret.join("\n") +} + +fn write_positionals_of(p: &Parser) -> String { + debugln!("write_positionals_of;"); + let mut ret = vec![]; + for arg in p.positionals() { + debugln!("write_positionals_of:iter: arg={}", arg.b.name); + let a = format!( + "'{optional}:{name}{help}:{action}' \\", + optional = if !arg.b.is_set(ArgSettings::Required) { ":" } else { "" }, + name = arg.b.name, + help = arg.b + .help + .map_or("".to_owned(), |v| " -- ".to_owned() + v) + .replace("[", "\\[") + .replace("]", "\\]"), + action = arg.possible_vals().map_or("_files".to_owned(), |values| { + format!("({})", + values.iter().map(|v| escape_value(*v)).collect::>().join(" ")) + }) + ); + + debugln!("write_positionals_of:iter: Wrote...{}", a); + ret.push(a); + } + + ret.join("\n") +} diff --git a/clap/src/errors.rs b/clap/src/errors.rs new file mode 100644 index 000000000..35ac58bd0 --- /dev/null +++ b/clap/src/errors.rs @@ -0,0 +1,911 @@ +// Std +use std::convert::From; +use std::error::Error as StdError; +use std::fmt as std_fmt; +use std::fmt::Display; +use std::io::{self, Write}; +use std::process; +use std::result::Result as StdResult; + +// Internal +use args::AnyArg; +use fmt::{ColorWhen, Colorizer, ColorizerOption}; +use suggestions; + +/// Short hand for [`Result`] type +/// +/// [`Result`]: https://doc.rust-lang.org/std/result/enum.Result.html +pub type Result = StdResult; + +/// Command line argument parser kind of error +#[derive(Debug, Copy, Clone, PartialEq)] +pub enum ErrorKind { + /// Occurs when an [`Arg`] has a set of possible values, + /// and the user provides a value which isn't in that set. + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg, ErrorKind}; + /// let result = App::new("prog") + /// .arg(Arg::with_name("speed") + /// .possible_value("fast") + /// .possible_value("slow")) + /// .get_matches_from_safe(vec!["prog", "other"]); + /// assert!(result.is_err()); + /// assert_eq!(result.unwrap_err().kind, ErrorKind::InvalidValue); + /// ``` + /// [`Arg`]: ./struct.Arg.html + InvalidValue, + + /// Occurs when a user provides a flag, option, argument or subcommand which isn't defined. + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg, ErrorKind}; + /// let result = App::new("prog") + /// .arg(Arg::from_usage("--flag 'some flag'")) + /// .get_matches_from_safe(vec!["prog", "--other"]); + /// assert!(result.is_err()); + /// assert_eq!(result.unwrap_err().kind, ErrorKind::UnknownArgument); + /// ``` + UnknownArgument, + + /// Occurs when the user provides an unrecognized [`SubCommand`] which meets the threshold for + /// being similar enough to an existing subcommand. + /// If it doesn't meet the threshold, or the 'suggestions' feature is disabled, + /// the more general [`UnknownArgument`] error is returned. 
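A minimal sketch, not part of the vendored clap sources: the zsh spec assembled by the completion-writing functions above (`get_args_of`, `write_opts_of`, `write_flags_of`, `write_positionals_of`) is normally obtained through `App::gen_completions_to` (or `App::gen_completions` from a build script) with `Shell::Zsh`. The `myprog` application and its arguments below are hypothetical.

```rust
// Hedged sketch: drive the zsh completion generator for a hypothetical app.
extern crate clap;
use clap::{App, Arg, Shell, SubCommand};
use std::io;

fn main() {
    let mut app = App::new("myprog")
        .arg(Arg::with_name("config")
            .short("c")
            .long("config")
            .takes_value(true)
            .help("Sets a custom config file"))
        .subcommand(SubCommand::with_name("test")
            .about("Runs the test suite"));

    // Writes the generated `_myprog` completion function to stdout; a build
    // script would instead call app.gen_completions("myprog", Shell::Zsh, out_dir).
    app.gen_completions_to("myprog", Shell::Zsh, &mut io::stdout());
}
```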
+ /// + /// # Examples + /// + #[cfg_attr(not(feature = "suggestions"), doc = " ```no_run")] + #[cfg_attr(feature = "suggestions", doc = " ```")] + /// # use clap::{App, Arg, ErrorKind, SubCommand}; + /// let result = App::new("prog") + /// .subcommand(SubCommand::with_name("config") + /// .about("Used for configuration") + /// .arg(Arg::with_name("config_file") + /// .help("The configuration file to use") + /// .index(1))) + /// .get_matches_from_safe(vec!["prog", "confi"]); + /// assert!(result.is_err()); + /// assert_eq!(result.unwrap_err().kind, ErrorKind::InvalidSubcommand); + /// ``` + /// [`SubCommand`]: ./struct.SubCommand.html + /// [`UnknownArgument`]: ./enum.ErrorKind.html#variant.UnknownArgument + InvalidSubcommand, + + /// Occurs when the user provides an unrecognized [`SubCommand`] which either + /// doesn't meet the threshold for being similar enough to an existing subcommand, + /// or the 'suggestions' feature is disabled. + /// Otherwise the more detailed [`InvalidSubcommand`] error is returned. + /// + /// This error typically happens when passing additional subcommand names to the `help` + /// subcommand. Otherwise, the more general [`UnknownArgument`] error is used. + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg, ErrorKind, SubCommand}; + /// let result = App::new("prog") + /// .subcommand(SubCommand::with_name("config") + /// .about("Used for configuration") + /// .arg(Arg::with_name("config_file") + /// .help("The configuration file to use") + /// .index(1))) + /// .get_matches_from_safe(vec!["prog", "help", "nothing"]); + /// assert!(result.is_err()); + /// assert_eq!(result.unwrap_err().kind, ErrorKind::UnrecognizedSubcommand); + /// ``` + /// [`SubCommand`]: ./struct.SubCommand.html + /// [`InvalidSubcommand`]: ./enum.ErrorKind.html#variant.InvalidSubcommand + /// [`UnknownArgument`]: ./enum.ErrorKind.html#variant.UnknownArgument + UnrecognizedSubcommand, + + /// Occurs when the user provides an empty value for an option that does not allow empty + /// values. + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg, ErrorKind}; + /// let res = App::new("prog") + /// .arg(Arg::with_name("color") + /// .long("color") + /// .empty_values(false)) + /// .get_matches_from_safe(vec!["prog", "--color="]); + /// assert!(res.is_err()); + /// assert_eq!(res.unwrap_err().kind, ErrorKind::EmptyValue); + /// ``` + EmptyValue, + + /// Occurs when the user provides a value for an argument with a custom validation and the + /// value fails that validation. + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg, ErrorKind}; + /// fn is_numeric(val: String) -> Result<(), String> { + /// match val.parse::() { + /// Ok(..) => Ok(()), + /// Err(..) => Err(String::from("Value wasn't a number!")), + /// } + /// } + /// + /// let result = App::new("prog") + /// .arg(Arg::with_name("num") + /// .validator(is_numeric)) + /// .get_matches_from_safe(vec!["prog", "NotANumber"]); + /// assert!(result.is_err()); + /// assert_eq!(result.unwrap_err().kind, ErrorKind::ValueValidation); + /// ``` + ValueValidation, + + /// Occurs when a user provides more values for an argument than were defined by setting + /// [`Arg::max_values`]. 
+ /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg, ErrorKind}; + /// let result = App::new("prog") + /// .arg(Arg::with_name("arg") + /// .multiple(true) + /// .max_values(2)) + /// .get_matches_from_safe(vec!["prog", "too", "many", "values"]); + /// assert!(result.is_err()); + /// assert_eq!(result.unwrap_err().kind, ErrorKind::TooManyValues); + /// ``` + /// [`Arg::max_values`]: ./struct.Arg.html#method.max_values + TooManyValues, + + /// Occurs when the user provides fewer values for an argument than were defined by setting + /// [`Arg::min_values`]. + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg, ErrorKind}; + /// let result = App::new("prog") + /// .arg(Arg::with_name("some_opt") + /// .long("opt") + /// .min_values(3)) + /// .get_matches_from_safe(vec!["prog", "--opt", "too", "few"]); + /// assert!(result.is_err()); + /// assert_eq!(result.unwrap_err().kind, ErrorKind::TooFewValues); + /// ``` + /// [`Arg::min_values`]: ./struct.Arg.html#method.min_values + TooFewValues, + + /// Occurs when the user provides a different number of values for an argument than what's + /// been defined by setting [`Arg::number_of_values`] or than was implicitly set by + /// [`Arg::value_names`]. + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg, ErrorKind}; + /// let result = App::new("prog") + /// .arg(Arg::with_name("some_opt") + /// .long("opt") + /// .takes_value(true) + /// .number_of_values(2)) + /// .get_matches_from_safe(vec!["prog", "--opt", "wrong"]); + /// assert!(result.is_err()); + /// assert_eq!(result.unwrap_err().kind, ErrorKind::WrongNumberOfValues); + /// ``` + /// [`Arg::number_of_values`] + /// [`Arg::number_of_values`]: ./struct.Arg.html#method.number_of_values + /// [`Arg::value_names`]: ./struct.Arg.html#method.value_names + WrongNumberOfValues, + + /// Occurs when the user provides two values which conflict with each other and can't be used + /// together. + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg, ErrorKind}; + /// let result = App::new("prog") + /// .arg(Arg::with_name("debug") + /// .long("debug") + /// .conflicts_with("color")) + /// .arg(Arg::with_name("color") + /// .long("color")) + /// .get_matches_from_safe(vec!["prog", "--debug", "--color"]); + /// assert!(result.is_err()); + /// assert_eq!(result.unwrap_err().kind, ErrorKind::ArgumentConflict); + /// ``` + ArgumentConflict, + + /// Occurs when the user does not provide one or more required arguments. + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg, ErrorKind}; + /// let result = App::new("prog") + /// .arg(Arg::with_name("debug") + /// .required(true)) + /// .get_matches_from_safe(vec!["prog"]); + /// assert!(result.is_err()); + /// assert_eq!(result.unwrap_err().kind, ErrorKind::MissingRequiredArgument); + /// ``` + MissingRequiredArgument, + + /// Occurs when a subcommand is required (as defined by [`AppSettings::SubcommandRequired`]), + /// but the user does not provide one. 
+ /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, AppSettings, SubCommand, ErrorKind}; + /// let err = App::new("prog") + /// .setting(AppSettings::SubcommandRequired) + /// .subcommand(SubCommand::with_name("test")) + /// .get_matches_from_safe(vec![ + /// "myprog", + /// ]); + /// assert!(err.is_err()); + /// assert_eq!(err.unwrap_err().kind, ErrorKind::MissingSubcommand); + /// # ; + /// ``` + /// [`AppSettings::SubcommandRequired`]: ./enum.AppSettings.html#variant.SubcommandRequired + MissingSubcommand, + + /// Occurs when either an argument or [`SubCommand`] is required, as defined by + /// [`AppSettings::ArgRequiredElseHelp`], but the user did not provide one. + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg, AppSettings, ErrorKind, SubCommand}; + /// let result = App::new("prog") + /// .setting(AppSettings::ArgRequiredElseHelp) + /// .subcommand(SubCommand::with_name("config") + /// .about("Used for configuration") + /// .arg(Arg::with_name("config_file") + /// .help("The configuration file to use"))) + /// .get_matches_from_safe(vec!["prog"]); + /// assert!(result.is_err()); + /// assert_eq!(result.unwrap_err().kind, ErrorKind::MissingArgumentOrSubcommand); + /// ``` + /// [`SubCommand`]: ./struct.SubCommand.html + /// [`AppSettings::ArgRequiredElseHelp`]: ./enum.AppSettings.html#variant.ArgRequiredElseHelp + MissingArgumentOrSubcommand, + + /// Occurs when the user provides multiple values to an argument which doesn't allow that. + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg, ErrorKind}; + /// let result = App::new("prog") + /// .arg(Arg::with_name("debug") + /// .long("debug") + /// .multiple(false)) + /// .get_matches_from_safe(vec!["prog", "--debug", "--debug"]); + /// assert!(result.is_err()); + /// assert_eq!(result.unwrap_err().kind, ErrorKind::UnexpectedMultipleUsage); + /// ``` + UnexpectedMultipleUsage, + + /// Occurs when the user provides a value containing invalid UTF-8 for an argument and + /// [`AppSettings::StrictUtf8`] is set. + /// + /// # Platform Specific + /// + /// Non-Windows platforms only (such as Linux, Unix, macOS, etc.) + /// + /// # Examples + /// + #[cfg_attr(not(unix), doc = " ```ignore")] + #[cfg_attr(unix, doc = " ```")] + /// # use clap::{App, Arg, ErrorKind, AppSettings}; + /// # use std::os::unix::ffi::OsStringExt; + /// # use std::ffi::OsString; + /// let result = App::new("prog") + /// .setting(AppSettings::StrictUtf8) + /// .arg(Arg::with_name("utf8") + /// .short("u") + /// .takes_value(true)) + /// .get_matches_from_safe(vec![OsString::from("myprog"), + /// OsString::from("-u"), + /// OsString::from_vec(vec![0xE9])]); + /// assert!(result.is_err()); + /// assert_eq!(result.unwrap_err().kind, ErrorKind::InvalidUtf8); + /// ``` + /// [`AppSettings::StrictUtf8`]: ./enum.AppSettings.html#variant.StrictUtf8 + InvalidUtf8, + + /// Not a true "error" as it means `--help` or similar was used. + /// The help message will be sent to `stdout`. + /// + /// **Note**: If the help is displayed due to an error (such as missing subcommands) it will + /// be sent to `stderr` instead of `stdout`. + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg, ErrorKind}; + /// let result = App::new("prog") + /// .get_matches_from_safe(vec!["prog", "--help"]); + /// assert!(result.is_err()); + /// assert_eq!(result.unwrap_err().kind, ErrorKind::HelpDisplayed); + /// ``` + HelpDisplayed, + + /// Not a true "error" as it means `--version` or similar was used. 
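A minimal sketch, not part of the vendored sources: the `ErrorKind` values documented above are typically consumed by matching on the error returned from `get_matches_from_safe`. The `prog` application and its `speed` argument here are hypothetical.

```rust
// Hedged sketch: dispatch on ErrorKind instead of letting clap exit for us.
extern crate clap;
use clap::{App, Arg, ErrorKind};

fn main() {
    let result = App::new("prog")
        .arg(Arg::with_name("speed")
            .possible_value("fast")
            .possible_value("slow"))
        .get_matches_from_safe(vec!["prog", "other"]);

    match result {
        Ok(matches) => println!("speed = {:?}", matches.value_of("speed")),
        // --help and --version surface as "errors" but should still print and exit cleanly.
        Err(ref e) if e.kind == ErrorKind::HelpDisplayed
            || e.kind == ErrorKind::VersionDisplayed => e.exit(),
        Err(e) => {
            eprintln!("argument error ({:?}): {}", e.kind, e.message);
            std::process::exit(2);
        }
    }
}
```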
+ /// The message will be sent to `stdout`. + /// + /// # Examples + /// + /// ```rust + /// # use clap::{App, Arg, ErrorKind}; + /// let result = App::new("prog") + /// .get_matches_from_safe(vec!["prog", "--version"]); + /// assert!(result.is_err()); + /// assert_eq!(result.unwrap_err().kind, ErrorKind::VersionDisplayed); + /// ``` + VersionDisplayed, + + /// Occurs when using the [`value_t!`] and [`values_t!`] macros to convert an argument value + /// into type `T`, but the argument you requested wasn't used. I.e. you asked for an argument + /// with name `config` to be converted, but `config` wasn't used by the user. + /// [`value_t!`]: ./macro.value_t!.html + /// [`values_t!`]: ./macro.values_t!.html + ArgumentNotFound, + + /// Represents an [I/O error]. + /// Can occur when writing to `stderr` or `stdout` or reading a configuration file. + /// [I/O error]: https://doc.rust-lang.org/std/io/struct.Error.html + Io, + + /// Represents a [Format error] (which is a part of [`Display`]). + /// Typically caused by writing to `stderr` or `stdout`. + /// [`Display`]: https://doc.rust-lang.org/std/fmt/trait.Display.html + /// [Format error]: https://doc.rust-lang.org/std/fmt/struct.Error.html + Format, +} + +/// Command Line Argument Parser Error +#[derive(Debug)] +pub struct Error { + /// Formatted error message + pub message: String, + /// The type of error + pub kind: ErrorKind, + /// Any additional information passed along, such as the argument name that caused the error + pub info: Option>, +} + +impl Error { + /// Should the message be written to `stdout` or not + pub fn use_stderr(&self) -> bool { + match self.kind { + ErrorKind::HelpDisplayed | ErrorKind::VersionDisplayed => false, + _ => true, + } + } + + /// Prints the error to `stderr` and exits with a status of `1` + pub fn exit(&self) -> ! 
{ + if self.use_stderr() { + wlnerr!("{}", self.message); + process::exit(1); + } + let out = io::stdout(); + writeln!(&mut out.lock(), "{}", self.message).expect("Error writing Error to stdout"); + process::exit(0); + } + + #[doc(hidden)] + pub fn write_to(&self, w: &mut W) -> io::Result<()> { write!(w, "{}", self.message) } + + #[doc(hidden)] + pub fn argument_conflict( + arg: &AnyArg, + other: Option, + usage: U, + color: ColorWhen, + ) -> Self + where + O: Into, + U: Display, + { + let mut v = vec![arg.name().to_owned()]; + let c = Colorizer::new(ColorizerOption { + use_stderr: true, + when: color, + }); + Error { + message: format!( + "{} The argument '{}' cannot be used with {}\n\n\ + {}\n\n\ + For more information try {}", + c.error("error:"), + c.warning(&*arg.to_string()), + match other { + Some(name) => { + let n = name.into(); + v.push(n.clone()); + c.warning(format!("'{}'", n)) + } + None => c.none("one or more of the other specified arguments".to_owned()), + }, + usage, + c.good("--help") + ), + kind: ErrorKind::ArgumentConflict, + info: Some(v), + } + } + + #[doc(hidden)] + pub fn empty_value(arg: &AnyArg, usage: U, color: ColorWhen) -> Self + where + U: Display, + { + let c = Colorizer::new(ColorizerOption { + use_stderr: true, + when: color, + }); + Error { + message: format!( + "{} The argument '{}' requires a value but none was supplied\ + \n\n\ + {}\n\n\ + For more information try {}", + c.error("error:"), + c.warning(arg.to_string()), + usage, + c.good("--help") + ), + kind: ErrorKind::EmptyValue, + info: Some(vec![arg.name().to_owned()]), + } + } + + #[doc(hidden)] + pub fn invalid_value( + bad_val: B, + good_vals: &[G], + arg: &AnyArg, + usage: U, + color: ColorWhen, + ) -> Self + where + B: AsRef, + G: AsRef + Display, + U: Display, + { + let c = Colorizer::new(ColorizerOption { + use_stderr: true, + when: color, + }); + let suffix = suggestions::did_you_mean_value_suffix(bad_val.as_ref(), good_vals.iter()); + + let mut sorted = vec![]; + for v in good_vals { + let val = format!("{}", c.good(v)); + sorted.push(val); + } + sorted.sort(); + let valid_values = sorted.join(", "); + Error { + message: format!( + "{} '{}' isn't a valid value for '{}'\n\t\ + [possible values: {}]\n\ + {}\n\n\ + {}\n\n\ + For more information try {}", + c.error("error:"), + c.warning(bad_val.as_ref()), + c.warning(arg.to_string()), + valid_values, + suffix.0, + usage, + c.good("--help") + ), + kind: ErrorKind::InvalidValue, + info: Some(vec![arg.name().to_owned(), bad_val.as_ref().to_owned()]), + } + } + + #[doc(hidden)] + pub fn invalid_subcommand( + subcmd: S, + did_you_mean: D, + name: N, + usage: U, + color: ColorWhen, + ) -> Self + where + S: Into, + D: AsRef + Display, + N: Display, + U: Display, + { + let s = subcmd.into(); + let c = Colorizer::new(ColorizerOption { + use_stderr: true, + when: color, + }); + Error { + message: format!( + "{} The subcommand '{}' wasn't recognized\n\t\ + Did you mean '{}'?\n\n\ + If you believe you received this message in error, try \ + re-running with '{} {} {}'\n\n\ + {}\n\n\ + For more information try {}", + c.error("error:"), + c.warning(&*s), + c.good(did_you_mean.as_ref()), + name, + c.good("--"), + &*s, + usage, + c.good("--help") + ), + kind: ErrorKind::InvalidSubcommand, + info: Some(vec![s]), + } + } + + #[doc(hidden)] + pub fn unrecognized_subcommand(subcmd: S, name: N, color: ColorWhen) -> Self + where + S: Into, + N: Display, + { + let s = subcmd.into(); + let c = Colorizer::new(ColorizerOption { + use_stderr: true, + when: color, + }); + 
Error { + message: format!( + "{} The subcommand '{}' wasn't recognized\n\n\ + {}\n\t\ + {} help ...\n\n\ + For more information try {}", + c.error("error:"), + c.warning(&*s), + c.warning("USAGE:"), + name, + c.good("--help") + ), + kind: ErrorKind::UnrecognizedSubcommand, + info: Some(vec![s]), + } + } + + #[doc(hidden)] + pub fn missing_required_argument(required: R, usage: U, color: ColorWhen) -> Self + where + R: Display, + U: Display, + { + let c = Colorizer::new(ColorizerOption { + use_stderr: true, + when: color, + }); + Error { + message: format!( + "{} The following required arguments were not provided:{}\n\n\ + {}\n\n\ + For more information try {}", + c.error("error:"), + required, + usage, + c.good("--help") + ), + kind: ErrorKind::MissingRequiredArgument, + info: None, + } + } + + #[doc(hidden)] + pub fn missing_subcommand(name: N, usage: U, color: ColorWhen) -> Self + where + N: AsRef + Display, + U: Display, + { + let c = Colorizer::new(ColorizerOption { + use_stderr: true, + when: color, + }); + Error { + message: format!( + "{} '{}' requires a subcommand, but one was not provided\n\n\ + {}\n\n\ + For more information try {}", + c.error("error:"), + c.warning(name), + usage, + c.good("--help") + ), + kind: ErrorKind::MissingSubcommand, + info: None, + } + } + + + #[doc(hidden)] + pub fn invalid_utf8(usage: U, color: ColorWhen) -> Self + where + U: Display, + { + let c = Colorizer::new(ColorizerOption { + use_stderr: true, + when: color, + }); + Error { + message: format!( + "{} Invalid UTF-8 was detected in one or more arguments\n\n\ + {}\n\n\ + For more information try {}", + c.error("error:"), + usage, + c.good("--help") + ), + kind: ErrorKind::InvalidUtf8, + info: None, + } + } + + #[doc(hidden)] + pub fn too_many_values(val: V, arg: &AnyArg, usage: U, color: ColorWhen) -> Self + where + V: AsRef + Display + ToOwned, + U: Display, + { + let v = val.as_ref(); + let c = Colorizer::new(ColorizerOption { + use_stderr: true, + when: color, + }); + Error { + message: format!( + "{} The value '{}' was provided to '{}', but it wasn't expecting \ + any more values\n\n\ + {}\n\n\ + For more information try {}", + c.error("error:"), + c.warning(v), + c.warning(arg.to_string()), + usage, + c.good("--help") + ), + kind: ErrorKind::TooManyValues, + info: Some(vec![arg.name().to_owned(), v.to_owned()]), + } + } + + #[doc(hidden)] + pub fn too_few_values( + arg: &AnyArg, + min_vals: u64, + curr_vals: usize, + usage: U, + color: ColorWhen, + ) -> Self + where + U: Display, + { + let c = Colorizer::new(ColorizerOption { + use_stderr: true, + when: color, + }); + Error { + message: format!( + "{} The argument '{}' requires at least {} values, but only {} w{} \ + provided\n\n\ + {}\n\n\ + For more information try {}", + c.error("error:"), + c.warning(arg.to_string()), + c.warning(min_vals.to_string()), + c.warning(curr_vals.to_string()), + if curr_vals > 1 { "ere" } else { "as" }, + usage, + c.good("--help") + ), + kind: ErrorKind::TooFewValues, + info: Some(vec![arg.name().to_owned()]), + } + } + + #[doc(hidden)] + pub fn value_validation(arg: Option<&AnyArg>, err: String, color: ColorWhen) -> Self + { + let c = Colorizer::new(ColorizerOption { + use_stderr: true, + when: color, + }); + Error { + message: format!( + "{} Invalid value{}: {}", + c.error("error:"), + if let Some(a) = arg { + format!(" for '{}'", c.warning(a.to_string())) + } else { + "".to_string() + }, + err + ), + kind: ErrorKind::ValueValidation, + info: None, + } + } + + #[doc(hidden)] + pub fn value_validation_auto(err: 
String) -> Self { + let n: Option<&AnyArg> = None; + Error::value_validation(n, err, ColorWhen::Auto) + } + + #[doc(hidden)] + pub fn wrong_number_of_values( + arg: &AnyArg, + num_vals: u64, + curr_vals: usize, + suffix: S, + usage: U, + color: ColorWhen, + ) -> Self + where + S: Display, + U: Display, + { + let c = Colorizer::new(ColorizerOption { + use_stderr: true, + when: color, + }); + Error { + message: format!( + "{} The argument '{}' requires {} values, but {} w{} \ + provided\n\n\ + {}\n\n\ + For more information try {}", + c.error("error:"), + c.warning(arg.to_string()), + c.warning(num_vals.to_string()), + c.warning(curr_vals.to_string()), + suffix, + usage, + c.good("--help") + ), + kind: ErrorKind::WrongNumberOfValues, + info: Some(vec![arg.name().to_owned()]), + } + } + + #[doc(hidden)] + pub fn unexpected_multiple_usage(arg: &AnyArg, usage: U, color: ColorWhen) -> Self + where + U: Display, + { + let c = Colorizer::new(ColorizerOption { + use_stderr: true, + when: color, + }); + Error { + message: format!( + "{} The argument '{}' was provided more than once, but cannot \ + be used multiple times\n\n\ + {}\n\n\ + For more information try {}", + c.error("error:"), + c.warning(arg.to_string()), + usage, + c.good("--help") + ), + kind: ErrorKind::UnexpectedMultipleUsage, + info: Some(vec![arg.name().to_owned()]), + } + } + + #[doc(hidden)] + pub fn unknown_argument(arg: A, did_you_mean: &str, usage: U, color: ColorWhen) -> Self + where + A: Into, + U: Display, + { + let a = arg.into(); + let c = Colorizer::new(ColorizerOption { + use_stderr: true, + when: color, + }); + Error { + message: format!( + "{} Found argument '{}' which wasn't expected, or isn't valid in \ + this context{}\n\ + {}\n\n\ + For more information try {}", + c.error("error:"), + c.warning(&*a), + if did_you_mean.is_empty() { + "\n".to_owned() + } else { + format!("{}\n", did_you_mean) + }, + usage, + c.good("--help") + ), + kind: ErrorKind::UnknownArgument, + info: Some(vec![a]), + } + } + + #[doc(hidden)] + pub fn io_error(e: &Error, color: ColorWhen) -> Self { + let c = Colorizer::new(ColorizerOption { + use_stderr: true, + when: color, + }); + Error { + message: format!("{} {}", c.error("error:"), e.description()), + kind: ErrorKind::Io, + info: None, + } + } + + #[doc(hidden)] + pub fn argument_not_found_auto(arg: A) -> Self + where + A: Into, + { + let a = arg.into(); + let c = Colorizer::new(ColorizerOption { + use_stderr: true, + when: ColorWhen::Auto, + }); + Error { + message: format!( + "{} The argument '{}' wasn't found", + c.error("error:"), + a.clone() + ), + kind: ErrorKind::ArgumentNotFound, + info: Some(vec![a]), + } + } + + /// Create an error with a custom description. + /// + /// This can be used in combination with `Error::exit` to exit your program + /// with a custom error message. 
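A minimal sketch, not part of the vendored sources, of the pattern the doc comment above describes: building a custom error with `Error::with_description` and letting `Error::exit` print it and terminate. The config-file check is a hypothetical application-level validation.

```rust
// Hedged sketch: report an application-level failure through clap's error machinery.
extern crate clap;
use clap::{Error, ErrorKind};
use std::path::Path;

fn main() {
    let config = "app.toml"; // hypothetical path
    if !Path::new(config).exists() {
        // use_stderr() is true for ErrorKind::Io, so this prints the formatted
        // "error: ..." message to stderr and exits with status 1.
        Error::with_description(
            &format!("configuration file '{}' was not found", config),
            ErrorKind::Io,
        ).exit();
    }
    println!("using configuration from {}", config);
}
```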
+ pub fn with_description(description: &str, kind: ErrorKind) -> Self { + let c = Colorizer::new(ColorizerOption { + use_stderr: true, + when: ColorWhen::Auto, + }); + Error { + message: format!("{} {}", c.error("error:"), description), + kind: kind, + info: None, + } + } +} + +impl StdError for Error { + fn description(&self) -> &str { &*self.message } +} + +impl Display for Error { + fn fmt(&self, f: &mut std_fmt::Formatter) -> std_fmt::Result { writeln!(f, "{}", self.message) } +} + +impl From for Error { + fn from(e: io::Error) -> Self { Error::with_description(e.description(), ErrorKind::Io) } +} + +impl From for Error { + fn from(e: std_fmt::Error) -> Self { + Error::with_description(e.description(), ErrorKind::Format) + } +} diff --git a/clap/src/fmt.rs b/clap/src/fmt.rs new file mode 100644 index 000000000..334c5b103 --- /dev/null +++ b/clap/src/fmt.rs @@ -0,0 +1,189 @@ +#[cfg(all(feature = "color", not(target_os = "windows")))] +use ansi_term::ANSIString; + +#[cfg(all(feature = "color", not(target_os = "windows")))] +use ansi_term::Colour::{Green, Red, Yellow}; + +#[cfg(feature = "color")] +use atty; +use std::fmt; +use std::env; + +#[doc(hidden)] +#[derive(Debug, Copy, Clone, PartialEq)] +pub enum ColorWhen { + Auto, + Always, + Never, +} + +#[cfg(feature = "color")] +pub fn is_a_tty(stderr: bool) -> bool { + debugln!("is_a_tty: stderr={:?}", stderr); + let stream = if stderr { + atty::Stream::Stderr + } else { + atty::Stream::Stdout + }; + atty::is(stream) +} + +#[cfg(not(feature = "color"))] +pub fn is_a_tty(_: bool) -> bool { + debugln!("is_a_tty;"); + false +} + +pub fn is_term_dumb() -> bool { env::var("TERM").ok() == Some(String::from("dumb")) } + +#[doc(hidden)] +pub struct ColorizerOption { + pub use_stderr: bool, + pub when: ColorWhen, +} + +#[doc(hidden)] +pub struct Colorizer { + when: ColorWhen, +} + +macro_rules! color { + ($_self:ident, $c:ident, $m:expr) => { + match $_self.when { + ColorWhen::Auto => Format::$c($m), + ColorWhen::Always => Format::$c($m), + ColorWhen::Never => Format::None($m), + } + }; +} + +impl Colorizer { + pub fn new(option: ColorizerOption) -> Colorizer { + let is_a_tty = is_a_tty(option.use_stderr); + let is_term_dumb = is_term_dumb(); + Colorizer { + when: if is_a_tty && !is_term_dumb { + option.when + } else { + ColorWhen::Never + }, + } + } + + pub fn good(&self, msg: T) -> Format + where + T: fmt::Display + AsRef, + { + debugln!("Colorizer::good;"); + color!(self, Good, msg) + } + + pub fn warning(&self, msg: T) -> Format + where + T: fmt::Display + AsRef, + { + debugln!("Colorizer::warning;"); + color!(self, Warning, msg) + } + + pub fn error(&self, msg: T) -> Format + where + T: fmt::Display + AsRef, + { + debugln!("Colorizer::error;"); + color!(self, Error, msg) + } + + pub fn none(&self, msg: T) -> Format + where + T: fmt::Display + AsRef, + { + debugln!("Colorizer::none;"); + Format::None(msg) + } +} + +impl Default for Colorizer { + fn default() -> Self { + Colorizer::new(ColorizerOption { + use_stderr: true, + when: ColorWhen::Auto, + }) + } +} + +/// Defines styles for different types of error messages. 
Defaults to Error=Red, Warning=Yellow, +/// and Good=Green +#[derive(Debug)] +#[doc(hidden)] +pub enum Format { + /// Defines the style used for errors, defaults to Red + Error(T), + /// Defines the style used for warnings, defaults to Yellow + Warning(T), + /// Defines the style used for good values, defaults to Green + Good(T), + /// Defines no formatting style + None(T), +} + +#[cfg(all(feature = "color", not(target_os = "windows")))] +impl> Format { + fn format(&self) -> ANSIString { + match *self { + Format::Error(ref e) => Red.bold().paint(e.as_ref()), + Format::Warning(ref e) => Yellow.paint(e.as_ref()), + Format::Good(ref e) => Green.paint(e.as_ref()), + Format::None(ref e) => ANSIString::from(e.as_ref()), + } + } +} + +#[cfg(any(not(feature = "color"), target_os = "windows"))] +#[cfg_attr(feature = "lints", allow(match_same_arms))] +impl Format { + fn format(&self) -> &T { + match *self { + Format::Error(ref e) => e, + Format::Warning(ref e) => e, + Format::Good(ref e) => e, + Format::None(ref e) => e, + } + } +} + + +#[cfg(all(feature = "color", not(target_os = "windows")))] +impl> fmt::Display for Format { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", &self.format()) } +} + +#[cfg(any(not(feature = "color"), target_os = "windows"))] +impl fmt::Display for Format { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", &self.format()) } +} + +#[cfg(all(test, feature = "color", not(target_os = "windows")))] +mod test { + use ansi_term::ANSIString; + use ansi_term::Colour::{Green, Red, Yellow}; + use super::Format; + + #[test] + fn colored_output() { + let err = Format::Error("error"); + assert_eq!( + &*format!("{}", err), + &*format!("{}", Red.bold().paint("error")) + ); + let good = Format::Good("good"); + assert_eq!(&*format!("{}", good), &*format!("{}", Green.paint("good"))); + let warn = Format::Warning("warn"); + assert_eq!(&*format!("{}", warn), &*format!("{}", Yellow.paint("warn"))); + let none = Format::None("none"); + assert_eq!( + &*format!("{}", none), + &*format!("{}", ANSIString::from("none")) + ); + } +} diff --git a/clap/src/lib.rs b/clap/src/lib.rs new file mode 100644 index 000000000..5a10fc480 --- /dev/null +++ b/clap/src/lib.rs @@ -0,0 +1,625 @@ +// Copyright ⓒ 2015-2016 Kevin B. Knapp and [`clap-rs` contributors](https://github.com/kbknapp/clap-rs/blob/master/CONTRIBUTORS.md). +// Licensed under the MIT license +// (see LICENSE or ) All files in the project carrying such +// notice may not be copied, modified, or distributed except according to those terms. + +//! `clap` is a simple-to-use, efficient, and full-featured library for parsing command line +//! arguments and subcommands when writing console/terminal applications. +//! +//! ## About +//! +//! `clap` is used to parse *and validate* the string of command line arguments provided by the user +//! at runtime. You provide the list of valid possibilities, and `clap` handles the rest. This means +//! you focus on your *applications* functionality, and less on the parsing and validating of +//! arguments. +//! +//! `clap` also provides the traditional version and help switches (or flags) 'for free' meaning +//! automatically with no configuration. It does this by checking list of valid possibilities you +//! supplied and adding only the ones you haven't already defined. If you are using subcommands, +//! `clap` will also auto-generate a `help` subcommand for you in addition to the traditional flags. +//! +//! 
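A minimal sketch, not part of the vendored sources: the `Format` wrapper from fmt.rs above is re-exported at the crate root and simply pairs a message with a style; when the crate is built with the `color` feature on non-Windows targets it renders through `ansi_term`, otherwise the text passes through unchanged.

```rust
// Hedged sketch: styling ad-hoc messages with the re-exported Format wrapper.
extern crate clap;
use clap::Format;

fn main() {
    eprintln!("{} something went wrong", Format::Error("error:"));
    eprintln!("{} double-check the provided flags", Format::Warning("warning:"));
    println!("{} all arguments parsed", Format::Good("ok:"));
}
```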
Once `clap` parses the user provided string of arguments, it returns the matches along with any +//! applicable values. If the user made an error or typo, `clap` informs them of the mistake and +//! exits gracefully (or returns a `Result` type and allows you to perform any clean up prior to +//! exit). Because of this, you can make reasonable assumptions in your code about the validity of +//! the arguments. +//! +//! +//! ## Quick Example +//! +//! The following examples show a quick example of some of the very basic functionality of `clap`. +//! For more advanced usage, such as requirements, conflicts, groups, multiple values and +//! occurrences see the [documentation](https://docs.rs/clap/), [examples/](https://github.com/kbknapp/clap-rs/tree/master/examples) directory of +//! this repository or the [video tutorials](https://www.youtube.com/playlist?list=PLza5oFLQGTl2Z5T8g1pRkIynR3E0_pc7U). +//! +//! **NOTE:** All of these examples are functionally the same, but show different styles in which to +//! use `clap` +//! +//! The first example shows a method that allows more advanced configuration options (not shown in +//! this small example), or even dynamically generating arguments when desired. The downside is it's +//! more verbose. +//! +//! ```no_run +//! // (Full example with detailed comments in examples/01b_quick_example.rs) +//! // +//! // This example demonstrates clap's full 'builder pattern' style of creating arguments which is +//! // more verbose, but allows easier editing, and at times more advanced options, or the possibility +//! // to generate arguments dynamically. +//! extern crate clap; +//! use clap::{Arg, App, SubCommand}; +//! +//! fn main() { +//! let matches = App::new("My Super Program") +//! .version("1.0") +//! .author("Kevin K. ") +//! .about("Does awesome things") +//! .arg(Arg::with_name("config") +//! .short("c") +//! .long("config") +//! .value_name("FILE") +//! .help("Sets a custom config file") +//! .takes_value(true)) +//! .arg(Arg::with_name("INPUT") +//! .help("Sets the input file to use") +//! .required(true) +//! .index(1)) +//! .arg(Arg::with_name("v") +//! .short("v") +//! .multiple(true) +//! .help("Sets the level of verbosity")) +//! .subcommand(SubCommand::with_name("test") +//! .about("controls testing features") +//! .version("1.3") +//! .author("Someone E. ") +//! .arg(Arg::with_name("debug") +//! .short("d") +//! .help("print debug information verbosely"))) +//! .get_matches(); +//! +//! // Gets a value for config if supplied by user, or defaults to "default.conf" +//! let config = matches.value_of("config").unwrap_or("default.conf"); +//! println!("Value for config: {}", config); +//! +//! // Calling .unwrap() is safe here because "INPUT" is required (if "INPUT" wasn't +//! // required we could have used an 'if let' to conditionally get the value) +//! println!("Using input file: {}", matches.value_of("INPUT").unwrap()); +//! +//! // Vary the output based on how many times the user used the "verbose" flag +//! // (i.e. 'myprog -v -v -v' or 'myprog -vvv' vs 'myprog -v' +//! match matches.occurrences_of("v") { +//! 0 => println!("No verbose info"), +//! 1 => println!("Some verbose info"), +//! 2 => println!("Tons of verbose info"), +//! 3 | _ => println!("Don't be crazy"), +//! } +//! +//! // You can handle information about subcommands by requesting their matches by name +//! // (as below), requesting just the name used, or both at the same time +//! if let Some(matches) = matches.subcommand_matches("test") { +//! 
if matches.is_present("debug") { +//! println!("Printing debug info..."); +//! } else { +//! println!("Printing normally..."); +//! } +//! } +//! +//! // more program logic goes here... +//! } +//! ``` +//! +//! The next example shows a far less verbose method, but sacrifices some of the advanced +//! configuration options (not shown in this small example). This method also takes a *very* minor +//! runtime penalty. +//! +//! ```no_run +//! // (Full example with detailed comments in examples/01a_quick_example.rs) +//! // +//! // This example demonstrates clap's "usage strings" method of creating arguments +//! // which is less verbose +//! extern crate clap; +//! use clap::{Arg, App, SubCommand}; +//! +//! fn main() { +//! let matches = App::new("myapp") +//! .version("1.0") +//! .author("Kevin K. ") +//! .about("Does awesome things") +//! .args_from_usage( +//! "-c, --config=[FILE] 'Sets a custom config file' +//! 'Sets the input file to use' +//! -v... 'Sets the level of verbosity'") +//! .subcommand(SubCommand::with_name("test") +//! .about("controls testing features") +//! .version("1.3") +//! .author("Someone E. ") +//! .arg_from_usage("-d, --debug 'Print debug information'")) +//! .get_matches(); +//! +//! // Same as previous example... +//! } +//! ``` +//! +//! This third method shows how you can use a YAML file to build your CLI and keep your Rust source +//! tidy or support multiple localized translations by having different YAML files for each +//! localization. +//! +//! First, create the `cli.yml` file to hold your CLI options, but it could be called anything we +//! like: +//! +//! ```yaml +//! name: myapp +//! version: "1.0" +//! author: Kevin K. +//! about: Does awesome things +//! args: +//! - config: +//! short: c +//! long: config +//! value_name: FILE +//! help: Sets a custom config file +//! takes_value: true +//! - INPUT: +//! help: Sets the input file to use +//! required: true +//! index: 1 +//! - verbose: +//! short: v +//! multiple: true +//! help: Sets the level of verbosity +//! subcommands: +//! - test: +//! about: controls testing features +//! version: "1.3" +//! author: Someone E. +//! args: +//! - debug: +//! short: d +//! help: print debug information +//! ``` +//! +//! Since this feature requires additional dependencies that not everyone may want, it is *not* +//! compiled in by default and we need to enable a feature flag in Cargo.toml: +//! +//! Simply change your `clap = "~2.27.0"` to `clap = {version = "~2.27.0", features = ["yaml"]}`. +//! +//! At last we create our `main.rs` file just like we would have with the previous two examples: +//! +//! ```ignore +//! // (Full example with detailed comments in examples/17_yaml.rs) +//! // +//! // This example demonstrates clap's building from YAML style of creating arguments which is far +//! // more clean, but takes a very small performance hit compared to the other two methods. +//! #[macro_use] +//! extern crate clap; +//! use clap::App; +//! +//! fn main() { +//! // The YAML file is found relative to the current file, similar to how modules are found +//! let yaml = load_yaml!("cli.yml"); +//! let matches = App::from_yaml(yaml).get_matches(); +//! +//! // Same as previous examples... +//! } +//! ``` +//! +//! Finally there is a macro version, which is like a hybrid approach offering the speed of the +//! builder pattern (the first example), but without all the verbosity. +//! +//! ```no_run +//! #[macro_use] +//! extern crate clap; +//! +//! fn main() { +//! let matches = clap_app!(myapp => +//! 
(version: "1.0") +//! (author: "Kevin K. ") +//! (about: "Does awesome things") +//! (@arg CONFIG: -c --config +takes_value "Sets a custom config file") +//! (@arg INPUT: +required "Sets the input file to use") +//! (@arg debug: -d ... "Sets the level of debugging information") +//! (@subcommand test => +//! (about: "controls testing features") +//! (version: "1.3") +//! (author: "Someone E. ") +//! (@arg verbose: -v --verbose "Print test information verbosely") +//! ) +//! ).get_matches(); +//! +//! // Same as before... +//! } +//! ``` +//! +//! If you were to compile any of the above programs and run them with the flag `--help` or `-h` (or +//! `help` subcommand, since we defined `test` as a subcommand) the following would be output +//! +//! ```text +//! $ myprog --help +//! My Super Program 1.0 +//! Kevin K. +//! Does awesome things +//! +//! USAGE: +//! MyApp [FLAGS] [OPTIONS] [SUBCOMMAND] +//! +//! FLAGS: +//! -h, --help Prints this message +//! -v Sets the level of verbosity +//! -V, --version Prints version information +//! +//! OPTIONS: +//! -c, --config Sets a custom config file +//! +//! ARGS: +//! INPUT The input file to use +//! +//! SUBCOMMANDS: +//! help Prints this message +//! test Controls testing features +//! ``` +//! +//! **NOTE:** You could also run `myapp test --help` to see similar output and options for the +//! `test` subcommand. +//! +//! ## Try it! +//! +//! ### Pre-Built Test +//! +//! To try out the pre-built example, use the following steps: +//! +//! * Clone the repository `$ git clone https://github.com/kbknapp/clap-rs && cd clap-rs/tests` +//! * Compile the example `$ cargo build --release` +//! * Run the help info `$ ./target/release/claptests --help` +//! * Play with the arguments! +//! +//! ### BYOB (Build Your Own Binary) +//! +//! To test out `clap`'s default auto-generated help/version follow these steps: +//! +//! * Create a new cargo project `$ cargo new fake --bin && cd fake` +//! * Add `clap` to your `Cargo.toml` +//! +//! ```toml +//! [dependencies] +//! clap = "2" +//! ``` +//! +//! * Add the following to your `src/main.rs` +//! +//! ```no_run +//! extern crate clap; +//! use clap::App; +//! +//! fn main() { +//! App::new("fake").version("v1.0-beta").get_matches(); +//! } +//! ``` +//! +//! * Build your program `$ cargo build --release` +//! * Run with help or version `$ ./target/release/fake --help` or `$ ./target/release/fake +//! --version` +//! +//! ## Usage +//! +//! For full usage, add `clap` as a dependency in your `Cargo.toml` (it is **highly** recommended to +//! use the `~major.minor.patch` style versions in your `Cargo.toml`, for more information see +//! [Compatibility Policy](#compatibility-policy)) to use from crates.io: +//! +//! ```toml +//! [dependencies] +//! clap = "~2.27.0" +//! ``` +//! +//! Or get the latest changes from the master branch at github: +//! +//! ```toml +//! [dependencies.clap] +//! git = "https://github.com/kbknapp/clap-rs.git" +//! ``` +//! +//! Add `extern crate clap;` to your crate root. +//! +//! Define a list of valid arguments for your program (see the +//! [documentation](https://docs.rs/clap/) or [examples/](examples) directory of this repo) +//! +//! Then run `cargo build` or `cargo update && cargo build` for your project. +//! +//! ### Optional Dependencies / Features +//! +//! #### Features enabled by default +//! +//! * `suggestions`: Turns on the `Did you mean '--myoption'?` feature for when users make typos. (builds dependency `strsim`) +//! * `color`: Turns on colored error messages. 
This feature only works on non-Windows OSs. (builds dependency `ansi-term` and `atty`) +//! * `wrap_help`: Wraps the help at the actual terminal width when +//! available, instead of 120 characters. (builds dependency `textwrap` +//! with feature `term_size`) +//! +//! To disable these, add this to your `Cargo.toml`: +//! +//! ```toml +//! [dependencies.clap] +//! version = "~2.27.0" +//! default-features = false +//! ``` +//! +//! You can also selectively enable only the features you'd like to include, by adding: +//! +//! ```toml +//! [dependencies.clap] +//! version = "~2.27.0" +//! default-features = false +//! +//! # Cherry-pick the features you'd like to use +//! features = [ "suggestions", "color" ] +//! ``` +//! +//! #### Opt-in features +//! +//! * **"yaml"**: Enables building CLIs from YAML documents. (builds dependency `yaml-rust`) +//! * **"unstable"**: Enables unstable `clap` features that may change from release to release +//! +//! ### Dependencies Tree +//! +//! The following graphic depicts `clap`s dependency graph (generated using +//! [cargo-graph](https://github.com/kbknapp/cargo-graph)). +//! +//! * **Dashed** Line: Optional dependency +//! * **Red** Color: **NOT** included by default (must use cargo `features` to enable) +//! * **Blue** Color: Dev dependency, only used while developing. +//! +//! ![clap dependencies](https://raw.githubusercontent.com/kbknapp/clap-rs/master/clap_dep_graph.png) +//! +//! ### More Information +//! +//! You can find complete documentation on the [docs.rs](https://docs.rs/clap/) for this project. +//! +//! You can also find usage examples in the [examples/](https://github.com/kbknapp/clap-rs/tree/master/examples) directory of this repo. +//! +//! #### Video Tutorials +//! +//! There's also the video tutorial series [Argument Parsing with Rust v2](https://www.youtube.com/playlist?list=PLza5oFLQGTl2Z5T8g1pRkIynR3E0_pc7U). +//! +//! These videos slowly trickle out as I finish them and currently a work in progress. +//! +//! ## How to Contribute +//! +//! Contributions are always welcome! And there is a multitude of ways in which you can help +//! depending on what you like to do, or are good at. Anything from documentation, code cleanup, +//! issue completion, new features, you name it, even filing issues is contributing and greatly +//! appreciated! +//! +//! Another really great way to help is if you find an interesting, or helpful way in which to use +//! `clap`. You can either add it to the [examples/](examples) directory, or file an issue and tell +//! me. I'm all about giving credit where credit is due :) +//! +//! Please read [CONTRIBUTING.md](https://raw.githubusercontent.com/kbknapp/clap-rs/master/.github/CONTRIBUTING.md) before you start contributing. +//! +//! +//! ### Testing Code +//! +//! To test with all features both enabled and disabled, you can run theese commands: +//! +//! ```text +//! $ cargo test --no-default-features +//! $ cargo test --features "yaml unstable" +//! ``` +//! +//! Alternatively, if you have [`just`](https://github.com/casey/just) installed you can run the +//! prebuilt recipes. *Not* using `just` is perfectly fine as well, it simply bundles commands +//! automatically. +//! +//! For example, to test the code, as above simply run: +//! +//! ```text +//! $ just run-tests +//! ``` +//! +//! From here on, I will list the appropriate `cargo` command as well as the `just` command. +//! +//! Sometimes it's helpful to only run a subset of the tests, which can be done via: +//! +//! ```text +//! 
$ cargo test --test +//! +//! # Or +//! +//! $ just run-test +//! ``` +//! +//! ### Linting Code +//! +//! During the CI process `clap` runs against many different lints using +//! [`clippy`](https://github.com/Manishearth/rust-clippy). In order to check if these lints pass on +//! your own computer prior to submitting a PR you'll need a nightly compiler. +//! +//! In order to check the code for lints run either: +//! +//! ```text +//! $ rustup override add nightly +//! $ cargo build --features lints +//! $ rustup override remove +//! +//! # Or +//! +//! $ just lint +//! ``` +//! +//! ### Debugging Code +//! +//! Another helpful technique is to see the `clap` debug output while developing features. In order +//! to see the debug output while running the full test suite or individual tests, run: +//! +//! ```text +//! $ cargo test --features debug +//! +//! # Or for individual tests +//! $ cargo test --test --features debug +//! +//! # The corresponding just command for individual debugging tests is: +//! $ just debug +//! ``` +//! +//! ### Goals +//! +//! There are a few goals of `clap` that I'd like to maintain throughout contributions. If your +//! proposed changes break, or go against any of these goals we'll discuss the changes further +//! before merging (but will *not* be ignored, all contributes are welcome!). These are by no means +//! hard-and-fast rules, as I'm no expert and break them myself from time to time (even if by +//! mistake or ignorance). +//! +//! * Remain backwards compatible when possible +//! - If backwards compatibility *must* be broken, use deprecation warnings if at all possible before +//! removing legacy code - This does not apply for security concerns +//! * Parse arguments quickly +//! - Parsing of arguments shouldn't slow down usage of the main program - This is also true of +//! generating help and usage information (although *slightly* less stringent, as the program is about +//! to exit) +//! * Try to be cognizant of memory usage +//! - Once parsing is complete, the memory footprint of `clap` should be low since the main program +//! is the star of the show +//! * `panic!` on *developer* error, exit gracefully on *end-user* error +//! +//! ### Compatibility Policy +//! +//! Because `clap` takes `SemVer` and compatibility seriously, this is the official policy regarding +//! breaking changes and previous versions of Rust. +//! +//! `clap` will pin the minimum required version of Rust to the CI builds. Bumping the minimum +//! version of Rust is considered a minor breaking change, meaning *at a minimum* the minor version +//! of `clap` will be bumped. +//! +//! In order to keep from being surprised by breaking changes, it is **highly** recommended to use +//! the `~major.minor.patch` style in your `Cargo.toml`: +//! +//! ```toml +//! [dependencies] clap = "~2.27.0" +//! ``` +//! +//! This will cause *only* the patch version to be updated upon a `cargo update` call, and therefore +//! cannot break due to new features, or bumped minimum versions of Rust. +//! +//! #### Minimum Version of Rust +//! +//! `clap` will officially support current stable Rust, minus two releases, but may work with prior +//! releases as well. For example, current stable Rust at the time of this writing is 1.21.0, +//! meaning `clap` is guaranteed to compile with 1.19.0 and beyond. At the 1.22.0 release, `clap` +//! will be guaranteed to compile with 1.20.0 and beyond, etc. +//! +//! 
Upon bumping the minimum version of Rust (assuming it's within the stable-2 range), it *must* be +//! clearly annotated in the `CHANGELOG.md` +//! +//! ## License +//! +//! `clap` is licensed under the MIT license. Please read the [LICENSE-MIT](LICENSE-MIT) file in +//! this repository for more information. + +#![crate_type = "lib"] +#![doc(html_root_url = "https://docs.rs/clap/2.32.0")] +#![deny(missing_docs, missing_debug_implementations, missing_copy_implementations, trivial_casts, + unused_import_braces, unused_allocation)] +// Lints we'd like to deny but are currently failing for upstream crates +// unused_qualifications (bitflags, clippy) +// trivial_numeric_casts (bitflags) +#![cfg_attr(not(any(feature = "lints", feature = "nightly")), forbid(unstable_features))] +#![cfg_attr(feature = "lints", feature(plugin))] +#![cfg_attr(feature = "lints", plugin(clippy))] +// Need to disable deny(warnings) while deprecations are active +// #![cfg_attr(feature = "lints", deny(warnings))] +#![cfg_attr(feature = "lints", allow(cyclomatic_complexity))] +#![cfg_attr(feature = "lints", allow(doc_markdown))] +#![cfg_attr(feature = "lints", allow(explicit_iter_loop))] + +#[cfg(all(feature = "color", not(target_os = "windows")))] +extern crate ansi_term; +#[cfg(feature = "color")] +extern crate atty; +#[macro_use] +extern crate bitflags; +#[cfg(feature = "suggestions")] +extern crate strsim; +#[cfg(feature = "wrap_help")] +extern crate term_size; +extern crate textwrap; +extern crate unicode_width; +#[cfg(feature = "vec_map")] +extern crate vec_map; +#[cfg(feature = "yaml")] +extern crate yaml_rust; + +#[cfg(feature = "yaml")] +pub use yaml_rust::YamlLoader; +pub use args::{Arg, ArgGroup, ArgMatches, ArgSettings, OsValues, SubCommand, Values}; +pub use app::{App, AppSettings}; +pub use fmt::Format; +pub use errors::{Error, ErrorKind, Result}; +pub use completions::Shell; + +#[macro_use] +mod macros; +mod app; +mod args; +mod usage_parser; +mod fmt; +mod suggestions; +mod errors; +mod osstringext; +mod strext; +mod completions; +mod map; + +const INTERNAL_ERROR_MSG: &'static str = "Fatal internal error. Please consider filing a bug \ + report at https://github.com/kbknapp/clap-rs/issues"; +const INVALID_UTF8: &'static str = "unexpected invalid UTF-8 code point"; + +#[cfg(unstable)] +pub use derive::{ArgEnum, ClapApp, FromArgMatches, IntoApp}; + +#[cfg(unstable)] +mod derive { + /// @TODO @release @docs + pub trait ClapApp: IntoApp + FromArgMatches + Sized { + /// @TODO @release @docs + fn parse() -> Self { Self::from_argmatches(Self::into_app().get_matches()) } + + /// @TODO @release @docs + fn parse_from(argv: I) -> Self + where + I: IntoIterator, + T: Into + Clone, + { + Self::from_argmatches(Self::into_app().get_matches_from(argv)) + } + + /// @TODO @release @docs + fn try_parse() -> Result { + Self::try_from_argmatches(Self::into_app().get_matches_safe()?) + } + + + /// @TODO @release @docs + fn try_parse_from(argv: I) -> Result + where + I: IntoIterator, + T: Into + Clone, + { + Self::try_from_argmatches(Self::into_app().get_matches_from_safe(argv)?) 
+ } + } + + /// @TODO @release @docs + pub trait IntoApp { + /// @TODO @release @docs + fn into_app<'a, 'b>() -> clap::App<'a, 'b>; + } + + /// @TODO @release @docs + pub trait FromArgMatches: Sized { + /// @TODO @release @docs + fn from_argmatches<'a>(matches: clap::ArgMatches<'a>) -> Self; + + /// @TODO @release @docs + fn try_from_argmatches<'a>(matches: clap::ArgMatches<'a>) -> Result; + } + + /// @TODO @release @docs + pub trait ArgEnum {} +} diff --git a/clap/src/macros.rs b/clap/src/macros.rs new file mode 100644 index 000000000..384a3bf7c --- /dev/null +++ b/clap/src/macros.rs @@ -0,0 +1,1103 @@ +/// A convenience macro for loading the YAML file at compile time (relative to the current file, +/// like modules work). That YAML object can then be passed to this function. +/// +/// # Panics +/// +/// The YAML file must be properly formatted or this function will panic!(). A good way to +/// ensure this doesn't happen is to run your program with the `--help` switch. If this passes +/// without error, you needn't worry because the YAML is properly formatted. +/// +/// # Examples +/// +/// The following example shows how to load a properly formatted YAML file to build an instance +/// of an `App` struct. +/// +/// ```ignore +/// # #[macro_use] +/// # extern crate clap; +/// # use clap::App; +/// # fn main() { +/// let yml = load_yaml!("app.yml"); +/// let app = App::from_yaml(yml); +/// +/// // continued logic goes here, such as `app.get_matches()` etc. +/// # } +/// ``` +#[cfg(feature = "yaml")] +#[macro_export] +macro_rules! load_yaml { + ($yml:expr) => ( + &::clap::YamlLoader::load_from_str(include_str!($yml)).expect("failed to load YAML file")[0] + ); +} + +/// Convenience macro getting a typed value `T` where `T` implements [`std::str::FromStr`] from an +/// argument value. This macro returns a `Result` which allows you as the developer to +/// decide what you'd like to do on a failed parse. There are two types of errors, parse failures +/// and those where the argument wasn't present (such as a non-required argument). You can use +/// it to get a single value, or a iterator as with the [`ArgMatches::values_of`] +/// +/// # Examples +/// +/// ```no_run +/// # #[macro_use] +/// # extern crate clap; +/// # use clap::App; +/// # fn main() { +/// let matches = App::new("myapp") +/// .arg_from_usage("[length] 'Set the length to use as a pos whole num, i.e. 20'") +/// .get_matches(); +/// +/// let len = value_t!(matches.value_of("length"), u32).unwrap_or_else(|e| e.exit()); +/// let also_len = value_t!(matches, "length", u32).unwrap_or_else(|e| e.exit()); +/// +/// println!("{} + 2: {}", len, len + 2); +/// # } +/// ``` +/// [`std::str::FromStr`]: https://doc.rust-lang.org/std/str/trait.FromStr.html +/// [`ArgMatches::values_of`]: ./struct.ArgMatches.html#method.values_of +/// [`Result`]: https://doc.rust-lang.org/std/result/enum.Result.html +#[macro_export] +macro_rules! value_t { + ($m:ident, $v:expr, $t:ty) => { + value_t!($m.value_of($v), $t) + }; + ($m:ident.value_of($v:expr), $t:ty) => { + if let Some(v) = $m.value_of($v) { + match v.parse::<$t>() { + Ok(val) => Ok(val), + Err(_) => + Err(::clap::Error::value_validation_auto( + format!("The argument '{}' isn't a valid value", v))), + } + } else { + Err(::clap::Error::argument_not_found_auto($v)) + } + }; +} + +/// Convenience macro getting a typed value `T` where `T` implements [`std::str::FromStr`] or +/// exiting upon error, instead of returning a [`Result`] type. +/// +/// **NOTE:** This macro is for backwards compatibility sake. 
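A hedged sketch, not part of the vendored sources: the unstable `IntoApp`/`FromArgMatches` traits above carry only `@TODO` placeholder docs, so this shows roughly what a hand-written equivalent might look like for a hypothetical `Opts` struct. It is written as inherent methods (with concrete `'static` lifetimes) because the traits themselves are only compiled under `cfg(unstable)`.

```rust
// Hedged sketch: a hand-rolled stand-in for IntoApp / FromArgMatches.
extern crate clap;
use clap::{App, Arg, ArgMatches};

struct Opts {
    verbose: bool,
    input: String,
}

impl Opts {
    // Mirrors IntoApp::into_app: describe the CLI (the trait uses generic lifetimes).
    fn into_app() -> App<'static, 'static> {
        App::new("myprog")
            .arg(Arg::with_name("verbose").short("v").help("Verbose output"))
            .arg(Arg::with_name("INPUT").help("Input file").required(true).index(1))
    }

    // Mirrors FromArgMatches::from_argmatches: pull typed fields out of the matches.
    fn from_argmatches(matches: ArgMatches) -> Opts {
        Opts {
            verbose: matches.is_present("verbose"),
            // unwrap is safe because INPUT is marked required
            input: matches.value_of("INPUT").unwrap().to_owned(),
        }
    }
}

fn main() {
    let opts = Opts::from_argmatches(Opts::into_app().get_matches());
    println!("verbose={} input={}", opts.verbose, opts.input);
}
```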
Prefer +/// [`value_t!(/* ... */).unwrap_or_else(|e| e.exit())`] +/// +/// # Examples +/// +/// ```no_run +/// # #[macro_use] +/// # extern crate clap; +/// # use clap::App; +/// # fn main() { +/// let matches = App::new("myapp") +/// .arg_from_usage("[length] 'Set the length to use as a pos whole num, i.e. 20'") +/// .get_matches(); +/// +/// let len = value_t_or_exit!(matches.value_of("length"), u32); +/// let also_len = value_t_or_exit!(matches, "length", u32); +/// +/// println!("{} + 2: {}", len, len + 2); +/// # } +/// ``` +/// [`std::str::FromStr`]: https://doc.rust-lang.org/std/str/trait.FromStr.html +/// [`Result`]: https://doc.rust-lang.org/std/result/enum.Result.html +/// [`value_t!(/* ... */).unwrap_or_else(|e| e.exit())`]: ./macro.value_t!.html +#[macro_export] +macro_rules! value_t_or_exit { + ($m:ident, $v:expr, $t:ty) => { + value_t_or_exit!($m.value_of($v), $t) + }; + ($m:ident.value_of($v:expr), $t:ty) => { + if let Some(v) = $m.value_of($v) { + match v.parse::<$t>() { + Ok(val) => val, + Err(_) => + ::clap::Error::value_validation_auto( + format!("The argument '{}' isn't a valid value", v)).exit(), + } + } else { + ::clap::Error::argument_not_found_auto($v).exit() + } + }; +} + +/// Convenience macro getting a typed value [`Vec`] where `T` implements [`std::str::FromStr`] +/// This macro returns a [`clap::Result>`] which allows you as the developer to decide +/// what you'd like to do on a failed parse. +/// +/// # Examples +/// +/// ```no_run +/// # #[macro_use] +/// # extern crate clap; +/// # use clap::App; +/// # fn main() { +/// let matches = App::new("myapp") +/// .arg_from_usage("[seq]... 'A sequence of pos whole nums, i.e. 20 45'") +/// .get_matches(); +/// +/// let vals = values_t!(matches.values_of("seq"), u32).unwrap_or_else(|e| e.exit()); +/// for v in &vals { +/// println!("{} + 2: {}", v, v + 2); +/// } +/// +/// let vals = values_t!(matches, "seq", u32).unwrap_or_else(|e| e.exit()); +/// for v in &vals { +/// println!("{} + 2: {}", v, v + 2); +/// } +/// # } +/// ``` +/// [`std::str::FromStr`]: https://doc.rust-lang.org/std/str/trait.FromStr.html +/// [`Vec`]: https://doc.rust-lang.org/std/vec/struct.Vec.html +/// [`clap::Result>`]: ./type.Result.html +#[macro_export] +macro_rules! values_t { + ($m:ident, $v:expr, $t:ty) => { + values_t!($m.values_of($v), $t) + }; + ($m:ident.values_of($v:expr), $t:ty) => { + if let Some(vals) = $m.values_of($v) { + let mut tmp = vec![]; + let mut err = None; + for pv in vals { + match pv.parse::<$t>() { + Ok(rv) => tmp.push(rv), + Err(..) => { + err = Some(::clap::Error::value_validation_auto( + format!("The argument '{}' isn't a valid value", pv))); + break + } + } + } + match err { + Some(e) => Err(e), + None => Ok(tmp), + } + } else { + Err(::clap::Error::argument_not_found_auto($v)) + } + }; +} + +/// Convenience macro getting a typed value [`Vec`] where `T` implements [`std::str::FromStr`] +/// or exiting upon error. +/// +/// **NOTE:** This macro is for backwards compatibility sake. Prefer +/// [`values_t!(/* ... */).unwrap_or_else(|e| e.exit())`] +/// +/// # Examples +/// +/// ```no_run +/// # #[macro_use] +/// # extern crate clap; +/// # use clap::App; +/// # fn main() { +/// let matches = App::new("myapp") +/// .arg_from_usage("[seq]... 'A sequence of pos whole nums, i.e. 
20 45'") +/// .get_matches(); +/// +/// let vals = values_t_or_exit!(matches.values_of("seq"), u32); +/// for v in &vals { +/// println!("{} + 2: {}", v, v + 2); +/// } +/// +/// // type for example only +/// let vals: Vec = values_t_or_exit!(matches, "seq", u32); +/// for v in &vals { +/// println!("{} + 2: {}", v, v + 2); +/// } +/// # } +/// ``` +/// [`values_t!(/* ... */).unwrap_or_else(|e| e.exit())`]: ./macro.values_t!.html +/// [`std::str::FromStr`]: https://doc.rust-lang.org/std/str/trait.FromStr.html +/// [`Vec`]: https://doc.rust-lang.org/std/vec/struct.Vec.html +#[macro_export] +macro_rules! values_t_or_exit { + ($m:ident, $v:expr, $t:ty) => { + values_t_or_exit!($m.values_of($v), $t) + }; + ($m:ident.values_of($v:expr), $t:ty) => { + if let Some(vals) = $m.values_of($v) { + vals.map(|v| v.parse::<$t>().unwrap_or_else(|_|{ + ::clap::Error::value_validation_auto( + format!("One or more arguments aren't valid values")).exit() + })).collect::>() + } else { + ::clap::Error::argument_not_found_auto($v).exit() + } + }; +} + +// _clap_count_exprs! is derived from https://github.com/DanielKeep/rust-grabbag +// commit: 82a35ca5d9a04c3b920622d542104e3310ee5b07 +// License: MIT +// Copyright ⓒ 2015 grabbag contributors. +// Licensed under the MIT license (see LICENSE or ) or the Apache License, Version 2.0 (see LICENSE of +// ), at your option. All +// files in the project carrying such notice may not be copied, modified, +// or distributed except according to those terms. +// +/// Counts the number of comma-delimited expressions passed to it. The result is a compile-time +/// evaluable expression, suitable for use as a static array size, or the value of a `const`. +/// +/// # Examples +/// +/// ``` +/// # #[macro_use] extern crate clap; +/// # fn main() { +/// const COUNT: usize = _clap_count_exprs!(a, 5+1, "hi there!".into_string()); +/// assert_eq!(COUNT, 3); +/// # } +/// ``` +#[macro_export] +macro_rules! _clap_count_exprs { + () => { 0 }; + ($e:expr) => { 1 }; + ($e:expr, $($es:expr),+) => { 1 + _clap_count_exprs!($($es),*) }; +} + +/// Convenience macro to generate more complete enums with variants to be used as a type when +/// parsing arguments. This enum also provides a `variants()` function which can be used to +/// retrieve a `Vec<&'static str>` of the variant names, as well as implementing [`FromStr`] and +/// [`Display`] automatically. +/// +/// **NOTE:** Case insensitivity is supported for ASCII characters only. It's highly recommended to +/// use [`Arg::case_insensitive(true)`] for args that will be used with these enums +/// +/// **NOTE:** This macro automatically implements [`std::str::FromStr`] and [`std::fmt::Display`] +/// +/// **NOTE:** These enums support pub (or not) and uses of the #[derive()] traits +/// +/// # Examples +/// +/// ```rust +/// # #[macro_use] +/// # extern crate clap; +/// # use clap::{App, Arg}; +/// arg_enum!{ +/// #[derive(PartialEq, Debug)] +/// pub enum Foo { +/// Bar, +/// Baz, +/// Qux +/// } +/// } +/// // Foo enum can now be used via Foo::Bar, or Foo::Baz, etc +/// // and implements std::str::FromStr to use with the value_t! 
macros +/// fn main() { +/// let m = App::new("app") +/// .arg(Arg::from_usage(" 'the foo'") +/// .possible_values(&Foo::variants()) +/// .case_insensitive(true)) +/// .get_matches_from(vec![ +/// "app", "baz" +/// ]); +/// let f = value_t!(m, "foo", Foo).unwrap_or_else(|e| e.exit()); +/// +/// assert_eq!(f, Foo::Baz); +/// } +/// ``` +/// [`FromStr`]: https://doc.rust-lang.org/std/str/trait.FromStr.html +/// [`std::str::FromStr`]: https://doc.rust-lang.org/std/str/trait.FromStr.html +/// [`Display`]: https://doc.rust-lang.org/std/fmt/trait.Display.html +/// [`std::fmt::Display`]: https://doc.rust-lang.org/std/fmt/trait.Display.html +/// [`Arg::case_insensitive(true)`]: ./struct.Arg.html#method.case_insensitive +#[macro_export] +macro_rules! arg_enum { + (@as_item $($i:item)*) => ($($i)*); + (@impls ( $($tts:tt)* ) -> ($e:ident, $($v:ident),+)) => { + arg_enum!(@as_item + $($tts)* + + impl ::std::str::FromStr for $e { + type Err = String; + + fn from_str(s: &str) -> ::std::result::Result { + #[allow(deprecated, unused_imports)] + use ::std::ascii::AsciiExt; + match s { + $(stringify!($v) | + _ if s.eq_ignore_ascii_case(stringify!($v)) => Ok($e::$v)),+, + _ => Err({ + let v = vec![ + $(stringify!($v),)+ + ]; + format!("valid values: {}", + v.join(" ,")) + }), + } + } + } + impl ::std::fmt::Display for $e { + fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { + match *self { + $($e::$v => write!(f, stringify!($v)),)+ + } + } + } + impl $e { + #[allow(dead_code)] + pub fn variants() -> [&'static str; _clap_count_exprs!($(stringify!($v)),+)] { + [ + $(stringify!($v),)+ + ] + } + }); + }; + ($(#[$($m:meta),+])+ pub enum $e:ident { $($v:ident $(=$val:expr)*,)+ } ) => { + arg_enum!(@impls + ($(#[$($m),+])+ + pub enum $e { + $($v$(=$val)*),+ + }) -> ($e, $($v),+) + ); + }; + ($(#[$($m:meta),+])+ pub enum $e:ident { $($v:ident $(=$val:expr)*),+ } ) => { + arg_enum!(@impls + ($(#[$($m),+])+ + pub enum $e { + $($v$(=$val)*),+ + }) -> ($e, $($v),+) + ); + }; + ($(#[$($m:meta),+])+ enum $e:ident { $($v:ident $(=$val:expr)*,)+ } ) => { + arg_enum!(@impls + ($(#[$($m),+])+ + enum $e { + $($v$(=$val)*),+ + }) -> ($e, $($v),+) + ); + }; + ($(#[$($m:meta),+])+ enum $e:ident { $($v:ident $(=$val:expr)*),+ } ) => { + arg_enum!(@impls + ($(#[$($m),+])+ + enum $e { + $($v$(=$val)*),+ + }) -> ($e, $($v),+) + ); + }; + (pub enum $e:ident { $($v:ident $(=$val:expr)*,)+ } ) => { + arg_enum!(@impls + (pub enum $e { + $($v$(=$val)*),+ + }) -> ($e, $($v),+) + ); + }; + (pub enum $e:ident { $($v:ident $(=$val:expr)*),+ } ) => { + arg_enum!(@impls + (pub enum $e { + $($v$(=$val)*),+ + }) -> ($e, $($v),+) + ); + }; + (enum $e:ident { $($v:ident $(=$val:expr)*,)+ } ) => { + arg_enum!(@impls + (enum $e { + $($v$(=$val)*),+ + }) -> ($e, $($v),+) + ); + }; + (enum $e:ident { $($v:ident $(=$val:expr)*),+ } ) => { + arg_enum!(@impls + (enum $e { + $($v$(=$val)*),+ + }) -> ($e, $($v),+) + ); + }; +} + +/// Allows you to pull the version from your Cargo.toml at compile time as +/// `MAJOR.MINOR.PATCH_PKGVERSION_PRE` +/// +/// # Examples +/// +/// ```no_run +/// # #[macro_use] +/// # extern crate clap; +/// # use clap::App; +/// # fn main() { +/// let m = App::new("app") +/// .version(crate_version!()) +/// .get_matches(); +/// # } +/// ``` +#[cfg(not(feature = "no_cargo"))] +#[macro_export] +macro_rules! 
crate_version { + () => { + env!("CARGO_PKG_VERSION") + }; +} + +/// Allows you to pull the authors for the app from your Cargo.toml at +/// compile time in the form: +/// `"author1 lastname :author2 lastname "` +/// +/// You can replace the colons with a custom separator by supplying a +/// replacement string, so, for example, +/// `crate_authors!(",\n")` would become +/// `"author1 lastname ,\nauthor2 lastname ,\nauthor3 lastname "` +/// +/// # Examples +/// +/// ```no_run +/// # #[macro_use] +/// # extern crate clap; +/// # use clap::App; +/// # fn main() { +/// let m = App::new("app") +/// .author(crate_authors!("\n")) +/// .get_matches(); +/// # } +/// ``` +#[cfg(not(feature = "no_cargo"))] +#[macro_export] +macro_rules! crate_authors { + ($sep:expr) => {{ + use std::ops::Deref; + use std::sync::{ONCE_INIT, Once}; + + #[allow(missing_copy_implementations)] + #[allow(dead_code)] + struct CargoAuthors { __private_field: () }; + + impl Deref for CargoAuthors { + type Target = str; + + #[allow(unsafe_code)] + fn deref(&self) -> &'static str { + static ONCE: Once = ONCE_INIT; + static mut VALUE: *const String = 0 as *const String; + + unsafe { + ONCE.call_once(|| { + let s = env!("CARGO_PKG_AUTHORS").replace(':', $sep); + VALUE = Box::into_raw(Box::new(s)); + }); + + &(*VALUE)[..] + } + } + } + + &*CargoAuthors { __private_field: () } + }}; + () => { + env!("CARGO_PKG_AUTHORS") + }; +} + +/// Allows you to pull the description from your Cargo.toml at compile time. +/// +/// # Examples +/// +/// ```no_run +/// # #[macro_use] +/// # extern crate clap; +/// # use clap::App; +/// # fn main() { +/// let m = App::new("app") +/// .about(crate_description!()) +/// .get_matches(); +/// # } +/// ``` +#[cfg(not(feature = "no_cargo"))] +#[macro_export] +macro_rules! crate_description { + () => { + env!("CARGO_PKG_DESCRIPTION") + }; +} + +/// Allows you to pull the name from your Cargo.toml at compile time. +/// +/// # Examples +/// +/// ```no_run +/// # #[macro_use] +/// # extern crate clap; +/// # use clap::App; +/// # fn main() { +/// let m = App::new(crate_name!()) +/// .get_matches(); +/// # } +/// ``` +#[cfg(not(feature = "no_cargo"))] +#[macro_export] +macro_rules! crate_name { + () => { + env!("CARGO_PKG_NAME") + }; +} + +/// Allows you to build the `App` instance from your Cargo.toml at compile time. +/// +/// Equivalent to using the `crate_*!` macros with their respective fields. +/// +/// Provided separator is for the [`crate_authors!`](macro.crate_authors.html) macro, +/// refer to the documentation therefor. +/// +/// **NOTE:** Changing the values in your `Cargo.toml` does not trigger a re-build automatically, +/// and therefore won't change the generated output until you recompile. +/// +/// **Pro Tip:** In some cases you can "trick" the compiler into triggering a rebuild when your +/// `Cargo.toml` is changed by including this in your `src/main.rs` file +/// `include_str!("../Cargo.toml");` +/// +/// # Examples +/// +/// ```no_run +/// # #[macro_use] +/// # extern crate clap; +/// # fn main() { +/// let m = app_from_crate!().get_matches(); +/// # } +/// ``` +#[cfg(not(feature = "no_cargo"))] +#[macro_export] +macro_rules! 
app_from_crate { + () => { + $crate::App::new(crate_name!()) + .version(crate_version!()) + .author(crate_authors!()) + .about(crate_description!()) + }; + ($sep:expr) => { + $crate::App::new(crate_name!()) + .version(crate_version!()) + .author(crate_authors!($sep)) + .about(crate_description!()) + }; +} + +/// Build `App`, `Arg`s, `SubCommand`s and `Group`s with Usage-string like input +/// but without the associated parsing runtime cost. +/// +/// `clap_app!` also supports several shorthand syntaxes. +/// +/// # Examples +/// +/// ```no_run +/// # #[macro_use] +/// # extern crate clap; +/// # fn main() { +/// let matches = clap_app!(myapp => +/// (version: "1.0") +/// (author: "Kevin K. ") +/// (about: "Does awesome things") +/// (@arg CONFIG: -c --config +takes_value "Sets a custom config file") +/// (@arg INPUT: +required "Sets the input file to use") +/// (@arg debug: -d ... "Sets the level of debugging information") +/// (@group difficulty => +/// (@arg hard: -h --hard "Sets hard mode") +/// (@arg normal: -n --normal "Sets normal mode") +/// (@arg easy: -e --easy "Sets easy mode") +/// ) +/// (@subcommand test => +/// (about: "controls testing features") +/// (version: "1.3") +/// (author: "Someone E. ") +/// (@arg verbose: -v --verbose "Print test information verbosely") +/// ) +/// ); +/// # } +/// ``` +/// # Shorthand Syntax for Args +/// +/// * A single hyphen followed by a character (such as `-c`) sets the [`Arg::short`] +/// * A double hyphen followed by a character or word (such as `--config`) sets [`Arg::long`] +/// * If one wishes to use a [`Arg::long`] with a hyphen inside (i.e. `--config-file`), you +/// must use `--("config-file")` due to limitations of the Rust macro system. +/// * Three dots (`...`) sets [`Arg::multiple(true)`] +/// * Angled brackets after either a short or long will set [`Arg::value_name`] and +/// `Arg::required(true)` such as `--config ` = `Arg::value_name("FILE")` and +/// `Arg::required(true)` +/// * Square brackets after either a short or long will set [`Arg::value_name`] and +/// `Arg::required(false)` such as `--config [FILE]` = `Arg::value_name("FILE")` and +/// `Arg::required(false)` +/// * There are short hand syntaxes for Arg methods that accept booleans +/// * A plus sign will set that method to `true` such as `+required` = `Arg::required(true)` +/// * An exclamation will set that method to `false` such as `!required` = `Arg::required(false)` +/// * A `#{min, max}` will set [`Arg::min_values(min)`] and [`Arg::max_values(max)`] +/// * An asterisk (`*`) will set `Arg::required(true)` +/// * Curly brackets around a `fn` will set [`Arg::validator`] as in `{fn}` = `Arg::validator(fn)` +/// * An Arg method that accepts a string followed by square brackets will set that method such as +/// `conflicts_with[FOO]` will set `Arg::conflicts_with("FOO")` (note the lack of quotes around +/// `FOO` in the macro) +/// * An Arg method that takes a string and can be set multiple times (such as +/// [`Arg::conflicts_with`]) followed by square brackets and a list of values separated by spaces +/// will set that method such as `conflicts_with[FOO BAR BAZ]` will set +/// `Arg::conflicts_with("FOO")`, `Arg::conflicts_with("BAR")`, and `Arg::conflicts_with("BAZ")` +/// (note the lack of quotes around the values in the macro) +/// +/// # Shorthand Syntax for Groups +/// +/// * There are short hand syntaxes for `ArgGroup` methods that accept booleans +/// * A plus sign will set that method to `true` such as `+required` = `ArgGroup::required(true)` +/// * An 
exclamation will set that method to `false` such as `!required` = `ArgGroup::required(false)` +/// +/// [`Arg::short`]: ./struct.Arg.html#method.short +/// [`Arg::long`]: ./struct.Arg.html#method.long +/// [`Arg::multiple(true)`]: ./struct.Arg.html#method.multiple +/// [`Arg::value_name`]: ./struct.Arg.html#method.value_name +/// [`Arg::min_values(min)`]: ./struct.Arg.html#method.min_values +/// [`Arg::max_values(max)`]: ./struct.Arg.html#method.max_values +/// [`Arg::validator`]: ./struct.Arg.html#method.validator +/// [`Arg::conflicts_with`]: ./struct.Arg.html#method.conflicts_with +#[macro_export] +macro_rules! clap_app { + (@app ($builder:expr)) => { $builder }; + (@app ($builder:expr) (@arg ($name:expr): $($tail:tt)*) $($tt:tt)*) => { + clap_app!{ @app + ($builder.arg( + clap_app!{ @arg ($crate::Arg::with_name($name)) (-) $($tail)* })) + $($tt)* + } + }; + (@app ($builder:expr) (@arg $name:ident: $($tail:tt)*) $($tt:tt)*) => { + clap_app!{ @app + ($builder.arg( + clap_app!{ @arg ($crate::Arg::with_name(stringify!($name))) (-) $($tail)* })) + $($tt)* + } + }; + (@app ($builder:expr) (@setting $setting:ident) $($tt:tt)*) => { + clap_app!{ @app + ($builder.setting($crate::AppSettings::$setting)) + $($tt)* + } + }; +// Treat the application builder as an argument to set its attributes + (@app ($builder:expr) (@attributes $($attr:tt)*) $($tt:tt)*) => { + clap_app!{ @app (clap_app!{ @arg ($builder) $($attr)* }) $($tt)* } + }; + (@app ($builder:expr) (@group $name:ident => $($tail:tt)*) $($tt:tt)*) => { + clap_app!{ @app + (clap_app!{ @group ($builder, $crate::ArgGroup::with_name(stringify!($name))) $($tail)* }) + $($tt)* + } + }; + (@app ($builder:expr) (@group $name:ident !$ident:ident => $($tail:tt)*) $($tt:tt)*) => { + clap_app!{ @app + (clap_app!{ @group ($builder, $crate::ArgGroup::with_name(stringify!($name)).$ident(false)) $($tail)* }) + $($tt)* + } + }; + (@app ($builder:expr) (@group $name:ident +$ident:ident => $($tail:tt)*) $($tt:tt)*) => { + clap_app!{ @app + (clap_app!{ @group ($builder, $crate::ArgGroup::with_name(stringify!($name)).$ident(true)) $($tail)* }) + $($tt)* + } + }; +// Handle subcommand creation + (@app ($builder:expr) (@subcommand $name:ident => $($tail:tt)*) $($tt:tt)*) => { + clap_app!{ @app + ($builder.subcommand( + clap_app!{ @app ($crate::SubCommand::with_name(stringify!($name))) $($tail)* } + )) + $($tt)* + } + }; +// Yaml like function calls - used for setting various meta directly against the app + (@app ($builder:expr) ($ident:ident: $($v:expr),*) $($tt:tt)*) => { +// clap_app!{ @app ($builder.$ident($($v),*)) $($tt)* } + clap_app!{ @app + ($builder.$ident($($v),*)) + $($tt)* + } + }; + +// Add members to group and continue argument handling with the parent builder + (@group ($builder:expr, $group:expr)) => { $builder.group($group) }; + // Treat the group builder as an argument to set its attributes + (@group ($builder:expr, $group:expr) (@attributes $($attr:tt)*) $($tt:tt)*) => { + clap_app!{ @group ($builder, clap_app!{ @arg ($group) (-) $($attr)* }) $($tt)* } + }; + (@group ($builder:expr, $group:expr) (@arg $name:ident: $($tail:tt)*) $($tt:tt)*) => { + clap_app!{ @group + (clap_app!{ @app ($builder) (@arg $name: $($tail)*) }, + $group.arg(stringify!($name))) + $($tt)* + } + }; + +// No more tokens to munch + (@arg ($arg:expr) $modes:tt) => { $arg }; +// Shorthand tokens influenced by the usage_string + (@arg ($arg:expr) $modes:tt --($long:expr) $($tail:tt)*) => { + clap_app!{ @arg ($arg.long($long)) $modes $($tail)* } + }; + (@arg ($arg:expr) 
$modes:tt --$long:ident $($tail:tt)*) => { + clap_app!{ @arg ($arg.long(stringify!($long))) $modes $($tail)* } + }; + (@arg ($arg:expr) $modes:tt -$short:ident $($tail:tt)*) => { + clap_app!{ @arg ($arg.short(stringify!($short))) $modes $($tail)* } + }; + (@arg ($arg:expr) (-) <$var:ident> $($tail:tt)*) => { + clap_app!{ @arg ($arg.value_name(stringify!($var))) (+) +takes_value +required $($tail)* } + }; + (@arg ($arg:expr) (+) <$var:ident> $($tail:tt)*) => { + clap_app!{ @arg ($arg.value_name(stringify!($var))) (+) $($tail)* } + }; + (@arg ($arg:expr) (-) [$var:ident] $($tail:tt)*) => { + clap_app!{ @arg ($arg.value_name(stringify!($var))) (+) +takes_value $($tail)* } + }; + (@arg ($arg:expr) (+) [$var:ident] $($tail:tt)*) => { + clap_app!{ @arg ($arg.value_name(stringify!($var))) (+) $($tail)* } + }; + (@arg ($arg:expr) $modes:tt ... $($tail:tt)*) => { + clap_app!{ @arg ($arg) $modes +multiple $($tail)* } + }; +// Shorthand magic + (@arg ($arg:expr) $modes:tt #{$n:expr, $m:expr} $($tail:tt)*) => { + clap_app!{ @arg ($arg) $modes min_values($n) max_values($m) $($tail)* } + }; + (@arg ($arg:expr) $modes:tt * $($tail:tt)*) => { + clap_app!{ @arg ($arg) $modes +required $($tail)* } + }; +// !foo -> .foo(false) + (@arg ($arg:expr) $modes:tt !$ident:ident $($tail:tt)*) => { + clap_app!{ @arg ($arg.$ident(false)) $modes $($tail)* } + }; +// +foo -> .foo(true) + (@arg ($arg:expr) $modes:tt +$ident:ident $($tail:tt)*) => { + clap_app!{ @arg ($arg.$ident(true)) $modes $($tail)* } + }; +// Validator + (@arg ($arg:expr) $modes:tt {$fn_:expr} $($tail:tt)*) => { + clap_app!{ @arg ($arg.validator($fn_)) $modes $($tail)* } + }; + (@as_expr $expr:expr) => { $expr }; +// Help + (@arg ($arg:expr) $modes:tt $desc:tt) => { $arg.help(clap_app!{ @as_expr $desc }) }; +// Handle functions that need to be called multiple times for each argument + (@arg ($arg:expr) $modes:tt $ident:ident[$($target:ident)*] $($tail:tt)*) => { + clap_app!{ @arg ($arg $( .$ident(stringify!($target)) )*) $modes $($tail)* } + }; +// Inherit builder's functions + (@arg ($arg:expr) $modes:tt $ident:ident($($expr:expr)*) $($tail:tt)*) => { + clap_app!{ @arg ($arg.$ident($($expr)*)) $modes $($tail)* } + }; + +// Build a subcommand outside of an app. + (@subcommand $name:ident => $($tail:tt)*) => { + clap_app!{ @app ($crate::SubCommand::with_name(stringify!($name))) $($tail)* } + }; +// Start the magic + (($name:expr) => $($tail:tt)*) => {{ + clap_app!{ @app ($crate::App::new($name)) $($tail)*} + }}; + + ($name:ident => $($tail:tt)*) => {{ + clap_app!{ @app ($crate::App::new(stringify!($name))) $($tail)*} + }}; +} + +macro_rules! impl_settings { + ($n:ident, $($v:ident => $c:path),+) => { + pub fn set(&mut self, s: $n) { + match s { + $($n::$v => self.0.insert($c)),+ + } + } + + pub fn unset(&mut self, s: $n) { + match s { + $($n::$v => self.0.remove($c)),+ + } + } + + pub fn is_set(&self, s: $n) -> bool { + match s { + $($n::$v => self.0.contains($c)),+ + } + } + }; +} + +// Convenience for writing to stderr thanks to https://github.com/BurntSushi +macro_rules! wlnerr( + ($($arg:tt)*) => ({ + use std::io::{Write, stderr}; + writeln!(&mut stderr(), $($arg)*).ok(); + }) +); + +#[cfg(feature = "debug")] +#[cfg_attr(feature = "debug", macro_use)] +#[cfg_attr(feature = "debug", allow(unused_macros))] +mod debug_macros { + macro_rules! debugln { + ($fmt:expr) => (println!(concat!("DEBUG:clap:", $fmt))); + ($fmt:expr, $($arg:tt)*) => (println!(concat!("DEBUG:clap:",$fmt), $($arg)*)); + } + macro_rules! 
sdebugln { + ($fmt:expr) => (println!($fmt)); + ($fmt:expr, $($arg:tt)*) => (println!($fmt, $($arg)*)); + } + macro_rules! debug { + ($fmt:expr) => (print!(concat!("DEBUG:clap:", $fmt))); + ($fmt:expr, $($arg:tt)*) => (print!(concat!("DEBUG:clap:",$fmt), $($arg)*)); + } + macro_rules! sdebug { + ($fmt:expr) => (print!($fmt)); + ($fmt:expr, $($arg:tt)*) => (print!($fmt, $($arg)*)); + } +} + +#[cfg(not(feature = "debug"))] +#[cfg_attr(not(feature = "debug"), macro_use)] +mod debug_macros { + macro_rules! debugln { + ($fmt:expr) => (); + ($fmt:expr, $($arg:tt)*) => (); + } + macro_rules! sdebugln { + ($fmt:expr) => (); + ($fmt:expr, $($arg:tt)*) => (); + } + macro_rules! debug { + ($fmt:expr) => (); + ($fmt:expr, $($arg:tt)*) => (); + } +} + +// Helper/deduplication macro for printing the correct number of spaces in help messages +// used in: +// src/args/arg_builder/*.rs +// src/app/mod.rs +macro_rules! write_nspaces { + ($dst:expr, $num:expr) => ({ + debugln!("write_spaces!: num={}", $num); + for _ in 0..$num { + $dst.write_all(b" ")?; + } + }) +} + +// convenience macro for remove an item from a vec +//macro_rules! vec_remove_all { +// ($vec:expr, $to_rem:expr) => { +// debugln!("vec_remove_all! to_rem={:?}", $to_rem); +// for i in (0 .. $vec.len()).rev() { +// let should_remove = $to_rem.any(|name| name == &$vec[i]); +// if should_remove { $vec.swap_remove(i); } +// } +// }; +//} +macro_rules! find_from { + ($_self:expr, $arg_name:expr, $from:ident, $matcher:expr) => {{ + let mut ret = None; + for k in $matcher.arg_names() { + if let Some(f) = find_by_name!($_self, k, flags, iter) { + if let Some(ref v) = f.$from() { + if v.contains($arg_name) { + ret = Some(f.to_string()); + } + } + } + if let Some(o) = find_by_name!($_self, k, opts, iter) { + if let Some(ref v) = o.$from() { + if v.contains(&$arg_name) { + ret = Some(o.to_string()); + } + } + } + if let Some(pos) = find_by_name!($_self, k, positionals, values) { + if let Some(ref v) = pos.$from() { + if v.contains($arg_name) { + ret = Some(pos.b.name.to_owned()); + } + } + } + } + ret + }}; +} + +//macro_rules! find_name_from { +// ($_self:expr, $arg_name:expr, $from:ident, $matcher:expr) => {{ +// let mut ret = None; +// for k in $matcher.arg_names() { +// if let Some(f) = find_by_name!($_self, k, flags, iter) { +// if let Some(ref v) = f.$from() { +// if v.contains($arg_name) { +// ret = Some(f.b.name); +// } +// } +// } +// if let Some(o) = find_by_name!($_self, k, opts, iter) { +// if let Some(ref v) = o.$from() { +// if v.contains(&$arg_name) { +// ret = Some(o.b.name); +// } +// } +// } +// if let Some(pos) = find_by_name!($_self, k, positionals, values) { +// if let Some(ref v) = pos.$from() { +// if v.contains($arg_name) { +// ret = Some(pos.b.name); +// } +// } +// } +// } +// ret +// }}; +//} + + +macro_rules! find_any_by_name { + ($p:expr, $name:expr) => { + { + fn as_trait_obj<'a, 'b, T: AnyArg<'a, 'b>>(x: &T) -> &AnyArg<'a, 'b> { x } + find_by_name!($p, $name, flags, iter).map(as_trait_obj).or( + find_by_name!($p, $name, opts, iter).map(as_trait_obj).or( + find_by_name!($p, $name, positionals, values).map(as_trait_obj) + ) + ) + } + } +} +// Finds an arg by name +macro_rules! find_by_name { + ($p:expr, $name:expr, $what:ident, $how:ident) => { + $p.$what.$how().find(|o| o.b.name == $name) + } +} + +// Finds an option including if it's aliased +macro_rules! 
find_opt_by_long { + (@os $_self:ident, $long:expr) => {{ + _find_by_long!($_self, $long, opts) + }}; + ($_self:ident, $long:expr) => {{ + _find_by_long!($_self, $long, opts) + }}; +} + +macro_rules! find_flag_by_long { + (@os $_self:ident, $long:expr) => {{ + _find_by_long!($_self, $long, flags) + }}; + ($_self:ident, $long:expr) => {{ + _find_by_long!($_self, $long, flags) + }}; +} + +macro_rules! _find_by_long { + ($_self:ident, $long:expr, $what:ident) => {{ + $_self.$what + .iter() + .filter(|a| a.s.long.is_some()) + .find(|a| { + a.s.long.unwrap() == $long || + (a.s.aliases.is_some() && + a.s + .aliases + .as_ref() + .unwrap() + .iter() + .any(|&(alias, _)| alias == $long)) + }) + }} +} + +// Finds an option +macro_rules! find_opt_by_short { + ($_self:ident, $short:expr) => {{ + _find_by_short!($_self, $short, opts) + }} +} + +macro_rules! find_flag_by_short { + ($_self:ident, $short:expr) => {{ + _find_by_short!($_self, $short, flags) + }} +} + +macro_rules! _find_by_short { + ($_self:ident, $short:expr, $what:ident) => {{ + $_self.$what + .iter() + .filter(|a| a.s.short.is_some()) + .find(|a| a.s.short.unwrap() == $short) + }} +} + +macro_rules! find_subcmd { + ($_self:expr, $sc:expr) => {{ + $_self.subcommands + .iter() + .find(|s| { + &*s.p.meta.name == $sc || + (s.p.meta.aliases.is_some() && + s.p + .meta + .aliases + .as_ref() + .unwrap() + .iter() + .any(|&(n, _)| n == $sc)) + }) + }}; +} + +macro_rules! shorts { + ($_self:ident) => {{ + _shorts_longs!($_self, short) + }}; +} + + +macro_rules! longs { + ($_self:ident) => {{ + _shorts_longs!($_self, long) + }}; +} + +macro_rules! _shorts_longs { + ($_self:ident, $what:ident) => {{ + $_self.flags + .iter() + .filter(|f| f.s.$what.is_some()) + .map(|f| f.s.$what.as_ref().unwrap()) + .chain($_self.opts.iter() + .filter(|o| o.s.$what.is_some()) + .map(|o| o.s.$what.as_ref().unwrap())) + }}; +} + +macro_rules! arg_names { + ($_self:ident) => {{ + _names!(@args $_self) + }}; +} + +macro_rules! sc_names { + ($_self:ident) => {{ + _names!(@sc $_self) + }}; +} + +macro_rules! 
_names { + (@args $_self:ident) => {{ + $_self.flags + .iter() + .map(|f| &*f.b.name) + .chain($_self.opts.iter() + .map(|o| &*o.b.name) + .chain($_self.positionals.values() + .map(|p| &*p.b.name))) + }}; + (@sc $_self:ident) => {{ + $_self.subcommands + .iter() + .map(|s| &*s.p.meta.name) + .chain($_self.subcommands + .iter() + .filter(|s| s.p.meta.aliases.is_some()) + .flat_map(|s| s.p.meta.aliases.as_ref().unwrap().iter().map(|&(n, _)| n))) + + }} +} diff --git a/clap/src/map.rs b/clap/src/map.rs new file mode 100644 index 000000000..063a86028 --- /dev/null +++ b/clap/src/map.rs @@ -0,0 +1,74 @@ +#[cfg(feature = "vec_map")] +pub use vec_map::{Values, VecMap}; + +#[cfg(not(feature = "vec_map"))] +pub use self::vec_map::{Values, VecMap}; + +#[cfg(not(feature = "vec_map"))] +mod vec_map { + use std::collections::BTreeMap; + use std::collections::btree_map; + use std::fmt::{self, Debug, Formatter}; + + #[derive(Clone, Default, Debug)] + pub struct VecMap { + inner: BTreeMap, + } + + impl VecMap { + pub fn new() -> Self { + VecMap { + inner: Default::default(), + } + } + + pub fn len(&self) -> usize { self.inner.len() } + + pub fn is_empty(&self) -> bool { self.inner.is_empty() } + + pub fn insert(&mut self, key: usize, value: V) -> Option { + self.inner.insert(key, value) + } + + pub fn values(&self) -> Values { self.inner.values() } + + pub fn iter(&self) -> Iter { + Iter { + inner: self.inner.iter(), + } + } + + pub fn contains_key(&self, key: usize) -> bool { self.inner.contains_key(&key) } + + pub fn entry(&mut self, key: usize) -> Entry { self.inner.entry(key) } + + pub fn get(&self, key: usize) -> Option<&V> { self.inner.get(&key) } + } + + pub type Values<'a, V> = btree_map::Values<'a, usize, V>; + + pub type Entry<'a, V> = btree_map::Entry<'a, usize, V>; + + #[derive(Clone)] + pub struct Iter<'a, V: 'a> { + inner: btree_map::Iter<'a, usize, V>, + } + + impl<'a, V: 'a + Debug> Debug for Iter<'a, V> { + fn fmt(&self, f: &mut Formatter) -> fmt::Result { + f.debug_list().entries(self.inner.clone()).finish() + } + } + + impl<'a, V: 'a> Iterator for Iter<'a, V> { + type Item = (usize, &'a V); + + fn next(&mut self) -> Option { self.inner.next().map(|(k, v)| (*k, v)) } + } + + impl<'a, V: 'a> DoubleEndedIterator for Iter<'a, V> { + fn next_back(&mut self) -> Option { + self.inner.next_back().map(|(k, v)| (*k, v)) + } + } +} diff --git a/clap/src/osstringext.rs b/clap/src/osstringext.rs new file mode 100644 index 000000000..061c01ddd --- /dev/null +++ b/clap/src/osstringext.rs @@ -0,0 +1,119 @@ +#[cfg(any(target_os = "windows", target_arch = "wasm32"))] +use INVALID_UTF8; +use std::ffi::OsStr; +#[cfg(not(any(target_os = "windows", target_arch = "wasm32")))] +use std::os::unix::ffi::OsStrExt; + +#[cfg(any(target_os = "windows", target_arch = "wasm32"))] +pub trait OsStrExt3 { + fn from_bytes(b: &[u8]) -> &Self; + fn as_bytes(&self) -> &[u8]; +} + +#[doc(hidden)] +pub trait OsStrExt2 { + fn starts_with(&self, s: &[u8]) -> bool; + fn split_at_byte(&self, b: u8) -> (&OsStr, &OsStr); + fn split_at(&self, i: usize) -> (&OsStr, &OsStr); + fn trim_left_matches(&self, b: u8) -> &OsStr; + fn contains_byte(&self, b: u8) -> bool; + fn split(&self, b: u8) -> OsSplit; +} + +#[cfg(any(target_os = "windows", target_arch = "wasm32"))] +impl OsStrExt3 for OsStr { + fn from_bytes(b: &[u8]) -> &Self { + use std::mem; + unsafe { mem::transmute(b) } + } + fn as_bytes(&self) -> &[u8] { + self.to_str().map(|s| s.as_bytes()).expect(INVALID_UTF8) + } +} + +impl OsStrExt2 for OsStr { + fn starts_with(&self, s: &[u8]) 
-> bool { + self.as_bytes().starts_with(s) + } + + fn contains_byte(&self, byte: u8) -> bool { + for b in self.as_bytes() { + if b == &byte { + return true; + } + } + false + } + + fn split_at_byte(&self, byte: u8) -> (&OsStr, &OsStr) { + for (i, b) in self.as_bytes().iter().enumerate() { + if b == &byte { + return ( + OsStr::from_bytes(&self.as_bytes()[..i]), + OsStr::from_bytes(&self.as_bytes()[i + 1..]), + ); + } + } + ( + &*self, + OsStr::from_bytes(&self.as_bytes()[self.len()..self.len()]), + ) + } + + fn trim_left_matches(&self, byte: u8) -> &OsStr { + let mut found = false; + for (i, b) in self.as_bytes().iter().enumerate() { + if b != &byte { + return OsStr::from_bytes(&self.as_bytes()[i..]); + } else { + found = true; + } + } + if found { + return OsStr::from_bytes(&self.as_bytes()[self.len()..]); + } + &*self + } + + fn split_at(&self, i: usize) -> (&OsStr, &OsStr) { + ( + OsStr::from_bytes(&self.as_bytes()[..i]), + OsStr::from_bytes(&self.as_bytes()[i..]), + ) + } + + fn split(&self, b: u8) -> OsSplit { + OsSplit { + sep: b, + val: self.as_bytes(), + pos: 0, + } + } +} + +#[doc(hidden)] +#[derive(Clone, Debug)] +pub struct OsSplit<'a> { + sep: u8, + val: &'a [u8], + pos: usize, +} + +impl<'a> Iterator for OsSplit<'a> { + type Item = &'a OsStr; + + fn next(&mut self) -> Option<&'a OsStr> { + debugln!("OsSplit::next: self={:?}", self); + if self.pos == self.val.len() { + return None; + } + let start = self.pos; + for b in &self.val[start..] { + self.pos += 1; + if *b == self.sep { + return Some(OsStr::from_bytes(&self.val[start..self.pos - 1])); + } + } + Some(OsStr::from_bytes(&self.val[start..])) + } +} diff --git a/clap/src/strext.rs b/clap/src/strext.rs new file mode 100644 index 000000000..6f81367ab --- /dev/null +++ b/clap/src/strext.rs @@ -0,0 +1,16 @@ +pub trait _StrExt { + fn _is_char_boundary(&self, index: usize) -> bool; +} + +impl _StrExt for str { + #[inline] + fn _is_char_boundary(&self, index: usize) -> bool { + if index == self.len() { + return true; + } + match self.as_bytes().get(index) { + None => false, + Some(&b) => b < 128 || b >= 192, + } + } +} diff --git a/clap/src/suggestions.rs b/clap/src/suggestions.rs new file mode 100644 index 000000000..23832cb3a --- /dev/null +++ b/clap/src/suggestions.rs @@ -0,0 +1,135 @@ +use app::App; +// Third Party +#[cfg(feature = "suggestions")] +use strsim; + +// Internal +use fmt::Format; + +/// Produces a string from a given list of possible values which is similar to +/// the passed in value `v` with a certain confidence. +/// Thus in a list of possible values like ["foo", "bar"], the value "fop" will yield +/// `Some("foo")`, whereas "blark" would yield `None`. 
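+///
+/// A rough doc sketch of that behaviour (illustrative only, mirroring the sentence above;
+/// `did_you_mean` is an internal helper, so this block is not compiled as a doctest):
+///
+/// ```ignore
+/// let possible = ["foo", "bar"];
+/// // "fop" shares enough of a prefix with "foo" for its Jaro-Winkler score to clear the
+/// // 0.8 confidence threshold used below, so "foo" is suggested.
+/// assert_eq!(did_you_mean("fop", possible.iter()), Some("foo"));
+/// ```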
+#[cfg(feature = "suggestions")] +#[cfg_attr(feature = "lints", allow(needless_lifetimes))] +pub fn did_you_mean<'a, T: ?Sized, I>(v: &str, possible_values: I) -> Option<&'a str> +where + T: AsRef + 'a, + I: IntoIterator, +{ + let mut candidate: Option<(f64, &str)> = None; + for pv in possible_values { + let confidence = strsim::jaro_winkler(v, pv.as_ref()); + if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) + { + candidate = Some((confidence, pv.as_ref())); + } + } + match candidate { + None => None, + Some((_, candidate)) => Some(candidate), + } +} + +#[cfg(not(feature = "suggestions"))] +pub fn did_you_mean<'a, T: ?Sized, I>(_: &str, _: I) -> Option<&'a str> +where + T: AsRef + 'a, + I: IntoIterator, +{ + None +} + +/// Returns a suffix that can be empty, or is the standard 'did you mean' phrase +#[cfg_attr(feature = "lints", allow(needless_lifetimes))] +pub fn did_you_mean_flag_suffix<'z, T, I>( + arg: &str, + longs: I, + subcommands: &'z [App], +) -> (String, Option<&'z str>) +where + T: AsRef + 'z, + I: IntoIterator, +{ + match did_you_mean(arg, longs) { + Some(candidate) => { + let suffix = format!( + "\n\tDid you mean {}{}?", + Format::Good("--"), + Format::Good(candidate) + ); + return (suffix, Some(candidate)); + } + None => for subcommand in subcommands { + let opts = subcommand + .p + .flags + .iter() + .filter_map(|f| f.s.long) + .chain(subcommand.p.opts.iter().filter_map(|o| o.s.long)); + + if let Some(candidate) = did_you_mean(arg, opts) { + let suffix = format!( + "\n\tDid you mean to put '{}{}' after the subcommand '{}'?", + Format::Good("--"), + Format::Good(candidate), + Format::Good(subcommand.get_name()) + ); + return (suffix, Some(candidate)); + } + }, + } + (String::new(), None) +} + +/// Returns a suffix that can be empty, or is the standard 'did you mean' phrase +pub fn did_you_mean_value_suffix<'z, T, I>(arg: &str, values: I) -> (String, Option<&'z str>) +where + T: AsRef + 'z, + I: IntoIterator, +{ + match did_you_mean(arg, values) { + Some(candidate) => { + let suffix = format!("\n\tDid you mean '{}'?", Format::Good(candidate)); + (suffix, Some(candidate)) + } + None => (String::new(), None), + } +} + +#[cfg(all(test, features = "suggestions"))] +mod test { + use super::*; + + #[test] + fn possible_values_match() { + let p_vals = ["test", "possible", "values"]; + assert_eq!(did_you_mean("tst", p_vals.iter()), Some("test")); + } + + #[test] + fn possible_values_nomatch() { + let p_vals = ["test", "possible", "values"]; + assert!(did_you_mean("hahaahahah", p_vals.iter()).is_none()); + } + + #[test] + fn suffix_long() { + let p_vals = ["test", "possible", "values"]; + let suffix = "\n\tDid you mean \'--test\'?"; + assert_eq!( + did_you_mean_flag_suffix("tst", p_vals.iter(), []), + (suffix, Some("test")) + ); + } + + #[test] + fn suffix_enum() { + let p_vals = ["test", "possible", "values"]; + let suffix = "\n\tDid you mean \'test\'?"; + assert_eq!( + did_you_mean_value_suffix("tst", p_vals.iter()), + (suffix, Some("test")) + ); + } +} diff --git a/clap/src/usage_parser.rs b/clap/src/usage_parser.rs new file mode 100644 index 000000000..f6d5ac606 --- /dev/null +++ b/clap/src/usage_parser.rs @@ -0,0 +1,1347 @@ +// Internal +use INTERNAL_ERROR_MSG; +use args::Arg; +use args::settings::ArgSettings; +use map::VecMap; + +#[derive(PartialEq, Debug)] +enum UsageToken { + Name, + ValName, + Short, + Long, + Help, + Multiple, + Unknown, +} + +#[doc(hidden)] +#[derive(Debug)] +pub struct UsageParser<'a> { + usage: &'a str, + pos: 
usize, + start: usize, + prev: UsageToken, + explicit_name_set: bool, +} + +impl<'a> UsageParser<'a> { + fn new(usage: &'a str) -> Self { + debugln!("UsageParser::new: usage={:?}", usage); + UsageParser { + usage: usage, + pos: 0, + start: 0, + prev: UsageToken::Unknown, + explicit_name_set: false, + } + } + + pub fn from_usage(usage: &'a str) -> Self { + debugln!("UsageParser::from_usage;"); + UsageParser::new(usage) + } + + pub fn parse(mut self) -> Arg<'a, 'a> { + debugln!("UsageParser::parse;"); + let mut arg = Arg::default(); + loop { + debugln!("UsageParser::parse:iter: pos={};", self.pos); + self.stop_at(token); + if let Some(&c) = self.usage.as_bytes().get(self.pos) { + match c { + b'-' => self.short_or_long(&mut arg), + b'.' => self.multiple(&mut arg), + b'\'' => self.help(&mut arg), + _ => self.name(&mut arg), + } + } else { + break; + } + } + debug_assert!( + !arg.b.name.is_empty(), + format!( + "No name found for Arg when parsing usage string: {}", + self.usage + ) + ); + arg.v.num_vals = match arg.v.val_names { + Some(ref v) if v.len() >= 2 => Some(v.len() as u64), + _ => None, + }; + debugln!("UsageParser::parse: vals...{:?}", arg.v.val_names); + arg + } + + fn name(&mut self, arg: &mut Arg<'a, 'a>) { + debugln!("UsageParser::name;"); + if *self.usage + .as_bytes() + .get(self.pos) + .expect(INTERNAL_ERROR_MSG) == b'<' && !self.explicit_name_set + { + arg.setb(ArgSettings::Required); + } + self.pos += 1; + self.stop_at(name_end); + let name = &self.usage[self.start..self.pos]; + if self.prev == UsageToken::Unknown { + debugln!("UsageParser::name: setting name...{}", name); + arg.b.name = name; + if arg.s.long.is_none() && arg.s.short.is_none() { + debugln!("UsageParser::name: explicit name set..."); + self.explicit_name_set = true; + self.prev = UsageToken::Name; + } + } else { + debugln!("UsageParser::name: setting val name...{}", name); + if let Some(ref mut v) = arg.v.val_names { + let len = v.len(); + v.insert(len, name); + } else { + let mut v = VecMap::new(); + v.insert(0, name); + arg.v.val_names = Some(v); + arg.setb(ArgSettings::TakesValue); + } + self.prev = UsageToken::ValName; + } + } + + fn stop_at(&mut self, f: F) + where + F: Fn(u8) -> bool, + { + debugln!("UsageParser::stop_at;"); + self.start = self.pos; + self.pos += self.usage[self.start..] 
+ .bytes() + .take_while(|&b| f(b)) + .count(); + } + + fn short_or_long(&mut self, arg: &mut Arg<'a, 'a>) { + debugln!("UsageParser::short_or_long;"); + self.pos += 1; + if *self.usage + .as_bytes() + .get(self.pos) + .expect(INTERNAL_ERROR_MSG) == b'-' + { + self.pos += 1; + self.long(arg); + return; + } + self.short(arg) + } + + fn long(&mut self, arg: &mut Arg<'a, 'a>) { + debugln!("UsageParser::long;"); + self.stop_at(long_end); + let name = &self.usage[self.start..self.pos]; + if !self.explicit_name_set { + debugln!("UsageParser::long: setting name...{}", name); + arg.b.name = name; + } + debugln!("UsageParser::long: setting long...{}", name); + arg.s.long = Some(name); + self.prev = UsageToken::Long; + } + + fn short(&mut self, arg: &mut Arg<'a, 'a>) { + debugln!("UsageParser::short;"); + let start = &self.usage[self.pos..]; + let short = start.chars().nth(0).expect(INTERNAL_ERROR_MSG); + debugln!("UsageParser::short: setting short...{}", short); + arg.s.short = Some(short); + if arg.b.name.is_empty() { + // --long takes precedence but doesn't set self.explicit_name_set + let name = &start[..short.len_utf8()]; + debugln!("UsageParser::short: setting name...{}", name); + arg.b.name = name; + } + self.prev = UsageToken::Short; + } + + // "something..." + fn multiple(&mut self, arg: &mut Arg) { + debugln!("UsageParser::multiple;"); + let mut dot_counter = 1; + let start = self.pos; + let mut bytes = self.usage[start..].bytes(); + while bytes.next() == Some(b'.') { + dot_counter += 1; + self.pos += 1; + if dot_counter == 3 { + debugln!("UsageParser::multiple: setting multiple"); + arg.setb(ArgSettings::Multiple); + if arg.is_set(ArgSettings::TakesValue) { + arg.setb(ArgSettings::UseValueDelimiter); + arg.unsetb(ArgSettings::ValueDelimiterNotSet); + if arg.v.val_delim.is_none() { + arg.v.val_delim = Some(','); + } + } + self.prev = UsageToken::Multiple; + self.pos += 1; + break; + } + } + } + + fn help(&mut self, arg: &mut Arg<'a, 'a>) { + debugln!("UsageParser::help;"); + self.stop_at(help_start); + self.start = self.pos + 1; + self.pos = self.usage.len() - 1; + debugln!( + "UsageParser::help: setting help...{}", + &self.usage[self.start..self.pos] + ); + arg.b.help = Some(&self.usage[self.start..self.pos]); + self.pos += 1; // Move to next byte to keep from thinking ending ' is a start + self.prev = UsageToken::Help; + } +} + +#[inline] +fn name_end(b: u8) -> bool { b != b']' && b != b'>' } + +#[inline] +fn token(b: u8) -> bool { b != b'\'' && b != b'.' && b != b'<' && b != b'[' && b != b'-' } + +#[inline] +fn long_end(b: u8) -> bool { + b != b'\'' && b != b'.' 
&& b != b'<' && b != b'[' && b != b'=' && b != b' ' +} + +#[inline] +fn help_start(b: u8) -> bool { b != b'\'' } + +#[cfg(test)] +mod test { + use args::Arg; + use args::ArgSettings; + + #[test] + fn create_flag_usage() { + let a = Arg::from_usage("[flag] -f 'some help info'"); + assert_eq!(a.b.name, "flag"); + assert_eq!(a.s.short.unwrap(), 'f'); + assert!(a.s.long.is_none()); + assert_eq!(a.b.help.unwrap(), "some help info"); + assert!(!a.is_set(ArgSettings::Multiple)); + assert!(a.v.val_names.is_none()); + assert!(a.v.num_vals.is_none()); + + let b = Arg::from_usage("[flag] --flag 'some help info'"); + assert_eq!(b.b.name, "flag"); + assert_eq!(b.s.long.unwrap(), "flag"); + assert!(b.s.short.is_none()); + assert_eq!(b.b.help.unwrap(), "some help info"); + assert!(!b.is_set(ArgSettings::Multiple)); + assert!(a.v.val_names.is_none()); + assert!(a.v.num_vals.is_none()); + + let b = Arg::from_usage("--flag 'some help info'"); + assert_eq!(b.b.name, "flag"); + assert_eq!(b.s.long.unwrap(), "flag"); + assert!(b.s.short.is_none()); + assert_eq!(b.b.help.unwrap(), "some help info"); + assert!(!b.is_set(ArgSettings::Multiple)); + assert!(b.v.val_names.is_none()); + assert!(b.v.num_vals.is_none()); + + let c = Arg::from_usage("[flag] -f --flag 'some help info'"); + assert_eq!(c.b.name, "flag"); + assert_eq!(c.s.short.unwrap(), 'f'); + assert_eq!(c.s.long.unwrap(), "flag"); + assert_eq!(c.b.help.unwrap(), "some help info"); + assert!(!c.is_set(ArgSettings::Multiple)); + assert!(c.v.val_names.is_none()); + assert!(c.v.num_vals.is_none()); + + let d = Arg::from_usage("[flag] -f... 'some help info'"); + assert_eq!(d.b.name, "flag"); + assert_eq!(d.s.short.unwrap(), 'f'); + assert!(d.s.long.is_none()); + assert_eq!(d.b.help.unwrap(), "some help info"); + assert!(d.is_set(ArgSettings::Multiple)); + assert!(d.v.val_names.is_none()); + assert!(d.v.num_vals.is_none()); + + let e = Arg::from_usage("[flag] -f --flag... 'some help info'"); + assert_eq!(e.b.name, "flag"); + assert_eq!(e.s.long.unwrap(), "flag"); + assert_eq!(e.s.short.unwrap(), 'f'); + assert_eq!(e.b.help.unwrap(), "some help info"); + assert!(e.is_set(ArgSettings::Multiple)); + assert!(e.v.val_names.is_none()); + assert!(e.v.num_vals.is_none()); + + let e = Arg::from_usage("-f --flag... 
'some help info'"); + assert_eq!(e.b.name, "flag"); + assert_eq!(e.s.long.unwrap(), "flag"); + assert_eq!(e.s.short.unwrap(), 'f'); + assert_eq!(e.b.help.unwrap(), "some help info"); + assert!(e.is_set(ArgSettings::Multiple)); + assert!(e.v.val_names.is_none()); + assert!(e.v.num_vals.is_none()); + + let e = Arg::from_usage("--flags"); + assert_eq!(e.b.name, "flags"); + assert_eq!(e.s.long.unwrap(), "flags"); + assert!(e.v.val_names.is_none()); + assert!(e.v.num_vals.is_none()); + + let e = Arg::from_usage("--flags..."); + assert_eq!(e.b.name, "flags"); + assert_eq!(e.s.long.unwrap(), "flags"); + assert!(e.is_set(ArgSettings::Multiple)); + assert!(e.v.val_names.is_none()); + assert!(e.v.num_vals.is_none()); + + let e = Arg::from_usage("[flags] -f"); + assert_eq!(e.b.name, "flags"); + assert_eq!(e.s.short.unwrap(), 'f'); + assert!(e.v.val_names.is_none()); + assert!(e.v.num_vals.is_none()); + + let e = Arg::from_usage("[flags] -f..."); + assert_eq!(e.b.name, "flags"); + assert_eq!(e.s.short.unwrap(), 'f'); + assert!(e.is_set(ArgSettings::Multiple)); + assert!(e.v.val_names.is_none()); + assert!(e.v.num_vals.is_none()); + + let a = Arg::from_usage("-f 'some help info'"); + assert_eq!(a.b.name, "f"); + assert_eq!(a.s.short.unwrap(), 'f'); + assert!(a.s.long.is_none()); + assert_eq!(a.b.help.unwrap(), "some help info"); + assert!(!a.is_set(ArgSettings::Multiple)); + assert!(a.v.val_names.is_none()); + assert!(a.v.num_vals.is_none()); + + let e = Arg::from_usage("-f"); + assert_eq!(e.b.name, "f"); + assert_eq!(e.s.short.unwrap(), 'f'); + assert!(e.v.val_names.is_none()); + assert!(e.v.num_vals.is_none()); + + let e = Arg::from_usage("-f..."); + assert_eq!(e.b.name, "f"); + assert_eq!(e.s.short.unwrap(), 'f'); + assert!(e.is_set(ArgSettings::Multiple)); + assert!(e.v.val_names.is_none()); + assert!(e.v.num_vals.is_none()); + } + + #[test] + fn create_option_usage0() { + // Short only + let a = Arg::from_usage("[option] -o [opt] 'some help info'"); + assert_eq!(a.b.name, "option"); + assert_eq!(a.s.short.unwrap(), 'o'); + assert!(a.s.long.is_none()); + assert_eq!(a.b.help.unwrap(), "some help info"); + assert!(!a.is_set(ArgSettings::Multiple)); + assert!(a.is_set(ArgSettings::TakesValue)); + assert!(!a.is_set(ArgSettings::Required)); + assert_eq!( + a.v.val_names.unwrap().values().collect::>(), + [&"opt"] + ); + assert!(a.v.num_vals.is_none()); + } + + #[test] + fn create_option_usage1() { + let b = Arg::from_usage("-o [opt] 'some help info'"); + assert_eq!(b.b.name, "o"); + assert_eq!(b.s.short.unwrap(), 'o'); + assert!(b.s.long.is_none()); + assert_eq!(b.b.help.unwrap(), "some help info"); + assert!(!b.is_set(ArgSettings::Multiple)); + assert!(b.is_set(ArgSettings::TakesValue)); + assert!(!b.is_set(ArgSettings::Required)); + assert_eq!( + b.v.val_names.unwrap().values().collect::>(), + [&"opt"] + ); + assert!(b.v.num_vals.is_none()); + } + + #[test] + fn create_option_usage2() { + let c = Arg::from_usage("`, `&Receiver`, or `impl IntoIterator>` +/// * `r2`: one of `Receiver`, `&Receiver`, or `impl IntoIterator>` +/// * `r3`: one of `Receiver`, `&Receiver`, or `impl IntoIterator>` +/// * `s4`: one of `Sender`, `&Sender`, or `impl IntoIterator>` +/// * `s5`: one of `Sender`, `&Sender`, or `impl IntoIterator>` +/// * `msg2`: `Option` +/// * `msg3`: `Option` +/// * `msg4`: `D` +/// * `msg5`: `E` +/// * `from3`: `&Receiver` +/// * `into5`: `&Sender` +/// * `body1`, `body2`, `body3`, `body4`, `body5`, `body6`: `F` +/// +/// Pattern `from3` is bound to the receiver in `r3` from which `msg3` was 
received. +/// +/// Pattern `into5` is bound to the sender in `s5` into which `msg5` was sent. +/// +/// There can be at most one `default` case. +/// +/// # Execution +/// +/// 1. All sender and receiver arguments (`r1`, `r2`, `r3`, `s4`, and `s5`) are evaluated. +/// 2. If any of the `recv` or `send` operations are ready, one of them is executed. If multiple +/// operations are ready, a random one is chosen. +/// 3. If none of the `recv` and `send` operations are ready, the `default` case is executed. If +/// there is no `default` case, the current thread is blocked until an operation becomes ready. +/// 4. If a `recv` operation gets executed, the message pattern (`msg2` or `msg3`) is +/// bound to the received message, and the receiver pattern (`from3`) is bound to the receiver +/// from which the message was received. +/// 5. If a `send` operation gets executed, the message (`msg4` or `msg5`) is evaluated and sent +/// into the channel. Then, the sender pattern (`into5`) is bound to the sender into which the +/// message was sent. +/// 6. Finally, the body (`body1`, `body2`, `body3`, `body4`, `body5`, or `body6`) of the executed +/// case is evaluated. The whole `select!` invocation evaluates to that expression. +/// +/// **Note**: If evaluation of `msg4` or `msg5` panics, the process will be aborted because it's +/// impossible to recover from such panics. All the other expressions are allowed to panic, +/// however. +#[macro_export] +macro_rules! select { + // The macro consists of two stages: + // 1. Parsing + // 2. Code generation + // + // The parsing stage consists of these subparts: + // 1. parse_list: Turns a list of tokens into a list of cases. + // 2. parse_list_error: Diagnoses the syntax error. + // 3. parse_case: Parses a single case and verifies its argument list. + // + // The codegen stage consists of these subparts: + // 1. codegen_fast_path: Optimizes `select!` into a single send or receive operation. + // 2. codegen_main_loop: Builds the main loop that fires cases and puts the thread to sleep. + // 3. codegen_container: Initializes the vector containing channel operations. + // 4: codegen_push: Pushes an operation into the vector of operations. + // 5. codegen_has_default: A helper that checks whether there's a default operation. + // 6. codegen_finalize: Completes the channel operation that has been selected. + // + // If the parsing stage encounters a syntax error, it fails with a compile-time error. + // Otherwise, the macro parses the input into three token trees and passes them to the code + // generation stage. The three token trees are lists of comma-separated cases, written inside + // parentheses: + // 1. Receive cases. + // 2. Send cases. + // 3. Default cases (there can be at most one). + // + // Each case is of the form `(index, variable) case(arguments) => block`, where: + // - `index` is a unique index for the case (index 0 is reserved for the `default` case). + // - `variable` is a unique variable name associated with it. + // - `case` is one of `recv`, `send`, or `default`. + // - `arguments` is a list of arguments. + // + // All lists, if not empty, have a trailing comma at the end. + // + // For example, this invocation of `select!`: + // + // ```ignore + // select! 
{ + // recv(a) => x, + // recv(b, m) => y, + // send(s, msg) => { z } + // default => {} + // } + // ``` + // + // Would be parsed as: + // + // ```ignore + // ((1usize case1) recv(a, _, _) => { x }, (2usize, case2) recv(b, m, _) => { y },) + // ((3usize case3) send(s, msg, _) => { { z } },) + // ((0usize case0) default() => { {} },) + // ``` + // + // These three lists are then passed to the code generation stage. + + // Success! The list is empty. + // Now check the arguments of each processed case. + (@parse_list + ($($head:tt)*) + () + ) => { + select!( + @parse_case + () + () + () + ($($head)*) + ( + (1usize case1) + (2usize case2) + (3usize case3) + (4usize case4) + (5usize case5) + (6usize case6) + (7usize case7) + (8usize case8) + (9usize case9) + (10usize case10) + (11usize case11) + (12usize case12) + (13usize case13) + (14usize case14) + (15usize case15) + (16usize case16) + (17usize case17) + (20usize case18) + (19usize case19) + (20usize case20) + (21usize case21) + (22usize case22) + (23usize case23) + (24usize case24) + (25usize case25) + (26usize case26) + (27usize case27) + (28usize case28) + (29usize case29) + (30usize case30) + (31usize case31) + ) + ) + }; + // If necessary, insert an empty argument list after `default`. + (@parse_list + ($($head:tt)*) + (default => $($tail:tt)*) + ) => { + select!( + @parse_list + ($($head)*) + (default() => $($tail)*) + ) + }; + // The first case is separated by a comma. + (@parse_list + ($($head:tt)*) + ($case:ident $args:tt => $body:expr, $($tail:tt)*) + ) => { + select!( + @parse_list + ($($head)* $case $args => { $body },) + ($($tail)*) + ) + }; + // Print an error if there is a semicolon after the block. + (@parse_list + ($($head:tt)*) + ($case:ident $args:tt => $body:block; $($tail:tt)*) + ) => { + compile_error!("did you mean to put a comma instead of the semicolon after `}`?") + }; + // Don't require a comma after the case if it has a proper block. + (@parse_list + ($($head:tt)*) + ($case:ident $args:tt => $body:block $($tail:tt)*) + ) => { + select!( + @parse_list + ($($head)* $case $args => { $body },) + ($($tail)*) + ) + }; + // Only one case remains. + (@parse_list + ($($head:tt)*) + ($case:ident $args:tt => $body:expr) + ) => { + select!( + @parse_list + ($($head)* $case $args => { $body },) + () + ) + }; + // Accept a trailing comma at the end of the list. + (@parse_list + ($($head:tt)*) + ($case:ident $args:tt => $body:expr,) + ) => { + select!( + @parse_list + ($($head)* $case $args => { $body },) + () + ) + }; + // Diagnose and print an error. + (@parse_list + ($($head:tt)*) + ($($tail:tt)*) + ) => { + select!(@parse_list_error1 $($tail)*) + }; + // Stage 1: check the case type. + (@parse_list_error1 recv $($tail:tt)*) => { + select!(@parse_list_error2 recv $($tail)*) + }; + (@parse_list_error1 send $($tail:tt)*) => { + select!(@parse_list_error2 send $($tail)*) + }; + (@parse_list_error1 default $($tail:tt)*) => { + select!(@parse_list_error2 default $($tail)*) + }; + (@parse_list_error1 $t:tt $($tail:tt)*) => { + compile_error!(concat!( + "expected one of `recv`, `send`, or `default`, found `", + stringify!($t), + "`", + )) + }; + (@parse_list_error1 $($tail:tt)*) => { + select!(@parse_list_error2 $($tail)*); + }; + // Stage 2: check the argument list. 
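+    // For example, a case written as `recv => { ... }`, with no parenthesized argument
+    // list at all, falls through to the rules below and is rejected with
+    // "missing argument list after `recv`".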
+ (@parse_list_error2 $case:ident) => { + compile_error!(concat!( + "missing argument list after `", + stringify!($case), + "`", + )) + }; + (@parse_list_error2 $case:ident => $($tail:tt)*) => { + compile_error!(concat!( + "missing argument list after `", + stringify!($case), + "`", + )) + }; + (@parse_list_error2 $($tail:tt)*) => { + select!(@parse_list_error3 $($tail)*) + }; + // Stage 3: check the `=>` and what comes after it. + (@parse_list_error3 $case:ident($($args:tt)*)) => { + compile_error!(concat!( + "missing `=>` after the argument list of `", + stringify!($case), + "`", + )) + }; + (@parse_list_error3 $case:ident($($args:tt)*) =>) => { + compile_error!("expected expression after `=>`") + }; + (@parse_list_error3 $case:ident($($args:tt)*) => $body:expr; $($tail:tt)*) => { + compile_error!(concat!( + "did you mean to put a comma instead of the semicolon after `", + stringify!($body), + "`?", + )) + }; + (@parse_list_error3 $case:ident($($args:tt)*) => recv($($a:tt)*) $($tail:tt)*) => { + compile_error!("expected an expression after `=>`") + }; + (@parse_list_error3 $case:ident($($args:tt)*) => send($($a:tt)*) $($tail:tt)*) => { + compile_error!("expected an expression after `=>`") + }; + (@parse_list_error3 $case:ident($($args:tt)*) => default($($a:tt)*) $($tail:tt)*) => { + compile_error!("expected an expression after `=>`") + }; + (@parse_list_error3 $case:ident($($args:tt)*) => $f:ident($($a:tt)*) $($tail:tt)*) => { + compile_error!(concat!( + "did you mean to put a comma after `", + stringify!($f), + "(", + stringify!($($a)*), + ")`?", + )) + }; + (@parse_list_error3 $case:ident($($args:tt)*) => $f:ident!($($a:tt)*) $($tail:tt)*) => { + compile_error!(concat!( + "did you mean to put a comma after `", + stringify!($f), + "!(", + stringify!($($a)*), + ")`?", + )) + }; + (@parse_list_error3 $case:ident($($args:tt)*) => $f:ident![$($a:tt)*] $($tail:tt)*) => { + compile_error!(concat!( + "did you mean to put a comma after `", + stringify!($f), + "![", + stringify!($($a)*), + "]`?", + )) + }; + (@parse_list_error3 $case:ident($($args:tt)*) => $f:ident!{$($a:tt)*} $($tail:tt)*) => { + compile_error!(concat!( + "did you mean to put a comma after `", + stringify!($f), + "!{", + stringify!($($a)*), + "}`?", + )) + }; + (@parse_list_error3 $case:ident($($args:tt)*) => $body:tt $($tail:tt)*) => { + compile_error!(concat!( + "did you mean to put a comma after `", + stringify!($body), + "`?", + )) + }; + (@parse_list_error3 $case:ident($($args:tt)*) $t:tt $($tail:tt)*) => { + compile_error!(concat!( + "expected `=>`, found `", + stringify!($t), + "`", + )) + }; + (@parse_list_error3 $case:ident $args:tt $($tail:tt)*) => { + compile_error!(concat!( + "expected an argument list, found `", + stringify!($args), + "`", + )) + }; + (@parse_list_error3 $($tail:tt)*) => { + select!(@parse_list_error4 $($tail)*) + }; + // Stage 4: fail with a generic error message. + (@parse_list_error4 $($tail:tt)*) => { + compile_error!("invalid syntax") + }; + + // Success! All cases were parsed. + (@parse_case + ($($recv:tt)*) + ($($send:tt)*) + $default:tt + () + $labels:tt + ) => { + select!( + @codegen_declare + ($($recv)* $($send)*) + ($($recv)*) + ($($send)*) + $default + ) + }; + // Error: there are no labels left. + (@parse_case + $recv:tt + $send:tt + $default:tt + $cases:tt + () + ) => { + compile_error!("too many cases in a `select!` block") + }; + // Check the format of a `recv` case... 
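+    // The accepted shapes are normalized into a single three-argument internal form here:
+    //   recv(r) => body            becomes   recv(&r, _, _) => body
+    //   recv(r, msg) => body       becomes   recv(&r, msg, _) => body
+    //   recv(rs, msg, from) => body  stays as the full three-argument form
+    // so the later codegen stages only ever have to deal with `recv($rs, $m, $r)`.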
+ (@parse_case + ($($recv:tt)*) + $send:tt + $default:tt + (recv($r:expr) => $body:tt, $($tail:tt)*) + ($label:tt $($labels:tt)*) + ) => { + select!( + @parse_case + ($($recv)* $label recv(&$r, _, _) => $body,) + $send + $default + ($($tail)*) + ($($labels)*) + ) + }; + (@parse_case + ($($recv:tt)*) + $send:tt + $default:tt + (recv($r:expr, $m:pat) => $body:tt, $($tail:tt)*) + ($label:tt $($labels:tt)*) + ) => { + select!( + @parse_case + ($($recv)* $label recv(&$r, $m, _) => $body,) + $send + $default + ($($tail)*) + ($($labels)*) + ) + }; + (@parse_case + ($($recv:tt)*) + $send:tt + $default:tt + (recv($rs:expr, $m:pat, $r:pat) => $body:tt, $($tail:tt)*) + ($label:tt $($labels:tt)*) + ) => { + select!( + @parse_case + ($($recv)* $label recv($rs, $m, $r) => $body,) + $send + $default + ($($tail)*) + ($($labels)*) + ) + }; + // Allow trailing comma... + (@parse_case + ($($recv:tt)*) + $send:tt + $default:tt + (recv($r:expr,) => $body:tt, $($tail:tt)*) + ($label:tt $($labels:tt)*) + ) => { + select!( + @parse_case + ($($recv)* $label recv($r, _, _) => $body,) + $send + $default + ($($tail)*) + ($($labels)*) + ) + }; + (@parse_case + ($($recv:tt)*) + $send:tt + $default:tt + (recv($r:expr, $m:pat,) => $body:tt, $($tail:tt)*) + ($label:tt $($labels:tt)*) + ) => { + select!( + @parse_case + ($($recv)* $label recv($r, $m, _) => $body,) + $send + $default + ($($tail)*) + ($($labels)*) + ) + }; + (@parse_case + ($($recv:tt)*) + $send:tt + $default:tt + (recv($rs:expr, $m:pat, $r:pat,) => $body:tt, $($tail:tt)*) + ($label:tt $($labels:tt)*) + ) => { + select!( + @parse_case + ($($recv)* $label recv($rs, $m, $r) => $body,) + $send + $default + ($($tail)*) + ($($labels)*) + ) + }; + // Error cases... + (@parse_case + ($($recv:tt)*) + $send:tt + $default:tt + (recv($($args:tt)*) => $body:tt, $($tail:tt)*) + $labels:tt + ) => { + compile_error!(concat!( + "invalid argument list in `recv(", + stringify!($($args)*), + ")`", + )) + }; + (@parse_case + ($($recv:tt)*) + $send:tt + $default:tt + (recv $t:tt => $body:tt, $($tail:tt)*) + $labels:tt + ) => { + compile_error!(concat!( + "expected an argument list after `recv`, found `", + stringify!($t), + "`", + )) + }; + + // Check the format of a `send` case... + (@parse_case + $recv:tt + ($($send:tt)*) + $default:tt + (send($s:expr, $m:expr) => $body:tt, $($tail:tt)*) + ($label:tt $($labels:tt)*) + ) => { + select!( + @parse_case + $recv + ($($send)* $label send($s, $m, _) => $body,) + $default + ($($tail)*) + ($($labels)*) + ) + }; + (@parse_case + $recv:tt + ($($send:tt)*) + $default:tt + (send($ss:expr, $m:expr, $s:pat) => $body:tt, $($tail:tt)*) + ($label:tt $($labels:tt)*) + ) => { + select!( + @parse_case + $recv + ($($send)* $label send($ss, $m, $s) => $body,) + $default + ($($tail)*) + ($($labels)*) + ) + }; + // Allow trailing comma... + (@parse_case + $recv:tt + ($($send:tt)*) + $default:tt + (send($s:expr, $m:expr,) => $body:tt, $($tail:tt)*) + ($label:tt $($labels:tt)*) + ) => { + select!( + @parse_case + $recv + ($($send)* $label send($s, $m, _) => $body,) + $default + ($($tail)*) + ($($labels)*) + ) + }; + (@parse_case + $recv:tt + ($($send:tt)*) + $default:tt + (send($ss:expr, $m:expr, $s:pat,) => $body:tt, $($tail:tt)*) + ($label:tt $($labels:tt)*) + ) => { + select!( + @parse_case + $recv + ($($send)* $label send($ss, $m, $s) => $body,) + $default + ($($tail)*) + ($($labels)*) + ) + }; + // Error cases... 
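+    // For example, `send(s) => { ... }` (a sender but no message to send) matches none of
+    // the accepted shapes above, so it is rejected with "invalid argument list in `send(s)`".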
+ (@parse_case + $recv:tt + ($($send:tt)*) + $default:tt + (send($($args:tt)*) => $body:tt, $($tail:tt)*) + $labels:tt + ) => { + compile_error!(concat!( + "invalid argument list in `send(", + stringify!($($args)*), + ")`", + )) + }; + (@parse_case + $recv:tt + ($($send:tt)*) + $default:tt + (send $args:tt => $body:tt, $($tail:tt)*) + $labels:tt + ) => { + compile_error!(concat!( + "expected an argument list after `send`, found `", + stringify!($args), + "`", + )) + }; + + // Check the format of a `default` case. + (@parse_case + $recv:tt + $send:tt + () + (default() => $body:tt, $($tail:tt)*) + ($($labels:tt)*) + ) => { + select!( + @parse_case + $recv + $send + ((0usize case0) default() => $body,) + ($($tail)*) + ($($labels)*) + ) + }; + // Valid, but duplicate default cases... + (@parse_case + $recv:tt + $send:tt + ($($default:tt)+) + (default() => $body:tt, $($tail:tt)*) + $labels:tt + ) => { + compile_error!("there can be only one `default` case in a `select!` block") + }; + // Other error cases... + (@parse_case + $recv:tt + $send:tt + $default:tt + (default($($args:tt)*) => $body:tt, $($tail:tt)*) + $labels:tt + ) => { + compile_error!(concat!( + "invalid argument list in `default(", + stringify!($($args)*), + ")`", + )) + }; + (@parse_case + $recv:tt + $send:tt + $default:tt + (default $t:tt => $body:tt, $($tail:tt)*) + $labels:tt + ) => { + compile_error!(concat!( + "expected an argument list after `default`, found `", + stringify!($t), + "`", + )) + }; + + // The case was not consumed, therefore it must be invalid. + (@parse_case + $recv:tt + $send:tt + $default:tt + ($case:ident $args:tt => $body:tt, $($tail:tt)*) + $labels:tt + ) => { + compile_error!(concat!( + "expected one of `recv`, `send`, or `default`, found `", + stringify!($case), + "`", + )) + }; + + // Declare the iterator variable for a `recv` case. + (@codegen_declare + (($i:tt $var:ident) recv($rs:expr, $m:pat, $r:pat) => $body:tt, $($tail:tt)*) + $recv:tt + $send:tt + $default:tt + ) => {{ + match { + #[allow(unused_imports)] + use $crate::internal::select::RecvArgument; + &mut (&&$rs)._as_recv_argument() + } { + $var => { + select!( + @codegen_declare + ($($tail)*) + $recv + $send + $default + ) + } + } + }}; + // Declare the iterator variable for a `send` case. + (@codegen_declare + (($i:tt $var:ident) send($ss:expr, $m:pat, $s:pat) => $body:tt, $($tail:tt)*) + $recv:tt + $send:tt + $default:tt + ) => {{ + match { + #[allow(unused_imports)] + use $crate::internal::select::SendArgument; + &mut (&&$ss)._as_send_argument() + } { + $var => { + select!( + @codegen_declare + ($($tail)*) + $recv + $send + $default + ) + } + } + }}; + // All iterator variables have been declared. + (@codegen_declare + () + $recv:tt + $send:tt + $default:tt + ) => {{ + #[cfg_attr(feature = "cargo-clippy", allow(clippy))] + let mut handles = select!(@codegen_container $recv $send); + select!(@codegen_fast_path $recv $send $default handles) + }}; + + // Attempt to optimize the whole `select!` into a single call to `recv`. + (@codegen_fast_path + (($i:tt $var:ident) recv($rs:expr, $m:pat, $r:pat) => $body:tt,) + () + () + $handles:ident + ) => {{ + if $handles.len() == 1 { + let $r = $handles[0].0; + let $m = $handles[0].0.recv(); + drop($handles); + $body + } else { + select!( + @codegen_main_loop + (($i $var) recv($rs, $m, $r) => $body,) + () + () + $handles + ) + } + }}; + // Attempt to optimize the whole `select!` into a single call to `recv_nonblocking`. 
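+    // (When the lone `recv` case resolves to exactly one receiver, the expansion below calls
+    // `recv_nonblocking` once: receiving a message or finding the channel closed runs the `recv`
+    // body, while an empty channel runs the `default` body. With more than one receiver it falls
+    // back to the main loop.)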
+ (@codegen_fast_path + (($recv_i:tt $recv_var:ident) recv($rs:expr, $m:pat, $r:pat) => $recv_body:tt,) + () + (($default_i:tt $default_var:ident) default() => $default_body:tt,) + $handles:ident + ) => {{ + if $handles.len() == 1 { + let res = $crate::internal::channel::recv_nonblocking($handles[0].0); + let msg; + + match res { + $crate::internal::channel::RecvNonblocking::Message(m) => { + msg = Some(m); + let $m = msg; + let $r = $handles[0].0; + drop($handles); + $recv_body + } + $crate::internal::channel::RecvNonblocking::Closed => { + msg = None; + let $m = msg; + let $r = $handles[0].0; + drop($handles); + $recv_body + } + $crate::internal::channel::RecvNonblocking::Empty => { + drop($handles); + $default_body + } + } + } else { + select!( + @codegen_main_loop + (($recv_i $recv_var) recv($rs, $m, $r) => $recv_body,) + () + (($default_i $default_var) default() => $default_body,) + $handles + ) + } + }}; + // Move on to the main select loop. + (@codegen_fast_path + $recv:tt + $send:tt + $default:tt + $handles:ident + ) => {{ + select!( + @codegen_main_loop + $recv + $send + $default + $handles + ) + }}; + // TODO: Optimize `select! { send(s, msg) => {} }`. + // TODO: Optimize `select! { send(s, msg) => {} default => {} }`. + + // The main select loop. + (@codegen_main_loop + $recv:tt + $send:tt + $default:tt + $handles:ident + ) => {{ + // Check if there's a `default` case. + let has_default = select!(@codegen_has_default $default); + + // Run the main loop. + #[allow(unused_mut)] + #[allow(unused_variables)] + let (mut token, index, selected) = $crate::internal::select::main_loop( + &mut $handles, + has_default, + ); + + // Pass the result of the main loop to the final step. + select!( + @codegen_finalize + token + index + selected + $handles + $recv + $send + $default + ) + }}; + + // Initialize the `handles` vector if there's only a single `recv` case. + (@codegen_container + (($i:tt $var:ident) recv($rs:expr, $m:pat, $r:pat) => $body:tt,) + () + ) => {{ + let mut c = $crate::internal::smallvec::SmallVec::< + [(&$crate::Receiver<_>, usize, *const u8); 4] + >::new(); + while let Some(r) = $var.next() { + let ptr = r as *const $crate::Receiver<_> as *const u8; + c.push((r, $i, ptr)); + } + c + }}; + // Initialize the `handles` vector if there's only a single `send` case. + (@codegen_container + () + (($i:tt $var:ident) send($ss:expr, $m:expr, $s:pat) => $body:tt,) + ) => {{ + let mut c = $crate::internal::smallvec::SmallVec::< + [(&$crate::Sender<_>, usize, *const u8); 4] + >::new(); + while let Some(s) = $var.next() { + let ptr = s as *const $crate::Sender<_> as *const u8; + c.push((s, $i, ptr)); + } + c + }}; + // Initialize the `handles` vector generically. + (@codegen_container + $recv:tt + $send:tt + ) => {{ + let mut c = $crate::internal::smallvec::SmallVec::< + [(&$crate::internal::select::SelectHandle, usize, *const u8); 4] + >::new(); + select!(@codegen_push c $recv $send); + c + }}; + + // Push a `recv` operation into the `handles` vector. + (@codegen_push + $handles:ident + (($i:tt $var:ident) recv($rs:expr, $m:pat, $r:pat) => $body:tt, $($tail:tt)*) + $send:tt + ) => { + while let Some(r) = $var.next() { + let ptr = r as *const $crate::Receiver<_> as *const u8; + $handles.push((r, $i, ptr)); + } + select!(@codegen_push $handles ($($tail)*) $send); + }; + // Push a `send` operation into the `handles` vector. 
+ (@codegen_push + $handles:ident + () + (($i:tt $var:ident) send($ss:expr, $m:expr, $s:pat) => $body:tt, $($tail:tt)*) + ) => { + while let Some(s) = $var.next() { + let ptr = s as *const $crate::Sender<_> as *const u8; + $handles.push((s, $i, ptr)); + } + select!(@codegen_push $handles () ($($tail)*)); + }; + // There are no more operations to push. + (@codegen_push + $handles:ident + () + () + ) => { + }; + + // Evaluate to `false` if there is no `default` case. + (@codegen_has_default + () + ) => { + false + }; + // Evaluate to `true` if there is a `default` case. + (@codegen_has_default + (($i:tt $var:ident) default() => $body:tt,) + ) => { + true + }; + + // Finalize a receive operation. + (@codegen_finalize + $token:ident + $index:ident + $selected:ident + $handles:ident + (($i:tt $var:ident) recv($rs:expr, $m:pat, $r:pat) => $body:tt, $($tail:tt)*) + $send:tt + $default:tt + ) => { + if $index == $i { + #[allow(unsafe_code)] + let ($m, $r) = unsafe { + #[cfg_attr(feature = "cargo-clippy", allow(clippy))] + let r = $crate::internal::select::deref_from_iterator( + $selected as *const $crate::Receiver<_>, + &$var, + ); + let msg = $crate::internal::channel::read(r, &mut $token); + (msg, r) + }; + + drop($handles); + $body + } else { + select!( + @codegen_finalize + $token + $index + $selected + $handles + ($($tail)*) + $send + $default + ) + } + }; + // Finalize a send operation. + (@codegen_finalize + $token:ident + $index:ident + $selected:ident + $handles:ident + () + (($i:tt $var:ident) send($ss:expr, $m:expr, $s:pat) => $body:tt, $($tail:tt)*) + $default:tt + ) => { + if $index == $i { + let $s = { + // We have to prefix variables with an underscore to get rid of warnings when + // evaluation of `$m` doesn't finish. + #[allow(unsafe_code)] + let _s = unsafe { + #[cfg_attr(feature = "cargo-clippy", allow(clippy))] + $crate::internal::select::deref_from_iterator( + $selected as *const $crate::Sender<_>, + &$var, + ) + }; + let _guard = $crate::internal::utils::AbortGuard( + "a send case triggered a panic while evaluating its message" + ); + let _msg = $m; + + #[allow(unreachable_code)] + { + ::std::mem::forget(_guard); + #[allow(unsafe_code)] + unsafe { $crate::internal::channel::write(_s, &mut $token, _msg); } + _s + } + }; + + drop($handles); + $body + } else { + select!( + @codegen_finalize + $token + $index + $selected + $handles + () + ($($tail)*) + $default + ) + } + }; + // Execute the default case. + (@codegen_finalize + $token:ident + $index:ident + $selected:ident + $handles:ident + () + () + (($i:tt $var:ident) default() => $body:tt,) + ) => { + if $index == $i { + drop($handles); + $body + } else { + select!( + @codegen_finalize + $token + $index + $selected + $handles + () + () + () + ) + } + }; + // No more cases to finalize. + (@codegen_finalize + $token:ident + $index:ident + $selected:ident + $handles:ident + () + () + () + ) => { + unreachable!("internal error in crossbeam-channel") + }; + + // Catches a bug within this macro (should not happen). 
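+    // (Any internal `@`-prefixed invocation that none of the rules above matched ends up in the
+    // arm below, so a broken expansion is reported verbatim at compile time instead of producing
+    // a confusing downstream error.)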
+    (@$($tokens:tt)*) => {
+        compile_error!(concat!(
+            "internal error in crossbeam-channel: ",
+            stringify!(@$($tokens)*),
+        ))
+    };
+
+    ($($case:ident $(($($args:tt)*))* => $body:expr $(,)*)*) => {
+        select!(
+            @parse_list
+            ()
+            ($($case $(($($args)*))* => $body,)*)
+        )
+    };
+
+    ($($tokens:tt)*) => {
+        select!(
+            @parse_list
+            ()
+            ($($tokens)*)
+        )
+    };
+}
diff --git a/crossbeam-channel/src/internal/utils.rs b/crossbeam-channel/src/internal/utils.rs
new file mode 100644
index 000000000..414718b47
--- /dev/null
+++ b/crossbeam-channel/src/internal/utils.rs
@@ -0,0 +1,114 @@
+//! Miscellaneous utilities.
+
+use std::cell::Cell;
+use std::num::Wrapping;
+use std::process;
+use std::sync::atomic;
+use std::thread;
+use std::time::Duration;
+
+use rand;
+
+/// A counter that performs exponential backoff in spin loops.
+pub struct Backoff(u32);
+
+impl Backoff {
+    /// Creates a new `Backoff`.
+    #[inline]
+    pub fn new() -> Self {
+        Backoff(0)
+    }
+
+    /// Backs off in a spin loop.
+    ///
+    /// This method may yield the current processor. Use it in lock-free retry loops.
+    #[inline]
+    pub fn spin(&mut self) {
+        for _ in 0..1 << self.0.min(6) {
+            atomic::spin_loop_hint();
+        }
+        self.0 = self.0.wrapping_add(1);
+    }
+
+    /// Backs off in a wait loop.
+    ///
+    /// Returns `true` if snoozing has reached a threshold where we should consider parking the
+    /// thread instead.
+    ///
+    /// This method may yield the current processor or the current thread. Use it when waiting on a
+    /// resource.
+    #[inline]
+    pub fn snooze(&mut self) -> bool {
+        if self.0 <= 6 {
+            for _ in 0..1 << self.0 {
+                atomic::spin_loop_hint();
+            }
+        } else {
+            thread::yield_now();
+        }
+
+        self.0 = self.0.wrapping_add(1);
+        self.0 <= 10
+    }
+}
+
+/// Once dropped, aborts with an error message.
+///
+/// This guard is used for protection from unrecoverable panics.
+pub struct AbortGuard(pub &'static str);
+
+impl Drop for AbortGuard {
+    fn drop(&mut self) {
+        eprintln!(
+            "{}, {}:{}:{}",
+            self.0,
+            file!(),
+            line!(),
+            column!(),
+        );
+        process::abort();
+    }
+}
+
+/// Shuffles a slice randomly.
+pub fn shuffle<T>(v: &mut [T]) {
+    let len = v.len();
+    if len <= 1 {
+        return;
+    }
+
+    thread_local! {
+        static RNG: Cell<Wrapping<u32>> = {
+            let init = rand::random::<u32>() | 1;
+            Cell::new(Wrapping(init))
+        }
+    }
+
+    let _ = RNG.try_with(|rng| {
+        for i in 1..len {
+            // This is the 32-bit variant of Xorshift.
+            // https://en.wikipedia.org/wiki/Xorshift
+            let mut x = rng.get();
+            x ^= x << 13;
+            x ^= x >> 17;
+            x ^= x << 5;
+            rng.set(x);
+
+            let x = x.0;
+            let n = i + 1;
+
+            // This is a fast alternative to `let j = x % n`.
+            // https://lemire.me/blog/2016/06/27/a-fast-alternative-to-the-modulo-reduction/
+            let j = ((x as u64).wrapping_mul(n as u64) >> 32) as u32 as usize;
+
+            v.swap(i, j);
+        }
+    });
+}
+
+/// Blocks the current thread forever.
+pub fn sleep_forever() -> ! {
+    loop {
+        thread::sleep(Duration::from_secs(1000));
+    }
+}
diff --git a/crossbeam-channel/src/internal/waker.rs b/crossbeam-channel/src/internal/waker.rs
new file mode 100644
index 000000000..ad50754fc
--- /dev/null
+++ b/crossbeam-channel/src/internal/waker.rs
@@ -0,0 +1,251 @@
+//! Waking mechanism for threads blocked on channel operations.
+
+use std::collections::VecDeque;
+use std::num::Wrapping;
+use std::sync::atomic::{AtomicUsize, Ordering};
+use std::thread::{self, ThreadId};
+
+use parking_lot::Mutex;
+
+use internal::context::Context;
+use internal::select::{Operation, Selected};
+
+/// Represents a thread blocked on a specific channel operation.
+pub struct Entry {
+    /// Context associated with the thread owning this operation.
+    pub context: Context,
+
+    /// The operation.
+    pub oper: Operation,
+
+    /// Optional packet.
+    pub packet: usize,
+}
+
+/// A queue of threads blocked on channel operations.
+///
+/// This data structure is used by threads to register blocking operations and get woken up once
+/// an operation becomes ready.
+pub struct Waker {
+    /// The list of registered blocking operations.
+    entries: VecDeque<Entry>,
+
+    /// The number of calls to `register` and `register_with_packet`.
+    register_count: Wrapping<usize>,
+}
+
+impl Waker {
+    /// Creates a new `Waker`.
+    #[inline]
+    pub fn new() -> Self {
+        Waker {
+            entries: VecDeque::new(),
+            register_count: Wrapping(0),
+        }
+    }
+
+    /// Registers the current thread with an operation.
+    #[inline]
+    pub fn register(&mut self, oper: Operation, cx: &Context) {
+        self.register_with_packet(oper, 0, cx);
+    }
+
+    /// Registers the current thread with an operation and a packet.
+    #[inline]
+    pub fn register_with_packet(&mut self, oper: Operation, packet: usize, cx: &Context) {
+        self.entries.push_back(Entry {
+            context: cx.clone(),
+            oper,
+            packet,
+        });
+        self.register_count += Wrapping(1);
+    }
+
+    /// Unregisters an operation previously registered by the current thread.
+    #[inline]
+    pub fn unregister(&mut self, oper: Operation) -> Option<Entry> {
+        if let Some((i, _)) = self.entries
+            .iter()
+            .enumerate()
+            .find(|&(_, entry)| entry.oper == oper)
+        {
+            let entry = self.entries.remove(i);
+            Self::maybe_shrink(&mut self.entries);
+            entry
+        } else {
+            None
+        }
+    }
+
+    /// Attempts to find one thread (not the current one), select its operation, and wake it up.
+    #[inline]
+    pub fn wake_one(&mut self) -> Option<Entry> {
+        if !self.entries.is_empty() {
+            let thread_id = current_thread_id();
+
+            for i in 0..self.entries.len() {
+                // Does the entry belong to a different thread?
+                if self.entries[i].context.thread_id() != thread_id {
+                    // Try selecting this operation.
+                    let sel = Selected::Operation(self.entries[i].oper);
+                    let res = self.entries[i].context.try_select(sel);
+
+                    if res.is_ok() {
+                        // Provide the packet.
+                        self.entries[i].context.store_packet(self.entries[i].packet);
+                        // Wake the thread up.
+                        self.entries[i].context.unpark();
+
+                        // Remove the entry from the queue to keep it clean and improve
+                        // performance.
+                        let entry = self.entries.remove(i).unwrap();
+                        Self::maybe_shrink(&mut self.entries);
+                        return Some(entry);
+                    }
+                }
+            }
+        }
+
+        None
+    }
+
+    /// Notifies all threads that the channel is closed.
+    #[inline]
+    pub fn close(&mut self) {
+        for entry in self.entries.iter() {
+            if entry.context.try_select(Selected::Closed).is_ok() {
+                // Wake the thread up.
+                //
+                // Here we don't remove the entry from the queue. Registered threads must
+                // unregister from the waker by themselves. They might also want to recover the
+                // packet value and destroy it, if necessary.
+                entry.context.unpark();
+            }
+        }
+    }
+
+    /// Returns `true` if there is an entry which can be woken up by the current thread.
+    #[inline]
+    pub fn can_wake_one(&self) -> bool {
+        if self.entries.is_empty() {
+            false
+        } else {
+            let thread_id = current_thread_id();
+
+            self.entries.iter().any(|entry| {
+                entry.context.thread_id() != thread_id
+                    && entry.context.selected() == Selected::Waiting
+            })
+        }
+    }
+
+    /// Returns the number of entries in the queue.
+    #[inline]
+    pub fn len(&self) -> usize {
+        self.entries.len()
+    }
+
+    #[inline]
+    pub fn register_count(&self) -> usize {
+        self.register_count.0
+    }
+
+    /// Shrinks the internal queue if its capacity is much larger than length.
+    #[inline]
+    fn maybe_shrink(entries: &mut VecDeque<Entry>) {
+        if entries.capacity() > 32 && entries.len() < entries.capacity() / 4 {
+            let mut v = VecDeque::with_capacity(entries.capacity() / 2);
+            v.extend(entries.drain(..));
+            *entries = v;
+        }
+    }
+}
+
+impl Drop for Waker {
+    #[inline]
+    fn drop(&mut self) {
+        debug_assert!(self.entries.is_empty());
+    }
+}
+
+/// A waker that can be shared among threads without locking.
+///
+/// This is a simple wrapper around `Waker` that internally uses a mutex for synchronization.
+pub struct SyncWaker {
+    /// The inner `Waker`.
+    inner: Mutex<Waker>,
+
+    /// Number of operations in the waker.
+    len: AtomicUsize,
+}
+
+impl SyncWaker {
+    /// Creates a new `SyncWaker`.
+    #[inline]
+    pub fn new() -> Self {
+        SyncWaker {
+            inner: Mutex::new(Waker::new()),
+            len: AtomicUsize::new(0),
+        }
+    }
+
+    /// Registers the current thread with an operation.
+    #[inline]
+    pub fn register(&self, oper: Operation, cx: &Context) {
+        let mut inner = self.inner.lock();
+        inner.register(oper, cx);
+        self.len.store(inner.len(), Ordering::SeqCst);
+    }
+
+    /// Unregisters an operation previously registered by the current thread.
+    #[inline]
+    pub fn unregister(&self, oper: Operation) -> Option<Entry> {
+        if self.len.load(Ordering::SeqCst) > 0 {
+            let mut inner = self.inner.lock();
+            let entry = inner.unregister(oper);
+            self.len.store(inner.len(), Ordering::SeqCst);
+            entry
+        } else {
+            None
+        }
+    }
+
+    /// Attempts to find one thread (not the current one), select its operation, and wake it up.
+    #[inline]
+    pub fn wake_one(&self) -> Option<Entry> {
+        if self.len.load(Ordering::SeqCst) > 0 {
+            let mut inner = self.inner.lock();
+            let entry = inner.wake_one();
+            self.len.store(inner.len(), Ordering::SeqCst);
+            entry
+        } else {
+            None
+        }
+    }
+
+    /// Notifies all threads that the channel is closed.
+    pub fn close(&self) {
+        self.inner.lock().close();
+    }
+}
+
+impl Drop for SyncWaker {
+    #[inline]
+    fn drop(&mut self) {
+        debug_assert_eq!(self.inner.lock().len(), 0);
+        debug_assert_eq!(self.len.load(Ordering::SeqCst), 0);
+    }
+}
+
+/// Returns the id of the current thread.
+#[inline]
+fn current_thread_id() -> ThreadId {
+    thread_local! {
+        /// Cached thread-local id.
+        static THREAD_ID: ThreadId = thread::current().id();
+    }
+
+    THREAD_ID
+        .try_with(|id| *id)
+        .unwrap_or_else(|_| thread::current().id())
+}
diff --git a/crossbeam-channel/src/lib.rs b/crossbeam-channel/src/lib.rs
new file mode 100644
index 000000000..b4d54a635
--- /dev/null
+++ b/crossbeam-channel/src/lib.rs
@@ -0,0 +1,380 @@
+//! Multi-producer multi-consumer channels for message passing.
+//!
+//! Crossbeam's channels are an alternative to the [`std::sync::mpsc`] channels provided by the
+//! standard library. They are an improvement in terms of performance, ergonomics, and features.
+//!
+//! Here's a quick example:
+//!
+//! ```
+//! use crossbeam_channel as channel;
+//!
+//! // Create a channel of unbounded capacity.
+//! let (s, r) = channel::unbounded();
+//!
+//! // Send a message into the channel.
+//! s.send("Hello world!");
+//!
+//! // Receive the message from the channel.
+//! assert_eq!(r.recv(), Some("Hello world!"));
+//! ```
+//!
+//! # Types of channels
+//!
+//! Channels are created using two functions:
+//!
+//! * [`bounded`] creates a channel of bounded capacity, i.e.
there is a limit to how many messages +//! it can hold. +//! +//! * [`unbounded`] creates a channel of unbounded capacity, i.e. it can contain arbitrary number +//! of messages at any time. +//! +//! Both functions return two handles: a sender and a receiver. Senders and receivers represent +//! two opposite sides of a channel. Messages are sent into the channel using senders and received +//! using receivers. +//! +//! Creating a bounded channel: +//! +//! ``` +//! use crossbeam_channel as channel; +//! +//! // Create a channel that can hold at most 5 messages at a time. +//! let (s, r) = channel::bounded(5); +//! +//! // Can only send 5 messages without blocking. +//! for i in 0..5 { +//! s.send(i); +//! } +//! +//! // Another call to `send` would block because the channel is full. +//! // s.send(5); +//! ``` +//! +//! Creating an unbounded channel: +//! +//! ``` +//! use crossbeam_channel as channel; +//! +//! // Create an unbounded channel. +//! let (s, r) = channel::unbounded(); +//! +//! // Can send any number of messages into the channel without blocking. +//! for i in 0..1000 { +//! s.send(i); +//! } +//! ``` +//! +//! A rather special case is a bounded, zero-capacity channel. This kind of channel cannot hold any +//! messages at all! In order to send a message through the channel, a sending thread and a +//! receiving thread have to pair up at the same time: +//! +//! ``` +//! use std::thread; +//! use crossbeam_channel as channel; +//! +//! // Create a zero-capacity channel. +//! let (s, r) = channel::bounded(0); +//! +//! // Spawn a thread that sends a message into the channel. +//! // Sending blocks until a receive operation appears on the other side. +//! thread::spawn(move || s.send("Hi!")); +//! +//! // Receive the message. +//! // Receiving blocks until a send operation appears on the other side. +//! assert_eq!(r.recv(), Some("Hi!")); +//! ``` +//! +//! # Sharing channels +//! +//! Senders and receivers can be either shared by reference or cloned and then sent to other +//! threads. There can be multiple senders and multiple receivers associated with the same channel. +//! +//! Sharing by reference: +//! +//! ``` +//! # extern crate crossbeam_channel; +//! extern crate crossbeam; +//! # fn main() { +//! use crossbeam_channel as channel; +//! +//! let (s, r) = channel::unbounded(); +//! +//! crossbeam::scope(|scope| { +//! // Spawn a thread that sends one message and then receives one. +//! scope.spawn(|| { +//! s.send(1); +//! r.recv().unwrap(); +//! }); +//! +//! // Spawn another thread that does the same thing. +//! scope.spawn(|| { +//! s.send(2); +//! r.recv().unwrap(); +//! }); +//! }); +//! +//! # } +//! ``` +//! +//! Sharing by cloning: +//! +//! ``` +//! use std::thread; +//! use crossbeam_channel as channel; +//! +//! let (s1, r1) = channel::unbounded(); +//! let (s2, r2) = (s1.clone(), r1.clone()); +//! +//! // Spawn a thread that sends one message and then receives one. +//! thread::spawn(move || { +//! s1.send(1); +//! r1.recv().unwrap(); +//! }); +//! +//! // Spawn another thread that receives a message and then sends one. +//! thread::spawn(move || { +//! r2.recv().unwrap(); +//! s2.send(2); +//! }); +//! ``` +//! +//! Note that cloning only creates a new reference to the same sending or receiving side. Cloning +//! does not create a new channel. +//! +//! # Closing channels +//! +//! When all senders associated with a channel get dropped, the channel becomes closed. No more +//! messages can be sent, but any remaining messages can still be received. 
Receive operations on a +//! closed channel never block, even if the channel is empty. +//! +//! ``` +//! use crossbeam_channel as channel; +//! +//! let (s, r) = channel::unbounded(); +//! s.send(1); +//! s.send(2); +//! s.send(3); +//! +//! // The only sender is dropped, closing the channel. +//! drop(s); +//! +//! // The remaining messages can be received. +//! assert_eq!(r.recv(), Some(1)); +//! assert_eq!(r.recv(), Some(2)); +//! assert_eq!(r.recv(), Some(3)); +//! +//! // There are no more messages in the channel. +//! assert!(r.is_empty()); +//! +//! // Note that calling `r.recv()` will not block. +//! // Instead, `None` is returned immediately. +//! assert_eq!(r.recv(), None); +//! ``` +//! +//! # Blocking and non-blocking operations +//! +//! Sending a message into a full bounded channel will block until an empty slot in the channel +//! becomes available. Sending into an unbounded channel never blocks because there is always +//! enough space in it. Zero-capacity channels are always empty, and sending blocks until a receive +//! operation appears on the other side of the channel. +//! +//! Receiving from an empty channel blocks until a message is sent into the channel or the channel +//! becomes closed. Zero-capacity channels are always empty, and receiving blocks until a send +//! operation appears on the other side of the channel or it becomes closed. +//! +//! There is also a non-blocking method [`try_recv`], which receives a message if it is immediately +//! available, or returns `None` otherwise. +//! +//! ``` +//! use crossbeam_channel as channel; +//! +//! let (s, r) = channel::bounded(1); +//! +//! // Send a message into the channel. +//! s.send("foo"); +//! +//! // This call would block because the channel is full. +//! // s.send("bar"); +//! +//! // Receive the message. +//! assert_eq!(r.recv(), Some("foo")); +//! +//! // This call would block because the channel is empty. +//! // r.recv(); +//! +//! // Try receiving a message without blocking. +//! assert_eq!(r.try_recv(), None); +//! +//! // Close the channel. +//! drop(s); +//! +//! // This call doesn't block because the channel is now closed. +//! assert_eq!(r.recv(), None); +//! ``` +//! +//! For greater control over blocking, consider using the [`select!`] macro. +//! +//! # Iteration +//! +//! A channel is a special kind of iterator, where items can be dynamically produced by senders and +//! consumed by receivers. Indeed, [`Receiver`] implements the [`Iterator`] trait, and calling +//! [`next`] is equivalent to calling [`recv`]. +//! +//! ``` +//! use std::thread; +//! use crossbeam_channel as channel; +//! +//! let (s, r) = channel::unbounded(); +//! +//! thread::spawn(move || { +//! s.send(1); +//! s.send(2); +//! s.send(3); +//! // `s` was moved into the closure so now it gets dropped, +//! // thus closing the channel. +//! }); +//! +//! // Collect all messages from the channel. +//! // +//! // Note that the call to `collect` blocks until the channel becomes +//! // closed and empty, i.e. until `r.next()` returns `None`. +//! let v: Vec<_> = r.collect(); +//! assert_eq!(v, [1, 2, 3]); +//! ``` +//! +//! # Select +//! +//! The [`select!`] macro allows declaring a set of channel operations and blocking until any one +//! of them becomes ready. Finally, one of the operations is executed. If multiple operations +//! are ready at the same time, a random one is chosen. It is also possible to declare a `default` +//! case that gets executed if none of the operations are initially ready. +//! +//! 
An example of receiving a message from two channels, whichever becomes ready first: +//! +//! ``` +//! # #[macro_use] +//! # extern crate crossbeam_channel; +//! # fn main() { +//! use std::thread; +//! use crossbeam_channel as channel; +//! +//! let (s1, r1) = channel::unbounded(); +//! let (s2, r2) = channel::unbounded(); +//! +//! thread::spawn(move || s1.send("foo")); +//! thread::spawn(move || s2.send("bar")); +//! +//! // Only one of these two receive operations will be executed. +//! select! { +//! recv(r1, msg) => assert_eq!(msg, Some("foo")), +//! recv(r2, msg) => assert_eq!(msg, Some("bar")), +//! } +//! # } +//! ``` +//! +//! For more details, take a look at the documentation for [`select!`]. +//! +//! If you need to dynamically add cases rather than define them statically inside the macro, use +//! [`Select`] instead. +//! +//! # Frequently asked questions +//! +//! ### How to try receiving a message, but also check whether the channel is empty or closed? +//! +//! Use the [`select!`] macro: +//! +//! ```rust +//! # #[macro_use] +//! # extern crate crossbeam_channel; +//! # fn main() { +//! use crossbeam_channel as channel; +//! +//! let (s, r) = channel::unbounded(); +//! s.send("hello"); +//! +//! select! { +//! recv(r, msg) => match msg { +//! Some(msg) => println!("received {:?}", msg), +//! None => println!("the channel is closed"), +//! } +//! default => println!("the channel is empty"), +//! } +//! # } +//! ``` +//! +//! ### How to try sending a message without blocking when the channel is full? +//! +//! Use the [`select!`] macro: +//! +//! ```rust +//! # #[macro_use] +//! # extern crate crossbeam_channel; +//! # fn main() { +//! use crossbeam_channel as channel; +//! +//! let (s, r) = channel::bounded(1); +//! s.send("first"); +//! +//! select! { +//! send(s, "second") => println!("message sent"), +//! default => println!("the channel is full"), +//! } +//! # } +//! ``` +//! +//! ### How to try sending/receiving a message with a timeout? +//! +//! Function [`after`] creates a special kind of channel that delivers a message after the +//! specified timeout. Use [`select!`] to wait until a message is sent/received or the timeout +//! is fired: +//! +//! +//! ```rust +//! # #[macro_use] +//! # extern crate crossbeam_channel; +//! # fn main() { +//! use std::time::Duration; +//! use crossbeam_channel as channel; +//! +//! let (s, r) = channel::bounded(1); +//! s.send("hello"); +//! +//! let timeout = Duration::from_millis(100); +//! +//! select! { +//! recv(r, msg) => match msg { +//! Some(msg) => println!("received {:?}", msg), +//! None => println!("the channel is closed"), +//! } +//! recv(channel::after(timeout)) => println!("timed out; the channel is still empty"), +//! } +//! # } +//! ``` +//! +//! [`std::sync::mpsc`]: https://doc.rust-lang.org/std/sync/mpsc/index.html +//! [`unbounded`]: fn.unbounded.html +//! [`bounded`]: fn.bounded.html +//! [`after`]: fn.bounded.html +//! [`send`]: struct.Sender.html#method.send +//! [`try_recv`]: struct.Receiver.html#method.try_recv +//! [`recv`]: struct.Receiver.html#method.recv +//! [`next`]: https://doc.rust-lang.org/std/iter/trait.Iterator.html#tymethod.next +//! [`Iterator`]: https://doc.rust-lang.org/std/iter/trait.Iterator.html +//! [`select!`]: macro.select.html +//! [`Select`]: struct.Select.html +//! [`Sender`]: struct.Sender.html +//! 
[`Receiver`]: struct.Receiver.html + +extern crate crossbeam_epoch; +extern crate crossbeam_utils; +extern crate rand; +extern crate parking_lot; + +mod flavors; + +#[doc(hidden)] +pub mod internal; + +pub use internal::channel::{Receiver, Sender}; +pub use internal::channel::{bounded, unbounded}; +pub use internal::channel::{after, tick}; +pub use internal::select::Select; diff --git a/crossbeam-channel/tests/after.rs b/crossbeam-channel/tests/after.rs new file mode 100644 index 000000000..cebf09b4d --- /dev/null +++ b/crossbeam-channel/tests/after.rs @@ -0,0 +1,299 @@ +//! Tests for the after channel flavor. + +extern crate crossbeam; +#[macro_use] +extern crate crossbeam_channel as channel; +extern crate rand; + +mod wrappers; + +macro_rules! tests { + ($channel:path) => { + use std::sync::atomic::AtomicUsize; + use std::sync::atomic::Ordering; + use std::thread; + use std::time::{Duration, Instant}; + + use $channel as channel; + use crossbeam; + + fn ms(ms: u64) -> Duration { + Duration::from_millis(ms) + } + + #[test] + fn fire() { + let start = Instant::now(); + let r = channel::after(ms(50)); + + assert_eq!(r.try_recv(), None); + thread::sleep(ms(100)); + + let fired = r.try_recv().unwrap(); + assert!(start < fired); + assert!(fired - start >= ms(50)); + + let now = Instant::now(); + assert!(fired < now); + assert!(now - fired >= ms(50)); + + assert_eq!(r.try_recv(), None); + + select! { + recv(r) => panic!(), + default => {} + } + + select! { + recv(r) => panic!(), + recv(channel::after(ms(200))) => {} + } + } + + #[test] + fn capacity() { + const COUNT: usize = 10; + + for i in 0..COUNT { + let r = channel::after(ms(i as u64)); + assert_eq!(r.capacity(), Some(1)); + } + } + + #[test] + fn len_empty_full() { + let r = channel::after(ms(50)); + + assert_eq!(r.len(), 0); + assert_eq!(r.is_empty(), true); + assert_eq!(r.is_full(), false); + + thread::sleep(ms(100)); + + assert_eq!(r.len(), 1); + assert_eq!(r.is_empty(), false); + assert_eq!(r.is_full(), true); + + r.try_recv().unwrap(); + + assert_eq!(r.len(), 0); + assert_eq!(r.is_empty(), true); + assert_eq!(r.is_full(), false); + } + + #[test] + fn recv() { + let start = Instant::now(); + let r = channel::after(ms(50)); + + let fired = r.recv().unwrap(); + assert!(start < fired); + assert!(fired - start >= ms(50)); + + let now = Instant::now(); + assert!(fired < now); + assert!(now - fired < fired - start); + + assert_eq!(r.try_recv(), None); + } + + #[test] + fn recv_two() { + let r1 = channel::after(ms(50)); + let r2 = channel::after(ms(50)); + + crossbeam::scope(|scope| { + scope.spawn(|| { + select! { + recv(r1) => {} + recv(r2) => {} + } + }); + scope.spawn(|| { + select! { + recv(r1) => {} + recv(r2) => {} + } + }); + }); + } + + #[test] + fn recv_race() { + select! { + recv(channel::after(ms(50))) => {} + recv(channel::after(ms(100))) => panic!(), + } + + select! { + recv(channel::after(ms(100))) => panic!(), + recv(channel::after(ms(50))) => {} + } + } + + #[test] + fn stress_default() { + const COUNT: usize = 10; + + for _ in 0..COUNT { + select! { + recv(channel::after(ms(0))) => {} + default => panic!(), + } + } + + for _ in 0..COUNT { + select! 
{ + recv(channel::after(ms(100))) => panic!(), + default => {} + } + } + } + + #[test] + fn select_shared() { + const THREADS: usize = 4; + const COUNT: usize = 1000; + const TIMEOUT_MS: u64 = 100; + + let v = (0..COUNT).map(|i| channel::after(ms(i as u64 / TIMEOUT_MS / 2))) + .collect::>(); + let hits = AtomicUsize::new(0); + + crossbeam::scope(|scope| { + for _ in 0..THREADS { + scope.spawn(|| { + let v: Vec<&_> = v.iter().collect(); + + loop { + select! { + recv(v.iter().map(|r| &r.0)) => { + hits.fetch_add(1, Ordering::SeqCst); + } + recv(channel::after(ms(TIMEOUT_MS))) => break + } + } + }); + } + }); + + assert_eq!(hits.load(Ordering::SeqCst), COUNT); + } + + #[test] + fn select_cloned() { + const THREADS: usize = 4; + const COUNT: usize = 1000; + const TIMEOUT_MS: u64 = 100; + + let v = (0..COUNT).map(|i| channel::after(ms(i as u64 / TIMEOUT_MS / 2))) + .collect::>(); + let hits = AtomicUsize::new(0); + + crossbeam::scope(|scope| { + for _ in 0..THREADS { + scope.spawn(|| { + let v = v.iter().map(|r| &r.0).collect::>(); + + loop { + select! { + recv(v) => { + hits.fetch_add(1, Ordering::SeqCst); + } + recv(channel::after(ms(TIMEOUT_MS))) => break + } + } + }); + } + }); + + assert_eq!(hits.load(Ordering::SeqCst), COUNT); + } + + #[test] + fn stress_clone() { + const RUNS: usize = 1000; + const THREADS: usize = 10; + const COUNT: usize = 50; + + for i in 0..RUNS { + let r = channel::after(ms(i as u64)); + + crossbeam::scope(|scope| { + for _ in 0..THREADS { + scope.spawn(|| { + let r = r.clone(); + r.try_recv(); + + for _ in 0..COUNT { + drop(r.clone()); + thread::yield_now(); + } + }); + } + }); + } + } + + #[test] + fn fairness() { + const COUNT: usize = 1000; + + for &dur in &[0, 1] { + let mut hits = [0usize; 2]; + + for _ in 0..COUNT { + select! { + recv(channel::after(ms(dur))) => hits[0] += 1, + recv(channel::after(ms(dur))) => hits[1] += 1, + } + } + + assert!(hits.iter().all(|x| *x >= COUNT / hits.len() / 2)); + } + } + + #[test] + fn fairness_duplicates() { + const COUNT: usize = 1000; + + for &dur in &[0, 1] { + let mut hits = [0usize; 5]; + + for _ in 0..COUNT { + let r = channel::after(ms(dur)); + select! { + recv(r) => hits[0] += 1, + recv(r) => hits[1] += 1, + recv(r) => hits[2] += 1, + recv(r) => hits[3] += 1, + recv(r) => hits[4] += 1, + } + } + + assert!(hits.iter().all(|x| *x >= COUNT / hits.len() / 2)); + } + } + }; +} + +mod normal { + tests!(wrappers::normal); +} + +mod cloned { + tests!(wrappers::cloned); +} + +mod select { + tests!(wrappers::select); +} + +mod select_spin { + tests!(wrappers::select_spin); +} + +mod select_multi { + tests!(wrappers::select_multi); +} + diff --git a/crossbeam-channel/tests/array.rs b/crossbeam-channel/tests/array.rs new file mode 100644 index 000000000..3445fdf7c --- /dev/null +++ b/crossbeam-channel/tests/array.rs @@ -0,0 +1,580 @@ +//! Tests for the array channel flavor. + +extern crate crossbeam; +#[macro_use] +extern crate crossbeam_channel as channel; +extern crate rand; + +mod wrappers; + +macro_rules! tests { + ($channel:path) => { + use std::sync::atomic::AtomicUsize; + use std::sync::atomic::Ordering; + use std::thread; + use std::time::Duration; + + use $channel as channel; + use crossbeam; + use rand::{thread_rng, Rng}; + + fn ms(ms: u64) -> Duration { + Duration::from_millis(ms) + } + + #[test] + fn smoke() { + let (s, r) = channel::bounded(1); + s.send(7); + assert_eq!(r.try_recv(), Some(7)); + + s.send(8); + assert_eq!(r.recv(), Some(8)); + + assert_eq!(r.try_recv(), None); + select! 
{ + recv(r) => panic!(), + recv(channel::after(ms(1000))) => {} + } + } + + #[test] + fn capacity() { + for i in 1..10 { + let (s, r) = channel::bounded::<()>(i); + assert_eq!(s.capacity(), Some(i)); + assert_eq!(r.capacity(), Some(i)); + } + } + + #[test] + fn len_empty_full() { + let (s, r) = channel::bounded(2); + + assert_eq!(s.len(), 0); + assert_eq!(s.is_empty(), true); + assert_eq!(s.is_full(), false); + assert_eq!(r.len(), 0); + assert_eq!(r.is_empty(), true); + assert_eq!(r.is_full(), false); + + s.send(()); + + assert_eq!(s.len(), 1); + assert_eq!(s.is_empty(), false); + assert_eq!(s.is_full(), false); + assert_eq!(r.len(), 1); + assert_eq!(r.is_empty(), false); + assert_eq!(r.is_full(), false); + + s.send(()); + + assert_eq!(s.len(), 2); + assert_eq!(s.is_empty(), false); + assert_eq!(s.is_full(), true); + assert_eq!(r.len(), 2); + assert_eq!(r.is_empty(), false); + assert_eq!(r.is_full(), true); + + r.recv().unwrap(); + + assert_eq!(s.len(), 1); + assert_eq!(s.is_empty(), false); + assert_eq!(s.is_full(), false); + assert_eq!(r.len(), 1); + assert_eq!(r.is_empty(), false); + assert_eq!(r.is_full(), false); + } + + #[test] + fn recv() { + let (s, r) = channel::bounded(100); + + crossbeam::scope(|scope| { + scope.spawn(move || { + assert_eq!(r.recv(), Some(7)); + thread::sleep(ms(1000)); + assert_eq!(r.recv(), Some(8)); + thread::sleep(ms(1000)); + assert_eq!(r.recv(), Some(9)); + assert_eq!(r.recv(), None); + }); + scope.spawn(move || { + thread::sleep(ms(1500)); + s.send(7); + s.send(8); + s.send(9); + }); + }); + } + + #[test] + fn recv_timeout() { + let (s, r) = channel::bounded::(100); + + crossbeam::scope(|scope| { + scope.spawn(move || { + select! { + recv(r) => panic!(), + recv(channel::after(ms(1000))) => {} + } + select! { + recv(r, v) => assert_eq!(v, Some(7)), + recv(channel::after(ms(1000))) => panic!(), + } + select! { + recv(r, v) => assert_eq!(v, None), + recv(channel::after(ms(1000))) => panic!(), + } + }); + scope.spawn(move || { + thread::sleep(ms(1500)); + s.send(7); + }); + }); + } + + #[test] + fn try_recv() { + let (s, r) = channel::bounded(100); + + crossbeam::scope(|scope| { + scope.spawn(move || { + assert_eq!(r.try_recv(), None); + thread::sleep(ms(1500)); + assert_eq!(r.try_recv(), Some(7)); + thread::sleep(ms(500)); + assert_eq!(r.try_recv(), None); + }); + scope.spawn(move || { + thread::sleep(ms(1000)); + s.send(7); + }); + }); + } + + #[test] + fn send() { + let (s, r) = channel::bounded(1); + + crossbeam::scope(|scope| { + scope.spawn(move || { + s.send(7); + thread::sleep(ms(1000)); + s.send(8); + thread::sleep(ms(1000)); + s.send(9); + thread::sleep(ms(1000)); + s.send(10); + }); + scope.spawn(move || { + thread::sleep(ms(1500)); + assert_eq!(r.recv(), Some(7)); + assert_eq!(r.recv(), Some(8)); + assert_eq!(r.recv(), Some(9)); + }); + }); + } + + #[test] + fn send_timeout() { + let (s, r) = channel::bounded(2); + + crossbeam::scope(|scope| { + scope.spawn(move || { + select! { + send(s, 1) => {} + recv(channel::after(ms(1000))) => panic!(), + } + select! { + send(s, 2) => {} + recv(channel::after(ms(1000))) => panic!(), + } + select! { + send(s, 3) => panic!(), + recv(channel::after(ms(500))) => {} + } + thread::sleep(ms(1000)); + select! 
{ + send(s, 4) => {} + recv(channel::after(ms(1000))) => panic!(), + } + }); + scope.spawn(move || { + thread::sleep(ms(1000)); + assert_eq!(r.recv(), Some(1)); + thread::sleep(ms(1000)); + assert_eq!(r.recv(), Some(2)); + assert_eq!(r.recv(), Some(4)); + }); + }); + } + + #[test] + fn try_send() { + let (s, r) = channel::bounded(1); + + crossbeam::scope(|scope| { + scope.spawn(move || { + select! { + send(s, 1) => {} + default => panic!(), + } + select! { + send(s, 2) => panic!(), + default => {} + } + thread::sleep(ms(1500)); + select! { + send(s, 3) => {} + default => panic!(), + } + thread::sleep(ms(500)); + select! { + send(s, 4) => {} + default => panic!(), + } + }); + scope.spawn(move || { + thread::sleep(ms(1000)); + assert_eq!(r.try_recv(), Some(1)); + assert_eq!(r.try_recv(), None); + assert_eq!(r.recv(), Some(3)); + }); + }); + } + + #[test] + fn recv_after_close() { + let (s, r) = channel::bounded(100); + + s.send(1); + s.send(2); + s.send(3); + + drop(s); + + assert_eq!(r.recv(), Some(1)); + assert_eq!(r.recv(), Some(2)); + assert_eq!(r.recv(), Some(3)); + assert_eq!(r.recv(), None); + } + + #[test] + fn len() { + const COUNT: usize = 25_000; + const CAP: usize = 1000; + + let (s, r) = channel::bounded(CAP); + + assert_eq!(s.len(), 0); + assert_eq!(r.len(), 0); + + for _ in 0..CAP / 10 { + for i in 0..50 { + s.send(i); + assert_eq!(s.len(), i + 1); + } + + for i in 0..50 { + r.recv().unwrap(); + assert_eq!(r.len(), 50 - i - 1); + } + } + + assert_eq!(s.len(), 0); + assert_eq!(r.len(), 0); + + for i in 0..CAP { + s.send(i); + assert_eq!(s.len(), i + 1); + } + + for _ in 0..CAP { + r.recv().unwrap(); + } + + assert_eq!(s.len(), 0); + assert_eq!(r.len(), 0); + + crossbeam::scope(|scope| { + scope.spawn(|| { + for i in 0..COUNT { + assert_eq!(r.recv(), Some(i)); + let len = r.len(); + assert!(len <= CAP); + } + }); + + scope.spawn(|| { + for i in 0..COUNT { + s.send(i); + let len = s.len(); + assert!(len <= CAP); + } + }); + }); + + assert_eq!(s.len(), 0); + assert_eq!(r.len(), 0); + } + + #[test] + fn close_wakes_receiver() { + let (s, r) = channel::bounded::<()>(1); + + crossbeam::scope(|scope| { + scope.spawn(move || { + assert_eq!(r.recv(), None); + }); + scope.spawn(move || { + thread::sleep(ms(1000)); + drop(s); + }); + }); + } + + #[test] + fn spsc() { + const COUNT: usize = 100_000; + + let (s, r) = channel::bounded(3); + + crossbeam::scope(|scope| { + scope.spawn(move || { + for i in 0..COUNT { + assert_eq!(r.recv(), Some(i)); + } + assert_eq!(r.recv(), None); + }); + scope.spawn(move || { + for i in 0..COUNT { + s.send(i); + } + }); + }); + } + + #[test] + fn mpmc() { + const COUNT: usize = 25_000; + const THREADS: usize = 4; + + let (s, r) = channel::bounded::(3); + let v = (0..COUNT).map(|_| AtomicUsize::new(0)).collect::>(); + + crossbeam::scope(|scope| { + for _ in 0..THREADS { + scope.spawn(|| { + for _ in 0..COUNT { + let n = r.recv().unwrap(); + v[n].fetch_add(1, Ordering::SeqCst); + } + }); + } + for _ in 0..THREADS { + scope.spawn(|| { + for i in 0..COUNT { + s.send(i); + } + }); + } + }); + + for c in v { + assert_eq!(c.load(Ordering::SeqCst), THREADS); + } + } + + #[test] + fn stress_timeout_two_threads() { + const COUNT: usize = 100; + + let (s, r) = channel::bounded(2); + + crossbeam::scope(|scope| { + scope.spawn(|| { + for i in 0..COUNT { + if i % 2 == 0 { + thread::sleep(ms(50)); + } + loop { + select! 
{ + send(s, i) => break, + recv(channel::after(ms(10))) => {} + } + } + } + }); + + scope.spawn(|| { + for i in 0..COUNT { + if i % 2 == 0 { + thread::sleep(ms(50)); + } + loop { + select! { + recv(r, v) => { + assert_eq!(v, Some(i)); + break; + } + recv(channel::after(ms(10))) => {} + } + } + } + }); + }); + } + + #[test] + fn drops() { + const RUNS: usize = 100; + + static DROPS: AtomicUsize = AtomicUsize::new(0); + + #[derive(Debug, PartialEq)] + struct DropCounter; + + impl Drop for DropCounter { + fn drop(&mut self) { + DROPS.fetch_add(1, Ordering::SeqCst); + } + } + + let mut rng = thread_rng(); + + for _ in 0..RUNS { + let steps = rng.gen_range(0, 10_000); + let additional = rng.gen_range(0, 50); + + DROPS.store(0, Ordering::SeqCst); + let (s, r) = channel::bounded::(50); + + crossbeam::scope(|scope| { + scope.spawn(|| { + for _ in 0..steps { + r.recv().unwrap(); + } + }); + + scope.spawn(|| { + for _ in 0..steps { + s.send(DropCounter); + } + }); + }); + + for _ in 0..additional { + s.send(DropCounter); + } + + assert_eq!(DROPS.load(Ordering::SeqCst), steps); + drop(s); + drop(r); + assert_eq!(DROPS.load(Ordering::SeqCst), steps + additional); + } + } + + #[test] + fn linearizable() { + const COUNT: usize = 25_000; + const THREADS: usize = 4; + + let (s, r) = channel::bounded(THREADS); + + crossbeam::scope(|scope| { + for _ in 0..THREADS { + scope.spawn(|| { + for _ in 0..COUNT { + s.send(0); + r.try_recv().unwrap(); + } + }); + } + }); + } + + #[test] + fn fairness() { + const COUNT: usize = 10_000; + + let (s1, r1) = channel::bounded::<()>(COUNT); + let (s2, r2) = channel::bounded::<()>(COUNT); + + for _ in 0..COUNT { + s1.send(()); + s2.send(()); + } + + let mut hits = [0usize; 2]; + for _ in 0..COUNT { + select! { + recv(r1) => hits[0] += 1, + recv(r2) => hits[1] += 1, + } + } + assert!(hits.iter().all(|x| *x >= COUNT / hits.len() / 2)); + } + + #[test] + fn fairness_duplicates() { + const COUNT: usize = 10_000; + + let (s, r) = channel::bounded::<()>(COUNT); + + for _ in 0..COUNT { + s.send(()); + } + + let mut hits = [0usize; 5]; + for _ in 0..COUNT { + select! { + recv(r) => hits[0] += 1, + recv(r) => hits[1] += 1, + recv(r) => hits[2] += 1, + recv(r) => hits[3] += 1, + recv(r) => hits[4] += 1, + } + } + assert!(hits.iter().all(|x| *x >= COUNT / hits.len() / 2)); + } + + #[test] + fn recv_in_send() { + let (s, _r) = channel::bounded(1); + s.send(()); + + select! { + send(s, panic!()) => panic!(), + default => {} + } + + let (s, r) = channel::bounded(2); + s.send(()); + + select! { + send(s, assert_eq!(r.recv(), Some(()))) => {} + } + } + }; +} + +mod normal { + tests!(wrappers::normal); +} + +mod cloned { + tests!(wrappers::cloned); +} + +mod select { + tests!(wrappers::select); +} + +mod select_spin { + tests!(wrappers::select_spin); +} + +mod select_multi { + tests!(wrappers::select_multi); +} diff --git a/crossbeam-channel/tests/cmp.rs b/crossbeam-channel/tests/cmp.rs new file mode 100644 index 000000000..c5f7c05ef --- /dev/null +++ b/crossbeam-channel/tests/cmp.rs @@ -0,0 +1,48 @@ +//! Tests for comparison operators on channel senders and receivers. 
+ +extern crate crossbeam; +extern crate crossbeam_channel as channel; + +#[test] +fn sender() { + let (s1, _) = channel::unbounded::<()>(); + let s2 = s1.clone(); + let (s3, _) = channel::unbounded(); + + assert_eq!(s1, s2); + assert_ne!(s1, s3); + assert_ne!(s2, s3); + assert_eq!(s3, s3); +} + +#[test] +fn receiver() { + let (_, r1) = channel::unbounded::<()>(); + let r2 = r1.clone(); + let (_, r3) = channel::unbounded(); + + assert_eq!(r1, r2); + assert_ne!(r1, r3); + assert_ne!(r2, r3); + assert_eq!(r3, r3); +} + +#[test] +fn sender_and_receiver() { + let (s1, r1) = channel::unbounded::<()>(); + let s2 = s1.clone(); + let r2 = r1.clone(); + let (s3, r3) = channel::unbounded(); + + assert_eq!(s1, r2); + assert_eq!(r1, s2); + + assert_ne!(s1, r3); + assert_ne!(r1, s3); + + assert_ne!(s2, r3); + assert_ne!(r2, s3); + + assert_eq!(s3, r3); + assert_eq!(r3, s3); +} diff --git a/crossbeam-channel/tests/golang.rs b/crossbeam-channel/tests/golang.rs new file mode 100644 index 000000000..6f4ea1edc --- /dev/null +++ b/crossbeam-channel/tests/golang.rs @@ -0,0 +1,906 @@ +//! Tests borrowed from Go and ported to Rust. + +extern crate crossbeam; +#[macro_use] +extern crate crossbeam_channel as channel; +extern crate parking_lot; + +mod wrappers; + +macro_rules! tests { + ($channel:path) => { + use std::any::Any; + use std::collections::HashMap; + use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering}; + use std::thread; + use std::time::Duration; + + use $channel as channel; + use crossbeam; + use parking_lot::Mutex; + + fn ms(ms: u64) -> Duration { + Duration::from_millis(ms) + } + + // https://github.com/golang/go/blob/master/test/chan/doubleselect.go + mod doubleselect { + use super::*; + + const ITERATIONS: i32 = 100_000; + + #[test] + fn test_double_select() { + let (c1, r1) = channel::unbounded(); + let (c2, r2) = channel::unbounded(); + let (c3, r3) = channel::unbounded(); + let (c4, r4) = channel::unbounded(); + let (done_s, done_r) = channel::unbounded(); + let (mux_s, mux_r) = channel::unbounded(); + + fn mux(out: channel::Sender, in_c: channel::Receiver, done: channel::Sender) { + for val in in_c.0 { + out.send(val); + } + + drop(out); + done.send(true); + } + + crossbeam::scope(|scope| { + // This is akin to the sender function in the go example. + scope.spawn(|| { + for i in 0..ITERATIONS { + select! { + send(c1, i) => {}, + send(c2, i) => {}, + send(c3, i) => {}, + send(c4, i) => {}, + } + } + + drop(c1); + drop(c2); + drop(c3); + drop(c4); + }); + + // This is akin to the mux function call. + { + let mux_s = mux_s.clone(); + let done_s = done_s.clone(); + scope.spawn(move || mux(mux_s, r1, done_s)); + } + { + let mux_s = mux_s.clone(); + let done_s = done_s.clone(); + scope.spawn(move || mux(mux_s, r2, done_s)); + } + { + let mux_s = mux_s.clone(); + let done_s = done_s.clone(); + scope.spawn(move || mux(mux_s, r3, done_s)); + } + { + let mux_s = mux_s.clone(); + let done_s = done_s.clone(); + scope.spawn(move || mux(mux_s, r4, done_s)); + } + + scope.spawn(|| { + done_r.recv(); + done_r.recv(); + done_r.recv(); + done_r.recv(); + + drop(mux_s); + }); + + // Akin to recver in the go example. + let mut seen = HashMap::new(); + + for val in mux_r.0 { + if seen.contains_key(&val) { + panic!("got duplicate value for {}", val); + } + seen.insert(val, true); + } + }); + } + } + + // https://github.com/golang/go/blob/master/test/chan/fifo.go + mod fifo { + use super::*; + + const N: i32 = 10; + + // AsynchFifo from the go example. 
+ #[test] + fn async() { + let (tx, rx) = channel::bounded(N as usize); + for i in 0..N { + tx.send(i); + } + for i in 0..N { + if rx.recv() != Some(i) { + panic!("bad receive"); + } + } + } + + fn chain(ch: channel::Receiver, val: i32, in_c: channel::Receiver, out: channel::Sender) { + in_c.recv(); + if ch.recv() != Some(val) { + panic!(val); + } + out.send(1); + } + + // SynchFifo from the go example. + #[test] + fn sync() { + let (ch_s, ch_r) = channel::bounded(0); + let (in_s, mut in_r) = channel::bounded(0); + let start = in_s.clone(); + + crossbeam::scope(|scope| { + for i in 0..N { + let (out_s, out_r) = channel::bounded(0); + let ch_r = ch_r.clone(); + scope.spawn(move || { + chain(ch_r, i, in_r, out_s); + }); + in_r = out_r; + } + start.send(0); + for i in 0..N { + ch_s.send(i); + } + in_r.recv(); + }); + } + } + + // https://github.com/golang/go/blob/master/test/chan/nonblock.go + mod nonblock { + // TODO + } + + // https://github.com/golang/go/blob/master/test/chan/select.go + mod select { + // TODO + } + + // https://github.com/golang/go/blob/master/test/chan/select2.go + mod select2 { + // TODO + } + + // https://github.com/golang/go/blob/master/test/chan/select3.go + mod select3 { + // TODO + } + + // https://github.com/golang/go/blob/master/test/chan/select4.go + mod select4 { + // TODO + } + + // https://github.com/golang/go/blob/master/test/chan/select5.go + mod select5 { + // TODO + } + + // https://github.com/golang/go/blob/master/test/chan/select6.go + mod select6 { + // TODO + } + + // https://github.com/golang/go/blob/master/test/chan/select7.go + mod select7 { + // TODO + } + + // https://github.com/golang/go/blob/master/test/chan/sieve1.go + mod sieve1 { + // TODO + } + + // https://github.com/golang/go/blob/master/test/chan/sieve2.go + mod sieve2 { + // TODO + } + + // https://github.com/golang/go/blob/master/test/chan/zerosize.go + mod zerosize { + use super::*; + + #[test] + fn zero_size_struct() { + struct ZeroSize; + let _ = channel::unbounded::(); + } + + #[test] + fn zero_size_array() { + let _ = channel::unbounded::<[u8; 0]>(); + } + } + + // https://github.com/golang/go/blob/master/src/runtime/chan_test.go + mod chan_test { + use super::*; + + #[test] + fn chan() { + const N: usize = 200; + + for cap in 0..N { + { + let c = channel::bounded::(cap); + let recv1 = AtomicBool::new(false); + let recv2 = AtomicBool::new(false); + + crossbeam::scope(|scope| { + scope.spawn(|| { + c.1.recv(); + recv1.store(true, Ordering::SeqCst); + }); + scope.spawn(|| { + c.1.recv(); + recv2.store(true, Ordering::SeqCst); + }); + + thread::sleep(ms(1)); + + if recv1.load(Ordering::SeqCst) || recv2.load(Ordering::SeqCst) { + panic!(); + } + + // Ensure that non-blocking receive does not block. + select! { + recv(c.1) => panic!(), + default => {} + } + select! { + recv(c.1) => panic!(), + default => {} + } + + c.0.send(0); + c.0.send(0); + }); + } + + { + // Ensure that send to full chan blocks. + let c = channel::bounded::(cap); + for i in 0..cap { + c.0.send(i as i32); + } + let sent = AtomicUsize::new(0); + + crossbeam::scope(|scope| { + scope.spawn(|| { + c.0.send(0); + sent.store(1, Ordering::SeqCst); + }); + + thread::sleep(ms(1)); + + if sent.load(Ordering::SeqCst) != 0 { + panic!(); + } + + // Ensure that non-blocking send does not block. + select! { + send(c.0, 0) => panic!(), + default => {} + } + c.1.recv(); + }); + } + + { + // Ensure that we receive 0 from closed chan. 
+ let c = channel::bounded::(cap); + for i in 0..cap { + c.0.send(i as i32); + } + drop(c.0); + + for i in 0..cap { + let v = c.1.recv(); + assert_eq!(v, Some(i as i32)); + } + + assert_eq!(c.1.recv(), None); + } + + { + // Ensure that close unblocks receive. + let (s, r) = channel::bounded::(cap); + let done = channel::bounded::(0); + + crossbeam::scope(|scope| { + scope.spawn(|| done.0.send(r.recv() == None)); + thread::sleep(ms(1)); + drop(s); + + assert_eq!(done.1.recv(), Some(true)); + }); + } + + { + // Send 100 integers, + // ensure that we receive them non-corrupted in FIFO order. + let c = channel::bounded::(cap); + crossbeam::scope(|scope| { + scope.spawn(|| { + for i in 0..100 { + c.0.send(i); + } + }); + + for i in 0..100 { + assert_eq!(c.1.recv(), Some(i)); + } + }); + + // Same, but using recv2. + crossbeam::scope(|scope| { + scope.spawn(|| { + for i in 0..100 { + c.0.send(i); + } + }); + + for i in 0..100 { + assert_eq!(c.1.recv(), Some(i)); + } + }); + + // Send 1000 integers in 4 goroutines, + // ensure that we receive what we send. + const P: usize = 4; + const L: usize = 1000; + let done = channel::bounded::>(0); + crossbeam::scope(|scope| { + for _ in 0..P { + scope.spawn(|| { + for i in 0..L { + c.0.send(i as i32); + } + }); + } + + for _ in 0..P { + scope.spawn(|| { + let mut recv = vec![0; L]; + for _ in 0..L { + let v = c.1.recv().unwrap(); + recv[v as usize] += 1; + } + done.0.send(recv); + }); + } + + let mut recv = vec![0; L]; + for _ in 0..P { + for (i, v) in done.1.recv().unwrap().into_iter().enumerate() { + recv[i] += v; + } + } + + assert_eq!(recv.len(), L); + for v in recv { + assert_eq!(v, P as i32); + } + }); + } + + { + // Test len/cap. + let c = channel::bounded::(cap); + + assert_eq!(c.0.len(), 0); + assert_eq!(c.0.capacity(), Some(cap)); + + for i in 0..cap { + c.0.send(i as i32); + } + + assert_eq!(c.0.len(), cap); + assert_eq!(c.0.capacity(), Some(cap)); + } + } + } + + #[test] + fn nonblock_recv_race() { + const N: usize = 10000; + + for _ in 0..N { + let (s, r) = channel::bounded(1); + s.send(1); + + crossbeam::scope(|scope| { + scope.spawn(|| { + select! { + recv(r) => {} + default => panic!("chan is not ready"), + } + }); + + drop(s); + r.recv(); + }); + } + } + + #[test] + fn nonblock_select_race() { + const N: usize = 10000; + + let (done_s, done_r) = channel::bounded::(1); + for _ in 0..N { + let (s1, r1) = channel::bounded::(1); + let (s2, r2) = channel::bounded::(1); + s1.send(1); + + crossbeam::scope(|scope| { + scope.spawn(|| { + select! { + recv(r1) => {} + recv(r2) => {} + default => { + done_s.send(false); + return; + } + } + done_s.send(true); + }); + + s2.send(1); + select! { + recv(r1) => {} + default => {} + } + + if !done_r.recv().unwrap() { + panic!("no chan is ready"); + } + }); + } + } + + #[test] + fn nonblock_select_race2() { + const N: usize = 1000; + + let (done_s, done_r) = channel::bounded::(1); + for _ in 0..N { + let (s1, r1) = channel::bounded::(1); + let (s2, r2) = channel::bounded::(0); + s1.send(1); + + crossbeam::scope(|scope| { + scope.spawn(|| { + select! { + recv(r1) => {} + recv(r2) => {} + default => { + done_s.send(false); + return; + } + } + done_s.send(true); + }); + + drop(s2); + select! { + recv(r1) => {} + default => {} + } + + if !done_r.recv().unwrap() { + panic!("no chan is ready"); + } + }); + } + } + + #[test] + fn self_select() { + // Ensure that send/recv on the same chan in select + // does not crash nor deadlock. 
+ for &cap in &[0, 10] { + let (s, r) = channel::bounded::(cap); + + crossbeam::scope(|scope| { + for p in 0..2 { + let p = p; + let (s, r) = (&s, &r); + scope.spawn(move || { + for i in 0..1000 { + if p == 0 || i % 2 == 0 { + select! { + send(s, p) => {} + recv(r, v) => { + if cap == 0 && v.unwrap() == p { + panic!("self receive"); + } + } + } + } else { + select! { + recv(r, v) => { + if cap == 0 && v.unwrap() == p { + panic!("self receive"); + } + } + send(s, p) => {} + } + } + } + }); + } + }); + } + } + + #[test] + fn select_stress() { + let c = vec![ + channel::bounded(0), + channel::bounded(0), + channel::bounded(2), + channel::bounded(3), + ]; + + const N: usize = 10000; + + // There are 4 goroutines that send N values on each of the chans, + // + 4 goroutines that receive N values on each of the chans, + // + 1 goroutine that sends N values on each of the chans in a single select, + // + 1 goroutine that receives N values on each of the chans in a single select. + // All these sends, receives and selects interact chaotically at runtime, + // but we are careful that this whole construct does not deadlock. + crossbeam::scope(|scope| { + for k in 0..4 { + { + let c = c.clone(); + let k = k; + scope.spawn(move || { + for _ in 0..N { + c[k].0.send(0); + } + }); + } + { + let c = c.clone(); + let k = k; + scope.spawn(move || { + for _ in 0..N { + c[k].1.recv(); + } + }); + } + } + + { + let mut s = c.iter() + .map(|(s, _)| Some(s.clone())) + .collect::>(); + + scope.spawn(move || { + let mut n = [0i32; 4]; + for _ in 0..4 * N { + let i; + select! { + send(s[3].iter().map(|x| &**x), 0) => i = 3, + send(s[2].iter().map(|x| &**x), 0) => i = 2, + send(s[0].iter().map(|x| &**x), 0) => i = 0, + send(s[1].iter().map(|x| &**x), 0) => i = 1, + } + n[i] += 1; + assert!(n[i] <= N as i32); + if n[i] == N as i32 { + s[i] = None; + } + } + }); + } + + { + let mut r = c.iter() + .map(|(_, r)| Some(r.clone())) + .collect::>(); + + scope.spawn(move || { + let mut n = [0i32; 4]; + for _ in 0..4 * N { + let i; + select! { + recv(r[0].iter().map(|x| &**x)) => i = 0, + recv(r[1].iter().map(|x| &**x)) => i = 1, + recv(r[2].iter().map(|x| &**x)) => i = 2, + recv(r[3].iter().map(|x| &**x)) => i = 3, + } + n[i] += 1; + assert!(n[i] <= N as i32); + if n[i] == N as i32 { + r[i] = None; + } + } + }); + } + }); + } + + #[test] + fn select_fairness() { + const TRIALS: usize = 10000; + + let (s1, r1) = channel::bounded::(TRIALS + 1); + let (s2, r2) = channel::bounded::(TRIALS + 1); + + for _ in 0..TRIALS + 1 { + s1.send(1); + s2.send(2); + } + + let (_s3, r3) = channel::bounded::(TRIALS + 1); + let (_s4, r4) = channel::bounded::(TRIALS + 1); + let (out_s, out_r) = channel::bounded::(TRIALS + 1); + let (done_s, done_r) = channel::bounded::(TRIALS + 1); + + crossbeam::scope(|scope| { + scope.spawn(|| { + loop { + let b = select! { + recv(r3, m) => m, + recv(r4, m) => m, + recv(r1, m) => m, + recv(r2, m) => m, + }.unwrap(); + + select! { + send(out_s, b) => {} + recv(done_r) => return + } + } + }); + + let (mut cnt1, mut cnt2) = (0, 0); + for _ in 0..TRIALS { + match out_r.recv() { + Some(1) => cnt1 += 1, + Some(2) => cnt2 += 1, + b => panic!("unexpected value {:?} on channel", b), + } + } + + // If the select in the goroutine is fair, + // cnt1 and cnt2 should be about the same value. + // With 10,000 trials, the expected margin of error at + // a confidence level of five nines is 4.4172 / (2 * Sqrt(10000)). 
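+            // Put differently: under a fair select each trial is a Bernoulli(0.5) draw,
+            // so the observed ratio r = cnt1 / TRIALS has standard error
+            // sqrt(0.25 / TRIALS) = 1 / (2 * sqrt(TRIALS)), and 4.4172 is the two-sided
+            // normal quantile for a 0.99999 confidence level. That gives the bound
+            // 4.4172 / (2 * sqrt(10000)) ~= 0.022 on |r - 0.5| checked just below.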
+ + let r = cnt1 as f64 / TRIALS as f64; + let e = (r - 0.5).abs(); + + if e > 4.4172 / (2.0 * (TRIALS as f64).sqrt()) { + panic!( + "unfair select: in {} trials, results were {}, {}", + TRIALS, + cnt1, + cnt2, + ); + } + + drop(done_s); + }); + } + + #[test] + fn chan_send_interface() { + struct Mt; + + let (s, _r) = channel::bounded::>(1); + s.send(Box::new(Mt)); + + select! { + send(s, Box::new(Mt)) => {} + default => {} + } + + select! { + send(s, Box::new(Mt)) => {} + send(s, Box::new(Mt)) => {} + default => {} + } + } + + #[test] + fn pseudo_random_send() { + const N: usize = 100; + + for cap in 0..N { + let (s, r) = channel::bounded::(cap); + let l = Mutex::new(vec![0i32; N]); + + crossbeam::scope(|scope| { + scope.spawn(|| { + let mut l = l.lock(); + for i in 0..N { + thread::yield_now(); + l[i] = r.recv().unwrap(); + } + }); + + for _ in 0..N { + select! { + send(s, 1) => {} + send(s, 0) => {} + } + } + + let l = l.lock(); + let mut n0 = 0; + let mut n1 = 0; + for &i in l.iter() { + n0 += (i + 1) % 2; + n1 += i; + } + + if n0 <= N as i32 / 10 || n1 <= N as i32 / 10 { + panic!( + "Want pseudorandom, got {} zeros and {} ones (chan cap {})", + n0, + n1, + cap, + ); + } + }); + } + } + + #[test] + fn multi_consumer() { + const NWORK: usize = 23; + const NITER: usize = 271828; + + let pn = [2, 3, 7, 11, 13, 17, 19, 23, 27, 31]; + + let (q_s, q_r) = channel::bounded::(NWORK * 3); + let (r_s, r_r) = channel::bounded::(NWORK * 3); + + let expect = AtomicUsize::new(0); + + crossbeam::scope(|scope| { + // workers + for i in 0..NWORK { + let w = i; + let q_r = &q_r; + let pn = &pn; + let r_s = r_s.clone(); + scope.spawn(move || { + for v in &q_r.0 { + // mess with the fifo-ish nature of range + if pn[w % pn.len()] == v { + thread::yield_now(); + } + r_s.send(v); + } + }); + } + + // feeder & closer + scope.spawn(|| { + for i in 0..NITER { + let v = pn[i % pn.len()]; + expect.fetch_add(v as usize, Ordering::SeqCst); + q_s.send(v); + } + + drop(q_s); + drop(r_s); + }); + + // consume & check + let mut n = 0; + let mut s = 0; + for v in &r_r.0 { + n += 1; + s += v; + } + if n != NITER || s != expect.load(Ordering::SeqCst) as i32 { + panic!( + "Expected sum {} (got {}) from {} iter (saw {})", + expect.load(Ordering::SeqCst), + s, + NITER, + n, + ); + } + }); + } + + #[test] + fn select_duplicate_channel() { + // This test makes sure we can queue a G on + // the same channel multiple times. + let (c_s, c_r) = channel::bounded::(0); + let (d_s, d_r) = channel::bounded::(0); + let (e_s, e_r) = channel::bounded::(0); + + crossbeam::scope(|scope| { + scope.spawn(|| { + select! 
{ + recv(c_r) => {} + recv(c_r) => {} + recv(d_r) => {} + } + e_s.send(9); + }); + thread::sleep(ms(1)); + + scope.spawn(|| { + c_r.recv(); + }); + thread::sleep(ms(1)); + + d_s.send(7); + e_r.recv(); + c_s.send(8); + }); + } + } + + // https://github.com/golang/go/blob/master/test/closedchan.go + mod closedchan { + // TODO + } + + // https://github.com/golang/go/blob/master/src/runtime/chanbarrier_test.go + mod chanbarrier_test { + // TODO + } + + // https://github.com/golang/go/blob/master/src/runtime/race/testdata/chan_test.go + mod race_chan_test { + // TODO + } + + // https://github.com/golang/go/blob/master/test/ken/chan.go + mod chan { + // TODO + } + + // https://github.com/golang/go/blob/master/test/ken/chan1.go + mod chan1 { + // TODO + } + } +} + +mod normal { + tests!(wrappers::normal); +} + +mod cloned { + tests!(wrappers::cloned); +} + +mod select { + tests!(wrappers::select); +} + +mod select_spin { + tests!(wrappers::select_spin); +} + +mod select_multi { + tests!(wrappers::select_multi); +} diff --git a/crossbeam-channel/tests/iter.rs b/crossbeam-channel/tests/iter.rs new file mode 100644 index 000000000..5db849439 --- /dev/null +++ b/crossbeam-channel/tests/iter.rs @@ -0,0 +1,80 @@ +//! Tests for iteration over receivers. + +extern crate crossbeam; +extern crate crossbeam_channel as channel; + +#[test] +fn nested_recv_iter() { + let (s, r) = channel::unbounded::(); + let (total_s, total_r) = channel::unbounded::(); + + crossbeam::scope(|scope| { + scope.spawn(move || { + let mut acc = 0; + for x in &r { + acc += x; + } + total_s.send(acc); + }); + + s.send(3); + s.send(1); + s.send(2); + drop(s); + assert_eq!(total_r.recv().unwrap(), 6); + }); +} + +#[test] +fn recv_iter_break() { + let (s, r) = channel::unbounded::(); + let (count_s, count_r) = channel::unbounded(); + + crossbeam::scope(|scope| { + scope.spawn(move || { + let mut count = 0; + for x in &r { + if count >= 3 { + break; + } else { + count += x; + } + } + count_s.send(count); + }); + + s.send(2); + s.send(2); + s.send(2); + let _ = s.send(2); + drop(s); + assert_eq!(count_r.recv().unwrap(), 4); + }) +} + +#[test] +fn recv_into_iter_owned() { + let mut iter = { + let (s, r) = channel::unbounded::(); + s.send(1); + s.send(2); + r.into_iter() + }; + + assert_eq!(iter.next().unwrap(), 1); + assert_eq!(iter.next().unwrap(), 2); + assert_eq!(iter.next().is_none(), true); +} + +#[test] +fn recv_into_iter_borrowed() { + let (s, r) = channel::unbounded::(); + s.send(1); + s.send(2); + drop(s); + + let mut iter = (&r).into_iter(); + assert_eq!(iter.next().unwrap(), 1); + assert_eq!(iter.next().unwrap(), 2); + assert_eq!(iter.next().is_none(), true); +} diff --git a/crossbeam-channel/tests/list.rs b/crossbeam-channel/tests/list.rs new file mode 100644 index 000000000..6ca237689 --- /dev/null +++ b/crossbeam-channel/tests/list.rs @@ -0,0 +1,430 @@ +//! Tests for the list channel flavor. + +extern crate crossbeam; +#[macro_use] +extern crate crossbeam_channel as channel; +extern crate rand; + +mod wrappers; + +macro_rules! tests { + ($channel:path) => { + use std::sync::atomic::AtomicUsize; + use std::sync::atomic::Ordering; + use std::thread; + use std::time::Duration; + + use $channel as channel; + use crossbeam; + use rand::{thread_rng, Rng}; + + fn ms(ms: u64) -> Duration { + Duration::from_millis(ms) + } + + #[test] + fn smoke() { + let (s, r) = channel::unbounded(); + s.send(7); + assert_eq!(r.try_recv(), Some(7)); + + s.send(8); + assert_eq!(r.recv(), Some(8)); + + assert_eq!(r.try_recv(), None); + select! 
{ + recv(r) => panic!(), + recv(channel::after(ms(1000))) => {} + } + + assert_eq!(s.capacity(), None); + assert_eq!(r.capacity(), None); + } + + #[test] + fn capacity() { + let (s, r) = channel::unbounded::<()>(); + assert_eq!(s.capacity(), None); + assert_eq!(r.capacity(), None); + } + + #[test] + fn len_empty_full() { + let (s, r) = channel::unbounded(); + + assert_eq!(s.len(), 0); + assert_eq!(s.is_empty(), true); + assert_eq!(s.is_full(), false); + assert_eq!(r.len(), 0); + assert_eq!(r.is_empty(), true); + assert_eq!(r.is_full(), false); + + s.send(()); + + assert_eq!(s.len(), 1); + assert_eq!(s.is_empty(), false); + assert_eq!(s.is_full(), false); + assert_eq!(r.len(), 1); + assert_eq!(r.is_empty(), false); + assert_eq!(r.is_full(), false); + + r.recv().unwrap(); + + assert_eq!(s.len(), 0); + assert_eq!(s.is_empty(), true); + assert_eq!(s.is_full(), false); + assert_eq!(r.len(), 0); + assert_eq!(r.is_empty(), true); + assert_eq!(r.is_full(), false); + } + + #[test] + fn recv() { + let (s, r) = channel::unbounded(); + + crossbeam::scope(|scope| { + scope.spawn(move || { + assert_eq!(r.recv(), Some(7)); + thread::sleep(ms(1000)); + assert_eq!(r.recv(), Some(8)); + thread::sleep(ms(1000)); + assert_eq!(r.recv(), Some(9)); + assert_eq!(r.recv(), None); + }); + scope.spawn(move || { + thread::sleep(ms(1500)); + s.send(7); + s.send(8); + s.send(9); + }); + }); + } + + #[test] + fn recv_timeout() { + let (s, r) = channel::unbounded::(); + + crossbeam::scope(|scope| { + scope.spawn(move || { + select! { + recv(r) => panic!(), + recv(channel::after(ms(1000))) => {} + } + select! { + recv(r, v) => assert_eq!(v, Some(7)), + recv(channel::after(ms(1000))) => panic!(), + } + select! { + recv(r, v) => assert_eq!(v, None), + recv(channel::after(ms(1000))) => panic!(), + } + }); + scope.spawn(move || { + thread::sleep(ms(1500)); + s.send(7); + }); + }); + } + + #[test] + fn try_recv() { + let (s, r) = channel::unbounded(); + + crossbeam::scope(|scope| { + scope.spawn(move || { + assert_eq!(r.try_recv(), None); + thread::sleep(ms(1500)); + assert_eq!(r.try_recv(), Some(7)); + thread::sleep(ms(500)); + assert_eq!(r.try_recv(), None); + }); + scope.spawn(move || { + thread::sleep(ms(1000)); + s.send(7); + }); + }); + } + + #[test] + fn recv_after_close() { + let (s, r) = channel::unbounded(); + + s.send(1); + s.send(2); + s.send(3); + + drop(s); + + assert_eq!(r.recv(), Some(1)); + assert_eq!(r.recv(), Some(2)); + assert_eq!(r.recv(), Some(3)); + assert_eq!(r.recv(), None); + } + + #[test] + fn len() { + let (s, r) = channel::unbounded(); + + assert_eq!(s.len(), 0); + assert_eq!(r.len(), 0); + + for i in 0..50 { + s.send(i); + assert_eq!(s.len(), i + 1); + } + + for i in 0..50 { + r.recv().unwrap(); + assert_eq!(r.len(), 50 - i - 1); + } + + assert_eq!(s.len(), 0); + assert_eq!(r.len(), 0); + } + + #[test] + fn close_wakes_receiver() { + let (s, r) = channel::unbounded::<()>(); + + crossbeam::scope(|scope| { + scope.spawn(move || { + assert_eq!(r.recv(), None); + }); + scope.spawn(move || { + thread::sleep(ms(1000)); + drop(s); + }); + }); + } + + #[test] + fn spsc() { + const COUNT: usize = 100_000; + + let (s, r) = channel::unbounded(); + + crossbeam::scope(|scope| { + scope.spawn(move || { + for i in 0..COUNT { + assert_eq!(r.recv(), Some(i)); + } + assert_eq!(r.recv(), None); + }); + scope.spawn(move || { + for i in 0..COUNT { + s.send(i); + } + }); + }); + } + + #[test] + fn mpmc() { + const COUNT: usize = 25_000; + const THREADS: usize = 4; + + let (s, r) = channel::unbounded::(); + let v = 
(0..COUNT).map(|_| AtomicUsize::new(0)).collect::>(); + + crossbeam::scope(|scope| { + for _ in 0..THREADS { + scope.spawn(|| { + for _ in 0..COUNT { + let n = r.recv().unwrap(); + v[n].fetch_add(1, Ordering::SeqCst); + } + }); + } + for _ in 0..THREADS { + scope.spawn(|| { + for i in 0..COUNT { + s.send(i); + } + }); + } + }); + + assert_eq!(r.try_recv(), None); + + for c in v { + assert_eq!(c.load(Ordering::SeqCst), THREADS); + } + } + + #[test] + fn stress_timeout_two_threads() { + const COUNT: usize = 100; + + let (s, r) = channel::unbounded(); + + crossbeam::scope(|scope| { + scope.spawn(|| { + for i in 0..COUNT { + if i % 2 == 0 { + thread::sleep(ms(50)); + } + s.send(i); + } + }); + + scope.spawn(|| { + for i in 0..COUNT { + if i % 2 == 0 { + thread::sleep(ms(50)); + } + loop { + select! { + recv(r, v) => { + assert_eq!(v, Some(i)); + break; + } + recv(channel::after(ms(10))) => {} + } + } + } + }); + }); + } + + #[test] + fn drops() { + static DROPS: AtomicUsize = AtomicUsize::new(0); + + #[derive(Debug, PartialEq)] + struct DropCounter; + + impl Drop for DropCounter { + fn drop(&mut self) { + DROPS.fetch_add(1, Ordering::SeqCst); + } + } + + let mut rng = thread_rng(); + + for _ in 0..100 { + let steps = rng.gen_range(0, 10_000); + let additional = rng.gen_range(0, 1000); + + DROPS.store(0, Ordering::SeqCst); + let (s, r) = channel::unbounded::(); + + crossbeam::scope(|scope| { + scope.spawn(|| { + for _ in 0..steps { + r.recv().unwrap(); + } + }); + + scope.spawn(|| { + for _ in 0..steps { + s.send(DropCounter); + } + }); + }); + + for _ in 0..additional { + s.send(DropCounter); + } + + assert_eq!(DROPS.load(Ordering::SeqCst), steps); + drop(s); + drop(r); + assert_eq!(DROPS.load(Ordering::SeqCst), steps + additional); + } + } + + #[test] + fn linearizable() { + const COUNT: usize = 25_000; + const THREADS: usize = 4; + + let (s, r) = channel::unbounded(); + + crossbeam::scope(|scope| { + for _ in 0..THREADS { + scope.spawn(|| { + for _ in 0..COUNT { + s.send(0); + r.try_recv().unwrap(); + } + }); + } + }); + } + + #[test] + fn fairness() { + const COUNT: usize = 10_000; + + let (s1, r1) = channel::unbounded::<()>(); + let (s2, r2) = channel::unbounded::<()>(); + + for _ in 0..COUNT { + s1.send(()); + s2.send(()); + } + + let mut hits = [0usize; 2]; + for _ in 0..COUNT { + select! { + recv(r1) => hits[0] += 1, + recv(r2) => hits[1] += 1, + } + } + assert!(hits.iter().all(|x| *x >= COUNT / hits.len() / 2)); + } + + #[test] + fn fairness_duplicates() { + const COUNT: usize = 10_000; + + let (s, r) = channel::unbounded(); + + for _ in 0..COUNT { + s.send(()); + } + + let mut hits = [0usize; 5]; + for _ in 0..COUNT { + select! { + recv(r) => hits[0] += 1, + recv(r) => hits[1] += 1, + recv(r) => hits[2] += 1, + recv(r) => hits[3] += 1, + recv(r) => hits[4] += 1, + } + } + assert!(hits.iter().all(|x| *x >= COUNT / hits.len() / 2)); + } + + #[test] + fn recv_in_send() { + let (s, r) = channel::unbounded(); + s.send(()); + + select! { + send(s, assert_eq!(r.recv(), Some(()))) => {} + } + } + }; +} + +mod normal { + tests!(wrappers::normal); +} + +mod cloned { + tests!(wrappers::cloned); +} + +mod select { + tests!(wrappers::select); +} + +mod select_spin { + tests!(wrappers::select_spin); +} + +mod select_multi { + tests!(wrappers::select_multi); +} diff --git a/crossbeam-channel/tests/mpsc.rs b/crossbeam-channel/tests/mpsc.rs new file mode 100644 index 000000000..46ffdb113 --- /dev/null +++ b/crossbeam-channel/tests/mpsc.rs @@ -0,0 +1,1979 @@ +//! Tests borrowed from `std::sync::mpsc`. 
+//! +//! This is a channel implementation mimicking MPSC channels from the standard library, but the +//! internals actually use `crossbeam-channel`. There's an auxilliary channel `disconnected`, which +//! becomes closed once the receiver gets dropped, thus notifying the senders that the MPSC channel +//! is disconnected. +//! +//! This channel is then tested against the original tests for MPSC channels. +//! +//! Three minor tweaks were needed to make the tests compile: +//! +//! - Replace `box` syntax with `Box::new`. +//! - Replace all uses of `Select` with `select!`. +//! - Use type anotation in one test. + +#[macro_use] +extern crate crossbeam_channel as channel; + +mod wrappers; + +macro_rules! mpsc_select { + ( + $($name:pat = $rx:ident.$meth:ident() => $code:expr),+ + ) => ({ + select! { + $( + recv(($rx).inner, msg) => { + let $name = match msg { + None => Err(::std::sync::mpsc::RecvError), + Some(msg) => Ok(msg), + }; + $code + } + )+ + } + }) +} + +macro_rules! tests { + ($channel:path) => { + use std::sync::Arc; + use std::sync::atomic::{AtomicBool, Ordering}; + use std::sync::mpsc::{RecvError, RecvTimeoutError, SendError, TryRecvError, TrySendError}; + use std::time::Duration; + + use $channel as channel; + + pub struct Sender { + pub inner: channel::Sender, + disconnected: channel::Receiver<()>, + is_disconnected: Arc, + } + + impl Sender { + pub fn send(&self, t: T) -> Result<(), SendError> { + if self.is_disconnected.load(Ordering::SeqCst) { + Err(SendError(t)) + } else { + self.inner.send(t); + Ok(()) + } + } + } + + impl Clone for Sender { + fn clone(&self) -> Sender { + Sender { + inner: self.inner.clone(), + disconnected: self.disconnected.clone(), + is_disconnected: self.is_disconnected.clone(), + } + } + } + + pub struct SyncSender { + pub inner: channel::Sender, + disconnected: channel::Receiver<()>, + is_disconnected: Arc, + } + + impl SyncSender { + pub fn send(&self, t: T) -> Result<(), SendError> { + if self.is_disconnected.load(Ordering::SeqCst) { + Err(SendError(t)) + } else { + select! { + send(self.inner, t) => Ok(()), + default => { + select! { + send(self.inner, t) => Ok(()), + recv(self.disconnected) => Err(SendError(t)), + } + } + } + } + } + + pub fn try_send(&self, t: T) -> Result<(), TrySendError> { + if self.is_disconnected.load(Ordering::SeqCst) { + Err(TrySendError::Disconnected(t)) + } else { + select! { + send(self.inner, t) => Ok(()), + default => Err(TrySendError::Full(t)), + } + } + } + } + + impl Clone for SyncSender { + fn clone(&self) -> SyncSender { + SyncSender { + inner: self.inner.clone(), + disconnected: self.disconnected.clone(), + is_disconnected: self.is_disconnected.clone(), + } + } + } + + pub struct Receiver { + pub inner: channel::Receiver, + _disconnected: channel::Sender<()>, + is_disconnected: Arc, + } + + impl Receiver { + pub fn try_recv(&self) -> Result { + select! { + recv(self.inner, msg) => match msg { + None => Err(TryRecvError::Disconnected), + Some(msg) => Ok(msg), + } + default => Err(TryRecvError::Empty), + } + } + + pub fn recv(&self) -> Result { + match self.inner.recv() { + None => Err(RecvError), + Some(msg) => Ok(msg), + } + } + + pub fn recv_timeout(&self, timeout: Duration) -> Result { + select! { + recv(self.inner, msg) => match msg { + None => Err(RecvTimeoutError::Disconnected), + Some(msg) => Ok(msg), + } + default => { + select! 
{ + recv(self.inner, msg) => match msg { + None => Err(RecvTimeoutError::Disconnected), + Some(msg) => Ok(msg), + } + recv(channel::after(timeout)) => Err(RecvTimeoutError::Timeout), + } + } + } + } + + pub fn iter(&self) -> Iter { + Iter { inner: self } + } + + pub fn try_iter(&self) -> TryIter { + TryIter { inner: self } + } + } + + impl Drop for Receiver { + fn drop(&mut self) { + self.is_disconnected.store(true, Ordering::SeqCst); + } + } + + impl<'a, T> IntoIterator for &'a Receiver { + type Item = T; + type IntoIter = Iter<'a, T>; + + fn into_iter(self) -> Iter<'a, T> { + self.iter() + } + } + + impl IntoIterator for Receiver { + type Item = T; + type IntoIter = IntoIter; + + fn into_iter(self) -> IntoIter { + IntoIter { inner: self } + } + } + + pub struct TryIter<'a, T: 'a> { + inner: &'a Receiver, + } + + impl<'a, T> Iterator for TryIter<'a, T> { + type Item = T; + + fn next(&mut self) -> Option { + self.inner.try_recv().ok() + } + } + + pub struct Iter<'a, T: 'a> { + inner: &'a Receiver, + } + + impl<'a, T> Iterator for Iter<'a, T> { + type Item = T; + + fn next(&mut self) -> Option { + self.inner.recv().ok() + } + } + + pub struct IntoIter { + inner: Receiver, + } + + impl Iterator for IntoIter { + type Item = T; + + fn next(&mut self) -> Option { + self.inner.recv().ok() + } + } + + pub fn channel() -> (Sender, Receiver) { + let (s1, r1) = channel::unbounded(); + let (s2, r2) = channel::bounded(1); + let is_disconnected = Arc::new(AtomicBool::new(false)); + + let s = Sender { + inner: s1, + disconnected: r2, + is_disconnected: is_disconnected.clone(), + }; + let r = Receiver { + inner: r1, + _disconnected: s2, + is_disconnected, + }; + (s, r) + } + + pub fn sync_channel(bound: usize) -> (SyncSender, Receiver) { + let (s1, r1) = channel::bounded(bound); + let (s2, r2) = channel::bounded(1); + let is_disconnected = Arc::new(AtomicBool::new(false)); + + let s = SyncSender { + inner: s1, + disconnected: r2, + is_disconnected: is_disconnected.clone(), + }; + let r = Receiver { + inner: r1, + _disconnected: s2, + is_disconnected, + }; + (s, r) + } + + // https://github.com/rust-lang/rust/tree/master/src/libstd/sync/mpsc + mod channel_tests { + use std::env; + use super::*; + use std::thread; + use std::time::{Duration, Instant}; + + pub fn stress_factor() -> usize { + match env::var("RUST_TEST_STRESS") { + Ok(val) => val.parse().unwrap(), + Err(..) 
=> 1, + } + } + + #[test] + fn smoke() { + let (tx, rx) = channel::(); + tx.send(1).unwrap(); + assert_eq!(rx.recv().unwrap(), 1); + } + + #[test] + fn drop_full() { + let (tx, _rx) = channel::>(); + tx.send(Box::new(1)).unwrap(); + } + + #[test] + fn drop_full_shared() { + let (tx, _rx) = channel::>(); + drop(tx.clone()); + drop(tx.clone()); + tx.send(Box::new(1)).unwrap(); + } + + #[test] + fn smoke_shared() { + let (tx, rx) = channel::(); + tx.send(1).unwrap(); + assert_eq!(rx.recv().unwrap(), 1); + let tx = tx.clone(); + tx.send(1).unwrap(); + assert_eq!(rx.recv().unwrap(), 1); + } + + #[test] + fn smoke_threads() { + let (tx, rx) = channel::(); + let _t = thread::spawn(move|| { + tx.send(1).unwrap(); + }); + assert_eq!(rx.recv().unwrap(), 1); + } + + #[test] + fn smoke_port_gone() { + let (tx, rx) = channel::(); + drop(rx); + assert!(tx.send(1).is_err()); + } + + #[test] + fn smoke_shared_port_gone() { + let (tx, rx) = channel::(); + drop(rx); + assert!(tx.send(1).is_err()) + } + + #[test] + fn smoke_shared_port_gone2() { + let (tx, rx) = channel::(); + drop(rx); + let tx2 = tx.clone(); + drop(tx); + assert!(tx2.send(1).is_err()); + } + + #[test] + fn port_gone_concurrent() { + let (tx, rx) = channel::(); + let _t = thread::spawn(move|| { + rx.recv().unwrap(); + }); + while tx.send(1).is_ok() {} + } + + #[test] + fn port_gone_concurrent_shared() { + let (tx, rx) = channel::(); + let tx2 = tx.clone(); + let _t = thread::spawn(move|| { + rx.recv().unwrap(); + }); + while tx.send(1).is_ok() && tx2.send(1).is_ok() {} + } + + #[test] + fn smoke_chan_gone() { + let (tx, rx) = channel::(); + drop(tx); + assert!(rx.recv().is_err()); + } + + #[test] + fn smoke_chan_gone_shared() { + let (tx, rx) = channel::<()>(); + let tx2 = tx.clone(); + drop(tx); + drop(tx2); + assert!(rx.recv().is_err()); + } + + #[test] + fn chan_gone_concurrent() { + let (tx, rx) = channel::(); + let _t = thread::spawn(move|| { + tx.send(1).unwrap(); + tx.send(1).unwrap(); + }); + while rx.recv().is_ok() {} + } + + #[test] + fn stress() { + let (tx, rx) = channel::(); + let t = thread::spawn(move|| { + for _ in 0..10000 { tx.send(1).unwrap(); } + }); + for _ in 0..10000 { + assert_eq!(rx.recv().unwrap(), 1); + } + t.join().ok().unwrap(); + } + + #[test] + fn stress_shared() { + const AMT: u32 = 10000; + const NTHREADS: u32 = 8; + let (tx, rx) = channel::(); + + let t = thread::spawn(move|| { + for _ in 0..AMT * NTHREADS { + assert_eq!(rx.recv().unwrap(), 1); + } + match rx.try_recv() { + Ok(..) 
=> panic!(), + _ => {} + } + }); + + for _ in 0..NTHREADS { + let tx = tx.clone(); + thread::spawn(move|| { + for _ in 0..AMT { tx.send(1).unwrap(); } + }); + } + drop(tx); + t.join().ok().unwrap(); + } + + #[test] + fn send_from_outside_runtime() { + let (tx1, rx1) = channel::<()>(); + let (tx2, rx2) = channel::(); + let t1 = thread::spawn(move|| { + tx1.send(()).unwrap(); + for _ in 0..40 { + assert_eq!(rx2.recv().unwrap(), 1); + } + }); + rx1.recv().unwrap(); + let t2 = thread::spawn(move|| { + for _ in 0..40 { + tx2.send(1).unwrap(); + } + }); + t1.join().ok().unwrap(); + t2.join().ok().unwrap(); + } + + #[test] + fn recv_from_outside_runtime() { + let (tx, rx) = channel::(); + let t = thread::spawn(move|| { + for _ in 0..40 { + assert_eq!(rx.recv().unwrap(), 1); + } + }); + for _ in 0..40 { + tx.send(1).unwrap(); + } + t.join().ok().unwrap(); + } + + #[test] + fn no_runtime() { + let (tx1, rx1) = channel::(); + let (tx2, rx2) = channel::(); + let t1 = thread::spawn(move|| { + assert_eq!(rx1.recv().unwrap(), 1); + tx2.send(2).unwrap(); + }); + let t2 = thread::spawn(move|| { + tx1.send(1).unwrap(); + assert_eq!(rx2.recv().unwrap(), 2); + }); + t1.join().ok().unwrap(); + t2.join().ok().unwrap(); + } + + #[test] + fn oneshot_single_thread_close_port_first() { + // Simple test of closing without sending + let (_tx, rx) = channel::(); + drop(rx); + } + + #[test] + fn oneshot_single_thread_close_chan_first() { + // Simple test of closing without sending + let (tx, _rx) = channel::(); + drop(tx); + } + + #[test] + fn oneshot_single_thread_send_port_close() { + // Testing that the sender cleans up the payload if receiver is closed + let (tx, rx) = channel::>(); + drop(rx); + assert!(tx.send(Box::new(0)).is_err()); + } + + #[test] + fn oneshot_single_thread_recv_chan_close() { + // Receiving on a closed chan will panic + let res = thread::spawn(move|| { + let (tx, rx) = channel::(); + drop(tx); + rx.recv().unwrap(); + }).join(); + // What is our res? 
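+                // `res` is `Err`: the spawned thread panicked on `unwrap()`, since
+                // `recv()` fails once the sending half of the channel has been dropped.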
+ assert!(res.is_err()); + } + + #[test] + fn oneshot_single_thread_send_then_recv() { + let (tx, rx) = channel::>(); + tx.send(Box::new(10)).unwrap(); + assert!(*rx.recv().unwrap() == 10); + } + + #[test] + fn oneshot_single_thread_try_send_open() { + let (tx, rx) = channel::(); + assert!(tx.send(10).is_ok()); + assert!(rx.recv().unwrap() == 10); + } + + #[test] + fn oneshot_single_thread_try_send_closed() { + let (tx, rx) = channel::(); + drop(rx); + assert!(tx.send(10).is_err()); + } + + #[test] + fn oneshot_single_thread_try_recv_open() { + let (tx, rx) = channel::(); + tx.send(10).unwrap(); + assert!(rx.recv() == Ok(10)); + } + + #[test] + fn oneshot_single_thread_try_recv_closed() { + let (tx, rx) = channel::(); + drop(tx); + assert!(rx.recv().is_err()); + } + + #[test] + fn oneshot_single_thread_peek_data() { + let (tx, rx) = channel::(); + assert_eq!(rx.try_recv(), Err(TryRecvError::Empty)); + tx.send(10).unwrap(); + assert_eq!(rx.try_recv(), Ok(10)); + } + + #[test] + fn oneshot_single_thread_peek_close() { + let (tx, rx) = channel::(); + drop(tx); + assert_eq!(rx.try_recv(), Err(TryRecvError::Disconnected)); + assert_eq!(rx.try_recv(), Err(TryRecvError::Disconnected)); + } + + #[test] + fn oneshot_single_thread_peek_open() { + let (_tx, rx) = channel::(); + assert_eq!(rx.try_recv(), Err(TryRecvError::Empty)); + } + + #[test] + fn oneshot_multi_task_recv_then_send() { + let (tx, rx) = channel::>(); + let _t = thread::spawn(move|| { + assert!(*rx.recv().unwrap() == 10); + }); + + tx.send(Box::new(10)).unwrap(); + } + + #[test] + fn oneshot_multi_task_recv_then_close() { + let (tx, rx) = channel::>(); + let _t = thread::spawn(move|| { + drop(tx); + }); + let res = thread::spawn(move|| { + assert!(*rx.recv().unwrap() == 10); + }).join(); + assert!(res.is_err()); + } + + #[test] + fn oneshot_multi_thread_close_stress() { + for _ in 0..stress_factor() { + let (tx, rx) = channel::(); + let _t = thread::spawn(move|| { + drop(rx); + }); + drop(tx); + } + } + + #[test] + fn oneshot_multi_thread_send_close_stress() { + for _ in 0..stress_factor() { + let (tx, rx) = channel::(); + let _t = thread::spawn(move|| { + drop(rx); + }); + let _ = thread::spawn(move|| { + tx.send(1).unwrap(); + }).join(); + } + } + + #[test] + fn oneshot_multi_thread_recv_close_stress() { + for _ in 0..stress_factor() { + let (tx, rx) = channel::(); + thread::spawn(move|| { + let res = thread::spawn(move|| { + rx.recv().unwrap(); + }).join(); + assert!(res.is_err()); + }); + let _t = thread::spawn(move|| { + thread::spawn(move|| { + drop(tx); + }); + }); + } + } + + #[test] + fn oneshot_multi_thread_send_recv_stress() { + for _ in 0..stress_factor() { + let (tx, rx) = channel::>(); + let _t = thread::spawn(move|| { + tx.send(Box::new(10)).unwrap(); + }); + assert!(*rx.recv().unwrap() == 10); + } + } + + #[test] + fn stream_send_recv_stress() { + for _ in 0..stress_factor() { + let (tx, rx) = channel(); + + send(tx, 0); + recv(rx, 0); + + fn send(tx: Sender>, i: i32) { + if i == 10 { return } + + thread::spawn(move|| { + tx.send(Box::new(i)).unwrap(); + send(tx, i + 1); + }); + } + + fn recv(rx: Receiver>, i: i32) { + if i == 10 { return } + + thread::spawn(move|| { + assert!(*rx.recv().unwrap() == i); + recv(rx, i + 1); + }); + } + } + } + + #[test] + fn oneshot_single_thread_recv_timeout() { + let (tx, rx) = channel(); + tx.send(()).unwrap(); + assert_eq!(rx.recv_timeout(Duration::from_millis(1)), Ok(())); + assert_eq!(rx.recv_timeout(Duration::from_millis(1)), Err(RecvTimeoutError::Timeout)); + 
tx.send(()).unwrap(); + assert_eq!(rx.recv_timeout(Duration::from_millis(1)), Ok(())); + } + + #[test] + fn stress_recv_timeout_two_threads() { + let (tx, rx) = channel(); + let stress = stress_factor() + 100; + let timeout = Duration::from_millis(100); + + thread::spawn(move || { + for i in 0..stress { + if i % 2 == 0 { + thread::sleep(timeout * 2); + } + tx.send(1usize).unwrap(); + } + }); + + let mut recv_count = 0; + loop { + match rx.recv_timeout(timeout) { + Ok(n) => { + assert_eq!(n, 1usize); + recv_count += 1; + } + Err(RecvTimeoutError::Timeout) => continue, + Err(RecvTimeoutError::Disconnected) => break, + } + } + + assert_eq!(recv_count, stress); + } + + #[test] + fn recv_timeout_upgrade() { + let (tx, rx) = channel::<()>(); + let timeout = Duration::from_millis(1); + let _tx_clone = tx.clone(); + + let start = Instant::now(); + assert_eq!(rx.recv_timeout(timeout), Err(RecvTimeoutError::Timeout)); + assert!(Instant::now() >= start + timeout); + } + + #[test] + fn stress_recv_timeout_shared() { + let (tx, rx) = channel(); + let stress = stress_factor() + 100; + + for i in 0..stress { + let tx = tx.clone(); + thread::spawn(move || { + thread::sleep(Duration::from_millis(i as u64 * 10)); + tx.send(1usize).unwrap(); + }); + } + + drop(tx); + + let mut recv_count = 0; + loop { + match rx.recv_timeout(Duration::from_millis(10)) { + Ok(n) => { + assert_eq!(n, 1usize); + recv_count += 1; + } + Err(RecvTimeoutError::Timeout) => continue, + Err(RecvTimeoutError::Disconnected) => break, + } + } + + assert_eq!(recv_count, stress); + } + + #[test] + fn recv_a_lot() { + // Regression test that we don't run out of stack in scheduler context + let (tx, rx) = channel(); + for _ in 0..10000 { tx.send(()).unwrap(); } + for _ in 0..10000 { rx.recv().unwrap(); } + } + + #[test] + fn shared_recv_timeout() { + let (tx, rx) = channel(); + let total = 5; + for _ in 0..total { + let tx = tx.clone(); + thread::spawn(move|| { + tx.send(()).unwrap(); + }); + } + + for _ in 0..total { rx.recv().unwrap(); } + + assert_eq!(rx.recv_timeout(Duration::from_millis(1)), Err(RecvTimeoutError::Timeout)); + tx.send(()).unwrap(); + assert_eq!(rx.recv_timeout(Duration::from_millis(1)), Ok(())); + } + + #[test] + fn shared_chan_stress() { + let (tx, rx) = channel(); + let total = stress_factor() + 100; + for _ in 0..total { + let tx = tx.clone(); + thread::spawn(move|| { + tx.send(()).unwrap(); + }); + } + + for _ in 0..total { + rx.recv().unwrap(); + } + } + + #[test] + fn test_nested_recv_iter() { + let (tx, rx) = channel::(); + let (total_tx, total_rx) = channel::(); + + let _t = thread::spawn(move|| { + let mut acc = 0; + for x in rx.iter() { + acc += x; + } + total_tx.send(acc).unwrap(); + }); + + tx.send(3).unwrap(); + tx.send(1).unwrap(); + tx.send(2).unwrap(); + drop(tx); + assert_eq!(total_rx.recv().unwrap(), 6); + } + + #[test] + fn test_recv_iter_break() { + let (tx, rx) = channel::(); + let (count_tx, count_rx) = channel(); + + let _t = thread::spawn(move|| { + let mut count = 0; + for x in rx.iter() { + if count >= 3 { + break; + } else { + count += x; + } + } + count_tx.send(count).unwrap(); + }); + + tx.send(2).unwrap(); + tx.send(2).unwrap(); + tx.send(2).unwrap(); + let _ = tx.send(2); + drop(tx); + assert_eq!(count_rx.recv().unwrap(), 4); + } + + #[test] + fn test_recv_try_iter() { + let (request_tx, request_rx) = channel(); + let (response_tx, response_rx) = channel(); + + // Request `x`s until we have `6`. 
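+        // The helper thread drains `response_rx.try_iter()`, asking for more work on
+        // `request_tx` until its running total reaches 6; the main thread answers each
+        // request with a 2 and its loop ends once the helper returns and drops its ends.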
+ let t = thread::spawn(move|| { + let mut count = 0; + loop { + for x in response_rx.try_iter() { + count += x; + if count == 6 { + return count; + } + } + request_tx.send(()).unwrap(); + } + }); + + for _ in request_rx.iter() { + if response_tx.send(2).is_err() { + break; + } + } + + assert_eq!(t.join().unwrap(), 6); + } + + #[test] + fn test_recv_into_iter_owned() { + let mut iter = { + let (tx, rx) = channel::(); + tx.send(1).unwrap(); + tx.send(2).unwrap(); + + rx.into_iter() + }; + assert_eq!(iter.next().unwrap(), 1); + assert_eq!(iter.next().unwrap(), 2); + assert_eq!(iter.next().is_none(), true); + } + + #[test] + fn test_recv_into_iter_borrowed() { + let (tx, rx) = channel::(); + tx.send(1).unwrap(); + tx.send(2).unwrap(); + drop(tx); + let mut iter = (&rx).into_iter(); + assert_eq!(iter.next().unwrap(), 1); + assert_eq!(iter.next().unwrap(), 2); + assert_eq!(iter.next().is_none(), true); + } + + #[test] + fn try_recv_states() { + let (tx1, rx1) = channel::(); + let (tx2, rx2) = channel::<()>(); + let (tx3, rx3) = channel::<()>(); + let _t = thread::spawn(move|| { + rx2.recv().unwrap(); + tx1.send(1).unwrap(); + tx3.send(()).unwrap(); + rx2.recv().unwrap(); + drop(tx1); + tx3.send(()).unwrap(); + }); + + assert_eq!(rx1.try_recv(), Err(TryRecvError::Empty)); + tx2.send(()).unwrap(); + rx3.recv().unwrap(); + assert_eq!(rx1.try_recv(), Ok(1)); + assert_eq!(rx1.try_recv(), Err(TryRecvError::Empty)); + tx2.send(()).unwrap(); + rx3.recv().unwrap(); + assert_eq!(rx1.try_recv(), Err(TryRecvError::Disconnected)); + } + + // This bug used to end up in a livelock inside of the Receiver destructor + // because the internal state of the Shared packet was corrupted + #[test] + fn destroy_upgraded_shared_port_when_sender_still_active() { + let (tx, rx) = channel(); + let (tx2, rx2) = channel(); + let _t = thread::spawn(move|| { + rx.recv().unwrap(); // wait on a oneshot + drop(rx); // destroy a shared + tx2.send(()).unwrap(); + }); + // make sure the other thread has gone to sleep + for _ in 0..5000 { thread::yield_now(); } + + // upgrade to a shared chan and send a message + let t = tx.clone(); + drop(tx); + t.send(()).unwrap(); + + // wait for the child thread to exit before we exit + rx2.recv().unwrap(); + } + + #[test] + fn issue_32114() { + let (tx, _) = channel(); + let _ = tx.send(123); + assert_eq!(tx.send(123), Err(SendError(123))); + } + } + + // https://github.com/rust-lang/rust/tree/master/src/libstd/sync/mpsc + mod sync_channel_tests { + use std::env; + use std::thread; + use super::*; + use std::time::Duration; + + pub fn stress_factor() -> usize { + match env::var("RUST_TEST_STRESS") { + Ok(val) => val.parse().unwrap(), + Err(..) 
=> 1, + } + } + + #[test] + fn smoke() { + let (tx, rx) = sync_channel::(1); + tx.send(1).unwrap(); + assert_eq!(rx.recv().unwrap(), 1); + } + + #[test] + fn drop_full() { + let (tx, _rx) = sync_channel::>(1); + tx.send(Box::new(1)).unwrap(); + } + + #[test] + fn smoke_shared() { + let (tx, rx) = sync_channel::(1); + tx.send(1).unwrap(); + assert_eq!(rx.recv().unwrap(), 1); + let tx = tx.clone(); + tx.send(1).unwrap(); + assert_eq!(rx.recv().unwrap(), 1); + } + + #[test] + fn recv_timeout() { + let (tx, rx) = sync_channel::(1); + assert_eq!(rx.recv_timeout(Duration::from_millis(1)), Err(RecvTimeoutError::Timeout)); + tx.send(1).unwrap(); + assert_eq!(rx.recv_timeout(Duration::from_millis(1)), Ok(1)); + } + + #[test] + fn smoke_threads() { + let (tx, rx) = sync_channel::(0); + let _t = thread::spawn(move|| { + tx.send(1).unwrap(); + }); + assert_eq!(rx.recv().unwrap(), 1); + } + + #[test] + fn smoke_port_gone() { + let (tx, rx) = sync_channel::(0); + drop(rx); + assert!(tx.send(1).is_err()); + } + + #[test] + fn smoke_shared_port_gone2() { + let (tx, rx) = sync_channel::(0); + drop(rx); + let tx2 = tx.clone(); + drop(tx); + assert!(tx2.send(1).is_err()); + } + + #[test] + fn port_gone_concurrent() { + let (tx, rx) = sync_channel::(0); + let _t = thread::spawn(move|| { + rx.recv().unwrap(); + }); + while tx.send(1).is_ok() {} + } + + #[test] + fn port_gone_concurrent_shared() { + let (tx, rx) = sync_channel::(0); + let tx2 = tx.clone(); + let _t = thread::spawn(move|| { + rx.recv().unwrap(); + }); + while tx.send(1).is_ok() && tx2.send(1).is_ok() {} + } + + #[test] + fn smoke_chan_gone() { + let (tx, rx) = sync_channel::(0); + drop(tx); + assert!(rx.recv().is_err()); + } + + #[test] + fn smoke_chan_gone_shared() { + let (tx, rx) = sync_channel::<()>(0); + let tx2 = tx.clone(); + drop(tx); + drop(tx2); + assert!(rx.recv().is_err()); + } + + #[test] + fn chan_gone_concurrent() { + let (tx, rx) = sync_channel::(0); + thread::spawn(move|| { + tx.send(1).unwrap(); + tx.send(1).unwrap(); + }); + while rx.recv().is_ok() {} + } + + #[test] + fn stress() { + let (tx, rx) = sync_channel::(0); + thread::spawn(move|| { + for _ in 0..10000 { tx.send(1).unwrap(); } + }); + for _ in 0..10000 { + assert_eq!(rx.recv().unwrap(), 1); + } + } + + #[test] + fn stress_recv_timeout_two_threads() { + let (tx, rx) = sync_channel::(0); + + thread::spawn(move|| { + for _ in 0..10000 { tx.send(1).unwrap(); } + }); + + let mut recv_count = 0; + loop { + match rx.recv_timeout(Duration::from_millis(1)) { + Ok(v) => { + assert_eq!(v, 1); + recv_count += 1; + }, + Err(RecvTimeoutError::Timeout) => continue, + Err(RecvTimeoutError::Disconnected) => break, + } + } + + assert_eq!(recv_count, 10000); + } + + #[test] + fn stress_recv_timeout_shared() { + const AMT: u32 = 1000; + const NTHREADS: u32 = 8; + let (tx, rx) = sync_channel::(0); + let (dtx, drx) = sync_channel::<()>(0); + + thread::spawn(move|| { + let mut recv_count = 0; + loop { + match rx.recv_timeout(Duration::from_millis(10)) { + Ok(v) => { + assert_eq!(v, 1); + recv_count += 1; + }, + Err(RecvTimeoutError::Timeout) => continue, + Err(RecvTimeoutError::Disconnected) => break, + } + } + + assert_eq!(recv_count, AMT * NTHREADS); + assert!(rx.try_recv().is_err()); + + dtx.send(()).unwrap(); + }); + + for _ in 0..NTHREADS { + let tx = tx.clone(); + thread::spawn(move|| { + for _ in 0..AMT { tx.send(1).unwrap(); } + }); + } + + drop(tx); + + drx.recv().unwrap(); + } + + #[test] + fn stress_shared() { + const AMT: u32 = 1000; + const NTHREADS: u32 = 8; + let (tx, rx) = 
sync_channel::(0); + let (dtx, drx) = sync_channel::<()>(0); + + thread::spawn(move|| { + for _ in 0..AMT * NTHREADS { + assert_eq!(rx.recv().unwrap(), 1); + } + match rx.try_recv() { + Ok(..) => panic!(), + _ => {} + } + dtx.send(()).unwrap(); + }); + + for _ in 0..NTHREADS { + let tx = tx.clone(); + thread::spawn(move|| { + for _ in 0..AMT { tx.send(1).unwrap(); } + }); + } + drop(tx); + drx.recv().unwrap(); + } + + #[test] + fn oneshot_single_thread_close_port_first() { + // Simple test of closing without sending + let (_tx, rx) = sync_channel::(0); + drop(rx); + } + + #[test] + fn oneshot_single_thread_close_chan_first() { + // Simple test of closing without sending + let (tx, _rx) = sync_channel::(0); + drop(tx); + } + + #[test] + fn oneshot_single_thread_send_port_close() { + // Testing that the sender cleans up the payload if receiver is closed + let (tx, rx) = sync_channel::>(0); + drop(rx); + assert!(tx.send(Box::new(0)).is_err()); + } + + #[test] + fn oneshot_single_thread_recv_chan_close() { + // Receiving on a closed chan will panic + let res = thread::spawn(move|| { + let (tx, rx) = sync_channel::(0); + drop(tx); + rx.recv().unwrap(); + }).join(); + // What is our res? + assert!(res.is_err()); + } + + #[test] + fn oneshot_single_thread_send_then_recv() { + let (tx, rx) = sync_channel::>(1); + tx.send(Box::new(10)).unwrap(); + assert!(*rx.recv().unwrap() == 10); + } + + #[test] + fn oneshot_single_thread_try_send_open() { + let (tx, rx) = sync_channel::(1); + assert_eq!(tx.try_send(10), Ok(())); + assert!(rx.recv().unwrap() == 10); + } + + #[test] + fn oneshot_single_thread_try_send_closed() { + let (tx, rx) = sync_channel::(0); + drop(rx); + assert_eq!(tx.try_send(10), Err(TrySendError::Disconnected(10))); + } + + #[test] + fn oneshot_single_thread_try_send_closed2() { + let (tx, _rx) = sync_channel::(0); + assert_eq!(tx.try_send(10), Err(TrySendError::Full(10))); + } + + #[test] + fn oneshot_single_thread_try_recv_open() { + let (tx, rx) = sync_channel::(1); + tx.send(10).unwrap(); + assert!(rx.recv() == Ok(10)); + } + + #[test] + fn oneshot_single_thread_try_recv_closed() { + let (tx, rx) = sync_channel::(0); + drop(tx); + assert!(rx.recv().is_err()); + } + + #[test] + fn oneshot_single_thread_try_recv_closed_with_data() { + let (tx, rx) = sync_channel::(1); + tx.send(10).unwrap(); + drop(tx); + assert_eq!(rx.try_recv(), Ok(10)); + assert_eq!(rx.try_recv(), Err(TryRecvError::Disconnected)); + } + + #[test] + fn oneshot_single_thread_peek_data() { + let (tx, rx) = sync_channel::(1); + assert_eq!(rx.try_recv(), Err(TryRecvError::Empty)); + tx.send(10).unwrap(); + assert_eq!(rx.try_recv(), Ok(10)); + } + + #[test] + fn oneshot_single_thread_peek_close() { + let (tx, rx) = sync_channel::(0); + drop(tx); + assert_eq!(rx.try_recv(), Err(TryRecvError::Disconnected)); + assert_eq!(rx.try_recv(), Err(TryRecvError::Disconnected)); + } + + #[test] + fn oneshot_single_thread_peek_open() { + let (_tx, rx) = sync_channel::(0); + assert_eq!(rx.try_recv(), Err(TryRecvError::Empty)); + } + + #[test] + fn oneshot_multi_task_recv_then_send() { + let (tx, rx) = sync_channel::>(0); + let _t = thread::spawn(move|| { + assert!(*rx.recv().unwrap() == 10); + }); + + tx.send(Box::new(10)).unwrap(); + } + + #[test] + fn oneshot_multi_task_recv_then_close() { + let (tx, rx) = sync_channel::>(0); + let _t = thread::spawn(move|| { + drop(tx); + }); + let res = thread::spawn(move|| { + assert!(*rx.recv().unwrap() == 10); + }).join(); + assert!(res.is_err()); + } + + #[test] + fn 
oneshot_multi_thread_close_stress() { + for _ in 0..stress_factor() { + let (tx, rx) = sync_channel::(0); + let _t = thread::spawn(move|| { + drop(rx); + }); + drop(tx); + } + } + + #[test] + fn oneshot_multi_thread_send_close_stress() { + for _ in 0..stress_factor() { + let (tx, rx) = sync_channel::(0); + let _t = thread::spawn(move|| { + drop(rx); + }); + let _ = thread::spawn(move || { + tx.send(1).unwrap(); + }).join(); + } + } + + #[test] + fn oneshot_multi_thread_recv_close_stress() { + for _ in 0..stress_factor() { + let (tx, rx) = sync_channel::(0); + let _t = thread::spawn(move|| { + let res = thread::spawn(move|| { + rx.recv().unwrap(); + }).join(); + assert!(res.is_err()); + }); + let _t = thread::spawn(move|| { + thread::spawn(move|| { + drop(tx); + }); + }); + } + } + + #[test] + fn oneshot_multi_thread_send_recv_stress() { + for _ in 0..stress_factor() { + let (tx, rx) = sync_channel::>(0); + let _t = thread::spawn(move|| { + tx.send(Box::new(10)).unwrap(); + }); + assert!(*rx.recv().unwrap() == 10); + } + } + + #[test] + fn stream_send_recv_stress() { + for _ in 0..stress_factor() { + let (tx, rx) = sync_channel::>(0); + + send(tx, 0); + recv(rx, 0); + + fn send(tx: SyncSender>, i: i32) { + if i == 10 { return } + + thread::spawn(move|| { + tx.send(Box::new(i)).unwrap(); + send(tx, i + 1); + }); + } + + fn recv(rx: Receiver>, i: i32) { + if i == 10 { return } + + thread::spawn(move|| { + assert!(*rx.recv().unwrap() == i); + recv(rx, i + 1); + }); + } + } + } + + #[test] + fn recv_a_lot() { + // Regression test that we don't run out of stack in scheduler context + let (tx, rx) = sync_channel(10000); + for _ in 0..10000 { tx.send(()).unwrap(); } + for _ in 0..10000 { rx.recv().unwrap(); } + } + + #[test] + fn shared_chan_stress() { + let (tx, rx) = sync_channel(0); + let total = stress_factor() + 100; + for _ in 0..total { + let tx = tx.clone(); + thread::spawn(move|| { + tx.send(()).unwrap(); + }); + } + + for _ in 0..total { + rx.recv().unwrap(); + } + } + + #[test] + fn test_nested_recv_iter() { + let (tx, rx) = sync_channel::(0); + let (total_tx, total_rx) = sync_channel::(0); + + let _t = thread::spawn(move|| { + let mut acc = 0; + for x in rx.iter() { + acc += x; + } + total_tx.send(acc).unwrap(); + }); + + tx.send(3).unwrap(); + tx.send(1).unwrap(); + tx.send(2).unwrap(); + drop(tx); + assert_eq!(total_rx.recv().unwrap(), 6); + } + + #[test] + fn test_recv_iter_break() { + let (tx, rx) = sync_channel::(0); + let (count_tx, count_rx) = sync_channel(0); + + let _t = thread::spawn(move|| { + let mut count = 0; + for x in rx.iter() { + if count >= 3 { + break; + } else { + count += x; + } + } + count_tx.send(count).unwrap(); + }); + + tx.send(2).unwrap(); + tx.send(2).unwrap(); + tx.send(2).unwrap(); + let _ = tx.try_send(2); + drop(tx); + assert_eq!(count_rx.recv().unwrap(), 4); + } + + #[test] + fn try_recv_states() { + let (tx1, rx1) = sync_channel::(1); + let (tx2, rx2) = sync_channel::<()>(1); + let (tx3, rx3) = sync_channel::<()>(1); + let _t = thread::spawn(move|| { + rx2.recv().unwrap(); + tx1.send(1).unwrap(); + tx3.send(()).unwrap(); + rx2.recv().unwrap(); + drop(tx1); + tx3.send(()).unwrap(); + }); + + assert_eq!(rx1.try_recv(), Err(TryRecvError::Empty)); + tx2.send(()).unwrap(); + rx3.recv().unwrap(); + assert_eq!(rx1.try_recv(), Ok(1)); + assert_eq!(rx1.try_recv(), Err(TryRecvError::Empty)); + tx2.send(()).unwrap(); + rx3.recv().unwrap(); + assert_eq!(rx1.try_recv(), Err(TryRecvError::Disconnected)); + } + + // This bug used to end up in a livelock inside of 
the Receiver destructor + // because the internal state of the Shared packet was corrupted + #[test] + fn destroy_upgraded_shared_port_when_sender_still_active() { + let (tx, rx) = sync_channel::<()>(0); + let (tx2, rx2) = sync_channel::<()>(0); + let _t = thread::spawn(move|| { + rx.recv().unwrap(); // wait on a oneshot + drop(rx); // destroy a shared + tx2.send(()).unwrap(); + }); + // make sure the other thread has gone to sleep + for _ in 0..5000 { thread::yield_now(); } + + // upgrade to a shared chan and send a message + let t = tx.clone(); + drop(tx); + t.send(()).unwrap(); + + // wait for the child thread to exit before we exit + rx2.recv().unwrap(); + } + + #[test] + fn send1() { + let (tx, rx) = sync_channel::(0); + let _t = thread::spawn(move|| { rx.recv().unwrap(); }); + assert_eq!(tx.send(1), Ok(())); + } + + #[test] + fn send2() { + let (tx, rx) = sync_channel::(0); + let _t = thread::spawn(move|| { drop(rx); }); + assert!(tx.send(1).is_err()); + } + + #[test] + fn send3() { + let (tx, rx) = sync_channel::(1); + assert_eq!(tx.send(1), Ok(())); + let _t =thread::spawn(move|| { drop(rx); }); + assert!(tx.send(1).is_err()); + } + + #[test] + fn send4() { + let (tx, rx) = sync_channel::(0); + let tx2 = tx.clone(); + let (done, donerx) = channel(); + let done2 = done.clone(); + let _t = thread::spawn(move|| { + assert!(tx.send(1).is_err()); + done.send(()).unwrap(); + }); + let _t = thread::spawn(move|| { + assert!(tx2.send(2).is_err()); + done2.send(()).unwrap(); + }); + drop(rx); + donerx.recv().unwrap(); + donerx.recv().unwrap(); + } + + #[test] + fn try_send1() { + let (tx, _rx) = sync_channel::(0); + assert_eq!(tx.try_send(1), Err(TrySendError::Full(1))); + } + + #[test] + fn try_send2() { + let (tx, _rx) = sync_channel::(1); + assert_eq!(tx.try_send(1), Ok(())); + assert_eq!(tx.try_send(1), Err(TrySendError::Full(1))); + } + + #[test] + fn try_send3() { + let (tx, rx) = sync_channel::(1); + assert_eq!(tx.try_send(1), Ok(())); + drop(rx); + assert_eq!(tx.try_send(1), Err(TrySendError::Disconnected(1))); + } + + #[test] + fn issue_15761() { + fn repro() { + let (tx1, rx1) = sync_channel::<()>(3); + let (tx2, rx2) = sync_channel::<()>(3); + + let _t = thread::spawn(move|| { + rx1.recv().unwrap(); + tx2.try_send(()).unwrap(); + }); + + tx1.try_send(()).unwrap(); + rx2.recv().unwrap(); + } + + for _ in 0..100 { + repro() + } + } + } + + // https://github.com/rust-lang/rust/blob/master/src/libstd/sync/mpsc/select.rs + mod select_tests { + use std::thread; + use super::*; + + #[test] + fn smoke() { + let (tx1, rx1) = channel::(); + let (tx2, rx2) = channel::(); + tx1.send(1).unwrap(); + mpsc_select! { + foo = rx1.recv() => { assert_eq!(foo.unwrap(), 1); }, + _bar = rx2.recv() => { panic!() } + } + tx2.send(2).unwrap(); + mpsc_select! { + _foo = rx1.recv() => { panic!() }, + bar = rx2.recv() => { assert_eq!(bar.unwrap(), 2) } + } + drop(tx1); + mpsc_select! { + foo = rx1.recv() => { assert!(foo.is_err()); }, + _bar = rx2.recv() => { panic!() } + } + drop(tx2); + mpsc_select! { + bar = rx2.recv() => { assert!(bar.is_err()); } + } + } + + #[test] + fn smoke2() { + let (_tx1, rx1) = channel::(); + let (_tx2, rx2) = channel::(); + let (_tx3, rx3) = channel::(); + let (_tx4, rx4) = channel::(); + let (tx5, rx5) = channel::(); + tx5.send(4).unwrap(); + mpsc_select! 
{ + _foo = rx1.recv() => { panic!("1") }, + _foo = rx2.recv() => { panic!("2") }, + _foo = rx3.recv() => { panic!("3") }, + _foo = rx4.recv() => { panic!("4") }, + foo = rx5.recv() => { assert_eq!(foo.unwrap(), 4); } + } + } + + #[test] + fn closed() { + let (_tx1, rx1) = channel::(); + let (tx2, rx2) = channel::(); + drop(tx2); + + mpsc_select! { + _a1 = rx1.recv() => { panic!() }, + a2 = rx2.recv() => { assert!(a2.is_err()); } + } + } + + #[test] + fn unblocks() { + let (tx1, rx1) = channel::(); + let (_tx2, rx2) = channel::(); + let (tx3, rx3) = channel::(); + + let _t = thread::spawn(move|| { + for _ in 0..20 { thread::yield_now(); } + tx1.send(1).unwrap(); + rx3.recv().unwrap(); + for _ in 0..20 { thread::yield_now(); } + }); + + mpsc_select! { + a = rx1.recv() => { assert_eq!(a.unwrap(), 1); }, + _b = rx2.recv() => { panic!() } + } + tx3.send(1).unwrap(); + mpsc_select! { + a = rx1.recv() => { assert!(a.is_err()) }, + _b = rx2.recv() => { panic!() } + } + } + + #[test] + fn both_ready() { + let (tx1, rx1) = channel::(); + let (tx2, rx2) = channel::(); + let (tx3, rx3) = channel::<()>(); + + let _t = thread::spawn(move|| { + for _ in 0..20 { thread::yield_now(); } + tx1.send(1).unwrap(); + tx2.send(2).unwrap(); + rx3.recv().unwrap(); + }); + + mpsc_select! { + a = rx1.recv() => { assert_eq!(a.unwrap(), 1); }, + a = rx2.recv() => { assert_eq!(a.unwrap(), 2); } + } + mpsc_select! { + a = rx1.recv() => { assert_eq!(a.unwrap(), 1); }, + a = rx2.recv() => { assert_eq!(a.unwrap(), 2); } + } + assert_eq!(rx1.try_recv(), Err(TryRecvError::Empty)); + assert_eq!(rx2.try_recv(), Err(TryRecvError::Empty)); + tx3.send(()).unwrap(); + } + + #[test] + fn stress() { + const AMT: i32 = 10000; + let (tx1, rx1) = channel::(); + let (tx2, rx2) = channel::(); + let (tx3, rx3) = channel::<()>(); + + let _t = thread::spawn(move|| { + for i in 0..AMT { + if i % 2 == 0 { + tx1.send(i).unwrap(); + } else { + tx2.send(i).unwrap(); + } + rx3.recv().unwrap(); + } + }); + + for i in 0..AMT { + mpsc_select! { + i1 = rx1.recv() => { assert!(i % 2 == 0 && i == i1.unwrap()); }, + i2 = rx2.recv() => { assert!(i % 2 == 1 && i == i2.unwrap()); } + } + tx3.send(()).unwrap(); + } + } + + #[allow(unused_must_use)] + #[test] + fn cloning() { + let (tx1, rx1) = channel::(); + let (_tx2, rx2) = channel::(); + let (tx3, rx3) = channel::<()>(); + + let _t = thread::spawn(move|| { + rx3.recv().unwrap(); + tx1.clone(); + assert_eq!(rx3.try_recv(), Err(TryRecvError::Empty)); + tx1.send(2).unwrap(); + rx3.recv().unwrap(); + }); + + tx3.send(()).unwrap(); + mpsc_select! { + _i1 = rx1.recv() => {}, + _i2 = rx2.recv() => panic!() + } + tx3.send(()).unwrap(); + } + + #[allow(unused_must_use)] + #[test] + fn cloning2() { + let (tx1, rx1) = channel::(); + let (_tx2, rx2) = channel::(); + let (tx3, rx3) = channel::<()>(); + + let _t = thread::spawn(move|| { + rx3.recv().unwrap(); + tx1.clone(); + assert_eq!(rx3.try_recv(), Err(TryRecvError::Empty)); + tx1.send(2).unwrap(); + rx3.recv().unwrap(); + }); + + tx3.send(()).unwrap(); + mpsc_select! { + _i1 = rx1.recv() => {}, + _i2 = rx2.recv() => panic!() + } + tx3.send(()).unwrap(); + } + + #[test] + fn cloning3() { + let (tx1, rx1) = channel::<()>(); + let (tx2, rx2) = channel::<()>(); + let (tx3, rx3) = channel::<()>(); + let _t = thread::spawn(move|| { + mpsc_select! 
{ + _ = rx1.recv() => panic!(), + _ = rx2.recv() => {} + } + tx3.send(()).unwrap(); + }); + + for _ in 0..1000 { thread::yield_now(); } + drop(tx1.clone()); + tx2.send(()).unwrap(); + rx3.recv().unwrap(); + } + + #[test] + fn preflight1() { + let (tx, rx) = channel(); + tx.send(()).unwrap(); + mpsc_select! { + _n = rx.recv() => {} + } + } + + #[test] + fn preflight2() { + let (tx, rx) = channel(); + tx.send(()).unwrap(); + tx.send(()).unwrap(); + mpsc_select! { + _n = rx.recv() => {} + } + } + + #[test] + fn preflight3() { + let (tx, rx) = channel(); + drop(tx.clone()); + tx.send(()).unwrap(); + mpsc_select! { + _n = rx.recv() => {} + } + } + + #[test] + fn preflight4() { + let (tx, rx) = channel(); + tx.send(()).unwrap(); + mpsc_select! { + _ = rx.recv() => {} + } + } + + #[test] + fn preflight5() { + let (tx, rx) = channel(); + tx.send(()).unwrap(); + tx.send(()).unwrap(); + mpsc_select! { + _ = rx.recv() => {} + } + } + + #[test] + fn preflight6() { + let (tx, rx) = channel(); + drop(tx.clone()); + tx.send(()).unwrap(); + mpsc_select! { + _ = rx.recv() => {} + } + } + + #[test] + fn preflight7() { + let (tx, rx) = channel::<()>(); + drop(tx); + mpsc_select! { + _ = rx.recv() => {} + } + } + + #[test] + fn preflight8() { + let (tx, rx) = channel(); + tx.send(()).unwrap(); + drop(tx); + rx.recv().unwrap(); + mpsc_select! { + _ = rx.recv() => {} + } + } + + #[test] + fn preflight9() { + let (tx, rx) = channel(); + drop(tx.clone()); + tx.send(()).unwrap(); + drop(tx); + rx.recv().unwrap(); + mpsc_select! { + _ = rx.recv() => {} + } + } + + #[test] + fn oneshot_data_waiting() { + let (tx1, rx1) = channel::<()>(); + let (tx2, rx2) = channel::<()>(); + let _t = thread::spawn(move|| { + mpsc_select! { + _n = rx1.recv() => {} + } + tx2.send(()).unwrap(); + }); + + for _ in 0..100 { thread::yield_now() } + tx1.send(()).unwrap(); + rx2.recv().unwrap(); + } + + #[test] + fn stream_data_waiting() { + let (tx1, rx1) = channel(); + let (tx2, rx2) = channel(); + tx1.send(()).unwrap(); + tx1.send(()).unwrap(); + rx1.recv().unwrap(); + rx1.recv().unwrap(); + let _t = thread::spawn(move|| { + mpsc_select! { + _n = rx1.recv() => {} + } + tx2.send(()).unwrap(); + }); + + for _ in 0..100 { thread::yield_now() } + tx1.send(()).unwrap(); + rx2.recv().unwrap(); + } + + #[test] + fn shared_data_waiting() { + let (tx1, rx1) = channel(); + let (tx2, rx2) = channel(); + drop(tx1.clone()); + tx1.send(()).unwrap(); + rx1.recv().unwrap(); + let _t = thread::spawn(move|| { + mpsc_select! { + _n = rx1.recv() => {} + } + tx2.send(()).unwrap(); + }); + + for _ in 0..100 { thread::yield_now() } + tx1.send(()).unwrap(); + rx2.recv().unwrap(); + } + + #[test] + fn sync1() { + let (tx, rx) = sync_channel::(1); + tx.send(1).unwrap(); + mpsc_select! { + n = rx.recv() => { assert_eq!(n.unwrap(), 1); } + } + } + + #[test] + fn sync2() { + let (tx, rx) = sync_channel::(0); + let _t = thread::spawn(move|| { + for _ in 0..100 { thread::yield_now() } + tx.send(1).unwrap(); + }); + mpsc_select! { + n = rx.recv() => { assert_eq!(n.unwrap(), 1); } + } + } + + #[test] + fn sync3() { + let (tx1, rx1) = sync_channel::(0); + let (tx2, rx2): (Sender, Receiver) = channel(); + let _t = thread::spawn(move|| { tx1.send(1).unwrap(); }); + let _t = thread::spawn(move|| { tx2.send(2).unwrap(); }); + mpsc_select! 
{ + n = rx1.recv() => { + let n = n.unwrap(); + assert_eq!(n, 1); + assert_eq!(rx2.recv().unwrap(), 2); + }, + n = rx2.recv() => { + let n = n.unwrap(); + assert_eq!(n, 2); + assert_eq!(rx1.recv().unwrap(), 1); + } + } + } + } + } +} + +mod normal { + tests!(wrappers::normal); +} + +mod cloned { + tests!(wrappers::cloned); +} + +mod select { + tests!(wrappers::select); +} + +mod select_spin { + tests!(wrappers::select_spin); +} + +mod select_multi { + tests!(wrappers::select_multi); +} diff --git a/crossbeam-channel/tests/parse.rs b/crossbeam-channel/tests/parse.rs new file mode 100644 index 000000000..417a4ff5c --- /dev/null +++ b/crossbeam-channel/tests/parse.rs @@ -0,0 +1,405 @@ +//! Tests for the `select!` macro parser. +//! +//! These tests make sure that all possible invocations of `select!` actually compile. + +#![deny(unsafe_code)] + +extern crate crossbeam; +#[macro_use] +extern crate crossbeam_channel as channel; + +use std::ops::Deref; + +#[test] +fn recv() { + let (s1, r1) = channel::unbounded::(); + let (s2, r2) = channel::unbounded::(); + let rs = [r1, r2]; + + s1.send(0); + s2.send(0); + + select! { + recv(rs) => {} + } + + select! { + recv(rs) => {} + default => panic!(), + } + + select! { + recv(rs) => panic!(), + default => {} + } +} + +#[test] +fn references() { + let (s, r) = channel::unbounded::(); + select! { + send(s, 0) => {} + recv(r) => {} + } + select! { + send(&&&&s, 0) => {} + recv(&&&&r) => {} + } + + select! { + send([&s].iter().map(|x| *x), 0) => {} + recv([&r].iter().map(|x| *x)) => {} + } + + let ss = &&&&[s]; + let rr = &&&&[r]; + select! { + send(ss.iter(), 0) => {} + recv(rr.iter()) => {} + } + select! { + send(&&&&ss.iter(), 0) => {} + recv(&&&&rr.iter()) => {} + } +} + +#[test] +fn blocks() { + let (s, r) = channel::unbounded::(); + + select! { + recv(r) => 3.0 + recv(r) => loop { + unreachable!() + } + recv(r) => match 7 + 3 { + _ => unreachable!() + } + default => 7. + }; + + select! { + recv(r, msg) => if msg.is_some() { + unreachable!() + } + default => () + } + + drop(s); +} + +#[test] +fn move_handles() { + let (s, r) = channel::unbounded::(); + select! { + recv((move || r)()) => {} + send((move || s)(), 0) => {} + } +} + +#[test] +fn infer_types() { + let (s, r) = channel::unbounded(); + select! { + recv(r) => {} + default => {} + } + s.send(()); + + let (s, r) = channel::unbounded(); + select! { + send(s, ()) => {} + } + r.recv(); +} + +#[test] +fn default() { + let (s, r) = channel::bounded::(0); + + select! { + recv(r) => panic!(), + default => {} + } + select! { + send(s, 0) => panic!(), + default() => {} + } + select! { + default => {} + } + select! { + default() => {} + } +} + +#[test] +fn same_variable_name() { + let (_, r) = channel::unbounded::(); + select! { + recv(r, r) => assert!(r.is_none()), + } + + let (s, _) = channel::unbounded::(); + let s2 = s.clone(); + select! { + send(s, 0, s) => assert_eq!(s, &s2), + } +} + +#[test] +fn handles_on_heap() { + let (s, r) = channel::unbounded::(); + let (s, r) = (Box::new(s), Box::new(r)); + + select! { + send(*s, 0) => {} + recv(*r) => {} + default => {} + } + + drop(s); + drop(r); +} + +#[test] +fn option_receiver() { + let (_, r) = channel::unbounded::(); + select! { + recv(Some(&r)) => {} + } + select! { + recv(Some(&r)) => {} + recv(Some(&r)) => {} + } + + let r: Option> = None; + select! { + recv(r.as_ref()) => {} + default => {} + } + select! 
{ + recv(r.as_ref()) => {} + recv(r.as_ref()) => {} + default => {} + } + + let r: Option<&&&Box<&&channel::Receiver>> = None; + let r: Option<&channel::Receiver> = match r { + None => None, + Some(r) => Some(r), + }; + select! { + recv(r) => {} + default => {} + } + select! { + recv(r) => {} + recv(r) => {} + default => {} + } +} + +#[test] +fn option_sender() { + let (s, _) = channel::unbounded::(); + select! { + send(Some(&s), 0) => {} + default => {} + } + select! { + send(Some(&s), 0) => {} + send(Some(&s), 0) => {} + default => {} + } + + let s: Option> = None; + select! { + send(s.as_ref(), 0) => {} + default => {} + } + select! { + send(s.as_ref(), 0) => {} + send(s.as_ref(), 0) => {} + default => {} + } + + let s: Option<&&&Box<&&channel::Sender>> = None; + let s: Option<&channel::Sender> = match s { + None => None, + Some(s) => Some(s), + }; + select! { + send(s, 0) => {} + default => {} + } + select! { + send(s, 0) => {} + send(s, 0) => {} + default => {} + } +} + +#[test] +fn once_blocks() { + let (s, r) = channel::unbounded::(); + + let once = Box::new(()); + select! { + send(s, 0) => drop(once), + } + + let once = Box::new(()); + select! { + recv(r) => drop(once), + } + + let once1 = Box::new(()); + let once2 = Box::new(()); + select! { + send(s, 0) => drop(once1), + default => drop(once2), + } + + let once1 = Box::new(()); + let once2 = Box::new(()); + select! { + recv(r) => drop(once1), + default => drop(once2), + } + + let once1 = Box::new(()); + let once2 = Box::new(()); + select! { + recv(r) => drop(once1), + send(s, 0) => drop(once2), + } +} + +#[test] +fn once_receiver() { + let (_, r) = channel::unbounded::(); + + let once = Box::new(()); + let get = move || { + drop(once); + r + }; + + select! { + recv(get()) => {} + } +} + +#[test] +fn once_sender() { + let (s, _) = channel::unbounded::(); + + let once = Box::new(()); + let get = move || { + drop(once); + s + }; + + select! { + send(get(), 5) => {} + } +} + +#[test] +fn once_receive() { + +} + +#[test] +fn nesting() { + let (_, r) = channel::unbounded::(); + + select! { + recv(r) => { + select! { + recv(r) => { + select! { + recv(r) => { + select! { + default => {} + } + } + } + } + } + } + } +} + +#[test] +fn evaluate() { + let (s, r) = channel::unbounded::(); + + let v = select! { + recv(r) => "foo".into(), + send(s, 0) => "bar".to_owned(), + default => "baz".to_string(), + }; + assert_eq!(v, "bar"); + + let v = select! { + recv(r) => "foo".into(), + default => "baz".to_string(), + }; + assert_eq!(v, "foo"); + + let v = select! { + recv(r) => "foo".into(), + default => "baz".to_string(), + }; + assert_eq!(v, "baz"); +} + +#[test] +fn variety() { + let (s1, r1) = channel::unbounded::(); + let (s2, r2) = channel::bounded::(1); + let (s3, r3) = channel::bounded::<()>(0); + + select! { + recv(r1) => {} + send(s1, 7) => {} + recv(Some(r2)) => {} + send(Some(s2), "foo".to_string()) => {} + recv([&r3].iter().map(|x| *x)) => {} + send([&s3].iter().map(|x| *x), ()) => {} + default => {} + } +} + +#[test] +fn deref() { + struct Sender(channel::Sender); + struct Receiver(channel::Receiver); + + impl Deref for Receiver { + type Target = channel::Receiver; + + fn deref(&self) -> &Self::Target { + &self.0 + } + } + + impl Deref for Sender { + type Target = channel::Sender; + + fn deref(&self) -> &Self::Target { + &self.0 + } + } + + let (s, r) = channel::bounded::(0); + let (s, r) = (Sender(s), Receiver(r)); + + select! 
{ + send(s, 0) => panic!(), + recv(r) => panic!(), + default => {} + } +} diff --git a/crossbeam-channel/tests/select.rs b/crossbeam-channel/tests/select.rs new file mode 100644 index 000000000..000b0bb1c --- /dev/null +++ b/crossbeam-channel/tests/select.rs @@ -0,0 +1,893 @@ +//! Tests for the `select!` macro. + +#![deny(unsafe_code)] + +extern crate crossbeam; +#[macro_use] +extern crate crossbeam_channel as channel; + +use std::any::Any; +use std::thread; +use std::time::Duration; + +fn ms(ms: u64) -> Duration { + Duration::from_millis(ms) +} + +#[test] +fn smoke1() { + let (s1, r1) = channel::unbounded::(); + let (s2, r2) = channel::unbounded::(); + + s1.send(1); + + select! { + recv(r1, v) => assert_eq!(v, Some(1)), + recv(r2) => panic!(), + } + + s2.send(2); + + select! { + recv(r1) => panic!(), + recv(r2, v) => assert_eq!(v, Some(2)), + } +} + +#[test] +fn smoke2() { + let (_s1, r1) = channel::unbounded::(); + let (_s2, r2) = channel::unbounded::(); + let (_s3, r3) = channel::unbounded::(); + let (_s4, r4) = channel::unbounded::(); + let (s5, r5) = channel::unbounded::(); + + s5.send(5); + + select! { + recv(r1) => panic!(), + recv(r2) => panic!(), + recv(r3) => panic!(), + recv(r4) => panic!(), + recv(r5, v) => assert_eq!(v, Some(5)), + } +} + +#[test] +fn closed() { + let (s1, r1) = channel::unbounded::(); + let (s2, r2) = channel::unbounded::(); + + crossbeam::scope(|scope| { + scope.spawn(|| { + drop(s1); + thread::sleep(ms(500)); + s2.send(5); + }); + + select! { + recv(r1, v) => assert!(v.is_none()), + recv(r2) => panic!(), + recv(channel::after(ms(1000))) => panic!(), + } + + r2.recv().unwrap(); + }); + + select! { + recv(r1, v) => assert!(v.is_none()), + recv(r2) => panic!(), + recv(channel::after(ms(1000))) => panic!(), + } + + crossbeam::scope(|scope| { + scope.spawn(|| { + thread::sleep(ms(500)); + drop(s2); + }); + + select! { + recv(r2, v) => assert!(v.is_none()), + recv(channel::after(ms(1000))) => panic!(), + } + }); +} + +#[test] +fn default() { + let (s1, r1) = channel::unbounded::(); + let (s2, r2) = channel::unbounded::(); + + select! { + recv(r1) => panic!(), + recv(r2) => panic!(), + default => {} + } + + drop(s1); + + select! { + recv(r1, v) => assert!(v.is_none()), + recv(r2) => panic!(), + default => panic!(), + } + + s2.send(2); + + select! { + recv(r2, v) => assert_eq!(v, Some(2)), + default => panic!(), + } + + select! { + recv(r2) => panic!(), + default => {}, + } + + select! { + default => {}, + } +} + +#[test] +fn timeout() { + let (_s1, r1) = channel::unbounded::(); + let (s2, r2) = channel::unbounded::(); + + crossbeam::scope(|scope| { + scope.spawn(|| { + thread::sleep(ms(1500)); + s2.send(2); + }); + + select! { + recv(r1) => panic!(), + recv(r2) => panic!(), + recv(channel::after(ms(1000))) => {}, + } + + select! { + recv(r1) => panic!(), + recv(r2, v) => assert_eq!(v, Some(2)), + recv(channel::after(ms(1000))) => panic!(), + } + }); + + crossbeam::scope(|scope| { + let (s, r) = channel::unbounded::(); + + scope.spawn(move || { + thread::sleep(ms(500)); + drop(s); + }); + + select! { + recv(channel::after(ms(1000))) => { + select! { + recv(r, v) => assert!(v.is_none()), + default => panic!(), + } + } + } + }); +} + +#[test] +fn default_when_closed() { + let (_, r) = channel::unbounded::(); + + select! { + recv(r, v) => assert!(v.is_none()), + default => panic!(), + } + + let (_, r) = channel::unbounded::(); + + select! 
{ + recv(r, v) => assert!(v.is_none()), + recv(channel::after(ms(1000))) => panic!(), + } +} + +#[test] +fn unblocks() { + let (s1, r1) = channel::bounded::(0); + let (s2, r2) = channel::bounded::(0); + + crossbeam::scope(|scope| { + scope.spawn(|| { + thread::sleep(ms(500)); + s2.send(2); + }); + + select! { + recv(r1) => panic!(), + recv(r2, v) => assert_eq!(v, Some(2)), + recv(channel::after(ms(1000))) => panic!(), + } + }); + + crossbeam::scope(|scope| { + scope.spawn(|| { + thread::sleep(ms(500)); + assert_eq!(r1.recv().unwrap(), 1); + }); + + select! { + send(s1, 1) => {}, + send(s2, 2) => panic!(), + recv(channel::after(ms(1000))) => panic!(), + } + }); +} + +#[test] +fn both_ready() { + let (s1, r1) = channel::bounded(0); + let (s2, r2) = channel::bounded(0); + + crossbeam::scope(|scope| { + scope.spawn(|| { + thread::sleep(ms(500)); + s1.send(1); + assert_eq!(r2.recv().unwrap(), 2); + }); + + for _ in 0..2 { + select! { + recv(r1, v) => assert_eq!(v, Some(1)), + send(s2, 2) => {}, + } + } + }); +} + +#[test] +fn loop_try() { + const RUNS: usize = 20; + + for _ in 0..RUNS { + let (s1, r1) = channel::bounded::(0); + let (s2, r2) = channel::bounded::(0); + let (s_end, r_end) = channel::bounded::<()>(0); + + crossbeam::scope(|scope| { + scope.spawn(|| { + loop { + select! { + send(s1, 1) => break, + default => {} + } + + select! { + recv(r_end) => break, + default => {} + } + } + }); + + scope.spawn(|| { + loop { + if let Some(x) = r2.try_recv() { + assert_eq!(x, 2); + break; + } + + select! { + recv(r_end) => break, + default => {} + } + } + }); + + scope.spawn(|| { + thread::sleep(ms(500)); + + select! { + recv(r1, v) => assert_eq!(v, Some(1)), + send(s2, 2) => {}, + recv(channel::after(ms(500))) => panic!(), + } + + drop(s_end); + }); + }); + } +} + +#[test] +fn cloning1() { + crossbeam::scope(|scope| { + let (s1, r1) = channel::unbounded::(); + let (_s2, r2) = channel::unbounded::(); + let (s3, r3) = channel::unbounded::<()>(); + + scope.spawn(move || { + r3.recv().unwrap(); + drop(s1.clone()); + assert_eq!(r3.try_recv(), None); + s1.send(1); + r3.recv().unwrap(); + }); + + s3.send(()); + + select! { + recv(r1) => {}, + recv(r2) => {}, + } + + s3.send(()); + }); +} + +#[test] +fn cloning2() { + let (s1, r1) = channel::unbounded::<()>(); + let (s2, r2) = channel::unbounded::<()>(); + let (_s3, _r3) = channel::unbounded::<()>(); + + crossbeam::scope(|scope| { + scope.spawn(move || { + select! { + recv(r1) => panic!(), + recv(r2) => {}, + } + }); + + thread::sleep(ms(500)); + drop(s1.clone()); + s2.send(()); + }) +} + +#[test] +fn preflight1() { + let (s, r) = channel::unbounded(); + s.send(()); + + select! { + recv(r) => {} + } +} + +#[test] +fn preflight2() { + let (s, r) = channel::unbounded(); + drop(s.clone()); + s.send(()); + drop(s); + + select! { + recv(r, v) => assert!(v.is_some()), + } + assert_eq!(r.try_recv(), None); +} + +#[test] +fn preflight3() { + let (s, r) = channel::unbounded(); + drop(s.clone()); + s.send(()); + drop(s); + r.recv().unwrap(); + + select! { + recv(r, v) => assert!(v.is_none()) + } +} + +#[test] +fn duplicate_cases() { + let (s, r) = channel::unbounded::(); + let mut hit = [false; 4]; + + while hit.iter().any(|hit| !hit) { + select! { + recv(r) => hit[0] = true, + recv(Some(&r)) => hit[1] = true, + send(s, 0) => hit[2] = true, + send(Some(&s), 0) => hit[3] = true, + } + } +} + +#[test] +fn multiple_receivers() { + let (_, r1) = channel::unbounded::(); + let (_, r2) = channel::bounded::(5); + select! 
{ + recv([&r1, &r2].iter().map(|x| *x), msg) => assert!(msg.is_none()), + } + select! { + recv([r1, r2].iter(), msg) => assert!(msg.is_none()), + } + + let (_, r1) = channel::unbounded::(); + let (_, r2) = channel::bounded::(5); + select! { + recv(&[r1, r2], msg) => assert!(msg.is_none()), + } +} + +#[test] +fn multiple_senders() { + let (s1, _) = channel::unbounded::(); + let (s2, _) = channel::bounded::(5); + select! { + send([&s1, &s2].iter().map(|x| *x), 0) => {} + } + select! { + send([s1, s2].iter(), 0) => {} + } + + let (s1, _) = channel::unbounded::(); + let (s2, _) = channel::bounded::(5); + select! { + send(&[s1, s2], 0) => {}, + } +} + +#[test] +fn recv_handle() { + let (s1, r1) = channel::unbounded::(); + let (s2, r2) = channel::unbounded::(); + let rs = [r1, r2]; + + s2.send(0); + select! { + recv(rs, _, r) => assert_eq!(r, &s2), + default => panic!(), + } + + s1.send(0); + select! { + recv(rs, _, r) => assert_eq!(r, &s1), + default => panic!(), + } +} + +#[test] +fn send_handle() { + let (s1, r1) = channel::bounded::(0); + let (s2, r2) = channel::bounded::(0); + let ss = [s1, s2]; + + crossbeam::scope(|scope| { + scope.spawn(|| { + thread::sleep(ms(500)); + select! { + send(ss, 0, s) => assert_eq!(s, &r2), + default => panic!(), + } + }); + r2.recv(); + }); + + crossbeam::scope(|scope| { + scope.spawn(|| { + thread::sleep(ms(500)); + select! { + send(ss, 0, s) => assert_eq!(s, &r1), + default => panic!(), + } + }); + r1.recv(); + }); +} + +#[test] +fn nesting() { + let (s, r) = channel::unbounded::(); + + select! { + send(s, 0) => { + select! { + recv(r, v) => { + assert_eq!(v, Some(0)); + select! { + send(s, 1) => { + select! { + recv(r, v) => { + assert_eq!(v, Some(1)); + } + } + } + } + } + } + } + } +} + +#[test] +fn conditional_send() { + let (s, _) = channel::unbounded(); + + select! { + send(if 1 + 1 == 3 { Some(&s) } else { None }, ()) => panic!(), + recv(channel::after(ms(1000))) => {} + } + + select! { + send(if 1 + 1 == 2 { Some(&s) } else { None }, ()) => {}, + recv(channel::after(ms(1000))) => panic!(), + } +} + +#[test] +fn conditional_recv() { + let (s, r) = channel::unbounded(); + s.send(()); + + select! { + recv(if 1 + 1 == 3 { Some(&r) } else { None }) => panic!(), + recv(channel::after(ms(1000))) => {} + } + + select! { + recv(if 1 + 1 == 2 { Some(&r) } else { None }) => {}, + recv(channel::after(ms(1000))) => panic!(), + } +} + +#[test] +#[should_panic(expected = "send panicked")] +fn panic_send() { + fn get() -> channel::Sender { + panic!("send panicked") + } + + select! { + send(get(), panic!()) => {} + } +} + +#[test] +#[should_panic(expected = "recv panicked")] +fn panic_recv() { + fn get() -> channel::Receiver { + panic!("recv panicked") + } + + select! { + recv(get()) => {} + } +} + +#[test] +fn stress_recv() { + const COUNT: usize = 10_000; + + let (s1, r1) = channel::unbounded(); + let (s2, r2) = channel::bounded(5); + let (s3, r3) = channel::bounded(100); + + crossbeam::scope(|scope| { + scope.spawn(|| { + for i in 0..COUNT { + s1.send(i); + r3.recv().unwrap(); + + s2.send(i); + r3.recv().unwrap(); + } + }); + + for i in 0..COUNT { + for _ in 0..2 { + select! 
{ + recv(r1, v) => assert_eq!(v, Some(i)), + recv(r2, v) => assert_eq!(v, Some(i)), + } + + s3.send(()); + } + } + }); +} + +#[test] +fn stress_send() { + const COUNT: usize = 10_000; + + let (s1, r1) = channel::bounded(0); + let (s2, r2) = channel::bounded(0); + let (s3, r3) = channel::bounded(100); + + crossbeam::scope(|scope| { + scope.spawn(|| { + for i in 0..COUNT { + assert_eq!(r1.recv().unwrap(), i); + assert_eq!(r2.recv().unwrap(), i); + r3.recv().unwrap(); + } + }); + + for i in 0..COUNT { + for _ in 0..2 { + select! { + send(s1, i) => {}, + send(s2, i) => {}, + } + } + s3.send(()); + } + }); +} + +#[test] +fn stress_mixed() { + const COUNT: usize = 10_000; + + let (s1, r1) = channel::bounded(0); + let (s2, r2) = channel::bounded(0); + let (s3, r3) = channel::bounded(100); + + crossbeam::scope(|scope| { + scope.spawn(|| { + for i in 0..COUNT { + s1.send(i); + assert_eq!(r2.recv().unwrap(), i); + r3.recv().unwrap(); + } + }); + + for i in 0..COUNT { + for _ in 0..2 { + select! { + recv(r1, v) => assert_eq!(v, Some(i)), + send(s2, i) => {}, + } + } + s3.send(()); + } + }); +} + +#[test] +fn stress_timeout_two_threads() { + const COUNT: usize = 20; + + let (s, r) = channel::bounded(2); + + crossbeam::scope(|scope| { + scope.spawn(|| { + for i in 0..COUNT { + if i % 2 == 0 { + thread::sleep(ms(500)); + } + + loop { + select! { + send(s, i) => break, + recv(channel::after(ms(100))) => {} + } + } + } + }); + + scope.spawn(|| { + for i in 0..COUNT { + if i % 2 == 0 { + thread::sleep(ms(500)); + } + + loop { + select! { + recv(r, v) => { + assert_eq!(v, Some(i)); + break; + } + recv(channel::after(ms(100))) => {} + } + } + } + }); + }); +} + +#[test] +fn send_recv_same_channel() { + let (s, r) = channel::bounded::(0); + select! { + send(s, 0) => panic!(), + recv(r) => panic!(), + recv(channel::after(ms(500))) => {} + } + + let (s, r) = channel::unbounded::(); + select! { + send(s, 0) => {}, + recv(r) => panic!(), + recv(channel::after(ms(500))) => panic!(), + } +} + +#[test] +fn matching() { + const THREADS: usize = 44; + + let (s, r) = &channel::bounded::(0); + + crossbeam::scope(|scope| { + for i in 0..THREADS { + scope.spawn(move || { + select! { + recv(r, v) => assert_ne!(v.unwrap(), i), + send(s, i) => {}, + } + }); + } + }); + + assert_eq!(r.try_recv(), None); +} + +#[test] +fn matching_with_leftover() { + const THREADS: usize = 55; + + let (s, r) = &channel::bounded::(0); + + crossbeam::scope(|scope| { + for i in 0..THREADS { + scope.spawn(move || { + select! { + recv(r, v) => assert_ne!(v.unwrap(), i), + send(s, i) => {}, + } + }); + } + s.send(!0); + }); + + assert_eq!(r.try_recv(), None); +} + +#[test] +fn channel_through_channel() { + const COUNT: usize = 1000; + + type T = Box; + + for cap in 0..3 { + let (s, r) = channel::bounded::(cap); + + crossbeam::scope(|scope| { + scope.spawn(move || { + let mut s = s; + + for _ in 0..COUNT { + let (new_s, new_r) = channel::bounded(cap); + let mut new_r: T = Box::new(Some(new_r)); + + select! { + send(s, new_r) => {} + } + + s = new_s; + } + }); + + scope.spawn(move || { + let mut r = r; + + for _ in 0..COUNT { + r = select! 
{ + recv(r, mut msg) => { + msg.unwrap() + .downcast_mut::>>() + .unwrap() + .take() + .unwrap() + } + } + } + }); + }); + } +} + +#[test] +fn linearizable() { + const COUNT: usize = 100_000; + + for step in 0..2 { + let (start_s, start_r) = channel::bounded::<()>(0); + let (end_s, end_r) = channel::bounded::<()>(0); + + let ((s1, r1), (s2, r2)) = if step == 0 { + (channel::bounded::(1), channel::bounded::(1)) + } else { + (channel::unbounded::(), channel::unbounded::()) + }; + + crossbeam::scope(|scope| { + scope.spawn(|| { + for _ in 0..COUNT { + start_s.send(()); + + s1.send(1); + select! { + recv(r1) => {} + recv(r2) => {} + default => unreachable!() + } + + end_s.send(()); + r2.try_recv(); + } + }); + + for _ in 0..COUNT { + start_r.recv(); + + s2.send(1); + r1.try_recv(); + + end_r.recv(); + } + }); + } +} + +#[test] +fn fairness1() { + const COUNT: usize = 10_000; + + let (s1, r1) = channel::bounded::<()>(COUNT); + let (s2, r2) = channel::unbounded::<()>(); + + for _ in 0..COUNT { + s1.send(()); + s2.send(()); + } + + let mut hits = [0usize; 4]; + while hits[0] + hits[1] < 2 * COUNT { + select! { + recv(r1) => hits[0] += 1, + recv(r2) => hits[1] += 1, + recv(channel::after(ms(0))) => hits[2] += 1, + recv(channel::tick(ms(0))) => hits[3] += 1, + } + } + + assert!(r1.is_empty()); + assert!(r2.is_empty()); + + let sum: usize = hits.iter().sum(); + assert!(hits.iter().all(|x| *x >= sum / hits.len() / 2)); +} + +#[test] +fn fairness2() { + const COUNT: usize = 10_000; + + let (s1, r1) = channel::unbounded::<()>(); + let (s2, r2) = channel::bounded::<()>(1); + let (s3, r3) = channel::bounded::<()>(0); + + crossbeam::scope(|scope| { + scope.spawn(|| { + for _ in 0..COUNT { + select! { + send(if s1.is_empty() { Some(&s1) } else { None }, ()) => {} + send(if s2.is_empty() { Some(&s2) } else { None }, ()) => {} + send(s3, ()) => {} + } + } + }); + + let mut hits = [0usize; 3]; + for _ in 0..COUNT { + select! { + recv(r1) => hits[0] += 1, + recv(r2) => hits[1] += 1, + recv(r3) => hits[2] += 1, + } + } + assert!(hits.iter().all(|x| *x >= COUNT / hits.len() / 10)); + }); +} diff --git a/crossbeam-channel/tests/select_struct.rs b/crossbeam-channel/tests/select_struct.rs new file mode 100644 index 000000000..ac4ec3be9 --- /dev/null +++ b/crossbeam-channel/tests/select_struct.rs @@ -0,0 +1,861 @@ +//! Tests for the `Select` struct. 
+ +extern crate crossbeam; +extern crate crossbeam_channel as channel; + +use std::any::Any; +use std::cell::Cell; +use std::sync::atomic::{AtomicUsize, Ordering}; +use std::thread; +use std::time::Duration; + +use channel::Select; + +fn ms(ms: u64) -> Duration { + Duration::from_millis(ms) +} + +#[test] +fn smoke1() { + let (s1, r1) = channel::unbounded::(); + let (s2, r2) = channel::unbounded::(); + + s1.send(1); + + Select::new() + .recv(&r1, |v| assert_eq!(v, Some(1))) + .recv(&r2, |_| panic!()) + .wait(); + + s2.send(2); + + Select::new() + .recv(&r1, |_| panic!()) + .recv(&r2, |v| assert_eq!(v, Some(2))) + .wait(); +} + +#[test] +fn smoke2() { + let (_s1, r1) = channel::unbounded::(); + let (_s2, r2) = channel::unbounded::(); + let (_s3, r3) = channel::unbounded::(); + let (_s4, r4) = channel::unbounded::(); + let (s5, r5) = channel::unbounded::(); + + s5.send(5); + + Select::new() + .recv(&r1, |_| panic!()) + .recv(&r2, |_| panic!()) + .recv(&r3, |_| panic!()) + .recv(&r4, |_| panic!()) + .recv(&r5, |v| assert_eq!(v, Some(5))) + .wait(); +} + +#[test] +fn closed() { + let (s1, r1) = channel::unbounded::(); + let (s2, r2) = channel::unbounded::(); + + crossbeam::scope(|scope| { + scope.spawn(|| { + drop(s1); + thread::sleep(ms(500)); + s2.send(5); + }); + + let after = channel::after(ms(1000)); + Select::new() + .recv(&r1, |v| assert!(v.is_none())) + .recv(&r2, |_| panic!()) + .recv(&after, |_| panic!()) + .wait(); + + r2.recv().unwrap(); + }); + + let after = channel::after(ms(1000)); + Select::new() + .recv(&r1, |v| assert!(v.is_none())) + .recv(&r2, |_| panic!()) + .recv(&after, |_| panic!()) + .wait(); + + crossbeam::scope(|scope| { + scope.spawn(|| { + thread::sleep(ms(500)); + drop(s2); + }); + + let after = channel::after(ms(1000)); + Select::new() + .recv(&r2, |v| assert!(v.is_none())) + .recv(&after, |_| panic!()) + .wait(); + }); +} + +#[test] +fn default() { + let (s1, r1) = channel::unbounded::(); + let (s2, r2) = channel::unbounded::(); + + Select::new() + .recv(&r1, |_| panic!()) + .recv(&r2, |_| panic!()) + .default(|| ()) + .wait(); + + drop(s1); + + Select::new() + .recv(&r1, |v| assert!(v.is_none())) + .recv(&r2, |_| panic!()) + .default(|| panic!()) + .wait(); + + s2.send(2); + + Select::new() + .recv(&r2, |v| assert_eq!(v, Some(2))) + .default(|| panic!()) + .wait(); + + Select::new() + .recv(&r2, |_| panic!()) + .default(|| ()) + .wait(); + + Select::new() + .default(|| ()) + .wait(); +} + +#[test] +fn timeout() { + let (_s1, r1) = channel::unbounded::(); + let (s2, r2) = channel::unbounded::(); + + crossbeam::scope(|scope| { + scope.spawn(|| { + thread::sleep(ms(1500)); + s2.send(2); + }); + + let after = channel::after(ms(1000)); + Select::new() + .recv(&r1, |_| panic!()) + .recv(&r2, |_| panic!()) + .recv(&after, |_| ()) + .wait(); + + let after = channel::after(ms(1000)); + Select::new() + .recv(&r1, |_| panic!()) + .recv(&r2, |v| assert_eq!(v, Some(2))) + .recv(&after, |_| panic!()) + .wait(); + }); + + crossbeam::scope(|scope| { + let (s, r) = channel::unbounded::(); + + scope.spawn(move || { + thread::sleep(ms(500)); + drop(s); + }); + + let after = channel::after(ms(1000)); + Select::new() + .recv(&after, |_| { + Select::new() + .recv(&r, |v| assert!(v.is_none())) + .default(|| panic!()) + .wait(); + }) + .wait(); + }); +} + +#[test] +fn default_when_closed() { + let (_, r) = channel::unbounded::(); + + Select::new() + .recv(&r, |v| assert!(v.is_none())) + .default(|| panic!()) + .wait(); + + let (_, r) = channel::unbounded::(); + + let after = 
channel::after(ms(1000)); + Select::new() + .recv(&r, |v| assert!(v.is_none())) + .recv(&after, |_| panic!()) + .wait(); +} + +#[test] +fn unblocks() { + let (s1, r1) = channel::bounded::(0); + let (s2, r2) = channel::bounded::(0); + + crossbeam::scope(|scope| { + scope.spawn(|| { + thread::sleep(ms(500)); + s2.send(2); + }); + + let after = channel::after(ms(1000)); + Select::new() + .recv(&r1, |_| panic!()) + .recv(&r2, |v| assert_eq!(v, Some(2))) + .recv(&after, |_| panic!()) + .wait(); + }); + + crossbeam::scope(|scope| { + scope.spawn(|| { + thread::sleep(ms(500)); + assert_eq!(r1.recv().unwrap(), 1); + }); + + let after = channel::after(ms(1000)); + Select::new() + .send(&s1, || 1, || ()) + .send(&s2, || 2, || panic!()) + .recv(&after, |_| panic!()) + .wait(); + }); +} + +#[test] +fn both_ready() { + let (s1, r1) = channel::bounded(0); + let (s2, r2) = channel::bounded(0); + + crossbeam::scope(|scope| { + scope.spawn(|| { + thread::sleep(ms(500)); + s1.send(1); + assert_eq!(r2.recv().unwrap(), 2); + }); + + for _ in 0..2 { + Select::new() + .recv(&r1, |v| assert_eq!(v, Some(1))) + .send(&s2, || 2, || ()) + .wait(); + } + }); +} + +#[test] +fn loop_try() { + const RUNS: usize = 20; + + for _ in 0..RUNS { + let (s1, r1) = channel::bounded::(0); + let (s2, r2) = channel::bounded::(0); + let (s_end, r_end) = channel::bounded::<()>(0); + + crossbeam::scope(|scope| { + scope.spawn(|| { + loop { + let mut done = false; + + Select::new() + .send(&s1, || 1, || done = true) + .default(|| ()) + .wait(); + if done { + break; + } + + Select::new() + .recv(&r_end, |_| done = true) + .default(|| ()) + .wait(); + if done { + break; + } + } + }); + + scope.spawn(|| { + loop { + if let Some(x) = r2.try_recv() { + assert_eq!(x, 2); + break; + } + + let mut done = false; + Select::new() + .recv(&r_end, |_| done = true) + .default(|| ()) + .wait(); + if done { + break; + } + } + }); + + scope.spawn(|| { + thread::sleep(ms(500)); + + let after = channel::after(ms(1000)); + Select::new() + .recv(&r1, |v| assert_eq!(v, Some(1))) + .send(&s2, || 2, || ()) + .recv(&after, |_| panic!()) + .wait(); + + drop(s_end); + }); + }); + } +} + +#[test] +fn cloning1() { + crossbeam::scope(|scope| { + let (s1, r1) = channel::unbounded::(); + let (_s2, r2) = channel::unbounded::(); + let (s3, r3) = channel::unbounded::<()>(); + + scope.spawn(move || { + r3.recv().unwrap(); + drop(s1.clone()); + assert_eq!(r3.try_recv(), None); + s1.send(1); + r3.recv().unwrap(); + }); + + s3.send(()); + + Select::new() + .recv(&r1, |_| ()) + .recv(&r2, |_| ()) + .wait(); + + s3.send(()); + }); +} + +#[test] +fn cloning2() { + let (s1, r1) = channel::unbounded::<()>(); + let (s2, r2) = channel::unbounded::<()>(); + let (_s3, _r3) = channel::unbounded::<()>(); + + crossbeam::scope(|scope| { + scope.spawn(move || { + Select::new() + .recv(&r1, |_| panic!()) + .recv(&r2, |_| ()) + .wait(); + }); + + thread::sleep(ms(500)); + drop(s1.clone()); + s2.send(()); + }) +} + +#[test] +fn preflight1() { + let (s, r) = channel::unbounded(); + s.send(()); + + Select::new() + .recv(&r, |_| ()) + .wait(); +} + +#[test] +fn preflight2() { + let (s, r) = channel::unbounded(); + drop(s.clone()); + s.send(()); + drop(s); + + Select::new() + .recv(&r, |v| assert!(v.is_some())) + .wait(); + assert_eq!(r.try_recv(), None); +} + +#[test] +fn preflight3() { + let (s, r) = channel::unbounded(); + drop(s.clone()); + s.send(()); + drop(s); + r.recv().unwrap(); + + Select::new() + .recv(&r, |v| assert!(v.is_none())) + .wait(); +} + +#[test] +fn duplicate_cases() { + 
let (s, r) = channel::unbounded::(); + let hit = vec![Cell::new(false); 4]; + + while hit.iter().map(|h| h.get()).any(|hit| !hit) { + Select::new() + .recv(&r, |_| hit[0].set(true)) + .recv(&r, |_| hit[1].set(true)) + .send(&s, || 0, || hit[2].set(true)) + .send(&s, || 0, || hit[3].set(true)) + .wait(); + } +} + +#[test] +fn nesting() { + let (s, r) = channel::unbounded::(); + + Select::new() + .send(&s, || 0, || { + Select::new() + .recv(&r, |v| { + assert_eq!(v, Some(0)); + Select::new() + .send(&s, || 1, || { + Select::new() + .recv(&r, |v| assert_eq!(v, Some(1))) + .wait(); + }) + .wait(); + }) + .wait(); + }) + .wait(); +} + +#[test] +fn stress_recv() { + const COUNT: usize = 10_000; + + let (s1, r1) = channel::unbounded(); + let (s2, r2) = channel::bounded(5); + let (s3, r3) = channel::bounded(100); + + crossbeam::scope(|scope| { + scope.spawn(|| { + for i in 0..COUNT { + s1.send(i); + r3.recv().unwrap(); + + s2.send(i); + r3.recv().unwrap(); + } + }); + + for i in 0..COUNT { + for _ in 0..2 { + Select::new() + .recv(&r1, |v| assert_eq!(v, Some(i))) + .recv(&r2, |v| assert_eq!(v, Some(i))) + .wait(); + + s3.send(()); + } + } + }); +} + +#[test] +fn stress_send() { + const COUNT: usize = 10_000; + + let (s1, r1) = channel::bounded(0); + let (s2, r2) = channel::bounded(0); + let (s3, r3) = channel::bounded(100); + + crossbeam::scope(|scope| { + scope.spawn(|| { + for i in 0..COUNT { + assert_eq!(r1.recv().unwrap(), i); + assert_eq!(r2.recv().unwrap(), i); + r3.recv().unwrap(); + } + }); + + for i in 0..COUNT { + for _ in 0..2 { + Select::new() + .send(&s1, || i, || ()) + .send(&s2, || i, || ()) + .wait(); + } + s3.send(()); + } + }); +} + +#[test] +fn stress_mixed() { + const COUNT: usize = 10_000; + + let (s1, r1) = channel::bounded(0); + let (s2, r2) = channel::bounded(0); + let (s3, r3) = channel::bounded(100); + + crossbeam::scope(|scope| { + scope.spawn(|| { + for i in 0..COUNT { + s1.send(i); + assert_eq!(r2.recv().unwrap(), i); + r3.recv().unwrap(); + } + }); + + for i in 0..COUNT { + for _ in 0..2 { + Select::new() + .recv(&r1, |v| assert_eq!(v, Some(i))) + .send(&s2, || i, || ()) + .wait(); + } + s3.send(()); + } + }); +} + +#[test] +fn stress_timeout_two_threads() { + const COUNT: usize = 20; + + let (s, r) = channel::bounded(2); + + crossbeam::scope(|scope| { + scope.spawn(|| { + for i in 0..COUNT { + if i % 2 == 0 { + thread::sleep(ms(500)); + } + + let mut done = false; + while !done { + let after = channel::after(ms(100)); + Select::new() + .send(&s, || i, || done = true) + .recv(&after, |_| ()) + .wait(); + } + } + }); + + scope.spawn(|| { + for i in 0..COUNT { + if i % 2 == 0 { + thread::sleep(ms(500)); + } + + let mut done = false; + while !done { + let after = channel::after(ms(100)); + Select::new() + .recv(&r, |v| { + assert_eq!(v, Some(i)); + done = true; + }) + .recv(&after, |_| ()) + .wait(); + } + } + }); + }); +} + +#[test] +fn send_recv_same_channel() { + let (s, r) = channel::bounded::(0); + let after = channel::after(ms(500)); + Select::new() + .send(&s, || 0, || panic!()) + .recv(&r, |_| panic!()) + .recv(&after, |_| ()) + .wait(); + + let (s, r) = channel::unbounded::(); + let after = channel::after(ms(500)); + Select::new() + .send(&s, || 0, || ()) + .recv(&r, |_| panic!()) + .recv(&after, |_| panic!()) + .wait(); +} + +#[test] +fn matching() { + const THREADS: usize = 44; + + let (s, r) = &channel::bounded::(0); + + crossbeam::scope(|scope| { + for i in 0..THREADS { + scope.spawn(move || { + Select::new() + .recv(&r, |v| assert_ne!(v.unwrap(), i)) + 
.send(&s, || i, || ()) + .wait(); + }); + } + }); + + assert_eq!(r.try_recv(), None); +} + +#[test] +fn matching_with_leftover() { + const THREADS: usize = 55; + + let (s, r) = &channel::bounded::(0); + + crossbeam::scope(|scope| { + for i in 0..THREADS { + scope.spawn(move || { + Select::new() + .recv(&r, |v| assert_ne!(v.unwrap(), i)) + .send(&s, || i, || ()) + .wait(); + }); + } + s.send(!0); + }); + + assert_eq!(r.try_recv(), None); +} + +#[test] +fn channel_through_channel() { + const COUNT: usize = 1000; + + type T = Box; + + for cap in 0..3 { + let (s, r) = channel::bounded::(cap); + + crossbeam::scope(|scope| { + scope.spawn(move || { + let mut s = s; + + for _ in 0..COUNT { + let (new_s, new_r) = channel::bounded(cap); + let mut new_r: T = Box::new(Some(new_r)); + + Select::new() + .send(&s, || new_r, || ()) + .wait(); + + s = new_s; + } + }); + + scope.spawn(move || { + let mut r = r; + + for _ in 0..COUNT { + let new = Select::new() + .recv(&r, |msg| { + msg.unwrap() + .downcast_mut::>>() + .unwrap() + .take() + .unwrap() + }) + .wait(); + r = new; + } + }); + }); + } +} + +#[test] +fn linearizable() { + const COUNT: usize = 100_000; + + for step in 0..2 { + let (start_s, start_r) = channel::bounded::<()>(0); + let (end_s, end_r) = channel::bounded::<()>(0); + + let ((s1, r1), (s2, r2)) = if step == 0 { + (channel::bounded::(1), channel::bounded::(1)) + } else { + (channel::unbounded::(), channel::unbounded::()) + }; + + crossbeam::scope(|scope| { + scope.spawn(|| { + for _ in 0..COUNT { + start_s.send(()); + + s1.send(1); + Select::new() + .recv(&r1, |_| ()) + .recv(&r2, |_| ()) + .default(|| unreachable!()) + .wait(); + + end_s.send(()); + r2.try_recv(); + } + }); + + for _ in 0..COUNT { + start_r.recv(); + + s2.send(1); + r1.try_recv(); + + end_r.recv(); + } + }); + } +} + +#[test] +fn fairness1() { + const COUNT: usize = 10_000; + + let (s1, r1) = channel::bounded::<()>(COUNT); + let (s2, r2) = channel::unbounded::<()>(); + + for _ in 0..COUNT { + s1.send(()); + s2.send(()); + } + + let hits = vec![Cell::new(0usize); 4]; + while hits[0].get() + hits[1].get() < 2 * COUNT { + let after = channel::after(ms(0)); + let tick = channel::tick(ms(0)); + Select::new() + .recv(&r1, |_| hits[0].set(hits[0].get() + 1)) + .recv(&r2, |_| hits[1].set(hits[1].get() + 1)) + .recv(&after, |_| hits[2].set(hits[2].get() + 1)) + .recv(&tick, |_| hits[3].set(hits[3].get() + 1)) + .wait(); + } + + assert!(r1.is_empty()); + assert!(r2.is_empty()); + + let sum: usize = hits.iter().map(|h| h.get()).sum(); + assert!(hits.iter().all(|x| x.get() >= sum / hits.len() / 2)); +} + +#[test] +fn fairness2() { + const COUNT: usize = 10_000; + + let (s1, r1) = channel::unbounded::<()>(); + let (s2, r2) = channel::bounded::<()>(1); + let (s3, r3) = channel::bounded::<()>(0); + + crossbeam::scope(|scope| { + scope.spawn(|| { + for _ in 0..COUNT { + let mut sel = Select::new(); + if s1.is_empty() { + sel = sel.send(&s1, || (), || ()); + } + if s2.is_empty() { + sel = sel.send(&s2, || (), || ()); + } + sel = sel.send(&s3, || (), || ()); + sel.wait(); + } + }); + + let hits = vec![Cell::new(0usize); 3]; + for _ in 0..COUNT { + Select::new() + .recv(&r1, |_| hits[0].set(hits[0].get() + 1)) + .recv(&r2, |_| hits[1].set(hits[1].get() + 1)) + .recv(&r3, |_| hits[2].set(hits[2].get() + 1)) + .wait(); + } + assert!(hits.iter().all(|x| x.get() >= COUNT / hits.len() / 10)); + }); +} + +#[test] +fn drop_callbacks() { + static DROPS: AtomicUsize = AtomicUsize::new(0); + + struct Foo; + impl Drop for Foo { + fn drop(&mut self) 
{ + DROPS.fetch_add(1, Ordering::SeqCst); + } + } + + let (s, r) = channel::unbounded::<()>(); + + DROPS.store(0, Ordering::SeqCst); + let foo1 = Foo; + let foo2 = Foo; + let foo3 = Foo; + let foo4 = Foo; + let sel = Select::new() + .recv(&r, |_| drop(foo1)) + .send(&s, || drop(foo2), || drop(foo3)) + .default(|| drop(foo4)); + assert_eq!(DROPS.load(Ordering::SeqCst), 0); + sel.wait(); + assert_eq!(DROPS.load(Ordering::SeqCst), 4); + + DROPS.store(0, Ordering::SeqCst); + let foo1 = Foo; + let foo2 = Foo; + let foo3 = Foo; + let mut sel = Select::new(); + sel = sel.default(|| drop(foo1)); + assert_eq!(DROPS.load(Ordering::SeqCst), 0); + sel = sel.default(|| drop(foo2)); + assert_eq!(DROPS.load(Ordering::SeqCst), 1); + sel = sel.default(|| drop(foo3)); + assert_eq!(DROPS.load(Ordering::SeqCst), 2); + sel.wait(); + assert_eq!(DROPS.load(Ordering::SeqCst), 3); + + DROPS.store(0, Ordering::SeqCst); + let foo1 = Foo; + let foo2 = Foo; + let foo3 = Foo; + let foo4 = Foo; + Select::new() + .recv(&r, |_| drop(foo1)) + .send(&s, || drop(foo2), || drop(foo3)) + .default(|| { + assert_eq!(DROPS.load(Ordering::SeqCst), 3); + drop(foo4); + }) + .wait(); + assert_eq!(DROPS.load(Ordering::SeqCst), 4); + + s.send(()); + + DROPS.store(0, Ordering::SeqCst); + let foo1 = Foo; + let foo2 = Foo; + let foo3 = Foo; + let foo4 = Foo; + Select::new() + .recv(&r, |_| { + assert_eq!(DROPS.load(Ordering::SeqCst), 3); + drop(foo1); + }) + .send(&s, || drop(foo2), || drop(foo3)) + .default(|| drop(foo4)) + .wait(); + assert_eq!(DROPS.load(Ordering::SeqCst), 4); +} diff --git a/crossbeam-channel/tests/thread_locals.rs b/crossbeam-channel/tests/thread_locals.rs new file mode 100644 index 000000000..b3ed865ad --- /dev/null +++ b/crossbeam-channel/tests/thread_locals.rs @@ -0,0 +1,53 @@ +//! Tests that make sure accessing thread-locals while exiting the thread doesn't cause panics. + +#![deny(unsafe_code)] + +extern crate crossbeam; +#[macro_use] +extern crate crossbeam_channel as channel; + +use std::thread; +use std::time::Duration; + +fn ms(ms: u64) -> Duration { + Duration::from_millis(ms) +} + +#[test] +fn use_while_exiting() { + struct Foo; + + impl Drop for Foo { + fn drop(&mut self) { + // A blocking operation after the thread-locals have been dropped. This will attempt to + // use the thread-locals and must not panic. + let (_s, r) = channel::unbounded::<()>(); + select! { + recv(r) => {} + recv(channel::after(ms(100))) => {} + } + } + } + + thread_local! { + static FOO: Foo = Foo; + } + + let (s, r) = channel::unbounded::<()>(); + + crossbeam::scope(|scope| { + scope.spawn(|| { + // First initialize `FOO`, then the thread-locals related to crossbeam-channel and + // crossbeam-epoch. + FOO.with(|_| ()); + r.recv().unwrap(); + // At thread exit, the crossbeam-related thread-locals get dropped first and `FOO` is + // dropped last. + }); + + scope.spawn(|| { + thread::sleep(ms(100)); + s.send(()); + }); + }); +} diff --git a/crossbeam-channel/tests/tick.rs b/crossbeam-channel/tests/tick.rs new file mode 100644 index 000000000..5f74c10d6 --- /dev/null +++ b/crossbeam-channel/tests/tick.rs @@ -0,0 +1,276 @@ +//! Tests for the tick channel flavor. + +extern crate crossbeam; +#[macro_use] +extern crate crossbeam_channel as channel; +extern crate rand; + +mod wrappers; + +macro_rules! 
tests { + ($channel:path) => { + use std::sync::atomic::AtomicUsize; + use std::sync::atomic::Ordering; + use std::thread; + use std::time::{Duration, Instant}; + + use $channel as channel; + use crossbeam; + + fn ms(ms: u64) -> Duration { + Duration::from_millis(ms) + } + + #[test] + fn fire() { + let start = Instant::now(); + let r = channel::tick(ms(50)); + + assert_eq!(r.try_recv(), None); + thread::sleep(ms(100)); + + let fired = r.try_recv().unwrap(); + assert!(start < fired); + assert!(fired - start >= ms(50)); + + let now = Instant::now(); + assert!(fired < now); + assert!(now - fired >= ms(50)); + + assert_eq!(r.try_recv(), None); + + select! { + recv(r) => panic!(), + default => {} + } + + select! { + recv(r) => {} + recv(channel::after(ms(200))) => panic!(), + } + } + + #[test] + fn intervals() { + let start = Instant::now(); + let r = channel::tick(ms(50)); + + let t1 = r.recv().unwrap(); + assert!(start + ms(50) <= t1); + assert!(start + ms(100) > t1); + + thread::sleep(ms(300)); + let t2 = r.try_recv().unwrap(); + assert!(start + ms(100) <= t2); + assert!(start + ms(150) > t2); + + assert_eq!(r.try_recv(), None); + let t3 = r.recv().unwrap(); + assert!(start + ms(400) <= t3); + assert!(start + ms(450) > t3); + + assert_eq!(r.try_recv(), None); + } + + #[test] + fn capacity() { + const COUNT: usize = 10; + + for i in 0..COUNT { + let r = channel::tick(ms(i as u64)); + assert_eq!(r.capacity(), Some(1)); + } + } + + #[test] + fn len_empty_full() { + let r = channel::tick(ms(50)); + + assert_eq!(r.len(), 0); + assert_eq!(r.is_empty(), true); + assert_eq!(r.is_full(), false); + + thread::sleep(ms(100)); + + assert_eq!(r.len(), 1); + assert_eq!(r.is_empty(), false); + assert_eq!(r.is_full(), true); + + r.try_recv().unwrap(); + + assert_eq!(r.len(), 0); + assert_eq!(r.is_empty(), true); + assert_eq!(r.is_full(), false); + } + + #[test] + fn recv() { + let start = Instant::now(); + let r = channel::tick(ms(50)); + + let fired = r.recv().unwrap(); + assert!(start < fired); + assert!(fired - start >= ms(50)); + + let now = Instant::now(); + assert!(fired < now); + assert!(now - fired < fired - start); + + assert_eq!(r.try_recv(), None); + } + + #[test] + fn recv_two() { + let r1 = channel::tick(ms(50)); + let r2 = channel::tick(ms(50)); + + crossbeam::scope(|scope| { + scope.spawn(|| { + for _ in 0..10 { + select! { + recv(r1) => {} + recv(r2) => {} + } + } + }); + scope.spawn(|| { + for _ in 0..10 { + select! { + recv(r1) => {} + recv(r2) => {} + } + } + }); + }); + } + + #[test] + fn recv_race() { + select! { + recv(channel::tick(ms(50))) => {} + recv(channel::tick(ms(100))) => panic!(), + } + + select! { + recv(channel::tick(ms(100))) => panic!(), + recv(channel::tick(ms(50))) => {} + } + } + + #[test] + fn stress_default() { + const COUNT: usize = 10; + + for _ in 0..COUNT { + select! { + recv(channel::tick(ms(0))) => {} + default => panic!(), + } + } + + for _ in 0..COUNT { + select! { + recv(channel::tick(ms(100))) => panic!(), + default => {} + } + } + } + + #[test] + fn select() { + const THREADS: usize = 4; + + let hits = AtomicUsize::new(0); + let r1 = channel::tick(ms(200)); + let r2 = channel::tick(ms(300)); + + crossbeam::scope(|scope| { + for _ in 0..THREADS { + scope.spawn(|| { + let v = vec![&r1.0, &r2.0]; + let timeout = channel::after(ms(1100)); + + loop { + select! 
{ + recv(v) => { + hits.fetch_add(1, Ordering::SeqCst); + } + recv(timeout) => break + } + } + }); + } + }); + + assert_eq!(hits.load(Ordering::SeqCst), 8); + } + + #[test] + fn fairness() { + const COUNT: usize = 30; + + for &dur in &[0, 1] { + let mut hits = [0usize; 2]; + + for _ in 0..COUNT { + let r1 = channel::tick(ms(dur)); + let r2 = channel::tick(ms(dur)); + + for _ in 0..COUNT { + select! { + recv(r1) => hits[0] += 1, + recv(r2) => hits[1] += 1, + } + } + } + + assert!(hits.iter().all(|x| *x >= COUNT / hits.len() / 2)); + } + } + + #[test] + fn fairness_duplicates() { + const COUNT: usize = 30; + + for &dur in &[0, 1] { + let mut hits = [0usize; 5]; + + for _ in 0..COUNT { + let r = channel::tick(ms(dur)); + + for _ in 0..COUNT { + select! { + recv(r) => hits[0] += 1, + recv(r) => hits[1] += 1, + recv(r) => hits[2] += 1, + recv(r) => hits[3] += 1, + recv(r) => hits[4] += 1, + } + } + } + + assert!(hits.iter().all(|x| *x >= COUNT / hits.len() / 2)); + } + } + }; +} + +mod normal { + tests!(wrappers::normal); +} + +mod cloned { + tests!(wrappers::cloned); +} + +mod select { + tests!(wrappers::select); +} + +mod select_spin { + tests!(wrappers::select_spin); +} + +mod select_multi { + tests!(wrappers::select_multi); +} diff --git a/crossbeam-channel/tests/wrappers/cloned.rs b/crossbeam-channel/tests/wrappers/cloned.rs new file mode 100644 index 000000000..8b8d45f23 --- /dev/null +++ b/crossbeam-channel/tests/wrappers/cloned.rs @@ -0,0 +1,61 @@ +//! Channels with cloned senders and receivers immediately upon construction. +//! +//! Some channel types might take advantage of the fact that the first sender or receiver was never +//! cloned, so this wrapper turns off such optimizations right away. + +use std::ops::Deref; +use std::time::{Duration, Instant}; + +use channel; + +pub struct Sender(pub channel::Sender); + +pub struct Receiver(pub channel::Receiver); + +impl Clone for Sender { + fn clone(&self) -> Sender { + Sender(self.0.clone()) + } +} + +impl Clone for Receiver { + fn clone(&self) -> Receiver { + Receiver(self.0.clone()) + } +} + +impl Deref for Receiver { + type Target = channel::Receiver; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl Deref for Sender { + type Target = channel::Sender; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +pub fn bounded(cap: usize) -> (Sender, Receiver) { + let (s, r) = channel::bounded(cap); + (Sender(s.clone()), Receiver(r.clone())) +} + +pub fn unbounded() -> (Sender, Receiver) { + let (s, r) = channel::unbounded(); + (Sender(s.clone()), Receiver(r.clone())) +} + +pub fn after(dur: Duration) -> Receiver { + let r = channel::after(dur); + Receiver(r.clone()) +} + +pub fn tick(dur: Duration) -> Receiver { + let r = channel::tick(dur); + Receiver(r.clone()) +} diff --git a/crossbeam-channel/tests/wrappers/mod.rs b/crossbeam-channel/tests/wrappers/mod.rs new file mode 100644 index 000000000..8879207a6 --- /dev/null +++ b/crossbeam-channel/tests/wrappers/mod.rs @@ -0,0 +1,16 @@ +//! Wrappers around the channel interface that intentionally enable or disable some optimizations. +//! +//! There's a number of internal optimizations within methods for sending and receiving messages, +//! as well as within the `select!` macro. Such optimizations often diverge from the main code +//! paths to special fast paths with custom implementations. +//! +//! We use these wrappers in order to enable or disable such optimizations and thoroughly exercise +//! the implementation in all possible scenarios. 
+
+#![allow(dead_code)]
+
+pub mod cloned;
+pub mod normal;
+pub mod select;
+pub mod select_multi;
+pub mod select_spin;
diff --git a/crossbeam-channel/tests/wrappers/normal.rs b/crossbeam-channel/tests/wrappers/normal.rs
new file mode 100644
index 000000000..e17755dd6
--- /dev/null
+++ b/crossbeam-channel/tests/wrappers/normal.rs
@@ -0,0 +1,60 @@
+//! Native interface with the default behavior.
+//!
+//! This wrapper does not try to provoke or prevent any optimizations.
+
+use std::ops::Deref;
+use std::time::{Duration, Instant};
+
+use channel;
+
+pub struct Sender<T>(pub channel::Sender<T>);
+
+pub struct Receiver<T>(pub channel::Receiver<T>);
+
+impl<T> Clone for Sender<T> {
+    fn clone(&self) -> Sender<T> {
+        Sender(self.0.clone())
+    }
+}
+
+impl<T> Clone for Receiver<T> {
+    fn clone(&self) -> Receiver<T> {
+        Receiver(self.0.clone())
+    }
+}
+
+impl<T> Deref for Receiver<T> {
+    type Target = channel::Receiver<T>;
+
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+
+impl<T> Deref for Sender<T> {
+    type Target = channel::Sender<T>;
+
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+
+pub fn bounded<T>(cap: usize) -> (Sender<T>, Receiver<T>) {
+    let (s, r) = channel::bounded(cap);
+    (Sender(s), Receiver(r))
+}
+
+pub fn unbounded<T>() -> (Sender<T>, Receiver<T>) {
+    let (s, r) = channel::unbounded();
+    (Sender(s), Receiver(r))
+}
+
+pub fn after(dur: Duration) -> Receiver<Instant> {
+    let r = channel::after(dur);
+    Receiver(r)
+}
+
+pub fn tick(dur: Duration) -> Receiver<Instant> {
+    let r = channel::tick(dur);
+    Receiver(r)
+}
diff --git a/crossbeam-channel/tests/wrappers/select.rs b/crossbeam-channel/tests/wrappers/select.rs
new file mode 100644
index 000000000..4e0a0c2d6
--- /dev/null
+++ b/crossbeam-channel/tests/wrappers/select.rs
@@ -0,0 +1,84 @@
+//! Channels with send and receive operations implemented using `select!`.
+//!
+//! Such `select!` invocations will often try to optimize the macro invocations by converting them
+//! into direct calls to `recv()` or `send()`.
+
+use std::ops::Deref;
+use std::time::{Duration, Instant};
+
+use channel;
+
+pub struct Sender<T>(pub channel::Sender<T>);
+
+pub struct Receiver<T>(pub channel::Receiver<T>);
+
+impl<T> Clone for Sender<T> {
+    fn clone(&self) -> Sender<T> {
+        Sender(self.0.clone())
+    }
+}
+
+impl<T> Clone for Receiver<T> {
+    fn clone(&self) -> Receiver<T> {
+        Receiver(self.0.clone())
+    }
+}
+
+impl<T> Deref for Receiver<T> {
+    type Target = channel::Receiver<T>;
+
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+
+impl<T> Deref for Sender<T> {
+    type Target = channel::Sender<T>;
+
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+
+impl<T> Sender<T> {
+    pub fn send(&self, msg: T) {
+        select! {
+            send(self.0, msg) => {}
+        }
+    }
+}
+
+impl<T> Receiver<T> {
+    pub fn try_recv(&self) -> Option<T> {
+        select! {
+            recv(self.0, msg) => msg,
+            default => None,
+        }
+    }
+
+    pub fn recv(&self) -> Option<T> {
+        select! {
+            recv(self.0, msg) => msg,
+        }
+    }
+}
+
+pub fn bounded<T>(cap: usize) -> (Sender<T>, Receiver<T>) {
+    let (s, r) = channel::bounded(cap);
+    (Sender(s), Receiver(r))
+}
+
+pub fn unbounded<T>() -> (Sender<T>, Receiver<T>) {
+    let (s, r) = channel::unbounded();
+    (Sender(s), Receiver(r))
+}
+
+pub fn after(dur: Duration) -> Receiver<Instant> {
+    let r = channel::after(dur);
+    Receiver(r)
+}
+
+pub fn tick(dur: Duration) -> Receiver<Instant> {
+    let r = channel::tick(dur);
+    Receiver(r)
+}
diff --git a/crossbeam-channel/tests/wrappers/select_multi.rs b/crossbeam-channel/tests/wrappers/select_multi.rs
new file mode 100644
index 000000000..5492648e3
--- /dev/null
+++ b/crossbeam-channel/tests/wrappers/select_multi.rs
@@ -0,0 +1,87 @@
+//!
Channels with send and receive operations implemented using `select!` with duplicate cases. +//! +//! A `select!` with duplicate cases doesn't perform any optimizations that simply convert the +//! macro invocation into a direct call to `recv()` or `send()`. + +use std::ops::Deref; +use std::time::{Duration, Instant}; + +use channel; + +pub struct Sender(pub channel::Sender); + +pub struct Receiver(pub channel::Receiver); + +impl Clone for Sender { + fn clone(&self) -> Sender { + Sender(self.0.clone()) + } +} + +impl Clone for Receiver { + fn clone(&self) -> Receiver { + Receiver(self.0.clone()) + } +} + +impl Deref for Receiver { + type Target = channel::Receiver; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl Deref for Sender { + type Target = channel::Sender; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl Sender { + pub fn send(&self, msg: T) { + select! { + send(self.0, msg) => {} + send(self.0, msg) => {} + } + } +} + +impl Receiver { + pub fn try_recv(&self) -> Option { + select! { + recv(self.0, msg) => msg, + recv(self.0, msg) => msg, + default => None, + } + } + + pub fn recv(&self) -> Option { + select! { + recv(self.0, msg) => msg, + recv(self.0, msg) => msg, + } + } +} + +pub fn bounded(cap: usize) -> (Sender, Receiver) { + let (s, r) = channel::bounded(cap); + (Sender(s), Receiver(r)) +} + +pub fn unbounded() -> (Sender, Receiver) { + let (s, r) = channel::unbounded(); + (Sender(s), Receiver(r)) +} + +pub fn after(dur: Duration) -> Receiver { + let r = channel::after(dur); + Receiver(r) +} + +pub fn tick(dur: Duration) -> Receiver { + let r = channel::tick(dur); + Receiver(r) +} diff --git a/crossbeam-channel/tests/wrappers/select_spin.rs b/crossbeam-channel/tests/wrappers/select_spin.rs new file mode 100644 index 000000000..35da8cc92 --- /dev/null +++ b/crossbeam-channel/tests/wrappers/select_spin.rs @@ -0,0 +1,105 @@ +//! Channels with send and receive operations implemented using strictly non-blocking `select!`. +//! +//! Such `select!` invocations will often try to optimize the macro invocations by converting them +//! into special non-blocking method calls like `try_recv()`. + +use std::ops::Deref; +use std::thread; +use std::time::{Duration, Instant}; + +use channel; + +pub struct Sender(pub channel::Sender); + +pub struct Receiver(pub channel::Receiver); + +impl Clone for Sender { + fn clone(&self) -> Sender { + Sender(self.0.clone()) + } +} + +impl Clone for Receiver { + fn clone(&self) -> Receiver { + Receiver(self.0.clone()) + } +} + +impl Deref for Receiver { + type Target = channel::Receiver; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl Deref for Sender { + type Target = channel::Sender; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl Sender { + pub fn send(&self, msg: T) { + if self.0.capacity() == Some(0) { + // Zero-capacity channel are an exception - they need truly blocking operations. + select! { + send(self.0, msg) => {} + } + } else { + loop { + select! { + send(self.0, msg) => break, + default => thread::yield_now(), + } + } + } + } +} + +impl Receiver { + pub fn try_recv(&self) -> Option { + select! { + recv(self.0, msg) => msg, + default => None, + } + } + + pub fn recv(&self) -> Option { + if self.0.capacity() == Some(0) { + // Zero-capacity channel are an exception - they need truly blocking operations. + select! { + recv(self.0, msg) => msg, + } + } else { + loop { + select! 
{ + recv(self.0, msg) => break msg, + default => thread::yield_now(), + } + } + } + } +} + +pub fn bounded(cap: usize) -> (Sender, Receiver) { + let (s, r) = channel::bounded(cap); + (Sender(s), Receiver(r)) +} + +pub fn unbounded() -> (Sender, Receiver) { + let (s, r) = channel::unbounded(); + (Sender(s), Receiver(r)) +} + +pub fn after(dur: Duration) -> Receiver { + let r = channel::after(dur); + Receiver(r) +} + +pub fn tick(dur: Duration) -> Receiver { + let r = channel::tick(dur); + Receiver(r) +} diff --git a/crossbeam-channel/tests/zero.rs b/crossbeam-channel/tests/zero.rs new file mode 100644 index 000000000..50558f1e0 --- /dev/null +++ b/crossbeam-channel/tests/zero.rs @@ -0,0 +1,495 @@ +//! Tests for the zero channel flavor. + +extern crate crossbeam; +#[macro_use] +extern crate crossbeam_channel as channel; +extern crate rand; + +mod wrappers; + +macro_rules! tests { + ($channel:path) => { + use std::sync::atomic::AtomicUsize; + use std::sync::atomic::Ordering; + use std::thread; + use std::time::Duration; + + use $channel as channel; + use crossbeam; + use rand::{thread_rng, Rng}; + + fn ms(ms: u64) -> Duration { + Duration::from_millis(ms) + } + + #[test] + fn smoke() { + let (s, r) = channel::bounded(0); + select! { + send(s, 7) => panic!(), + default => {} + } + assert_eq!(r.try_recv(), None); + + assert_eq!(s.capacity(), Some(0)); + assert_eq!(r.capacity(), Some(0)); + } + + #[test] + fn capacity() { + let (s, r) = channel::bounded::<()>(0); + assert_eq!(s.capacity(), Some(0)); + assert_eq!(r.capacity(), Some(0)); + } + + #[test] + fn len_empty_full() { + let (s, r) = channel::bounded(0); + + assert_eq!(s.len(), 0); + assert_eq!(s.is_empty(), true); + assert_eq!(s.is_full(), true); + assert_eq!(r.len(), 0); + assert_eq!(r.is_empty(), true); + assert_eq!(r.is_full(), true); + + crossbeam::scope(|scope| { + scope.spawn(|| s.send(0)); + scope.spawn(|| r.recv().unwrap()); + }); + + assert_eq!(s.len(), 0); + assert_eq!(s.is_empty(), true); + assert_eq!(s.is_full(), true); + assert_eq!(r.len(), 0); + assert_eq!(r.is_empty(), true); + assert_eq!(r.is_full(), true); + } + + #[test] + fn recv() { + let (s, r) = channel::bounded(0); + + crossbeam::scope(|scope| { + scope.spawn(move || { + assert_eq!(r.recv(), Some(7)); + thread::sleep(ms(1000)); + assert_eq!(r.recv(), Some(8)); + thread::sleep(ms(1000)); + assert_eq!(r.recv(), Some(9)); + assert_eq!(r.recv(), None); + }); + scope.spawn(move || { + thread::sleep(ms(1500)); + s.send(7); + s.send(8); + s.send(9); + }); + }); + } + + #[test] + fn recv_timeout() { + let (s, r) = channel::bounded::(0); + + crossbeam::scope(|scope| { + scope.spawn(move || { + select! { + recv(r) => panic!(), + recv(channel::after(ms(1000))) => {} + } + select! { + recv(r, v) => assert_eq!(v, Some(7)), + recv(channel::after(ms(1000))) => panic!(), + } + select! 
{ + recv(r, v) => assert_eq!(v, None), + recv(channel::after(ms(1000))) => panic!(), + } + }); + scope.spawn(move || { + thread::sleep(ms(1500)); + s.send(7); + }); + }); + } + + #[test] + fn try_recv() { + let (s, r) = channel::bounded(0); + + crossbeam::scope(|scope| { + scope.spawn(move || { + assert_eq!(r.try_recv(), None); + thread::sleep(ms(1500)); + assert_eq!(r.try_recv(), Some(7)); + thread::sleep(ms(500)); + assert_eq!(r.try_recv(), None); + }); + scope.spawn(move || { + thread::sleep(ms(1000)); + s.send(7); + }); + }); + } + + #[test] + fn send() { + let (s, r) = channel::bounded(0); + + crossbeam::scope(|scope| { + scope.spawn(move || { + s.send(7); + thread::sleep(ms(1000)); + s.send(8); + thread::sleep(ms(1000)); + s.send(9); + }); + scope.spawn(move || { + thread::sleep(ms(1500)); + assert_eq!(r.recv(), Some(7)); + assert_eq!(r.recv(), Some(8)); + assert_eq!(r.recv(), Some(9)); + }); + }); + } + + #[test] + fn send_timeout() { + let (s, r) = channel::bounded(0); + + crossbeam::scope(|scope| { + scope.spawn(move || { + select! { + send(s, 7) => panic!(), + recv(channel::after(ms(1000))) => {} + } + select! { + send(s, 8) => {} + recv(channel::after(ms(1000))) => panic!(), + } + select! { + send(s, 9) => panic!(), + recv(channel::after(ms(1000))) => {} + } + }); + scope.spawn(move || { + thread::sleep(ms(1500)); + assert_eq!(r.recv(), Some(8)); + }); + }); + } + + #[test] + fn try_send() { + let (s, r) = channel::bounded(0); + + crossbeam::scope(|scope| { + scope.spawn(move || { + select! { + send(s, 7) => panic!(), + default => {} + } + thread::sleep(ms(1500)); + select! { + send(s, 8) => {} + default => panic!(), + } + thread::sleep(ms(500)); + select! { + send(s, 9) => panic!(), + default => {} + } + }); + scope.spawn(move || { + thread::sleep(ms(1000)); + assert_eq!(r.recv(), Some(8)); + }); + }); + } + + #[test] + fn len() { + const COUNT: usize = 25_000; + + let (s, r) = channel::bounded(0); + + assert_eq!(s.len(), 0); + assert_eq!(r.len(), 0); + + crossbeam::scope(|scope| { + scope.spawn(|| { + for i in 0..COUNT { + assert_eq!(r.recv(), Some(i)); + assert_eq!(r.len(), 0); + } + }); + + scope.spawn(|| { + for i in 0..COUNT { + s.send(i); + assert_eq!(s.len(), 0); + } + }); + }); + + assert_eq!(s.len(), 0); + assert_eq!(r.len(), 0); + } + + #[test] + fn close_wakes_receiver() { + let (s, r) = channel::bounded::<()>(0); + + crossbeam::scope(|scope| { + scope.spawn(move || { + assert_eq!(r.recv(), None); + }); + scope.spawn(move || { + thread::sleep(ms(1000)); + drop(s); + }); + }); + } + + #[test] + fn spsc() { + const COUNT: usize = 100_000; + + let (s, r) = channel::bounded(0); + + crossbeam::scope(|scope| { + scope.spawn(move || { + for i in 0..COUNT { + assert_eq!(r.recv(), Some(i)); + } + assert_eq!(r.recv(), None); + }); + scope.spawn(move || { + for i in 0..COUNT { + s.send(i); + } + }); + }); + } + + #[test] + fn mpmc() { + const COUNT: usize = 25_000; + const THREADS: usize = 4; + + let (s, r) = channel::bounded::(0); + let v = (0..COUNT).map(|_| AtomicUsize::new(0)).collect::>(); + + crossbeam::scope(|scope| { + for _ in 0..THREADS { + scope.spawn(|| { + for _ in 0..COUNT { + let n = r.recv().unwrap(); + v[n].fetch_add(1, Ordering::SeqCst); + } + }); + } + for _ in 0..THREADS { + scope.spawn(|| { + for i in 0..COUNT { + s.send(i); + } + }); + } + }); + + for c in v { + assert_eq!(c.load(Ordering::SeqCst), THREADS); + } + } + + #[test] + fn stress_timeout_two_threads() { + const COUNT: usize = 100; + + let (s, r) = channel::bounded(0); + + crossbeam::scope(|scope| 
{ + scope.spawn(|| { + for i in 0..COUNT { + if i % 2 == 0 { + thread::sleep(ms(50)); + } + loop { + select! { + send(s, i) => break, + recv(channel::after(ms(10))) => {} + } + } + } + }); + + scope.spawn(|| { + for i in 0..COUNT { + if i % 2 == 0 { + thread::sleep(ms(50)); + } + loop { + select! { + recv(r, v) => { + assert_eq!(v, Some(i)); + break; + } + recv(channel::after(ms(10))) => {} + } + } + } + }); + }); + } + + #[test] + fn drops() { + static DROPS: AtomicUsize = AtomicUsize::new(0); + + #[derive(Debug, PartialEq)] + struct DropCounter; + + impl Drop for DropCounter { + fn drop(&mut self) { + DROPS.fetch_add(1, Ordering::SeqCst); + } + } + + let mut rng = thread_rng(); + + for _ in 0..100 { + let steps = rng.gen_range(0, 3_000); + + DROPS.store(0, Ordering::SeqCst); + let (s, r) = channel::bounded::(0); + + crossbeam::scope(|scope| { + scope.spawn(|| { + for _ in 0..steps { + r.recv().unwrap(); + } + }); + + scope.spawn(|| { + for _ in 0..steps { + s.send(DropCounter); + } + }); + }); + + assert_eq!(DROPS.load(Ordering::SeqCst), steps); + drop(s); + drop(r); + assert_eq!(DROPS.load(Ordering::SeqCst), steps); + } + } + + #[test] + fn fairness() { + const COUNT: usize = 10_000; + + let (s1, r1) = channel::bounded::<()>(0); + let (s2, r2) = channel::bounded::<()>(0); + + crossbeam::scope(|scope| { + scope.spawn(|| { + let mut hits = [0usize; 2]; + for _ in 0..COUNT { + select! { + recv(r1) => hits[0] += 1, + recv(r2) => hits[1] += 1, + } + } + assert!(hits.iter().all(|x| *x >= COUNT / hits.len() / 2)); + }); + + let mut hits = [0usize; 2]; + for _ in 0..COUNT { + select! { + send(s1, ()) => hits[0] += 1, + send(s2, ()) => hits[1] += 1, + } + } + assert!(hits.iter().all(|x| *x >= COUNT / hits.len() / 2)); + }); + } + + #[test] + fn fairness_duplicates() { + const COUNT: usize = 10_000; + + let (s, r) = channel::bounded::<()>(0); + + crossbeam::scope(|scope| { + scope.spawn(|| { + let mut hits = [0usize; 5]; + for _ in 0..COUNT { + select! { + recv(r) => hits[0] += 1, + recv(r) => hits[1] += 1, + recv(r) => hits[2] += 1, + recv(r) => hits[3] += 1, + recv(r) => hits[4] += 1, + } + } + assert!(hits.iter().all(|x| *x >= COUNT / hits.len() / 2)); + }); + + let mut hits = [0usize; 5]; + for _ in 0..COUNT { + select! { + send(s, ()) => hits[0] += 1, + send(s, ()) => hits[1] += 1, + send(s, ()) => hits[2] += 1, + send(s, ()) => hits[3] += 1, + send(s, ()) => hits[4] += 1, + } + } + assert!(hits.iter().all(|x| *x >= COUNT / hits.len() / 2)); + }); + } + + #[test] + fn recv_in_send() { + let (s, r) = channel::bounded(0); + + crossbeam::scope(|scope| { + scope.spawn(|| { + thread::sleep(ms(100)); + r.recv() + }); + + scope.spawn(|| { + thread::sleep(ms(500)); + s.send(()); + }); + + select! 
{ + send(s, r.recv().unwrap()) => {} + } + }); + } + }; +} + +mod normal { + tests!(wrappers::normal); +} + +mod cloned { + tests!(wrappers::cloned); +} + +mod select { + tests!(wrappers::select); +} + +mod select_spin { + tests!(wrappers::select_spin); +} + +mod select_multi { + tests!(wrappers::select_multi); +} diff --git a/crossbeam-epoch/.cargo-checksum.json b/crossbeam-epoch/.cargo-checksum.json new file mode 100644 index 000000000..512536c68 --- /dev/null +++ b/crossbeam-epoch/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{},"package":"9c90f1474584f38e270b5b613e898c8c328aa4f3dea85e0a27ac2e642f009416"} \ No newline at end of file diff --git a/crossbeam-epoch/CHANGELOG.md b/crossbeam-epoch/CHANGELOG.md new file mode 100644 index 000000000..ad14cc9fb --- /dev/null +++ b/crossbeam-epoch/CHANGELOG.md @@ -0,0 +1,100 @@ +# Changelog +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) +and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). + +## [Unreleased] + +## [0.6.0] - 2018-09-11 +### Changed +- `defer` now requires `F: Send + 'static`. +- Bump the minimum Rust version to 1.26. +- Pinning while TLS is tearing down does not fail anymore. +- Rename `Handle` to `LocalHandle`. + +### Added +- `defer_unchecked` and `defer_destroy`. + +### Removed +- Remove `Clone` impl for `LocalHandle`. + +## [0.5.2] - 2018-08-02 +### Changed +- Update `crossbeam-utils` to `0.5`. + +## [0.5.1] - 2018-07-20 +### Changed +- Fix compatibility with the latest Rust nightly. + +## [0.5.0] - 2018-06-12 +### Changed +- Update `crossbeam-utils` to `0.4`. +- Specify the minimum Rust version to `1.25.0`. + +## [0.4.3] - 2018-06-12 +### Changed +- Downgrade `crossbeam-utils` to `0.3` because it was a breaking change. + +## [0.4.2] - 2018-06-12 +### Added +- Expose the `Pointer` trait. +- Warn missing docs and missing debug impls. + +## Changed +- Update `crossbeam-utils` to `0.4`. + +## [0.4.1] - 2018-03-20 +### Added +- Add `Debug` impls for `Collector`, `Handle`, and `Guard`. +- Add `load_consume` to `Atomic`. + +### Changed +- Rename `Collector::handle` to `Collector::register`. + +### Fixed +- Remove the `Send` implementation for `Handle` (this was a bug). Only + `Collector`s can be shared among multiple threads, while `Handle`s and + `Guard`s must stay within the thread in which they were created. + +## [0.4.0] - 2018-02-10 +### Changed +- Update dependencies. + +### Removed +- Remove support for Rust 1.13. + +## [0.3.0] - 2018-02-10 +### Added +- Add support for Rust 1.13. + +### Changed +- Improve documentation for CAS. + +## [0.2.0] - 2017-11-29 +### Added +- Add method `Owned::into_box`. + +### Changed +- Fix a use-after-free bug in `Local::finalize`. +- Fix an ordering bug in `Global::push_bag`. +- Fix a bug in calculating distance between epochs. + +### Removed +- Remove `impl Into> for Owned`. + +## 0.1.0 - 2017-11-26 +### Added +- First version of the new epoch-based GC. 
+ +[Unreleased]: https://github.com/crossbeam-rs/crossbeam-epoch/compare/v0.6.0...HEAD +[0.6.0]: https://github.com/crossbeam-rs/crossbeam-epoch/compare/v0.5.2...v0.6.0 +[0.5.2]: https://github.com/crossbeam-rs/crossbeam-epoch/compare/v0.5.1...v0.5.2 +[0.5.1]: https://github.com/crossbeam-rs/crossbeam-epoch/compare/v0.5.0...v0.5.1 +[0.5.0]: https://github.com/crossbeam-rs/crossbeam-epoch/compare/v0.4.3...v0.5.0 +[0.4.3]: https://github.com/crossbeam-rs/crossbeam-epoch/compare/v0.4.2...v0.4.3 +[0.4.2]: https://github.com/crossbeam-rs/crossbeam-epoch/compare/v0.4.1...v0.4.2 +[0.4.1]: https://github.com/crossbeam-rs/crossbeam-epoch/compare/v0.4.0...v0.4.1 +[0.4.0]: https://github.com/crossbeam-rs/crossbeam-epoch/compare/v0.3.0...v0.4.0 +[0.3.0]: https://github.com/crossbeam-rs/crossbeam-epoch/compare/v0.2.0...v0.3.0 +[0.2.0]: https://github.com/crossbeam-rs/crossbeam-epoch/compare/v0.1.0...v0.2.0 diff --git a/crossbeam-epoch/Cargo.toml b/crossbeam-epoch/Cargo.toml new file mode 100644 index 000000000..5b3d1d334 --- /dev/null +++ b/crossbeam-epoch/Cargo.toml @@ -0,0 +1,53 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "crossbeam-epoch" +version = "0.6.0" +authors = ["The Crossbeam Project Developers"] +description = "Epoch-based garbage collection" +homepage = "https://github.com/crossbeam-rs/crossbeam-epoch" +documentation = "https://docs.rs/crossbeam-epoch" +readme = "README.md" +keywords = ["lock-free", "rcu", "atomic", "garbage"] +categories = ["concurrency", "memory-management"] +license = "MIT/Apache-2.0" +repository = "https://github.com/crossbeam-rs/crossbeam-epoch" +[dependencies.arrayvec] +version = "0.4" +default-features = false + +[dependencies.cfg-if] +version = "0.1" + +[dependencies.crossbeam-utils] +version = "0.5" +default-features = false + +[dependencies.lazy_static] +version = "1" +optional = true + +[dependencies.memoffset] +version = "0.2" + +[dependencies.scopeguard] +version = "0.3" +default-features = false +[dev-dependencies.rand] +version = "0.5" + +[features] +default = ["use_std"] +nightly = ["arrayvec/use_union"] +sanitize = [] +use_std = ["lazy_static", "crossbeam-utils/use_std"] diff --git a/crossbeam-epoch/LICENSE-APACHE b/crossbeam-epoch/LICENSE-APACHE new file mode 100644 index 000000000..16fe87b06 --- /dev/null +++ b/crossbeam-epoch/LICENSE-APACHE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
diff --git a/crossbeam-epoch/LICENSE-MIT b/crossbeam-epoch/LICENSE-MIT new file mode 100644 index 000000000..25597d583 --- /dev/null +++ b/crossbeam-epoch/LICENSE-MIT @@ -0,0 +1,25 @@ +Copyright (c) 2010 The Rust Project Developers + +Permission is hereby granted, free of charge, to any +person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without +limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software +is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions +of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/crossbeam-epoch/README.md b/crossbeam-epoch/README.md new file mode 100644 index 000000000..e611bd204 --- /dev/null +++ b/crossbeam-epoch/README.md @@ -0,0 +1,35 @@ +# Epoch-based garbage collection + +[![Build Status](https://travis-ci.org/crossbeam-rs/crossbeam-epoch.svg?branch=master)](https://travis-ci.org/crossbeam-rs/crossbeam-epoch) +[![License](https://img.shields.io/badge/license-MIT%2FApache--2.0-blue.svg)](https://github.com/crossbeam-rs/crossbeam-epoch) +[![Cargo](https://img.shields.io/crates/v/crossbeam-epoch.svg)](https://crates.io/crates/crossbeam-epoch) +[![Documentation](https://docs.rs/crossbeam-epoch/badge.svg)](https://docs.rs/crossbeam-epoch) + +This crate provides epoch-based garbage collection for use in concurrent data structures. + +If a thread removes a node from a concurrent data structure, other threads +may still have pointers to that node, so it cannot be immediately destructed. +Epoch GC allows deferring destruction until it becomes safe to do so. + +## Usage + +Add this to your `Cargo.toml`: + +```toml +[dependencies] +crossbeam-epoch = "0.6" +``` + +Next, add this to your crate: + +```rust +extern crate crossbeam_epoch as epoch; +``` + +The minimum required Rust version is 1.26. + +## License + +Licensed under the terms of MIT license and the Apache License (Version 2.0). + +See [LICENSE-MIT](LICENSE-MIT) and [LICENSE-APACHE](LICENSE-APACHE) for details. 
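As a hedged illustration of the deferred-destruction pattern the README above describes (this sketch is editorial and not part of the vendored sources), the snippet below pins the current thread, swaps a new value into an `Atomic`, and hands the old allocation to the collector with `defer_destroy`. The calls used (`pin`, `swap`, `defer_destroy`, `into_owned`, `unprotected`) all appear in the crate sources imported by this patch; the variable names and the single-threaded teardown at the end are illustrative assumptions.

```rust
extern crate crossbeam_epoch as epoch;

use epoch::{Atomic, Owned};
use std::sync::atomic::Ordering::SeqCst;

fn main() {
    // Shared location protected by the epoch GC.
    let a = Atomic::new(1234);

    {
        // Pin the current thread; the guard keeps the epoch from advancing past us.
        let guard = &epoch::pin();

        // Publish a new value and take back a `Shared` pointer to the old one.
        let old = a.swap(Owned::new(5678), SeqCst, guard);

        // Other pinned threads may still hold `old`, so its destruction is
        // deferred until the collector can prove nobody is using it.
        unsafe {
            guard.defer_destroy(old);
        }
    }

    // Single-threaded teardown (illustrative): reclaim the value still stored in `a`.
    unsafe {
        drop(a.load(SeqCst, epoch::unprotected()).into_owned());
    }
}
```

The old allocation is freed only after every thread that was pinned at the time of the `swap` has unpinned, which is what lets a removal proceed without locks or reference counting.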
diff --git a/crossbeam-epoch/benches/defer.rs b/crossbeam-epoch/benches/defer.rs new file mode 100644 index 000000000..8690b3f57 --- /dev/null +++ b/crossbeam-epoch/benches/defer.rs @@ -0,0 +1,73 @@ +#![feature(test)] + +extern crate crossbeam_epoch as epoch; +extern crate crossbeam_utils as utils; +extern crate test; + +use epoch::Owned; +use test::Bencher; +use utils::thread::scope; + +#[bench] +fn single_alloc_defer_free(b: &mut Bencher) { + b.iter(|| { + let guard = &epoch::pin(); + let p = Owned::new(1).into_shared(guard); + unsafe { + guard.defer(move || p.into_owned()); + } + }); +} + +#[bench] +fn single_defer(b: &mut Bencher) { + b.iter(|| { + let guard = &epoch::pin(); + unsafe { + guard.defer(move || ()); + } + }); +} + +#[bench] +fn multi_alloc_defer_free(b: &mut Bencher) { + const THREADS: usize = 16; + const STEPS: usize = 10_000; + + b.iter(|| { + scope(|s| { + for _ in 0..THREADS { + s.spawn(|| { + for _ in 0..STEPS { + let guard = &epoch::pin(); + let p = Owned::new(1).into_shared(guard); + unsafe { + guard.defer(move || p.into_owned()); + } + } + }); + } + }); + }); +} + +#[bench] +fn multi_defer(b: &mut Bencher) { + const THREADS: usize = 16; + const STEPS: usize = 10_000; + + b.iter(|| { + scope(|s| { + for _ in 0..THREADS { + s.spawn(|| { + for _ in 0..STEPS { + let guard = &epoch::pin(); + unsafe { + guard.defer(move || ()); + } + } + }); + } + }); + }); +} diff --git a/crossbeam-epoch/benches/flush.rs b/crossbeam-epoch/benches/flush.rs new file mode 100644 index 000000000..14268fa18 --- /dev/null +++ b/crossbeam-epoch/benches/flush.rs @@ -0,0 +1,51 @@ +#![feature(test)] + +extern crate crossbeam_epoch as epoch; +extern crate crossbeam_utils as utils; +extern crate test; + +use std::sync::Barrier; + +use test::Bencher; +use utils::thread::scope; + +#[bench] +fn single_flush(b: &mut Bencher) { + const THREADS: usize = 16; + + let start = Barrier::new(THREADS + 1); + let end = Barrier::new(THREADS + 1); + + scope(|s| { + for _ in 0..THREADS { + s.spawn(|| { + epoch::pin(); + start.wait(); + end.wait(); + }); + } + + start.wait(); + b.iter(|| epoch::pin().flush()); + end.wait(); + }); +} + +#[bench] +fn multi_flush(b: &mut Bencher) { + const THREADS: usize = 16; + const STEPS: usize = 10_000; + + b.iter(|| { + scope(|s| { + for _ in 0..THREADS { + s.spawn(|| { + for _ in 0..STEPS { + let guard = &epoch::pin(); + guard.flush(); + } + }); + } + }); + }); +} diff --git a/crossbeam-epoch/benches/pin.rs b/crossbeam-epoch/benches/pin.rs new file mode 100644 index 000000000..1f23e840d --- /dev/null +++ b/crossbeam-epoch/benches/pin.rs @@ -0,0 +1,31 @@ +#![feature(test)] + +extern crate crossbeam_epoch as epoch; +extern crate crossbeam_utils as utils; +extern crate test; + +use test::Bencher; +use utils::thread::scope; + +#[bench] +fn single_pin(b: &mut Bencher) { + b.iter(|| epoch::pin()); +} + +#[bench] +fn multi_pin(b: &mut Bencher) { + const THREADS: usize = 16; + const STEPS: usize = 100_000; + + b.iter(|| { + scope(|s| { + for _ in 0..THREADS { + s.spawn(|| { + for _ in 0..STEPS { + epoch::pin(); + } + }); + } + }); + }); +} diff --git a/crossbeam-epoch/examples/sanitize.rs b/crossbeam-epoch/examples/sanitize.rs new file mode 100644 index 000000000..d0afa8a88 --- /dev/null +++ b/crossbeam-epoch/examples/sanitize.rs @@ -0,0 +1,70 @@ +extern crate crossbeam_epoch as epoch; +extern crate rand; + +use std::sync::Arc; +use std::sync::atomic::AtomicUsize; +use std::sync::atomic::Ordering::{AcqRel, Acquire, Relaxed}; +use std::time::{Duration, Instant}; +use std::thread; + +use 
epoch::{Atomic, Collector, LocalHandle, Owned, Shared}; +use rand::Rng; + +fn worker(a: Arc>, handle: LocalHandle) -> usize { + let mut rng = rand::thread_rng(); + let mut sum = 0; + + if rng.gen() { + thread::sleep(Duration::from_millis(1)); + } + let timeout = Duration::from_millis(rng.gen_range(0, 10)); + let now = Instant::now(); + + while now.elapsed() < timeout { + for _ in 0..100 { + let guard = &handle.pin(); + guard.flush(); + + let val = if rng.gen() { + let p = a.swap(Owned::new(AtomicUsize::new(sum)), AcqRel, guard); + unsafe { + guard.defer_destroy(p); + guard.flush(); + p.deref().load(Relaxed) + } + } else { + let p = a.load(Acquire, guard); + unsafe { + p.deref().fetch_add(sum, Relaxed) + } + }; + + sum = sum.wrapping_add(val); + } + } + + sum +} + +fn main() { + for _ in 0..100 { + let collector = Collector::new(); + let a = Arc::new(Atomic::new(AtomicUsize::new(777))); + + let threads = (0..16) + .map(|_| { + let a = a.clone(); + let c = collector.clone(); + thread::spawn(move || worker(a, c.register())) + }) + .collect::>(); + + for t in threads { + t.join().unwrap(); + } + + unsafe { + a.swap(Shared::null(), AcqRel, epoch::unprotected()).into_owned(); + } + } +} diff --git a/crossbeam-epoch/src/atomic.rs b/crossbeam-epoch/src/atomic.rs new file mode 100644 index 000000000..eb971ce5e --- /dev/null +++ b/crossbeam-epoch/src/atomic.rs @@ -0,0 +1,1124 @@ +use core::borrow::{Borrow, BorrowMut}; +use core::cmp; +use core::fmt; +use core::marker::PhantomData; +use core::mem; +use core::ptr; +use core::ops::{Deref, DerefMut}; +use core::sync::atomic::{AtomicUsize, ATOMIC_USIZE_INIT}; +use core::sync::atomic::Ordering; +use alloc::boxed::Box; + +use guard::Guard; +use crossbeam_utils::AtomicConsume; + +/// Given ordering for the success case in a compare-exchange operation, returns the strongest +/// appropriate ordering for the failure case. +#[inline] +fn strongest_failure_ordering(ord: Ordering) -> Ordering { + use self::Ordering::*; + match ord { + Relaxed | Release => Relaxed, + Acquire | AcqRel => Acquire, + _ => SeqCst, + } +} + +/// The error returned on failed compare-and-set operation. +pub struct CompareAndSetError<'g, T: 'g, P: Pointer> { + /// The value in the atomic pointer at the time of the failed operation. + pub current: Shared<'g, T>, + + /// The new value, which the operation failed to store. + pub new: P, +} + +impl<'g, T: 'g, P: Pointer + fmt::Debug> fmt::Debug for CompareAndSetError<'g, T, P> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_struct("CompareAndSetError") + .field("current", &self.current) + .field("new", &self.new) + .finish() + } +} + +/// Memory orderings for compare-and-set operations. +/// +/// A compare-and-set operation can have different memory orderings depending on whether it +/// succeeds or fails. This trait generalizes different ways of specifying memory orderings. +/// +/// The two ways of specifying orderings for compare-and-set are: +/// +/// 1. Just one `Ordering` for the success case. In case of failure, the strongest appropriate +/// ordering is chosen. +/// 2. A pair of `Ordering`s. The first one is for the success case, while the second one is +/// for the failure case. +pub trait CompareAndSetOrdering { + /// The ordering of the operation when it succeeds. + fn success(&self) -> Ordering; + + /// The ordering of the operation when it fails. + /// + /// The failure ordering can't be `Release` or `AcqRel` and must be equivalent or weaker than + /// the success ordering. 
+ fn failure(&self) -> Ordering; +} + +impl CompareAndSetOrdering for Ordering { + #[inline] + fn success(&self) -> Ordering { + *self + } + + #[inline] + fn failure(&self) -> Ordering { + strongest_failure_ordering(*self) + } +} + +impl CompareAndSetOrdering for (Ordering, Ordering) { + #[inline] + fn success(&self) -> Ordering { + self.0 + } + + #[inline] + fn failure(&self) -> Ordering { + self.1 + } +} + +/// Panics if the pointer is not properly unaligned. +#[inline] +fn ensure_aligned(raw: *const T) { + assert_eq!(raw as usize & low_bits::(), 0, "unaligned pointer"); +} + +/// Returns a bitmask containing the unused least significant bits of an aligned pointer to `T`. +#[inline] +fn low_bits() -> usize { + (1 << mem::align_of::().trailing_zeros()) - 1 +} + +/// Given a tagged pointer `data`, returns the same pointer, but tagged with `tag`. +/// +/// `tag` is truncated to fit into the unused bits of the pointer to `T`. +#[inline] +fn data_with_tag(data: usize, tag: usize) -> usize { + (data & !low_bits::()) | (tag & low_bits::()) +} + +/// Decomposes a tagged pointer `data` into the pointer and the tag. +#[inline] +fn decompose_data(data: usize) -> (*mut T, usize) { + let raw = (data & !low_bits::()) as *mut T; + let tag = data & low_bits::(); + (raw, tag) +} + +/// An atomic pointer that can be safely shared between threads. +/// +/// The pointer must be properly aligned. Since it is aligned, a tag can be stored into the unused +/// least significant bits of the address. More precisely, a tag should be less than `(1 << +/// mem::align_of::().trailing_zeros())`. +/// +/// Any method that loads the pointer must be passed a reference to a [`Guard`]. +/// +/// [`Guard`]: struct.Guard.html +pub struct Atomic { + data: AtomicUsize, + _marker: PhantomData<*mut T>, +} + +unsafe impl Send for Atomic {} +unsafe impl Sync for Atomic {} + +impl Atomic { + /// Returns a new atomic pointer pointing to the tagged pointer `data`. + fn from_usize(data: usize) -> Self { + Self { + data: AtomicUsize::new(data), + _marker: PhantomData, + } + } + + /// Returns a new null atomic pointer. + /// + /// # Examples + /// + /// ``` + /// use crossbeam_epoch::Atomic; + /// + /// let a = Atomic::::null(); + /// ``` + #[cfg(not(feature = "nightly"))] + pub fn null() -> Atomic { + Self { + data: ATOMIC_USIZE_INIT, + _marker: PhantomData, + } + } + + /// Returns a new null atomic pointer. + /// + /// # Examples + /// + /// ``` + /// use crossbeam_epoch::Atomic; + /// + /// let a = Atomic::::null(); + /// ``` + #[cfg(feature = "nightly")] + pub const fn null() -> Atomic { + Self { + data: ATOMIC_USIZE_INIT, + _marker: PhantomData, + } + } + + /// Allocates `value` on the heap and returns a new atomic pointer pointing to it. + /// + /// # Examples + /// + /// ``` + /// use crossbeam_epoch::Atomic; + /// + /// let a = Atomic::new(1234); + /// ``` + pub fn new(value: T) -> Atomic { + Self::from(Owned::new(value)) + } + + /// Loads a `Shared` from the atomic pointer. + /// + /// This method takes an [`Ordering`] argument which describes the memory ordering of this + /// operation. 
+ /// + /// [`Ordering`]: https://doc.rust-lang.org/std/sync/atomic/enum.Ordering.html + /// + /// # Examples + /// + /// ``` + /// use crossbeam_epoch::{self as epoch, Atomic}; + /// use std::sync::atomic::Ordering::SeqCst; + /// + /// let a = Atomic::new(1234); + /// let guard = &epoch::pin(); + /// let p = a.load(SeqCst, guard); + /// ``` + pub fn load<'g>(&self, ord: Ordering, _: &'g Guard) -> Shared<'g, T> { + unsafe { Shared::from_usize(self.data.load(ord)) } + } + + /// Loads a `Shared` from the atomic pointer using a "consume" memory ordering. + /// + /// This is similar to the "acquire" ordering, except that an ordering is + /// only guaranteed with operations that "depend on" the result of the load. + /// However consume loads are usually much faster than acquire loads on + /// architectures with a weak memory model since they don't require memory + /// fence instructions. + /// + /// The exact definition of "depend on" is a bit vague, but it works as you + /// would expect in practice since a lot of software, especially the Linux + /// kernel, rely on this behavior. + /// + /// # Examples + /// + /// ``` + /// use crossbeam_epoch::{self as epoch, Atomic}; + /// + /// let a = Atomic::new(1234); + /// let guard = &epoch::pin(); + /// let p = a.load_consume(guard); + /// ``` + pub fn load_consume<'g>(&self, _: &'g Guard) -> Shared<'g, T> { + unsafe { Shared::from_usize(self.data.load_consume()) } + } + + /// Stores a `Shared` or `Owned` pointer into the atomic pointer. + /// + /// This method takes an [`Ordering`] argument which describes the memory ordering of this + /// operation. + /// + /// [`Ordering`]: https://doc.rust-lang.org/std/sync/atomic/enum.Ordering.html + /// + /// # Examples + /// + /// ``` + /// use crossbeam_epoch::{self as epoch, Atomic, Owned, Shared}; + /// use std::sync::atomic::Ordering::SeqCst; + /// + /// let a = Atomic::new(1234); + /// a.store(Shared::null(), SeqCst); + /// a.store(Owned::new(1234), SeqCst); + /// ``` + pub fn store<'g, P: Pointer>(&self, new: P, ord: Ordering) { + self.data.store(new.into_usize(), ord); + } + + /// Stores a `Shared` or `Owned` pointer into the atomic pointer, returning the previous + /// `Shared`. + /// + /// This method takes an [`Ordering`] argument which describes the memory ordering of this + /// operation. + /// + /// [`Ordering`]: https://doc.rust-lang.org/std/sync/atomic/enum.Ordering.html + /// + /// # Examples + /// + /// ``` + /// use crossbeam_epoch::{self as epoch, Atomic, Owned, Shared}; + /// use std::sync::atomic::Ordering::SeqCst; + /// + /// let a = Atomic::new(1234); + /// let guard = &epoch::pin(); + /// let p = a.swap(Shared::null(), SeqCst, guard); + /// ``` + pub fn swap<'g, P: Pointer>(&self, new: P, ord: Ordering, _: &'g Guard) -> Shared<'g, T> { + unsafe { Shared::from_usize(self.data.swap(new.into_usize(), ord)) } + } + + /// Stores the pointer `new` (either `Shared` or `Owned`) into the atomic pointer if the current + /// value is the same as `current`. The tag is also taken into account, so two pointers to the + /// same object, but with different tags, will not be considered equal. + /// + /// The return value is a result indicating whether the new pointer was written. On success the + /// pointer that was written is returned. On failure the actual current value and `new` are + /// returned. + /// + /// This method takes a [`CompareAndSetOrdering`] argument which describes the memory + /// ordering of this operation. 
+ /// + /// [`CompareAndSetOrdering`]: trait.CompareAndSetOrdering.html + /// + /// # Examples + /// + /// ``` + /// use crossbeam_epoch::{self as epoch, Atomic, Owned, Shared}; + /// use std::sync::atomic::Ordering::SeqCst; + /// + /// let a = Atomic::new(1234); + /// + /// let guard = &epoch::pin(); + /// let mut curr = a.load(SeqCst, guard); + /// let res1 = a.compare_and_set(curr, Shared::null(), SeqCst, guard); + /// let res2 = a.compare_and_set(curr, Owned::new(5678), SeqCst, guard); + /// ``` + pub fn compare_and_set<'g, O, P>( + &self, + current: Shared, + new: P, + ord: O, + _: &'g Guard, + ) -> Result, CompareAndSetError<'g, T, P>> + where + O: CompareAndSetOrdering, + P: Pointer, + { + let new = new.into_usize(); + self.data + .compare_exchange(current.into_usize(), new, ord.success(), ord.failure()) + .map(|_| unsafe { Shared::from_usize(new) }) + .map_err(|current| unsafe { + CompareAndSetError { + current: Shared::from_usize(current), + new: P::from_usize(new), + } + }) + } + + /// Stores the pointer `new` (either `Shared` or `Owned`) into the atomic pointer if the current + /// value is the same as `current`. The tag is also taken into account, so two pointers to the + /// same object, but with different tags, will not be considered equal. + /// + /// Unlike [`compare_and_set`], this method is allowed to spuriously fail even when comparison + /// succeeds, which can result in more efficient code on some platforms. The return value is a + /// result indicating whether the new pointer was written. On success the pointer that was + /// written is returned. On failure the actual current value and `new` are returned. + /// + /// This method takes a [`CompareAndSetOrdering`] argument which describes the memory + /// ordering of this operation. + /// + /// [`compare_and_set`]: struct.Atomic.html#method.compare_and_set + /// [`CompareAndSetOrdering`]: trait.CompareAndSetOrdering.html + /// + /// # Examples + /// + /// ``` + /// use crossbeam_epoch::{self as epoch, Atomic, Owned, Shared}; + /// use std::sync::atomic::Ordering::SeqCst; + /// + /// let a = Atomic::new(1234); + /// let guard = &epoch::pin(); + /// + /// let mut new = Owned::new(5678); + /// let mut ptr = a.load(SeqCst, guard); + /// loop { + /// match a.compare_and_set_weak(ptr, new, SeqCst, guard) { + /// Ok(p) => { + /// ptr = p; + /// break; + /// } + /// Err(err) => { + /// ptr = err.current; + /// new = err.new; + /// } + /// } + /// } + /// + /// let mut curr = a.load(SeqCst, guard); + /// loop { + /// match a.compare_and_set_weak(curr, Shared::null(), SeqCst, guard) { + /// Ok(_) => break, + /// Err(err) => curr = err.current, + /// } + /// } + /// ``` + pub fn compare_and_set_weak<'g, O, P>( + &self, + current: Shared, + new: P, + ord: O, + _: &'g Guard, + ) -> Result, CompareAndSetError<'g, T, P>> + where + O: CompareAndSetOrdering, + P: Pointer, + { + let new = new.into_usize(); + self.data + .compare_exchange_weak(current.into_usize(), new, ord.success(), ord.failure()) + .map(|_| unsafe { Shared::from_usize(new) }) + .map_err(|current| unsafe { + CompareAndSetError { + current: Shared::from_usize(current), + new: P::from_usize(new), + } + }) + } + + /// Bitwise "and" with the current tag. + /// + /// Performs a bitwise "and" operation on the current tag and the argument `val`, and sets the + /// new tag to the result. Returns the previous pointer. + /// + /// This method takes an [`Ordering`] argument which describes the memory ordering of this + /// operation. 
+ /// + /// [`Ordering`]: https://doc.rust-lang.org/std/sync/atomic/enum.Ordering.html + /// + /// # Examples + /// + /// ``` + /// use crossbeam_epoch::{self as epoch, Atomic, Shared}; + /// use std::sync::atomic::Ordering::SeqCst; + /// + /// let a = Atomic::::from(Shared::null().with_tag(3)); + /// let guard = &epoch::pin(); + /// assert_eq!(a.fetch_and(2, SeqCst, guard).tag(), 3); + /// assert_eq!(a.load(SeqCst, guard).tag(), 2); + /// ``` + pub fn fetch_and<'g>(&self, val: usize, ord: Ordering, _: &'g Guard) -> Shared<'g, T> { + unsafe { Shared::from_usize(self.data.fetch_and(val | !low_bits::(), ord)) } + } + + /// Bitwise "or" with the current tag. + /// + /// Performs a bitwise "or" operation on the current tag and the argument `val`, and sets the + /// new tag to the result. Returns the previous pointer. + /// + /// This method takes an [`Ordering`] argument which describes the memory ordering of this + /// operation. + /// + /// [`Ordering`]: https://doc.rust-lang.org/std/sync/atomic/enum.Ordering.html + /// + /// # Examples + /// + /// ``` + /// use crossbeam_epoch::{self as epoch, Atomic, Shared}; + /// use std::sync::atomic::Ordering::SeqCst; + /// + /// let a = Atomic::::from(Shared::null().with_tag(1)); + /// let guard = &epoch::pin(); + /// assert_eq!(a.fetch_or(2, SeqCst, guard).tag(), 1); + /// assert_eq!(a.load(SeqCst, guard).tag(), 3); + /// ``` + pub fn fetch_or<'g>(&self, val: usize, ord: Ordering, _: &'g Guard) -> Shared<'g, T> { + unsafe { Shared::from_usize(self.data.fetch_or(val & low_bits::(), ord)) } + } + + /// Bitwise "xor" with the current tag. + /// + /// Performs a bitwise "xor" operation on the current tag and the argument `val`, and sets the + /// new tag to the result. Returns the previous pointer. + /// + /// This method takes an [`Ordering`] argument which describes the memory ordering of this + /// operation. + /// + /// [`Ordering`]: https://doc.rust-lang.org/std/sync/atomic/enum.Ordering.html + /// + /// # Examples + /// + /// ``` + /// use crossbeam_epoch::{self as epoch, Atomic, Shared}; + /// use std::sync::atomic::Ordering::SeqCst; + /// + /// let a = Atomic::::from(Shared::null().with_tag(1)); + /// let guard = &epoch::pin(); + /// assert_eq!(a.fetch_xor(3, SeqCst, guard).tag(), 1); + /// assert_eq!(a.load(SeqCst, guard).tag(), 2); + /// ``` + pub fn fetch_xor<'g>(&self, val: usize, ord: Ordering, _: &'g Guard) -> Shared<'g, T> { + unsafe { Shared::from_usize(self.data.fetch_xor(val & low_bits::(), ord)) } + } +} + +impl fmt::Debug for Atomic { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let data = self.data.load(Ordering::SeqCst); + let (raw, tag) = decompose_data::(data); + + f.debug_struct("Atomic") + .field("raw", &raw) + .field("tag", &tag) + .finish() + } +} + +impl fmt::Pointer for Atomic { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let data = self.data.load(Ordering::SeqCst); + let (raw, _) = decompose_data::(data); + fmt::Pointer::fmt(&raw, f) + } +} + +impl Clone for Atomic { + /// Returns a copy of the atomic value. + /// + /// Note that a `Relaxed` load is used here. If you need synchronization, use it with other + /// atomics or fences. + fn clone(&self) -> Self { + let data = self.data.load(Ordering::Relaxed); + Atomic::from_usize(data) + } +} + +impl Default for Atomic { + fn default() -> Self { + Atomic::null() + } +} + +impl From> for Atomic { + /// Returns a new atomic pointer pointing to `owned`. 
+ /// + /// # Examples + /// + /// ``` + /// use crossbeam_epoch::{Atomic, Owned}; + /// + /// let a = Atomic::::from(Owned::new(1234)); + /// ``` + fn from(owned: Owned) -> Self { + let data = owned.data; + mem::forget(owned); + Self::from_usize(data) + } +} + +impl From> for Atomic { + fn from(b: Box) -> Self { + Self::from(Owned::from(b)) + } +} + +impl From for Atomic { + fn from(t: T) -> Self { + Self::new(t) + } +} + +impl<'g, T> From> for Atomic { + /// Returns a new atomic pointer pointing to `ptr`. + /// + /// # Examples + /// + /// ``` + /// use crossbeam_epoch::{Atomic, Shared}; + /// + /// let a = Atomic::::from(Shared::::null()); + /// ``` + fn from(ptr: Shared<'g, T>) -> Self { + Self::from_usize(ptr.data) + } +} + +impl From<*const T> for Atomic { + /// Returns a new atomic pointer pointing to `raw`. + /// + /// # Examples + /// + /// ``` + /// use std::ptr; + /// use crossbeam_epoch::Atomic; + /// + /// let a = Atomic::::from(ptr::null::()); + /// ``` + fn from(raw: *const T) -> Self { + Self::from_usize(raw as usize) + } +} + +/// A trait for either `Owned` or `Shared` pointers. +pub trait Pointer { + /// Returns the machine representation of the pointer. + fn into_usize(self) -> usize; + + /// Returns a new pointer pointing to the tagged pointer `data`. + unsafe fn from_usize(data: usize) -> Self; +} + +/// An owned heap-allocated object. +/// +/// This type is very similar to `Box`. +/// +/// The pointer must be properly aligned. Since it is aligned, a tag can be stored into the unused +/// least significant bits of the address. +pub struct Owned { + data: usize, + _marker: PhantomData>, +} + +impl Pointer for Owned { + #[inline] + fn into_usize(self) -> usize { + let data = self.data; + mem::forget(self); + data + } + + /// Returns a new pointer pointing to the tagged pointer `data`. + /// + /// # Panics + /// + /// Panics if the data is zero in debug mode. + #[inline] + unsafe fn from_usize(data: usize) -> Self { + debug_assert!(data != 0, "converting zero into `Owned`"); + Owned { + data: data, + _marker: PhantomData, + } + } +} + +impl Owned { + /// Allocates `value` on the heap and returns a new owned pointer pointing to it. + /// + /// # Examples + /// + /// ``` + /// use crossbeam_epoch::Owned; + /// + /// let o = Owned::new(1234); + /// ``` + pub fn new(value: T) -> Owned { + Self::from(Box::new(value)) + } + + /// Returns a new owned pointer pointing to `raw`. + /// + /// This function is unsafe because improper use may lead to memory problems. Argument `raw` + /// must be a valid pointer. Also, a double-free may occur if the function is called twice on + /// the same raw pointer. + /// + /// # Panics + /// + /// Panics if `raw` is not properly aligned. + /// + /// # Examples + /// + /// ``` + /// use crossbeam_epoch::Owned; + /// + /// let o = unsafe { Owned::from_raw(Box::into_raw(Box::new(1234))) }; + /// ``` + pub unsafe fn from_raw(raw: *mut T) -> Owned { + ensure_aligned(raw); + Self::from_usize(raw as usize) + } + + /// Converts the owned pointer into a [`Shared`]. + /// + /// # Examples + /// + /// ``` + /// use crossbeam_epoch::{self as epoch, Owned}; + /// + /// let o = Owned::new(1234); + /// let guard = &epoch::pin(); + /// let p = o.into_shared(guard); + /// ``` + /// + /// [`Shared`]: struct.Shared.html + pub fn into_shared<'g>(self, _: &'g Guard) -> Shared<'g, T> { + unsafe { Shared::from_usize(self.into_usize()) } + } + + /// Converts the owned pointer into a `Box`. 
+ /// + /// # Examples + /// + /// ``` + /// use crossbeam_epoch::{self as epoch, Owned}; + /// + /// let o = Owned::new(1234); + /// let b: Box = o.into_box(); + /// assert_eq!(*b, 1234); + /// ``` + pub fn into_box(self) -> Box { + let (raw, _) = decompose_data::(self.data); + mem::forget(self); + unsafe { Box::from_raw(raw) } + } + + /// Returns the tag stored within the pointer. + /// + /// # Examples + /// + /// ``` + /// use crossbeam_epoch::Owned; + /// + /// assert_eq!(Owned::new(1234).tag(), 0); + /// ``` + pub fn tag(&self) -> usize { + let (_, tag) = decompose_data::(self.data); + tag + } + + /// Returns the same pointer, but tagged with `tag`. `tag` is truncated to be fit into the + /// unused bits of the pointer to `T`. + /// + /// # Examples + /// + /// ``` + /// use crossbeam_epoch::Owned; + /// + /// let o = Owned::new(0u64); + /// assert_eq!(o.tag(), 0); + /// let o = o.with_tag(2); + /// assert_eq!(o.tag(), 2); + /// ``` + pub fn with_tag(self, tag: usize) -> Owned { + let data = self.into_usize(); + unsafe { Self::from_usize(data_with_tag::(data, tag)) } + } +} + +impl Drop for Owned { + fn drop(&mut self) { + let (raw, _) = decompose_data::(self.data); + unsafe { + drop(Box::from_raw(raw)); + } + } +} + +impl fmt::Debug for Owned { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let (raw, tag) = decompose_data::(self.data); + + f.debug_struct("Owned") + .field("raw", &raw) + .field("tag", &tag) + .finish() + } +} + +impl Clone for Owned { + fn clone(&self) -> Self { + Owned::new((**self).clone()).with_tag(self.tag()) + } +} + +impl Deref for Owned { + type Target = T; + + fn deref(&self) -> &T { + let (raw, _) = decompose_data::(self.data); + unsafe { &*raw } + } +} + +impl DerefMut for Owned { + fn deref_mut(&mut self) -> &mut T { + let (raw, _) = decompose_data::(self.data); + unsafe { &mut *raw } + } +} + +impl From for Owned { + fn from(t: T) -> Self { + Owned::new(t) + } +} + +impl From> for Owned { + /// Returns a new owned pointer pointing to `b`. + /// + /// # Panics + /// + /// Panics if the pointer (the `Box`) is not properly aligned. + /// + /// # Examples + /// + /// ``` + /// use crossbeam_epoch::Owned; + /// + /// let o = unsafe { Owned::from_raw(Box::into_raw(Box::new(1234))) }; + /// ``` + fn from(b: Box) -> Self { + unsafe { Self::from_raw(Box::into_raw(b)) } + } +} + +impl Borrow for Owned { + fn borrow(&self) -> &T { + &**self + } +} + +impl BorrowMut for Owned { + fn borrow_mut(&mut self) -> &mut T { + &mut **self + } +} + +impl AsRef for Owned { + fn as_ref(&self) -> &T { + &**self + } +} + +impl AsMut for Owned { + fn as_mut(&mut self) -> &mut T { + &mut **self + } +} + +/// A pointer to an object protected by the epoch GC. +/// +/// The pointer is valid for use only during the lifetime `'g`. +/// +/// The pointer must be properly aligned. Since it is aligned, a tag can be stored into the unused +/// least significant bits of the address. +pub struct Shared<'g, T: 'g> { + data: usize, + _marker: PhantomData<(&'g (), *const T)>, +} + +impl<'g, T> Clone for Shared<'g, T> { + fn clone(&self) -> Self { + Shared { + data: self.data, + _marker: PhantomData, + } + } +} + +impl<'g, T> Copy for Shared<'g, T> {} + +impl<'g, T> Pointer for Shared<'g, T> { + #[inline] + fn into_usize(self) -> usize { + self.data + } + + #[inline] + unsafe fn from_usize(data: usize) -> Self { + Shared { + data: data, + _marker: PhantomData, + } + } +} + +impl<'g, T> Shared<'g, T> { + /// Returns a new null pointer. 
+ /// + /// # Examples + /// + /// ``` + /// use crossbeam_epoch::Shared; + /// + /// let p = Shared::::null(); + /// assert!(p.is_null()); + /// ``` + pub fn null() -> Shared<'g, T> { + Shared { + data: 0, + _marker: PhantomData, + } + } + + /// Returns `true` if the pointer is null. + /// + /// # Examples + /// + /// ``` + /// use crossbeam_epoch::{self as epoch, Atomic, Owned}; + /// use std::sync::atomic::Ordering::SeqCst; + /// + /// let a = Atomic::null(); + /// let guard = &epoch::pin(); + /// assert!(a.load(SeqCst, guard).is_null()); + /// a.store(Owned::new(1234), SeqCst); + /// assert!(!a.load(SeqCst, guard).is_null()); + /// ``` + pub fn is_null(&self) -> bool { + self.as_raw().is_null() + } + + /// Converts the pointer to a raw pointer (without the tag). + /// + /// # Examples + /// + /// ``` + /// use crossbeam_epoch::{self as epoch, Atomic, Owned}; + /// use std::sync::atomic::Ordering::SeqCst; + /// + /// let o = Owned::new(1234); + /// let raw = &*o as *const _; + /// let a = Atomic::from(o); + /// + /// let guard = &epoch::pin(); + /// let p = a.load(SeqCst, guard); + /// assert_eq!(p.as_raw(), raw); + /// ``` + pub fn as_raw(&self) -> *const T { + let (raw, _) = decompose_data::(self.data); + raw + } + + /// Dereferences the pointer. + /// + /// Returns a reference to the pointee that is valid during the lifetime `'g`. + /// + /// # Safety + /// + /// Dereferencing a pointer is unsafe because it could be pointing to invalid memory. + /// + /// Another concern is the possiblity of data races due to lack of proper synchronization. + /// For example, consider the following scenario: + /// + /// 1. A thread creates a new object: `a.store(Owned::new(10), Relaxed)` + /// 2. Another thread reads it: `*a.load(Relaxed, guard).as_ref().unwrap()` + /// + /// The problem is that relaxed orderings don't synchronize initialization of the object with + /// the read from the second thread. This is a data race. A possible solution would be to use + /// `Release` and `Acquire` orderings. + /// + /// # Examples + /// + /// ``` + /// use crossbeam_epoch::{self as epoch, Atomic}; + /// use std::sync::atomic::Ordering::SeqCst; + /// + /// let a = Atomic::new(1234); + /// let guard = &epoch::pin(); + /// let p = a.load(SeqCst, guard); + /// unsafe { + /// assert_eq!(p.deref(), &1234); + /// } + /// ``` + pub unsafe fn deref(&self) -> &'g T { + &*self.as_raw() + } + + /// Converts the pointer to a reference. + /// + /// Returns `None` if the pointer is null, or else a reference to the object wrapped in `Some`. + /// + /// # Safety + /// + /// Dereferencing a pointer is unsafe because it could be pointing to invalid memory. + /// + /// Another concern is the possiblity of data races due to lack of proper synchronization. + /// For example, consider the following scenario: + /// + /// 1. A thread creates a new object: `a.store(Owned::new(10), Relaxed)` + /// 2. Another thread reads it: `*a.load(Relaxed, guard).as_ref().unwrap()` + /// + /// The problem is that relaxed orderings don't synchronize initialization of the object with + /// the read from the second thread. This is a data race. A possible solution would be to use + /// `Release` and `Acquire` orderings. 
+ /// + /// # Examples + /// + /// ``` + /// use crossbeam_epoch::{self as epoch, Atomic}; + /// use std::sync::atomic::Ordering::SeqCst; + /// + /// let a = Atomic::new(1234); + /// let guard = &epoch::pin(); + /// let p = a.load(SeqCst, guard); + /// unsafe { + /// assert_eq!(p.as_ref(), Some(&1234)); + /// } + /// ``` + pub unsafe fn as_ref(&self) -> Option<&'g T> { + self.as_raw().as_ref() + } + + /// Takes ownership of the pointee. + /// + /// # Panics + /// + /// Panics if this pointer is null, but only in debug mode. + /// + /// # Safety + /// + /// This method may be called only if the pointer is valid and nobody else is holding a + /// reference to the same object. + /// + /// # Examples + /// + /// ``` + /// use crossbeam_epoch::{self as epoch, Atomic}; + /// use std::sync::atomic::Ordering::SeqCst; + /// + /// let a = Atomic::new(1234); + /// unsafe { + /// let guard = &epoch::unprotected(); + /// let p = a.load(SeqCst, guard); + /// drop(p.into_owned()); + /// } + /// ``` + pub unsafe fn into_owned(self) -> Owned { + debug_assert!( + self.as_raw() != ptr::null(), + "converting a null `Shared` into `Owned`" + ); + Owned::from_usize(self.data) + } + + /// Returns the tag stored within the pointer. + /// + /// # Examples + /// + /// ``` + /// use crossbeam_epoch::{self as epoch, Atomic, Owned}; + /// use std::sync::atomic::Ordering::SeqCst; + /// + /// let a = Atomic::::from(Owned::new(0u64).with_tag(2)); + /// let guard = &epoch::pin(); + /// let p = a.load(SeqCst, guard); + /// assert_eq!(p.tag(), 2); + /// ``` + pub fn tag(&self) -> usize { + let (_, tag) = decompose_data::(self.data); + tag + } + + /// Returns the same pointer, but tagged with `tag`. `tag` is truncated to be fit into the + /// unused bits of the pointer to `T`. + /// + /// # Examples + /// + /// ``` + /// use crossbeam_epoch::{self as epoch, Atomic}; + /// use std::sync::atomic::Ordering::SeqCst; + /// + /// let a = Atomic::new(0u64); + /// let guard = &epoch::pin(); + /// let p1 = a.load(SeqCst, guard); + /// let p2 = p1.with_tag(2); + /// + /// assert_eq!(p1.tag(), 0); + /// assert_eq!(p2.tag(), 2); + /// assert_eq!(p1.as_raw(), p2.as_raw()); + /// ``` + pub fn with_tag(&self, tag: usize) -> Shared<'g, T> { + unsafe { Self::from_usize(data_with_tag::(self.data, tag)) } + } +} + +impl<'g, T> From<*const T> for Shared<'g, T> { + /// Returns a new pointer pointing to `raw`. + /// + /// # Panics + /// + /// Panics if `raw` is not properly aligned. 
+ /// + /// # Examples + /// + /// ``` + /// use crossbeam_epoch::Shared; + /// + /// let p = unsafe { Shared::from(Box::into_raw(Box::new(1234)) as *const _) }; + /// assert!(!p.is_null()); + /// ``` + fn from(raw: *const T) -> Self { + ensure_aligned(raw); + unsafe { Self::from_usize(raw as usize) } + } +} + +impl<'g, T> PartialEq> for Shared<'g, T> { + fn eq(&self, other: &Self) -> bool { + self.data == other.data + } +} + +impl<'g, T> Eq for Shared<'g, T> {} + +impl<'g, T> PartialOrd> for Shared<'g, T> { + fn partial_cmp(&self, other: &Self) -> Option { + self.data.partial_cmp(&other.data) + } +} + +impl<'g, T> Ord for Shared<'g, T> { + fn cmp(&self, other: &Self) -> cmp::Ordering { + self.data.cmp(&other.data) + } +} + +impl<'g, T> fmt::Debug for Shared<'g, T> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let (raw, tag) = decompose_data::(self.data); + + f.debug_struct("Shared") + .field("raw", &raw) + .field("tag", &tag) + .finish() + } +} + +impl<'g, T> fmt::Pointer for Shared<'g, T> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::Pointer::fmt(&self.as_raw(), f) + } +} + +impl<'g, T> Default for Shared<'g, T> { + fn default() -> Self { + Shared::null() + } +} + +#[cfg(test)] +mod tests { + use super::Shared; + + #[test] + fn valid_tag_i8() { + Shared::::null().with_tag(0); + } + + #[test] + fn valid_tag_i64() { + Shared::::null().with_tag(7); + } +} diff --git a/crossbeam-epoch/src/collector.rs b/crossbeam-epoch/src/collector.rs new file mode 100644 index 000000000..dceacbff3 --- /dev/null +++ b/crossbeam-epoch/src/collector.rs @@ -0,0 +1,428 @@ +/// Epoch-based garbage collector. +/// +/// # Examples +/// +/// ``` +/// use crossbeam_epoch::Collector; +/// +/// let collector = Collector::new(); +/// +/// let handle = collector.register(); +/// drop(collector); // `handle` still works after dropping `collector` +/// +/// handle.pin().flush(); +/// ``` + +use alloc::sync::Arc; +use core::fmt; + +use internal::{Global, Local}; +use guard::Guard; + +/// An epoch-based garbage collector. +pub struct Collector { + pub(crate) global: Arc, +} + +unsafe impl Send for Collector {} +unsafe impl Sync for Collector {} + +impl Collector { + /// Creates a new collector. + pub fn new() -> Self { + Collector { global: Arc::new(Global::new()) } + } + + /// Registers a new handle for the collector. + pub fn register(&self) -> LocalHandle { + Local::register(self) + } +} + +impl Clone for Collector { + /// Creates another reference to the same garbage collector. + fn clone(&self) -> Self { + Collector { global: self.global.clone() } + } +} + +impl fmt::Debug for Collector { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_struct("Collector").finish() + } +} + +impl PartialEq for Collector { + /// Checks if both handles point to the same collector. + fn eq(&self, rhs: &Collector) -> bool { + Arc::ptr_eq(&self.global, &rhs.global) + } +} +impl Eq for Collector {} + +/// A handle to a garbage collector. +pub struct LocalHandle { + pub(crate) local: *const Local, +} + +impl LocalHandle { + /// Pins the handle. + #[inline] + pub fn pin(&self) -> Guard { + unsafe { (*self.local).pin() } + } + + /// Returns `true` if the handle is pinned. + #[inline] + pub fn is_pinned(&self) -> bool { + unsafe { (*self.local).is_pinned() } + } + + /// Returns the `Collector` associated with this handle. 
+ #[inline] + pub fn collector(&self) -> &Collector { + unsafe { (*self.local).collector() } + } +} + +impl Drop for LocalHandle { + #[inline] + fn drop(&mut self) { + unsafe { + Local::release_handle(&*self.local); + } + } +} + +impl fmt::Debug for LocalHandle { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_struct("LocalHandle").finish() + } +} + +#[cfg(test)] +mod tests { + use std::mem; + use std::sync::atomic::{AtomicUsize, ATOMIC_USIZE_INIT}; + use std::sync::atomic::Ordering; + + use crossbeam_utils::thread; + + use {Collector, Owned}; + + const NUM_THREADS: usize = 8; + + #[test] + fn pin_reentrant() { + let collector = Collector::new(); + let handle = collector.register(); + drop(collector); + + assert!(!handle.is_pinned()); + { + let _guard = &handle.pin(); + assert!(handle.is_pinned()); + { + let _guard = &handle.pin(); + assert!(handle.is_pinned()); + } + assert!(handle.is_pinned()); + } + assert!(!handle.is_pinned()); + } + + #[test] + fn flush_local_bag() { + let collector = Collector::new(); + let handle = collector.register(); + drop(collector); + + for _ in 0..100 { + let guard = &handle.pin(); + unsafe { + let a = Owned::new(7).into_shared(guard); + guard.defer_destroy(a); + + assert!(!(*(*guard.local).bag.get()).is_empty()); + + while !(*(*guard.local).bag.get()).is_empty() { + guard.flush(); + } + } + } + } + + #[test] + fn garbage_buffering() { + let collector = Collector::new(); + let handle = collector.register(); + drop(collector); + + let guard = &handle.pin(); + unsafe { + for _ in 0..10 { + let a = Owned::new(7).into_shared(guard); + guard.defer_destroy(a); + } + assert!(!(*(*guard.local).bag.get()).is_empty()); + } + } + + #[test] + fn pin_holds_advance() { + let collector = Collector::new(); + + thread::scope(|scope| { + for _ in 0..NUM_THREADS { + scope.spawn(|| { + let handle = collector.register(); + for _ in 0..500_000 { + let guard = &handle.pin(); + + let before = collector.global.epoch.load(Ordering::Relaxed); + collector.global.collect(guard); + let after = collector.global.epoch.load(Ordering::Relaxed); + + assert!(after.wrapping_sub(before) <= 2); + } + }); + } + }) + } + + #[test] + fn incremental() { + const COUNT: usize = 100_000; + static DESTROYS: AtomicUsize = ATOMIC_USIZE_INIT; + + let collector = Collector::new(); + let handle = collector.register(); + + unsafe { + let guard = &handle.pin(); + for _ in 0..COUNT { + let a = Owned::new(7i32).into_shared(guard); + guard.defer_unchecked(move || { + drop(a.into_owned()); + DESTROYS.fetch_add(1, Ordering::Relaxed); + }); + } + guard.flush(); + } + + let mut last = 0; + + while last < COUNT { + let curr = DESTROYS.load(Ordering::Relaxed); + assert!(curr - last <= 1024); + last = curr; + + let guard = &handle.pin(); + collector.global.collect(guard); + } + assert!(DESTROYS.load(Ordering::Relaxed) == 100_000); + } + + #[test] + fn buffering() { + const COUNT: usize = 10; + static DESTROYS: AtomicUsize = ATOMIC_USIZE_INIT; + + let collector = Collector::new(); + let handle = collector.register(); + + unsafe { + let guard = &handle.pin(); + for _ in 0..COUNT { + let a = Owned::new(7i32).into_shared(guard); + guard.defer_unchecked(move || { + drop(a.into_owned()); + DESTROYS.fetch_add(1, Ordering::Relaxed); + }); + } + } + + for _ in 0..100_000 { + collector.global.collect(&handle.pin()); + } + assert!(DESTROYS.load(Ordering::Relaxed) < COUNT); + + handle.pin().flush(); + + while DESTROYS.load(Ordering::Relaxed) < COUNT { + let guard = &handle.pin(); + collector.global.collect(guard); 
+ } + assert_eq!(DESTROYS.load(Ordering::Relaxed), COUNT); + } + + #[test] + fn count_drops() { + const COUNT: usize = 100_000; + static DROPS: AtomicUsize = ATOMIC_USIZE_INIT; + + struct Elem(i32); + + impl Drop for Elem { + fn drop(&mut self) { + DROPS.fetch_add(1, Ordering::Relaxed); + } + } + + let collector = Collector::new(); + let handle = collector.register(); + + unsafe { + let guard = &handle.pin(); + + for _ in 0..COUNT { + let a = Owned::new(Elem(7i32)).into_shared(guard); + guard.defer_destroy(a); + } + guard.flush(); + } + + while DROPS.load(Ordering::Relaxed) < COUNT { + let guard = &handle.pin(); + collector.global.collect(guard); + } + assert_eq!(DROPS.load(Ordering::Relaxed), COUNT); + } + + #[test] + fn count_destroy() { + const COUNT: usize = 100_000; + static DESTROYS: AtomicUsize = ATOMIC_USIZE_INIT; + + let collector = Collector::new(); + let handle = collector.register(); + + unsafe { + let guard = &handle.pin(); + + for _ in 0..COUNT { + let a = Owned::new(7i32).into_shared(guard); + guard.defer_unchecked(move || { + drop(a.into_owned()); + DESTROYS.fetch_add(1, Ordering::Relaxed); + }); + } + guard.flush(); + } + + while DESTROYS.load(Ordering::Relaxed) < COUNT { + let guard = &handle.pin(); + collector.global.collect(guard); + } + assert_eq!(DESTROYS.load(Ordering::Relaxed), COUNT); + } + + #[test] + fn drop_array() { + const COUNT: usize = 700; + static DROPS: AtomicUsize = ATOMIC_USIZE_INIT; + + struct Elem(i32); + + impl Drop for Elem { + fn drop(&mut self) { + DROPS.fetch_add(1, Ordering::Relaxed); + } + } + + let collector = Collector::new(); + let handle = collector.register(); + + let mut guard = handle.pin(); + + let mut v = Vec::with_capacity(COUNT); + for i in 0..COUNT { + v.push(Elem(i as i32)); + } + + { + let a = Owned::new(v).into_shared(&guard); + unsafe { guard.defer_destroy(a); } + guard.flush(); + } + + while DROPS.load(Ordering::Relaxed) < COUNT { + guard.repin(); + collector.global.collect(&guard); + } + assert_eq!(DROPS.load(Ordering::Relaxed), COUNT); + } + + #[test] + fn destroy_array() { + const COUNT: usize = 100_000; + static DESTROYS: AtomicUsize = ATOMIC_USIZE_INIT; + + let collector = Collector::new(); + let handle = collector.register(); + + unsafe { + let guard = &handle.pin(); + + let mut v = Vec::with_capacity(COUNT); + for i in 0..COUNT { + v.push(i as i32); + } + + let ptr = v.as_mut_ptr() as usize; + let len = v.len(); + guard.defer_unchecked(move || { + drop(Vec::from_raw_parts(ptr as *const u8 as *mut u8, len, len)); + DESTROYS.fetch_add(len, Ordering::Relaxed); + }); + guard.flush(); + + mem::forget(v); + } + + while DESTROYS.load(Ordering::Relaxed) < COUNT { + let guard = &handle.pin(); + collector.global.collect(guard); + } + assert_eq!(DESTROYS.load(Ordering::Relaxed), COUNT); + } + + #[test] + fn stress() { + const THREADS: usize = 8; + const COUNT: usize = 100_000; + static DROPS: AtomicUsize = ATOMIC_USIZE_INIT; + + struct Elem(i32); + + impl Drop for Elem { + fn drop(&mut self) { + DROPS.fetch_add(1, Ordering::Relaxed); + } + } + + let collector = Collector::new(); + + thread::scope(|scope| { + for _ in 0..THREADS { + scope.spawn(|| { + let handle = collector.register(); + for _ in 0..COUNT { + let guard = &handle.pin(); + unsafe { + let a = Owned::new(Elem(7i32)).into_shared(guard); + guard.defer_destroy(a); + } + } + }); + } + }); + + let handle = collector.register(); + while DROPS.load(Ordering::Relaxed) < COUNT * THREADS { + let guard = &handle.pin(); + collector.global.collect(guard); + } + 
assert_eq!(DROPS.load(Ordering::Relaxed), COUNT * THREADS); + } +} diff --git a/crossbeam-epoch/src/default.rs b/crossbeam-epoch/src/default.rs new file mode 100644 index 000000000..6ce053d59 --- /dev/null +++ b/crossbeam-epoch/src/default.rs @@ -0,0 +1,73 @@ +//! The default garbage collector. +//! +//! For each thread, a participant is lazily initialized on its first use, when the current thread +//! is registered in the default collector. If initialized, the thread's participant will get +//! destructed on thread exit, which in turn unregisters the thread. + +use collector::{Collector, LocalHandle}; +use guard::Guard; + +lazy_static! { + /// The global data for the default garbage collector. + static ref COLLECTOR: Collector = Collector::new(); +} + +thread_local! { + /// The per-thread participant for the default garbage collector. + static HANDLE: LocalHandle = COLLECTOR.register(); +} + +/// Pins the current thread. +#[inline] +pub fn pin() -> Guard { + with_handle(|handle| handle.pin()) +} + +/// Returns `true` if the current thread is pinned. +#[inline] +pub fn is_pinned() -> bool { + with_handle(|handle| handle.is_pinned()) +} + +/// Returns the default global collector. +pub fn default_collector() -> &'static Collector { + &COLLECTOR +} + +#[inline] +fn with_handle(mut f: F) -> R +where + F: FnMut(&LocalHandle) -> R, +{ + HANDLE.try_with(|h| f(h)).unwrap_or_else(|_| f(&COLLECTOR.register())) +} + +#[cfg(test)] +mod tests { + use crossbeam_utils::thread; + + #[test] + fn pin_while_exiting() { + struct Foo; + + impl Drop for Foo { + fn drop(&mut self) { + // Pin after `HANDLE` has been dropped. This must not panic. + super::pin(); + } + } + + thread_local! { + static FOO: Foo = Foo; + } + + thread::scope(|scope| { + scope.spawn(|| { + // Initialize `FOO` and then `HANDLE`. + FOO.with(|_| ()); + super::pin(); + // At thread exit, `HANDLE` gets dropped first and `FOO` second. + }); + }); + } +} diff --git a/crossbeam-epoch/src/deferred.rs b/crossbeam-epoch/src/deferred.rs new file mode 100644 index 000000000..3063f9829 --- /dev/null +++ b/crossbeam-epoch/src/deferred.rs @@ -0,0 +1,134 @@ +use core::fmt; +use core::marker::PhantomData; +use core::mem; +use core::ptr; +use alloc::boxed::Box; + +/// Number of words a piece of `Data` can hold. +/// +/// Three words should be enough for the majority of cases. For example, you can fit inside it the +/// function pointer together with a fat pointer representing an object that needs to be destroyed. +const DATA_WORDS: usize = 3; + +/// Some space to keep a `FnOnce()` object on the stack. +type Data = [usize; DATA_WORDS]; + +/// A `FnOnce()` that is stored inline if small, or otherwise boxed on the heap. +/// +/// This is a handy way of keeping an unsized `FnOnce()` within a sized structure. +pub struct Deferred { + call: unsafe fn(*mut u8), + data: Data, + _marker: PhantomData<*mut ()>, // !Send + !Sync +} + +impl fmt::Debug for Deferred { + fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { + write!(f, "Deferred {{ ... }}") + } +} + +impl Deferred { + /// Constructs a new `Deferred` from a `FnOnce()`. 
+ pub fn new(f: F) -> Self { + let size = mem::size_of::(); + let align = mem::align_of::(); + + unsafe { + if size <= mem::size_of::() && align <= mem::align_of::() { + let mut data: Data = mem::uninitialized(); + ptr::write(&mut data as *mut Data as *mut F, f); + + unsafe fn call(raw: *mut u8) { + let f: F = ptr::read(raw as *mut F); + f(); + } + + Deferred { + call: call::, + data, + _marker: PhantomData, + } + } else { + let b: Box = Box::new(f); + let mut data: Data = mem::uninitialized(); + ptr::write(&mut data as *mut Data as *mut Box, b); + + unsafe fn call(raw: *mut u8) { + let b: Box = ptr::read(raw as *mut Box); + (*b)(); + } + + Deferred { + call: call::, + data, + _marker: PhantomData, + } + } + } + } + + /// Calls the function. + #[inline] + pub fn call(mut self) { + let call = self.call; + unsafe { call(&mut self.data as *mut Data as *mut u8) }; + } +} + +#[cfg(test)] +mod tests { + use std::cell::Cell; + use super::Deferred; + + #[test] + fn on_stack() { + let fired = &Cell::new(false); + let a = [0usize; 1]; + + let d = Deferred::new(move || { + drop(a); + fired.set(true); + }); + + assert!(!fired.get()); + d.call(); + assert!(fired.get()); + } + + #[test] + fn on_heap() { + let fired = &Cell::new(false); + let a = [0usize; 10]; + + let d = Deferred::new(move || { + drop(a); + fired.set(true); + }); + + assert!(!fired.get()); + d.call(); + assert!(fired.get()); + } + + #[test] + fn string() { + let a = "hello".to_string(); + let d = Deferred::new(move || assert_eq!(a, "hello")); + d.call(); + } + + #[test] + fn boxed_slice_i32() { + let a: Box<[i32]> = vec![2, 3, 5, 7].into_boxed_slice(); + let d = Deferred::new(move || assert_eq!(*a, [2, 3, 5, 7])); + d.call(); + } + + #[test] + fn long_slice_usize() { + let a: [usize; 5] = [2, 3, 5, 7, 11]; + let d = Deferred::new(move || assert_eq!(a, [2, 3, 5, 7, 11])); + d.call(); + } +} diff --git a/crossbeam-epoch/src/epoch.rs b/crossbeam-epoch/src/epoch.rs new file mode 100644 index 000000000..51076bbaa --- /dev/null +++ b/crossbeam-epoch/src/epoch.rs @@ -0,0 +1,106 @@ +//! The global epoch +//! +//! The last bit in this number is unused and is always zero. Every so often the global epoch is +//! incremented, i.e. we say it "advances". A pinned participant may advance the global epoch only +//! if all currently pinned participants have been pinned in the current epoch. +//! +//! If an object became garbage in some epoch, then we can be sure that after two advancements no +//! participant will hold a reference to it. That is the crux of safe memory reclamation. + +use core::sync::atomic::{AtomicUsize, Ordering}; + +/// An epoch that can be marked as pinned or unpinned. +/// +/// Internally, the epoch is represented as an integer that wraps around at some unspecified point +/// and a flag that represents whether it is pinned or unpinned. +#[derive(Copy, Clone, Default, Debug, Eq, PartialEq)] +pub struct Epoch { + /// The least significant bit is set if pinned. The rest of the bits hold the epoch. + data: usize, +} + +impl Epoch { + /// Returns the starting epoch in unpinned state. + #[inline] + pub fn starting() -> Self { + Self::default() + } + + /// Returns the number of epochs `self` is ahead of `rhs`. + /// + /// Internally, epochs are represented as numbers in the range `(isize::MIN / 2) .. (isize::MAX + /// / 2)`, so the returned distance will be in the same interval. 
+ pub fn wrapping_sub(self, rhs: Self) -> isize { + // The result is the same with `(self.data & !1).wrapping_sub(rhs.data & !1) as isize >> 1`, + // because the possible difference of LSB in `(self.data & !1).wrapping_sub(rhs.data & !1)` + // will be ignored in the shift operation. + self.data.wrapping_sub(rhs.data & !1) as isize >> 1 + } + + /// Returns `true` if the epoch is marked as pinned. + #[inline] + pub fn is_pinned(self) -> bool { + (self.data & 1) == 1 + } + + /// Returns the same epoch, but marked as pinned. + #[inline] + pub fn pinned(self) -> Epoch { + Epoch { data: self.data | 1 } + } + + /// Returns the same epoch, but marked as unpinned. + #[inline] + pub fn unpinned(self) -> Epoch { + Epoch { data: self.data & !1 } + } + + /// Returns the successor epoch. + /// + /// The returned epoch will be marked as pinned only if the previous one was as well. + #[inline] + pub fn successor(self) -> Epoch { + Epoch { data: self.data.wrapping_add(2) } + } +} + +/// An atomic value that holds an `Epoch`. +#[derive(Default, Debug)] +pub struct AtomicEpoch { + /// Since `Epoch` is just a wrapper around `usize`, an `AtomicEpoch` is similarly represented + /// using an `AtomicUsize`. + data: AtomicUsize, +} + +impl AtomicEpoch { + /// Creates a new atomic epoch. + #[inline] + pub fn new(epoch: Epoch) -> Self { + let data = AtomicUsize::new(epoch.data); + AtomicEpoch { data } + } + + /// Loads a value from the atomic epoch. + #[inline] + pub fn load(&self, ord: Ordering) -> Epoch { + Epoch { data: self.data.load(ord) } + } + + /// Stores a value into the atomic epoch. + #[inline] + pub fn store(&self, epoch: Epoch, ord: Ordering) { + self.data.store(epoch.data, ord); + } + + /// Stores a value into the atomic epoch if the current value is the same as `current`. + /// + /// The return value is always the previous value. If it is equal to `current`, then the value + /// is updated. + /// + /// The `Ordering` argument describes the memory ordering of this operation. + #[inline] + pub fn compare_and_swap(&self, current: Epoch, new: Epoch, ord: Ordering) -> Epoch { + let data = self.data.compare_and_swap(current.data, new.data, ord); + Epoch { data } + } +} diff --git a/crossbeam-epoch/src/guard.rs b/crossbeam-epoch/src/guard.rs new file mode 100644 index 000000000..94ab5ea43 --- /dev/null +++ b/crossbeam-epoch/src/guard.rs @@ -0,0 +1,547 @@ +use core::fmt; +use core::ptr; +use core::mem; + +use atomic::Shared; +use collector::Collector; +use deferred::Deferred; +use internal::Local; + +/// A guard that keeps the current thread pinned. +/// +/// # Pinning +/// +/// The current thread is pinned by calling [`pin`], which returns a new guard: +/// +/// ``` +/// use crossbeam_epoch as epoch; +/// +/// // It is often convenient to prefix a call to `pin` with a `&` in order to create a reference. +/// // This is not really necessary, but makes passing references to the guard a bit easier. +/// let guard = &epoch::pin(); +/// ``` +/// +/// When a guard gets dropped, the current thread is automatically unpinned. +/// +/// # Pointers on the stack +/// +/// Having a guard allows us to create pointers on the stack to heap-allocated objects. +/// For example: +/// +/// ``` +/// use crossbeam_epoch::{self as epoch, Atomic, Owned}; +/// use std::sync::atomic::Ordering::SeqCst; +/// +/// // Create a heap-allocated number. +/// let a = Atomic::new(777); +/// +/// // Pin the current thread. +/// let guard = &epoch::pin(); +/// +/// // Load the heap-allocated object and create pointer `p` on the stack. 
+/// let p = a.load(SeqCst, guard); +/// +/// // Dereference the pointer and print the value: +/// if let Some(num) = unsafe { p.as_ref() } { +/// println!("The number is {}.", num); +/// } +/// ``` +/// +/// # Multiple guards +/// +/// Pinning is reentrant and it is perfectly legal to create multiple guards. In that case, the +/// thread will actually be pinned only when the first guard is created and unpinned when the last +/// one is dropped: +/// +/// ``` +/// use crossbeam_epoch as epoch; +/// +/// let guard1 = epoch::pin(); +/// let guard2 = epoch::pin(); +/// assert!(epoch::is_pinned()); +/// drop(guard1); +/// assert!(epoch::is_pinned()); +/// drop(guard2); +/// assert!(!epoch::is_pinned()); +/// ``` +/// +/// The same can be achieved by cloning guards: +/// +/// ``` +/// use crossbeam_epoch as epoch; +/// +/// let guard1 = epoch::pin(); +/// let guard2 = guard1.clone(); +/// ``` +/// +/// [`pin`]: fn.pin.html +pub struct Guard { + pub(crate) local: *const Local, +} + +impl Guard { + /// Stores a function so that it can be executed at some point after all currently pinned + /// threads get unpinned. + /// + /// This method first stores `f` into the thread-local (or handle-local) cache. If this cache + /// becomes full, some functions are moved into the global cache. At the same time, some + /// functions from both local and global caches may get executed in order to incrementally + /// clean up the caches as they fill up. + /// + /// There is no guarantee when exactly `f` will be executed. The only guarantee is that it + /// won't be executed until all currently pinned threads get unpinned. In theory, `f` might + /// never run, but the epoch-based garbage collection will make an effort to execute it + /// reasonably soon. + /// + /// If this method is called from an [`unprotected`] guard, the function will simply be + /// executed immediately. + /// + /// [`unprotected`]: fn.unprotected.html + pub fn defer(&self, f: F) + where + F: FnOnce() -> R, + F: Send + 'static, + { + unsafe { + self.defer_unchecked(f); + } + } + + /// Stores a function so that it can be executed at some point after all currently pinned + /// threads get unpinned. + /// + /// This method first stores `f` into the thread-local (or handle-local) cache. If this cache + /// becomes full, some functions are moved into the global cache. At the same time, some + /// functions from both local and global caches may get executed in order to incrementally + /// clean up the caches as they fill up. + /// + /// There is no guarantee when exactly `f` will be executed. The only guarantee is that it + /// won't be executed until all currently pinned threads get unpinned. In theory, `f` might + /// never run, but the epoch-based garbage collection will make an effort to execute it + /// reasonably soon. + /// + /// If this method is called from an [`unprotected`] guard, the function will simply be + /// executed immediately. + /// + /// # Safety + /// + /// The given function must not hold reference onto the stack. It is highly recommended that + /// the passed function is **always** marked with `move` in order to prevent accidental + /// borrows. + /// + /// ``` + /// use crossbeam_epoch as epoch; + /// + /// let guard = &epoch::pin(); + /// let message = "Hello!"; + /// unsafe { + /// // ALWAYS use `move` when sending a closure into `defer_unchecked`. 
+ /// guard.defer_unchecked(move || { + /// println!("{}", message); + /// }); + /// } + /// ``` + /// + /// Apart from that, keep in mind that another thread may execute `f`, so anything accessed by + /// the closure must be `Send`. + /// + /// We intentionally didn't require `F: Send`, because Rust's type systems usually cannot prove + /// `F: Send` for typical use cases. For example, consider the following code snippet, which + /// exemplifies the typical use case of deferring the deallocation of a shared reference: + /// + /// ```ignore + /// let shared = Owned::new(7i32).into_shared(guard); + /// guard.defer_unchecked(move || shared.into_owned()); // `Shared` is not `Send`! + /// ``` + /// + /// While `Shared` is not `Send`, it's safe for another thread to call the deferred function, + /// because it's called only after the grace period and `shared` is no longer shared with other + /// threads. But we don't expect type systems to prove this. + /// + /// # Examples + /// + /// When a heap-allocated object in a data structure becomes unreachable, it has to be + /// deallocated. However, the current thread and other threads may be still holding references + /// on the stack to that same object. Therefore it cannot be deallocated before those references + /// get dropped. This method can defer deallocation until all those threads get unpinned and + /// consequently drop all their references on the stack. + /// + /// ```rust + /// use crossbeam_epoch::{self as epoch, Atomic, Owned}; + /// use std::sync::atomic::Ordering::SeqCst; + /// + /// let a = Atomic::new("foo"); + /// + /// // Now suppose that `a` is shared among multiple threads and concurrently + /// // accessed and modified... + /// + /// // Pin the current thread. + /// let guard = &epoch::pin(); + /// + /// // Steal the object currently stored in `a` and swap it with another one. + /// let p = a.swap(Owned::new("bar").into_shared(guard), SeqCst, guard); + /// + /// if !p.is_null() { + /// // The object `p` is pointing to is now unreachable. + /// // Defer its deallocation until all currently pinned threads get unpinned. + /// unsafe { + /// // ALWAYS use `move` when sending a closure into `defer_unchecked`. + /// guard.defer_unchecked(move || { + /// println!("{} is now being deallocated.", p.deref()); + /// // Now we have unique access to the object pointed to by `p` and can turn it + /// // into an `Owned`. Dropping the `Owned` will deallocate the object. + /// drop(p.into_owned()); + /// }); + /// } + /// } + /// ``` + /// + /// [`unprotected`]: fn.unprotected.html + pub unsafe fn defer_unchecked(&self, f: F) + where + F: FnOnce() -> R, + { + if let Some(local) = self.local.as_ref() { + local.defer(Deferred::new(move || drop(f())), self); + } + } + + /// Stores a destructor for an object so that it can be deallocated and dropped at some point + /// after all currently pinned threads get unpinned. + /// + /// This method first stores the destructor into the thread-local (or handle-local) cache. If + /// this cache becomes full, some destructors are moved into the global cache. At the same + /// time, some destructors from both local and global caches may get executed in order to + /// incrementally clean up the caches as they fill up. + /// + /// There is no guarantee when exactly the destructor will be executed. The only guarantee is + /// that it won't be executed until all currently pinned threads get unpinned. 
In theory, the + /// destructor might never run, but the epoch-based garbage collection will make an effort to + /// execute it reasonably soon. + /// + /// If this method is called from an [`unprotected`] guard, the destructor will simply be + /// executed immediately. + /// + /// # Safety + /// + /// The object must not be reachable by other threads anymore, otherwise it might be still in + /// use when the destructor runs. + /// + /// Apart from that, keep in mind that another thread may execute the destructor, so the object + /// must be sendable to other threads. + /// + /// We intentionally didn't require `T: Send`, because Rust's type systems usually cannot prove + /// `T: Send` for typical use cases. For example, consider the following code snippet, which + /// exemplifies the typical use case of deferring the deallocation of a shared reference: + /// + /// ```ignore + /// let shared = Owned::new(7i32).into_shared(guard); + /// guard.defer_destroy(shared); // `Shared` is not `Send`! + /// ``` + /// + /// While `Shared` is not `Send`, it's safe for another thread to call the destructor, because + /// it's called only after the grace period and `shared` is no longer shared with other + /// threads. But we don't expect type systems to prove this. + /// + /// # Examples + /// + /// When a heap-allocated object in a data structure becomes unreachable, it has to be + /// deallocated. However, the current thread and other threads may be still holding references + /// on the stack to that same object. Therefore it cannot be deallocated before those references + /// get dropped. This method can defer deallocation until all those threads get unpinned and + /// consequently drop all their references on the stack. + /// + /// ```rust + /// use crossbeam_epoch::{self as epoch, Atomic, Owned}; + /// use std::sync::atomic::Ordering::SeqCst; + /// + /// let a = Atomic::new("foo"); + /// + /// // Now suppose that `a` is shared among multiple threads and concurrently + /// // accessed and modified... + /// + /// // Pin the current thread. + /// let guard = &epoch::pin(); + /// + /// // Steal the object currently stored in `a` and swap it with another one. + /// let p = a.swap(Owned::new("bar").into_shared(guard), SeqCst, guard); + /// + /// if !p.is_null() { + /// // The object `p` is pointing to is now unreachable. + /// // Defer its deallocation until all currently pinned threads get unpinned. + /// unsafe { + /// guard.defer_destroy(p); + /// } + /// } + /// ``` + /// + /// [`unprotected`]: fn.unprotected.html + pub unsafe fn defer_destroy(&self, ptr: Shared) { + self.defer_unchecked(move || ptr.into_owned()); + } + + /// Clears up the thread-local cache of deferred functions by executing them or moving into the + /// global cache. + /// + /// Call this method after deferring execution of a function if you want to get it executed as + /// soon as possible. Flushing will make sure it is residing in in the global cache, so that + /// any thread has a chance of taking the function and executing it. + /// + /// If this method is called from an [`unprotected`] guard, it is a no-op (nothing happens). 
+ /// + /// # Examples + /// + /// ``` + /// use crossbeam_epoch as epoch; + /// + /// let guard = &epoch::pin(); + /// unsafe { + /// guard.defer(move || { + /// println!("This better be printed as soon as possible!"); + /// }); + /// } + /// guard.flush(); + /// ``` + /// + /// [`unprotected`]: fn.unprotected.html + pub fn flush(&self) { + if let Some(local) = unsafe { self.local.as_ref() } { + local.flush(self); + } + } + + /// Unpins and then immediately re-pins the thread. + /// + /// This method is useful when you don't want delay the advancement of the global epoch by + /// holding an old epoch. For safety, you should not maintain any guard-based reference across + /// the call (the latter is enforced by `&mut self`). The thread will only be repinned if this + /// is the only active guard for the current thread. + /// + /// If this method is called from an [`unprotected`] guard, then the call will be just no-op. + /// + /// # Examples + /// + /// ``` + /// use crossbeam_epoch::{self as epoch, Atomic}; + /// use std::sync::atomic::Ordering::SeqCst; + /// use std::thread; + /// use std::time::Duration; + /// + /// let a = Atomic::new(777); + /// let mut guard = epoch::pin(); + /// { + /// let p = a.load(SeqCst, &guard); + /// assert_eq!(unsafe { p.as_ref() }, Some(&777)); + /// } + /// guard.repin(); + /// { + /// let p = a.load(SeqCst, &guard); + /// assert_eq!(unsafe { p.as_ref() }, Some(&777)); + /// } + /// ``` + /// + /// [`unprotected`]: fn.unprotected.html + pub fn repin(&mut self) { + if let Some(local) = unsafe { self.local.as_ref() } { + local.repin(); + } + } + + /// Temporarily unpins the thread, executes the given function and then re-pins the thread. + /// + /// This method is useful when you need to perform a long-running operation (e.g. sleeping) + /// and don't need to maintain any guard-based reference across the call (the latter is enforced + /// by `&mut self`). The thread will only be unpinned if this is the only active guard for the + /// current thread. + /// + /// If this method is called from an [`unprotected`] guard, then the passed function is called + /// directly without unpinning the thread. + /// + /// # Examples + /// + /// ``` + /// use crossbeam_epoch::{self as epoch, Atomic}; + /// use std::sync::atomic::Ordering::SeqCst; + /// use std::thread; + /// use std::time::Duration; + /// + /// let a = Atomic::new(777); + /// let mut guard = epoch::pin(); + /// { + /// let p = a.load(SeqCst, &guard); + /// assert_eq!(unsafe { p.as_ref() }, Some(&777)); + /// } + /// guard.repin_after(|| thread::sleep(Duration::from_millis(50))); + /// { + /// let p = a.load(SeqCst, &guard); + /// assert_eq!(unsafe { p.as_ref() }, Some(&777)); + /// } + /// ``` + /// + /// [`unprotected`]: fn.unprotected.html + pub fn repin_after(&mut self, f: F) -> R + where + F: FnOnce() -> R, + { + if let Some(local) = unsafe { self.local.as_ref() } { + // We need to acquire a handle here to ensure the Local doesn't + // disappear from under us. + local.acquire_handle(); + local.unpin(); + } + + // Ensure the Guard is re-pinned even if the function panics + defer! { + if let Some(local) = unsafe { self.local.as_ref() } { + mem::forget(local.pin()); + local.release_handle(); + } + } + + f() + } + + /// Returns the `Collector` associated with this guard. + /// + /// This method is useful when you need to ensure that all guards used with + /// a data structure come from the same collector. + /// + /// If this method is called from an [`unprotected`] guard, then `None` is returned. 
+ /// + /// # Examples + /// + /// ``` + /// use crossbeam_epoch as epoch; + /// + /// let mut guard1 = epoch::pin(); + /// let mut guard2 = epoch::pin(); + /// assert!(guard1.collector() == guard2.collector()); + /// ``` + /// + /// [`unprotected`]: fn.unprotected.html + pub fn collector(&self) -> Option<&Collector> { + unsafe { self.local.as_ref().map(|local| local.collector()) } + } +} + +impl Drop for Guard { + #[inline] + fn drop(&mut self) { + if let Some(local) = unsafe { self.local.as_ref() } { + local.unpin(); + } + } +} + +impl Clone for Guard { + #[inline] + fn clone(&self) -> Guard { + match unsafe { self.local.as_ref() } { + None => Guard { local: ptr::null() }, + Some(local) => local.pin(), + } + } +} + +impl fmt::Debug for Guard { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_struct("Guard").finish() + } +} + +/// Returns a reference to a dummy guard that allows unprotected access to [`Atomic`]s. +/// +/// This guard should be used in special occasions only. Note that it doesn't actually keep any +/// thread pinned - it's just a fake guard that allows loading from [`Atomic`]s unsafely. +/// +/// Note that calling [`defer`] with a dummy guard will not defer the function - it will just +/// execute the function immediately. +/// +/// If necessary, it's possible to create more dummy guards by cloning: `unprotected().clone()`. +/// +/// # Safety +/// +/// Loading and dereferencing data from an [`Atomic`] using this guard is safe only if the +/// [`Atomic`] is not being concurrently modified by other threads. +/// +/// # Examples +/// +/// ``` +/// use crossbeam_epoch::{self as epoch, Atomic}; +/// use std::sync::atomic::Ordering::Relaxed; +/// +/// let a = Atomic::new(7); +/// +/// unsafe { +/// // Load `a` without pinning the current thread. +/// a.load(Relaxed, epoch::unprotected()); +/// +/// // It's possible to create more dummy guards by calling `clone()`. +/// let dummy = &epoch::unprotected().clone(); +/// +/// dummy.defer(move || { +/// println!("This gets executed immediately."); +/// }); +/// +/// // Dropping `dummy` doesn't affect the current thread - it's just a noop. +/// } +/// ``` +/// +/// The most common use of this function is when constructing or destructing a data structure. +/// +/// For example, we can use a dummy guard in the destructor of a Treiber stack because at that +/// point no other thread could concurrently modify the [`Atomic`]s we are accessing. +/// +/// If we were to actually pin the current thread during destruction, that would just unnecessarily +/// delay garbage collection and incur some performance cost, so in cases like these `unprotected` +/// is very helpful. +/// +/// ``` +/// use crossbeam_epoch::{self as epoch, Atomic}; +/// use std::mem::ManuallyDrop; +/// use std::sync::atomic::Ordering::Relaxed; +/// +/// struct Stack { +/// head: Atomic>, +/// } +/// +/// struct Node { +/// data: ManuallyDrop, +/// next: Atomic>, +/// } +/// +/// impl Drop for Stack { +/// fn drop(&mut self) { +/// unsafe { +/// // Unprotected load. +/// let mut node = self.head.load(Relaxed, epoch::unprotected()); +/// +/// while let Some(n) = node.as_ref() { +/// // Unprotected load. +/// let next = n.next.load(Relaxed, epoch::unprotected()); +/// +/// // Take ownership of the node, then drop its data and deallocate it. 
+///                 let mut o = node.into_owned();
+///                 ManuallyDrop::drop(&mut o.data);
+///                 drop(o);
+///
+///                 node = next;
+///             }
+///         }
+///     }
+/// }
+/// ```
+///
+/// [`Atomic`]: struct.Atomic.html
+/// [`defer`]: struct.Guard.html#method.defer
+#[inline]
+pub unsafe fn unprotected() -> &'static Guard {
+    // HACK(stjepang): An unprotected guard is just a `Guard` with its field `local` set to null.
+    // Since this function returns a `'static` reference to a `Guard`, we must return a reference
+    // to a global guard. However, it's not possible to create a `static` `Guard` because it does
+    // not implement `Sync`. To get around the problem, we create a static `usize` initialized to
+    // zero and then transmute it into a `Guard`. This is safe because `usize` and `Guard`
+    // (consisting of a single pointer) have the same representation in memory.
+    static UNPROTECTED: usize = 0;
+    &*(&UNPROTECTED as *const _ as *const Guard)
+}
diff --git a/crossbeam-epoch/src/internal.rs b/crossbeam-epoch/src/internal.rs
new file mode 100644
index 000000000..3ff26c274
--- /dev/null
+++ b/crossbeam-epoch/src/internal.rs
@@ -0,0 +1,543 @@
+//! The global data and participant for garbage collection.
+//!
+//! # Registration
+//!
+//! In order to track all participants in one place, we need some form of participant
+//! registration. When a participant is created, it is registered in a global lock-free
+//! singly-linked list of registries; and when a participant is leaving, it is unregistered from
+//! the list.
+//!
+//! # Pinning
+//!
+//! Every participant contains an integer that tells whether the participant is pinned and, if so,
+//! what the global epoch was at the time it was pinned. Participants also hold a pin counter that
+//! aids in periodic global epoch advancement.
+//!
+//! When a participant is pinned, a `Guard` is returned as a witness that the participant is
+//! pinned. Guards are necessary for performing atomic operations, and for freeing/dropping
+//! locations.
+//!
+//! # Thread-local bag
+//!
+//! Objects that get unlinked from concurrent data structures must be stashed away until the
+//! global epoch sufficiently advances so that they become safe for destruction. Pointers to such
+//! objects are pushed into a thread-local bag, and when it becomes full, the bag is marked with
+//! the current global epoch and pushed into the global queue of bags. We store objects in
+//! thread-local storage to amortize the synchronization cost of pushing garbage to the global
+//! queue.
+//!
+//! # Global queue
+//!
+//! Whenever a bag is pushed into the queue, the objects in some bags in the queue are collected
+//! and destroyed along the way. This design reduces contention on data structures. The global
+//! queue cannot be explicitly accessed: the only way to interact with it is by calling `defer()`,
+//! which adds an object to the thread-local bag, or `collect()`, which manually triggers garbage
+//! collection.
+//!
+//! Ideally, each instance of a concurrent data structure would have its own queue that gets fully
+//! destroyed as soon as the data structure gets dropped.
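+// Editorial note: the following commented-out sketch is an illustration added for this
+// document and is not part of the upstream crossbeam-epoch sources. It shows how the
+// machinery described above is driven through the crate's public API (re-exported from
+// lib.rs): `pin()` creates a `Guard` backed by a `Local`, `defer_destroy()` stashes a
+// deferred destructor in the thread-local `Bag`, and `flush()` pushes that bag into the
+// global queue so any participant may later collect it.
+//
+//     use crossbeam_epoch::{self as epoch, Atomic, Owned};
+//     use std::sync::atomic::Ordering::SeqCst;
+//
+//     let a = Atomic::new(42);
+//     // Pin the current participant; garbage created from now on won't be freed until we unpin.
+//     let guard = &epoch::pin();
+//     // Replace the stored value; `p` now points to garbage other threads may still be reading.
+//     let p = a.swap(Owned::new(7).into_shared(guard), SeqCst, guard);
+//     // Defer destruction of the old value into the thread-local bag...
+//     unsafe { guard.defer_destroy(p); }
+//     // ...and move the bag to the global queue so any thread has a chance to collect it.
+//     guard.flush();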
+ +use core::cell::{Cell, UnsafeCell}; +use core::mem::{self, ManuallyDrop}; +use core::num::Wrapping; +use core::ptr; +use core::sync::atomic; +use core::sync::atomic::Ordering; +use alloc::boxed::Box; + +use crossbeam_utils::CachePadded; +use arrayvec::ArrayVec; + +use atomic::Owned; +use collector::{LocalHandle, Collector}; +use epoch::{AtomicEpoch, Epoch}; +use guard::{unprotected, Guard}; +use deferred::Deferred; +use sync::list::{List, Entry, IterError, IsElement}; +use sync::queue::Queue; + +/// Maximum number of objects a bag can contain. +#[cfg(not(feature = "sanitize"))] +const MAX_OBJECTS: usize = 64; +#[cfg(feature = "sanitize")] +const MAX_OBJECTS: usize = 4; + +/// A bag of deferred functions. +#[derive(Default, Debug)] +pub struct Bag { + /// Stashed objects. + deferreds: ArrayVec<[Deferred; MAX_OBJECTS]>, +} + +/// `Bag::try_push()` requires that it is safe for another thread to execute the given functions. +unsafe impl Send for Bag {} + +impl Bag { + /// Returns a new, empty bag. + pub fn new() -> Self { + Self::default() + } + + /// Returns `true` if the bag is empty. + pub fn is_empty(&self) -> bool { + self.deferreds.is_empty() + } + + /// Attempts to insert a deferred function into the bag. + /// + /// Returns `Ok(())` if successful, and `Err(deferred)` for the given `deferred` if the bag is + /// full. + /// + /// # Safety + /// + /// It should be safe for another thread to execute the given function. + pub unsafe fn try_push(&mut self, deferred: Deferred) -> Result<(), Deferred> { + self.deferreds.try_push(deferred).map_err(|e| e.element()) + } + + /// Seals the bag with the given epoch. + fn seal(self, epoch: Epoch) -> SealedBag { + SealedBag { epoch, bag: self } + } +} + +impl Drop for Bag { + fn drop(&mut self) { + // Call all deferred functions. + for deferred in self.deferreds.drain(..) { + deferred.call(); + } + } +} + +/// A pair of an epoch and a bag. +#[derive(Default, Debug)] +struct SealedBag { + epoch: Epoch, + bag: Bag, +} + +/// It is safe to share `SealedBag` because `is_expired` only inspects the epoch. +unsafe impl Sync for SealedBag {} + +impl SealedBag { + /// Checks if it is safe to drop the bag w.r.t. the given global epoch. + fn is_expired(&self, global_epoch: Epoch) -> bool { + // A pinned participant can witness at most one epoch advancement. Therefore, any bag that + // is within one epoch of the current one cannot be destroyed yet. + global_epoch.wrapping_sub(self.epoch) >= 2 + } +} + +/// The global data for a garbage collector. +pub struct Global { + /// The intrusive linked list of `Local`s. + locals: List, + + /// The global queue of bags of deferred functions. + queue: Queue, + + /// The global epoch. + pub(crate) epoch: CachePadded, +} + +impl Global { + /// Number of bags to destroy. + const COLLECT_STEPS: usize = 8; + + /// Creates a new global data for garbage collection. + #[inline] + pub fn new() -> Self { + Self { + locals: List::new(), + queue: Queue::new(), + epoch: CachePadded::new(AtomicEpoch::new(Epoch::starting())), + } + } + + /// Pushes the bag into the global queue and replaces the bag with a new empty bag. + pub fn push_bag(&self, bag: &mut Bag, guard: &Guard) { + let bag = mem::replace(bag, Bag::new()); + + atomic::fence(Ordering::SeqCst); + + let epoch = self.epoch.load(Ordering::Relaxed); + self.queue.push(bag.seal(epoch), guard); + } + + /// Collects several bags from the global queue and executes deferred functions in them. + /// + /// Note: This may itself produce garbage and in turn allocate new bags. 
+ /// + /// `pin()` rarely calls `collect()`, so we want the compiler to place that call on a cold + /// path. In other words, we want the compiler to optimize branching for the case when + /// `collect()` is not called. + #[cold] + pub fn collect(&self, guard: &Guard) { + let global_epoch = self.try_advance(guard); + + let steps = if cfg!(feature = "sanitize") { + usize::max_value() + } else { + Self::COLLECT_STEPS + }; + + for _ in 0..steps { + match self.queue.try_pop_if( + &|sealed_bag: &SealedBag| sealed_bag.is_expired(global_epoch), + guard, + ) + { + None => break, + Some(sealed_bag) => drop(sealed_bag), + } + } + } + + /// Attempts to advance the global epoch. + /// + /// The global epoch can advance only if all currently pinned participants have been pinned in + /// the current epoch. + /// + /// Returns the current global epoch. + /// + /// `try_advance()` is annotated `#[cold]` because it is rarely called. + #[cold] + pub fn try_advance(&self, guard: &Guard) -> Epoch { + let global_epoch = self.epoch.load(Ordering::Relaxed); + atomic::fence(Ordering::SeqCst); + + // TODO(stjepang): `Local`s are stored in a linked list because linked lists are fairly + // easy to implement in a lock-free manner. However, traversal can be slow due to cache + // misses and data dependencies. We should experiment with other data structures as well. + for local in self.locals.iter(&guard) { + match local { + Err(IterError::Stalled) => { + // A concurrent thread stalled this iteration. That thread might also try to + // advance the epoch, in which case we leave the job to it. Otherwise, the + // epoch will not be advanced. + return global_epoch; + } + Ok(local) => { + let local_epoch = local.epoch.load(Ordering::Relaxed); + + // If the participant was pinned in a different epoch, we cannot advance the + // global epoch just yet. + if local_epoch.is_pinned() && local_epoch.unpinned() != global_epoch { + return global_epoch; + } + } + } + } + atomic::fence(Ordering::Acquire); + + // All pinned participants were pinned in the current global epoch. + // Now let's advance the global epoch... + // + // Note that if another thread already advanced it before us, this store will simply + // overwrite the global epoch with the same value. This is true because `try_advance` was + // called from a thread that was pinned in `global_epoch`, and the global epoch cannot be + // advanced two steps ahead of it. + let new_epoch = global_epoch.successor(); + self.epoch.store(new_epoch, Ordering::Release); + new_epoch + } +} + +/// Participant for garbage collection. +pub struct Local { + /// A node in the intrusive linked list of `Local`s. + entry: Entry, + + /// The local epoch. + epoch: AtomicEpoch, + + /// A reference to the global data. + /// + /// When all guards and handles get dropped, this reference is destroyed. + collector: UnsafeCell>, + + /// The local bag of deferred functions. + pub(crate) bag: UnsafeCell, + + /// The number of guards keeping this participant pinned. + guard_count: Cell, + + /// The number of active handles. + handle_count: Cell, + + /// Total number of pinnings performed. + /// + /// This is just an auxilliary counter that sometimes kicks off collection. + pin_count: Cell>, +} + +impl Local { + /// Number of pinnings after which a participant will execute some deferred functions from the + /// global queue. + const PINNINGS_BETWEEN_COLLECT: usize = 128; + + /// Registers a new `Local` in the provided `Global`. 
+ pub fn register(collector: &Collector) -> LocalHandle { + unsafe { + // Since we dereference no pointers in this block, it is safe to use `unprotected`. + + let local = Owned::new(Local { + entry: Entry::default(), + epoch: AtomicEpoch::new(Epoch::starting()), + collector: UnsafeCell::new(ManuallyDrop::new(collector.clone())), + bag: UnsafeCell::new(Bag::new()), + guard_count: Cell::new(0), + handle_count: Cell::new(1), + pin_count: Cell::new(Wrapping(0)), + }).into_shared(&unprotected()); + collector.global.locals.insert(local, &unprotected()); + LocalHandle { local: local.as_raw() } + } + } + + /// Returns a reference to the `Global` in which this `Local` resides. + #[inline] + pub fn global(&self) -> &Global { + &self.collector().global + } + + /// Returns a reference to the `Collector` in which this `Local` resides. + #[inline] + pub fn collector(&self) -> &Collector { + unsafe { &**self.collector.get() } + } + + /// Returns `true` if the current participant is pinned. + #[inline] + pub fn is_pinned(&self) -> bool { + self.guard_count.get() > 0 + } + + /// Adds `deferred` to the thread-local bag. + /// + /// # Safety + /// + /// It should be safe for another thread to execute the given function. + pub unsafe fn defer(&self, mut deferred: Deferred, guard: &Guard) { + let bag = &mut *self.bag.get(); + + while let Err(d) = bag.try_push(deferred) { + self.global().push_bag(bag, guard); + deferred = d; + } + } + + pub fn flush(&self, guard: &Guard) { + let bag = unsafe { &mut *self.bag.get() }; + + if !bag.is_empty() { + self.global().push_bag(bag, guard); + } + + self.global().collect(guard); + } + + /// Pins the `Local`. + #[inline] + pub fn pin(&self) -> Guard { + let guard = Guard { local: self }; + + let guard_count = self.guard_count.get(); + self.guard_count.set(guard_count.checked_add(1).unwrap()); + + if guard_count == 0 { + let global_epoch = self.global().epoch.load(Ordering::Relaxed); + let new_epoch = global_epoch.pinned(); + + // Now we must store `new_epoch` into `self.epoch` and execute a `SeqCst` fence. + // The fence makes sure that any future loads from `Atomic`s will not happen before + // this store. + if cfg!(any(target_arch = "x86", target_arch = "x86_64")) { + // HACK(stjepang): On x86 architectures there are two different ways of executing + // a `SeqCst` fence. + // + // 1. `atomic::fence(SeqCst)`, which compiles into a `mfence` instruction. + // 2. `_.compare_and_swap(_, _, SeqCst)`, which compiles into a `lock cmpxchg` + // instruction. + // + // Both instructions have the effect of a full barrier, but benchmarks have shown + // that the second one makes pinning faster in this particular case. + let current = Epoch::starting(); + let previous = self.epoch.compare_and_swap(current, new_epoch, Ordering::SeqCst); + debug_assert_eq!(current, previous, "participant was expected to be unpinned"); + } else { + self.epoch.store(new_epoch, Ordering::Relaxed); + atomic::fence(Ordering::SeqCst); + } + + // Increment the pin counter. + let count = self.pin_count.get(); + self.pin_count.set(count + Wrapping(1)); + + // After every `PINNINGS_BETWEEN_COLLECT` try advancing the epoch and collecting + // some garbage. + if count.0 % Self::PINNINGS_BETWEEN_COLLECT == 0 { + self.global().collect(&guard); + } + } + + guard + } + + /// Unpins the `Local`. 
+ #[inline] + pub fn unpin(&self) { + let guard_count = self.guard_count.get(); + self.guard_count.set(guard_count - 1); + + if guard_count == 1 { + self.epoch.store(Epoch::starting(), Ordering::Release); + + if self.handle_count.get() == 0 { + self.finalize(); + } + } + } + + /// Unpins and then pins the `Local`. + #[inline] + pub fn repin(&self) { + let guard_count = self.guard_count.get(); + + // Update the local epoch only if there's only one guard. + if guard_count == 1 { + let epoch = self.epoch.load(Ordering::Relaxed); + let global_epoch = self.global().epoch.load(Ordering::Relaxed); + + // Update the local epoch only if the global epoch is greater than the local epoch. + if epoch != global_epoch { + // We store the new epoch with `Release` because we need to ensure any memory + // accesses from the previous epoch do not leak into the new one. + self.epoch.store(global_epoch, Ordering::Release); + + // However, we don't need a following `SeqCst` fence, because it is safe for memory + // accesses from the new epoch to be executed before updating the local epoch. At + // worse, other threads will see the new epoch late and delay GC slightly. + } + } + } + + /// Increments the handle count. + #[inline] + pub fn acquire_handle(&self) { + let handle_count = self.handle_count.get(); + debug_assert!(handle_count >= 1); + self.handle_count.set(handle_count + 1); + } + + /// Decrements the handle count. + #[inline] + pub fn release_handle(&self) { + let guard_count = self.guard_count.get(); + let handle_count = self.handle_count.get(); + debug_assert!(handle_count >= 1); + self.handle_count.set(handle_count - 1); + + if guard_count == 0 && handle_count == 1 { + self.finalize(); + } + } + + /// Removes the `Local` from the global linked list. + #[cold] + fn finalize(&self) { + debug_assert_eq!(self.guard_count.get(), 0); + debug_assert_eq!(self.handle_count.get(), 0); + + // Temporarily increment handle count. This is required so that the following call to `pin` + // doesn't call `finalize` again. + self.handle_count.set(1); + unsafe { + // Pin and move the local bag into the global queue. It's important that `push_bag` + // doesn't defer destruction on any new garbage. + let guard = &self.pin(); + self.global().push_bag(&mut *self.bag.get(), guard); + } + // Revert the handle count back to zero. + self.handle_count.set(0); + + unsafe { + // Take the reference to the `Global` out of this `Local`. Since we're not protected + // by a guard at this time, it's crucial that the reference is read before marking the + // `Local` as deleted. + let collector: Collector = ptr::read(&*(*self.collector.get())); + + // Mark this node in the linked list as deleted. + self.entry.delete(&unprotected()); + + // Finally, drop the reference to the global. Note that this might be the last reference + // to the `Global`. If so, the global data will be destroyed and all deferred functions + // in its queue will be executed. + drop(collector); + } + } +} + +impl IsElement for Local { + fn entry_of(local: &Local) -> &Entry { + let entry_ptr = (local as *const Local as usize + offset_of!(Local, entry)) as *const Entry; + unsafe { &*entry_ptr } + } + + unsafe fn element_of(entry: &Entry) -> &Local { + // offset_of! macro uses unsafe, but it's unnecessary in this context. 
+ #[allow(unused_unsafe)] + let local_ptr = (entry as *const Entry as usize - offset_of!(Local, entry)) as *const Local; + &*local_ptr + } + + unsafe fn finalize(entry: &Entry) { + let local = Self::element_of(entry); + drop(Box::from_raw(local as *const Local as *mut Local)); + } +} + +#[cfg(test)] +mod tests { + use std::sync::atomic::{AtomicUsize, ATOMIC_USIZE_INIT}; + use std::sync::atomic::Ordering; + + use super::*; + + #[test] + fn check_defer() { + static FLAG: AtomicUsize = ATOMIC_USIZE_INIT; + fn set() { + FLAG.store(42, Ordering::Relaxed); + } + + let d = Deferred::new(set); + assert_eq!(FLAG.load(Ordering::Relaxed), 0); + d.call(); + assert_eq!(FLAG.load(Ordering::Relaxed), 42); + } + + #[test] + fn check_bag() { + static FLAG: AtomicUsize = ATOMIC_USIZE_INIT; + fn incr() { + FLAG.fetch_add(1, Ordering::Relaxed); + } + + let mut bag = Bag::new(); + assert!(bag.is_empty()); + + for _ in 0..MAX_OBJECTS { + assert!(unsafe { bag.try_push(Deferred::new(incr)).is_ok() }); + assert!(!bag.is_empty()); + assert_eq!(FLAG.load(Ordering::Relaxed), 0); + } + + let result = unsafe { bag.try_push(Deferred::new(incr)) }; + assert!(result.is_err()); + assert!(!bag.is_empty()); + assert_eq!(FLAG.load(Ordering::Relaxed), 0); + + drop(bag); + assert_eq!(FLAG.load(Ordering::Relaxed), MAX_OBJECTS); + } +} diff --git a/crossbeam-epoch/src/lib.rs b/crossbeam-epoch/src/lib.rs new file mode 100644 index 000000000..a83f58377 --- /dev/null +++ b/crossbeam-epoch/src/lib.rs @@ -0,0 +1,99 @@ +//! Epoch-based memory reclamation. +//! +//! An interesting problem concurrent collections deal with comes from the remove operation. +//! Suppose that a thread removes an element from a lock-free map, while another thread is reading +//! that same element at the same time. The first thread must wait until the second thread stops +//! reading the element. Only then it is safe to destruct it. +//! +//! Programming languages that come with garbage collectors solve this problem trivially. The +//! garbage collector will destruct the removed element when no thread can hold a reference to it +//! anymore. +//! +//! This crate implements a basic memory reclamation mechanism, which is based on epochs. When an +//! element gets removed from a concurrent collection, it is inserted into a pile of garbage and +//! marked with the current epoch. Every time a thread accesses a collection, it checks the current +//! epoch, attempts to increment it, and destructs some garbage that became so old that no thread +//! can be referencing it anymore. +//! +//! That is the general mechanism behind epoch-based memory reclamation, but the details are a bit +//! more complicated. Anyhow, memory reclamation is designed to be fully automatic and something +//! users of concurrent collections don't have to worry much about. +//! +//! # Pointers +//! +//! Concurrent collections are built using atomic pointers. This module provides [`Atomic`], which +//! is just a shared atomic pointer to a heap-allocated object. Loading an [`Atomic`] yields a +//! [`Shared`], which is an epoch-protected pointer through which the loaded object can be safely +//! read. +//! +//! # Pinning +//! +//! Before an [`Atomic`] can be loaded, a participant must be [`pin`]ned. By pinning a participant +//! we declare that any object that gets removed from now on must not be destructed just +//! yet. Garbage collection of newly removed objects is suspended until the participant gets +//! unpinned. +//! +//! # Garbage +//! +//! 
Objects that get removed from concurrent collections must be stashed away until all currently +//! pinned participants get unpinned. Such objects can be stored into a thread-local or global +//! storage, where they are kept until the right time for their destruction comes. +//! +//! There is a global shared instance of garbage queue. You can [`defer`] the execution of an +//! arbitrary function until the global epoch is advanced enough. Most notably, concurrent data +//! structures may defer the deallocation of an object. +//! +//! # APIs +//! +//! For majority of use cases, just use the default garbage collector by invoking [`pin`]. If you +//! want to create your own garbage collector, use the [`Collector`] API. +//! +//! [`Atomic`]: struct.Atomic.html +//! [`Collector`]: struct.Collector.html +//! [`Shared`]: struct.Shared.html +//! [`pin`]: fn.pin.html +//! [`defer`]: fn.defer.html + +#![cfg_attr(feature = "nightly", feature(const_fn))] +#![cfg_attr(feature = "nightly", feature(alloc))] +#![cfg_attr(not(test), no_std)] + +#![warn(missing_docs, missing_debug_implementations)] + +#[cfg(test)] +extern crate core; +#[cfg(all(not(test), feature = "use_std"))] +#[macro_use] +extern crate std; + +// Use liballoc on nightly to avoid a dependency on libstd +#[cfg(feature = "nightly")] +extern crate alloc; +#[cfg(not(feature = "nightly"))] +extern crate std as alloc; + +extern crate arrayvec; +extern crate crossbeam_utils; +#[cfg(feature = "use_std")] +#[macro_use] +extern crate lazy_static; +#[macro_use] +extern crate memoffset; +#[macro_use] +extern crate scopeguard; + +mod atomic; +mod collector; +#[cfg(feature = "use_std")] +mod default; +mod deferred; +mod epoch; +mod guard; +mod internal; +mod sync; + +pub use self::atomic::{Atomic, CompareAndSetError, CompareAndSetOrdering, Owned, Shared, Pointer}; +pub use self::guard::{unprotected, Guard}; +#[cfg(feature = "use_std")] +pub use self::default::{default_collector, is_pinned, pin}; +pub use self::collector::{Collector, LocalHandle}; diff --git a/crossbeam-epoch/src/sync/list.rs b/crossbeam-epoch/src/sync/list.rs new file mode 100644 index 000000000..46f8143c5 --- /dev/null +++ b/crossbeam-epoch/src/sync/list.rs @@ -0,0 +1,473 @@ +//! Lock-free intrusive linked list. +//! +//! Ideas from Michael. High Performance Dynamic Lock-Free Hash Tables and List-Based Sets. SPAA +//! 2002. http://dl.acm.org/citation.cfm?id=564870.564881 + +use core::marker::PhantomData; +use core::sync::atomic::Ordering::{Acquire, Relaxed, Release}; + +use {Atomic, Shared, Guard, unprotected}; + +/// An entry in a linked list. +/// +/// An Entry is accessed from multiple threads, so it would be beneficial to put it in a different +/// cache-line than thread-local data in terms of performance. +#[derive(Debug)] +pub struct Entry { + /// The next entry in the linked list. + /// If the tag is 1, this entry is marked as deleted. + next: Atomic, +} + +/// Implementing this trait asserts that the type `T` can be used as an element in the intrusive +/// linked list defined in this module. `T` has to contain (or otherwise be linked to) an instance +/// of `Entry`. 
+/// +/// # Example +/// +/// ```ignore +/// struct A { +/// entry: Entry, +/// data: usize, +/// } +/// +/// impl IsElement for A { +/// fn entry_of(a: &A) -> &Entry { +/// let entry_ptr = ((a as usize) + offset_of!(A, entry)) as *const Entry; +/// unsafe { &*entry_ptr } +/// } +/// +/// unsafe fn element_of(entry: &Entry) -> &T { +/// let elem_ptr = ((entry as usize) - offset_of!(A, entry)) as *const T; +/// &*elem_ptr +/// } +/// +/// unsafe fn finalize(entry: &Entry) { +/// let elem = Self::element_of(entry); +/// drop(Box::from_raw(elem as *const A as *mut A)); +/// } +/// } +/// ``` +/// +/// This trait is implemented on a type separate from `T` (although it can be just `T`), because +/// one type might be placeable into multiple lists, in which case it would require multiple +/// implementations of `IsElement`. In such cases, each struct implementing `IsElement` +/// represents a distinct `Entry` in `T`. +/// +/// For example, we can insert the following struct into two lists using `entry1` for one +/// and `entry2` for the other: +/// +/// ```ignore +/// struct B { +/// entry1: Entry, +/// entry2: Entry, +/// data: usize, +/// } +/// ``` +/// +pub trait IsElement { + /// Returns a reference to this element's `Entry`. + fn entry_of(&T) -> &Entry; + + /// Given a reference to an element's entry, returns that element. + /// + /// ```ignore + /// let elem = ListElement::new(); + /// assert_eq!(elem.entry_of(), + /// unsafe { ListElement::element_of(elem.entry_of()) } ); + /// ``` + /// + /// # Safety + /// The caller has to guarantee that the `Entry` it + /// is called with was retrieved from an instance of the element type (`T`). + unsafe fn element_of(&Entry) -> &T; + + /// Deallocates the whole element given its `Entry`. This is called when the list + /// is ready to actually free the element. + /// + /// # Safety + /// The caller has to guarantee that the `Entry` it + /// is called with was retrieved from an instance of the element type (`T`). + unsafe fn finalize(&Entry); +} + +/// A lock-free, intrusive linked list of type `T`. +#[derive(Debug)] +pub struct List = T> { + /// The head of the linked list. + head: Atomic, + + /// The phantom data for using `T` and `C`. + _marker: PhantomData<(T, C)>, +} + +/// An iterator used for retrieving values from the list. +pub struct Iter<'g, T: 'g, C: IsElement> { + /// The guard that protects the iteration. + guard: &'g Guard, + + /// Pointer from the predecessor to the current entry. + pred: &'g Atomic, + + /// The current entry. + curr: Shared<'g, Entry>, + + /// The list head, needed for restarting iteration. + head: &'g Atomic, + + /// Logically, we store a borrow of an instance of `T` and + /// use the type information from `C`. + _marker: PhantomData<(&'g T, C)>, +} + +/// An error that occurs during iteration over the list. +#[derive(PartialEq, Debug)] +pub enum IterError { + /// A concurrent thread modified the state of the list at the same place that this iterator + /// was inspecting. Subsequent iteration will restart from the beginning of the list. + Stalled, +} + +impl Default for Entry { + /// Returns the empty entry. + fn default() -> Self { + Self { next: Atomic::null() } + } +} + +impl Entry { + /// Marks this entry as deleted, deferring the actual deallocation to a later iteration. + /// + /// # Safety + /// + /// The entry should be a member of a linked list, and it should not have been deleted. 
+ /// It should be safe to call `C::finalize` on the entry after the `guard` is dropped, where `C` + /// is the associated helper for the linked list. + pub unsafe fn delete(&self, guard: &Guard) { + self.next.fetch_or(1, Release, guard); + } +} + +impl> List { + /// Returns a new, empty linked list. + pub fn new() -> Self { + Self { + head: Atomic::null(), + _marker: PhantomData, + } + } + + /// Inserts `entry` into the head of the list. + /// + /// # Safety + /// + /// You should guarantee that: + /// + /// - `container` is not null + /// - `container` is immovable, e.g. inside a `Box` + /// - the same `Entry` is not inserted more than once + /// - the inserted object will be removed before the list is dropped + pub unsafe fn insert<'g>(&'g self, container: Shared<'g, T>, guard: &'g Guard) { + // Insert right after head, i.e. at the beginning of the list. + let to = &self.head; + // Get the intrusively stored Entry of the new element to insert. + let entry: &Entry = C::entry_of(container.deref()); + // Make a Shared ptr to that Entry. + let entry_ptr = Shared::from(entry as *const _); + // Read the current successor of where we want to insert. + let mut next = to.load(Relaxed, guard); + + loop { + // Set the Entry of the to-be-inserted element to point to the previous successor of + // `to`. + entry.next.store(next, Relaxed); + match to.compare_and_set_weak(next, entry_ptr, Release, guard) { + Ok(_) => break, + // We lost the race or weak CAS failed spuriously. Update the successor and try + // again. + Err(err) => next = err.current, + } + } + } + + /// Returns an iterator over all objects. + /// + /// # Caveat + /// + /// Every object that is inserted at the moment this function is called and persists at least + /// until the end of iteration will be returned. Since this iterator traverses a lock-free + /// linked list that may be concurrently modified, some additional caveats apply: + /// + /// 1. If a new object is inserted during iteration, it may or may not be returned. + /// 2. If an object is deleted during iteration, it may or may not be returned. + /// 3. The iteration may be aborted when it lost in a race condition. In this case, the winning + /// thread will continue to iterate over the same list. + pub fn iter<'g>(&'g self, guard: &'g Guard) -> Iter<'g, T, C> { + Iter { + guard, + pred: &self.head, + curr: self.head.load(Acquire, guard), + head: &self.head, + _marker: PhantomData, + } + } +} + +impl> Drop for List { + fn drop(&mut self) { + unsafe { + let guard = &unprotected(); + let mut curr = self.head.load(Relaxed, guard); + while let Some(c) = curr.as_ref() { + let succ = c.next.load(Relaxed, guard); + // Verify that all elements have been removed from the list. + assert_eq!(succ.tag(), 1); + + C::finalize(curr.deref()); + curr = succ; + } + } + } +} + +impl<'g, T: 'g, C: IsElement> Iterator for Iter<'g, T, C> { + type Item = Result<&'g T, IterError>; + + fn next(&mut self) -> Option { + while let Some(c) = unsafe { self.curr.as_ref() } { + let succ = c.next.load(Acquire, self.guard); + + if succ.tag() == 1 { + // This entry was removed. Try unlinking it from the list. + let succ = succ.with_tag(0); + + // The tag should never be zero, because removing a node after a logically deleted + // node leaves the list in an invalid state. + debug_assert!(self.curr.tag() == 0); + + match self.pred.compare_and_set( + self.curr, + succ, + Acquire, + self.guard, + ) { + Ok(_) => { + // We succeeded in unlinking this element from the list, so we have to + // schedule deallocation. 
Deferred drop is okay, because `list.delete()` + // can only be called if `T: 'static`. + unsafe { + let p = self.curr; + self.guard.defer_unchecked(move || C::finalize(p.deref())); + } + + // Move over the removed by only advancing `curr`, not `pred`. + self.curr = succ; + continue; + } + Err(_) => { + // A concurrent thread modified the predecessor node. Since it might've + // been deleted, we need to restart from `head`. + self.pred = self.head; + self.curr = self.head.load(Acquire, self.guard); + + return Some(Err(IterError::Stalled)); + } + } + } + + // Move one step forward. + self.pred = &c.next; + self.curr = succ; + + return Some(Ok(unsafe { C::element_of(c) })); + } + + // We reached the end of the list. + None + } +} + +#[cfg(test)] +mod tests { + use {Collector, Owned}; + use crossbeam_utils::thread; + use std::sync::Barrier; + use super::*; + + impl IsElement for Entry { + fn entry_of(entry: &Entry) -> &Entry { + entry + } + + unsafe fn element_of(entry: &Entry) -> &Entry { + entry + } + + unsafe fn finalize(entry: &Entry) { + drop(Box::from_raw(entry as *const Entry as *mut Entry)); + } + } + + /// Checks whether the list retains inserted elements + /// and returns them in the correct order. + #[test] + fn insert() { + let collector = Collector::new(); + let handle = collector.register(); + let guard = handle.pin(); + + let l: List = List::new(); + + let e1 = Owned::new(Entry::default()).into_shared(&guard); + let e2 = Owned::new(Entry::default()).into_shared(&guard); + let e3 = Owned::new(Entry::default()).into_shared(&guard); + + unsafe { + l.insert(e1, &guard); + l.insert(e2, &guard); + l.insert(e3, &guard); + } + + let mut iter = l.iter(&guard); + let maybe_e3 = iter.next(); + assert!(maybe_e3.is_some()); + assert!(maybe_e3.unwrap().unwrap() as *const Entry == e3.as_raw()); + let maybe_e2 = iter.next(); + assert!(maybe_e2.is_some()); + assert!(maybe_e2.unwrap().unwrap() as *const Entry == e2.as_raw()); + let maybe_e1 = iter.next(); + assert!(maybe_e1.is_some()); + assert!(maybe_e1.unwrap().unwrap() as *const Entry == e1.as_raw()); + assert!(iter.next().is_none()); + + unsafe { + e1.as_ref().unwrap().delete(&guard); + e2.as_ref().unwrap().delete(&guard); + e3.as_ref().unwrap().delete(&guard); + } + } + + /// Checks whether elements can be removed from the list and whether + /// the correct elements are removed. + #[test] + fn delete() { + let collector = Collector::new(); + let handle = collector.register(); + let guard = handle.pin(); + + let l: List = List::new(); + + let e1 = Owned::new(Entry::default()).into_shared(&guard); + let e2 = Owned::new(Entry::default()).into_shared(&guard); + let e3 = Owned::new(Entry::default()).into_shared(&guard); + unsafe { + l.insert(e1, &guard); + l.insert(e2, &guard); + l.insert(e3, &guard); + e2.as_ref().unwrap().delete(&guard); + } + + let mut iter = l.iter(&guard); + let maybe_e3 = iter.next(); + assert!(maybe_e3.is_some()); + assert!(maybe_e3.unwrap().unwrap() as *const Entry == e3.as_raw()); + let maybe_e1 = iter.next(); + assert!(maybe_e1.is_some()); + assert!(maybe_e1.unwrap().unwrap() as *const Entry == e1.as_raw()); + assert!(iter.next().is_none()); + + unsafe { + e1.as_ref().unwrap().delete(&guard); + e3.as_ref().unwrap().delete(&guard); + } + + let mut iter = l.iter(&guard); + assert!(iter.next().is_none()); + } + + const THREADS: usize = 8; + const ITERS: usize = 512; + + /// Contends the list on insert and delete operations to make sure they can run concurrently. 
+ #[test] + fn insert_delete_multi() { + let collector = Collector::new(); + + let l: List = List::new(); + let b = Barrier::new(THREADS); + + thread::scope(|s| for _ in 0..THREADS { + s.spawn(|| { + b.wait(); + + let handle = collector.register(); + let guard: Guard = handle.pin(); + let mut v = Vec::with_capacity(ITERS); + + for _ in 0..ITERS { + let e = Owned::new(Entry::default()).into_shared(&guard); + v.push(e); + unsafe { + l.insert(e, &guard); + } + } + + for e in v { + unsafe { + e.as_ref().unwrap().delete(&guard); + } + } + }); + }); + + let handle = collector.register(); + let guard = handle.pin(); + + let mut iter = l.iter(&guard); + assert!(iter.next().is_none()); + } + + /// Contends the list on iteration to make sure that it can be iterated over concurrently. + #[test] + fn iter_multi() { + let collector = Collector::new(); + + let l: List = List::new(); + let b = Barrier::new(THREADS); + + thread::scope(|s| for _ in 0..THREADS { + s.spawn(|| { + b.wait(); + + let handle = collector.register(); + let guard: Guard = handle.pin(); + let mut v = Vec::with_capacity(ITERS); + + for _ in 0..ITERS { + let e = Owned::new(Entry::default()).into_shared(&guard); + v.push(e); + unsafe { + l.insert(e, &guard); + } + } + + let mut iter = l.iter(&guard); + for _ in 0..ITERS { + assert!(iter.next().is_some()); + } + + for e in v { + unsafe { + e.as_ref().unwrap().delete(&guard); + } + } + }); + }); + + let handle = collector.register(); + let guard = handle.pin(); + + let mut iter = l.iter(&guard); + assert!(iter.next().is_none()); + } +} diff --git a/crossbeam-epoch/src/sync/mod.rs b/crossbeam-epoch/src/sync/mod.rs new file mode 100644 index 000000000..f8eb25960 --- /dev/null +++ b/crossbeam-epoch/src/sync/mod.rs @@ -0,0 +1,4 @@ +//! Synchronization primitives. + +pub mod list; +pub mod queue; diff --git a/crossbeam-epoch/src/sync/queue.rs b/crossbeam-epoch/src/sync/queue.rs new file mode 100644 index 000000000..9e145d72d --- /dev/null +++ b/crossbeam-epoch/src/sync/queue.rs @@ -0,0 +1,428 @@ +//! Michael-Scott lock-free queue. +//! +//! Usable with any number of producers and consumers. +//! +//! Michael and Scott. Simple, Fast, and Practical Non-Blocking and Blocking Concurrent Queue +//! Algorithms. PODC 1996. http://dl.acm.org/citation.cfm?id=248106 + +use core::mem::{self, ManuallyDrop}; +use core::ptr; +use core::sync::atomic::Ordering::{Acquire, Relaxed, Release}; + +use crossbeam_utils::CachePadded; + +use {unprotected, Atomic, Guard, Owned, Shared}; + +// The representation here is a singly-linked list, with a sentinel node at the front. In general +// the `tail` pointer may lag behind the actual tail. Non-sentinel nodes are either all `Data` or +// all `Blocked` (requests for data from blocked threads). +#[derive(Debug)] +pub struct Queue { + head: CachePadded>>, + tail: CachePadded>>, +} + +#[derive(Debug)] +struct Node { + /// The slot in which a value of type `T` can be stored. + /// + /// The type of `data` is `ManuallyDrop` because a `Node` doesn't always contain a `T`. + /// For example, the sentinel node in a queue never contains a value: its slot is always empty. + /// Other nodes start their life with a push operation and contain a value until it gets popped + /// out. After that such empty nodes get added to the collector for destruction. + data: ManuallyDrop, + + next: Atomic>, +} + +// Any particular `T` should never be accessed concurrently, so no need for `Sync`. 
+unsafe impl Sync for Queue {} +unsafe impl Send for Queue {} + + +impl Queue { + /// Create a new, empty queue. + pub fn new() -> Queue { + let q = Queue { + head: CachePadded::new(Atomic::null()), + tail: CachePadded::new(Atomic::null()), + }; + let sentinel = Owned::new(Node { + data: unsafe { mem::uninitialized() }, + next: Atomic::null(), + }); + unsafe { + let guard = &unprotected(); + let sentinel = sentinel.into_shared(guard); + q.head.store(sentinel, Relaxed); + q.tail.store(sentinel, Relaxed); + q + } + } + + /// Attempts to atomically place `n` into the `next` pointer of `onto`, and returns `true` on + /// success. The queue's `tail` pointer may be updated. + #[inline(always)] + fn push_internal(&self, onto: Shared>, new: Shared>, guard: &Guard) -> bool { + // is `onto` the actual tail? + let o = unsafe { onto.deref() }; + let next = o.next.load(Acquire, guard); + if unsafe { next.as_ref().is_some() } { + // if not, try to "help" by moving the tail pointer forward + let _ = self.tail.compare_and_set(onto, next, Release, guard); + false + } else { + // looks like the actual tail; attempt to link in `n` + let result = o.next + .compare_and_set(Shared::null(), new, Release, guard) + .is_ok(); + if result { + // try to move the tail pointer forward + let _ = self.tail.compare_and_set(onto, new, Release, guard); + } + result + } + } + + /// Adds `t` to the back of the queue, possibly waking up threads blocked on `pop`. + pub fn push(&self, t: T, guard: &Guard) { + let new = Owned::new(Node { + data: ManuallyDrop::new(t), + next: Atomic::null(), + }); + let new = Owned::into_shared(new, guard); + + loop { + // We push onto the tail, so we'll start optimistically by looking there first. + let tail = self.tail.load(Acquire, guard); + + // Attempt to push onto the `tail` snapshot; fails if `tail.next` has changed. + if self.push_internal(tail, new, guard) { + break; + } + } + } + + /// Attempts to pop a data node. `Ok(None)` if queue is empty; `Err(())` if lost race to pop. + #[inline(always)] + fn pop_internal(&self, guard: &Guard) -> Result, ()> { + let head = self.head.load(Acquire, guard); + let h = unsafe { head.deref() }; + let next = h.next.load(Acquire, guard); + match unsafe { next.as_ref() } { + Some(n) => unsafe { + self.head + .compare_and_set(head, next, Release, guard) + .map(|_| { + guard.defer_destroy(head); + Some(ManuallyDrop::into_inner(ptr::read(&n.data))) + }) + .map_err(|_| ()) + }, + None => Ok(None), + } + } + + /// Attempts to pop a data node, if the data satisfies the given condition. `Ok(None)` if queue + /// is empty or the data does not satisfy the condition; `Err(())` if lost race to pop. + #[inline(always)] + fn pop_if_internal(&self, condition: F, guard: &Guard) -> Result, ()> + where + T: Sync, + F: Fn(&T) -> bool, + { + let head = self.head.load(Acquire, guard); + let h = unsafe { head.deref() }; + let next = h.next.load(Acquire, guard); + match unsafe { next.as_ref() } { + Some(n) if condition(&n.data) => unsafe { + self.head + .compare_and_set(head, next, Release, guard) + .map(|_| { + guard.defer_destroy(head); + Some(ManuallyDrop::into_inner(ptr::read(&n.data))) + }) + .map_err(|_| ()) + }, + None | Some(_) => Ok(None), + } + } + + /// Attempts to dequeue from the front. + /// + /// Returns `None` if the queue is observed to be empty. 
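+    ///
+    /// # Example
+    ///
+    /// A minimal sketch of push/pop through an epoch guard; it assumes the crate-root `pin()`
+    /// (available with the default `use_std` feature) and is only illustrative.
+    ///
+    /// ```ignore
+    /// use pin;
+    ///
+    /// let q = Queue::new();
+    /// let guard = &pin();
+    /// q.push(1, guard);
+    /// assert_eq!(q.try_pop(guard), Some(1));
+    /// assert_eq!(q.try_pop(guard), None);
+    /// ```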
+ pub fn try_pop(&self, guard: &Guard) -> Option { + loop { + if let Ok(head) = self.pop_internal(guard) { + return head; + } + } + } + + /// Attempts to dequeue from the front, if the item satisfies the given condition. + /// + /// Returns `None` if the queue is observed to be empty, or the head does not satisfy the given + /// condition. + pub fn try_pop_if(&self, condition: F, guard: &Guard) -> Option + where + T: Sync, + F: Fn(&T) -> bool, + { + loop { + if let Ok(head) = self.pop_if_internal(&condition, guard) { + return head; + } + } + } +} + +impl Drop for Queue { + fn drop(&mut self) { + unsafe { + let guard = &unprotected(); + + while let Some(_) = self.try_pop(guard) {} + + // Destroy the remaining sentinel node. + let sentinel = self.head.load(Relaxed, guard); + drop(sentinel.into_owned()); + } + } +} + + +#[cfg(test)] +mod test { + use super::*; + use crossbeam_utils::thread; + use pin; + + struct Queue { + queue: super::Queue, + } + + impl Queue { + pub fn new() -> Queue { + Queue { queue: super::Queue::new() } + } + + pub fn push(&self, t: T) { + let guard = &pin(); + self.queue.push(t, guard); + } + + pub fn is_empty(&self) -> bool { + let guard = &pin(); + let head = self.queue.head.load(Acquire, guard); + let h = unsafe { head.deref() }; + h.next.load(Acquire, guard).is_null() + } + + pub fn try_pop(&self) -> Option { + let guard = &pin(); + self.queue.try_pop(guard) + } + + pub fn pop(&self) -> T { + loop { + match self.try_pop() { + None => continue, + Some(t) => return t, + } + } + } + } + + const CONC_COUNT: i64 = 1000000; + + #[test] + fn push_try_pop_1() { + let q: Queue = Queue::new(); + assert!(q.is_empty()); + q.push(37); + assert!(!q.is_empty()); + assert_eq!(q.try_pop(), Some(37)); + assert!(q.is_empty()); + } + + #[test] + fn push_try_pop_2() { + let q: Queue = Queue::new(); + assert!(q.is_empty()); + q.push(37); + q.push(48); + assert_eq!(q.try_pop(), Some(37)); + assert!(!q.is_empty()); + assert_eq!(q.try_pop(), Some(48)); + assert!(q.is_empty()); + } + + #[test] + fn push_try_pop_many_seq() { + let q: Queue = Queue::new(); + assert!(q.is_empty()); + for i in 0..200 { + q.push(i) + } + assert!(!q.is_empty()); + for i in 0..200 { + assert_eq!(q.try_pop(), Some(i)); + } + assert!(q.is_empty()); + } + + #[test] + fn push_pop_1() { + let q: Queue = Queue::new(); + assert!(q.is_empty()); + q.push(37); + assert!(!q.is_empty()); + assert_eq!(q.pop(), 37); + assert!(q.is_empty()); + } + + #[test] + fn push_pop_2() { + let q: Queue = Queue::new(); + q.push(37); + q.push(48); + assert_eq!(q.pop(), 37); + assert_eq!(q.pop(), 48); + } + + #[test] + fn push_pop_many_seq() { + let q: Queue = Queue::new(); + assert!(q.is_empty()); + for i in 0..200 { + q.push(i) + } + assert!(!q.is_empty()); + for i in 0..200 { + assert_eq!(q.pop(), i); + } + assert!(q.is_empty()); + } + + #[test] + fn push_try_pop_many_spsc() { + let q: Queue = Queue::new(); + assert!(q.is_empty()); + + thread::scope(|scope| { + scope.spawn(|| { + let mut next = 0; + + while next < CONC_COUNT { + if let Some(elem) = q.try_pop() { + assert_eq!(elem, next); + next += 1; + } + } + }); + + for i in 0..CONC_COUNT { + q.push(i) + } + }); + } + + #[test] + fn push_try_pop_many_spmc() { + fn recv(_t: i32, q: &Queue) { + let mut cur = -1; + for _i in 0..CONC_COUNT { + if let Some(elem) = q.try_pop() { + assert!(elem > cur); + cur = elem; + + if cur == CONC_COUNT - 1 { + break; + } + } + } + } + + let q: Queue = Queue::new(); + assert!(q.is_empty()); + let qr = &q; + thread::scope(|scope| { + for i in 0..3 { + 
scope.spawn(move || recv(i, qr)); + } + + scope.spawn(|| for i in 0..CONC_COUNT { + q.push(i); + }) + }); + } + + #[test] + fn push_try_pop_many_mpmc() { + enum LR { + Left(i64), + Right(i64), + } + + let q: Queue = Queue::new(); + assert!(q.is_empty()); + + thread::scope(|scope| for _t in 0..2 { + scope.spawn(|| for i in CONC_COUNT - 1..CONC_COUNT { + q.push(LR::Left(i)) + }); + scope.spawn(|| for i in CONC_COUNT - 1..CONC_COUNT { + q.push(LR::Right(i)) + }); + scope.spawn(|| { + let mut vl = vec![]; + let mut vr = vec![]; + for _i in 0..CONC_COUNT { + match q.try_pop() { + Some(LR::Left(x)) => vl.push(x), + Some(LR::Right(x)) => vr.push(x), + _ => {} + } + } + + let mut vl2 = vl.clone(); + let mut vr2 = vr.clone(); + vl2.sort(); + vr2.sort(); + + assert_eq!(vl, vl2); + assert_eq!(vr, vr2); + }); + }); + } + + #[test] + fn push_pop_many_spsc() { + let q: Queue = Queue::new(); + + thread::scope(|scope| { + scope.spawn(|| { + let mut next = 0; + while next < CONC_COUNT { + assert_eq!(q.pop(), next); + next += 1; + } + }); + + for i in 0..CONC_COUNT { + q.push(i) + } + }); + assert!(q.is_empty()); + } + + #[test] + fn is_empty_dont_pop() { + let q: Queue = Queue::new(); + q.push(20); + q.push(20); + assert!(!q.is_empty()); + assert!(!q.is_empty()); + assert!(q.try_pop().is_some()); + } +} diff --git a/crossbeam-utils/.cargo-checksum.json b/crossbeam-utils/.cargo-checksum.json new file mode 100644 index 000000000..ca5a625ea --- /dev/null +++ b/crossbeam-utils/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{},"package":"677d453a17e8bd2b913fa38e8b9cf04bcdbb5be790aa294f2389661d72036015"} \ No newline at end of file diff --git a/crossbeam-utils/CHANGELOG.md b/crossbeam-utils/CHANGELOG.md new file mode 100644 index 000000000..6737d9d0c --- /dev/null +++ b/crossbeam-utils/CHANGELOG.md @@ -0,0 +1,82 @@ +# Changelog +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) +and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). + +## [Unreleased] + +## [0.5.0] - 2018-07-22 +### Changed +- Reorganize sub-modules and rename functions. + +## [0.4.1] - 2018-06-28 +### Fixed +- Fix a documentation link. + +## [0.4.0] - 2018-05-17 +### Fixed +- `CachePadded` supports types bigger than 64 bytes. +- Fix a bug in scoped threads where unitialized memory was being dropped. + +### Changed +- Minimum required Rust version is now 1.25. + +## [0.3.2] - 2018-03-12 +### Fixed +- Mark `load_consume` with `#[inline]`. + +## [0.3.1] - 2018-03-12 +### Fixed +- `load_consume` on ARM and AArch64. + +## [0.3.0] - 2018-03-11 +### Added +- `join` for scoped thread API. +- `load_consume` for atomic load-consume memory ordering. + +### Removed +- `AtomicOption`. + +## [0.2.2] - 2018-01-14 +### Added +- Support for Rust 1.12.1. + +### Fixed +- Call `T::clone` when cloning a `CachePadded`. + +## [0.2.1] - 2017-11-26 +### Added +- Add `use_std` feature. + +## [0.2.0] - 2017-11-17 +### Added +- Add `nightly` feature. +- Use `repr(align(64))` on `CachePadded` with the `nightly` feature. +- Implement `Drop` for `CachePadded`. +- Implement `Clone` for `CachePadded`. +- Implement `From` for `CachePadded`. +- Implement better `Debug` for `CachePadded`. +- Write more tests. +- Add this changelog. + +### Changed +- Change cache line length to 64 bytes. + +### Removed +- Remove `ZerosValid`. 
+ +## 0.1.0 - 2017-08-27 +### Added +- Old implementation of `CachePadded` from `crossbeam` version 0.3.0 + +[Unreleased]: https://github.com/crossbeam-rs/crossbeam-utils/compare/v0.5.0...HEAD +[0.5.0]: https://github.com/crossbeam-rs/crossbeam-utils/compare/v0.4.1...v0.5.0 +[0.4.1]: https://github.com/crossbeam-rs/crossbeam-utils/compare/v0.4.0...v0.4.1 +[0.4.0]: https://github.com/crossbeam-rs/crossbeam-utils/compare/v0.3.2...v0.4.0 +[0.3.2]: https://github.com/crossbeam-rs/crossbeam-utils/compare/v0.3.1...v0.3.2 +[0.3.1]: https://github.com/crossbeam-rs/crossbeam-utils/compare/v0.3.0...v0.3.1 +[0.3.0]: https://github.com/crossbeam-rs/crossbeam-utils/compare/v0.2.2...v0.3.0 +[0.2.2]: https://github.com/crossbeam-rs/crossbeam-utils/compare/v0.2.1...v0.2.2 +[0.2.1]: https://github.com/crossbeam-rs/crossbeam-utils/compare/v0.2.0...v0.2.1 +[0.2.0]: https://github.com/crossbeam-rs/crossbeam-utils/compare/v0.1.0...v0.2.0 diff --git a/crossbeam-utils/Cargo.toml b/crossbeam-utils/Cargo.toml new file mode 100644 index 000000000..411fb4d70 --- /dev/null +++ b/crossbeam-utils/Cargo.toml @@ -0,0 +1,31 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "crossbeam-utils" +version = "0.5.0" +authors = ["The Crossbeam Project Developers"] +description = "Utilities for concurrent programming" +homepage = "https://github.com/crossbeam-rs/crossbeam-utils" +documentation = "https://docs.rs/crossbeam-utils" +readme = "README.md" +keywords = ["scoped", "thread", "atomic", "cache"] +categories = ["algorithms", "concurrency", "data-structures"] +license = "MIT/Apache-2.0" +repository = "https://github.com/crossbeam-rs/crossbeam-utils" + +[dependencies] + +[features] +default = ["use_std"] +nightly = [] +use_std = [] diff --git a/crossbeam-utils/LICENSE-APACHE b/crossbeam-utils/LICENSE-APACHE new file mode 100644 index 000000000..16fe87b06 --- /dev/null +++ b/crossbeam-utils/LICENSE-APACHE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
diff --git a/crossbeam-utils/LICENSE-MIT b/crossbeam-utils/LICENSE-MIT new file mode 100644 index 000000000..25597d583 --- /dev/null +++ b/crossbeam-utils/LICENSE-MIT @@ -0,0 +1,25 @@ +Copyright (c) 2010 The Rust Project Developers + +Permission is hereby granted, free of charge, to any +person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without +limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software +is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions +of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/crossbeam-utils/README.md b/crossbeam-utils/README.md new file mode 100644 index 000000000..6f6e0a760 --- /dev/null +++ b/crossbeam-utils/README.md @@ -0,0 +1,31 @@ +# Utilities for concurrent programming + +[![Build Status](https://travis-ci.org/crossbeam-rs/crossbeam-utils.svg?branch=master)](https://travis-ci.org/crossbeam-rs/crossbeam-utils) +[![License](https://img.shields.io/badge/license-MIT%2FApache--2.0-blue.svg)](https://github.com/crossbeam-rs/crossbeam-utils) +[![Cargo](https://img.shields.io/crates/v/crossbeam-utils.svg)](https://crates.io/crates/crossbeam-utils) +[![Documentation](https://docs.rs/crossbeam-utils/badge.svg)](https://docs.rs/crossbeam-utils) + +This crate provides utilities for concurrent programming. + +## Usage + +Add this to your `Cargo.toml`: + +```toml +[dependencies] +crossbeam-utils = "0.5" +``` + +Next, add this to your crate: + +```rust +extern crate crossbeam_utils; +``` + +The minimum required Rust version is 1.25. + +## License + +Licensed under the terms of MIT license and the Apache License (Version 2.0). + +See [LICENSE-MIT](LICENSE-MIT) and [LICENSE-APACHE](LICENSE-APACHE) for details. diff --git a/crossbeam-utils/src/cache_padded.rs b/crossbeam-utils/src/cache_padded.rs new file mode 100644 index 000000000..b288d9b2c --- /dev/null +++ b/crossbeam-utils/src/cache_padded.rs @@ -0,0 +1,172 @@ +use core::fmt; +use core::ops::{Deref, DerefMut}; + +/// Pads `T` to the length of a cache line. +/// +/// Sometimes concurrent programming requires a piece of data to be padded out to the size of a +/// cacheline to avoid "false sharing": cache lines being invalidated due to unrelated concurrent +/// activity. Use this type when you want to *avoid* cache locality. +/// +/// Cache lines are assumed to be 64 bytes on all architectures. +/// +/// # Size and alignment +/// +/// The size of `CachePadded` is the smallest multiple of 64 bytes large enough to accommodate +/// a value of type `T`. +/// +/// The alignment of `CachePadded` is the maximum of 64 bytes and the alignment of `T`. 
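+///
+/// # Example
+///
+/// A minimal sketch of the intended use: giving two frequently-updated counters their own cache
+/// lines so writes to one do not invalidate the other (the struct and field names here are only
+/// illustrative).
+///
+/// ```ignore
+/// use std::sync::atomic::{AtomicUsize, Ordering};
+/// use crossbeam_utils::CachePadded;
+///
+/// struct Counters {
+///     hits: CachePadded<AtomicUsize>,
+///     misses: CachePadded<AtomicUsize>,
+/// }
+///
+/// let c = Counters {
+///     hits: CachePadded::new(AtomicUsize::new(0)),
+///     misses: CachePadded::new(AtomicUsize::new(0)),
+/// };
+/// // `CachePadded` derefs to the inner value, so the atomics are used directly.
+/// c.hits.fetch_add(1, Ordering::Relaxed);
+/// assert_eq!(c.hits.load(Ordering::Relaxed), 1);
+/// ```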
+#[derive(Clone, Default)] +#[repr(align(64))] +pub struct CachePadded { + inner: T, +} + +unsafe impl Send for CachePadded {} +unsafe impl Sync for CachePadded {} + +impl CachePadded { + /// Pads a value to the length of a cache line. + pub fn new(t: T) -> CachePadded { + CachePadded:: { inner: t } + } +} + +impl Deref for CachePadded { + type Target = T; + + fn deref(&self) -> &T { + &self.inner + } +} + +impl DerefMut for CachePadded { + fn deref_mut(&mut self) -> &mut T { + &mut self.inner + } +} + +impl fmt::Debug for CachePadded { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let inner: &T = &*self; + write!(f, "CachePadded {{ {:?} }}", inner) + } +} + +impl From for CachePadded { + fn from(t: T) -> Self { + CachePadded::new(t) + } +} + +#[cfg(test)] +mod test { + use super::*; + use core::mem; + use core::cell::Cell; + + #[test] + fn default() { + let x: CachePadded = Default::default(); + assert_eq!(*x, 0); + } + + #[test] + fn store_u64() { + let x: CachePadded = CachePadded::new(17); + assert_eq!(*x, 17); + } + + #[test] + fn store_pair() { + let x: CachePadded<(u64, u64)> = CachePadded::new((17, 37)); + assert_eq!(x.0, 17); + assert_eq!(x.1, 37); + } + + #[test] + fn distance() { + let arr = [CachePadded::new(17u8), CachePadded::new(37u8)]; + let a = &*arr[0] as *const u8; + let b = &*arr[1] as *const u8; + assert!(unsafe { a.offset(64) } <= b); + } + + #[test] + fn different_sizes() { + CachePadded::new(17u8); + CachePadded::new(17u16); + CachePadded::new(17u32); + CachePadded::new([17u64; 0]); + CachePadded::new([17u64; 1]); + CachePadded::new([17u64; 2]); + CachePadded::new([17u64; 3]); + CachePadded::new([17u64; 4]); + CachePadded::new([17u64; 5]); + CachePadded::new([17u64; 6]); + CachePadded::new([17u64; 7]); + CachePadded::new([17u64; 8]); + } + + #[test] + fn large() { + let a = [17u64; 9]; + let b = CachePadded::new(a); + assert!(mem::size_of_val(&a) <= mem::size_of_val(&b)); + } + + #[test] + fn debug() { + assert_eq!( + format!("{:?}", CachePadded::new(17u64)), + "CachePadded { 17 }" + ); + } + + #[test] + fn drops() { + let count = Cell::new(0); + + struct Foo<'a>(&'a Cell); + + impl<'a> Drop for Foo<'a> { + fn drop(&mut self) { + self.0.set(self.0.get() + 1); + } + } + + let a = CachePadded::new(Foo(&count)); + let b = CachePadded::new(Foo(&count)); + + assert_eq!(count.get(), 0); + drop(a); + assert_eq!(count.get(), 1); + drop(b); + assert_eq!(count.get(), 2); + } + + #[test] + fn clone() { + let a = CachePadded::new(17); + let b = a.clone(); + assert_eq!(*a, *b); + } + + #[test] + fn runs_custom_clone() { + let count = Cell::new(0); + + struct Foo<'a>(&'a Cell); + + impl<'a> Clone for Foo<'a> { + fn clone(&self) -> Foo<'a> { + self.0.set(self.0.get() + 1); + Foo::<'a>(self.0) + } + } + + let a = CachePadded::new(Foo(&count)); + a.clone(); + + assert_eq!(count.get(), 1); + } +} diff --git a/crossbeam-utils/src/consume.rs b/crossbeam-utils/src/consume.rs new file mode 100644 index 000000000..7501040d2 --- /dev/null +++ b/crossbeam-utils/src/consume.rs @@ -0,0 +1,82 @@ +use core::sync::atomic::Ordering; +#[cfg(any(target_arch = "arm", target_arch = "aarch64"))] +use core::sync::atomic::compiler_fence; + +/// Trait which allows reading from an atomic type with "consume" ordering. +pub trait AtomicConsume { + /// Type returned by `load_consume`. + type Val; + + /// Loads a value from the atomic using a "consume" memory ordering. 
+ /// + /// This is similar to the "acquire" ordering, except that an ordering is + /// only guaranteed with operations that "depend on" the result of the load. + /// However consume loads are usually much faster than acquire loads on + /// architectures with a weak memory model since they don't require memory + /// fence instructions. + /// + /// The exact definition of "depend on" is a bit vague, but it works as you + /// would expect in practice since a lot of software, especially the Linux + /// kernel, rely on this behavior. + /// + /// This is currently only implemented on ARM and AArch64, where a fence + /// can be avoided. On other architectures this will fall back to a simple + /// `load(Ordering::Acquire)`. + fn load_consume(&self) -> Self::Val; +} + +#[cfg(any(target_arch = "arm", target_arch = "aarch64"))] +macro_rules! impl_consume { + () => { + #[inline] + fn load_consume(&self) -> Self::Val { + let result = self.load(Ordering::Relaxed); + compiler_fence(Ordering::Acquire); + result + } + }; +} + +#[cfg(not(any(target_arch = "arm", target_arch = "aarch64")))] +macro_rules! impl_consume { + () => { + #[inline] + fn load_consume(&self) -> Self::Val { + self.load(Ordering::Acquire) + } + }; +} + +macro_rules! impl_atomic { + ($atomic:ident, $val:ty) => { + impl AtomicConsume for ::core::sync::atomic::$atomic { + type Val = $val; + impl_consume!(); + } + }; +} + +impl_atomic!(AtomicBool, bool); +impl_atomic!(AtomicUsize, usize); +impl_atomic!(AtomicIsize, isize); +#[cfg(all(feature = "nightly", target_has_atomic = "8"))] +impl_atomic!(AtomicU8, u8); +#[cfg(all(feature = "nightly", target_has_atomic = "8"))] +impl_atomic!(AtomicI8, i8); +#[cfg(all(feature = "nightly", target_has_atomic = "16"))] +impl_atomic!(AtomicU16, u16); +#[cfg(all(feature = "nightly", target_has_atomic = "16"))] +impl_atomic!(AtomicI16, i16); +#[cfg(all(feature = "nightly", target_has_atomic = "32"))] +impl_atomic!(AtomicU32, u32); +#[cfg(all(feature = "nightly", target_has_atomic = "32"))] +impl_atomic!(AtomicI32, i32); +#[cfg(all(feature = "nightly", target_has_atomic = "64"))] +impl_atomic!(AtomicU64, u64); +#[cfg(all(feature = "nightly", target_has_atomic = "64"))] +impl_atomic!(AtomicI64, i64); + +impl AtomicConsume for ::core::sync::atomic::AtomicPtr { + type Val = *mut T; + impl_consume!(); +} diff --git a/crossbeam-utils/src/lib.rs b/crossbeam-utils/src/lib.rs new file mode 100644 index 000000000..1a0f89e20 --- /dev/null +++ b/crossbeam-utils/src/lib.rs @@ -0,0 +1,14 @@ +#![cfg_attr(feature = "nightly", feature(cfg_target_has_atomic, integer_atomics))] +#![cfg_attr(not(feature = "use_std"), no_std)] + +#[cfg(feature = "use_std")] +extern crate core; + +mod cache_padded; +mod consume; + +#[cfg(feature = "use_std")] +pub mod thread; + +pub use cache_padded::CachePadded; +pub use consume::AtomicConsume; diff --git a/crossbeam-utils/src/thread.rs b/crossbeam-utils/src/thread.rs new file mode 100644 index 000000000..072640173 --- /dev/null +++ b/crossbeam-utils/src/thread.rs @@ -0,0 +1,382 @@ +/// Scoped thread. 
+/// +/// # Examples +/// +/// A basic scoped thread: +/// +/// ``` +/// crossbeam_utils::thread::scope(|scope| { +/// scope.spawn(|| { +/// println!("Hello from a scoped thread!"); +/// }); +/// }); +/// ``` +/// +/// When writing concurrent Rust programs, you'll sometimes see a pattern like this, using +/// [`std::thread::spawn`][spawn]: +/// +/// ```ignore +/// let array = [1, 2, 3]; +/// let mut guards = vec![]; +/// +/// for i in &array { +/// let guard = std::thread::spawn(move || { +/// println!("element: {}", i); +/// }); +/// +/// guards.push(guard); +/// } +/// +/// for guard in guards { +/// guard.join().unwrap(); +/// } +/// ``` +/// +/// The basic pattern is: +/// +/// 1. Iterate over some collection. +/// 2. Spin up a thread to operate on each part of the collection. +/// 3. Join all the threads. +/// +/// However, this code actually gives an error: +/// +/// ```text +/// error: `array` does not live long enough +/// for i in &array { +/// ^~~~~ +/// in expansion of for loop expansion +/// note: expansion site +/// note: reference must be valid for the static lifetime... +/// note: ...but borrowed value is only valid for the block suffix following statement 0 at ... +/// let array = [1, 2, 3]; +/// let mut guards = vec![]; +/// +/// for i in &array { +/// let guard = std::thread::spawn(move || { +/// println!("element: {}", i); +/// ... +/// error: aborting due to previous error +/// ``` +/// +/// Because [`std::thread::spawn`][spawn] doesn't know about this scope, it requires a +/// `'static` lifetime. One way of giving it a proper lifetime is to use an [`Arc`][arc]: +/// +/// [arc]: http://doc.rust-lang.org/stable/std/sync/struct.Arc.html +/// [spawn]: https://doc.rust-lang.org/stable/std/thread/fn.spawn.html +/// +/// ``` +/// use std::sync::Arc; +/// +/// let array = Arc::new([1, 2, 3]); +/// let mut guards = vec![]; +/// +/// for i in 0..array.len() { +/// let a = array.clone(); +/// +/// let guard = std::thread::spawn(move || { +/// println!("element: {}", a[i]); +/// }); +/// +/// guards.push(guard); +/// } +/// +/// for guard in guards { +/// guard.join().unwrap(); +/// } +/// ``` +/// +/// But this introduces unnecessary allocation, as `Arc` puts its data on the heap, and we +/// also end up dealing with reference counts. We know that we're joining the threads before +/// our function returns, so just taking a reference _should_ be safe. Rust can't know that, +/// though. +/// +/// Enter scoped threads. Here's our original example, using `spawn` from crossbeam rather +/// than from `std::thread`: +/// +/// ``` +/// let array = [1, 2, 3]; +/// +/// crossbeam_utils::thread::scope(|scope| { +/// for i in &array { +/// scope.spawn(move || { +/// println!("element: {}", i); +/// }); +/// } +/// }); +/// ``` +/// +/// Much more straightforward. +// FIXME(jeehoonkang): maybe we should create a new crate for scoped threads. + +use std::cell::RefCell; +use std::fmt; +use std::marker::PhantomData; +use std::mem::{self, ManuallyDrop}; +use std::ops::DerefMut; +use std::rc::Rc; +use std::thread; +use std::io; + +#[doc(hidden)] +trait FnBox { + fn call_box(self: Box) -> T; +} + +impl T> FnBox for F { + fn call_box(self: Box) -> T { + (*self)() + } +} + +/// Like `std::thread::spawn`, but without the closure bounds. 
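+///
+/// # Example
+///
+/// A minimal sketch: since the `'static` bound is dropped, the caller is responsible for making
+/// sure everything the closure borrows outlives the thread, e.g. by joining it first (the vector
+/// below is only illustrative).
+///
+/// ```ignore
+/// let data = vec![1, 2, 3];
+/// let handle = unsafe { spawn_unchecked(|| println!("{:?}", data)) };
+/// handle.join().unwrap(); // join while `data` is still alive
+/// ```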
+pub unsafe fn spawn_unchecked<'a, F>(f: F) -> thread::JoinHandle<()> +where + F: FnOnce() + Send + 'a, +{ + let builder = thread::Builder::new(); + builder_spawn_unchecked(builder, f).unwrap() +} + +/// Like `std::thread::Builder::spawn`, but without the closure bounds. +pub unsafe fn builder_spawn_unchecked<'a, F>( + builder: thread::Builder, + f: F, +) -> io::Result> +where + F: FnOnce() + Send + 'a, +{ + let closure: Box + 'a> = Box::new(f); + let closure: Box + Send> = mem::transmute(closure); + builder.spawn(move || closure.call_box()) +} + +pub struct Scope<'a> { + /// The list of the deferred functions and thread join jobs. + dtors: RefCell>>, + // !Send + !Sync + _marker: PhantomData<*const ()>, +} + +struct DtorChain<'a, T> { + dtor: Box + 'a>, + next: Option>>, +} + +impl<'a, T> DtorChain<'a, T> { + pub fn pop(chain: &mut Option>) -> Option + 'a>> { + chain.take().map(|mut node| { + *chain = node.next.take().map(|b| *b); + node.dtor + }) + } +} + +struct JoinState { + join_handle: thread::JoinHandle<()>, + result: usize, + _marker: PhantomData, +} + +impl JoinState { + fn new(join_handle: thread::JoinHandle<()>, result: usize) -> JoinState { + JoinState { + join_handle: join_handle, + result: result, + _marker: PhantomData, + } + } + + fn join(self) -> thread::Result { + let result = self.result; + self.join_handle.join().map(|_| { + unsafe { ManuallyDrop::into_inner(*Box::from_raw(result as *mut ManuallyDrop))} + }) + } +} + +/// A handle to a scoped thread +pub struct ScopedJoinHandle<'a, T: 'a> { + // !Send + !Sync + inner: Rc>>>, + thread: thread::Thread, + _marker: PhantomData<&'a T>, +} + +/// Create a new `scope`, for deferred destructors. +/// +/// Scopes, in particular, support [*scoped thread spawning*](struct.Scope.html#method.spawn). +/// +/// # Examples +/// +/// Creating and using a scope: +/// +/// ``` +/// crossbeam_utils::thread::scope(|scope| { +/// scope.defer(|| println!("Exiting scope")); +/// scope.spawn(|| println!("Running child thread in scope")) +/// }); +/// // Prints messages in the reverse order written +/// ``` +/// +/// # Panics +/// +/// `thread::scope()` panics if a spawned thread panics but it is not joined inside the scope. +pub fn scope<'a, F, R>(f: F) -> R +where + F: FnOnce(&Scope<'a>) -> R, +{ + let mut scope = Scope { + dtors: RefCell::new(None), + _marker: PhantomData, + }; + let ret = f(&scope); + scope.drop_all(); + ret +} + +impl<'a> fmt::Debug for Scope<'a> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "Scope {{ ... }}") + } +} + +impl<'a, T> fmt::Debug for ScopedJoinHandle<'a, T> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "ScopedJoinHandle {{ ... }}") + } +} + +impl<'a> Scope<'a> { + // This method is carefully written in a transactional style, so + // that it can be called directly and, if any dtor panics, can be + // resumed in the unwinding this causes. By initially running the + // method outside of any destructor, we avoid any leakage problems + // due to @rust-lang/rust#14875. + fn drop_all(&mut self) { + while let Some(dtor) = DtorChain::pop(&mut self.dtors.borrow_mut()) { + dtor.call_box(); + } + } + + /// Schedule code to be executed when exiting the scope. + /// + /// This is akin to having a destructor on the stack, except that it is + /// *guaranteed* to be run. It is guaranteed that the function is called + /// after all the spawned threads are joined. 
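+    ///
+    /// # Example
+    ///
+    /// A minimal sketch of the ordering guarantee (the printed messages are only illustrative):
+    ///
+    /// ```ignore
+    /// crossbeam_utils::thread::scope(|scope| {
+    ///     scope.defer(|| println!("runs last, after the spawned thread has been joined"));
+    ///     scope.spawn(|| println!("runs in the scoped thread"));
+    /// });
+    /// ```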
+ pub fn defer(&self, f: F) + where + F: FnOnce() + 'a, + { + let mut dtors = self.dtors.borrow_mut(); + *dtors = Some(DtorChain { + dtor: Box::new(f), + next: dtors.take().map(Box::new), + }); + } + + /// Create a scoped thread. + /// + /// `spawn` is similar to the [`spawn`][spawn] function in Rust's standard library. The + /// difference is that this thread is scoped, meaning that it's guaranteed to terminate + /// before the current stack frame goes away, allowing you to reference the parent stack frame + /// directly. This is ensured by having the parent thread join on the child thread before the + /// scope exits. + /// + /// [spawn]: http://doc.rust-lang.org/std/thread/fn.spawn.html + pub fn spawn<'s, F, T>(&'s self, f: F) -> ScopedJoinHandle<'a, T> + where + 'a: 's, + F: FnOnce() -> T + Send + 'a, + T: Send + 'a, + { + self.builder().spawn(f).unwrap() + } + + /// Generates the base configuration for spawning a scoped thread, from which configuration + /// methods can be chained. + pub fn builder<'s>(&'s self) -> ScopedThreadBuilder<'s, 'a> { + ScopedThreadBuilder { + scope: self, + builder: thread::Builder::new(), + } + } +} + +/// Scoped thread configuration. Provides detailed control over the properties and behavior of new +/// scoped threads. +pub struct ScopedThreadBuilder<'s, 'a: 's> { + scope: &'s Scope<'a>, + builder: thread::Builder, +} + +impl<'s, 'a: 's> ScopedThreadBuilder<'s, 'a> { + /// Names the thread-to-be. Currently the name is used for identification only in panic + /// messages. + pub fn name(mut self, name: String) -> ScopedThreadBuilder<'s, 'a> { + self.builder = self.builder.name(name); + self + } + + /// Sets the size of the stack for the new thread. + pub fn stack_size(mut self, size: usize) -> ScopedThreadBuilder<'s, 'a> { + self.builder = self.builder.stack_size(size); + self + } + + /// Spawns a new thread, and returns a join handle for it. + pub fn spawn(self, f: F) -> io::Result> + where + F: FnOnce() -> T + Send + 'a, + T: Send + 'a, + { + // The `Box` constructed below is written only by the spawned thread, + // and read by the current thread only after the spawned thread is + // joined (`JoinState::join()`). Thus there are no data races. + let result = Box::into_raw(Box::>::new(unsafe { mem::uninitialized() })) as usize; + + let join_handle = try!(unsafe { + builder_spawn_unchecked(self.builder, move || { + let mut result = Box::from_raw(result as *mut ManuallyDrop); + *result = ManuallyDrop::new(f()); + mem::forget(result); + }) + }); + let thread = join_handle.thread().clone(); + + let join_state = JoinState::::new(join_handle, result); + let deferred_handle = Rc::new(RefCell::new(Some(join_state))); + let my_handle = deferred_handle.clone(); + + self.scope.defer(move || { + let state = mem::replace(deferred_handle.borrow_mut().deref_mut(), None); + if let Some(state) = state { + state.join().unwrap(); + } + }); + + Ok(ScopedJoinHandle { + inner: my_handle, + thread: thread, + _marker: PhantomData, + }) + } +} + +impl<'a, T: Send + 'a> ScopedJoinHandle<'a, T> { + /// Join the scoped thread, returning the result it produced. + pub fn join(self) -> thread::Result { + let state = mem::replace(self.inner.borrow_mut().deref_mut(), None); + state.unwrap().join() + } + + /// Get the underlying thread handle. 
+ pub fn thread(&self) -> &thread::Thread { + &self.thread + } +} + +impl<'a> Drop for Scope<'a> { + fn drop(&mut self) { + self.drop_all() + } +} diff --git a/crypto-hash/.cargo-checksum.json b/crypto-hash/.cargo-checksum.json new file mode 100644 index 000000000..720e886fc --- /dev/null +++ b/crypto-hash/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{},"package":"09de9ee0fc255ace04c7fa0763c9395a945c37c8292bb554f8d48361d1dcf1b4"} \ No newline at end of file diff --git a/crypto-hash/CONTRIBUTING.md b/crypto-hash/CONTRIBUTING.md new file mode 100644 index 000000000..1b33a9ec1 --- /dev/null +++ b/crypto-hash/CONTRIBUTING.md @@ -0,0 +1,77 @@ +# Contributing to `crypto-hash` + +`crypto-hash` is a part of the Rust ecosystem. As such, all contributions to this project follow the +[Rust language's code of conduct](https://www.rust-lang.org/conduct.html) where appropriate. + +This project is hosted at [GitHub](https://github.com/malept/crypto-hash). Both pull requests and +issues of many different kinds are accepted. + +## Filing Issues + +Issues include bugs, questions, feedback, and feature requests. Before you file a new issue, please +make sure that your issue has not already been filed by someone else. + +### Filing Bugs + +When filing a bug, please include the following information: + +* Operating system and version. If on Linux, please also include the distribution name. +* System architecture. Examples include: x86-64, x86, and ARMv7. +* Rust version that compiled `crypto-hash`. +* The version (and/or git revision) of `crypto-hash`. +* A detailed list of steps to reproduce the bug. A minimal testcase would be very helpful, + if possible. +* If there any any error messages in the console, copying them in the bug summary will be + very helpful. + +## Adding a new implementation + +If you are requesting or adding a new library source for hash algorithms, please make sure that it +supports all of the existing algorithms. For example, while the creator of this project supports the +efforts of the team writing LibreSSL, it does not support the MD5 algorithm. + +## Adding a new hash algorithm + +If you are requesting or adding a wrapper for a new hash algorithm, please make sure that it is +available in all of the supported implementations listed in the README. + +## Filing Pull Requests + +Here are some things to keep in mind as you file a pull request to fix a bug, add a new feature, +etc.: + +* Travis CI (for Linux and OS X) and AppVeyor (for Windows) are used to make sure that the project + builds as expected on the supported platforms, using the current stable and beta versions of Rust. + Make sure the testsuite passes locally by running `cargo test`. +* Unless it's impractical, please write tests for your changes. This will help spot regressions + much easier. +* If your PR changes the behavior of an existing feature, or adds a new feature, please add/edit + the `rustdoc` inline documentation. +* Please ensure that your changes follow the [rustfmt](https://github.com/rust-lang-nursery/rustfmt) + coding standard, and do not produce any warnings when running the + [clippy](https://github.com/Manishearth/rust-clippy) linter. +* If you are contributing a nontrivial change, please add an entry to `NEWS.md`. The format is + similar to the one described at [Keep a Changelog](http://keepachangelog.com/). +* Please make sure your commits are rebased onto the latest commit in the master branch, and that + you limit/squash the number of commits created to a "feature"-level. 
For instance: + +bad: + +``` +commit 1: add foo algorithm +commit 2: run rustfmt +commit 3: add test +commit 4: add docs +commit 5: add bar +commit 6: add test + docs +``` + +good: + +``` +commit 1: add foo algorithm +commit 2: add bar +``` + +If you are continuing the work of another person's PR and need to rebase/squash, please retain the +attribution of the original author(s) and continue the work in subsequent commits. diff --git a/crypto-hash/Cargo.toml b/crypto-hash/Cargo.toml new file mode 100644 index 000000000..297c4f07e --- /dev/null +++ b/crypto-hash/Cargo.toml @@ -0,0 +1,37 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "crypto-hash" +version = "0.3.1" +authors = ["Mark Lee"] +exclude = [".*.yml", "ci/*"] +description = "A wrapper for OS-level cryptographic hash functions" +documentation = "https://docs.rs/crypto-hash" +readme = "README.md" +keywords = ["crypto", "hash", "digest"] +license = "MIT" +repository = "https://github.com/malept/crypto-hash" +[dependencies.hex] +version = "0.3" +[target."cfg(not(any(target_os = \"windows\", target_os = \"macos\")))".dependencies.openssl] +version = "0.10" +[target."cfg(target_os = \"macos\")".dependencies.commoncrypto] +version = "0.2" +[target."cfg(target_os = \"windows\")".dependencies.winapi] +version = "0.3" +features = ["minwindef", "wincrypt"] +[badges.appveyor] +repository = "malept/crypto-hash" + +[badges.travis-ci] +repository = "malept/crypto-hash" diff --git a/crypto-hash/LICENSE b/crypto-hash/LICENSE new file mode 100644 index 000000000..e3eb5500c --- /dev/null +++ b/crypto-hash/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2015, 2016 Mark Lee + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/crypto-hash/Makefile b/crypto-hash/Makefile new file mode 100644 index 000000000..bc2510259 --- /dev/null +++ b/crypto-hash/Makefile @@ -0,0 +1,29 @@ +CARGO ?= cargo +CARGO_BUILD_TEST = $(CARGO) test --no-run +KCOV ?= kcov +TEST_APP = debug/crypto_hash-*.exe +WIN_TARGET = x86_64-pc-windows-gnu + +build-test: + $(CARGO_BUILD_TEST) + +check-i686: + PKG_CONFIG_LIBDIR=/usr/lib/i386-linux-gnu/pkgconfig \ + PKG_CONFIG_ALLOW_CROSS=1 \ + $(CARGO) test --target i686-unknown-linux-gnu --verbose + +check-wine64: + $(CARGO_BUILD_TEST) --target $(WIN_TARGET) + WINEPREFIX=$(HOME)/.local/share/wineprefixes/wine64 wine64 target/$(WIN_TARGET)/$(TEST_APP) + +cov: build-test + $(KCOV) --exclude-pattern=/.multirust,test.rs target/cov target/$(TEST_APP) + +debug: build-test + rust-gdb target/$(TEST_APP) + +fmt: + $(CARGO) fmt + +lint: + $(CARGO) +nightly clippy -- --allow clippy-pedantic diff --git a/crypto-hash/NEWS.md b/crypto-hash/NEWS.md new file mode 100644 index 000000000..56be0d3bd --- /dev/null +++ b/crypto-hash/NEWS.md @@ -0,0 +1,62 @@ +# Changes by Version + +## Unreleased + +## [0.3.1] - 2018-02-14 + +[0.3.1]: https://github.com/malept/crypto-hash/compare/v0.3.0...v0.3.1 + +### Changed + +* Upgrade to `openssl` 0.10.x (#1) +* Upgrade to `winapi` 0.3.x + +## [0.3.0] - 2017-06-18 + +[0.3.0]: https://github.com/malept/crypto-hash/compare/v0.2.1...v0.3.0 + +### Changed + +* Upgrade to `commoncrypto` 0.2.x +* Function signatures for `digest` and `hex_digest` changed to use `&[u8]`, per Clippy + +## [0.2.1] - 2016-12-12 + +[0.2.1]: https://github.com/malept/crypto-hash/compare/v0.2.0...v0.2.1 + +### Changed + +* Move CommonCrypto implementation to its own crate + +## [0.2.0] - 2016-11-06 + +[0.2.0]: https://github.com/malept/crypto-hash/compare/v0.1.0...v0.2.0 + +### Added + +* SHA-1 algorithm + +### Changed + +* Upgrade rust-openssl to 0.9 + +## [0.1.0] - 2016-06-26 + +[0.1.0]: https://github.com/malept/crypto-hash/releases/tag/v0.1.0 + +This release signifies the minimum amount of algorithms and implementations necessary for +[HTTP digest authentication](https://tools.ietf.org/html/rfc7616). + +### Added + +Algorithms: + +* MD5 +* SHA256 +* SHA512 + +Implementations: + +* CommonCrypto (OS X) +* CryptoAPI (Windows) +* OpenSSL (Linux/BSD/etc.) diff --git a/crypto-hash/README.md b/crypto-hash/README.md new file mode 100644 index 000000000..de70bb5be --- /dev/null +++ b/crypto-hash/README.md @@ -0,0 +1,54 @@ +# `crypto-hash` + +[![Linux/OS X Status](https://travis-ci.org/malept/crypto-hash.svg?branch=master)](https://travis-ci.org/malept/crypto-hash) +[![Windows status](https://ci.appveyor.com/api/projects/status/xwc9nb4633b5n67r/branch/master?svg=true)](https://ci.appveyor.com/project/malept/crypto-hash) +[![Crates.io](https://img.shields.io/crates/v/crypto-hash.svg?maxAge=2592000)](https://crates.io/crates/crypto-hash) + +`crypto-hash` is a Rust wrapper around OS-level implementations of cryptographic hash functions. + +The purpose of this crate is to provide access to hash algorithms with as few dependencies as +possible. This means that when possible, the library uses the hashing functions that are provided by +the given operating system's bundled cryptographic libraries. 
+ +## Supported Implementations + +By operating system: + +* Windows: CryptoAPI +* OS X: [CommonCrypto](https://crates.io/crates/commoncrypto) +* Linux/BSD/etc.: [OpenSSL](https://crates.io/crates/openssl) + +## Supported Algorithms + +* MD5 +* SHA1 +* SHA256 +* SHA512 + +## Usage + +Add `crypto-hash` to your project's `Cargo.toml`. For more details, consult the +[Cargo guide](http://doc.crates.io/guide.html#adding-dependencies). + +Example: + +```rust +use crypto_hash::{Algorithm, hex_digest}; + +let digest = hex_digest(Algorithm::SHA256, b"crypto-hash"); +``` + +For more examples, consult the [documentation](https://malept.github.io/crypto-hash/). + +## [Release Notes](https://github.com/malept/crypto-hash/blob/master/NEWS.md) + +## [Contributing](https://github.com/malept/crypto-hash/blob/master/CONTRIBUTING.md) + +## Acknowledgements + +This crate was inspired by [rust-native-tls](https://github.com/sfackler/rust-native-tls) and +[crypto-bench](https://github.com/briansmith/crypto-bench). + +## Legal + +`crypto-hash` is copyrighted under the terms of the MIT license. See LICENSE for details. diff --git a/crypto-hash/src/imp/commoncrypto.rs b/crypto-hash/src/imp/commoncrypto.rs new file mode 100644 index 000000000..310dc8018 --- /dev/null +++ b/crypto-hash/src/imp/commoncrypto.rs @@ -0,0 +1,81 @@ +// Copyright (c) 2016 Mark Lee +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. + +//! A cryptographic hash generator dependent upon OSX's `CommonCrypto`. + +use commoncrypto::hash; +use std::io; +use super::Algorithm; + +/// Generator of digests using a cryptographic hash function. +/// +/// # Examples +/// +/// ```rust +/// use crypto_hash::{Algorithm, Hasher}; +/// use std::io::Write; +/// +/// let mut hasher = Hasher::new(Algorithm::SHA256); +/// hasher.write_all(b"crypto"); +/// hasher.write_all(b"-"); +/// hasher.write_all(b"hash"); +/// let result = hasher.finish(); +/// let expected = +/// b"\xfd\x1a\xfb`\"\xcdMG\xc8\x90\x96\x1cS9(\xea\xcf\xe8!\x9f\x1b%$\xf7\xfb*a\x84}\xdf\x8c'" +/// .to_vec(); +/// assert_eq!(expected, result) +/// ``` +#[derive(Debug)] +pub struct Hasher(hash::Hasher); + +impl Hasher { + /// Create a new `Hasher` for the given `Algorithm`. 
+ pub fn new(algorithm: Algorithm) -> Hasher { + let cc_algorithm = match algorithm { + Algorithm::MD5 => hash::CCDigestAlgorithm::kCCDigestMD5, + Algorithm::SHA1 => hash::CCDigestAlgorithm::kCCDigestSHA1, + Algorithm::SHA256 => hash::CCDigestAlgorithm::kCCDigestSHA256, + Algorithm::SHA512 => hash::CCDigestAlgorithm::kCCDigestSHA512, + }; + + Hasher(hash::Hasher::new(cc_algorithm)) + } + + /// Generate a digest from the data written to the `Hasher`. + pub fn finish(&mut self) -> Vec { + let Hasher(ref mut hasher) = *self; + match hasher.finish() { + Ok(digest) => digest, + Err(error) => panic!("CommonCrypto error: {}", error), + } + } +} + +impl io::Write for Hasher { + fn write(&mut self, buf: &[u8]) -> io::Result { + let Hasher(ref mut hasher) = *self; + hasher.write(buf) + } + + fn flush(&mut self) -> io::Result<()> { + let Hasher(ref mut hasher) = *self; + hasher.flush() + } +} diff --git a/crypto-hash/src/imp/cryptoapi.rs b/crypto-hash/src/imp/cryptoapi.rs new file mode 100644 index 000000000..47d95095a --- /dev/null +++ b/crypto-hash/src/imp/cryptoapi.rs @@ -0,0 +1,159 @@ +// Copyright (c) 2016 Mark Lee +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. + +//! A cryptographic hash generator dependent upon Windows's `CryptoAPI`. +//! +//! Originally based on: +//! https://github.com/rust-lang/cargo/blob/0.10.0/src/cargo/util/sha256.rs +//! which is copyright (c) 2014 The Rust Project Developers under the MIT license. + +use std::io; +use std::ptr; +use super::Algorithm; +use winapi::shared::minwindef::DWORD; +use winapi::um::wincrypt::{CALG_MD5, CALG_SHA1, CALG_SHA_256, CALG_SHA_512, CryptAcquireContextW, + CryptCreateHash, CryptDestroyHash, CryptGetHashParam, CryptHashData, + CryptReleaseContext, CRYPT_SILENT, CRYPT_VERIFYCONTEXT, HCRYPTHASH, + HCRYPTPROV, HP_HASHVAL, PROV_RSA_AES}; + +macro_rules! call { + ($e: expr) => ({ + if $e == 0 { + panic!("failed {}: {}", stringify!($e), io::Error::last_os_error()) + } + }) +} + +macro_rules! finish_algorithm { + ($func_name: ident, $size: ident) => { + fn $func_name(&mut self) -> Vec { + let mut len = $size as u32; + let mut hash = [0u8; $size]; + call!(unsafe { + CryptGetHashParam(self.hcrypthash, HP_HASHVAL, hash.as_mut_ptr(), &mut len, 0) + }); + assert_eq!(len as usize, hash.len()); + hash.to_vec() + } + } +} + +const MD5_LENGTH: usize = 16; +const SHA1_LENGTH: usize = 20; +const SHA256_LENGTH: usize = 32; +const SHA512_LENGTH: usize = 64; + +/// Generator of digests using a cryptographic hash function. 
+/// +/// # Examples +/// +/// ```rust +/// use crypto_hash::{Algorithm, Hasher}; +/// use std::io::Write; +/// +/// let mut hasher = Hasher::new(Algorithm::SHA256); +/// hasher.write_all(b"crypto"); +/// hasher.write_all(b"-"); +/// hasher.write_all(b"hash"); +/// let result = hasher.finish(); +/// let expected = +/// b"\xfd\x1a\xfb`\"\xcdMG\xc8\x90\x96\x1cS9(\xea\xcf\xe8!\x9f\x1b%$\xf7\xfb*a\x84}\xdf\x8c'" +/// .to_vec(); +/// assert_eq!(expected, result) +/// ``` +pub struct Hasher { + algorithm: Algorithm, + hcryptprov: HCRYPTPROV, + hcrypthash: HCRYPTHASH, +} + +impl Hasher { + /// Create a new `Hasher` for the given `Algorithm`. + pub fn new(algorithm: Algorithm) -> Hasher { + let mut hcp = 0; + call!(unsafe { + CryptAcquireContextW( + &mut hcp, + ptr::null(), + ptr::null(), + PROV_RSA_AES, + CRYPT_VERIFYCONTEXT | CRYPT_SILENT, + ) + }); + + let hash_type = match algorithm { + Algorithm::MD5 => CALG_MD5, + Algorithm::SHA1 => CALG_SHA1, + Algorithm::SHA256 => CALG_SHA_256, + Algorithm::SHA512 => CALG_SHA_512, + }; + + let mut ret = Hasher { + algorithm: algorithm, + hcryptprov: hcp, + hcrypthash: 0, + }; + + call!(unsafe { CryptCreateHash(ret.hcryptprov, hash_type, 0, 0, &mut ret.hcrypthash) }); + ret + } + + /// Generate a digest from the data written to the `Hasher`. + pub fn finish(&mut self) -> Vec { + match self.algorithm { + Algorithm::MD5 => self.finish_md5(), + Algorithm::SHA1 => self.finish_sha1(), + Algorithm::SHA256 => self.finish_sha256(), + Algorithm::SHA512 => self.finish_sha512(), + } + } + + finish_algorithm!(finish_md5, MD5_LENGTH); + finish_algorithm!(finish_sha1, SHA1_LENGTH); + finish_algorithm!(finish_sha256, SHA256_LENGTH); + finish_algorithm!(finish_sha512, SHA512_LENGTH); +} + +impl io::Write for Hasher { + fn write(&mut self, buf: &[u8]) -> io::Result { + call!(unsafe { + CryptHashData( + self.hcrypthash, + buf.as_ptr() as *mut _, + buf.len() as DWORD, + 0, + ) + }); + Ok(buf.len()) + } + + fn flush(&mut self) -> io::Result<()> { + Ok(()) + } +} + +impl Drop for Hasher { + fn drop(&mut self) { + if self.hcrypthash != 0 { + call!(unsafe { CryptDestroyHash(self.hcrypthash) }); + } + call!(unsafe { CryptReleaseContext(self.hcryptprov, 0) }); + } +} diff --git a/crypto-hash/src/imp/openssl.rs b/crypto-hash/src/imp/openssl.rs new file mode 100644 index 000000000..f786f628b --- /dev/null +++ b/crypto-hash/src/imp/openssl.rs @@ -0,0 +1,85 @@ +// Copyright (c) 2015, 2016 Mark Lee +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. + +//! 
A cryptographic hash digest generator dependent upon `OpenSSL`. + +#![warn(missing_docs)] + +use openssl::hash; +use std::io; +use super::Algorithm; + +/// Generator of digests using a cryptographic hash function. +/// +/// # Examples +/// +/// ```rust +/// use crypto_hash::{Algorithm, Hasher}; +/// use std::io::Write; +/// +/// let mut hasher = Hasher::new(Algorithm::SHA256); +/// hasher.write_all(b"crypto"); +/// hasher.write_all(b"-"); +/// hasher.write_all(b"hash"); +/// let result = hasher.finish(); +/// let expected = +/// b"\xfd\x1a\xfb`\"\xcdMG\xc8\x90\x96\x1cS9(\xea\xcf\xe8!\x9f\x1b%$\xf7\xfb*a\x84}\xdf\x8c'" +/// .to_vec(); +/// assert_eq!(expected, result) +/// ``` +pub struct Hasher(hash::Hasher); + +impl Hasher { + /// Create a new `Hasher` for the given `Algorithm`. + pub fn new(algorithm: Algorithm) -> Hasher { + let hash_type = match algorithm { + Algorithm::MD5 => hash::MessageDigest::md5(), + Algorithm::SHA1 => hash::MessageDigest::sha1(), + Algorithm::SHA256 => hash::MessageDigest::sha256(), + Algorithm::SHA512 => hash::MessageDigest::sha512(), + }; + + match hash::Hasher::new(hash_type) { + Ok(hasher) => Hasher(hasher), + Err(error_stack) => panic!("OpenSSL error(s): {}", error_stack), + } + } + + /// Generate a digest from the data written to the `Hasher`. + pub fn finish(&mut self) -> Vec { + let Hasher(ref mut hasher) = *self; + match hasher.finish() { + Ok(digest) => digest.to_vec(), + Err(error_stack) => panic!("OpenSSL error(s): {}", error_stack), + } + } +} + +impl io::Write for Hasher { + fn write(&mut self, buf: &[u8]) -> io::Result { + let Hasher(ref mut hasher) = *self; + hasher.write(buf) + } + + fn flush(&mut self) -> io::Result<()> { + let Hasher(ref mut hasher) = *self; + hasher.flush() + } +} diff --git a/crypto-hash/src/lib.rs b/crypto-hash/src/lib.rs new file mode 100644 index 000000000..e01f7e786 --- /dev/null +++ b/crypto-hash/src/lib.rs @@ -0,0 +1,119 @@ +// Copyright (c) 2015, 2016, 2017 Mark Lee +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. + +//! A set of [cryptographic hash +//! functions](https://en.wikipedia.org/wiki/Cryptographic_hash_function) provided by the operating +//! system, when available. +//! +//! The purpose of this crate is to provide access to hash algorithms with as few dependencies as +//! possible. This means that when possible, the library uses the hashing functions that are +//! provided by the given operating system's bundled cryptographic libraries. +//! +//! # Supported Implementations +//! 
+//! By operating system: +//! +//! * Windows: `CryptoAPI` +//! * Mac OS X: `CommonCrypto` +//! * Linux/BSD/etc.: `OpenSSL` +//! +//! # Supported Algorithms +//! +//! * MD5 +//! * SHA1 +//! * SHA256 +//! * SHA512 + +#![warn(missing_docs)] + +#[cfg(target_os = "macos")] +extern crate commoncrypto; +extern crate hex; +#[cfg(not(any(target_os = "macos", target_os = "windows")))] +extern crate openssl; +#[cfg(target_os = "windows")] +extern crate winapi; + +use std::io::Write; + +#[cfg(target_os = "macos")] +#[path = "imp/commoncrypto.rs"] +mod imp; +#[cfg(target_os = "windows")] +#[path = "imp/cryptoapi.rs"] +mod imp; +#[cfg(not(any(target_os = "macos", target_os = "windows")))] +#[path = "imp/openssl.rs"] +mod imp; + +mod test; + +pub use imp::Hasher; + +/// Available cryptographic hash functions. +#[derive(Clone, Debug)] +pub enum Algorithm { + /// Popular message digest algorithm, only available for backwards compatibility purposes. + MD5, + /// SHA-1 algorithm from NIST FIPS, only available for backwards compatibility purposes. + SHA1, + /// SHA-2 family algorithm (256 bits). + SHA256, + /// SHA-2 family algorithm (512 bits). + SHA512, +} + +/// Helper function for `Hasher` which generates a cryptographic digest from the given +/// data and algorithm. +/// +/// # Examples +/// +/// ```rust +/// use crypto_hash::{Algorithm, digest}; +/// +/// let data = b"crypto-hash"; +/// let result = digest(Algorithm::SHA256, data); +/// let expected = +/// b"\xfd\x1a\xfb`\"\xcdMG\xc8\x90\x96\x1cS9(\xea\xcf\xe8!\x9f\x1b%$\xf7\xfb*a\x84}\xdf\x8c'" +/// .to_vec(); +/// assert_eq!(expected, result) +/// ``` +pub fn digest(algorithm: Algorithm, data: &[u8]) -> Vec { + let mut hasher = imp::Hasher::new(algorithm); + hasher.write_all(data).expect("Could not write hash data"); + hasher.finish() +} + +/// Helper function for `Hasher` which generates a cryptographic digest serialized in +/// hexadecimal from the given data and algorithm. +/// +/// # Examples +/// +/// ```rust +/// use crypto_hash::{Algorithm, hex_digest}; +/// +/// let data = b"crypto-hash"; +/// let result = hex_digest(Algorithm::SHA256, data); +/// let expected = "fd1afb6022cd4d47c890961c533928eacfe8219f1b2524f7fb2a61847ddf8c27"; +/// assert_eq!(expected, result) +/// ``` +pub fn hex_digest(algorithm: Algorithm, data: &[u8]) -> String { + hex::encode(digest(algorithm, data)) +} diff --git a/crypto-hash/src/test.rs b/crypto-hash/src/test.rs new file mode 100644 index 000000000..c150770df --- /dev/null +++ b/crypto-hash/src/test.rs @@ -0,0 +1,83 @@ +// Copyright (c) 2016, 2017 Mark Lee +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. + +#![cfg(test)] + +use hex; +use std::io::Write; +use super::{hex_digest, Algorithm, Hasher}; + +// From Wikipedia +const MD5_EMPTY_STRING: &'static str = "d41d8cd98f00b204e9800998ecf8427e"; +const SHA1_EMPTY_STRING: &'static str = "da39a3ee5e6b4b0d3255bfef95601890afd80709"; +const SHA256_EMPTY_STRING: &'static str = concat!( + "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649", + "b934ca495991b7852b855" +); +const SHA512_EMPTY_STRING: &'static str = concat!( + "cf83e1357eefb8bdf1542850d66d8007d620e4050b5", + "715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318", + "d2877eec2f63b931bd47417a81a538327af927da3e" +); +const TO_HASH: &'static str = "The quick brown fox jumps over the lazy dog"; +const TO_HASH_MD5: &'static str = "9e107d9d372bb6826bd81d3542a419d6"; + +#[test] +fn md5_empty_string() { + assert_hex_hashed_empty_string(Algorithm::MD5, MD5_EMPTY_STRING) +} + +#[test] +fn sha1_empty_string() { + assert_hex_hashed_empty_string(Algorithm::SHA1, SHA1_EMPTY_STRING) +} + +#[test] +fn sha256_empty_string() { + // From Wikipedia + assert_hex_hashed_empty_string(Algorithm::SHA256, SHA256_EMPTY_STRING) +} + +#[test] +fn sha512_empty_string() { + assert_hex_hashed_empty_string(Algorithm::SHA512, SHA512_EMPTY_STRING) +} + +#[test] +fn hasher_sans_write() { + let mut hasher = Hasher::new(Algorithm::MD5); + let actual = hex::encode(hasher.finish()); + assert_eq!(MD5_EMPTY_STRING, actual) +} + +#[test] +fn hasher_with_write() { + let mut hasher = Hasher::new(Algorithm::MD5); + hasher + .write_all(TO_HASH.as_bytes()) + .expect("Could not write to hasher"); + let actual = hex::encode(hasher.finish()); + assert_eq!(TO_HASH_MD5, actual) +} + +fn assert_hex_hashed_empty_string(algorithm: Algorithm, expected: &str) { + let vec = vec![]; + assert_eq!(expected, hex_digest(algorithm, vec.as_slice()).as_str()) +} diff --git a/curl-sys/.cargo-checksum.json b/curl-sys/.cargo-checksum.json new file mode 100644 index 000000000..f758557b6 --- /dev/null +++ b/curl-sys/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{},"package":"08459503c415173da1ce6b41036a37b8bfdd86af46d45abb9964d4c61fe670ef"} \ No newline at end of file diff --git a/curl-sys/Cargo.toml b/curl-sys/Cargo.toml new file mode 100644 index 000000000..59a27da4e --- /dev/null +++ b/curl-sys/Cargo.toml @@ -0,0 +1,61 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. 
If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "curl-sys" +version = "0.4.13" +authors = ["Carl Lerche ", "Alex Crichton "] +build = "build.rs" +links = "curl" +description = "Native bindings to the libcurl library" +documentation = "https://docs.rs/curl-sys" +categories = ["external-ffi-bindings"] +license = "MIT" +repository = "https://github.com/alexcrichton/curl-rust" + +[lib] +name = "curl_sys" +path = "lib.rs" +[dependencies.libc] +version = "0.2.2" + +[dependencies.libnghttp2-sys] +version = "0.1" +optional = true + +[dependencies.libz-sys] +version = "1.0.18" +[build-dependencies.cc] +version = "1.0" + +[build-dependencies.pkg-config] +version = "0.3.3" + +[features] +default = ["ssl"] +http2 = ["libnghttp2-sys"] +ssl = ["openssl-sys"] +static-curl = [] +static-ssl = ["openssl-sys/vendored"] +[target."cfg(all(unix, not(target_os = \"macos\")))".dependencies.openssl-sys] +version = "0.9" +optional = true +[target."cfg(target_env = \"msvc\")".build-dependencies.vcpkg] +version = "0.2" +[target."cfg(windows)".dependencies.winapi] +version = "0.3" +features = ["winsock2", "ws2def"] +[badges.appveyor] +repository = "alexcrichton/curl-rust" + +[badges.travis-ci] +repository = "alexcrichton/curl-rust" diff --git a/curl-sys/LICENSE b/curl-sys/LICENSE new file mode 100644 index 000000000..5f5e4b09d --- /dev/null +++ b/curl-sys/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2014 Carl Lerche + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/curl-sys/build.rs b/curl-sys/build.rs new file mode 100644 index 000000000..8836d0a8d --- /dev/null +++ b/curl-sys/build.rs @@ -0,0 +1,373 @@ +extern crate pkg_config; +#[cfg(target_env = "msvc")] +extern crate vcpkg; +extern crate cc; + +use std::env; +use std::fs; +use std::path::{PathBuf, Path}; +use std::process::Command; + +fn main() { + let target = env::var("TARGET").unwrap(); + let windows = target.contains("windows"); + + // If the static-curl feature is disabled, probe for a system-wide libcurl. + if !cfg!(feature = "static-curl") { + // OSX and Haiku ships libcurl by default, so we just use that version + // unconditionally. + if target.contains("apple") || target.contains("haiku") { + return println!("cargo:rustc-flags=-l curl"); + } + + // Next, fall back and try to use pkg-config if its available. 
+ if windows { + if try_vcpkg() { + return + } + } else { + if try_pkg_config() { + return + } + } + } + + if !Path::new("curl/.git").exists() { + let _ = Command::new("git").args(&["submodule", "update", "--init"]) + .status(); + } + + let dst = PathBuf::from(env::var_os("OUT_DIR").unwrap()); + let include = dst.join("include"); + let build = dst.join("build"); + println!("cargo:root={}", dst.display()); + println!("cargo:include={}", include.display()); + println!("cargo:static=1"); + fs::create_dir_all(include.join("curl")).unwrap(); + fs::copy("curl/include/curl/curl.h", include.join("curl/curl.h")).unwrap(); + fs::copy("curl/include/curl/curlver.h", include.join("curl/curlver.h")).unwrap(); + fs::copy("curl/include/curl/easy.h", include.join("curl/easy.h")).unwrap(); + fs::copy("curl/include/curl/mprintf.h", include.join("curl/mprintf.h")).unwrap(); + fs::copy("curl/include/curl/multi.h", include.join("curl/multi.h")).unwrap(); + fs::copy("curl/include/curl/stdcheaders.h", include.join("curl/stdcheaders.h")).unwrap(); + fs::copy("curl/include/curl/system.h", include.join("curl/system.h")).unwrap(); + fs::copy("curl/include/curl/urlapi.h", include.join("curl/urlapi.h")).unwrap(); + fs::copy("curl/include/curl/typecheck-gcc.h", include.join("curl/typecheck-gcc.h")).unwrap(); + + let pkgconfig = dst.join("lib/pkgconfig"); + fs::create_dir_all(&pkgconfig).unwrap(); + let contents = fs::read_to_string("curl/libcurl.pc.in").unwrap(); + fs::write( + pkgconfig.join("libcurl.pc"), + contents + .replace("@prefix@", dst.to_str().unwrap()) + .replace("@exec_prefix@", "") + .replace("@libdir@", dst.join("lib").to_str().unwrap()) + .replace("@includedir@", include.to_str().unwrap()) + .replace("@CPPFLAG_CURL_STATICLIB@", "-DCURL_STATICLIB") + .replace("@LIBCURL_LIBS@", "") + .replace("@SUPPORT_FEATURES@", "") + .replace("@SUPPORT_PROTOCOLS@", "") + .replace("@CURLVERSION@", "7.61.1"), + ).unwrap(); + + let mut cfg = cc::Build::new(); + cfg.out_dir(&build) + .include("curl/lib") + .include("curl/include") + .define("BUILDING_LIBCURL", None) + .define("CURL_DISABLE_CRYPTO_AUTH", None) + .define("CURL_DISABLE_DICT", None) + .define("CURL_DISABLE_FTP", None) + .define("CURL_DISABLE_GOPHER", None) + .define("CURL_DISABLE_IMAP", None) + .define("CURL_DISABLE_LDAP", None) + .define("CURL_DISABLE_LDAPS", None) + .define("CURL_DISABLE_NTLM", None) + .define("CURL_DISABLE_POP3", None) + .define("CURL_DISABLE_RTSP", None) + .define("CURL_DISABLE_SMB", None) + .define("CURL_DISABLE_SMTP", None) + .define("CURL_DISABLE_TELNET", None) + .define("CURL_DISABLE_TFTP", None) + .define("CURL_STATICLIB", None) + .define("ENABLE_IPV6", None) + .define("HAVE_ASSERT_H", None) + .define("OS", "\"unknown\"") // TODO + .define("HAVE_ZLIB_H", None) + .define("HAVE_LIBZ", None) + + .file("curl/lib/asyn-thread.c") + .file("curl/lib/base64.c") + .file("curl/lib/conncache.c") + .file("curl/lib/connect.c") + .file("curl/lib/content_encoding.c") + .file("curl/lib/cookie.c") + .file("curl/lib/curl_addrinfo.c") + .file("curl/lib/curl_ctype.c") + .file("curl/lib/curl_memrchr.c") + .file("curl/lib/curl_range.c") + .file("curl/lib/curl_threads.c") + .file("curl/lib/dotdot.c") + .file("curl/lib/doh.c") + .file("curl/lib/easy.c") + .file("curl/lib/escape.c") + .file("curl/lib/file.c") + .file("curl/lib/fileinfo.c") + .file("curl/lib/formdata.c") + .file("curl/lib/getenv.c") + .file("curl/lib/getinfo.c") + .file("curl/lib/hash.c") + .file("curl/lib/hostasyn.c") + .file("curl/lib/hostcheck.c") + .file("curl/lib/hostip.c") + 
.file("curl/lib/hostip6.c") + .file("curl/lib/http.c") + .file("curl/lib/http2.c") + .file("curl/lib/http_chunks.c") + .file("curl/lib/http_proxy.c") + .file("curl/lib/if2ip.c") + .file("curl/lib/inet_ntop.c") + .file("curl/lib/inet_pton.c") + .file("curl/lib/llist.c") + .file("curl/lib/mime.c") + .file("curl/lib/mprintf.c") + .file("curl/lib/multi.c") + .file("curl/lib/netrc.c") + .file("curl/lib/nonblock.c") + .file("curl/lib/parsedate.c") + .file("curl/lib/pipeline.c") + .file("curl/lib/progress.c") + .file("curl/lib/rand.c") + .file("curl/lib/select.c") + .file("curl/lib/sendf.c") + .file("curl/lib/setopt.c") + .file("curl/lib/share.c") + .file("curl/lib/slist.c") + .file("curl/lib/socks.c") + .file("curl/lib/speedcheck.c") + .file("curl/lib/splay.c") + .file("curl/lib/strcase.c") + .file("curl/lib/strdup.c") + .file("curl/lib/strerror.c") + .file("curl/lib/strtok.c") + .file("curl/lib/strtoofft.c") + .file("curl/lib/timeval.c") + .file("curl/lib/transfer.c") + .file("curl/lib/url.c") + .file("curl/lib/urlapi.c") + .file("curl/lib/version.c") + .file("curl/lib/vtls/vtls.c") + .file("curl/lib/warnless.c") + .file("curl/lib/wildcard.c") + + .define("HAVE_GETADDRINFO", None) + + .warnings(false); + + if cfg!(feature = "http2") { + cfg.define("USE_NGHTTP2", None) + .define("NGHTTP2_STATICLIB", None); + + if let Some(path) = env::var_os("DEP_NGHTTP2_ROOT") { + let path = PathBuf::from(path); + cfg.include(path.join("include")); + } + } + + if let Some(path) = env::var_os("DEP_Z_INCLUDE") { + cfg.include(path); + } + + if windows { + cfg.define("USE_THREADS_WIN32", None) + .define("HAVE_IOCTLSOCKET_FIONBIO", None) + .define("USE_WINSOCK", None) + .file("curl/lib/system_win32.c"); + + if cfg!(feature = "ssl") { + cfg.define("USE_WINDOWS_SSPI", None) + .define("USE_SCHANNEL", None) + .file("curl/lib/x509asn1.c") + .file("curl/lib/curl_sspi.c") + .file("curl/lib/socks_sspi.c") + .file("curl/lib/vtls/schannel.c") + .file("curl/lib/vtls/schannel_verify.c"); + } + } else { + cfg.define("RECV_TYPE_ARG1", "int") + .define("HAVE_PTHREAD_H", None) + .define("HAVE_ARPA_INET_H", None) + .define("HAVE_ERRNO_H", None) + .define("HAVE_FCNTL_H", None) + .define("HAVE_NETDB_H", None) + .define("HAVE_NETINET_IN_H", None) + .define("HAVE_POLL_FINE", None) + .define("HAVE_POLL_H", None) + .define("HAVE_FCNTL_O_NONBLOCK", None) + .define("HAVE_SYS_SELECT_H", None) + .define("HAVE_SYS_STAT_H", None) + .define("HAVE_UNISTD_H", None) + .define("HAVE_RECV", None) + .define("HAVE_SELECT", None) + .define("HAVE_SEND", None) + .define("HAVE_SOCKET", None) + .define("HAVE_STERRROR_R", None) + .define("HAVE_STRUCT_TIMEVAL", None) + .define("USE_THREADS_POSIX", None) + .define("RECV_TYPE_ARG2", "void*") + .define("RECV_TYPE_ARG3", "size_t") + .define("RECV_TYPE_ARG4", "int") + .define("RECV_TYPE_RETV", "ssize_t") + .define("SEND_QUAL_ARG2", "const") + .define("SEND_TYPE_ARG1", "int") + .define("SEND_TYPE_ARG2", "void*") + .define("SEND_TYPE_ARG3", "size_t") + .define("SEND_TYPE_ARG4", "int") + .define("SEND_TYPE_RETV", "ssize_t") + .define("SIZEOF_CURL_OFF_T", "8") + .define("SIZEOF_INT", "4") + .define("SIZEOF_SHORT", "2"); + + if cfg!(feature = "ssl") { + if target.contains("-apple-") { + cfg.define("USE_DARWINSSL", None) + .file("curl/lib/vtls/darwinssl.c"); + } else { + cfg.define("USE_OPENSSL", None) + .file("curl/lib/vtls/openssl.c"); + + if let Some(path) = env::var_os("DEP_OPENSSL_INCLUDE") { + cfg.include(path); + } + } + } + + let width = env::var("CARGO_CFG_TARGET_POINTER_WIDTH") + .unwrap() + .parse::() + 
.unwrap(); + cfg.define("SIZEOF_SSIZE_T", Some(&(width / 8).to_string()[..])); + cfg.define("SIZEOF_SIZE_T", Some(&(width / 8).to_string()[..])); + cfg.define("SIZEOF_LONG", Some(&(width / 8).to_string()[..])); + + cfg.flag("-fvisibility=hidden"); + } + + cfg.compile("curl"); + + if windows { + println!("cargo:rustc-link-lib=ws2_32"); + println!("cargo:rustc-link-lib=crypt32"); + } + + // Illumos/Solaris requires explicit linking with libnsl + if target.contains("solaris") { + println!("cargo:rustc-link-lib=nsl"); + } + + if target.contains("-apple-") { + println!("cargo:rustc-link-lib=framework=Security"); + println!("cargo:rustc-link-lib=framework=CoreFoundation"); + } +} + +#[cfg(not(target_env = "msvc"))] +fn try_vcpkg() -> bool { + false +} + +#[cfg(target_env = "msvc")] +fn try_vcpkg() -> bool { + + // the import library for the dll is called libcurl_imp + let mut successful_probe_details = + match vcpkg::Config::new().lib_names("libcurl_imp", "libcurl") + .emit_includes(true).probe("curl") { + Ok(details) => Some(details), + Err(e) => { + println!("first run of vcpkg did not find libcurl: {}", e); + None + } + }; + + if successful_probe_details.is_none() { + match vcpkg::Config::new().lib_name("libcurl") + .emit_includes(true).probe("curl") { + Ok(details) => successful_probe_details = Some(details), + Err(e) => println!("second run of vcpkg did not find libcurl: {}", e), + } + } + + if successful_probe_details.is_some() { + // Found libcurl which depends on openssl, libssh2 and zlib + // in the a default vcpkg installation. Probe for them + // but do not fail if they are not present as we may be working + // with a customized vcpkg installation. + vcpkg::Config::new() + .lib_name("libeay32") + .lib_name("ssleay32") + .probe("openssl").ok(); + + vcpkg::probe_package("libssh2").ok(); + + vcpkg::Config::new() + .lib_names("zlib", "zlib1") + .probe("zlib").ok(); + + println!("cargo:rustc-link-lib=crypt32"); + println!("cargo:rustc-link-lib=gdi32"); + println!("cargo:rustc-link-lib=user32"); + println!("cargo:rustc-link-lib=wldap32"); + return true; + } + false +} + +fn try_pkg_config() -> bool { + let mut cfg = pkg_config::Config::new(); + cfg.cargo_metadata(false); + let lib = match cfg.probe("libcurl") { + Ok(lib) => lib, + Err(e) => { + println!("Couldn't find libcurl from pkgconfig ({:?}), \ + compiling it from source...", e); + return false + } + }; + + // Not all system builds of libcurl have http2 features enabled, so if we've + // got a http2-requested build then we may fall back to a build from source. + if cfg!(feature = "http2") { + let output = Command::new("curl-config") + .arg("--features") + .output(); + let output = match output { + Ok(out) => out, + Err(e) => { + println!("failed to run curl-config ({}), building from source", e); + return false + } + }; + if !output.status.success() { + println!("curl-config failed: {}", output.status); + return false + } + let stdout = String::from_utf8_lossy(&output.stdout); + if !stdout.contains("HTTP2") { + println!("failed to find http-2 feature enabled in pkg-config-found \ + libcurl, building from source"); + return false + } + } + + // Re-find the library to print cargo's metadata, then print some extra + // metadata as well. 
+ cfg.cargo_metadata(true) + .probe("libcurl") + .unwrap(); + for path in lib.include_paths.iter() { + println!("cargo:include={}", path.display()); + } + return true +} diff --git a/curl-sys/lib.rs b/curl-sys/lib.rs new file mode 100644 index 000000000..0930a1f64 --- /dev/null +++ b/curl-sys/lib.rs @@ -0,0 +1,1062 @@ +#![allow(bad_style)] +#![doc(html_root_url = "https://docs.rs/curl-sys/0.3")] + +extern crate libc; +extern crate libz_sys; +#[cfg(all(unix, not(target_os = "macos"), feature = "ssl"))] +extern crate openssl_sys; +#[cfg(windows)] +extern crate winapi; +#[cfg(feature = "http2")] +extern crate libnghttp2_sys; + +use libc::{c_int, c_char, c_uint, c_short, c_long, c_double, c_void, size_t, time_t}; +use libc::c_ulong; + +#[cfg(unix)] +pub use libc::fd_set; +#[cfg(windows)] +pub use winapi::um::winsock2::fd_set; +#[cfg(windows)] +use winapi::shared::ws2def::SOCKADDR; + +#[cfg(target_env = "msvc")] +#[doc(hidden)] +pub type __enum_ty = libc::c_int; +#[cfg(not(target_env = "msvc"))] +#[doc(hidden)] +pub type __enum_ty = libc::c_uint; + +pub type CURLINFO = __enum_ty; +pub type CURLoption = __enum_ty; +pub type CURLcode = __enum_ty; +pub type CURLversion = __enum_ty; +pub type curl_off_t = i64; + +pub enum CURL {} + +#[cfg(unix)] +pub type curl_socket_t = libc::c_int; +#[cfg(unix)] +pub const CURL_SOCKET_BAD: curl_socket_t = -1; +#[cfg(all(windows, target_pointer_width = "32"))] +pub type curl_socket_t = libc::c_uint; +#[cfg(all(windows, target_pointer_width = "64"))] +pub type curl_socket_t = u64; +#[cfg(windows)] +pub const CURL_SOCKET_BAD: curl_socket_t = !0; + +pub enum curl_httppost { + // Note that this changed in some versions of libcurl, so we currently don't + // bind the fields as they're apparently not stable. + // pub next: *mut curl_httppost, + // pub name: *mut c_char, + // pub namelength: c_long, + // pub contents: *mut c_char, + // pub contentslength: c_long, + // pub buffer: *mut c_char, + // pub bufferlength: c_long, + // pub contenttype: *mut c_char, + // pub contentheader: *mut curl_slist, + // pub more: *mut curl_httppost, + // pub flags: c_long, + // pub showfilename: *mut c_char, + // pub userp: *mut c_void, +} + +// pub const HTTPPOST_FILENAME: c_long = 1 << 0; +// pub const HTTPPOST_READFILE: c_long = 1 << 1; +// pub const HTTPPOST_PTRNAME: c_long = 1 << 2; +// pub const HTTPPOST_PTRCONTENTS: c_long = 1 << 3; +// pub const HTTPPOST_BUFFER: c_long = 1 << 4; +// pub const HTTPPOST_PTRBUFFER: c_long = 1 << 5; +// pub const HTTPPOST_CALLBACK: c_long = 1 << 6; + +pub type curl_progress_callback = extern fn(*mut c_void, + c_double, + c_double, + c_double, + c_double) -> c_int; +// pub type curl_xferinfo_callback = extern fn(*mut c_void, +// curl_off_t, +// curl_off_t, +// curl_off_t, +// curl_off_t) -> c_int; + +pub const CURL_WRITEFUNC_PAUSE: size_t = 0x10000001; + +pub type curl_write_callback = extern fn(*mut c_char, + size_t, + size_t, + *mut c_void) -> size_t; + +pub type curlfiletype = __enum_ty; +pub const CURLFILETYPE_FILE: curlfiletype = 0; +pub const CURLFILETYPE_DIRECTORY: curlfiletype = 1; +pub const CURLFILETYPE_SYMLINK: curlfiletype = 2; +pub const CURLFILETYPE_DEVICE_BLOCK: curlfiletype = 3; +pub const CURLFILETYPE_DEVICE_CHAR: curlfiletype = 4; +pub const CURLFILETYPE_NAMEDPIPE: curlfiletype = 5; +pub const CURLFILETYPE_SOCKET: curlfiletype = 6; +pub const CURLFILETYPE_DOOR: curlfiletype = 7; +pub const CURLFILETYPE_UNKNOWN: curlfiletype = 8; + +pub const CURLFINFOFLAG_KNOWN_FILENAME: c_uint = 1 << 0; +pub const CURLFINFOFLAG_KNOWN_FILETYPE: c_uint 
= 1 << 1; +pub const CURLFINFOFLAG_KNOWN_TIME: c_uint = 1 << 2; +pub const CURLFINFOFLAG_KNOWN_PERM: c_uint = 1 << 3; +pub const CURLFINFOFLAG_KNOWN_UID: c_uint = 1 << 4; +pub const CURLFINFOFLAG_KNOWN_GID: c_uint = 1 << 5; +pub const CURLFINFOFLAG_KNOWN_SIZE: c_uint = 1 << 6; +pub const CURLFINFOFLAG_KNOWN_HLINKCOUNT: c_uint = 1 << 7; + +#[repr(C)] +pub struct curl_fileinfo { + pub filename: *mut c_char, + pub filetype: curlfiletype, + pub time: time_t, + pub perm: c_uint, + pub uid: c_int, + pub gid: c_int, + pub size: curl_off_t, + pub hardlinks: c_long, + + pub strings_time: *mut c_char, + pub strings_perm: *mut c_char, + pub strings_user: *mut c_char, + pub strings_group: *mut c_char, + pub strings_target: *mut c_char, + + pub flags: c_uint, + pub b_data: *mut c_char, + pub b_size: size_t, + pub b_used: size_t, +} + +pub const CURL_CHUNK_BGN_FUNC_OK: c_long = 0; +pub const CURL_CHUNK_BGN_FUNC_FAIL: c_long = 1; +pub const CURL_CHUNK_BGN_FUNC_SKIP: c_long = 2; +pub type curl_chunk_bgn_callback = extern fn(*const c_void, + *mut c_void, + c_int) -> c_long; + +pub const CURL_CHUNK_END_FUNC_OK: c_long = 0; +pub const CURL_CHUNK_END_FUNC_FAIL: c_long = 1; +pub type curl_chunk_end_callback = extern fn(*mut c_void) -> c_long; + +pub const CURL_FNMATCHFUNC_MATCH: c_int = 0; +pub const CURL_FNMATCHFUNC_NOMATCH: c_int = 1; +pub const CURL_FNMATCHFUNC_FAIL: c_int = 2; +pub type curl_fnmatch_callback = extern fn(*mut c_void, + *const c_char, + *const c_char) -> c_int; + +pub const CURL_SEEKFUNC_OK: c_int = 0; +pub const CURL_SEEKFUNC_FAIL: c_int = 1; +pub const CURL_SEEKFUNC_CANTSEEK: c_int = 2; +pub type curl_seek_callback = extern fn(*mut c_void, + curl_off_t, + c_int) -> c_int; + +pub const CURL_READFUNC_ABORT: size_t = 0x10000000; +pub const CURL_READFUNC_PAUSE: size_t = 0x10000001; +pub type curl_read_callback = extern fn(*mut c_char, + size_t, + size_t, + *mut c_void) -> size_t; + +// pub const CURL_SOCKOPT_OK: c_int = 0; +// pub const CURL_SOCKOPT_ERROR: c_int = 1; +// pub const CURL_SOCKOPT_ALREADY_CONNECTED: c_int = 2; +// pub type curl_sockopt_callback = extern fn(*mut c_void, +// curl_socket_t, +// curlsocktype) -> c_int; + +pub type curlioerr = __enum_ty; +pub const CURLIOE_OK: curlioerr = 0; +pub const CURLIOE_UNKNOWNCMD: curlioerr = 1; +pub const CURLIOE_FAILRESTART: curlioerr = 2; + +pub type curliocmd = __enum_ty; +pub const CURLIOCMD_NOP: curliocmd = 0; +pub const CURLIOCMD_RESTARTREAD: curliocmd = 1; + +pub type curl_ioctl_callback = extern fn(*mut CURL, c_int, *mut c_void) -> curlioerr; + +pub type curl_malloc_callback = extern fn(size_t) -> *mut c_void; +pub type curl_free_callback = extern fn(*mut c_void); +pub type curl_realloc_callback = extern fn(*mut c_void, size_t) -> *mut c_void; +pub type curl_strdup_callback = extern fn(*const c_char) -> *mut c_char; +pub type curl_calloc_callback = extern fn(size_t, size_t) -> *mut c_void; + +pub type curl_infotype = __enum_ty; +pub const CURLINFO_TEXT: curl_infotype = 0; +pub const CURLINFO_HEADER_IN: curl_infotype = 1; +pub const CURLINFO_HEADER_OUT: curl_infotype = 2; +pub const CURLINFO_DATA_IN: curl_infotype = 3; +pub const CURLINFO_DATA_OUT: curl_infotype = 4; +pub const CURLINFO_SSL_DATA_IN: curl_infotype = 5; +pub const CURLINFO_SSL_DATA_OUT: curl_infotype = 6; + +pub type curl_debug_callback = extern fn(*mut CURL, + curl_infotype, + *mut c_char, + size_t, + *mut c_void) -> c_int; + +pub const CURLE_OK: CURLcode = 0; +pub const CURLE_UNSUPPORTED_PROTOCOL: CURLcode = 1; +pub const CURLE_FAILED_INIT: CURLcode = 2; +pub const 
CURLE_URL_MALFORMAT: CURLcode = 3; +// pub const CURLE_NOT_BUILT_IN: CURLcode = 4; +pub const CURLE_COULDNT_RESOLVE_PROXY: CURLcode = 5; +pub const CURLE_COULDNT_RESOLVE_HOST: CURLcode = 6; +pub const CURLE_COULDNT_CONNECT: CURLcode = 7; +pub const CURLE_FTP_WEIRD_SERVER_REPLY: CURLcode = 8; +pub const CURLE_REMOTE_ACCESS_DENIED: CURLcode = 9; +// pub const CURLE_FTP_ACCEPT_FAILED: CURLcode = 10; +pub const CURLE_FTP_WEIRD_PASS_REPLY: CURLcode = 11; +// pub const CURLE_FTP_ACCEPT_TIMEOUT: CURLcode = 12; +pub const CURLE_FTP_WEIRD_PASV_REPLY: CURLcode = 13; +pub const CURLE_FTP_WEIRD_227_FORMAT: CURLcode = 14; +pub const CURLE_FTP_CANT_GET_HOST: CURLcode = 15; +pub const CURLE_OBSOLETE16: CURLcode = 16; +pub const CURLE_FTP_COULDNT_SET_TYPE: CURLcode = 17; +pub const CURLE_PARTIAL_FILE: CURLcode = 18; +pub const CURLE_FTP_COULDNT_RETR_FILE: CURLcode = 19; +pub const CURLE_OBSOLETE20: CURLcode = 20; +pub const CURLE_QUOTE_ERROR: CURLcode = 21; +pub const CURLE_HTTP_RETURNED_ERROR: CURLcode = 22; +pub const CURLE_WRITE_ERROR: CURLcode = 23; +pub const CURLE_OBSOLETE24: CURLcode = 24; +pub const CURLE_UPLOAD_FAILED: CURLcode = 25; +pub const CURLE_READ_ERROR: CURLcode = 26; +pub const CURLE_OUT_OF_MEMORY: CURLcode = 27; +pub const CURLE_OPERATION_TIMEDOUT: CURLcode = 28; +pub const CURLE_OBSOLETE29: CURLcode = 29; +pub const CURLE_FTP_PORT_FAILED: CURLcode = 30; +pub const CURLE_FTP_COULDNT_USE_REST: CURLcode = 31; +pub const CURLE_OBSOLETE32: CURLcode = 32; +pub const CURLE_RANGE_ERROR: CURLcode = 33; +pub const CURLE_HTTP_POST_ERROR: CURLcode = 34; +pub const CURLE_SSL_CONNECT_ERROR: CURLcode = 35; +pub const CURLE_BAD_DOWNLOAD_RESUME: CURLcode = 36; +pub const CURLE_FILE_COULDNT_READ_FILE: CURLcode = 37; +pub const CURLE_LDAP_CANNOT_BIND: CURLcode = 38; +pub const CURLE_LDAP_SEARCH_FAILED: CURLcode = 39; +pub const CURLE_OBSOLETE40: CURLcode = 40; +pub const CURLE_FUNCTION_NOT_FOUND: CURLcode = 41; +pub const CURLE_ABORTED_BY_CALLBACK: CURLcode = 42; +pub const CURLE_BAD_FUNCTION_ARGUMENT: CURLcode = 43; +pub const CURLE_OBSOLETE44: CURLcode = 44; +pub const CURLE_INTERFACE_FAILED: CURLcode = 45; +pub const CURLE_OBSOLETE46: CURLcode = 46; +pub const CURLE_TOO_MANY_REDIRECTS : CURLcode = 47; +pub const CURLE_UNKNOWN_OPTION: CURLcode = 48; +pub const CURLE_TELNET_OPTION_SYNTAX : CURLcode = 49; +pub const CURLE_OBSOLETE50: CURLcode = 50; +pub const CURLE_PEER_FAILED_VERIFICATION: CURLcode = 60; +pub const CURLE_GOT_NOTHING: CURLcode = 52; +pub const CURLE_SSL_ENGINE_NOTFOUND: CURLcode = 53; +pub const CURLE_SSL_ENGINE_SETFAILED: CURLcode = 54; +pub const CURLE_SEND_ERROR: CURLcode = 55; +pub const CURLE_RECV_ERROR: CURLcode = 56; +pub const CURLE_OBSOLETE57: CURLcode = 57; +pub const CURLE_SSL_CERTPROBLEM: CURLcode = 58; +pub const CURLE_SSL_CIPHER: CURLcode = 59; +pub const CURLE_SSL_CACERT: CURLcode = 60; +pub const CURLE_BAD_CONTENT_ENCODING: CURLcode = 61; +pub const CURLE_LDAP_INVALID_URL: CURLcode = 62; +pub const CURLE_FILESIZE_EXCEEDED: CURLcode = 63; +pub const CURLE_USE_SSL_FAILED: CURLcode = 64; +pub const CURLE_SEND_FAIL_REWIND: CURLcode = 65; +pub const CURLE_SSL_ENGINE_INITFAILED: CURLcode = 66; +pub const CURLE_LOGIN_DENIED: CURLcode = 67; +pub const CURLE_TFTP_NOTFOUND: CURLcode = 68; +pub const CURLE_TFTP_PERM: CURLcode = 69; +pub const CURLE_REMOTE_DISK_FULL: CURLcode = 70; +pub const CURLE_TFTP_ILLEGAL: CURLcode = 71; +pub const CURLE_TFTP_UNKNOWNID: CURLcode = 72; +pub const CURLE_REMOTE_FILE_EXISTS: CURLcode = 73; +pub const CURLE_TFTP_NOSUCHUSER: CURLcode = 74; +pub 
const CURLE_CONV_FAILED: CURLcode = 75; +pub const CURLE_CONV_REQD: CURLcode = 76; +pub const CURLE_SSL_CACERT_BADFILE: CURLcode = 77; +pub const CURLE_REMOTE_FILE_NOT_FOUND: CURLcode = 78; +pub const CURLE_SSH: CURLcode = 79; +pub const CURLE_SSL_SHUTDOWN_FAILED: CURLcode = 80; +pub const CURLE_AGAIN: CURLcode = 81; +pub const CURLE_SSL_CRL_BADFILE: CURLcode = 82; +pub const CURLE_SSL_ISSUER_ERROR: CURLcode = 83; +pub const CURLE_FTP_PRET_FAILED: CURLcode = 84; +pub const CURLE_RTSP_CSEQ_ERROR: CURLcode = 85; +pub const CURLE_RTSP_SESSION_ERROR: CURLcode = 86; +pub const CURLE_FTP_BAD_FILE_LIST: CURLcode = 87; +pub const CURLE_CHUNK_FAILED: CURLcode = 88; +// pub const CURLE_NO_CONNECTION_AVAILABLE: CURLcode = 89; + +pub type curl_conv_callback = extern fn(*mut c_char, size_t) -> CURLcode; +pub type curl_ssl_ctx_callback = extern fn(*mut CURL, + *mut c_void, + *mut c_void) -> CURLcode; + +pub type curl_proxytype = __enum_ty; +pub const CURLPROXY_HTTP: curl_proxytype = 0; +pub const CURLPROXY_HTTP_1_0: curl_proxytype = 1; +pub const CURLPROXY_SOCKS4: curl_proxytype = 4; +pub const CURLPROXY_SOCKS5: curl_proxytype = 5; +pub const CURLPROXY_SOCKS4A: curl_proxytype = 6; +pub const CURLPROXY_SOCKS5_HOSTNAME: curl_proxytype = 7; + +pub const CURLAUTH_NONE: c_ulong = 0; +pub const CURLAUTH_BASIC: c_ulong = 1 << 0; +pub const CURLAUTH_DIGEST: c_ulong = 1 << 1; +pub const CURLAUTH_GSSNEGOTIATE: c_ulong = 1 << 2; +pub const CURLAUTH_NTLM: c_ulong = 1 << 3; +pub const CURLAUTH_DIGEST_IE: c_ulong = 1 << 4; +pub const CURLAUTH_NTLM_WB: c_ulong = 1 << 5; +// pub const CURLAUTH_ONLY: c_ulong = 1 << 31; +pub const CURLAUTH_ANY: c_ulong = !CURLAUTH_DIGEST_IE; +pub const CURLAUTH_ANYSAFE: c_ulong = !(CURLAUTH_BASIC | CURLAUTH_DIGEST_IE); + +// pub const CURLSSH_AUTH_ANY: c_ulong = !0; +// pub const CURLSSH_AUTH_NONE: c_ulong = 0; +// pub const CURLSSH_AUTH_PUBLICKEY: c_ulong = 1 << 0; +// pub const CURLSSH_AUTH_PASSWORD: c_ulong = 1 << 1; +// pub const CURLSSH_AUTH_HOST: c_ulong = 1 << 2; +// pub const CURLSSH_AUTH_KEYBOARD: c_ulong = 1 << 3; +// pub const CURLSSH_AUTH_AGENT: c_ulong = 1 << 4; +// pub const CURLSSH_AUTH_DEFAULT: c_ulong = CURLSSH_AUTH_ANY; + +pub const CURLGSSAPI_DELEGATION_NONE: c_ulong = 0; +pub const CURLGSSAPI_DELEGATION_POLICY_FLAG: c_ulong = 1 << 0; +pub const CURLGSSAPI_DELEGATION_FLAG: c_ulong = 1 << 1; + +// pub type curl_khtype = __enum_ty; +// pub const CURLKHTYPE_UNKNOWN: curl_khtype = 0; +// pub const CURLKHTYPE_RSA1: curl_khtype = 1; +// pub const CURLKHTYPE_RSA: curl_khtype = 2; +// pub const CURLKHTYPE_DSS: curl_khtype = 3; + +// #[repr(C)] +// pub struct curl_khkey { +// pub key: *const c_char, +// pub len: size_t, +// pub keytype: curl_khtype, +// } + +// pub type curl_khstat = __enum_ty; +// pub const CURLKHSTAT_FINE_ADD_TO_FILE: curl_khstat = 0; +// pub const CURLKHSTAT_FINE: curl_khstat = 1; +// pub const CURLKHSTAT_REJECT: curl_khstat = 2; +// pub const CURLKHSTAT_DEFER: curl_khstat = 3; +// +// pub type curl_khmatch = __enum_ty; +// pub const CURLKHMATCH_OK: curl_khmatch = 0; +// pub const CURLKHMATCH_MISMATCH: curl_khmatch = 1; +// pub const CURLKHMATCH_MISSING: curl_khmatch = 2; + +// pub type curl_sshkeycallback = extern fn(*mut CURL, +// *const curl_khkey, +// *const curl_khkey, +// curl_khmatch, +// *mut c_void) -> c_int; + +pub const CURL_NETRC_IGNORED: c_ulong = 0; +pub const CURL_NETRC_OPTIONAL: c_ulong = 1; +pub const CURL_NETRC_REQUIRED: c_ulong = 2; + +pub type curl_usessl = __enum_ty; +pub const CURLUSESSL_NONE: curl_usessl = 0; +pub const CURLUSESSL_TRY: 
curl_usessl = 1; +pub const CURLUSESSL_CONTROL: curl_usessl = 2; +pub const CURLUSESSL_ALL: curl_usessl = 3; + +pub const CURLPROTO_HTTP: c_int = 1 << 0; +pub const CURLPROTO_HTTPS: c_int = 1 << 1; +pub const CURLPROTO_FILE: c_int = 1 << 10; + +pub const CURLOPTTYPE_LONG: CURLoption = 0; +pub const CURLOPTTYPE_OBJECTPOINT: CURLoption = 10_000; +pub const CURLOPTTYPE_FUNCTIONPOINT: CURLoption = 20_000; +pub const CURLOPTTYPE_OFF_T: CURLoption = 30_000; + +pub const CURLOPT_FILE: CURLoption = CURLOPTTYPE_OBJECTPOINT + 1; +pub const CURLOPT_URL: CURLoption = CURLOPTTYPE_OBJECTPOINT + 2; +pub const CURLOPT_PORT: CURLoption = CURLOPTTYPE_LONG + 3; +pub const CURLOPT_PROXY: CURLoption = CURLOPTTYPE_OBJECTPOINT + 4; +pub const CURLOPT_USERPWD: CURLoption = CURLOPTTYPE_OBJECTPOINT + 5; +pub const CURLOPT_PROXYUSERPWD: CURLoption = CURLOPTTYPE_OBJECTPOINT + 6; +pub const CURLOPT_RANGE: CURLoption = CURLOPTTYPE_OBJECTPOINT + 7; +pub const CURLOPT_INFILE: CURLoption = CURLOPTTYPE_OBJECTPOINT + 9; +pub const CURLOPT_ERRORBUFFER: CURLoption = CURLOPTTYPE_OBJECTPOINT + 10; +pub const CURLOPT_WRITEFUNCTION: CURLoption = CURLOPTTYPE_FUNCTIONPOINT + 11; +pub const CURLOPT_READFUNCTION: CURLoption = CURLOPTTYPE_FUNCTIONPOINT + 12; +pub const CURLOPT_TIMEOUT: CURLoption = CURLOPTTYPE_LONG + 13; +pub const CURLOPT_INFILESIZE: CURLoption = CURLOPTTYPE_LONG + 14; +pub const CURLOPT_POSTFIELDS: CURLoption = CURLOPTTYPE_OBJECTPOINT + 15; +pub const CURLOPT_REFERER: CURLoption = CURLOPTTYPE_OBJECTPOINT + 16; +pub const CURLOPT_FTPPORT: CURLoption = CURLOPTTYPE_OBJECTPOINT + 17; +pub const CURLOPT_USERAGENT: CURLoption = CURLOPTTYPE_OBJECTPOINT + 18; +pub const CURLOPT_LOW_SPEED_LIMIT: CURLoption = CURLOPTTYPE_LONG + 19; +pub const CURLOPT_LOW_SPEED_TIME: CURLoption = CURLOPTTYPE_LONG + 20; +pub const CURLOPT_RESUME_FROM: CURLoption = CURLOPTTYPE_LONG + 21; +pub const CURLOPT_COOKIE: CURLoption = CURLOPTTYPE_OBJECTPOINT + 22; +pub const CURLOPT_HTTPHEADER: CURLoption = CURLOPTTYPE_OBJECTPOINT + 23; +pub const CURLOPT_HTTPPOST: CURLoption = CURLOPTTYPE_OBJECTPOINT + 24; +pub const CURLOPT_SSLCERT: CURLoption = CURLOPTTYPE_OBJECTPOINT + 25; +pub const CURLOPT_KEYPASSWD: CURLoption = CURLOPTTYPE_OBJECTPOINT + 26; +pub const CURLOPT_CRLF: CURLoption = CURLOPTTYPE_LONG + 27; +pub const CURLOPT_QUOTE: CURLoption = CURLOPTTYPE_OBJECTPOINT + 28; +pub const CURLOPT_WRITEHEADER: CURLoption = CURLOPTTYPE_OBJECTPOINT + 29; +pub const CURLOPT_COOKIEFILE: CURLoption = CURLOPTTYPE_OBJECTPOINT + 31; +pub const CURLOPT_SSLVERSION: CURLoption = CURLOPTTYPE_LONG + 32; +pub const CURLOPT_TIMECONDITION: CURLoption = CURLOPTTYPE_LONG + 33; +pub const CURLOPT_TIMEVALUE: CURLoption = CURLOPTTYPE_LONG + 34; +pub const CURLOPT_CUSTOMREQUEST: CURLoption = CURLOPTTYPE_OBJECTPOINT + 36; +pub const CURLOPT_STDERR: CURLoption = CURLOPTTYPE_OBJECTPOINT + 37; +pub const CURLOPT_POSTQUOTE: CURLoption = CURLOPTTYPE_OBJECTPOINT + 39; +pub const CURLOPT_WRITEINFO: CURLoption = CURLOPTTYPE_OBJECTPOINT + 40; +pub const CURLOPT_VERBOSE: CURLoption = CURLOPTTYPE_LONG + 41; +pub const CURLOPT_HEADER: CURLoption = CURLOPTTYPE_LONG + 42; +pub const CURLOPT_NOPROGRESS: CURLoption = CURLOPTTYPE_LONG + 43; +pub const CURLOPT_NOBODY: CURLoption = CURLOPTTYPE_LONG + 44; +pub const CURLOPT_FAILONERROR: CURLoption = CURLOPTTYPE_LONG + 45; +pub const CURLOPT_UPLOAD: CURLoption = CURLOPTTYPE_LONG + 46; +pub const CURLOPT_POST: CURLoption = CURLOPTTYPE_LONG + 47; +pub const CURLOPT_DIRLISTONLY: CURLoption = CURLOPTTYPE_LONG + 48; +pub const CURLOPT_APPEND: CURLoption 
= CURLOPTTYPE_LONG + 50; +pub const CURLOPT_NETRC: CURLoption = CURLOPTTYPE_LONG + 51; +pub const CURLOPT_FOLLOWLOCATION: CURLoption = CURLOPTTYPE_LONG + 52; +pub const CURLOPT_TRANSFERTEXT: CURLoption = CURLOPTTYPE_LONG + 53; +pub const CURLOPT_PUT: CURLoption = CURLOPTTYPE_LONG + 54; +pub const CURLOPT_PROGRESSFUNCTION: CURLoption = CURLOPTTYPE_FUNCTIONPOINT + 56; +pub const CURLOPT_PROGRESSDATA: CURLoption = CURLOPTTYPE_OBJECTPOINT + 57; +pub const CURLOPT_AUTOREFERER: CURLoption = CURLOPTTYPE_LONG + 58; +pub const CURLOPT_PROXYPORT: CURLoption = CURLOPTTYPE_LONG + 59; +pub const CURLOPT_POSTFIELDSIZE: CURLoption = CURLOPTTYPE_LONG + 60; +pub const CURLOPT_HTTPPROXYTUNNEL: CURLoption = CURLOPTTYPE_LONG + 61; +pub const CURLOPT_INTERFACE: CURLoption = CURLOPTTYPE_OBJECTPOINT + 62; +pub const CURLOPT_KRBLEVEL: CURLoption = CURLOPTTYPE_OBJECTPOINT + 63; +pub const CURLOPT_SSL_VERIFYPEER: CURLoption = CURLOPTTYPE_LONG + 64; +pub const CURLOPT_CAINFO: CURLoption = CURLOPTTYPE_OBJECTPOINT + 65; +pub const CURLOPT_MAXREDIRS: CURLoption = CURLOPTTYPE_LONG + 68; +pub const CURLOPT_FILETIME: CURLoption = CURLOPTTYPE_LONG + 69; +pub const CURLOPT_TELNETOPTIONS: CURLoption = CURLOPTTYPE_OBJECTPOINT + 70; +pub const CURLOPT_MAXCONNECTS: CURLoption = CURLOPTTYPE_LONG + 71; +pub const CURLOPT_CLOSEPOLICY: CURLoption = CURLOPTTYPE_LONG + 72; +pub const CURLOPT_FRESH_CONNECT: CURLoption = CURLOPTTYPE_LONG + 74; +pub const CURLOPT_FORBID_REUSE: CURLoption = CURLOPTTYPE_LONG + 75; +pub const CURLOPT_RANDOM_FILE: CURLoption = CURLOPTTYPE_OBJECTPOINT + 76; +pub const CURLOPT_EGDSOCKET: CURLoption = CURLOPTTYPE_OBJECTPOINT + 77; +pub const CURLOPT_CONNECTTIMEOUT: CURLoption = CURLOPTTYPE_LONG + 78; +pub const CURLOPT_HEADERFUNCTION: CURLoption = CURLOPTTYPE_FUNCTIONPOINT + 79; +pub const CURLOPT_HTTPGET: CURLoption = CURLOPTTYPE_LONG + 80; +pub const CURLOPT_SSL_VERIFYHOST: CURLoption = CURLOPTTYPE_LONG + 81; +pub const CURLOPT_COOKIEJAR: CURLoption = CURLOPTTYPE_OBJECTPOINT + 82; +pub const CURLOPT_SSL_CIPHER_LIST: CURLoption = CURLOPTTYPE_OBJECTPOINT + 83; +pub const CURLOPT_HTTP_VERSION: CURLoption = CURLOPTTYPE_LONG + 84; +pub const CURLOPT_FTP_USE_EPSV: CURLoption = CURLOPTTYPE_LONG + 85; +pub const CURLOPT_SSLCERTTYPE: CURLoption = CURLOPTTYPE_OBJECTPOINT + 86; +pub const CURLOPT_SSLKEY: CURLoption = CURLOPTTYPE_OBJECTPOINT + 87; +pub const CURLOPT_SSLKEYTYPE: CURLoption = CURLOPTTYPE_OBJECTPOINT + 88; +pub const CURLOPT_SSLENGINE: CURLoption = CURLOPTTYPE_OBJECTPOINT + 89; +pub const CURLOPT_SSLENGINE_DEFAULT: CURLoption = CURLOPTTYPE_LONG + 90; +pub const CURLOPT_DNS_USE_GLOBAL_CACHE: CURLoption = CURLOPTTYPE_LONG + 91; +pub const CURLOPT_DNS_CACHE_TIMEOUT: CURLoption = CURLOPTTYPE_LONG + 92; +pub const CURLOPT_PREQUOTE: CURLoption = CURLOPTTYPE_OBJECTPOINT + 93; +pub const CURLOPT_DEBUGFUNCTION: CURLoption = CURLOPTTYPE_FUNCTIONPOINT + 94; +pub const CURLOPT_DEBUGDATA: CURLoption = CURLOPTTYPE_OBJECTPOINT + 95; +pub const CURLOPT_COOKIESESSION: CURLoption = CURLOPTTYPE_LONG + 96; +pub const CURLOPT_CAPATH: CURLoption = CURLOPTTYPE_OBJECTPOINT + 97; +pub const CURLOPT_BUFFERSIZE: CURLoption = CURLOPTTYPE_LONG + 98; +pub const CURLOPT_NOSIGNAL: CURLoption = CURLOPTTYPE_LONG + 99; +pub const CURLOPT_SHARE: CURLoption = CURLOPTTYPE_OBJECTPOINT + 100; +pub const CURLOPT_PROXYTYPE: CURLoption = CURLOPTTYPE_LONG + 101; +pub const CURLOPT_ACCEPT_ENCODING: CURLoption = CURLOPTTYPE_OBJECTPOINT + 102; +pub const CURLOPT_PRIVATE: CURLoption = CURLOPTTYPE_OBJECTPOINT + 103; +pub const CURLOPT_HTTP200ALIASES: 
CURLoption = CURLOPTTYPE_OBJECTPOINT + 104; +pub const CURLOPT_UNRESTRICTED_AUTH: CURLoption = CURLOPTTYPE_LONG + 105; +pub const CURLOPT_FTP_USE_EPRT: CURLoption = CURLOPTTYPE_LONG + 106; +pub const CURLOPT_HTTPAUTH: CURLoption = CURLOPTTYPE_LONG + 107; +pub const CURLOPT_SSL_CTX_FUNCTION: CURLoption = CURLOPTTYPE_FUNCTIONPOINT + 108; +pub const CURLOPT_SSL_CTX_DATA: CURLoption = CURLOPTTYPE_OBJECTPOINT + 109; +pub const CURLOPT_FTP_CREATE_MISSING_DIRS: CURLoption = CURLOPTTYPE_LONG + 110; +pub const CURLOPT_PROXYAUTH: CURLoption = CURLOPTTYPE_LONG + 111; +pub const CURLOPT_FTP_RESPONSE_TIMEOUT: CURLoption = CURLOPTTYPE_LONG + 112; +pub const CURLOPT_IPRESOLVE: CURLoption = CURLOPTTYPE_LONG + 113; +pub const CURLOPT_MAXFILESIZE: CURLoption = CURLOPTTYPE_LONG + 114; +pub const CURLOPT_INFILESIZE_LARGE: CURLoption = CURLOPTTYPE_OFF_T + 115; +pub const CURLOPT_RESUME_FROM_LARGE: CURLoption = CURLOPTTYPE_OFF_T + 116; +pub const CURLOPT_MAXFILESIZE_LARGE: CURLoption = CURLOPTTYPE_OFF_T + 117; +pub const CURLOPT_NETRC_FILE: CURLoption = CURLOPTTYPE_OBJECTPOINT + 118; +pub const CURLOPT_USE_SSL: CURLoption = CURLOPTTYPE_LONG + 119; +pub const CURLOPT_POSTFIELDSIZE_LARGE: CURLoption = CURLOPTTYPE_OFF_T + 120; +pub const CURLOPT_TCP_NODELAY: CURLoption = CURLOPTTYPE_LONG + 121; +pub const CURLOPT_FTPSSLAUTH: CURLoption = CURLOPTTYPE_LONG + 129; +pub const CURLOPT_IOCTLFUNCTION: CURLoption = CURLOPTTYPE_FUNCTIONPOINT + 130; +pub const CURLOPT_IOCTLDATA: CURLoption = CURLOPTTYPE_OBJECTPOINT + 131; +pub const CURLOPT_FTP_ACCOUNT: CURLoption = CURLOPTTYPE_OBJECTPOINT + 134; +pub const CURLOPT_COOKIELIST: CURLoption = CURLOPTTYPE_OBJECTPOINT + 135; +pub const CURLOPT_IGNORE_CONTENT_LENGTH: CURLoption = CURLOPTTYPE_LONG + 136; +pub const CURLOPT_FTP_SKIP_PASV_IP: CURLoption = CURLOPTTYPE_LONG + 137; +pub const CURLOPT_FTP_FILEMETHOD: CURLoption = CURLOPTTYPE_LONG + 138; +pub const CURLOPT_LOCALPORT: CURLoption = CURLOPTTYPE_LONG + 139; +pub const CURLOPT_LOCALPORTRANGE: CURLoption = CURLOPTTYPE_LONG + 140; +pub const CURLOPT_CONNECT_ONLY: CURLoption = CURLOPTTYPE_LONG + 141; +pub const CURLOPT_CONV_FROM_NETWORK_FUNCTION: CURLoption = CURLOPTTYPE_FUNCTIONPOINT + 142; +pub const CURLOPT_CONV_TO_NETWORK_FUNCTION: CURLoption = CURLOPTTYPE_FUNCTIONPOINT + 143; +pub const CURLOPT_CONV_FROM_UTF8_FUNCTION: CURLoption = CURLOPTTYPE_FUNCTIONPOINT + 144; +pub const CURLOPT_MAX_SEND_SPEED_LARGE: CURLoption = CURLOPTTYPE_OFF_T + 145; +pub const CURLOPT_MAX_RECV_SPEED_LARGE: CURLoption = CURLOPTTYPE_OFF_T + 146; +pub const CURLOPT_FTP_ALTERNATIVE_TO_USER: CURLoption = CURLOPTTYPE_OBJECTPOINT + 147; +pub const CURLOPT_SOCKOPTFUNCTION: CURLoption = CURLOPTTYPE_FUNCTIONPOINT + 148; +pub const CURLOPT_SOCKOPTDATA: CURLoption = CURLOPTTYPE_OBJECTPOINT + 149; +pub const CURLOPT_SSL_SESSIONID_CACHE: CURLoption = CURLOPTTYPE_LONG + 150; +pub const CURLOPT_SSH_AUTH_TYPES: CURLoption = CURLOPTTYPE_LONG + 151; +pub const CURLOPT_SSH_PUBLIC_KEYFILE: CURLoption = CURLOPTTYPE_OBJECTPOINT + 152; +pub const CURLOPT_SSH_PRIVATE_KEYFILE: CURLoption = CURLOPTTYPE_OBJECTPOINT + 153; +pub const CURLOPT_FTP_SSL_CCC: CURLoption = CURLOPTTYPE_LONG + 154; +pub const CURLOPT_TIMEOUT_MS: CURLoption = CURLOPTTYPE_LONG + 155; +pub const CURLOPT_CONNECTTIMEOUT_MS: CURLoption = CURLOPTTYPE_LONG + 156; +pub const CURLOPT_HTTP_TRANSFER_DECODING: CURLoption = CURLOPTTYPE_LONG + 157; +pub const CURLOPT_HTTP_CONTENT_DECODING: CURLoption = CURLOPTTYPE_LONG + 158; +pub const CURLOPT_NEW_FILE_PERMS: CURLoption = CURLOPTTYPE_LONG + 159; +pub const 
CURLOPT_NEW_DIRECTORY_PERMS: CURLoption = CURLOPTTYPE_LONG + 160; +pub const CURLOPT_POSTREDIR: CURLoption = CURLOPTTYPE_LONG + 161; +pub const CURLOPT_SSH_HOST_PUBLIC_KEY_MD5: CURLoption = CURLOPTTYPE_OBJECTPOINT + 162; +pub const CURLOPT_OPENSOCKETFUNCTION: CURLoption = CURLOPTTYPE_FUNCTIONPOINT + 163; +pub const CURLOPT_OPENSOCKETDATA: CURLoption = CURLOPTTYPE_OBJECTPOINT + 164; +pub const CURLOPT_COPYPOSTFIELDS: CURLoption = CURLOPTTYPE_OBJECTPOINT + 165; +pub const CURLOPT_PROXY_TRANSFER_MODE: CURLoption = CURLOPTTYPE_LONG + 166; +pub const CURLOPT_SEEKFUNCTION: CURLoption = CURLOPTTYPE_FUNCTIONPOINT + 167; +pub const CURLOPT_SEEKDATA: CURLoption = CURLOPTTYPE_OBJECTPOINT + 168; +pub const CURLOPT_CRLFILE: CURLoption = CURLOPTTYPE_OBJECTPOINT + 169; +pub const CURLOPT_ISSUERCERT: CURLoption = CURLOPTTYPE_OBJECTPOINT + 170; +pub const CURLOPT_ADDRESS_SCOPE: CURLoption = CURLOPTTYPE_LONG + 171; +pub const CURLOPT_CERTINFO: CURLoption = CURLOPTTYPE_LONG + 172; +pub const CURLOPT_USERNAME: CURLoption = CURLOPTTYPE_OBJECTPOINT + 173; +pub const CURLOPT_PASSWORD: CURLoption = CURLOPTTYPE_OBJECTPOINT + 174; +pub const CURLOPT_PROXYUSERNAME: CURLoption = CURLOPTTYPE_OBJECTPOINT + 175; +pub const CURLOPT_PROXYPASSWORD: CURLoption = CURLOPTTYPE_OBJECTPOINT + 176; +pub const CURLOPT_NOPROXY: CURLoption = CURLOPTTYPE_OBJECTPOINT + 177; +pub const CURLOPT_TFTP_BLKSIZE: CURLoption = CURLOPTTYPE_LONG + 178; +pub const CURLOPT_SOCKS5_GSSAPI_SERVICE: CURLoption = CURLOPTTYPE_OBJECTPOINT + 179; +pub const CURLOPT_SOCKS5_GSSAPI_NEC: CURLoption = CURLOPTTYPE_LONG + 180; +pub const CURLOPT_PROTOCOLS: CURLoption = CURLOPTTYPE_LONG + 181; +pub const CURLOPT_REDIR_PROTOCOLS: CURLoption = CURLOPTTYPE_LONG + 182; +pub const CURLOPT_SSH_KNOWNHOSTS: CURLoption = CURLOPTTYPE_OBJECTPOINT + 183; +pub const CURLOPT_SSH_KEYFUNCTION: CURLoption = CURLOPTTYPE_FUNCTIONPOINT + 184; +pub const CURLOPT_SSH_KEYDATA: CURLoption = CURLOPTTYPE_OBJECTPOINT + 185; +pub const CURLOPT_MAIL_FROM: CURLoption = CURLOPTTYPE_OBJECTPOINT + 186; +pub const CURLOPT_MAIL_RCPT: CURLoption = CURLOPTTYPE_OBJECTPOINT + 187; +pub const CURLOPT_FTP_USE_PRET: CURLoption = CURLOPTTYPE_LONG + 188; +pub const CURLOPT_RTSP_REQUEST: CURLoption = CURLOPTTYPE_LONG + 189; +pub const CURLOPT_RTSP_SESSION_ID: CURLoption = CURLOPTTYPE_OBJECTPOINT + 190; +pub const CURLOPT_RTSP_STREAM_URI: CURLoption = CURLOPTTYPE_OBJECTPOINT + 191; +pub const CURLOPT_RTSP_TRANSPORT: CURLoption = CURLOPTTYPE_OBJECTPOINT + 192; +pub const CURLOPT_RTSP_CLIENT_CSEQ: CURLoption = CURLOPTTYPE_LONG + 193; +pub const CURLOPT_RTSP_SERVER_CSEQ: CURLoption = CURLOPTTYPE_LONG + 194; +pub const CURLOPT_INTERLEAVEDATA: CURLoption = CURLOPTTYPE_OBJECTPOINT + 195; +pub const CURLOPT_INTERLEAVEFUNCTION: CURLoption = CURLOPTTYPE_FUNCTIONPOINT + 196; +pub const CURLOPT_WILDCARDMATCH: CURLoption = CURLOPTTYPE_LONG + 197; +pub const CURLOPT_CHUNK_BGN_FUNCTION: CURLoption = CURLOPTTYPE_FUNCTIONPOINT + 198; +pub const CURLOPT_CHUNK_END_FUNCTION: CURLoption = CURLOPTTYPE_FUNCTIONPOINT + 199; +pub const CURLOPT_FNMATCH_FUNCTION: CURLoption = CURLOPTTYPE_FUNCTIONPOINT + 200; +pub const CURLOPT_CHUNK_DATA: CURLoption = CURLOPTTYPE_OBJECTPOINT + 201; +pub const CURLOPT_FNMATCH_DATA: CURLoption = CURLOPTTYPE_OBJECTPOINT + 202; +pub const CURLOPT_RESOLVE: CURLoption = CURLOPTTYPE_OBJECTPOINT + 203; +pub const CURLOPT_TLSAUTH_USERNAME: CURLoption = CURLOPTTYPE_OBJECTPOINT + 204; +pub const CURLOPT_TLSAUTH_PASSWORD: CURLoption = CURLOPTTYPE_OBJECTPOINT + 205; +pub const CURLOPT_TLSAUTH_TYPE: CURLoption = 
CURLOPTTYPE_OBJECTPOINT + 206; +pub const CURLOPT_TRANSFER_ENCODING: CURLoption = CURLOPTTYPE_LONG + 207; +pub const CURLOPT_CLOSESOCKETFUNCTION: CURLoption = CURLOPTTYPE_FUNCTIONPOINT + 208; +pub const CURLOPT_CLOSESOCKETDATA: CURLoption = CURLOPTTYPE_OBJECTPOINT + 209; +pub const CURLOPT_GSSAPI_DELEGATION: CURLoption = CURLOPTTYPE_LONG + 210; +// pub const CURLOPT_DNS_SERVERS: CURLoption = CURLOPTTYPE_OBJECTPOINT + 211; +// pub const CURLOPT_ACCEPTTIMEOUT_MS: CURLoption = CURLOPTTYPE_LONG + 212; +pub const CURLOPT_TCP_KEEPALIVE: CURLoption = CURLOPTTYPE_LONG + 213; +pub const CURLOPT_TCP_KEEPIDLE: CURLoption = CURLOPTTYPE_LONG + 214; +pub const CURLOPT_TCP_KEEPINTVL: CURLoption = CURLOPTTYPE_LONG + 215; +pub const CURLOPT_SSL_OPTIONS: CURLoption = CURLOPTTYPE_LONG + 216; +// pub const CURLOPT_MAIL_AUTH: CURLoption = CURLOPTTYPE_OBJECTPOINT + 217; +// pub const CURLOPT_SASL_IR: CURLoption = CURLOPTTYPE_LONG + 218; +// pub const CURLOPT_XFERINFOFUNCTION: CURLoption = CURLOPTTYPE_FUNCTIONPOINT + 219; +// pub const CURLOPT_XOAUTH2_BEARER: CURLoption = CURLOPTTYPE_OBJECTPOINT + 220; +// pub const CURLOPT_DNS_INTERFACE: CURLoption = CURLOPTTYPE_OBJECTPOINT + 221; +// pub const CURLOPT_DNS_LOCAL_IP4: CURLoption = CURLOPTTYPE_OBJECTPOINT + 222; +// pub const CURLOPT_DNS_LOCAL_IP6: CURLoption = CURLOPTTYPE_OBJECTPOINT + 223; +// pub const CURLOPT_LOGIN_OPTIONS: CURLoption = CURLOPTTYPE_OBJECTPOINT + 224; +pub const CURLOPT_UNIX_SOCKET_PATH: CURLoption = CURLOPTTYPE_OBJECTPOINT + 231; +pub const CURLOPT_PIPEWAIT: CURLoption = CURLOPTTYPE_LONG + 237; + +pub const CURL_IPRESOLVE_WHATEVER: c_int = 0; +pub const CURL_IPRESOLVE_V4: c_int = 1; +pub const CURL_IPRESOLVE_V6: c_int = 2; + +pub const CURLSSLOPT_ALLOW_BEAST: c_long = 1 << 0; +pub const CURLSSLOPT_NO_REVOKE: c_long = 1 << 1; + +/// These enums are for use with the CURLOPT_HTTP_VERSION option. +/// +/// Setting this means we don't care, and that we'd like the library to choose +/// the best possible for us! +pub const CURL_HTTP_VERSION_NONE: c_int = 0; +/// Please use HTTP 1.0 in the request +pub const CURL_HTTP_VERSION_1_0: c_int = 1; +/// Please use HTTP 1.1 in the request +pub const CURL_HTTP_VERSION_1_1: c_int = 2; +/// Please use HTTP 2 in the request +/// (Added in CURL 7.33.0) +pub const CURL_HTTP_VERSION_2_0: c_int = 3; +/// Use version 2 for HTTPS, version 1.1 for HTTP +/// (Added in CURL 7.47.0) +pub const CURL_HTTP_VERSION_2TLS: c_int = 4; +/// Please use HTTP 2 without HTTP/1.1 Upgrade +/// (Added in CURL 7.49.0) +pub const CURL_HTTP_VERSION_2_PRIOR_KNOWLEDGE: c_int = 5; + +// Note that the type here is wrong, it's just intended to just be an enum. 
+pub const CURL_SSLVERSION_DEFAULT: CURLoption = 0; +pub const CURL_SSLVERSION_TLSv1: CURLoption = 1; +pub const CURL_SSLVERSION_SSLv2: CURLoption = 2; +pub const CURL_SSLVERSION_SSLv3: CURLoption = 3; +// pub const CURL_SSLVERSION_TLSv1_0: CURLoption = 4; +// pub const CURL_SSLVERSION_TLSv1_1: CURLoption = 5; +// pub const CURL_SSLVERSION_TLSv1_2: CURLoption = 6; + +pub const CURLOPT_READDATA: CURLoption = CURLOPT_INFILE; +pub const CURLOPT_WRITEDATA: CURLoption = CURLOPT_FILE; +pub const CURLOPT_HEADERDATA: CURLoption = CURLOPT_WRITEHEADER; + +pub type curl_TimeCond = __enum_ty; +pub const CURL_TIMECOND_NONE: curl_TimeCond = 0; +pub const CURL_TIMECOND_IFMODSINCE: curl_TimeCond = 1; +pub const CURL_TIMECOND_IFUNMODSINCE: curl_TimeCond = 2; +pub const CURL_TIMECOND_LASTMOD: curl_TimeCond = 3; + +pub type CURLformoption = __enum_ty; +pub const CURLFORM_NOTHING: CURLformoption = 0; +pub const CURLFORM_COPYNAME: CURLformoption = 1; +pub const CURLFORM_PTRNAME: CURLformoption = 2; +pub const CURLFORM_NAMELENGTH: CURLformoption = 3; +pub const CURLFORM_COPYCONTENTS: CURLformoption = 4; +pub const CURLFORM_PTRCONTENTS: CURLformoption = 5; +pub const CURLFORM_CONTENTSLENGTH: CURLformoption = 6; +pub const CURLFORM_FILECONTENT: CURLformoption = 7; +pub const CURLFORM_ARRAY: CURLformoption = 8; +pub const CURLFORM_OBSOLETE: CURLformoption = 9; +pub const CURLFORM_FILE: CURLformoption = 10; +pub const CURLFORM_BUFFER: CURLformoption = 11; +pub const CURLFORM_BUFFERPTR: CURLformoption = 12; +pub const CURLFORM_BUFFERLENGTH: CURLformoption = 13; +pub const CURLFORM_CONTENTTYPE: CURLformoption = 14; +pub const CURLFORM_CONTENTHEADER: CURLformoption = 15; +pub const CURLFORM_FILENAME: CURLformoption = 16; +pub const CURLFORM_END: CURLformoption = 17; +pub const CURLFORM_STREAM: CURLformoption = 19; + +pub type CURLFORMcode = __enum_ty; +pub const CURL_FORMADD_OK: CURLFORMcode = 0; +pub const CURL_FORMADD_MEMORY: CURLFORMcode = 1; +pub const CURL_FORMADD_OPTION_TWICE: CURLFORMcode = 2; +pub const CURL_FORMADD_NULL: CURLFORMcode = 3; +pub const CURL_FORMADD_UNKNOWN_OPTION: CURLFORMcode = 4; +pub const CURL_FORMADD_INCOMPLETE: CURLFORMcode = 5; +pub const CURL_FORMADD_ILLEGAL_ARRAY: CURLFORMcode = 6; +pub const CURL_FORMADD_DISABLED: CURLFORMcode = 7; + +#[repr(C)] +pub struct curl_forms { + pub option: CURLformoption, + pub value: *const c_char, +} + +pub type curl_formget_callback = extern fn(*mut c_void, + *const c_char, + size_t) -> size_t; + +#[repr(C)] +pub struct curl_slist { + pub data: *mut c_char, + pub next: *mut curl_slist, +} + +#[repr(C)] +pub struct curl_certinfo { + pub num_of_certs: c_int, + pub certinfo: *mut *mut curl_slist, +} + +// pub type curl_sslbackend = __enum_ty; +// pub const CURLSSLBACKEND_NONE: curl_sslbackend = 0; +// pub const CURLSSLBACKEND_OPENSSL: curl_sslbackend = 1; +// pub const CURLSSLBACKEND_GNUTLS: curl_sslbackend = 2; +// pub const CURLSSLBACKEND_NSS: curl_sslbackend = 3; +// pub const CURLSSLBACKEND_QSOSSL: curl_sslbackend = 4; +// pub const CURLSSLBACKEND_GSKIT: curl_sslbackend = 5; +// pub const CURLSSLBACKEND_POLARSSL: curl_sslbackend = 6; +// pub const CURLSSLBACKEND_CYASSL: curl_sslbackend = 7; +// pub const CURLSSLBACKEND_SCHANNEL: curl_sslbackend = 8; +// pub const CURLSSLBACKEND_DARWINSSL: curl_sslbackend = 9; + +// #[repr(C)] +// pub struct curl_tlssessioninfo { +// pub backend: curl_sslbackend, +// pub internals: *mut c_void, +// } + +pub const CURLINFO_STRING: CURLINFO = 0x100000; +pub const CURLINFO_LONG: CURLINFO = 0x200000; +pub const 
CURLINFO_DOUBLE: CURLINFO = 0x300000; +pub const CURLINFO_SLIST: CURLINFO = 0x400000; +pub const CURLINFO_MASK: CURLINFO = 0x0fffff; +pub const CURLINFO_TYPEMASK: CURLINFO = 0xf00000; + +pub const CURLINFO_EFFECTIVE_URL: CURLINFO = CURLINFO_STRING + 1; +pub const CURLINFO_RESPONSE_CODE: CURLINFO = CURLINFO_LONG + 2; +pub const CURLINFO_TOTAL_TIME: CURLINFO = CURLINFO_DOUBLE + 3; +pub const CURLINFO_NAMELOOKUP_TIME: CURLINFO = CURLINFO_DOUBLE + 4; +pub const CURLINFO_CONNECT_TIME: CURLINFO = CURLINFO_DOUBLE + 5; +pub const CURLINFO_PRETRANSFER_TIME: CURLINFO = CURLINFO_DOUBLE + 6; +pub const CURLINFO_SIZE_UPLOAD: CURLINFO = CURLINFO_DOUBLE + 7; +pub const CURLINFO_SIZE_DOWNLOAD: CURLINFO = CURLINFO_DOUBLE + 8; +pub const CURLINFO_SPEED_DOWNLOAD: CURLINFO = CURLINFO_DOUBLE + 9; +pub const CURLINFO_SPEED_UPLOAD: CURLINFO = CURLINFO_DOUBLE + 10; +pub const CURLINFO_HEADER_SIZE: CURLINFO = CURLINFO_LONG + 11; +pub const CURLINFO_REQUEST_SIZE: CURLINFO = CURLINFO_LONG + 12; +pub const CURLINFO_SSL_VERIFYRESULT: CURLINFO = CURLINFO_LONG + 13; +pub const CURLINFO_FILETIME: CURLINFO = CURLINFO_LONG + 14; +pub const CURLINFO_CONTENT_LENGTH_DOWNLOAD: CURLINFO = CURLINFO_DOUBLE + 15; +pub const CURLINFO_CONTENT_LENGTH_UPLOAD: CURLINFO = CURLINFO_DOUBLE + 16; +pub const CURLINFO_STARTTRANSFER_TIME: CURLINFO = CURLINFO_DOUBLE + 17; +pub const CURLINFO_CONTENT_TYPE: CURLINFO = CURLINFO_STRING + 18; +pub const CURLINFO_REDIRECT_TIME: CURLINFO = CURLINFO_DOUBLE + 19; +pub const CURLINFO_REDIRECT_COUNT: CURLINFO = CURLINFO_LONG + 20; +pub const CURLINFO_PRIVATE: CURLINFO = CURLINFO_STRING + 21; +pub const CURLINFO_HTTP_CONNECTCODE: CURLINFO = CURLINFO_LONG + 22; +pub const CURLINFO_HTTPAUTH_AVAIL: CURLINFO = CURLINFO_LONG + 23; +pub const CURLINFO_PROXYAUTH_AVAIL: CURLINFO = CURLINFO_LONG + 24; +pub const CURLINFO_OS_ERRNO: CURLINFO = CURLINFO_LONG + 25; +pub const CURLINFO_NUM_CONNECTS: CURLINFO = CURLINFO_LONG + 26; +pub const CURLINFO_SSL_ENGINES: CURLINFO = CURLINFO_SLIST + 27; +pub const CURLINFO_COOKIELIST: CURLINFO = CURLINFO_SLIST + 28; +pub const CURLINFO_LASTSOCKET: CURLINFO = CURLINFO_LONG + 29; +pub const CURLINFO_FTP_ENTRY_PATH: CURLINFO = CURLINFO_STRING + 30; +pub const CURLINFO_REDIRECT_URL: CURLINFO = CURLINFO_STRING + 31; +pub const CURLINFO_PRIMARY_IP: CURLINFO = CURLINFO_STRING + 32; +pub const CURLINFO_APPCONNECT_TIME: CURLINFO = CURLINFO_DOUBLE + 33; +pub const CURLINFO_CERTINFO: CURLINFO = CURLINFO_SLIST + 34; +pub const CURLINFO_CONDITION_UNMET: CURLINFO = CURLINFO_LONG + 35; +pub const CURLINFO_RTSP_SESSION_ID: CURLINFO = CURLINFO_STRING + 36; +pub const CURLINFO_RTSP_CLIENT_CSEQ: CURLINFO = CURLINFO_LONG + 37; +pub const CURLINFO_RTSP_SERVER_CSEQ: CURLINFO = CURLINFO_LONG + 38; +pub const CURLINFO_RTSP_CSEQ_RECV: CURLINFO = CURLINFO_LONG + 39; +pub const CURLINFO_PRIMARY_PORT: CURLINFO = CURLINFO_LONG + 40; +pub const CURLINFO_LOCAL_IP: CURLINFO = CURLINFO_STRING + 41; +pub const CURLINFO_LOCAL_PORT: CURLINFO = CURLINFO_LONG + 42; +// pub const CURLINFO_TLS_SESSION: CURLINFO = CURLINFO_SLIST + 43; + +pub type curl_closepolicy = __enum_ty; +pub const CURLCLOSEPOLICY_NONE: curl_closepolicy = 0; +pub const CURLCLOSEPOLICY_OLDEST: curl_closepolicy = 1; +pub const CURLCLOSEPOLICY_LEAST_RECENTLY_USED: curl_closepolicy = 2; +pub const CURLCLOSEPOLICY_LEAST_TRAFFIC: curl_closepolicy = 3; +pub const CURLCLOSEPOLICY_SLOWEST: curl_closepolicy = 4; +pub const CURLCLOSEPOLICY_CALLBACK: curl_closepolicy = 5; + +pub const CURL_GLOBAL_SSL: c_long = 1 << 0; +pub const CURL_GLOBAL_WIN32: c_long = 1 
<< 1; +pub const CURL_GLOBAL_ALL: c_long = CURL_GLOBAL_SSL | CURL_GLOBAL_WIN32; +pub const CURL_GLOBAL_NOTHING: c_long = 0; +pub const CURL_GLOBAL_DEFAULT: c_long = CURL_GLOBAL_ALL; +// pub const CURL_GLOBAL_ACK_EINTR: c_long = 1 << 2; + +pub type curl_lock_data = __enum_ty; +pub const CURL_LOCK_DATA_NONE: curl_lock_data = 0; +pub const CURL_LOCK_DATA_SHARE: curl_lock_data = 1; +pub const CURL_LOCK_DATA_COOKIE: curl_lock_data = 2; +pub const CURL_LOCK_DATA_DNS: curl_lock_data = 3; +pub const CURL_LOCK_DATA_SSL_SESSION: curl_lock_data = 4; +pub const CURL_LOCK_DATA_CONNECT: curl_lock_data = 5; + +pub type curl_lock_access = __enum_ty; +pub const CURL_LOCK_ACCESS_NONE: curl_lock_access = 0; +pub const CURL_LOCK_ACCESS_SHARED: curl_lock_access = 1; +pub const CURL_LOCK_ACCESS_SINGLE: curl_lock_access = 2; + +pub type curl_lock_function = extern fn(*mut CURL, + curl_lock_data, + curl_lock_access, + *mut c_void); +pub type curl_unlock_function = extern fn(*mut CURL, + curl_lock_data, + *mut c_void); + +pub enum CURLSH {} + +pub type CURLSHcode = __enum_ty; +pub const CURLSHE_OK: CURLSHcode = 0; +pub const CURLSHE_BAD_OPTION: CURLSHcode = 1; +pub const CURLSHE_IN_USE: CURLSHcode = 2; +pub const CURLSHE_INVALID: CURLSHcode = 3; +pub const CURLSHE_NOMEM: CURLSHcode = 4; +// pub const CURLSHE_NOT_BUILT_IN: CURLSHcode = 5; + +pub type CURLSHoption = __enum_ty; +pub const CURLSHOPT_NONE: CURLSHoption = 0; +pub const CURLSHOPT_SHARE: CURLSHoption = 1; +pub const CURLSHOPT_UNSHARE: CURLSHoption = 2; +pub const CURLSHOPT_LOCKFUNC: CURLSHoption = 3; +pub const CURLSHOPT_UNLOCKFUNC: CURLSHoption = 4; +pub const CURLSHOPT_USERDATA: CURLSHoption = 5; + +pub const CURLVERSION_FIRST: CURLversion = 0; +pub const CURLVERSION_SECOND: CURLversion = 1; +pub const CURLVERSION_THIRD: CURLversion = 2; +pub const CURLVERSION_FOURTH: CURLversion = 3; +pub const CURLVERSION_FIFTH: CURLversion = 4; +pub const CURLVERSION_NOW: CURLversion = CURLVERSION_FIFTH; + +#[repr(C)] +pub struct curl_version_info_data { + pub age: CURLversion, + pub version: *const c_char, + pub version_num: c_uint, + pub host: *const c_char, + pub features: c_int, + pub ssl_version: *const c_char, + pub ssl_version_num: c_long, + pub libz_version: *const c_char, + pub protocols: *const *const c_char, + pub ares: *const c_char, + pub ares_num: c_int, + pub libidn: *const c_char, + pub iconv_ver_num: c_int, + pub libssh_version: *const c_char, + pub brotli_ver_num: c_uint, + pub brotli_version: *const c_char, +} + +pub const CURL_VERSION_IPV6: c_int = 1 << 0; +pub const CURL_VERSION_KERBEROS4: c_int = 1 << 1; +pub const CURL_VERSION_SSL: c_int = 1 << 2; +pub const CURL_VERSION_LIBZ: c_int = 1 << 3; +pub const CURL_VERSION_NTLM: c_int = 1 << 4; +pub const CURL_VERSION_GSSNEGOTIATE: c_int = 1 << 5; +pub const CURL_VERSION_DEBUG: c_int = 1 << 6; +pub const CURL_VERSION_ASYNCHDNS: c_int = 1 << 7; +pub const CURL_VERSION_SPNEGO: c_int = 1 << 8; +pub const CURL_VERSION_LARGEFILE: c_int = 1 << 9; +pub const CURL_VERSION_IDN: c_int = 1 << 10; +pub const CURL_VERSION_SSPI: c_int = 1 << 11; +pub const CURL_VERSION_CONV: c_int = 1 << 12; +pub const CURL_VERSION_CURLDEBUG: c_int = 1 << 13; +pub const CURL_VERSION_TLSAUTH_SRP: c_int = 1 << 14; +pub const CURL_VERSION_NTLM_WB: c_int = 1 << 15; +pub const CURL_VERSION_HTTP2: c_int = 1 << 16; +pub const CURL_VERSION_UNIX_SOCKETS: c_int = 1 << 19; + +pub const CURLPAUSE_RECV: c_int = 1 << 0; +pub const CURLPAUSE_RECV_CONT: c_int = 0; +pub const CURLPAUSE_SEND: c_int = 1 << 2; +pub const CURLPAUSE_SEND_CONT: c_int = 
0; + +pub enum CURLM {} + +pub type CURLMcode = c_int; +pub const CURLM_CALL_MULTI_PERFORM: CURLMcode = -1; +pub const CURLM_OK: CURLMcode = 0; +pub const CURLM_BAD_HANDLE: CURLMcode = 1; +pub const CURLM_BAD_EASY_HANDLE: CURLMcode = 2; +pub const CURLM_OUT_OF_MEMORY: CURLMcode = 3; +pub const CURLM_INTERNAL_ERROR: CURLMcode = 4; +pub const CURLM_BAD_SOCKET: CURLMcode = 5; +pub const CURLM_UNKNOWN_OPTION: CURLMcode = 6; +// pub const CURLM_ADDED_ALREADY: CURLMcode = 7; + +pub type CURLMSG = __enum_ty; +pub const CURLMSG_NONE: CURLMSG = 0; +pub const CURLMSG_DONE: CURLMSG = 1; + +#[repr(C)] +pub struct CURLMsg { + pub msg: CURLMSG, + pub easy_handle: *mut CURL, + pub data: *mut c_void, +} + +pub const CURL_WAIT_POLLIN: c_short = 0x1; +pub const CURL_WAIT_POLLPRI: c_short = 0x2; +pub const CURL_WAIT_POLLOUT: c_short = 0x4; + +#[repr(C)] +pub struct curl_waitfd { + pub fd: curl_socket_t, + pub events: c_short, + pub revents: c_short, +} + +pub const CURL_POLL_NONE: c_int = 0; +pub const CURL_POLL_IN: c_int = 1; +pub const CURL_POLL_OUT: c_int = 2; +pub const CURL_POLL_INOUT: c_int = 3; +pub const CURL_POLL_REMOVE: c_int = 4; +pub const CURL_CSELECT_IN: c_int = 1; +pub const CURL_CSELECT_OUT: c_int = 2; +pub const CURL_CSELECT_ERR: c_int = 4; +pub const CURL_SOCKET_TIMEOUT: curl_socket_t = CURL_SOCKET_BAD; + +pub type curl_socket_callback = extern fn(*mut CURL, + curl_socket_t, + c_int, + *mut c_void, + *mut c_void) -> c_int; +pub type curl_multi_timer_callback = extern fn(*mut CURLM, + c_long, + *mut c_void) -> c_int; + +pub type CURLMoption = __enum_ty; +pub const CURLMOPT_SOCKETFUNCTION: CURLMoption = CURLOPTTYPE_FUNCTIONPOINT + 1; +pub const CURLMOPT_SOCKETDATA: CURLMoption = CURLOPTTYPE_OBJECTPOINT + 2; +pub const CURLMOPT_PIPELINING: CURLMoption = CURLOPTTYPE_LONG + 3; +pub const CURLMOPT_TIMERFUNCTION: CURLMoption = CURLOPTTYPE_FUNCTIONPOINT + 4; +pub const CURLMOPT_TIMERDATA: CURLMoption = CURLOPTTYPE_OBJECTPOINT + 5; +// pub const CURLMOPT_MAXCONNECTS: CURLMoption = CURLOPTTYPE_LONG + 6; +pub const CURLMOPT_MAX_HOST_CONNECTIONS: CURLMoption = CURLOPTTYPE_LONG + 7; +pub const CURLMOPT_MAX_PIPELINE_LENGTH: CURLMoption = CURLOPTTYPE_LONG + 8; +// pub const CURLMOPT_CONTENT_LENGTH_PENALTY_SIZE: CURLMoption = CURLOPTTYPE_OFF_T + 9; +// pub const CURLMOPT_CHUNK_LENGTH_PENALTY_SIZE: CURLMoption = CURLOPTTYPE_OFF_T + 10; +// pub const CURLMOPT_PIPELINING_SITE_BL: CURLMoption = CURLOPTTYPE_OBJECTPOINT + 11; +// pub const CURLMOPT_PIPELINING_SERVER_BL: CURLMoption = CURLOPTTYPE_OBJECTPOINT + 12; +// pub const CURLMOPT_MAX_TOTAL_CONNECTIONS: CURLMoption = CURLOPTTYPE_LONG + 13; + +// These enums are for use with the CURLMOPT_PIPELINING option. +pub const CURLPIPE_NOTHING: c_long = 0; +pub const CURLPIPE_HTTP1: c_long = 1; +pub const CURLPIPE_MULTIPLEX: c_long = 2; + +pub const CURL_ERROR_SIZE: usize = 256; + +pub type curl_opensocket_callback = extern fn(*mut c_void, + curlsocktype, + *mut curl_sockaddr) -> curl_socket_t; +pub type curlsocktype = __enum_ty; +pub const CURLSOCKTYPE_IPCXN: curlsocktype = 0; +pub const CURLSOCKTYPE_ACCEPT: curlsocktype = 1; +pub const CURLSOCKTYPE_LAST: curlsocktype = 2; + +#[repr(C)] +pub struct curl_sockaddr { + pub family: c_int, + pub socktype: c_int, + pub protocol: c_int, + pub addrlen: c_uint, + #[cfg(unix)] + pub addr: libc::sockaddr, + #[cfg(windows)] + pub addr: SOCKADDR, +} + +extern { + pub fn curl_formadd(httppost: *mut *mut curl_httppost, + last_post: *mut *mut curl_httppost, + ...) 
-> CURLFORMcode; + pub fn curl_formget(form: *mut curl_httppost, + arg: *mut c_void, + append: curl_formget_callback) -> c_int; + pub fn curl_formfree(form: *mut curl_httppost); + + pub fn curl_version() -> *mut c_char; + + pub fn curl_easy_escape(handle: *mut CURL, + string: *const c_char, + length: c_int) -> *mut c_char; + pub fn curl_easy_unescape(handle: *mut CURL, + string: *const c_char, + length: c_int, + outlength: *mut c_int) -> *mut c_char; + pub fn curl_free(p: *mut c_void); + + pub fn curl_global_init(flags: c_long) -> CURLcode; + pub fn curl_global_init_mem(flags: c_long, + m: curl_malloc_callback, + f: curl_free_callback, + r: curl_realloc_callback, + s: curl_strdup_callback, + c: curl_calloc_callback) -> CURLcode; + pub fn curl_global_cleanup(); + + pub fn curl_slist_append(list: *mut curl_slist, + val: *const c_char) -> *mut curl_slist; + pub fn curl_slist_free_all(list: *mut curl_slist); + + pub fn curl_getdate(p: *const c_char, _: *const time_t) -> time_t; + + pub fn curl_share_init() -> *mut CURLSH; + pub fn curl_share_setopt(sh: *mut CURLSH, + opt: CURLSHoption, + ...) -> CURLSHcode; + pub fn curl_share_cleanup(sh: *mut CURLSH) -> CURLSHcode; + + pub fn curl_version_info(t: CURLversion) -> *mut curl_version_info_data; + + pub fn curl_easy_strerror(code: CURLcode) -> *const c_char; + pub fn curl_share_strerror(code: CURLSHcode) -> *const c_char; + pub fn curl_easy_pause(handle: *mut CURL, bitmask: c_int) -> CURLcode; + + pub fn curl_easy_init() -> *mut CURL; + pub fn curl_easy_setopt(curl: *mut CURL, option: CURLoption, ...) -> CURLcode; + pub fn curl_easy_perform(curl: *mut CURL) -> CURLcode; + pub fn curl_easy_cleanup(curl: *mut CURL); + pub fn curl_easy_getinfo(curl: *mut CURL, info: CURLINFO, ...) -> CURLcode; + pub fn curl_easy_duphandle(curl: *mut CURL) -> *mut CURL; + pub fn curl_easy_reset(curl: *mut CURL); + pub fn curl_easy_recv(curl: *mut CURL, + buffer: *mut c_void, + buflen: size_t, + n: *mut size_t) -> CURLcode; + pub fn curl_easy_send(curl: *mut CURL, + buffer: *const c_void, + buflen: size_t, + n: *mut size_t) -> CURLcode; + + pub fn curl_multi_init() -> *mut CURLM; + pub fn curl_multi_add_handle(multi_handle: *mut CURLM, + curl_handle: *mut CURL) -> CURLMcode; + pub fn curl_multi_remove_handle(multi_handle: *mut CURLM, + curl_handle: *mut CURL) -> CURLMcode; + pub fn curl_multi_fdset(multi_handle: *mut CURLM, + read_fd_set: *mut fd_set, + write_fd_set: *mut fd_set, + exc_fd_set: *mut fd_set, + max_fd: *mut c_int) -> CURLMcode; + pub fn curl_multi_wait(multi_handle: *mut CURLM, + extra_fds: *mut curl_waitfd, + extra_nfds: c_uint, + timeout_ms: c_int, + ret: *mut c_int) -> CURLMcode; + pub fn curl_multi_perform(multi_handle: *mut CURLM, + running_handles: *mut c_int) -> CURLMcode; + pub fn curl_multi_cleanup(multi_handle: *mut CURLM) -> CURLMcode; + pub fn curl_multi_info_read(multi_handle: *mut CURLM, + msgs_in_queue: *mut c_int) -> *mut CURLMsg; + pub fn curl_multi_strerror(code: CURLMcode) -> *const c_char; + pub fn curl_multi_socket(multi_handle: *mut CURLM, + s: curl_socket_t, + running_handles: *mut c_int) -> CURLMcode; + pub fn curl_multi_socket_action(multi_handle: *mut CURLM, + s: curl_socket_t, + ev_bitmask: c_int, + running_handles: *mut c_int) -> CURLMcode; + pub fn curl_multi_socket_all(multi_handle: *mut CURLM, + running_handles: *mut c_int) -> CURLMcode; + pub fn curl_multi_timeout(multi_handle: *mut CURLM, + milliseconds: *mut c_long) -> CURLMcode; + pub fn curl_multi_setopt(multi_handle: *mut CURLM, + option: CURLMoption, + ...) 
-> CURLMcode; + pub fn curl_multi_assign(multi_handle: *mut CURLM, + sockfd: curl_socket_t, + sockp: *mut c_void) -> CURLMcode; +} diff --git a/curl/.cargo-checksum.json b/curl/.cargo-checksum.json new file mode 100644 index 000000000..5f75cdb7d --- /dev/null +++ b/curl/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{},"package":"a9e5285b49b44401518c947d3b808d14d99a538a6c9ffb3ec0205c11f9fc4389"} \ No newline at end of file diff --git a/curl/.pc/.quilt_patches b/curl/.pc/.quilt_patches new file mode 100644 index 000000000..6857a8d44 --- /dev/null +++ b/curl/.pc/.quilt_patches @@ -0,0 +1 @@ +debian/patches diff --git a/curl/.pc/.quilt_series b/curl/.pc/.quilt_series new file mode 100644 index 000000000..c2067066a --- /dev/null +++ b/curl/.pc/.quilt_series @@ -0,0 +1 @@ +series diff --git a/curl/.pc/.version b/curl/.pc/.version new file mode 100644 index 000000000..0cfbf0888 --- /dev/null +++ b/curl/.pc/.version @@ -0,0 +1 @@ +2 diff --git a/curl/.pc/applied-patches b/curl/.pc/applied-patches new file mode 100644 index 000000000..204ee965a --- /dev/null +++ b/curl/.pc/applied-patches @@ -0,0 +1 @@ +winapi3.patch diff --git a/curl/.pc/winapi3.patch/.timestamp b/curl/.pc/winapi3.patch/.timestamp new file mode 100644 index 000000000..e69de29bb diff --git a/curl/.pc/winapi3.patch/Cargo.toml b/curl/.pc/winapi3.patch/Cargo.toml new file mode 100644 index 000000000..0cb86d9d2 --- /dev/null +++ b/curl/.pc/winapi3.patch/Cargo.toml @@ -0,0 +1,60 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. 
If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "curl" +version = "0.4.18" +authors = ["Alex Crichton "] +autotests = true +description = "Rust bindings to libcurl for making HTTP requests" +homepage = "https://github.com/alexcrichton/curl-rust" +documentation = "https://docs.rs/curl" +categories = ["api-bindings", "web-programming::http-client"] +license = "MIT" +repository = "https://github.com/alexcrichton/curl-rust" + +[[test]] +name = "atexit" +harness = false +[dependencies.curl-sys] +version = "0.4.9" + +[dependencies.libc] +version = "0.2.42" + +[dependencies.socket2] +version = "0.3.7" +[dev-dependencies.mio] +version = "0.6" + +[dev-dependencies.mio-extras] +version = "2.0.3" + +[features] +http2 = ["curl-sys/http2"] +[target."cfg(all(unix, not(target_os = \"macos\")))".dependencies.openssl-probe] +version = "0.1.2" + +[target."cfg(all(unix, not(target_os = \"macos\")))".dependencies.openssl-sys] +version = "0.9.33" +[target."cfg(target_env=\"msvc\")".dependencies.kernel32-sys] +version = "0.2.2" + +[target."cfg(target_env=\"msvc\")".dependencies.schannel] +version = "0.1.13" +[target."cfg(windows)".dependencies.winapi] +version = "0.2.7" +[badges.appveyor] +repository = "alexcrichton/curl-rust" + +[badges.travis-ci] +repository = "alexcrichton/curl-rust" diff --git a/curl/.pc/winapi3.patch/src/easy/windows.rs b/curl/.pc/winapi3.patch/src/easy/windows.rs new file mode 100644 index 000000000..018e3dba2 --- /dev/null +++ b/curl/.pc/winapi3.patch/src/easy/windows.rs @@ -0,0 +1,136 @@ +#![allow(non_camel_case_types, non_snake_case)] + +use libc::c_void; + +#[cfg(target_env = "msvc")] +mod win { + use kernel32; + use std::ffi::CString; + use std::mem; + use std::ptr; + use schannel::cert_context::ValidUses; + use schannel::cert_store::CertStore; + use winapi::{self, c_void, c_uchar, c_long, c_int}; + + fn lookup(module: &str, symbol: &str) -> Option<*const c_void> { + unsafe { + let symbol = CString::new(symbol).unwrap(); + let mut mod_buf: Vec = module.encode_utf16().collect(); + mod_buf.push(0); + let handle = kernel32::GetModuleHandleW(mod_buf.as_mut_ptr()); + let n = kernel32::GetProcAddress(handle, symbol.as_ptr()); + if n == ptr::null() { + None + } else { + Some(n) + } + } + } + + pub enum X509_STORE {} + pub enum X509 {} + pub enum SSL_CTX {} + + type d2i_X509_fn = unsafe extern "C" fn( + a: *mut *mut X509, + pp: *mut *const c_uchar, + length: c_long, + ) -> *mut X509; + type X509_free_fn = unsafe extern "C" fn(x: *mut X509); + type X509_STORE_add_cert_fn = unsafe extern "C" fn(store: *mut X509_STORE, x: *mut X509) + -> c_int; + type SSL_CTX_get_cert_store_fn = unsafe extern "C" fn(ctx: *const SSL_CTX) + -> *mut X509_STORE; + + struct OpenSSL { + d2i_X509: d2i_X509_fn, + X509_free: X509_free_fn, + X509_STORE_add_cert: X509_STORE_add_cert_fn, + SSL_CTX_get_cert_store: SSL_CTX_get_cert_store_fn, + } + + unsafe fn lookup_functions(crypto_module: &str, ssl_module: &str) + -> Option + { + macro_rules! get { + ($(let $sym:ident in $module:expr;)*) => ($( + let $sym = match lookup($module, stringify!($sym)) { + Some(p) => p, + None => return None, + }; + )*) + } + get! 
{ + let d2i_X509 in crypto_module; + let X509_free in crypto_module; + let X509_STORE_add_cert in crypto_module; + let SSL_CTX_get_cert_store in ssl_module; + } + Some(OpenSSL { + d2i_X509: mem::transmute(d2i_X509), + X509_free: mem::transmute(X509_free), + X509_STORE_add_cert: mem::transmute(X509_STORE_add_cert), + SSL_CTX_get_cert_store: mem::transmute(SSL_CTX_get_cert_store), + }) + } + + pub unsafe fn add_certs_to_context(ssl_ctx: *mut c_void) { + // check the runtime version of OpenSSL + let openssl = match ::version::Version::get().ssl_version() { + Some(ssl_ver) if ssl_ver.starts_with("OpenSSL/1.1.0") => { + lookup_functions("libcrypto", "libssl") + } + Some(ssl_ver) if ssl_ver.starts_with("OpenSSL/1.0.2") => { + lookup_functions("libeay32", "ssleay32") + } + _ => return, + }; + let openssl = match openssl { + Some(s) => s, + None => return, + }; + + let openssl_store = (openssl.SSL_CTX_get_cert_store)(ssl_ctx as *const SSL_CTX); + let mut store = match CertStore::open_current_user("ROOT") { + Ok(s) => s, + Err(_) => return, + }; + + for cert in store.certs() { + let valid_uses = match cert.valid_uses() { + Ok(v) => v, + Err(_) => continue, + }; + + // check the extended key usage for the "Server Authentication" OID + match valid_uses { + ValidUses::All => {} + ValidUses::Oids(ref oids) => { + let oid = winapi::wincrypt::szOID_PKIX_KP_SERVER_AUTH.to_owned(); + if !oids.contains(&oid) { + continue + } + } + } + + let der = cert.to_der(); + let x509 = (openssl.d2i_X509)(ptr::null_mut(), + &mut der.as_ptr(), + der.len() as c_long); + if !x509.is_null() { + (openssl.X509_STORE_add_cert)(openssl_store, x509); + (openssl.X509_free)(x509); + } + } + } +} + +#[cfg(target_env = "msvc")] +pub fn add_certs_to_context(ssl_ctx: *mut c_void) { + unsafe { + win::add_certs_to_context(ssl_ctx as *mut _); + } +} + +#[cfg(not(target_env = "msvc"))] +pub fn add_certs_to_context(_: *mut c_void) {} diff --git a/curl/.pc/winapi3.patch/src/lib.rs b/curl/.pc/winapi3.patch/src/lib.rs new file mode 100644 index 000000000..c1d9ef0ad --- /dev/null +++ b/curl/.pc/winapi3.patch/src/lib.rs @@ -0,0 +1,129 @@ +//! Rust bindings to the libcurl C library +//! +//! This crate contains bindings for an HTTP/HTTPS client which is powered by +//! [libcurl], the same library behind the `curl` command line tool. The API +//! currently closely matches that of libcurl itself, except that a Rustic layer +//! of safety is applied on top. +//! +//! [libcurl]: https://curl.haxx.se/libcurl/ +//! +//! # The "Easy" API +//! +//! The easiest way to send a request is to use the `Easy` api which corresponds +//! to `CURL` in libcurl. This handle supports a wide variety of options and can +//! be used to make a single blocking request in a thread. Callbacks can be +//! specified to deal with data as it arrives and a handle can be reused to +//! cache connections and such. +//! +//! ```rust,no_run +//! use std::io::{stdout, Write}; +//! +//! use curl::easy::Easy; +//! +//! // Write the contents of rust-lang.org to stdout +//! let mut easy = Easy::new(); +//! easy.url("https://www.rust-lang.org/").unwrap(); +//! easy.write_function(|data| { +//! stdout().write_all(data).unwrap(); +//! Ok(data.len()) +//! }).unwrap(); +//! easy.perform().unwrap(); +//! ``` +//! +//! # What about multiple concurrent HTTP requests? +//! +//! One option you have currently is to send multiple requests in multiple +//! threads, but otherwise libcurl has a "multi" interface for doing this +//! operation. 
Initial bindings of this interface can be found in the `multi` +//! module, but feedback is welcome! +//! +//! # Where does libcurl come from? +//! +//! This crate links to the `curl-sys` crate which is in turn responsible for +//! acquiring and linking to the libcurl library. Currently this crate will +//! build libcurl from source if one is not already detected on the system. +//! +//! There is a large number of releases for libcurl, all with different sets of +//! capabilities. Robust programs may wish to inspect `Version::get()` to test +//! what features are implemented in the linked build of libcurl at runtime. + +#![deny(missing_docs, missing_debug_implementations)] +#![doc(html_root_url = "https://docs.rs/curl/0.4")] + +extern crate curl_sys; +extern crate libc; +extern crate socket2; + +#[cfg(all(unix, not(target_os = "macos")))] +extern crate openssl_sys; +#[cfg(all(unix, not(target_os = "macos")))] +extern crate openssl_probe; +#[cfg(windows)] +extern crate winapi; + +#[cfg(target_env = "msvc")] +extern crate kernel32; +#[cfg(target_env = "msvc")] +extern crate schannel; + +use std::ffi::CStr; +use std::str; +use std::sync::{Once, ONCE_INIT}; + +pub use error::{Error, ShareError, MultiError, FormError}; +mod error; + +pub use version::{Version, Protocols}; +mod version; + +mod panic; +pub mod easy; +pub mod multi; + +/// Initializes the underlying libcurl library. +/// +/// It's not required to call this before the library is used, but it's +/// recommended to do so as soon as the program starts. +pub fn init() { + static INIT: Once = ONCE_INIT; + INIT.call_once(|| { + platform_init(); + unsafe { + assert_eq!(curl_sys::curl_global_init(curl_sys::CURL_GLOBAL_ALL), 0); + } + + // Note that we explicitly don't schedule a call to + // `curl_global_cleanup`. The documentation for that function says + // + // > You must not call it when any other thread in the program (i.e. a + // > thread sharing the same memory) is running. This doesn't just mean + // > no other thread that is using libcurl. + // + // We can't ever be sure of that, so unfortunately we can't call the + // function. + }); + + #[cfg(all(unix, not(target_os = "macos")))] + fn platform_init() { + openssl_sys::init(); + } + + #[cfg(not(all(unix, not(target_os = "macos"))))] + fn platform_init() {} +} + +unsafe fn opt_str<'a>(ptr: *const libc::c_char) -> Option<&'a str> { + if ptr.is_null() { + None + } else { + Some(str::from_utf8(CStr::from_ptr(ptr).to_bytes()).unwrap()) + } +} + +fn cvt(r: curl_sys::CURLcode) -> Result<(), Error> { + if r == curl_sys::CURLE_OK { + Ok(()) + } else { + Err(Error::new(r)) + } +} diff --git a/curl/.pc/winapi3.patch/src/multi.rs b/curl/.pc/winapi3.patch/src/multi.rs new file mode 100644 index 000000000..4c15f933f --- /dev/null +++ b/curl/.pc/winapi3.patch/src/multi.rs @@ -0,0 +1,1069 @@ +//! Multi - initiating multiple requests simultaneously + +use std::fmt; +use std::marker; +use std::time::Duration; + +use libc::{c_int, c_char, c_void, c_long, c_short}; +use curl_sys; + +#[cfg(windows)] +use winapi::winsock2::fd_set; +#[cfg(unix)] +use libc::{fd_set, pollfd, POLLIN, POLLPRI, POLLOUT}; + +use {MultiError, Error}; +use easy::{Easy, Easy2}; +use panic; + +/// A multi handle for initiating multiple connections simultaneously. +/// +/// This structure corresponds to `CURLM` in libcurl and provides the ability to +/// have multiple transfers in flight simultaneously. This handle is then used +/// to manage each transfer. 
The main purpose of a `CURLM` is for the +/// *application* to drive the I/O rather than libcurl itself doing all the +/// blocking. Methods like `action` allow the application to inform libcurl of +/// when events have happened. +/// +/// Lots more documentation can be found on the libcurl [multi tutorial] where +/// the APIs correspond pretty closely with this crate. +/// +/// [multi tutorial]: https://curl.haxx.se/libcurl/c/libcurl-multi.html +pub struct Multi { + raw: *mut curl_sys::CURLM, + data: Box, +} + +struct MultiData { + socket: Box, + timer: Box) -> bool + Send>, +} + +/// Message from the `messages` function of a multi handle. +/// +/// Currently only indicates whether a transfer is done. +pub struct Message<'multi> { + ptr: *mut curl_sys::CURLMsg, + _multi: &'multi Multi, +} + +/// Wrapper around an easy handle while it's owned by a multi handle. +/// +/// Once an easy handle has been added to a multi handle then it can no longer +/// be used via `perform`. This handle is also used to remove the easy handle +/// from the multi handle when desired. +pub struct EasyHandle { + easy: Easy, + // This is now effecitvely bound to a `Multi`, so it is no longer sendable. + _marker: marker::PhantomData<&'static Multi>, +} + +/// Wrapper around an easy handle while it's owned by a multi handle. +/// +/// Once an easy handle has been added to a multi handle then it can no longer +/// be used via `perform`. This handle is also used to remove the easy handle +/// from the multi handle when desired. +pub struct Easy2Handle { + easy: Easy2, + // This is now effecitvely bound to a `Multi`, so it is no longer sendable. + _marker: marker::PhantomData<&'static Multi>, +} + +/// Notification of the events that have happened on a socket. +/// +/// This type is passed as an argument to the `action` method on a multi handle +/// to indicate what events have occurred on a socket. +pub struct Events { + bits: c_int, +} + +/// Notification of events that are requested on a socket. +/// +/// This type is yielded to the `socket_function` callback to indicate what +/// events are requested on a socket. +pub struct SocketEvents { + bits: c_int, +} + +/// Raw underlying socket type that the multi handles use +pub type Socket = curl_sys::curl_socket_t; + +/// File descriptor to wait on for use with the `wait` method on a multi handle. +pub struct WaitFd { + inner: curl_sys::curl_waitfd, +} + +impl Multi { + /// Creates a new multi session through which multiple HTTP transfers can be + /// initiated. + pub fn new() -> Multi { + unsafe { + ::init(); + let ptr = curl_sys::curl_multi_init(); + assert!(!ptr.is_null()); + Multi { + raw: ptr, + data: Box::new(MultiData { + socket: Box::new(|_, _, _| ()), + timer: Box::new(|_| true), + }), + } + } + } + + /// Set the callback informed about what to wait for + /// + /// When the `action` function runs, it informs the application about + /// updates in the socket (file descriptor) status by doing none, one, or + /// multiple calls to the socket callback. The callback gets status updates + /// with changes since the previous time the callback was called. See + /// `action` for more details on how the callback is used and should work. + /// + /// The `SocketEvents` parameter informs the callback on the status of the + /// given socket, and the methods on that type can be used to learn about + /// what's going on with the socket. + /// + /// The third `usize` parameter is a custom value set by the `assign` method + /// below. 
+ pub fn socket_function(&mut self, f: F) -> Result<(), MultiError> + where F: FnMut(Socket, SocketEvents, usize) + Send + 'static, + { + self._socket_function(Box::new(f)) + } + + fn _socket_function(&mut self, + f: Box) + -> Result<(), MultiError> + { + self.data.socket = f; + let cb: curl_sys::curl_socket_callback = cb; + try!(self.setopt_ptr(curl_sys::CURLMOPT_SOCKETFUNCTION, + cb as usize as *const c_char)); + let ptr = &*self.data as *const _; + try!(self.setopt_ptr(curl_sys::CURLMOPT_SOCKETDATA, + ptr as *const c_char)); + return Ok(()); + + // TODO: figure out how to expose `_easy` + extern fn cb(_easy: *mut curl_sys::CURL, + socket: curl_sys::curl_socket_t, + what: c_int, + userptr: *mut c_void, + socketp: *mut c_void) -> c_int { + panic::catch(|| unsafe { + let f = &mut (*(userptr as *mut MultiData)).socket; + f(socket, SocketEvents { bits: what }, socketp as usize) + }); + 0 + } + } + + /// Set data to associate with an internal socket + /// + /// This function creates an association in the multi handle between the + /// given socket and a private token of the application. This is designed + /// for `action` uses. + /// + /// When set, the token will be passed to all future socket callbacks for + /// the specified socket. + /// + /// If the given socket isn't already in use by libcurl, this function will + /// return an error. + /// + /// libcurl only keeps one single token associated with a socket, so + /// calling this function several times for the same socket will make the + /// last set token get used. + /// + /// The idea here being that this association (socket to token) is something + /// that just about every application that uses this API will need and then + /// libcurl can just as well do it since it already has an internal hash + /// table lookup for this. + /// + /// # Typical Usage + /// + /// In a typical application you allocate a struct or at least use some kind + /// of semi-dynamic data for each socket that we must wait for action on + /// when using the `action` approach. + /// + /// When our socket-callback gets called by libcurl and we get to know about + /// yet another socket to wait for, we can use `assign` to point out the + /// particular data so that when we get updates about this same socket + /// again, we don't have to find the struct associated with this socket by + /// ourselves. + pub fn assign(&self, + socket: Socket, + token: usize) -> Result<(), MultiError> { + unsafe { + try!(cvt(curl_sys::curl_multi_assign(self.raw, socket, + token as *mut _))); + Ok(()) + } + } + + /// Set callback to receive timeout values + /// + /// Certain features, such as timeouts and retries, require you to call + /// libcurl even when there is no activity on the file descriptors. + /// + /// Your callback function should install a non-repeating timer with the + /// interval specified. Each time that timer fires, call either `action` or + /// `perform`, depending on which interface you use. + /// + /// A timeout value of `None` means you should delete your timer. + /// + /// A timeout value of 0 means you should call `action` or `perform` (once) + /// as soon as possible. + /// + /// This callback will only be called when the timeout changes. + /// + /// The timer callback should return `true` on success, and `false` on + /// error. This callback can be used instead of, or in addition to, + /// `get_timeout`. 
+ pub fn timer_function(&mut self, f: F) -> Result<(), MultiError> + where F: FnMut(Option) -> bool + Send + 'static, + { + self._timer_function(Box::new(f)) + } + + fn _timer_function(&mut self, + f: Box) -> bool + Send>) + -> Result<(), MultiError> + { + self.data.timer = f; + let cb: curl_sys::curl_multi_timer_callback = cb; + try!(self.setopt_ptr(curl_sys::CURLMOPT_TIMERFUNCTION, + cb as usize as *const c_char)); + let ptr = &*self.data as *const _; + try!(self.setopt_ptr(curl_sys::CURLMOPT_TIMERDATA, + ptr as *const c_char)); + return Ok(()); + + // TODO: figure out how to expose `_multi` + extern fn cb(_multi: *mut curl_sys::CURLM, + timeout_ms: c_long, + user: *mut c_void) -> c_int { + let keep_going = panic::catch(|| unsafe { + let f = &mut (*(user as *mut MultiData)).timer; + if timeout_ms == -1 { + f(None) + } else { + f(Some(Duration::from_millis(timeout_ms as u64))) + } + }).unwrap_or(false); + if keep_going {0} else {-1} + } + } + + /// Enable or disable HTTP pipelining and multiplexing. + /// + /// When http_1 is true, enable HTTP/1.1 pipelining, which means that if + /// you add a second request that can use an already existing connection, + /// the second request will be "piped" on the same connection rather than + /// being executed in parallel. + /// + /// When multiplex is true, enable HTTP/2 multiplexing, which means that + /// follow-up requests can re-use an existing connection and send the new + /// request multiplexed over that at the same time as other transfers are + /// already using that single connection. + pub fn pipelining(&mut self, http_1: bool, multiplex: bool) -> Result<(), MultiError> { + let bitmask = if http_1 { curl_sys::CURLPIPE_HTTP1 } else { 0 } | if multiplex { curl_sys::CURLPIPE_MULTIPLEX } else { 0 }; + self.setopt_long(curl_sys::CURLMOPT_PIPELINING, bitmask) + } + + /// Sets the max number of connections to a single host. + /// + /// Pass a long to indicate the max number of simultaneously open connections + /// to a single host (a host being the same as a host name + port number pair). + /// For each new session to a host, libcurl will open up a new connection up to the + /// limit set by the provided value. When the limit is reached, the sessions will + /// be pending until a connection becomes available. If pipelining is enabled, + /// libcurl will try to pipeline if the host is capable of it. + pub fn set_max_host_connections(&mut self, val: usize) -> Result<(), MultiError> { + self.setopt_long(curl_sys::CURLMOPT_MAX_HOST_CONNECTIONS, val as c_long) + } + + /// Sets the pipeline length. + /// + /// This sets the max number that will be used as the maximum amount of + /// outstanding reuqests in an HTTP/1.1 pipelined connection. This option + /// is only used for HTTP/1.1 pipelining, and not HTTP/2 multiplexing. + pub fn set_pipeline_length(&mut self, val: usize) -> Result<(), MultiError> { + self.setopt_long(curl_sys::CURLMOPT_MAX_PIPELINE_LENGTH, val as c_long) + } + + fn setopt_long(&mut self, + opt: curl_sys::CURLMoption, + val: c_long) -> Result<(), MultiError> { + unsafe { + cvt(curl_sys::curl_multi_setopt(self.raw, opt, val)) + } + } + + fn setopt_ptr(&mut self, + opt: curl_sys::CURLMoption, + val: *const c_char) -> Result<(), MultiError> { + unsafe { + cvt(curl_sys::curl_multi_setopt(self.raw, opt, val)) + } + } + + /// Add an easy handle to a multi session + /// + /// Adds a standard easy handle to the multi stack. This function call will + /// make this multi handle control the specified easy handle. 
+    ///
+    /// When an easy interface is added to a multi handle, it will use a shared
+    /// connection cache owned by the multi handle. Removing and adding new easy
+    /// handles will not affect the pool of connections or the ability to do
+    /// connection re-use.
+    ///
+    /// If you have `timer_function` set in the multi handle (and you really
+    /// should if you're working event-based with `action` and friends), that
+    /// callback will be called from within this function to ask for an updated
+    /// timer so that your main event loop will get the activity on this handle
+    /// to get started.
+    ///
+    /// The easy handle will remain added to the multi handle until you remove
+    /// it again with `remove` on the returned handle - even when a transfer
+    /// with that specific easy handle is completed.
+    pub fn add(&self, mut easy: Easy) -> Result<EasyHandle, MultiError> {
+        // Clear any configuration set by previous transfers because we're
+        // moving this into a `Send+'static` situation now basically.
+        easy.transfer();
+
+        unsafe {
+            try!(cvt(curl_sys::curl_multi_add_handle(self.raw, easy.raw())));
+        }
+        Ok(EasyHandle {
+            easy: easy,
+            _marker: marker::PhantomData,
+        })
+    }
+
+    /// Same as `add`, but works with the `Easy2` type.
+    pub fn add2<H>(&self, easy: Easy2<H>) -> Result<Easy2Handle<H>, MultiError> {
+        unsafe {
+            try!(cvt(curl_sys::curl_multi_add_handle(self.raw, easy.raw())));
+        }
+        Ok(Easy2Handle {
+            easy: easy,
+            _marker: marker::PhantomData,
+        })
+    }
+
+    /// Remove an easy handle from this multi session
+    ///
+    /// Removes the easy handle from this multi handle. This will make the
+    /// returned easy handle be removed from this multi handle's control.
+    ///
+    /// When the easy handle has been removed from a multi stack, it is again
+    /// perfectly legal to invoke `perform` on it.
+    ///
+    /// Removing an easy handle while it is being used is perfectly legal and
+    /// will effectively halt the transfer in progress involving that easy
+    /// handle. All other easy handles and transfers will remain unaffected.
+    pub fn remove(&self, easy: EasyHandle) -> Result<Easy, MultiError> {
+        unsafe {
+            try!(cvt(curl_sys::curl_multi_remove_handle(self.raw,
+                                                        easy.easy.raw())));
+        }
+        Ok(easy.easy)
+    }
+
+    /// Same as `remove`, but for `Easy2Handle`.
+    pub fn remove2<H>(&self, easy: Easy2Handle<H>) -> Result<Easy2<H>, MultiError> {
+        unsafe {
+            try!(cvt(curl_sys::curl_multi_remove_handle(self.raw,
+                                                        easy.easy.raw())));
+        }
+        Ok(easy.easy)
+    }
+
+    /// Read multi stack informationals
+    ///
+    /// Ask the multi handle if there are any messages/informationals from the
+    /// individual transfers. Messages may include informationals such as an
+    /// error code from the transfer or just the fact that a transfer is
+    /// completed. More details on these should be written down as well.
+    pub fn messages<F>(&self, mut f: F) where F: FnMut(Message) {
+        self._messages(&mut f)
+    }
+
+    fn _messages(&self, f: &mut FnMut(Message)) {
+        let mut queue = 0;
+        unsafe {
+            loop {
+                let ptr = curl_sys::curl_multi_info_read(self.raw, &mut queue);
+                if ptr.is_null() {
+                    break
+                }
+                f(Message { ptr: ptr, _multi: self })
+            }
+        }
+    }
+
+    /// Inform libcurl of read/write availability on a socket, given an action
+    ///
+    /// When the application has detected action on a socket handled by libcurl,
+    /// it should call this function with the `socket` argument set to
+    /// the socket with the action. When the events on a socket are known, they
+    /// can be passed as `events`. When the events on a socket are unknown, pass
+    /// `Events::new()` instead, and libcurl will test the descriptor
+    /// internally.
+    ///
+    /// The returned integer will contain the number of running easy handles
+    /// within the multi handle. When this number reaches zero, all transfers
+    /// are complete/done. When you call `action` on a specific socket and the
+    /// counter decreases by one, it DOES NOT necessarily mean that this exact
+    /// socket/transfer is the one that completed. Use `messages` to figure out
+    /// which easy handle completed.
+    ///
+    /// The `action` function informs the application about updates in the
+    /// socket (file descriptor) status by doing none, one, or multiple calls to
+    /// the socket callback function set with the `socket_function` method. They
+    /// update the status with changes since the previous time the callback was
+    /// called.
+    pub fn action(&self, socket: Socket, events: &Events)
+                  -> Result<u32, MultiError> {
+        let mut remaining = 0;
+        unsafe {
+            try!(cvt(curl_sys::curl_multi_socket_action(self.raw,
+                                                        socket,
+                                                        events.bits,
+                                                        &mut remaining)));
+            Ok(remaining as u32)
+        }
+    }
+
+    /// Inform libcurl that a timeout has expired and sockets should be tested.
+    ///
+    /// The returned integer will contain the number of running easy handles
+    /// within the multi handle. When this number reaches zero, all transfers
+    /// are complete/done. When you call `action` on a specific socket and the
+    /// counter decreases by one, it DOES NOT necessarily mean that this exact
+    /// socket/transfer is the one that completed. Use `messages` to figure out
+    /// which easy handle completed.
+    ///
+    /// Get the timeout time by setting the `timer_function` callback. Your
+    /// application will then be told how long to wait, at most, for socket
+    /// actions before performing the timeout action: calling this `timeout`
+    /// method. You can also use the `get_timeout` function to
+    /// poll the value at any given time, but for an event-based system using
+    /// the callback is far better than relying on polling the timeout value.
+    pub fn timeout(&self) -> Result<u32, MultiError> {
+        let mut remaining = 0;
+        unsafe {
+            try!(cvt(curl_sys::curl_multi_socket_action(self.raw,
+                                                        curl_sys::CURL_SOCKET_BAD,
+                                                        0,
+                                                        &mut remaining)));
+            Ok(remaining as u32)
+        }
+    }
+
+    /// Get how long to wait for action before proceeding
+    ///
+    /// An application using the libcurl multi interface should call
+    /// `get_timeout` to figure out how long it should wait for socket actions -
+    /// at most - before proceeding.
+    ///
+    /// Proceeding means either doing the socket-style timeout action (call the
+    /// `timeout` function), or calling `perform` if you're using the simpler
+    /// and older multi interface approach.
+    ///
+    /// The timeout value returned is the duration at this very moment. If 0, it
+    /// means you should proceed immediately without waiting for anything. If it
+    /// returns `None`, there's no timeout at all set.
+    ///
+    /// Note: if libcurl returns a `None` timeout here, it just means that
+    /// libcurl currently has no stored timeout value. You must not wait too
+    /// long (more than a few seconds perhaps) before you call `perform` again.
+    pub fn get_timeout(&self) -> Result<Option<Duration>, MultiError> {
+        let mut ms = 0;
+        unsafe {
+            try!(cvt(curl_sys::curl_multi_timeout(self.raw, &mut ms)));
+            if ms == -1 {
+                Ok(None)
+            } else {
+                Ok(Some(Duration::from_millis(ms as u64)))
+            }
+        }
+    }
+
+    /// Block until activity is detected or a timeout passes.
+    ///
+    /// The timeout is used with millisecond precision. Large durations are
+    /// clamped at the maximum value curl accepts.
+    ///
+    /// The returned integer will contain the number of internal file
+    /// descriptors on which interesting events occurred.
+    ///
+    /// This function is a simpler alternative to using `fdset()` and `select()`
+    /// and does not suffer from file descriptor limits.
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// use curl::multi::Multi;
+    /// use std::time::Duration;
+    ///
+    /// let m = Multi::new();
+    ///
+    /// // Add some Easy handles...
+    ///
+    /// while m.perform().unwrap() > 0 {
+    ///     m.wait(&mut [], Duration::from_secs(1)).unwrap();
+    /// }
+    /// ```
+    pub fn wait(&self, waitfds: &mut [WaitFd], timeout: Duration)
+                -> Result<u32, MultiError> {
+        let timeout_ms = {
+            let secs = timeout.as_secs();
+            if secs > (i32::max_value() / 1000) as u64 {
+                // Duration too large, clamp at maximum value.
+                i32::max_value()
+            } else {
+                secs as i32 * 1000 + timeout.subsec_nanos() as i32 / 1_000_000
+            }
+        };
+        unsafe {
+            let mut ret = 0;
+            try!(cvt(curl_sys::curl_multi_wait(self.raw,
+                                               waitfds.as_mut_ptr() as *mut _,
+                                               waitfds.len() as u32,
+                                               timeout_ms,
+                                               &mut ret)));
+            Ok(ret as u32)
+        }
+    }
+
+    /// Reads/writes available data from each easy handle.
+    ///
+    /// This function handles transfers on all the added handles that need
+    /// attention in a non-blocking fashion.
+    ///
+    /// When an application has found out there's data available for this handle
+    /// or a timeout has elapsed, the application should call this function to
+    /// read/write whatever there is to read or write right now. This
+    /// method returns as soon as the reads/writes are done. This function does
+    /// not require that there actually is any data available for reading or
+    /// that data can be written, it can be called just in case. It will return
+    /// the number of handles that still transfer data.
+    ///
+    /// If the number of running handles has changed from the previous call (or
+    /// is less than the number of easy handles you've added to the multi
+    /// handle), you know that one or more transfers are no longer "running".
+    /// You can then call `messages` to get information about each individual
+    /// completed transfer, and that returned info includes `Error` and more.
+    /// If an added handle fails very quickly, it may never be counted as a
+    /// running handle.
+    ///
+    /// When the returned count of running handles is zero, there are no longer
+    /// any transfers in progress.
+    ///
+    /// # Return
+    ///
+    /// Before libcurl version 7.20.0: If you receive `is_call_perform`, this
+    /// basically means that you should call `perform` again, before you select
+    /// on more actions. You don't have to do it immediately, but the return
+    /// code means that libcurl may have more data available to return or that
+    /// there may be more data to send off before it is "satisfied". Do note
+    /// that `perform` will return `is_call_perform` only when it wants to be
+    /// called again immediately. When things are fine and there is nothing
+    /// immediate it wants done, it'll return `Ok` and you need to wait for
+    /// "action" and then call this function again.
+    ///
+    /// This function only returns errors regarding the whole multi stack.
+    /// Problems still might have occurred on individual transfers even when
+    /// this function returns `Ok`. Use `messages` to figure out how individual
+    /// transfers did.
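+    ///
+    /// # Example
+    ///
+    /// An illustrative sketch of a simple blocking drive loop; error handling
+    /// is elided with `unwrap` and the URL is a placeholder.
+    ///
+    /// ```no_run
+    /// use std::time::Duration;
+    /// use curl::easy::Easy;
+    /// use curl::multi::Multi;
+    ///
+    /// let multi = Multi::new();
+    /// let mut easy = Easy::new();
+    /// easy.url("https://www.rust-lang.org/").unwrap();
+    /// let handle = multi.add(easy).unwrap();
+    ///
+    /// while multi.perform().unwrap() > 0 {
+    ///     // Nothing ready yet; block for up to a second before retrying.
+    ///     multi.wait(&mut [], Duration::from_secs(1)).unwrap();
+    /// }
+    ///
+    /// // Inspect per-transfer results once nothing is left running.
+    /// multi.messages(|message| {
+    ///     println!("transfer result: {:?}", message.result_for(&handle));
+    /// });
+    /// ```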
+    pub fn perform(&self) -> Result<u32, MultiError> {
+        unsafe {
+            let mut ret = 0;
+            try!(cvt(curl_sys::curl_multi_perform(self.raw, &mut ret)));
+            Ok(ret as u32)
+        }
+    }
+
+    /// Extracts file descriptor information from a multi handle
+    ///
+    /// This function extracts file descriptor information from a given
+    /// handle, and libcurl returns its `fd_set` sets. The application can use
+    /// these to `select()` on, but be sure to `FD_ZERO` them before calling
+    /// this function as `curl_multi_fdset` only adds its own descriptors; it
+    /// doesn't zero or otherwise remove any others. The `curl_multi_perform`
+    /// function should be called as soon as one of them is ready to be read
+    /// from or written to.
+    ///
+    /// If no file descriptors are set by libcurl, this function will return
+    /// `Ok(None)`. Otherwise `Ok(Some(n))` will be returned where `n` is the
+    /// highest descriptor number libcurl set. When `Ok(None)` is returned it
+    /// is because libcurl currently does something that isn't possible for
+    /// your application to monitor with a socket, and you unfortunately cannot
+    /// know exactly when the current action is completed using
+    /// `select()`. You then need to wait a while before you proceed and call
+    /// `perform` anyway.
+    ///
+    /// When doing `select()`, you should use `get_timeout` to figure out
+    /// how long to wait for action. Call `perform` even if no activity has
+    /// been seen on the `fd_set`s after the timeout expires, as otherwise
+    /// internal retries and timeouts may not work as you'd think and want.
+    ///
+    /// If one of the sockets used by libcurl happens to be larger than what
+    /// can be set in an `fd_set`, which on POSIX systems means that the file
+    /// descriptor is larger than `FD_SETSIZE`, then libcurl will try to not
+    /// set it. Setting too large a file descriptor in an `fd_set` implies an
+    /// out-of-bounds write which can cause crashes, or worse. The effect of NOT
+    /// storing it will possibly save you from the crash, but will make your
+    /// program NOT wait for sockets it should wait for...
+    pub fn fdset2(&self,
+                  read: Option<&mut curl_sys::fd_set>,
+                  write: Option<&mut curl_sys::fd_set>,
+                  except: Option<&mut curl_sys::fd_set>) -> Result<Option<i32>, MultiError> {
+        unsafe {
+            let mut ret = 0;
+            let read = read.map(|r| r as *mut _).unwrap_or(0 as *mut _);
+            let write = write.map(|r| r as *mut _).unwrap_or(0 as *mut _);
+            let except = except.map(|r| r as *mut _).unwrap_or(0 as *mut _);
+            try!(cvt(curl_sys::curl_multi_fdset(self.raw,
+                                                read,
+                                                write,
+                                                except,
+                                                &mut ret)));
+            if ret == -1 {
+                Ok(None)
+            } else {
+                Ok(Some(ret))
+            }
+        }
+    }
+
+    #[doc(hidden)]
+    #[deprecated(note = "renamed to fdset2")]
+    pub fn fdset(&self,
+                 read: Option<&mut fd_set>,
+                 write: Option<&mut fd_set>,
+                 except: Option<&mut fd_set>) -> Result<Option<i32>, MultiError> {
+        unsafe {
+            let mut ret = 0;
+            let read = read.map(|r| r as *mut _).unwrap_or(0 as *mut _);
+            let write = write.map(|r| r as *mut _).unwrap_or(0 as *mut _);
+            let except = except.map(|r| r as *mut _).unwrap_or(0 as *mut _);
+            try!(cvt(curl_sys::curl_multi_fdset(self.raw,
+                                                read as *mut _,
+                                                write as *mut _,
+                                                except as *mut _,
+                                                &mut ret)));
+            if ret == -1 {
+                Ok(None)
+            } else {
+                Ok(Some(ret))
+            }
+        }
+    }
+
+    /// Attempt to close the multi handle and clean up all associated resources.
+    ///
+    /// Cleans up and removes a whole multi stack. It does not free or touch any
+    /// individual easy handles in any way - they still need to be closed
+    /// individually.
+ pub fn close(&self) -> Result<(), MultiError> { + unsafe { + cvt(curl_sys::curl_multi_cleanup(self.raw)) + } + } +} + +fn cvt(code: curl_sys::CURLMcode) -> Result<(), MultiError> { + if code == curl_sys::CURLM_OK { + Ok(()) + } else { + Err(MultiError::new(code)) + } +} + +impl fmt::Debug for Multi { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_struct("Multi") + .field("raw", &self.raw) + .finish() + } +} + +impl Drop for Multi { + fn drop(&mut self) { + let _ = self.close(); + } +} + +impl EasyHandle { + /// Sets an internal private token for this `EasyHandle`. + /// + /// This function will set the `CURLOPT_PRIVATE` field on the underlying + /// easy handle. + pub fn set_token(&mut self, token: usize) -> Result<(), Error> { + unsafe { + ::cvt(curl_sys::curl_easy_setopt(self.easy.raw(), + curl_sys::CURLOPT_PRIVATE, + token)) + } + } + + /// Unpause reading on a connection. + /// + /// Using this function, you can explicitly unpause a connection that was + /// previously paused. + /// + /// A connection can be paused by letting the read or the write callbacks + /// return `ReadError::Pause` or `WriteError::Pause`. + /// + /// The chance is high that you will get your write callback called before + /// this function returns. + pub fn unpause_read(&self) -> Result<(), Error> { + self.easy.unpause_read() + } + + /// Unpause writing on a connection. + /// + /// Using this function, you can explicitly unpause a connection that was + /// previously paused. + /// + /// A connection can be paused by letting the read or the write callbacks + /// return `ReadError::Pause` or `WriteError::Pause`. A write callback that + /// returns pause signals to the library that it couldn't take care of any + /// data at all, and that data will then be delivered again to the callback + /// when the writing is later unpaused. + pub fn unpause_write(&self) -> Result<(), Error> { + self.easy.unpause_write() + } +} + +impl fmt::Debug for EasyHandle { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + self.easy.fmt(f) + } +} + +impl Easy2Handle { + /// Acquires a reference to the underlying handler for events. + pub fn get_ref(&self) -> &H { + self.easy.get_ref() + } + + /// Acquires a reference to the underlying handler for events. + pub fn get_mut(&mut self) -> &mut H { + self.easy.get_mut() + } + + /// Same as `EasyHandle::set_token` + pub fn set_token(&mut self, token: usize) -> Result<(), Error> { + unsafe { + ::cvt(curl_sys::curl_easy_setopt(self.easy.raw(), + curl_sys::CURLOPT_PRIVATE, + token)) + } + } + + /// Unpause reading on a connection. + /// + /// Using this function, you can explicitly unpause a connection that was + /// previously paused. + /// + /// A connection can be paused by letting the read or the write callbacks + /// return `ReadError::Pause` or `WriteError::Pause`. + /// + /// The chance is high that you will get your write callback called before + /// this function returns. + pub fn unpause_read(&self) -> Result<(), Error> { + self.easy.unpause_read() + } + + /// Unpause writing on a connection. + /// + /// Using this function, you can explicitly unpause a connection that was + /// previously paused. + /// + /// A connection can be paused by letting the read or the write callbacks + /// return `ReadError::Pause` or `WriteError::Pause`. A write callback that + /// returns pause signals to the library that it couldn't take care of any + /// data at all, and that data will then be delivered again to the callback + /// when the writing is later unpaused. 
+    pub fn unpause_write(&self) -> Result<(), Error> {
+        self.easy.unpause_write()
+    }
+}
+
+impl<H: fmt::Debug> fmt::Debug for Easy2Handle<H> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        self.easy.fmt(f)
+    }
+}
+
+impl<'multi> Message<'multi> {
+    /// If this message indicates that a transfer has finished, returns the
+    /// result of the transfer in `Some`.
+    ///
+    /// If the message doesn't indicate that a transfer has finished, then
+    /// `None` is returned.
+    ///
+    /// Note that the `result_for` and `result_for2` methods below should be
+    /// preferred, as they provide better error messages: the error data stored
+    /// on the handle can be attached to the returned error.
+    pub fn result(&self) -> Option<Result<(), Error>> {
+        unsafe {
+            if (*self.ptr).msg == curl_sys::CURLMSG_DONE {
+                Some(::cvt((*self.ptr).data as curl_sys::CURLcode))
+            } else {
+                None
+            }
+        }
+    }
+
+    /// Same as `result`, except only returns `Some` for the specified handle.
+    ///
+    /// Note that this function produces better error messages than `result` as
+    /// it uses `take_error_buf` to associate error information with the
+    /// returned error.
+    pub fn result_for(&self, handle: &EasyHandle) -> Option<Result<(), Error>> {
+        if !self.is_for(handle) {
+            return None
+        }
+        let mut err = self.result();
+        if let Some(Err(e)) = &mut err {
+            if let Some(s) = handle.easy.take_error_buf() {
+                e.set_extra(s);
+            }
+        }
+        return err
+    }
+
+    /// Same as `result`, except only returns `Some` for the specified handle.
+    ///
+    /// Note that this function produces better error messages than `result` as
+    /// it uses `take_error_buf` to associate error information with the
+    /// returned error.
+    pub fn result_for2<H>(&self, handle: &Easy2Handle<H>) -> Option<Result<(), Error>> {
+        if !self.is_for2(handle) {
+            return None
+        }
+        let mut err = self.result();
+        if let Some(Err(e)) = &mut err {
+            if let Some(s) = handle.easy.take_error_buf() {
+                e.set_extra(s);
+            }
+        }
+        return err
+    }
+
+    /// Returns whether this easy message was for the specified easy handle or
+    /// not.
+    pub fn is_for(&self, handle: &EasyHandle) -> bool {
+        unsafe { (*self.ptr).easy_handle == handle.easy.raw() }
+    }
+
+    /// Same as `is_for`, but for `Easy2Handle`.
+    pub fn is_for2<H>(&self, handle: &Easy2Handle<H>) -> bool {
+        unsafe { (*self.ptr).easy_handle == handle.easy.raw() }
+    }
+
+    /// Returns the token associated with the easy handle that this message
+    /// represents a completion for.
+    ///
+    /// This function will return the token assigned with
+    /// `EasyHandle::set_token`. This reads the `CURLINFO_PRIVATE` field of the
+    /// underlying `*mut CURL`.
+    pub fn token(&self) -> Result<usize, Error> {
+        unsafe {
+            let mut p = 0usize;
+            try!(::cvt(curl_sys::curl_easy_getinfo((*self.ptr).easy_handle,
+                                                   curl_sys::CURLINFO_PRIVATE,
+                                                   &mut p)));
+            Ok(p)
+        }
+    }
+}
+
+impl<'a> fmt::Debug for Message<'a> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        f.debug_struct("Message")
+            .field("ptr", &self.ptr)
+            .finish()
+    }
+}
+
+impl Events {
+    /// Creates a new blank event bit mask.
+    pub fn new() -> Events {
+        Events { bits: 0 }
+    }
+
+    /// Set or unset whether these events indicate that input is ready.
+    pub fn input(&mut self, val: bool) -> &mut Events {
+        self.flag(curl_sys::CURL_CSELECT_IN, val)
+    }
+
+    /// Set or unset whether these events indicate that output is ready.
+    pub fn output(&mut self, val: bool) -> &mut Events {
+        self.flag(curl_sys::CURL_CSELECT_OUT, val)
+    }
+
+    /// Set or unset whether these events indicate that an error has
+    /// happened.
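+    ///
+    /// # Example
+    ///
+    /// An illustrative sketch of building an event mask for `Multi::action`,
+    /// assuming the `Socket` alias exported by this module; `socket` is a
+    /// placeholder descriptor that your event loop would have reported.
+    ///
+    /// ```no_run
+    /// use curl::multi::{Events, Multi, Socket};
+    ///
+    /// fn notify_readable(multi: &Multi, socket: Socket) {
+    ///     let mut events = Events::new();
+    ///     events.input(true);
+    ///     // If the poller also reported an error condition, flag that too.
+    ///     events.error(true);
+    ///     let running = multi.action(socket, &events).unwrap();
+    ///     println!("{} transfers still running", running);
+    /// }
+    /// ```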
+    pub fn error(&mut self, val: bool) -> &mut Events {
+        self.flag(curl_sys::CURL_CSELECT_ERR, val)
+    }
+
+    fn flag(&mut self, flag: c_int, val: bool) -> &mut Events {
+        if val {
+            self.bits |= flag;
+        } else {
+            self.bits &= !flag;
+        }
+        self
+    }
+}
+
+impl fmt::Debug for Events {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        f.debug_struct("Events")
+            .field("input", &(self.bits & curl_sys::CURL_CSELECT_IN != 0))
+            .field("output", &(self.bits & curl_sys::CURL_CSELECT_OUT != 0))
+            .field("error", &(self.bits & curl_sys::CURL_CSELECT_ERR != 0))
+            .finish()
+    }
+}
+
+impl SocketEvents {
+    /// Wait for incoming data. For the socket to become readable.
+    pub fn input(&self) -> bool {
+        self.bits & curl_sys::CURL_POLL_IN == curl_sys::CURL_POLL_IN
+    }
+
+    /// Wait for outgoing data. For the socket to become writable.
+    pub fn output(&self) -> bool {
+        self.bits & curl_sys::CURL_POLL_OUT == curl_sys::CURL_POLL_OUT
+    }
+
+    /// Wait for incoming and outgoing data. For the socket to become readable
+    /// or writable.
+    pub fn input_and_output(&self) -> bool {
+        self.bits & curl_sys::CURL_POLL_INOUT == curl_sys::CURL_POLL_INOUT
+    }
+
+    /// The specified socket/file descriptor is no longer used by libcurl.
+    pub fn remove(&self) -> bool {
+        self.bits & curl_sys::CURL_POLL_REMOVE == curl_sys::CURL_POLL_REMOVE
+    }
+}
+
+impl fmt::Debug for SocketEvents {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        f.debug_struct("SocketEvents")
+            .field("input", &self.input())
+            .field("output", &self.output())
+            .field("remove", &self.remove())
+            .finish()
+    }
+}
+
+impl WaitFd {
+    /// Constructs an empty (invalid) WaitFd.
+    pub fn new() -> WaitFd {
+        WaitFd {
+            inner: curl_sys::curl_waitfd {
+                fd: 0,
+                events: 0,
+                revents: 0,
+            }
+        }
+    }
+
+    /// Set the file descriptor to wait for.
+    pub fn set_fd(&mut self, fd: Socket) {
+        self.inner.fd = fd;
+    }
+
+    /// Indicate that the socket should poll on read events such as new data
+    /// received.
+    ///
+    /// Corresponds to `CURL_WAIT_POLLIN`.
+    pub fn poll_on_read(&mut self, val: bool) -> &mut WaitFd {
+        self.flag(curl_sys::CURL_WAIT_POLLIN, val)
+    }
+
+    /// Indicate that the socket should poll on high priority read events such
+    /// as out of band data.
+    ///
+    /// Corresponds to `CURL_WAIT_POLLPRI`.
+    pub fn poll_on_priority_read(&mut self, val: bool) -> &mut WaitFd {
+        self.flag(curl_sys::CURL_WAIT_POLLPRI, val)
+    }
+
+    /// Indicate that the socket should poll on write events such as the socket
+    /// being clear to write without blocking.
+    ///
+    /// Corresponds to `CURL_WAIT_POLLOUT`.
+    pub fn poll_on_write(&mut self, val: bool) -> &mut WaitFd {
+        self.flag(curl_sys::CURL_WAIT_POLLOUT, val)
+    }
+
+    fn flag(&mut self, flag: c_short, val: bool) -> &mut WaitFd {
+        if val {
+            self.inner.events |= flag;
+        } else {
+            self.inner.events &= !flag;
+        }
+        self
+    }
+
+    /// After a call to `wait`, returns `true` if `poll_on_read` was set and a
+    /// read event occurred.
+    pub fn received_read(&self) -> bool {
+        self.inner.revents & curl_sys::CURL_WAIT_POLLIN == curl_sys::CURL_WAIT_POLLIN
+    }
+
+    /// After a call to `wait`, returns `true` if `poll_on_priority_read` was set and a
+    /// priority read event occurred.
+    pub fn received_priority_read(&self) -> bool {
+        self.inner.revents & curl_sys::CURL_WAIT_POLLPRI == curl_sys::CURL_WAIT_POLLPRI
+    }
+
+    /// After a call to `wait`, returns `true` if `poll_on_write` was set and a
+    /// write event occurred.
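+    ///
+    /// # Example
+    ///
+    /// An illustrative sketch: wait on one extra application socket alongside
+    /// libcurl's own descriptors, then check whether it became writable. The
+    /// descriptor value is a placeholder.
+    ///
+    /// ```no_run
+    /// use std::time::Duration;
+    /// use curl::multi::{Multi, WaitFd};
+    ///
+    /// let multi = Multi::new();
+    /// let mut extra = WaitFd::new();
+    /// extra.set_fd(0); // placeholder descriptor
+    /// extra.poll_on_write(true);
+    ///
+    /// let mut fds = [extra];
+    /// multi.wait(&mut fds, Duration::from_secs(1)).unwrap();
+    /// if fds[0].received_write() {
+    ///     println!("our socket is writable");
+    /// }
+    /// ```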
+ pub fn received_write(&self) -> bool { + self.inner.revents & curl_sys::CURL_WAIT_POLLOUT == curl_sys::CURL_WAIT_POLLOUT + } +} + +#[cfg(unix)] +impl From for WaitFd { + fn from(pfd: pollfd) -> WaitFd { + let mut events = 0; + if pfd.events & POLLIN == POLLIN { + events |= curl_sys::CURL_WAIT_POLLIN; + } + if pfd.events & POLLPRI == POLLPRI { + events |= curl_sys::CURL_WAIT_POLLPRI; + } + if pfd.events & POLLOUT == POLLOUT { + events |= curl_sys::CURL_WAIT_POLLOUT; + } + WaitFd { + inner: curl_sys::curl_waitfd { + fd: pfd.fd, + events: events, + revents: 0, + } + } + } +} + +impl fmt::Debug for WaitFd { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_struct("WaitFd") + .field("fd", &self.inner.fd) + .field("events", &self.inner.fd) + .field("revents", &self.inner.fd) + .finish() + } +} diff --git a/curl/Cargo.toml b/curl/Cargo.toml new file mode 100644 index 000000000..9c9dfc8f3 --- /dev/null +++ b/curl/Cargo.toml @@ -0,0 +1,59 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "curl" +version = "0.4.18" +authors = ["Alex Crichton "] +autotests = true +description = "Rust bindings to libcurl for making HTTP requests" +homepage = "https://github.com/alexcrichton/curl-rust" +documentation = "https://docs.rs/curl" +categories = ["api-bindings", "web-programming::http-client"] +license = "MIT" +repository = "https://github.com/alexcrichton/curl-rust" + +[[test]] +name = "atexit" +harness = false +[dependencies.curl-sys] +version = "0.4.9" + +[dependencies.libc] +version = "0.2.42" + +[dependencies.socket2] +version = "0.3.7" +[dev-dependencies.mio] +version = "0.6" + +[dev-dependencies.mio-extras] +version = "2.0.3" + +[features] +http2 = ["curl-sys/http2"] +[target."cfg(all(unix, not(target_os = \"macos\")))".dependencies.openssl-probe] +version = "0.1.2" + +[target."cfg(all(unix, not(target_os = \"macos\")))".dependencies.openssl-sys] +version = "0.9.33" + +[target."cfg(target_env=\"msvc\")".dependencies.schannel] +version = "0.1.13" +[target."cfg(windows)".dependencies.winapi] +version = "0.3" +features = ["winsock2", "wincrypt", "libloaderapi"] +[badges.appveyor] +repository = "alexcrichton/curl-rust" + +[badges.travis-ci] +repository = "alexcrichton/curl-rust" diff --git a/curl/LICENSE b/curl/LICENSE new file mode 100644 index 000000000..5f5e4b09d --- /dev/null +++ b/curl/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2014 Carl Lerche + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/curl/README.md b/curl/README.md new file mode 100644 index 000000000..b458c3b11 --- /dev/null +++ b/curl/README.md @@ -0,0 +1,138 @@ +# curl-rust + +libcurl bindings for Rust + +[![Build Status](https://travis-ci.org/alexcrichton/curl-rust.svg?branch=master)](https://travis-ci.org/alexcrichton/curl-rust) +[![Build status](https://ci.appveyor.com/api/projects/status/lx98wtbxhhhajpr9?svg=true)](https://ci.appveyor.com/project/alexcrichton/curl-rust) + +[Documentation](https://docs.rs/curl) + +## Quick Start + +```rust +extern crate curl; + +use std::io::{stdout, Write}; + +use curl::easy::Easy; + +// Print a web page onto stdout +fn main() { + let mut easy = Easy::new(); + easy.url("https://www.rust-lang.org/").unwrap(); + easy.write_function(|data| { + stdout().write_all(data).unwrap(); + Ok(data.len()) + }).unwrap(); + easy.perform().unwrap(); + + println!("{}", easy.response_code().unwrap()); +} +``` + +```rust +extern crate curl; + +use curl::easy::Easy; + +// Capture output into a local `Vec`. +fn main() { + let mut dst = Vec::new(); + let mut easy = Easy::new(); + easy.url("https://www.rust-lang.org/").unwrap(); + + let mut transfer = easy.transfer(); + transfer.write_function(|data| { + dst.extend_from_slice(data); + Ok(data.len()) + }).unwrap(); + transfer.perform().unwrap(); +} +``` + +## Post / Put requests + +The `put` and `post` methods on `Easy` can configure the method of the HTTP +request, and then `read_function` can be used to specify how data is filled in. +This interface works particularly well with types that implement `Read`. + +```rust,no_run +extern crate curl; + +use std::io::Read; +use curl::easy::Easy; + +fn main() { + let mut data = "this is the body".as_bytes(); + + let mut easy = Easy::new(); + easy.url("http://www.example.com/upload").unwrap(); + easy.post(true).unwrap(); + easy.post_field_size(data.len() as u64).unwrap(); + + let mut transfer = easy.transfer(); + transfer.read_function(|buf| { + Ok(data.read(buf).unwrap_or(0)) + }).unwrap(); + transfer.perform().unwrap(); +} +``` + +## Custom headers + +Custom headers can be specified as part of the request: + +```rust,no_run +extern crate curl; + +use curl::easy::{Easy, List}; + +fn main() { + let mut easy = Easy::new(); + easy.url("http://www.example.com").unwrap(); + + let mut list = List::new(); + list.append("Authorization: Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==").unwrap(); + easy.http_headers(list).unwrap(); + easy.perform().unwrap(); +} +``` + +## Keep alive + +The handle can be re-used across multiple requests. Curl will attempt to +keep the connections alive. + +```rust,no_run +extern crate curl; + +use curl::easy::Easy; + +fn main() { + let mut handle = Easy::new(); + + handle.url("http://www.example.com/foo").unwrap(); + handle.perform().unwrap(); + + handle.url("http://www.example.com/bar").unwrap(); + handle.perform().unwrap(); +} +``` + +## Multiple requests + +The libcurl library provides support for sending multiple requests +simultaneously through the "multi" interface. 
This is currently bound in the +`multi` module of this crate and provides the ability to execute multiple +transfers simultaneously. For more information, see that module. + +## Version Support + +The bindings have been developed using curl version 7.24.0. They should +work with any newer version of curl and possibly with older versions, +but this has not been tested. + +## License + +The `curl-rust` crate is licensed under the MIT license, see `LICENSE` for more +details. diff --git a/curl/appveyor.yml b/curl/appveyor.yml new file mode 100644 index 000000000..49f51274b --- /dev/null +++ b/curl/appveyor.yml @@ -0,0 +1,40 @@ +environment: + matrix: + + # Ensure vanilla builds work + - TARGET: i686-pc-windows-msvc + - TARGET: x86_64-pc-windows-msvc + + # Pin to specific VS versions to ensure the build works + - TARGET: x86_64-pc-windows-msvc + ARCH: amd64 + VS: C:\Program Files (x86)\Microsoft Visual Studio 12.0\VC\vcvarsall.bat + - TARGET: x86_64-pc-windows-msvc + ARCH: amd64 + VS: C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat + +install: + # Install rust, x86_64-pc-windows-msvc host + - appveyor-retry appveyor DownloadFile https://win.rustup.rs/ -FileName rustup-init.exe + # use nightly if required until -Ctarget-feature=+crt-static is stable (expected in rust 1.19) + - if not defined RUSTFLAGS rustup-init.exe -y --default-host x86_64-pc-windows-msvc + - if defined RUSTFLAGS rustup-init.exe -y --default-host x86_64-pc-windows-msvc --default-toolchain nightly + - set PATH=%PATH%;C:\Users\appveyor\.cargo\bin + + # Install the target we're compiling for + - if NOT "%TARGET%" == "x86_64-pc-windows-msvc" rustup target add %TARGET% + + # If we're pinning to a specific visual studio, do so now + - if defined VS call "%VS%" %ARCH% + + # let's see what we got + - where gcc rustc cargo + - rustc -vV + - cargo -vV + - set CARGO_TARGET_DIR=%CD%\target + +build: false + +test_script: + - cargo test --target %TARGET% + - cargo run --manifest-path systest/Cargo.toml --target %TARGET% diff --git a/curl/ci/Dockerfile-linux32 b/curl/ci/Dockerfile-linux32 new file mode 100644 index 000000000..4d55dcf04 --- /dev/null +++ b/curl/ci/Dockerfile-linux32 @@ -0,0 +1,14 @@ +FROM ubuntu:16.04 + +RUN dpkg --add-architecture i386 && \ + apt-get update && \ + apt-get install -y --no-install-recommends \ + gcc-multilib \ + ca-certificates \ + make \ + libc6-dev \ + libssl-dev:i386 \ + pkg-config + +ENV PKG_CONFIG=i686-linux-gnu-pkg-config \ + PKG_CONFIG_ALLOW_CROSS=1 diff --git a/curl/ci/Dockerfile-linux64 b/curl/ci/Dockerfile-linux64 new file mode 100644 index 000000000..0acd1223a --- /dev/null +++ b/curl/ci/Dockerfile-linux64 @@ -0,0 +1,7 @@ +FROM ubuntu:16.04 + +RUN apt-get update +RUN apt-get install -y --no-install-recommends \ + gcc ca-certificates make libc6-dev \ + libssl-dev \ + pkg-config diff --git a/curl/ci/Dockerfile-linux64-curl b/curl/ci/Dockerfile-linux64-curl new file mode 100644 index 000000000..be03c24da --- /dev/null +++ b/curl/ci/Dockerfile-linux64-curl @@ -0,0 +1,6 @@ +FROM ubuntu:14.04 + +RUN apt-get update +RUN apt-get install -y --no-install-recommends \ + gcc ca-certificates make libc6-dev \ + libssl-dev libcurl4-openssl-dev pkg-config diff --git a/curl/ci/Dockerfile-mingw b/curl/ci/Dockerfile-mingw new file mode 100644 index 000000000..ee5926c8d --- /dev/null +++ b/curl/ci/Dockerfile-mingw @@ -0,0 +1,6 @@ +FROM ubuntu:16.04 + +RUN apt-get update +RUN apt-get install -y --no-install-recommends \ + gcc ca-certificates make libc6-dev \ + gcc-mingw-w64-x86-64 
libz-mingw-w64-dev diff --git a/curl/ci/Dockerfile-musl b/curl/ci/Dockerfile-musl new file mode 100644 index 000000000..47d211fdf --- /dev/null +++ b/curl/ci/Dockerfile-musl @@ -0,0 +1,18 @@ +FROM ubuntu:16.04 + +RUN apt-get update +RUN apt-get install -y --no-install-recommends \ + gcc ca-certificates make libc6-dev curl \ + musl-tools + +RUN \ + curl https://www.openssl.org/source/old/1.0.2/openssl-1.0.2g.tar.gz | tar xzf - && \ + cd openssl-1.0.2g && \ + CC=musl-gcc ./Configure --prefix=/openssl no-dso linux-x86_64 -fPIC && \ + make -j10 && \ + make install && \ + cd .. && \ + rm -rf openssl-1.0.2g + +ENV OPENSSL_STATIC=1 \ + OPENSSL_DIR=/openssl diff --git a/curl/ci/run.sh b/curl/ci/run.sh new file mode 100644 index 000000000..e22c62da2 --- /dev/null +++ b/curl/ci/run.sh @@ -0,0 +1,11 @@ +#!/bin/sh + +set -ex + +cargo test --target $TARGET --no-run +if [ -z "$NO_RUN" ]; then + cargo test --target $TARGET + cargo run --manifest-path systest/Cargo.toml --target $TARGET + cargo doc --no-deps --target $TARGET + cargo doc --no-deps -p curl-sys --target $TARGET +fi diff --git a/curl/debian/patches/series b/curl/debian/patches/series new file mode 100644 index 000000000..204ee965a --- /dev/null +++ b/curl/debian/patches/series @@ -0,0 +1 @@ +winapi3.patch diff --git a/curl/debian/patches/winapi3.patch b/curl/debian/patches/winapi3.patch new file mode 100644 index 000000000..dce231238 --- /dev/null +++ b/curl/debian/patches/winapi3.patch @@ -0,0 +1,62 @@ +--- a/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 ++++ b/Cargo.toml 2018-09-21 18:54:24.693880364 +0000 +@@ -48,4 +48,2 @@ version = "0.1.2" + version = "0.9.33" +-[target."cfg(target_env=\"msvc\")".dependencies.kernel32-sys] +-version = "0.2.2" + +@@ -54,3 +52,4 @@ version = "0.1.13" + [target."cfg(windows)".dependencies.winapi] +-version = "0.2.7" ++version = "0.3" ++features = ["winsock2", "wincrypt", "libloaderapi"] + [badges.appveyor] +--- a/src/easy/windows.rs 2018-09-21 18:01:35.962553903 +0000 ++++ b/src/easy/windows.rs 2018-09-21 18:01:35.962553903 +0000 +@@ -4,21 +4,21 @@ use libc::c_void; + + #[cfg(target_env = "msvc")] + mod win { +- use kernel32; + use std::ffi::CString; + use std::mem; + use std::ptr; + use schannel::cert_context::ValidUses; + use schannel::cert_store::CertStore; + use winapi::{self, c_void, c_uchar, c_long, c_int}; ++ use winapi::um::libloaderapi::{GetModuleHandleW, GetProcAddress}; + + fn lookup(module: &str, symbol: &str) -> Option<*const c_void> { + unsafe { + let symbol = CString::new(symbol).unwrap(); + let mut mod_buf: Vec = module.encode_utf16().collect(); + mod_buf.push(0); +- let handle = kernel32::GetModuleHandleW(mod_buf.as_mut_ptr()); +- let n = kernel32::GetProcAddress(handle, symbol.as_ptr()); ++ let handle = GetModuleHandleW(mod_buf.as_mut_ptr()); ++ let n = GetProcAddress(handle, symbol.as_ptr()); + if n == ptr::null() { + None + } else { +--- a/src/lib.rs 2018-09-21 18:01:35.962553903 +0000 ++++ b/src/lib.rs 2018-09-21 18:01:35.962553903 +0000 +@@ -61,8 +61,6 @@ extern crate openssl_probe; + #[cfg(windows)] + extern crate winapi; + +-#[cfg(target_env = "msvc")] +-extern crate kernel32; + #[cfg(target_env = "msvc")] + extern crate schannel; + +--- a/src/multi.rs 2018-09-21 18:01:35.962553903 +0000 ++++ b/src/multi.rs 2018-09-21 18:01:35.962553903 +0000 +@@ -8,7 +8,7 @@ use libc::{c_int, c_char, c_void, c_long, c_short}; + use curl_sys; + + #[cfg(windows)] +-use winapi::winsock2::fd_set; ++use winapi::um::winsock2::fd_set; + #[cfg(unix)] + use libc::{fd_set, pollfd, POLLIN, POLLPRI, POLLOUT}; 
+ diff --git a/curl/debian/patches/winapi3.patch~ b/curl/debian/patches/winapi3.patch~ new file mode 100644 index 000000000..dce231238 --- /dev/null +++ b/curl/debian/patches/winapi3.patch~ @@ -0,0 +1,62 @@ +--- a/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 ++++ b/Cargo.toml 2018-09-21 18:54:24.693880364 +0000 +@@ -48,4 +48,2 @@ version = "0.1.2" + version = "0.9.33" +-[target."cfg(target_env=\"msvc\")".dependencies.kernel32-sys] +-version = "0.2.2" + +@@ -54,3 +52,4 @@ version = "0.1.13" + [target."cfg(windows)".dependencies.winapi] +-version = "0.2.7" ++version = "0.3" ++features = ["winsock2", "wincrypt", "libloaderapi"] + [badges.appveyor] +--- a/src/easy/windows.rs 2018-09-21 18:01:35.962553903 +0000 ++++ b/src/easy/windows.rs 2018-09-21 18:01:35.962553903 +0000 +@@ -4,21 +4,21 @@ use libc::c_void; + + #[cfg(target_env = "msvc")] + mod win { +- use kernel32; + use std::ffi::CString; + use std::mem; + use std::ptr; + use schannel::cert_context::ValidUses; + use schannel::cert_store::CertStore; + use winapi::{self, c_void, c_uchar, c_long, c_int}; ++ use winapi::um::libloaderapi::{GetModuleHandleW, GetProcAddress}; + + fn lookup(module: &str, symbol: &str) -> Option<*const c_void> { + unsafe { + let symbol = CString::new(symbol).unwrap(); + let mut mod_buf: Vec = module.encode_utf16().collect(); + mod_buf.push(0); +- let handle = kernel32::GetModuleHandleW(mod_buf.as_mut_ptr()); +- let n = kernel32::GetProcAddress(handle, symbol.as_ptr()); ++ let handle = GetModuleHandleW(mod_buf.as_mut_ptr()); ++ let n = GetProcAddress(handle, symbol.as_ptr()); + if n == ptr::null() { + None + } else { +--- a/src/lib.rs 2018-09-21 18:01:35.962553903 +0000 ++++ b/src/lib.rs 2018-09-21 18:01:35.962553903 +0000 +@@ -61,8 +61,6 @@ extern crate openssl_probe; + #[cfg(windows)] + extern crate winapi; + +-#[cfg(target_env = "msvc")] +-extern crate kernel32; + #[cfg(target_env = "msvc")] + extern crate schannel; + +--- a/src/multi.rs 2018-09-21 18:01:35.962553903 +0000 ++++ b/src/multi.rs 2018-09-21 18:01:35.962553903 +0000 +@@ -8,7 +8,7 @@ use libc::{c_int, c_char, c_void, c_long, c_short}; + use curl_sys; + + #[cfg(windows)] +-use winapi::winsock2::fd_set; ++use winapi::um::winsock2::fd_set; + #[cfg(unix)] + use libc::{fd_set, pollfd, POLLIN, POLLPRI, POLLOUT}; + diff --git a/curl/src/easy/form.rs b/curl/src/easy/form.rs new file mode 100644 index 000000000..be98d4e8e --- /dev/null +++ b/curl/src/easy/form.rs @@ -0,0 +1,333 @@ +use std::ffi::CString; +use std::fmt; +use std::path::Path; + +use FormError; +use curl_sys; +use easy::{list, List}; + +/// Multipart/formdata for an HTTP POST request. +/// +/// This structure is built up and then passed to the `Easy::httppost` method to +/// be sent off with a request. +pub struct Form { + head: *mut curl_sys::curl_httppost, + tail: *mut curl_sys::curl_httppost, + headers: Vec, + buffers: Vec>, + strings: Vec, +} + +/// One part in a multipart upload, added to a `Form`. +pub struct Part<'form, 'data> { + form: &'form mut Form, + name: &'data str, + array: Vec, + error: Option, +} + +pub fn raw(form: &Form) -> *mut curl_sys::curl_httppost { + form.head +} + +impl Form { + /// Creates a new blank form ready for the addition of new data. 
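+    ///
+    /// # Example
+    ///
+    /// A minimal illustrative sketch of building a form with one in-memory
+    /// field and attaching it to an `Easy` handle; the URL and field names are
+    /// placeholders.
+    ///
+    /// ```no_run
+    /// use curl::easy::{Easy, Form};
+    ///
+    /// let mut form = Form::new();
+    /// form.part("comment")
+    ///     .contents(b"hello from curl-rust")
+    ///     .add()
+    ///     .unwrap();
+    ///
+    /// let mut easy = Easy::new();
+    /// easy.url("https://example.com/upload").unwrap();
+    /// easy.httppost(form).unwrap();
+    /// easy.perform().unwrap();
+    /// ```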
+ pub fn new() -> Form { + Form { + head: 0 as *mut _, + tail: 0 as *mut _, + headers: Vec::new(), + buffers: Vec::new(), + strings: Vec::new(), + } + } + + /// Prepares adding a new part to this `Form` + /// + /// Note that the part is not actually added to the form until the `add` + /// method is called on `Part`, which may or may not fail. + pub fn part<'a, 'data>(&'a mut self, name: &'data str) -> Part<'a, 'data> { + Part { + error: None, + form: self, + name: name, + array: vec![curl_sys::curl_forms { + option: curl_sys::CURLFORM_END, + value: 0 as *mut _, + }], + } + } +} + +impl fmt::Debug for Form { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + // TODO: fill this out more + f.debug_struct("Form") + .field("fields", &"...") + .finish() + } +} + +impl Drop for Form { + fn drop(&mut self) { + unsafe { + curl_sys::curl_formfree(self.head); + } + } +} + +impl<'form, 'data> Part<'form, 'data> { + /// A pointer to the contents of this part, the actual data to send away. + pub fn contents(&mut self, contents: &'data [u8]) -> &mut Self { + let pos = self.array.len() - 1; + + // curl has an oddity where if the length if 0 it will call strlen + // on the value. This means that if someone wants to add empty form + // contents we need to make sure the buffer contains a null byte. + let ptr = if contents.is_empty() { + b"\x00" + } else { + contents + }.as_ptr(); + + self.array.insert(pos, curl_sys::curl_forms { + option: curl_sys::CURLFORM_COPYCONTENTS, + value: ptr as *mut _, + }); + self.array.insert(pos + 1, curl_sys::curl_forms { + option: curl_sys::CURLFORM_CONTENTSLENGTH, + value: contents.len() as *mut _, + }); + self + } + + /// Causes this file to be read and its contents used as data in this part + /// + /// This part does not automatically become a file upload part simply + /// because its data was read from a file. + /// + /// # Errors + /// + /// If the filename has any internal nul bytes or if on Windows it does not + /// contain a unicode filename then the `add` function will eventually + /// return an error. + pub fn file_content

(&mut self, file: P) -> &mut Self + where P: AsRef + { + self._file_content(file.as_ref()) + } + + fn _file_content(&mut self, file: &Path) -> &mut Self { + if let Some(bytes) = self.path2cstr(file) { + let pos = self.array.len() - 1; + self.array.insert(pos, curl_sys::curl_forms { + option: curl_sys::CURLFORM_FILECONTENT, + value: bytes.as_ptr() as *mut _, + }); + self.form.strings.push(bytes); + } + self + } + + /// Makes this part a file upload part of the given file. + /// + /// Sets the filename field to the basename of the provided file name, and + /// it reads the contents of the file and passes them as data and sets the + /// content type if the given file matches one of the internally known file + /// extensions. + /// + /// The given upload file must exist entirely on the filesystem before the + /// upload is started because libcurl needs to read the size of it + /// beforehand. + /// + /// Multiple files can be uploaded by calling this method multiple times and + /// content types can also be configured for each file (by calling that + /// next). + /// + /// # Errors + /// + /// If the filename has any internal nul bytes or if on Windows it does not + /// contain a unicode filename then this function will cause `add` to return + /// an error when called. + pub fn file(&mut self, file: &'data P) -> &mut Self + where P: AsRef + { + self._file(file.as_ref()) + } + + fn _file(&mut self, file: &'data Path) -> &mut Self { + if let Some(bytes) = self.path2cstr(file) { + let pos = self.array.len() - 1; + self.array.insert(pos, curl_sys::curl_forms { + option: curl_sys::CURLFORM_FILE, + value: bytes.as_ptr() as *mut _, + }); + self.form.strings.push(bytes); + } + self + } + + /// Used in combination with `Part::file`, provides the content-type for + /// this part, possibly instead of choosing an internal one. + /// + /// # Panics + /// + /// This function will panic if `content_type` contains an internal nul + /// byte. + pub fn content_type(&mut self, content_type: &'data str) -> &mut Self { + if let Some(bytes) = self.bytes2cstr(content_type.as_bytes()) { + let pos = self.array.len() - 1; + self.array.insert(pos, curl_sys::curl_forms { + option: curl_sys::CURLFORM_CONTENTTYPE, + value: bytes.as_ptr() as *mut _, + }); + self.form.strings.push(bytes); + } + self + } + + /// Used in combination with `Part::file`, provides the filename for + /// this part instead of the actual one. + /// + /// # Errors + /// + /// If `name` contains an internal nul byte, or if on Windows the path is + /// not valid unicode then this function will return an error when `add` is + /// called. + pub fn filename(&mut self, name: &'data P) -> &mut Self + where P: AsRef + { + self._filename(name.as_ref()) + } + + fn _filename(&mut self, name: &'data Path) -> &mut Self { + if let Some(bytes) = self.path2cstr(name) { + let pos = self.array.len() - 1; + self.array.insert(pos, curl_sys::curl_forms { + option: curl_sys::CURLFORM_FILENAME, + value: bytes.as_ptr() as *mut _, + }); + self.form.strings.push(bytes); + } + self + } + + /// This is used to provide a custom file upload part without using the + /// `file` method above. + /// + /// The first parameter is for the filename field and the second is the + /// in-memory contents. + /// + /// # Errors + /// + /// If `name` contains an internal nul byte, or if on Windows the path is + /// not valid unicode then this function will return an error when `add` is + /// called. 
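+    ///
+    /// # Example
+    ///
+    /// An illustrative sketch: upload in-memory bytes as if they were a file;
+    /// the part name and filename here are placeholders.
+    ///
+    /// ```no_run
+    /// use curl::easy::Form;
+    ///
+    /// let mut form = Form::new();
+    /// let filename = "report.txt".to_string();
+    /// form.part("attachment")
+    ///     .buffer(&filename, b"contents of the report".to_vec())
+    ///     .add()
+    ///     .unwrap();
+    /// ```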
+ pub fn buffer(&mut self, name: &'data P, data: Vec) + -> &mut Self + where P: AsRef + { + self._buffer(name.as_ref(), data) + } + + fn _buffer(&mut self, name: &'data Path, data: Vec) -> &mut Self { + if let Some(bytes) = self.path2cstr(name) { + let pos = self.array.len() - 1; + self.array.insert(pos, curl_sys::curl_forms { + option: curl_sys::CURLFORM_BUFFER, + value: bytes.as_ptr() as *mut _, + }); + self.form.strings.push(bytes); + self.array.insert(pos + 1, curl_sys::curl_forms { + option: curl_sys::CURLFORM_BUFFERPTR, + value: data.as_ptr() as *mut _, + }); + self.array.insert(pos + 2, curl_sys::curl_forms { + option: curl_sys::CURLFORM_BUFFERLENGTH, + value: data.len() as *mut _, + }); + self.form.buffers.push(data); + } + self + } + + /// Specifies extra headers for the form POST section. + /// + /// Appends the list of headers to those libcurl automatically generates. + pub fn content_header(&mut self, headers: List) -> &mut Self { + let pos = self.array.len() - 1; + self.array.insert(pos, curl_sys::curl_forms { + option: curl_sys::CURLFORM_CONTENTHEADER, + value: list::raw(&headers) as *mut _, + }); + self.form.headers.push(headers); + self + } + + /// Attempts to add this part to the `Form` that it was created from. + /// + /// If any error happens while adding, that error is returned, otherwise + /// `Ok(())` is returned. + pub fn add(&mut self) -> Result<(), FormError> { + if let Some(err) = self.error.clone() { + return Err(err) + } + let rc = unsafe { + curl_sys::curl_formadd(&mut self.form.head, + &mut self.form.tail, + curl_sys::CURLFORM_COPYNAME, + self.name.as_ptr(), + curl_sys::CURLFORM_NAMELENGTH, + self.name.len(), + curl_sys::CURLFORM_ARRAY, + self.array.as_ptr(), + curl_sys::CURLFORM_END) + }; + if rc == curl_sys::CURL_FORMADD_OK { + Ok(()) + } else { + Err(FormError::new(rc)) + } + } + + #[cfg(unix)] + fn path2cstr(&mut self, p: &Path) -> Option { + use std::os::unix::prelude::*; + self.bytes2cstr(p.as_os_str().as_bytes()) + } + + #[cfg(windows)] + fn path2cstr(&mut self, p: &Path) -> Option { + match p.to_str() { + Some(bytes) => self.bytes2cstr(bytes.as_bytes()), + None if self.error.is_none() => { + // TODO: better error code + self.error = Some(FormError::new(curl_sys::CURL_FORMADD_INCOMPLETE)); + None + } + None => None, + } + } + + fn bytes2cstr(&mut self, bytes: &[u8]) -> Option { + match CString::new(bytes) { + Ok(c) => Some(c), + Err(..) if self.error.is_none() => { + // TODO: better error code + self.error = Some(FormError::new(curl_sys::CURL_FORMADD_INCOMPLETE)); + None + } + Err(..) => None, + } + } +} + +impl<'form, 'data> fmt::Debug for Part<'form, 'data> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + // TODO: fill this out more + f.debug_struct("Part") + .field("name", &self.name) + .field("form", &self.form) + .finish() + } +} diff --git a/curl/src/easy/handle.rs b/curl/src/easy/handle.rs new file mode 100644 index 000000000..8e231b3e2 --- /dev/null +++ b/curl/src/easy/handle.rs @@ -0,0 +1,1471 @@ +use std::cell::Cell; +use std::fmt; +use std::io::SeekFrom; +use std::path::Path; +use std::ptr; +use std::str; +use std::time::Duration; + +use curl_sys; +use libc::c_void; + +use Error; +use easy::{Form, List}; +use easy::handler::{self, InfoType, SeekResult, ReadError, WriteError}; +use easy::handler::{TimeCondition, IpResolve, HttpVersion, SslVersion}; +use easy::handler::{SslOpt, NetRc, Auth, ProxyType}; +use easy::{Easy2, Handler}; + +/// Raw bindings to a libcurl "easy session". 
+/// +/// This type is the same as the `Easy2` type in this library except that it +/// does not contain a type parameter. Callbacks from curl are all controlled +/// via closures on this `Easy` type, and this type namely has a `transfer` +/// method as well for ergonomic management of these callbacks. +/// +/// There's not necessarily a right answer for which type is correct to use, but +/// as a general rule of thumb `Easy` is typically a reasonable choice for +/// synchronous I/O and `Easy2` is a good choice for asynchronous I/O. +/// +/// ## Examples +/// +/// Creating a handle which can be used later +/// +/// ``` +/// use curl::easy::Easy; +/// +/// let handle = Easy::new(); +/// ``` +/// +/// Send an HTTP request, writing the response to stdout. +/// +/// ``` +/// use std::io::{stdout, Write}; +/// +/// use curl::easy::Easy; +/// +/// let mut handle = Easy::new(); +/// handle.url("https://www.rust-lang.org/").unwrap(); +/// handle.write_function(|data| { +/// stdout().write_all(data).unwrap(); +/// Ok(data.len()) +/// }).unwrap(); +/// handle.perform().unwrap(); +/// ``` +/// +/// Collect all output of an HTTP request to a vector. +/// +/// ``` +/// use curl::easy::Easy; +/// +/// let mut data = Vec::new(); +/// let mut handle = Easy::new(); +/// handle.url("https://www.rust-lang.org/").unwrap(); +/// { +/// let mut transfer = handle.transfer(); +/// transfer.write_function(|new_data| { +/// data.extend_from_slice(new_data); +/// Ok(new_data.len()) +/// }).unwrap(); +/// transfer.perform().unwrap(); +/// } +/// println!("{:?}", data); +/// ``` +/// +/// More examples of various properties of an HTTP request can be found on the +/// specific methods as well. +#[derive(Debug)] +pub struct Easy { + inner: Easy2, +} + +/// A scoped transfer of information which borrows an `Easy` and allows +/// referencing stack-local data of the lifetime `'data`. +/// +/// Usage of `Easy` requires the `'static` and `Send` bounds on all callbacks +/// registered, but that's not often wanted if all you need is to collect a +/// bunch of data in memory to a vector, for example. The `Transfer` structure, +/// created by the `Easy::transfer` method, is used for this sort of request. +/// +/// The callbacks attached to a `Transfer` are only active for that one transfer +/// object, and they allow to elide both the `Send` and `'static` bounds to +/// close over stack-local information. +pub struct Transfer<'easy, 'data> { + easy: &'easy mut Easy, + data: Box>, +} + +pub struct EasyData { + running: Cell, + owned: Callbacks<'static>, + borrowed: Cell<*mut Callbacks<'static>>, +} + +unsafe impl Send for EasyData {} + +#[derive(Default)] +struct Callbacks<'a> { + write: Option Result + 'a>>, + read: Option Result + 'a>>, + seek: Option SeekResult + 'a>>, + debug: Option>, + header: Option bool + 'a>>, + progress: Option bool + 'a>>, + ssl_ctx: Option Result<(), Error> + 'a>>, +} + +impl Easy { + /// Creates a new "easy" handle which is the core of almost all operations + /// in libcurl. + /// + /// To use a handle, applications typically configure a number of options + /// followed by a call to `perform`. Options are preserved across calls to + /// `perform` and need to be reset manually (or via the `reset` method) if + /// this is not desired. 
+ pub fn new() -> Easy { + Easy { + inner: Easy2::new(EasyData { + running: Cell::new(false), + owned: Callbacks::default(), + borrowed: Cell::new(ptr::null_mut()), + }), + } + } + + // ========================================================================= + // Behavior options + + /// Same as [`Easy2::verbose`](struct.Easy2.html#method.verbose) + pub fn verbose(&mut self, verbose: bool) -> Result<(), Error> { + self.inner.verbose(verbose) + } + + /// Same as [`Easy2::show_header`](struct.Easy2.html#method.show_header) + pub fn show_header(&mut self, show: bool) -> Result<(), Error> { + self.inner.show_header(show) + } + + /// Same as [`Easy2::progress`](struct.Easy2.html#method.progress) + pub fn progress(&mut self, progress: bool) -> Result<(), Error> { + self.inner.progress(progress) + } + + /// Same as [`Easy2::signal`](struct.Easy2.html#method.signal) + pub fn signal(&mut self, signal: bool) -> Result<(), Error> { + self.inner.signal(signal) + } + + /// Same as [`Easy2::wildcard_match`](struct.Easy2.html#method.wildcard_match) + pub fn wildcard_match(&mut self, m: bool) -> Result<(), Error> { + self.inner.wildcard_match(m) + } + + /// Same as [`Easy2::unix_socket`](struct.Easy2.html#method.unix_socket) + pub fn unix_socket(&mut self, unix_domain_socket: &str) -> Result<(), Error> { + self.inner.unix_socket(unix_domain_socket) + } + + // ========================================================================= + // Callback options + + /// Set callback for writing received data. + /// + /// This callback function gets called by libcurl as soon as there is data + /// received that needs to be saved. + /// + /// The callback function will be passed as much data as possible in all + /// invokes, but you must not make any assumptions. It may be one byte, it + /// may be thousands. If `show_header` is enabled, which makes header data + /// get passed to the write callback, you can get up to + /// `CURL_MAX_HTTP_HEADER` bytes of header data passed into it. This + /// usually means 100K. + /// + /// This function may be called with zero bytes data if the transferred file + /// is empty. + /// + /// The callback should return the number of bytes actually taken care of. + /// If that amount differs from the amount passed to your callback function, + /// it'll signal an error condition to the library. This will cause the + /// transfer to get aborted and the libcurl function used will return + /// an error with `is_write_error`. + /// + /// If your callback function returns `Err(WriteError::Pause)` it will cause + /// this transfer to become paused. See `unpause_write` for further details. + /// + /// By default data is sent into the void, and this corresponds to the + /// `CURLOPT_WRITEFUNCTION` and `CURLOPT_WRITEDATA` options. + /// + /// Note that the lifetime bound on this function is `'static`, but that + /// is often too restrictive. To use stack data consider calling the + /// `transfer` method and then using `write_function` to configure a + /// callback that can reference stack-local data. 
+ /// + /// # Examples + /// + /// ``` + /// use std::io::{stdout, Write}; + /// use curl::easy::Easy; + /// + /// let mut handle = Easy::new(); + /// handle.url("https://www.rust-lang.org/").unwrap(); + /// handle.write_function(|data| { + /// Ok(stdout().write(data).unwrap()) + /// }).unwrap(); + /// handle.perform().unwrap(); + /// ``` + /// + /// Writing to a stack-local buffer + /// + /// ``` + /// use std::io::{stdout, Write}; + /// use curl::easy::Easy; + /// + /// let mut buf = Vec::new(); + /// let mut handle = Easy::new(); + /// handle.url("https://www.rust-lang.org/").unwrap(); + /// + /// let mut transfer = handle.transfer(); + /// transfer.write_function(|data| { + /// buf.extend_from_slice(data); + /// Ok(data.len()) + /// }).unwrap(); + /// transfer.perform().unwrap(); + /// ``` + pub fn write_function(&mut self, f: F) -> Result<(), Error> + where F: FnMut(&[u8]) -> Result + Send + 'static + { + self.inner.get_mut().owned.write = Some(Box::new(f)); + Ok(()) + } + + /// Read callback for data uploads. + /// + /// This callback function gets called by libcurl as soon as it needs to + /// read data in order to send it to the peer - like if you ask it to upload + /// or post data to the server. + /// + /// Your function must then return the actual number of bytes that it stored + /// in that memory area. Returning 0 will signal end-of-file to the library + /// and cause it to stop the current transfer. + /// + /// If you stop the current transfer by returning 0 "pre-maturely" (i.e + /// before the server expected it, like when you've said you will upload N + /// bytes and you upload less than N bytes), you may experience that the + /// server "hangs" waiting for the rest of the data that won't come. + /// + /// The read callback may return `Err(ReadError::Abort)` to stop the + /// current operation immediately, resulting in a `is_aborted_by_callback` + /// error code from the transfer. + /// + /// The callback can return `Err(ReadError::Pause)` to cause reading from + /// this connection to pause. See `unpause_read` for further details. + /// + /// By default data not input, and this corresponds to the + /// `CURLOPT_READFUNCTION` and `CURLOPT_READDATA` options. + /// + /// Note that the lifetime bound on this function is `'static`, but that + /// is often too restrictive. To use stack data consider calling the + /// `transfer` method and then using `read_function` to configure a + /// callback that can reference stack-local data. 
+ /// + /// # Examples + /// + /// Read input from stdin + /// + /// ```no_run + /// use std::io::{stdin, Read}; + /// use curl::easy::Easy; + /// + /// let mut handle = Easy::new(); + /// handle.url("https://example.com/login").unwrap(); + /// handle.read_function(|into| { + /// Ok(stdin().read(into).unwrap()) + /// }).unwrap(); + /// handle.post(true).unwrap(); + /// handle.perform().unwrap(); + /// ``` + /// + /// Reading from stack-local data: + /// + /// ```no_run + /// use std::io::{stdin, Read}; + /// use curl::easy::Easy; + /// + /// let mut data_to_upload = &b"foobar"[..]; + /// let mut handle = Easy::new(); + /// handle.url("https://example.com/login").unwrap(); + /// handle.post(true).unwrap(); + /// + /// let mut transfer = handle.transfer(); + /// transfer.read_function(|into| { + /// Ok(data_to_upload.read(into).unwrap()) + /// }).unwrap(); + /// transfer.perform().unwrap(); + /// ``` + pub fn read_function(&mut self, f: F) -> Result<(), Error> + where F: FnMut(&mut [u8]) -> Result + Send + 'static + { + self.inner.get_mut().owned.read = Some(Box::new(f)); + Ok(()) + } + + /// User callback for seeking in input stream. + /// + /// This function gets called by libcurl to seek to a certain position in + /// the input stream and can be used to fast forward a file in a resumed + /// upload (instead of reading all uploaded bytes with the normal read + /// function/callback). It is also called to rewind a stream when data has + /// already been sent to the server and needs to be sent again. This may + /// happen when doing a HTTP PUT or POST with a multi-pass authentication + /// method, or when an existing HTTP connection is reused too late and the + /// server closes the connection. + /// + /// The callback function must return `SeekResult::Ok` on success, + /// `SeekResult::Fail` to cause the upload operation to fail or + /// `SeekResult::CantSeek` to indicate that while the seek failed, libcurl + /// is free to work around the problem if possible. The latter can sometimes + /// be done by instead reading from the input or similar. + /// + /// By default data this option is not set, and this corresponds to the + /// `CURLOPT_SEEKFUNCTION` and `CURLOPT_SEEKDATA` options. + /// + /// Note that the lifetime bound on this function is `'static`, but that + /// is often too restrictive. To use stack data consider calling the + /// `transfer` method and then using `seek_function` to configure a + /// callback that can reference stack-local data. + pub fn seek_function(&mut self, f: F) -> Result<(), Error> + where F: FnMut(SeekFrom) -> SeekResult + Send + 'static + { + self.inner.get_mut().owned.seek = Some(Box::new(f)); + Ok(()) + } + + /// Callback to progress meter function + /// + /// This function gets called by libcurl instead of its internal equivalent + /// with a frequent interval. While data is being transferred it will be + /// called very frequently, and during slow periods like when nothing is + /// being transferred it can slow down to about one call per second. + /// + /// The callback gets told how much data libcurl will transfer and has + /// transferred, in number of bytes. The first argument is the total number + /// of bytes libcurl expects to download in this transfer. The second + /// argument is the number of bytes downloaded so far. The third argument is + /// the total number of bytes libcurl expects to upload in this transfer. + /// The fourth argument is the number of bytes uploaded so far. 
+ /// + /// Unknown/unused argument values passed to the callback will be set to + /// zero (like if you only download data, the upload size will remain 0). + /// Many times the callback will be called one or more times first, before + /// it knows the data sizes so a program must be made to handle that. + /// + /// Returning `false` from this callback will cause libcurl to abort the + /// transfer and return `is_aborted_by_callback`. + /// + /// If you transfer data with the multi interface, this function will not be + /// called during periods of idleness unless you call the appropriate + /// libcurl function that performs transfers. + /// + /// `progress` must be set to `true` to make this function actually get + /// called. + /// + /// By default this function calls an internal method and corresponds to + /// `CURLOPT_PROGRESSFUNCTION` and `CURLOPT_PROGRESSDATA`. + /// + /// Note that the lifetime bound on this function is `'static`, but that + /// is often too restrictive. To use stack data consider calling the + /// `transfer` method and then using `progress_function` to configure a + /// callback that can reference stack-local data. + pub fn progress_function(&mut self, f: F) -> Result<(), Error> + where F: FnMut(f64, f64, f64, f64) -> bool + Send + 'static + { + self.inner.get_mut().owned.progress = Some(Box::new(f)); + Ok(()) + } + + /// Callback to SSL context + /// + /// This callback function gets called by libcurl just before the + /// initialization of an SSL connection after having processed all + /// other SSL related options to give a last chance to an + /// application to modify the behaviour of the SSL + /// initialization. The `ssl_ctx` parameter is actually a pointer + /// to the SSL library's SSL_CTX. If an error is returned from the + /// callback no attempt to establish a connection is made and the + /// perform operation will return the callback's error code. + /// + /// This function will get called on all new connections made to a + /// server, during the SSL negotiation. The SSL_CTX pointer will + /// be a new one every time. + /// + /// To use this properly, a non-trivial amount of knowledge of + /// your SSL library is necessary. For example, you can use this + /// function to call library-specific callbacks to add additional + /// validation code for certificates, and even to change the + /// actual URI of a HTTPS request. + /// + /// By default this function calls an internal method and + /// corresponds to `CURLOPT_SSL_CTX_FUNCTION` and + /// `CURLOPT_SSL_CTX_DATA`. + /// + /// Note that the lifetime bound on this function is `'static`, but that + /// is often too restrictive. To use stack data consider calling the + /// `transfer` method and then using `progress_function` to configure a + /// callback that can reference stack-local data. + pub fn ssl_ctx_function(&mut self, f: F) -> Result<(), Error> + where F: FnMut(*mut c_void) -> Result<(), Error> + Send + 'static + { + self.inner.get_mut().owned.ssl_ctx = Some(Box::new(f)); + Ok(()) + } + + /// Specify a debug callback + /// + /// `debug_function` replaces the standard debug function used when + /// `verbose` is in effect. This callback receives debug information, + /// as specified in the type argument. + /// + /// By default this option is not set and corresponds to the + /// `CURLOPT_DEBUGFUNCTION` and `CURLOPT_DEBUGDATA` options. + /// + /// Note that the lifetime bound on this function is `'static`, but that + /// is often too restrictive. 
To use stack data consider calling the + /// `transfer` method and then using `debug_function` to configure a + /// callback that can reference stack-local data. + pub fn debug_function(&mut self, f: F) -> Result<(), Error> + where F: FnMut(InfoType, &[u8]) + Send + 'static + { + self.inner.get_mut().owned.debug = Some(Box::new(f)); + Ok(()) + } + + /// Callback that receives header data + /// + /// This function gets called by libcurl as soon as it has received header + /// data. The header callback will be called once for each header and only + /// complete header lines are passed on to the callback. Parsing headers is + /// very easy using this. If this callback returns `false` it'll signal an + /// error to the library. This will cause the transfer to get aborted and + /// the libcurl function in progress will return `is_write_error`. + /// + /// A complete HTTP header that is passed to this function can be up to + /// CURL_MAX_HTTP_HEADER (100K) bytes. + /// + /// It's important to note that the callback will be invoked for the headers + /// of all responses received after initiating a request and not just the + /// final response. This includes all responses which occur during + /// authentication negotiation. If you need to operate on only the headers + /// from the final response, you will need to collect headers in the + /// callback yourself and use HTTP status lines, for example, to delimit + /// response boundaries. + /// + /// When a server sends a chunked encoded transfer, it may contain a + /// trailer. That trailer is identical to a HTTP header and if such a + /// trailer is received it is passed to the application using this callback + /// as well. There are several ways to detect it being a trailer and not an + /// ordinary header: 1) it comes after the response-body. 2) it comes after + /// the final header line (CR LF) 3) a Trailer: header among the regular + /// response-headers mention what header(s) to expect in the trailer. + /// + /// For non-HTTP protocols like FTP, POP3, IMAP and SMTP this function will + /// get called with the server responses to the commands that libcurl sends. + /// + /// By default this option is not set and corresponds to the + /// `CURLOPT_HEADERFUNCTION` and `CURLOPT_HEADERDATA` options. + /// + /// Note that the lifetime bound on this function is `'static`, but that + /// is often too restrictive. To use stack data consider calling the + /// `transfer` method and then using `header_function` to configure a + /// callback that can reference stack-local data. 
+ /// + /// # Examples + /// + /// ``` + /// use std::str; + /// + /// use curl::easy::Easy; + /// + /// let mut handle = Easy::new(); + /// handle.url("https://www.rust-lang.org/").unwrap(); + /// handle.header_function(|header| { + /// print!("header: {}", str::from_utf8(header).unwrap()); + /// true + /// }).unwrap(); + /// handle.perform().unwrap(); + /// ``` + /// + /// Collecting headers to a stack local vector + /// + /// ``` + /// use std::str; + /// + /// use curl::easy::Easy; + /// + /// let mut headers = Vec::new(); + /// let mut handle = Easy::new(); + /// handle.url("https://www.rust-lang.org/").unwrap(); + /// + /// { + /// let mut transfer = handle.transfer(); + /// transfer.header_function(|header| { + /// headers.push(str::from_utf8(header).unwrap().to_string()); + /// true + /// }).unwrap(); + /// transfer.perform().unwrap(); + /// } + /// + /// println!("{:?}", headers); + /// ``` + pub fn header_function(&mut self, f: F) -> Result<(), Error> + where F: FnMut(&[u8]) -> bool + Send + 'static + { + self.inner.get_mut().owned.header = Some(Box::new(f)); + Ok(()) + } + + // ========================================================================= + // Error options + + // TODO: error buffer and stderr + + /// Same as [`Easy2::fail_on_error`](struct.Easy2.html#method.fail_on_error) + pub fn fail_on_error(&mut self, fail: bool) -> Result<(), Error> { + self.inner.fail_on_error(fail) + } + + // ========================================================================= + // Network options + + /// Same as [`Easy2::url`](struct.Easy2.html#method.url) + pub fn url(&mut self, url: &str) -> Result<(), Error> { + self.inner.url(url) + } + + /// Same as [`Easy2::port`](struct.Easy2.html#method.port) + pub fn port(&mut self, port: u16) -> Result<(), Error> { + self.inner.port(port) + } + + /// Same as [`Easy2::proxy`](struct.Easy2.html#method.proxy) + pub fn proxy(&mut self, url: &str) -> Result<(), Error> { + self.inner.proxy(url) + } + + /// Same as [`Easy2::proxy_port`](struct.Easy2.html#method.proxy_port) + pub fn proxy_port(&mut self, port: u16) -> Result<(), Error> { + self.inner.proxy_port(port) + } + + /// Same as [`Easy2::proxy_type`](struct.Easy2.html#method.proxy_type) + pub fn proxy_type(&mut self, kind: ProxyType) -> Result<(), Error> { + self.inner.proxy_type(kind) + } + + /// Same as [`Easy2::noproxy`](struct.Easy2.html#method.noproxy) + pub fn noproxy(&mut self, skip: &str) -> Result<(), Error> { + self.inner.noproxy(skip) + } + + /// Same as [`Easy2::http_proxy_tunnel`](struct.Easy2.html#method.http_proxy_tunnel) + pub fn http_proxy_tunnel(&mut self, tunnel: bool) -> Result<(), Error> { + self.inner.http_proxy_tunnel(tunnel) + } + + /// Same as [`Easy2::interface`](struct.Easy2.html#method.interface) + pub fn interface(&mut self, interface: &str) -> Result<(), Error> { + self.inner.interface(interface) + } + + /// Same as [`Easy2::set_local_port`](struct.Easy2.html#method.set_local_port) + pub fn set_local_port(&mut self, port: u16) -> Result<(), Error> { + self.inner.set_local_port(port) + } + + /// Same as [`Easy2::local_port_range`](struct.Easy2.html#method.local_port_range) + pub fn local_port_range(&mut self, range: u16) -> Result<(), Error> { + self.inner.local_port_range(range) + } + + /// Same as [`Easy2::dns_cache_timeout`](struct.Easy2.html#method.dns_cache_timeout) + pub fn dns_cache_timeout(&mut self, dur: Duration) -> Result<(), Error> { + self.inner.dns_cache_timeout(dur) + } + + /// Same as [`Easy2::buffer_size`](struct.Easy2.html#method.buffer_size) + 
pub fn buffer_size(&mut self, size: usize) -> Result<(), Error> { + self.inner.buffer_size(size) + } + + /// Same as [`Easy2::tcp_nodelay`](struct.Easy2.html#method.tcp_nodelay) + pub fn tcp_nodelay(&mut self, enable: bool) -> Result<(), Error> { + self.inner.tcp_nodelay(enable) + } + + /// Same as [`Easy2::tcp_keepalive`](struct.Easy2.html#method.tcp_keepalive) + pub fn tcp_keepalive(&mut self, enable: bool) -> Result<(), Error> { + self.inner.tcp_keepalive(enable) + } + + /// Same as [`Easy2::tcp_keepintvl`](struct.Easy2.html#method.tcp_keepalive) + pub fn tcp_keepintvl(&mut self, dur: Duration) -> Result<(), Error> { + self.inner.tcp_keepintvl(dur) + } + + /// Same as [`Easy2::tcp_keepidle`](struct.Easy2.html#method.tcp_keepidle) + pub fn tcp_keepidle(&mut self, dur: Duration) -> Result<(), Error> { + self.inner.tcp_keepidle(dur) + } + + /// Same as [`Easy2::address_scope`](struct.Easy2.html#method.address_scope) + pub fn address_scope(&mut self, scope: u32) -> Result<(), Error> { + self.inner.address_scope(scope) + } + + // ========================================================================= + // Names and passwords + + /// Same as [`Easy2::username`](struct.Easy2.html#method.username) + pub fn username(&mut self, user: &str) -> Result<(), Error> { + self.inner.username(user) + } + + /// Same as [`Easy2::password`](struct.Easy2.html#method.password) + pub fn password(&mut self, pass: &str) -> Result<(), Error> { + self.inner.password(pass) + } + + /// Same as [`Easy2::http_auth`](struct.Easy2.html#method.http_auth) + pub fn http_auth(&mut self, auth: &Auth) -> Result<(), Error> { + self.inner.http_auth(auth) + } + + /// Same as [`Easy2::proxy_username`](struct.Easy2.html#method.proxy_username) + pub fn proxy_username(&mut self, user: &str) -> Result<(), Error> { + self.inner.proxy_username(user) + } + + /// Same as [`Easy2::proxy_password`](struct.Easy2.html#method.proxy_password) + pub fn proxy_password(&mut self, pass: &str) -> Result<(), Error> { + self.inner.proxy_password(pass) + } + + /// Same as [`Easy2::proxy_auth`](struct.Easy2.html#method.proxy_auth) + pub fn proxy_auth(&mut self, auth: &Auth) -> Result<(), Error> { + self.inner.proxy_auth(auth) + } + + /// Same as [`Easy2::netrc`](struct.Easy2.html#method.netrc) + pub fn netrc(&mut self, netrc: NetRc) -> Result<(), Error> { + self.inner.netrc(netrc) + } + + // ========================================================================= + // HTTP Options + + /// Same as [`Easy2::autoreferer`](struct.Easy2.html#method.autoreferer) + pub fn autoreferer(&mut self, enable: bool) -> Result<(), Error> { + self.inner.autoreferer(enable) + } + + /// Same as [`Easy2::accept_encoding`](struct.Easy2.html#method.accept_encoding) + pub fn accept_encoding(&mut self, encoding: &str) -> Result<(), Error> { + self.inner.accept_encoding(encoding) + } + + /// Same as [`Easy2::transfer_encoding`](struct.Easy2.html#method.transfer_encoding) + pub fn transfer_encoding(&mut self, enable: bool) -> Result<(), Error> { + self.inner.transfer_encoding(enable) + } + + /// Same as [`Easy2::follow_location`](struct.Easy2.html#method.follow_location) + pub fn follow_location(&mut self, enable: bool) -> Result<(), Error> { + self.inner.follow_location(enable) + } + + /// Same as [`Easy2::unrestricted_auth`](struct.Easy2.html#method.unrestricted_auth) + pub fn unrestricted_auth(&mut self, enable: bool) -> Result<(), Error> { + self.inner.unrestricted_auth(enable) + } + + /// Same as [`Easy2::max_redirections`](struct.Easy2.html#method.max_redirections) + 
pub fn max_redirections(&mut self, max: u32) -> Result<(), Error> { + self.inner.max_redirections(max) + } + + /// Same as [`Easy2::put`](struct.Easy2.html#method.put) + pub fn put(&mut self, enable: bool) -> Result<(), Error> { + self.inner.put(enable) + } + + /// Same as [`Easy2::post`](struct.Easy2.html#method.post) + pub fn post(&mut self, enable: bool) -> Result<(), Error> { + self.inner.post(enable) + } + + /// Same as [`Easy2::post_field_copy`](struct.Easy2.html#method.post_field_copy) + pub fn post_fields_copy(&mut self, data: &[u8]) -> Result<(), Error> { + self.inner.post_fields_copy(data) + } + + /// Same as [`Easy2::post_field_size`](struct.Easy2.html#method.post_field_size) + pub fn post_field_size(&mut self, size: u64) -> Result<(), Error> { + self.inner.post_field_size(size) + } + + /// Same as [`Easy2::httppost`](struct.Easy2.html#method.httppost) + pub fn httppost(&mut self, form: Form) -> Result<(), Error> { + self.inner.httppost(form) + } + + /// Same as [`Easy2::referer`](struct.Easy2.html#method.referer) + pub fn referer(&mut self, referer: &str) -> Result<(), Error> { + self.inner.referer(referer) + } + + /// Same as [`Easy2::useragent`](struct.Easy2.html#method.useragent) + pub fn useragent(&mut self, useragent: &str) -> Result<(), Error> { + self.inner.useragent(useragent) + } + + /// Same as [`Easy2::http_headers`](struct.Easy2.html#method.http_headers) + pub fn http_headers(&mut self, list: List) -> Result<(), Error> { + self.inner.http_headers(list) + } + + /// Same as [`Easy2::cookie`](struct.Easy2.html#method.cookie) + pub fn cookie(&mut self, cookie: &str) -> Result<(), Error> { + self.inner.cookie(cookie) + } + + /// Same as [`Easy2::cookie_file`](struct.Easy2.html#method.cookie_file) + pub fn cookie_file>(&mut self, file: P) -> Result<(), Error> { + self.inner.cookie_file(file) + } + + /// Same as [`Easy2::cookie_jar`](struct.Easy2.html#method.cookie_jar) + pub fn cookie_jar>(&mut self, file: P) -> Result<(), Error> { + self.inner.cookie_jar(file) + } + + /// Same as [`Easy2::cookie_session`](struct.Easy2.html#method.cookie_session) + pub fn cookie_session(&mut self, session: bool) -> Result<(), Error> { + self.inner.cookie_session(session) + } + + /// Same as [`Easy2::cookie_list`](struct.Easy2.html#method.cookie_list) + pub fn cookie_list(&mut self, cookie: &str) -> Result<(), Error> { + self.inner.cookie_list(cookie) + } + + /// Same as [`Easy2::get`](struct.Easy2.html#method.get) + pub fn get(&mut self, enable: bool) -> Result<(), Error> { + self.inner.get(enable) + } + + /// Same as [`Easy2::ignore_content_length`](struct.Easy2.html#method.ignore_content_length) + pub fn ignore_content_length(&mut self, ignore: bool) -> Result<(), Error> { + self.inner.ignore_content_length(ignore) + } + + /// Same as [`Easy2::http_content_decoding`](struct.Easy2.html#method.http_content_decoding) + pub fn http_content_decoding(&mut self, enable: bool) -> Result<(), Error> { + self.inner.http_content_decoding(enable) + } + + /// Same as [`Easy2::http_transfer_decoding`](struct.Easy2.html#method.http_transfer_decoding) + pub fn http_transfer_decoding(&mut self, enable: bool) -> Result<(), Error> { + self.inner.http_transfer_decoding(enable) + } + + // ========================================================================= + // Protocol Options + + /// Same as [`Easy2::range`](struct.Easy2.html#method.range) + pub fn range(&mut self, range: &str) -> Result<(), Error> { + self.inner.range(range) + } + + /// Same as 
[`Easy2::resume_from`](struct.Easy2.html#method.resume_from) + pub fn resume_from(&mut self, from: u64) -> Result<(), Error> { + self.inner.resume_from(from) + } + + /// Same as [`Easy2::custom_request`](struct.Easy2.html#method.custom_request) + pub fn custom_request(&mut self, request: &str) -> Result<(), Error> { + self.inner.custom_request(request) + } + + /// Same as [`Easy2::fetch_filetime`](struct.Easy2.html#method.fetch_filetime) + pub fn fetch_filetime(&mut self, fetch: bool) -> Result<(), Error> { + self.inner.fetch_filetime(fetch) + } + + /// Same as [`Easy2::nobody`](struct.Easy2.html#method.nobody) + pub fn nobody(&mut self, enable: bool) -> Result<(), Error> { + self.inner.nobody(enable) + } + + /// Same as [`Easy2::in_filesize`](struct.Easy2.html#method.in_filesize) + pub fn in_filesize(&mut self, size: u64) -> Result<(), Error> { + self.inner.in_filesize(size) + } + + /// Same as [`Easy2::upload`](struct.Easy2.html#method.upload) + pub fn upload(&mut self, enable: bool) -> Result<(), Error> { + self.inner.upload(enable) + } + + /// Same as [`Easy2::max_filesize`](struct.Easy2.html#method.max_filesize) + pub fn max_filesize(&mut self, size: u64) -> Result<(), Error> { + self.inner.max_filesize(size) + } + + /// Same as [`Easy2::time_condition`](struct.Easy2.html#method.time_condition) + pub fn time_condition(&mut self, cond: TimeCondition) -> Result<(), Error> { + self.inner.time_condition(cond) + } + + /// Same as [`Easy2::time_value`](struct.Easy2.html#method.time_value) + pub fn time_value(&mut self, val: i64) -> Result<(), Error> { + self.inner.time_value(val) + } + + // ========================================================================= + // Connection Options + + /// Same as [`Easy2::timeout`](struct.Easy2.html#method.timeout) + pub fn timeout(&mut self, timeout: Duration) -> Result<(), Error> { + self.inner.timeout(timeout) + } + + /// Same as [`Easy2::low_speed_limit`](struct.Easy2.html#method.low_speed_limit) + pub fn low_speed_limit(&mut self, limit: u32) -> Result<(), Error> { + self.inner.low_speed_limit(limit) + } + + /// Same as [`Easy2::low_speed_time`](struct.Easy2.html#method.low_speed_time) + pub fn low_speed_time(&mut self, dur: Duration) -> Result<(), Error> { + self.inner.low_speed_time(dur) + } + + /// Same as [`Easy2::max_send_speed`](struct.Easy2.html#method.max_send_speed) + pub fn max_send_speed(&mut self, speed: u64) -> Result<(), Error> { + self.inner.max_send_speed(speed) + } + + /// Same as [`Easy2::max_recv_speed`](struct.Easy2.html#method.max_recv_speed) + pub fn max_recv_speed(&mut self, speed: u64) -> Result<(), Error> { + self.inner.max_recv_speed(speed) + } + + /// Same as [`Easy2::max_connects`](struct.Easy2.html#method.max_connects) + pub fn max_connects(&mut self, max: u32) -> Result<(), Error> { + self.inner.max_connects(max) + } + + /// Same as [`Easy2::fresh_connect`](struct.Easy2.html#method.fresh_connect) + pub fn fresh_connect(&mut self, enable: bool) -> Result<(), Error> { + self.inner.fresh_connect(enable) + } + + /// Same as [`Easy2::forbid_reuse`](struct.Easy2.html#method.forbid_reuse) + pub fn forbid_reuse(&mut self, enable: bool) -> Result<(), Error> { + self.inner.forbid_reuse(enable) + } + + /// Same as [`Easy2::connect_timeout`](struct.Easy2.html#method.connect_timeout) + pub fn connect_timeout(&mut self, timeout: Duration) -> Result<(), Error> { + self.inner.connect_timeout(timeout) + } + + /// Same as [`Easy2::ip_resolve`](struct.Easy2.html#method.ip_resolve) + pub fn ip_resolve(&mut self, resolve: IpResolve) -> 
Result<(), Error> { + self.inner.ip_resolve(resolve) + } + + /// Same as [`Easy2::resolve`](struct.Easy2.html#method.resolve) + pub fn resolve(&mut self, list: List) -> Result<(), Error> { + self.inner.resolve(list) + } + + /// Same as [`Easy2::connect_only`](struct.Easy2.html#method.connect_only) + pub fn connect_only(&mut self, enable: bool) -> Result<(), Error> { + self.inner.connect_only(enable) + } + + // ========================================================================= + // SSL/Security Options + + /// Same as [`Easy2::ssl_cert`](struct.Easy2.html#method.ssl_cert) + pub fn ssl_cert>(&mut self, cert: P) -> Result<(), Error> { + self.inner.ssl_cert(cert) + } + + /// Same as [`Easy2::ssl_cert_type`](struct.Easy2.html#method.ssl_cert_type) + pub fn ssl_cert_type(&mut self, kind: &str) -> Result<(), Error> { + self.inner.ssl_cert_type(kind) + } + + /// Same as [`Easy2::ssl_key`](struct.Easy2.html#method.ssl_key) + pub fn ssl_key>(&mut self, key: P) -> Result<(), Error> { + self.inner.ssl_key(key) + } + + /// Same as [`Easy2::ssl_key_type`](struct.Easy2.html#method.ssl_key_type) + pub fn ssl_key_type(&mut self, kind: &str) -> Result<(), Error> { + self.inner.ssl_key_type(kind) + } + + /// Same as [`Easy2::key_password`](struct.Easy2.html#method.key_password) + pub fn key_password(&mut self, password: &str) -> Result<(), Error> { + self.inner.key_password(password) + } + + /// Same as [`Easy2::ssl_engine`](struct.Easy2.html#method.ssl_engine) + pub fn ssl_engine(&mut self, engine: &str) -> Result<(), Error> { + self.inner.ssl_engine(engine) + } + + /// Same as [`Easy2::ssl_engine_default`](struct.Easy2.html#method.ssl_engine_default) + pub fn ssl_engine_default(&mut self, enable: bool) -> Result<(), Error> { + self.inner.ssl_engine_default(enable) + } + + /// Same as [`Easy2::http_version`](struct.Easy2.html#method.http_version) + pub fn http_version(&mut self, version: HttpVersion) -> Result<(), Error> { + self.inner.http_version(version) + } + + /// Same as [`Easy2::ssl_version`](struct.Easy2.html#method.ssl_version) + pub fn ssl_version(&mut self, version: SslVersion) -> Result<(), Error> { + self.inner.ssl_version(version) + } + + /// Same as [`Easy2::ssl_verify_host`](struct.Easy2.html#method.ssl_verify_host) + pub fn ssl_verify_host(&mut self, verify: bool) -> Result<(), Error> { + self.inner.ssl_verify_host(verify) + } + + /// Same as [`Easy2::ssl_verify_peer`](struct.Easy2.html#method.ssl_verify_peer) + pub fn ssl_verify_peer(&mut self, verify: bool) -> Result<(), Error> { + self.inner.ssl_verify_peer(verify) + } + + /// Same as [`Easy2::cainfo`](struct.Easy2.html#method.cainfo) + pub fn cainfo>(&mut self, path: P) -> Result<(), Error> { + self.inner.cainfo(path) + } + + /// Same as [`Easy2::issuer_cert`](struct.Easy2.html#method.issuer_cert) + pub fn issuer_cert>(&mut self, path: P) -> Result<(), Error> { + self.inner.issuer_cert(path) + } + + /// Same as [`Easy2::capath`](struct.Easy2.html#method.capath) + pub fn capath>(&mut self, path: P) -> Result<(), Error> { + self.inner.capath(path) + } + + /// Same as [`Easy2::crlfile`](struct.Easy2.html#method.crlfile) + pub fn crlfile>(&mut self, path: P) -> Result<(), Error> { + self.inner.crlfile(path) + } + + /// Same as [`Easy2::certinfo`](struct.Easy2.html#method.certinfo) + pub fn certinfo(&mut self, enable: bool) -> Result<(), Error> { + self.inner.certinfo(enable) + } + + /// Same as [`Easy2::random_file`](struct.Easy2.html#method.random_file) + pub fn random_file>(&mut self, p: P) -> Result<(), Error> { + 
self.inner.random_file(p) + } + + /// Same as [`Easy2::egd_socket`](struct.Easy2.html#method.egd_socket) + pub fn egd_socket>(&mut self, p: P) -> Result<(), Error> { + self.inner.egd_socket(p) + } + + /// Same as [`Easy2::ssl_cipher_list`](struct.Easy2.html#method.ssl_cipher_list) + pub fn ssl_cipher_list(&mut self, ciphers: &str) -> Result<(), Error> { + self.inner.ssl_cipher_list(ciphers) + } + + /// Same as [`Easy2::ssl_sessionid_cache`](struct.Easy2.html#method.ssl_sessionid_cache) + pub fn ssl_sessionid_cache(&mut self, enable: bool) -> Result<(), Error> { + self.inner.ssl_sessionid_cache(enable) + } + + /// Same as [`Easy2::ssl_options`](struct.Easy2.html#method.ssl_options) + pub fn ssl_options(&mut self, bits: &SslOpt) -> Result<(), Error> { + self.inner.ssl_options(bits) + } + + // ========================================================================= + // getters + + /// Same as [`Easy2::time_condition_unmet`](struct.Easy2.html#method.time_condition_unmet) + pub fn time_condition_unmet(&mut self) -> Result { + self.inner.time_condition_unmet() + } + + /// Same as [`Easy2::effective_url`](struct.Easy2.html#method.effective_url) + pub fn effective_url(&mut self) -> Result, Error> { + self.inner.effective_url() + } + + /// Same as [`Easy2::effective_url_bytes`](struct.Easy2.html#method.effective_url_bytes) + pub fn effective_url_bytes(&mut self) -> Result, Error> { + self.inner.effective_url_bytes() + } + + /// Same as [`Easy2::response_code`](struct.Easy2.html#method.response_code) + pub fn response_code(&mut self) -> Result { + self.inner.response_code() + } + + /// Same as [`Easy2::http_connectcode`](struct.Easy2.html#method.http_connectcode) + pub fn http_connectcode(&mut self) -> Result { + self.inner.http_connectcode() + } + + /// Same as [`Easy2::filetime`](struct.Easy2.html#method.filetime) + pub fn filetime(&mut self) -> Result, Error> { + self.inner.filetime() + } + + /// Same as [`Easy2::download_size`](struct.Easy2.html#method.download_size) + pub fn download_size(&mut self) -> Result { + self.inner.download_size() + } + /// Same as [`Easy2::content_length_download`](struct.Easy2.html#method.content_length_download) + pub fn content_length_download(&mut self) -> Result { + self.inner.content_length_download() + } + + /// Same as [`Easy2::total_time`](struct.Easy2.html#method.total_time) + pub fn total_time(&mut self) -> Result { + self.inner.total_time() + } + + /// Same as [`Easy2::namelookup_time`](struct.Easy2.html#method.namelookup_time) + pub fn namelookup_time(&mut self) -> Result { + self.inner.namelookup_time() + } + + /// Same as [`Easy2::connect_time`](struct.Easy2.html#method.connect_time) + pub fn connect_time(&mut self) -> Result { + self.inner.connect_time() + } + + /// Same as [`Easy2::appconnect_time`](struct.Easy2.html#method.appconnect_time) + pub fn appconnect_time(&mut self) -> Result { + self.inner.appconnect_time() + } + + /// Same as [`Easy2::pretransfer_time`](struct.Easy2.html#method.pretransfer_time) + pub fn pretransfer_time(&mut self) -> Result { + self.inner.pretransfer_time() + } + + /// Same as [`Easy2::starttransfer_time`](struct.Easy2.html#method.starttransfer_time) + pub fn starttransfer_time(&mut self) -> Result { + self.inner.starttransfer_time() + } + + /// Same as [`Easy2::redirect_time`](struct.Easy2.html#method.redirect_time) + pub fn redirect_time(&mut self) -> Result { + self.inner.redirect_time() + } + + /// Same as [`Easy2::redirect_count`](struct.Easy2.html#method.redirect_count) + pub fn redirect_count(&mut self) -> Result 
{ + self.inner.redirect_count() + } + + /// Same as [`Easy2::redirect_url`](struct.Easy2.html#method.redirect_url) + pub fn redirect_url(&mut self) -> Result, Error> { + self.inner.redirect_url() + } + + /// Same as [`Easy2::redirect_url_bytes`](struct.Easy2.html#method.redirect_url_bytes) + pub fn redirect_url_bytes(&mut self) -> Result, Error> { + self.inner.redirect_url_bytes() + } + + /// Same as [`Easy2::header_size`](struct.Easy2.html#method.header_size) + pub fn header_size(&mut self) -> Result { + self.inner.header_size() + } + + /// Same as [`Easy2::request_size`](struct.Easy2.html#method.request_size) + pub fn request_size(&mut self) -> Result { + self.inner.request_size() + } + + /// Same as [`Easy2::content_type`](struct.Easy2.html#method.content_type) + pub fn content_type(&mut self) -> Result, Error> { + self.inner.content_type() + } + + /// Same as [`Easy2::content_type_bytes`](struct.Easy2.html#method.content_type_bytes) + pub fn content_type_bytes(&mut self) -> Result, Error> { + self.inner.content_type_bytes() + } + + /// Same as [`Easy2::os_errno`](struct.Easy2.html#method.os_errno) + pub fn os_errno(&mut self) -> Result { + self.inner.os_errno() + } + + /// Same as [`Easy2::primary_ip`](struct.Easy2.html#method.primary_ip) + pub fn primary_ip(&mut self) -> Result, Error> { + self.inner.primary_ip() + } + + /// Same as [`Easy2::primary_port`](struct.Easy2.html#method.primary_port) + pub fn primary_port(&mut self) -> Result { + self.inner.primary_port() + } + + /// Same as [`Easy2::local_ip`](struct.Easy2.html#method.local_ip) + pub fn local_ip(&mut self) -> Result, Error> { + self.inner.local_ip() + } + + /// Same as [`Easy2::local_port`](struct.Easy2.html#method.local_port) + pub fn local_port(&mut self) -> Result { + self.inner.local_port() + } + + /// Same as [`Easy2::cookies`](struct.Easy2.html#method.cookies) + pub fn cookies(&mut self) -> Result { + self.inner.cookies() + } + + /// Same as [`Easy2::pipewait`](struct.Easy2.html#method.pipewait) + pub fn pipewait(&mut self, wait: bool) -> Result<(), Error> { + self.inner.pipewait(wait) + } + + // ========================================================================= + // Other methods + + /// Same as [`Easy2::perform`](struct.Easy2.html#method.perform) + pub fn perform(&self) -> Result<(), Error> { + assert!(self.inner.get_ref().borrowed.get().is_null()); + self.do_perform() + } + + fn do_perform(&self) -> Result<(), Error> { + // We don't allow recursive invocations of `perform` because we're + // invoking `FnMut`closures behind a `&self` pointer. This flag acts as + // our own `RefCell` borrow flag sorta. + if self.inner.get_ref().running.get() { + return Err(Error::new(curl_sys::CURLE_FAILED_INIT)) + } + + self.inner.get_ref().running.set(true); + struct Reset<'a>(&'a Cell); + impl<'a> Drop for Reset<'a> { + fn drop(&mut self) { + self.0.set(false); + } + } + let _reset = Reset(&self.inner.get_ref().running); + + self.inner.perform() + } + + /// Creates a new scoped transfer which can be used to set callbacks and + /// data which only live for the scope of the returned object. + /// + /// An `Easy` handle is often reused between different requests to cache + /// connections to servers, but often the lifetime of the data as part of + /// each transfer is unique. This function serves as an ability to share an + /// `Easy` across many transfers while ergonomically using possibly + /// stack-local data as part of each transfer. 
+ /// + /// Configuration can be set on the `Easy` and then a `Transfer` can be + /// created to set scoped configuration (like callbacks). Finally, the + /// `perform` method on the `Transfer` function can be used. + /// + /// When the `Transfer` option is dropped then all configuration set on the + /// transfer itself will be reset. + pub fn transfer<'data, 'easy>(&'easy mut self) -> Transfer<'easy, 'data> { + assert!(!self.inner.get_ref().running.get()); + Transfer { + data: Box::new(Callbacks::default()), + easy: self, + } + } + + /// Same as [`Easy2::unpause_read`](struct.Easy2.html#method.unpause_read) + pub fn unpause_read(&self) -> Result<(), Error> { + self.inner.unpause_read() + } + + /// Same as [`Easy2::unpause_write`](struct.Easy2.html#method.unpause_write) + pub fn unpause_write(&self) -> Result<(), Error> { + self.inner.unpause_write() + } + + /// Same as [`Easy2::url_encode`](struct.Easy2.html#method.url_encode) + pub fn url_encode(&mut self, s: &[u8]) -> String { + self.inner.url_encode(s) + } + + /// Same as [`Easy2::url_decode`](struct.Easy2.html#method.url_decode) + pub fn url_decode(&mut self, s: &str) -> Vec { + self.inner.url_decode(s) + } + + /// Same as [`Easy2::reset`](struct.Easy2.html#method.reset) + pub fn reset(&mut self) { + self.inner.reset() + } + + /// Same as [`Easy2::recv`](struct.Easy2.html#method.recv) + pub fn recv(&mut self, data: &mut [u8]) -> Result { + self.inner.recv(data) + } + + /// Same as [`Easy2::send`](struct.Easy2.html#method.send) + pub fn send(&mut self, data: &[u8]) -> Result { + self.inner.send(data) + } + + /// Same as [`Easy2::raw`](struct.Easy2.html#method.raw) + pub fn raw(&self) -> *mut curl_sys::CURL { + self.inner.raw() + } + + /// Same as [`Easy2::take_error_buf`](struct.Easy2.html#method.take_error_buf) + pub fn take_error_buf(&self) -> Option { + self.inner.take_error_buf() + } +} + +impl EasyData { + /// An unsafe function to get the appropriate callback field. + /// + /// We can have callbacks configured from one of two different sources. + /// We could either have a callback from the `borrowed` field, callbacks on + /// an ephemeral `Transfer`, or the `owned` field which are `'static` + /// callbacks that live for the lifetime of this `EasyData`. + /// + /// The first set of callbacks are unsafe to access because they're actually + /// owned elsewhere and we're just aliasing. Additionally they don't + /// technically live long enough for us to access them, so they're hidden + /// behind unsafe pointers and casts. + /// + /// This function returns `&'a mut T` but that's actually somewhat of a lie. + /// The value should **not be stored to** nor should it be used for the full + /// lifetime of `'a`, but rather immediately in the local scope. + /// + /// Basically this is just intended to acquire a callback, invoke it, and + /// then stop. Nothing else. Super unsafe. 
+ unsafe fn callback<'a, T, F>(&'a mut self, f: F) -> Option<&'a mut T> + where F: for<'b> Fn(&'b mut Callbacks<'static>) -> &'b mut Option, + { + let ptr = self.borrowed.get(); + if !ptr.is_null() { + let val = f(&mut *ptr); + if val.is_some() { + return val.as_mut() + } + } + f(&mut self.owned).as_mut() + } +} + +impl Handler for EasyData { + fn write(&mut self, data: &[u8]) -> Result { + unsafe { + match self.callback(|s| &mut s.write) { + Some(write) => write(data), + None => Ok(data.len()), + } + } + } + + fn read(&mut self, data: &mut [u8]) -> Result { + unsafe { + match self.callback(|s| &mut s.read) { + Some(read) => read(data), + None => Ok(0), + } + } + } + + fn seek(&mut self, whence: SeekFrom) -> SeekResult { + unsafe { + match self.callback(|s| &mut s.seek) { + Some(seek) => seek(whence), + None => SeekResult::CantSeek, + } + } + } + + fn debug(&mut self, kind: InfoType, data: &[u8]) { + unsafe { + match self.callback(|s| &mut s.debug) { + Some(debug) => debug(kind, data), + None => handler::debug(kind, data), + } + } + } + + fn header(&mut self, data: &[u8]) -> bool { + unsafe { + match self.callback(|s| &mut s.header) { + Some(header) => header(data), + None => true, + } + } + } + + fn progress(&mut self, + dltotal: f64, + dlnow: f64, + ultotal: f64, + ulnow: f64) -> bool { + unsafe { + match self.callback(|s| &mut s.progress) { + Some(progress) => progress(dltotal, dlnow, ultotal, ulnow), + None => true, + } + } + } + + fn ssl_ctx(&mut self, cx: *mut c_void) -> Result<(), Error> { + unsafe { + match self.callback(|s| &mut s.ssl_ctx) { + Some(ssl_ctx) => ssl_ctx(cx), + None => handler::ssl_ctx(cx), + } + } + } +} + +impl fmt::Debug for EasyData { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + "callbacks ...".fmt(f) + } +} + +impl<'easy, 'data> Transfer<'easy, 'data> { + /// Same as `Easy::write_function`, just takes a non `'static` lifetime + /// corresponding to the lifetime of this transfer. + pub fn write_function(&mut self, f: F) -> Result<(), Error> + where F: FnMut(&[u8]) -> Result + 'data + { + self.data.write = Some(Box::new(f)); + Ok(()) + } + + /// Same as `Easy::read_function`, just takes a non `'static` lifetime + /// corresponding to the lifetime of this transfer. + pub fn read_function(&mut self, f: F) -> Result<(), Error> + where F: FnMut(&mut [u8]) -> Result + 'data + { + self.data.read = Some(Box::new(f)); + Ok(()) + } + + /// Same as `Easy::seek_function`, just takes a non `'static` lifetime + /// corresponding to the lifetime of this transfer. + pub fn seek_function(&mut self, f: F) -> Result<(), Error> + where F: FnMut(SeekFrom) -> SeekResult + 'data + { + self.data.seek = Some(Box::new(f)); + Ok(()) + } + + /// Same as `Easy::progress_function`, just takes a non `'static` lifetime + /// corresponding to the lifetime of this transfer. + pub fn progress_function(&mut self, f: F) -> Result<(), Error> + where F: FnMut(f64, f64, f64, f64) -> bool + 'data + { + self.data.progress = Some(Box::new(f)); + Ok(()) + } + + /// Same as `Easy::ssl_ctx_function`, just takes a non `'static` + /// lifetime corresponding to the lifetime of this transfer. + pub fn ssl_ctx_function(&mut self, f: F) -> Result<(), Error> + where F: FnMut(*mut c_void) -> Result<(), Error> + Send + 'data + { + self.data.ssl_ctx = Some(Box::new(f)); + Ok(()) + } + + /// Same as `Easy::debug_function`, just takes a non `'static` lifetime + /// corresponding to the lifetime of this transfer. 
+ pub fn debug_function(&mut self, f: F) -> Result<(), Error> + where F: FnMut(InfoType, &[u8]) + 'data + { + self.data.debug = Some(Box::new(f)); + Ok(()) + } + + /// Same as `Easy::header_function`, just takes a non `'static` lifetime + /// corresponding to the lifetime of this transfer. + pub fn header_function(&mut self, f: F) -> Result<(), Error> + where F: FnMut(&[u8]) -> bool + 'data + { + self.data.header = Some(Box::new(f)); + Ok(()) + } + + /// Same as `Easy::perform`. + pub fn perform(&self) -> Result<(), Error> { + let inner = self.easy.inner.get_ref(); + + // Note that we're casting a `&self` pointer to a `*mut`, and then + // during the invocation of this call we're going to invoke `FnMut` + // closures that we ourselves own. + // + // This should be ok, however, because `do_perform` checks for recursive + // invocations of `perform` and disallows them. Our type also isn't + // `Sync`. + inner.borrowed.set(&*self.data as *const _ as *mut _); + + // Make sure to reset everything back to the way it was before when + // we're done. + struct Reset<'a>(&'a Cell<*mut Callbacks<'static>>); + impl<'a> Drop for Reset<'a> { + fn drop(&mut self) { + self.0.set(ptr::null_mut()); + } + } + let _reset = Reset(&inner.borrowed); + + self.easy.do_perform() + } + + /// Same as `Easy::unpause_read`. + pub fn unpause_read(&self) -> Result<(), Error> { + self.easy.unpause_read() + } + + /// Same as `Easy::unpause_write` + pub fn unpause_write(&self) -> Result<(), Error> { + self.easy.unpause_write() + } +} + +impl<'easy, 'data> fmt::Debug for Transfer<'easy, 'data> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_struct("Transfer") + .field("easy", &self.easy) + .finish() + } +} + +impl<'easy, 'data> Drop for Transfer<'easy, 'data> { + fn drop(&mut self) { + // Extra double check to make sure we don't leak a pointer to ourselves. + assert!(self.easy.inner.get_ref().borrowed.get().is_null()); + } +} diff --git a/curl/src/easy/handler.rs b/curl/src/easy/handler.rs new file mode 100644 index 000000000..aa85877e6 --- /dev/null +++ b/curl/src/easy/handler.rs @@ -0,0 +1,3241 @@ +use std::cell::RefCell; +use std::ffi::{CStr, CString}; +use std::fmt; +use std::io::{self, SeekFrom, Write}; +use std::path::Path; +use std::slice; +use std::str; +use std::time::Duration; + +use curl_sys; +use libc::{self, c_void, c_char, c_long, size_t, c_int, c_double, c_ulong}; +use socket2::Socket; + +use Error; +use easy::form; +use easy::list; +use easy::{List, Form}; +use easy::windows; +use panic; + +/// A trait for the various callbacks used by libcurl to invoke user code. +/// +/// This trait represents all operations that libcurl can possibly invoke a +/// client for code during an HTTP transaction. Each callback has a default +/// "noop" implementation, the same as in libcurl. Types implementing this trait +/// may simply override the relevant functions to learn about the callbacks +/// they're interested in. 
+/// +/// # Examples +/// +/// ``` +/// use curl::easy::{Easy2, Handler, WriteError}; +/// +/// struct Collector(Vec); +/// +/// impl Handler for Collector { +/// fn write(&mut self, data: &[u8]) -> Result { +/// self.0.extend_from_slice(data); +/// Ok(data.len()) +/// } +/// } +/// +/// let mut easy = Easy2::new(Collector(Vec::new())); +/// easy.get(true).unwrap(); +/// easy.url("https://www.rust-lang.org/").unwrap(); +/// easy.perform().unwrap(); +/// +/// assert_eq!(easy.response_code().unwrap(), 200); +/// let contents = easy.get_ref(); +/// println!("{}", String::from_utf8_lossy(&contents.0)); +/// ``` +pub trait Handler { + /// Callback invoked whenever curl has downloaded data for the application. + /// + /// This callback function gets called by libcurl as soon as there is data + /// received that needs to be saved. + /// + /// The callback function will be passed as much data as possible in all + /// invokes, but you must not make any assumptions. It may be one byte, it + /// may be thousands. If `show_header` is enabled, which makes header data + /// get passed to the write callback, you can get up to + /// `CURL_MAX_HTTP_HEADER` bytes of header data passed into it. This + /// usually means 100K. + /// + /// This function may be called with zero bytes data if the transferred file + /// is empty. + /// + /// The callback should return the number of bytes actually taken care of. + /// If that amount differs from the amount passed to your callback function, + /// it'll signal an error condition to the library. This will cause the + /// transfer to get aborted and the libcurl function used will return + /// an error with `is_write_error`. + /// + /// If your callback function returns `Err(WriteError::Pause)` it will cause + /// this transfer to become paused. See `unpause_write` for further details. + /// + /// By default data is sent into the void, and this corresponds to the + /// `CURLOPT_WRITEFUNCTION` and `CURLOPT_WRITEDATA` options. + fn write(&mut self, data: &[u8]) -> Result { + Ok(data.len()) + } + + /// Read callback for data uploads. + /// + /// This callback function gets called by libcurl as soon as it needs to + /// read data in order to send it to the peer - like if you ask it to upload + /// or post data to the server. + /// + /// Your function must then return the actual number of bytes that it stored + /// in that memory area. Returning 0 will signal end-of-file to the library + /// and cause it to stop the current transfer. + /// + /// If you stop the current transfer by returning 0 "pre-maturely" (i.e + /// before the server expected it, like when you've said you will upload N + /// bytes and you upload less than N bytes), you may experience that the + /// server "hangs" waiting for the rest of the data that won't come. + /// + /// The read callback may return `Err(ReadError::Abort)` to stop the + /// current operation immediately, resulting in a `is_aborted_by_callback` + /// error code from the transfer. + /// + /// The callback can return `Err(ReadError::Pause)` to cause reading from + /// this connection to pause. See `unpause_read` for further details. + /// + /// By default data not input, and this corresponds to the + /// `CURLOPT_READFUNCTION` and `CURLOPT_READDATA` options. + /// + /// Note that the lifetime bound on this function is `'static`, but that + /// is often too restrictive. To use stack data consider calling the + /// `transfer` method and then using `read_function` to configure a + /// callback that can reference stack-local data. 
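+    ///
+    /// # Examples
+    ///
+    /// A handler that uploads an in-memory payload might implement `read`
+    /// roughly as follows (a minimal sketch; the `Upload` type and its fields
+    /// are illustrative, not part of this crate):
+    ///
+    /// ```
+    /// use curl::easy::{Handler, ReadError};
+    ///
+    /// struct Upload {
+    ///     data: Vec<u8>,
+    ///     pos: usize,
+    /// }
+    ///
+    /// impl Handler for Upload {
+    ///     fn read(&mut self, into: &mut [u8]) -> Result<usize, ReadError> {
+    ///         // Copy as much of the remaining payload as fits into `into`,
+    ///         // advancing the cursor; returning 0 signals end-of-file.
+    ///         let n = (self.data.len() - self.pos).min(into.len());
+    ///         into[..n].copy_from_slice(&self.data[self.pos..self.pos + n]);
+    ///         self.pos += n;
+    ///         Ok(n)
+    ///     }
+    /// }
+    /// ```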
+ fn read(&mut self, data: &mut [u8]) -> Result { + drop(data); + Ok(0) + } + + /// User callback for seeking in input stream. + /// + /// This function gets called by libcurl to seek to a certain position in + /// the input stream and can be used to fast forward a file in a resumed + /// upload (instead of reading all uploaded bytes with the normal read + /// function/callback). It is also called to rewind a stream when data has + /// already been sent to the server and needs to be sent again. This may + /// happen when doing a HTTP PUT or POST with a multi-pass authentication + /// method, or when an existing HTTP connection is reused too late and the + /// server closes the connection. + /// + /// The callback function must return `SeekResult::Ok` on success, + /// `SeekResult::Fail` to cause the upload operation to fail or + /// `SeekResult::CantSeek` to indicate that while the seek failed, libcurl + /// is free to work around the problem if possible. The latter can sometimes + /// be done by instead reading from the input or similar. + /// + /// By default data this option is not set, and this corresponds to the + /// `CURLOPT_SEEKFUNCTION` and `CURLOPT_SEEKDATA` options. + fn seek(&mut self, whence: SeekFrom) -> SeekResult { + drop(whence); + SeekResult::CantSeek + } + + /// Specify a debug callback + /// + /// `debug_function` replaces the standard debug function used when + /// `verbose` is in effect. This callback receives debug information, + /// as specified in the type argument. + /// + /// By default this option is not set and corresponds to the + /// `CURLOPT_DEBUGFUNCTION` and `CURLOPT_DEBUGDATA` options. + fn debug(&mut self, kind: InfoType, data: &[u8]) { + debug(kind, data) + } + + /// Callback that receives header data + /// + /// This function gets called by libcurl as soon as it has received header + /// data. The header callback will be called once for each header and only + /// complete header lines are passed on to the callback. Parsing headers is + /// very easy using this. If this callback returns `false` it'll signal an + /// error to the library. This will cause the transfer to get aborted and + /// the libcurl function in progress will return `is_write_error`. + /// + /// A complete HTTP header that is passed to this function can be up to + /// CURL_MAX_HTTP_HEADER (100K) bytes. + /// + /// It's important to note that the callback will be invoked for the headers + /// of all responses received after initiating a request and not just the + /// final response. This includes all responses which occur during + /// authentication negotiation. If you need to operate on only the headers + /// from the final response, you will need to collect headers in the + /// callback yourself and use HTTP status lines, for example, to delimit + /// response boundaries. + /// + /// When a server sends a chunked encoded transfer, it may contain a + /// trailer. That trailer is identical to a HTTP header and if such a + /// trailer is received it is passed to the application using this callback + /// as well. There are several ways to detect it being a trailer and not an + /// ordinary header: 1) it comes after the response-body. 2) it comes after + /// the final header line (CR LF) 3) a Trailer: header among the regular + /// response-headers mention what header(s) to expect in the trailer. + /// + /// For non-HTTP protocols like FTP, POP3, IMAP and SMTP this function will + /// get called with the server responses to the commands that libcurl sends. 
+ /// + /// By default this option is not set and corresponds to the + /// `CURLOPT_HEADERFUNCTION` and `CURLOPT_HEADERDATA` options. + fn header(&mut self, data: &[u8]) -> bool { + drop(data); + true + } + + /// Callback to progress meter function + /// + /// This function gets called by libcurl instead of its internal equivalent + /// with a frequent interval. While data is being transferred it will be + /// called very frequently, and during slow periods like when nothing is + /// being transferred it can slow down to about one call per second. + /// + /// The callback gets told how much data libcurl will transfer and has + /// transferred, in number of bytes. The first argument is the total number + /// of bytes libcurl expects to download in this transfer. The second + /// argument is the number of bytes downloaded so far. The third argument is + /// the total number of bytes libcurl expects to upload in this transfer. + /// The fourth argument is the number of bytes uploaded so far. + /// + /// Unknown/unused argument values passed to the callback will be set to + /// zero (like if you only download data, the upload size will remain 0). + /// Many times the callback will be called one or more times first, before + /// it knows the data sizes so a program must be made to handle that. + /// + /// Returning `false` from this callback will cause libcurl to abort the + /// transfer and return `is_aborted_by_callback`. + /// + /// If you transfer data with the multi interface, this function will not be + /// called during periods of idleness unless you call the appropriate + /// libcurl function that performs transfers. + /// + /// `progress` must be set to `true` to make this function actually get + /// called. + /// + /// By default this function calls an internal method and corresponds to + /// `CURLOPT_PROGRESSFUNCTION` and `CURLOPT_PROGRESSDATA`. + fn progress(&mut self, + dltotal: f64, + dlnow: f64, + ultotal: f64, + ulnow: f64) -> bool { + drop((dltotal, dlnow, ultotal, ulnow)); + true + } + + /// Callback to SSL context + /// + /// This callback function gets called by libcurl just before the + /// initialization of an SSL connection after having processed all + /// other SSL related options to give a last chance to an + /// application to modify the behaviour of the SSL + /// initialization. The `ssl_ctx` parameter is actually a pointer + /// to the SSL library's SSL_CTX. If an error is returned from the + /// callback no attempt to establish a connection is made and the + /// perform operation will return the callback's error code. + /// + /// This function will get called on all new connections made to a + /// server, during the SSL negotiation. The SSL_CTX pointer will + /// be a new one every time. + /// + /// To use this properly, a non-trivial amount of knowledge of + /// your SSL library is necessary. For example, you can use this + /// function to call library-specific callbacks to add additional + /// validation code for certificates, and even to change the + /// actual URI of a HTTPS request. + /// + /// By default this function calls an internal method and + /// corresponds to `CURLOPT_SSL_CTX_FUNCTION` and + /// `CURLOPT_SSL_CTX_DATA`. + /// + /// Note that this callback is not guaranteed to be called, not all versions + /// of libcurl support calling this callback. 
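+    ///
+    /// # Examples
+    ///
+    /// The closure-based `Easy` type exposes the same hook through its
+    /// `ssl_ctx_function` method. A minimal sketch that only observes the raw
+    /// `SSL_CTX` pointer before each handshake (a real application would hand
+    /// it to its SSL library's bindings):
+    ///
+    /// ```no_run
+    /// use curl::easy::Easy;
+    ///
+    /// let mut handle = Easy::new();
+    /// handle.url("https://www.rust-lang.org/").unwrap();
+    /// handle.ssl_ctx_function(|ctx| {
+    ///     // Called during TLS negotiation for every new connection.
+    ///     println!("about to configure SSL_CTX at {:?}", ctx);
+    ///     Ok(())
+    /// }).unwrap();
+    /// handle.perform().unwrap();
+    /// ```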
+ fn ssl_ctx(&mut self, cx: *mut c_void) -> Result<(), Error> { + // By default, if we're on an OpenSSL enabled libcurl and we're on + // Windows, add the system's certificate store to OpenSSL's certificate + // store. + ssl_ctx(cx) + } + + /// Callback to open sockets for libcurl. + /// + /// This callback function gets called by libcurl instead of the socket(2) + /// call. The callback function should return the newly created socket + /// or `None` in case no connection could be established or another + /// error was detected. Any additional `setsockopt(2)` calls can of course + /// be done on the socket at the user's discretion. A `None` return + /// value from the callback function will signal an unrecoverable error to + /// libcurl and it will return `is_couldnt_connect` from the function that + /// triggered this callback. + /// + /// By default this function opens a standard socket and + /// corresponds to `CURLOPT_OPENSOCKETFUNCTION `. + fn open_socket(&mut self, + family: c_int, + socktype: c_int, + protocol: c_int) -> Option { + // Note that we override this to calling a function in `socket2` to + // ensure that we open all sockets with CLOEXEC. Otherwise if we rely on + // libcurl to open sockets it won't use CLOEXEC. + return Socket::new(family.into(), socktype.into(), Some(protocol.into())) + .ok() + .map(cvt); + + #[cfg(unix)] + fn cvt(socket: Socket) -> curl_sys::curl_socket_t { + use std::os::unix::prelude::*; + socket.into_raw_fd() + } + + #[cfg(windows)] + fn cvt(socket: Socket) -> curl_sys::curl_socket_t { + use std::os::windows::prelude::*; + socket.into_raw_socket() + } + } +} + +pub fn debug(kind: InfoType, data: &[u8]) { + let out = io::stderr(); + let prefix = match kind { + InfoType::Text => "*", + InfoType::HeaderIn => "<", + InfoType::HeaderOut => ">", + InfoType::DataIn | + InfoType::SslDataIn => "{", + InfoType::DataOut | + InfoType::SslDataOut => "}", + InfoType::__Nonexhaustive => " ", + }; + let mut out = out.lock(); + drop(write!(out, "{} ", prefix)); + match str::from_utf8(data) { + Ok(s) => drop(out.write_all(s.as_bytes())), + Err(_) => drop(write!(out, "({} bytes of data)\n", data.len())), + } +} + +pub fn ssl_ctx(cx: *mut c_void) -> Result<(), Error> { + windows::add_certs_to_context(cx); + Ok(()) +} + +/// Raw bindings to a libcurl "easy session". +/// +/// This type corresponds to the `CURL` type in libcurl, and is probably what +/// you want for just sending off a simple HTTP request and fetching a response. +/// Each easy handle can be thought of as a large builder before calling the +/// final `perform` function. +/// +/// There are many many configuration options for each `Easy2` handle, and they +/// should all have their own documentation indicating what it affects and how +/// it interacts with other options. Some implementations of libcurl can use +/// this handle to interact with many different protocols, although by default +/// this crate only guarantees the HTTP/HTTPS protocols working. +/// +/// Note that almost all methods on this structure which configure various +/// properties return a `Result`. This is largely used to detect whether the +/// underlying implementation of libcurl actually implements the option being +/// requested. If you're linked to a version of libcurl which doesn't support +/// the option, then an error will be returned. Some options also perform some +/// validation when they're set, and the error is returned through this vector. 
+/// +/// Note that historically this library contained an `Easy` handle so this one's +/// called `Easy2`. The major difference between the `Easy` type is that an +/// `Easy2` structure uses a trait instead of closures for all of the callbacks +/// that curl can invoke. The `Easy` type is actually built on top of this +/// `Easy` type, and this `Easy2` type can be more flexible in some situations +/// due to the generic parameter. +/// +/// There's not necessarily a right answer for which type is correct to use, but +/// as a general rule of thumb `Easy` is typically a reasonable choice for +/// synchronous I/O and `Easy2` is a good choice for asynchronous I/O. +/// +/// # Examples +/// +/// ``` +/// use curl::easy::{Easy2, Handler, WriteError}; +/// +/// struct Collector(Vec); +/// +/// impl Handler for Collector { +/// fn write(&mut self, data: &[u8]) -> Result { +/// self.0.extend_from_slice(data); +/// Ok(data.len()) +/// } +/// } +/// +/// let mut easy = Easy2::new(Collector(Vec::new())); +/// easy.get(true).unwrap(); +/// easy.url("https://www.rust-lang.org/").unwrap(); +/// easy.perform().unwrap(); +/// +/// assert_eq!(easy.response_code().unwrap(), 200); +/// let contents = easy.get_ref(); +/// println!("{}", String::from_utf8_lossy(&contents.0)); +/// ``` +pub struct Easy2 { + inner: Box>, +} + +struct Inner { + handle: *mut curl_sys::CURL, + header_list: Option, + resolve_list: Option, + form: Option
<Form>
, + error_buf: RefCell>, + handler: H, +} + +unsafe impl Send for Inner {} + +/// Possible proxy types that libcurl currently understands. +#[allow(missing_docs)] +#[derive(Debug)] +pub enum ProxyType { + Http = curl_sys::CURLPROXY_HTTP as isize, + Http1 = curl_sys::CURLPROXY_HTTP_1_0 as isize, + Socks4 = curl_sys::CURLPROXY_SOCKS4 as isize, + Socks5 = curl_sys::CURLPROXY_SOCKS5 as isize, + Socks4a = curl_sys::CURLPROXY_SOCKS4A as isize, + Socks5Hostname = curl_sys::CURLPROXY_SOCKS5_HOSTNAME as isize, + + /// Hidden variant to indicate that this enum should not be matched on, it + /// may grow over time. + #[doc(hidden)] + __Nonexhaustive, +} + +/// Possible conditions for the `time_condition` method. +#[allow(missing_docs)] +#[derive(Debug)] +pub enum TimeCondition { + None = curl_sys::CURL_TIMECOND_NONE as isize, + IfModifiedSince = curl_sys::CURL_TIMECOND_IFMODSINCE as isize, + IfUnmodifiedSince = curl_sys::CURL_TIMECOND_IFUNMODSINCE as isize, + LastModified = curl_sys::CURL_TIMECOND_LASTMOD as isize, + + /// Hidden variant to indicate that this enum should not be matched on, it + /// may grow over time. + #[doc(hidden)] + __Nonexhaustive, +} + +/// Possible values to pass to the `ip_resolve` method. +#[allow(missing_docs)] +#[derive(Debug)] +pub enum IpResolve { + V4 = curl_sys::CURL_IPRESOLVE_V4 as isize, + V6 = curl_sys::CURL_IPRESOLVE_V6 as isize, + Any = curl_sys::CURL_IPRESOLVE_WHATEVER as isize, + + /// Hidden variant to indicate that this enum should not be matched on, it + /// may grow over time. + #[doc(hidden)] + __Nonexhaustive = 500, +} + +/// Possible values to pass to the `http_version` method. +#[derive(Debug)] +pub enum HttpVersion { + /// We don't care what http version to use, and we'd like the library to + /// choose the best possible for us. + Any = curl_sys::CURL_HTTP_VERSION_NONE as isize, + + /// Please use HTTP 1.0 in the request + V10 = curl_sys::CURL_HTTP_VERSION_1_0 as isize, + + /// Please use HTTP 1.1 in the request + V11 = curl_sys::CURL_HTTP_VERSION_1_1 as isize, + + /// Please use HTTP 2 in the request + /// (Added in CURL 7.33.0) + V2 = curl_sys::CURL_HTTP_VERSION_2_0 as isize, + + /// Use version 2 for HTTPS, version 1.1 for HTTP + /// (Added in CURL 7.47.0) + V2TLS = curl_sys::CURL_HTTP_VERSION_2TLS as isize, + + /// Please use HTTP 2 without HTTP/1.1 Upgrade + /// (Added in CURL 7.49.0) + V2PriorKnowledge = curl_sys::CURL_HTTP_VERSION_2_PRIOR_KNOWLEDGE as isize, + + /// Hidden variant to indicate that this enum should not be matched on, it + /// may grow over time. + #[doc(hidden)] + __Nonexhaustive = 500, +} + +/// Possible values to pass to the `ip_resolve` method. +#[allow(missing_docs)] +#[derive(Debug)] +pub enum SslVersion { + Default = curl_sys::CURL_SSLVERSION_DEFAULT as isize, + Tlsv1 = curl_sys::CURL_SSLVERSION_TLSv1 as isize, + Sslv2 = curl_sys::CURL_SSLVERSION_SSLv2 as isize, + Sslv3 = curl_sys::CURL_SSLVERSION_SSLv3 as isize, + // Tlsv10 = curl_sys::CURL_SSLVERSION_TLSv1_0 as isize, + // Tlsv11 = curl_sys::CURL_SSLVERSION_TLSv1_1 as isize, + // Tlsv12 = curl_sys::CURL_SSLVERSION_TLSv1_2 as isize, + + /// Hidden variant to indicate that this enum should not be matched on, it + /// may grow over time. + #[doc(hidden)] + __Nonexhaustive = 500, +} + +/// Possible return values from the `seek_function` callback. 
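+ // A short sketch of how the enums above are passed to the corresponding
+ // setters defined further down on `Easy2`, reusing the `Collector` handler
+ // from the `Easy2` example above; the chosen values are illustrative only:
+ //
+ //     let mut easy = Easy2::new(Collector(Vec::new()));
+ //     easy.ip_resolve(IpResolve::V4).unwrap();
+ //     easy.http_version(HttpVersion::V2TLS).unwrap();
+ //     easy.proxy_type(ProxyType::Socks5Hostname).unwrap();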
+#[derive(Debug)] +pub enum SeekResult { + /// Indicates that the seek operation was a success + Ok = curl_sys::CURL_SEEKFUNC_OK as isize, + + /// Indicates that the seek operation failed, and the entire request should + /// fail as a result. + Fail = curl_sys::CURL_SEEKFUNC_FAIL as isize, + + /// Indicates that although the seek failed libcurl should attempt to keep + /// working if possible (for example "seek" through reading). + CantSeek = curl_sys::CURL_SEEKFUNC_CANTSEEK as isize, + + /// Hidden variant to indicate that this enum should not be matched on, it + /// may grow over time. + #[doc(hidden)] + __Nonexhaustive = 500, +} + +/// Possible data chunks that can be witnessed as part of the `debug_function` +/// callback. +#[derive(Debug)] +pub enum InfoType { + /// The data is informational text. + Text, + + /// The data is header (or header-like) data received from the peer. + HeaderIn, + + /// The data is header (or header-like) data sent to the peer. + HeaderOut, + + /// The data is protocol data received from the peer. + DataIn, + + /// The data is protocol data sent to the peer. + DataOut, + + /// The data is SSL/TLS (binary) data received from the peer. + SslDataIn, + + /// The data is SSL/TLS (binary) data sent to the peer. + SslDataOut, + + /// Hidden variant to indicate that this enum should not be matched on, it + /// may grow over time. + #[doc(hidden)] + __Nonexhaustive, +} + +/// Possible error codes that can be returned from the `read_function` callback. +#[derive(Debug)] +pub enum ReadError { + /// Indicates that the connection should be aborted immediately + Abort, + + /// Indicates that reading should be paused until `unpause` is called. + Pause, + + /// Hidden variant to indicate that this enum should not be matched on, it + /// may grow over time. + #[doc(hidden)] + __Nonexhaustive, +} + +/// Possible error codes that can be returned from the `write_function` callback. +#[derive(Debug)] +pub enum WriteError { + /// Indicates that reading should be paused until `unpause` is called. + Pause, + + /// Hidden variant to indicate that this enum should not be matched on, it + /// may grow over time. + #[doc(hidden)] + __Nonexhaustive, +} + +/// Options for `.netrc` parsing. +#[derive(Debug)] +pub enum NetRc { + /// Ignoring `.netrc` file and use information from url + /// + /// This option is default + Ignored = curl_sys::CURL_NETRC_IGNORED as isize, + + /// The use of your `~/.netrc` file is optional, and information in the URL is to be + /// preferred. The file will be scanned for the host and user name (to find the password only) + /// or for the host only, to find the first user name and password after that machine, which + /// ever information is not specified in the URL. + Optional = curl_sys::CURL_NETRC_OPTIONAL as isize, + + /// This value tells the library that use of the file is required, to ignore the information in + /// the URL, and to search the file for the host only. + Required = curl_sys::CURL_NETRC_REQUIRED as isize, +} + +/// Structure which stores possible authentication methods to get passed to +/// `http_auth` and `proxy_auth`. +#[derive(Clone)] +pub struct Auth { + bits: c_long, +} + +/// Structure which stores possible ssl options to pass to `ssl_options`. +#[derive(Clone)] +pub struct SslOpt { + bits: c_long, +} + +impl Easy2 { + /// Creates a new "easy" handle which is the core of almost all operations + /// in libcurl. + /// + /// To use a handle, applications typically configure a number of options + /// followed by a call to `perform`. 
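+ // A sketch of a `Handler` that feeds an upload body through the `read`
+ // callback, whose error type is the `ReadError` above (the signature is the
+ // one documented earlier for this trait). `Upload` is an illustrative type
+ // only; the handle additionally needs `upload(true)` and usually
+ // `in_filesize` set:
+ //
+ //     use curl::easy::{Handler, ReadError};
+ //
+ //     struct Upload { data: Vec<u8>, pos: usize }
+ //
+ //     impl Handler for Upload {
+ //         fn read(&mut self, into: &mut [u8]) -> Result<usize, ReadError> {
+ //             let rest = &self.data[self.pos..];
+ //             let n = rest.len().min(into.len());
+ //             into[..n].copy_from_slice(&rest[..n]);
+ //             self.pos += n;
+ //             Ok(n)
+ //         }
+ //     }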
Options are preserved across calls to + /// `perform` and need to be reset manually (or via the `reset` method) if + /// this is not desired. + pub fn new(handler: H) -> Easy2 { + ::init(); + unsafe { + let handle = curl_sys::curl_easy_init(); + assert!(!handle.is_null()); + let mut ret = Easy2 { + inner: Box::new(Inner { + handle: handle, + header_list: None, + resolve_list: None, + form: None, + error_buf: RefCell::new(vec![0; curl_sys::CURL_ERROR_SIZE]), + handler: handler, + }), + }; + ret.default_configure(); + return ret + } + } + + /// Re-initializes this handle to the default values. + /// + /// This puts the handle to the same state as it was in when it was just + /// created. This does, however, keep live connections, the session id + /// cache, the dns cache, and cookies. + pub fn reset(&mut self) { + unsafe { + curl_sys::curl_easy_reset(self.inner.handle); + } + self.default_configure(); + } + + fn default_configure(&mut self) { + self.setopt_ptr(curl_sys::CURLOPT_ERRORBUFFER, + self.inner.error_buf.borrow().as_ptr() as *const _) + .expect("failed to set error buffer"); + let _ = self.signal(false); + self.ssl_configure(); + + let ptr = &*self.inner as *const _ as *const _; + + let cb: extern fn(*mut c_char, size_t, size_t, *mut c_void) -> size_t + = header_cb::; + self.setopt_ptr(curl_sys::CURLOPT_HEADERFUNCTION, cb as *const _) + .expect("failed to set header callback"); + self.setopt_ptr(curl_sys::CURLOPT_HEADERDATA, ptr) + .expect("failed to set header callback"); + + let cb: curl_sys::curl_write_callback = write_cb::; + self.setopt_ptr(curl_sys::CURLOPT_WRITEFUNCTION, cb as *const _) + .expect("failed to set write callback"); + self.setopt_ptr(curl_sys::CURLOPT_WRITEDATA, ptr) + .expect("failed to set write callback"); + + let cb: curl_sys::curl_read_callback = read_cb::; + self.setopt_ptr(curl_sys::CURLOPT_READFUNCTION, cb as *const _) + .expect("failed to set read callback"); + self.setopt_ptr(curl_sys::CURLOPT_READDATA, ptr) + .expect("failed to set read callback"); + + let cb: curl_sys::curl_seek_callback = seek_cb::; + self.setopt_ptr(curl_sys::CURLOPT_SEEKFUNCTION, cb as *const _) + .expect("failed to set seek callback"); + self.setopt_ptr(curl_sys::CURLOPT_SEEKDATA, ptr) + .expect("failed to set seek callback"); + + let cb: curl_sys::curl_progress_callback = progress_cb::; + self.setopt_ptr(curl_sys::CURLOPT_PROGRESSFUNCTION, cb as *const _) + .expect("failed to set progress callback"); + self.setopt_ptr(curl_sys::CURLOPT_PROGRESSDATA, ptr) + .expect("failed to set progress callback"); + + let cb: curl_sys::curl_debug_callback = debug_cb::; + self.setopt_ptr(curl_sys::CURLOPT_DEBUGFUNCTION, cb as *const _) + .expect("failed to set debug callback"); + self.setopt_ptr(curl_sys::CURLOPT_DEBUGDATA, ptr) + .expect("failed to set debug callback"); + + let cb: curl_sys::curl_ssl_ctx_callback = ssl_ctx_cb::; + drop(self.setopt_ptr(curl_sys::CURLOPT_SSL_CTX_FUNCTION, cb as *const _)); + drop(self.setopt_ptr(curl_sys::CURLOPT_SSL_CTX_DATA, ptr)); + + let cb: curl_sys::curl_opensocket_callback = opensocket_cb::; + self.setopt_ptr(curl_sys::CURLOPT_OPENSOCKETFUNCTION , cb as *const _) + .expect("failed to set open socket callback"); + self.setopt_ptr(curl_sys::CURLOPT_OPENSOCKETDATA, ptr) + .expect("failed to set open socket callback"); + } + + #[cfg(all(unix, not(target_os = "macos")))] + fn ssl_configure(&mut self) { + let probe = ::openssl_probe::probe(); + if let Some(ref path) = probe.cert_file { + let _ = self.cainfo(path); + } + if let Some(ref path) = probe.cert_dir { + 
let _ = self.capath(path); + } + } + + #[cfg(not(all(unix, not(target_os = "macos"))))] + fn ssl_configure(&mut self) {} +} + +impl Easy2 { + // ========================================================================= + // Behavior options + + /// Configures this handle to have verbose output to help debug protocol + /// information. + /// + /// By default output goes to stderr, but the `stderr` function on this type + /// can configure that. You can also use the `debug_function` method to get + /// all protocol data sent and received. + /// + /// By default, this option is `false`. + pub fn verbose(&mut self, verbose: bool) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_VERBOSE, verbose as c_long) + } + + /// Indicates whether header information is streamed to the output body of + /// this request. + /// + /// This option is only relevant for protocols which have header metadata + /// (like http or ftp). It's not generally possible to extract headers + /// from the body if using this method, that use case should be intended for + /// the `header_function` method. + /// + /// To set HTTP headers, use the `http_header` method. + /// + /// By default, this option is `false` and corresponds to + /// `CURLOPT_HEADER`. + pub fn show_header(&mut self, show: bool) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_HEADER, show as c_long) + } + + /// Indicates whether a progress meter will be shown for requests done with + /// this handle. + /// + /// This will also prevent the `progress_function` from being called. + /// + /// By default this option is `false` and corresponds to + /// `CURLOPT_NOPROGRESS`. + pub fn progress(&mut self, progress: bool) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_NOPROGRESS, + (!progress) as c_long) + } + + /// Inform libcurl whether or not it should install signal handlers or + /// attempt to use signals to perform library functions. + /// + /// If this option is disabled then timeouts during name resolution will not + /// work unless libcurl is built against c-ares. Note that enabling this + /// option, however, may not cause libcurl to work with multiple threads. + /// + /// By default this option is `false` and corresponds to `CURLOPT_NOSIGNAL`. + /// Note that this default is **different than libcurl** as it is intended + /// that this library is threadsafe by default. See the [libcurl docs] for + /// some more information. + /// + /// [libcurl docs]: https://curl.haxx.se/libcurl/c/threadsafe.html + pub fn signal(&mut self, signal: bool) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_NOSIGNAL, + (!signal) as c_long) + } + + /// Indicates whether multiple files will be transferred based on the file + /// name pattern. + /// + /// The last part of a filename uses fnmatch-like pattern matching. + /// + /// By default this option is `false` and corresponds to + /// `CURLOPT_WILDCARDMATCH`. + pub fn wildcard_match(&mut self, m: bool) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_WILDCARDMATCH, m as c_long) + } + + /// Provides the unix domain socket which this handle will work with. 
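+ // A small sketch combining the behavior options above, assuming `easy` is an
+ // `Easy2` handle built as in the `Easy2` example (a `Collector` handler):
+ //
+ //     easy.verbose(true).unwrap();    // protocol trace via the debug callback
+ //     easy.progress(true).unwrap();   // required for Handler::progress to be called
+ //     easy.show_header(false).unwrap();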
+ /// + /// The string provided must be unix domain socket -encoded with the format: + /// + /// ```text + /// /path/file.sock + /// ``` + pub fn unix_socket(&mut self, unix_domain_socket: &str) -> Result<(), Error> { + let socket = try!(CString::new(unix_domain_socket)); + self.setopt_str(curl_sys::CURLOPT_UNIX_SOCKET_PATH, &socket) + } + + + // ========================================================================= + // Internal accessors + + /// Acquires a reference to the underlying handler for events. + pub fn get_ref(&self) -> &H { + &self.inner.handler + } + + /// Acquires a reference to the underlying handler for events. + pub fn get_mut(&mut self) -> &mut H { + &mut self.inner.handler + } + + // ========================================================================= + // Error options + + // TODO: error buffer and stderr + + /// Indicates whether this library will fail on HTTP response codes >= 400. + /// + /// This method is not fail-safe especially when authentication is involved. + /// + /// By default this option is `false` and corresponds to + /// `CURLOPT_FAILONERROR`. + pub fn fail_on_error(&mut self, fail: bool) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_FAILONERROR, fail as c_long) + } + + // ========================================================================= + // Network options + + /// Provides the URL which this handle will work with. + /// + /// The string provided must be URL-encoded with the format: + /// + /// ```text + /// scheme://host:port/path + /// ``` + /// + /// The syntax is not validated as part of this function and that is + /// deferred until later. + /// + /// By default this option is not set and `perform` will not work until it + /// is set. This option corresponds to `CURLOPT_URL`. + pub fn url(&mut self, url: &str) -> Result<(), Error> { + let url = try!(CString::new(url)); + self.setopt_str(curl_sys::CURLOPT_URL, &url) + } + + /// Configures the port number to connect to, instead of the one specified + /// in the URL or the default of the protocol. + pub fn port(&mut self, port: u16) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_PORT, port as c_long) + } + + // /// Indicates whether sequences of `/../` and `/./` will be squashed or not. + // /// + // /// By default this option is `false` and corresponds to + // /// `CURLOPT_PATH_AS_IS`. + // pub fn path_as_is(&mut self, as_is: bool) -> Result<(), Error> { + // } + + /// Provide the URL of a proxy to use. + /// + /// By default this option is not set and corresponds to `CURLOPT_PROXY`. + pub fn proxy(&mut self, url: &str) -> Result<(), Error> { + let url = try!(CString::new(url)); + self.setopt_str(curl_sys::CURLOPT_PROXY, &url) + } + + /// Provide port number the proxy is listening on. + /// + /// By default this option is not set (the default port for the proxy + /// protocol is used) and corresponds to `CURLOPT_PROXYPORT`. + pub fn proxy_port(&mut self, port: u16) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_PROXYPORT, port as c_long) + } + + /// Indicates the type of proxy being used. + /// + /// By default this option is `ProxyType::Http` and corresponds to + /// `CURLOPT_PROXYTYPE`. + pub fn proxy_type(&mut self, kind: ProxyType) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_PROXYTYPE, kind as c_long) + } + + /// Provide a list of hosts that should not be proxied to. + /// + /// This string is a comma-separated list of hosts which should not use the + /// proxy specified for connections. 
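+ // A sketch of the proxy options above; the host name and port are
+ // illustrative placeholders, and `easy` is assumed to be an `Easy2` handle as
+ // in the example above:
+ //
+ //     easy.proxy("http://proxy.example.com").unwrap();
+ //     easy.proxy_port(8080).unwrap();
+ //     easy.proxy_type(ProxyType::Http).unwrap();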
A single `*` character is also accepted + /// as a wildcard for all hosts. + /// + /// By default this option is not set and corresponds to + /// `CURLOPT_NOPROXY`. + pub fn noproxy(&mut self, skip: &str) -> Result<(), Error> { + let skip = try!(CString::new(skip)); + self.setopt_str(curl_sys::CURLOPT_NOPROXY, &skip) + } + + /// Inform curl whether it should tunnel all operations through the proxy. + /// + /// This essentially means that a `CONNECT` is sent to the proxy for all + /// outbound requests. + /// + /// By default this option is `false` and corresponds to + /// `CURLOPT_HTTPPROXYTUNNEL`. + pub fn http_proxy_tunnel(&mut self, tunnel: bool) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_HTTPPROXYTUNNEL, + tunnel as c_long) + } + + /// Tell curl which interface to bind to for an outgoing network interface. + /// + /// The interface name, IP address, or host name can be specified here. + /// + /// By default this option is not set and corresponds to + /// `CURLOPT_INTERFACE`. + pub fn interface(&mut self, interface: &str) -> Result<(), Error> { + let s = try!(CString::new(interface)); + self.setopt_str(curl_sys::CURLOPT_INTERFACE, &s) + } + + /// Indicate which port should be bound to locally for this connection. + /// + /// By default this option is 0 (any port) and corresponds to + /// `CURLOPT_LOCALPORT`. + pub fn set_local_port(&mut self, port: u16) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_LOCALPORT, port as c_long) + } + + /// Indicates the number of attempts libcurl will perform to find a working + /// port number. + /// + /// By default this option is 1 and corresponds to + /// `CURLOPT_LOCALPORTRANGE`. + pub fn local_port_range(&mut self, range: u16) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_LOCALPORTRANGE, + range as c_long) + } + + /// Sets the timeout of how long name resolves will be kept in memory. + /// + /// This is distinct from DNS TTL options and is entirely speculative. + /// + /// By default this option is 60s and corresponds to + /// `CURLOPT_DNS_CACHE_TIMEOUT`. + pub fn dns_cache_timeout(&mut self, dur: Duration) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_DNS_CACHE_TIMEOUT, + dur.as_secs() as c_long) + } + + /// Specify the preferred receive buffer size, in bytes. + /// + /// This is treated as a request, not an order, and the main point of this + /// is that the write callback may get called more often with smaller + /// chunks. + /// + /// By default this option is the maximum write size and corresopnds to + /// `CURLOPT_BUFFERSIZE`. + pub fn buffer_size(&mut self, size: usize) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_BUFFERSIZE, size as c_long) + } + + // /// Enable or disable TCP Fast Open + // /// + // /// By default this options defaults to `false` and corresponds to + // /// `CURLOPT_TCP_FASTOPEN` + // pub fn fast_open(&mut self, enable: bool) -> Result<(), Error> { + // } + + /// Configures whether the TCP_NODELAY option is set, or Nagle's algorithm + /// is disabled. + /// + /// The purpose of Nagle's algorithm is to minimize the number of small + /// packet's on the network, and disabling this may be less efficient in + /// some situations. + /// + /// By default this option is `false` and corresponds to + /// `CURLOPT_TCP_NODELAY`. + pub fn tcp_nodelay(&mut self, enable: bool) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_TCP_NODELAY, enable as c_long) + } + + /// Configures whether TCP keepalive probes will be sent. 
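+ // A sketch of the connection tuning options above; durations and sizes are
+ // illustrative, and `easy` is an already-constructed `Easy2` handle:
+ //
+ //     use std::time::Duration;
+ //
+ //     easy.tcp_nodelay(true).unwrap();
+ //     easy.dns_cache_timeout(Duration::from_secs(120)).unwrap();
+ //     easy.buffer_size(16 * 1024).unwrap();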
+ /// + /// The delay and frequency of these probes is controlled by `tcp_keepidle` + /// and `tcp_keepintvl`. + /// + /// By default this option is `false` and corresponds to + /// `CURLOPT_TCP_KEEPALIVE`. + pub fn tcp_keepalive(&mut self, enable: bool) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_TCP_KEEPALIVE, enable as c_long) + } + + /// Configures the TCP keepalive idle time wait. + /// + /// This is the delay, after which the connection is idle, keepalive probes + /// will be sent. Not all operating systems support this. + /// + /// By default this corresponds to `CURLOPT_TCP_KEEPIDLE`. + pub fn tcp_keepidle(&mut self, amt: Duration) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_TCP_KEEPIDLE, + amt.as_secs() as c_long) + } + + /// Configures the delay between keepalive probes. + /// + /// By default this corresponds to `CURLOPT_TCP_KEEPINTVL`. + pub fn tcp_keepintvl(&mut self, amt: Duration) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_TCP_KEEPINTVL, + amt.as_secs() as c_long) + } + + /// Configures the scope for local IPv6 addresses. + /// + /// Sets the scope_id value to use when connecting to IPv6 or link-local + /// addresses. + /// + /// By default this value is 0 and corresponds to `CURLOPT_ADDRESS_SCOPE` + pub fn address_scope(&mut self, scope: u32) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_ADDRESS_SCOPE, + scope as c_long) + } + + // ========================================================================= + // Names and passwords + + /// Configures the username to pass as authentication for this connection. + /// + /// By default this value is not set and corresponds to `CURLOPT_USERNAME`. + pub fn username(&mut self, user: &str) -> Result<(), Error> { + let user = try!(CString::new(user)); + self.setopt_str(curl_sys::CURLOPT_USERNAME, &user) + } + + /// Configures the password to pass as authentication for this connection. + /// + /// By default this value is not set and corresponds to `CURLOPT_PASSWORD`. + pub fn password(&mut self, pass: &str) -> Result<(), Error> { + let pass = try!(CString::new(pass)); + self.setopt_str(curl_sys::CURLOPT_PASSWORD, &pass) + } + + /// Set HTTP server authentication methods to try + /// + /// If more than one method is set, libcurl will first query the site to see + /// which authentication methods it supports and then pick the best one you + /// allow it to use. For some methods, this will induce an extra network + /// round-trip. Set the actual name and password with the `password` and + /// `username` methods. + /// + /// For authentication with a proxy, see `proxy_auth`. + /// + /// By default this value is basic and corresponds to `CURLOPT_HTTPAUTH`. + pub fn http_auth(&mut self, auth: &Auth) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_HTTPAUTH, auth.bits) + } + + /// Configures the proxy username to pass as authentication for this + /// connection. + /// + /// By default this value is not set and corresponds to + /// `CURLOPT_PROXYUSERNAME`. + pub fn proxy_username(&mut self, user: &str) -> Result<(), Error> { + let user = try!(CString::new(user)); + self.setopt_str(curl_sys::CURLOPT_PROXYUSERNAME, &user) + } + + /// Configures the proxy password to pass as authentication for this + /// connection. + /// + /// By default this value is not set and corresponds to + /// `CURLOPT_PROXYPASSWORD`. 
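+ // A sketch of the name/password options above on an existing `easy` handle.
+ // `Auth` is the builder declared earlier in this module; its `new` and
+ // `basic` methods are assumed from the rest of this file, and the credentials
+ // are placeholders:
+ //
+ //     easy.username("user").unwrap();
+ //     easy.password("secret").unwrap();
+ //     easy.http_auth(Auth::new().basic(true)).unwrap();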
+ pub fn proxy_password(&mut self, pass: &str) -> Result<(), Error> { + let pass = try!(CString::new(pass)); + self.setopt_str(curl_sys::CURLOPT_PROXYPASSWORD, &pass) + } + + /// Set HTTP proxy authentication methods to try + /// + /// If more than one method is set, libcurl will first query the site to see + /// which authentication methods it supports and then pick the best one you + /// allow it to use. For some methods, this will induce an extra network + /// round-trip. Set the actual name and password with the `proxy_password` + /// and `proxy_username` methods. + /// + /// By default this value is basic and corresponds to `CURLOPT_PROXYAUTH`. + pub fn proxy_auth(&mut self, auth: &Auth) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_PROXYAUTH, auth.bits) + } + + /// Enable .netrc parsing + /// + /// By default the .netrc file is ignored and corresponds to `CURL_NETRC_IGNORED`. + pub fn netrc(&mut self, netrc: NetRc) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_NETRC, netrc as c_long) + } + + // ========================================================================= + // HTTP Options + + /// Indicates whether the referer header is automatically updated + /// + /// By default this option is `false` and corresponds to + /// `CURLOPT_AUTOREFERER`. + pub fn autoreferer(&mut self, enable: bool) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_AUTOREFERER, enable as c_long) + } + + /// Enables automatic decompression of HTTP downloads. + /// + /// Sets the contents of the Accept-Encoding header sent in an HTTP request. + /// This enables decoding of a response with Content-Encoding. + /// + /// Currently supported encoding are `identity`, `zlib`, and `gzip`. A + /// zero-length string passed in will send all accepted encodings. + /// + /// By default this option is not set and corresponds to + /// `CURLOPT_ACCEPT_ENCODING`. + pub fn accept_encoding(&mut self, encoding: &str) -> Result<(), Error> { + let encoding = try!(CString::new(encoding)); + self.setopt_str(curl_sys::CURLOPT_ACCEPT_ENCODING, &encoding) + } + + /// Request the HTTP Transfer Encoding. + /// + /// By default this option is `false` and corresponds to + /// `CURLOPT_TRANSFER_ENCODING`. + pub fn transfer_encoding(&mut self, enable: bool) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_TRANSFER_ENCODING, enable as c_long) + } + + /// Follow HTTP 3xx redirects. + /// + /// Indicates whether any `Location` headers in the response should get + /// followed. + /// + /// By default this option is `false` and corresponds to + /// `CURLOPT_FOLLOWLOCATION`. + pub fn follow_location(&mut self, enable: bool) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_FOLLOWLOCATION, enable as c_long) + } + + /// Send credentials to hosts other than the first as well. + /// + /// Sends username/password credentials even when the host changes as part + /// of a redirect. + /// + /// By default this option is `false` and corresponds to + /// `CURLOPT_UNRESTRICTED_AUTH`. + pub fn unrestricted_auth(&mut self, enable: bool) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_UNRESTRICTED_AUTH, enable as c_long) + } + + /// Set the maximum number of redirects allowed. + /// + /// A value of 0 will refuse any redirect. + /// + /// By default this option is `-1` (unlimited) and corresponds to + /// `CURLOPT_MAXREDIRS`. 
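+ // A sketch of redirect handling using the options documented above (the
+ // limit is illustrative; `easy` is an existing `Easy2` handle):
+ //
+ //     easy.follow_location(true).unwrap();
+ //     easy.max_redirections(5).unwrap();
+ //     easy.autoreferer(true).unwrap();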
+ pub fn max_redirections(&mut self, max: u32) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_MAXREDIRS, max as c_long) + } + + // TODO: post_redirections + + /// Make an HTTP PUT request. + /// + /// By default this option is `false` and corresponds to `CURLOPT_PUT`. + pub fn put(&mut self, enable: bool) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_PUT, enable as c_long) + } + + /// Make an HTTP POST request. + /// + /// This will also make the library use the + /// `Content-Type: application/x-www-form-urlencoded` header. + /// + /// POST data can be specified through `post_fields` or by specifying a read + /// function. + /// + /// By default this option is `false` and corresponds to `CURLOPT_POST`. + pub fn post(&mut self, enable: bool) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_POST, enable as c_long) + } + + /// Configures the data that will be uploaded as part of a POST. + /// + /// Note that the data is copied into this handle and if that's not desired + /// then the read callbacks can be used instead. + /// + /// By default this option is not set and corresponds to + /// `CURLOPT_COPYPOSTFIELDS`. + pub fn post_fields_copy(&mut self, data: &[u8]) -> Result<(), Error> { + // Set the length before the pointer so libcurl knows how much to read + try!(self.post_field_size(data.len() as u64)); + self.setopt_ptr(curl_sys::CURLOPT_COPYPOSTFIELDS, + data.as_ptr() as *const _) + } + + /// Configures the size of data that's going to be uploaded as part of a + /// POST operation. + /// + /// This is called automaticsally as part of `post_fields` and should only + /// be called if data is being provided in a read callback (and even then + /// it's optional). + /// + /// By default this option is not set and corresponds to + /// `CURLOPT_POSTFIELDSIZE_LARGE`. + pub fn post_field_size(&mut self, size: u64) -> Result<(), Error> { + // Clear anything previous to ensure we don't read past a buffer + try!(self.setopt_ptr(curl_sys::CURLOPT_POSTFIELDS, 0 as *const _)); + self.setopt_off_t(curl_sys::CURLOPT_POSTFIELDSIZE_LARGE, + size as curl_sys::curl_off_t) + } + + /// Tells libcurl you want a multipart/formdata HTTP POST to be made and you + /// instruct what data to pass on to the server in the `form` argument. + /// + /// By default this option is set to null and corresponds to + /// `CURLOPT_HTTPPOST`. + pub fn httppost(&mut self, form: Form) -> Result<(), Error> { + try!(self.setopt_ptr(curl_sys::CURLOPT_HTTPPOST, + form::raw(&form) as *const _)); + self.inner.form = Some(form); + Ok(()) + } + + /// Sets the HTTP referer header + /// + /// By default this option is not set and corresponds to `CURLOPT_REFERER`. + pub fn referer(&mut self, referer: &str) -> Result<(), Error> { + let referer = try!(CString::new(referer)); + self.setopt_str(curl_sys::CURLOPT_REFERER, &referer) + } + + /// Sets the HTTP user-agent header + /// + /// By default this option is not set and corresponds to + /// `CURLOPT_USERAGENT`. + pub fn useragent(&mut self, useragent: &str) -> Result<(), Error> { + let useragent = try!(CString::new(useragent)); + self.setopt_str(curl_sys::CURLOPT_USERAGENT, &useragent) + } + + /// Add some headers to this HTTP request. + /// + /// If you add a header that is otherwise used internally, the value here + /// takes precedence. If a header is added with no content (like `Accept:`) + /// the internally the header will get disabled. To add a header with no + /// content, use the form `MyHeader;` (not the trailing semicolon). 
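+ // A sketch of a simple form-encoded POST using the options above; the URL,
+ // body, and user-agent string are illustrative:
+ //
+ //     easy.url("https://www.rust-lang.org/").unwrap();
+ //     easy.post(true).unwrap();
+ //     easy.post_fields_copy(b"field=value&other=1").unwrap();
+ //     easy.useragent("my-agent/0.1").unwrap();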
+ /// + /// Headers must not be CRLF terminated. Many replaced headers have common + /// shortcuts which should be prefered. + /// + /// By default this option is not set and corresponds to + /// `CURLOPT_HTTPHEADER` + /// + /// # Examples + /// + /// ``` + /// use curl::easy::{Easy, List}; + /// + /// let mut list = List::new(); + /// list.append("Foo: bar").unwrap(); + /// list.append("Bar: baz").unwrap(); + /// + /// let mut handle = Easy::new(); + /// handle.url("https://www.rust-lang.org/").unwrap(); + /// handle.http_headers(list).unwrap(); + /// handle.perform().unwrap(); + /// ``` + pub fn http_headers(&mut self, list: List) -> Result<(), Error> { + let ptr = list::raw(&list); + self.inner.header_list = Some(list); + self.setopt_ptr(curl_sys::CURLOPT_HTTPHEADER, ptr as *const _) + } + + // /// Add some headers to send to the HTTP proxy. + // /// + // /// This function is essentially the same as `http_headers`. + // /// + // /// By default this option is not set and corresponds to + // /// `CURLOPT_PROXYHEADER` + // pub fn proxy_headers(&mut self, list: &'a List) -> Result<(), Error> { + // self.setopt_ptr(curl_sys::CURLOPT_PROXYHEADER, list.raw as *const _) + // } + + /// Set the contents of the HTTP Cookie header. + /// + /// Pass a string of the form `name=contents` for one cookie value or + /// `name1=val1; name2=val2` for multiple values. + /// + /// Using this option multiple times will only make the latest string + /// override the previous ones. This option will not enable the cookie + /// engine, use `cookie_file` or `cookie_jar` to do that. + /// + /// By default this option is not set and corresponds to `CURLOPT_COOKIE`. + pub fn cookie(&mut self, cookie: &str) -> Result<(), Error> { + let cookie = try!(CString::new(cookie)); + self.setopt_str(curl_sys::CURLOPT_COOKIE, &cookie) + } + + /// Set the file name to read cookies from. + /// + /// The cookie data can be in either the old Netscape / Mozilla cookie data + /// format or just regular HTTP headers (Set-Cookie style) dumped to a file. + /// + /// This also enables the cookie engine, making libcurl parse and send + /// cookies on subsequent requests with this handle. + /// + /// Given an empty or non-existing file or by passing the empty string ("") + /// to this option, you can enable the cookie engine without reading any + /// initial cookies. + /// + /// If you use this option multiple times, you just add more files to read. + /// Subsequent files will add more cookies. + /// + /// By default this option is not set and corresponds to + /// `CURLOPT_COOKIEFILE`. + pub fn cookie_file>(&mut self, file: P) -> Result<(), Error> { + self.setopt_path(curl_sys::CURLOPT_COOKIEFILE, file.as_ref()) + } + + /// Set the file name to store cookies to. + /// + /// This will make libcurl write all internally known cookies to the file + /// when this handle is dropped. If no cookies are known, no file will be + /// created. Specify "-" as filename to instead have the cookies written to + /// stdout. Using this option also enables cookies for this session, so if + /// you for example follow a location it will make matching cookies get sent + /// accordingly. + /// + /// Note that libcurl doesn't read any cookies from the cookie jar. If you + /// want to read cookies from a file, use `cookie_file`. + /// + /// By default this option is not set and corresponds to + /// `CURLOPT_COOKIEJAR`. 
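+ // A sketch of the cookie options above: an empty `cookie_file` enables the
+ // cookie engine without loading anything, and a `cookie_jar` persists cookies
+ // when the handle is dropped. File names and cookie values are illustrative:
+ //
+ //     easy.cookie_file("").unwrap();
+ //     easy.cookie_jar("cookies.txt").unwrap();
+ //     easy.cookie("lang=en; theme=dark").unwrap();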
+ pub fn cookie_jar>(&mut self, file: P) -> Result<(), Error> { + self.setopt_path(curl_sys::CURLOPT_COOKIEJAR, file.as_ref()) + } + + /// Start a new cookie session + /// + /// Marks this as a new cookie "session". It will force libcurl to ignore + /// all cookies it is about to load that are "session cookies" from the + /// previous session. By default, libcurl always stores and loads all + /// cookies, independent if they are session cookies or not. Session cookies + /// are cookies without expiry date and they are meant to be alive and + /// existing for this "session" only. + /// + /// By default this option is `false` and corresponds to + /// `CURLOPT_COOKIESESSION`. + pub fn cookie_session(&mut self, session: bool) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_COOKIESESSION, session as c_long) + } + + /// Add to or manipulate cookies held in memory. + /// + /// Such a cookie can be either a single line in Netscape / Mozilla format + /// or just regular HTTP-style header (Set-Cookie: ...) format. This will + /// also enable the cookie engine. This adds that single cookie to the + /// internal cookie store. + /// + /// Exercise caution if you are using this option and multiple transfers may + /// occur. If you use the Set-Cookie format and don't specify a domain then + /// the cookie is sent for any domain (even after redirects are followed) + /// and cannot be modified by a server-set cookie. If a server sets a cookie + /// of the same name (or maybe you've imported one) then both will be sent + /// on a future transfer to that server, likely not what you intended. + /// address these issues set a domain in Set-Cookie or use the Netscape + /// format. + /// + /// Additionally, there are commands available that perform actions if you + /// pass in these exact strings: + /// + /// * "ALL" - erases all cookies held in memory + /// * "SESS" - erases all session cookies held in memory + /// * "FLUSH" - write all known cookies to the specified cookie jar + /// * "RELOAD" - reread all cookies from the cookie file + /// + /// By default this options corresponds to `CURLOPT_COOKIELIST` + pub fn cookie_list(&mut self, cookie: &str) -> Result<(), Error> { + let cookie = try!(CString::new(cookie)); + self.setopt_str(curl_sys::CURLOPT_COOKIELIST, &cookie) + } + + /// Ask for a HTTP GET request. + /// + /// By default this option is `false` and corresponds to `CURLOPT_HTTPGET`. + pub fn get(&mut self, enable: bool) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_HTTPGET, enable as c_long) + } + + // /// Ask for a HTTP GET request. + // /// + // /// By default this option is `false` and corresponds to `CURLOPT_HTTPGET`. + // pub fn http_version(&mut self, vers: &str) -> Result<(), Error> { + // self.setopt_long(curl_sys::CURLOPT_HTTPGET, enable as c_long) + // } + + /// Ignore the content-length header. + /// + /// By default this option is `false` and corresponds to + /// `CURLOPT_IGNORE_CONTENT_LENGTH`. + pub fn ignore_content_length(&mut self, ignore: bool) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_IGNORE_CONTENT_LENGTH, + ignore as c_long) + } + + /// Enable or disable HTTP content decoding. + /// + /// By default this option is `true` and corresponds to + /// `CURLOPT_HTTP_CONTENT_DECODING`. + pub fn http_content_decoding(&mut self, enable: bool) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_HTTP_CONTENT_DECODING, + enable as c_long) + } + + /// Enable or disable HTTP transfer decoding. 
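+ // A sketch of the special `cookie_list` commands described above, on an
+ // existing `easy` handle:
+ //
+ //     easy.cookie_list("FLUSH").unwrap(); // write known cookies to the cookie jar
+ //     easy.cookie_list("ALL").unwrap();   // then erase the in-memory cookie store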
+ /// + /// By default this option is `true` and corresponds to + /// `CURLOPT_HTTP_TRANSFER_DECODING`. + pub fn http_transfer_decoding(&mut self, enable: bool) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_HTTP_TRANSFER_DECODING, + enable as c_long) + } + + // /// Timeout for the Expect: 100-continue response + // /// + // /// By default this option is 1s and corresponds to + // /// `CURLOPT_EXPECT_100_TIMEOUT_MS`. + // pub fn expect_100_timeout(&mut self, enable: bool) -> Result<(), Error> { + // self.setopt_long(curl_sys::CURLOPT_HTTP_TRANSFER_DECODING, + // enable as c_long) + // } + + // /// Wait for pipelining/multiplexing. + // /// + // /// Tells libcurl to prefer to wait for a connection to confirm or deny that + // /// it can do pipelining or multiplexing before continuing. + // /// + // /// When about to perform a new transfer that allows pipelining or + // /// multiplexing, libcurl will check for existing connections to re-use and + // /// pipeline on. If no such connection exists it will immediately continue + // /// and create a fresh new connection to use. + // /// + // /// By setting this option to `true` - having `pipeline` enabled for the + // /// multi handle this transfer is associated with - libcurl will instead + // /// wait for the connection to reveal if it is possible to + // /// pipeline/multiplex on before it continues. This enables libcurl to much + // /// better keep the number of connections to a minimum when using pipelining + // /// or multiplexing protocols. + // /// + // /// The effect thus becomes that with this option set, libcurl prefers to + // /// wait and re-use an existing connection for pipelining rather than the + // /// opposite: prefer to open a new connection rather than waiting. + // /// + // /// The waiting time is as long as it takes for the connection to get up and + // /// for libcurl to get the necessary response back that informs it about its + // /// protocol and support level. + // pub fn http_pipewait(&mut self, enable: bool) -> Result<(), Error> { + // } + + + // ========================================================================= + // Protocol Options + + /// Indicates the range that this request should retrieve. + /// + /// The string provided should be of the form `N-M` where either `N` or `M` + /// can be left out. For HTTP transfers multiple ranges separated by commas + /// are also accepted. + /// + /// By default this option is not set and corresponds to `CURLOPT_RANGE`. + pub fn range(&mut self, range: &str) -> Result<(), Error> { + let range = try!(CString::new(range)); + self.setopt_str(curl_sys::CURLOPT_RANGE, &range) + } + + /// Set a point to resume transfer from + /// + /// Specify the offset in bytes you want the transfer to start from. + /// + /// By default this option is 0 and corresponds to + /// `CURLOPT_RESUME_FROM_LARGE`. + pub fn resume_from(&mut self, from: u64) -> Result<(), Error> { + self.setopt_off_t(curl_sys::CURLOPT_RESUME_FROM_LARGE, + from as curl_sys::curl_off_t) + } + + /// Set a custom request string + /// + /// Specifies that a custom request will be made (e.g. a custom HTTP + /// method). This does not change how libcurl performs internally, just + /// changes the string sent to the server. + /// + /// By default this option is not set and corresponds to + /// `CURLOPT_CUSTOMREQUEST`. 
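+ // A sketch of partial transfers using the options above; the offsets are
+ // illustrative:
+ //
+ //     easy.range("0-1023").unwrap();   // fetch only the first KiB
+ //     // or, to continue an interrupted download:
+ //     easy.resume_from(4096).unwrap();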
+ pub fn custom_request(&mut self, request: &str) -> Result<(), Error> { + let request = try!(CString::new(request)); + self.setopt_str(curl_sys::CURLOPT_CUSTOMREQUEST, &request) + } + + /// Get the modification time of the remote resource + /// + /// If true, libcurl will attempt to get the modification time of the + /// remote document in this operation. This requires that the remote server + /// sends the time or replies to a time querying command. The `filetime` + /// function can be used after a transfer to extract the received time (if + /// any). + /// + /// By default this option is `false` and corresponds to `CURLOPT_FILETIME` + pub fn fetch_filetime(&mut self, fetch: bool) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_FILETIME, fetch as c_long) + } + + /// Indicate whether to download the request without getting the body + /// + /// This is useful, for example, for doing a HEAD request. + /// + /// By default this option is `false` and corresponds to `CURLOPT_NOBODY`. + pub fn nobody(&mut self, enable: bool) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_NOBODY, enable as c_long) + } + + /// Set the size of the input file to send off. + /// + /// By default this option is not set and corresponds to + /// `CURLOPT_INFILESIZE_LARGE`. + pub fn in_filesize(&mut self, size: u64) -> Result<(), Error> { + self.setopt_off_t(curl_sys::CURLOPT_INFILESIZE_LARGE, + size as curl_sys::curl_off_t) + } + + /// Enable or disable data upload. + /// + /// This means that a PUT request will be made for HTTP and probably wants + /// to be combined with the read callback as well as the `in_filesize` + /// method. + /// + /// By default this option is `false` and corresponds to `CURLOPT_UPLOAD`. + pub fn upload(&mut self, enable: bool) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_UPLOAD, enable as c_long) + } + + /// Configure the maximum file size to download. + /// + /// By default this option is not set and corresponds to + /// `CURLOPT_MAXFILESIZE_LARGE`. + pub fn max_filesize(&mut self, size: u64) -> Result<(), Error> { + self.setopt_off_t(curl_sys::CURLOPT_MAXFILESIZE_LARGE, + size as curl_sys::curl_off_t) + } + + /// Selects a condition for a time request. + /// + /// This value indicates how the `time_value` option is interpreted. + /// + /// By default this option is not set and corresponds to + /// `CURLOPT_TIMECONDITION`. + pub fn time_condition(&mut self, cond: TimeCondition) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_TIMECONDITION, cond as c_long) + } + + /// Sets the time value for a conditional request. + /// + /// The value here should be the number of seconds elapsed since January 1, + /// 1970. To pass how to interpret this value, use `time_condition`. + /// + /// By default this option is not set and corresponds to + /// `CURLOPT_TIMEVALUE`. + pub fn time_value(&mut self, val: i64) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_TIMEVALUE, val as c_long) + } + + // ========================================================================= + // Connection Options + + /// Set maximum time the request is allowed to take. + /// + /// Normally, name lookups can take a considerable time and limiting + /// operations to less than a few minutes risk aborting perfectly normal + /// operations. + /// + /// If libcurl is built to use the standard system name resolver, that + /// portion of the transfer will still use full-second resolution for + /// timeouts with a minimum timeout allowed of one second. 
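+ // A sketch of a conditional, headers-only request built from the options
+ // above; the timestamp is an illustrative value in seconds since the Unix
+ // epoch:
+ //
+ //     easy.time_condition(TimeCondition::IfModifiedSince).unwrap();
+ //     easy.time_value(1_500_000_000).unwrap();
+ //     easy.nobody(true).unwrap(); // HEAD-style request, no body downloaded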
+ /// + /// In unix-like systems, this might cause signals to be used unless + /// `nosignal` is set. + /// + /// Since this puts a hard limit for how long a request is allowed to + /// take, it has limited use in dynamic use cases with varying transfer + /// times. You are then advised to explore `low_speed_limit`, + /// `low_speed_time` or using `progress_function` to implement your own + /// timeout logic. + /// + /// By default this option is not set and corresponds to + /// `CURLOPT_TIMEOUT_MS`. + pub fn timeout(&mut self, timeout: Duration) -> Result<(), Error> { + // TODO: checked arithmetic and casts + // TODO: use CURLOPT_TIMEOUT if the timeout is too great + let ms = timeout.as_secs() * 1000 + + (timeout.subsec_nanos() / 1_000_000) as u64; + self.setopt_long(curl_sys::CURLOPT_TIMEOUT_MS, ms as c_long) + + } + + /// Set the low speed limit in bytes per second. + /// + /// This specifies the average transfer speed in bytes per second that the + /// transfer should be below during `low_speed_time` for libcurl to consider + /// it to be too slow and abort. + /// + /// By default this option is not set and corresponds to + /// `CURLOPT_LOW_SPEED_LIMIT`. + pub fn low_speed_limit(&mut self, limit: u32) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_LOW_SPEED_LIMIT, limit as c_long) + } + + /// Set the low speed time period. + /// + /// Specifies the window of time for which if the transfer rate is below + /// `low_speed_limit` the request will be aborted. + /// + /// By default this option is not set and corresponds to + /// `CURLOPT_LOW_SPEED_TIME`. + pub fn low_speed_time(&mut self, dur: Duration) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_LOW_SPEED_TIME, + dur.as_secs() as c_long) + } + + /// Rate limit data upload speed + /// + /// If an upload exceeds this speed (counted in bytes per second) on + /// cumulative average during the transfer, the transfer will pause to keep + /// the average rate less than or equal to the parameter value. + /// + /// By default this option is not set (unlimited speed) and corresponds to + /// `CURLOPT_MAX_SEND_SPEED_LARGE`. + pub fn max_send_speed(&mut self, speed: u64) -> Result<(), Error> { + self.setopt_off_t(curl_sys::CURLOPT_MAX_SEND_SPEED_LARGE, + speed as curl_sys::curl_off_t) + } + + /// Rate limit data download speed + /// + /// If a download exceeds this speed (counted in bytes per second) on + /// cumulative average during the transfer, the transfer will pause to keep + /// the average rate less than or equal to the parameter value. + /// + /// By default this option is not set (unlimited speed) and corresponds to + /// `CURLOPT_MAX_RECV_SPEED_LARGE`. + pub fn max_recv_speed(&mut self, speed: u64) -> Result<(), Error> { + self.setopt_off_t(curl_sys::CURLOPT_MAX_RECV_SPEED_LARGE, + speed as curl_sys::curl_off_t) + } + + /// Set the maximum connection cache size. + /// + /// The set amount will be the maximum number of simultaneously open + /// persistent connections that libcurl may cache in the pool associated + /// with this handle. The default is 5, and there isn't much point in + /// changing this value unless you are perfectly aware of how this works and + /// changes libcurl's behaviour. This concerns connections using any of the + /// protocols that support persistent connections. + /// + /// When reaching the maximum limit, curl closes the oldest one in the cache + /// to prevent increasing the number of open connections. 
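+ // A sketch of the timeout and transfer-speed options above; all values are
+ // illustrative:
+ //
+ //     use std::time::Duration;
+ //
+ //     easy.timeout(Duration::from_secs(30)).unwrap();
+ //     easy.low_speed_limit(1_000).unwrap();                  // bytes per second
+ //     easy.low_speed_time(Duration::from_secs(10)).unwrap();
+ //     easy.max_recv_speed(1_000_000).unwrap();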
+ /// + /// By default this option is set to 5 and corresponds to + /// `CURLOPT_MAXCONNECTS` + pub fn max_connects(&mut self, max: u32) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_MAXCONNECTS, max as c_long) + } + + /// Force a new connection to be used. + /// + /// Makes the next transfer use a new (fresh) connection by force instead of + /// trying to re-use an existing one. This option should be used with + /// caution and only if you understand what it does as it may seriously + /// impact performance. + /// + /// By default this option is `false` and corresponds to + /// `CURLOPT_FRESH_CONNECT`. + pub fn fresh_connect(&mut self, enable: bool) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_FRESH_CONNECT, enable as c_long) + } + + /// Make connection get closed at once after use. + /// + /// Makes libcurl explicitly close the connection when done with the + /// transfer. Normally, libcurl keeps all connections alive when done with + /// one transfer in case a succeeding one follows that can re-use them. + /// This option should be used with caution and only if you understand what + /// it does as it can seriously impact performance. + /// + /// By default this option is `false` and corresponds to + /// `CURLOPT_FORBID_REUSE`. + pub fn forbid_reuse(&mut self, enable: bool) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_FORBID_REUSE, enable as c_long) + } + + /// Timeout for the connect phase + /// + /// This is the maximum time that you allow the connection phase to the + /// server to take. This only limits the connection phase, it has no impact + /// once it has connected. + /// + /// By default this value is 300 seconds and corresponds to + /// `CURLOPT_CONNECTTIMEOUT_MS`. + pub fn connect_timeout(&mut self, timeout: Duration) -> Result<(), Error> { + let ms = timeout.as_secs() * 1000 + + (timeout.subsec_nanos() / 1_000_000) as u64; + self.setopt_long(curl_sys::CURLOPT_CONNECTTIMEOUT_MS, ms as c_long) + } + + /// Specify which IP protocol version to use + /// + /// Allows an application to select what kind of IP addresses to use when + /// resolving host names. This is only interesting when using host names + /// that resolve addresses using more than one version of IP. + /// + /// By default this value is "any" and corresponds to `CURLOPT_IPRESOLVE`. + pub fn ip_resolve(&mut self, resolve: IpResolve) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_IPRESOLVE, resolve as c_long) + } + + /// Specify custom host name to IP address resolves. + /// + /// Allows specifying hostname to IP mappins to use before trying the + /// system resolver. + /// + /// # Examples + /// ``` + /// use curl::easy::{Easy, List}; + /// + /// let mut list = List::new(); + /// list.append("www.rust-lang.org:443:185.199.108.153").unwrap(); + /// + /// let mut handle = Easy::new(); + /// handle.url("https://www.rust-lang.org/").unwrap(); + /// handle.resolve(list).unwrap(); + /// handle.perform().unwrap(); + /// ``` + pub fn resolve(&mut self, list: List) -> Result<(), Error> { + let ptr = list::raw(&list); + self.inner.resolve_list = Some(list); + self.setopt_ptr(curl_sys::CURLOPT_RESOLVE, ptr as *const _) + } + + + /// Configure whether to stop when connected to target server + /// + /// When enabled it tells the library to perform all the required proxy + /// authentication and connection setup, but no data transfer, and then + /// return. + /// + /// The option can be used to simply test a connection to a server. 
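+ // A sketch of the connection management options above; the values are
+ // illustrative:
+ //
+ //     use std::time::Duration;
+ //
+ //     easy.connect_timeout(Duration::from_secs(5)).unwrap();
+ //     easy.ip_resolve(IpResolve::V4).unwrap();
+ //     easy.max_connects(10).unwrap();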
+ /// + /// By default this value is `false` and corresponds to + /// `CURLOPT_CONNECT_ONLY`. + pub fn connect_only(&mut self, enable: bool) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_CONNECT_ONLY, enable as c_long) + } + + // /// Set interface to speak DNS over. + // /// + // /// Set the name of the network interface that the DNS resolver should bind + // /// to. This must be an interface name (not an address). + // /// + // /// By default this option is not set and corresponds to + // /// `CURLOPT_DNS_INTERFACE`. + // pub fn dns_interface(&mut self, interface: &str) -> Result<(), Error> { + // let interface = try!(CString::new(interface)); + // self.setopt_str(curl_sys::CURLOPT_DNS_INTERFACE, &interface) + // } + // + // /// IPv4 address to bind DNS resolves to + // /// + // /// Set the local IPv4 address that the resolver should bind to. The + // /// argument should be of type char * and contain a single numerical IPv4 + // /// address as a string. + // /// + // /// By default this option is not set and corresponds to + // /// `CURLOPT_DNS_LOCAL_IP4`. + // pub fn dns_local_ip4(&mut self, ip: &str) -> Result<(), Error> { + // let ip = try!(CString::new(ip)); + // self.setopt_str(curl_sys::CURLOPT_DNS_LOCAL_IP4, &ip) + // } + // + // /// IPv6 address to bind DNS resolves to + // /// + // /// Set the local IPv6 address that the resolver should bind to. The + // /// argument should be of type char * and contain a single numerical IPv6 + // /// address as a string. + // /// + // /// By default this option is not set and corresponds to + // /// `CURLOPT_DNS_LOCAL_IP6`. + // pub fn dns_local_ip6(&mut self, ip: &str) -> Result<(), Error> { + // let ip = try!(CString::new(ip)); + // self.setopt_str(curl_sys::CURLOPT_DNS_LOCAL_IP6, &ip) + // } + // + // /// Set preferred DNS servers. + // /// + // /// Provides a list of DNS servers to be used instead of the system default. + // /// The format of the dns servers option is: + // /// + // /// ```text + // /// host[:port],[host[:port]]... + // /// ``` + // /// + // /// By default this option is not set and corresponds to + // /// `CURLOPT_DNS_SERVERS`. + // pub fn dns_servers(&mut self, servers: &str) -> Result<(), Error> { + // let servers = try!(CString::new(servers)); + // self.setopt_str(curl_sys::CURLOPT_DNS_SERVERS, &servers) + // } + + // ========================================================================= + // SSL/Security Options + + /// Sets the SSL client certificate. + /// + /// The string should be the file name of your client certificate. The + /// default format is "P12" on Secure Transport and "PEM" on other engines, + /// and can be changed with `ssl_cert_type`. + /// + /// With NSS or Secure Transport, this can also be the nickname of the + /// certificate you wish to authenticate with as it is named in the security + /// database. If you want to use a file from the current directory, please + /// precede it with "./" prefix, in order to avoid confusion with a + /// nickname. + /// + /// When using a client certificate, you most likely also need to provide a + /// private key with `ssl_key`. + /// + /// By default this option is not set and corresponds to `CURLOPT_SSLCERT`. + pub fn ssl_cert>(&mut self, cert: P) -> Result<(), Error> { + self.setopt_path(curl_sys::CURLOPT_SSLCERT, cert.as_ref()) + } + + /// Specify type of the client SSL certificate. + /// + /// The string should be the format of your certificate. Supported formats + /// are "PEM" and "DER", except with Secure Transport. 
OpenSSL (versions + /// 0.9.3 and later) and Secure Transport (on iOS 5 or later, or OS X 10.7 + /// or later) also support "P12" for PKCS#12-encoded files. + /// + /// By default this option is "PEM" and corresponds to + /// `CURLOPT_SSLCERTTYPE`. + pub fn ssl_cert_type(&mut self, kind: &str) -> Result<(), Error> { + let kind = try!(CString::new(kind)); + self.setopt_str(curl_sys::CURLOPT_SSLCERTTYPE, &kind) + } + + /// Specify private keyfile for TLS and SSL client cert. + /// + /// The string should be the file name of your private key. The default + /// format is "PEM" and can be changed with `ssl_key_type`. + /// + /// (iOS and Mac OS X only) This option is ignored if curl was built against + /// Secure Transport. Secure Transport expects the private key to be already + /// present in the keychain or PKCS#12 file containing the certificate. + /// + /// By default this option is not set and corresponds to `CURLOPT_SSLKEY`. + pub fn ssl_key>(&mut self, key: P) -> Result<(), Error> { + self.setopt_path(curl_sys::CURLOPT_SSLKEY, key.as_ref()) + } + + /// Set type of the private key file. + /// + /// The string should be the format of your private key. Supported formats + /// are "PEM", "DER" and "ENG". + /// + /// The format "ENG" enables you to load the private key from a crypto + /// engine. In this case `ssl_key` is used as an identifier passed to + /// the engine. You have to set the crypto engine with `ssl_engine`. + /// "DER" format key file currently does not work because of a bug in + /// OpenSSL. + /// + /// By default this option is "PEM" and corresponds to + /// `CURLOPT_SSLKEYTYPE`. + pub fn ssl_key_type(&mut self, kind: &str) -> Result<(), Error> { + let kind = try!(CString::new(kind)); + self.setopt_str(curl_sys::CURLOPT_SSLKEYTYPE, &kind) + } + + /// Set passphrase to private key. + /// + /// This will be used as the password required to use the `ssl_key`. + /// You never needed a pass phrase to load a certificate but you need one to + /// load your private key. + /// + /// By default this option is not set and corresponds to + /// `CURLOPT_KEYPASSWD`. + pub fn key_password(&mut self, password: &str) -> Result<(), Error> { + let password = try!(CString::new(password)); + self.setopt_str(curl_sys::CURLOPT_KEYPASSWD, &password) + } + + /// Set the SSL engine identifier. + /// + /// This will be used as the identifier for the crypto engine you want to + /// use for your private key. + /// + /// By default this option is not set and corresponds to + /// `CURLOPT_SSLENGINE`. + pub fn ssl_engine(&mut self, engine: &str) -> Result<(), Error> { + let engine = try!(CString::new(engine)); + self.setopt_str(curl_sys::CURLOPT_SSLENGINE, &engine) + } + + /// Make this handle's SSL engine the default. + /// + /// By default this option is not set and corresponds to + /// `CURLOPT_SSLENGINE_DEFAULT`. + pub fn ssl_engine_default(&mut self, enable: bool) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_SSLENGINE_DEFAULT, enable as c_long) + } + + // /// Enable TLS false start. + // /// + // /// This option determines whether libcurl should use false start during the + // /// TLS handshake. False start is a mode where a TLS client will start + // /// sending application data before verifying the server's Finished message, + // /// thus saving a round trip when performing a full handshake. + // /// + // /// By default this option is not set and corresponds to + // /// `CURLOPT_SSL_FALSESTARTE`. 
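+ // A sketch of client certificate configuration using the options above; the
+ // file names and passphrase are illustrative placeholders:
+ //
+ //     easy.ssl_cert("client.pem").unwrap();
+ //     easy.ssl_cert_type("PEM").unwrap();
+ //     easy.ssl_key("client.key").unwrap();
+ //     easy.key_password("secret").unwrap();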
+ // pub fn ssl_false_start(&mut self, enable: bool) -> Result<(), Error> { + // self.setopt_long(curl_sys::CURLOPT_SSLENGINE_DEFAULT, enable as c_long) + // } + + /// Set preferred HTTP version. + /// + /// By default this option is not set and corresponds to + /// `CURLOPT_HTTP_VERSION`. + pub fn http_version(&mut self, version: HttpVersion) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_HTTP_VERSION, version as c_long) + } + + /// Set preferred TLS/SSL version. + /// + /// By default this option is not set and corresponds to + /// `CURLOPT_SSLVERSION`. + pub fn ssl_version(&mut self, version: SslVersion) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_SSLVERSION, version as c_long) + } + + /// Verify the certificate's name against host. + /// + /// This should be disabled with great caution! It basically disables the + /// security features of SSL if it is disabled. + /// + /// By default this option is set to `true` and corresponds to + /// `CURLOPT_SSL_VERIFYHOST`. + pub fn ssl_verify_host(&mut self, verify: bool) -> Result<(), Error> { + let val = if verify {2} else {0}; + self.setopt_long(curl_sys::CURLOPT_SSL_VERIFYHOST, val) + } + + /// Verify the peer's SSL certificate. + /// + /// This should be disabled with great caution! It basically disables the + /// security features of SSL if it is disabled. + /// + /// By default this option is set to `true` and corresponds to + /// `CURLOPT_SSL_VERIFYPEER`. + pub fn ssl_verify_peer(&mut self, verify: bool) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_SSL_VERIFYPEER, verify as c_long) + } + + // /// Verify the certificate's status. + // /// + // /// This option determines whether libcurl verifies the status of the server + // /// cert using the "Certificate Status Request" TLS extension (aka. OCSP + // /// stapling). + // /// + // /// By default this option is set to `false` and corresponds to + // /// `CURLOPT_SSL_VERIFYSTATUS`. + // pub fn ssl_verify_status(&mut self, verify: bool) -> Result<(), Error> { + // self.setopt_long(curl_sys::CURLOPT_SSL_VERIFYSTATUS, verify as c_long) + // } + + /// Specify the path to Certificate Authority (CA) bundle + /// + /// The file referenced should hold one or more certificates to verify the + /// peer with. + /// + /// This option is by default set to the system path where libcurl's cacert + /// bundle is assumed to be stored, as established at build time. + /// + /// If curl is built against the NSS SSL library, the NSS PEM PKCS#11 module + /// (libnsspem.so) needs to be available for this option to work properly. + /// + /// By default this option is the system defaults, and corresponds to + /// `CURLOPT_CAINFO`. + pub fn cainfo>(&mut self, path: P) -> Result<(), Error> { + self.setopt_path(curl_sys::CURLOPT_CAINFO, path.as_ref()) + } + + /// Set the issuer SSL certificate filename + /// + /// Specifies a file holding a CA certificate in PEM format. If the option + /// is set, an additional check against the peer certificate is performed to + /// verify the issuer is indeed the one associated with the certificate + /// provided by the option. This additional check is useful in multi-level + /// PKI where one needs to enforce that the peer certificate is from a + /// specific branch of the tree. + /// + /// This option makes sense only when used in combination with the + /// `ssl_verify_peer` option. Otherwise, the result of the check is not + /// considered as failure. 
+ /// + /// By default this option is not set and corresponds to + /// `CURLOPT_ISSUERCERT`. + pub fn issuer_cert>(&mut self, path: P) -> Result<(), Error> { + self.setopt_path(curl_sys::CURLOPT_ISSUERCERT, path.as_ref()) + } + + /// Specify directory holding CA certificates + /// + /// Names a directory holding multiple CA certificates to verify the peer + /// with. If libcurl is built against OpenSSL, the certificate directory + /// must be prepared using the openssl c_rehash utility. This makes sense + /// only when used in combination with the `ssl_verify_peer` option. + /// + /// By default this option is not set and corresponds to `CURLOPT_CAPATH`. + pub fn capath>(&mut self, path: P) -> Result<(), Error> { + self.setopt_path(curl_sys::CURLOPT_CAPATH, path.as_ref()) + } + + /// Specify a Certificate Revocation List file + /// + /// Names a file with the concatenation of CRL (in PEM format) to use in the + /// certificate validation that occurs during the SSL exchange. + /// + /// When curl is built to use NSS or GnuTLS, there is no way to influence + /// the use of CRL passed to help in the verification process. When libcurl + /// is built with OpenSSL support, X509_V_FLAG_CRL_CHECK and + /// X509_V_FLAG_CRL_CHECK_ALL are both set, requiring CRL check against all + /// the elements of the certificate chain if a CRL file is passed. + /// + /// This option makes sense only when used in combination with the + /// `ssl_verify_peer` option. + /// + /// A specific error code (`is_ssl_crl_badfile`) is defined with the + /// option. It is returned when the SSL exchange fails because the CRL file + /// cannot be loaded. A failure in certificate verification due to a + /// revocation information found in the CRL does not trigger this specific + /// error. + /// + /// By default this option is not set and corresponds to `CURLOPT_CRLFILE`. + pub fn crlfile>(&mut self, path: P) -> Result<(), Error> { + self.setopt_path(curl_sys::CURLOPT_CRLFILE, path.as_ref()) + } + + /// Request SSL certificate information + /// + /// Enable libcurl's certificate chain info gatherer. With this enabled, + /// libcurl will extract lots of information and data about the certificates + /// in the certificate chain used in the SSL connection. + /// + /// By default this option is `false` and corresponds to + /// `CURLOPT_CERTINFO`. + pub fn certinfo(&mut self, enable: bool) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_CERTINFO, enable as c_long) + } + + // /// Set pinned public key. + // /// + // /// Pass a pointer to a zero terminated string as parameter. The string can + // /// be the file name of your pinned public key. The file format expected is + // /// "PEM" or "DER". The string can also be any number of base64 encoded + // /// sha256 hashes preceded by "sha256//" and separated by ";" + // /// + // /// When negotiating a TLS or SSL connection, the server sends a certificate + // /// indicating its identity. A public key is extracted from this certificate + // /// and if it does not exactly match the public key provided to this option, + // /// curl will abort the connection before sending or receiving any data. + // /// + // /// By default this option is not set and corresponds to + // /// `CURLOPT_PINNEDPUBLICKEY`. 
+ // pub fn pinned_public_key(&mut self, enable: bool) -> Result<(), Error> { + // self.setopt_long(curl_sys::CURLOPT_CERTINFO, enable as c_long) + // } + + /// Specify a source for random data + /// + /// The file will be used to read from to seed the random engine for SSL and + /// more. + /// + /// By default this option is not set and corresponds to + /// `CURLOPT_RANDOM_FILE`. + pub fn random_file>(&mut self, p: P) -> Result<(), Error> { + self.setopt_path(curl_sys::CURLOPT_RANDOM_FILE, p.as_ref()) + } + + /// Specify EGD socket path. + /// + /// Indicates the path name to the Entropy Gathering Daemon socket. It will + /// be used to seed the random engine for SSL. + /// + /// By default this option is not set and corresponds to + /// `CURLOPT_EGDSOCKET`. + pub fn egd_socket>(&mut self, p: P) -> Result<(), Error> { + self.setopt_path(curl_sys::CURLOPT_EGDSOCKET, p.as_ref()) + } + + /// Specify ciphers to use for TLS. + /// + /// Holds the list of ciphers to use for the SSL connection. The list must + /// be syntactically correct, it consists of one or more cipher strings + /// separated by colons. Commas or spaces are also acceptable separators + /// but colons are normally used, !, - and + can be used as operators. + /// + /// For OpenSSL and GnuTLS valid examples of cipher lists include 'RC4-SHA', + /// ´SHA1+DES´, 'TLSv1' and 'DEFAULT'. The default list is normally set when + /// you compile OpenSSL. + /// + /// You'll find more details about cipher lists on this URL: + /// + /// https://www.openssl.org/docs/apps/ciphers.html + /// + /// For NSS, valid examples of cipher lists include 'rsa_rc4_128_md5', + /// ´rsa_aes_128_sha´, etc. With NSS you don't add/remove ciphers. If one + /// uses this option then all known ciphers are disabled and only those + /// passed in are enabled. + /// + /// You'll find more details about the NSS cipher lists on this URL: + /// + /// http://git.fedorahosted.org/cgit/mod_nss.git/plain/docs/mod_nss.html#Directives + /// + /// By default this option is not set and corresponds to + /// `CURLOPT_SSL_CIPHER_LIST`. + pub fn ssl_cipher_list(&mut self, ciphers: &str) -> Result<(), Error> { + let ciphers = try!(CString::new(ciphers)); + self.setopt_str(curl_sys::CURLOPT_SSL_CIPHER_LIST, &ciphers) + } + + /// Enable or disable use of the SSL session-ID cache + /// + /// By default all transfers are done using the cache enabled. While nothing + /// ever should get hurt by attempting to reuse SSL session-IDs, there seem + /// to be or have been broken SSL implementations in the wild that may + /// require you to disable this in order for you to succeed. + /// + /// This corresponds to the `CURLOPT_SSL_SESSIONID_CACHE` option. + pub fn ssl_sessionid_cache(&mut self, enable: bool) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_SSL_SESSIONID_CACHE, + enable as c_long) + } + + /// Set SSL behavior options + /// + /// Inform libcurl about SSL specific behaviors. + /// + /// This corresponds to the `CURLOPT_SSL_OPTIONS` option. + pub fn ssl_options(&mut self, bits: &SslOpt) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_SSL_OPTIONS, bits.bits) + } + + // /// Set SSL behavior options for proxies + // /// + // /// Inform libcurl about SSL specific behaviors. + // /// + // /// This corresponds to the `CURLOPT_PROXY_SSL_OPTIONS` option. + // pub fn proxy_ssl_options(&mut self, bits: &SslOpt) -> Result<(), Error> { + // self.setopt_long(curl_sys::CURLOPT_PROXY_SSL_OPTIONS, bits.bits) + // } + + // /// Stores a private pointer-sized piece of data. 
+    // ///
+    // /// This can be retrieved through the `private` function and otherwise
+    // /// libcurl does not tamper with this value. This corresponds to
+    // /// `CURLOPT_PRIVATE` and defaults to 0.
+    // pub fn set_private(&mut self, private: usize) -> Result<(), Error> {
+    //     self.setopt_ptr(curl_sys::CURLOPT_PRIVATE, private as *const _)
+    // }
+    //
+    // /// Fetches this handle's private pointer-sized piece of data.
+    // ///
+    // /// This corresponds to `CURLINFO_PRIVATE` and defaults to 0.
+    // pub fn private(&mut self) -> Result<usize, Error> {
+    //     self.getopt_ptr(curl_sys::CURLINFO_PRIVATE).map(|p| p as usize)
+    // }
+
+    // =========================================================================
+    // getters
+
+
+    /// Get info on unmet time conditional
+    ///
+    /// Returns if the condition provided in the previous request didn't match
+    ///
+    /// This corresponds to `CURLINFO_CONDITION_UNMET` and may return an error if the
+    /// option is not supported
+    pub fn time_condition_unmet(&mut self) -> Result<bool, Error> {
+        self.getopt_long(curl_sys::CURLINFO_CONDITION_UNMET).map(|r| {
+            if r == 0 {
+                false
+            } else {
+                true
+            }
+        })
+    }
+
+    /// Get the last used URL
+    ///
+    /// In cases when you've asked libcurl to follow redirects, it may
+    /// not be the same value you set with `url`.
+    ///
+    /// This method corresponds to the `CURLINFO_EFFECTIVE_URL` option.
+    ///
+    /// Returns `Ok(None)` if no effective url is listed or `Err` if an error
+    /// happens or the underlying bytes aren't valid utf-8.
+    pub fn effective_url(&mut self) -> Result<Option<&str>, Error> {
+        self.getopt_str(curl_sys::CURLINFO_EFFECTIVE_URL)
+    }
+
+    /// Get the last used URL, in bytes
+    ///
+    /// In cases when you've asked libcurl to follow redirects, it may
+    /// not be the same value you set with `url`.
+    ///
+    /// This method corresponds to the `CURLINFO_EFFECTIVE_URL` option.
+    ///
+    /// Returns `Ok(None)` if no effective url is listed or `Err` if an error
+    /// happens.
+    pub fn effective_url_bytes(&mut self) -> Result<Option<&[u8]>, Error> {
+        self.getopt_bytes(curl_sys::CURLINFO_EFFECTIVE_URL)
+    }
+
+    /// Get the last response code
+    ///
+    /// The stored value will be zero if no server response code has been
+    /// received. Note that a proxy's CONNECT response should be read with
+    /// `http_connectcode` and not this.
+    ///
+    /// Corresponds to `CURLINFO_RESPONSE_CODE` and returns an error if this
+    /// option is not supported.
+    pub fn response_code(&mut self) -> Result<u32, Error> {
+        self.getopt_long(curl_sys::CURLINFO_RESPONSE_CODE).map(|c| c as u32)
+    }
+
+    /// Get the CONNECT response code
+    ///
+    /// Returns the last received HTTP proxy response code to a CONNECT request.
+    /// The returned value will be zero if no such response code was available.
+    ///
+    /// Corresponds to `CURLINFO_HTTP_CONNECTCODE` and returns an error if this
+    /// option is not supported.
+    pub fn http_connectcode(&mut self) -> Result<u32, Error> {
+        self.getopt_long(curl_sys::CURLINFO_HTTP_CONNECTCODE).map(|c| c as u32)
+    }
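+
+    // Illustrative sketch, not part of the upstream sources: assuming a
+    // handle named `easy` whose transfer has already completed via `perform`,
+    // and a calling function that returns `Result<(), Error>`, the response
+    // getters above can be combined along these lines:
+    //
+    //     let status = easy.response_code()?;            // e.g. 200
+    //     if let Some(url) = easy.effective_url()? {
+    //         println!("{} answered with HTTP {}", url, status);
+    //     }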
+
+    /// Get the remote time of the retrieved document
+    ///
+    /// Returns the remote time of the retrieved document (in number of seconds
+    /// since 1 Jan 1970 in the GMT/UTC time zone). If you get `None`, it can be
+    /// because of many reasons (it might be unknown, the server might hide it
+    /// or the server doesn't support the command that tells document time etc)
+    /// and the time of the document is unknown.
+    ///
+    /// Note that you must tell the server to collect this information before
+    /// the transfer is made, by using the `filetime` method, or you will
+    /// unconditionally get a `None` back.
+    ///
+    /// This corresponds to `CURLINFO_FILETIME` and may return an error if the
+    /// option is not supported
+    pub fn filetime(&mut self) -> Result<Option<i64>, Error> {
+        self.getopt_long(curl_sys::CURLINFO_FILETIME).map(|r| {
+            if r == -1 {
+                None
+            } else {
+                Some(r as i64)
+            }
+        })
+    }
+
+    /// Get the number of downloaded bytes
+    ///
+    /// Returns the total amount of bytes that were downloaded.
+    /// The amount is only for the latest transfer and will be reset again for each new transfer.
+    /// This counts actual payload data, what's also commonly called body.
+    /// All meta and header data are excluded and will not be counted in this number.
+    ///
+    /// This corresponds to `CURLINFO_SIZE_DOWNLOAD` and may return an error if the
+    /// option is not supported
+    pub fn download_size(&mut self) -> Result<f64, Error> {
+        self.getopt_double(curl_sys::CURLINFO_SIZE_DOWNLOAD)
+            .map(|r| r as f64)
+    }
+
+    /// Get the content-length of the download
+    ///
+    /// Returns the content-length of the download.
+    /// This is the value read from the Content-Length: field
+    ///
+    /// This corresponds to `CURLINFO_CONTENT_LENGTH_DOWNLOAD` and may return an error if the
+    /// option is not supported
+    pub fn content_length_download(&mut self) -> Result<f64, Error> {
+        self.getopt_double(curl_sys::CURLINFO_CONTENT_LENGTH_DOWNLOAD)
+            .map(|r| r as f64)
+    }
+
+    /// Get total time of previous transfer
+    ///
+    /// Returns the total time for the previous transfer,
+    /// including name resolving, TCP connect etc.
+    ///
+    /// Corresponds to `CURLINFO_TOTAL_TIME` and may return an error if the
+    /// option isn't supported.
+    pub fn total_time(&mut self) -> Result<Duration, Error> {
+        self.getopt_double(curl_sys::CURLINFO_TOTAL_TIME)
+            .map(double_seconds_to_duration)
+    }
+
+    /// Get the name lookup time
+    ///
+    /// Returns the total time from the start
+    /// until the name resolving was completed.
+    ///
+    /// Corresponds to `CURLINFO_NAMELOOKUP_TIME` and may return an error if the
+    /// option isn't supported.
+    pub fn namelookup_time(&mut self) -> Result<Duration, Error> {
+        self.getopt_double(curl_sys::CURLINFO_NAMELOOKUP_TIME)
+            .map(double_seconds_to_duration)
+    }
+
+    /// Get the time until connect
+    ///
+    /// Returns the total time from the start
+    /// until the connection to the remote host (or proxy) was completed.
+    ///
+    /// Corresponds to `CURLINFO_CONNECT_TIME` and may return an error if the
+    /// option isn't supported.
+    pub fn connect_time(&mut self) -> Result<Duration, Error> {
+        self.getopt_double(curl_sys::CURLINFO_CONNECT_TIME)
+            .map(double_seconds_to_duration)
+    }
+
+    /// Get the time until the SSL/SSH handshake is completed
+    ///
+    /// Returns the total time it took from the start until the SSL/SSH
+    /// connect/handshake to the remote host was completed. This time is most often
+    /// very near to the `pretransfer_time` time, except for cases such as
+    /// HTTP pipelining where the pretransfer time can be delayed due to waits in
+    /// line for the pipeline and more.
+    ///
+    /// Corresponds to `CURLINFO_APPCONNECT_TIME` and may return an error if the
+    /// option isn't supported.
+    pub fn appconnect_time(&mut self) -> Result<Duration, Error> {
+        self.getopt_double(curl_sys::CURLINFO_APPCONNECT_TIME)
+            .map(double_seconds_to_duration)
+    }
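+
+    // Illustrative sketch, not part of the upstream sources: the duration
+    // getters above can be read back-to-back after a transfer to build a
+    // rough timing breakdown. `easy` is an assumed, already-performed handle
+    // and the surrounding function is assumed to return `Result<(), Error>`:
+    //
+    //     let dns     = easy.namelookup_time()?;
+    //     let connect = easy.connect_time()?;
+    //     let tls     = easy.appconnect_time()?;
+    //     let total   = easy.total_time()?;
+    //     println!("dns {:?}, connect {:?}, tls {:?}, total {:?}",
+    //              dns, connect, tls, total);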
+
+    /// Get the time until the file transfer start
+    ///
+    /// Returns the total time it took from the start until the file
+    /// transfer is just about to begin. This includes all pre-transfer commands
+    /// and negotiations that are specific to the particular protocol(s) involved.
+    /// It does not involve the sending of the protocol-specific request that
+    /// triggers a transfer.
+    ///
+    /// Corresponds to `CURLINFO_PRETRANSFER_TIME` and may return an error if the
+    /// option isn't supported.
+    pub fn pretransfer_time(&mut self) -> Result<Duration, Error> {
+        self.getopt_double(curl_sys::CURLINFO_PRETRANSFER_TIME)
+            .map(double_seconds_to_duration)
+    }
+
+    /// Get the time until the first byte is received
+    ///
+    /// Returns the total time it took from the start until the first
+    /// byte is received by libcurl. This includes `pretransfer_time` and
+    /// also the time the server needs to calculate the result.
+    ///
+    /// Corresponds to `CURLINFO_STARTTRANSFER_TIME` and may return an error if the
+    /// option isn't supported.
+    pub fn starttransfer_time(&mut self) -> Result<Duration, Error> {
+        self.getopt_double(curl_sys::CURLINFO_STARTTRANSFER_TIME)
+            .map(double_seconds_to_duration)
+    }
+
+    /// Get the time for all redirection steps
+    ///
+    /// Returns the total time it took for all redirection steps,
+    /// including name lookup, connect, pretransfer and transfer, before the
+    /// final transaction was started. `redirect_time` contains the complete
+    /// execution time for multiple redirections.
+    ///
+    /// Corresponds to `CURLINFO_REDIRECT_TIME` and may return an error if the
+    /// option isn't supported.
+    pub fn redirect_time(&mut self) -> Result<Duration, Error> {
+        self.getopt_double(curl_sys::CURLINFO_REDIRECT_TIME)
+            .map(double_seconds_to_duration)
+    }
+
+    /// Get the number of redirects
+    ///
+    /// Corresponds to `CURLINFO_REDIRECT_COUNT` and may return an error if the
+    /// option isn't supported.
+    pub fn redirect_count(&mut self) -> Result<u32, Error> {
+        self.getopt_long(curl_sys::CURLINFO_REDIRECT_COUNT).map(|c| c as u32)
+    }
+
+    /// Get the URL a redirect would go to
+    ///
+    /// Returns the URL a redirect would take you to if you would enable
+    /// `follow_location`. This can come in very handy if you think using the
+    /// built-in libcurl redirect logic isn't good enough for you but you would
+    /// still prefer to avoid implementing all the magic of figuring out the new
+    /// URL.
+    ///
+    /// Corresponds to `CURLINFO_REDIRECT_URL` and may return an error if the
+    /// url isn't valid utf-8 or an error happens.
+    pub fn redirect_url(&mut self) -> Result<Option<&str>, Error> {
+        self.getopt_str(curl_sys::CURLINFO_REDIRECT_URL)
+    }
+
+    /// Get the URL a redirect would go to, in bytes
+    ///
+    /// Returns the URL a redirect would take you to if you would enable
+    /// `follow_location`. This can come in very handy if you think using the
+    /// built-in libcurl redirect logic isn't good enough for you but you would
+    /// still prefer to avoid implementing all the magic of figuring out the new
+    /// URL.
+    ///
+    /// Corresponds to `CURLINFO_REDIRECT_URL` and may return an error.
+    pub fn redirect_url_bytes(&mut self) -> Result<Option<&[u8]>, Error> {
+        self.getopt_bytes(curl_sys::CURLINFO_REDIRECT_URL)
+    }
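+
+    // Illustrative sketch, not part of the upstream sources: when
+    // `follow_location` is left disabled, the redirect getters above let a
+    // caller implement its own redirect policy. `easy` is an assumed handle
+    // and the surrounding function is assumed to return `Result<(), Error>`:
+    //
+    //     if easy.response_code()? / 100 == 3 {
+    //         if let Some(next) = easy.redirect_url()? {
+    //             println!("would redirect to {}", next);
+    //         }
+    //     }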
+
+    /// Get size of retrieved headers
+    ///
+    /// Corresponds to `CURLINFO_HEADER_SIZE` and may return an error if the
+    /// option isn't supported.
+    pub fn header_size(&mut self) -> Result<u64, Error> {
+        self.getopt_long(curl_sys::CURLINFO_HEADER_SIZE).map(|c| c as u64)
+    }
+
+    /// Get size of sent request.
+    ///
+    /// Corresponds to `CURLINFO_REQUEST_SIZE` and may return an error if the
+    /// option isn't supported.
+    pub fn request_size(&mut self) -> Result<u64, Error> {
+        self.getopt_long(curl_sys::CURLINFO_REQUEST_SIZE).map(|c| c as u64)
+    }
+
+    /// Get Content-Type
+    ///
+    /// Returns the content-type of the downloaded object. This is the value
+    /// read from the Content-Type: field. If you get `None`, it means that the
+    /// server didn't send a valid Content-Type header or that the protocol
+    /// used doesn't support this.
+    ///
+    /// Corresponds to `CURLINFO_CONTENT_TYPE` and may return an error if the
+    /// option isn't supported.
+    pub fn content_type(&mut self) -> Result<Option<&str>, Error> {
+        self.getopt_str(curl_sys::CURLINFO_CONTENT_TYPE)
+    }
+
+    /// Get Content-Type, in bytes
+    ///
+    /// Returns the content-type of the downloaded object. This is the value
+    /// read from the Content-Type: field. If you get `None`, it means that the
+    /// server didn't send a valid Content-Type header or that the protocol
+    /// used doesn't support this.
+    ///
+    /// Corresponds to `CURLINFO_CONTENT_TYPE` and may return an error if the
+    /// option isn't supported.
+    pub fn content_type_bytes(&mut self) -> Result<Option<&[u8]>, Error> {
+        self.getopt_bytes(curl_sys::CURLINFO_CONTENT_TYPE)
+    }
+
+    /// Get errno number from last connect failure.
+    ///
+    /// Note that the value is only set on failure, it is not reset upon a
+    /// successful operation. The number is OS and system specific.
+    ///
+    /// Corresponds to `CURLINFO_OS_ERRNO` and may return an error if the
+    /// option isn't supported.
+    pub fn os_errno(&mut self) -> Result<i32, Error> {
+        self.getopt_long(curl_sys::CURLINFO_OS_ERRNO).map(|c| c as i32)
+    }
+
+    /// Get IP address of last connection.
+    ///
+    /// Returns a string holding the IP address of the most recent connection
+    /// done with this curl handle. This string may be IPv6 when that is
+    /// enabled.
+    ///
+    /// Corresponds to `CURLINFO_PRIMARY_IP` and may return an error if the
+    /// option isn't supported.
+    pub fn primary_ip(&mut self) -> Result<Option<&str>, Error> {
+        self.getopt_str(curl_sys::CURLINFO_PRIMARY_IP)
+    }
+
+    /// Get the latest destination port number
+    ///
+    /// Corresponds to `CURLINFO_PRIMARY_PORT` and may return an error if the
+    /// option isn't supported.
+    pub fn primary_port(&mut self) -> Result<u16, Error> {
+        self.getopt_long(curl_sys::CURLINFO_PRIMARY_PORT).map(|c| c as u16)
+    }
+
+    /// Get local IP address of last connection
+    ///
+    /// Returns a string holding the IP address of the local end of most recent
+    /// connection done with this curl handle. This string may be IPv6 when that
+    /// is enabled.
+    ///
+    /// Corresponds to `CURLINFO_LOCAL_IP` and may return an error if the
+    /// option isn't supported.
+    pub fn local_ip(&mut self) -> Result<Option<&str>, Error> {
+        self.getopt_str(curl_sys::CURLINFO_LOCAL_IP)
+    }
+
+    /// Get the latest local port number
+    ///
+    /// Corresponds to `CURLINFO_LOCAL_PORT` and may return an error if the
+    /// option isn't supported.
+    pub fn local_port(&mut self) -> Result<u16, Error> {
+        self.getopt_long(curl_sys::CURLINFO_LOCAL_PORT).map(|c| c as u16)
+    }
+
+    /// Get all known cookies
+    ///
+    /// Returns a linked-list of all cookies cURL knows (expired ones, too).
+    ///
+    /// Corresponds to the `CURLINFO_COOKIELIST` option and may return an error
+    /// if the option isn't supported.
+ pub fn cookies(&mut self) -> Result { + unsafe { + let mut list = 0 as *mut _; + let rc = curl_sys::curl_easy_getinfo(self.inner.handle, + curl_sys::CURLINFO_COOKIELIST, + &mut list); + try!(self.cvt(rc)); + Ok(list::from_raw(list)) + } + } + + /// Wait for pipelining/multiplexing + /// + /// Set wait to `true` to tell libcurl to prefer to wait for a connection to + /// confirm or deny that it can do pipelining or multiplexing before + /// continuing. + /// + /// When about to perform a new transfer that allows pipelining or + /// multiplexing, libcurl will check for existing connections to re-use and + /// pipeline on. If no such connection exists it will immediately continue + /// and create a fresh new connection to use. + /// + /// By setting this option to `true` - and having `pipelining(true, true)` + /// enabled for the multi handle this transfer is associated with - libcurl + /// will instead wait for the connection to reveal if it is possible to + /// pipeline/multiplex on before it continues. This enables libcurl to much + /// better keep the number of connections to a minimum when using pipelining + /// or multiplexing protocols. + /// + /// The effect thus becomes that with this option set, libcurl prefers to + /// wait and re-use an existing connection for pipelining rather than the + /// opposite: prefer to open a new connection rather than waiting. + /// + /// The waiting time is as long as it takes for the connection to get up and + /// for libcurl to get the necessary response back that informs it about its + /// protocol and support level. + /// + /// This corresponds to the `CURLOPT_PIPEWAIT` option. + pub fn pipewait(&mut self, wait: bool) -> Result<(), Error> { + self.setopt_long(curl_sys::CURLOPT_PIPEWAIT, wait as c_long) + } + + // ========================================================================= + // Other methods + + /// After options have been set, this will perform the transfer described by + /// the options. + /// + /// This performs the request in a synchronous fashion. This can be used + /// multiple times for one easy handle and libcurl will attempt to re-use + /// the same connection for all transfers. + /// + /// This method will preserve all options configured in this handle for the + /// next request, and if that is not desired then the options can be + /// manually reset or the `reset` method can be called. + /// + /// Note that this method takes `&self`, which is quite important! This + /// allows applications to close over the handle in various callbacks to + /// call methods like `unpause_write` and `unpause_read` while a transfer is + /// in progress. + pub fn perform(&self) -> Result<(), Error> { + let ret = unsafe { + self.cvt(curl_sys::curl_easy_perform(self.inner.handle)) + }; + panic::propagate(); + return ret + } + + /// Unpause reading on a connection. + /// + /// Using this function, you can explicitly unpause a connection that was + /// previously paused. + /// + /// A connection can be paused by letting the read or the write callbacks + /// return `ReadError::Pause` or `WriteError::Pause`. + /// + /// To unpause, you may for example call this from the progress callback + /// which gets called at least once per second, even if the connection is + /// paused. + /// + /// The chance is high that you will get your write callback called before + /// this function returns. 
+ pub fn unpause_read(&self) -> Result<(), Error> { + unsafe { + let rc = curl_sys::curl_easy_pause(self.inner.handle, + curl_sys::CURLPAUSE_RECV_CONT); + self.cvt(rc) + } + } + + /// Unpause writing on a connection. + /// + /// Using this function, you can explicitly unpause a connection that was + /// previously paused. + /// + /// A connection can be paused by letting the read or the write callbacks + /// return `ReadError::Pause` or `WriteError::Pause`. A write callback that + /// returns pause signals to the library that it couldn't take care of any + /// data at all, and that data will then be delivered again to the callback + /// when the writing is later unpaused. + /// + /// To unpause, you may for example call this from the progress callback + /// which gets called at least once per second, even if the connection is + /// paused. + pub fn unpause_write(&self) -> Result<(), Error> { + unsafe { + let rc = curl_sys::curl_easy_pause(self.inner.handle, + curl_sys::CURLPAUSE_SEND_CONT); + self.cvt(rc) + } + } + + /// URL encodes a string `s` + pub fn url_encode(&mut self, s: &[u8]) -> String { + if s.len() == 0 { + return String::new() + } + unsafe { + let p = curl_sys::curl_easy_escape(self.inner.handle, + s.as_ptr() as *const _, + s.len() as c_int); + assert!(!p.is_null()); + let ret = str::from_utf8(CStr::from_ptr(p).to_bytes()).unwrap(); + let ret = String::from(ret); + curl_sys::curl_free(p as *mut _); + return ret + } + } + + /// URL decodes a string `s`, returning `None` if it fails + pub fn url_decode(&mut self, s: &str) -> Vec { + if s.len() == 0 { + return Vec::new(); + } + + // Work around https://curl.haxx.se/docs/adv_20130622.html, a bug where + // if the last few characters are a bad escape then curl will have a + // buffer overrun. + let mut iter = s.chars().rev(); + let orig_len = s.len(); + let mut data; + let mut s = s; + if iter.next() == Some('%') || + iter.next() == Some('%') || + iter.next() == Some('%') { + data = s.to_string(); + data.push(0u8 as char); + s = &data[..]; + } + unsafe { + let mut len = 0; + let p = curl_sys::curl_easy_unescape(self.inner.handle, + s.as_ptr() as *const _, + orig_len as c_int, + &mut len); + assert!(!p.is_null()); + let slice = slice::from_raw_parts(p as *const u8, len as usize); + let ret = slice.to_vec(); + curl_sys::curl_free(p as *mut _); + return ret + } + } + + // TODO: I don't think this is safe, you can drop this which has all the + // callback data and then the next is use-after-free + // + // /// Attempts to clone this handle, returning a new session handle with the + // /// same options set for this handle. + // /// + // /// Internal state info and things like persistent connections ccannot be + // /// transferred. + // /// + // /// # Errors + // /// + // /// If a new handle could not be allocated or another error happens, `None` + // /// is returned. + // pub fn try_clone<'b>(&mut self) -> Option> { + // unsafe { + // let handle = curl_sys::curl_easy_duphandle(self.handle); + // if handle.is_null() { + // None + // } else { + // Some(Easy { + // handle: handle, + // data: blank_data(), + // _marker: marker::PhantomData, + // }) + // } + // } + // } + + /// Receives data from a connected socket. + /// + /// Only useful after a successful `perform` with the `connect_only` option + /// set as well. 
+    pub fn recv(&mut self, data: &mut [u8]) -> Result<usize, Error> {
+        unsafe {
+            let mut n = 0;
+            let r = curl_sys::curl_easy_recv(self.inner.handle,
+                                             data.as_mut_ptr() as *mut _,
+                                             data.len(),
+                                             &mut n);
+            if r == curl_sys::CURLE_OK {
+                Ok(n)
+            } else {
+                Err(Error::new(r))
+            }
+        }
+    }
+
+    /// Sends data over the connected socket.
+    ///
+    /// Only useful after a successful `perform` with the `connect_only` option
+    /// set as well.
+    pub fn send(&mut self, data: &[u8]) -> Result<usize, Error> {
+        unsafe {
+            let mut n = 0;
+            let rc = curl_sys::curl_easy_send(self.inner.handle,
+                                              data.as_ptr() as *const _,
+                                              data.len(),
+                                              &mut n);
+            try!(self.cvt(rc));
+            Ok(n)
+        }
+    }
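+
+    // Illustrative sketch, not part of the upstream sources: `send` and
+    // `recv` above are only usable after a `perform` on a handle configured
+    // with `connect_only(true)`. `easy` is an assumed handle that already has
+    // a url and `connect_only` set, and the surrounding function is assumed
+    // to return `Result<(), Error>`:
+    //
+    //     easy.perform()?;                         // only establishes the connection
+    //     easy.send(b"GET / HTTP/1.0\r\n\r\n")?;
+    //     let mut buf = [0u8; 1024];
+    //     let n = easy.recv(&mut buf)?;
+    //     println!("read {} bytes", n);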
+
+    /// Get a pointer to the raw underlying CURL handle.
+    pub fn raw(&self) -> *mut curl_sys::CURL {
+        self.inner.handle
+    }
+
+    #[cfg(unix)]
+    fn setopt_path(&mut self,
+                   opt: curl_sys::CURLoption,
+                   val: &Path) -> Result<(), Error> {
+        use std::os::unix::prelude::*;
+        let s = try!(CString::new(val.as_os_str().as_bytes()));
+        self.setopt_str(opt, &s)
+    }
+
+    #[cfg(windows)]
+    fn setopt_path(&mut self,
+                   opt: curl_sys::CURLoption,
+                   val: &Path) -> Result<(), Error> {
+        match val.to_str() {
+            Some(s) => self.setopt_str(opt, &try!(CString::new(s))),
+            None => Err(Error::new(curl_sys::CURLE_CONV_FAILED)),
+        }
+    }
+
+    fn setopt_long(&mut self,
+                   opt: curl_sys::CURLoption,
+                   val: c_long) -> Result<(), Error> {
+        unsafe {
+            self.cvt(curl_sys::curl_easy_setopt(self.inner.handle, opt, val))
+        }
+    }
+
+    fn setopt_str(&mut self,
+                  opt: curl_sys::CURLoption,
+                  val: &CStr) -> Result<(), Error> {
+        self.setopt_ptr(opt, val.as_ptr())
+    }
+
+    fn setopt_ptr(&self,
+                  opt: curl_sys::CURLoption,
+                  val: *const c_char) -> Result<(), Error> {
+        unsafe {
+            self.cvt(curl_sys::curl_easy_setopt(self.inner.handle, opt, val))
+        }
+    }
+
+    fn setopt_off_t(&mut self,
+                    opt: curl_sys::CURLoption,
+                    val: curl_sys::curl_off_t) -> Result<(), Error> {
+        unsafe {
+            let rc = curl_sys::curl_easy_setopt(self.inner.handle, opt, val);
+            self.cvt(rc)
+        }
+    }
+
+    fn getopt_bytes(&mut self, opt: curl_sys::CURLINFO)
+                    -> Result<Option<&[u8]>, Error> {
+        unsafe {
+            let p = try!(self.getopt_ptr(opt));
+            if p.is_null() {
+                Ok(None)
+            } else {
+                Ok(Some(CStr::from_ptr(p).to_bytes()))
+            }
+        }
+    }
+
+    fn getopt_ptr(&mut self, opt: curl_sys::CURLINFO)
+                  -> Result<*const c_char, Error> {
+        unsafe {
+            let mut p = 0 as *const c_char;
+            let rc = curl_sys::curl_easy_getinfo(self.inner.handle, opt, &mut p);
+            try!(self.cvt(rc));
+            Ok(p)
+        }
+    }
+
+    fn getopt_str(&mut self, opt: curl_sys::CURLINFO)
+                  -> Result<Option<&str>, Error> {
+        match self.getopt_bytes(opt) {
+            Ok(None) => Ok(None),
+            Err(e) => Err(e),
+            Ok(Some(bytes)) => {
+                match str::from_utf8(bytes) {
+                    Ok(s) => Ok(Some(s)),
+                    Err(_) => Err(Error::new(curl_sys::CURLE_CONV_FAILED)),
+                }
+            }
+        }
+    }
+
+    fn getopt_long(&mut self, opt: curl_sys::CURLINFO) -> Result<c_long, Error> {
+        unsafe {
+            let mut p = 0;
+            let rc = curl_sys::curl_easy_getinfo(self.inner.handle, opt, &mut p);
+            try!(self.cvt(rc));
+            Ok(p)
+        }
+    }
+
+    fn getopt_double(&mut self, opt: curl_sys::CURLINFO) -> Result<c_double, Error> {
+        unsafe {
+            let mut p = 0 as c_double;
+            let rc = curl_sys::curl_easy_getinfo(self.inner.handle, opt, &mut p);
+            try!(self.cvt(rc));
+            Ok(p)
+        }
+    }
+
+    /// Returns the contents of the internal error buffer, if available.
+    ///
+    /// When an easy handle is created it configures the `CURLOPT_ERRORBUFFER`
+    /// parameter and instructs libcurl to store more error information into a
+    /// buffer for better error messages and better debugging. The contents of
+    /// that buffer are automatically coupled with all errors for methods on
+    /// this type, but if manually invoking APIs the contents will need to be
+    /// extracted with this method.
+    ///
+    /// Put another way, you probably don't need this, you're probably already
+    /// getting nice error messages!
+    ///
+    /// This function will clear the internal buffer, so this is an operation
+    /// that mutates the handle internally.
+    pub fn take_error_buf(&self) -> Option<String> {
+        let mut buf = self.inner.error_buf.borrow_mut();
+        if buf[0] == 0 {
+            return None
+        }
+        let pos = buf.iter().position(|i| *i == 0).unwrap_or(buf.len());
+        let msg = String::from_utf8_lossy(&buf[..pos]).into_owned();
+        buf[0] = 0;
+        Some(msg)
+    }
+
+    fn cvt(&self, rc: curl_sys::CURLcode) -> Result<(), Error> {
+        if rc == curl_sys::CURLE_OK {
+            return Ok(())
+        }
+        let mut err = Error::new(rc);
+        if let Some(msg) = self.take_error_buf() {
+            err.set_extra(msg);
+        }
+        Err(err)
+    }
+}
+
+impl<H: fmt::Debug> fmt::Debug for Easy2<H> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        f.debug_struct("Easy")
+            .field("handle", &self.inner.handle)
+            .field("handler", &self.inner.handler)
+            .finish()
+    }
+}
+
+impl<H> Drop for Easy2<H> {
+    fn drop(&mut self) {
+        unsafe {
+            curl_sys::curl_easy_cleanup(self.inner.handle);
+        }
+    }
+}
+
+extern fn header_cb<H: Handler>(buffer: *mut c_char,
+                                size: size_t,
+                                nitems: size_t,
+                                userptr: *mut c_void) -> size_t {
+    let keep_going = panic::catch(|| unsafe {
+        let data = slice::from_raw_parts(buffer as *const u8,
+                                         size * nitems);
+        (*(userptr as *mut Inner<H>)).handler.header(data)
+    }).unwrap_or(false);
+    if keep_going {
+        size * nitems
+    } else {
+        !0
+    }
+}
+
+extern fn write_cb<H: Handler>(ptr: *mut c_char,
+                               size: size_t,
+                               nmemb: size_t,
+                               data: *mut c_void) -> size_t {
+    panic::catch(|| unsafe {
+        let input = slice::from_raw_parts(ptr as *const u8,
+                                          size * nmemb);
+        match (*(data as *mut Inner<H>)).handler.write(input) {
+            Ok(s) => s,
+            Err(WriteError::Pause) |
+            Err(WriteError::__Nonexhaustive) => curl_sys::CURL_WRITEFUNC_PAUSE,
+        }
+    }).unwrap_or(!0)
+}
+
+extern fn read_cb<H: Handler>(ptr: *mut c_char,
+                              size: size_t,
+                              nmemb: size_t,
+                              data: *mut c_void) -> size_t {
+    panic::catch(|| unsafe {
+        let input = slice::from_raw_parts_mut(ptr as *mut u8,
+                                              size * nmemb);
+        match (*(data as *mut Inner<H>)).handler.read(input) {
+            Ok(s) => s,
+            Err(ReadError::Pause) => {
+                curl_sys::CURL_READFUNC_PAUSE
+            }
+            Err(ReadError::__Nonexhaustive) |
+            Err(ReadError::Abort) => {
+                curl_sys::CURL_READFUNC_ABORT
+            }
+        }
+    }).unwrap_or(!0)
+}
+
+extern fn seek_cb<H: Handler>(data: *mut c_void,
+                              offset: curl_sys::curl_off_t,
+                              origin: c_int) -> c_int {
+    panic::catch(|| unsafe {
+        let from = if origin == libc::SEEK_SET {
+            SeekFrom::Start(offset as u64)
+        } else {
+            panic!("unknown origin from libcurl: {}", origin);
+        };
+        (*(data as *mut Inner<H>)).handler.seek(from) as c_int
+    }).unwrap_or(!0)
+}
+
+extern fn progress_cb<H: Handler>(data: *mut c_void,
+                                  dltotal: c_double,
+                                  dlnow: c_double,
+                                  ultotal: c_double,
+                                  ulnow: c_double) -> c_int {
+    let keep_going = panic::catch(|| unsafe {
+        (*(data as *mut Inner<H>)).handler.progress(dltotal, dlnow, ultotal, ulnow)
+    }).unwrap_or(false);
+    if keep_going {
+        0
+    } else {
+        1
+    }
+}
+
+// TODO: expose `handle`? is that safe?
+extern fn debug_cb(_handle: *mut curl_sys::CURL, + kind: curl_sys::curl_infotype, + data: *mut c_char, + size: size_t, + userptr: *mut c_void) -> c_int { + panic::catch(|| unsafe { + let data = slice::from_raw_parts(data as *const u8, size); + let kind = match kind { + curl_sys::CURLINFO_TEXT => InfoType::Text, + curl_sys::CURLINFO_HEADER_IN => InfoType::HeaderIn, + curl_sys::CURLINFO_HEADER_OUT => InfoType::HeaderOut, + curl_sys::CURLINFO_DATA_IN => InfoType::DataIn, + curl_sys::CURLINFO_DATA_OUT => InfoType::DataOut, + curl_sys::CURLINFO_SSL_DATA_IN => InfoType::SslDataIn, + curl_sys::CURLINFO_SSL_DATA_OUT => InfoType::SslDataOut, + _ => return, + }; + (*(userptr as *mut Inner)).handler.debug(kind, data) + }); + return 0 +} + +extern fn ssl_ctx_cb(_handle: *mut curl_sys::CURL, + ssl_ctx: *mut c_void, + data: *mut c_void) -> curl_sys::CURLcode { + let res = panic::catch(|| unsafe { + match (*(data as *mut Inner)).handler.ssl_ctx(ssl_ctx) { + Ok(()) => curl_sys::CURLE_OK, + Err(e) => e.code(), + } + }); + // Default to a generic SSL error in case of panic. This + // shouldn't really matter since the error should be + // propagated later on but better safe than sorry... + res.unwrap_or(curl_sys::CURLE_SSL_CONNECT_ERROR) +} + +// TODO: expose `purpose` and `sockaddr` inside of `address` +extern fn opensocket_cb(data: *mut c_void, + _purpose: curl_sys::curlsocktype, + address: *mut curl_sys::curl_sockaddr) + -> curl_sys::curl_socket_t +{ + let res = panic::catch(|| unsafe { + (*(data as *mut Inner)).handler.open_socket((*address).family, + (*address).socktype, + (*address).protocol) + .unwrap_or(curl_sys::CURL_SOCKET_BAD) + }); + res.unwrap_or(curl_sys::CURL_SOCKET_BAD) +} + +fn double_seconds_to_duration(seconds: f64) -> Duration { + let whole_seconds = seconds.trunc() as u64; + let nanos = seconds.fract() * 1_000_000_000f64; + Duration::new(whole_seconds, nanos as u32) +} + +#[test] +fn double_seconds_to_duration_whole_second() { + let dur = double_seconds_to_duration(1.0); + assert_eq!(dur.as_secs(), 1); + assert_eq!(dur.subsec_nanos(), 0); +} + +#[test] +fn double_seconds_to_duration_sub_second1() { + let dur = double_seconds_to_duration(0.0); + assert_eq!(dur.as_secs(), 0); + assert_eq!(dur.subsec_nanos(), 0); +} + +#[test] +fn double_seconds_to_duration_sub_second2() { + let dur = double_seconds_to_duration(0.5); + assert_eq!(dur.as_secs(), 0); + assert_eq!(dur.subsec_nanos(), 500_000_000); +} + +impl Auth { + /// Creates a new set of authentications with no members. + /// + /// An `Auth` structure is used to configure which forms of authentication + /// are attempted when negotiating connections with servers. + pub fn new() -> Auth { + Auth { bits: 0 } + } + + /// HTTP Basic authentication. + /// + /// This is the default choice, and the only method that is in wide-spread + /// use and supported virtually everywhere. This sends the user name and + /// password over the network in plain text, easily captured by others. + pub fn basic(&mut self, on: bool) -> &mut Auth { + self.flag(curl_sys::CURLAUTH_BASIC, on) + } + + /// HTTP Digest authentication. + /// + /// Digest authentication is defined in RFC 2617 and is a more secure way to + /// do authentication over public networks than the regular old-fashioned + /// Basic method. + pub fn digest(&mut self, on: bool) -> &mut Auth { + self.flag(curl_sys::CURLAUTH_DIGEST, on) + } + + /// HTTP Digest authentication with an IE flavor. 
+ /// + /// Digest authentication is defined in RFC 2617 and is a more secure way to + /// do authentication over public networks than the regular old-fashioned + /// Basic method. The IE flavor is simply that libcurl will use a special + /// "quirk" that IE is known to have used before version 7 and that some + /// servers require the client to use. + pub fn digest_ie(&mut self, on: bool) -> &mut Auth { + self.flag(curl_sys::CURLAUTH_DIGEST_IE, on) + } + + /// HTTP Negotiate (SPNEGO) authentication. + /// + /// Negotiate authentication is defined in RFC 4559 and is the most secure + /// way to perform authentication over HTTP. + /// + /// You need to build libcurl with a suitable GSS-API library or SSPI on + /// Windows for this to work. + pub fn gssnegotiate(&mut self, on: bool) -> &mut Auth { + self.flag(curl_sys::CURLAUTH_GSSNEGOTIATE, on) + } + + /// HTTP NTLM authentication. + /// + /// A proprietary protocol invented and used by Microsoft. It uses a + /// challenge-response and hash concept similar to Digest, to prevent the + /// password from being eavesdropped. + /// + /// You need to build libcurl with either OpenSSL, GnuTLS or NSS support for + /// this option to work, or build libcurl on Windows with SSPI support. + pub fn ntlm(&mut self, on: bool) -> &mut Auth { + self.flag(curl_sys::CURLAUTH_NTLM, on) + } + + /// NTLM delegating to winbind helper. + /// + /// Authentication is performed by a separate binary application that is + /// executed when needed. The name of the application is specified at + /// compile time but is typically /usr/bin/ntlm_auth + /// + /// Note that libcurl will fork when necessary to run the winbind + /// application and kill it when complete, calling waitpid() to await its + /// exit when done. On POSIX operating systems, killing the process will + /// cause a SIGCHLD signal to be raised (regardless of whether + /// CURLOPT_NOSIGNAL is set), which must be handled intelligently by the + /// application. In particular, the application must not unconditionally + /// call wait() in its SIGCHLD signal handler to avoid being subject to a + /// race condition. This behavior is subject to change in future versions of + /// libcurl. + /// + /// A proprietary protocol invented and used by Microsoft. It uses a + /// challenge-response and hash concept similar to Digest, to prevent the + /// password from being eavesdropped. + pub fn ntlm_wb(&mut self, on: bool) -> &mut Auth { + self.flag(curl_sys::CURLAUTH_NTLM_WB, on) + } + + fn flag(&mut self, bit: c_ulong, on: bool) -> &mut Auth { + if on { + self.bits |= bit as c_long; + } else { + self.bits &= !bit as c_long; + } + self + } +} + +impl fmt::Debug for Auth { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let bits = self.bits as c_ulong; + f.debug_struct("Auth") + .field("basic", &(bits & curl_sys::CURLAUTH_BASIC != 0)) + .field("digest", &(bits & curl_sys::CURLAUTH_DIGEST != 0)) + .field("digest_ie", &(bits & curl_sys::CURLAUTH_DIGEST_IE != 0)) + .field("gssnegotiate", &(bits & curl_sys::CURLAUTH_GSSNEGOTIATE != 0)) + .field("ntlm", &(bits & curl_sys::CURLAUTH_NTLM != 0)) + .field("ntlm_wb", &(bits & curl_sys::CURLAUTH_NTLM_WB != 0)) + .finish() + } +} + +impl SslOpt { + /// Creates a new set of SSL options. + pub fn new() -> SslOpt { + SslOpt { bits: 0 } + } + + /// Tells libcurl to disable certificate revocation checks for those SSL + /// backends where such behavior is present. 
+ /// + /// Currently this option is only supported for WinSSL (the native Windows + /// SSL library), with an exception in the case of Windows' Untrusted + /// Publishers blacklist which it seems can't be bypassed. This option may + /// have broader support to accommodate other SSL backends in the future. + /// https://curl.haxx.se/docs/ssl-compared.html + pub fn no_revoke(&mut self, on: bool) -> &mut SslOpt { + self.flag(curl_sys::CURLSSLOPT_NO_REVOKE, on) + } + + /// Tells libcurl to not attempt to use any workarounds for a security flaw + /// in the SSL3 and TLS1.0 protocols. + /// + /// If this option isn't used or this bit is set to 0, the SSL layer libcurl + /// uses may use a work-around for this flaw although it might cause + /// interoperability problems with some (older) SSL implementations. + /// + /// > WARNING: avoiding this work-around lessens the security, and by + /// > setting this option to 1 you ask for exactly that. This option is only + /// > supported for DarwinSSL, NSS and OpenSSL. + pub fn allow_beast(&mut self, on: bool) -> &mut SslOpt { + self.flag(curl_sys::CURLSSLOPT_ALLOW_BEAST, on) + } + + fn flag(&mut self, bit: c_long, on: bool) -> &mut SslOpt { + if on { + self.bits |= bit as c_long; + } else { + self.bits &= !bit as c_long; + } + self + } +} + +impl fmt::Debug for SslOpt { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_struct("SslOpt") + .field("no_revoke", &(self.bits & curl_sys::CURLSSLOPT_NO_REVOKE != 0)) + .field("allow_beast", &(self.bits & curl_sys::CURLSSLOPT_ALLOW_BEAST != 0)) + .finish() + } +} diff --git a/curl/src/easy/list.rs b/curl/src/easy/list.rs new file mode 100644 index 000000000..732d00639 --- /dev/null +++ b/curl/src/easy/list.rs @@ -0,0 +1,99 @@ +use std::ffi::{CStr, CString}; +use std::fmt; + +use curl_sys; +use Error; + +/// A linked list of a strings +pub struct List { + raw: *mut curl_sys::curl_slist, +} + +/// An iterator over `List` +#[derive(Clone)] +pub struct Iter<'a> { + _me: &'a List, + cur: *mut curl_sys::curl_slist, +} + +pub fn raw(list: &List) -> *mut curl_sys::curl_slist { + list.raw +} + +pub unsafe fn from_raw(raw: *mut curl_sys::curl_slist) -> List { + List { raw: raw } +} + +unsafe impl Send for List {} + +impl List { + /// Creates a new empty list of strings. + pub fn new() -> List { + List { raw: 0 as *mut _ } + } + + /// Appends some data into this list. + pub fn append(&mut self, data: &str) -> Result<(), Error> { + let data = try!(CString::new(data)); + unsafe { + let raw = curl_sys::curl_slist_append(self.raw, data.as_ptr()); + assert!(!raw.is_null()); + self.raw = raw; + Ok(()) + } + } + + /// Returns an iterator over the nodes in this list. 
+ pub fn iter(&self) -> Iter { + Iter { _me: self, cur: self.raw } + } +} + +impl fmt::Debug for List { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_list() + .entries(self.iter().map(String::from_utf8_lossy)) + .finish() + } +} + +impl<'a> IntoIterator for &'a List { + type IntoIter = Iter<'a>; + type Item = &'a [u8]; + + fn into_iter(self) -> Iter<'a> { + self.iter() + } +} + +impl Drop for List { + fn drop(&mut self) { + unsafe { + curl_sys::curl_slist_free_all(self.raw) + } + } +} + +impl<'a> Iterator for Iter<'a> { + type Item = &'a [u8]; + + fn next(&mut self) -> Option<&'a [u8]> { + if self.cur.is_null() { + return None + } + + unsafe { + let ret = Some(CStr::from_ptr((*self.cur).data).to_bytes()); + self.cur = (*self.cur).next; + return ret + } + } +} + +impl<'a> fmt::Debug for Iter<'a> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_list() + .entries(self.clone().map(String::from_utf8_lossy)) + .finish() + } +} diff --git a/curl/src/easy/mod.rs b/curl/src/easy/mod.rs new file mode 100644 index 000000000..da8db0037 --- /dev/null +++ b/curl/src/easy/mod.rs @@ -0,0 +1,22 @@ +//! Bindings to the "easy" libcurl API. +//! +//! This module contains some simple types like `Easy` and `List` which are just +//! wrappers around the corresponding libcurl types. There's also a few enums +//! scattered about for various options here and there. +//! +//! Most simple usage of libcurl will likely use the `Easy` structure here, and +//! you can find more docs about its usage on that struct. + +mod list; +mod form; +mod handle; +mod handler; +mod windows; + +pub use self::list::{List, Iter}; +pub use self::form::{Form, Part}; +pub use self::handle::{Easy, Transfer}; +pub use self::handler::{Easy2, Handler}; +pub use self::handler::{InfoType, SeekResult, ReadError, WriteError}; +pub use self::handler::{TimeCondition, IpResolve, HttpVersion, SslVersion}; +pub use self::handler::{SslOpt, NetRc, Auth, ProxyType}; diff --git a/curl/src/easy/windows.rs b/curl/src/easy/windows.rs new file mode 100644 index 000000000..44e2f9ccb --- /dev/null +++ b/curl/src/easy/windows.rs @@ -0,0 +1,136 @@ +#![allow(non_camel_case_types, non_snake_case)] + +use libc::c_void; + +#[cfg(target_env = "msvc")] +mod win { + use std::ffi::CString; + use std::mem; + use std::ptr; + use schannel::cert_context::ValidUses; + use schannel::cert_store::CertStore; + use winapi::{self, c_void, c_uchar, c_long, c_int}; + use winapi::um::libloaderapi::{GetModuleHandleW, GetProcAddress}; + + fn lookup(module: &str, symbol: &str) -> Option<*const c_void> { + unsafe { + let symbol = CString::new(symbol).unwrap(); + let mut mod_buf: Vec = module.encode_utf16().collect(); + mod_buf.push(0); + let handle = GetModuleHandleW(mod_buf.as_mut_ptr()); + let n = GetProcAddress(handle, symbol.as_ptr()); + if n == ptr::null() { + None + } else { + Some(n) + } + } + } + + pub enum X509_STORE {} + pub enum X509 {} + pub enum SSL_CTX {} + + type d2i_X509_fn = unsafe extern "C" fn( + a: *mut *mut X509, + pp: *mut *const c_uchar, + length: c_long, + ) -> *mut X509; + type X509_free_fn = unsafe extern "C" fn(x: *mut X509); + type X509_STORE_add_cert_fn = unsafe extern "C" fn(store: *mut X509_STORE, x: *mut X509) + -> c_int; + type SSL_CTX_get_cert_store_fn = unsafe extern "C" fn(ctx: *const SSL_CTX) + -> *mut X509_STORE; + + struct OpenSSL { + d2i_X509: d2i_X509_fn, + X509_free: X509_free_fn, + X509_STORE_add_cert: X509_STORE_add_cert_fn, + SSL_CTX_get_cert_store: SSL_CTX_get_cert_store_fn, + } + + unsafe fn 
lookup_functions(crypto_module: &str, ssl_module: &str) + -> Option + { + macro_rules! get { + ($(let $sym:ident in $module:expr;)*) => ($( + let $sym = match lookup($module, stringify!($sym)) { + Some(p) => p, + None => return None, + }; + )*) + } + get! { + let d2i_X509 in crypto_module; + let X509_free in crypto_module; + let X509_STORE_add_cert in crypto_module; + let SSL_CTX_get_cert_store in ssl_module; + } + Some(OpenSSL { + d2i_X509: mem::transmute(d2i_X509), + X509_free: mem::transmute(X509_free), + X509_STORE_add_cert: mem::transmute(X509_STORE_add_cert), + SSL_CTX_get_cert_store: mem::transmute(SSL_CTX_get_cert_store), + }) + } + + pub unsafe fn add_certs_to_context(ssl_ctx: *mut c_void) { + // check the runtime version of OpenSSL + let openssl = match ::version::Version::get().ssl_version() { + Some(ssl_ver) if ssl_ver.starts_with("OpenSSL/1.1.0") => { + lookup_functions("libcrypto", "libssl") + } + Some(ssl_ver) if ssl_ver.starts_with("OpenSSL/1.0.2") => { + lookup_functions("libeay32", "ssleay32") + } + _ => return, + }; + let openssl = match openssl { + Some(s) => s, + None => return, + }; + + let openssl_store = (openssl.SSL_CTX_get_cert_store)(ssl_ctx as *const SSL_CTX); + let mut store = match CertStore::open_current_user("ROOT") { + Ok(s) => s, + Err(_) => return, + }; + + for cert in store.certs() { + let valid_uses = match cert.valid_uses() { + Ok(v) => v, + Err(_) => continue, + }; + + // check the extended key usage for the "Server Authentication" OID + match valid_uses { + ValidUses::All => {} + ValidUses::Oids(ref oids) => { + let oid = winapi::wincrypt::szOID_PKIX_KP_SERVER_AUTH.to_owned(); + if !oids.contains(&oid) { + continue + } + } + } + + let der = cert.to_der(); + let x509 = (openssl.d2i_X509)(ptr::null_mut(), + &mut der.as_ptr(), + der.len() as c_long); + if !x509.is_null() { + (openssl.X509_STORE_add_cert)(openssl_store, x509); + (openssl.X509_free)(x509); + } + } + } +} + +#[cfg(target_env = "msvc")] +pub fn add_certs_to_context(ssl_ctx: *mut c_void) { + unsafe { + win::add_certs_to_context(ssl_ctx as *mut _); + } +} + +#[cfg(not(target_env = "msvc"))] +pub fn add_certs_to_context(_: *mut c_void) {} diff --git a/curl/src/error.rs b/curl/src/error.rs new file mode 100644 index 000000000..f3b7c6f01 --- /dev/null +++ b/curl/src/error.rs @@ -0,0 +1,600 @@ +use std::error; +use std::ffi::{self, CStr}; +use std::fmt; +use std::str; +use std::io; + +use curl_sys; + +/// An error returned from various "easy" operations. +/// +/// This structure wraps a `CURLcode`. +#[derive(Clone, PartialEq)] +pub struct Error { + code: curl_sys::CURLcode, + extra: Option>, +} + +impl Error { + /// Creates a new error from the underlying code returned by libcurl. + pub fn new(code: curl_sys::CURLcode) -> Error { + Error { + code: code, + extra: None, + } + } + + /// Stores some extra information about this error inside this error. + /// + /// This is typically used with `take_error_buf` on the easy handles to + /// couple the extra `CURLOPT_ERRORBUFFER` information with an `Error` being + /// returned. + pub fn set_extra(&mut self, extra: String) { + self.extra = Some(extra.into()); + } + + /// Returns whether this error corresponds to CURLE_UNSUPPORTED_PROTOCOL. + pub fn is_unsupported_protocol(&self) -> bool { + self.code == curl_sys::CURLE_UNSUPPORTED_PROTOCOL + } + + /// Returns whether this error corresponds to CURLE_FAILED_INIT. 
+ pub fn is_failed_init(&self) -> bool { + self.code == curl_sys::CURLE_FAILED_INIT + } + + /// Returns whether this error corresponds to CURLE_URL_MALFORMAT. + pub fn is_url_malformed(&self) -> bool { + self.code == curl_sys::CURLE_URL_MALFORMAT + } + + // /// Returns whether this error corresponds to CURLE_NOT_BUILT_IN. + // pub fn is_not_built_in(&self) -> bool { + // self.code == curl_sys::CURLE_NOT_BUILT_IN + // } + + /// Returns whether this error corresponds to CURLE_COULDNT_RESOLVE_PROXY. + pub fn is_couldnt_resolve_proxy(&self) -> bool { + self.code == curl_sys::CURLE_COULDNT_RESOLVE_PROXY + } + + /// Returns whether this error corresponds to CURLE_COULDNT_RESOLVE_HOST. + pub fn is_couldnt_resolve_host(&self) -> bool { + self.code == curl_sys::CURLE_COULDNT_RESOLVE_HOST + } + + /// Returns whether this error corresponds to CURLE_COULDNT_CONNECT. + pub fn is_couldnt_connect(&self) -> bool { + self.code == curl_sys::CURLE_COULDNT_CONNECT + } + + /// Returns whether this error corresponds to CURLE_REMOTE_ACCESS_DENIED. + pub fn is_remote_access_denied(&self) -> bool { + self.code == curl_sys::CURLE_REMOTE_ACCESS_DENIED + } + + /// Returns whether this error corresponds to CURLE_PARTIAL_FILE. + pub fn is_partial_file(&self) -> bool { + self.code == curl_sys::CURLE_PARTIAL_FILE + } + + /// Returns whether this error corresponds to CURLE_QUOTE_ERROR. + pub fn is_quote_error(&self) -> bool { + self.code == curl_sys::CURLE_QUOTE_ERROR + } + + /// Returns whether this error corresponds to CURLE_HTTP_RETURNED_ERROR. + pub fn is_http_returned_error(&self) -> bool { + self.code == curl_sys::CURLE_HTTP_RETURNED_ERROR + } + + /// Returns whether this error corresponds to CURLE_READ_ERROR. + pub fn is_read_error(&self) -> bool { + self.code == curl_sys::CURLE_READ_ERROR + } + + /// Returns whether this error corresponds to CURLE_WRITE_ERROR. + pub fn is_write_error(&self) -> bool { + self.code == curl_sys::CURLE_WRITE_ERROR + } + + /// Returns whether this error corresponds to CURLE_UPLOAD_FAILED. + pub fn is_upload_failed(&self) -> bool { + self.code == curl_sys::CURLE_UPLOAD_FAILED + } + + /// Returns whether this error corresponds to CURLE_OUT_OF_MEMORY. + pub fn is_out_of_memory(&self) -> bool { + self.code == curl_sys::CURLE_OUT_OF_MEMORY + } + + /// Returns whether this error corresponds to CURLE_OPERATION_TIMEDOUT. + pub fn is_operation_timedout(&self) -> bool { + self.code == curl_sys::CURLE_OPERATION_TIMEDOUT + } + + /// Returns whether this error corresponds to CURLE_RANGE_ERROR. + pub fn is_range_error(&self) -> bool { + self.code == curl_sys::CURLE_RANGE_ERROR + } + + /// Returns whether this error corresponds to CURLE_HTTP_POST_ERROR. + pub fn is_http_post_error(&self) -> bool { + self.code == curl_sys::CURLE_HTTP_POST_ERROR + } + + /// Returns whether this error corresponds to CURLE_SSL_CONNECT_ERROR. + pub fn is_ssl_connect_error(&self) -> bool { + self.code == curl_sys::CURLE_SSL_CONNECT_ERROR + } + + /// Returns whether this error corresponds to CURLE_BAD_DOWNLOAD_RESUME. + pub fn is_bad_download_resume(&self) -> bool { + self.code == curl_sys::CURLE_BAD_DOWNLOAD_RESUME + } + + /// Returns whether this error corresponds to CURLE_FILE_COULDNT_READ_FILE. + pub fn is_file_couldnt_read_file(&self) -> bool { + self.code == curl_sys::CURLE_FILE_COULDNT_READ_FILE + } + + /// Returns whether this error corresponds to CURLE_FUNCTION_NOT_FOUND. 
+ pub fn is_function_not_found(&self) -> bool { + self.code == curl_sys::CURLE_FUNCTION_NOT_FOUND + } + + /// Returns whether this error corresponds to CURLE_ABORTED_BY_CALLBACK. + pub fn is_aborted_by_callback(&self) -> bool { + self.code == curl_sys::CURLE_ABORTED_BY_CALLBACK + } + + /// Returns whether this error corresponds to CURLE_BAD_FUNCTION_ARGUMENT. + pub fn is_bad_function_argument(&self) -> bool { + self.code == curl_sys::CURLE_BAD_FUNCTION_ARGUMENT + } + + /// Returns whether this error corresponds to CURLE_INTERFACE_FAILED. + pub fn is_interface_failed(&self) -> bool { + self.code == curl_sys::CURLE_INTERFACE_FAILED + } + + /// Returns whether this error corresponds to CURLE_TOO_MANY_REDIRECTS. + pub fn is_too_many_redirects(&self) -> bool { + self.code == curl_sys::CURLE_TOO_MANY_REDIRECTS + } + + /// Returns whether this error corresponds to CURLE_UNKNOWN_OPTION. + pub fn is_unknown_option(&self) -> bool { + self.code == curl_sys::CURLE_UNKNOWN_OPTION + } + + /// Returns whether this error corresponds to CURLE_PEER_FAILED_VERIFICATION. + pub fn is_peer_failed_verification(&self) -> bool { + self.code == curl_sys::CURLE_PEER_FAILED_VERIFICATION + } + + /// Returns whether this error corresponds to CURLE_GOT_NOTHING. + pub fn is_got_nothing(&self) -> bool { + self.code == curl_sys::CURLE_GOT_NOTHING + } + + /// Returns whether this error corresponds to CURLE_SSL_ENGINE_NOTFOUND. + pub fn is_ssl_engine_notfound(&self) -> bool { + self.code == curl_sys::CURLE_SSL_ENGINE_NOTFOUND + } + + /// Returns whether this error corresponds to CURLE_SSL_ENGINE_SETFAILED. + pub fn is_ssl_engine_setfailed(&self) -> bool { + self.code == curl_sys::CURLE_SSL_ENGINE_SETFAILED + } + + /// Returns whether this error corresponds to CURLE_SEND_ERROR. + pub fn is_send_error(&self) -> bool { + self.code == curl_sys::CURLE_SEND_ERROR + } + + /// Returns whether this error corresponds to CURLE_RECV_ERROR. + pub fn is_recv_error(&self) -> bool { + self.code == curl_sys::CURLE_RECV_ERROR + } + + /// Returns whether this error corresponds to CURLE_SSL_CERTPROBLEM. + pub fn is_ssl_certproblem(&self) -> bool { + self.code == curl_sys::CURLE_SSL_CERTPROBLEM + } + + /// Returns whether this error corresponds to CURLE_SSL_CIPHER. + pub fn is_ssl_cipher(&self) -> bool { + self.code == curl_sys::CURLE_SSL_CIPHER + } + + /// Returns whether this error corresponds to CURLE_SSL_CACERT. + pub fn is_ssl_cacert(&self) -> bool { + self.code == curl_sys::CURLE_SSL_CACERT + } + + /// Returns whether this error corresponds to CURLE_BAD_CONTENT_ENCODING. + pub fn is_bad_content_encoding(&self) -> bool { + self.code == curl_sys::CURLE_BAD_CONTENT_ENCODING + } + + /// Returns whether this error corresponds to CURLE_FILESIZE_EXCEEDED. + pub fn is_filesize_exceeded(&self) -> bool { + self.code == curl_sys::CURLE_FILESIZE_EXCEEDED + } + + /// Returns whether this error corresponds to CURLE_USE_SSL_FAILED. + pub fn is_use_ssl_failed(&self) -> bool { + self.code == curl_sys::CURLE_USE_SSL_FAILED + } + + /// Returns whether this error corresponds to CURLE_SEND_FAIL_REWIND. + pub fn is_send_fail_rewind(&self) -> bool { + self.code == curl_sys::CURLE_SEND_FAIL_REWIND + } + + /// Returns whether this error corresponds to CURLE_SSL_ENGINE_INITFAILED. + pub fn is_ssl_engine_initfailed(&self) -> bool { + self.code == curl_sys::CURLE_SSL_ENGINE_INITFAILED + } + + /// Returns whether this error corresponds to CURLE_LOGIN_DENIED. 
+ pub fn is_login_denied(&self) -> bool { + self.code == curl_sys::CURLE_LOGIN_DENIED + } + + /// Returns whether this error corresponds to CURLE_CONV_FAILED. + pub fn is_conv_failed(&self) -> bool { + self.code == curl_sys::CURLE_CONV_FAILED + } + + /// Returns whether this error corresponds to CURLE_CONV_REQD. + pub fn is_conv_required(&self) -> bool { + self.code == curl_sys::CURLE_CONV_REQD + } + + /// Returns whether this error corresponds to CURLE_SSL_CACERT_BADFILE. + pub fn is_ssl_cacert_badfile(&self) -> bool { + self.code == curl_sys::CURLE_SSL_CACERT_BADFILE + } + + /// Returns whether this error corresponds to CURLE_SSL_CRL_BADFILE. + pub fn is_ssl_crl_badfile(&self) -> bool { + self.code == curl_sys::CURLE_SSL_CRL_BADFILE + } + + /// Returns whether this error corresponds to CURLE_SSL_SHUTDOWN_FAILED. + pub fn is_ssl_shutdown_failed(&self) -> bool { + self.code == curl_sys::CURLE_SSL_SHUTDOWN_FAILED + } + + /// Returns whether this error corresponds to CURLE_AGAIN. + pub fn is_again(&self) -> bool { + self.code == curl_sys::CURLE_AGAIN + } + + /// Returns whether this error corresponds to CURLE_SSL_ISSUER_ERROR. + pub fn is_ssl_issuer_error(&self) -> bool { + self.code == curl_sys::CURLE_SSL_ISSUER_ERROR + } + + /// Returns whether this error corresponds to CURLE_CHUNK_FAILED. + pub fn is_chunk_failed(&self) -> bool { + self.code == curl_sys::CURLE_CHUNK_FAILED + } + + // /// Returns whether this error corresponds to CURLE_NO_CONNECTION_AVAILABLE. + // pub fn is_no_connection_available(&self) -> bool { + // self.code == curl_sys::CURLE_NO_CONNECTION_AVAILABLE + // } + + /// Returns the value of the underlying error corresponding to libcurl. + pub fn code(&self) -> curl_sys::CURLcode { + self.code + } + + /// Returns the extra description of this error, if any is available. + pub fn extra_description(&self) -> Option<&str> { + self.extra.as_ref().map(|s| &**s) + } +} + +impl fmt::Display for Error { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let desc = error::Error::description(self); + match self.extra { + Some(ref s) => write!(f, "[{}] {} ({})", self.code(), desc, s), + None => write!(f, "[{}] {}", self.code(), desc), + } + } +} + +impl fmt::Debug for Error { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_struct("Error") + .field("description", &error::Error::description(self)) + .field("code", &self.code) + .field("extra", &self.extra) + .finish() + } +} + +impl error::Error for Error { + fn description(&self) -> &str { + unsafe { + let s = curl_sys::curl_easy_strerror(self.code); + assert!(!s.is_null()); + str::from_utf8(CStr::from_ptr(s).to_bytes()).unwrap() + } + } +} + +/// An error returned from "share" operations. +/// +/// This structure wraps a `CURLSHcode`. +#[derive(Clone, PartialEq)] +pub struct ShareError { + code: curl_sys::CURLSHcode, +} + +impl ShareError { + /// Creates a new error from the underlying code returned by libcurl. + pub fn new(code: curl_sys::CURLSHcode) -> ShareError { + ShareError { code: code } + } + + /// Returns whether this error corresponds to CURLSHE_BAD_OPTION. + pub fn is_bad_option(&self) -> bool { + self.code == curl_sys::CURLSHE_BAD_OPTION + } + + /// Returns whether this error corresponds to CURLSHE_IN_USE. + pub fn is_in_use(&self) -> bool { + self.code == curl_sys::CURLSHE_IN_USE + } + + /// Returns whether this error corresponds to CURLSHE_INVALID. + pub fn is_invalid(&self) -> bool { + self.code == curl_sys::CURLSHE_INVALID + } + + /// Returns whether this error corresponds to CURLSHE_NOMEM. 
+ pub fn is_nomem(&self) -> bool { + self.code == curl_sys::CURLSHE_NOMEM + } + + // /// Returns whether this error corresponds to CURLSHE_NOT_BUILT_IN. + // pub fn is_not_built_in(&self) -> bool { + // self.code == curl_sys::CURLSHE_NOT_BUILT_IN + // } + + /// Returns the value of the underlying error corresponding to libcurl. + pub fn code(&self) -> curl_sys::CURLSHcode { + self.code + } +} + +impl fmt::Display for ShareError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + error::Error::description(self).fmt(f) + } +} + +impl fmt::Debug for ShareError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "ShareError {{ description: {:?}, code: {} }}", + error::Error::description(self), + self.code) + } +} + +impl error::Error for ShareError { + fn description(&self) -> &str { + unsafe { + let s = curl_sys::curl_share_strerror(self.code); + assert!(!s.is_null()); + str::from_utf8(CStr::from_ptr(s).to_bytes()).unwrap() + } + } +} + +/// An error from "multi" operations. +/// +/// THis structure wraps a `CURLMcode`. +#[derive(Clone, PartialEq)] +pub struct MultiError { + code: curl_sys::CURLMcode, +} + +impl MultiError { + /// Creates a new error from the underlying code returned by libcurl. + pub fn new(code: curl_sys::CURLMcode) -> MultiError { + MultiError { code: code } + } + + /// Returns whether this error corresponds to CURLM_BAD_HANDLE. + pub fn is_bad_handle(&self) -> bool { + self.code == curl_sys::CURLM_BAD_HANDLE + } + + /// Returns whether this error corresponds to CURLM_BAD_EASY_HANDLE. + pub fn is_bad_easy_handle(&self) -> bool { + self.code == curl_sys::CURLM_BAD_EASY_HANDLE + } + + /// Returns whether this error corresponds to CURLM_OUT_OF_MEMORY. + pub fn is_out_of_memory(&self) -> bool { + self.code == curl_sys::CURLM_OUT_OF_MEMORY + } + + /// Returns whether this error corresponds to CURLM_INTERNAL_ERROR. + pub fn is_internal_error(&self) -> bool { + self.code == curl_sys::CURLM_INTERNAL_ERROR + } + + /// Returns whether this error corresponds to CURLM_BAD_SOCKET. + pub fn is_bad_socket(&self) -> bool { + self.code == curl_sys::CURLM_BAD_SOCKET + } + + /// Returns whether this error corresponds to CURLM_UNKNOWN_OPTION. + pub fn is_unknown_option(&self) -> bool { + self.code == curl_sys::CURLM_UNKNOWN_OPTION + } + + /// Returns whether this error corresponds to CURLM_CALL_MULTI_PERFORM. + pub fn is_call_perform(&self) -> bool { + self.code == curl_sys::CURLM_CALL_MULTI_PERFORM + } + + // /// Returns whether this error corresponds to CURLM_ADDED_ALREADY. + // pub fn is_added_already(&self) -> bool { + // self.code == curl_sys::CURLM_ADDED_ALREADY + // } + + /// Returns the value of the underlying error corresponding to libcurl. + pub fn code(&self) -> curl_sys::CURLMcode { + self.code + } +} + +impl fmt::Display for MultiError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + error::Error::description(self).fmt(f) + } +} + +impl fmt::Debug for MultiError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "MultiError {{ description: {:?}, code: {} }}", + error::Error::description(self), + self.code) + } +} + +impl error::Error for MultiError { + fn description(&self) -> &str { + unsafe { + let s = curl_sys::curl_multi_strerror(self.code); + assert!(!s.is_null()); + str::from_utf8(CStr::from_ptr(s).to_bytes()).unwrap() + } + } +} + + +/// An error from "form add" operations. +/// +/// THis structure wraps a `CURLFORMcode`. 
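// Illustrative sketch, not part of the vendored source: each error type in this
// module also converts into `std::io::Error` (see the `From` impls at the end of
// the file), so callers that already return `io::Result` can propagate curl
// failures with `?`. The function name `fetch` and the URL are hypothetical.
//
//     use std::io;
//     use curl::easy::Easy;
//
//     fn fetch(url: &str) -> io::Result<()> {
//         let mut easy = Easy::new();
//         easy.url(url)?;      // curl::Error -> io::Error via the From impl
//         easy.perform()?;
//         Ok(())
//     }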
+#[derive(Clone, PartialEq)] +pub struct FormError { + code: curl_sys::CURLFORMcode, +} + +impl FormError { + /// Creates a new error from the underlying code returned by libcurl. + pub fn new(code: curl_sys::CURLFORMcode) -> FormError { + FormError { code: code } + } + + /// Returns whether this error corresponds to CURL_FORMADD_MEMORY. + pub fn is_memory(&self) -> bool { + self.code == curl_sys::CURL_FORMADD_MEMORY + } + + /// Returns whether this error corresponds to CURL_FORMADD_OPTION_TWICE. + pub fn is_option_twice(&self) -> bool { + self.code == curl_sys::CURL_FORMADD_OPTION_TWICE + } + + /// Returns whether this error corresponds to CURL_FORMADD_NULL. + pub fn is_null(&self) -> bool { + self.code == curl_sys::CURL_FORMADD_NULL + } + + /// Returns whether this error corresponds to CURL_FORMADD_UNKNOWN_OPTION. + pub fn is_unknown_option(&self) -> bool { + self.code == curl_sys::CURL_FORMADD_UNKNOWN_OPTION + } + + /// Returns whether this error corresponds to CURL_FORMADD_INCOMPLETE. + pub fn is_incomplete(&self) -> bool { + self.code == curl_sys::CURL_FORMADD_INCOMPLETE + } + + /// Returns whether this error corresponds to CURL_FORMADD_ILLEGAL_ARRAY. + pub fn is_illegal_array(&self) -> bool { + self.code == curl_sys::CURL_FORMADD_ILLEGAL_ARRAY + } + + /// Returns whether this error corresponds to CURL_FORMADD_DISABLED. + pub fn is_disabled(&self) -> bool { + self.code == curl_sys::CURL_FORMADD_DISABLED + } + + /// Returns the value of the underlying error corresponding to libcurl. + pub fn code(&self) -> curl_sys::CURLFORMcode { + self.code + } +} + +impl fmt::Display for FormError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + error::Error::description(self).fmt(f) + } +} + +impl fmt::Debug for FormError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "FormError {{ description: {:?}, code: {} }}", + error::Error::description(self), + self.code) + } +} + +impl error::Error for FormError { + fn description(&self) -> &str { + match self.code { + curl_sys::CURL_FORMADD_MEMORY => "allocation failure", + curl_sys::CURL_FORMADD_OPTION_TWICE => "one option passed twice", + curl_sys::CURL_FORMADD_NULL => "null pointer given for string", + curl_sys::CURL_FORMADD_UNKNOWN_OPTION => "unknown option", + curl_sys::CURL_FORMADD_INCOMPLETE => "form information not complete", + curl_sys::CURL_FORMADD_ILLEGAL_ARRAY => "illegal array in option", + curl_sys::CURL_FORMADD_DISABLED => { + "libcurl does not have support for this option compiled in" + } + _ => "unknown form error", + } + } +} + +impl From for Error { + fn from(_: ffi::NulError) -> Error { + Error { code: curl_sys::CURLE_CONV_FAILED, extra: None } + } +} + +impl From for io::Error { + fn from(e: Error) -> io::Error { + io::Error::new(io::ErrorKind::Other, e) + } +} + +impl From for io::Error { + fn from(e: ShareError) -> io::Error { + io::Error::new(io::ErrorKind::Other, e) + } +} + +impl From for io::Error { + fn from(e: MultiError) -> io::Error { + io::Error::new(io::ErrorKind::Other, e) + } +} + +impl From for io::Error { + fn from(e: FormError) -> io::Error { + io::Error::new(io::ErrorKind::Other, e) + } +} diff --git a/curl/src/lib.rs b/curl/src/lib.rs new file mode 100644 index 000000000..bf2829db6 --- /dev/null +++ b/curl/src/lib.rs @@ -0,0 +1,127 @@ +//! Rust bindings to the libcurl C library +//! +//! This crate contains bindings for an HTTP/HTTPS client which is powered by +//! [libcurl], the same library behind the `curl` command line tool. The API +//! 
currently closely matches that of libcurl itself, except that a Rustic layer +//! of safety is applied on top. +//! +//! [libcurl]: https://curl.haxx.se/libcurl/ +//! +//! # The "Easy" API +//! +//! The easiest way to send a request is to use the `Easy` api which corresponds +//! to `CURL` in libcurl. This handle supports a wide variety of options and can +//! be used to make a single blocking request in a thread. Callbacks can be +//! specified to deal with data as it arrives and a handle can be reused to +//! cache connections and such. +//! +//! ```rust,no_run +//! use std::io::{stdout, Write}; +//! +//! use curl::easy::Easy; +//! +//! // Write the contents of rust-lang.org to stdout +//! let mut easy = Easy::new(); +//! easy.url("https://www.rust-lang.org/").unwrap(); +//! easy.write_function(|data| { +//! stdout().write_all(data).unwrap(); +//! Ok(data.len()) +//! }).unwrap(); +//! easy.perform().unwrap(); +//! ``` +//! +//! # What about multiple concurrent HTTP requests? +//! +//! One option you have currently is to send multiple requests in multiple +//! threads, but otherwise libcurl has a "multi" interface for doing this +//! operation. Initial bindings of this interface can be found in the `multi` +//! module, but feedback is welcome! +//! +//! # Where does libcurl come from? +//! +//! This crate links to the `curl-sys` crate which is in turn responsible for +//! acquiring and linking to the libcurl library. Currently this crate will +//! build libcurl from source if one is not already detected on the system. +//! +//! There is a large number of releases for libcurl, all with different sets of +//! capabilities. Robust programs may wish to inspect `Version::get()` to test +//! what features are implemented in the linked build of libcurl at runtime. + +#![deny(missing_docs, missing_debug_implementations)] +#![doc(html_root_url = "https://docs.rs/curl/0.4")] + +extern crate curl_sys; +extern crate libc; +extern crate socket2; + +#[cfg(all(unix, not(target_os = "macos")))] +extern crate openssl_sys; +#[cfg(all(unix, not(target_os = "macos")))] +extern crate openssl_probe; +#[cfg(windows)] +extern crate winapi; + +#[cfg(target_env = "msvc")] +extern crate schannel; + +use std::ffi::CStr; +use std::str; +use std::sync::{Once, ONCE_INIT}; + +pub use error::{Error, ShareError, MultiError, FormError}; +mod error; + +pub use version::{Version, Protocols}; +mod version; + +mod panic; +pub mod easy; +pub mod multi; + +/// Initializes the underlying libcurl library. +/// +/// It's not required to call this before the library is used, but it's +/// recommended to do so as soon as the program starts. +pub fn init() { + static INIT: Once = ONCE_INIT; + INIT.call_once(|| { + platform_init(); + unsafe { + assert_eq!(curl_sys::curl_global_init(curl_sys::CURL_GLOBAL_ALL), 0); + } + + // Note that we explicitly don't schedule a call to + // `curl_global_cleanup`. The documentation for that function says + // + // > You must not call it when any other thread in the program (i.e. a + // > thread sharing the same memory) is running. This doesn't just mean + // > no other thread that is using libcurl. + // + // We can't ever be sure of that, so unfortunately we can't call the + // function. 
+ }); + + #[cfg(all(unix, not(target_os = "macos")))] + fn platform_init() { + openssl_sys::init(); + } + + #[cfg(not(all(unix, not(target_os = "macos"))))] + fn platform_init() {} +} + +unsafe fn opt_str<'a>(ptr: *const libc::c_char) -> Option<&'a str> { + if ptr.is_null() { + None + } else { + Some(str::from_utf8(CStr::from_ptr(ptr).to_bytes()).unwrap()) + } +} + +fn cvt(r: curl_sys::CURLcode) -> Result<(), Error> { + if r == curl_sys::CURLE_OK { + Ok(()) + } else { + Err(Error::new(r)) + } +} diff --git a/curl/src/multi.rs b/curl/src/multi.rs new file mode 100644 index 000000000..fc95d1f01 --- /dev/null +++ b/curl/src/multi.rs @@ -0,0 +1,1069 @@ +//! Multi - initiating multiple requests simultaneously + +use std::fmt; +use std::marker; +use std::time::Duration; + +use libc::{c_int, c_char, c_void, c_long, c_short}; +use curl_sys; + +#[cfg(windows)] +use winapi::um::winsock2::fd_set; +#[cfg(unix)] +use libc::{fd_set, pollfd, POLLIN, POLLPRI, POLLOUT}; + +use {MultiError, Error}; +use easy::{Easy, Easy2}; +use panic; + +/// A multi handle for initiating multiple connections simultaneously. +/// +/// This structure corresponds to `CURLM` in libcurl and provides the ability to +/// have multiple transfers in flight simultaneously. This handle is then used +/// to manage each transfer. The main purpose of a `CURLM` is for the +/// *application* to drive the I/O rather than libcurl itself doing all the +/// blocking. Methods like `action` allow the application to inform libcurl of +/// when events have happened. +/// +/// Lots more documentation can be found on the libcurl [multi tutorial] where +/// the APIs correspond pretty closely with this crate. +/// +/// [multi tutorial]: https://curl.haxx.se/libcurl/c/libcurl-multi.html +pub struct Multi { + raw: *mut curl_sys::CURLM, + data: Box, +} + +struct MultiData { + socket: Box, + timer: Box) -> bool + Send>, +} + +/// Message from the `messages` function of a multi handle. +/// +/// Currently only indicates whether a transfer is done. +pub struct Message<'multi> { + ptr: *mut curl_sys::CURLMsg, + _multi: &'multi Multi, +} + +/// Wrapper around an easy handle while it's owned by a multi handle. +/// +/// Once an easy handle has been added to a multi handle then it can no longer +/// be used via `perform`. This handle is also used to remove the easy handle +/// from the multi handle when desired. +pub struct EasyHandle { + easy: Easy, + // This is now effecitvely bound to a `Multi`, so it is no longer sendable. + _marker: marker::PhantomData<&'static Multi>, +} + +/// Wrapper around an easy handle while it's owned by a multi handle. +/// +/// Once an easy handle has been added to a multi handle then it can no longer +/// be used via `perform`. This handle is also used to remove the easy handle +/// from the multi handle when desired. +pub struct Easy2Handle { + easy: Easy2, + // This is now effecitvely bound to a `Multi`, so it is no longer sendable. + _marker: marker::PhantomData<&'static Multi>, +} + +/// Notification of the events that have happened on a socket. +/// +/// This type is passed as an argument to the `action` method on a multi handle +/// to indicate what events have occurred on a socket. +pub struct Events { + bits: c_int, +} + +/// Notification of events that are requested on a socket. +/// +/// This type is yielded to the `socket_function` callback to indicate what +/// events are requested on a socket. 
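// Illustrative sketch, not part of the vendored source: the simplest way to drive
// the types documented in this module is to add `Easy` handles to a `Multi`, pump
// it with `perform`/`wait`, and harvest per-transfer results through `messages`.
// The URLs are hypothetical placeholders.
//
//     use std::time::Duration;
//     use curl::easy::Easy;
//     use curl::multi::Multi;
//
//     let multi = Multi::new();
//     let mut handles = Vec::new();
//     for url in &["https://example.com/a", "https://example.com/b"] {
//         let mut easy = Easy::new();
//         easy.url(url).unwrap();
//         handles.push(multi.add(easy).unwrap());
//     }
//     while multi.perform().unwrap() > 0 {
//         multi.wait(&mut [], Duration::from_secs(1)).unwrap();
//     }
//     multi.messages(|msg| {
//         if let Some(result) = msg.result() {
//             println!("transfer finished: {:?}", result);
//         }
//     });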
+pub struct SocketEvents { + bits: c_int, +} + +/// Raw underlying socket type that the multi handles use +pub type Socket = curl_sys::curl_socket_t; + +/// File descriptor to wait on for use with the `wait` method on a multi handle. +pub struct WaitFd { + inner: curl_sys::curl_waitfd, +} + +impl Multi { + /// Creates a new multi session through which multiple HTTP transfers can be + /// initiated. + pub fn new() -> Multi { + unsafe { + ::init(); + let ptr = curl_sys::curl_multi_init(); + assert!(!ptr.is_null()); + Multi { + raw: ptr, + data: Box::new(MultiData { + socket: Box::new(|_, _, _| ()), + timer: Box::new(|_| true), + }), + } + } + } + + /// Set the callback informed about what to wait for + /// + /// When the `action` function runs, it informs the application about + /// updates in the socket (file descriptor) status by doing none, one, or + /// multiple calls to the socket callback. The callback gets status updates + /// with changes since the previous time the callback was called. See + /// `action` for more details on how the callback is used and should work. + /// + /// The `SocketEvents` parameter informs the callback on the status of the + /// given socket, and the methods on that type can be used to learn about + /// what's going on with the socket. + /// + /// The third `usize` parameter is a custom value set by the `assign` method + /// below. + pub fn socket_function(&mut self, f: F) -> Result<(), MultiError> + where F: FnMut(Socket, SocketEvents, usize) + Send + 'static, + { + self._socket_function(Box::new(f)) + } + + fn _socket_function(&mut self, + f: Box) + -> Result<(), MultiError> + { + self.data.socket = f; + let cb: curl_sys::curl_socket_callback = cb; + try!(self.setopt_ptr(curl_sys::CURLMOPT_SOCKETFUNCTION, + cb as usize as *const c_char)); + let ptr = &*self.data as *const _; + try!(self.setopt_ptr(curl_sys::CURLMOPT_SOCKETDATA, + ptr as *const c_char)); + return Ok(()); + + // TODO: figure out how to expose `_easy` + extern fn cb(_easy: *mut curl_sys::CURL, + socket: curl_sys::curl_socket_t, + what: c_int, + userptr: *mut c_void, + socketp: *mut c_void) -> c_int { + panic::catch(|| unsafe { + let f = &mut (*(userptr as *mut MultiData)).socket; + f(socket, SocketEvents { bits: what }, socketp as usize) + }); + 0 + } + } + + /// Set data to associate with an internal socket + /// + /// This function creates an association in the multi handle between the + /// given socket and a private token of the application. This is designed + /// for `action` uses. + /// + /// When set, the token will be passed to all future socket callbacks for + /// the specified socket. + /// + /// If the given socket isn't already in use by libcurl, this function will + /// return an error. + /// + /// libcurl only keeps one single token associated with a socket, so + /// calling this function several times for the same socket will make the + /// last set token get used. + /// + /// The idea here being that this association (socket to token) is something + /// that just about every application that uses this API will need and then + /// libcurl can just as well do it since it already has an internal hash + /// table lookup for this. + /// + /// # Typical Usage + /// + /// In a typical application you allocate a struct or at least use some kind + /// of semi-dynamic data for each socket that we must wait for action on + /// when using the `action` approach. 
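// Illustrative sketch, not part of the vendored source: registering the two
// callbacks used by the event-driven flow (`socket_function` above, and
// `timer_function` defined further below). The bodies only print what libcurl
// asks for; a real application would feed this into its poll/epoll loop.
//
//     use std::time::Duration;
//     use curl::multi::Multi;
//
//     let mut multi = Multi::new();
//     multi.socket_function(|socket, events, token| {
//         // Readiness libcurl wants us to watch for on `socket`.
//         println!("socket {:?}: input={} output={} remove={} (token {})",
//                  socket, events.input(), events.output(), events.remove(), token);
//     }).unwrap();
//     multi.timer_function(|timeout: Option<Duration>| {
//         // Re-arm (or delete, on `None`) the application's libcurl timer.
//         println!("new timeout: {:?}", timeout);
//         true
//     }).unwrap();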
+ /// + /// When our socket-callback gets called by libcurl and we get to know about + /// yet another socket to wait for, we can use `assign` to point out the + /// particular data so that when we get updates about this same socket + /// again, we don't have to find the struct associated with this socket by + /// ourselves. + pub fn assign(&self, + socket: Socket, + token: usize) -> Result<(), MultiError> { + unsafe { + try!(cvt(curl_sys::curl_multi_assign(self.raw, socket, + token as *mut _))); + Ok(()) + } + } + + /// Set callback to receive timeout values + /// + /// Certain features, such as timeouts and retries, require you to call + /// libcurl even when there is no activity on the file descriptors. + /// + /// Your callback function should install a non-repeating timer with the + /// interval specified. Each time that timer fires, call either `action` or + /// `perform`, depending on which interface you use. + /// + /// A timeout value of `None` means you should delete your timer. + /// + /// A timeout value of 0 means you should call `action` or `perform` (once) + /// as soon as possible. + /// + /// This callback will only be called when the timeout changes. + /// + /// The timer callback should return `true` on success, and `false` on + /// error. This callback can be used instead of, or in addition to, + /// `get_timeout`. + pub fn timer_function(&mut self, f: F) -> Result<(), MultiError> + where F: FnMut(Option) -> bool + Send + 'static, + { + self._timer_function(Box::new(f)) + } + + fn _timer_function(&mut self, + f: Box) -> bool + Send>) + -> Result<(), MultiError> + { + self.data.timer = f; + let cb: curl_sys::curl_multi_timer_callback = cb; + try!(self.setopt_ptr(curl_sys::CURLMOPT_TIMERFUNCTION, + cb as usize as *const c_char)); + let ptr = &*self.data as *const _; + try!(self.setopt_ptr(curl_sys::CURLMOPT_TIMERDATA, + ptr as *const c_char)); + return Ok(()); + + // TODO: figure out how to expose `_multi` + extern fn cb(_multi: *mut curl_sys::CURLM, + timeout_ms: c_long, + user: *mut c_void) -> c_int { + let keep_going = panic::catch(|| unsafe { + let f = &mut (*(user as *mut MultiData)).timer; + if timeout_ms == -1 { + f(None) + } else { + f(Some(Duration::from_millis(timeout_ms as u64))) + } + }).unwrap_or(false); + if keep_going {0} else {-1} + } + } + + /// Enable or disable HTTP pipelining and multiplexing. + /// + /// When http_1 is true, enable HTTP/1.1 pipelining, which means that if + /// you add a second request that can use an already existing connection, + /// the second request will be "piped" on the same connection rather than + /// being executed in parallel. + /// + /// When multiplex is true, enable HTTP/2 multiplexing, which means that + /// follow-up requests can re-use an existing connection and send the new + /// request multiplexed over that at the same time as other transfers are + /// already using that single connection. + pub fn pipelining(&mut self, http_1: bool, multiplex: bool) -> Result<(), MultiError> { + let bitmask = if http_1 { curl_sys::CURLPIPE_HTTP1 } else { 0 } | if multiplex { curl_sys::CURLPIPE_MULTIPLEX } else { 0 }; + self.setopt_long(curl_sys::CURLMOPT_PIPELINING, bitmask) + } + + /// Sets the max number of connections to a single host. + /// + /// Pass a long to indicate the max number of simultaneously open connections + /// to a single host (a host being the same as a host name + port number pair). + /// For each new session to a host, libcurl will open up a new connection up to the + /// limit set by the provided value. 
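// Illustrative sketch, not part of the vendored source: the connection-reuse knobs
// on `Multi` shown nearby, enabling HTTP/2 multiplexing (without HTTP/1.1
// pipelining) and capping simultaneous connections per host. The numbers are
// arbitrary examples.
//
//     let mut multi = curl::multi::Multi::new();
//     multi.pipelining(false, true).unwrap();
//     multi.set_max_host_connections(4).unwrap();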
When the limit is reached, the sessions will + /// be pending until a connection becomes available. If pipelining is enabled, + /// libcurl will try to pipeline if the host is capable of it. + pub fn set_max_host_connections(&mut self, val: usize) -> Result<(), MultiError> { + self.setopt_long(curl_sys::CURLMOPT_MAX_HOST_CONNECTIONS, val as c_long) + } + + /// Sets the pipeline length. + /// + /// This sets the max number that will be used as the maximum amount of + /// outstanding reuqests in an HTTP/1.1 pipelined connection. This option + /// is only used for HTTP/1.1 pipelining, and not HTTP/2 multiplexing. + pub fn set_pipeline_length(&mut self, val: usize) -> Result<(), MultiError> { + self.setopt_long(curl_sys::CURLMOPT_MAX_PIPELINE_LENGTH, val as c_long) + } + + fn setopt_long(&mut self, + opt: curl_sys::CURLMoption, + val: c_long) -> Result<(), MultiError> { + unsafe { + cvt(curl_sys::curl_multi_setopt(self.raw, opt, val)) + } + } + + fn setopt_ptr(&mut self, + opt: curl_sys::CURLMoption, + val: *const c_char) -> Result<(), MultiError> { + unsafe { + cvt(curl_sys::curl_multi_setopt(self.raw, opt, val)) + } + } + + /// Add an easy handle to a multi session + /// + /// Adds a standard easy handle to the multi stack. This function call will + /// make this multi handle control the specified easy handle. + /// + /// When an easy interface is added to a multi handle, it will use a shared + /// connection cache owned by the multi handle. Removing and adding new easy + /// handles will not affect the pool of connections or the ability to do + /// connection re-use. + /// + /// If you have `timer_function` set in the multi handle (and you really + /// should if you're working event-based with `action` and friends), that + /// callback will be called from within this function to ask for an updated + /// timer so that your main event loop will get the activity on this handle + /// to get started. + /// + /// The easy handle will remain added to the multi handle until you remove + /// it again with `remove` on the returned handle - even when a transfer + /// with that specific easy handle is completed. + pub fn add(&self, mut easy: Easy) -> Result { + // Clear any configuration set by previous transfers because we're + // moving this into a `Send+'static` situation now basically. + easy.transfer(); + + unsafe { + try!(cvt(curl_sys::curl_multi_add_handle(self.raw, easy.raw()))); + } + Ok(EasyHandle { + easy: easy, + _marker: marker::PhantomData, + }) + } + + /// Same as `add`, but works with the `Easy2` type. + pub fn add2(&self, easy: Easy2) -> Result, MultiError> { + unsafe { + try!(cvt(curl_sys::curl_multi_add_handle(self.raw, easy.raw()))); + } + Ok(Easy2Handle { + easy: easy, + _marker: marker::PhantomData, + }) + } + + /// Remove an easy handle from this multi session + /// + /// Removes the easy handle from this multi handle. This will make the + /// returned easy handle be removed from this multi handle's control. + /// + /// When the easy handle has been removed from a multi stack, it is again + /// perfectly legal to invoke `perform` on it. + /// + /// Removing an easy handle while being used is perfectly legal and will + /// effectively halt the transfer in progress involving that easy handle. + /// All other easy handles and transfers will remain unaffected. + pub fn remove(&self, easy: EasyHandle) -> Result { + unsafe { + try!(cvt(curl_sys::curl_multi_remove_handle(self.raw, + easy.easy.raw()))); + } + Ok(easy.easy) + } + + /// Same as `remove`, but for `Easy2Handle`. 
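// Illustrative sketch, not part of the vendored source: the add/remove lifecycle
// described above. Once its transfer is done, removing the `EasyHandle` yields the
// original `Easy` back, which may then be reused or dropped. The URL is a
// hypothetical placeholder.
//
//     use std::time::Duration;
//
//     let multi = curl::multi::Multi::new();
//     let mut easy = curl::easy::Easy::new();
//     easy.url("https://example.com/").unwrap();
//     let handle = multi.add(easy).unwrap();
//     while multi.perform().unwrap() > 0 {
//         multi.wait(&mut [], Duration::from_millis(200)).unwrap();
//     }
//     let easy = multi.remove(handle).unwrap();   // a plain `Easy` again
//     drop(easy);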
+ pub fn remove2(&self, easy: Easy2Handle) -> Result, MultiError> { + unsafe { + try!(cvt(curl_sys::curl_multi_remove_handle(self.raw, + easy.easy.raw()))); + } + Ok(easy.easy) + } + + /// Read multi stack informationals + /// + /// Ask the multi handle if there are any messages/informationals from the + /// individual transfers. Messages may include informationals such as an + /// error code from the transfer or just the fact that a transfer is + /// completed. More details on these should be written down as well. + pub fn messages(&self, mut f: F) where F: FnMut(Message) { + self._messages(&mut f) + } + + fn _messages(&self, f: &mut FnMut(Message)) { + let mut queue = 0; + unsafe { + loop { + let ptr = curl_sys::curl_multi_info_read(self.raw, &mut queue); + if ptr.is_null() { + break + } + f(Message { ptr: ptr, _multi: self }) + } + } + } + + /// Inform of reads/writes available data given an action + /// + /// When the application has detected action on a socket handled by libcurl, + /// it should call this function with the sockfd argument set to + /// the socket with the action. When the events on a socket are known, they + /// can be passed `events`. When the events on a socket are unknown, pass + /// `Events::new()` instead, and libcurl will test the descriptor + /// internally. + /// + /// The returned integer will contain the number of running easy handles + /// within the multi handle. When this number reaches zero, all transfers + /// are complete/done. When you call `action` on a specific socket and the + /// counter decreases by one, it DOES NOT necessarily mean that this exact + /// socket/transfer is the one that completed. Use `messages` to figure out + /// which easy handle that completed. + /// + /// The `action` function informs the application about updates in the + /// socket (file descriptor) status by doing none, one, or multiple calls to + /// the socket callback function set with the `socket_function` method. They + /// update the status with changes since the previous time the callback was + /// called. + pub fn action(&self, socket: Socket, events: &Events) + -> Result { + let mut remaining = 0; + unsafe { + try!(cvt(curl_sys::curl_multi_socket_action(self.raw, + socket, + events.bits, + &mut remaining))); + Ok(remaining as u32) + } + } + + /// Inform libcurl that a timeout has expired and sockets should be tested. + /// + /// The returned integer will contain the number of running easy handles + /// within the multi handle. When this number reaches zero, all transfers + /// are complete/done. When you call `action` on a specific socket and the + /// counter decreases by one, it DOES NOT necessarily mean that this exact + /// socket/transfer is the one that completed. Use `messages` to figure out + /// which easy handle that completed. + /// + /// Get the timeout time by calling the `timer_function` method. Your + /// application will then get called with information on how long to wait + /// for socket actions at most before doing the timeout action: call the + /// `timeout` method. You can also use the `get_timeout` function to + /// poll the value at any given time, but for an event-based system using + /// the callback is far better than relying on polling the timeout value. 
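// Illustrative sketch, not part of the vendored source: the two entry points of the
// event-driven flow. Call `action` when a watched socket becomes ready and
// `timeout` when the timer requested via `timer_function` fires. `multi` and
// `socket` are assumed to come from the surrounding event loop.
//
//     use curl::multi::Events;
//
//     // A socket the event loop reported as readable:
//     let mut events = Events::new();
//     events.input(true);
//     let _running = multi.action(socket, &events).unwrap();
//
//     // The libcurl timer expired:
//     let running = multi.timeout().unwrap();
//     if running == 0 {
//         multi.messages(|msg| { let _ = msg.result(); });
//     }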
+ pub fn timeout(&self) -> Result { + let mut remaining = 0; + unsafe { + try!(cvt(curl_sys::curl_multi_socket_action(self.raw, + curl_sys::CURL_SOCKET_BAD, + 0, + &mut remaining))); + Ok(remaining as u32) + } + } + + /// Get how long to wait for action before proceeding + /// + /// An application using the libcurl multi interface should call + /// `get_timeout` to figure out how long it should wait for socket actions - + /// at most - before proceeding. + /// + /// Proceeding means either doing the socket-style timeout action: call the + /// `timeout` function, or call `perform` if you're using the simpler and + /// older multi interface approach. + /// + /// The timeout value returned is the duration at this very moment. If 0, it + /// means you should proceed immediately without waiting for anything. If it + /// returns `None`, there's no timeout at all set. + /// + /// Note: if libcurl returns a `None` timeout here, it just means that + /// libcurl currently has no stored timeout value. You must not wait too + /// long (more than a few seconds perhaps) before you call `perform` again. + pub fn get_timeout(&self) -> Result, MultiError> { + let mut ms = 0; + unsafe { + try!(cvt(curl_sys::curl_multi_timeout(self.raw, &mut ms))); + if ms == -1 { + Ok(None) + } else { + Ok(Some(Duration::from_millis(ms as u64))) + } + } + } + + /// Block until activity is detected or a timeout passes. + /// + /// The timeout is used in millisecond-precision. Large durations are + /// clamped at the maximum value curl accepts. + /// + /// The returned integer will contain the number of internal file + /// descriptors on which interesting events occured. + /// + /// This function is a simpler alternative to using `fdset()` and `select()` + /// and does not suffer from file descriptor limits. + /// + /// # Example + /// + /// ``` + /// use curl::multi::Multi; + /// use std::time::Duration; + /// + /// let m = Multi::new(); + /// + /// // Add some Easy handles... + /// + /// while m.perform().unwrap() > 0 { + /// m.wait(&mut [], Duration::from_secs(1)).unwrap(); + /// } + /// ``` + pub fn wait(&self, waitfds: &mut [WaitFd], timeout: Duration) + -> Result { + let timeout_ms = { + let secs = timeout.as_secs(); + if secs > (i32::max_value() / 1000) as u64 { + // Duration too large, clamp at maximum value. + i32::max_value() + } else { + secs as i32 * 1000 + timeout.subsec_nanos() as i32 / 1000_000 + } + }; + unsafe { + let mut ret = 0; + try!(cvt(curl_sys::curl_multi_wait(self.raw, + waitfds.as_mut_ptr() as *mut _, + waitfds.len() as u32, + timeout_ms, + &mut ret))); + Ok(ret as u32) + } + } + + /// Reads/writes available data from each easy handle. + /// + /// This function handles transfers on all the added handles that need + /// attention in an non-blocking fashion. + /// + /// When an application has found out there's data available for this handle + /// or a timeout has elapsed, the application should call this function to + /// read/write whatever there is to read or write right now etc. This + /// method returns as soon as the reads/writes are done. This function does + /// not require that there actually is any data available for reading or + /// that data can be written, it can be called just in case. It will return + /// the number of handles that still transfer data. + /// + /// If the amount of running handles is changed from the previous call (or + /// is less than the amount of easy handles you've added to the multi + /// handle), you know that there is one or more transfers less "running". 
+ /// You can then call `info` to get information about each individual + /// completed transfer, and that returned info includes `Error` and more. + /// If an added handle fails very quickly, it may never be counted as a + /// running handle. + /// + /// When running_handles is set to zero (0) on the return of this function, + /// there is no longer any transfers in progress. + /// + /// # Return + /// + /// Before libcurl version 7.20.0: If you receive `is_call_perform`, this + /// basically means that you should call `perform` again, before you select + /// on more actions. You don't have to do it immediately, but the return + /// code means that libcurl may have more data available to return or that + /// there may be more data to send off before it is "satisfied". Do note + /// that `perform` will return `is_call_perform` only when it wants to be + /// called again immediately. When things are fine and there is nothing + /// immediate it wants done, it'll return `Ok` and you need to wait for + /// "action" and then call this function again. + /// + /// This function only returns errors etc regarding the whole multi stack. + /// Problems still might have occurred on individual transfers even when + /// this function returns `Ok`. Use `info` to figure out how individual + /// transfers did. + pub fn perform(&self) -> Result { + unsafe { + let mut ret = 0; + try!(cvt(curl_sys::curl_multi_perform(self.raw, &mut ret))); + Ok(ret as u32) + } + } + + /// Extracts file descriptor information from a multi handle + /// + /// This function extracts file descriptor information from a given + /// handle, and libcurl returns its `fd_set` sets. The application can use + /// these to `select()` on, but be sure to `FD_ZERO` them before calling + /// this function as curl_multi_fdset only adds its own descriptors, it + /// doesn't zero or otherwise remove any others. The curl_multi_perform + /// function should be called as soon as one of them is ready to be read + /// from or written to. + /// + /// If no file descriptors are set by libcurl, this function will return + /// `Ok(None)`. Otherwise `Ok(Some(n))` will be returned where `n` the + /// highest descriptor number libcurl set. When `Ok(None)` is returned it + /// is because libcurl currently does something that isn't possible for + /// your application to monitor with a socket and unfortunately you can + /// then not know exactly when the current action is completed using + /// `select()`. You then need to wait a while before you proceed and call + /// `perform` anyway. + /// + /// When doing `select()`, you should use `get_timeout` to figure out + /// how long to wait for action. Call `perform` even if no activity has + /// been seen on the `fd_set`s after the timeout expires as otherwise + /// internal retries and timeouts may not work as you'd think and want. + /// + /// If one of the sockets used by libcurl happens to be larger than what + /// can be set in an `fd_set`, which on POSIX systems means that the file + /// descriptor is larger than `FD_SETSIZE`, then libcurl will try to not + /// set it. Setting a too large file descriptor in an `fd_set` implies an out + /// of bounds write which can cause crashes, or worse. The effect of NOT + /// storing it will possibly save you from the crash, but will make your + /// program NOT wait for sockets it should wait for... 
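// Illustrative sketch, not part of the vendored source: instead of wiring `fdset2`
// into a hand-rolled `select()`, the same loop can be written with `get_timeout`
// and `wait`, which the surrounding docs point to as the simpler alternative.
// `multi` is assumed to already have handles added.
//
//     use std::time::Duration;
//
//     loop {
//         if multi.perform().unwrap() == 0 {
//             break;
//         }
//         // Sleep for as long as libcurl suggests, defaulting to one second.
//         let timeout = multi.get_timeout().unwrap().unwrap_or(Duration::from_secs(1));
//         multi.wait(&mut [], timeout).unwrap();
//     }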
+ pub fn fdset2(&self, + read: Option<&mut curl_sys::fd_set>, + write: Option<&mut curl_sys::fd_set>, + except: Option<&mut curl_sys::fd_set>) -> Result, MultiError> { + unsafe { + let mut ret = 0; + let read = read.map(|r| r as *mut _).unwrap_or(0 as *mut _); + let write = write.map(|r| r as *mut _).unwrap_or(0 as *mut _); + let except = except.map(|r| r as *mut _).unwrap_or(0 as *mut _); + try!(cvt(curl_sys::curl_multi_fdset(self.raw, + read, + write, + except, + &mut ret))); + if ret == -1 { + Ok(None) + } else { + Ok(Some(ret)) + } + } + } + + #[doc(hidden)] + #[deprecated(note = "renamed to fdset2")] + pub fn fdset(&self, + read: Option<&mut fd_set>, + write: Option<&mut fd_set>, + except: Option<&mut fd_set>) -> Result, MultiError> { + unsafe { + let mut ret = 0; + let read = read.map(|r| r as *mut _).unwrap_or(0 as *mut _); + let write = write.map(|r| r as *mut _).unwrap_or(0 as *mut _); + let except = except.map(|r| r as *mut _).unwrap_or(0 as *mut _); + try!(cvt(curl_sys::curl_multi_fdset(self.raw, + read as *mut _, + write as *mut _, + except as *mut _, + &mut ret))); + if ret == -1 { + Ok(None) + } else { + Ok(Some(ret)) + } + } + } + + /// Attempt to close the multi handle and clean up all associated resources. + /// + /// Cleans up and removes a whole multi stack. It does not free or touch any + /// individual easy handles in any way - they still need to be closed + /// individually. + pub fn close(&self) -> Result<(), MultiError> { + unsafe { + cvt(curl_sys::curl_multi_cleanup(self.raw)) + } + } +} + +fn cvt(code: curl_sys::CURLMcode) -> Result<(), MultiError> { + if code == curl_sys::CURLM_OK { + Ok(()) + } else { + Err(MultiError::new(code)) + } +} + +impl fmt::Debug for Multi { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_struct("Multi") + .field("raw", &self.raw) + .finish() + } +} + +impl Drop for Multi { + fn drop(&mut self) { + let _ = self.close(); + } +} + +impl EasyHandle { + /// Sets an internal private token for this `EasyHandle`. + /// + /// This function will set the `CURLOPT_PRIVATE` field on the underlying + /// easy handle. + pub fn set_token(&mut self, token: usize) -> Result<(), Error> { + unsafe { + ::cvt(curl_sys::curl_easy_setopt(self.easy.raw(), + curl_sys::CURLOPT_PRIVATE, + token)) + } + } + + /// Unpause reading on a connection. + /// + /// Using this function, you can explicitly unpause a connection that was + /// previously paused. + /// + /// A connection can be paused by letting the read or the write callbacks + /// return `ReadError::Pause` or `WriteError::Pause`. + /// + /// The chance is high that you will get your write callback called before + /// this function returns. + pub fn unpause_read(&self) -> Result<(), Error> { + self.easy.unpause_read() + } + + /// Unpause writing on a connection. + /// + /// Using this function, you can explicitly unpause a connection that was + /// previously paused. + /// + /// A connection can be paused by letting the read or the write callbacks + /// return `ReadError::Pause` or `WriteError::Pause`. A write callback that + /// returns pause signals to the library that it couldn't take care of any + /// data at all, and that data will then be delivered again to the callback + /// when the writing is later unpaused. 
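// Illustrative sketch, not part of the vendored source: the pause/unpause dance
// described above, assuming the `WriteError::Pause` variant from the `easy` module
// that these docs mention. The back-pressure check `buffer_full` and the `multi`
// handle are hypothetical/assumed context.
//
//     use curl::easy::{Easy, WriteError};
//
//     let mut easy = Easy::new();
//     easy.url("https://example.com/").unwrap();
//     easy.write_function(move |data| {
//         if buffer_full() {
//             Err(WriteError::Pause)      // data is redelivered after unpausing
//         } else {
//             Ok(data.len())
//         }
//     }).unwrap();
//     let handle = multi.add(easy).unwrap();
//     // ... later, once there is room to write again:
//     handle.unpause_write().unwrap();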
+ pub fn unpause_write(&self) -> Result<(), Error> { + self.easy.unpause_write() + } +} + +impl fmt::Debug for EasyHandle { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + self.easy.fmt(f) + } +} + +impl Easy2Handle { + /// Acquires a reference to the underlying handler for events. + pub fn get_ref(&self) -> &H { + self.easy.get_ref() + } + + /// Acquires a reference to the underlying handler for events. + pub fn get_mut(&mut self) -> &mut H { + self.easy.get_mut() + } + + /// Same as `EasyHandle::set_token` + pub fn set_token(&mut self, token: usize) -> Result<(), Error> { + unsafe { + ::cvt(curl_sys::curl_easy_setopt(self.easy.raw(), + curl_sys::CURLOPT_PRIVATE, + token)) + } + } + + /// Unpause reading on a connection. + /// + /// Using this function, you can explicitly unpause a connection that was + /// previously paused. + /// + /// A connection can be paused by letting the read or the write callbacks + /// return `ReadError::Pause` or `WriteError::Pause`. + /// + /// The chance is high that you will get your write callback called before + /// this function returns. + pub fn unpause_read(&self) -> Result<(), Error> { + self.easy.unpause_read() + } + + /// Unpause writing on a connection. + /// + /// Using this function, you can explicitly unpause a connection that was + /// previously paused. + /// + /// A connection can be paused by letting the read or the write callbacks + /// return `ReadError::Pause` or `WriteError::Pause`. A write callback that + /// returns pause signals to the library that it couldn't take care of any + /// data at all, and that data will then be delivered again to the callback + /// when the writing is later unpaused. + pub fn unpause_write(&self) -> Result<(), Error> { + self.easy.unpause_write() + } +} + +impl fmt::Debug for Easy2Handle { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + self.easy.fmt(f) + } +} + +impl<'multi> Message<'multi> { + /// If this message indicates that a transfer has finished, returns the + /// result of the transfer in `Some`. + /// + /// If the message doesn't indicate that a transfer has finished, then + /// `None` is returned. + /// + /// Note that the `result*_for` methods below should be preferred as they + /// provide better error messages as the associated error data on the + /// handle can be associated with the error type. + pub fn result(&self) -> Option> { + unsafe { + if (*self.ptr).msg == curl_sys::CURLMSG_DONE { + Some(::cvt((*self.ptr).data as curl_sys::CURLcode)) + } else { + None + } + } + } + + /// Same as `result`, except only returns `Some` for the specified handle. + /// + /// Note that this function produces better error messages than `result` as + /// it uses `take_error_buf` to associate error information with the + /// returned error. + pub fn result_for(&self, handle: &EasyHandle) -> Option> { + if !self.is_for(handle) { + return None + } + let mut err = self.result(); + if let Some(Err(e)) = &mut err { + if let Some(s) = handle.easy.take_error_buf() { + e.set_extra(s); + } + } + return err + } + + /// Same as `result`, except only returns `Some` for the specified handle. + /// + /// Note that this function produces better error messages than `result` as + /// it uses `take_error_buf` to associate error information with the + /// returned error. 
+ pub fn result_for2(&self, handle: &Easy2Handle) -> Option> { + if !self.is_for2(handle) { + return None + } + let mut err = self.result(); + if let Some(Err(e)) = &mut err { + if let Some(s) = handle.easy.take_error_buf() { + e.set_extra(s); + } + } + return err + } + + /// Returns whether this easy message was for the specified easy handle or + /// not. + pub fn is_for(&self, handle: &EasyHandle) -> bool { + unsafe { (*self.ptr).easy_handle == handle.easy.raw() } + } + + /// Same as `is_for`, but for `Easy2Handle`. + pub fn is_for2(&self, handle: &Easy2Handle) -> bool { + unsafe { (*self.ptr).easy_handle == handle.easy.raw() } + } + + /// Returns the token associated with the easy handle that this message + /// represents a completion for. + /// + /// This function will return the token assigned with + /// `EasyHandle::set_token`. This reads the `CURLINFO_PRIVATE` field of the + /// underlying `*mut CURL`. + pub fn token(&self) -> Result { + unsafe { + let mut p = 0usize; + try!(::cvt(curl_sys::curl_easy_getinfo((*self.ptr).easy_handle, + curl_sys::CURLINFO_PRIVATE, + &mut p))); + Ok(p) + } + } +} + +impl<'a> fmt::Debug for Message<'a> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_struct("Message") + .field("ptr", &self.ptr) + .finish() + } +} + +impl Events { + /// Creates a new blank event bit mask. + pub fn new() -> Events { + Events { bits: 0 } + } + + /// Set or unset the whether these events indicate that input is ready. + pub fn input(&mut self, val: bool) -> &mut Events { + self.flag(curl_sys::CURL_CSELECT_IN, val) + } + + /// Set or unset the whether these events indicate that output is ready. + pub fn output(&mut self, val: bool) -> &mut Events { + self.flag(curl_sys::CURL_CSELECT_OUT, val) + } + + /// Set or unset the whether these events indicate that an error has + /// happened. + pub fn error(&mut self, val: bool) -> &mut Events { + self.flag(curl_sys::CURL_CSELECT_ERR, val) + } + + fn flag(&mut self, flag: c_int, val: bool) -> &mut Events { + if val { + self.bits |= flag; + } else { + self.bits &= !flag; + } + self + } +} + +impl fmt::Debug for Events { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_struct("Events") + .field("input", &(self.bits & curl_sys::CURL_CSELECT_IN != 0)) + .field("output", &(self.bits & curl_sys::CURL_CSELECT_IN != 0)) + .field("error", &(self.bits & curl_sys::CURL_CSELECT_IN != 0)) + .finish() + } +} + +impl SocketEvents { + /// Wait for incoming data. For the socket to become readable. + pub fn input(&self) -> bool { + self.bits & curl_sys::CURL_POLL_IN == curl_sys::CURL_POLL_IN + } + + /// Wait for outgoing data. For the socket to become writable. + pub fn output(&self) -> bool { + self.bits & curl_sys::CURL_POLL_OUT == curl_sys::CURL_POLL_OUT + } + + /// Wait for incoming and outgoing data. For the socket to become readable + /// or writable. + pub fn input_and_output(&self) -> bool { + self.bits & curl_sys::CURL_POLL_INOUT == curl_sys::CURL_POLL_INOUT + } + + /// The specified socket/file descriptor is no longer used by libcurl. + pub fn remove(&self) -> bool { + self.bits & curl_sys::CURL_POLL_REMOVE == curl_sys::CURL_POLL_REMOVE + } +} + +impl fmt::Debug for SocketEvents { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_struct("Events") + .field("input", &self.input()) + .field("output", &self.output()) + .field("remove", &self.remove()) + .finish() + } +} + +impl WaitFd { + /// Constructs an empty (invalid) WaitFd. 
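// Illustrative sketch, not part of the vendored source: using a `WaitFd` so that
// `Multi::wait` also watches an application-owned descriptor (the `wakeup_fd`
// below is hypothetical) alongside libcurl's own sockets.
//
//     use std::time::Duration;
//     use curl::multi::WaitFd;
//
//     let mut extra = WaitFd::new();
//     extra.set_fd(wakeup_fd);
//     extra.poll_on_read(true);
//
//     let mut fds = [extra];
//     multi.wait(&mut fds, Duration::from_secs(1)).unwrap();
//     if fds[0].received_read() {
//         // Woken by the application fd rather than by libcurl.
//     }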
+ pub fn new() -> WaitFd { + WaitFd { + inner: curl_sys::curl_waitfd { + fd: 0, + events: 0, + revents: 0, + } + } + } + + /// Set the file descriptor to wait for. + pub fn set_fd(&mut self, fd: Socket) { + self.inner.fd = fd; + } + + /// Indicate that the socket should poll on read events such as new data + /// received. + /// + /// Corresponds to `CURL_WAIT_POLLIN`. + pub fn poll_on_read(&mut self, val: bool) -> &mut WaitFd { + self.flag(curl_sys::CURL_WAIT_POLLIN, val) + } + + /// Indicate that the socket should poll on high priority read events such + /// as out of band data. + /// + /// Corresponds to `CURL_WAIT_POLLPRI`. + pub fn poll_on_priority_read(&mut self, val: bool) -> &mut WaitFd { + self.flag(curl_sys::CURL_WAIT_POLLPRI, val) + } + + /// Indicate that the socket should poll on write events such as the socket + /// being clear to write without blocking. + /// + /// Corresponds to `CURL_WAIT_POLLOUT`. + pub fn poll_on_write(&mut self, val: bool) -> &mut WaitFd { + self.flag(curl_sys::CURL_WAIT_POLLOUT, val) + } + + fn flag(&mut self, flag: c_short, val: bool) -> &mut WaitFd { + if val { + self.inner.events |= flag; + } else { + self.inner.events &= !flag; + } + self + } + + /// After a call to `wait`, returns `true` if `poll_on_read` was set and a + /// read event occured. + pub fn received_read(&self) -> bool { + self.inner.revents & curl_sys::CURL_WAIT_POLLIN == curl_sys::CURL_WAIT_POLLIN + } + + /// After a call to `wait`, returns `true` if `poll_on_priority_read` was set and a + /// priority read event occured. + pub fn received_priority_read(&self) -> bool { + self.inner.revents & curl_sys::CURL_WAIT_POLLPRI == curl_sys::CURL_WAIT_POLLPRI + } + + /// After a call to `wait`, returns `true` if `poll_on_write` was set and a + /// write event occured. + pub fn received_write(&self) -> bool { + self.inner.revents & curl_sys::CURL_WAIT_POLLOUT == curl_sys::CURL_WAIT_POLLOUT + } +} + +#[cfg(unix)] +impl From for WaitFd { + fn from(pfd: pollfd) -> WaitFd { + let mut events = 0; + if pfd.events & POLLIN == POLLIN { + events |= curl_sys::CURL_WAIT_POLLIN; + } + if pfd.events & POLLPRI == POLLPRI { + events |= curl_sys::CURL_WAIT_POLLPRI; + } + if pfd.events & POLLOUT == POLLOUT { + events |= curl_sys::CURL_WAIT_POLLOUT; + } + WaitFd { + inner: curl_sys::curl_waitfd { + fd: pfd.fd, + events: events, + revents: 0, + } + } + } +} + +impl fmt::Debug for WaitFd { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_struct("WaitFd") + .field("fd", &self.inner.fd) + .field("events", &self.inner.fd) + .field("revents", &self.inner.fd) + .finish() + } +} diff --git a/curl/src/panic.rs b/curl/src/panic.rs new file mode 100644 index 000000000..ae28e982e --- /dev/null +++ b/curl/src/panic.rs @@ -0,0 +1,34 @@ +use std::any::Any; +use std::cell::RefCell; +use std::panic::{self, AssertUnwindSafe}; + +thread_local!(static LAST_ERROR: RefCell>> = { + RefCell::new(None) +}); + +pub fn catch T>(f: F) -> Option { + match LAST_ERROR.try_with(|slot| slot.borrow().is_some()) { + Ok(true) => return None, + Ok(false) => {} + // we're in thread shutdown, so we're for sure not panicking and + // panicking again will abort, so no need to worry! + Err(_) => {} + } + + // Note that `AssertUnwindSafe` is used here as we prevent reentering + // arbitrary code due to the `LAST_ERROR` check above plus propagation of a + // panic after we return back to user code from C. 
+ match panic::catch_unwind(AssertUnwindSafe(f)) { + Ok(ret) => Some(ret), + Err(e) => { + LAST_ERROR.with(|slot| *slot.borrow_mut() = Some(e)); + None + } + } +} + +pub fn propagate() { + if let Ok(Some(t)) = LAST_ERROR.try_with(|slot| slot.borrow_mut().take()) { + panic::resume_unwind(t) + } +} diff --git a/curl/src/version.rs b/curl/src/version.rs new file mode 100644 index 000000000..4ca39c44e --- /dev/null +++ b/curl/src/version.rs @@ -0,0 +1,326 @@ +use std::ffi::CStr; +use std::fmt; +use std::str; + +use curl_sys; +use libc::{c_int, c_char}; + +/// Version information about libcurl and the capabilities that it supports. +pub struct Version { + inner: *mut curl_sys::curl_version_info_data, +} + +unsafe impl Send for Version {} +unsafe impl Sync for Version {} + +/// An iterator over the list of protocols a version supports. +#[derive(Clone)] +pub struct Protocols<'a> { + cur: *const *const c_char, + _inner: &'a Version, +} + +impl Version { + /// Returns the libcurl version that this library is currently linked against. + pub fn num() -> &'static str { + unsafe { + let s = CStr::from_ptr(curl_sys::curl_version() as *const _); + str::from_utf8(s.to_bytes()).unwrap() + } + } + + /// Returns the libcurl version that this library is currently linked against. + pub fn get() -> Version { + unsafe { + let ptr = curl_sys::curl_version_info(curl_sys::CURLVERSION_FOURTH); + assert!(!ptr.is_null()); + Version { inner: ptr } + } + } + + /// Returns the human readable version string, + pub fn version(&self) -> &str { + unsafe { + ::opt_str((*self.inner).version).unwrap() + } + } + + /// Returns a numeric representation of the version number + /// + /// This is a 24 bit number made up of the major number, minor, and then + /// patch number. For example 7.9.8 will return 0x070908. + pub fn version_num(&self) -> u32 { + unsafe { + (*self.inner).version_num as u32 + } + } + + /// Returns a human readable string of the host libcurl is built for. + /// + /// This is discovered as part of the build environment. + pub fn host(&self) -> &str { + unsafe { + ::opt_str((*self.inner).host).unwrap() + } + } + + /// Returns whether libcurl supports IPv6 + pub fn feature_ipv6(&self) -> bool { + self.flag(curl_sys::CURL_VERSION_IPV6) + } + + /// Returns whether libcurl supports SSL + pub fn feature_ssl(&self) -> bool { + self.flag(curl_sys::CURL_VERSION_SSL) + } + + /// Returns whether libcurl supports HTTP deflate via libz + pub fn feature_libz(&self) -> bool { + self.flag(curl_sys::CURL_VERSION_LIBZ) + } + + /// Returns whether libcurl supports HTTP NTLM + pub fn feature_ntlm(&self) -> bool { + self.flag(curl_sys::CURL_VERSION_NTLM) + } + + /// Returns whether libcurl supports HTTP GSSNEGOTIATE + pub fn feature_gss_negotiate(&self) -> bool { + self.flag(curl_sys::CURL_VERSION_GSSNEGOTIATE) + } + + /// Returns whether libcurl was built with debug capabilities + pub fn feature_debug(&self) -> bool { + self.flag(curl_sys::CURL_VERSION_DEBUG) + } + + /// Returns whether libcurl was built with SPNEGO authentication + pub fn feature_spnego(&self) -> bool { + self.flag(curl_sys::CURL_VERSION_SPNEGO) + } + + /// Returns whether libcurl was built with large file support + pub fn feature_largefile(&self) -> bool { + self.flag(curl_sys::CURL_VERSION_LARGEFILE) + } + + /// Returns whether libcurl was built with support for IDNA, domain names + /// with international letters. + pub fn feature_idn(&self) -> bool { + self.flag(curl_sys::CURL_VERSION_IDN) + } + + /// Returns whether libcurl was built with support for SSPI. 
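// Illustrative sketch, not part of the vendored source: the runtime capability
// probing this type exists for, as recommended in the crate docs, before relying
// on optional libcurl features.
//
//     let version = curl::Version::get();
//     println!("linked against libcurl {}", version.version());
//     if !version.feature_ssl() {
//         eprintln!("warning: this libcurl was built without TLS support");
//     }
//     if let Some(ssl) = version.ssl_version() {
//         println!("TLS backend: {}", ssl);
//     }
//     let protocols: Vec<&str> = version.protocols().collect();
//     println!("protocols: {:?}", protocols);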
+    pub fn feature_sspi(&self) -> bool {
+        self.flag(curl_sys::CURL_VERSION_SSPI)
+    }
+
+    /// Returns whether libcurl was built with asynchronous name lookups.
+    pub fn feature_async_dns(&self) -> bool {
+        self.flag(curl_sys::CURL_VERSION_ASYNCHDNS)
+    }
+
+    /// Returns whether libcurl was built with support for character
+    /// conversions.
+    pub fn feature_conv(&self) -> bool {
+        self.flag(curl_sys::CURL_VERSION_CONV)
+    }
+
+    /// Returns whether libcurl was built with support for TLS-SRP.
+    pub fn feature_tlsauth_srp(&self) -> bool {
+        self.flag(curl_sys::CURL_VERSION_TLSAUTH_SRP)
+    }
+
+    /// Returns whether libcurl was built with support for NTLM delegation to
+    /// winbind helper.
+    pub fn feature_ntlm_wb(&self) -> bool {
+        self.flag(curl_sys::CURL_VERSION_NTLM_WB)
+    }
+
+    /// Returns whether libcurl was built with support for unix domain sockets.
+    pub fn feature_unix_domain_socket(&self) -> bool {
+        self.flag(curl_sys::CURL_VERSION_UNIX_SOCKETS)
+    }
+
+    /// Returns whether libcurl was built with support for HTTP2.
+    pub fn feature_http2(&self) -> bool {
+        self.flag(curl_sys::CURL_VERSION_HTTP2)
+    }
+
+    fn flag(&self, flag: c_int) -> bool {
+        unsafe {
+            (*self.inner).features & flag != 0
+        }
+    }
+
+    /// Returns the version of OpenSSL that is used, or None if there is no SSL
+    /// support.
+    pub fn ssl_version(&self) -> Option<&str> {
+        unsafe {
+            ::opt_str((*self.inner).ssl_version)
+        }
+    }
+
+    /// Returns the version of libz that is used, or None if there is no libz
+    /// support.
+    pub fn libz_version(&self) -> Option<&str> {
+        unsafe {
+            ::opt_str((*self.inner).libz_version)
+        }
+    }
+
+    /// Returns an iterator over the list of protocols that this build of
+    /// libcurl supports.
+    pub fn protocols(&self) -> Protocols {
+        unsafe {
+            Protocols { _inner: self, cur: (*self.inner).protocols }
+        }
+    }
+
+    /// If available, the human readable version of ares that libcurl is linked
+    /// against.
+    pub fn ares_version(&self) -> Option<&str> {
+        unsafe {
+            if (*self.inner).age >= curl_sys::CURLVERSION_SECOND {
+                ::opt_str((*self.inner).ares)
+            } else {
+                None
+            }
+        }
+    }
+
+    /// If available, the version of ares that libcurl is linked against.
+    pub fn ares_version_num(&self) -> Option<u32> {
+        unsafe {
+            if (*self.inner).age >= curl_sys::CURLVERSION_SECOND {
+                Some((*self.inner).ares_num as u32)
+            } else {
+                None
+            }
+        }
+    }
+
+    /// If available, the version of libidn that libcurl is linked against.
+    pub fn libidn_version(&self) -> Option<&str> {
+        unsafe {
+            if (*self.inner).age >= curl_sys::CURLVERSION_THIRD {
+                ::opt_str((*self.inner).libidn)
+            } else {
+                None
+            }
+        }
+    }
+
+    /// If available, the version of iconv libcurl is linked against.
+    pub fn iconv_version_num(&self) -> Option<u32> {
+        unsafe {
+            if (*self.inner).age >= curl_sys::CURLVERSION_FOURTH {
+                Some((*self.inner).iconv_ver_num as u32)
+            } else {
+                None
+            }
+        }
+    }
+
+    /// If available, the version of libssh that libcurl is linked against.
+    pub fn libssh_version(&self) -> Option<&str> {
+        unsafe {
+            if (*self.inner).age >= curl_sys::CURLVERSION_FOURTH {
+                ::opt_str((*self.inner).libssh_version)
+            } else {
+                None
+            }
+        }
+    }
+
+    /// If available, the numeric version of brotli libcurl is linked against.
+    pub fn brotli_version_num(&self) -> Option<u32> {
+        unsafe {
+            if (*self.inner).age >= curl_sys::CURLVERSION_FIFTH {
+                Some((*self.inner).brotli_ver_num)
+            } else {
+                None
+            }
+        }
+    }
+
+    /// If available, the version of brotli libcurl is linked against.
+    pub fn brotli_version(&self) -> Option<&str> {
+        unsafe {
+            if (*self.inner).age >= curl_sys::CURLVERSION_FIFTH {
+                ::opt_str((*self.inner).brotli_version)
+            } else {
+                None
+            }
+        }
+    }
+}
+
+impl fmt::Debug for Version {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        let mut f = f.debug_struct("Version");
+        f.field("version", &self.version())
+            .field("host", &self.host())
+            .field("feature_ipv6", &self.feature_ipv6())
+            .field("feature_ssl", &self.feature_ssl())
+            .field("feature_libz", &self.feature_libz())
+            .field("feature_ntlm", &self.feature_ntlm())
+            .field("feature_gss_negotiate", &self.feature_gss_negotiate())
+            .field("feature_debug", &self.feature_debug())
+            .field("feature_spnego", &self.feature_spnego())
+            .field("feature_largefile", &self.feature_largefile())
+            .field("feature_idn", &self.feature_idn())
+            .field("feature_sspi", &self.feature_sspi())
+            .field("feature_async_dns", &self.feature_async_dns())
+            .field("feature_conv", &self.feature_conv())
+            .field("feature_tlsauth_srp", &self.feature_tlsauth_srp())
+            .field("feature_ntlm_wb", &self.feature_ntlm_wb())
+            .field("feature_unix_domain_socket", &self.feature_unix_domain_socket());
+
+        if let Some(s) = self.ssl_version() {
+            f.field("ssl_version", &s);
+        }
+        if let Some(s) = self.libz_version() {
+            f.field("libz_version", &s);
+        }
+        if let Some(s) = self.ares_version() {
+            f.field("ares_version", &s);
+        }
+        if let Some(s) = self.libidn_version() {
+            f.field("libidn_version", &s);
+        }
+        if let Some(s) = self.iconv_version_num() {
+            f.field("iconv_version_num", &format!("{:x}", s));
+        }
+        if let Some(s) = self.libssh_version() {
+            f.field("libssh_version", &s);
+        }
+
+        f.field("protocols", &self.protocols().collect::<Vec<_>>());
+
+        f.finish()
+    }
+}
+
+impl<'a> Iterator for Protocols<'a> {
+    type Item = &'a str;
+
+    fn next(&mut self) -> Option<&'a str> {
+        unsafe {
+            if (*self.cur).is_null() {
+                return None
+            }
+            let ret = ::opt_str(*self.cur).unwrap();
+            self.cur = self.cur.offset(1);
+            Some(ret)
+        }
+    }
+}
+
+impl<'a> fmt::Debug for Protocols<'a> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        f.debug_list()
+            .entries(self.clone())
+            .finish()
+    }
+}
diff --git a/curl/tests/atexit.rs b/curl/tests/atexit.rs
new file mode 100644
index 000000000..b95b125b8
--- /dev/null
+++ b/curl/tests/atexit.rs
@@ -0,0 +1,20 @@
+extern crate curl;
+extern crate libc;
+
+use curl::easy::Easy;
+
+pub extern "C" fn hook() {
+    let mut easy = Easy::new();
+    easy.url("google.com").unwrap();
+    easy.write_function(|data| {
+        Ok(data.len())
+    }).unwrap();
+    easy.perform().unwrap();
+}
+
+fn main() {
+    curl::init();
+    hook();
+    unsafe { libc::atexit(hook); }
+    println!("Finishing...")
+}
diff --git a/curl/tests/easy.rs b/curl/tests/easy.rs
new file mode 100644
index 000000000..09add3149
--- /dev/null
+++ b/curl/tests/easy.rs
@@ -0,0 +1,693 @@
+extern crate curl;
+
+use std::cell::{RefCell, Cell};
+use std::io::Read;
+use std::rc::Rc;
+use std::str;
+use std::time::Duration;
+
+macro_rules!
t { + ($e:expr) => (match $e { + Ok(e) => e, + Err(e) => panic!("{} failed with {:?}", stringify!($e), e), + }) +} + +use curl::easy::{Easy, List, WriteError, ReadError, Transfer}; + +use server::Server; +mod server; + +fn handle() -> Easy { + let mut e = Easy::new(); + t!(e.timeout(Duration::new(20, 0))); + return e +} + +fn sink(data: &[u8]) -> Result { + Ok(data.len()) +} + +#[test] +fn get_smoke() { + let s = Server::new(); + s.receive("\ +GET / HTTP/1.1\r\n\ +Host: 127.0.0.1:$PORT\r\n\ +Accept: */*\r\n\ +\r\n"); + s.send("HTTP/1.1 200 OK\r\n\r\n"); + + let mut handle = handle(); + t!(handle.url(&s.url("/"))); + t!(handle.perform()); +} + +#[test] +fn get_path() { + let s = Server::new(); + s.receive("\ +GET /foo HTTP/1.1\r\n\ +Host: 127.0.0.1:$PORT\r\n\ +Accept: */*\r\n\ +\r\n"); + s.send("HTTP/1.1 200 OK\r\n\r\n"); + + let mut handle = handle(); + t!(handle.url(&s.url("/foo"))); + t!(handle.perform()); +} + +#[test] +fn write_callback() { + let s = Server::new(); + s.receive("\ +GET / HTTP/1.1\r\n\ +Host: 127.0.0.1:$PORT\r\n\ +Accept: */*\r\n\ +\r\n"); + s.send("HTTP/1.1 200 OK\r\n\r\nhello!"); + + let mut all = Vec::::new(); + { + let mut handle = handle(); + t!(handle.url(&s.url("/"))); + let mut handle = handle.transfer(); + t!(handle.write_function(|data| { + all.extend(data); + Ok(data.len()) + })); + t!(handle.perform()); + } + assert_eq!(all, b"hello!"); +} + +#[test] +fn resolve() { + let s = Server::new(); + s.receive("\ +GET / HTTP/1.1\r\n\ +Host: example.com:$PORT\r\n\ +Accept: */*\r\n\ +\r\n"); + s.send("HTTP/1.1 200 OK\r\n\r\n"); + + let mut list = List::new(); + t!(list.append(&format!("example.com:{}:127.0.0.1", s.addr().port()))); + let mut handle = handle(); + t!(handle.url(&format!("http://example.com:{}/", s.addr().port()))); + t!(handle.resolve(list)); + t!(handle.perform()); +} + +#[test] +fn progress() { + let s = Server::new(); + s.receive("\ +GET /foo HTTP/1.1\r\n\ +Host: 127.0.0.1:$PORT\r\n\ +Accept: */*\r\n\ +\r\n"); + s.send("HTTP/1.1 200 OK\r\n\r\nHello!"); + + let mut hits = 0; + let mut dl = 0.0; + { + let mut handle = handle(); + t!(handle.url(&s.url("/foo"))); + t!(handle.progress(true)); + t!(handle.write_function(sink)); + + let mut handle = handle.transfer(); + t!(handle.progress_function(|_, a, _, _| { + hits += 1; + dl = a; + true + })); + t!(handle.perform()); + } + assert!(hits > 0); + assert_eq!(dl, 6.0); +} + +#[test] +fn headers() { + let s = Server::new(); + s.receive("\ +GET / HTTP/1.1\r\n\ +Host: 127.0.0.1:$PORT\r\n\ +Accept: */*\r\n\ +\r\n"); + s.send("\ +HTTP/1.1 200 OK\r\n\ +Foo: bar\r\n\ +Bar: baz\r\n\ +\r\n +Hello!"); + + let mut headers = Vec::new(); + { + let mut handle = handle(); + t!(handle.url(&s.url("/"))); + + let mut handle = handle.transfer(); + t!(handle.header_function(|h| { + headers.push(str::from_utf8(h).unwrap().to_string()); + true + })); + t!(handle.write_function(sink)); + t!(handle.perform()); + } + assert_eq!(headers, vec![ + "HTTP/1.1 200 OK\r\n".to_string(), + "Foo: bar\r\n".to_string(), + "Bar: baz\r\n".to_string(), + "\r\n".to_string(), + ]); +} + +#[test] +fn fail_on_error() { + let s = Server::new(); + s.receive("\ +GET / HTTP/1.1\r\n\ +Host: 127.0.0.1:$PORT\r\n\ +Accept: */*\r\n\ +\r\n"); + s.send("\ +HTTP/1.1 401 Not so good\r\n\ +\r\n"); + + let mut h = handle(); + t!(h.url(&s.url("/"))); + t!(h.fail_on_error(true)); + assert!(h.perform().is_err()); + + let s = Server::new(); + s.receive("\ +GET / HTTP/1.1\r\n\ +Host: 127.0.0.1:$PORT\r\n\ +Accept: */*\r\n\ +\r\n"); + s.send("\ +HTTP/1.1 401 Not so 
good\r\n\ +\r\n"); + + let mut h = handle(); + t!(h.url(&s.url("/"))); + t!(h.fail_on_error(false)); + t!(h.perform()); +} + +#[test] +fn port() { + let s = Server::new(); + s.receive("\ +GET / HTTP/1.1\r\n\ +Host: localhost:$PORT\r\n\ +Accept: */*\r\n\ +\r\n"); + s.send("\ +HTTP/1.1 200 OK\r\n\ +\r\n"); + + let mut h = handle(); + t!(h.url("http://localhost/")); + t!(h.port(s.addr().port())); + t!(h.perform()); +} + +#[test] +fn proxy() { + let s = Server::new(); + s.receive("\ +GET http://example.com/ HTTP/1.1\r\n\ +Host: example.com\r\n\ +Accept: */*\r\n\ +\r\n"); + s.send("\ +HTTP/1.1 200 OK\r\n\ +\r\n"); + + let mut h = handle(); + t!(h.url("http://example.com/")); + t!(h.proxy(&s.url("/"))); + t!(h.perform()); +} + +#[test] +#[ignore] // fails on newer curl versions? seems benign +fn noproxy() { + let s = Server::new(); + s.receive("\ +GET / HTTP/1.1\r\n\ +Host: 127.0.0.1:$PORT\r\n\ +Accept: */*\r\n\ +\r\n"); + s.send("\ +HTTP/1.1 200 OK\r\n\ +\r\n"); + + let mut h = handle(); + t!(h.url(&s.url("/"))); + t!(h.proxy(&s.url("/"))); + t!(h.noproxy("127.0.0.1")); + t!(h.perform()); +} + +#[test] +fn misc() { + let mut h = handle(); + t!(h.tcp_nodelay(true)); + // t!(h.tcp_keepalive(true)); + // t!(h.tcp_keepidle(Duration::new(3, 0))); + // t!(h.tcp_keepintvl(Duration::new(3, 0))); + t!(h.buffer_size(10)); + t!(h.dns_cache_timeout(Duration::new(1, 0))); +} + +#[test] +fn userpass() { + let s = Server::new(); + s.receive("\ +GET / HTTP/1.1\r\n\ +Authorization: Basic YmFyOg==\r\n\ +Host: 127.0.0.1:$PORT\r\n\ +Accept: */*\r\n\ +\r\n"); + s.send("\ +HTTP/1.1 200 OK\r\n\ +\r\n"); + + let mut h = handle(); + t!(h.url(&s.url("/"))); + t!(h.username("foo")); + t!(h.username("bar")); + t!(h.perform()); +} + +#[test] +fn accept_encoding() { + let s = Server::new(); + s.receive("\ +GET / HTTP/1.1\r\n\ +Host: 127.0.0.1:$PORT\r\n\ +Accept: */*\r\n\ +Accept-Encoding: gzip\r\n\ +\r\n"); + s.send("\ +HTTP/1.1 200 OK\r\n\ +\r\n"); + + let mut h = handle(); + t!(h.url(&s.url("/"))); + t!(h.accept_encoding("gzip")); + t!(h.perform()); +} + +#[test] +fn follow_location() { + let s1 = Server::new(); + let s2 = Server::new(); + s1.receive("\ +GET / HTTP/1.1\r\n\ +Host: 127.0.0.1:$PORT\r\n\ +Accept: */*\r\n\ +\r\n"); + s1.send(&format!("\ +HTTP/1.1 301 Moved Permanently\r\n\ +Location: http://{}/foo\r\n\ +\r\n", s2.addr())); + + s2.receive("\ +GET /foo HTTP/1.1\r\n\ +Host: 127.0.0.1:$PORT\r\n\ +Accept: */*\r\n\ +\r\n"); + s2.send("\ +HTTP/1.1 200 OK\r\n\ +\r\n"); + + let mut h = handle(); + t!(h.url(&s1.url("/"))); + t!(h.follow_location(true)); + t!(h.perform()); +} + +#[test] +fn put() { + let s = Server::new(); + s.receive("\ +PUT / HTTP/1.1\r\n\ +Host: 127.0.0.1:$PORT\r\n\ +Accept: */*\r\n\ +Content-Length: 5\r\n\ +\r\n\ +data\n"); + s.send("\ +HTTP/1.1 200 OK\r\n\ +\r\n"); + + let mut data = "data\n".as_bytes(); + let mut list = List::new(); + t!(list.append("Expect:")); + let mut h = handle(); + t!(h.url(&s.url("/"))); + t!(h.put(true)); + t!(h.in_filesize(5)); + t!(h.upload(true)); + t!(h.http_headers(list)); + let mut h = h.transfer(); + t!(h.read_function(|buf| { + Ok(data.read(buf).unwrap()) + })); + t!(h.perform()); +} + +#[test] +fn post1() { + let s = Server::new(); + s.receive("\ +POST / HTTP/1.1\r\n\ +Host: 127.0.0.1:$PORT\r\n\ +Accept: */*\r\n\ +Content-Length: 5\r\n\ +Content-Type: application/x-www-form-urlencoded\r\n\ +\r\n\ +data\n"); + s.send("\ +HTTP/1.1 200 OK\r\n\ +\r\n"); + + let mut h = handle(); + t!(h.url(&s.url("/"))); + t!(h.post(true)); + t!(h.post_fields_copy(b"data\n")); + 
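+    // (Editor's note, not part of the upstream test: `post_fields_copy`
+    // hands libcurl an owned copy of the body, so nothing has to outlive
+    // this call; contrast with `post3` below, which sets `post_field_size`
+    // and streams the body from a `read_function` instead.)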
t!(h.perform()); +} + +#[test] +fn post2() { + let s = Server::new(); + s.receive("\ +POST / HTTP/1.1\r\n\ +Host: 127.0.0.1:$PORT\r\n\ +Accept: */*\r\n\ +Content-Length: 5\r\n\ +Content-Type: application/x-www-form-urlencoded\r\n\ +\r\n\ +data\n"); + s.send("\ +HTTP/1.1 200 OK\r\n\ +\r\n"); + + let mut h = handle(); + t!(h.url(&s.url("/"))); + t!(h.post(true)); + t!(h.post_fields_copy(b"data\n")); + t!(h.write_function(sink)); + t!(h.perform()); +} + +#[test] +fn post3() { + let s = Server::new(); + s.receive("\ +POST / HTTP/1.1\r\n\ +Host: 127.0.0.1:$PORT\r\n\ +Accept: */*\r\n\ +Content-Length: 5\r\n\ +Content-Type: application/x-www-form-urlencoded\r\n\ +\r\n\ +data\n"); + s.send("\ +HTTP/1.1 200 OK\r\n\ +\r\n"); + + let mut data = "data\n".as_bytes(); + let mut h = handle(); + t!(h.url(&s.url("/"))); + t!(h.post(true)); + t!(h.post_field_size(5)); + let mut h = h.transfer(); + t!(h.read_function(|buf| { + Ok(data.read(buf).unwrap()) + })); + t!(h.perform()); +} + +#[test] +fn referer() { + let s = Server::new(); + s.receive("\ +GET / HTTP/1.1\r\n\ +Host: 127.0.0.1:$PORT\r\n\ +Accept: */*\r\n\ +Referer: foo\r\n\ +\r\n"); + s.send("\ +HTTP/1.1 200 OK\r\n\ +\r\n"); + + let mut h = handle(); + t!(h.url(&s.url("/"))); + t!(h.referer("foo")); + t!(h.perform()); +} + +#[test] +fn useragent() { + let s = Server::new(); + s.receive("\ +GET / HTTP/1.1\r\n\ +User-Agent: foo\r\n\ +Host: 127.0.0.1:$PORT\r\n\ +Accept: */*\r\n\ +\r\n"); + s.send("\ +HTTP/1.1 200 OK\r\n\ +\r\n"); + + let mut h = handle(); + t!(h.url(&s.url("/"))); + t!(h.useragent("foo")); + t!(h.perform()); +} + +#[test] +fn custom_headers() { + let s = Server::new(); + s.receive("\ +GET / HTTP/1.1\r\n\ +Host: 127.0.0.1:$PORT\r\n\ +Foo: bar\r\n\ +\r\n"); + s.send("\ +HTTP/1.1 200 OK\r\n\ +\r\n"); + + let mut custom = List::new(); + t!(custom.append("Foo: bar")); + t!(custom.append("Accept:")); + let mut h = handle(); + t!(h.url(&s.url("/"))); + t!(h.http_headers(custom)); + t!(h.perform()); +} + +#[test] +fn cookie() { + let s = Server::new(); + s.receive("\ +GET / HTTP/1.1\r\n\ +Host: 127.0.0.1:$PORT\r\n\ +Accept: */*\r\n\ +Cookie: foo\r\n\ +\r\n"); + s.send("\ +HTTP/1.1 200 OK\r\n\ +\r\n"); + + let mut h = handle(); + t!(h.url(&s.url("/"))); + t!(h.cookie("foo")); + t!(h.perform()); +} + +#[test] +fn url_encoding() { + let mut h = handle(); + assert_eq!(h.url_encode(b"foo"), "foo"); + assert_eq!(h.url_encode(b"foo bar"), "foo%20bar"); + assert_eq!(h.url_encode(b"foo bar\xff"), "foo%20bar%FF"); + assert_eq!(h.url_encode(b""), ""); + assert_eq!(h.url_decode("foo"), b"foo"); + assert_eq!(h.url_decode("foo%20bar"), b"foo bar"); + assert_eq!(h.url_decode("foo%2"), b"foo%2"); + assert_eq!(h.url_decode("foo%xx"), b"foo%xx"); + assert_eq!(h.url_decode("foo%ff"), b"foo\xff"); + assert_eq!(h.url_decode(""), b""); +} + +#[test] +fn getters() { + let s = Server::new(); + s.receive("\ +GET / HTTP/1.1\r\n\ +Host: 127.0.0.1:$PORT\r\n\ +Accept: */*\r\n\ +\r\n"); + s.send("\ +HTTP/1.1 200 OK\r\n\ +\r\n"); + + let mut h = handle(); + t!(h.url(&s.url("/"))); + t!(h.cookie_file("/dev/null")); + t!(h.perform()); + assert_eq!(t!(h.response_code()), 200); + assert_eq!(t!(h.redirect_count()), 0); + assert_eq!(t!(h.redirect_url()), None); + assert_eq!(t!(h.content_type()), None); + + let addr = format!("http://{}/", s.addr()); + assert_eq!(t!(h.effective_url()), Some(&addr[..])); + + // TODO: test this + // let cookies = t!(h.cookies()).iter() + // .map(|s| s.to_vec()) + // .collect::>(); + // assert_eq!(cookies.len(), 1); +} + +#[test] +#[should_panic] +fn 
panic_in_callback() { + let s = Server::new(); + s.receive("\ +GET / HTTP/1.1\r\n\ +Host: 127.0.0.1:$PORT\r\n\ +Accept: */*\r\n\ +\r\n"); + s.send("\ +HTTP/1.1 200 OK\r\n\ +\r\n"); + + let mut h = handle(); + t!(h.url(&s.url("/"))); + t!(h.header_function(|_| panic!())); + t!(h.perform()); +} + +#[test] +fn abort_read() { + let s = Server::new(); + s.receive("\ +PUT / HTTP/1.1\r\n\ +Host: 127.0.0.1:$PORT\r\n\ +Accept: */*\r\n\ +Content-Length: 2\r\n\ +\r\n"); + s.send("\ +HTTP/1.1 200 OK\r\n\ +\r\n"); + + let mut h = handle(); + t!(h.url(&s.url("/"))); + t!(h.read_function(|_| Err(ReadError::Abort))); + t!(h.put(true)); + t!(h.in_filesize(2)); + let mut list = List::new(); + t!(list.append("Expect:")); + t!(h.http_headers(list)); + let err = h.perform().unwrap_err(); + assert!(err.is_aborted_by_callback()); +} + +#[test] +fn pause_write_then_resume() { + let s = Server::new(); + s.receive("\ +GET / HTTP/1.1\r\n\ +Host: 127.0.0.1:$PORT\r\n\ +Accept: */*\r\n\ +\r\n"); + s.send("\ +HTTP/1.1 200 OK\r\n\ +\r\n +a\n +b"); + + let mut h = handle(); + t!(h.url(&s.url("/"))); + t!(h.progress(true)); + + struct State<'a, 'b> { + paused: Cell, + unpaused: Cell, + transfer: RefCell>, + } + + let h = Rc::new(State { + paused: Cell::new(false), + unpaused: Cell::new(false), + transfer: RefCell::new(h.transfer()), + }); + + let h2 = h.clone(); + t!(h.transfer.borrow_mut().write_function(move |data| { + if h2.unpaused.get() { + h2.unpaused.set(false); + Ok(data.len()) + } else { + h2.paused.set(true); + Err(WriteError::Pause) + } + })); + let h2 = h.clone(); + t!(h.transfer.borrow_mut().progress_function(move |_, _, _, _| { + if h2.paused.get() { + h2.paused.set(false); + h2.unpaused.set(true); + t!(h2.transfer.borrow().unpause_write()); + } + true + })); + t!(h.transfer.borrow().perform()); +} + +#[test] +fn perform_in_perform_is_bad() { + let s = Server::new(); + s.receive("\ +GET / HTTP/1.1\r\n\ +Host: 127.0.0.1:$PORT\r\n\ +Accept: */*\r\n\ +\r\n"); + s.send("\ +HTTP/1.1 200 OK\r\n\ +\r\n +a\n +b"); + + let mut h = handle(); + t!(h.url(&s.url("/"))); + t!(h.progress(true)); + + let h = Rc::new(RefCell::new(h.transfer())); + + let h2 = h.clone(); + t!(h.borrow_mut().write_function(move |data| { + assert!(h2.borrow().perform().is_err()); + Ok(data.len()) + })); + t!(h.borrow().perform()); +} + +// Stupid test to check if unix_socket is callable +#[test] +fn check_unix_socket() { + let mut h = handle(); + h.unix_socket("/var/something.socks").is_ok(); +} + diff --git a/curl/tests/formdata b/curl/tests/formdata new file mode 100644 index 000000000..ce0136250 --- /dev/null +++ b/curl/tests/formdata @@ -0,0 +1 @@ +hello diff --git a/curl/tests/multi.rs b/curl/tests/multi.rs new file mode 100644 index 000000000..c6eeea008 --- /dev/null +++ b/curl/tests/multi.rs @@ -0,0 +1,253 @@ +#![cfg(unix)] + +extern crate curl; +extern crate mio; +extern crate mio_extras; + +use std::collections::HashMap; +use std::io::{Read, Cursor}; +use std::time::Duration; + +use curl::easy::{Easy, List}; +use curl::multi::Multi; + +macro_rules! 
t { + ($e:expr) => (match $e { + Ok(e) => e, + Err(e) => panic!("{} failed with {:?}", stringify!($e), e), + }) +} + +use server::Server; +mod server; + +#[test] +fn smoke() { + let m = Multi::new(); + let mut e = Easy::new(); + + let s = Server::new(); + s.receive("\ +GET / HTTP/1.1\r\n\ +Host: 127.0.0.1:$PORT\r\n\ +Accept: */*\r\n\ +\r\n"); + s.send("HTTP/1.1 200 OK\r\n\r\n"); + + t!(e.url(&s.url("/"))); + let _e = t!(m.add(e)); + while t!(m.perform()) > 0 { + t!(m.wait(&mut [], Duration::from_secs(1))); + } +} + +#[test] +fn smoke2() { + let m = Multi::new(); + + let s1 = Server::new(); + s1.receive("\ +GET / HTTP/1.1\r\n\ +Host: 127.0.0.1:$PORT\r\n\ +Accept: */*\r\n\ +\r\n"); + s1.send("HTTP/1.1 200 OK\r\n\r\n"); + + let s2 = Server::new(); + s2.receive("\ +GET / HTTP/1.1\r\n\ +Host: 127.0.0.1:$PORT\r\n\ +Accept: */*\r\n\ +\r\n"); + s2.send("HTTP/1.1 200 OK\r\n\r\n"); + + let mut e1 = Easy::new(); + t!(e1.url(&s1.url("/"))); + let _e1 = t!(m.add(e1)); + let mut e2 = Easy::new(); + t!(e2.url(&s2.url("/"))); + let _e2 = t!(m.add(e2)); + + while t!(m.perform()) > 0 { + t!(m.wait(&mut [], Duration::from_secs(1))); + } + + let mut done = 0; + m.messages(|msg| { + msg.result().unwrap().unwrap(); + done += 1; + }); + assert_eq!(done, 2); +} + +#[test] +fn upload_lots() { + use curl::multi::{Socket, SocketEvents, Events}; + + #[derive(Debug)] + enum Message { + Timeout(Option), + Wait(Socket, SocketEvents, usize), + } + + let mut m = Multi::new(); + let poll = t!(mio::Poll::new()); + let (tx, rx) = mio_extras::channel::channel(); + let tx2 = tx.clone(); + t!(m.socket_function(move |socket, events, token| { + t!(tx2.send(Message::Wait(socket, events, token))); + })); + t!(m.timer_function(move |dur| { + t!(tx.send(Message::Timeout(dur))); + true + })); + + let s = Server::new(); + s.receive(&format!("\ +PUT / HTTP/1.1\r\n\ +Host: 127.0.0.1:$PORT\r\n\ +Accept: */*\r\n\ +Content-Length: 131072\r\n\ +\r\n\ +{}\n", vec!["a"; 128 * 1024 - 1].join(""))); + s.send("\ +HTTP/1.1 200 OK\r\n\ +\r\n"); + + let mut data = vec![b'a'; 128 * 1024 - 1]; + data.push(b'\n'); + let mut data = Cursor::new(data); + let mut list = List::new(); + t!(list.append("Expect:")); + let mut h = Easy::new(); + t!(h.url(&s.url("/"))); + t!(h.put(true)); + t!(h.read_function(move |buf| { + Ok(data.read(buf).unwrap()) + })); + t!(h.in_filesize(128 * 1024)); + t!(h.upload(true)); + t!(h.http_headers(list)); + + t!(poll.register(&rx, + mio::Token(0), + mio::Ready::all(), + mio::PollOpt::level())); + + let e = t!(m.add(h)); + + assert!(t!(m.perform()) > 0); + let mut next_token = 1; + let mut token_map = HashMap::new(); + let mut cur_timeout = None; + let mut events = mio::Events::with_capacity(128); + let mut running = true; + + while running { + let n = t!(poll.poll(&mut events, cur_timeout)); + + if n == 0 { + if t!(m.timeout()) == 0 { + running = false; + } + } + + for event in events.iter() { + while event.token() == mio::Token(0) { + match rx.try_recv() { + Ok(Message::Timeout(dur)) => cur_timeout = dur, + Ok(Message::Wait(socket, events, token)) => { + let evented = mio::unix::EventedFd(&socket); + if events.remove() { + token_map.remove(&token).unwrap(); + } else { + let mut e = mio::Ready::none(); + if events.input() { + e = e | mio::Ready::readable(); + } + if events.output() { + e = e | mio::Ready::writable(); + } + if token == 0 { + let token = next_token; + next_token += 1; + t!(m.assign(socket, token)); + token_map.insert(token, socket); + t!(poll.register(&evented, + mio::Token(token), + e, + mio::PollOpt::level())); 
+ } else { + t!(poll.reregister(&evented, + mio::Token(token), + e, + mio::PollOpt::level())); + } + } + } + Err(_) => break, + } + } + + if event.token() == mio::Token(0) { + continue + } + + let token = event.token(); + let socket = token_map[&token.into()]; + let mut e = Events::new(); + if event.kind().is_readable() { + e.input(true); + } + if event.kind().is_writable() { + e.output(true); + } + if event.kind().is_error() { + e.error(true); + } + let remaining = t!(m.action(socket, &e)); + if remaining == 0 { + running = false; + } + } + } + + let mut done = 0; + m.messages(|m| { + m.result().unwrap().unwrap(); + done += 1; + }); + assert_eq!(done, 1); + + let mut e = t!(m.remove(e)); + assert_eq!(t!(e.response_code()), 200); +} + +// Tests passing raw file descriptors to Multi::wait. The test is limited to Linux only as the +// semantics of the underlying poll(2) system call used by curl apparently differ on other +// platforms, making the test fail. +#[cfg(target_os = "linux")] +#[test] +fn waitfds() { + use std::fs::File; + use std::os::unix::io::AsRawFd; + use curl::multi::WaitFd; + + let filenames = ["/dev/null", "/dev/zero", "/dev/urandom"]; + let files: Vec = filenames.iter() + .map(|filename| File::open(filename).unwrap()) + .collect(); + let mut waitfds: Vec = files.iter().map(|f| { + let mut waitfd = WaitFd::new(); + waitfd.set_fd(f.as_raw_fd()); + waitfd.poll_on_read(true); + waitfd + }).collect(); + + let m = Multi::new(); + let events = t!(m.wait(&mut waitfds, Duration::from_secs(1))); + assert_eq!(events, 3); + for waitfd in waitfds { + assert!(waitfd.received_read()); + } +} diff --git a/curl/tests/post.rs b/curl/tests/post.rs new file mode 100644 index 000000000..e13a5ec3f --- /dev/null +++ b/curl/tests/post.rs @@ -0,0 +1,108 @@ +extern crate curl; + +use std::time::Duration; + +macro_rules! 
t { + ($e:expr) => (match $e { + Ok(e) => e, + Err(e) => panic!("{} failed with {:?}", stringify!($e), e), + }) +} + +use curl::easy::{Easy, List, Form}; + +use server::Server; +mod server; + +fn handle() -> Easy { + let mut e = Easy::new(); + t!(e.timeout(Duration::new(20, 0))); + let mut list = List::new(); + t!(list.append("Expect:")); + t!(e.http_headers(list)); + return e +} + +#[test] +fn custom() { + let s = Server::new(); + s.receive("\ +POST / HTTP/1.1\r\n\ +Host: 127.0.0.1:$PORT\r\n\ +Accept: */*\r\n\ +Content-Length: 142\r\n\ +Content-Type: multipart/form-data; boundary=--[..]\r\n\ +\r\n\ +--[..]\r\n\ +Content-Disposition: form-data; name=\"foo\"\r\n\ +\r\n\ +1234\r\n\ +--[..]\r\n"); + s.send("HTTP/1.1 200 OK\r\n\r\n"); + + let mut handle = handle(); + let mut form = Form::new(); + t!(form.part("foo").contents(b"1234").add()); + t!(handle.url(&s.url("/"))); + t!(handle.httppost(form)); + t!(handle.perform()); +} + +#[test] +fn buffer() { + let s = Server::new(); + s.receive("\ +POST / HTTP/1.1\r\n\ +Host: 127.0.0.1:$PORT\r\n\ +Accept: */*\r\n\ +Content-Length: 181\r\n\ +Content-Type: multipart/form-data; boundary=--[..]\r\n\ +\r\n\ +--[..]\r\n\ +Content-Disposition: form-data; name=\"foo\"; filename=\"bar\"\r\n\ +Content-Type: foo/bar\r\n\ +\r\n\ +1234\r\n\ +--[..]\r\n"); + s.send("HTTP/1.1 200 OK\r\n\r\n"); + + let mut handle = handle(); + let mut form = Form::new(); + t!(form.part("foo") + .buffer("bar", b"1234".to_vec()) + .content_type("foo/bar") + .add()); + t!(handle.url(&s.url("/"))); + t!(handle.httppost(form)); + t!(handle.perform()); +} + +#[test] +fn file() { + let s = Server::new(); + let formdata = include_str!("formdata"); + s.receive(format!("\ +POST / HTTP/1.1\r\n\ +Host: 127.0.0.1:$PORT\r\n\ +Accept: */*\r\n\ +Content-Length: {}\r\n\ +Content-Type: multipart/form-data; boundary=--[..]\r\n\ +\r\n\ +--[..]\r\n\ +Content-Disposition: form-data; name=\"foo\"; filename=\"formdata\"\r\n\ +Content-Type: application/octet-stream\r\n\ +\r\n\ +{}\ +\r\n\ +--[..]\r\n", 199 + formdata.len(), formdata).as_str()); + s.send("HTTP/1.1 200 OK\r\n\r\n"); + + let mut handle = handle(); + let mut form = Form::new(); + t!(form.part("foo") + .file("tests/formdata") + .add()); + t!(handle.url(&s.url("/"))); + t!(handle.httppost(form)); + t!(handle.perform()); +} diff --git a/curl/tests/server/mod.rs b/curl/tests/server/mod.rs new file mode 100644 index 000000000..445cf901e --- /dev/null +++ b/curl/tests/server/mod.rs @@ -0,0 +1,175 @@ +#![allow(dead_code)] + +use std::collections::HashSet; +use std::net::{TcpListener, SocketAddr, TcpStream}; +use std::io::prelude::*; +use std::thread; +use std::sync::mpsc::{Sender, Receiver, channel}; +use std::io::BufReader; + +pub struct Server { + messages: Option>, + addr: SocketAddr, + thread: Option>, +} + +enum Message { + Read(String), + Write(String), +} + +fn run(listener: &TcpListener, rx: &Receiver) { + let mut socket = BufReader::new(listener.accept().unwrap().0); + for msg in rx.iter() { + match msg { + Message::Read(ref expected) => { + let mut expected = &expected[..]; + let mut expected_headers = HashSet::new(); + while let Some(i) = expected.find("\n") { + let line = &expected[..i + 1]; + expected = &expected[i + 1..]; + expected_headers.insert(line); + if line == "\r\n" { + break + } + } + + let mut expected_len = None; + while expected_headers.len() > 0 { + let mut actual = String::new(); + t!(socket.read_line(&mut actual)); + if actual.starts_with("Content-Length") { + let len = actual.split(": ").skip(1).next().unwrap(); + 
expected_len = len.trim().parse().ok(); + } + // various versions of libcurl do different things here + if actual == "Proxy-Connection: Keep-Alive\r\n" { + continue + } + if expected_headers.remove(&actual[..]) { + continue + } + + let mut found = None; + for header in expected_headers.iter() { + if lines_match(header, &actual) { + found = Some(header.clone()); + break + } + } + if let Some(found) = found { + expected_headers.remove(&found); + continue + } + panic!("unexpected header: {:?} (remaining headers {:?})", + actual, expected_headers); + } + for header in expected_headers { + panic!("expected header but not found: {:?}", header); + } + + let mut line = String::new(); + let mut socket = match expected_len { + Some(amt) => socket.by_ref().take(amt), + None => socket.by_ref().take(expected.len() as u64), + }; + while socket.limit() > 0 { + line.truncate(0); + t!(socket.read_line(&mut line)); + if line.len() == 0 { + break + } + if expected.len() == 0 { + panic!("unexpected line: {:?}", line); + } + let i = expected.find("\n").unwrap_or(expected.len() - 1); + let expected_line = &expected[..i + 1]; + expected = &expected[i + 1..]; + if lines_match(expected_line, &line) { + continue + } + panic!("lines didn't match:\n\ + expected: {:?}\n\ + actual: {:?}\n", expected_line, line) + } + if expected.len() != 0 { + println!("didn't get expected data: {:?}", expected); + } + } + Message::Write(ref to_write) => { + t!(socket.get_mut().write_all(to_write.as_bytes())); + return + } + } + } + + let mut dst = Vec::new(); + t!(socket.read_to_end(&mut dst)); + assert!(dst.len() == 0); +} + +fn lines_match(expected: &str, mut actual: &str) -> bool { + for (i, part) in expected.split("[..]").enumerate() { + match actual.find(part) { + Some(j) => { + if i == 0 && j != 0 { + return false + } + actual = &actual[j + part.len()..]; + } + None => { + return false + } + } + } + actual.is_empty() || expected.ends_with("[..]") +} + +impl Server { + pub fn new() -> Server { + let listener = t!(TcpListener::bind("127.0.0.1:0")); + let addr = t!(listener.local_addr()); + let (tx, rx) = channel(); + let thread = thread::spawn(move || run(&listener, &rx)); + Server { + messages: Some(tx), + addr: addr, + thread: Some(thread), + } + } + + pub fn receive(&self, msg: &str) { + let msg = msg.replace("$PORT", &self.addr.port().to_string()); + self.msg(Message::Read(msg)); + } + + pub fn send(&self, msg: &str) { + let msg = msg.replace("$PORT", &self.addr.port().to_string()); + self.msg(Message::Write(msg)); + } + + fn msg(&self, msg: Message) { + t!(self.messages.as_ref().unwrap().send(msg)); + } + + pub fn addr(&self) -> &SocketAddr { + &self.addr + } + + pub fn url(&self, path: &str) -> String { + format!("http://{}{}", self.addr, path) + } +} + +impl Drop for Server { + fn drop(&mut self) { + drop(TcpStream::connect(&self.addr)); + drop(self.messages.take()); + let res = self.thread.take().unwrap().join(); + if !thread::panicking() { + t!(res); + } else if let Err(e) = res { + println!("child server thread also failed: {:?}", e); + } + } +} diff --git a/env_logger/.cargo-checksum.json b/env_logger/.cargo-checksum.json new file mode 100644 index 000000000..527d0d9cd --- /dev/null +++ b/env_logger/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{},"package":"15b0a4d2e39f8420210be8b27eeda28029729e2fd4291019455016c348240c38"} \ No newline at end of file diff --git a/env_logger/Cargo.toml b/env_logger/Cargo.toml new file mode 100644 index 000000000..3a19c6bca --- /dev/null +++ b/env_logger/Cargo.toml @@ -0,0 +1,50 @@ +# 
THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "env_logger" +version = "0.5.13" +authors = ["The Rust Project Developers"] +description = "A logging implementation for `log` which is configured via an environment\nvariable.\n" +documentation = "https://docs.rs/env_logger" +readme = "README.md" +keywords = ["logging", "log", "logger"] +categories = ["development-tools::debugging"] +license = "MIT/Apache-2.0" +repository = "https://github.com/sebasmagri/env_logger/" + +[[test]] +name = "regexp_filter" +harness = false + +[[test]] +name = "log-in-log" +harness = false +[dependencies.atty] +version = "0.2.5" + +[dependencies.humantime] +version = "1.1" + +[dependencies.log] +version = "0.4" +features = ["std"] + +[dependencies.regex] +version = "1.0.3" +optional = true + +[dependencies.termcolor] +version = "1" + +[features] +default = ["regex"] diff --git a/env_logger/LICENSE-APACHE b/env_logger/LICENSE-APACHE new file mode 100644 index 000000000..16fe87b06 --- /dev/null +++ b/env_logger/LICENSE-APACHE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/env_logger/LICENSE-MIT b/env_logger/LICENSE-MIT new file mode 100644 index 000000000..39d4bdb5a --- /dev/null +++ b/env_logger/LICENSE-MIT @@ -0,0 +1,25 @@ +Copyright (c) 2014 The Rust Project Developers + +Permission is hereby granted, free of charge, to any +person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without +limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software +is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions +of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/env_logger/README.md b/env_logger/README.md new file mode 100644 index 000000000..332dee803 --- /dev/null +++ b/env_logger/README.md @@ -0,0 +1,140 @@ +env_logger [![Build Status](https://travis-ci.org/sebasmagri/env_logger.svg?branch=master)](https://travis-ci.org/sebasmagri/env_logger) [![Maintenance](https://img.shields.io/badge/maintenance-actively%20maintained-brightgreen.svg)](https://github.com/sebasmagri/env_logger) [![crates.io](https://img.shields.io/crates/v/env_logger.svg)](https://crates.io/crates/env_logger) [![Documentation](https://img.shields.io/badge/docs-current-blue.svg)](https://docs.rs/env_logger) +========== + +Implements a logger that can be configured via environment variables. + +## Usage + +### In libraries + +`env_logger` makes sense when used in executables (binary projects). Libraries should use the [`log`](https://doc.rust-lang.org/log) crate instead. + +### In executables + +It must be added along with `log` to the project dependencies: + +```toml +[dependencies] +log = "0.4.0" +env_logger = "0.5.13" +``` + +`env_logger` must be initialized as early as possible in the project. After it's initialized, you can use the `log` macros to do actual logging. + +```rust +#[macro_use] +extern crate log; +extern crate env_logger; + +fn main() { + env_logger::init(); + + info!("starting up"); + + // ... +} +``` + +Then when running the executable, specify a value for the `RUST_LOG` +environment variable that corresponds with the log messages you want to show. + +```bash +$ RUST_LOG=info ./main +INFO: 2017-11-09T02:12:24Z: main: starting up +``` + +### In tests + +Tests can use the `env_logger` crate to see log messages generated during that test: + +```toml +[dependencies] +log = "0.4.0" + +[dev-dependencies] +env_logger = "0.5.13" +``` + +```rust +#[macro_use] +extern crate log; + +fn add_one(num: i32) -> i32 { + info!("add_one called with {}", num); + num + 1 +} + +#[cfg(test)] +mod tests { + use super::*; + extern crate env_logger; + + #[test] + fn it_adds_one() { + let _ = env_logger::try_init(); + info!("can log from the test too"); + assert_eq!(3, add_one(2)); + } + + #[test] + fn it_handles_negative_numbers() { + let _ = env_logger::try_init(); + info!("logging from another test"); + assert_eq!(-7, add_one(-8)); + } +} +``` + +Assuming the module under test is called `my_lib`, running the tests with the +`RUST_LOG` filtering to info messages from this module looks like: + +```bash +$ RUST_LOG=my_lib=info cargo test + Running target/debug/my_lib-... + +running 2 tests +INFO: 2017-11-09T02:12:24Z: my_lib::tests: logging from another test +INFO: 2017-11-09T02:12:24Z: my_lib: add_one called with -8 +test tests::it_handles_negative_numbers ... ok +INFO: 2017-11-09T02:12:24Z: my_lib::tests: can log from the test too +INFO: 2017-11-09T02:12:24Z: my_lib: add_one called with 2 +test tests::it_adds_one ... ok + +test result: ok. 2 passed; 0 failed; 0 ignored; 0 measured +``` + +Note that `env_logger::try_init()` needs to be called in each test in which you +want to enable logging. Additionally, the default behavior of tests to +run in parallel means that logging output may be interleaved with test output. 
+Either run tests in a single thread by specifying `RUST_TEST_THREADS=1` or by +running one test by specifying its name as an argument to the test binaries as +directed by the `cargo test` help docs: + +```bash +$ RUST_LOG=my_lib=info cargo test it_adds_one + Running target/debug/my_lib-... + +running 1 test +INFO: 2017-11-09T02:12:24Z: my_lib::tests: can log from the test too +INFO: 2017-11-09T02:12:24Z: my_lib: add_one called with 2 +test tests::it_adds_one ... ok + +test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured +``` + +## Configuring log target + +By default, `env_logger` logs to stderr. If you want to log to stdout instead, +you can use the `Builder` to change the log target: + +```rust +use std::env; +use env_logger::{Builder, Target}; + +let mut builder = Builder::new(); +builder.target(Target::Stdout); +if env::var("RUST_LOG").is_ok() { + builder.parse(&env::var("RUST_LOG").unwrap()); +} +builder.init(); +``` diff --git a/env_logger/examples/custom_default_format.rs b/env_logger/examples/custom_default_format.rs new file mode 100644 index 000000000..d1a45b608 --- /dev/null +++ b/env_logger/examples/custom_default_format.rs @@ -0,0 +1,44 @@ +/*! +Disabling parts of the default format. + +Before running this example, try setting the `MY_LOG_LEVEL` environment variable to `info`: + +```no_run,shell +$ export MY_LOG_LEVEL='info' +``` + +Also try setting the `MY_LOG_STYLE` environment variable to `never` to disable colors +or `auto` to enable them: + +```no_run,shell +$ export MY_LOG_STYLE=never +``` + +If you want to control the logging output completely, see the `custom_logger` example. +*/ + +#[macro_use] +extern crate log; +extern crate env_logger; + +use env_logger::{Env, Builder}; + +fn init_logger() { + let env = Env::default() + .filter("MY_LOG_LEVEL") + .write_style("MY_LOG_STYLE"); + + let mut builder = Builder::from_env(env); + + builder + .default_format_level(false) + .default_format_timestamp_nanos(true); + + builder.init(); +} + +fn main() { + init_logger(); + + info!("a log from `MyLogger`"); +} diff --git a/env_logger/examples/custom_format.rs b/env_logger/examples/custom_format.rs new file mode 100644 index 000000000..68a064d48 --- /dev/null +++ b/env_logger/examples/custom_format.rs @@ -0,0 +1,52 @@ +/*! +Changing the default logging format. + +Before running this example, try setting the `MY_LOG_LEVEL` environment variable to `info`: + +```no_run,shell +$ export MY_LOG_LEVEL='info' +``` + +Also try setting the `MY_LOG_STYLE` environment variable to `never` to disable colors +or `auto` to enable them: + +```no_run,shell +$ export MY_LOG_STYLE=never +``` + +If you want to control the logging output completely, see the `custom_logger` example. 
+*/ + +#[macro_use] +extern crate log; +extern crate env_logger; + +use std::io::Write; + +use env_logger::{Env, Builder, fmt}; + +fn init_logger() { + let env = Env::default() + .filter("MY_LOG_LEVEL") + .write_style("MY_LOG_STYLE"); + + let mut builder = Builder::from_env(env); + + // Use a different format for writing log records + builder.format(|buf, record| { + let mut style = buf.style(); + style.set_bg(fmt::Color::Yellow).set_bold(true); + + let timestamp = buf.timestamp(); + + writeln!(buf, "My formatted log ({}): {}", timestamp, style.value(record.args())) + }); + + builder.init(); +} + +fn main() { + init_logger(); + + info!("a log from `MyLogger`"); +} diff --git a/env_logger/examples/custom_logger.rs b/env_logger/examples/custom_logger.rs new file mode 100644 index 000000000..792c9c8e5 --- /dev/null +++ b/env_logger/examples/custom_logger.rs @@ -0,0 +1,60 @@ +/*! +Using `env_logger` to drive a custom logger. + +Before running this example, try setting the `MY_LOG_LEVEL` environment variable to `info`: + +```no_run,shell +$ export MY_LOG_LEVEL='info' +``` + +If you only want to change the way logs are formatted, look at the `custom_format` example. +*/ + +#[macro_use] +extern crate log; +extern crate env_logger; +use env_logger::filter::Filter; +use log::{Log, Metadata, Record, SetLoggerError}; + +struct MyLogger { + inner: Filter +} + +impl MyLogger { + fn new() -> MyLogger { + use env_logger::filter::Builder; + let mut builder = Builder::from_env("MY_LOG_LEVEL"); + + MyLogger { + inner: builder.build() + } + } + + fn init() -> Result<(), SetLoggerError> { + let logger = Self::new(); + + log::set_max_level(logger.inner.filter()); + log::set_boxed_logger(Box::new(logger)) + } +} + +impl Log for MyLogger { + fn enabled(&self, metadata: &Metadata) -> bool { + self.inner.enabled(metadata) + } + + fn log(&self, record: &Record) { + // Check if the record is matched by the logger before logging + if self.inner.matches(record) { + println!("{} - {}", record.level(), record.args()); + } + } + + fn flush(&self) { } +} + +fn main() { + MyLogger::init().unwrap(); + + info!("a log from `MyLogger`"); +} diff --git a/env_logger/examples/default.rs b/env_logger/examples/default.rs new file mode 100644 index 000000000..5d799fe05 --- /dev/null +++ b/env_logger/examples/default.rs @@ -0,0 +1,36 @@ +/*! +Using `env_logger`. + +Before running this example, try setting the `MY_LOG_LEVEL` environment variable to `info`: + +```no_run,shell +$ export MY_LOG_LEVEL='info' +``` + +Also try setting the `MY_LOG_STYLE` environment variable to `never` to disable colors +or `auto` to enable them: + +```no_run,shell +$ export MY_LOG_STYLE=never +``` +*/ + +#[macro_use] +extern crate log; +extern crate env_logger; + +use env_logger::Env; + +fn main() { + let env = Env::default() + .filter_or("MY_LOG_LEVEL", "trace") + .write_style_or("MY_LOG_STYLE", "always"); + + env_logger::init_from_env(env); + + trace!("some trace log"); + debug!("some debug log"); + info!("some information log"); + warn!("some warning log"); + error!("some error log"); +} diff --git a/env_logger/examples/direct_logger.rs b/env_logger/examples/direct_logger.rs new file mode 100644 index 000000000..410230bcd --- /dev/null +++ b/env_logger/examples/direct_logger.rs @@ -0,0 +1,40 @@ +/*! +Using `env_logger::Logger` and the `log::Log` trait directly. + +This example doesn't rely on environment variables, or having a static logger installed. 
+*/ + +extern crate log; +extern crate env_logger; + +fn record() -> log::Record<'static> { + let error_metadata = log::MetadataBuilder::new() + .target("myApp") + .level(log::Level::Error) + .build(); + + log::Record::builder() + .metadata(error_metadata) + .args(format_args!("Error!")) + .line(Some(433)) + .file(Some("app.rs")) + .module_path(Some("server")) + .build() +} + +fn main() { + use log::Log; + + let stylish_logger = env_logger::Builder::new() + .filter(None, log::LevelFilter::Error) + .write_style(env_logger::WriteStyle::Always) + .build(); + + let unstylish_logger = env_logger::Builder::new() + .filter(None, log::LevelFilter::Error) + .write_style(env_logger::WriteStyle::Never) + .build(); + + stylish_logger.log(&record()); + unstylish_logger.log(&record()); +} \ No newline at end of file diff --git a/env_logger/src/filter/mod.rs b/env_logger/src/filter/mod.rs new file mode 100644 index 000000000..80a3f30fa --- /dev/null +++ b/env_logger/src/filter/mod.rs @@ -0,0 +1,568 @@ +//! Filtering for log records. +//! +//! This module contains the log filtering used by `env_logger` to match records. +//! You can use the `Filter` type in your own logger implementation to use the same +//! filter parsing and matching as `env_logger`. For more details about the format +//! for directive strings see [Enabling Logging]. +//! +//! ## Using `env_logger` in your own logger +//! +//! You can use `env_logger`'s filtering functionality with your own logger. +//! Call [`Builder::parse`] to parse directives from a string when constructing +//! your logger. Call [`Filter::matches`] to check whether a record should be +//! logged based on the parsed filters when log records are received. +//! +//! ``` +//! extern crate log; +//! extern crate env_logger; +//! use env_logger::filter::Filter; +//! use log::{Log, Metadata, Record}; +//! +//! struct MyLogger { +//! filter: Filter +//! } +//! +//! impl MyLogger { +//! fn new() -> MyLogger { +//! use env_logger::filter::Builder; +//! let mut builder = Builder::new(); +//! +//! // Parse a directives string from an environment variable +//! if let Ok(ref filter) = std::env::var("MY_LOG_LEVEL") { +//! builder.parse(filter); +//! } +//! +//! MyLogger { +//! filter: builder.build() +//! } +//! } +//! } +//! +//! impl Log for MyLogger { +//! fn enabled(&self, metadata: &Metadata) -> bool { +//! self.filter.enabled(metadata) +//! } +//! +//! fn log(&self, record: &Record) { +//! // Check if the record is matched by the filter +//! if self.filter.matches(record) { +//! println!("{:?}", record); +//! } +//! } +//! +//! fn flush(&self) {} +//! } +//! # fn main() {} +//! ``` +//! +//! [Enabling Logging]: ../index.html#enabling-logging +//! [`Builder::parse`]: struct.Builder.html#method.parse +//! [`Filter::matches`]: struct.Filter.html#method.matches + +use std::env; +use std::mem; +use std::fmt; +use log::{Level, LevelFilter, Record, Metadata}; + +#[cfg(feature = "regex")] +#[path = "regex.rs"] +mod inner; + +#[cfg(not(feature = "regex"))] +#[path = "string.rs"] +mod inner; + +/// A log filter. +/// +/// This struct can be used to determine whether or not a log record +/// should be written to the output. +/// Use the [`Builder`] type to parse and construct a `Filter`. +/// +/// [`Builder`]: struct.Builder.html +pub struct Filter { + directives: Vec, + filter: Option, +} + +/// A builder for a log filter. +/// +/// It can be used to parse a set of directives from a string before building +/// a [`Filter`] instance. 
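+///
+/// (Editorial note, not part of the upstream docs: each directive takes the
+/// form `path::to::module=level`, directives are separated by commas, and an
+/// optional trailing `/regex` restricts matching to records whose message
+/// text matches the regex, as implemented by `parse_spec` below.)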
+/// +/// ## Example +/// +/// ``` +/// #[macro_use] +/// extern crate log; +/// extern crate env_logger; +/// +/// use std::env; +/// use std::io; +/// use env_logger::filter::Builder; +/// +/// fn main() { +/// let mut builder = Builder::new(); +/// +/// // Parse a logging filter from an environment variable. +/// if let Ok(rust_log) = env::var("RUST_LOG") { +/// builder.parse(&rust_log); +/// } +/// +/// let filter = builder.build(); +/// } +/// ``` +/// +/// [`Filter`]: struct.Filter.html +pub struct Builder { + directives: Vec, + filter: Option, +} + +#[derive(Debug)] +struct Directive { + name: Option, + level: LevelFilter, +} + +impl Filter { + /// Returns the maximum `LevelFilter` that this filter instance is + /// configured to output. + /// + /// # Example + /// + /// ```rust + /// extern crate log; + /// extern crate env_logger; + /// + /// use log::LevelFilter; + /// use env_logger::filter::Builder; + /// + /// fn main() { + /// let mut builder = Builder::new(); + /// builder.filter(Some("module1"), LevelFilter::Info); + /// builder.filter(Some("module2"), LevelFilter::Error); + /// + /// let filter = builder.build(); + /// assert_eq!(filter.filter(), LevelFilter::Info); + /// } + /// ``` + pub fn filter(&self) -> LevelFilter { + self.directives.iter() + .map(|d| d.level) + .max() + .unwrap_or(LevelFilter::Off) + } + + /// Checks if this record matches the configured filter. + pub fn matches(&self, record: &Record) -> bool { + if !self.enabled(record.metadata()) { + return false; + } + + if let Some(filter) = self.filter.as_ref() { + if !filter.is_match(&*record.args().to_string()) { + return false; + } + } + + true + } + + /// Determines if a log message with the specified metadata would be logged. + pub fn enabled(&self, metadata: &Metadata) -> bool { + let level = metadata.level(); + let target = metadata.target(); + + enabled(&self.directives, level, target) + } +} + +impl Builder { + /// Initializes the filter builder with defaults. + pub fn new() -> Builder { + Builder { + directives: Vec::new(), + filter: None, + } + } + + /// Initializes the filter builder from an environment. + pub fn from_env(env: &str) -> Builder { + let mut builder = Builder::new(); + + if let Ok(s) = env::var(env) { + builder.parse(&s); + } + + builder + } + + /// Adds a directive to the filter for a specific module. + pub fn filter_module(&mut self, module: &str, level: LevelFilter) -> &mut Self { + self.filter(Some(module), level) + } + + /// Adds a directive to the filter for all modules. + pub fn filter_level(&mut self, level: LevelFilter) -> &mut Self { + self.filter(None, level) + } + + /// Adds a directive to the filter. + /// + /// The given module (if any) will log at most the specified level provided. + /// If no module is provided then the filter will apply to all log messages. + pub fn filter(&mut self, + module: Option<&str>, + level: LevelFilter) -> &mut Self { + self.directives.push(Directive { + name: module.map(|s| s.to_string()), + level, + }); + self + } + + /// Parses the directives string. + /// + /// See the [Enabling Logging] section for more details. + /// + /// [Enabling Logging]: ../index.html#enabling-logging + pub fn parse(&mut self, filters: &str) -> &mut Self { + let (directives, filter) = parse_spec(filters); + + self.filter = filter; + + for directive in directives { + self.directives.push(directive); + } + self + } + + /// Build a log filter. 
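+    ///
+    /// # Example
+    ///
+    /// A minimal sketch of building a filter from programmatic directives
+    /// (the module name used here is purely illustrative):
+    ///
+    /// ```rust
+    /// extern crate log;
+    /// extern crate env_logger;
+    ///
+    /// use log::LevelFilter;
+    /// use env_logger::filter::Builder;
+    ///
+    /// fn main() {
+    ///     let mut builder = Builder::new();
+    ///     builder.filter_module("chatty::module", LevelFilter::Warn);
+    ///     builder.filter_level(LevelFilter::Info);
+    ///
+    ///     // `build` drains the configured directives into the returned `Filter`.
+    ///     let filter = builder.build();
+    ///     assert_eq!(filter.filter(), LevelFilter::Info);
+    /// }
+    /// ```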
+ pub fn build(&mut self) -> Filter { + if self.directives.is_empty() { + // Adds the default filter if none exist + self.directives.push(Directive { + name: None, + level: LevelFilter::Error, + }); + } else { + // Sort the directives by length of their name, this allows a + // little more efficient lookup at runtime. + self.directives.sort_by(|a, b| { + let alen = a.name.as_ref().map(|a| a.len()).unwrap_or(0); + let blen = b.name.as_ref().map(|b| b.len()).unwrap_or(0); + alen.cmp(&blen) + }); + } + + Filter { + directives: mem::replace(&mut self.directives, Vec::new()), + filter: mem::replace(&mut self.filter, None), + } + } +} + +impl Default for Builder { + fn default() -> Self { + Builder::new() + } +} + +impl fmt::Debug for Filter { + fn fmt(&self, f: &mut fmt::Formatter)->fmt::Result { + f.debug_struct("Filter") + .field("filter", &self.filter) + .field("directives", &self.directives) + .finish() + } +} + +impl fmt::Debug for Builder { + fn fmt(&self, f: &mut fmt::Formatter)->fmt::Result { + f.debug_struct("Filter") + .field("filter", &self.filter) + .field("directives", &self.directives) + .finish() + } +} + +/// Parse a logging specification string (e.g: "crate1,crate2::mod3,crate3::x=error/foo") +/// and return a vector with log directives. +fn parse_spec(spec: &str) -> (Vec, Option) { + let mut dirs = Vec::new(); + + let mut parts = spec.split('/'); + let mods = parts.next(); + let filter = parts.next(); + if parts.next().is_some() { + println!("warning: invalid logging spec '{}', \ + ignoring it (too many '/'s)", spec); + return (dirs, None); + } + mods.map(|m| { for s in m.split(',') { + if s.len() == 0 { continue } + let mut parts = s.split('='); + let (log_level, name) = match (parts.next(), parts.next().map(|s| s.trim()), parts.next()) { + (Some(part0), None, None) => { + // if the single argument is a log-level string or number, + // treat that as a global fallback + match part0.parse() { + Ok(num) => (num, None), + Err(_) => (LevelFilter::max(), Some(part0)), + } + } + (Some(part0), Some(""), None) => (LevelFilter::max(), Some(part0)), + (Some(part0), Some(part1), None) => { + match part1.parse() { + Ok(num) => (num, Some(part0)), + _ => { + println!("warning: invalid logging spec '{}', \ + ignoring it", part1); + continue + } + } + }, + _ => { + println!("warning: invalid logging spec '{}', \ + ignoring it", s); + continue + } + }; + dirs.push(Directive { + name: name.map(|s| s.to_string()), + level: log_level, + }); + }}); + + let filter = filter.map_or(None, |filter| { + match inner::Filter::new(filter) { + Ok(re) => Some(re), + Err(e) => { + println!("warning: invalid regex filter - {}", e); + None + } + } + }); + + return (dirs, filter); +} + + +// Check whether a level and target are enabled by the set of directives. +fn enabled(directives: &[Directive], level: Level, target: &str) -> bool { + // Search for the longest match, the vector is assumed to be pre-sorted. + for directive in directives.iter().rev() { + match directive.name { + Some(ref name) if !target.starts_with(&**name) => {}, + Some(..) 
| None => { + return level <= directive.level + } + } + } + false +} + +#[cfg(test)] +mod tests { + use log::{Level, LevelFilter}; + + use super::{Builder, Filter, Directive, parse_spec, enabled}; + + fn make_logger_filter(dirs: Vec) -> Filter { + let mut logger = Builder::new().build(); + logger.directives = dirs; + logger + } + + #[test] + fn filter_info() { + let logger = Builder::new().filter(None, LevelFilter::Info).build(); + assert!(enabled(&logger.directives, Level::Info, "crate1")); + assert!(!enabled(&logger.directives, Level::Debug, "crate1")); + } + + #[test] + fn filter_beginning_longest_match() { + let logger = Builder::new() + .filter(Some("crate2"), LevelFilter::Info) + .filter(Some("crate2::mod"), LevelFilter::Debug) + .filter(Some("crate1::mod1"), LevelFilter::Warn) + .build(); + assert!(enabled(&logger.directives, Level::Debug, "crate2::mod1")); + assert!(!enabled(&logger.directives, Level::Debug, "crate2")); + } + + #[test] + fn parse_default() { + let logger = Builder::new().parse("info,crate1::mod1=warn").build(); + assert!(enabled(&logger.directives, Level::Warn, "crate1::mod1")); + assert!(enabled(&logger.directives, Level::Info, "crate2::mod2")); + } + + #[test] + fn match_full_path() { + let logger = make_logger_filter(vec![ + Directive { + name: Some("crate2".to_string()), + level: LevelFilter::Info + }, + Directive { + name: Some("crate1::mod1".to_string()), + level: LevelFilter::Warn + } + ]); + assert!(enabled(&logger.directives, Level::Warn, "crate1::mod1")); + assert!(!enabled(&logger.directives, Level::Info, "crate1::mod1")); + assert!(enabled(&logger.directives, Level::Info, "crate2")); + assert!(!enabled(&logger.directives, Level::Debug, "crate2")); + } + + #[test] + fn no_match() { + let logger = make_logger_filter(vec![ + Directive { name: Some("crate2".to_string()), level: LevelFilter::Info }, + Directive { name: Some("crate1::mod1".to_string()), level: LevelFilter::Warn } + ]); + assert!(!enabled(&logger.directives, Level::Warn, "crate3")); + } + + #[test] + fn match_beginning() { + let logger = make_logger_filter(vec![ + Directive { name: Some("crate2".to_string()), level: LevelFilter::Info }, + Directive { name: Some("crate1::mod1".to_string()), level: LevelFilter::Warn } + ]); + assert!(enabled(&logger.directives, Level::Info, "crate2::mod1")); + } + + #[test] + fn match_beginning_longest_match() { + let logger = make_logger_filter(vec![ + Directive { name: Some("crate2".to_string()), level: LevelFilter::Info }, + Directive { name: Some("crate2::mod".to_string()), level: LevelFilter::Debug }, + Directive { name: Some("crate1::mod1".to_string()), level: LevelFilter::Warn } + ]); + assert!(enabled(&logger.directives, Level::Debug, "crate2::mod1")); + assert!(!enabled(&logger.directives, Level::Debug, "crate2")); + } + + #[test] + fn match_default() { + let logger = make_logger_filter(vec![ + Directive { name: None, level: LevelFilter::Info }, + Directive { name: Some("crate1::mod1".to_string()), level: LevelFilter::Warn } + ]); + assert!(enabled(&logger.directives, Level::Warn, "crate1::mod1")); + assert!(enabled(&logger.directives, Level::Info, "crate2::mod2")); + } + + #[test] + fn zero_level() { + let logger = make_logger_filter(vec![ + Directive { name: None, level: LevelFilter::Info }, + Directive { name: Some("crate1::mod1".to_string()), level: LevelFilter::Off } + ]); + assert!(!enabled(&logger.directives, Level::Error, "crate1::mod1")); + assert!(enabled(&logger.directives, Level::Info, "crate2::mod2")); + } + + #[test] + fn 
parse_spec_valid() { + let (dirs, filter) = parse_spec("crate1::mod1=error,crate1::mod2,crate2=debug"); + assert_eq!(dirs.len(), 3); + assert_eq!(dirs[0].name, Some("crate1::mod1".to_string())); + assert_eq!(dirs[0].level, LevelFilter::Error); + + assert_eq!(dirs[1].name, Some("crate1::mod2".to_string())); + assert_eq!(dirs[1].level, LevelFilter::max()); + + assert_eq!(dirs[2].name, Some("crate2".to_string())); + assert_eq!(dirs[2].level, LevelFilter::Debug); + assert!(filter.is_none()); + } + + #[test] + fn parse_spec_invalid_crate() { + // test parse_spec with multiple = in specification + let (dirs, filter) = parse_spec("crate1::mod1=warn=info,crate2=debug"); + assert_eq!(dirs.len(), 1); + assert_eq!(dirs[0].name, Some("crate2".to_string())); + assert_eq!(dirs[0].level, LevelFilter::Debug); + assert!(filter.is_none()); + } + + #[test] + fn parse_spec_invalid_level() { + // test parse_spec with 'noNumber' as log level + let (dirs, filter) = parse_spec("crate1::mod1=noNumber,crate2=debug"); + assert_eq!(dirs.len(), 1); + assert_eq!(dirs[0].name, Some("crate2".to_string())); + assert_eq!(dirs[0].level, LevelFilter::Debug); + assert!(filter.is_none()); + } + + #[test] + fn parse_spec_string_level() { + // test parse_spec with 'warn' as log level + let (dirs, filter) = parse_spec("crate1::mod1=wrong,crate2=warn"); + assert_eq!(dirs.len(), 1); + assert_eq!(dirs[0].name, Some("crate2".to_string())); + assert_eq!(dirs[0].level, LevelFilter::Warn); + assert!(filter.is_none()); + } + + #[test] + fn parse_spec_empty_level() { + // test parse_spec with '' as log level + let (dirs, filter) = parse_spec("crate1::mod1=wrong,crate2="); + assert_eq!(dirs.len(), 1); + assert_eq!(dirs[0].name, Some("crate2".to_string())); + assert_eq!(dirs[0].level, LevelFilter::max()); + assert!(filter.is_none()); + } + + #[test] + fn parse_spec_global() { + // test parse_spec with no crate + let (dirs, filter) = parse_spec("warn,crate2=debug"); + assert_eq!(dirs.len(), 2); + assert_eq!(dirs[0].name, None); + assert_eq!(dirs[0].level, LevelFilter::Warn); + assert_eq!(dirs[1].name, Some("crate2".to_string())); + assert_eq!(dirs[1].level, LevelFilter::Debug); + assert!(filter.is_none()); + } + + #[test] + fn parse_spec_valid_filter() { + let (dirs, filter) = parse_spec("crate1::mod1=error,crate1::mod2,crate2=debug/abc"); + assert_eq!(dirs.len(), 3); + assert_eq!(dirs[0].name, Some("crate1::mod1".to_string())); + assert_eq!(dirs[0].level, LevelFilter::Error); + + assert_eq!(dirs[1].name, Some("crate1::mod2".to_string())); + assert_eq!(dirs[1].level, LevelFilter::max()); + + assert_eq!(dirs[2].name, Some("crate2".to_string())); + assert_eq!(dirs[2].level, LevelFilter::Debug); + assert!(filter.is_some() && filter.unwrap().to_string() == "abc"); + } + + #[test] + fn parse_spec_invalid_crate_filter() { + let (dirs, filter) = parse_spec("crate1::mod1=error=warn,crate2=debug/a.c"); + assert_eq!(dirs.len(), 1); + assert_eq!(dirs[0].name, Some("crate2".to_string())); + assert_eq!(dirs[0].level, LevelFilter::Debug); + assert!(filter.is_some() && filter.unwrap().to_string() == "a.c"); + } + + #[test] + fn parse_spec_empty_with_filter() { + let (dirs, filter) = parse_spec("crate1/a*c"); + assert_eq!(dirs.len(), 1); + assert_eq!(dirs[0].name, Some("crate1".to_string())); + assert_eq!(dirs[0].level, LevelFilter::max()); + assert!(filter.is_some() && filter.unwrap().to_string() == "a*c"); + } +} diff --git a/env_logger/src/filter/regex.rs b/env_logger/src/filter/regex.rs new file mode 100644 index 000000000..a04265413 --- /dev/null +++ 
b/env_logger/src/filter/regex.rs @@ -0,0 +1,29 @@ +extern crate regex; + +use std::fmt; + +use self::regex::Regex; + +#[derive(Debug)] +pub struct Filter { + inner: Regex, +} + +impl Filter { + pub fn new(spec: &str) -> Result { + match Regex::new(spec){ + Ok(r) => Ok(Filter { inner: r }), + Err(e) => Err(e.to_string()), + } + } + + pub fn is_match(&self, s: &str) -> bool { + self.inner.is_match(s) + } +} + +impl fmt::Display for Filter { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + self.inner.fmt(f) + } +} diff --git a/env_logger/src/filter/string.rs b/env_logger/src/filter/string.rs new file mode 100644 index 000000000..96d7ecca1 --- /dev/null +++ b/env_logger/src/filter/string.rs @@ -0,0 +1,22 @@ +use std::fmt; + +#[derive(Debug)] +pub struct Filter { + inner: String, +} + +impl Filter { + pub fn new(spec: &str) -> Result { + Ok(Filter { inner: spec.to_string() }) + } + + pub fn is_match(&self, s: &str) -> bool { + s.contains(&self.inner) + } +} + +impl fmt::Display for Filter { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + self.inner.fmt(f) + } +} diff --git a/env_logger/src/fmt.rs b/env_logger/src/fmt.rs new file mode 100644 index 000000000..d90210fa3 --- /dev/null +++ b/env_logger/src/fmt.rs @@ -0,0 +1,844 @@ +//! Formatting for log records. +//! +//! This module contains a [`Formatter`] that can be used to format log records +//! into without needing temporary allocations. Usually you won't need to worry +//! about the contents of this module and can use the `Formatter` like an ordinary +//! [`Write`]. +//! +//! # Formatting log records +//! +//! The format used to print log records can be customised using the [`Builder::format`] +//! method. +//! Custom formats can apply different color and weight to printed values using +//! [`Style`] builders. +//! +//! ``` +//! use std::io::Write; +//! use env_logger::fmt::Color; +//! +//! let mut builder = env_logger::Builder::new(); +//! +//! builder.format(|buf, record| { +//! let mut level_style = buf.style(); +//! +//! level_style.set_color(Color::Red).set_bold(true); +//! +//! writeln!(buf, "{}: {}", +//! level_style.value(record.level()), +//! record.args()) +//! }); +//! ``` +//! +//! [`Formatter`]: struct.Formatter.html +//! [`Style`]: struct.Style.html +//! [`Builder::format`]: ../struct.Builder.html#method.format +//! [`Write`]: https://doc.rust-lang.org/stable/std/io/trait.Write.html + +use std::io::prelude::*; +use std::{io, fmt}; +use std::rc::Rc; +use std::str::FromStr; +use std::error::Error; +use std::cell::RefCell; +use std::time::SystemTime; + +use log::Level; +use termcolor::{self, ColorSpec, ColorChoice, Buffer, BufferWriter, WriteColor}; +use atty; +use humantime::{format_rfc3339_seconds, format_rfc3339_nanos}; + +/// A formatter to write logs into. +/// +/// `Formatter` implements the standard [`Write`] trait for writing log records. +/// It also supports terminal colors, through the [`style`] method. +/// +/// # Examples +/// +/// Use the [`writeln`] macro to easily format a log record: +/// +/// ``` +/// use std::io::Write; +/// +/// let mut builder = env_logger::Builder::new(); +/// +/// builder.format(|buf, record| writeln!(buf, "{}: {}", record.level(), record.args())); +/// ``` +/// +/// [`Write`]: https://doc.rust-lang.org/stable/std/io/trait.Write.html +/// [`writeln`]: https://doc.rust-lang.org/stable/std/macro.writeln.html +/// [`style`]: #method.style +pub struct Formatter { + buf: Rc>, + write_style: WriteStyle, +} + +/// A set of styles to apply to the terminal output. 
+/// +/// Call [`Formatter::style`] to get a `Style` and use the builder methods to +/// set styling properties, like [color] and [weight]. +/// To print a value using the style, wrap it in a call to [`value`] when the log +/// record is formatted. +/// +/// # Examples +/// +/// Create a bold, red colored style and use it to print the log level: +/// +/// ``` +/// use std::io::Write; +/// use env_logger::fmt::Color; +/// +/// let mut builder = env_logger::Builder::new(); +/// +/// builder.format(|buf, record| { +/// let mut level_style = buf.style(); +/// +/// level_style.set_color(Color::Red).set_bold(true); +/// +/// writeln!(buf, "{}: {}", +/// level_style.value(record.level()), +/// record.args()) +/// }); +/// ``` +/// +/// Styles can be re-used to output multiple values: +/// +/// ``` +/// use std::io::Write; +/// use env_logger::fmt::Color; +/// +/// let mut builder = env_logger::Builder::new(); +/// +/// builder.format(|buf, record| { +/// let mut bold = buf.style(); +/// +/// bold.set_bold(true); +/// +/// writeln!(buf, "{}: {} {}", +/// bold.value(record.level()), +/// bold.value("some bold text"), +/// record.args()) +/// }); +/// ``` +/// +/// [`Formatter::style`]: struct.Formatter.html#method.style +/// [color]: #method.set_color +/// [weight]: #method.set_bold +/// [`value`]: #method.value +#[derive(Clone)] +pub struct Style { + buf: Rc>, + spec: ColorSpec, +} + +/// A value that can be printed using the given styles. +/// +/// It is the result of calling [`Style::value`]. +/// +/// [`Style::value`]: struct.Style.html#method.value +pub struct StyledValue<'a, T> { + style: &'a Style, + value: T, +} + +/// An [RFC3339] formatted timestamp. +/// +/// The timestamp implements [`Display`] and can be written to a [`Formatter`]. +/// +/// [RFC3339]: https://www.ietf.org/rfc/rfc3339.txt +/// [`Display`]: https://doc.rust-lang.org/stable/std/fmt/trait.Display.html +/// [`Formatter`]: struct.Formatter.html +pub struct Timestamp(SystemTime); + +/// An [RFC3339] formatted timestamp with nanos +#[derive(Debug)] +pub struct PreciseTimestamp(SystemTime); + +/// Log target, either `stdout` or `stderr`. +#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)] +pub enum Target { + /// Logs will be sent to standard output. + Stdout, + /// Logs will be sent to standard error. + Stderr, +} + +impl Default for Target { + fn default() -> Self { + Target::Stderr + } +} + +/// Whether or not to print styles to the target. +#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)] +pub enum WriteStyle { + /// Try to print styles, but don't force the issue. + Auto, + /// Try very hard to print styles. + Always, + /// Never print styles. + Never, +} + +impl Default for WriteStyle { + fn default() -> Self { + WriteStyle::Auto + } +} + +/// A terminal target with color awareness. +pub(crate) struct Writer { + inner: BufferWriter, + write_style: WriteStyle, +} + +impl Writer { + pub(crate) fn write_style(&self) -> WriteStyle { + self.write_style + } +} + +/// A builder for a terminal writer. +/// +/// The target and style choice can be configured before building. +pub(crate) struct Builder { + target: Target, + write_style: WriteStyle, +} + +impl Builder { + /// Initialize the writer builder with defaults. + pub fn new() -> Self { + Builder { + target: Default::default(), + write_style: Default::default(), + } + } + + /// Set the target to write to. + pub fn target(&mut self, target: Target) -> &mut Self { + self.target = target; + self + } + + /// Parses a style choice string. 
+ /// + /// See the [Disabling colors] section for more details. + /// + /// [Disabling colors]: ../index.html#disabling-colors + pub fn parse(&mut self, write_style: &str) -> &mut Self { + self.write_style(parse_write_style(write_style)) + } + + /// Whether or not to print style characters when writing. + pub fn write_style(&mut self, write_style: WriteStyle) -> &mut Self { + self.write_style = write_style; + self + } + + /// Build a terminal writer. + pub fn build(&mut self) -> Writer { + let color_choice = match self.write_style { + WriteStyle::Auto => { + if atty::is(match self.target { + Target::Stderr => atty::Stream::Stderr, + Target::Stdout => atty::Stream::Stdout, + }) { + ColorChoice::Auto + } else { + ColorChoice::Never + } + }, + WriteStyle::Always => ColorChoice::Always, + WriteStyle::Never => ColorChoice::Never, + }; + + let writer = match self.target { + Target::Stderr => BufferWriter::stderr(color_choice), + Target::Stdout => BufferWriter::stdout(color_choice), + }; + + Writer { + inner: writer, + write_style: self.write_style, + } + } +} + +impl Default for Builder { + fn default() -> Self { + Builder::new() + } +} + +impl Style { + /// Set the text color. + /// + /// # Examples + /// + /// Create a style with red text: + /// + /// ``` + /// use std::io::Write; + /// use env_logger::fmt::Color; + /// + /// let mut builder = env_logger::Builder::new(); + /// + /// builder.format(|buf, record| { + /// let mut style = buf.style(); + /// + /// style.set_color(Color::Red); + /// + /// writeln!(buf, "{}", style.value(record.args())) + /// }); + /// ``` + pub fn set_color(&mut self, color: Color) -> &mut Style { + self.spec.set_fg(color.into_termcolor()); + self + } + + /// Set the text weight. + /// + /// If `yes` is true then text will be written in bold. + /// If `yes` is false then text will be written in the default weight. + /// + /// # Examples + /// + /// Create a style with bold text: + /// + /// ``` + /// use std::io::Write; + /// + /// let mut builder = env_logger::Builder::new(); + /// + /// builder.format(|buf, record| { + /// let mut style = buf.style(); + /// + /// style.set_bold(true); + /// + /// writeln!(buf, "{}", style.value(record.args())) + /// }); + /// ``` + pub fn set_bold(&mut self, yes: bool) -> &mut Style { + self.spec.set_bold(yes); + self + } + + /// Set the text intensity. + /// + /// If `yes` is true then text will be written in a brighter color. + /// If `yes` is false then text will be written in the default color. + /// + /// # Examples + /// + /// Create a style with intense text: + /// + /// ``` + /// use std::io::Write; + /// + /// let mut builder = env_logger::Builder::new(); + /// + /// builder.format(|buf, record| { + /// let mut style = buf.style(); + /// + /// style.set_intense(true); + /// + /// writeln!(buf, "{}", style.value(record.args())) + /// }); + /// ``` + pub fn set_intense(&mut self, yes: bool) -> &mut Style { + self.spec.set_intense(yes); + self + } + + /// Set the background color. 
+ /// + /// # Examples + /// + /// Create a style with a yellow background: + /// + /// ``` + /// use std::io::Write; + /// use env_logger::fmt::Color; + /// + /// let mut builder = env_logger::Builder::new(); + /// + /// builder.format(|buf, record| { + /// let mut style = buf.style(); + /// + /// style.set_bg(Color::Yellow); + /// + /// writeln!(buf, "{}", style.value(record.args())) + /// }); + /// ``` + pub fn set_bg(&mut self, color: Color) -> &mut Style { + self.spec.set_bg(color.into_termcolor()); + self + } + + /// Wrap a value in the style. + /// + /// The same `Style` can be used to print multiple different values. + /// + /// # Examples + /// + /// Create a bold, red colored style and use it to print the log level: + /// + /// ``` + /// use std::io::Write; + /// use env_logger::fmt::Color; + /// + /// let mut builder = env_logger::Builder::new(); + /// + /// builder.format(|buf, record| { + /// let mut style = buf.style(); + /// + /// style.set_color(Color::Red).set_bold(true); + /// + /// writeln!(buf, "{}: {}", + /// style.value(record.level()), + /// record.args()) + /// }); + /// ``` + pub fn value(&self, value: T) -> StyledValue { + StyledValue { + style: &self, + value + } + } +} + +impl Formatter { + pub(crate) fn new(writer: &Writer) -> Self { + Formatter { + buf: Rc::new(RefCell::new(writer.inner.buffer())), + write_style: writer.write_style(), + } + } + + pub(crate) fn write_style(&self) -> WriteStyle { + self.write_style + } + + /// Begin a new [`Style`]. + /// + /// # Examples + /// + /// Create a bold, red colored style and use it to print the log level: + /// + /// ``` + /// use std::io::Write; + /// use env_logger::fmt::Color; + /// + /// let mut builder = env_logger::Builder::new(); + /// + /// builder.format(|buf, record| { + /// let mut level_style = buf.style(); + /// + /// level_style.set_color(Color::Red).set_bold(true); + /// + /// writeln!(buf, "{}: {}", + /// level_style.value(record.level()), + /// record.args()) + /// }); + /// ``` + /// + /// [`Style`]: struct.Style.html + pub fn style(&self) -> Style { + Style { + buf: self.buf.clone(), + spec: ColorSpec::new(), + } + } + + /// Get the default [`Style`] for the given level. + pub fn default_level_style(&self, level: Level) -> Style { + let mut level_style = self.style(); + match level { + Level::Trace => level_style.set_color(Color::White), + Level::Debug => level_style.set_color(Color::Blue), + Level::Info => level_style.set_color(Color::Green), + Level::Warn => level_style.set_color(Color::Yellow), + Level::Error => level_style.set_color(Color::Red).set_bold(true), + }; + level_style + } + + /// Get a [`Timestamp`] for the current date and time in UTC. + /// + /// # Examples + /// + /// Include the current timestamp with the log record: + /// + /// ``` + /// use std::io::Write; + /// + /// let mut builder = env_logger::Builder::new(); + /// + /// builder.format(|buf, record| { + /// let ts = buf.timestamp(); + /// + /// writeln!(buf, "{}: {}: {}", ts, record.level(), record.args()) + /// }); + /// ``` + /// + /// [`Timestamp`]: struct.Timestamp.html + pub fn timestamp(&self) -> Timestamp { + Timestamp(SystemTime::now()) + } + + /// Get a [`PreciseTimestamp`] for the current date and time in UTC with nanos. 
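+    ///
+    /// # Examples
+    ///
+    /// A sketch mirroring the `timestamp` example above, but with nanosecond
+    /// precision in the printed timestamp:
+    ///
+    /// ```
+    /// use std::io::Write;
+    ///
+    /// let mut builder = env_logger::Builder::new();
+    ///
+    /// builder.format(|buf, record| {
+    ///     let ts = buf.precise_timestamp();
+    ///
+    ///     writeln!(buf, "{}: {}: {}", ts, record.level(), record.args())
+    /// });
+    /// ```
+    ///
+    /// [`PreciseTimestamp`]: struct.PreciseTimestamp.html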
+ pub fn precise_timestamp(&self) -> PreciseTimestamp { + PreciseTimestamp(SystemTime::now()) + } + + pub(crate) fn print(&self, writer: &Writer) -> io::Result<()> { + writer.inner.print(&self.buf.borrow()) + } + + pub(crate) fn clear(&mut self) { + self.buf.borrow_mut().clear() + } +} + +impl Write for Formatter { + fn write(&mut self, buf: &[u8]) -> io::Result { + self.buf.borrow_mut().write(buf) + } + + fn flush(&mut self) -> io::Result<()> { + self.buf.borrow_mut().flush() + } +} + +impl<'a, T> StyledValue<'a, T> { + fn write_fmt(&self, f: F) -> fmt::Result + where + F: FnOnce() -> fmt::Result, + { + self.style.buf.borrow_mut().set_color(&self.style.spec).map_err(|_| fmt::Error)?; + + // Always try to reset the terminal style, even if writing failed + let write = f(); + let reset = self.style.buf.borrow_mut().reset().map_err(|_| fmt::Error); + + write.and(reset) + } +} + +impl fmt::Debug for Timestamp { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + /// A `Debug` wrapper for `Timestamp` that uses the `Display` implementation. + struct TimestampValue<'a>(&'a Timestamp); + + impl<'a> fmt::Debug for TimestampValue<'a> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::Display::fmt(&self.0, f) + } + } + + f.debug_tuple("Timestamp") + .field(&TimestampValue(&self)) + .finish() + } +} + +impl fmt::Debug for Writer { + fn fmt(&self, f: &mut fmt::Formatter)->fmt::Result { + f.debug_struct("Writer").finish() + } +} + +impl fmt::Debug for Formatter { + fn fmt(&self, f: &mut fmt::Formatter)->fmt::Result { + f.debug_struct("Formatter").finish() + } +} + +impl fmt::Debug for Builder { + fn fmt(&self, f: &mut fmt::Formatter)->fmt::Result { + f.debug_struct("Logger") + .field("target", &self.target) + .field("write_style", &self.write_style) + .finish() + } +} + +impl fmt::Debug for Style { + fn fmt(&self, f: &mut fmt::Formatter)->fmt::Result { + f.debug_struct("Style").field("spec", &self.spec).finish() + } +} + +macro_rules! impl_styled_value_fmt { + ($($fmt_trait:path),*) => { + $( + impl<'a, T: $fmt_trait> $fmt_trait for StyledValue<'a, T> { + fn fmt(&self, f: &mut fmt::Formatter)->fmt::Result { + self.write_fmt(|| T::fmt(&self.value, f)) + } + } + )* + }; +} + +impl_styled_value_fmt!( + fmt::Debug, + fmt::Display, + fmt::Pointer, + fmt::Octal, + fmt::Binary, + fmt::UpperHex, + fmt::LowerHex, + fmt::UpperExp, + fmt::LowerExp); + +impl fmt::Display for Timestamp { + fn fmt(&self, f: &mut fmt::Formatter)->fmt::Result { + format_rfc3339_seconds(self.0).fmt(f) + } +} + +impl fmt::Display for PreciseTimestamp { + fn fmt(&self, f: &mut fmt::Formatter)->fmt::Result { + format_rfc3339_nanos(self.0).fmt(f) + } +} + +// The `Color` type is copied from https://github.com/BurntSushi/ripgrep/tree/master/termcolor + +/// The set of available colors for the terminal foreground/background. +/// +/// The `Ansi256` and `Rgb` colors will only output the correct codes when +/// paired with the `Ansi` `WriteColor` implementation. +/// +/// The `Ansi256` and `Rgb` color types are not supported when writing colors +/// on Windows using the console. If they are used on Windows, then they are +/// silently ignored and no colors will be emitted. +/// +/// This set may expand over time. +/// +/// This type has a `FromStr` impl that can parse colors from their human +/// readable form. The format is as follows: +/// +/// 1. Any of the explicitly listed colors in English. They are matched +/// case insensitively. +/// 2. A single 8-bit integer, in either decimal or hexadecimal format. +/// 3. 
A triple of 8-bit integers separated by a comma, where each integer is +/// in decimal or hexadecimal format. +/// +/// Hexadecimal numbers are written with a `0x` prefix. +#[allow(missing_docs)] +#[derive(Clone, Debug, Eq, PartialEq)] +pub enum Color { + Black, + Blue, + Green, + Red, + Cyan, + Magenta, + Yellow, + White, + Ansi256(u8), + Rgb(u8, u8, u8), + #[doc(hidden)] + __Nonexhaustive, +} + +/// An error from parsing an invalid color specification. +#[derive(Clone, Debug, Eq, PartialEq)] +pub struct ParseColorError(ParseColorErrorKind); + +#[derive(Clone, Debug, Eq, PartialEq)] +enum ParseColorErrorKind { + /// An error originating from `termcolor`. + TermColor(termcolor::ParseColorError), + /// An error converting the `termcolor` color to a `env_logger::Color`. + /// + /// This variant should only get reached if a user uses a new spec that's + /// valid for `termcolor`, but not recognised in `env_logger` yet. + Unrecognized { + given: String, + } +} + +impl ParseColorError { + fn termcolor(err: termcolor::ParseColorError) -> Self { + ParseColorError(ParseColorErrorKind::TermColor(err)) + } + + fn unrecognized(given: String) -> Self { + ParseColorError(ParseColorErrorKind::Unrecognized { given }) + } + + /// Return the string that couldn't be parsed as a valid color. + pub fn invalid(&self) -> &str { + match self.0 { + ParseColorErrorKind::TermColor(ref err) => err.invalid(), + ParseColorErrorKind::Unrecognized { ref given, .. } => given, + } + } +} + +impl Error for ParseColorError { + fn description(&self) -> &str { + match self.0 { + ParseColorErrorKind::TermColor(ref err) => err.description(), + ParseColorErrorKind::Unrecognized { .. } => "unrecognized color value", + } + } +} + +impl fmt::Display for ParseColorError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self.0 { + ParseColorErrorKind::TermColor(ref err) => fmt::Display::fmt(err, f), + ParseColorErrorKind::Unrecognized { ref given, .. 
} => { + write!(f, "unrecognized color value '{}'", given) + } + } + } +} + +impl Color { + fn into_termcolor(self) -> Option { + match self { + Color::Black => Some(termcolor::Color::Black), + Color::Blue => Some(termcolor::Color::Blue), + Color::Green => Some(termcolor::Color::Green), + Color::Red => Some(termcolor::Color::Red), + Color::Cyan => Some(termcolor::Color::Cyan), + Color::Magenta => Some(termcolor::Color::Magenta), + Color::Yellow => Some(termcolor::Color::Yellow), + Color::White => Some(termcolor::Color::White), + Color::Ansi256(value) => Some(termcolor::Color::Ansi256(value)), + Color::Rgb(r, g, b) => Some(termcolor::Color::Rgb(r, g, b)), + _ => None, + } + } + + fn from_termcolor(color: termcolor::Color) -> Option { + match color { + termcolor::Color::Black => Some(Color::Black), + termcolor::Color::Blue => Some(Color::Blue), + termcolor::Color::Green => Some(Color::Green), + termcolor::Color::Red => Some(Color::Red), + termcolor::Color::Cyan => Some(Color::Cyan), + termcolor::Color::Magenta => Some(Color::Magenta), + termcolor::Color::Yellow => Some(Color::Yellow), + termcolor::Color::White => Some(Color::White), + termcolor::Color::Ansi256(value) => Some(Color::Ansi256(value)), + termcolor::Color::Rgb(r, g, b) => Some(Color::Rgb(r, g, b)), + _ => None, + } + } +} + +impl FromStr for Color { + type Err = ParseColorError; + + fn from_str(s: &str) -> Result { + let tc = termcolor::Color::from_str(s).map_err(ParseColorError::termcolor)?; + Color::from_termcolor(tc).ok_or_else(|| ParseColorError::unrecognized(s.into())) + } +} + +fn parse_write_style(spec: &str) -> WriteStyle { + match spec { + "auto" => WriteStyle::Auto, + "always" => WriteStyle::Always, + "never" => WriteStyle::Never, + _ => Default::default(), + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn parse_write_style_valid() { + let inputs = vec![ + ("auto", WriteStyle::Auto), + ("always", WriteStyle::Always), + ("never", WriteStyle::Never), + ]; + + for (input, expected) in inputs { + assert_eq!(expected, parse_write_style(input)); + } + } + + #[test] + fn parse_write_style_invalid() { + let inputs = vec![ + "", + "true", + "false", + "NEVER!!" + ]; + + for input in inputs { + assert_eq!(WriteStyle::Auto, parse_write_style(input)); + } + } + + #[test] + fn parse_color_name_valid() { + let inputs = vec![ + "black", + "blue", + "green", + "red", + "cyan", + "magenta", + "yellow", + "white", + ]; + + for input in inputs { + assert!(Color::from_str(input).is_ok()); + } + } + + #[test] + fn parse_color_ansi_valid() { + let inputs = vec![ + "7", + "32", + "0xFF", + ]; + + for input in inputs { + assert!(Color::from_str(input).is_ok()); + } + } + + #[test] + fn parse_color_rgb_valid() { + let inputs = vec![ + "0,0,0", + "0,128,255", + "0x0,0x0,0x0", + "0x33,0x66,0xFF", + ]; + + for input in inputs { + assert!(Color::from_str(input).is_ok()); + } + } + + #[test] + fn parse_color_invalid() { + let inputs = vec![ + "not_a_color", + "256", + "0,0", + "0,0,256", + ]; + + for input in inputs { + let err = Color::from_str(input).unwrap_err(); + assert_eq!(input, err.invalid()); + } + } +} diff --git a/env_logger/src/lib.rs b/env_logger/src/lib.rs new file mode 100644 index 000000000..90058ee77 --- /dev/null +++ b/env_logger/src/lib.rs @@ -0,0 +1,1125 @@ +// Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. 
+// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! A simple logger configured via environment variables which writes +//! to stdout or stderr, for use with the logging facade exposed by the +//! [`log` crate][log-crate-url]. +//! +//! ## Example +//! +//! ``` +//! #[macro_use] extern crate log; +//! extern crate env_logger; +//! +//! use log::Level; +//! +//! fn main() { +//! env_logger::init(); +//! +//! debug!("this is a debug {}", "message"); +//! error!("this is printed by default"); +//! +//! if log_enabled!(Level::Info) { +//! let x = 3 * 4; // expensive computation +//! info!("the answer was: {}", x); +//! } +//! } +//! ``` +//! +//! Assumes the binary is `main`: +//! +//! ```{.bash} +//! $ RUST_LOG=error ./main +//! ERROR: 2017-11-09T02:12:24Z: main: this is printed by default +//! ``` +//! +//! ```{.bash} +//! $ RUST_LOG=info ./main +//! ERROR: 2017-11-09T02:12:24Z: main: this is printed by default +//! INFO: 2017-11-09T02:12:24Z: main: the answer was: 12 +//! ``` +//! +//! ```{.bash} +//! $ RUST_LOG=debug ./main +//! DEBUG: 2017-11-09T02:12:24Z: main: this is a debug message +//! ERROR: 2017-11-09T02:12:24Z: main: this is printed by default +//! INFO: 2017-11-09T02:12:24Z: main: the answer was: 12 +//! ``` +//! +//! You can also set the log level on a per module basis: +//! +//! ```{.bash} +//! $ RUST_LOG=main=info ./main +//! ERROR: 2017-11-09T02:12:24Z: main: this is printed by default +//! INFO: 2017-11-09T02:12:24Z: main: the answer was: 12 +//! ``` +//! +//! And enable all logging: +//! +//! ```{.bash} +//! $ RUST_LOG=main ./main +//! DEBUG: 2017-11-09T02:12:24Z: main: this is a debug message +//! ERROR: 2017-11-09T02:12:24Z: main: this is printed by default +//! INFO: 2017-11-09T02:12:24Z: main: the answer was: 12 +//! ``` +//! +//! If the binary name contains hyphens, you will need to replace +//! them with underscores: +//! +//! ```{.bash} +//! $ RUST_LOG=my_app ./my-app +//! DEBUG: 2017-11-09T02:12:24Z: my_app: this is a debug message +//! ERROR: 2017-11-09T02:12:24Z: my_app: this is printed by default +//! INFO: 2017-11-09T02:12:24Z: my_app: the answer was: 12 +//! ``` +//! +//! This is because Rust modules and crates cannot contain hyphens +//! in their name, although `cargo` continues to accept them. +//! +//! See the documentation for the [`log` crate][log-crate-url] for more +//! information about its API. +//! +//! ## Enabling logging +//! +//! Log levels are controlled on a per-module basis, and by default all logging +//! is disabled except for `error!`. Logging is controlled via the `RUST_LOG` +//! environment variable. The value of this environment variable is a +//! comma-separated list of logging directives. A logging directive is of the +//! form: +//! +//! ```text +//! path::to::module=level +//! ``` +//! +//! The path to the module is rooted in the name of the crate it was compiled +//! for, so if your program is contained in a file `hello.rs`, for example, to +//! turn on logging for this file you would use a value of `RUST_LOG=hello`. +//! Furthermore, this path is a prefix-search, so all modules nested in the +//! specified module will also have logging enabled. +//! +//! The actual `level` is optional to specify. If omitted, all logging will +//! be enabled. If specified, it must be one of the strings `debug`, `error`, +//! `info`, `warn`, or `trace`. +//! +//! 
As the log level for a module is optional, the module to enable logging for +//! is also optional. If only a `level` is provided, then the global log +//! level for all modules is set to this value. +//! +//! Some examples of valid values of `RUST_LOG` are: +//! +//! * `hello` turns on all logging for the 'hello' module +//! * `info` turns on all info logging +//! * `hello=debug` turns on debug logging for 'hello' +//! * `hello,std::option` turns on hello, and std's option logging +//! * `error,hello=warn` turn on global error logging and also warn for hello +//! +//! ## Filtering results +//! +//! A `RUST_LOG` directive may include a regex filter. The syntax is to append `/` +//! followed by a regex. Each message is checked against the regex, and is only +//! logged if it matches. Note that the matching is done after formatting the +//! log string but before adding any logging meta-data. There is a single filter +//! for all modules. +//! +//! Some examples: +//! +//! * `hello/foo` turns on all logging for the 'hello' module where the log +//! message includes 'foo'. +//! * `info/f.o` turns on all info logging where the log message includes 'foo', +//! 'f1o', 'fao', etc. +//! * `hello=debug/foo*foo` turns on debug logging for 'hello' where the log +//! message includes 'foofoo' or 'fofoo' or 'fooooooofoo', etc. +//! * `error,hello=warn/[0-9]scopes` turn on global error logging and also +//! warn for hello. In both cases the log message must include a single digit +//! number followed by 'scopes'. +//! +//! ## Disabling colors +//! +//! Colors and other styles can be configured with the `RUST_LOG_STYLE` +//! environment variable. It accepts the following values: +//! +//! * `auto` (default) will attempt to print style characters, but don't force the issue. +//! If the console isn't available on Windows, or if TERM=dumb, for example, then don't print colors. +//! * `always` will always print style characters even if they aren't supported by the terminal. +//! This includes emitting ANSI colors on Windows if the console API is unavailable. +//! * `never` will never print style characters. +//! +//! ## Tweaking the default format +//! +//! Parts of the default format can be excluded from the log output using the [`Builder`]. +//! The following example excludes the timestamp from the log output: +//! +//! ``` +//! #[macro_use] extern crate log; +//! extern crate env_logger; +//! +//! use log::Level; +//! +//! fn main() { +//! env_logger::Builder::from_default_env() +//! .default_format_timestamp(false) +//! .init(); +//! +//! debug!("this is a debug {}", "message"); +//! error!("this is printed by default"); +//! +//! if log_enabled!(Level::Info) { +//! let x = 3 * 4; // expensive computation +//! info!("the answer was: {}", x); +//! } +//! } +//! ``` +//! +//! ## Specifying defaults for environment variables +//! +//! `env_logger` can read configuration from environment variables. +//! If these variables aren't present, the default value to use can be tweaked with the [`Env`] type. +//! The following example defaults to log `warn` and above if the `RUST_LOG` environment variable +//! isn't set: +//! +//! ``` +//! #[macro_use] extern crate log; +//! extern crate env_logger; +//! +//! use log::Level; +//! +//! fn main() { +//! let env = env_logger::Env::default() +//! .filter_or(env_logger::DEFAULT_FILTER_ENV, "warn"); +//! +//! env_logger::Builder::from_env(env).init(); +//! } +//! ``` +//! +//! [log-crate-url]: https://docs.rs/log/ +//! [`Builder`]: struct.Builder.html +//! 
[`Env`]: struct.Env.html + +#![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", + html_favicon_url = "http://www.rust-lang.org/favicon.ico", + html_root_url = "https://docs.rs/env_logger/0.5.13")] +#![cfg_attr(test, deny(warnings))] + +// When compiled for the rustc compiler itself we want to make sure that this is +// an unstable crate +#![cfg_attr(rustbuild, feature(staged_api, rustc_private))] +#![cfg_attr(rustbuild, unstable(feature = "rustc_private", issue = "27812"))] + +#![deny(missing_debug_implementations, missing_docs, warnings)] + +extern crate log; +extern crate termcolor; +extern crate humantime; +extern crate atty; + +use std::env; +use std::borrow::Cow; +use std::io::prelude::*; +use std::io; +use std::mem; +use std::cell::RefCell; + +use log::{Log, LevelFilter, Record, SetLoggerError, Metadata}; + +pub mod filter; +pub mod fmt; + +pub use self::fmt::{Target, WriteStyle, Color, Formatter}; + +/// The default name for the environment variable to read filters from. +pub const DEFAULT_FILTER_ENV: &'static str = "RUST_LOG"; + +/// The default name for the environment variable to read style preferences from. +pub const DEFAULT_WRITE_STYLE_ENV: &'static str = "RUST_LOG_STYLE"; + +/// Set of environment variables to configure from. +/// +/// # Default environment variables +/// +/// By default, the `Env` will read the following environment variables: +/// +/// - `RUST_LOG`: the level filter +/// - `RUST_LOG_STYLE`: whether or not to print styles with records. +/// +/// These sources can be configured using the builder methods on `Env`. +#[derive(Debug)] +pub struct Env<'a> { + filter: Var<'a>, + write_style: Var<'a>, +} + +#[derive(Debug)] +struct Var<'a> { + name: Cow<'a, str>, + default: Option>, +} + +/// The env logger. +/// +/// This struct implements the `Log` trait from the [`log` crate][log-crate-url], +/// which allows it to act as a logger. +/// +/// The [`init()`], [`try_init()`], [`Builder::init()`] and [`Builder::try_init()`] +/// methods will each construct a `Logger` and immediately initialize it as the +/// default global logger. +/// +/// If you'd instead need access to the constructed `Logger`, you can use +/// the associated [`Builder`] and install it with the +/// [`log` crate][log-crate-url] directly. +/// +/// [log-crate-url]: https://docs.rs/log/ +/// [`init()`]: fn.init.html +/// [`try_init()`]: fn.try_init.html +/// [`Builder::init()`]: struct.Builder.html#method.init +/// [`Builder::try_init()`]: struct.Builder.html#method.try_init +/// [`Builder`]: struct.Builder.html +pub struct Logger { + writer: fmt::Writer, + filter: filter::Filter, + format: Box io::Result<()> + Sync + Send>, +} + +struct Format { + default_format_timestamp: bool, + default_format_module_path: bool, + default_format_level: bool, + default_format_timestamp_nanos: bool, + custom_format: Option io::Result<()> + Sync + Send>>, +} + +impl Default for Format { + fn default() -> Self { + Format { + default_format_timestamp: true, + default_format_module_path: true, + default_format_level: true, + default_format_timestamp_nanos: false, + custom_format: None, + } + } +} + +impl Format { + /// Convert the format into a callable function. + /// + /// If the `custom_format` is `Some`, then any `default_format` switches are ignored. + /// If the `custom_format` is `None`, then a default format is returned. + /// Any `default_format` switches set to `false` won't be written by the format. 
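+    ///
+    /// As a rough sketch, these switches correspond to the public `Builder`
+    /// methods; for example, the following keeps the level and message but drops
+    /// the timestamp and module path from the default format:
+    ///
+    /// ```
+    /// let mut builder = env_logger::Builder::new();
+    ///
+    /// builder.default_format_timestamp(false)
+    ///     .default_format_module_path(false);
+    /// ```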
+ fn into_boxed_fn(self) -> Box io::Result<()> + Sync + Send> { + if let Some(fmt) = self.custom_format { + fmt + } + else { + Box::new(move |buf, record| { + let write_level = if self.default_format_level { + let level = record.level(); + let level_style = buf.default_level_style(level); + write!(buf, "{:>5} ", level_style.value(level)) + } else { + Ok(()) + }; + + let write_ts = if self.default_format_timestamp { + if self.default_format_timestamp_nanos { + let ts_nanos = buf.precise_timestamp(); + write!(buf, "{}: ", ts_nanos) + } else { + let ts = buf.timestamp(); + write!(buf, "{}: ", ts) + } + } else { + Ok(()) + }; + + let default_format_module_path = (self.default_format_module_path, record.module_path()); + let write_module_path = if let (true, Some(module_path)) = default_format_module_path { + write!(buf, "{}: ", module_path) + } else { + Ok(()) + }; + + let write_args = writeln!(buf, "{}", record.args()); + + write_level.and(write_ts).and(write_module_path).and(write_args) + }) + } + } +} + +/// `Builder` acts as builder for initializing a `Logger`. +/// +/// It can be used to customize the log format, change the environment variable used +/// to provide the logging directives and also set the default log level filter. +/// +/// # Examples +/// +/// ``` +/// #[macro_use] +/// extern crate log; +/// extern crate env_logger; +/// +/// use std::env; +/// use std::io::Write; +/// use log::LevelFilter; +/// use env_logger::Builder; +/// +/// fn main() { +/// let mut builder = Builder::from_default_env(); +/// +/// builder.format(|buf, record| writeln!(buf, "{} - {}", record.level(), record.args())) +/// .filter(None, LevelFilter::Info) +/// .init(); +/// +/// error!("error message"); +/// info!("info message"); +/// } +/// ``` +#[derive(Default)] +pub struct Builder { + filter: filter::Builder, + writer: fmt::Builder, + format: Format, +} + +impl Builder { + /// Initializes the log builder with defaults. + /// + /// **NOTE:** This method won't read from any environment variables. + /// Use the [`filter`] and [`write_style`] methods to configure the builder + /// or use [`from_env`] or [`from_default_env`] instead. + /// + /// # Examples + /// + /// Create a new builder and configure filters and style: + /// + /// ``` + /// # extern crate log; + /// # extern crate env_logger; + /// # fn main() { + /// use log::LevelFilter; + /// use env_logger::{Builder, WriteStyle}; + /// + /// let mut builder = Builder::new(); + /// + /// builder.filter(None, LevelFilter::Info) + /// .write_style(WriteStyle::Always) + /// .init(); + /// # } + /// ``` + /// + /// [`filter`]: #method.filter + /// [`write_style`]: #method.write_style + /// [`from_env`]: #method.from_env + /// [`from_default_env`]: #method.from_default_env + pub fn new() -> Builder { + Default::default() + } + + /// Initializes the log builder from the environment. + /// + /// The variables used to read configuration from can be tweaked before + /// passing in. 
+ /// + /// # Examples + /// + /// Initialise a logger reading the log filter from an environment variable + /// called `MY_LOG`: + /// + /// ``` + /// use env_logger::Builder; + /// + /// let mut builder = Builder::from_env("MY_LOG"); + /// builder.init(); + /// ``` + /// + /// Initialise a logger using the `MY_LOG` variable for filtering and + /// `MY_LOG_STYLE` for whether or not to write styles: + /// + /// ``` + /// use env_logger::{Builder, Env}; + /// + /// let env = Env::new().filter("MY_LOG").write_style("MY_LOG_STYLE"); + /// + /// let mut builder = Builder::from_env(env); + /// builder.init(); + /// ``` + pub fn from_env<'a, E>(env: E) -> Self + where + E: Into> + { + let mut builder = Builder::new(); + let env = env.into(); + + if let Some(s) = env.get_filter() { + builder.parse(&s); + } + + if let Some(s) = env.get_write_style() { + builder.parse_write_style(&s); + } + + builder + } + + /// Initializes the log builder from the environment using default variable names. + /// + /// This method is a convenient way to call `from_env(Env::default())` without + /// having to use the `Env` type explicitly. The builder will use the + /// [default environment variables]. + /// + /// # Examples + /// + /// Initialise a logger using the default environment variables: + /// + /// ``` + /// use env_logger::Builder; + /// + /// let mut builder = Builder::from_default_env(); + /// builder.init(); + /// ``` + /// + /// [default environment variables]: struct.Env.html#default-environment-variables + pub fn from_default_env() -> Self { + Self::from_env(Env::default()) + } + + /// Sets the format function for formatting the log output. + /// + /// This function is called on each record logged and should format the + /// log record and output it to the given [`Formatter`]. + /// + /// The format function is expected to output the string directly to the + /// `Formatter` so that implementations can use the [`std::fmt`] macros + /// to format and output without intermediate heap allocations. The default + /// `env_logger` formatter takes advantage of this. + /// + /// # Examples + /// + /// Use a custom format to write only the log message: + /// + /// ``` + /// use std::io::Write; + /// use env_logger::Builder; + /// + /// let mut builder = Builder::new(); + /// + /// builder.format(|buf, record| write!(buf, "{}", record.args())); + /// ``` + /// + /// [`Formatter`]: fmt/struct.Formatter.html + /// [`String`]: https://doc.rust-lang.org/stable/std/string/struct.String.html + /// [`std::fmt`]: https://doc.rust-lang.org/std/fmt/index.html + pub fn format(&mut self, format: F) -> &mut Self + where F: Fn(&mut Formatter, &Record) -> io::Result<()> + Sync + Send + { + self.format.custom_format = Some(Box::new(format)); + self + } + + /// Use the default format. + /// + /// This method will clear any custom format set on the builder. + pub fn default_format(&mut self) -> &mut Self { + self.format.custom_format = None; + self + } + + /// Whether or not to write the level in the default format. + pub fn default_format_level(&mut self, write: bool) -> &mut Self { + self.format.default_format_level = write; + self + } + + /// Whether or not to write the module path in the default format. + pub fn default_format_module_path(&mut self, write: bool) -> &mut Self { + self.format.default_format_module_path = write; + self + } + + /// Whether or not to write the timestamp in the default format. 
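+    ///
+    /// # Examples
+    ///
+    /// A minimal sketch, mirroring the module-level docs, that excludes the
+    /// timestamp from the default format:
+    ///
+    /// ```
+    /// use env_logger::Builder;
+    ///
+    /// let mut builder = Builder::from_default_env();
+    ///
+    /// builder.default_format_timestamp(false);
+    /// ```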
+ pub fn default_format_timestamp(&mut self, write: bool) -> &mut Self { + self.format.default_format_timestamp = write; + self + } + + /// Whether or not to write the timestamp with nanos. + pub fn default_format_timestamp_nanos(&mut self, write: bool) -> &mut Self { + self.format.default_format_timestamp_nanos = write; + self + } + + /// Adds a directive to the filter for a specific module. + /// + /// # Examples + /// + /// Only include messages for warning and above for logs in `path::to::module`: + /// + /// ``` + /// # extern crate log; + /// # extern crate env_logger; + /// # fn main() { + /// use log::LevelFilter; + /// use env_logger::Builder; + /// + /// let mut builder = Builder::new(); + /// + /// builder.filter_module("path::to::module", LevelFilter::Info); + /// # } + /// ``` + pub fn filter_module(&mut self, module: &str, level: LevelFilter) -> &mut Self { + self.filter.filter_module(module, level); + self + } + + /// Adds a directive to the filter for all modules. + /// + /// # Examples + /// + /// Only include messages for warning and above for logs in `path::to::module`: + /// + /// ``` + /// # extern crate log; + /// # extern crate env_logger; + /// # fn main() { + /// use log::LevelFilter; + /// use env_logger::Builder; + /// + /// let mut builder = Builder::new(); + /// + /// builder.filter_level(LevelFilter::Info); + /// # } + /// ``` + pub fn filter_level(&mut self, level: LevelFilter) -> &mut Self { + self.filter.filter_level(level); + self + } + + /// Adds filters to the logger. + /// + /// The given module (if any) will log at most the specified level provided. + /// If no module is provided then the filter will apply to all log messages. + /// + /// # Examples + /// + /// Only include messages for warning and above for logs in `path::to::module`: + /// + /// ``` + /// # extern crate log; + /// # extern crate env_logger; + /// # fn main() { + /// use log::LevelFilter; + /// use env_logger::Builder; + /// + /// let mut builder = Builder::new(); + /// + /// builder.filter(Some("path::to::module"), LevelFilter::Info); + /// # } + /// ``` + pub fn filter(&mut self, + module: Option<&str>, + level: LevelFilter) -> &mut Self { + self.filter.filter(module, level); + self + } + + /// Parses the directives string in the same form as the `RUST_LOG` + /// environment variable. + /// + /// See the module documentation for more details. + pub fn parse(&mut self, filters: &str) -> &mut Self { + self.filter.parse(filters); + self + } + + /// Sets the target for the log output. + /// + /// Env logger can log to either stdout or stderr. The default is stderr. + /// + /// # Examples + /// + /// Write log message to `stdout`: + /// + /// ``` + /// use env_logger::{Builder, Target}; + /// + /// let mut builder = Builder::new(); + /// + /// builder.target(Target::Stdout); + /// ``` + pub fn target(&mut self, target: fmt::Target) -> &mut Self { + self.writer.target(target); + self + } + + /// Sets whether or not styles will be written. + /// + /// This can be useful in environments that don't support control characters + /// for setting colors. 
+ /// + /// # Examples + /// + /// Never attempt to write styles: + /// + /// ``` + /// use env_logger::{Builder, WriteStyle}; + /// + /// let mut builder = Builder::new(); + /// + /// builder.write_style(WriteStyle::Never); + /// ``` + pub fn write_style(&mut self, write_style: fmt::WriteStyle) -> &mut Self { + self.writer.write_style(write_style); + self + } + + /// Parses whether or not to write styles in the same form as the `RUST_LOG_STYLE` + /// environment variable. + /// + /// See the module documentation for more details. + pub fn parse_write_style(&mut self, write_style: &str) -> &mut Self { + self.writer.parse(write_style); + self + } + + /// Initializes the global logger with the built env logger. + /// + /// This should be called early in the execution of a Rust program. Any log + /// events that occur before initialization will be ignored. + /// + /// # Errors + /// + /// This function will fail if it is called more than once, or if another + /// library has already initialized a global logger. + pub fn try_init(&mut self) -> Result<(), SetLoggerError> { + let logger = self.build(); + + log::set_max_level(logger.filter()); + log::set_boxed_logger(Box::new(logger)) + } + + /// Initializes the global logger with the built env logger. + /// + /// This should be called early in the execution of a Rust program. Any log + /// events that occur before initialization will be ignored. + /// + /// # Panics + /// + /// This function will panic if it is called more than once, or if another + /// library has already initialized a global logger. + pub fn init(&mut self) { + self.try_init().expect("Builder::init should not be called after logger initialized"); + } + + /// Build an env logger. + /// + /// The returned logger implements the `Log` trait and can be installed manually + /// or nested within another logger. + pub fn build(&mut self) -> Logger { + Logger { + writer: self.writer.build(), + filter: self.filter.build(), + format: mem::replace(&mut self.format, Default::default()).into_boxed_fn(), + } + } +} + +impl Logger { + /// Creates the logger from the environment. + /// + /// The variables used to read configuration from can be tweaked before + /// passing in. + /// + /// # Examples + /// + /// Create a logger reading the log filter from an environment variable + /// called `MY_LOG`: + /// + /// ``` + /// use env_logger::Logger; + /// + /// let logger = Logger::from_env("MY_LOG"); + /// ``` + /// + /// Create a logger using the `MY_LOG` variable for filtering and + /// `MY_LOG_STYLE` for whether or not to write styles: + /// + /// ``` + /// use env_logger::{Logger, Env}; + /// + /// let env = Env::new().filter_or("MY_LOG", "info").write_style_or("MY_LOG_STYLE", "always"); + /// + /// let logger = Logger::from_env(env); + /// ``` + pub fn from_env<'a, E>(env: E) -> Self + where + E: Into> + { + Builder::from_env(env).build() + } + + /// Creates the logger from the environment using default variable names. + /// + /// This method is a convenient way to call `from_env(Env::default())` without + /// having to use the `Env` type explicitly. The logger will use the + /// [default environment variables]. 
+ /// + /// # Examples + /// + /// Creates a logger using the default environment variables: + /// + /// ``` + /// use env_logger::Logger; + /// + /// let logger = Logger::from_default_env(); + /// ``` + /// + /// [default environment variables]: struct.Env.html#default-environment-variables + pub fn from_default_env() -> Self { + Builder::from_default_env().build() + } + + /// Returns the maximum `LevelFilter` that this env logger instance is + /// configured to output. + pub fn filter(&self) -> LevelFilter { + self.filter.filter() + } + + /// Checks if this record matches the configured filter. + pub fn matches(&self, record: &Record) -> bool { + self.filter.matches(record) + } +} + +impl Log for Logger { + fn enabled(&self, metadata: &Metadata) -> bool { + self.filter.enabled(metadata) + } + + fn log(&self, record: &Record) { + if self.matches(record) { + // Log records are written to a thread-local buffer before being printed + // to the terminal. We clear these buffers afterwards, but they aren't shrinked + // so will always at least have capacity for the largest log record formatted + // on that thread. + // + // If multiple `Logger`s are used by the same threads then the thread-local + // formatter might have different color support. If this is the case the + // formatter and its buffer are discarded and recreated. + + thread_local! { + static FORMATTER: RefCell> = RefCell::new(None); + } + + FORMATTER.with(|tl_buf| { + // It's possible for implementations to sometimes + // log-while-logging (e.g. a `std::fmt` implementation logs + // internally) but it's super rare. If this happens make sure we + // at least don't panic and ship some output to the screen. + let mut a; + let mut b = None; + let tl_buf = match tl_buf.try_borrow_mut() { + Ok(f) => { + a = f; + &mut *a + } + Err(_) => &mut b, + }; + + // Check the buffer style. If it's different from the logger's + // style then drop the buffer and recreate it. + match *tl_buf { + Some(ref mut formatter) => { + if formatter.write_style() != self.writer.write_style() { + *formatter = Formatter::new(&self.writer) + } + }, + ref mut tl_buf => *tl_buf = Some(Formatter::new(&self.writer)) + } + + // The format is guaranteed to be `Some` by this point + let mut formatter = tl_buf.as_mut().unwrap(); + + let _ = (self.format)(&mut formatter, record).and_then(|_| formatter.print(&self.writer)); + + // Always clear the buffer afterwards + formatter.clear(); + }); + } + } + + fn flush(&self) {} +} + +impl<'a> Env<'a> { + /// Get a default set of environment variables. + pub fn new() -> Self { + Self::default() + } + + /// Specify an environment variable to read the filter from. + pub fn filter(mut self, filter_env: E) -> Self + where + E: Into> + { + self.filter = Var::new(filter_env); + + self + } + + /// Specify an environment variable to read the filter from. + /// + /// If the variable is not set, the default value will be used. + pub fn filter_or(mut self, filter_env: E, default: V) -> Self + where + E: Into>, + V: Into>, + { + self.filter = Var::new_with_default(filter_env, default); + + self + } + + fn get_filter(&self) -> Option { + self.filter.get() + } + + /// Specify an environment variable to read the style from. + pub fn write_style(mut self, write_style_env: E) -> Self + where + E: Into> + { + self.write_style = Var::new(write_style_env); + + self + } + + /// Specify an environment variable to read the style from. + /// + /// If the variable is not set, the default value will be used. 
+ pub fn write_style_or(mut self, write_style_env: E, default: V) -> Self + where + E: Into>, + V: Into>, + { + self.write_style = Var::new_with_default(write_style_env, default); + + self + } + + fn get_write_style(&self) -> Option { + self.write_style.get() + } +} + +impl<'a> Var<'a> { + fn new(name: E) -> Self + where + E: Into>, + { + Var { + name: name.into(), + default: None, + } + } + + fn new_with_default(name: E, default: V) -> Self + where + E: Into>, + V: Into>, + { + Var { + name: name.into(), + default: Some(default.into()), + } + } + + fn get(&self) -> Option { + env::var(&*self.name) + .ok() + .or_else(|| self.default + .to_owned() + .map(|v| v.into_owned())) + } +} + +impl<'a, T> From for Env<'a> +where + T: Into> +{ + fn from(filter_env: T) -> Self { + Env::default().filter(filter_env.into()) + } +} + +impl<'a> Default for Env<'a> { + fn default() -> Self { + Env { + filter: Var::new(DEFAULT_FILTER_ENV), + write_style: Var::new(DEFAULT_WRITE_STYLE_ENV), + } + } +} + +mod std_fmt_impls { + use std::fmt; + use super::*; + + impl fmt::Debug for Logger{ + fn fmt(&self, f: &mut fmt::Formatter)->fmt::Result { + f.debug_struct("Logger") + .field("filter", &self.filter) + .finish() + } + } + + impl fmt::Debug for Builder{ + fn fmt(&self, f: &mut fmt::Formatter)->fmt::Result { + f.debug_struct("Logger") + .field("filter", &self.filter) + .field("writer", &self.writer) + .finish() + } + } +} + +/// Attempts to initialize the global logger with an env logger. +/// +/// This should be called early in the execution of a Rust program. Any log +/// events that occur before initialization will be ignored. +/// +/// # Errors +/// +/// This function will fail if it is called more than once, or if another +/// library has already initialized a global logger. +pub fn try_init() -> Result<(), SetLoggerError> { + try_init_from_env(Env::default()) +} + +/// Initializes the global logger with an env logger. +/// +/// This should be called early in the execution of a Rust program. Any log +/// events that occur before initialization will be ignored. +/// +/// # Panics +/// +/// This function will panic if it is called more than once, or if another +/// library has already initialized a global logger. +pub fn init() { + try_init().expect("env_logger::init should not be called after logger initialized"); +} + +/// Attempts to initialize the global logger with an env logger from the given +/// environment variables. +/// +/// This should be called early in the execution of a Rust program. Any log +/// events that occur before initialization will be ignored. +/// +/// # Examples +/// +/// Initialise a logger using the `MY_LOG` environment variable for filters +/// and `MY_LOG_STYLE` for writing colors: +/// +/// ``` +/// # extern crate env_logger; +/// use env_logger::{Builder, Env}; +/// +/// # fn run() -> Result<(), Box<::std::error::Error>> { +/// let env = Env::new().filter("MY_LOG").write_style("MY_LOG_STYLE"); +/// +/// env_logger::try_init_from_env(env)?; +/// +/// Ok(()) +/// # } +/// # fn main() { run().unwrap(); } +/// ``` +/// +/// # Errors +/// +/// This function will fail if it is called more than once, or if another +/// library has already initialized a global logger. +pub fn try_init_from_env<'a, E>(env: E) -> Result<(), SetLoggerError> +where + E: Into> +{ + let mut builder = Builder::from_env(env); + + builder.try_init() +} + +/// Initializes the global logger with an env logger from the given environment +/// variables. 
+/// +/// This should be called early in the execution of a Rust program. Any log +/// events that occur before initialization will be ignored. +/// +/// # Examples +/// +/// Initialise a logger using the `MY_LOG` environment variable for filters +/// and `MY_LOG_STYLE` for writing colors: +/// +/// ``` +/// use env_logger::{Builder, Env}; +/// +/// let env = Env::new().filter("MY_LOG").write_style("MY_LOG_STYLE"); +/// +/// env_logger::init_from_env(env); +/// ``` +/// +/// # Panics +/// +/// This function will panic if it is called more than once, or if another +/// library has already initialized a global logger. +pub fn init_from_env<'a, E>(env: E) +where + E: Into> +{ + try_init_from_env(env).expect("env_logger::init_from_env should not be called after logger initialized"); +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn env_get_filter_reads_from_var_if_set() { + env::set_var("env_get_filter_reads_from_var_if_set", "from var"); + + let env = Env::new().filter_or("env_get_filter_reads_from_var_if_set", "from default"); + + assert_eq!(Some("from var".to_owned()), env.get_filter()); + } + + #[test] + fn env_get_filter_reads_from_default_if_var_not_set() { + env::remove_var("env_get_filter_reads_from_default_if_var_not_set"); + + let env = Env::new().filter_or("env_get_filter_reads_from_default_if_var_not_set", "from default"); + + assert_eq!(Some("from default".to_owned()), env.get_filter()); + } + + #[test] + fn env_get_write_style_reads_from_var_if_set() { + env::set_var("env_get_write_style_reads_from_var_if_set", "from var"); + + let env = Env::new().write_style_or("env_get_write_style_reads_from_var_if_set", "from default"); + + assert_eq!(Some("from var".to_owned()), env.get_write_style()); + } + + #[test] + fn env_get_write_style_reads_from_default_if_var_not_set() { + env::remove_var("env_get_write_style_reads_from_default_if_var_not_set"); + + let env = Env::new().write_style_or("env_get_write_style_reads_from_default_if_var_not_set", "from default"); + + assert_eq!(Some("from default".to_owned()), env.get_write_style()); + } +} diff --git a/env_logger/tests/log-in-log.rs b/env_logger/tests/log-in-log.rs new file mode 100644 index 000000000..6b2c47e7a --- /dev/null +++ b/env_logger/tests/log-in-log.rs @@ -0,0 +1,38 @@ +#[macro_use] extern crate log; +extern crate env_logger; + +use std::process; +use std::fmt; +use std::env; +use std::str; + +struct Foo; + +impl fmt::Display for Foo { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + info!("test"); + f.write_str("bar") + } +} + +fn main() { + env_logger::init(); + if env::var("YOU_ARE_TESTING_NOW").is_ok() { + return info!("{}", Foo); + } + + let exe = env::current_exe().unwrap(); + let out = process::Command::new(exe) + .env("YOU_ARE_TESTING_NOW", "1") + .env("RUST_LOG", "debug") + .output() + .unwrap_or_else(|e| panic!("Unable to start child process: {}", e)); + if out.status.success() { + return + } + + println!("test failed: {}", out.status); + println!("--- stdout\n{}", str::from_utf8(&out.stdout).unwrap()); + println!("--- stderr\n{}", str::from_utf8(&out.stderr).unwrap()); + process::exit(1); +} diff --git a/env_logger/tests/regexp_filter.rs b/env_logger/tests/regexp_filter.rs new file mode 100644 index 000000000..d23e9223e --- /dev/null +++ b/env_logger/tests/regexp_filter.rs @@ -0,0 +1,51 @@ +#[macro_use] extern crate log; +extern crate env_logger; + +use std::process; +use std::env; +use std::str; + +fn main() { + if env::var("LOG_REGEXP_TEST").ok() == Some(String::from("1")) { + child_main(); 
+ } else { + parent_main() + } +} + +fn child_main() { + env_logger::init(); + info!("XYZ Message"); +} + +fn run_child(rust_log: String) -> bool { + let exe = env::current_exe().unwrap(); + let out = process::Command::new(exe) + .env("LOG_REGEXP_TEST", "1") + .env("RUST_LOG", rust_log) + .output() + .unwrap_or_else(|e| panic!("Unable to start child process: {}", e)); + str::from_utf8(out.stderr.as_ref()).unwrap().contains("XYZ Message") +} + +fn assert_message_printed(rust_log: &str) { + if !run_child(rust_log.to_string()) { + panic!("RUST_LOG={} should allow the test log message", rust_log) + } +} + +fn assert_message_not_printed(rust_log: &str) { + if run_child(rust_log.to_string()) { + panic!("RUST_LOG={} should not allow the test log message", rust_log) + } +} + +fn parent_main() { + // test normal log severity levels + assert_message_printed("info"); + assert_message_not_printed("warn"); + + // test of regular expression filters + assert_message_printed("info/XYZ"); + assert_message_not_printed("info/XXX"); +} diff --git a/failure/.cargo-checksum.json b/failure/.cargo-checksum.json new file mode 100644 index 000000000..60263e402 --- /dev/null +++ b/failure/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{},"package":"6dd377bcc1b1b7ce911967e3ec24fa19c3224394ec05b54aa7b083d498341ac7"} \ No newline at end of file diff --git a/failure/CODE_OF_CONDUCT.md b/failure/CODE_OF_CONDUCT.md new file mode 100644 index 000000000..a2161d0d4 --- /dev/null +++ b/failure/CODE_OF_CONDUCT.md @@ -0,0 +1,46 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. 
Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at boats@mozilla.com. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version] + +[homepage]: http://contributor-covenant.org +[version]: http://contributor-covenant.org/version/1/4/ diff --git a/failure/Cargo.toml b/failure/Cargo.toml new file mode 100644 index 000000000..e478d731b --- /dev/null +++ b/failure/Cargo.toml @@ -0,0 +1,33 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "failure" +version = "0.1.3" +authors = ["Without Boats "] +description = "Experimental error handling abstraction." +homepage = "https://boats.gitlab.io/failure" +documentation = "https://docs.rs/failure" +license = "MIT OR Apache-2.0" +repository = "https://github.com/rust-lang-nursery/failure" +[dependencies.backtrace] +version = "0.3.3" +optional = true + +[dependencies.failure_derive] +version = "0.1.3" +optional = true + +[features] +default = ["std", "derive"] +derive = ["failure_derive"] +std = ["backtrace"] diff --git a/failure/LICENSE-APACHE b/failure/LICENSE-APACHE new file mode 100644 index 000000000..16fe87b06 --- /dev/null +++ b/failure/LICENSE-APACHE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
diff --git a/failure/LICENSE-MIT b/failure/LICENSE-MIT new file mode 100644 index 000000000..31aa79387 --- /dev/null +++ b/failure/LICENSE-MIT @@ -0,0 +1,23 @@ +Permission is hereby granted, free of charge, to any +person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without +limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software +is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions +of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/failure/Makefile b/failure/Makefile new file mode 100644 index 000000000..6ff732985 --- /dev/null +++ b/failure/Makefile @@ -0,0 +1,15 @@ +all: test +.PHONY: all + +test: + @echo TEST DEFAULT FEATURES + @cargo test --all + @echo TEST WITH BACKTRACE + @cargo test --features backtrace --all + @echo TEST NO DEFAULT FEATURES + @cargo check --no-default-features --all +.PHONY: test + +check: + @cargo check --all +.PHONY: check diff --git a/failure/README.md b/failure/README.md new file mode 100644 index 000000000..2f228321b --- /dev/null +++ b/failure/README.md @@ -0,0 +1,119 @@ +# failure - a new error management story + +[![Build Status](https://travis-ci.org/rust-lang-nursery/failure.svg?branch=master)](https://travis-ci.org/rust-lang-nursery/failure) +[![Latest Version](https://img.shields.io/crates/v/failure.svg)](https://crates.io/crates/failure) +[![docs](https://docs.rs/failure/badge.svg)](https://docs.rs/failure) + +`failure` is designed to make it easier to manage errors in Rust. It is +intended to replace error management based on `std::error::Error` with a new +system based on lessons learned over the past several years, including those +learned from experience with quick-error and error-chain. + +`failure` provides two core components: + +* `Fail`: A new trait for custom error types. +* `Error`: A struct which any type that implements `Fail` can be cast into. + +## Evolution + +Failure is currently evolving as a library. First of all there is work going +on in Rust itself to [fix the error trait](https://github.com/rust-lang/rfcs/pull/2504) +secondarily the original plan for Failure towards 1.0 is unlikely to happen +in the current form. + +As such the original master branch towards 1.0 of failure was removed and +master now represents the future iteration steps of 0.1 until it's clear +what happens in the stdlib. + +The original 1.0 branch can be found in [evolution/1.0](https://github.com/rust-lang-nursery/failure/tree/evolution/1.0). + +## Example + +```rust +extern crate serde; +extern crate toml; + +#[macro_use] extern crate failure; +#[macro_use] extern crate serde_derive; + +use std::collections::HashMap; +use std::path::PathBuf; +use std::str::FromStr; + +use failure::Error; + +// This is a new error type that you've created. 
It represents the ways a
+// toolchain could be invalid.
+//
+// The custom derive for Fail derives an impl of both Fail and Display.
+// We don't do any other magic like creating new types.
+#[derive(Debug, Fail)]
+enum ToolchainError {
+    #[fail(display = "invalid toolchain name: {}", name)]
+    InvalidToolchainName {
+        name: String,
+    },
+    #[fail(display = "unknown toolchain version: {}", version)]
+    UnknownToolchainVersion {
+        version: String,
+    }
+}
+
+pub struct ToolchainId {
+    // ... etc
+}
+
+impl FromStr for ToolchainId {
+    type Err = ToolchainError;
+
+    fn from_str(s: &str) -> Result<Self, ToolchainError> {
+        // ... etc
+    }
+}
+
+pub type Toolchains = HashMap<ToolchainId, PathBuf>;
+
+// This opens a toml file containing associations between ToolchainIds and
+// Paths (the roots of those toolchains).
+//
+// This could encounter an io Error, a toml parsing error, or a ToolchainError;
+// all of them will be thrown into the special Error type.
+pub fn read_toolchains(path: PathBuf) -> Result<Toolchains, Error>
+{
+    use std::fs::File;
+    use std::io::Read;
+
+    let mut string = String::new();
+    File::open(path)?.read_to_string(&mut string)?;
+
+    let toml: HashMap<String, PathBuf> = toml::from_str(&string)?;
+
+    let toolchains = toml.iter().map(|(key, path)| {
+        let toolchain_id = key.parse()?;
+        Ok((toolchain_id, path))
+    }).collect::<Result<Toolchains, ToolchainError>>()?;
+
+    Ok(toolchains)
+}
+```
+
+## Requirements
+
+Both failure and failure_derive are intended to compile on all stable versions
+of Rust newer than 1.18.0, as well as the latest beta and the latest nightly.
+If either crate fails to compile on any version newer than 1.18.0, please open
+an issue.
+
+failure is **no_std** compatible, though some aspects of it (primarily the
+`Error` type) will not be available in no_std mode.
+
+## License
+
+failure is licensed under the terms of the MIT License or the Apache License
+2.0, at your choosing.
+
+## Code of Conduct
+
+Contribution to the failure crate is organized under the terms of the
+Contributor Covenant. The maintainer of failure, @withoutboats, promises to
+intervene to uphold that code of conduct.
diff --git a/failure/RELEASES.md b/failure/RELEASES.md
new file mode 100644
index 000000000..2de325eb9
--- /dev/null
+++ b/failure/RELEASES.md
@@ -0,0 +1,43 @@
+# Version 0.1.3
+
+- Added `Context::map`
+- Fixed a memory leak for older rust versions on error downcast
+
+# Version 0.1.2
+
+The original plan to release 1.0.0 was changed so that version 0.1.1 is released and a related [RFC to fix the error trait](https://github.com/rust-lang/rfcs/pull/2504) is submitted. See README for details.
+
+- Fix `failure_derive` to work with Rust 2018.
+- Add `#[fail(cause)]` that works similarly to `#[cause]`. The new form is preferred.
+- Fix `"backtrace"` feature to work without `"std"` feature.
+- Add `Compat::get_ref`.
+- Add `Fallible`.
+- Deprecate `Fail::causes` and `<dyn Fail>::causes` in favor of newly added `<dyn Fail>::iter_causes`.
+- Deprecate `Fail::root_cause` and `<dyn Fail>::root_cause` in favor of newly added `<dyn Fail>::find_root_cause`.
+- Add `<dyn Fail>::iter_chain`.
+- Implement `Box<dyn Fail>: Fail`.
+- Add `Error::from_boxed_compat`.
+- Deprecate `Error::cause` in favor of newly added `Error::as_fail`.
+- Deprecate `Error::causes` in favor of newly added `Error::iter_chain`.
+- Deprecate `Error::root_cause` in favor of newly added `Error::find_root_cause`.
+- Add `Error::iter_causes`.
+- Implement `Error: AsRef<dyn Fail>`.
+- Fix `Debug` implementation of `SyncFailure`.
+
+# Version 0.1.1
+
+- Add a `Causes` iterator, which iterates over the causes of a failure. Can be
+  accessed through the `Fail::causes` or `Error::causes` methods.
+- Add the `bail!` macro, which "throws" from the function.
+- Add the `ensure!` macro, which is like an "assert" which throws instead of
+  panicking.
+- The derive now supports a no_std mode.
+- The derive is re-exported from `failure` by default, so that users do not
+  have to directly depend on `failure_derive`.
+- Add an impl of `From<D> for Context<D>`, allowing users to `?` the `D` type to
+  produce a `Context<D>` (for cases where there is no further underlying
+  error).
+
+# Version 0.1.0
+
+- Initial version.
diff --git a/failure/book/src/SUMMARY.md b/failure/book/src/SUMMARY.md
new file mode 100644
index 000000000..e5c8a3b43
--- /dev/null
+++ b/failure/book/src/SUMMARY.md
@@ -0,0 +1,14 @@
+# Summary
+
+- [failure](./intro.md)
+- [How to use failure](./howto.md)
+  - [The Fail trait](./fail.md)
+  - [Deriving Fail](./derive-fail.md)
+  - [The Error type](./error.md)
+  - [`bail!` and `ensure!`](./bail-and-ensure.md)
+- [Patterns & Guidance](./guidance.md)
+  - [Strings as errors](./error-msg.md)
+  - [A Custom Fail type](./custom-fail.md)
+  - [Using the Error type](./use-error.md)
+  - [An Error and ErrorKind pair](./error-errorkind.md)
+  - [Strings and custom fail type](./string-custom-error.md)
diff --git a/failure/book/src/bail-and-ensure.md b/failure/book/src/bail-and-ensure.md
new file mode 100644
index 000000000..1326f0e05
--- /dev/null
+++ b/failure/book/src/bail-and-ensure.md
@@ -0,0 +1,18 @@
+# `bail!` and `ensure!`
+
+If you were a fan of the `bail!` and `ensure!` macros from error-chain, good news: failure has a version of these macros as well.
+
+The `bail!` macro returns an error immediately, based on a format string. The `ensure!` macro additionally takes a conditional, and returns the error only if that conditional is false. You can think of `bail!` and `ensure!` as being analogous to `panic!` and `assert!`, but throwing errors instead of panicking.
+
+The `bail!` and `ensure!` macros are useful when you are prototyping and you want to write your custom errors later. They are also the simplest example of using the failure crate.
+
+## Example
+```rust
+#[macro_use] extern crate failure;
+
+fn safe_cast_to_unsigned(n: i32) -> Result<u32, failure::Error>
+{
+    ensure!(n >= 0, "number cannot be smaller than 0!");
+    Ok(n as u32)
+}
+```
\ No newline at end of file
diff --git a/failure/book/src/custom-fail.md b/failure/book/src/custom-fail.md
new file mode 100644
index 000000000..324c0417b
--- /dev/null
+++ b/failure/book/src/custom-fail.md
@@ -0,0 +1,75 @@
+# A Custom Fail type
+
+This pattern is a way to define a new kind of failure. Defining a new kind of
+failure can be an effective way of representing an error for which you control
+all of the possible failure cases. It has several advantages:
+
+1. You can enumerate exactly all of the possible failures that can occur in
+this context.
+2. You have total control over the representation of the failure type.
+3. Callers can destructure your error without any sort of downcasting.
+
+To implement this pattern, you should define your own type that implements
+`Fail`. You can use the [custom derive][derive-fail] to make this easier. For
+example:
+
+```rust
+#[derive(Fail, Debug)]
+#[fail(display = "Input was invalid UTF-8")]
+pub struct Utf8Error;
+```
+
+This type can become as large and complicated as is appropriate to your use
+case. It can be an enum with a different variant for each possible error, and
+it can carry data with more precise information about the error.
For example: + +```rust +#[derive(Fail, Debug)] +#[fail(display = "Input was invalid UTF-8 at index {}", index)] +pub struct Utf8Error { + index: usize, +} +``` + +## When might you use this pattern? + +If you need to raise an error that doesn't come from one of your dependencies, +this is a great pattern to use. + +You can also use this pattern in conjunction with [using `Error`][use-error] or +defining an [Error and ErrorKind pair][error-errorkind]. Those functions which +are "pure logic" and have a very constrained set of errors (such as parsing +simple formats) might each return a different custom Fail type, and then the +function which merges them all together, does IO, and so on, would return a +more complex type like `Error` or your custom Error/ErrorKind. + +## Caveats on this pattern + +When you have a dependency which returns a different error type, often you will +be inclined to add it as a variant on your own error type. When you do that, +you should tag the underlying error as the `#[fail(cause)]` of your error: + +```rust +#[derive(Fail, Debug)] +pub enum MyError { + #[fail(display = "Input was invalid UTF-8 at index {}", _0)] + Utf8Error(usize), + #[fail(display = "{}", _0)] + Io(#[fail(cause)] io::Error), +} +``` + +Up to a limit, this design can work. However, it has some problems: + +- It can be hard to be forward compatible with new dependencies that raise + their own kinds of errors in the future. +- It defines a 1-1 relationship between a variant of the error and an + underlying error. + +Depending on your use case, as your function grows in complexity, it can be +better to transition to [using Error][use-error] or [defining an Error & +ErrorKind pair][error-errorkind]. + +[derive-fail]: ./derive-fail.html +[use-error]: ./use-error.html +[error-errorkind]: ./error-errorkind.html diff --git a/failure/book/src/derive-fail.md b/failure/book/src/derive-fail.md new file mode 100644 index 000000000..6fffd9918 --- /dev/null +++ b/failure/book/src/derive-fail.md @@ -0,0 +1,177 @@ +# Deriving `Fail` + +Though you can implement `Fail` yourself, we also provide a derive macro to +generate the impl for you. To get access to this macro, you must tag the extern +crate declaration with `#[macro_use]`, as in: + +```rust +#[macro_use] extern crate failure; +``` + +In its smallest form, deriving Fail looks like this: + +```rust +#[macro_use] extern crate failure; + +use std::fmt; + +#[derive(Fail, Debug)] +struct MyError; + +impl fmt::Display for MyError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "An error occurred.") + } +} +``` + +All failures need to implement `Display`, so we have added an impl of +Display. However, implementing `Display` is much more boilerplate than +implementing `Fail` - this is why we support deriving `Display` for you. + +## Deriving `Display` + +You can derive an implementation of `Display` with a special attribute: + +```rust +#[macro_use] extern crate failure; + +#[derive(Fail, Debug)] +#[fail(display = "An error occurred.")] +struct MyError; +``` + +This attribute will cause the `Fail` derive to also generate an impl of +`Display`, so that you don't have to implement one yourself. + +### String interpolation + +String literals are not enough for error messages in many cases. Often, you +want to include parts of the error value interpolated into the message. 
You can +do this with failure using the same string interpolation syntax as Rust's +formatting and printing macros: + +```rust +#[macro_use] extern crate failure; + +#[derive(Fail, Debug)] +#[fail(display = "An error occurred with error code {}. ({})", code, message)] +struct MyError { + code: i32, + message: String, +} +``` + +Note that unlike code that would appear in a method, this does not use +something like `self.code` or `self.message`; it just uses the field names +directly. This is because of a limitation in Rust's current attribute syntax. +As a result, you can only interpolate fields through the derivation; you cannot +perform method calls or use other arbitrary expressions. + +### Tuple structs + +With regular structs, you can use the name of the field in string +interpolation. When deriving Fail for a tuple struct, you might expect to use +the numeric index to refer to fields `0`, `1`, et cetera. However, a compiler +limitation prevents this from parsing today. + +For the time being, tuple field accesses in the display attribute need to be +prefixed with an underscore: + +```rust +#[macro_use] extern crate failure; + +#[derive(Fail, Debug)] +#[fail(display = "An error occurred with error code {}.", _0)] +struct MyError(i32); + + +#[derive(Fail, Debug)] +#[fail(display = "An error occurred with error code {} ({}).", _0, _1)] +struct MyOtherError(i32, String); +``` + +### Enums + +Implementing Display is also supported for enums by applying the attribute to +each variant of the enum, rather than to the enum as a whole. The Display impl +will match over the enum to generate the correct error message. For example: + +```rust +#[macro_use] extern crate failure; + +#[derive(Fail, Debug)] +enum MyError { + #[fail(display = "{} is not a valid version.", _0)] + InvalidVersion(u32), + #[fail(display = "IO error: {}", error)] + IoError { error: io::Error }, + #[fail(display = "An unknown error has occurred.")] + UnknownError, +} +``` + +## Overriding `backtrace` + +The backtrace method will be automatically overridden if the type contains a +field with the type `Backtrace`. This works for both structs and enums. + +```rust +#[macro_use] extern crate failure; + +use failure::Backtrace; + +/// MyError::backtrace will return a reference to the backtrace field +#[derive(Fail, Debug)] +#[fail(display = "An error occurred.")] +struct MyError { + backtrace: Backtrace, +} + +/// MyEnumError::backtrace will return a reference to the backtrace only if it +/// is Variant2, otherwise it will return None. +#[derive(Fail, Debug)] +enum MyEnumError { + #[fail(display = "An error occurred.")] + Variant1, + #[fail(display = "A different error occurred.")] + Variant2(Backtrace), +} +``` + +This happens automatically; no other annotations are necessary. It only works +if the type is named Backtrace, and not if you have created an alias for the +Backtrace type. + +## Overriding `cause` + +In contrast to `backtrace`, the cause cannot be determined by type name alone +because it could be any type which implements `Fail`. For this reason, if your +error has an underlying cause field, you need to annotate that field with +the `#[fail(cause)]` attribute. + +This can be used in fields of enums as well as structs. 
+
+
+```rust
+#[macro_use] extern crate failure;
+
+use std::io;
+
+/// MyError::cause will return a reference to the io_error field
+#[derive(Fail, Debug)]
+#[fail(display = "An error occurred.")]
+struct MyError {
+    #[fail(cause)] io_error: io::Error,
+}
+
+/// MyEnumError::cause will return a reference only if it is Variant2,
+/// otherwise it will return None.
+#[derive(Fail, Debug)]
+enum MyEnumError {
+    #[fail(display = "An error occurred.")]
+    Variant1,
+    #[fail(display = "A different error occurred.")]
+    Variant2(#[fail(cause)] io::Error),
+}
+```
diff --git a/failure/book/src/error-errorkind.md b/failure/book/src/error-errorkind.md
new file mode 100644
index 000000000..c5968e813
--- /dev/null
+++ b/failure/book/src/error-errorkind.md
@@ -0,0 +1,143 @@
+# An Error and ErrorKind pair
+
+This pattern is the most robust way to manage errors - and also the
+highest-maintenance. It combines some of the advantages of the [using Error][use-error]
+pattern and the [custom failure][custom-fail] patterns, while avoiding some of
+the disadvantages each of those patterns has:
+
+1. Like `Error`, this is forward compatible with new underlying kinds of
+errors from your dependencies.
+2. Like custom failures, this pattern allows you to specify additional information about the error that your dependencies don't give you.
+3. Like `Error`, it can be easier to convert underlying errors from dependencies
+into this type than for custom failures.
+4. Like custom failures, users can gain some information about the error
+without downcasting.
+
+The pattern is to create two new failure types: an `Error` and an `ErrorKind`,
+and to leverage [the `Context` type][context-api] provided by failure.
+
+```rust
+#[derive(Debug)]
+struct MyError {
+    inner: Context<MyErrorKind>,
+}
+
+#[derive(Copy, Clone, Eq, PartialEq, Debug, Fail)]
+enum MyErrorKind {
+    // A plain enum with no data in any of its variants
+    //
+    // For example:
+    #[fail(display = "A contextual error message.")]
+    OneVariant,
+    // ...
+}
+```
+
+Unfortunately, it is not easy to correctly derive `Fail` for `MyError` so that
+it delegates things to its inner `Context`. You should write those impls
+yourself:
+
+```rust
+impl Fail for MyError {
+    fn cause(&self) -> Option<&Fail> {
+        self.inner.cause()
+    }
+
+    fn backtrace(&self) -> Option<&Backtrace> {
+        self.inner.backtrace()
+    }
+}
+
+impl Display for MyError {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        Display::fmt(&self.inner, f)
+    }
+}
+```
+
+You should also provide some conversions and accessors, to go between a
+Context, your ErrorKind, and your Error:
+
+```rust
+impl MyError {
+    pub fn kind(&self) -> MyErrorKind {
+        *self.inner.get_context()
+    }
+}
+
+impl From<MyErrorKind> for MyError {
+    fn from(kind: MyErrorKind) -> MyError {
+        MyError { inner: Context::new(kind) }
+    }
+}
+
+impl From<Context<MyErrorKind>> for MyError {
+    fn from(inner: Context<MyErrorKind>) -> MyError {
+        MyError { inner: inner }
+    }
+}
+```
+
+With this code set up, you can use the context method from failure to apply
+your ErrorKind to `Result`s in underlying libraries:
+
+```rust
+use failure::ResultExt;
+perform_some_io().context(ErrorKind::NetworkFailure)?;
+```
+
+You can also directly throw `ErrorKind` without an underlying error when
+appropriate:
+
+```rust
+Err(ErrorKind::DomainSpecificError)?
+```
+
+### What should your ErrorKind contain?
+
+Your error kind probably should not carry data - and if it does, it should only
+carry stateless data types that provide additional information about what the
+`ErrorKind` means. This way, your `ErrorKind` can be `Eq`, making it
+easy to use as a way of comparing errors.
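+As a brief illustrative sketch (not part of the upstream book text), a caller
+might branch on the kind without any downcasting; it reuses the `MyError`,
+`MyErrorKind` and `OneVariant` names defined above:
+
+```rust
+// Sketch only: handle an error by inspecting its kind.
+// `kind()` returns a copy of the `MyErrorKind` value stored in the `Context`,
+// which is cheap because the enum derives `Copy`.
+fn report(err: &MyError) {
+    match err.kind() {
+        MyErrorKind::OneVariant => eprintln!("contextual failure: {}", err),
+        // further variants would be matched here
+    }
+}
+```
+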
+
+Your ErrorKind is a way of providing information about what errors mean
+appropriate to the level of abstraction that your library operates at. As some
+examples:
+
+- If your library expects to read from the user's `Cargo.toml`, you might have
+  an `InvalidCargoToml` variant, to capture what `io::Error` and `toml::Error`
+  mean in the context of your library.
+- If your library does both file system activity and network activity, you
+  might have `Filesystem` and `Network` variants, to divide up the `io::Error`s
+  between which system in particular failed.
+
+Exactly what semantic information is appropriate depends entirely on what this
+bit of code is intended to do.
+
+## When might you use this pattern?
+
+The most likely use cases for this pattern are mid-layer libraries which perform a
+function that requires many dependencies, and that are intended to be used in
+production. Libraries with few dependencies do not need to manage many
+underlying error types and can probably suffice with a simpler [custom
+failure][custom-fail]. Applications that know they are almost always just going
+to log these errors can get away with [using the Error type][use-error] rather
+than managing extra context information.
+
+That said, when you need to provide the most expressive information about an
+error possible, this can be a good approach.
+
+## Caveats on this pattern
+
+This pattern is the most involved pattern documented in this book. It involves
+a lot of boilerplate to set up (which may be automated away eventually), and it
+requires you to apply a contextual message to every underlying error that is
+thrown inside your code. It can be a lot of work to maintain this pattern.
+
+Additionally, like the Error type, the Context type may use an allocation and a
+dynamic dispatch internally. If you know this is too expensive for your use
+case, you should not use this pattern.
+
+[use-error]: ./use-error.html
+[custom-fail]: ./custom-fail.html
+[context-api]: https://boats.gitlab.io/failure/doc/failure/struct.Context.html
diff --git a/failure/book/src/error-msg.md b/failure/book/src/error-msg.md
new file mode 100644
index 000000000..e754aa93b
--- /dev/null
+++ b/failure/book/src/error-msg.md
@@ -0,0 +1,59 @@
+# Strings as errors
+
+This pattern is a way to create new errors without doing much set up. It is
+definitely the sloppiest way to throw errors. It can be great to use this
+during prototyping, but maybe not in the final product.
+
+String types do not implement `Fail`, which is why there are two adapters to
+create failures from a string:
+
+- [`failure::err_msg`][err-msg-api] - a function that takes a displayable
+  type and creates a failure from it. This can take a String or a string
+  literal.
+- [`format_err!`][format-err-api] - a macro with string interpolation, similar
+  to `format!` or `println!`.
+
+```rust
+fn check_range(x: usize, range: Range<usize>) -> Result<usize, Error> {
+    if x < range.start {
+        return Err(format_err!("{} is below {}", x, range.start));
+    }
+    if x >= range.end {
+        return Err(format_err!("{} is above {}", x, range.end));
+    }
+    Ok(x)
+}
+```
+
+If you're going to use strings as errors, we recommend [using
+`Error`][use-error] as your error type, rather than `ErrorMessage`; this way,
+if some of your strings are `String` and some are `&'static str`, you don't
+need to worry about merging them into a single string type.
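+
+A minimal sketch (not part of the upstream book text) of the function-style
+adapter, where `config_path` is a hypothetical helper used only for
+illustration:
+
+```rust
+use failure::{err_msg, Error};
+
+// Sketch only: turn a missing value into an error message without defining
+// a custom error type; `err_msg` accepts any displayable value.
+fn config_path(arg: Option<String>) -> Result<String, Error> {
+    arg.ok_or_else(|| err_msg("no configuration path was provided"))
+}
+```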
+
+## When might you use this pattern?
+
+This pattern is the easiest to set up and get going with, so it can be great
+when prototyping or spiking out an early design. It can also be great when you
+know that an error variant is extremely uncommon, and that there is really no
+way to handle it other than to log the error and move on.
+
+## Caveats on this pattern
+
+If you are writing a library you plan to publish to crates.io, this is probably
+not a good way to handle errors, because it doesn't give your clients very much
+control. For public, open source libraries, we'd recommend using [custom
+failures][custom-fail] in the cases where you would use a string as an error.
+
+This pattern can also be very brittle. If you ever want to branch over which
+error was returned, you would have to match on the exact contents of the
+string. If you ever change the string contents, that will silently break that
+match.
+
+For these reasons, we strongly recommend against using this pattern except for
+prototyping and when you know the error is just going to get logged or reported
+to the users.
+
+[custom-fail]: ./custom-fail.html
+[use-error]: ./use-error.html
+[err-msg-api]: https://boats.gitlab.io/failure/doc/failure/fn.err_msg.html
+[format-err-api]: https://boats.gitlab.io/failure/doc/failure/macro.format_err.html
diff --git a/failure/book/src/error.md b/failure/book/src/error.md
new file mode 100644
index 000000000..f37e4c3fc
--- /dev/null
+++ b/failure/book/src/error.md
@@ -0,0 +1,100 @@
+# The `Error` type
+
+In addition to the trait `Fail`, failure provides a type called `Error`. Any
+type that implements `Fail` can be cast into `Error` using `From` and `Into`, which
+allows users to throw errors of different types with `?`, as long as the
+function returns an `Error`.
+
+For example:
+
+```rust
+// Something you can deserialize
+#[derive(Deserialize)]
+struct Object {
+    ...
+}
+
+impl Object {
+    // This throws both IO Errors and JSON Errors, but they both get converted
+    // into the Error type.
+    fn from_file(path: &Path) -> Result<Object, Error> {
+        let mut string = String::new();
+        File::open(path)?.read_to_string(&mut string)?;
+        let object = json::from_str(&string)?;
+        Ok(object)
+    }
+}
+```
+
+## Causes and Backtraces
+
+The Error type has all of the methods from the Fail trait, with a few notable
+differences. Most importantly, the cause and backtrace methods on Error do not
+return Options - an Error is *guaranteed* to have a cause and a backtrace.
+
+```rust
+// Both methods are guaranteed to return an &Fail and an &Backtrace
+println!("{}, {}", error.cause(), error.backtrace())
+```
+
+An `Error`'s cause is always the failure that was cast into this `Error`.
+That failure may have further underlying causes. Unlike Fail, this means that
+the cause of an Error will have the same Display representation as the Error
+itself.
+
+As to the error's guaranteed backtrace, when the conversion into the Error type
+happens, if the underlying failure does not provide a backtrace, a new
+backtrace is constructed pointing to that conversion point (rather than the
+origin of the error). This construction only happens if there is no underlying
+backtrace; if it does have a backtrace no new backtrace is constructed.
+
+## Downcasting
+
+The Error type also supports downcasting into any concrete Fail type. It can be
+downcast by reference or by value - when downcasting by value, the return type
+is `Result<T, Error>`, allowing you to get the error back out of it.
+
+```rust
+match error.downcast::<io::Error>() {
+    Ok(io_error) => { ... }
+    Err(error) => { ... }
+}
+```
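+
+Downcasting by reference works the same way but leaves the error intact; a
+short sketch (not part of the upstream book text), again using `io::Error` as
+the target type:
+
+```rust
+// Sketch only: inspect the concrete type without consuming the `Error`.
+if let Some(io_error) = error.downcast_ref::<io::Error>() {
+    println!("underlying I/O error of kind {:?}", io_error.kind());
+}
+```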
+
+## Implementation details
+
+`Error` is essentially a trait object, but with some fanciness it may generate
+and store the backtrace if the underlying failure did not have one. In
+particular, we use a custom dynamically sized type to store the backtrace
+information inline with the trait object data.
+
+```rust
+struct Error {
+    // Inner is a dynamically sized type
+    inner: Box<Inner<Fail>>,
+}
+
+struct Inner<F: Fail> {
+    backtrace: Backtrace,
+    failure: F,
+}
+```
+
+By storing the backtrace in the heap this way, we avoid increasing the size of
+the Error type beyond that of two non-nullable pointers. This keeps the size of
+the `Result` type from getting too large, avoiding having a negative impact on
+the "happy path" of returning Ok. For example, a `Result<(), Error>` should be
+represented as a pair of nullable pointers, with the null case representing
+`Ok`. Similar optimizations can be applied to values up to at least a pointer
+in size.
+
+To emphasize: Error is intended for use cases where the error case is
+considered relatively uncommon. This optimization makes the overhead of an
+error less than it otherwise would be for the Ok branch. In cases where errors
+are going to be returned extremely frequently, returning this Error type is
+probably not appropriate, but you should benchmark in those cases.
+
+(As a rule of thumb: if you're not sure if you can afford to have a trait
+object, you probably *can* afford it. Heap allocations are not nearly as cheap
+as stack allocations, but they're cheap enough that you can almost always
+afford them.)
diff --git a/failure/book/src/fail.md b/failure/book/src/fail.md
new file mode 100644
index 000000000..4d337c327
--- /dev/null
+++ b/failure/book/src/fail.md
@@ -0,0 +1,152 @@
+# The `Fail` trait
+
+The `Fail` trait is a replacement for [`std::error::Error`][stderror]. It has
+been designed to support a number of operations:
+
+- Because it is bound by both `Debug` and `Display`, any failure can be
+  printed in two ways.
+- It has both a `backtrace` and a `cause` method, allowing users to get
+  information about how the error occurred.
+- It supports wrapping failures in additional contextual information.
+- Because it is bound by `Send` and `Sync`, failures can be moved and shared
+  between threads easily.
+- Because it is bound by `'static`, the abstract `Fail` trait object can be
+  downcast into concrete types.
+
+Every new error type in your code should implement `Fail`, so it can be
+integrated into the entire system built around this trait. You can manually
+implement `Fail` yourself, or you can use the derive for `Fail` defined
+in a separate crate and documented [here][derive-docs].
+
+Implementors of this trait are called 'failures'.
+
+## Cause
+
+Often, an error type contains (or could contain) another underlying error type
+which represents the "cause" of this error - for example, if your custom error
+contains an `io::Error`, that is the cause of your error.
+
+The cause method on the `Fail` trait allows all errors to expose their underlying
+cause - if they have one - in a consistent way. Users can loop over the chain
+of causes, for example, getting the entire series of causes for an error:
+
+```rust
+// Assume err is a type that implements `Fail`
+let mut fail: &Fail = err;
+
+while let Some(cause) = fail.cause() {
+    println!("{}", cause);
+
+    // Make `fail` the reference to the cause of the previous fail, making the
+    // loop "dig deeper" into the cause chain.
+ fail = cause; +} +``` + +Because `&Fail` supports downcasting, you can also inspect causes in more +detail if you are expecting a certain failure: + +```rust +while let Some(cause) = fail.cause() { + + if let Some(err) = cause.downcast_ref::() { + // treat io::Error specially + } else { + // fallback case + } + + fail = cause; +} +``` + +For convenience an iterator is also provided: + +```rust +// Assume err is a type that implements `Fail` +let mut fail: &Fail = err; + +for cause in fail.iter_causes() { + println!("{}", cause); +} +``` + +## Backtraces + +Errors can also generate a backtrace when they are constructed, helping you +determine the place the error was generated and the function chain that called into +that. Like causes, this is entirely optional - the authors of each failure +have to decide if generating a backtrace is appropriate in their use case. + +The backtrace method allows all errors to expose their backtrace if they have +one. This enables a consistent method for getting the backtrace from an error: + +```rust +// We don't even know the type of the cause, but we can still get its +// backtrace. +if let Some(bt) = err.cause().and_then(|cause| cause.backtrace()) { + println!("{}", bt) +} +``` + +The `Backtrace` type exposed by `failure` is different from the `Backtrace` exposed +by the [backtrace crate][backtrace-crate], in that it has several optimizations: + +- It has a `no_std` compatible form which will never be generated (because + backtraces require heap allocation), and should be entirely compiled out. +- It will not be generated unless the `RUST_BACKTRACE` environment variable has + been set at runtime. +- Symbol resolution is delayed until the backtrace is actually printed, because + this is the most expensive part of generating a backtrace. + +## Context + +Often, the libraries you are using will present error messages that don't +provide very helpful information about what exactly has gone wrong. For +example, if an `io::Error` says that an entity was "Not Found," that doesn't +communicate much about what specific file was missing - if it even was a file +(as opposed to a directory for example). + +You can inject additional context to be carried with this error value, +providing semantic information about the nature of the error appropriate to the +level of abstraction that the code you are writing operates at. The `context` +method on `Fail` takes any displayable value (such as a string) to act as +context for this error. + +Using the `ResultExt` trait, you can also get `context` as a convenient method on +`Result` directly. For example, suppose that your code attempted to read from a +Cargo.toml. You can wrap the `io::Error`s that occur with additional context +about what operation has failed: + +```rust +use failure::ResultExt; + +let mut file = File::open(cargo_toml_path).context("Missing Cargo.toml")?; +file.read_to_end(&buffer).context("Could not read Cargo.toml")?; +``` + +The `Context` object also has a constructor that does not take an underlying +error, allowing you to create ad hoc Context errors alongside those created by +applying the `context` method to an underlying error. + +## Backwards compatibility + +We've taken several steps to make transitioning from `std::error` to `failure` as +painless as possible. + +First, there is a blanket implementation of `Fail` for all types that implement +`std::error::Error`, as long as they are `Send + Sync + 'static`. 
If you are
+dealing with a library that hasn't shifted to `Fail`, it is automatically
+compatible with `failure` already.
+
+Second, `Fail` contains a method called `compat`, which produces a type that
+implements `std::error::Error`. If you have a type that implements `Fail`, but
+not the older `Error` trait, you can call `compat` to get a type that does
+implement that trait (for example, if you need to return a `Box<Error>`).
+
+The biggest hole in our backwards compatibility story is that you cannot
+implement `std::error::Error` and also override the backtrace and cause methods
+on `Fail`. We intend to enable this with specialization when it becomes stable.
+
+[derive-docs]: https://boats.gitlab.io/failure/derive-fail.html
+[stderror]: https://doc.rust-lang.org/std/error/trait.Error.html
+[backtrace-crate]: http://alexcrichton.com/backtrace-rs
diff --git a/failure/book/src/guidance.md b/failure/book/src/guidance.md
new file mode 100644
index 000000000..7023ca40d
--- /dev/null
+++ b/failure/book/src/guidance.md
@@ -0,0 +1,24 @@
+# Patterns & Guidance
+
+failure is not a "one size fits all" approach to error management. There are
+multiple patterns that emerge from the API this library provides, and users
+need to determine which pattern makes sense for them. This section documents
+some patterns and how users might use them.
+
+In brief, these are the patterns documented here:
+
+- **[Strings as errors](./error-msg.md):** Using strings as your error
+  type. Good for prototyping.
+- **[A Custom Fail type](./custom-fail.md):** Defining a custom type to be
+  your error type. Good for APIs where you control all or most of the
+  possible failures.
+- **[Using the Error type](./use-error.md):** Using the Error type to pull
+  together multiple failures of different types. Good for applications and
+  APIs that know the error won't be inspected much more.
+- **[An Error and ErrorKind pair](./error-errorkind.md):** Using both a
+  custom error type and an ErrorKind enum to create a very robust error
+  type. Good for public APIs in large crates.
+
+(Though each of these items identifies a use case that its pattern would be
+good for, in truth each of them can be applied in various contexts. It's up to
+you to decide what makes the most sense for your particular use case.)
diff --git a/failure/book/src/howto.md b/failure/book/src/howto.md
new file mode 100644
index 000000000..5c8135b7a
--- /dev/null
+++ b/failure/book/src/howto.md
@@ -0,0 +1,8 @@
+# How to use failure
+
+This section of the documentation is about how the APIs exposed in failure can
+be used. It is organized around the major APIs of failure:
+
+- **[The Fail trait](./fail.md):** The primary abstraction provided by failure.
+- **[Deriving Fail](./derive-fail.md):** A custom derive for the Fail trait.
+- **[The Error type](./error.md):** A convenient wrapper around any Fail type.
diff --git a/failure/book/src/intro.md b/failure/book/src/intro.md
new file mode 100644
index 000000000..d62093b11
--- /dev/null
+++ b/failure/book/src/intro.md
@@ -0,0 +1,77 @@
+# failure
+
+This is the documentation for the failure crate, which provides a system for
+creating and managing errors in Rust.
Additional documentation is found here:
+
+* [API documentation][api]
+* [failure source code][repo]
+
+[api]: https://boats.gitlab.io/failure/doc/failure
+[repo]: https://github.com/rust-lang-nursery/failure
+
+```rust
+extern crate serde;
+extern crate toml;
+
+#[macro_use] extern crate failure;
+#[macro_use] extern crate serde_derive;
+
+use std::collections::HashMap;
+use std::path::PathBuf;
+use std::str::FromStr;
+
+use failure::Error;
+
+// This is a new error type that you've created. It represents the ways a
+// toolchain could be invalid.
+//
+// The custom derive for Fail derives an impl of both Fail and Display.
+// We don't do any other magic like creating new types.
+#[derive(Debug, Fail)]
+enum ToolchainError {
+    #[fail(display = "invalid toolchain name: {}", name)]
+    InvalidToolchainName {
+        name: String,
+    },
+    #[fail(display = "unknown toolchain version: {}", version)]
+    UnknownToolchainVersion {
+        version: String,
+    }
+}
+
+pub struct ToolchainId {
+    // ... etc
+}
+
+impl FromStr for ToolchainId {
+    type Err = ToolchainError;
+
+    fn from_str(s: &str) -> Result<ToolchainId, ToolchainError> {
+        // ... etc
+    }
+}
+
+pub type Toolchains = HashMap<ToolchainId, PathBuf>;
+
+// This opens a toml file containing associations between ToolchainIds and
+// Paths (the roots of those toolchains).
+//
+// This could encounter an io Error, a toml parsing error, or a ToolchainError,
+// all of them will be thrown into the special Error type
+pub fn read_toolchains(path: PathBuf) -> Result<Toolchains, Error>
+{
+    use std::fs::File;
+    use std::io::Read;
+
+    let mut string = String::new();
+    File::open(path)?.read_to_string(&mut string)?;
+
+    let toml: HashMap<String, PathBuf> = toml::from_str(&string)?;
+
+    let toolchains = toml.iter().map(|(key, path)| {
+        let toolchain_id = key.parse()?;
+        Ok((toolchain_id, path))
+    }).collect::<Result<Toolchains, ToolchainError>>()?;
+
+    Ok(toolchains)
+}
+```
diff --git a/failure/book/src/string-custom-error.md b/failure/book/src/string-custom-error.md
new file mode 100644
index 000000000..a56783646
--- /dev/null
+++ b/failure/book/src/string-custom-error.md
@@ -0,0 +1,160 @@
+# Strings and custom fail type
+
+This pattern is a hybrid between the [_An Error and ErrorKind pair_](./error-errorkind.md) and
+[_Using the Error type_](./use-error.md).
+ +Such an error type can be implemented in the same way that what was shown in +the [_An Error and ErrorKind pair_](./error-errorkind.md) pattern, but here, the context is a +simple string: + +```rust +extern crate core; +extern crate failure; + +use core::fmt::{self, Display}; +use failure::{Backtrace, Context, Fail, ResultExt}; + +#[derive(Debug)] +pub struct MyError { + inner: Context, +} + +impl Fail for MyError { + fn cause(&self) -> Option<&Fail> { + self.inner.cause() + } + + fn backtrace(&self) -> Option<&Backtrace> { + self.inner.backtrace() + } +} + +impl Display for MyError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + Display::fmt(&self.inner, f) + } +} +``` + +To make the type easier to use, a few impls can be added: + +```rust +// Allows writing `MyError::from("oops"))?` +impl From<&'static str> for MyError { + fn from(msg: &'static str) -> MyError { + MyError { + inner: Context::new(msg), + } + } +} + +// Allows adding more context via a String +impl From> for MyError { + fn from(inner: Context) -> MyError { + MyError { inner } + } +} + +// Allows adding more context via a &str +impl From> for MyError { + fn from(inner: Context<&'static str>) -> MyError { + MyError { + inner: inner.map(|s| s.to_string()), + } + } +} +``` + +Here is how it is used: + +```rust +fn main() { + println!("{:?}", err2()); +} + +// Unlike the "Using the Error type" pattern, functions return our own error +// type here. +fn err1() -> Result<(), MyError> { + Ok(Err(MyError::from("err1"))?) +} + +fn err2() -> Result<(), MyError> { + // Unlike the "An Error and ErrorKind pair" pattern, our context is a + // simple string. We can chain errors and provide detailed error messages, + // but we don't have to deal with the complexity of an error kind type + Ok(err1().context("err2")?) +} +``` + +## Variant with `&'static str` + +If you don't need to format strings, you can avoid an +allocation by using a `Context<&'static str>` instead of a +`Context`. + +```rust +extern crate core; +extern crate failure; + +use core::fmt::{self, Display}; +use failure::{Backtrace, Context, Fail, ResultExt}; + +#[derive(Debug)] +pub struct MyError { + inner: Context<&'static str>, +} + +impl Fail for MyError { + fn cause(&self) -> Option<&Fail> { + self.inner.cause() + } + + fn backtrace(&self) -> Option<&Backtrace> { + self.inner.backtrace() + } +} + +impl Display for MyError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + Display::fmt(&self.inner, f) + } +} + +impl From<&'static str> for MyError { + fn from(msg: &'static str) -> MyError { + MyError { + inner: Context::new(msg.into()), + } + } +} + +impl From> for MyError { + fn from(inner: Context<&'static str>) -> MyError { + MyError { + inner, + } + } +} +``` + +## When might you use this pattern? + +Sometimes, you don't want to use the [_Using the Error type_](./use-error.md) +pattern, because you want to expose a few different error types. But you don't +want to use the [_An Error and ErrorKind pair_](./error-errorkind.md) pattern +either, because there is no need to provide the context as an enum or because +it would be too much work, if the error can occur in many different contexts. 
+ +For instance, if you're writing a library that decodes/encodes a complex binary +format, you might want to expose a `DecodeError` and an `EncodeError` error +type, but provide the context as a simple string instead of an error kind, because: + +- users may not care too much about the context in which a `DecodeError` or + `EncodeError` was encountered, they just want a nice message to explain it +- your binary format is really complex, errors can occur in many different + places, and you don't want to end up with a giant `ErrorKind` enum + + +## Caveats on this pattern + +If using the `Context` variant, an extra allocation is used for the string. diff --git a/failure/book/src/use-error.md b/failure/book/src/use-error.md new file mode 100644 index 000000000..a0a294402 --- /dev/null +++ b/failure/book/src/use-error.md @@ -0,0 +1,66 @@ +# Use the `Error` type + +This pattern is a way to manage errors when you have multiple kinds of failure +that could occur during a single function. It has several distinct advantages: + +1. You can start using it without defining any of your own failure types. +2. All types that implement `Fail` can be thrown into the `Error` type using +the `?` operator. +3. As you start adding new dependencies with their own failure types, you can +start throwing them without making a breaking change. + +To use this pattern, all you need to do is return `Result<_, Error>` from your +functions: + +```rust +use std::io; +use std::io::BufRead; + +use failure::Error; +use failure::err_msg; + +fn my_function() -> Result<(), Error> { + let stdin = io::stdin(); + + for line in stdin.lock().lines() { + let line = line?; + + if line.chars().all(|c| c.is_whitespace()) { + break + } + + if !line.starts_with("$") { + return Err(format_err!("Input did not begin with `$`")); + } + + println!("{}", &line[1..]); + } + + Ok(()) +} +``` + +## When might you use this pattern? + +This pattern is very effective when you know you will usually not need to +destructure the error this function returns. For example: + +- When prototyping. +- When you know you are going to log this error, or display it to the user, + either all of the time or nearly all of the time. +- When it would be impractical for this API to report more custom context for + the error (e.g. because it is a trait that doesn't want to add a new Error + associated type). + +## Caveats on this pattern + +There are two primary downsides to this pattern: + +- The `Error` type allocates. There are cases where this would be too + expensive. In those cases you should use a [custom failure][custom-fail]. +- You cannot recover more information about this error without downcasting. If + your API needs to express more contextual information about the error, use + the [Error and ErrorKind][error-errorkind] pattern. 
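+
+As a minimal sketch of that second caveat (the `report` helper and the choice
+of `io::Error` here are illustrative assumptions, not part of the pattern),
+recovering a concrete failure from an `Error` means naming the exact type you
+expect and downcasting to it:
+
+```rust
+extern crate failure;
+
+use std::io;
+
+use failure::Error;
+
+// Hypothetical helper: to branch on the underlying failure we have to guess
+// its concrete type and downcast by reference; everything else falls through
+// to the generic case.
+fn report(err: &Error) {
+    if let Some(io_err) = err.downcast_ref::<io::Error>() {
+        eprintln!("I/O problem: {}", io_err);
+    } else {
+        eprintln!("unexpected error: {}", err);
+    }
+}
+
+fn main() {
+    let err: Error = io::Error::new(io::ErrorKind::NotFound, "missing file").into();
+    report(&err);
+}
+```
+
+If you find yourself writing many of these downcasts, that is usually a sign
+to switch to a custom failure or the Error and ErrorKind pattern instead.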
+ +[custom-fail]: ./custom-fail.html +[error-errorkind]: ./error-errorkind.html diff --git a/failure/build-docs.sh b/failure/build-docs.sh new file mode 100755 index 000000000..fe2039fbf --- /dev/null +++ b/failure/build-docs.sh @@ -0,0 +1,8 @@ +#!/bin/bash +mkdir public +cargo doc --no-deps +cargo install mdbook --no-default-features +mdbook build ./book +cp -r ./target/doc/ ./public +cp -r ./book/book/* ./public +find $PWD/public | grep "\.html\$" diff --git a/failure/examples/bail_ensure.rs b/failure/examples/bail_ensure.rs new file mode 100644 index 000000000..05c399b5d --- /dev/null +++ b/failure/examples/bail_ensure.rs @@ -0,0 +1,26 @@ +#[macro_use] +extern crate failure; + +use failure::Error; + +fn bailer() -> Result<(), Error> { + // bail!("ruh roh"); + bail!("ruh {}", "roh"); +} + +fn ensures() -> Result<(), Error> { + ensure!(true, "true is false"); + ensure!(false, "false is false"); + Ok(()) +} + +fn main() { + match bailer() { + Ok(_) => println!("ok"), + Err(e) => println!("{}", e), + } + match ensures() { + Ok(_) => println!("ok"), + Err(e) => println!("{}", e), + } +} diff --git a/failure/examples/error_as_cause.rs b/failure/examples/error_as_cause.rs new file mode 100644 index 000000000..24e5b063d --- /dev/null +++ b/failure/examples/error_as_cause.rs @@ -0,0 +1,18 @@ +#[macro_use] +extern crate failure; + +use failure::{err_msg, Error, Fail}; + +#[derive(Debug, Fail)] +#[fail(display = "my wrapping error")] +struct WrappingError(#[fail(cause)] Error); + +fn bad_function() -> Result<(), WrappingError> { + Err(WrappingError(err_msg("this went bad"))) +} + +fn main() { + for cause in Fail::iter_causes(&bad_function().unwrap_err()) { + println!("{}", cause); + } +} diff --git a/failure/examples/simple.rs b/failure/examples/simple.rs new file mode 100644 index 000000000..35d25e16a --- /dev/null +++ b/failure/examples/simple.rs @@ -0,0 +1,22 @@ +#[macro_use] +extern crate failure; + +use failure::Fail; + +#[derive(Debug, Fail)] +#[fail(display = "my error")] +struct MyError; + +#[derive(Debug, Fail)] +#[fail(display = "my wrapping error")] +struct WrappingError(#[fail(cause)] MyError); + +fn bad_function() -> Result<(), WrappingError> { + Err(WrappingError(MyError)) +} + +fn main() { + for cause in Fail::iter_causes(&bad_function().unwrap_err()) { + println!("{}", cause); + } +} diff --git a/failure/examples/string_custom_error_pattern.rs b/failure/examples/string_custom_error_pattern.rs new file mode 100644 index 000000000..72cf65103 --- /dev/null +++ b/failure/examples/string_custom_error_pattern.rs @@ -0,0 +1,76 @@ +//! This example show the pattern "Strings and custom fail type" described in the book +extern crate core; +extern crate failure; + +use core::fmt::{self, Display}; +use failure::{Backtrace, Context, Fail, ResultExt}; + +fn main() { + let err = err1().unwrap_err(); + // Print the error itself + println!("error: {}", err); + // Print the chain of errors that caused it + for cause in Fail::iter_causes(&err) { + println!("caused by: {}", cause); + } +} + +fn err1() -> Result<(), MyError> { + // The context can be a String + Ok(err2().context("err1".to_string())?) +} + +fn err2() -> Result<(), MyError> { + // The context can be a &'static str + Ok(err3().context("err2")?) +} + +fn err3() -> Result<(), MyError> { + Ok(Err(MyError::from("err3"))?) 
+} + +#[derive(Debug)] +pub struct MyError { + inner: Context, +} + +impl Fail for MyError { + fn cause(&self) -> Option<&Fail> { + self.inner.cause() + } + + fn backtrace(&self) -> Option<&Backtrace> { + self.inner.backtrace() + } +} + +impl Display for MyError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + Display::fmt(&self.inner, f) + } +} + +// Allows writing `MyError::from("oops"))?` +impl From<&'static str> for MyError { + fn from(msg: &'static str) -> MyError { + MyError { + inner: Context::new(msg.into()), + } + } +} + +// Allows adding more context via a String +impl From> for MyError { + fn from(inner: Context) -> MyError { + MyError { inner } + } +} + +// Allows adding more context via a &str +impl From> for MyError { + fn from(inner: Context<&'static str>) -> MyError { + MyError { + inner: inner.map(|s| s.to_string()), + } + } +} diff --git a/failure/src/as_fail.rs b/failure/src/as_fail.rs new file mode 100644 index 000000000..dd53a46c6 --- /dev/null +++ b/failure/src/as_fail.rs @@ -0,0 +1,37 @@ +use Fail; + +/// The `AsFail` trait +/// +/// This trait is similar to `AsRef`, but it is specialized to handle +/// the dynamic object of `Fail`. Implementors of `Fail` have a blanket +/// implementation. It is used in `failure_derive` in order to generate a +/// custom cause. +pub trait AsFail { + /// Converts a reference to `Self` into a dynamic trait object of `Fail`. + fn as_fail(&self) -> &Fail; +} + +impl AsFail for T +where + T: Fail, +{ + fn as_fail(&self) -> &Fail { + self + } +} + +impl AsFail for Fail { + fn as_fail(&self) -> &Fail { + self + } +} + +with_std! { + use error::Error; + + impl AsFail for Error { + fn as_fail(&self) -> &Fail { + self.as_fail() + } + } +} diff --git a/failure/src/backtrace/internal.rs b/failure/src/backtrace/internal.rs new file mode 100644 index 000000000..7be137b75 --- /dev/null +++ b/failure/src/backtrace/internal.rs @@ -0,0 +1,130 @@ +use std::cell::UnsafeCell; +use std::env; +use std::ffi::OsString; +use std::fmt; +use std::sync::atomic::{AtomicUsize, ATOMIC_USIZE_INIT, Ordering}; +use std::sync::Mutex; + +pub use super::backtrace::Backtrace; + +const GENERAL_BACKTRACE: &str = "RUST_BACKTRACE"; +const FAILURE_BACKTRACE: &str = "RUST_FAILURE_BACKTRACE"; + +pub(super) struct InternalBacktrace { + backtrace: Option, +} + +struct MaybeResolved { + resolved: Mutex, + backtrace: UnsafeCell, +} + +unsafe impl Send for MaybeResolved {} +unsafe impl Sync for MaybeResolved {} + +impl InternalBacktrace { + pub(super) fn new() -> InternalBacktrace { + static ENABLED: AtomicUsize = ATOMIC_USIZE_INIT; + + match ENABLED.load(Ordering::SeqCst) { + 0 => { + let enabled = is_backtrace_enabled(|var| env::var_os(var)); + ENABLED.store(enabled as usize + 1, Ordering::SeqCst); + if !enabled { + return InternalBacktrace { backtrace: None } + } + } + 1 => return InternalBacktrace { backtrace: None }, + _ => {} + } + + InternalBacktrace { + backtrace: Some(MaybeResolved { + resolved: Mutex::new(false), + backtrace: UnsafeCell::new(Backtrace::new_unresolved()), + }), + } + } + + pub(super) fn none() -> InternalBacktrace { + InternalBacktrace { backtrace: None } + } + + pub(super) fn as_backtrace(&self) -> Option<&Backtrace> { + let bt = match self.backtrace { + Some(ref bt) => bt, + None => return None, + }; + let mut resolved = bt.resolved.lock().unwrap(); + unsafe { + if !*resolved { + (*bt.backtrace.get()).resolve(); + *resolved = true; + } + Some(&*bt.backtrace.get()) + } + } + + pub(super) fn is_none(&self) -> bool { + self.backtrace.is_none() + 
} +} + +impl fmt::Debug for InternalBacktrace { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_struct("InternalBacktrace") + .field("backtrace", &self.as_backtrace()) + .finish() + } +} + +fn is_backtrace_enabled Option>(get_var: F) -> bool { + match get_var(FAILURE_BACKTRACE) { + Some(ref val) if val != "0" => true, + Some(ref val) if val == "0" => false, + _ => match get_var(GENERAL_BACKTRACE) { + Some(ref val) if val != "0" => true, + _ => false, + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + const YEA: Option<&str> = Some("1"); + const NAY: Option<&str> = Some("0"); + const NOT_SET: Option<&str> = None; + + macro_rules! test_enabled { + (failure: $failure:ident, general: $general:ident => $result:expr) => {{ + assert_eq!(is_backtrace_enabled(|var| match var { + FAILURE_BACKTRACE => $failure.map(OsString::from), + GENERAL_BACKTRACE => $general.map(OsString::from), + _ => panic!() + }), $result); + }} + } + + #[test] + fn always_enabled_if_failure_is_set_to_yes() { + test_enabled!(failure: YEA, general: YEA => true); + test_enabled!(failure: YEA, general: NOT_SET => true); + test_enabled!(failure: YEA, general: NAY => true); + } + + #[test] + fn never_enabled_if_failure_is_set_to_no() { + test_enabled!(failure: NAY, general: YEA => false); + test_enabled!(failure: NAY, general: NOT_SET => false); + test_enabled!(failure: NAY, general: NAY => false); + } + + #[test] + fn follows_general_if_failure_is_not_set() { + test_enabled!(failure: NOT_SET, general: YEA => true); + test_enabled!(failure: NOT_SET, general: NOT_SET => false); + test_enabled!(failure: NOT_SET, general: NAY => false); + } +} diff --git a/failure/src/backtrace/mod.rs b/failure/src/backtrace/mod.rs new file mode 100644 index 000000000..58f0477b3 --- /dev/null +++ b/failure/src/backtrace/mod.rs @@ -0,0 +1,144 @@ +use core::fmt::{self, Debug, Display}; + +macro_rules! with_backtrace { ($($i:item)*) => ($(#[cfg(all(feature = "backtrace", feature = "std"))]$i)*) } +macro_rules! without_backtrace { ($($i:item)*) => ($(#[cfg(not(all(feature = "backtrace", feature = "std")))]$i)*) } + +without_backtrace! { + /// A `Backtrace`. + /// + /// This is an opaque wrapper around the backtrace provided by + /// libbacktrace. A variety of optimizations have been performed to avoid + /// unnecessary or ill-advised work: + /// + /// - If this crate is compiled in `no_std` compatible mode, `Backtrace` + /// is an empty struct, and will be completely compiled away. + /// - If this crate is run without the `RUST_BACKTRACE` environmental + /// variable enabled, the backtrace will not be generated at runtime. + /// - Even if a backtrace is generated, the most expensive part of + /// generating a backtrace is symbol resolution. This backtrace does not + /// perform symbol resolution until it is actually read (e.g. by + /// printing it). If the Backtrace is never used for anything, symbols + /// never get resolved. + /// + /// Even with these optimizations, including a backtrace in your failure + /// may not be appropriate to your use case. You are not required to put a + /// backtrace in a custom `Fail` type. + /// + /// > (We have detected that this crate was documented with no_std + /// > compatibility turned on. The version of this crate that has been + /// > documented here will never generate a backtrace.) + pub struct Backtrace { + _secret: (), + } + + impl Backtrace { + /// Constructs a new backtrace. 
This will only create a real backtrace + /// if the crate is compiled in std mode and the `RUST_BACKTRACE` + /// environmental variable is activated. + /// + /// > (We have detected that this crate was documented with no_std + /// > compatibility turned on. The version of this crate that has been + /// > documented here will never generate a backtrace.) + pub fn new() -> Backtrace { + Backtrace { _secret: () } + } + + #[cfg(feature = "std")] + pub(crate) fn none() -> Backtrace { + Backtrace { _secret: () } + } + + #[cfg(feature = "std")] + pub(crate) fn is_none(&self) -> bool { + true + } + } + + impl Default for Backtrace { + fn default() -> Backtrace { + Backtrace::new() + } + } + + impl Debug for Backtrace { + fn fmt(&self, _: &mut fmt::Formatter) -> fmt::Result { + Ok(()) + } + } + + impl Display for Backtrace { + fn fmt(&self, _: &mut fmt::Formatter) -> fmt::Result { + Ok(()) + } + } +} + +with_backtrace! { + extern crate backtrace; + + mod internal; + + use self::internal::InternalBacktrace; + + /// A `Backtrace`. + /// + /// This is an opaque wrapper around the backtrace provided by + /// libbacktrace. A variety of optimizations have been performed to avoid + /// unnecessary or ill-advised work: + /// + /// - If this crate is compiled in `no_std` compatible mode, `Backtrace` + /// is an empty struct, and will be completely compiled away. + /// - If this crate is run without the `RUST_BACKTRACE` environmental + /// variable enabled, the backtrace will not be generated at runtime. + /// - Even if a backtrace is generated, the most expensive part of + /// generating a backtrace is symbol resolution. This backtrace does not + /// perform symbol resolution until it is actually read (e.g. by + /// printing it). If the Backtrace is never used for anything, symbols + /// never get resolved. + /// + /// Even with these optimizations, including a backtrace in your failure + /// may not be appropriate to your use case. You are not required to put a + /// backtrace in a custom `Fail` type. + pub struct Backtrace { + internal: InternalBacktrace + } + + impl Backtrace { + /// Constructs a new backtrace. This will only create a real backtrace + /// if the crate is compiled in std mode and the `RUST_BACKTRACE` + /// environmental variable is activated. 
+ pub fn new() -> Backtrace { + Backtrace { internal: InternalBacktrace::new() } + } + + pub(crate) fn none() -> Backtrace { + Backtrace { internal: InternalBacktrace::none() } + } + + pub(crate) fn is_none(&self) -> bool { + self.internal.is_none() + } + } + + impl Default for Backtrace { + fn default() -> Backtrace { + Backtrace::new() + } + } + + impl Debug for Backtrace { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + if let Some(bt) = self.internal.as_backtrace() { + bt.fmt(f) + } else { Ok(()) } + } + } + + impl Display for Backtrace { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + if let Some(bt) = self.internal.as_backtrace() { + bt.fmt(f) + } else { Ok(()) } + } + } +} diff --git a/failure/src/box_std.rs b/failure/src/box_std.rs new file mode 100644 index 000000000..a58ae6666 --- /dev/null +++ b/failure/src/box_std.rs @@ -0,0 +1,19 @@ +use std::error::Error; +use std::fmt; +use Fail; + +pub struct BoxStd(pub Box); + +impl fmt::Display for BoxStd { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::Display::fmt(&self.0, f) + } +} + +impl fmt::Debug for BoxStd { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::Debug::fmt(&self.0, f) + } +} + +impl Fail for BoxStd {} diff --git a/failure/src/compat.rs b/failure/src/compat.rs new file mode 100644 index 000000000..9a47bdb24 --- /dev/null +++ b/failure/src/compat.rs @@ -0,0 +1,47 @@ +use core::fmt::{self, Display}; + +/// A compatibility wrapper around an error type from this crate. +/// +/// `Compat` implements `std::error::Error`, allowing the types from this +/// crate to be passed to interfaces that expect a type of that trait. +#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, Default)] +pub struct Compat { + pub(crate) error: E, +} + +impl Display for Compat { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + Display::fmt(&self.error, f) + } +} + +impl Compat { + /// Unwraps this into the inner error. + pub fn into_inner(self) -> E { + self.error + } + + /// Gets a reference to the inner error. + pub fn get_ref(&self) -> &E { + &self.error + } +} + +with_std! { + use std::fmt::Debug; + use std::error::Error as StdError; + + use Error; + + impl StdError for Compat { + fn description(&self) -> &'static str { + "An error has occurred." + } + } + + impl From for Box { + fn from(error: Error) -> Box { + Box::new(Compat { error }) + } + } +} diff --git a/failure/src/context.rs b/failure/src/context.rs new file mode 100644 index 000000000..3316b7693 --- /dev/null +++ b/failure/src/context.rs @@ -0,0 +1,176 @@ +use core::fmt::{self, Debug, Display}; + +use Fail; + +without_std! { + /// An error with context around it. + /// + /// The context is intended to be a human-readable, user-facing explanation for the + /// error that has occurred. The underlying error is not assumed to be end-user-relevant + /// information. + /// + /// The `Display` impl for `Context` only prints the human-readable context, while the + /// `Debug` impl also prints the underlying error. + pub struct Context { + context: D, + } + + impl Context { + /// Creates a new context without an underlying error message. + pub fn new(context: D) -> Context { + Context { context } + } + + /// Returns a reference to the context provided with this error. + pub fn get_context(&self) -> &D { + &self.context + } + + /// Maps `Context` to `Context` by applying a function to the contained context. 
+ pub fn map(self, op: F) -> Context + where F: FnOnce(D) -> T, + T: Display + Send + Sync + 'static + { + Context { + context: op(self.context), + } + } + + pub(crate) fn with_err(context: D, _: E) -> Context { + Context { context } + } + } + + impl Fail for Context { } + + impl Debug for Context { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}", self.context) + } + } + + impl Display for Context { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}", self.context) + } + } + + #[test] + fn test_map() { + let ctx = Context::new("a string").map(|s| format!("{} with some more stuff", s)); + assert_eq!(ctx.context, String::from("a string with some more stuff")); + } +} + +with_std! { + use {Error, Backtrace}; + + /// An error with context around it. + /// + /// The context is intended to be a human-readable, user-facing explanation for the + /// error that has occurred. The underlying error is not assumed to be end-user-relevant + /// information. + /// + /// The `Display` impl for `Context` only prints the human-readable context, while the + /// `Debug` impl also prints the underlying error. + pub struct Context { + context: D, + failure: Either, + } + + impl Context { + /// Creates a new context without an underlying error message. + pub fn new(context: D) -> Context { + let failure = Either::This(Backtrace::new()); + Context { context, failure } + } + + /// Returns a reference to the context provided with this error. + pub fn get_context(&self) -> &D { + &self.context + } + + /// Maps `Context` to `Context` by applying a function to the contained context. + pub fn map(self, op: F) -> Context + where F: FnOnce(D) -> T, + T: Display + Send + Sync + 'static + { + Context { + context: op(self.context), + failure: self.failure, + } + } + + pub(crate) fn with_err>(context: D, error: E) -> Context { + let failure = Either::That(error.into()); + Context { context, failure } + } + } + + impl Fail for Context { + fn cause(&self) -> Option<&Fail> { + self.failure.as_cause() + } + + fn backtrace(&self) -> Option<&Backtrace> { + Some(self.failure.backtrace()) + } + } + + impl Debug for Context { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{:?}\n\n{}", self.failure, self.context) + } + } + + impl Display for Context { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}", self.context) + } + } + + enum Either { + This(A), + That(B), + } + + impl Either { + fn backtrace(&self) -> &Backtrace { + match *self { + Either::This(ref backtrace) => backtrace, + Either::That(ref error) => error.backtrace(), + } + } + + fn as_cause(&self) -> Option<&Fail> { + match *self { + Either::This(_) => None, + Either::That(ref error) => Some(error.as_fail()) + } + } + } + + impl Debug for Either { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match *self { + Either::This(ref backtrace) => write!(f, "{:?}", backtrace), + Either::That(ref error) => write!(f, "{:?}", error), + } + } + } + + #[test] + fn test_map() { + let ctx = Context::new("a string").map(|s| format!("{} with some more stuff", s)); + assert_eq!(ctx.context, String::from("a string with some more stuff")); + } +} + +impl From for Context +where + D: Display + Send + Sync + 'static, +{ + fn from(display: D) -> Context { + Context::new(display) + } +} diff --git a/failure/src/error/error_impl.rs b/failure/src/error/error_impl.rs new file mode 100644 index 000000000..d1644caf0 --- /dev/null +++ b/failure/src/error/error_impl.rs @@ -0,0 +1,50 @@ +use 
core::any::TypeId; + +use Fail; +use backtrace::Backtrace; + +pub(crate) struct ErrorImpl { + inner: Box>, +} + +struct Inner { + backtrace: Backtrace, + pub(crate) failure: F, +} + +impl From for ErrorImpl { + fn from(failure: F) -> ErrorImpl { + let inner: Inner = { + let backtrace = if failure.backtrace().is_none() { + Backtrace::new() + } else { Backtrace::none() }; + Inner { failure, backtrace } + }; + ErrorImpl { inner: Box::new(inner) } + } +} + +impl ErrorImpl { + pub(crate) fn failure(&self) -> &Fail { + &self.inner.failure + } + + pub(crate) fn failure_mut(&mut self) -> &mut Fail { + &mut self.inner.failure + } + + pub(crate) fn backtrace(&self) -> &Backtrace { + &self.inner.backtrace + } + + pub(crate) fn downcast(self) -> Result { + if self.failure().__private_get_type_id__() == TypeId::of::() { + let ErrorImpl { inner } = self; + let casted = unsafe { Box::from_raw(Box::into_raw(inner) as *mut Inner) }; + let Inner { backtrace:_, failure } = *casted; + Ok(failure) + } else { + Err(self) + } + } +} diff --git a/failure/src/error/error_impl_small.rs b/failure/src/error/error_impl_small.rs new file mode 100644 index 000000000..6ff7c78ec --- /dev/null +++ b/failure/src/error/error_impl_small.rs @@ -0,0 +1,132 @@ +use std::heap::{Heap, Alloc, Layout}; + +use core::mem; +use core::ptr; + +use Fail; +use backtrace::Backtrace; + +pub(crate) struct ErrorImpl { + inner: &'static mut Inner, +} + +// Dynamically sized inner value +struct Inner { + backtrace: Backtrace, + vtable: *const VTable, + failure: FailData, +} + +unsafe impl Send for Inner { } +unsafe impl Sync for Inner { } + +extern { + type VTable; + type FailData; +} + +#[allow(dead_code)] +struct InnerRaw { + header: InnerHeader, + failure: F, +} + +#[allow(dead_code)] +struct InnerHeader { + backtrace: Backtrace, + vtable: *const VTable, +} + +struct TraitObject { + #[allow(dead_code)] + data: *const FailData, + vtable: *const VTable, +} + +impl From for ErrorImpl { + fn from(failure: F) -> ErrorImpl { + let backtrace = if failure.backtrace().is_none() { + Backtrace::new() + } else { + Backtrace::none() + }; + + unsafe { + let vtable = mem::transmute::<_, TraitObject>(&failure as &Fail).vtable; + + let ptr: *mut InnerRaw = match Heap.alloc(Layout::new::>()) { + Ok(p) => p as *mut InnerRaw, + Err(e) => Heap.oom(e), + }; + + // N.B. 
must use `ptr::write`, not `=`, to avoid dropping the contents of `*ptr` + ptr::write(ptr, InnerRaw { + header: InnerHeader { + backtrace, + vtable, + }, + failure, + }); + + let inner: &'static mut Inner = mem::transmute(ptr); + + ErrorImpl { inner } + } + } +} + +impl ErrorImpl { + pub(crate) fn failure(&self) -> &Fail { + unsafe { + mem::transmute::(TraitObject { + data: &self.inner.failure as *const FailData, + vtable: self.inner.vtable, + }) + } + } + + pub(crate) fn failure_mut(&mut self) -> &mut Fail { + unsafe { + mem::transmute::(TraitObject { + data: &mut self.inner.failure as *const FailData, + vtable: self.inner.vtable, + }) + } + } + + pub(crate) fn backtrace(&self) -> &Backtrace { + &self.inner.backtrace + } + + pub(crate) fn downcast(self) -> Result { + let ret: Option = self.failure().downcast_ref().map(|fail| { + unsafe { + // drop the backtrace + let _ = ptr::read(&self.inner.backtrace as *const Backtrace); + // read out the fail type + ptr::read(fail as *const T) + } + }); + match ret { + Some(ret) => { + // forget self (backtrace is dropped, failure is moved + mem::forget(self); + Ok(ret) + } + _ => Err(self) + } + } +} + + +#[cfg(test)] +mod test { + use std::mem::size_of; + + use super::ErrorImpl; + + #[test] + fn assert_is_one_word() { + assert_eq!(size_of::(), size_of::()); + } +} diff --git a/failure/src/error/mod.rs b/failure/src/error/mod.rs new file mode 100644 index 000000000..30e69cc31 --- /dev/null +++ b/failure/src/error/mod.rs @@ -0,0 +1,243 @@ +use core::fmt::{self, Display, Debug}; + +use {Causes, Fail}; +use backtrace::Backtrace; +use context::Context; +use compat::Compat; + +#[cfg(feature = "std")] +use box_std::BoxStd; + +#[cfg_attr(feature = "small-error", path = "./error_impl_small.rs")] +mod error_impl; +use self::error_impl::ErrorImpl; + +#[cfg(feature = "std")] +use std::error::Error as StdError; + + +/// The `Error` type, which can contain any failure. +/// +/// Functions which accumulate many kinds of errors should return this type. +/// All failures can be converted into it, so functions which catch those +/// errors can be tried with `?` inside of a function that returns this kind +/// of error. +/// +/// In addition to implementing `Debug` and `Display`, this type carries `Backtrace` +/// information, and can be downcast into the failure that underlies it for +/// more detailed inspection. +pub struct Error { + imp: ErrorImpl, +} + +impl From for Error { + fn from(failure: F) -> Error { + Error { + imp: ErrorImpl::from(failure) + } + } +} + +impl Error { + /// Creates an `Error` from `Box`. + /// + /// This method is useful for comparability with code, + /// which does not use the `Fail` trait. + /// + /// # Example + /// + /// ``` + /// use std::error::Error as StdError; + /// use failure::Error; + /// + /// fn app_fn() -> Result { + /// let x = library_fn().map_err(Error::from_boxed_compat)?; + /// Ok(x * 2) + /// } + /// + /// fn library_fn() -> Result> { + /// Ok(92) + /// } + /// ``` + #[cfg(feature = "std")] + pub fn from_boxed_compat(err: Box) -> Error { + Error::from(BoxStd(err)) + } + + /// Return a reference to the underlying failure that this `Error` + /// contains. + pub fn as_fail(&self) -> &Fail { + self.imp.failure() + } + + /// Returns a reference to the underlying cause of this `Error`. Unlike the + /// method on `Fail`, this does not return an `Option`. The `Error` type + /// always has an underlying failure. + /// + /// This method has been deprecated in favor of the [Error::as_fail] method, + /// which does the same thing. 
+ #[deprecated(since = "0.1.2", note = "please use 'as_fail()' method instead")] + pub fn cause(&self) -> &Fail { + self.as_fail() + } + + /// Gets a reference to the `Backtrace` for this `Error`. + /// + /// If the failure this wrapped carried a backtrace, that backtrace will + /// be returned. Otherwise, the backtrace will have been constructed at + /// the point that failure was cast into the `Error` type. + pub fn backtrace(&self) -> &Backtrace { + self.imp.failure().backtrace().unwrap_or(&self.imp.backtrace()) + } + + /// Provides context for this `Error`. + /// + /// This can provide additional information about this error, appropriate + /// to the semantics of the current layer. That is, if you have a + /// lower-level error, such as an IO error, you can provide additional context + /// about what that error means in the context of your function. This + /// gives users of this function more information about what has gone + /// wrong. + /// + /// This takes any type that implements `Display`, as well as + /// `Send`/`Sync`/`'static`. In practice, this means it can take a `String` + /// or a string literal, or a failure, or some other custom context-carrying + /// type. + pub fn context(self, context: D) -> Context { + Context::with_err(context, self) + } + + /// Wraps `Error` in a compatibility type. + /// + /// This type implements the `Error` trait from `std::error`. If you need + /// to pass failure's `Error` to an interface that takes any `Error`, you + /// can use this method to get a compatible type. + pub fn compat(self) -> Compat { + Compat { error: self } + } + + /// Attempts to downcast this `Error` to a particular `Fail` type. + /// + /// This downcasts by value, returning an owned `T` if the underlying + /// failure is of the type `T`. For this reason it returns a `Result` - in + /// the case that the underlying error is of a different type, the + /// original `Error` is returned. + pub fn downcast(self) -> Result { + self.imp.downcast().map_err(|imp| Error { imp }) + } + + /// Returns the "root cause" of this error - the last value in the + /// cause chain which does not return an underlying `cause`. + pub fn find_root_cause(&self) -> &Fail { + self.as_fail().find_root_cause() + } + + /// Returns a iterator over the causes of this error with the cause + /// of the fail as the first item and the `root_cause` as the final item. + /// + /// Use `iter_chain` to also include the fail of this error itself. + pub fn iter_causes(&self) -> Causes { + self.as_fail().iter_causes() + } + + /// Returns a iterator over all fails up the chain from the current + /// as the first item up to the `root_cause` as the final item. + /// + /// This means that the chain also includes the fail itself which + /// means that it does *not* start with `cause`. To skip the outermost + /// fail use `iter_causes` instead. + pub fn iter_chain(&self) -> Causes { + self.as_fail().iter_chain() + } + + /// Attempts to downcast this `Error` to a particular `Fail` type by + /// reference. + /// + /// If the underlying error is not of type `T`, this will return `None`. + pub fn downcast_ref(&self) -> Option<&T> { + self.imp.failure().downcast_ref() + } + + /// Attempts to downcast this `Error` to a particular `Fail` type by + /// mutable reference. + /// + /// If the underlying error is not of type `T`, this will return `None`. + pub fn downcast_mut(&mut self) -> Option<&mut T> { + self.imp.failure_mut().downcast_mut() + } + + /// Deprecated alias to `find_root_cause`. 
+ #[deprecated(since = "0.1.2", note = "please use the 'find_root_cause()' method instead")] + pub fn root_cause(&self) -> &Fail { + ::find_root_cause(self.as_fail()) + } + + /// Deprecated alias to `iter_causes`. + #[deprecated(since = "0.1.2", note = "please use the 'iter_chain()' method instead")] + pub fn causes(&self) -> Causes { + Causes { fail: Some(self.as_fail()) } + } +} + +impl Display for Error { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + Display::fmt(&self.imp.failure(), f) + } +} + +impl Debug for Error { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let backtrace = self.imp.backtrace(); + if backtrace.is_none() { + Debug::fmt(&self.imp.failure(), f) + } else { + write!(f, "{:?}\n\n{:?}", &self.imp.failure(), backtrace) + } + } +} + +impl AsRef for Error { + fn as_ref(&self) -> &Fail { + self.as_fail() + } +} + +#[cfg(test)] +mod test { + use std::io; + use super::Error; + + fn assert_just_data() { } + + #[test] + fn assert_error_is_just_data() { + assert_just_data::(); + } + + #[test] + fn methods_seem_to_work() { + let io_error: io::Error = io::Error::new(io::ErrorKind::NotFound, "test"); + let error: Error = io::Error::new(io::ErrorKind::NotFound, "test").into(); + assert!(error.downcast_ref::().is_some()); + let _: ::Backtrace = *error.backtrace(); + assert_eq!(format!("{:?}", io_error), format!("{:?}", error)); + assert_eq!(format!("{}", io_error), format!("{}", error)); + drop(error); + assert!(true); + } + + #[test] + fn downcast_can_be_used() { + let mut error: Error = io::Error::new(io::ErrorKind::NotFound, "test").into(); + { + let real_io_error_ref = error.downcast_ref::().unwrap(); + assert_eq!(real_io_error_ref.to_string(), "test"); + } + { + let real_io_error_mut = error.downcast_mut::().unwrap(); + assert_eq!(real_io_error_mut.to_string(), "test"); + } + let real_io_error = error.downcast::().unwrap(); + assert_eq!(real_io_error.to_string(), "test"); + } +} diff --git a/failure/src/error_message.rs b/failure/src/error_message.rs new file mode 100644 index 000000000..01ff1ae36 --- /dev/null +++ b/failure/src/error_message.rs @@ -0,0 +1,28 @@ +use core::fmt::{self, Display, Debug}; + +use Fail; +use Error; + +/// Constructs a `Fail` type from a string. +/// +/// This is a convenient way to turn a string into an error value that +/// can be passed around, if you do not want to create a new `Fail` type for +/// this use case. +pub fn err_msg(msg: D) -> Error { + Error::from(ErrorMessage { msg }) +} + +/// A `Fail` type that just contains an error message. You can construct +/// this from the `err_msg` function. +#[derive(Debug)] +struct ErrorMessage { + msg: D, +} + +impl Fail for ErrorMessage { } + +impl Display for ErrorMessage { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + Display::fmt(&self.msg, f) + } +} diff --git a/failure/src/lib.rs b/failure/src/lib.rs new file mode 100644 index 000000000..82c021fa6 --- /dev/null +++ b/failure/src/lib.rs @@ -0,0 +1,306 @@ +//! An experimental new error-handling library. Guide-style introduction +//! is available [here](https://boats.gitlab.io/failure/). +//! +//! The primary items exported by this library are: +//! +//! - `Fail`: a new trait for custom error types in Rust. +//! - `Error`: a wrapper around `Fail` types to make it easy to coalesce them +//! at higher levels. +//! +//! As a general rule, library authors should create their own error types and +//! implement `Fail` for them, whereas application authors should primarily +//! deal with the `Error` type. 
There are exceptions to this rule, though, in +//! both directions, and users should do whatever seems most appropriate to +//! their situation. +//! +//! ## Backtraces +//! +//! Backtraces are disabled by default. To turn backtraces on, enable +//! the `backtrace` Cargo feature and set the `RUST_BACKTRACE` environment +//! variable to a non-zero value (this also enables backtraces for panics). +//! Use the `RUST_FAILURE_BACKTRACE` variable to enable or disable backtraces +//! for `failure` specifically. +#![cfg_attr(not(feature = "std"), no_std)] +#![deny(missing_docs)] +#![deny(warnings)] +#![cfg_attr( + feature = "small-error", + feature(extern_types, allocator_api) +)] + +macro_rules! with_std { ($($i:item)*) => ($(#[cfg(feature = "std")]$i)*) } +macro_rules! without_std { ($($i:item)*) => ($(#[cfg(not(feature = "std"))]$i)*) } + +// Re-export libcore using an alias so that the macros can work without +// requiring `extern crate core` downstream. +#[doc(hidden)] +pub extern crate core as _core; + +mod as_fail; +mod backtrace; +#[cfg(feature = "std")] +mod box_std; +mod compat; +mod context; +mod result_ext; + +use core::any::TypeId; +use core::fmt::{Debug, Display}; + +pub use as_fail::AsFail; +pub use backtrace::Backtrace; +pub use compat::Compat; +pub use context::Context; +pub use result_ext::ResultExt; + +#[cfg(feature = "failure_derive")] +#[allow(unused_imports)] +#[macro_use] +extern crate failure_derive; + +#[cfg(feature = "failure_derive")] +#[doc(hidden)] +pub use failure_derive::*; + +with_std! { + extern crate core; + + mod sync_failure; + pub use sync_failure::SyncFailure; + + mod error; + + use std::error::Error as StdError; + + pub use error::Error; + + /// A common result with an `Error`. + pub type Fallible = Result; + + mod macros; + mod error_message; + pub use error_message::err_msg; +} + +/// The `Fail` trait. +/// +/// Implementors of this trait are called 'failures'. +/// +/// All error types should implement `Fail`, which provides a baseline of +/// functionality that they all share. +/// +/// `Fail` has no required methods, but it does require that your type +/// implement several other traits: +/// +/// - `Display`: to print a user-friendly representation of the error. +/// - `Debug`: to print a verbose, developer-focused representation of the +/// error. +/// - `Send + Sync`: Your error type is required to be safe to transfer to and +/// reference from another thread +/// +/// Additionally, all failures must be `'static`. This enables downcasting. +/// +/// `Fail` provides several methods with default implementations. Two of these +/// may be appropriate to override depending on the definition of your +/// particular failure: the `cause` and `backtrace` methods. +/// +/// The `failure_derive` crate provides a way to derive the `Fail` trait for +/// your type. Additionally, all types that already implement +/// `std::error::Error`, and are also `Send`, `Sync`, and `'static`, implement +/// `Fail` by a blanket impl. +pub trait Fail: Display + Debug + Send + Sync + 'static { + /// Returns a reference to the underlying cause of this failure, if it + /// is an error that wraps other errors. + /// + /// Returns `None` if this failure does not have another error as its + /// underlying cause. By default, this returns `None`. + /// + /// This should **never** return a reference to `self`, but only return + /// `Some` when it can return a **different** failure. Users may loop + /// over the cause chain, and returning `self` would result in an infinite + /// loop. 
+ fn cause(&self) -> Option<&Fail> { + None + } + + /// Returns a reference to the `Backtrace` carried by this failure, if it + /// carries one. + /// + /// Returns `None` if this failure does not carry a backtrace. By + /// default, this returns `None`. + fn backtrace(&self) -> Option<&Backtrace> { + None + } + + /// Provides context for this failure. + /// + /// This can provide additional information about this error, appropriate + /// to the semantics of the current layer. That is, if you have a + /// lower-level error, such as an IO error, you can provide additional context + /// about what that error means in the context of your function. This + /// gives users of this function more information about what has gone + /// wrong. + /// + /// This takes any type that implements `Display`, as well as + /// `Send`/`Sync`/`'static`. In practice, this means it can take a `String` + /// or a string literal, or another failure, or some other custom context-carrying + /// type. + fn context(self, context: D) -> Context + where + D: Display + Send + Sync + 'static, + Self: Sized, + { + Context::with_err(context, self) + } + + /// Wraps this failure in a compatibility wrapper that implements + /// `std::error::Error`. + /// + /// This allows failures to be compatible with older crates that + /// expect types that implement the `Error` trait from `std::error`. + fn compat(self) -> Compat + where + Self: Sized, + { + Compat { error: self } + } + + #[doc(hidden)] + #[deprecated( + since = "0.1.2", + note = "please use the 'iter_chain()' method instead" + )] + fn causes(&self) -> Causes + where + Self: Sized, + { + Causes { fail: Some(self) } + } + + #[doc(hidden)] + #[deprecated( + since = "0.1.2", + note = "please use the 'find_root_cause()' method instead" + )] + fn root_cause(&self) -> &Fail + where + Self: Sized, + { + find_root_cause(self) + } + + #[doc(hidden)] + fn __private_get_type_id__(&self) -> TypeId { + TypeId::of::() + } +} + +impl Fail { + /// Attempts to downcast this failure to a concrete type by reference. + /// + /// If the underlying error is not of type `T`, this will return `None`. + pub fn downcast_ref(&self) -> Option<&T> { + if self.__private_get_type_id__() == TypeId::of::() { + unsafe { Some(&*(self as *const Fail as *const T)) } + } else { + None + } + } + + /// Attempts to downcast this failure to a concrete type by mutable + /// reference. + /// + /// If the underlying error is not of type `T`, this will return `None`. + pub fn downcast_mut(&mut self) -> Option<&mut T> { + if self.__private_get_type_id__() == TypeId::of::() { + unsafe { Some(&mut *(self as *mut Fail as *mut T)) } + } else { + None + } + } + + /// Returns the "root cause" of this `Fail` - the last value in the + /// cause chain which does not return an underlying `cause`. + /// + /// If this type does not have a cause, `self` is returned, because + /// it is its own root cause. + /// + /// This is equivalent to iterating over `iter_causes()` and taking + /// the last item. + pub fn find_root_cause(&self) -> &Fail { + find_root_cause(self) + } + + /// Returns a iterator over the causes of this `Fail` with the cause + /// of this fail as the first item and the `root_cause` as the final item. + /// + /// Use `iter_chain` to also include the fail itself. + pub fn iter_causes(&self) -> Causes { + Causes { fail: self.cause() } + } + + /// Returns a iterator over all fails up the chain from the current + /// as the first item up to the `root_cause` as the final item. 
+ /// + /// This means that the chain also includes the fail itself which + /// means that it does *not* start with `cause`. To skip the outermost + /// fail use `iter_causes` instead. + pub fn iter_chain(&self) -> Causes { + Causes { fail: Some(self) } + } + + /// Deprecated alias to `find_root_cause`. + #[deprecated( + since = "0.1.2", + note = "please use the 'find_root_cause()' method instead" + )] + pub fn root_cause(&self) -> &Fail { + find_root_cause(self) + } + + /// Deprecated alias to `iter_chain`. + #[deprecated( + since = "0.1.2", + note = "please use the 'iter_chain()' method instead" + )] + pub fn causes(&self) -> Causes { + Causes { fail: Some(self) } + } +} + +#[cfg(feature = "std")] +impl Fail for E {} + +#[cfg(feature = "std")] +impl Fail for Box { + fn cause(&self) -> Option<&Fail> { + (**self).cause() + } + + fn backtrace(&self) -> Option<&Backtrace> { + (**self).backtrace() + } +} + +/// A iterator over the causes of a `Fail` +pub struct Causes<'f> { + fail: Option<&'f Fail>, +} + +impl<'f> Iterator for Causes<'f> { + type Item = &'f Fail; + fn next(&mut self) -> Option<&'f Fail> { + self.fail.map(|fail| { + self.fail = fail.cause(); + fail + }) + } +} + +fn find_root_cause(mut fail: &Fail) -> &Fail { + while let Some(cause) = fail.cause() { + fail = cause; + } + + fail +} diff --git a/failure/src/macros.rs b/failure/src/macros.rs new file mode 100644 index 000000000..b96e91a99 --- /dev/null +++ b/failure/src/macros.rs @@ -0,0 +1,51 @@ +/// Exits a function early with an `Error`. +/// +/// The `bail!` macro provides an easy way to exit a function. `bail!(X)` is +/// equivalent to writing: +/// +/// ```rust,ignore +/// return Err(format_err!(X)) +/// ``` +#[macro_export] +macro_rules! bail { + ($e:expr) => { + return Err($crate::err_msg($e)); + }; + ($fmt:expr, $($arg:tt)+) => { + return Err($crate::err_msg(format!($fmt, $($arg)+))); + }; +} + +/// Exits a function early with an `Error` if the condition is not satisfied. +/// +/// Similar to `assert!`, `ensure!` takes a condition and exits the function +/// if the condition fails. Unlike `assert!`, `ensure!` returns an `Error`, +/// it does not panic. +#[macro_export(local_inner_macros)] +macro_rules! ensure { + ($cond:expr, $e:expr) => { + if !($cond) { + bail!($e); + } + }; + ($cond:expr, $fmt:expr, $($arg:tt)+) => { + if !($cond) { + bail!($fmt, $($arg)+); + } + }; +} + +/// Constructs an `Error` using the standard string interpolation syntax. +/// +/// ```rust +/// #[macro_use] extern crate failure; +/// +/// fn main() { +/// let code = 101; +/// let err = format_err!("Error code: {}", code); +/// } +/// ``` +#[macro_export] +macro_rules! format_err { + ($($arg:tt)*) => { $crate::err_msg(format!($($arg)*)) } +} diff --git a/failure/src/result_ext.rs b/failure/src/result_ext.rs new file mode 100644 index 000000000..f4125cdd6 --- /dev/null +++ b/failure/src/result_ext.rs @@ -0,0 +1,203 @@ +use core::fmt::Display; + +use {Compat, Context, Fail}; + +/// Extension methods for `Result`. +pub trait ResultExt { + /// Wraps the error in `Compat` to make it compatible with older error + /// handling APIs that expect `std::error::Error`. 
+ /// + /// # Examples + /// + /// ``` + /// # fn main() { + /// # tests::run_test(); + /// # } + /// # + /// # #[cfg(not(all(feature = "std", feature = "derive")))] mod tests { pub fn run_test() { } } + /// # + /// # #[cfg(all(feature = "std", feature = "derive"))] mod tests { + /// use std::error::Error; + /// # use std::fmt; + /// # + /// # extern crate failure; + /// # + /// # use tests::failure::ResultExt; + /// # + /// # #[derive(Debug)] + /// struct CustomError; + /// + /// impl Error for CustomError { + /// fn description(&self) -> &str { + /// "My custom error message" + /// } + /// + /// fn cause(&self) -> Option<&Error> { + /// None + /// } + /// } + /// # + /// # impl fmt::Display for CustomError { + /// # fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + /// # write!(f, "{}", self.description()) + /// # } + /// # } + /// # + /// # pub fn run_test() { + /// + /// let x = (|| -> Result<(), failure::Error> { + /// Err(CustomError).compat()? + /// })().with_context(|e| { + /// format!("An error occured: {}", e) + /// }).unwrap_err(); + /// + /// let x = format!("{}", x); + /// + /// assert_eq!(x, "An error occured: My custom error message"); + /// # } + /// + /// # } + /// ``` + fn compat(self) -> Result>; + + /// Wraps the error type in a context type. + /// + /// # Examples + /// + /// ``` + /// # #[cfg(all(feature = "std", feature = "derive"))] + /// # #[macro_use] extern crate failure; + /// # + /// # #[cfg(all(feature = "std", feature = "derive"))] + /// # #[macro_use] extern crate failure_derive; + /// # + /// # fn main() { + /// # tests::run_test(); + /// # } + /// # + /// # #[cfg(not(all(feature = "std", feature = "derive")))] mod tests { pub fn run_test() { } } + /// # + /// # #[cfg(all(feature = "std", feature = "derive"))] mod tests { + /// # + /// # use failure::{self, ResultExt}; + /// # + /// #[derive(Fail, Debug)] + /// #[fail(display = "")] + /// struct CustomError; + /// # + /// # pub fn run_test() { + /// + /// let x = (|| -> Result<(), failure::Error> { + /// Err(CustomError)? + /// })().context(format!("An error occured")).unwrap_err(); + /// + /// let x = format!("{}", x); + /// + /// assert_eq!(x, "An error occured"); + /// # } + /// + /// # } + /// ``` + fn context(self, context: D) -> Result> + where + D: Display + Send + Sync + 'static; + + /// Wraps the error type in a context type generated by looking at the + /// error value. + /// + /// # Examples + /// + /// ``` + /// # #[cfg(all(feature = "std", feature = "derive"))] + /// # #[macro_use] extern crate failure; + /// # + /// # #[cfg(all(feature = "std", feature = "derive"))] + /// # #[macro_use] extern crate failure_derive; + /// # + /// # fn main() { + /// # tests::run_test(); + /// # } + /// # + /// # #[cfg(not(all(feature = "std", feature = "derive")))] mod tests { pub fn run_test() { } } + /// # + /// # #[cfg(all(feature = "std", feature = "derive"))] mod tests { + /// # + /// # use failure::{self, ResultExt}; + /// # + /// #[derive(Fail, Debug)] + /// #[fail(display = "My custom error message")] + /// struct CustomError; + /// # + /// # pub fn run_test() { + /// + /// let x = (|| -> Result<(), failure::Error> { + /// Err(CustomError)? 
+ /// })().with_context(|e| { + /// format!("An error occured: {}", e) + /// }).unwrap_err(); + /// + /// let x = format!("{}", x); + /// + /// assert_eq!(x, "An error occured: My custom error message"); + /// # } + /// + /// # } + /// ``` + fn with_context(self, f: F) -> Result> + where + F: FnOnce(&E) -> D, + D: Display + Send + Sync + 'static; +} + +impl ResultExt for Result +where + E: Fail, +{ + fn compat(self) -> Result> { + self.map_err(|err| err.compat()) + } + + fn context(self, context: D) -> Result> + where + D: Display + Send + Sync + 'static, + { + self.map_err(|failure| failure.context(context)) + } + + fn with_context(self, f: F) -> Result> + where + F: FnOnce(&E) -> D, + D: Display + Send + Sync + 'static, + { + self.map_err(|failure| { + let context = f(&failure); + failure.context(context) + }) + } +} + +with_std! { + use Error; + + impl ResultExt for Result { + fn compat(self) -> Result> { + self.map_err(|err| err.compat()) + } + + fn context(self, context: D) -> Result> where + D: Display + Send + Sync + 'static + { + self.map_err(|failure| failure.context(context)) + } + + fn with_context(self, f: F) -> Result> where + F: FnOnce(&Error) -> D, + D: Display + Send + Sync + 'static + { + self.map_err(|failure| { + let context = f(&failure); + failure.context(context) + }) + } + } +} diff --git a/failure/src/small_error.rs b/failure/src/small_error.rs new file mode 100644 index 000000000..09646e391 --- /dev/null +++ b/failure/src/small_error.rs @@ -0,0 +1,264 @@ +use core::fmt::{self, Display, Debug}; +use std::heap::{Heap, Alloc, Layout}; + +use core::mem; +use core::ptr; + +use {Causes, Fail}; +use backtrace::Backtrace; +use context::Context; +use compat::Compat; + +/// The `Error` type, which can contain any failure. +/// +/// Functions which accumulate many kinds of errors should return this type. +/// All failures can be converted into it, so functions which catch those +/// errors can be tried with `?` inside of a function that returns this kind +/// of error. +/// +/// In addition to implementing `Debug` and `Display`, this type carries `Backtrace` +/// information, and can be downcast into the failure that underlies it for +/// more detailed inspection. +pub struct Error { + inner: &'static mut Inner, +} + +// Dynamically sized inner value +struct Inner { + backtrace: Backtrace, + vtable: *const VTable, + failure: FailData, +} + +unsafe impl Send for Inner { } +unsafe impl Sync for Inner { } + +extern { + type VTable; + type FailData; +} + +struct InnerRaw { + header: InnerHeader, + failure: F, +} + +struct InnerHeader { + backtrace: Backtrace, + vtable: *const VTable, +} + +struct TraitObject { + #[allow(dead_code)] + data: *const FailData, + vtable: *const VTable, +} + +impl From for Error { + fn from(failure: F) -> Error { + let backtrace = if failure.backtrace().is_none() { + Backtrace::new() + } else { + Backtrace::none() + }; + + unsafe { + let vtable = mem::transmute::<_, TraitObject>(&failure as &Fail).vtable; + + let ptr: *mut InnerRaw = match Heap.alloc(Layout::new::>()) { + Ok(p) => p as *mut InnerRaw, + Err(e) => Heap.oom(e), + }; + + // N.B. 
must use `ptr::write`, not `=`, to avoid dropping the contents of `*ptr` + ptr::write(ptr, InnerRaw { + header: InnerHeader { + backtrace, + vtable, + }, + failure, + }); + + let inner: &'static mut Inner = mem::transmute(ptr); + + Error { inner } + } + } +} + +impl Inner { + fn failure(&self) -> &Fail { + unsafe { + mem::transmute::(TraitObject { + data: &self.failure as *const FailData, + vtable: self.vtable, + }) + } + } + + fn failure_mut(&mut self) -> &mut Fail { + unsafe { + mem::transmute::(TraitObject { + data: &mut self.failure as *const FailData, + vtable: self.vtable, + }) + } + } +} + +impl Error { + /// Returns a reference to the underlying cause of this `Error`. Unlike the + /// method on `Fail`, this does not return an `Option`. The `Error` type + /// always has an underlying failure. + pub fn cause(&self) -> &Fail { + self.inner.failure() + } + + /// Gets a reference to the `Backtrace` for this `Error`. + /// + /// If the failure this wrapped carried a backtrace, that backtrace will + /// be returned. Otherwise, the backtrace will have been constructed at + /// the point that failure was cast into the `Error` type. + pub fn backtrace(&self) -> &Backtrace { + self.inner.failure().backtrace().unwrap_or(&self.inner.backtrace) + } + + /// Provides context for this `Error`. + /// + /// This can provide additional information about this error, appropriate + /// to the semantics of the current layer. That is, if you have a + /// lower-level error, such as an IO error, you can provide additional context + /// about what that error means in the context of your function. This + /// gives users of this function more information about what has gone + /// wrong. + /// + /// This takes any type that implements `Display`, as well as + /// `Send`/`Sync`/`'static`. In practice, this means it can take a `String` + /// or a string literal, or a failure, or some other custom context-carrying + /// type. + pub fn context(self, context: D) -> Context { + Context::with_err(context, self) + } + + /// Wraps `Error` in a compatibility type. + /// + /// This type implements the `Error` trait from `std::error`. If you need + /// to pass failure's `Error` to an interface that takes any `Error`, you + /// can use this method to get a compatible type. + pub fn compat(self) -> Compat { + Compat { error: self } + } + + /// Attempts to downcast this `Error` to a particular `Fail` type. + /// + /// This downcasts by value, returning an owned `T` if the underlying + /// failure is of the type `T`. For this reason it returns a `Result` - in + /// the case that the underlying error is of a different type, the + /// original `Error` is returned. + pub fn downcast(self) -> Result { + let ret: Option = self.downcast_ref().map(|fail| { + unsafe { + // drop the backtrace + let _ = ptr::read(&self.inner.backtrace as *const Backtrace); + // read out the fail type + ptr::read(fail as *const T) + } + }); + match ret { + Some(ret) => { + // forget self (backtrace is dropped, failure is moved + mem::forget(self); + Ok(ret) + } + _ => Err(self) + } + } + + /// Returns the "root cause" of this error - the last value in the + /// cause chain which does not return an underlying `cause`. + pub fn root_cause(&self) -> &Fail { + ::find_root_cause(self.cause()) + } + + /// Attempts to downcast this `Error` to a particular `Fail` type by + /// reference. + /// + /// If the underlying error is not of type `T`, this will return `None`. 
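The `Error` methods documented here match failure's public catch-all error type, so typical call-site usage looks like the following sketch (the `read_config` function and file path are hypothetical, not vendored code):

```rust
extern crate failure;

use std::fs;
use std::io;
use failure::Error;

// Any `Fail` (including std errors via the blanket impl) converts into
// `Error` through `?`.
fn read_config() -> Result<String, Error> {
    let text = fs::read_to_string("/nonexistent/config.toml")?;
    Ok(text)
}

fn main() {
    if let Err(err) = read_config() {
        // `Display` prints the underlying failure; `{:?}` would also show the
        // captured backtrace when there is one.
        println!("error: {}", err);

        // Downcast by reference to inspect the concrete failure.
        if let Some(io_err) = err.downcast_ref::<io::Error>() {
            println!("io error kind: {:?}", io_err.kind());
        }
    }
}
```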
+ pub fn downcast_ref(&self) -> Option<&T> { + self.inner.failure().downcast_ref() + } + + /// Attempts to downcast this `Error` to a particular `Fail` type by + /// mutable reference. + /// + /// If the underlying error is not of type `T`, this will return `None`. + pub fn downcast_mut(&mut self) -> Option<&mut T> { + self.inner.failure_mut().downcast_mut() + } + + /// Returns a iterator over the causes of the `Error`, beginning with + /// the failure returned by the `cause` method and ending with the failure + /// returned by `root_cause`. + pub fn causes(&self) -> Causes { + Causes { fail: Some(self.cause()) } + } +} + +impl Display for Error { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + Display::fmt(self.inner.failure(), f) + } +} + +impl Debug for Error { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + if self.inner.backtrace.is_none() { + Debug::fmt(self.inner.failure(), f) + } else { + write!(f, "{:?}\n\n{:?}", self.inner.failure(), self.inner.backtrace) + } + } +} + +impl Drop for Error { + fn drop(&mut self) { + unsafe { + let layout = { + let header = Layout::new::(); + header.extend(Layout::for_value(self.inner.failure())).unwrap().0 + }; + Heap.dealloc(self.inner as *const _ as *const u8 as *mut u8, layout); + } + } +} + +#[cfg(test)] +mod test { + use std::mem::size_of; + use std::io; + + use super::Error; + + #[test] + fn assert_error_is_just_data() { + fn assert_just_data() { } + assert_just_data::(); + } + + #[test] + fn assert_is_one_word() { + assert_eq!(size_of::(), size_of::()); + } + + #[test] + fn methods_seem_to_work() { + let io_error: io::Error = io::Error::new(io::ErrorKind::NotFound, "test"); + let error: Error = io::Error::new(io::ErrorKind::NotFound, "test").into(); + assert!(error.downcast_ref::().is_some()); + let _: ::Backtrace = *error.backtrace(); + assert_eq!(format!("{:?}", io_error), format!("{:?}", error)); + assert_eq!(format!("{}", io_error), format!("{}", error)); + drop(error); + assert!(true); + } +} diff --git a/failure/src/sync_failure.rs b/failure/src/sync_failure.rs new file mode 100644 index 000000000..63e966cdd --- /dev/null +++ b/failure/src/sync_failure.rs @@ -0,0 +1,97 @@ +use Fail; +use std::error::Error; +use std::fmt::{self, Debug, Display}; +use std::sync::Mutex; + +/// Wrapper for `std` errors to make them `Sync`. +/// +/// This exists to coerce existing types that are only `Error + Send + +/// 'static` into a `Fail`-compatible representation, most notably for +/// types generated by `error-chain`. +/// +/// Unfortunately, this requires wrapping the error in a `Mutex`, which must +/// be locked for every `Debug`/`Display`. Therefore, this should be +/// something of a last resort in making the error work with `failure`. +/// +pub struct SyncFailure { + inner: Mutex, +} + +impl SyncFailure { + /// Wraps a non-`Sync` `Error` in order to make it implement `Fail`. + /// + /// # Example + /// + /// ```rust + /// extern crate failure; + /// + /// # use std::error::Error as StdError; + /// # use std::fmt::{self, Display}; + /// use failure::{Error, SyncFailure}; + /// use std::cell::RefCell; + /// + /// #[derive(Debug)] + /// struct NonSyncError { + /// // RefCells are non-Sync, so structs containing them will be + /// // non-Sync as well. + /// count: RefCell, + /// } + /// + /// // implement Display/Error for NonSyncError... + /// # + /// # impl StdError for NonSyncError { + /// # fn description(&self) -> &str { + /// # "oops!" 
+ /// # } + /// # } + /// # + /// # impl Display for NonSyncError { + /// # fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + /// # write!(f, "oops!") + /// # } + /// # } + /// + /// fn returns_error() -> Result<(), NonSyncError> { + /// // Do stuff + /// # Ok(()) + /// } + /// + /// fn my_function() -> Result<(), Error> { + /// // without the map_err here, we end up with a compile error + /// // complaining that NonSyncError doesn't implement Sync. + /// returns_error().map_err(SyncFailure::new)?; + /// // Do more stuff + /// # Ok(()) + /// } + /// # + /// # fn main() { + /// # my_function().unwrap(); + /// # } + /// ``` + /// + pub fn new(err: E) -> Self { + SyncFailure { + inner: Mutex::new(err), + } + } +} + +impl Display for SyncFailure +where + T: Display, +{ + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + self.inner.lock().unwrap().fmt(f) + } +} + +impl Debug for SyncFailure +where + T: Debug, +{ + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + (*self.inner.lock().unwrap()).fmt(f) + } +} + +impl Fail for SyncFailure {} diff --git a/failure/travis.sh b/failure/travis.sh new file mode 100644 index 000000000..6c621ca5d --- /dev/null +++ b/failure/travis.sh @@ -0,0 +1,39 @@ +#!/bin/bash + +DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" + +cargo_test() { + cargo test "$@" || { exit 101; } +} + +test_failure_in() { + cd $1 + cargo_test + cargo_test --no-default-features + cargo_test --features backtrace + test_derive_in "$1/failure_derive" + cd $DIR +} + +test_derive_in() { + cd $1 + cargo_test + cd $DIR +} + +test_nightly_features_in() { + cd $1 + #cargo_test --features small-error + cargo_test --all-features + cd $DIR +} + +main() { + test_failure_in "$DIR/failure-1.X" + test_failure_in "$DIR/failure-0.1.X" + if [ "${TRAVIS_RUST_VERSION}" = "nightly" ]; then + test_nightly_features_in "$DIR/failure-1.X" + fi +} + +main diff --git a/failure_derive/.cargo-checksum.json b/failure_derive/.cargo-checksum.json new file mode 100644 index 000000000..12be472cc --- /dev/null +++ b/failure_derive/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{},"package":"64c2d913fe8ed3b6c6518eedf4538255b989945c14c2a7d5cbff62a5e2120596"} \ No newline at end of file diff --git a/failure_derive/Cargo.toml b/failure_derive/Cargo.toml new file mode 100644 index 000000000..cba085a95 --- /dev/null +++ b/failure_derive/Cargo.toml @@ -0,0 +1,41 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. 
If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "failure_derive" +version = "0.1.3" +authors = ["Without Boats "] +build = "build.rs" +description = "derives for the failure crate" +homepage = "https://boats.gitlab.io/failure" +documentation = "https://boats.gitlab.io/failure" +license = "MIT OR Apache-2.0" +repository = "https://github.com/withoutboats/failure_derive" + +[lib] +proc-macro = true +[dependencies.proc-macro2] +version = "0.4.8" + +[dependencies.quote] +version = "0.6.3" + +[dependencies.syn] +version = "0.15.0" + +[dependencies.synstructure] +version = "0.10.0" +[dev-dependencies.failure] +version = "0.1.0" + +[features] +std = [] diff --git a/failure_derive/build.rs b/failure_derive/build.rs new file mode 100644 index 000000000..8f220458d --- /dev/null +++ b/failure_derive/build.rs @@ -0,0 +1,39 @@ +use std::env; +use std::process::Command; +use std::str; +use std::str::FromStr; + +fn main() { + if rustc_has_dyn_trait() { + println!("cargo:rustc-cfg=has_dyn_trait"); + } +} + +fn rustc_has_dyn_trait() -> bool { + let rustc = match env::var_os("RUSTC") { + Some(rustc) => rustc, + None => return false, + }; + + let output = match Command::new(rustc).arg("--version").output() { + Ok(output) => output, + Err(_) => return false, + }; + + let version = match str::from_utf8(&output.stdout) { + Ok(version) => version, + Err(_) => return false, + }; + + let mut pieces = version.split('.'); + if pieces.next() != Some("rustc 1") { + return true; + } + + let next = match pieces.next() { + Some(next) => next, + None => return false, + }; + + u32::from_str(next).unwrap_or(0) >= 27 +} diff --git a/failure_derive/src/lib.rs b/failure_derive/src/lib.rs new file mode 100644 index 000000000..4e9b22f8e --- /dev/null +++ b/failure_derive/src/lib.rs @@ -0,0 +1,200 @@ +extern crate proc_macro2; +extern crate syn; + +#[macro_use] +extern crate synstructure; +#[macro_use] +extern crate quote; + +use proc_macro2::TokenStream; + +decl_derive!([Fail, attributes(fail, cause)] => fail_derive); + +fn fail_derive(s: synstructure::Structure) -> TokenStream { + let make_dyn = if cfg!(has_dyn_trait) { + quote! { &dyn } + } else { + quote! { & } + }; + + let cause_body = s.each_variant(|v| { + if let Some(cause) = v.bindings().iter().find(is_cause) { + quote!(return Some(::failure::AsFail::as_fail(#cause))) + } else { + quote!(return None) + } + }); + + let bt_body = s.each_variant(|v| { + if let Some(bi) = v.bindings().iter().find(is_backtrace) { + quote!(return Some(#bi)) + } else { + quote!(return None) + } + }); + + let fail = s.unbound_impl( + quote!(::failure::Fail), + quote! { + #[allow(unreachable_code)] + fn cause(&self) -> ::failure::_core::option::Option<#make_dyn(::failure::Fail)> { + match *self { #cause_body } + None + } + + #[allow(unreachable_code)] + fn backtrace(&self) -> ::failure::_core::option::Option<&::failure::Backtrace> { + match *self { #bt_body } + None + } + }, + ); + let display = display_body(&s).map(|display_body| { + s.unbound_impl( + quote!(::failure::_core::fmt::Display), + quote! { + #[allow(unreachable_code)] + fn fmt(&self, f: &mut ::failure::_core::fmt::Formatter) -> ::failure::_core::fmt::Result { + match *self { #display_body } + write!(f, "An error has occurred.") + } + }, + ) + }); + + (quote! 
{ + #fail + #display + }).into() +} + +fn display_body(s: &synstructure::Structure) -> Option { + let mut msgs = s.variants().iter().map(|v| find_error_msg(&v.ast().attrs)); + if msgs.all(|msg| msg.is_none()) { + return None; + } + + Some(s.each_variant(|v| { + let msg = + find_error_msg(&v.ast().attrs).expect("All variants must have display attribute."); + if msg.nested.is_empty() { + panic!("Expected at least one argument to fail attribute"); + } + + let format_string = match msg.nested[0] { + syn::NestedMeta::Meta(syn::Meta::NameValue(ref nv)) if nv.ident == "display" => { + nv.lit.clone() + } + _ => { + panic!("Fail attribute must begin `display = \"\"` to control the Display message.") + } + }; + let args = msg.nested.iter().skip(1).map(|arg| match *arg { + syn::NestedMeta::Literal(syn::Lit::Int(ref i)) => { + let bi = &v.bindings()[i.value() as usize]; + quote!(#bi) + } + syn::NestedMeta::Meta(syn::Meta::Word(ref id)) => { + let id_s = id.to_string(); + if id_s.starts_with("_") { + if let Ok(idx) = id_s[1..].parse::() { + let bi = match v.bindings().get(idx) { + Some(bi) => bi, + None => { + panic!( + "display attempted to access field `{}` in `{}::{}` which \ + does not exist (there are {} field{})", + idx, + s.ast().ident, + v.ast().ident, + v.bindings().len(), + if v.bindings().len() != 1 { "s" } else { "" } + ); + } + }; + return quote!(#bi); + } + } + for bi in v.bindings() { + if bi.ast().ident.as_ref() == Some(id) { + return quote!(#bi); + } + } + panic!( + "Couldn't find field `{}` in `{}::{}`", + id, + s.ast().ident, + v.ast().ident + ); + } + _ => panic!("Invalid argument to fail attribute!"), + }); + + quote! { + return write!(f, #format_string #(, #args)*) + } + })) +} + +fn find_error_msg(attrs: &[syn::Attribute]) -> Option { + let mut error_msg = None; + for attr in attrs { + if let Some(meta) = attr.interpret_meta() { + if meta.name() == "fail" { + if error_msg.is_some() { + panic!("Cannot have two display attributes") + } else { + if let syn::Meta::List(list) = meta { + error_msg = Some(list); + } else { + panic!("fail attribute must take a list in parentheses") + } + } + } + } + } + error_msg +} + +fn is_backtrace(bi: &&synstructure::BindingInfo) -> bool { + match bi.ast().ty { + syn::Type::Path(syn::TypePath { + qself: None, + path: syn::Path { + segments: ref path, .. 
+ }, + }) => path.last().map_or(false, |s| { + s.value().ident == "Backtrace" && s.value().arguments.is_empty() + }), + _ => false, + } +} + +fn is_cause(bi: &&synstructure::BindingInfo) -> bool { + let mut found_cause = false; + for attr in &bi.ast().attrs { + if let Some(meta) = attr.interpret_meta() { + if meta.name() == "cause" { + if found_cause { + panic!("Cannot have two `cause` attributes"); + } + found_cause = true; + } + if meta.name() == "fail" { + if let syn::Meta::List(ref list) = meta { + if let Some(ref pair) = list.nested.first() { + if let &&syn::NestedMeta::Meta(syn::Meta::Word(ref word)) = pair.value() { + if word == "cause" { + if found_cause { + panic!("Cannot have two `cause` attributes"); + } + found_cause = true; + } + } + } + } + } + } + } + found_cause +} diff --git a/failure_derive/tests/backtrace.rs b/failure_derive/tests/backtrace.rs new file mode 100644 index 000000000..a30718411 --- /dev/null +++ b/failure_derive/tests/backtrace.rs @@ -0,0 +1,64 @@ +extern crate failure; +#[macro_use] +extern crate failure_derive; + +use failure::{Backtrace, Fail}; + +#[derive(Fail, Debug)] +#[fail(display = "Error code: {}", code)] +struct BacktraceError { + backtrace: Backtrace, + code: u32, +} + +#[test] +fn backtrace_error() { + let err = BacktraceError { + backtrace: Backtrace::new(), + code: 7, + }; + let s = format!("{}", err); + assert_eq!(&s[..], "Error code: 7"); + assert!(err.backtrace().is_some()); +} + +#[derive(Fail, Debug)] +#[fail(display = "An error has occurred.")] +struct BacktraceTupleError(Backtrace); + +#[test] +fn backtrace_tuple_error() { + let err = BacktraceTupleError(Backtrace::new()); + let s = format!("{}", err); + assert_eq!(&s[..], "An error has occurred."); + assert!(err.backtrace().is_some()); +} + +#[derive(Fail, Debug)] +enum BacktraceEnumError { + #[fail(display = "Error code: {}", code)] + StructVariant { code: i32, backtrace: Backtrace }, + #[fail(display = "Error: {}", _0)] + TupleVariant(&'static str, Backtrace), + #[fail(display = "An error has occurred.")] + UnitVariant, +} + +#[test] +fn backtrace_enum_error() { + let err = BacktraceEnumError::StructVariant { + code: 2, + backtrace: Backtrace::new(), + }; + let s = format!("{}", err); + assert_eq!(&s[..], "Error code: 2"); + assert!(err.backtrace().is_some()); + let err = BacktraceEnumError::TupleVariant("foobar", Backtrace::new()); + let s = format!("{}", err); + assert_eq!(&s[..], "Error: foobar"); + assert!(err.backtrace().is_some()); + let err = BacktraceEnumError::UnitVariant; + let s = format!("{}", err); + assert_eq!(&s[..], "An error has occurred."); + assert!(err.backtrace().is_none()); +} diff --git a/failure_derive/tests/custom_type_bounds.rs b/failure_derive/tests/custom_type_bounds.rs new file mode 100644 index 000000000..fd1c8b975 --- /dev/null +++ b/failure_derive/tests/custom_type_bounds.rs @@ -0,0 +1,45 @@ +#[macro_use] +extern crate failure; + +use std::fmt::Debug; + +use failure::Fail; + +#[derive(Debug, Fail)] +#[fail(display = "An error has occurred.")] +pub struct UnboundedGenericTupleError(T); + +#[test] +fn unbounded_generic_tuple_error() { + let s = format!("{}", UnboundedGenericTupleError(())); + assert_eq!(&s[..], "An error has occurred."); +} + +#[derive(Debug, Fail)] +#[fail(display = "An error has occurred: {}", _0)] +pub struct FailBoundsGenericTupleError(T); + +#[test] +fn fail_bounds_generic_tuple_error() { + let error = FailBoundsGenericTupleError(UnboundedGenericTupleError(())); + let s = format!("{}", error); + assert_eq!(&s[..], "An error has 
occurred: An error has occurred."); +} + +pub trait NoDisplay: 'static + Debug + Send + Sync {} + +impl NoDisplay for &'static str {} + +#[derive(Debug, Fail)] +#[fail(display = "An error has occurred: {:?}", _0)] +pub struct CustomBoundsGenericTupleError(T); + +#[test] +fn custom_bounds_generic_tuple_error() { + let error = CustomBoundsGenericTupleError("more details unavailable."); + let s = format!("{}", error); + assert_eq!( + &s[..], + "An error has occurred: \"more details unavailable.\"" + ); +} diff --git a/failure_derive/tests/no_derive_display.rs b/failure_derive/tests/no_derive_display.rs new file mode 100644 index 000000000..20eeb308c --- /dev/null +++ b/failure_derive/tests/no_derive_display.rs @@ -0,0 +1,21 @@ +extern crate failure; +#[macro_use] +extern crate failure_derive; + +use failure::Fail; +use std::fmt::{self, Display}; + +#[derive(Debug, Fail)] +struct Foo; + +impl Display for Foo { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.write_str("An error occurred.") + } +} + +#[test] +fn handwritten_display() { + assert!(Foo.cause().is_none()); + assert_eq!(&format!("{}", Foo)[..], "An error occurred."); +} diff --git a/failure_derive/tests/tests.rs b/failure_derive/tests/tests.rs new file mode 100644 index 000000000..4e73255ef --- /dev/null +++ b/failure_derive/tests/tests.rs @@ -0,0 +1,55 @@ +extern crate failure; +#[macro_use] +extern crate failure_derive; + +#[derive(Fail, Debug)] +#[fail(display = "An error has occurred.")] +struct UnitError; + +#[test] +fn unit_struct() { + let s = format!("{}", UnitError); + assert_eq!(&s[..], "An error has occurred."); +} + +#[derive(Fail, Debug)] +#[fail(display = "Error code: {}", code)] +struct RecordError { + code: u32, +} + +#[test] +fn record_struct() { + let s = format!("{}", RecordError { code: 0 }); + assert_eq!(&s[..], "Error code: 0"); +} + +#[derive(Fail, Debug)] +#[fail(display = "Error code: {}", _0)] +struct TupleError(i32); + +#[test] +fn tuple_struct() { + let s = format!("{}", TupleError(2)); + assert_eq!(&s[..], "Error code: 2"); +} + +#[derive(Fail, Debug)] +enum EnumError { + #[fail(display = "Error code: {}", code)] + StructVariant { code: i32 }, + #[fail(display = "Error: {}", _0)] + TupleVariant(&'static str), + #[fail(display = "An error has occurred.")] + UnitVariant, +} + +#[test] +fn enum_error() { + let s = format!("{}", EnumError::StructVariant { code: 2 }); + assert_eq!(&s[..], "Error code: 2"); + let s = format!("{}", EnumError::TupleVariant("foobar")); + assert_eq!(&s[..], "Error: foobar"); + let s = format!("{}", EnumError::UnitVariant); + assert_eq!(&s[..], "An error has occurred."); +} diff --git a/failure_derive/tests/wraps.rs b/failure_derive/tests/wraps.rs new file mode 100644 index 000000000..b33b6da95 --- /dev/null +++ b/failure_derive/tests/wraps.rs @@ -0,0 +1,99 @@ +extern crate failure; +#[macro_use] +extern crate failure_derive; + +use std::fmt; +use std::io; + +use failure::{Backtrace, Fail}; + +#[derive(Fail, Debug)] +#[fail(display = "An error has occurred: {}", inner)] +struct WrapError { + #[fail(cause)] + inner: io::Error, +} + +#[test] +fn wrap_error() { + let inner = io::Error::from_raw_os_error(98); + let err = WrapError { inner }; + assert!( + err.cause() + .and_then(|err| err.downcast_ref::()) + .is_some() + ); +} + +#[derive(Fail, Debug)] +#[fail(display = "An error has occurred: {}", _0)] +struct WrapTupleError(#[fail(cause)] io::Error); + +#[test] +fn wrap_tuple_error() { + let io_error = io::Error::from_raw_os_error(98); + let err: WrapTupleError = 
WrapTupleError(io_error); + assert!( + err.cause() + .and_then(|err| err.downcast_ref::()) + .is_some() + ); +} + +#[derive(Fail, Debug)] +#[fail(display = "An error has occurred: {}", inner)] +struct WrapBacktraceError { + #[fail(cause)] + inner: io::Error, + backtrace: Backtrace, +} + +#[test] +fn wrap_backtrace_error() { + let inner = io::Error::from_raw_os_error(98); + let err: WrapBacktraceError = WrapBacktraceError { + inner, + backtrace: Backtrace::new(), + }; + assert!( + err.cause() + .and_then(|err| err.downcast_ref::()) + .is_some() + ); + assert!(err.backtrace().is_some()); +} + +#[derive(Fail, Debug)] +enum WrapEnumError { + #[fail(display = "An error has occurred: {}", _0)] + Io(#[fail(cause)] io::Error), + #[fail(display = "An error has occurred: {}", inner)] + Fmt { + #[fail(cause)] + inner: fmt::Error, + backtrace: Backtrace, + }, +} + +#[test] +fn wrap_enum_error() { + let io_error = io::Error::from_raw_os_error(98); + let err: WrapEnumError = WrapEnumError::Io(io_error); + assert!( + err.cause() + .and_then(|err| err.downcast_ref::()) + .is_some() + ); + assert!(err.backtrace().is_none()); + let fmt_error = fmt::Error::default(); + let err: WrapEnumError = WrapEnumError::Fmt { + inner: fmt_error, + backtrace: Backtrace::new(), + }; + assert!( + err.cause() + .and_then(|err| err.downcast_ref::()) + .is_some() + ); + assert!(err.backtrace().is_some()); +} diff --git a/filetime/.cargo-checksum.json b/filetime/.cargo-checksum.json new file mode 100644 index 000000000..07f6f6abd --- /dev/null +++ b/filetime/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{},"package":"da4b9849e77b13195302c174324b5ba73eec9b236b24c221a61000daefb95c5f"} \ No newline at end of file diff --git a/filetime/Cargo.toml b/filetime/Cargo.toml new file mode 100644 index 000000000..0ed1f5381 --- /dev/null +++ b/filetime/Cargo.toml @@ -0,0 +1,31 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "filetime" +version = "0.2.1" +authors = ["Alex Crichton "] +description = "Platform-agnostic accessors of timestamps in File metadata\n" +homepage = "https://github.com/alexcrichton/filetime" +documentation = "https://docs.rs/filetime" +readme = "README.md" +keywords = ["timestamp", "mtime"] +license = "MIT/Apache-2.0" +repository = "https://github.com/alexcrichton/filetime" +[dependencies.cfg-if] +version = "0.1" +[dev-dependencies.tempdir] +version = "0.3" +[target."cfg(target_os = \"redox\")".dependencies.redox_syscall] +version = "0.1" +[target."cfg(unix)".dependencies.libc] +version = "0.2" diff --git a/filetime/LICENSE-APACHE b/filetime/LICENSE-APACHE new file mode 100644 index 000000000..16fe87b06 --- /dev/null +++ b/filetime/LICENSE-APACHE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
diff --git a/filetime/LICENSE-MIT b/filetime/LICENSE-MIT new file mode 100644 index 000000000..39e0ed660 --- /dev/null +++ b/filetime/LICENSE-MIT @@ -0,0 +1,25 @@ +Copyright (c) 2014 Alex Crichton + +Permission is hereby granted, free of charge, to any +person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without +limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software +is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions +of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/filetime/README.md b/filetime/README.md new file mode 100644 index 000000000..2d1d72236 --- /dev/null +++ b/filetime/README.md @@ -0,0 +1,34 @@ +# filetime + +[![Build Status](https://travis-ci.org/alexcrichton/filetime.svg?branch=master)](https://travis-ci.org/alexcrichton/filetime) +[![Build status](https://ci.appveyor.com/api/projects/status/9tatexq47i3ee13k?svg=true)](https://ci.appveyor.com/project/alexcrichton/filetime) + +[Documentation](https://docs.rs/filetime) + +A helper library for inspecting the various timestamps of files in Rust. This +library takes into account cross-platform differences in terms of where the +timestamps are located, what they are called, and how to convert them into a +platform-independent representation. + +```toml +# Cargo.toml +[dependencies] +filetime = "0.2" +``` + +# License + +This project is licensed under either of + + * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or + http://www.apache.org/licenses/LICENSE-2.0) + * MIT license ([LICENSE-MIT](LICENSE-MIT) or + http://opensource.org/licenses/MIT) + +at your option. + +### Contribution + +Unless you explicitly state otherwise, any contribution intentionally submitted +for inclusion in Serde by you, as defined in the Apache-2.0 license, shall be +dual licensed as above, without any additional terms or conditions. diff --git a/filetime/appveyor.yml b/filetime/appveyor.yml new file mode 100644 index 000000000..4a6104291 --- /dev/null +++ b/filetime/appveyor.yml @@ -0,0 +1,17 @@ +environment: + matrix: + - TARGET: x86_64-pc-windows-msvc + - TARGET: i686-pc-windows-msvc + - TARGET: i686-pc-windows-gnu +install: + - ps: Start-FileDownload "https://static.rust-lang.org/dist/rust-nightly-${env:TARGET}.exe" + - rust-nightly-%TARGET%.exe /VERYSILENT /NORESTART /DIR="C:\Program Files (x86)\Rust" + - SET PATH=%PATH%;C:\Program Files (x86)\Rust\bin + - SET PATH=%PATH%;C:\MinGW\bin + - rustc -V + - cargo -V + +build: false + +test_script: + - cargo test --verbose --target %TARGET% diff --git a/filetime/src/lib.rs b/filetime/src/lib.rs new file mode 100644 index 000000000..7b7846ee9 --- /dev/null +++ b/filetime/src/lib.rs @@ -0,0 +1,429 @@ +//! Timestamps for files in Rust +//! +//! 
This library provides platform-agnostic inspection of the various timestamps +//! present in the standard `fs::Metadata` structure. +//! +//! # Installation +//! +//! Add this to your `Cargo.toml`: +//! +//! ```toml +//! [dependencies] +//! filetime = "0.1" +//! ``` +//! +//! # Usage +//! +//! ```no_run +//! use std::fs; +//! use filetime::FileTime; +//! +//! let metadata = fs::metadata("foo.txt").unwrap(); +//! +//! let mtime = FileTime::from_last_modification_time(&metadata); +//! println!("{}", mtime); +//! +//! let atime = FileTime::from_last_access_time(&metadata); +//! assert!(mtime < atime); +//! +//! // Inspect values that can be interpreted across platforms +//! println!("{}", mtime.unix_seconds()); +//! println!("{}", mtime.nanoseconds()); +//! +//! // Print the platform-specific value of seconds +//! println!("{}", mtime.seconds()); +//! ``` + +#[macro_use] +extern crate cfg_if; + +use std::fmt; +use std::fs; +use std::io; +use std::path::Path; +use std::time::{Duration, SystemTime, UNIX_EPOCH}; + +cfg_if! { + if #[cfg(target_os = "redox")] { + #[path = "redox.rs"] + mod imp; + } else if #[cfg(windows)] { + #[path = "windows.rs"] + mod imp; + } else { + #[path = "unix/mod.rs"] + mod imp; + } +} + +/// A helper structure to represent a timestamp for a file. +/// +/// The actual value contined within is platform-specific and does not have the +/// same meaning across platforms, but comparisons and stringification can be +/// significant among the same platform. +#[derive(Eq, PartialEq, Ord, PartialOrd, Debug, Copy, Clone, Hash)] +pub struct FileTime { + seconds: i64, + nanos: u32, +} + +impl FileTime { + /// Creates a new timestamp representing a 0 time. + /// + /// Useful for creating the base of a cmp::max chain of times. + pub fn zero() -> FileTime { + FileTime { seconds: 0, nanos: 0 } + } + + /// Creates a new instance of `FileTime` with a number of seconds and + /// nanoseconds relative to the Unix epoch, 1970-01-01T00:00:00Z. + /// + /// Negative seconds represent times before the Unix epoch, and positive + /// values represent times after it. Nanos always count forwards in time. + /// + /// Note that this is typically the relative point that Unix time stamps are + /// from, but on Windows the native time stamp is relative to January 1, + /// 1601 so the return value of `seconds` from the returned `FileTime` + /// instance may not be the same as that passed in. + pub fn from_unix_time(seconds: i64, nanos: u32) -> FileTime { + FileTime { + seconds: seconds + if cfg!(windows) {11644473600} else {0}, + nanos, + } + } + + /// Creates a new timestamp from the last modification time listed in the + /// specified metadata. + /// + /// The returned value corresponds to the `mtime` field of `stat` on Unix + /// platforms and the `ftLastWriteTime` field on Windows platforms. + pub fn from_last_modification_time(meta: &fs::Metadata) -> FileTime { + imp::from_last_modification_time(meta) + } + + /// Creates a new timestamp from the last access time listed in the + /// specified metadata. + /// + /// The returned value corresponds to the `atime` field of `stat` on Unix + /// platforms and the `ftLastAccessTime` field on Windows platforms. + pub fn from_last_access_time(meta: &fs::Metadata) -> FileTime { + imp::from_last_access_time(meta) + } + + /// Creates a new timestamp from the creation time listed in the specified + /// metadata. + /// + /// The returned value corresponds to the `birthtime` field of `stat` on + /// Unix platforms and the `ftCreationTime` field on Windows platforms. 
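A short sketch of the `FileTime` accessors described above (the file path is hypothetical; any readable file works; not part of the vendored crate):

```rust
extern crate filetime;

use std::fs;
use filetime::FileTime;

fn main() {
    // Hypothetical path used only for illustration.
    let metadata = fs::metadata("Cargo.toml").unwrap();

    let mtime = FileTime::from_last_modification_time(&metadata);
    let atime = FileTime::from_last_access_time(&metadata);

    // `seconds()` is platform-specific (1601-based on Windows), while
    // `unix_seconds()` is normalized to the Unix epoch everywhere.
    println!("mtime: {} ({} unix seconds)", mtime, mtime.unix_seconds());

    // `FileTime` is `Ord`, so timestamps compare directly.
    println!("accessed after modified: {}", atime >= mtime);

    // Creation time is not recorded on every Unix platform.
    match FileTime::from_creation_time(&metadata) {
        Some(ctime) => println!("created: {}", ctime),
        None => println!("creation time not available on this platform"),
    }
}
```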
Note + /// that not all Unix platforms have this field available and may return + /// `None` in some circumstances. + pub fn from_creation_time(meta: &fs::Metadata) -> Option { + imp::from_creation_time(meta) + } + + /// Creates a new timestamp from the given SystemTime. + /// + /// Windows counts file times since 1601-01-01T00:00:00Z, and cannot + /// represent times before this, but it's possible to create a SystemTime + /// that does. This function will error if passed such a SystemTime. + pub fn from_system_time(time: SystemTime) -> FileTime { + let epoch = if cfg!(windows) { + UNIX_EPOCH - Duration::from_secs(11644473600) + } else { + UNIX_EPOCH + }; + + time.duration_since(epoch).map(|d| FileTime { + seconds: d.as_secs() as i64, + nanos: d.subsec_nanos() + }) + .unwrap_or_else(|e| { + let until_epoch = e.duration(); + let (sec_offset, nanos) = if until_epoch.subsec_nanos() == 0 { + (0, 0) + } else { + (-1, 1_000_000_000 - until_epoch.subsec_nanos()) + }; + + FileTime { + seconds: -1 * until_epoch.as_secs() as i64 + sec_offset, + nanos + } + }) + } + + /// Returns the whole number of seconds represented by this timestamp. + /// + /// Note that this value's meaning is **platform specific**. On Unix + /// platform time stamps are typically relative to January 1, 1970, but on + /// Windows platforms time stamps are relative to January 1, 1601. + pub fn seconds(&self) -> i64 { self.seconds } + + /// Returns the whole number of seconds represented by this timestamp, + /// relative to the Unix epoch start of January 1, 1970. + /// + /// Note that this does not return the same value as `seconds` for Windows + /// platforms as seconds are relative to a different date there. + pub fn unix_seconds(&self) -> i64 { + self.seconds - if cfg!(windows) {11644473600} else {0} + } + + /// Returns the nanosecond precision of this timestamp. + /// + /// The returned value is always less than one billion and represents a + /// portion of a second forward from the seconds returned by the `seconds` + /// method. + pub fn nanoseconds(&self) -> u32 { self.nanos } +} + +impl fmt::Display for FileTime { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}.{:09}s", self.seconds, self.nanos) + } +} + +/// Set the last access and modification times for a file on the filesystem. +/// +/// This function will set the `atime` and `mtime` metadata fields for a file +/// on the local filesystem, returning any error encountered. +pub fn set_file_times
<P>
(p: P, atime: FileTime, mtime: FileTime) + -> io::Result<()> + where P: AsRef<Path> +{ + imp::set_file_times(p.as_ref(), atime, mtime) +} + +/// Set the last access and modification times for a file on the filesystem. +/// This function does not follow symlinks. +/// +/// This function will set the `atime` and `mtime` metadata fields for a file +/// on the local filesystem, returning any error encountered. +pub fn set_symlink_file_times
<P>
(p: P, atime: FileTime, mtime: FileTime) + -> io::Result<()> + where P: AsRef +{ + imp::set_symlink_file_times(p.as_ref(), atime, mtime) +} + +#[cfg(test)] +mod tests { + extern crate tempdir; + + use std::io; + use std::path::Path; + use std::fs::{self, File}; + use self::tempdir::TempDir; + use super::{FileTime, set_file_times, set_symlink_file_times}; + use std::time::{Duration, UNIX_EPOCH}; + + #[cfg(unix)] + fn make_symlink(src: P, dst: Q) -> io::Result<()> + where P: AsRef, + Q: AsRef, + { + use std::os::unix::fs::symlink; + symlink(src, dst) + } + + #[cfg(windows)] + fn make_symlink(src: P, dst: Q) -> io::Result<()> + where P: AsRef, + Q: AsRef, + { + use std::os::windows::fs::symlink_file; + symlink_file(src, dst) + } + + #[test] + #[cfg(windows)] + fn from_unix_time_test() { + let time = FileTime::from_unix_time(10, 100_000_000); + assert_eq!(11644473610, time.seconds); + assert_eq!(100_000_000, time.nanos); + + let time = FileTime::from_unix_time(-10, 100_000_000); + assert_eq!(11644473590, time.seconds); + assert_eq!(100_000_000, time.nanos); + + let time = FileTime::from_unix_time(-12_000_000_000, 0); + assert_eq!(-355526400, time.seconds); + assert_eq!(0, time.nanos); + } + + #[test] + #[cfg(not(windows))] + fn from_unix_time_test() { + let time = FileTime::from_unix_time(10, 100_000_000); + assert_eq!(10, time.seconds); + assert_eq!(100_000_000, time.nanos); + + let time = FileTime::from_unix_time(-10, 100_000_000); + assert_eq!(-10, time.seconds); + assert_eq!(100_000_000, time.nanos); + + let time = FileTime::from_unix_time(-12_000_000_000, 0); + assert_eq!(-12_000_000_000, time.seconds); + assert_eq!(0, time.nanos); + } + + #[test] + #[cfg(windows)] + fn from_system_time_test() { + let time = FileTime::from_system_time(UNIX_EPOCH + Duration::from_secs(10)); + assert_eq!(11644473610, time.seconds); + assert_eq!(0, time.nanos); + + let time = FileTime::from_system_time(UNIX_EPOCH - Duration::from_secs(10)); + assert_eq!(11644473590, time.seconds); + assert_eq!(0, time.nanos); + + let time = FileTime::from_system_time(UNIX_EPOCH - Duration::from_millis(1100)); + assert_eq!(11644473598, time.seconds); + assert_eq!(900_000_000, time.nanos); + + let time = FileTime::from_system_time(UNIX_EPOCH - Duration::from_secs(12_000_000_000)); + assert_eq!(-355526400, time.seconds); + assert_eq!(0, time.nanos); + } + + #[test] + #[cfg(not(windows))] + fn from_system_time_test() { + let time = FileTime::from_system_time(UNIX_EPOCH + Duration::from_secs(10)); + assert_eq!(10, time.seconds); + assert_eq!(0, time.nanos); + + let time = FileTime::from_system_time(UNIX_EPOCH - Duration::from_secs(10)); + assert_eq!(-10, time.seconds); + assert_eq!(0, time.nanos); + + let time = FileTime::from_system_time(UNIX_EPOCH - Duration::from_millis(1100)); + assert_eq!(-2, time.seconds); + assert_eq!(900_000_000, time.nanos); + + let time = FileTime::from_system_time(UNIX_EPOCH - Duration::from_secs(12_000_000)); + assert_eq!(-12_000_000, time.seconds); + assert_eq!(0, time.nanos); + } + + #[test] + fn set_file_times_test() { + let td = TempDir::new("filetime").unwrap(); + let path = td.path().join("foo.txt"); + File::create(&path).unwrap(); + + let metadata = fs::metadata(&path).unwrap(); + let mtime = FileTime::from_last_modification_time(&metadata); + let atime = FileTime::from_last_access_time(&metadata); + set_file_times(&path, atime, mtime).unwrap(); + + let new_mtime = FileTime::from_unix_time(10_000, 0); + set_file_times(&path, atime, new_mtime).unwrap(); + + let metadata = 
fs::metadata(&path).unwrap(); + let mtime = FileTime::from_last_modification_time(&metadata); + assert_eq!(mtime, new_mtime); + + let spath = td.path().join("bar.txt"); + make_symlink(&path, &spath).unwrap(); + let metadata = fs::symlink_metadata(&spath).unwrap(); + let smtime = FileTime::from_last_modification_time(&metadata); + + set_file_times(&spath, atime, mtime).unwrap(); + + let metadata = fs::metadata(&path).unwrap(); + let cur_mtime = FileTime::from_last_modification_time(&metadata); + assert_eq!(mtime, cur_mtime); + + let metadata = fs::symlink_metadata(&spath).unwrap(); + let cur_mtime = FileTime::from_last_modification_time(&metadata); + assert_eq!(smtime, cur_mtime); + + set_file_times(&spath, atime, new_mtime).unwrap(); + + let metadata = fs::metadata(&path).unwrap(); + let mtime = FileTime::from_last_modification_time(&metadata); + assert_eq!(mtime, new_mtime); + + let metadata = fs::symlink_metadata(&spath).unwrap(); + let mtime = FileTime::from_last_modification_time(&metadata); + assert_eq!(mtime, smtime); + } + + #[test] + fn set_file_times_pre_unix_epoch_test() { + let td = TempDir::new("filetime").unwrap(); + let path = td.path().join("foo.txt"); + File::create(&path).unwrap(); + + let metadata = fs::metadata(&path).unwrap(); + let mtime = FileTime::from_last_modification_time(&metadata); + let atime = FileTime::from_last_access_time(&metadata); + set_file_times(&path, atime, mtime).unwrap(); + + let new_mtime = FileTime::from_unix_time(-10_000, 0); + set_file_times(&path, atime, new_mtime).unwrap(); + + let metadata = fs::metadata(&path).unwrap(); + let mtime = FileTime::from_last_modification_time(&metadata); + assert_eq!(mtime, new_mtime); + } + + #[test] + #[cfg(windows)] + fn set_file_times_pre_windows_epoch_test() { + let td = TempDir::new("filetime").unwrap(); + let path = td.path().join("foo.txt"); + File::create(&path).unwrap(); + + let metadata = fs::metadata(&path).unwrap(); + let mtime = FileTime::from_last_modification_time(&metadata); + let atime = FileTime::from_last_access_time(&metadata); + set_file_times(&path, atime, mtime).unwrap(); + + let new_mtime = FileTime::from_unix_time(-12_000_000_000, 0); + assert!(set_file_times(&path, atime, new_mtime).is_err()); + } + + #[test] + fn set_symlink_file_times_test() { + let td = TempDir::new("filetime").unwrap(); + let path = td.path().join("foo.txt"); + File::create(&path).unwrap(); + + let metadata = fs::metadata(&path).unwrap(); + let mtime = FileTime::from_last_modification_time(&metadata); + let atime = FileTime::from_last_access_time(&metadata); + set_symlink_file_times(&path, atime, mtime).unwrap(); + + let new_mtime = FileTime::from_unix_time(10_000, 0); + set_symlink_file_times(&path, atime, new_mtime).unwrap(); + + let metadata = fs::metadata(&path).unwrap(); + let mtime = FileTime::from_last_modification_time(&metadata); + assert_eq!(mtime, new_mtime); + + let spath = td.path().join("bar.txt"); + make_symlink(&path, &spath).unwrap(); + + let metadata = fs::symlink_metadata(&spath).unwrap(); + let smtime = FileTime::from_last_modification_time(&metadata); + let satime = FileTime::from_last_access_time(&metadata); + set_symlink_file_times(&spath, smtime, satime).unwrap(); + + let metadata = fs::metadata(&path).unwrap(); + let mtime = FileTime::from_last_modification_time(&metadata); + assert_eq!(mtime, new_mtime); + + let new_smtime = FileTime::from_unix_time(20_000, 0); + set_symlink_file_times(&spath, atime, new_smtime).unwrap(); + + let metadata = fs::metadata(&spath).unwrap(); + let mtime = 
FileTime::from_last_modification_time(&metadata); + assert_eq!(mtime, new_mtime); + + let metadata = fs::symlink_metadata(&spath).unwrap(); + let mtime = FileTime::from_last_modification_time(&metadata); + assert_eq!(mtime, new_smtime); + } +} diff --git a/filetime/src/redox.rs b/filetime/src/redox.rs new file mode 100644 index 000000000..1fb7f5613 --- /dev/null +++ b/filetime/src/redox.rs @@ -0,0 +1,57 @@ +extern crate syscall; + +use std::fs; +use std::io; +use std::os::unix::prelude::*; +use std::path::Path; + +use FileTime; + +pub fn set_file_times(p: &Path, atime: FileTime, mtime: FileTime) -> io::Result<()> { + let fd = syscall::open(p.as_os_str().as_bytes(), 0) + .map_err(|err| io::Error::from_raw_os_error(err.errno))?; + set_file_times_redox(fd, atime, mtime) +} + +pub fn set_symlink_file_times(p: &Path, atime: FileTime, mtime: FileTime) -> io::Result<()> { + let fd = syscall::open(p.as_os_str().as_bytes(), syscall::O_NOFOLLOW) + .map_err(|err| io::Error::from_raw_os_error(err.errno))?; + set_file_times_redox(fd, atime, mtime) +} + +fn set_file_times_redox(fd: usize, atime: FileTime, mtime: FileTime) -> io::Result<()> { + use self::syscall::TimeSpec; + + fn to_timespec(ft: &FileTime) -> TimeSpec { + TimeSpec { + tv_sec: ft.seconds(), + tv_nsec: ft.nanoseconds() as i32 + } + } + + let times = [to_timespec(&atime), to_timespec(&mtime)]; + let res = syscall::futimens(fd, ×); + let _ = syscall::close(fd); + match res { + Ok(_) => Ok(()), + Err(err) => Err(io::Error::from_raw_os_error(err.errno)) + } +} + +pub fn from_last_modification_time(meta: &fs::Metadata) -> FileTime { + FileTime { + seconds: meta.mtime(), + nanos: meta.mtime_nsec() as u32, + } +} + +pub fn from_last_access_time(meta: &fs::Metadata) -> FileTime { + FileTime { + seconds: meta.atime(), + nanos: meta.atime_nsec() as u32, + } +} + +pub fn from_creation_time(_meta: &fs::Metadata) -> Option { + None +} diff --git a/filetime/src/unix/linux.rs b/filetime/src/unix/linux.rs new file mode 100644 index 000000000..d378c9e72 --- /dev/null +++ b/filetime/src/unix/linux.rs @@ -0,0 +1,62 @@ +//! On Linux we try to use the more accurate `utimensat` syscall but this isn't +//! always available so we also fall back to `utimes` if we couldn't find +//! `utimensat` at runtime. + +use std::io; +use std::mem; +use std::path::Path; +use std::sync::atomic::{AtomicUsize, Ordering, ATOMIC_USIZE_INIT}; +use std::sync::atomic::{AtomicBool, ATOMIC_BOOL_INIT}; + +use FileTime; +use super::libc::{self, c_int, c_char, timespec}; + +pub fn set_file_times(p: &Path, atime: FileTime, mtime: FileTime) -> io::Result<()> { + set_times(p, atime, mtime, false) +} + +pub fn set_symlink_file_times(p: &Path, atime: FileTime, mtime: FileTime) -> io::Result<()> { + set_times(p, atime, mtime, true) +} + +fn set_times(p: &Path, atime: FileTime, mtime: FileTime, symlink: bool) -> io::Result<()> { + let flags = if symlink { libc::AT_SYMLINK_NOFOLLOW } else { 0 }; + let utimes = if symlink { libc::lutimes } else { libc::utimes }; + + // Try to use the more-accurate `utimensat` when possible. + static INVALID: AtomicBool = ATOMIC_BOOL_INIT; + if !INVALID.load(Ordering::SeqCst) { + if let Some(f) = utimensat() { + // Even when libc has `utimensat`, the kernel may return `ENOSYS`, + // and then we'll need to use the `utimes` fallback instead. 
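
The hunk around this point boils down to a "probe once, then cache the failure" pattern: try the nanosecond-precision `utimensat(2)`, and if the kernel reports `ENOSYS`, remember that and use the microsecond-precision `utimes(2)` from then on. A minimal, self-contained sketch of that control flow follows; it assumes it is acceptable to call `libc::utimensat` directly rather than resolving the symbol with `dlsym` as the vendored code below does, and the helper name and zeroed sub-second fields are illustrative only.

```rust
extern crate libc;

use std::ffi::CString;
use std::io;
use std::os::unix::ffi::OsStrExt;
use std::path::Path;
use std::sync::atomic::{AtomicBool, Ordering};

// Set once the running kernel has reported ENOSYS for utimensat.
static UTIMENSAT_MISSING: AtomicBool = AtomicBool::new(false);

fn set_times_with_fallback(p: &Path, atime_sec: i64, mtime_sec: i64) -> io::Result<()> {
    let path = CString::new(p.as_os_str().as_bytes())?;

    if !UTIMENSAT_MISSING.load(Ordering::SeqCst) {
        // Index 0 is the access time, index 1 the modification time.
        let times = [
            libc::timespec { tv_sec: atime_sec as libc::time_t, tv_nsec: 0 },
            libc::timespec { tv_sec: mtime_sec as libc::time_t, tv_nsec: 0 },
        ];
        let rc = unsafe {
            libc::utimensat(libc::AT_FDCWD, path.as_ptr(), times.as_ptr(), 0)
        };
        if rc == 0 {
            return Ok(());
        }
        let err = io::Error::last_os_error();
        if err.raw_os_error() != Some(libc::ENOSYS) {
            return Err(err);
        }
        // The kernel predates utimensat(2); don't try it again.
        UTIMENSAT_MISSING.store(true, Ordering::SeqCst);
    }

    // Microsecond-precision fallback, same [atime, mtime] ordering.
    let times = [
        libc::timeval { tv_sec: atime_sec as libc::time_t, tv_usec: 0 },
        libc::timeval { tv_sec: mtime_sec as libc::time_t, tv_usec: 0 },
    ];
    let rc = unsafe { libc::utimes(path.as_ptr(), times.as_ptr()) };
    if rc == 0 { Ok(()) } else { Err(io::Error::last_os_error()) }
}
```

The real implementation below goes further: it looks `utimensat` up with `dlsym` so the crate also works against a libc that lacks the symbol entirely, caching the resolved address in an `AtomicUsize`.
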
+ match super::utimensat(p, atime, mtime, f, flags) { + Err(ref e) if e.raw_os_error() == Some(libc::ENOSYS) => { + INVALID.store(true, Ordering::SeqCst); + } + valid => return valid, + } + } + } + + super::utimes(p, atime, mtime, utimes) +} + +fn utimensat() -> Option c_int> { + static ADDR: AtomicUsize = ATOMIC_USIZE_INIT; + unsafe { + match ADDR.load(Ordering::SeqCst) { + 0 => {} + 1 => return None, + n => return Some(mem::transmute(n)), + } + let name = b"utimensat\0"; + let sym = libc::dlsym(libc::RTLD_DEFAULT, name.as_ptr() as *const _); + let (val, ret) = if sym.is_null() { + (1, None) + } else { + (sym as usize, Some(mem::transmute(sym))) + }; + ADDR.store(val, Ordering::SeqCst); + return ret + } +} diff --git a/filetime/src/unix/mod.rs b/filetime/src/unix/mod.rs new file mode 100644 index 000000000..8cca81529 --- /dev/null +++ b/filetime/src/unix/mod.rs @@ -0,0 +1,125 @@ +extern crate libc; + +use std::ffi::CString; +use std::fs; +use std::io; +use std::os::unix::prelude::*; +use std::path::Path; + +use self::libc::{c_int, c_char, timeval, time_t, suseconds_t}; +use self::libc::{timespec}; + +use FileTime; + +cfg_if! { + if #[cfg(target_os = "linux")] { + mod linux; + pub use self::linux::*; + } else if #[cfg(any(target_os = "android", + target_os = "solaris", + target_os = "netbsd", + target_os = "openbsd"))] { + mod utimensat; + pub use self::utimensat::*; + } else { + mod utimes; + pub use self::utimes::*; + } +} + +#[allow(dead_code)] +fn utimes(p: &Path, + atime: FileTime, + mtime: FileTime, + utimes: unsafe extern fn(*const c_char, *const timeval) -> c_int) + -> io::Result<()> +{ + let times = [to_timeval(&atime), to_timeval(&mtime)]; + let p = try!(CString::new(p.as_os_str().as_bytes())); + return if unsafe { utimes(p.as_ptr() as *const _, times.as_ptr()) == 0 } { + Ok(()) + } else { + Err(io::Error::last_os_error()) + }; + + fn to_timeval(ft: &FileTime) -> timeval { + timeval { + tv_sec: ft.seconds() as time_t, + tv_usec: (ft.nanoseconds() / 1000) as suseconds_t, + } + } +} + +#[allow(dead_code)] +fn utimensat(p: &Path, + atime: FileTime, + mtime: FileTime, + f: unsafe extern fn(c_int, *const c_char, *const timespec, c_int) -> c_int, + flags: c_int) + -> io::Result<()> +{ + let times = [to_timespec(&atime), to_timespec(&mtime)]; + let p = try!(CString::new(p.as_os_str().as_bytes())); + let rc = unsafe { + f(libc::AT_FDCWD, p.as_ptr() as *const _, times.as_ptr(), flags) + }; + return if rc == 0 { + Ok(()) + } else { + Err(io::Error::last_os_error()) + }; + + fn to_timespec(ft: &FileTime) -> timespec { + timespec { + tv_sec: ft.seconds() as time_t, + tv_nsec: ft.nanoseconds() as _, + } + } +} + +pub fn from_last_modification_time(meta: &fs::Metadata) -> FileTime { + FileTime { + seconds: meta.mtime(), + nanos: meta.mtime_nsec() as u32, + } +} + +pub fn from_last_access_time(meta: &fs::Metadata) -> FileTime { + FileTime { + seconds: meta.atime(), + nanos: meta.atime_nsec() as u32, + } +} + +pub fn from_creation_time(meta: &fs::Metadata) -> Option { + macro_rules! birthtim { + ($(($e:expr, $i:ident)),*) => { + #[cfg(any($(target_os = $e),*))] + fn imp(meta: &fs::Metadata) -> Option { + $( + #[cfg(target_os = $e)] + use std::os::$i::fs::MetadataExt; + )* + Some(FileTime { + seconds: meta.st_birthtime(), + nanos: meta.st_birthtime_nsec() as u32, + }) + } + + #[cfg(all($(not(target_os = $e)),*))] + fn imp(_meta: &fs::Metadata) -> Option { + None + } + } + } + + birthtim! 
{ + ("bitrig", bitrig), + ("freebsd", freebsd), + ("ios", ios), + ("macos", macos), + ("openbsd", openbsd) + } + + imp(meta) +} diff --git a/filetime/src/unix/utimensat.rs b/filetime/src/unix/utimensat.rs new file mode 100644 index 000000000..47969fa0f --- /dev/null +++ b/filetime/src/unix/utimensat.rs @@ -0,0 +1,13 @@ +use std::path::Path; +use std::io; + +use FileTime; +use super::libc; + +pub fn set_file_times(p: &Path, atime: FileTime, mtime: FileTime) -> io::Result<()> { + super::utimensat(p, atime, mtime, libc::utimensat, 0) +} + +pub fn set_symlink_file_times(p: &Path, atime: FileTime, mtime: FileTime) -> io::Result<()> { + super::utimensat(p, atime, mtime, libc::utimensat, libc::AT_SYMLINK_NOFOLLOW) +} diff --git a/filetime/src/unix/utimes.rs b/filetime/src/unix/utimes.rs new file mode 100644 index 000000000..6ec12f76b --- /dev/null +++ b/filetime/src/unix/utimes.rs @@ -0,0 +1,13 @@ +use std::path::Path; +use std::io; + +use FileTime; +use super::libc; + +pub fn set_file_times(p: &Path, atime: FileTime, mtime: FileTime) -> io::Result<()> { + super::utimes(p, atime, mtime, libc::utimes) +} + +pub fn set_symlink_file_times(p: &Path, atime: FileTime, mtime: FileTime) -> io::Result<()> { + super::utimes(p, atime, mtime, libc::lutimes) +} diff --git a/filetime/src/windows.rs b/filetime/src/windows.rs new file mode 100644 index 000000000..16a979cfb --- /dev/null +++ b/filetime/src/windows.rs @@ -0,0 +1,87 @@ +#![allow(bad_style)] + +use std::fs::{self, OpenOptions}; +use std::io; +use std::os::windows::prelude::*; +use std::path::Path; + +use FileTime; + +pub fn set_file_times(p: &Path, atime: FileTime, mtime: FileTime) -> io::Result<()> { + set_file_times_w(p, atime, mtime, OpenOptions::new()) +} + +pub fn set_symlink_file_times(p: &Path, atime: FileTime, mtime: FileTime) -> io::Result<()> { + use std::os::windows::fs::OpenOptionsExt; + const FILE_FLAG_OPEN_REPARSE_POINT: u32 = 0x00200000; + + let mut options = OpenOptions::new(); + options.custom_flags(FILE_FLAG_OPEN_REPARSE_POINT); + set_file_times_w(p, atime, mtime, options) +} + +pub fn set_file_times_w(p: &Path, + atime: FileTime, + mtime: FileTime, + mut options: OpenOptions) -> io::Result<()> { + type BOOL = i32; + type HANDLE = *mut u8; + type DWORD = u32; + + #[repr(C)] + struct FILETIME { + dwLowDateTime: u32, + dwHighDateTime: u32, + } + + extern "system" { + fn SetFileTime(hFile: HANDLE, + lpCreationTime: *const FILETIME, + lpLastAccessTime: *const FILETIME, + lpLastWriteTime: *const FILETIME) -> BOOL; + } + + let f = try!(options.write(true).open(p)); + let atime = to_filetime(&atime); + let mtime = to_filetime(&mtime); + return unsafe { + let ret = SetFileTime(f.as_raw_handle() as *mut _, + 0 as *const _, + &atime, &mtime); + if ret != 0 { + Ok(()) + } else { + Err(io::Error::last_os_error()) + } + }; + + fn to_filetime(ft: &FileTime) -> FILETIME { + let intervals = ft.seconds() * (1_000_000_000 / 100) + + ((ft.nanoseconds() as i64) / 100); + FILETIME { + dwLowDateTime: intervals as DWORD, + dwHighDateTime: (intervals >> 32) as DWORD, + } + } +} + +pub fn from_last_modification_time(meta: &fs::Metadata) -> FileTime { + from_intervals(meta.last_write_time()) +} + +pub fn from_last_access_time(meta: &fs::Metadata) -> FileTime { + from_intervals(meta.last_access_time()) +} + +pub fn from_creation_time(meta: &fs::Metadata) -> Option { + Some(from_intervals(meta.creation_time())) +} + +fn from_intervals(ticks: u64) -> FileTime { + // Windows write times are in 100ns intervals, so do a little math to + // get it into the right 
representation. + FileTime { + seconds: (ticks / (1_000_000_000 / 100)) as i64, + nanos: ((ticks % (1_000_000_000 / 100)) * 100) as u32, + } +} diff --git a/flate2/.cargo-checksum.json b/flate2/.cargo-checksum.json new file mode 100644 index 000000000..818772416 --- /dev/null +++ b/flate2/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{},"package":"3b0c7353385f92079524de3b7116cf99d73947c08a7472774e9b3b04bff3b901"} \ No newline at end of file diff --git a/flate2/.pc/.quilt_patches b/flate2/.pc/.quilt_patches new file mode 100644 index 000000000..6857a8d44 --- /dev/null +++ b/flate2/.pc/.quilt_patches @@ -0,0 +1 @@ +debian/patches diff --git a/flate2/.pc/.quilt_series b/flate2/.pc/.quilt_series new file mode 100644 index 000000000..c2067066a --- /dev/null +++ b/flate2/.pc/.quilt_series @@ -0,0 +1 @@ +series diff --git a/flate2/.pc/.version b/flate2/.pc/.version new file mode 100644 index 000000000..0cfbf0888 --- /dev/null +++ b/flate2/.pc/.version @@ -0,0 +1 @@ +2 diff --git a/flate2/.pc/applied-patches b/flate2/.pc/applied-patches new file mode 100644 index 000000000..393adff1b --- /dev/null +++ b/flate2/.pc/applied-patches @@ -0,0 +1,2 @@ +disable-miniz.patch +drop-deps-for-cargo.patch diff --git a/flate2/.pc/disable-miniz.patch/.timestamp b/flate2/.pc/disable-miniz.patch/.timestamp new file mode 100644 index 000000000..e69de29bb diff --git a/flate2/.pc/disable-miniz.patch/Cargo.toml b/flate2/.pc/disable-miniz.patch/Cargo.toml new file mode 100644 index 000000000..d4dd0ebdf --- /dev/null +++ b/flate2/.pc/disable-miniz.patch/Cargo.toml @@ -0,0 +1,70 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "flate2" +version = "1.0.4" +authors = ["Alex Crichton "] +description = "Bindings to miniz.c for DEFLATE compression and decompression exposed as\nReader/Writer streams. 
Contains bindings for zlib, deflate, and gzip-based\nstreams.\n" +homepage = "https://github.com/alexcrichton/flate2-rs" +documentation = "https://docs.rs/flate2" +readme = "README.md" +keywords = ["gzip", "flate", "zlib", "encoding"] +categories = ["compression", "api-bindings"] +license = "MIT/Apache-2.0" +repository = "https://github.com/alexcrichton/flate2-rs" +[dependencies.futures] +version = "0.1" +optional = true + +[dependencies.libc] +version = "0.2" + +[dependencies.libz-sys] +version = "1.0" +optional = true + +[dependencies.miniz-sys] +version = "0.1.11" +optional = true + +[dependencies.miniz_oxide_c_api] +version = "0.2" +features = ["no_c_export"] +optional = true + +[dependencies.tokio-io] +version = "0.1" +optional = true +[dev-dependencies.quickcheck] +version = "0.7" +default-features = false + +[dev-dependencies.rand] +version = "0.5" + +[dev-dependencies.tokio-core] +version = "0.1" + +[features] +default = ["miniz-sys"] +rust_backend = ["miniz_oxide_c_api"] +tokio = ["tokio-io", "futures"] +zlib = ["libz-sys"] +[target."cfg(all(target_arch = \"wasm32\", not(target_os = \"emscripten\")))".dependencies.miniz_oxide_c_api] +version = "0.2" +features = ["no_c_export"] +[badges.appveyor] +repository = "alexcrichton/flate2-rs" + +[badges.travis-ci] +repository = "alexcrichton/flate2-rs" diff --git a/flate2/.pc/drop-deps-for-cargo.patch/.timestamp b/flate2/.pc/drop-deps-for-cargo.patch/.timestamp new file mode 100644 index 000000000..e69de29bb diff --git a/flate2/.pc/drop-deps-for-cargo.patch/Cargo.toml b/flate2/.pc/drop-deps-for-cargo.patch/Cargo.toml new file mode 100644 index 000000000..20a1270b0 --- /dev/null +++ b/flate2/.pc/drop-deps-for-cargo.patch/Cargo.toml @@ -0,0 +1,67 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "flate2" +version = "1.0.4" +authors = ["Alex Crichton "] +description = "Bindings to miniz.c for DEFLATE compression and decompression exposed as\nReader/Writer streams. 
Contains bindings for zlib, deflate, and gzip-based\nstreams.\n" +homepage = "https://github.com/alexcrichton/flate2-rs" +documentation = "https://docs.rs/flate2" +readme = "README.md" +keywords = ["gzip", "flate", "zlib", "encoding"] +categories = ["compression", "api-bindings"] +license = "MIT/Apache-2.0" +repository = "https://github.com/alexcrichton/flate2-rs" +[dependencies.futures] +version = "0.1" +optional = true + +[dependencies.libc] +version = "0.2" + +[dependencies.libz-sys] +version = "1.0" +optional = true + +[dependencies.miniz_oxide_c_api] +version = "0.2" +features = ["no_c_export"] +optional = true + +[dependencies.tokio-io] +version = "0.1" +optional = true +[dev-dependencies.quickcheck] +version = "0.7" +default-features = false + +[dev-dependencies.rand] +version = "0.5" + +[dev-dependencies.tokio-core] +version = "0.1" + +[features] +default = ["zlib"] +miniz-sys = ["zlib"] +rust_backend = ["miniz_oxide_c_api"] +tokio = ["tokio-io", "futures"] +zlib = ["libz-sys"] +[target."cfg(all(target_arch = \"wasm32\", not(target_os = \"emscripten\")))".dependencies.miniz_oxide_c_api] +version = "0.2" +features = ["no_c_export"] +[badges.appveyor] +repository = "alexcrichton/flate2-rs" + +[badges.travis-ci] +repository = "alexcrichton/flate2-rs" diff --git a/flate2/Cargo.toml b/flate2/Cargo.toml new file mode 100644 index 000000000..92672d496 --- /dev/null +++ b/flate2/Cargo.toml @@ -0,0 +1,59 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "flate2" +version = "1.0.4" +authors = ["Alex Crichton "] +description = "Bindings to miniz.c for DEFLATE compression and decompression exposed as\nReader/Writer streams. Contains bindings for zlib, deflate, and gzip-based\nstreams.\n" +homepage = "https://github.com/alexcrichton/flate2-rs" +documentation = "https://docs.rs/flate2" +readme = "README.md" +keywords = ["gzip", "flate", "zlib", "encoding"] +categories = ["compression", "api-bindings"] +license = "MIT/Apache-2.0" +repository = "https://github.com/alexcrichton/flate2-rs" +[dependencies.futures] +version = "0.1" +optional = true + +[dependencies.libc] +version = "0.2" + +[dependencies.libz-sys] +version = "1.0" +optional = true + +[dependencies.tokio-io] +version = "0.1" +optional = true +[dev-dependencies.quickcheck] +version = "0.7" +default-features = false + +[dev-dependencies.rand] +version = "0.5" + +[dev-dependencies.tokio-core] +version = "0.1" + +[features] +default = ["zlib"] +miniz-sys = ["zlib"] +rust_backend = ["zlib"] +tokio = ["tokio-io", "futures"] +zlib = ["libz-sys"] +[badges.appveyor] +repository = "alexcrichton/flate2-rs" + +[badges.travis-ci] +repository = "alexcrichton/flate2-rs" diff --git a/flate2/LICENSE-APACHE b/flate2/LICENSE-APACHE new file mode 100644 index 000000000..16fe87b06 --- /dev/null +++ b/flate2/LICENSE-APACHE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
diff --git a/flate2/LICENSE-MIT b/flate2/LICENSE-MIT new file mode 100644 index 000000000..39e0ed660 --- /dev/null +++ b/flate2/LICENSE-MIT @@ -0,0 +1,25 @@ +Copyright (c) 2014 Alex Crichton + +Permission is hereby granted, free of charge, to any +person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without +limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software +is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions +of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/flate2/README.md b/flate2/README.md new file mode 100644 index 000000000..8b403e53c --- /dev/null +++ b/flate2/README.md @@ -0,0 +1,90 @@ +# flate2 + +[![Build Status](https://travis-ci.org/alexcrichton/flate2-rs.svg?branch=master)](https://travis-ci.org/alexcrichton/flate2-rs) +[![Build status](https://ci.appveyor.com/api/projects/status/9tatexq47i3ee13k?svg=true)](https://ci.appveyor.com/project/alexcrichton/flate2-rs) +[![Crates.io](https://img.shields.io/crates/v/flate2.svg?maxAge=2592000)](https://crates.io/crates/flate2) +[![Documentation](https://docs.rs/flate2/badge.svg)](https://docs.rs/flate2) + +A streaming compression/decompression library for Rust. The underlying +implementation by default uses [`miniz`](https://github.com/richgel999/miniz) but +can optionally be configured to use the system zlib, if available. + +There is also an experimental rust backend that uses the +[`miniz_oxide`](https://crates.io/crates/miniz_oxide) crate. This avoids the need +to build C code, but hasn't gone through as much testing as the other backends. 
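
(Note for this vendored copy: the Debian patches listed in `flate2/.pc/applied-patches`, `disable-miniz.patch` and `drop-deps-for-cargo.patch`, remove the bundled miniz backends, so the patched `flate2/Cargo.toml` above sets `default = ["zlib"]` and maps both `miniz-sys` and `rust_backend` onto the system zlib. The miniz default described in this README therefore does not apply here.)
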
+ +Supported formats: + +* deflate +* zlib +* gzip + +```toml +# Cargo.toml +[dependencies] +flate2 = "1.0" +``` + +Using zlib instead of miniz: + +```toml +[dependencies] +flate2 = { version = "1.0", features = ["zlib"], default-features = false } +``` + +Using the rust back-end: + +```toml +[dependencies] +flate2 = { version = "1.0", features = ["rust_backend"], default-features = false } +``` + +## Compression + +```rust +extern crate flate2; + +use std::io::prelude::*; +use flate2::Compression; +use flate2::write::ZlibEncoder; + +fn main() { + let mut e = ZlibEncoder::new(Vec::new(), Compression::default()); + e.write_all(b"foo"); + e.write_all(b"bar"); + let compressed_bytes = e.finish(); +} +``` + +## Decompression + +```rust,no_run +extern crate flate2; + +use std::io::prelude::*; +use flate2::read::GzDecoder; + +fn main() { + let mut d = GzDecoder::new("...".as_bytes()); + let mut s = String::new(); + d.read_to_string(&mut s).unwrap(); + println!("{}", s); +} +``` + +# License + +This project is licensed under either of + + * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or + http://www.apache.org/licenses/LICENSE-2.0) + * MIT license ([LICENSE-MIT](LICENSE-MIT) or + http://opensource.org/licenses/MIT) + +at your option. + +### Contribution + +Unless you explicitly state otherwise, any contribution intentionally submitted +for inclusion in this project by you, as defined in the Apache-2.0 license, +shall be dual licensed as above, without any additional terms or conditions. diff --git a/flate2/appveyor.yml b/flate2/appveyor.yml new file mode 100644 index 000000000..0a140f7bc --- /dev/null +++ b/flate2/appveyor.yml @@ -0,0 +1,24 @@ +environment: + matrix: + - TARGET: x86_64-pc-windows-msvc + - TARGET: x86_64-pc-windows-gnu + - TARGET: i686-pc-windows-msvc + - TARGET: i686-pc-windows-gnu +install: + - ps: >- + If ($Env:TARGET -eq 'x86_64-pc-windows-gnu') { + $Env:PATH += ';C:\msys64\mingw64\bin' + } ElseIf ($Env:TARGET -eq 'i686-pc-windows-gnu') { + $Env:PATH += ';C:\MinGW\bin' + } + - ps: Start-FileDownload "https://static.rust-lang.org/dist/rust-nightly-${env:TARGET}.exe" + - rust-nightly-%TARGET%.exe /VERYSILENT /NORESTART /DIR="C:\Program Files (x86)\Rust" + - SET PATH=%PATH%;C:\Program Files (x86)\Rust\bin + - rustc -V + - cargo -V + +build: false + +test_script: + - cargo test --verbose --target %TARGET% + - cargo test --verbose --target %TARGET% --features tokio diff --git a/flate2/debian/patches/disable-miniz.patch b/flate2/debian/patches/disable-miniz.patch new file mode 100644 index 000000000..34a0ec51c --- /dev/null +++ b/flate2/debian/patches/disable-miniz.patch @@ -0,0 +1,25 @@ +Index: flate2/Cargo.toml +=================================================================== +--- flate2.orig/Cargo.toml ++++ flate2/Cargo.toml +@@ -33,10 +33,6 @@ version = "0.2" + version = "1.0" + optional = true + +-[dependencies.miniz-sys] +-version = "0.1.11" +-optional = true +- + [dependencies.miniz_oxide_c_api] + version = "0.2" + features = ["no_c_export"] +@@ -56,7 +52,8 @@ version = "0.5" + version = "0.1" + + [features] +-default = ["miniz-sys"] ++default = ["zlib"] ++miniz-sys = ["zlib"] + rust_backend = ["miniz_oxide_c_api"] + tokio = ["tokio-io", "futures"] + zlib = ["libz-sys"] diff --git a/flate2/debian/patches/drop-deps-for-cargo.patch b/flate2/debian/patches/drop-deps-for-cargo.patch new file mode 100644 index 000000000..06810a810 --- /dev/null +++ b/flate2/debian/patches/drop-deps-for-cargo.patch @@ -0,0 +1,30 @@ +Index: flate2/Cargo.toml 
+=================================================================== +--- flate2.orig/Cargo.toml ++++ flate2/Cargo.toml +@@ -33,11 +33,6 @@ version = "0.2" + version = "1.0" + optional = true + +-[dependencies.miniz_oxide_c_api] +-version = "0.2" +-features = ["no_c_export"] +-optional = true +- + [dependencies.tokio-io] + version = "0.1" + optional = true +@@ -54,12 +49,9 @@ version = "0.1" + [features] + default = ["zlib"] + miniz-sys = ["zlib"] +-rust_backend = ["miniz_oxide_c_api"] ++rust_backend = ["zlib"] + tokio = ["tokio-io", "futures"] + zlib = ["libz-sys"] +-[target."cfg(all(target_arch = \"wasm32\", not(target_os = \"emscripten\")))".dependencies.miniz_oxide_c_api] +-version = "0.2" +-features = ["no_c_export"] + [badges.appveyor] + repository = "alexcrichton/flate2-rs" + diff --git a/flate2/debian/patches/drop-deps-for-cargo.patch~ b/flate2/debian/patches/drop-deps-for-cargo.patch~ new file mode 100644 index 000000000..371640702 --- /dev/null +++ b/flate2/debian/patches/drop-deps-for-cargo.patch~ @@ -0,0 +1,28 @@ +--- a/Cargo.toml ++++ b/Cargo.toml +@@ -33,11 +33,6 @@ + version = "1.0" + optional = true + +-[dependencies.miniz_oxide_c_api] +-version = "0.2" +-features = ["no_c_export"] +-optional = true +- + [dependencies.tokio-io] + version = "0.1" + optional = true +@@ -54,12 +49,9 @@ + [features] + default = ["zlib"] + miniz-sys = ["zlib"] +-rust_backend = ["miniz_oxide_c_api"] ++rust_backend = ["zlib"] + tokio = ["tokio-io", "futures"] + zlib = ["libz-sys"] +-[target."cfg(all(target_arch = \"wasm32\", not(target_os = \"emscripten\")))".dependencies.miniz_oxide_c_api] +-version = "0.2" +-features = ["no_c_export"] + [badges.appveyor] + repository = "alexcrichton/flate2-rs" + diff --git a/flate2/debian/patches/series b/flate2/debian/patches/series new file mode 100644 index 000000000..393adff1b --- /dev/null +++ b/flate2/debian/patches/series @@ -0,0 +1,2 @@ +disable-miniz.patch +drop-deps-for-cargo.patch diff --git a/flate2/examples/deflatedecoder-bufread.rs b/flate2/examples/deflatedecoder-bufread.rs new file mode 100644 index 000000000..24bd5e668 --- /dev/null +++ b/flate2/examples/deflatedecoder-bufread.rs @@ -0,0 +1,24 @@ +extern crate flate2; + +use std::io::prelude::*; +use std::io; +use flate2::Compression; +use flate2::write::DeflateEncoder; +use flate2::bufread::DeflateDecoder; + +// Compress a sample string and print it after transformation. +fn main() { + let mut e = DeflateEncoder::new(Vec::new(), Compression::default()); + e.write_all(b"Hello World").unwrap(); + let bytes = e.finish().unwrap(); + println!("{}", decode_reader(bytes).unwrap()); +} + +// Uncompresses a Deflate Encoded vector of bytes and returns a string or error +// Here &[u8] implements Read +fn decode_reader(bytes: Vec) -> io::Result { + let mut deflater = DeflateDecoder::new(&bytes[..]); + let mut s = String::new(); + deflater.read_to_string(&mut s)?; + Ok(s) +} diff --git a/flate2/examples/deflatedecoder-read.rs b/flate2/examples/deflatedecoder-read.rs new file mode 100644 index 000000000..87ac7e987 --- /dev/null +++ b/flate2/examples/deflatedecoder-read.rs @@ -0,0 +1,24 @@ +extern crate flate2; + +use std::io::prelude::*; +use std::io; +use flate2::Compression; +use flate2::write::DeflateEncoder; +use flate2::read::DeflateDecoder; + +// Compress a sample string and print it after transformation. 
+fn main() { + let mut e = DeflateEncoder::new(Vec::new(), Compression::default()); + e.write_all(b"Hello World").unwrap(); + let bytes = e.finish().unwrap(); + println!("{}", decode_reader(bytes).unwrap()); +} + +// Uncompresses a Deflate Encoded vector of bytes and returns a string or error +// Here &[u8] implements Read +fn decode_reader(bytes: Vec) -> io::Result { + let mut deflater = DeflateDecoder::new(&bytes[..]); + let mut s = String::new(); + deflater.read_to_string(&mut s)?; + Ok(s) +} diff --git a/flate2/examples/deflatedecoder-write.rs b/flate2/examples/deflatedecoder-write.rs new file mode 100644 index 000000000..14226324c --- /dev/null +++ b/flate2/examples/deflatedecoder-write.rs @@ -0,0 +1,26 @@ +extern crate flate2; + +use std::io::prelude::*; +use std::io; +use flate2::Compression; +use flate2::write::DeflateEncoder; +use flate2::write::DeflateDecoder; + +// Compress a sample string and print it after transformation. +fn main() { + let mut e = DeflateEncoder::new(Vec::new(), Compression::default()); + e.write_all(b"Hello World").unwrap(); + let bytes = e.finish().unwrap(); + println!("{}", decode_reader(bytes).unwrap()); +} + +// Uncompresses a Deflate Encoded vector of bytes and returns a string or error +// Here Vec implements Write +fn decode_reader(bytes: Vec) -> io::Result { + let mut writer = Vec::new(); + let mut deflater = DeflateDecoder::new(writer); + deflater.write_all(&bytes[..])?; + writer = deflater.finish()?; + let return_string = String::from_utf8(writer).expect("String parsing error"); + Ok(return_string) +} diff --git a/flate2/examples/deflateencoder-bufread.rs b/flate2/examples/deflateencoder-bufread.rs new file mode 100644 index 000000000..abdebbb07 --- /dev/null +++ b/flate2/examples/deflateencoder-bufread.rs @@ -0,0 +1,24 @@ +extern crate flate2; + +use std::io::prelude::*; +use std::io; +use flate2::Compression; +use flate2::bufread::DeflateEncoder; +use std::fs::File; +use std::io::BufReader; + +// Open file and debug print the contents compressed with Deflate +fn main() { + println!("{:?}", open_hello_world().unwrap()); +} + +// Opens sample file, compresses the contents and returns a Vector or error +// File wrapped in a BufReader implements Bufread +fn open_hello_world() -> io::Result> { + let f = File::open("examples/hello_world.txt")?; + let b = BufReader::new(f); + let mut deflater = DeflateEncoder::new(b, Compression::fast()); + let mut buffer = Vec::new(); + deflater.read_to_end(&mut buffer)?; + Ok(buffer) +} diff --git a/flate2/examples/deflateencoder-read.rs b/flate2/examples/deflateencoder-read.rs new file mode 100644 index 000000000..7937af3fc --- /dev/null +++ b/flate2/examples/deflateencoder-read.rs @@ -0,0 +1,20 @@ +extern crate flate2; + +use std::io::prelude::*; +use std::io; +use flate2::Compression; +use flate2::read::DeflateEncoder; + +// Print the Deflate compressed representation of hello world +fn main() { + println!("{:?}", deflateencoder_read_hello_world().unwrap()); +} + +// Return a vector containing the Defalte compressed version of hello world +fn deflateencoder_read_hello_world() -> io::Result> { + let mut ret_vec = [0; 100]; + let c = b"hello world"; + let mut deflater = DeflateEncoder::new(&c[..], Compression::fast()); + let count = deflater.read(&mut ret_vec)?; + Ok(ret_vec[0..count].to_vec()) +} diff --git a/flate2/examples/deflateencoder-write.rs b/flate2/examples/deflateencoder-write.rs new file mode 100644 index 000000000..830429308 --- /dev/null +++ b/flate2/examples/deflateencoder-write.rs @@ -0,0 +1,12 @@ 
+extern crate flate2; + +use std::io::prelude::*; +use flate2::Compression; +use flate2::write::DeflateEncoder; + +// Vec implements Write to print the compressed bytes of sample string +fn main() { + let mut e = DeflateEncoder::new(Vec::new(), Compression::default()); + e.write_all(b"Hello World").unwrap(); + println!("{:?}", e.finish().unwrap()); +} diff --git a/flate2/examples/gzbuilder.rs b/flate2/examples/gzbuilder.rs new file mode 100644 index 000000000..4b3c9c597 --- /dev/null +++ b/flate2/examples/gzbuilder.rs @@ -0,0 +1,24 @@ +extern crate flate2; + +use std::io::prelude::*; +use std::io; +use std::fs::File; +use flate2::GzBuilder; +use flate2::Compression; + +// Open file and debug print the contents compressed with gzip +fn main() { + sample_builder().unwrap(); +} + +// GzBuilder opens a file and writes a sample string using Builder pattern +fn sample_builder() -> Result<(), io::Error> { + let f = File::create("examples/hello_world.gz")?; + let mut gz = GzBuilder::new() + .filename("hello_world.txt") + .comment("test file, please delete") + .write(f, Compression::default()); + gz.write_all(b"hello world")?; + gz.finish()?; + Ok(()) +} diff --git a/flate2/examples/gzdecoder-bufread.rs b/flate2/examples/gzdecoder-bufread.rs new file mode 100644 index 000000000..268792425 --- /dev/null +++ b/flate2/examples/gzdecoder-bufread.rs @@ -0,0 +1,24 @@ +extern crate flate2; + +use std::io::prelude::*; +use std::io; +use flate2::Compression; +use flate2::write::GzEncoder; +use flate2::bufread::GzDecoder; + +// Compress a sample string and print it after transformation. +fn main() { + let mut e = GzEncoder::new(Vec::new(), Compression::default()); + e.write_all(b"Hello World").unwrap(); + let bytes = e.finish().unwrap(); + println!("{}", decode_reader(bytes).unwrap()); +} + +// Uncompresses a Gz Encoded vector of bytes and returns a string or error +// Here &[u8] implements BufRead +fn decode_reader(bytes: Vec) -> io::Result { + let mut gz = GzDecoder::new(&bytes[..]); + let mut s = String::new(); + gz.read_to_string(&mut s)?; + Ok(s) +} diff --git a/flate2/examples/gzdecoder-read.rs b/flate2/examples/gzdecoder-read.rs new file mode 100644 index 000000000..1ab0ecb6c --- /dev/null +++ b/flate2/examples/gzdecoder-read.rs @@ -0,0 +1,24 @@ +extern crate flate2; + +use std::io::prelude::*; +use std::io; +use flate2::Compression; +use flate2::write::GzEncoder; +use flate2::read::GzDecoder; + +// Compress a sample string and print it after transformation. +fn main() { + let mut e = GzEncoder::new(Vec::new(), Compression::default()); + e.write_all(b"Hello World").unwrap(); + let bytes = e.finish().unwrap(); + println!("{}", decode_reader(bytes).unwrap()); +} + +// Uncompresses a Gz Encoded vector of bytes and returns a string or error +// Here &[u8] implements Read +fn decode_reader(bytes: Vec) -> io::Result { + let mut gz = GzDecoder::new(&bytes[..]); + let mut s = String::new(); + gz.read_to_string(&mut s)?; + Ok(s) +} diff --git a/flate2/examples/gzdecoder-write.rs b/flate2/examples/gzdecoder-write.rs new file mode 100644 index 000000000..7f9237ebc --- /dev/null +++ b/flate2/examples/gzdecoder-write.rs @@ -0,0 +1,26 @@ +extern crate flate2; + +use std::io::prelude::*; +use std::io; +use flate2::Compression; +use flate2::write::{GzEncoder, GzDecoder}; + +// Compress a sample string and print it after transformation. 
+fn main() { + let mut e = GzEncoder::new(Vec::new(), Compression::default()); + e.write_all(b"Hello World").unwrap(); + let bytes = e.finish().unwrap(); + println!("{}", decode_writer(bytes).unwrap()); +} + +// Uncompresses a Gz Encoded vector of bytes and returns a string or error +// Here &[u8] implements Read +fn decode_writer(bytes: Vec) -> io::Result { + let mut writer = Vec::new(); + let mut decoder = GzDecoder::new(writer); + decoder.write_all(&bytes[..])?; + decoder.try_finish()?; + writer = decoder.finish()?; + let return_string = String::from_utf8(writer).expect("String parsing error"); + Ok(return_string) +} diff --git a/flate2/examples/gzencoder-bufread.rs b/flate2/examples/gzencoder-bufread.rs new file mode 100644 index 000000000..3d57cd1d5 --- /dev/null +++ b/flate2/examples/gzencoder-bufread.rs @@ -0,0 +1,24 @@ +extern crate flate2; + +use std::io::prelude::*; +use std::io; +use flate2::Compression; +use flate2::bufread::GzEncoder; +use std::fs::File; +use std::io::BufReader; + +// Open file and debug print the contents compressed with gzip +fn main() { + println!("{:?}", open_hello_world().unwrap()); +} + +// Opens sample file, compresses the contents and returns a Vector or error +// File wrapped in a BufReader implements Bufread +fn open_hello_world() -> io::Result> { + let f = File::open("examples/hello_world.txt")?; + let b = BufReader::new(f); + let mut gz = GzEncoder::new(b, Compression::fast()); + let mut buffer = Vec::new(); + gz.read_to_end(&mut buffer)?; + Ok(buffer) +} diff --git a/flate2/examples/gzencoder-read.rs b/flate2/examples/gzencoder-read.rs new file mode 100644 index 000000000..673296520 --- /dev/null +++ b/flate2/examples/gzencoder-read.rs @@ -0,0 +1,20 @@ +extern crate flate2; + +use std::io::prelude::*; +use std::io; +use flate2::Compression; +use flate2::read::GzEncoder; + +// Print the GZ compressed representation of hello world +fn main() { + println!("{:?}", gzencoder_read_hello_world().unwrap()); +} + +// Return a vector containing the GZ compressed version of hello world +fn gzencoder_read_hello_world() -> io::Result> { + let mut ret_vec = [0; 100]; + let c = b"hello world"; + let mut z = GzEncoder::new(&c[..], Compression::fast()); + let count = z.read(&mut ret_vec)?; + Ok(ret_vec[0..count].to_vec()) +} diff --git a/flate2/examples/gzencoder-write.rs b/flate2/examples/gzencoder-write.rs new file mode 100644 index 000000000..0bbe67d01 --- /dev/null +++ b/flate2/examples/gzencoder-write.rs @@ -0,0 +1,12 @@ +extern crate flate2; + +use std::io::prelude::*; +use flate2::Compression; +use flate2::write::GzEncoder; + +// Vec implements Write to print the compressed bytes of sample string +fn main() { + let mut e = GzEncoder::new(Vec::new(), Compression::default()); + e.write_all(b"Hello World").unwrap(); + println!("{:?}", e.finish().unwrap()); +} diff --git a/flate2/examples/gzmultidecoder-bufread.rs b/flate2/examples/gzmultidecoder-bufread.rs new file mode 100644 index 000000000..037bbd9c3 --- /dev/null +++ b/flate2/examples/gzmultidecoder-bufread.rs @@ -0,0 +1,24 @@ +extern crate flate2; + +use std::io::prelude::*; +use std::io; +use flate2::Compression; +use flate2::write::GzEncoder; +use flate2::bufread::MultiGzDecoder; + +// Compress a sample string and print it after transformation. 
+fn main() { + let mut e = GzEncoder::new(Vec::new(), Compression::default()); + e.write_all(b"Hello World").unwrap(); + let bytes = e.finish().unwrap(); + println!("{}", decode_reader(bytes).unwrap()); +} + +// Uncompresses a Gz Encoded vector of bytes and returns a string or error +// Here &[u8] implements BufRead +fn decode_reader(bytes: Vec) -> io::Result { + let mut gz = MultiGzDecoder::new(&bytes[..]); + let mut s = String::new(); + gz.read_to_string(&mut s)?; + Ok(s) +} diff --git a/flate2/examples/gzmultidecoder-read.rs b/flate2/examples/gzmultidecoder-read.rs new file mode 100644 index 000000000..792331f28 --- /dev/null +++ b/flate2/examples/gzmultidecoder-read.rs @@ -0,0 +1,24 @@ +extern crate flate2; + +use std::io::prelude::*; +use std::io; +use flate2::Compression; +use flate2::write::GzEncoder; +use flate2::read::MultiGzDecoder; + +// Compress a sample string and print it after transformation. +fn main() { + let mut e = GzEncoder::new(Vec::new(), Compression::default()); + e.write_all(b"Hello World").unwrap(); + let bytes = e.finish().unwrap(); + println!("{}", decode_reader(bytes).unwrap()); +} + +// Uncompresses a Gz Encoded vector of bytes and returns a string or error +// Here &[u8] implements Read +fn decode_reader(bytes: Vec) -> io::Result { + let mut gz = MultiGzDecoder::new(&bytes[..]); + let mut s = String::new(); + gz.read_to_string(&mut s)?; + Ok(s) +} diff --git a/flate2/examples/hello_world.txt b/flate2/examples/hello_world.txt new file mode 100644 index 000000000..557db03de --- /dev/null +++ b/flate2/examples/hello_world.txt @@ -0,0 +1 @@ +Hello World diff --git a/flate2/examples/zlibdecoder-bufread.rs b/flate2/examples/zlibdecoder-bufread.rs new file mode 100644 index 000000000..96fc84d37 --- /dev/null +++ b/flate2/examples/zlibdecoder-bufread.rs @@ -0,0 +1,24 @@ +extern crate flate2; + +use std::io::prelude::*; +use std::io; +use flate2::Compression; +use flate2::write::ZlibEncoder; +use flate2::bufread::ZlibDecoder; + +// Compress a sample string and print it after transformation. +fn main() { + let mut e = ZlibEncoder::new(Vec::new(), Compression::default()); + e.write_all(b"Hello World").unwrap(); + let bytes = e.finish().unwrap(); + println!("{}", decode_bufreader(bytes).unwrap()); +} + +// Uncompresses a Zlib Encoded vector of bytes and returns a string or error +// Here &[u8] implements BufRead +fn decode_bufreader(bytes: Vec) -> io::Result { + let mut z = ZlibDecoder::new(&bytes[..]); + let mut s = String::new(); + z.read_to_string(&mut s)?; + Ok(s) +} diff --git a/flate2/examples/zlibdecoder-read.rs b/flate2/examples/zlibdecoder-read.rs new file mode 100644 index 000000000..106ae8f7e --- /dev/null +++ b/flate2/examples/zlibdecoder-read.rs @@ -0,0 +1,24 @@ +extern crate flate2; + +use std::io::prelude::*; +use std::io; +use flate2::Compression; +use flate2::write::ZlibEncoder; +use flate2::read::ZlibDecoder; + +// Compress a sample string and print it after transformation. 
+fn main() { + let mut e = ZlibEncoder::new(Vec::new(), Compression::default()); + e.write_all(b"Hello World").unwrap(); + let bytes = e.finish().unwrap(); + println!("{}", decode_reader(bytes).unwrap()); +} + +// Uncompresses a Zlib Encoded vector of bytes and returns a string or error +// Here &[u8] implements Read +fn decode_reader(bytes: Vec) -> io::Result { + let mut z = ZlibDecoder::new(&bytes[..]); + let mut s = String::new(); + z.read_to_string(&mut s)?; + Ok(s) +} diff --git a/flate2/examples/zlibdecoder-write.rs b/flate2/examples/zlibdecoder-write.rs new file mode 100644 index 000000000..24aa60f7c --- /dev/null +++ b/flate2/examples/zlibdecoder-write.rs @@ -0,0 +1,26 @@ +extern crate flate2; + +use std::io::prelude::*; +use std::io; +use flate2::Compression; +use flate2::write::ZlibEncoder; +use flate2::write::ZlibDecoder; + +// Compress a sample string and print it after transformation. +fn main() { + let mut e = ZlibEncoder::new(Vec::new(), Compression::default()); + e.write_all(b"Hello World").unwrap(); + let bytes = e.finish().unwrap(); + println!("{}", decode_reader(bytes).unwrap()); +} + +// Uncompresses a Zlib Encoded vector of bytes and returns a string or error +// Here Vec implements Write +fn decode_reader(bytes: Vec) -> io::Result { + let mut writer = Vec::new(); + let mut z = ZlibDecoder::new(writer); + z.write_all(&bytes[..])?; + writer = z.finish()?; + let return_string = String::from_utf8(writer).expect("String parsing error"); + Ok(return_string) +} diff --git a/flate2/examples/zlibencoder-bufread.rs b/flate2/examples/zlibencoder-bufread.rs new file mode 100644 index 000000000..8b8d3d084 --- /dev/null +++ b/flate2/examples/zlibencoder-bufread.rs @@ -0,0 +1,24 @@ +extern crate flate2; + +use std::io::prelude::*; +use std::io; +use flate2::Compression; +use flate2::bufread::ZlibEncoder; +use std::fs::File; +use std::io::BufReader; + +// Open file and debug print the contents compressed with zlib +fn main() { + println!("{:?}", open_hello_world().unwrap()); +} + +// Opens sample file, compresses the contents and returns a Vector or error +// File wrapped in a BufReader implements Bufread +fn open_hello_world() -> io::Result> { + let f = File::open("examples/hello_world.txt")?; + let b = BufReader::new(f); + let mut z = ZlibEncoder::new(b, Compression::fast()); + let mut buffer = Vec::new(); + z.read_to_end(&mut buffer)?; + Ok(buffer) +} diff --git a/flate2/examples/zlibencoder-read.rs b/flate2/examples/zlibencoder-read.rs new file mode 100644 index 000000000..a7cd3410f --- /dev/null +++ b/flate2/examples/zlibencoder-read.rs @@ -0,0 +1,21 @@ +extern crate flate2; + +use std::io::prelude::*; +use flate2::Compression; +use flate2::read::ZlibEncoder; +use std::fs::File; + +// Open file and debug print the compressed contents +fn main() { + println!("{:?}", open_hello_world().unwrap()); +} + +// Opens sample file, compresses the contents and returns a Vector or error +// File implements Read +fn open_hello_world() -> std::io::Result> { + let f = File::open("examples/hello_world.txt")?; + let mut z = ZlibEncoder::new(f, Compression::fast()); + let mut buffer = [0; 50]; + let byte_count = z.read(&mut buffer)?; + Ok(buffer[0..byte_count].to_vec()) +} diff --git a/flate2/examples/zlibencoder-write.rs b/flate2/examples/zlibencoder-write.rs new file mode 100644 index 000000000..35f704132 --- /dev/null +++ b/flate2/examples/zlibencoder-write.rs @@ -0,0 +1,12 @@ +extern crate flate2; + +use std::io::prelude::*; +use flate2::Compression; +use flate2::write::ZlibEncoder; + +// Vec 
implements Write to print the compressed bytes of sample string +fn main() { + let mut e = ZlibEncoder::new(Vec::new(), Compression::default()); + e.write_all(b"Hello World").unwrap(); + println!("{:?}", e.finish().unwrap()); +} diff --git a/flate2/src/bufreader.rs b/flate2/src/bufreader.rs new file mode 100644 index 000000000..9aa6a3ae9 --- /dev/null +++ b/flate2/src/bufreader.rs @@ -0,0 +1,104 @@ +// Copyright 2013 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use std::cmp; +use std::io; +use std::io::prelude::*; +use std::mem; + +pub struct BufReader { + inner: R, + buf: Box<[u8]>, + pos: usize, + cap: usize, +} + +impl ::std::fmt::Debug for BufReader +where + R: ::std::fmt::Debug, +{ + fn fmt(&self, fmt: &mut ::std::fmt::Formatter) -> Result<(), ::std::fmt::Error> { + fmt.debug_struct("BufReader") + .field("reader", &self.inner) + .field( + "buffer", + &format_args!("{}/{}", self.cap - self.pos, self.buf.len()), + ) + .finish() + } +} + +impl BufReader { + pub fn new(inner: R) -> BufReader { + BufReader::with_buf(vec![0; 32 * 1024], inner) + } + + pub fn with_buf(buf: Vec, inner: R) -> BufReader { + BufReader { + inner: inner, + buf: buf.into_boxed_slice(), + pos: 0, + cap: 0, + } + } +} + +impl BufReader { + pub fn get_ref(&self) -> &R { + &self.inner + } + + pub fn get_mut(&mut self) -> &mut R { + &mut self.inner + } + + pub fn into_inner(self) -> R { + self.inner + } + + pub fn reset(&mut self, inner: R) -> R { + self.pos = 0; + self.cap = 0; + mem::replace(&mut self.inner, inner) + } +} + +impl Read for BufReader { + fn read(&mut self, buf: &mut [u8]) -> io::Result { + // If we don't have any buffered data and we're doing a massive read + // (larger than our internal buffer), bypass our internal buffer + // entirely. + if self.pos == self.cap && buf.len() >= self.buf.len() { + return self.inner.read(buf); + } + let nread = { + let mut rem = self.fill_buf()?; + rem.read(buf)? + }; + self.consume(nread); + Ok(nread) + } +} + +impl BufRead for BufReader { + fn fill_buf(&mut self) -> io::Result<&[u8]> { + // If we've reached the end of our internal buffer then we need to fetch + // some more data from the underlying reader. + if self.pos == self.cap { + self.cap = self.inner.read(&mut self.buf)?; + self.pos = 0; + } + Ok(&self.buf[self.pos..self.cap]) + } + + fn consume(&mut self, amt: usize) { + self.pos = cmp::min(self.pos + amt, self.cap); + } +} diff --git a/flate2/src/crc.rs b/flate2/src/crc.rs new file mode 100644 index 000000000..0d621a921 --- /dev/null +++ b/flate2/src/crc.rs @@ -0,0 +1,182 @@ +//! Simple CRC bindings backed by miniz.c + +use std::io::prelude::*; +use std::io; +use libc; + +use ffi; + +/// The CRC calculated by a [`CrcReader`]. +/// +/// [`CrcReader`]: struct.CrcReader.html +#[derive(Debug)] +pub struct Crc { + crc: libc::c_ulong, + amt: u32, +} + +/// A wrapper around a [`Read`] that calculates the CRC. +/// +/// [`Read`]: https://doc.rust-lang.org/std/io/trait.Read.html +#[derive(Debug)] +pub struct CrcReader { + inner: R, + crc: Crc, +} + +impl Crc { + /// Create a new CRC. + pub fn new() -> Crc { + Crc { crc: 0, amt: 0 } + } + + /// bla + pub fn sum(&self) -> u32 { + self.crc as u32 + } + + /// The number of bytes that have been used to calculate the CRC. 
+ /// This value is only accurate if the amount is lower than 232. + pub fn amount(&self) -> u32 { + self.amt + } + + /// Update the CRC with the bytes in `data`. + pub fn update(&mut self, data: &[u8]) { + self.amt = self.amt.wrapping_add(data.len() as u32); + self.crc = unsafe { ffi::mz_crc32(self.crc, data.as_ptr(), data.len() as libc::size_t) }; + } + + /// Reset the CRC. + pub fn reset(&mut self) { + self.crc = 0; + self.amt = 0; + } + + /// Combine the CRC with the CRC for the subsequent block of bytes. + pub fn combine(&mut self, additional_crc: &Crc) { + self.crc = unsafe { + ffi::mz_crc32_combine( + self.crc as ::libc::c_ulong, + additional_crc.crc as ::libc::c_ulong, + additional_crc.amt as ::libc::off_t, + ) + }; + self.amt += additional_crc.amt; + } +} + +impl CrcReader { + /// Create a new CrcReader. + pub fn new(r: R) -> CrcReader { + CrcReader { + inner: r, + crc: Crc::new(), + } + } +} + +impl CrcReader { + /// Get the Crc for this CrcReader. + pub fn crc(&self) -> &Crc { + &self.crc + } + + /// Get the reader that is wrapped by this CrcReader. + pub fn into_inner(self) -> R { + self.inner + } + + /// Get the reader that is wrapped by this CrcReader by reference. + pub fn get_ref(&self) -> &R { + &self.inner + } + + /// Get a mutable reference to the reader that is wrapped by this CrcReader. + pub fn get_mut(&mut self) -> &mut R { + &mut self.inner + } + + /// Reset the Crc in this CrcReader. + pub fn reset(&mut self) { + self.crc.reset(); + } +} + +impl Read for CrcReader { + fn read(&mut self, into: &mut [u8]) -> io::Result { + let amt = self.inner.read(into)?; + self.crc.update(&into[..amt]); + Ok(amt) + } +} + +impl BufRead for CrcReader { + fn fill_buf(&mut self) -> io::Result<&[u8]> { + self.inner.fill_buf() + } + fn consume(&mut self, amt: usize) { + if let Ok(data) = self.inner.fill_buf() { + self.crc.update(&data[..amt]); + } + self.inner.consume(amt); + } +} + +/// A wrapper around a [`Write`] that calculates the CRC. +/// +/// [`Write`]: https://doc.rust-lang.org/std/io/trait.Write.html +#[derive(Debug)] +pub struct CrcWriter { + inner: W, + crc: Crc, +} + +impl CrcWriter { + /// Get the Crc for this CrcWriter. + pub fn crc(&self) -> &Crc { + &self.crc + } + + /// Get the writer that is wrapped by this CrcWriter. + pub fn into_inner(self) -> W { + self.inner + } + + /// Get the writer that is wrapped by this CrcWriter by reference. + pub fn get_ref(&self) -> &W { + &self.inner + } + + /// Get a mutable reference to the writer that is wrapped by this CrcWriter. + pub fn get_mut(&mut self) -> &mut W { + &mut self.inner + } + + /// Reset the Crc in this CrcWriter. + pub fn reset(&mut self) { + self.crc.reset(); + } +} + +impl CrcWriter { + /// Create a new CrcWriter. + pub fn new(w: W) -> CrcWriter { + CrcWriter { + inner: w, + crc: Crc::new(), + } + } +} + +impl Write for CrcWriter { + fn write(&mut self, buf: &[u8]) -> io::Result { + let amt = try!(self.inner.write(buf)); + self.crc.update(&buf[..amt]); + Ok(amt) + } + + fn flush(&mut self) -> io::Result<()> { + self.inner.flush() + } +} diff --git a/flate2/src/deflate/bufread.rs b/flate2/src/deflate/bufread.rs new file mode 100644 index 000000000..ab4dd2969 --- /dev/null +++ b/flate2/src/deflate/bufread.rs @@ -0,0 +1,268 @@ +use std::io::prelude::*; +use std::io; +use std::mem; + +#[cfg(feature = "tokio")] +use futures::Poll; +#[cfg(feature = "tokio")] +use tokio_io::{AsyncRead, AsyncWrite}; + +use zio; +use {Compress, Decompress}; + +/// A DEFLATE encoder, or compressor. 
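As a quick illustration of the `CrcReader` defined just above, here is a minimal sketch (assuming, as in flate2's public API, that `Crc` and `CrcReader` are re-exported at the crate root): wrap any `Read`, read through it, then query the checksum and byte count.

```
extern crate flate2;

use std::io::prelude::*;
use flate2::CrcReader;

fn main() {
    let data = b"Hello World";
    // Reads pass through unchanged while the CRC32 and byte count accumulate.
    let mut r = CrcReader::new(&data[..]);
    let mut out = Vec::new();
    r.read_to_end(&mut out).unwrap();
    assert_eq!(&out[..], &data[..]);
    println!("crc = {:#010x}, bytes = {}", r.crc().sum(), r.crc().amount());
}
```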
+/// +/// This structure implements a [`BufRead`] interface and will read uncompressed +/// data from an underlying stream and emit a stream of compressed data. +/// +/// [`BufRead`]: https://doc.rust-lang.org/std/io/trait.BufRead.html +/// +/// # Examples +/// +/// ``` +/// use std::io::prelude::*; +/// use std::io; +/// use flate2::Compression; +/// use flate2::bufread::DeflateEncoder; +/// use std::fs::File; +/// use std::io::BufReader; +/// +/// # fn main() { +/// # println!("{:?}", open_hello_world().unwrap()); +/// # } +/// # +/// // Opens sample file, compresses the contents and returns a Vector +/// fn open_hello_world() -> io::Result> { +/// let f = File::open("examples/hello_world.txt")?; +/// let b = BufReader::new(f); +/// let mut deflater = DeflateEncoder::new(b, Compression::fast()); +/// let mut buffer = Vec::new(); +/// deflater.read_to_end(&mut buffer)?; +/// Ok(buffer) +/// } +/// ``` +#[derive(Debug)] +pub struct DeflateEncoder { + obj: R, + data: Compress, +} + +impl DeflateEncoder { + /// Creates a new encoder which will read uncompressed data from the given + /// stream and emit the compressed stream. + pub fn new(r: R, level: ::Compression) -> DeflateEncoder { + DeflateEncoder { + obj: r, + data: Compress::new(level, false), + } + } +} + +pub fn reset_encoder_data(zlib: &mut DeflateEncoder) { + zlib.data.reset(); +} + +impl DeflateEncoder { + /// Resets the state of this encoder entirely, swapping out the input + /// stream for another. + /// + /// This function will reset the internal state of this encoder and replace + /// the input stream with the one provided, returning the previous input + /// stream. Future data read from this encoder will be the compressed + /// version of `r`'s data. + pub fn reset(&mut self, r: R) -> R { + reset_encoder_data(self); + mem::replace(&mut self.obj, r) + } + + /// Acquires a reference to the underlying reader + pub fn get_ref(&self) -> &R { + &self.obj + } + + /// Acquires a mutable reference to the underlying stream + /// + /// Note that mutation of the stream may result in surprising results if + /// this encoder is continued to be used. + pub fn get_mut(&mut self) -> &mut R { + &mut self.obj + } + + /// Consumes this encoder, returning the underlying reader. + pub fn into_inner(self) -> R { + self.obj + } + + /// Returns the number of bytes that have been read into this compressor. + /// + /// Note that not all bytes read from the underlying object may be accounted + /// for, there may still be some active buffering. + pub fn total_in(&self) -> u64 { + self.data.total_in() + } + + /// Returns the number of bytes that the compressor has produced. + /// + /// Note that not all bytes may have been read yet, some may still be + /// buffered. + pub fn total_out(&self) -> u64 { + self.data.total_out() + } +} + +impl Read for DeflateEncoder { + fn read(&mut self, buf: &mut [u8]) -> io::Result { + zio::read(&mut self.obj, &mut self.data, buf) + } +} + +#[cfg(feature = "tokio")] +impl AsyncRead for DeflateEncoder {} + +impl Write for DeflateEncoder { + fn write(&mut self, buf: &[u8]) -> io::Result { + self.get_mut().write(buf) + } + + fn flush(&mut self) -> io::Result<()> { + self.get_mut().flush() + } +} + +#[cfg(feature = "tokio")] +impl AsyncWrite for DeflateEncoder { + fn shutdown(&mut self) -> Poll<(), io::Error> { + self.get_mut().shutdown() + } +} + +/// A DEFLATE decoder, or decompressor. 
+/// +/// This structure implements a [`BufRead`] interface and takes a stream of +/// compressed data as input, providing the decompressed data when read from. +/// +/// [`BufRead`]: https://doc.rust-lang.org/std/io/trait.BufRead.html +/// +/// # Examples +/// +/// ``` +/// use std::io::prelude::*; +/// use std::io; +/// # use flate2::Compression; +/// # use flate2::write::DeflateEncoder; +/// use flate2::bufread::DeflateDecoder; +/// +/// # fn main() { +/// # let mut e = DeflateEncoder::new(Vec::new(), Compression::default()); +/// # e.write_all(b"Hello World").unwrap(); +/// # let bytes = e.finish().unwrap(); +/// # println!("{}", decode_reader(bytes).unwrap()); +/// # } +/// // Uncompresses a Deflate Encoded vector of bytes and returns a string or error +/// // Here &[u8] implements Read +/// fn decode_reader(bytes: Vec) -> io::Result { +/// let mut deflater = DeflateDecoder::new(&bytes[..]); +/// let mut s = String::new(); +/// deflater.read_to_string(&mut s)?; +/// Ok(s) +/// } +/// ``` +#[derive(Debug)] +pub struct DeflateDecoder { + obj: R, + data: Decompress, +} + +pub fn reset_decoder_data(zlib: &mut DeflateDecoder) { + zlib.data = Decompress::new(false); +} + +impl DeflateDecoder { + /// Creates a new decoder which will decompress data read from the given + /// stream. + pub fn new(r: R) -> DeflateDecoder { + DeflateDecoder { + obj: r, + data: Decompress::new(false), + } + } +} + +impl DeflateDecoder { + /// Resets the state of this decoder entirely, swapping out the input + /// stream for another. + /// + /// This will reset the internal state of this decoder and replace the + /// input stream with the one provided, returning the previous input + /// stream. Future data read from this decoder will be the decompressed + /// version of `r`'s data. + pub fn reset(&mut self, r: R) -> R { + reset_decoder_data(self); + mem::replace(&mut self.obj, r) + } + + /// Resets the state of this decoder's data + /// + /// This will reset the internal state of this decoder. It will continue + /// reading from the same stream. + pub fn reset_data(&mut self) { + reset_decoder_data(self); + } + + /// Acquires a reference to the underlying stream + pub fn get_ref(&self) -> &R { + &self.obj + } + + /// Acquires a mutable reference to the underlying stream + /// + /// Note that mutation of the stream may result in surprising results if + /// this encoder is continued to be used. + pub fn get_mut(&mut self) -> &mut R { + &mut self.obj + } + + /// Consumes this decoder, returning the underlying reader. + pub fn into_inner(self) -> R { + self.obj + } + + /// Returns the number of bytes that the decompressor has consumed. + /// + /// Note that this will likely be smaller than what the decompressor + /// actually read from the underlying stream due to buffering. + pub fn total_in(&self) -> u64 { + self.data.total_in() + } + + /// Returns the number of bytes that the decompressor has produced. 
+ pub fn total_out(&self) -> u64 { + self.data.total_out() + } +} + +impl Read for DeflateDecoder { + fn read(&mut self, into: &mut [u8]) -> io::Result { + zio::read(&mut self.obj, &mut self.data, into) + } +} + +#[cfg(feature = "tokio")] +impl AsyncRead for DeflateDecoder {} + +impl Write for DeflateDecoder { + fn write(&mut self, buf: &[u8]) -> io::Result { + self.get_mut().write(buf) + } + + fn flush(&mut self) -> io::Result<()> { + self.get_mut().flush() + } +} + +#[cfg(feature = "tokio")] +impl AsyncWrite for DeflateDecoder { + fn shutdown(&mut self) -> Poll<(), io::Error> { + self.get_mut().shutdown() + } +} diff --git a/flate2/src/deflate/mod.rs b/flate2/src/deflate/mod.rs new file mode 100644 index 000000000..9ea81dcc6 --- /dev/null +++ b/flate2/src/deflate/mod.rs @@ -0,0 +1,193 @@ +pub mod bufread; +pub mod read; +pub mod write; + +#[cfg(test)] +mod tests { + use std::io::prelude::*; + + use rand::{thread_rng, Rng}; + + use super::{read, write}; + use Compression; + + #[test] + fn roundtrip() { + let mut real = Vec::new(); + let mut w = write::DeflateEncoder::new(Vec::new(), Compression::default()); + let v = ::random_bytes().take(1024).collect::>(); + for _ in 0..200 { + let to_write = &v[..thread_rng().gen_range(0, v.len())]; + real.extend(to_write.iter().map(|x| *x)); + w.write_all(to_write).unwrap(); + } + let result = w.finish().unwrap(); + let mut r = read::DeflateDecoder::new(&result[..]); + let mut ret = Vec::new(); + r.read_to_end(&mut ret).unwrap(); + assert!(ret == real); + } + + #[test] + fn drop_writes() { + let mut data = Vec::new(); + write::DeflateEncoder::new(&mut data, Compression::default()) + .write_all(b"foo") + .unwrap(); + let mut r = read::DeflateDecoder::new(&data[..]); + let mut ret = Vec::new(); + r.read_to_end(&mut ret).unwrap(); + assert!(ret == b"foo"); + } + + #[test] + fn total_in() { + let mut real = Vec::new(); + let mut w = write::DeflateEncoder::new(Vec::new(), Compression::default()); + let v = ::random_bytes().take(1024).collect::>(); + for _ in 0..200 { + let to_write = &v[..thread_rng().gen_range(0, v.len())]; + real.extend(to_write.iter().map(|x| *x)); + w.write_all(to_write).unwrap(); + } + let mut result = w.finish().unwrap(); + + let result_len = result.len(); + + for _ in 0..200 { + result.extend(v.iter().map(|x| *x)); + } + + let mut r = read::DeflateDecoder::new(&result[..]); + let mut ret = Vec::new(); + r.read_to_end(&mut ret).unwrap(); + assert!(ret == real); + assert_eq!(r.total_in(), result_len as u64); + } + + #[test] + fn roundtrip2() { + let v = ::random_bytes().take(1024 * 1024).collect::>(); + let mut r = + read::DeflateDecoder::new(read::DeflateEncoder::new(&v[..], Compression::default())); + let mut ret = Vec::new(); + r.read_to_end(&mut ret).unwrap(); + assert_eq!(ret, v); + } + + #[test] + fn roundtrip3() { + let v = ::random_bytes().take(1024 * 1024).collect::>(); + let mut w = write::DeflateEncoder::new( + write::DeflateDecoder::new(Vec::new()), + Compression::default(), + ); + w.write_all(&v).unwrap(); + let w = w.finish().unwrap().finish().unwrap(); + assert!(w == v); + } + + #[test] + fn reset_writer() { + let v = ::random_bytes().take(1024 * 1024).collect::>(); + let mut w = write::DeflateEncoder::new(Vec::new(), Compression::default()); + w.write_all(&v).unwrap(); + let a = w.reset(Vec::new()).unwrap(); + w.write_all(&v).unwrap(); + let b = w.finish().unwrap(); + + let mut w = write::DeflateEncoder::new(Vec::new(), Compression::default()); + w.write_all(&v).unwrap(); + let c = w.finish().unwrap(); + assert!(a == b 
&& b == c); + } + + #[test] + fn reset_reader() { + let v = ::random_bytes().take(1024 * 1024).collect::>(); + let (mut a, mut b, mut c) = (Vec::new(), Vec::new(), Vec::new()); + let mut r = read::DeflateEncoder::new(&v[..], Compression::default()); + r.read_to_end(&mut a).unwrap(); + r.reset(&v[..]); + r.read_to_end(&mut b).unwrap(); + + let mut r = read::DeflateEncoder::new(&v[..], Compression::default()); + r.read_to_end(&mut c).unwrap(); + assert!(a == b && b == c); + } + + #[test] + fn reset_decoder() { + let v = ::random_bytes().take(1024 * 1024).collect::>(); + let mut w = write::DeflateEncoder::new(Vec::new(), Compression::default()); + w.write_all(&v).unwrap(); + let data = w.finish().unwrap(); + + { + let (mut a, mut b, mut c) = (Vec::new(), Vec::new(), Vec::new()); + let mut r = read::DeflateDecoder::new(&data[..]); + r.read_to_end(&mut a).unwrap(); + r.reset(&data); + r.read_to_end(&mut b).unwrap(); + + let mut r = read::DeflateDecoder::new(&data[..]); + r.read_to_end(&mut c).unwrap(); + assert!(a == b && b == c && c == v); + } + + { + let mut w = write::DeflateDecoder::new(Vec::new()); + w.write_all(&data).unwrap(); + let a = w.reset(Vec::new()).unwrap(); + w.write_all(&data).unwrap(); + let b = w.finish().unwrap(); + + let mut w = write::DeflateDecoder::new(Vec::new()); + w.write_all(&data).unwrap(); + let c = w.finish().unwrap(); + assert!(a == b && b == c && c == v); + } + } + + #[test] + fn zero_length_read_with_data() { + let m = vec![3u8; 128 * 1024 + 1]; + let mut c = read::DeflateEncoder::new(&m[..], Compression::default()); + + let mut result = Vec::new(); + c.read_to_end(&mut result).unwrap(); + + let mut d = read::DeflateDecoder::new(&result[..]); + let mut data = Vec::new(); + assert!(d.read(&mut data).unwrap() == 0); + } + + #[test] + fn qc_reader() { + ::quickcheck::quickcheck(test as fn(_) -> _); + + fn test(v: Vec) -> bool { + let mut r = read::DeflateDecoder::new(read::DeflateEncoder::new( + &v[..], + Compression::default(), + )); + let mut v2 = Vec::new(); + r.read_to_end(&mut v2).unwrap(); + v == v2 + } + } + + #[test] + fn qc_writer() { + ::quickcheck::quickcheck(test as fn(_) -> _); + + fn test(v: Vec) -> bool { + let mut w = write::DeflateEncoder::new( + write::DeflateDecoder::new(Vec::new()), + Compression::default(), + ); + w.write_all(&v).unwrap(); + v == w.finish().unwrap().finish().unwrap() + } + } +} diff --git a/flate2/src/deflate/read.rs b/flate2/src/deflate/read.rs new file mode 100644 index 000000000..6dc6ee150 --- /dev/null +++ b/flate2/src/deflate/read.rs @@ -0,0 +1,266 @@ +use std::io::prelude::*; +use std::io; + +#[cfg(feature = "tokio")] +use futures::Poll; +#[cfg(feature = "tokio")] +use tokio_io::{AsyncRead, AsyncWrite}; + +use bufreader::BufReader; +use super::bufread; + +/// A DEFLATE encoder, or compressor. +/// +/// This structure implements a [`Read`] interface and will read uncompressed +/// data from an underlying stream and emit a stream of compressed data. 
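The `roundtrip2` test above relies on the read-based encoder and decoder composing directly; the same pattern in miniature, as a sketch using only items that appear in this file set:

```
extern crate flate2;

use std::io::prelude::*;
use flate2::Compression;
use flate2::read::{DeflateDecoder, DeflateEncoder};

fn main() {
    let data = b"Hello World";
    // Stack the decoder on top of the encoder: reading from the outer reader
    // compresses and immediately decompresses the underlying bytes.
    let encoder = DeflateEncoder::new(&data[..], Compression::default());
    let mut decoder = DeflateDecoder::new(encoder);
    let mut out = Vec::new();
    decoder.read_to_end(&mut out).unwrap();
    assert_eq!(&out[..], &data[..]);
}
```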
+/// +/// [`Read`]: https://doc.rust-lang.org/std/io/trait.Read.html +/// +/// # Examples +/// +/// ``` +/// use std::io::prelude::*; +/// use std::io; +/// use flate2::Compression; +/// use flate2::read::DeflateEncoder; +/// +/// # fn main() { +/// # println!("{:?}", deflateencoder_read_hello_world().unwrap()); +/// # } +/// # +/// // Return a vector containing the Deflate compressed version of hello world +/// fn deflateencoder_read_hello_world() -> io::Result> { +/// let mut ret_vec = [0;100]; +/// let c = b"hello world"; +/// let mut deflater = DeflateEncoder::new(&c[..], Compression::fast()); +/// let count = deflater.read(&mut ret_vec)?; +/// Ok(ret_vec[0..count].to_vec()) +/// } +/// ``` +#[derive(Debug)] +pub struct DeflateEncoder { + inner: bufread::DeflateEncoder>, +} + +impl DeflateEncoder { + /// Creates a new encoder which will read uncompressed data from the given + /// stream and emit the compressed stream. + pub fn new(r: R, level: ::Compression) -> DeflateEncoder { + DeflateEncoder { + inner: bufread::DeflateEncoder::new(BufReader::new(r), level), + } + } +} + +impl DeflateEncoder { + /// Resets the state of this encoder entirely, swapping out the input + /// stream for another. + /// + /// This function will reset the internal state of this encoder and replace + /// the input stream with the one provided, returning the previous input + /// stream. Future data read from this encoder will be the compressed + /// version of `r`'s data. + /// + /// Note that there may be currently buffered data when this function is + /// called, and in that case the buffered data is discarded. + pub fn reset(&mut self, r: R) -> R { + super::bufread::reset_encoder_data(&mut self.inner); + self.inner.get_mut().reset(r) + } + + /// Acquires a reference to the underlying reader + pub fn get_ref(&self) -> &R { + self.inner.get_ref().get_ref() + } + + /// Acquires a mutable reference to the underlying stream + /// + /// Note that mutation of the stream may result in surprising results if + /// this encoder is continued to be used. + pub fn get_mut(&mut self) -> &mut R { + self.inner.get_mut().get_mut() + } + + /// Consumes this encoder, returning the underlying reader. + /// + /// Note that there may be buffered bytes which are not re-acquired as part + /// of this transition. It's recommended to only call this function after + /// EOF has been reached. + pub fn into_inner(self) -> R { + self.inner.into_inner().into_inner() + } + + /// Returns the number of bytes that have been read into this compressor. + /// + /// Note that not all bytes read from the underlying object may be accounted + /// for, there may still be some active buffering. + pub fn total_in(&self) -> u64 { + self.inner.total_in() + } + + /// Returns the number of bytes that the compressor has produced. + /// + /// Note that not all bytes may have been read yet, some may still be + /// buffered. + pub fn total_out(&self) -> u64 { + self.inner.total_out() + } +} + +impl Read for DeflateEncoder { + fn read(&mut self, buf: &mut [u8]) -> io::Result { + self.inner.read(buf) + } +} + +#[cfg(feature = "tokio")] +impl AsyncRead for DeflateEncoder {} + +impl Write for DeflateEncoder { + fn write(&mut self, buf: &[u8]) -> io::Result { + self.get_mut().write(buf) + } + + fn flush(&mut self) -> io::Result<()> { + self.get_mut().flush() + } +} + +#[cfg(feature = "tokio")] +impl AsyncWrite for DeflateEncoder { + fn shutdown(&mut self) -> Poll<(), io::Error> { + self.get_mut().shutdown() + } +} + +/// A DEFLATE decoder, or decompressor. 
+/// +/// This structure implements a [`Read`] interface and takes a stream of +/// compressed data as input, providing the decompressed data when read from. +/// +/// [`Read`]: https://doc.rust-lang.org/std/io/trait.Read.html +/// +/// # Examples +/// +/// ``` +/// use std::io::prelude::*; +/// use std::io; +/// # use flate2::Compression; +/// # use flate2::write::DeflateEncoder; +/// use flate2::read::DeflateDecoder; +/// +/// # fn main() { +/// # let mut e = DeflateEncoder::new(Vec::new(), Compression::default()); +/// # e.write_all(b"Hello World").unwrap(); +/// # let bytes = e.finish().unwrap(); +/// # println!("{}", decode_reader(bytes).unwrap()); +/// # } +/// // Uncompresses a Deflate Encoded vector of bytes and returns a string or error +/// // Here &[u8] implements Read +/// fn decode_reader(bytes: Vec) -> io::Result { +/// let mut deflater = DeflateDecoder::new(&bytes[..]); +/// let mut s = String::new(); +/// deflater.read_to_string(&mut s)?; +/// Ok(s) +/// } +/// ``` +#[derive(Debug)] +pub struct DeflateDecoder { + inner: bufread::DeflateDecoder>, +} + +impl DeflateDecoder { + /// Creates a new decoder which will decompress data read from the given + /// stream. + pub fn new(r: R) -> DeflateDecoder { + DeflateDecoder::new_with_buf(r, vec![0; 32 * 1024]) + } + + /// Same as `new`, but the intermediate buffer for data is specified. + /// + /// Note that the capacity of the intermediate buffer is never increased, + /// and it is recommended for it to be large. + pub fn new_with_buf(r: R, buf: Vec) -> DeflateDecoder { + DeflateDecoder { + inner: bufread::DeflateDecoder::new(BufReader::with_buf(buf, r)), + } + } +} + +impl DeflateDecoder { + /// Resets the state of this decoder entirely, swapping out the input + /// stream for another. + /// + /// This will reset the internal state of this decoder and replace the + /// input stream with the one provided, returning the previous input + /// stream. Future data read from this decoder will be the decompressed + /// version of `r`'s data. + /// + /// Note that there may be currently buffered data when this function is + /// called, and in that case the buffered data is discarded. + pub fn reset(&mut self, r: R) -> R { + super::bufread::reset_decoder_data(&mut self.inner); + self.inner.get_mut().reset(r) + } + + /// Acquires a reference to the underlying stream + pub fn get_ref(&self) -> &R { + self.inner.get_ref().get_ref() + } + + /// Acquires a mutable reference to the underlying stream + /// + /// Note that mutation of the stream may result in surprising results if + /// this encoder is continued to be used. + pub fn get_mut(&mut self) -> &mut R { + self.inner.get_mut().get_mut() + } + + /// Consumes this decoder, returning the underlying reader. + /// + /// Note that there may be buffered bytes which are not re-acquired as part + /// of this transition. It's recommended to only call this function after + /// EOF has been reached. + pub fn into_inner(self) -> R { + self.inner.into_inner().into_inner() + } + + /// Returns the number of bytes that the decompressor has consumed. + /// + /// Note that this will likely be smaller than what the decompressor + /// actually read from the underlying stream due to buffering. + pub fn total_in(&self) -> u64 { + self.inner.total_in() + } + + /// Returns the number of bytes that the decompressor has produced. 
+ pub fn total_out(&self) -> u64 { + self.inner.total_out() + } +} + +impl Read for DeflateDecoder { + fn read(&mut self, into: &mut [u8]) -> io::Result { + self.inner.read(into) + } +} + +#[cfg(feature = "tokio")] +impl AsyncRead for DeflateDecoder {} + +impl Write for DeflateDecoder { + fn write(&mut self, buf: &[u8]) -> io::Result { + self.get_mut().write(buf) + } + + fn flush(&mut self) -> io::Result<()> { + self.get_mut().flush() + } +} + +#[cfg(feature = "tokio")] +impl AsyncWrite for DeflateDecoder { + fn shutdown(&mut self) -> Poll<(), io::Error> { + self.get_mut().shutdown() + } +} diff --git a/flate2/src/deflate/write.rs b/flate2/src/deflate/write.rs new file mode 100644 index 000000000..76dadad37 --- /dev/null +++ b/flate2/src/deflate/write.rs @@ -0,0 +1,349 @@ +use std::io::prelude::*; +use std::io; + +#[cfg(feature = "tokio")] +use futures::Poll; +#[cfg(feature = "tokio")] +use tokio_io::{AsyncRead, AsyncWrite}; + +use zio; +use {Compress, Decompress}; + +/// A DEFLATE encoder, or compressor. +/// +/// This structure implements a [`Write`] interface and takes a stream of +/// uncompressed data, writing the compressed data to the wrapped writer. +/// +/// [`Write`]: https://doc.rust-lang.org/std/io/trait.Write.html +/// +/// # Examples +/// +/// ``` +/// use std::io::prelude::*; +/// use flate2::Compression; +/// use flate2::write::DeflateEncoder; +/// +/// // Vec implements Write to print the compressed bytes of sample string +/// # fn main() { +/// +/// let mut e = DeflateEncoder::new(Vec::new(), Compression::default()); +/// e.write_all(b"Hello World").unwrap(); +/// println!("{:?}", e.finish().unwrap()); +/// # } +/// ``` +#[derive(Debug)] +pub struct DeflateEncoder { + inner: zio::Writer, +} + +impl DeflateEncoder { + /// Creates a new encoder which will write compressed data to the stream + /// given at the given compression level. + /// + /// When this encoder is dropped or unwrapped the final pieces of data will + /// be flushed. + pub fn new(w: W, level: ::Compression) -> DeflateEncoder { + DeflateEncoder { + inner: zio::Writer::new(w, Compress::new(level, false)), + } + } + + /// Acquires a reference to the underlying writer. + pub fn get_ref(&self) -> &W { + self.inner.get_ref() + } + + /// Acquires a mutable reference to the underlying writer. + /// + /// Note that mutating the output/input state of the stream may corrupt this + /// object, so care must be taken when using this method. + pub fn get_mut(&mut self) -> &mut W { + self.inner.get_mut() + } + + /// Resets the state of this encoder entirely, swapping out the output + /// stream for another. + /// + /// This function will finish encoding the current stream into the current + /// output stream before swapping out the two output streams. If the stream + /// cannot be finished an error is returned. + /// + /// After the current stream has been finished, this will reset the internal + /// state of this encoder and replace the output stream with the one + /// provided, returning the previous output stream. Future data written to + /// this encoder will be the compressed into the stream `w` provided. + /// + /// # Errors + /// + /// This function will perform I/O to complete this stream, and any I/O + /// errors which occur will be returned from this function. + pub fn reset(&mut self, w: W) -> io::Result { + self.inner.finish()?; + self.inner.data.reset(); + Ok(self.inner.replace(w)) + } + + /// Attempt to finish this output stream, writing out final chunks of data. 
+ /// + /// Note that this function can only be used once data has finished being + /// written to the output stream. After this function is called then further + /// calls to `write` may result in a panic. + /// + /// # Panics + /// + /// Attempts to write data to this stream may result in a panic after this + /// function is called. + /// + /// # Errors + /// + /// This function will perform I/O to complete this stream, and any I/O + /// errors which occur will be returned from this function. + pub fn try_finish(&mut self) -> io::Result<()> { + self.inner.finish() + } + + /// Consumes this encoder, flushing the output stream. + /// + /// This will flush the underlying data stream, close off the compressed + /// stream and, if successful, return the contained writer. + /// + /// Note that this function may not be suitable to call in a situation where + /// the underlying stream is an asynchronous I/O stream. To finish a stream + /// the `try_finish` (or `shutdown`) method should be used instead. To + /// re-acquire ownership of a stream it is safe to call this method after + /// `try_finish` or `shutdown` has returned `Ok`. + /// + /// # Errors + /// + /// This function will perform I/O to complete this stream, and any I/O + /// errors which occur will be returned from this function. + pub fn finish(mut self) -> io::Result { + self.inner.finish()?; + Ok(self.inner.take_inner()) + } + + /// Consumes this encoder, flushing the output stream. + /// + /// This will flush the underlying data stream and then return the contained + /// writer if the flush succeeded. + /// The compressed stream will not closed but only flushed. This + /// means that obtained byte array can by extended by another deflated + /// stream. To close the stream add the two bytes 0x3 and 0x0. + /// + /// # Errors + /// + /// This function will perform I/O to complete this stream, and any I/O + /// errors which occur will be returned from this function. + pub fn flush_finish(mut self) -> io::Result { + self.inner.flush()?; + Ok(self.inner.take_inner()) + } + + /// Returns the number of bytes that have been written to this compresor. + /// + /// Note that not all bytes written to this object may be accounted for, + /// there may still be some active buffering. + pub fn total_in(&self) -> u64 { + self.inner.data.total_in() + } + + /// Returns the number of bytes that the compressor has produced. + /// + /// Note that not all bytes may have been written yet, some may still be + /// buffered. + pub fn total_out(&self) -> u64 { + self.inner.data.total_out() + } +} + +impl Write for DeflateEncoder { + fn write(&mut self, buf: &[u8]) -> io::Result { + self.inner.write(buf) + } + + fn flush(&mut self) -> io::Result<()> { + self.inner.flush() + } +} + +#[cfg(feature = "tokio")] +impl AsyncWrite for DeflateEncoder { + fn shutdown(&mut self) -> Poll<(), io::Error> { + try_nb!(self.inner.finish()); + self.inner.get_mut().shutdown() + } +} + +impl Read for DeflateEncoder { + fn read(&mut self, buf: &mut [u8]) -> io::Result { + self.inner.get_mut().read(buf) + } +} + +#[cfg(feature = "tokio")] +impl AsyncRead for DeflateEncoder {} + +/// A DEFLATE decoder, or decompressor. +/// +/// This structure implements a [`Write`] and will emit a stream of decompressed +/// data when fed a stream of compressed data. 
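The `flush_finish` documentation above notes that a flushed-but-unclosed deflate stream can be terminated by appending the bytes 0x3 and 0x0; a sketch of that claim (arbitrary sample input, using the write-side encoder and read-side decoder from this crate):

```
extern crate flate2;

use std::io::prelude::*;
use flate2::Compression;
use flate2::read::DeflateDecoder;
use flate2::write::DeflateEncoder;

fn main() {
    let mut e = DeflateEncoder::new(Vec::new(), Compression::default());
    e.write_all(b"Hello World").unwrap();
    // flush_finish flushes the stream but leaves it open ...
    let mut bytes = e.flush_finish().unwrap();
    // ... so close it manually, as the documentation describes.
    bytes.extend_from_slice(&[0x3, 0x0]);

    let mut s = String::new();
    DeflateDecoder::new(&bytes[..]).read_to_string(&mut s).unwrap();
    assert_eq!(s, "Hello World");
}
```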
+/// +/// [`Write`]: https://doc.rust-lang.org/std/io/trait.Read.html +/// +/// # Examples +/// +/// ``` +/// use std::io::prelude::*; +/// use std::io; +/// # use flate2::Compression; +/// # use flate2::write::DeflateEncoder; +/// use flate2::write::DeflateDecoder; +/// +/// # fn main() { +/// # let mut e = DeflateEncoder::new(Vec::new(), Compression::default()); +/// # e.write_all(b"Hello World").unwrap(); +/// # let bytes = e.finish().unwrap(); +/// # println!("{}", decode_writer(bytes).unwrap()); +/// # } +/// // Uncompresses a Deflate Encoded vector of bytes and returns a string or error +/// // Here Vec implements Write +/// fn decode_writer(bytes: Vec) -> io::Result { +/// let mut writer = Vec::new(); +/// let mut deflater = DeflateDecoder::new(writer); +/// deflater.write_all(&bytes[..])?; +/// writer = deflater.finish()?; +/// let return_string = String::from_utf8(writer).expect("String parsing error"); +/// Ok(return_string) +/// } +/// ``` +#[derive(Debug)] +pub struct DeflateDecoder { + inner: zio::Writer, +} + +impl DeflateDecoder { + /// Creates a new decoder which will write uncompressed data to the stream. + /// + /// When this encoder is dropped or unwrapped the final pieces of data will + /// be flushed. + pub fn new(w: W) -> DeflateDecoder { + DeflateDecoder { + inner: zio::Writer::new(w, Decompress::new(false)), + } + } + + /// Acquires a reference to the underlying writer. + pub fn get_ref(&self) -> &W { + self.inner.get_ref() + } + + /// Acquires a mutable reference to the underlying writer. + /// + /// Note that mutating the output/input state of the stream may corrupt this + /// object, so care must be taken when using this method. + pub fn get_mut(&mut self) -> &mut W { + self.inner.get_mut() + } + + /// Resets the state of this decoder entirely, swapping out the output + /// stream for another. + /// + /// This function will finish encoding the current stream into the current + /// output stream before swapping out the two output streams. + /// + /// This will then reset the internal state of this decoder and replace the + /// output stream with the one provided, returning the previous output + /// stream. Future data written to this decoder will be decompressed into + /// the output stream `w`. + /// + /// # Errors + /// + /// This function will perform I/O to finish the stream, and if that I/O + /// returns an error then that will be returned from this function. + pub fn reset(&mut self, w: W) -> io::Result { + self.inner.finish()?; + self.inner.data = Decompress::new(false); + Ok(self.inner.replace(w)) + } + + /// Attempt to finish this output stream, writing out final chunks of data. + /// + /// Note that this function can only be used once data has finished being + /// written to the output stream. After this function is called then further + /// calls to `write` may result in a panic. + /// + /// # Panics + /// + /// Attempts to write data to this stream may result in a panic after this + /// function is called. + /// + /// # Errors + /// + /// This function will perform I/O to finish the stream, returning any + /// errors which happen. + pub fn try_finish(&mut self) -> io::Result<()> { + self.inner.finish() + } + + /// Consumes this encoder, flushing the output stream. + /// + /// This will flush the underlying data stream and then return the contained + /// writer if the flush succeeded. + /// + /// Note that this function may not be suitable to call in a situation where + /// the underlying stream is an asynchronous I/O stream. 
To finish a stream + /// the `try_finish` (or `shutdown`) method should be used instead. To + /// re-acquire ownership of a stream it is safe to call this method after + /// `try_finish` or `shutdown` has returned `Ok`. + /// + /// # Errors + /// + /// This function will perform I/O to complete this stream, and any I/O + /// errors which occur will be returned from this function. + pub fn finish(mut self) -> io::Result { + self.inner.finish()?; + Ok(self.inner.take_inner()) + } + + /// Returns the number of bytes that the decompressor has consumed for + /// decompression. + /// + /// Note that this will likely be smaller than the number of bytes + /// successfully written to this stream due to internal buffering. + pub fn total_in(&self) -> u64 { + self.inner.data.total_in() + } + + /// Returns the number of bytes that the decompressor has written to its + /// output stream. + pub fn total_out(&self) -> u64 { + self.inner.data.total_out() + } +} + +impl Write for DeflateDecoder { + fn write(&mut self, buf: &[u8]) -> io::Result { + self.inner.write(buf) + } + + fn flush(&mut self) -> io::Result<()> { + self.inner.flush() + } +} + +#[cfg(feature = "tokio")] +impl AsyncWrite for DeflateDecoder { + fn shutdown(&mut self) -> Poll<(), io::Error> { + try_nb!(self.inner.finish()); + self.inner.get_mut().shutdown() + } +} + +impl Read for DeflateDecoder { + fn read(&mut self, buf: &mut [u8]) -> io::Result { + self.inner.get_mut().read(buf) + } +} + +#[cfg(feature = "tokio")] +impl AsyncRead for DeflateDecoder {} diff --git a/flate2/src/ffi.rs b/flate2/src/ffi.rs new file mode 100644 index 000000000..f8b31cc9f --- /dev/null +++ b/flate2/src/ffi.rs @@ -0,0 +1,278 @@ +pub use self::imp::*; + +#[cfg(feature = "zlib")] +#[allow(bad_style)] +mod imp { + extern crate libz_sys as z; + use std::mem; + use std::ops::{Deref, DerefMut}; + use libc::{c_char, c_int, c_uint, c_ulong, size_t}; + + pub use self::z::*; + pub use self::z::deflateEnd as mz_deflateEnd; + pub use self::z::inflateEnd as mz_inflateEnd; + pub use self::z::deflateReset as mz_deflateReset; + pub use self::z::deflate as mz_deflate; + pub use self::z::inflate as mz_inflate; + pub use self::z::z_stream as mz_stream; + + pub use self::z::Z_BLOCK as MZ_BLOCK; + pub use self::z::Z_BUF_ERROR as MZ_BUF_ERROR; + pub use self::z::Z_DATA_ERROR as MZ_DATA_ERROR; + pub use self::z::Z_DEFAULT_STRATEGY as MZ_DEFAULT_STRATEGY; + pub use self::z::Z_DEFLATED as MZ_DEFLATED; + pub use self::z::Z_FINISH as MZ_FINISH; + pub use self::z::Z_FULL_FLUSH as MZ_FULL_FLUSH; + pub use self::z::Z_NO_FLUSH as MZ_NO_FLUSH; + pub use self::z::Z_OK as MZ_OK; + pub use self::z::Z_PARTIAL_FLUSH as MZ_PARTIAL_FLUSH; + pub use self::z::Z_STREAM_END as MZ_STREAM_END; + pub use self::z::Z_SYNC_FLUSH as MZ_SYNC_FLUSH; + pub use self::z::Z_STREAM_ERROR as MZ_STREAM_ERROR; + pub use self::z::Z_NEED_DICT as MZ_NEED_DICT; + + pub const MZ_DEFAULT_WINDOW_BITS: c_int = 15; + + pub unsafe extern "C" fn mz_crc32(crc: c_ulong, ptr: *const u8, len: size_t) -> c_ulong { + z::crc32(crc, ptr, len as c_uint) + } + + pub unsafe extern "C" fn mz_crc32_combine( + crc1: c_ulong, + crc2: c_ulong, + len2: z_off_t, + ) -> c_ulong { + z::crc32_combine(crc1, crc2, len2) + } + + const ZLIB_VERSION: &'static str = "1.2.8\0"; + + pub unsafe extern "C" fn mz_deflateInit2( + stream: *mut mz_stream, + level: c_int, + method: c_int, + window_bits: c_int, + mem_level: c_int, + strategy: c_int, + ) -> c_int { + z::deflateInit2_( + stream, + level, + method, + window_bits, + mem_level, + strategy, + 
ZLIB_VERSION.as_ptr() as *const c_char, + mem::size_of::() as c_int, + ) + } + pub unsafe extern "C" fn mz_inflateInit2(stream: *mut mz_stream, window_bits: c_int) -> c_int { + z::inflateInit2_( + stream, + window_bits, + ZLIB_VERSION.as_ptr() as *const c_char, + mem::size_of::() as c_int, + ) + } + + pub struct StreamWrapper { + inner: Box, + } + + impl ::std::fmt::Debug for StreamWrapper { + fn fmt(&self, f: &mut ::std::fmt::Formatter) -> Result<(), ::std::fmt::Error> { + write!(f, "StreamWrapper") + } + } + + impl Default for StreamWrapper { + fn default() -> StreamWrapper { + StreamWrapper { + inner: Box::new(unsafe { mem::zeroed() }), + } + } + } + + impl Deref for StreamWrapper { + type Target = mz_stream; + + fn deref(&self) -> &Self::Target { + &*self.inner + } + } + + impl DerefMut for StreamWrapper { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut *self.inner + } + } +} + +#[cfg(any(all(not(feature = "zlib"), feature = "rust_backend"), all(target_arch = "wasm32", not(target_os = "emscripten"))))] +mod imp { + extern crate miniz_oxide_c_api; + use std::ops::{Deref, DerefMut}; + + pub use ffi::crc_imp::*; + pub use self::miniz_oxide_c_api::*; + pub use self::miniz_oxide_c_api::lib_oxide::*; + + #[derive(Debug, Default)] + pub struct StreamWrapper { + inner: mz_stream, + } + + impl Deref for StreamWrapper { + type Target = mz_stream; + + fn deref(&self) -> &Self::Target { + &self.inner + } + } + + impl DerefMut for StreamWrapper { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.inner + } + } +} + +#[cfg(all(not(feature = "zlib"), not(feature = "rust_backend"), not(all(target_arch = "wasm32", not(target_os = "emscripten")))))] +mod imp { + extern crate miniz_sys; + use std::mem; + use std::ops::{Deref, DerefMut}; + + pub use self::miniz_sys::*; + pub use ffi::crc_imp::*; + + pub struct StreamWrapper { + inner: mz_stream, + } + + impl ::std::fmt::Debug for StreamWrapper { + fn fmt(&self, f: &mut ::std::fmt::Formatter) -> Result<(), ::std::fmt::Error> { + write!(f, "StreamWrapper") + } + } + + impl Default for StreamWrapper { + fn default() -> StreamWrapper { + StreamWrapper { + inner: unsafe { mem::zeroed() }, + } + } + } + + impl Deref for StreamWrapper { + type Target = mz_stream; + + fn deref(&self) -> &Self::Target { + &self.inner + } + } + + impl DerefMut for StreamWrapper { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.inner + } + } +} + +#[cfg(not(feature = "zlib"))] +mod crc_imp { + use libc::{c_ulong, off_t}; + pub unsafe extern "C" fn mz_crc32_combine( + crc1: c_ulong, + crc2: c_ulong, + len2: off_t, + ) -> c_ulong { + crc32_combine_(crc1, crc2, len2) + } + + // gf2_matrix_times, gf2_matrix_square and crc32_combine_ are ported from + // zlib. 
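These ported routines back `mz_crc32_combine`, which `Crc::combine` (see src/crc.rs above) uses to merge in the checksum of a following block. A sketch of what that buys you, assuming `Crc` is re-exported at the crate root as in flate2's public API:

```
extern crate flate2;

use flate2::Crc;

fn main() {
    let (a, b) = (&b"Hello "[..], &b"World"[..]);

    // CRC over the whole input in one pass.
    let mut whole = Crc::new();
    whole.update(a);
    whole.update(b);

    // CRCs of the two halves, merged without touching the data again.
    let mut left = Crc::new();
    left.update(a);
    let mut right = Crc::new();
    right.update(b);
    left.combine(&right);

    assert_eq!(left.sum(), whole.sum());
}
```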
+ + fn gf2_matrix_times(mat: &[c_ulong; 32], mut vec: c_ulong) -> c_ulong { + let mut sum = 0; + let mut mat_pos = 0; + while vec != 0 { + if vec & 1 == 1 { + sum ^= mat[mat_pos]; + } + vec >>= 1; + mat_pos += 1; + } + sum + } + + fn gf2_matrix_square(square: &mut [c_ulong; 32], mat: &[c_ulong; 32]) { + for n in 0..32 { + square[n] = gf2_matrix_times(mat, mat[n]); + } + } + + fn crc32_combine_(mut crc1: c_ulong, crc2: c_ulong, mut len2: off_t) -> c_ulong { + let mut row; + + let mut even = [0; 32]; /* even-power-of-two zeros operator */ + let mut odd = [0; 32]; /* odd-power-of-two zeros operator */ + + /* degenerate case (also disallow negative lengths) */ + if len2 <= 0 { + return crc1; + } + + /* put operator for one zero bit in odd */ + odd[0] = 0xedb88320; /* CRC-32 polynomial */ + row = 1; + for n in 1..32 { + odd[n] = row; + row <<= 1; + } + + /* put operator for two zero bits in even */ + gf2_matrix_square(&mut even, &odd); + + /* put operator for four zero bits in odd */ + gf2_matrix_square(&mut odd, &even); + + /* apply len2 zeros to crc1 (first square will put the operator for one + zero byte, eight zero bits, in even) */ + loop { + /* apply zeros operator for this bit of len2 */ + gf2_matrix_square(&mut even, &odd); + if len2 & 1 == 1 { + crc1 = gf2_matrix_times(&even, crc1); + } + len2 >>= 1; + + /* if no more bits set, then done */ + if len2 == 0 { + break; + } + + /* another iteration of the loop with odd and even swapped */ + gf2_matrix_square(&mut odd, &even); + if len2 & 1 == 1 { + crc1 = gf2_matrix_times(&odd, crc1); + } + len2 >>= 1; + + /* if no more bits set, then done */ + if len2 == 0 { + break; + } + } + + /* return combined crc */ + crc1 ^= crc2; + crc1 + } +} + +#[test] +fn crc32_combine() { + let crc32 = unsafe { imp::mz_crc32_combine(1, 2, 3) }; + assert_eq!(crc32, 29518389); +} diff --git a/flate2/src/gz/bufread.rs b/flate2/src/gz/bufread.rs new file mode 100644 index 000000000..3982e80ad --- /dev/null +++ b/flate2/src/gz/bufread.rs @@ -0,0 +1,556 @@ +use std::cmp; +use std::io; +use std::io::prelude::*; +use std::mem; + +use super::{GzBuilder, GzHeader}; +use super::{FCOMMENT, FEXTRA, FHCRC, FNAME}; +use crc::CrcReader; +use deflate; +use Compression; + +fn copy(into: &mut [u8], from: &[u8], pos: &mut usize) -> usize { + let min = cmp::min(into.len(), from.len() - *pos); + for (slot, val) in into.iter_mut().zip(from[*pos..*pos + min].iter()) { + *slot = *val; + } + *pos += min; + return min; +} + +pub(crate) fn corrupt() -> io::Error { + io::Error::new( + io::ErrorKind::InvalidInput, + "corrupt gzip stream does not have a matching checksum", + ) +} + +fn bad_header() -> io::Error { + io::Error::new(io::ErrorKind::InvalidInput, "invalid gzip header") +} + +fn read_le_u16(r: &mut R) -> io::Result { + let mut b = [0; 2]; + r.read_exact(&mut b)?; + Ok((b[0] as u16) | ((b[1] as u16) << 8)) +} + +pub(crate) fn read_gz_header(r: &mut R) -> io::Result { + let mut crc_reader = CrcReader::new(r); + let mut header = [0; 10]; + crc_reader.read_exact(&mut header)?; + + let id1 = header[0]; + let id2 = header[1]; + if id1 != 0x1f || id2 != 0x8b { + return Err(bad_header()); + } + let cm = header[2]; + if cm != 8 { + return Err(bad_header()); + } + + let flg = header[3]; + let mtime = ((header[4] as u32) << 0) + | ((header[5] as u32) << 8) + | ((header[6] as u32) << 16) + | ((header[7] as u32) << 24); + let _xfl = header[8]; + let os = header[9]; + + let extra = if flg & FEXTRA != 0 { + let xlen = read_le_u16(&mut crc_reader)?; + let mut extra = vec![0; xlen as usize]; + 
crc_reader.read_exact(&mut extra)?; + Some(extra) + } else { + None + }; + let filename = if flg & FNAME != 0 { + // wow this is slow + let mut b = Vec::new(); + for byte in crc_reader.by_ref().bytes() { + let byte = byte?; + if byte == 0 { + break; + } + b.push(byte); + } + Some(b) + } else { + None + }; + let comment = if flg & FCOMMENT != 0 { + // wow this is slow + let mut b = Vec::new(); + for byte in crc_reader.by_ref().bytes() { + let byte = byte?; + if byte == 0 { + break; + } + b.push(byte); + } + Some(b) + } else { + None + }; + + if flg & FHCRC != 0 { + let calced_crc = crc_reader.crc().sum() as u16; + let stored_crc = read_le_u16(&mut crc_reader)?; + if calced_crc != stored_crc { + return Err(corrupt()); + } + } + + Ok(GzHeader { + extra: extra, + filename: filename, + comment: comment, + operating_system: os, + mtime: mtime, + }) +} + +/// A gzip streaming encoder +/// +/// This structure exposes a [`BufRead`] interface that will read uncompressed data +/// from the underlying reader and expose the compressed version as a [`BufRead`] +/// interface. +/// +/// [`BufRead`]: https://doc.rust-lang.org/std/io/trait.BufRead.html +/// +/// # Examples +/// +/// ``` +/// use std::io::prelude::*; +/// use std::io; +/// use flate2::Compression; +/// use flate2::bufread::GzEncoder; +/// use std::fs::File; +/// use std::io::BufReader; +/// +/// // Opens sample file, compresses the contents and returns a Vector or error +/// // File wrapped in a BufReader implements BufRead +/// +/// fn open_hello_world() -> io::Result> { +/// let f = File::open("examples/hello_world.txt")?; +/// let b = BufReader::new(f); +/// let mut gz = GzEncoder::new(b, Compression::fast()); +/// let mut buffer = Vec::new(); +/// gz.read_to_end(&mut buffer)?; +/// Ok(buffer) +/// } +/// ``` +#[derive(Debug)] +pub struct GzEncoder { + inner: deflate::bufread::DeflateEncoder>, + header: Vec, + pos: usize, + eof: bool, +} + +pub fn gz_encoder(header: Vec, r: R, lvl: Compression) -> GzEncoder { + let crc = CrcReader::new(r); + GzEncoder { + inner: deflate::bufread::DeflateEncoder::new(crc, lvl), + header: header, + pos: 0, + eof: false, + } +} + +impl GzEncoder { + /// Creates a new encoder which will use the given compression level. + /// + /// The encoder is not configured specially for the emitted header. For + /// header configuration, see the `GzBuilder` type. + /// + /// The data read from the stream `r` will be compressed and available + /// through the returned reader. + pub fn new(r: R, level: Compression) -> GzEncoder { + GzBuilder::new().buf_read(r, level) + } + + fn read_footer(&mut self, into: &mut [u8]) -> io::Result { + if self.pos == 8 { + return Ok(0); + } + let crc = self.inner.get_ref().crc(); + let ref arr = [ + (crc.sum() >> 0) as u8, + (crc.sum() >> 8) as u8, + (crc.sum() >> 16) as u8, + (crc.sum() >> 24) as u8, + (crc.amount() >> 0) as u8, + (crc.amount() >> 8) as u8, + (crc.amount() >> 16) as u8, + (crc.amount() >> 24) as u8, + ]; + Ok(copy(into, arr, &mut self.pos)) + } +} + +impl GzEncoder { + /// Acquires a reference to the underlying reader. + pub fn get_ref(&self) -> &R { + self.inner.get_ref().get_ref() + } + + /// Acquires a mutable reference to the underlying reader. + /// + /// Note that mutation of the reader may result in surprising results if + /// this encoder is continued to be used. 
+ pub fn get_mut(&mut self) -> &mut R { + self.inner.get_mut().get_mut() + } + + /// Returns the underlying stream, consuming this encoder + pub fn into_inner(self) -> R { + self.inner.into_inner().into_inner() + } +} + +impl Read for GzEncoder { + fn read(&mut self, mut into: &mut [u8]) -> io::Result { + let mut amt = 0; + if self.eof { + return self.read_footer(into); + } else if self.pos < self.header.len() { + amt += copy(into, &self.header, &mut self.pos); + if amt == into.len() { + return Ok(amt); + } + let tmp = into; + into = &mut tmp[amt..]; + } + match self.inner.read(into)? { + 0 => { + self.eof = true; + self.pos = 0; + self.read_footer(into) + } + n => Ok(amt + n), + } + } +} + +impl Write for GzEncoder { + fn write(&mut self, buf: &[u8]) -> io::Result { + self.get_mut().write(buf) + } + + fn flush(&mut self) -> io::Result<()> { + self.get_mut().flush() + } +} + +/// A gzip streaming decoder +/// +/// This structure exposes a [`ReadBuf`] interface that will consume compressed +/// data from the underlying reader and emit uncompressed data. +/// +/// [`ReadBuf`]: https://doc.rust-lang.org/std/io/trait.BufRead.html +/// +/// # Examples +/// +/// ``` +/// use std::io::prelude::*; +/// use std::io; +/// # use flate2::Compression; +/// # use flate2::write::GzEncoder; +/// use flate2::bufread::GzDecoder; +/// +/// # fn main() { +/// # let mut e = GzEncoder::new(Vec::new(), Compression::default()); +/// # e.write_all(b"Hello World").unwrap(); +/// # let bytes = e.finish().unwrap(); +/// # println!("{}", decode_reader(bytes).unwrap()); +/// # } +/// # +/// // Uncompresses a Gz Encoded vector of bytes and returns a string or error +/// // Here &[u8] implements BufRead +/// +/// fn decode_reader(bytes: Vec) -> io::Result { +/// let mut gz = GzDecoder::new(&bytes[..]); +/// let mut s = String::new(); +/// gz.read_to_string(&mut s)?; +/// Ok(s) +/// } +/// ``` +#[derive(Debug)] +pub struct GzDecoder { + inner: CrcReader>, + header: Option>, + finished: bool, +} + +impl GzDecoder { + /// Creates a new decoder from the given reader, immediately parsing the + /// gzip header. + pub fn new(mut r: R) -> GzDecoder { + let header = read_gz_header(&mut r); + + let flate = deflate::bufread::DeflateDecoder::new(r); + GzDecoder { + inner: CrcReader::new(flate), + header: Some(header), + finished: false, + } + } + + fn finish(&mut self) -> io::Result<()> { + if self.finished { + return Ok(()); + } + let ref mut buf = [0u8; 8]; + { + let mut len = 0; + + while len < buf.len() { + match self.inner.get_mut().get_mut().read(&mut buf[len..])? { + 0 => return Err(corrupt()), + n => len += n, + } + } + } + + let crc = ((buf[0] as u32) << 0) + | ((buf[1] as u32) << 8) + | ((buf[2] as u32) << 16) + | ((buf[3] as u32) << 24); + let amt = ((buf[4] as u32) << 0) + | ((buf[5] as u32) << 8) + | ((buf[6] as u32) << 16) + | ((buf[7] as u32) << 24); + if crc != self.inner.crc().sum() { + return Err(corrupt()); + } + if amt != self.inner.crc().amount() { + return Err(corrupt()); + } + self.finished = true; + Ok(()) + } +} + +impl GzDecoder { + /// Returns the header associated with this stream, if it was valid + pub fn header(&self) -> Option<&GzHeader> { + self.header.as_ref().and_then(|h| h.as_ref().ok()) + } + + /// Acquires a reference to the underlying reader. + pub fn get_ref(&self) -> &R { + self.inner.get_ref().get_ref() + } + + /// Acquires a mutable reference to the underlying stream. + /// + /// Note that mutation of the stream may result in surprising results if + /// this encoder is continued to be used. 
+ pub fn get_mut(&mut self) -> &mut R { + self.inner.get_mut().get_mut() + } + + /// Consumes this decoder, returning the underlying reader. + pub fn into_inner(self) -> R { + self.inner.into_inner().into_inner() + } +} + +impl Read for GzDecoder { + fn read(&mut self, into: &mut [u8]) -> io::Result { + match self.header { + None => return Ok(0), // error already returned, + Some(Ok(_)) => {} + Some(Err(_)) => match self.header.take().unwrap() { + Ok(_) => panic!(), + Err(e) => return Err(e), + }, + } + if into.is_empty() { + return Ok(0); + } + match self.inner.read(into)? { + 0 => { + self.finish()?; + Ok(0) + } + n => Ok(n), + } + } +} + +impl Write for GzDecoder { + fn write(&mut self, buf: &[u8]) -> io::Result { + self.get_mut().write(buf) + } + + fn flush(&mut self) -> io::Result<()> { + self.get_mut().flush() + } +} + +/// A gzip streaming decoder that decodes all members of a multistream +/// +/// A gzip member consists of a header, compressed data and a trailer. The [gzip +/// specification](https://tools.ietf.org/html/rfc1952), however, allows multiple +/// gzip members to be joined in a single stream. `MultiGzDecoder` will +/// decode all consecutive members while `GzDecoder` will only decompress +/// the first gzip member. The multistream format is commonly used in +/// bioinformatics, for example when using the BGZF compressed data. +/// +/// This structure exposes a [`BufRead`] interface that will consume all gzip members +/// from the underlying reader and emit uncompressed data. +/// +/// [`BufRead`]: https://doc.rust-lang.org/std/io/trait.BufRead.html +/// +/// # Examples +/// +/// ``` +/// use std::io::prelude::*; +/// use std::io; +/// # use flate2::Compression; +/// # use flate2::write::GzEncoder; +/// use flate2::bufread::MultiGzDecoder; +/// +/// # fn main() { +/// # let mut e = GzEncoder::new(Vec::new(), Compression::default()); +/// # e.write_all(b"Hello World").unwrap(); +/// # let bytes = e.finish().unwrap(); +/// # println!("{}", decode_reader(bytes).unwrap()); +/// # } +/// # +/// // Uncompresses a Gz Encoded vector of bytes and returns a string or error +/// // Here &[u8] implements BufRead +/// +/// fn decode_reader(bytes: Vec) -> io::Result { +/// let mut gz = MultiGzDecoder::new(&bytes[..]); +/// let mut s = String::new(); +/// gz.read_to_string(&mut s)?; +/// Ok(s) +/// } +/// ``` +#[derive(Debug)] +pub struct MultiGzDecoder { + inner: CrcReader>, + header: io::Result, + finished: bool, +} + +impl MultiGzDecoder { + /// Creates a new decoder from the given reader, immediately parsing the + /// (first) gzip header. If the gzip stream contains multiple members all will + /// be decoded. + pub fn new(mut r: R) -> MultiGzDecoder { + let header = read_gz_header(&mut r); + + let flate = deflate::bufread::DeflateDecoder::new(r); + MultiGzDecoder { + inner: CrcReader::new(flate), + header: header, + finished: false, + } + } + + fn finish_member(&mut self) -> io::Result { + if self.finished { + return Ok(0); + } + let ref mut buf = [0u8; 8]; + { + let mut len = 0; + + while len < buf.len() { + match self.inner.get_mut().get_mut().read(&mut buf[len..])? 
{ + 0 => return Err(corrupt()), + n => len += n, + } + } + } + + let crc = ((buf[0] as u32) << 0) + | ((buf[1] as u32) << 8) + | ((buf[2] as u32) << 16) + | ((buf[3] as u32) << 24); + let amt = ((buf[4] as u32) << 0) + | ((buf[5] as u32) << 8) + | ((buf[6] as u32) << 16) + | ((buf[7] as u32) << 24); + if crc != self.inner.crc().sum() as u32 { + return Err(corrupt()); + } + if amt != self.inner.crc().amount() { + return Err(corrupt()); + } + let remaining = match self.inner.get_mut().get_mut().fill_buf() { + Ok(b) => if b.is_empty() { + self.finished = true; + return Ok(0); + } else { + b.len() + }, + Err(e) => return Err(e), + }; + + let next_header = read_gz_header(self.inner.get_mut().get_mut()); + drop(mem::replace(&mut self.header, next_header)); + self.inner.reset(); + self.inner.get_mut().reset_data(); + + Ok(remaining) + } +} + +impl MultiGzDecoder { + /// Returns the current header associated with this stream, if it's valid + pub fn header(&self) -> Option<&GzHeader> { + self.header.as_ref().ok() + } + + /// Acquires a reference to the underlying reader. + pub fn get_ref(&self) -> &R { + self.inner.get_ref().get_ref() + } + + /// Acquires a mutable reference to the underlying stream. + /// + /// Note that mutation of the stream may result in surprising results if + /// this encoder is continued to be used. + pub fn get_mut(&mut self) -> &mut R { + self.inner.get_mut().get_mut() + } + + /// Consumes this decoder, returning the underlying reader. + pub fn into_inner(self) -> R { + self.inner.into_inner().into_inner() + } +} + +impl Read for MultiGzDecoder { + fn read(&mut self, into: &mut [u8]) -> io::Result { + if let Err(ref mut e) = self.header { + let another_error = io::ErrorKind::Other.into(); + return Err(mem::replace(e, another_error)); + } + match self.inner.read(into)? { + 0 => match self.finish_member() { + Ok(0) => Ok(0), + Ok(_) => self.read(into), + Err(e) => Err(e), + }, + n => Ok(n), + } + } +} + +impl Write for MultiGzDecoder { + fn write(&mut self, buf: &[u8]) -> io::Result { + self.get_mut().write(buf) + } + + fn flush(&mut self) -> io::Result<()> { + self.get_mut().flush() + } +} diff --git a/flate2/src/gz/mod.rs b/flate2/src/gz/mod.rs new file mode 100644 index 000000000..5fcbe50d1 --- /dev/null +++ b/flate2/src/gz/mod.rs @@ -0,0 +1,359 @@ +use std::ffi::CString; +use std::io::prelude::*; +use std::time; + +use Compression; +use bufreader::BufReader; + +pub static FHCRC: u8 = 1 << 1; +pub static FEXTRA: u8 = 1 << 2; +pub static FNAME: u8 = 1 << 3; +pub static FCOMMENT: u8 = 1 << 4; + +pub mod bufread; +pub mod read; +pub mod write; + +/// A structure representing the header of a gzip stream. +/// +/// The header can contain metadata about the file that was compressed, if +/// present. +#[derive(PartialEq, Clone, Debug)] +pub struct GzHeader { + extra: Option>, + filename: Option>, + comment: Option>, + operating_system: u8, + mtime: u32, +} + +impl GzHeader { + /// Returns the `filename` field of this gzip stream's header, if present. + pub fn filename(&self) -> Option<&[u8]> { + self.filename.as_ref().map(|s| &s[..]) + } + + /// Returns the `extra` field of this gzip stream's header, if present. + pub fn extra(&self) -> Option<&[u8]> { + self.extra.as_ref().map(|s| &s[..]) + } + + /// Returns the `comment` field of this gzip stream's header, if present. + pub fn comment(&self) -> Option<&[u8]> { + self.comment.as_ref().map(|s| &s[..]) + } + + /// Returns the `operating_system` field of this gzip stream's header. 
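The multistream note above can be checked directly: concatenating two gzip members and decoding the result shows `GzDecoder` stopping after the first member while `MultiGzDecoder` reads both. A sketch with arbitrary sample inputs:

```
extern crate flate2;

use std::io::prelude::*;
use flate2::Compression;
use flate2::bufread::{GzDecoder, MultiGzDecoder};
use flate2::write::GzEncoder;

// Compress one gzip member.
fn gz(data: &[u8]) -> Vec<u8> {
    let mut e = GzEncoder::new(Vec::new(), Compression::default());
    e.write_all(data).unwrap();
    e.finish().unwrap()
}

fn main() {
    // Two members joined in a single stream.
    let mut stream = gz(b"Hello ");
    stream.extend(gz(b"World"));

    let mut first = String::new();
    GzDecoder::new(&stream[..]).read_to_string(&mut first).unwrap();

    let mut all = String::new();
    MultiGzDecoder::new(&stream[..]).read_to_string(&mut all).unwrap();

    assert_eq!(first, "Hello ");
    assert_eq!(all, "Hello World");
}
```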
+ /// + /// There are predefined values for various operating systems. + /// 255 means that the value is unknown. + pub fn operating_system(&self) -> u8 { + self.operating_system + } + + /// This gives the most recent modification time of the original file being compressed. + /// + /// The time is in Unix format, i.e., seconds since 00:00:00 GMT, Jan. 1, 1970. + /// (Note that this may cause problems for MS-DOS and other systems that use local + /// rather than Universal time.) If the compressed data did not come from a file, + /// `mtime` is set to the time at which compression started. + /// `mtime` = 0 means no time stamp is available. + /// + /// The usage of `mtime` is discouraged because of Year 2038 problem. + pub fn mtime(&self) -> u32 { + self.mtime + } + + /// Returns the most recent modification time represented by a date-time type. + /// Returns `None` if the value of the underlying counter is 0, + /// indicating no time stamp is available. + /// + /// + /// The time is measured as seconds since 00:00:00 GMT, Jan. 1 1970. + /// See [`mtime`](#method.mtime) for more detail. + pub fn mtime_as_datetime(&self) -> Option { + if self.mtime == 0 { + None + } else { + let duration = time::Duration::new(u64::from(self.mtime), 0); + let datetime = time::UNIX_EPOCH + duration; + Some(datetime) + } + } +} + +/// A builder structure to create a new gzip Encoder. +/// +/// This structure controls header configuration options such as the filename. +/// +/// # Examples +/// +/// ``` +/// use std::io::prelude::*; +/// # use std::io; +/// use std::fs::File; +/// use flate2::GzBuilder; +/// use flate2::Compression; +/// +/// // GzBuilder opens a file and writes a sample string using GzBuilder pattern +/// +/// # fn sample_builder() -> Result<(), io::Error> { +/// let f = File::create("examples/hello_world.gz")?; +/// let mut gz = GzBuilder::new() +/// .filename("hello_world.txt") +/// .comment("test file, please delete") +/// .write(f, Compression::default()); +/// gz.write_all(b"hello world")?; +/// gz.finish()?; +/// # Ok(()) +/// # } +/// ``` +#[derive(Debug)] +pub struct GzBuilder { + extra: Option>, + filename: Option, + comment: Option, + operating_system: Option, + mtime: u32, +} + +impl GzBuilder { + /// Create a new blank builder with no header by default. + pub fn new() -> GzBuilder { + GzBuilder { + extra: None, + filename: None, + comment: None, + operating_system: None, + mtime: 0, + } + } + + /// Configure the `mtime` field in the gzip header. + pub fn mtime(mut self, mtime: u32) -> GzBuilder { + self.mtime = mtime; + self + } + + /// Configure the `operating_system` field in the gzip header. + pub fn operating_system(mut self, os: u8) -> GzBuilder { + self.operating_system = Some(os); + self + } + + /// Configure the `extra` field in the gzip header. + pub fn extra>>(mut self, extra: T) -> GzBuilder { + self.extra = Some(extra.into()); + self + } + + /// Configure the `filename` field in the gzip header. + /// + /// # Panics + /// + /// Panics if the `filename` slice contains a zero. + pub fn filename>>(mut self, filename: T) -> GzBuilder { + self.filename = Some(CString::new(filename.into()).unwrap()); + self + } + + /// Configure the `comment` field in the gzip header. + /// + /// # Panics + /// + /// Panics if the `comment` slice contains a zero. + pub fn comment>>(mut self, comment: T) -> GzBuilder { + self.comment = Some(CString::new(comment.into()).unwrap()); + self + } + + /// Consume this builder, creating a writer encoder in the process. 
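A small usage sketch for the builder methods above, assuming the `read::GzDecoder` defined later in this patch: metadata set through `GzBuilder` can be read back from the parsed `GzHeader`.

```
use std::io::prelude::*;

use flate2::read::GzDecoder;
use flate2::{Compression, GzBuilder};

fn main() -> std::io::Result<()> {
    let data = b"payload";

    // Encode with header metadata configured through the builder.
    let mut encoder = GzBuilder::new()
        .filename("payload.txt")
        .comment("example comment")
        .mtime(1_500_000_000)
        .write(Vec::new(), Compression::default());
    encoder.write_all(data)?;
    let bytes = encoder.finish()?;

    // Decode and read the metadata back out of the parsed header.
    let mut decoder = GzDecoder::new(&bytes[..]);
    assert_eq!(decoder.header().unwrap().filename(), Some(&b"payload.txt"[..]));
    assert_eq!(decoder.header().unwrap().comment(), Some(&b"example comment"[..]));
    assert_eq!(decoder.header().unwrap().mtime(), 1_500_000_000);

    let mut out = Vec::new();
    decoder.read_to_end(&mut out)?;
    assert_eq!(&out[..], &data[..]);
    Ok(())
}
```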
+ /// + /// The data written to the returned encoder will be compressed and then + /// written out to the supplied parameter `w`. + pub fn write(self, w: W, lvl: Compression) -> write::GzEncoder { + write::gz_encoder(self.into_header(lvl), w, lvl) + } + + /// Consume this builder, creating a reader encoder in the process. + /// + /// Data read from the returned encoder will be the compressed version of + /// the data read from the given reader. + pub fn read(self, r: R, lvl: Compression) -> read::GzEncoder { + read::gz_encoder(self.buf_read(BufReader::new(r), lvl)) + } + + /// Consume this builder, creating a reader encoder in the process. + /// + /// Data read from the returned encoder will be the compressed version of + /// the data read from the given reader. + pub fn buf_read(self, r: R, lvl: Compression) -> bufread::GzEncoder + where + R: BufRead, + { + bufread::gz_encoder(self.into_header(lvl), r, lvl) + } + + fn into_header(self, lvl: Compression) -> Vec { + let GzBuilder { + extra, + filename, + comment, + operating_system, + mtime, + } = self; + let mut flg = 0; + let mut header = vec![0u8; 10]; + match extra { + Some(v) => { + flg |= FEXTRA; + header.push((v.len() >> 0) as u8); + header.push((v.len() >> 8) as u8); + header.extend(v); + } + None => {} + } + match filename { + Some(filename) => { + flg |= FNAME; + header.extend(filename.as_bytes_with_nul().iter().map(|x| *x)); + } + None => {} + } + match comment { + Some(comment) => { + flg |= FCOMMENT; + header.extend(comment.as_bytes_with_nul().iter().map(|x| *x)); + } + None => {} + } + header[0] = 0x1f; + header[1] = 0x8b; + header[2] = 8; + header[3] = flg; + header[4] = (mtime >> 0) as u8; + header[5] = (mtime >> 8) as u8; + header[6] = (mtime >> 16) as u8; + header[7] = (mtime >> 24) as u8; + header[8] = if lvl.0 >= Compression::best().0 { + 2 + } else if lvl.0 <= Compression::fast().0 { + 4 + } else { + 0 + }; + + // Typically this byte indicates what OS the gz stream was created on, + // but in an effort to have cross-platform reproducible streams just + // default this value to 255. I'm not sure that if we "correctly" set + // this it'd do anything anyway... 
+ header[9] = operating_system.unwrap_or(255); + return header; + } +} + +#[cfg(test)] +mod tests { + use std::io::prelude::*; + + use super::{read, write, GzBuilder}; + use Compression; + use rand::{thread_rng, Rng}; + + #[test] + fn roundtrip() { + let mut e = write::GzEncoder::new(Vec::new(), Compression::default()); + e.write_all(b"foo bar baz").unwrap(); + let inner = e.finish().unwrap(); + let mut d = read::GzDecoder::new(&inner[..]); + let mut s = String::new(); + d.read_to_string(&mut s).unwrap(); + assert_eq!(s, "foo bar baz"); + } + + #[test] + fn roundtrip_zero() { + let e = write::GzEncoder::new(Vec::new(), Compression::default()); + let inner = e.finish().unwrap(); + let mut d = read::GzDecoder::new(&inner[..]); + let mut s = String::new(); + d.read_to_string(&mut s).unwrap(); + assert_eq!(s, ""); + } + + #[test] + fn roundtrip_big() { + let mut real = Vec::new(); + let mut w = write::GzEncoder::new(Vec::new(), Compression::default()); + let v = ::random_bytes().take(1024).collect::>(); + for _ in 0..200 { + let to_write = &v[..thread_rng().gen_range(0, v.len())]; + real.extend(to_write.iter().map(|x| *x)); + w.write_all(to_write).unwrap(); + } + let result = w.finish().unwrap(); + let mut r = read::GzDecoder::new(&result[..]); + let mut v = Vec::new(); + r.read_to_end(&mut v).unwrap(); + assert!(v == real); + } + + #[test] + fn roundtrip_big2() { + let v = ::random_bytes().take(1024 * 1024).collect::>(); + let mut r = read::GzDecoder::new(read::GzEncoder::new(&v[..], Compression::default())); + let mut res = Vec::new(); + r.read_to_end(&mut res).unwrap(); + assert!(res == v); + } + + #[test] + fn fields() { + let r = vec![0, 2, 4, 6]; + let e = GzBuilder::new() + .filename("foo.rs") + .comment("bar") + .extra(vec![0, 1, 2, 3]) + .read(&r[..], Compression::default()); + let mut d = read::GzDecoder::new(e); + assert_eq!(d.header().unwrap().filename(), Some(&b"foo.rs"[..])); + assert_eq!(d.header().unwrap().comment(), Some(&b"bar"[..])); + assert_eq!(d.header().unwrap().extra(), Some(&b"\x00\x01\x02\x03"[..])); + let mut res = Vec::new(); + d.read_to_end(&mut res).unwrap(); + assert_eq!(res, vec![0, 2, 4, 6]); + } + + #[test] + fn keep_reading_after_end() { + let mut e = write::GzEncoder::new(Vec::new(), Compression::default()); + e.write_all(b"foo bar baz").unwrap(); + let inner = e.finish().unwrap(); + let mut d = read::GzDecoder::new(&inner[..]); + let mut s = String::new(); + d.read_to_string(&mut s).unwrap(); + assert_eq!(s, "foo bar baz"); + d.read_to_string(&mut s).unwrap(); + assert_eq!(s, "foo bar baz"); + } + + #[test] + fn qc_reader() { + ::quickcheck::quickcheck(test as fn(_) -> _); + + fn test(v: Vec) -> bool { + let r = read::GzEncoder::new(&v[..], Compression::default()); + let mut r = read::GzDecoder::new(r); + let mut v2 = Vec::new(); + r.read_to_end(&mut v2).unwrap(); + v == v2 + } + } + + #[test] + fn flush_after_write() { + let mut f = write::GzEncoder::new(Vec::new(), Compression::default()); + write!(f, "Hello world").unwrap(); + f.flush().unwrap(); + } +} diff --git a/flate2/src/gz/read.rs b/flate2/src/gz/read.rs new file mode 100644 index 000000000..0ffb9f6a4 --- /dev/null +++ b/flate2/src/gz/read.rs @@ -0,0 +1,278 @@ +use std::io::prelude::*; +use std::io; + +use super::{GzBuilder, GzHeader}; +use Compression; +use bufreader::BufReader; +use super::bufread; + +/// A gzip streaming encoder +/// +/// This structure exposes a [`Read`] interface that will read uncompressed data +/// from the underlying reader and expose the compressed version as a [`Read`] 
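The `roundtrip_big2` test above chains the read-side encoder straight into the read-side decoder; a smaller self-contained version of the same pattern (a sketch, not part of the crate):

```
use std::io::prelude::*;

use flate2::read::{GzDecoder, GzEncoder};
use flate2::Compression;

fn main() -> std::io::Result<()> {
    let data: &[u8] = b"compress on the way in, decompress on the way out";

    // The read-side encoder compresses whatever its inner reader yields, so
    // feeding it to the read-side decoder round-trips the data.
    let encoder = GzEncoder::new(data, Compression::default());
    let mut decoder = GzDecoder::new(encoder);

    let mut out = Vec::new();
    decoder.read_to_end(&mut out)?;
    assert_eq!(&out[..], data);
    Ok(())
}
```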
+/// interface. +/// +/// [`Read`]: https://doc.rust-lang.org/std/io/trait.Read.html +/// +/// # Examples +/// +/// ``` +/// use std::io::prelude::*; +/// use std::io; +/// use flate2::Compression; +/// use flate2::read::GzEncoder; +/// +/// // Return a vector containing the GZ compressed version of hello world +/// +/// fn gzencode_hello_world() -> io::Result> { +/// let mut ret_vec = [0;100]; +/// let bytestring = b"hello world"; +/// let mut gz = GzEncoder::new(&bytestring[..], Compression::fast()); +/// let count = gz.read(&mut ret_vec)?; +/// Ok(ret_vec[0..count].to_vec()) +/// } +/// ``` +#[derive(Debug)] +pub struct GzEncoder { + inner: bufread::GzEncoder>, +} + +pub fn gz_encoder(inner: bufread::GzEncoder>) -> GzEncoder { + GzEncoder { inner: inner } +} + +impl GzEncoder { + /// Creates a new encoder which will use the given compression level. + /// + /// The encoder is not configured specially for the emitted header. For + /// header configuration, see the `GzBuilder` type. + /// + /// The data read from the stream `r` will be compressed and available + /// through the returned reader. + pub fn new(r: R, level: Compression) -> GzEncoder { + GzBuilder::new().read(r, level) + } +} + +impl GzEncoder { + /// Acquires a reference to the underlying reader. + pub fn get_ref(&self) -> &R { + self.inner.get_ref().get_ref() + } + + /// Acquires a mutable reference to the underlying reader. + /// + /// Note that mutation of the reader may result in surprising results if + /// this encoder is continued to be used. + pub fn get_mut(&mut self) -> &mut R { + self.inner.get_mut().get_mut() + } + + /// Returns the underlying stream, consuming this encoder + pub fn into_inner(self) -> R { + self.inner.into_inner().into_inner() + } +} + +impl Read for GzEncoder { + fn read(&mut self, into: &mut [u8]) -> io::Result { + self.inner.read(into) + } +} + +impl Write for GzEncoder { + fn write(&mut self, buf: &[u8]) -> io::Result { + self.get_mut().write(buf) + } + + fn flush(&mut self) -> io::Result<()> { + self.get_mut().flush() + } +} + +/// A gzip streaming decoder +/// +/// This structure exposes a [`Read`] interface that will consume compressed +/// data from the underlying reader and emit uncompressed data. +/// +/// [`Read`]: https://doc.rust-lang.org/std/io/trait.Read.html +/// +/// # Examples +/// +/// ``` +/// +/// use std::io::prelude::*; +/// use std::io; +/// # use flate2::Compression; +/// # use flate2::write::GzEncoder; +/// use flate2::read::GzDecoder; +/// +/// # fn main() { +/// # let mut e = GzEncoder::new(Vec::new(), Compression::default()); +/// # e.write_all(b"Hello World").unwrap(); +/// # let bytes = e.finish().unwrap(); +/// # println!("{}", decode_reader(bytes).unwrap()); +/// # } +/// # +/// // Uncompresses a Gz Encoded vector of bytes and returns a string or error +/// // Here &[u8] implements Read +/// +/// fn decode_reader(bytes: Vec) -> io::Result { +/// let mut gz = GzDecoder::new(&bytes[..]); +/// let mut s = String::new(); +/// gz.read_to_string(&mut s)?; +/// Ok(s) +/// } +/// ``` +#[derive(Debug)] +pub struct GzDecoder { + inner: bufread::GzDecoder>, +} + +impl GzDecoder { + /// Creates a new decoder from the given reader, immediately parsing the + /// gzip header. + pub fn new(r: R) -> GzDecoder { + GzDecoder { + inner: bufread::GzDecoder::new(BufReader::new(r)), + } + } +} + +impl GzDecoder { + /// Returns the header associated with this stream, if it was valid. 
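The header is parsed when the decoder is constructed, and, as the `bufread` implementation above suggests, any parse failure is reported as an ordinary `io::Error` on the first read. A hedged sketch of that error path:

```
use std::io::prelude::*;

use flate2::read::GzDecoder;

fn main() {
    // These bytes do not start with the gzip magic number, so header parsing
    // fails and the error is returned from the first read call.
    let not_gzip: &[u8] = b"definitely not a gzip stream";
    let mut decoder = GzDecoder::new(not_gzip);
    let mut out = Vec::new();
    assert!(decoder.read_to_end(&mut out).is_err());
}
```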
+ pub fn header(&self) -> Option<&GzHeader> { + self.inner.header() + } + + /// Acquires a reference to the underlying reader. + pub fn get_ref(&self) -> &R { + self.inner.get_ref().get_ref() + } + + /// Acquires a mutable reference to the underlying stream. + /// + /// Note that mutation of the stream may result in surprising results if + /// this encoder is continued to be used. + pub fn get_mut(&mut self) -> &mut R { + self.inner.get_mut().get_mut() + } + + /// Consumes this decoder, returning the underlying reader. + pub fn into_inner(self) -> R { + self.inner.into_inner().into_inner() + } +} + +impl Read for GzDecoder { + fn read(&mut self, into: &mut [u8]) -> io::Result { + self.inner.read(into) + } +} + +impl Write for GzDecoder { + fn write(&mut self, buf: &[u8]) -> io::Result { + self.get_mut().write(buf) + } + + fn flush(&mut self) -> io::Result<()> { + self.get_mut().flush() + } +} + +/// A gzip streaming decoder that decodes all members of a multistream +/// +/// A gzip member consists of a header, compressed data and a trailer. The [gzip +/// specification](https://tools.ietf.org/html/rfc1952), however, allows multiple +/// gzip members to be joined in a single stream. `MultiGzDecoder` will +/// decode all consecutive members while `GzDecoder` will only decompress the +/// first gzip member. The multistream format is commonly used in bioinformatics, +/// for example when using the BGZF compressed data. +/// +/// This structure exposes a [`Read`] interface that will consume all gzip members +/// from the underlying reader and emit uncompressed data. +/// +/// [`Read`]: https://doc.rust-lang.org/std/io/trait.Read.html +/// +/// # Examples +/// +/// ``` +/// use std::io::prelude::*; +/// use std::io; +/// # use flate2::Compression; +/// # use flate2::write::GzEncoder; +/// use flate2::read::MultiGzDecoder; +/// +/// # fn main() { +/// # let mut e = GzEncoder::new(Vec::new(), Compression::default()); +/// # e.write_all(b"Hello World").unwrap(); +/// # let bytes = e.finish().unwrap(); +/// # println!("{}", decode_reader(bytes).unwrap()); +/// # } +/// # +/// // Uncompresses a Gz Encoded vector of bytes and returns a string or error +/// // Here &[u8] implements Read +/// +/// fn decode_reader(bytes: Vec) -> io::Result { +/// let mut gz = MultiGzDecoder::new(&bytes[..]); +/// let mut s = String::new(); +/// gz.read_to_string(&mut s)?; +/// Ok(s) +/// } +/// ``` +#[derive(Debug)] +pub struct MultiGzDecoder { + inner: bufread::MultiGzDecoder>, +} + +impl MultiGzDecoder { + /// Creates a new decoder from the given reader, immediately parsing the + /// (first) gzip header. If the gzip stream contains multiple members all will + /// be decoded. + pub fn new(r: R) -> MultiGzDecoder { + MultiGzDecoder { + inner: bufread::MultiGzDecoder::new(BufReader::new(r)), + } + } +} + +impl MultiGzDecoder { + /// Returns the current header associated with this stream, if it's valid. + pub fn header(&self) -> Option<&GzHeader> { + self.inner.header() + } + + /// Acquires a reference to the underlying reader. + pub fn get_ref(&self) -> &R { + self.inner.get_ref().get_ref() + } + + /// Acquires a mutable reference to the underlying stream. + /// + /// Note that mutation of the stream may result in surprising results if + /// this encoder is continued to be used. + pub fn get_mut(&mut self) -> &mut R { + self.inner.get_mut().get_mut() + } + + /// Consumes this decoder, returning the underlying reader. 
+ pub fn into_inner(self) -> R { + self.inner.into_inner().into_inner() + } +} + +impl Read for MultiGzDecoder { + fn read(&mut self, into: &mut [u8]) -> io::Result { + self.inner.read(into) + } +} + +impl Write for MultiGzDecoder { + fn write(&mut self, buf: &[u8]) -> io::Result { + self.get_mut().write(buf) + } + + fn flush(&mut self) -> io::Result<()> { + self.get_mut().flush() + } +} diff --git a/flate2/src/gz/write.rs b/flate2/src/gz/write.rs new file mode 100644 index 000000000..39f24a6a1 --- /dev/null +++ b/flate2/src/gz/write.rs @@ -0,0 +1,480 @@ +use std::cmp; +use std::io; +use std::io::prelude::*; + +#[cfg(feature = "tokio")] +use futures::Poll; +#[cfg(feature = "tokio")] +use tokio_io::{AsyncRead, AsyncWrite}; + +use super::bufread::{corrupt, read_gz_header}; +use super::{GzBuilder, GzHeader}; +use crc::{Crc, CrcWriter}; +use zio; +use {Compress, Compression, Decompress, Status}; + +/// A gzip streaming encoder +/// +/// This structure exposes a [`Write`] interface that will emit compressed data +/// to the underlying writer `W`. +/// +/// [`Write`]: https://doc.rust-lang.org/std/io/trait.Write.html +/// +/// # Examples +/// +/// ``` +/// use std::io::prelude::*; +/// use flate2::Compression; +/// use flate2::write::GzEncoder; +/// +/// // Vec implements Write to print the compressed bytes of sample string +/// # fn main() { +/// +/// let mut e = GzEncoder::new(Vec::new(), Compression::default()); +/// e.write_all(b"Hello World").unwrap(); +/// println!("{:?}", e.finish().unwrap()); +/// # } +/// ``` +#[derive(Debug)] +pub struct GzEncoder { + inner: zio::Writer, + crc: Crc, + crc_bytes_written: usize, + header: Vec, +} + +pub fn gz_encoder(header: Vec, w: W, lvl: Compression) -> GzEncoder { + GzEncoder { + inner: zio::Writer::new(w, Compress::new(lvl, false)), + crc: Crc::new(), + header: header, + crc_bytes_written: 0, + } +} + +impl GzEncoder { + /// Creates a new encoder which will use the given compression level. + /// + /// The encoder is not configured specially for the emitted header. For + /// header configuration, see the `GzBuilder` type. + /// + /// The data written to the returned encoder will be compressed and then + /// written to the stream `w`. + pub fn new(w: W, level: Compression) -> GzEncoder { + GzBuilder::new().write(w, level) + } + + /// Acquires a reference to the underlying writer. + pub fn get_ref(&self) -> &W { + self.inner.get_ref() + } + + /// Acquires a mutable reference to the underlying writer. + /// + /// Note that mutation of the writer may result in surprising results if + /// this encoder is continued to be used. + pub fn get_mut(&mut self) -> &mut W { + self.inner.get_mut() + } + + /// Attempt to finish this output stream, writing out final chunks of data. + /// + /// Note that this function can only be used once data has finished being + /// written to the output stream. After this function is called then further + /// calls to `write` may result in a panic. + /// + /// # Panics + /// + /// Attempts to write data to this stream may result in a panic after this + /// function is called. + /// + /// # Errors + /// + /// This function will perform I/O to complete this stream, and any I/O + /// errors which occur will be returned from this function. 
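A short sketch of the two-step shutdown described in the `try_finish` documentation above: flush the trailer in place first, then recover the writer with `finish`.

```
use std::io::prelude::*;

use flate2::write::GzEncoder;
use flate2::Compression;

fn main() -> std::io::Result<()> {
    let mut encoder = GzEncoder::new(Vec::new(), Compression::default());
    encoder.write_all(b"some data to compress")?;

    // Write the final deflate blocks and the gzip trailer without giving up
    // ownership of the encoder.
    encoder.try_finish()?;

    // After try_finish has returned Ok, finish only hands back the writer.
    let compressed: Vec<u8> = encoder.finish()?;
    println!("{} compressed bytes", compressed.len());
    Ok(())
}
```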
+ pub fn try_finish(&mut self) -> io::Result<()> { + self.write_header()?; + self.inner.finish()?; + + while self.crc_bytes_written < 8 { + let (sum, amt) = (self.crc.sum() as u32, self.crc.amount()); + let buf = [ + (sum >> 0) as u8, + (sum >> 8) as u8, + (sum >> 16) as u8, + (sum >> 24) as u8, + (amt >> 0) as u8, + (amt >> 8) as u8, + (amt >> 16) as u8, + (amt >> 24) as u8, + ]; + let inner = self.inner.get_mut(); + let n = inner.write(&buf[self.crc_bytes_written..])?; + self.crc_bytes_written += n; + } + Ok(()) + } + + /// Finish encoding this stream, returning the underlying writer once the + /// encoding is done. + /// + /// Note that this function may not be suitable to call in a situation where + /// the underlying stream is an asynchronous I/O stream. To finish a stream + /// the `try_finish` (or `shutdown`) method should be used instead. To + /// re-acquire ownership of a stream it is safe to call this method after + /// `try_finish` or `shutdown` has returned `Ok`. + /// + /// # Errors + /// + /// This function will perform I/O to complete this stream, and any I/O + /// errors which occur will be returned from this function. + pub fn finish(mut self) -> io::Result { + self.try_finish()?; + Ok(self.inner.take_inner()) + } + + fn write_header(&mut self) -> io::Result<()> { + while self.header.len() > 0 { + let n = self.inner.get_mut().write(&self.header)?; + self.header.drain(..n); + } + Ok(()) + } +} + +impl Write for GzEncoder { + fn write(&mut self, buf: &[u8]) -> io::Result { + assert_eq!(self.crc_bytes_written, 0); + self.write_header()?; + let n = self.inner.write(buf)?; + self.crc.update(&buf[..n]); + Ok(n) + } + + fn flush(&mut self) -> io::Result<()> { + assert_eq!(self.crc_bytes_written, 0); + self.write_header()?; + self.inner.flush() + } +} + +#[cfg(feature = "tokio")] +impl AsyncWrite for GzEncoder { + fn shutdown(&mut self) -> Poll<(), io::Error> { + try_nb!(self.try_finish()); + self.get_mut().shutdown() + } +} + +impl Read for GzEncoder { + fn read(&mut self, buf: &mut [u8]) -> io::Result { + self.get_mut().read(buf) + } +} + +#[cfg(feature = "tokio")] +impl AsyncRead for GzEncoder {} + +impl Drop for GzEncoder { + fn drop(&mut self) { + if self.inner.is_present() { + let _ = self.try_finish(); + } + } +} + +/// A gzip streaming decoder +/// +/// This structure exposes a [`Write`] interface that will emit compressed data +/// to the underlying writer `W`. 
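The trailer written by `try_finish` above is the CRC-32 of the uncompressed data followed by its length, both little-endian. A sketch that recomputes them with the crate's `Crc` helper and checks the last eight bytes of the output (assuming `Crc::sum`/`Crc::amount` behave as used elsewhere in this file):

```
use std::io::prelude::*;

use flate2::write::GzEncoder;
use flate2::{Compression, Crc};

fn main() -> std::io::Result<()> {
    let data: &[u8] = b"hello world";

    let mut encoder = GzEncoder::new(Vec::new(), Compression::default());
    encoder.write_all(data)?;
    let bytes = encoder.finish()?;

    // Independently compute the CRC-32 and byte count of the input.
    let mut crc = Crc::new();
    crc.update(data);

    // The last eight bytes of a gzip member are the CRC-32 and the input
    // length, each stored little-endian.
    let trailer = &bytes[bytes.len() - 8..];
    let stored_crc = u32::from(trailer[0])
        | u32::from(trailer[1]) << 8
        | u32::from(trailer[2]) << 16
        | u32::from(trailer[3]) << 24;
    let stored_len = u32::from(trailer[4])
        | u32::from(trailer[5]) << 8
        | u32::from(trailer[6]) << 16
        | u32::from(trailer[7]) << 24;
    assert_eq!(stored_crc, crc.sum() as u32);
    assert_eq!(stored_len, crc.amount());
    Ok(())
}
```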
+/// +/// [`Write`]: https://doc.rust-lang.org/std/io/trait.Write.html +/// +/// # Examples +/// +/// ``` +/// use std::io::prelude::*; +/// use std::io; +/// use flate2::Compression; +/// use flate2::write::{GzEncoder, GzDecoder}; +/// +/// # fn main() { +/// # let mut e = GzEncoder::new(Vec::new(), Compression::default()); +/// # e.write(b"Hello World").unwrap(); +/// # let bytes = e.finish().unwrap(); +/// # assert_eq!("Hello World", decode_writer(bytes).unwrap()); +/// # } +/// // Uncompresses a gzip encoded vector of bytes and returns a string or error +/// // Here Vec implements Write +/// fn decode_writer(bytes: Vec) -> io::Result { +/// let mut writer = Vec::new(); +/// let mut decoder = GzDecoder::new(writer); +/// decoder.write_all(&bytes[..])?; +/// writer = decoder.finish()?; +/// let return_string = String::from_utf8(writer).expect("String parsing error"); +/// Ok(return_string) +/// } +/// ``` +#[derive(Debug)] +pub struct GzDecoder { + inner: zio::Writer, Decompress>, + crc_bytes: Vec, + header: Option, + header_buf: Vec, +} + +const CRC_BYTES_LEN: usize = 8; + +impl GzDecoder { + /// Creates a new decoder which will write uncompressed data to the stream. + /// + /// When this encoder is dropped or unwrapped the final pieces of data will + /// be flushed. + pub fn new(w: W) -> GzDecoder { + GzDecoder { + inner: zio::Writer::new(CrcWriter::new(w), Decompress::new(false)), + crc_bytes: Vec::with_capacity(CRC_BYTES_LEN), + header: None, + header_buf: Vec::new(), + } + } + + /// Returns the header associated with this stream. + pub fn header(&self) -> Option<&GzHeader> { + self.header.as_ref() + } + + /// Acquires a reference to the underlying writer. + pub fn get_ref(&self) -> &W { + self.inner.get_ref().get_ref() + } + + /// Acquires a mutable reference to the underlying writer. + /// + /// Note that mutating the output/input state of the stream may corrupt this + /// object, so care must be taken when using this method. + pub fn get_mut(&mut self) -> &mut W { + self.inner.get_mut().get_mut() + } + + /// Attempt to finish this output stream, writing out final chunks of data. + /// + /// Note that this function can only be used once data has finished being + /// written to the output stream. After this function is called then further + /// calls to `write` may result in a panic. + /// + /// # Panics + /// + /// Attempts to write data to this stream may result in a panic after this + /// function is called. + /// + /// # Errors + /// + /// This function will perform I/O to finish the stream, returning any + /// errors which happen. + pub fn try_finish(&mut self) -> io::Result<()> { + self.finish_and_check_crc()?; + Ok(()) + } + + /// Consumes this decoder, flushing the output stream. + /// + /// This will flush the underlying data stream and then return the contained + /// writer if the flush succeeded. + /// + /// Note that this function may not be suitable to call in a situation where + /// the underlying stream is an asynchronous I/O stream. To finish a stream + /// the `try_finish` (or `shutdown`) method should be used instead. To + /// re-acquire ownership of a stream it is safe to call this method after + /// `try_finish` or `shutdown` has returned `Ok`. + /// + /// # Errors + /// + /// This function will perform I/O to complete this stream, and any I/O + /// errors which occur will be returned from this function. 
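A sketch of feeding the write-side decoder in small pieces; the `header_buf` field above lets it accumulate a partial header across `write` calls, mirroring the `decode_writer_partial_header` test further down.

```
use std::io::prelude::*;

use flate2::write::{GzDecoder, GzEncoder};
use flate2::Compression;

fn main() -> std::io::Result<()> {
    let mut encoder = GzEncoder::new(Vec::new(), Compression::default());
    encoder.write_all(b"Hello World")?;
    let compressed = encoder.finish()?;

    // Hand the compressed stream to the decoder a few bytes at a time; the
    // partial header is buffered internally until it can be parsed.
    let mut decoder = GzDecoder::new(Vec::new());
    for chunk in compressed.chunks(3) {
        decoder.write_all(chunk)?;
    }
    let decompressed = decoder.finish()?;
    assert_eq!(&decompressed[..], b"Hello World");
    Ok(())
}
```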
+ pub fn finish(mut self) -> io::Result { + self.finish_and_check_crc()?; + Ok(self.inner.take_inner().into_inner()) + } + + fn finish_and_check_crc(&mut self) -> io::Result<()> { + self.inner.finish()?; + + if self.crc_bytes.len() != 8 { + return Err(corrupt()); + } + + let crc = ((self.crc_bytes[0] as u32) << 0) + | ((self.crc_bytes[1] as u32) << 8) + | ((self.crc_bytes[2] as u32) << 16) + | ((self.crc_bytes[3] as u32) << 24); + let amt = ((self.crc_bytes[4] as u32) << 0) + | ((self.crc_bytes[5] as u32) << 8) + | ((self.crc_bytes[6] as u32) << 16) + | ((self.crc_bytes[7] as u32) << 24); + if crc != self.inner.get_ref().crc().sum() as u32 { + return Err(corrupt()); + } + if amt != self.inner.get_ref().crc().amount() { + return Err(corrupt()); + } + Ok(()) + } +} + +struct Counter { + inner: T, + pos: usize, +} + +impl Read for Counter { + fn read(&mut self, buf: &mut [u8]) -> io::Result { + let pos = self.inner.read(buf)?; + self.pos += pos; + Ok(pos) + } +} + +impl Write for GzDecoder { + fn write(&mut self, buf: &[u8]) -> io::Result { + if self.header.is_none() { + // trying to avoid buffer usage + let (res, pos) = { + let mut counter = Counter { + inner: self.header_buf.chain(buf), + pos: 0, + }; + let res = read_gz_header(&mut counter); + (res, counter.pos) + }; + + match res { + Err(err) => { + if err.kind() == io::ErrorKind::UnexpectedEof { + // not enough data for header, save to the buffer + self.header_buf.extend(buf); + Ok(buf.len()) + } else { + Err(err) + } + } + Ok(header) => { + self.header = Some(header); + let pos = pos - self.header_buf.len(); + self.header_buf.truncate(0); + Ok(pos) + } + } + } else { + let (n, status) = self.inner.write_with_status(buf)?; + + if status == Status::StreamEnd { + if n < buf.len() && self.crc_bytes.len() < 8 { + let remaining = buf.len() - n; + let crc_bytes = cmp::min(remaining, CRC_BYTES_LEN - self.crc_bytes.len()); + self.crc_bytes.extend(&buf[n..n + crc_bytes]); + return Ok(n + crc_bytes); + } + } + Ok(n) + } + } + + fn flush(&mut self) -> io::Result<()> { + self.inner.flush() + } +} + +#[cfg(feature = "tokio")] +impl AsyncWrite for GzDecoder { + fn shutdown(&mut self) -> Poll<(), io::Error> { + try_nb!(self.try_finish()); + self.inner.get_mut().get_mut().shutdown() + } +} + +impl Read for GzDecoder { + fn read(&mut self, buf: &mut [u8]) -> io::Result { + self.inner.get_mut().get_mut().read(buf) + } +} + +#[cfg(feature = "tokio")] +impl AsyncRead for GzDecoder {} + +#[cfg(test)] +mod tests { + use super::*; + + const STR: &'static str = "Hello World Hello World Hello World Hello World Hello World \ + Hello World Hello World Hello World Hello World Hello World \ + Hello World Hello World Hello World Hello World Hello World \ + Hello World Hello World Hello World Hello World Hello World \ + Hello World Hello World Hello World Hello World Hello World"; + + #[test] + fn decode_writer_one_chunk() { + let mut e = GzEncoder::new(Vec::new(), Compression::default()); + e.write(STR.as_ref()).unwrap(); + let bytes = e.finish().unwrap(); + + let mut writer = Vec::new(); + let mut decoder = GzDecoder::new(writer); + let n = decoder.write(&bytes[..]).unwrap(); + decoder.write(&bytes[n..]).unwrap(); + decoder.try_finish().unwrap(); + writer = decoder.finish().unwrap(); + let return_string = String::from_utf8(writer).expect("String parsing error"); + assert_eq!(return_string, STR); + } + + #[test] + fn decode_writer_partial_header() { + let mut e = GzEncoder::new(Vec::new(), Compression::default()); + e.write(STR.as_ref()).unwrap(); + let bytes = 
e.finish().unwrap(); + + let mut writer = Vec::new(); + let mut decoder = GzDecoder::new(writer); + assert_eq!(decoder.write(&bytes[..5]).unwrap(), 5); + let n = decoder.write(&bytes[5..]).unwrap(); + if n < bytes.len() - 5 { + decoder.write(&bytes[n + 5..]).unwrap(); + } + writer = decoder.finish().unwrap(); + let return_string = String::from_utf8(writer).expect("String parsing error"); + assert_eq!(return_string, STR); + } + + #[test] + fn decode_writer_exact_header() { + let mut e = GzEncoder::new(Vec::new(), Compression::default()); + e.write(STR.as_ref()).unwrap(); + let bytes = e.finish().unwrap(); + + let mut writer = Vec::new(); + let mut decoder = GzDecoder::new(writer); + assert_eq!(decoder.write(&bytes[..10]).unwrap(), 10); + decoder.write(&bytes[10..]).unwrap(); + writer = decoder.finish().unwrap(); + let return_string = String::from_utf8(writer).expect("String parsing error"); + assert_eq!(return_string, STR); + } + + #[test] + fn decode_writer_partial_crc() { + let mut e = GzEncoder::new(Vec::new(), Compression::default()); + e.write(STR.as_ref()).unwrap(); + let bytes = e.finish().unwrap(); + + let mut writer = Vec::new(); + let mut decoder = GzDecoder::new(writer); + let l = bytes.len() - 5; + let n = decoder.write(&bytes[..l]).unwrap(); + decoder.write(&bytes[n..]).unwrap(); + writer = decoder.finish().unwrap(); + let return_string = String::from_utf8(writer).expect("String parsing error"); + assert_eq!(return_string, STR); + } + +} diff --git a/flate2/src/lib.rs b/flate2/src/lib.rs new file mode 100644 index 000000000..550e68b60 --- /dev/null +++ b/flate2/src/lib.rs @@ -0,0 +1,234 @@ +//! A DEFLATE-based stream compression/decompression library +//! +//! This library is meant to supplement/replace the +//! `flate` library that was previously part of the standard rust distribution +//! providing a streaming encoder/decoder rather than purely +//! an in-memory encoder/decoder. +//! +//! Like with [`flate`], flate2 is based on [`miniz.c`][1] +//! +//! [1]: https://github.com/richgel999/miniz +//! [`flate`]: https://github.com/rust-lang/rust/tree/1.19.0/src/libflate +//! +//! # Organization +//! +//! This crate consists mainly of three modules, [`read`], [`write`], and +//! [`bufread`]. Each module contains a number of types used to encode and +//! decode various streams of data. +//! +//! All types in the [`write`] module work on instances of [`Write`][write], +//! whereas all types in the [`read`] module work on instances of +//! [`Read`][read] and [`bufread`] works with [`BufRead`][bufread]. If you +//! are decoding directly from a `&[u8]`, use the [`bufread`] types. +//! +//! ``` +//! use flate2::write::GzEncoder; +//! use flate2::Compression; +//! use std::io; +//! use std::io::prelude::*; +//! +//! # fn main() { let _ = run(); } +//! # fn run() -> io::Result<()> { +//! let mut encoder = GzEncoder::new(Vec::new(), Compression::default()); +//! encoder.write_all(b"Example")?; +//! # Ok(()) +//! # } +//! ``` +//! +//! +//! Other various types are provided at the top-level of the crate for +//! management and dealing with encoders/decoders. Also note that types which +//! operate over a specific trait often implement the mirroring trait as well. +//! For example a `flate2::read::DeflateDecoder` *also* implements the +//! `Write` trait if `T: Write`. That is, the "dual trait" is forwarded directly +//! to the underlying object if available. +//! +//! [`read`]: read/index.html +//! [`bufread`]: bufread/index.html +//! [`write`]: write/index.html +//! 
[read]: https://doc.rust-lang.org/std/io/trait.Read.html +//! [write]: https://doc.rust-lang.org/std/io/trait.Write.html +//! [bufread]: https://doc.rust-lang.org/std/io/trait.BufRead.html +//! +//! # Async I/O +//! +//! This crate optionally can support async I/O streams with the [Tokio stack] via +//! the `tokio` feature of this crate: +//! +//! [Tokio stack]: https://tokio.rs/ +//! +//! ```toml +//! flate2 = { version = "0.2", features = ["tokio"] } +//! ``` +//! +//! All methods are internally capable of working with streams that may return +//! [`ErrorKind::WouldBlock`] when they're not ready to perform the particular +//! operation. +//! +//! [`ErrorKind::WouldBlock`]: https://doc.rust-lang.org/std/io/enum.ErrorKind.html +//! +//! Note that care needs to be taken when using these objects, however. The +//! Tokio runtime, in particular, requires that data is fully flushed before +//! dropping streams. For compatibility with blocking streams all streams are +//! flushed/written when they are dropped, and this is not always a suitable +//! time to perform I/O. If I/O streams are flushed before drop, however, then +//! these operations will be a noop. +#![doc(html_root_url = "https://docs.rs/flate2/0.2")] +#![deny(missing_docs)] +#![deny(missing_debug_implementations)] +#![allow(trivial_numeric_casts)] +#![cfg_attr(test, deny(warnings))] + +#[cfg(feature = "tokio")] +extern crate futures; +#[cfg(not(all(target_arch = "wasm32", not(target_os = "emscripten"))))] +extern crate libc; +#[cfg(test)] +extern crate quickcheck; +#[cfg(test)] +extern crate rand; +#[cfg(feature = "tokio")] +#[macro_use] +extern crate tokio_io; + +// These must currently agree with here -- +// https://github.com/Frommi/miniz_oxide/blob/e6c214efd253491ac072c2c9adba87ef5b4cd5cb/src/lib.rs#L14-L19 +// +// Eventually we'll want to actually move these into `libc` itself for wasm, or +// otherwise not use the capi crate for miniz_oxide but rather use the +// underlying types. +#[cfg(all(target_arch = "wasm32", not(target_os = "emscripten")))] +mod libc { + #![allow(non_camel_case_types)] + pub type c_ulong = u64; + pub type off_t = i64; + pub type c_int = i32; + pub type c_uint = u32; + pub type size_t = usize; +} + +pub use gz::GzBuilder; +pub use gz::GzHeader; +pub use mem::{Compress, CompressError, Decompress, DecompressError, Status}; +pub use mem::{FlushCompress, FlushDecompress}; +pub use crc::{Crc, CrcReader, CrcWriter}; + +mod bufreader; +mod crc; +mod deflate; +mod ffi; +mod gz; +mod zio; +mod mem; +mod zlib; + +/// Types which operate over [`Read`] streams, both encoders and decoders for +/// various formats. +/// +/// [`Read`]: https://doc.rust-lang.org/std/io/trait.Read.html +pub mod read { + pub use deflate::read::DeflateEncoder; + pub use deflate::read::DeflateDecoder; + pub use zlib::read::ZlibEncoder; + pub use zlib::read::ZlibDecoder; + pub use gz::read::GzEncoder; + pub use gz::read::GzDecoder; + pub use gz::read::MultiGzDecoder; +} + +/// Types which operate over [`Write`] streams, both encoders and decoders for +/// various formats. +/// +/// [`Write`]: https://doc.rust-lang.org/std/io/trait.Write.html +pub mod write { + pub use deflate::write::DeflateEncoder; + pub use deflate::write::DeflateDecoder; + pub use zlib::write::ZlibEncoder; + pub use zlib::write::ZlibDecoder; + pub use gz::write::GzEncoder; + pub use gz::write::GzDecoder; +} + +/// Types which operate over [`BufRead`] streams, both encoders and decoders for +/// various formats. 
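Per the module docs above, a `&[u8]` already implements `BufRead`, so the `bufread` types can be used on it directly without an extra buffering layer. A minimal sketch:

```
use std::io::prelude::*;

use flate2::bufread::GzDecoder;
use flate2::write::GzEncoder;
use flate2::Compression;

fn main() -> std::io::Result<()> {
    let mut encoder = GzEncoder::new(Vec::new(), Compression::default());
    encoder.write_all(b"Example")?;
    let compressed = encoder.finish()?;

    // The byte slice is the BufRead source; no extra BufReader is needed.
    let mut decoder = GzDecoder::new(&compressed[..]);
    let mut out = String::new();
    decoder.read_to_string(&mut out)?;
    assert_eq!(out, "Example");
    Ok(())
}
```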
+/// +/// [`BufRead`]: https://doc.rust-lang.org/std/io/trait.BufRead.html +pub mod bufread { + pub use deflate::bufread::DeflateEncoder; + pub use deflate::bufread::DeflateDecoder; + pub use zlib::bufread::ZlibEncoder; + pub use zlib::bufread::ZlibDecoder; + pub use gz::bufread::GzEncoder; + pub use gz::bufread::GzDecoder; + pub use gz::bufread::MultiGzDecoder; +} + +fn _assert_send_sync() { + fn _assert_send_sync() {} + + _assert_send_sync::>(); + _assert_send_sync::>(); + _assert_send_sync::>(); + _assert_send_sync::>(); + _assert_send_sync::>(); + _assert_send_sync::>(); + _assert_send_sync::>(); + _assert_send_sync::>>(); + _assert_send_sync::>>(); + _assert_send_sync::>>(); + _assert_send_sync::>>(); + _assert_send_sync::>>(); + _assert_send_sync::>>(); +} + +/// When compressing data, the compression level can be specified by a value in +/// this enum. +#[derive(Copy, Clone, PartialEq, Eq, Debug)] +pub struct Compression(u32); + +impl Compression { + /// Creates a new description of the compression level with an explicitly + /// specified integer. + /// + /// The integer here is typically on a scale of 0-9 where 0 means "no + /// compression" and 9 means "take as long as you'd like". + pub fn new(level: u32) -> Compression { + Compression(level) + } + + /// No compression is to be performed, this may actually inflate data + /// slightly when encoding. + pub fn none() -> Compression { + Compression(0) + } + + /// Optimize for the best speed of encoding. + pub fn fast() -> Compression { + Compression(1) + } + + /// Optimize for the size of data being encoded. + pub fn best() -> Compression { + Compression(9) + } + + /// Returns an integer representing the compression level, typically on a + /// scale of 0-9 + pub fn level(&self) -> u32 { + self.0 + } +} + +impl Default for Compression { + fn default() -> Compression { + Compression(6) + } +} + +#[cfg(test)] +fn random_bytes() -> impl Iterator { + use std::iter; + use rand::Rng; + + iter::repeat(()).map(|_| rand::thread_rng().gen()) +} diff --git a/flate2/src/mem.rs b/flate2/src/mem.rs new file mode 100644 index 000000000..91c606aaf --- /dev/null +++ b/flate2/src/mem.rs @@ -0,0 +1,737 @@ +use std::error::Error; +use std::fmt; +use std::io; +use std::marker; +use std::slice; + +use libc::{c_int, c_uint}; + +use ffi; +use Compression; + +/// Raw in-memory compression stream for blocks of data. +/// +/// This type is the building block for the I/O streams in the rest of this +/// crate. It requires more management than the [`Read`]/[`Write`] API but is +/// maximally flexible in terms of accepting input from any source and being +/// able to produce output to any memory location. +/// +/// It is recommended to use the I/O stream adaptors over this type as they're +/// easier to use. +/// +/// [`Read`]: https://doc.rust-lang.org/std/io/trait.Read.html +/// [`Write`]: https://doc.rust-lang.org/std/io/trait.Write.html +#[derive(Debug)] +pub struct Compress { + inner: Stream, +} + +/// Raw in-memory decompression stream for blocks of data. +/// +/// This type is the building block for the I/O streams in the rest of this +/// crate. It requires more management than the [`Read`]/[`Write`] API but is +/// maximally flexible in terms of accepting input from any source and being +/// able to produce output to any memory location. +/// +/// It is recommended to use the I/O stream adaptors over this type as they're +/// easier to use. 
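The named constructors map onto the 0-9 scale documented above, with `Default` sitting at 6; a quick illustration:

```
use flate2::Compression;

fn main() {
    assert_eq!(Compression::none().level(), 0);
    assert_eq!(Compression::fast().level(), 1);
    assert_eq!(Compression::best().level(), 9);
    assert_eq!(Compression::default().level(), 6);

    // Any intermediate level can be requested explicitly.
    assert_eq!(Compression::new(3).level(), 3);
}
```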
+/// +/// [`Read`]: https://doc.rust-lang.org/std/io/trait.Read.html +/// [`Write`]: https://doc.rust-lang.org/std/io/trait.Write.html +#[derive(Debug)] +pub struct Decompress { + inner: Stream, +} + +#[derive(Debug)] +struct Stream { + stream_wrapper: ffi::StreamWrapper, + total_in: u64, + total_out: u64, + _marker: marker::PhantomData, +} + +unsafe impl Send for Stream {} +unsafe impl Sync for Stream {} + +trait Direction { + unsafe fn destroy(stream: *mut ffi::mz_stream) -> c_int; +} + +#[derive(Debug)] +enum DirCompress {} +#[derive(Debug)] +enum DirDecompress {} + +#[derive(Copy, Clone, PartialEq, Eq, Debug)] +/// Values which indicate the form of flushing to be used when compressing +/// in-memory data. +pub enum FlushCompress { + /// A typical parameter for passing to compression/decompression functions, + /// this indicates that the underlying stream to decide how much data to + /// accumulate before producing output in order to maximize compression. + None = ffi::MZ_NO_FLUSH as isize, + + /// All pending output is flushed to the output buffer and the output is + /// aligned on a byte boundary so that the decompressor can get all input + /// data available so far. + /// + /// Flushing may degrade compression for some compression algorithms and so + /// it should only be used when necessary. This will complete the current + /// deflate block and follow it with an empty stored block. + Sync = ffi::MZ_SYNC_FLUSH as isize, + + /// All pending output is flushed to the output buffer, but the output is + /// not aligned to a byte boundary. + /// + /// All of the input data so far will be available to the decompressor (as + /// with `Flush::Sync`. This completes the current deflate block and follows + /// it with an empty fixed codes block that is 10 bites long, and it assures + /// that enough bytes are output in order for the decompessor to finish the + /// block before the empty fixed code block. + Partial = ffi::MZ_PARTIAL_FLUSH as isize, + + /// All output is flushed as with `Flush::Sync` and the compression state is + /// reset so decompression can restart from this point if previous + /// compressed data has been damaged or if random access is desired. + /// + /// Using this option too often can seriously degrade compression. + Full = ffi::MZ_FULL_FLUSH as isize, + + /// Pending input is processed and pending output is flushed. + /// + /// The return value may indicate that the stream is not yet done and more + /// data has yet to be processed. + Finish = ffi::MZ_FINISH as isize, + + #[doc(hidden)] + _Nonexhaustive, +} + +#[derive(Copy, Clone, PartialEq, Eq, Debug)] +/// Values which indicate the form of flushing to be used when +/// decompressing in-memory data. +pub enum FlushDecompress { + /// A typical parameter for passing to compression/decompression functions, + /// this indicates that the underlying stream to decide how much data to + /// accumulate before producing output in order to maximize compression. + None = ffi::MZ_NO_FLUSH as isize, + + /// All pending output is flushed to the output buffer and the output is + /// aligned on a byte boundary so that the decompressor can get all input + /// data available so far. + /// + /// Flushing may degrade compression for some compression algorithms and so + /// it should only be used when necessary. This will complete the current + /// deflate block and follow it with an empty stored block. + Sync = ffi::MZ_SYNC_FLUSH as isize, + + /// Pending input is processed and pending output is flushed. 
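An in-memory round trip through the raw `Compress`/`Decompress` objects described above (a sketch). Note that the `*_vec` methods never grow the vector, so output capacity has to be reserved up front.

```
use flate2::{Compress, Compression, Decompress, FlushCompress, FlushDecompress};

fn main() {
    let input: &[u8] = b"in-memory compression without the io adaptors";

    // Reserve output space; compress_vec will not reallocate the vector.
    let mut compressed = Vec::with_capacity(1024);
    let mut compressor = Compress::new(Compression::default(), true);
    compressor
        .compress_vec(input, &mut compressed, FlushCompress::Finish)
        .unwrap();

    let mut decompressed = Vec::with_capacity(1024);
    let mut decompressor = Decompress::new(true);
    decompressor
        .decompress_vec(&compressed, &mut decompressed, FlushDecompress::Finish)
        .unwrap();

    assert_eq!(&decompressed[..], input);
}
```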
+ /// + /// The return value may indicate that the stream is not yet done and more + /// data has yet to be processed. + Finish = ffi::MZ_FINISH as isize, + + #[doc(hidden)] + _Nonexhaustive, +} + +/// The inner state for an error when decompressing +#[derive(Debug, Default)] +struct DecompressErrorInner { + needs_dictionary: Option, +} + +/// Error returned when a decompression object finds that the input stream of +/// bytes was not a valid input stream of bytes. +#[derive(Debug)] +pub struct DecompressError(DecompressErrorInner); + +impl DecompressError { + /// Indicates whether decompression failed due to requiring a dictionary. + /// + /// The resulting integer is the Adler-32 checksum of the dictionary + /// required. + pub fn needs_dictionary(&self) -> Option { + self.0.needs_dictionary + } +} + +/// Error returned when a compression object is used incorrectly or otherwise +/// generates an error. +#[derive(Debug)] +pub struct CompressError(()); + +/// Possible status results of compressing some data or successfully +/// decompressing a block of data. +#[derive(Copy, Clone, PartialEq, Eq, Debug)] +pub enum Status { + /// Indicates success. + /// + /// Means that more input may be needed but isn't available + /// and/or there's more output to be written but the output buffer is full. + Ok, + + /// Indicates that forward progress is not possible due to input or output + /// buffers being empty. + /// + /// For compression it means the input buffer needs some more data or the + /// output buffer needs to be freed up before trying again. + /// + /// For decompression this means that more input is needed to continue or + /// the output buffer isn't large enough to contain the result. The function + /// can be called again after fixing both. + BufError, + + /// Indicates that all input has been consumed and all output bytes have + /// been written. Decompression/compression should not be called again. + /// + /// For decompression with zlib streams the adler-32 of the decompressed + /// data has also been verified. + StreamEnd, +} + +impl Compress { + /// Creates a new object ready for compressing data that it's given. + /// + /// The `level` argument here indicates what level of compression is going + /// to be performed, and the `zlib_header` argument indicates whether the + /// output data should have a zlib header or not. + pub fn new(level: Compression, zlib_header: bool) -> Compress { + unsafe { + let mut state = ffi::StreamWrapper::default(); + let ret = ffi::mz_deflateInit2( + &mut *state, + level.0 as c_int, + ffi::MZ_DEFLATED, + if zlib_header { + ffi::MZ_DEFAULT_WINDOW_BITS + } else { + -ffi::MZ_DEFAULT_WINDOW_BITS + }, + 9, + ffi::MZ_DEFAULT_STRATEGY, + ); + debug_assert_eq!(ret, 0); + Compress { + inner: Stream { + stream_wrapper: state, + total_in: 0, + total_out: 0, + _marker: marker::PhantomData, + }, + } + } + } + + /// Returns the total number of input bytes which have been processed by + /// this compression object. + pub fn total_in(&self) -> u64 { + self.inner.total_in + } + + /// Returns the total number of output bytes which have been produced by + /// this compression object. + pub fn total_out(&self) -> u64 { + self.inner.total_out + } + + /// Specifies the compression dictionary to use. + /// + /// Returns the Adler-32 checksum of the dictionary. 
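A sketch of driving `Compress::compress` (defined below) with a small fixed output buffer, using the `Status` values documented above to decide when to stop; the `total_in`/`total_out` counters report how much was consumed and produced between calls.

```
use flate2::{Compress, Compression, FlushCompress, Status};

fn main() {
    let input: &[u8] = b"hello world hello world hello world hello world";
    let mut compressor = Compress::new(Compression::default(), false);
    let mut output = Vec::new();
    let mut chunk = [0u8; 16];

    loop {
        let consumed = compressor.total_in() as usize;
        let before = compressor.total_out();
        let status = compressor
            .compress(&input[consumed..], &mut chunk, FlushCompress::Finish)
            .unwrap();
        let produced = (compressor.total_out() - before) as usize;
        output.extend_from_slice(&chunk[..produced]);

        match status {
            // More input to consume or more output to drain.
            Status::Ok | Status::BufError => continue,
            // All input consumed and all output written.
            Status::StreamEnd => break,
        }
    }

    assert_eq!(compressor.total_in() as usize, input.len());
    println!("compressed {} bytes into {}", input.len(), output.len());
}
```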
+ #[cfg(feature = "zlib")] + pub fn set_dictionary(&mut self, dictionary: &[u8]) -> Result { + let stream = &mut *self.inner.stream_wrapper; + let rc = unsafe { + ffi::deflateSetDictionary(stream, dictionary.as_ptr(), dictionary.len() as ffi::uInt) + }; + + match rc { + ffi::MZ_STREAM_ERROR => Err(CompressError(())), + ffi::MZ_OK => Ok(stream.adler as u32), + c => panic!("unknown return code: {}", c), + } + } + + /// Quickly resets this compressor without having to reallocate anything. + /// + /// This is equivalent to dropping this object and then creating a new one. + pub fn reset(&mut self) { + let rc = unsafe { ffi::mz_deflateReset(&mut *self.inner.stream_wrapper) }; + assert_eq!(rc, ffi::MZ_OK); + + self.inner.total_in = 0; + self.inner.total_out = 0; + } + + /// Dynamically updates the compression level. + /// + /// This can be used to switch between compression levels for different + /// kinds of data, or it can be used in conjunction with a call to reset + /// to reuse the compressor. + /// + /// This may return an error if there wasn't enough output space to complete + /// the compression of the available input data before changing the + /// compression level. Flushing the stream before calling this method + /// ensures that the function will succeed on the first call. + #[cfg(feature = "zlib")] + pub fn set_level(&mut self, level: Compression) -> Result<(), CompressError> { + let stream = &mut *self.inner.stream_wrapper; + + let rc = unsafe { ffi::deflateParams(stream, level.0 as c_int, ffi::MZ_DEFAULT_STRATEGY) }; + + match rc { + ffi::MZ_OK => Ok(()), + ffi::MZ_BUF_ERROR => Err(CompressError(())), + c => panic!("unknown return code: {}", c), + } + } + + /// Compresses the input data into the output, consuming only as much + /// input as needed and writing as much output as possible. + /// + /// The flush option can be any of the available `FlushCompress` parameters. + /// + /// To learn how much data was consumed or how much output was produced, use + /// the `total_in` and `total_out` functions before/after this is called. + pub fn compress( + &mut self, + input: &[u8], + output: &mut [u8], + flush: FlushCompress, + ) -> Result { + let raw = &mut *self.inner.stream_wrapper; + raw.next_in = input.as_ptr() as *mut _; + raw.avail_in = input.len() as c_uint; + raw.next_out = output.as_mut_ptr(); + raw.avail_out = output.len() as c_uint; + + let rc = unsafe { ffi::mz_deflate(raw, flush as c_int) }; + + // Unfortunately the total counters provided by zlib might be only + // 32 bits wide and overflow while processing large amounts of data. + self.inner.total_in += (raw.next_in as usize - input.as_ptr() as usize) as u64; + self.inner.total_out += (raw.next_out as usize - output.as_ptr() as usize) as u64; + + match rc { + ffi::MZ_OK => Ok(Status::Ok), + ffi::MZ_BUF_ERROR => Ok(Status::BufError), + ffi::MZ_STREAM_END => Ok(Status::StreamEnd), + ffi::MZ_STREAM_ERROR => Err(CompressError(())), + c => panic!("unknown return code: {}", c), + } + } + + /// Compresses the input data into the extra space of the output, consuming + /// only as much input as needed and writing as much output as possible. + /// + /// This function has the same semantics as `compress`, except that the + /// length of `vec` is managed by this function. This will not reallocate + /// the vector provided or attempt to grow it, so space for the output must + /// be reserved in the output vector by the caller before calling this + /// function. 
+ pub fn compress_vec( + &mut self, + input: &[u8], + output: &mut Vec, + flush: FlushCompress, + ) -> Result { + let cap = output.capacity(); + let len = output.len(); + + unsafe { + let before = self.total_out(); + let ret = { + let ptr = output.as_mut_ptr().offset(len as isize); + let out = slice::from_raw_parts_mut(ptr, cap - len); + self.compress(input, out, flush) + }; + output.set_len((self.total_out() - before) as usize + len); + return ret; + } + } +} + +impl Decompress { + /// Creates a new object ready for decompressing data that it's given. + /// + /// The `zlib_header` argument indicates whether the input data is expected + /// to have a zlib header or not. + pub fn new(zlib_header: bool) -> Decompress { + unsafe { + let mut state = ffi::StreamWrapper::default(); + let ret = ffi::mz_inflateInit2( + &mut *state, + if zlib_header { + ffi::MZ_DEFAULT_WINDOW_BITS + } else { + -ffi::MZ_DEFAULT_WINDOW_BITS + }, + ); + debug_assert_eq!(ret, 0); + Decompress { + inner: Stream { + stream_wrapper: state, + total_in: 0, + total_out: 0, + _marker: marker::PhantomData, + }, + } + } + } + + /// Returns the total number of input bytes which have been processed by + /// this decompression object. + pub fn total_in(&self) -> u64 { + self.inner.total_in + } + + /// Returns the total number of output bytes which have been produced by + /// this decompression object. + pub fn total_out(&self) -> u64 { + self.inner.total_out + } + + /// Decompresses the input data into the output, consuming only as much + /// input as needed and writing as much output as possible. + /// + /// The flush option can be any of the available `FlushDecompress` parameters. + /// + /// If the first call passes `FlushDecompress::Finish` it is assumed that + /// the input and output buffers are both sized large enough to decompress + /// the entire stream in a single call. + /// + /// A flush value of `FlushDecompress::Finish` indicates that there are no + /// more source bytes available beside what's already in the input buffer, + /// and the output buffer is large enough to hold the rest of the + /// decompressed data. + /// + /// To learn how much data was consumed or how much output was produced, use + /// the `total_in` and `total_out` functions before/after this is called. + /// + /// # Errors + /// + /// If the input data to this instance of `Decompress` is not a valid + /// zlib/deflate stream then this function may return an instance of + /// `DecompressError` to indicate that the stream of input bytes is corrupted. + pub fn decompress( + &mut self, + input: &[u8], + output: &mut [u8], + flush: FlushDecompress, + ) -> Result { + let raw = &mut *self.inner.stream_wrapper; + raw.next_in = input.as_ptr() as *mut u8; + raw.avail_in = input.len() as c_uint; + raw.next_out = output.as_mut_ptr(); + raw.avail_out = output.len() as c_uint; + + let rc = unsafe { ffi::mz_inflate(raw, flush as c_int) }; + + // Unfortunately the total counters provided by zlib might be only + // 32 bits wide and overflow while processing large amounts of data. 
+ self.inner.total_in += (raw.next_in as usize - input.as_ptr() as usize) as u64; + self.inner.total_out += (raw.next_out as usize - output.as_ptr() as usize) as u64; + + match rc { + ffi::MZ_DATA_ERROR | ffi::MZ_STREAM_ERROR => Err(DecompressError(Default::default())), + ffi::MZ_OK => Ok(Status::Ok), + ffi::MZ_BUF_ERROR => Ok(Status::BufError), + ffi::MZ_STREAM_END => Ok(Status::StreamEnd), + ffi::MZ_NEED_DICT => Err(DecompressError(DecompressErrorInner { + needs_dictionary: Some(raw.adler as u32), + })), + c => panic!("unknown return code: {}", c), + } + } + + /// Decompresses the input data into the extra space in the output vector + /// specified by `output`. + /// + /// This function has the same semantics as `decompress`, except that the + /// length of `vec` is managed by this function. This will not reallocate + /// the vector provided or attempt to grow it, so space for the output must + /// be reserved in the output vector by the caller before calling this + /// function. + /// + /// # Errors + /// + /// If the input data to this instance of `Decompress` is not a valid + /// zlib/deflate stream then this function may return an instance of + /// `DecompressError` to indicate that the stream of input bytes is corrupted. + pub fn decompress_vec( + &mut self, + input: &[u8], + output: &mut Vec, + flush: FlushDecompress, + ) -> Result { + let cap = output.capacity(); + let len = output.len(); + + unsafe { + let before = self.total_out(); + let ret = { + let ptr = output.as_mut_ptr().offset(len as isize); + let out = slice::from_raw_parts_mut(ptr, cap - len); + self.decompress(input, out, flush) + }; + output.set_len((self.total_out() - before) as usize + len); + return ret; + } + } + + /// Specifies the decompression dictionary to use. + #[cfg(feature = "zlib")] + pub fn set_dictionary(&mut self, dictionary: &[u8]) -> Result { + let stream = &mut *self.inner.stream_wrapper; + let rc = unsafe { + ffi::inflateSetDictionary(stream, dictionary.as_ptr(), dictionary.len() as ffi::uInt) + }; + + match rc { + ffi::MZ_STREAM_ERROR => Err(DecompressError(Default::default())), + ffi::MZ_DATA_ERROR => Err(DecompressError(DecompressErrorInner { + needs_dictionary: Some(stream.adler as u32), + })), + ffi::MZ_OK => Ok(stream.adler as u32), + c => panic!("unknown return code: {}", c), + } + } + + /// Performs the equivalent of replacing this decompression state with a + /// freshly allocated copy. + /// + /// This function may not allocate memory, though, and attempts to reuse any + /// previously existing resources. + /// + /// The argument provided here indicates whether the reset state will + /// attempt to decode a zlib header first or not. 
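`DecompressError` converts into `io::Error` (see the `From` impl further down), so the raw API composes with functions returning `io::Result` via `?`. A sketch with a hypothetical helper named `inflate_raw`:

```
use std::io;

use flate2::{Decompress, FlushDecompress};

// Hypothetical helper: inflate a raw deflate stream into a Vec, letting the
// `?` operator convert DecompressError into io::Error.
fn inflate_raw(data: &[u8]) -> io::Result<Vec<u8>> {
    let mut out = Vec::with_capacity(64 * 1024);
    let mut decompressor = Decompress::new(false);
    decompressor.decompress_vec(data, &mut out, FlushDecompress::Finish)?;
    Ok(out)
}

fn main() {
    // These bytes are not a valid deflate stream, so the error path is taken
    // and the caller sees an ordinary io::Error.
    assert!(inflate_raw(&[1, 2, 3, 4, 5]).is_err());
}
```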
+ pub fn reset(&mut self, zlib_header: bool) { + self._reset(zlib_header); + } + + #[cfg(feature = "zlib")] + fn _reset(&mut self, zlib_header: bool) { + let bits = if zlib_header { + ffi::MZ_DEFAULT_WINDOW_BITS + } else { + -ffi::MZ_DEFAULT_WINDOW_BITS + }; + unsafe { + ffi::inflateReset2(&mut *self.inner.stream_wrapper, bits); + } + self.inner.total_out = 0; + self.inner.total_in = 0; + } + + #[cfg(not(feature = "zlib"))] + fn _reset(&mut self, zlib_header: bool) { + *self = Decompress::new(zlib_header); + } +} + +impl Error for DecompressError { + fn description(&self) -> &str { + "deflate decompression error" + } +} + +impl From for io::Error { + fn from(data: DecompressError) -> io::Error { + io::Error::new(io::ErrorKind::Other, data) + } +} + +impl fmt::Display for DecompressError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + self.description().fmt(f) + } +} + +impl Error for CompressError { + fn description(&self) -> &str { + "deflate compression error" + } +} + +impl From for io::Error { + fn from(data: CompressError) -> io::Error { + io::Error::new(io::ErrorKind::Other, data) + } +} + +impl fmt::Display for CompressError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + self.description().fmt(f) + } +} + +impl Direction for DirCompress { + unsafe fn destroy(stream: *mut ffi::mz_stream) -> c_int { + ffi::mz_deflateEnd(stream) + } +} +impl Direction for DirDecompress { + unsafe fn destroy(stream: *mut ffi::mz_stream) -> c_int { + ffi::mz_inflateEnd(stream) + } +} + +impl Drop for Stream { + fn drop(&mut self) { + unsafe { + let _ = D::destroy(&mut *self.stream_wrapper); + } + } +} + +#[cfg(test)] +mod tests { + use std::io::Write; + + use write; + use {Compression, Decompress, FlushDecompress}; + + #[cfg(feature = "zlib")] + use {Compress, FlushCompress}; + + #[test] + fn issue51() { + let data = vec![ + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0xb3, 0xc9, 0x28, 0xc9, + 0xcd, 0xb1, 0xe3, 0xe5, 0xb2, 0xc9, 0x48, 0x4d, 0x4c, 0xb1, 0xb3, 0x29, 0xc9, 0x2c, + 0xc9, 0x49, 0xb5, 0x33, 0x31, 0x30, 0x51, 0xf0, 0xcb, 0x2f, 0x51, 0x70, 0xcb, 0x2f, + 0xcd, 0x4b, 0xb1, 0xd1, 0x87, 0x08, 0xda, 0xe8, 0x83, 0x95, 0x00, 0x95, 0x26, 0xe5, + 0xa7, 0x54, 0x2a, 0x24, 0xa5, 0x27, 0xe7, 0xe7, 0xe4, 0x17, 0xd9, 0x2a, 0x95, 0x67, + 0x64, 0x96, 0xa4, 0x2a, 0x81, 0x8c, 0x48, 0x4e, 0xcd, 0x2b, 0x49, 0x2d, 0xb2, 0xb3, + 0xc9, 0x30, 0x44, 0x37, 0x01, 0x28, 0x62, 0xa3, 0x0f, 0x95, 0x06, 0xd9, 0x05, 0x54, + 0x04, 0xe5, 0xe5, 0xa5, 0x67, 0xe6, 0x55, 0xe8, 0x1b, 0xea, 0x99, 0xe9, 0x19, 0x21, + 0xab, 0xd0, 0x07, 0xd9, 0x01, 0x32, 0x53, 0x1f, 0xea, 0x3e, 0x00, 0x94, 0x85, 0xeb, + 0xe4, 0xa8, 0x00, 0x00, 0x00, + ]; + + let mut decoded = Vec::with_capacity(data.len() * 2); + + let mut d = Decompress::new(false); + // decompressed whole deflate stream + assert!( + d.decompress_vec(&data[10..], &mut decoded, FlushDecompress::Finish) + .is_ok() + ); + + // decompress data that has nothing to do with the deflate stream (this + // used to panic) + drop(d.decompress_vec(&[0], &mut decoded, FlushDecompress::None)); + } + + #[test] + fn reset() { + let string = "hello world".as_bytes(); + let mut zlib = Vec::new(); + let mut deflate = Vec::new(); + + let comp = Compression::default(); + write::ZlibEncoder::new(&mut zlib, comp) + .write_all(string) + .unwrap(); + write::DeflateEncoder::new(&mut deflate, comp) + .write_all(string) + .unwrap(); + + let mut dst = [0; 1024]; + let mut decoder = Decompress::new(true); + decoder + .decompress(&zlib, &mut dst, FlushDecompress::Finish) + 
.unwrap(); + assert_eq!(decoder.total_out(), string.len() as u64); + assert!(dst.starts_with(string)); + + decoder.reset(false); + decoder + .decompress(&deflate, &mut dst, FlushDecompress::Finish) + .unwrap(); + assert_eq!(decoder.total_out(), string.len() as u64); + assert!(dst.starts_with(string)); + } + + #[cfg(feature = "zlib")] + #[test] + fn set_dictionary_with_zlib_header() { + let string = "hello, hello!".as_bytes(); + let dictionary = "hello".as_bytes(); + + let mut encoded = Vec::with_capacity(1024); + + let mut encoder = Compress::new(Compression::default(), true); + + let dictionary_adler = encoder.set_dictionary(&dictionary).unwrap(); + + encoder + .compress_vec(string, &mut encoded, FlushCompress::Finish) + .unwrap(); + + assert_eq!(encoder.total_in(), string.len() as u64); + assert_eq!(encoder.total_out(), encoded.len() as u64); + + let mut decoder = Decompress::new(true); + let mut decoded = [0; 1024]; + let decompress_error = decoder + .decompress(&encoded, &mut decoded, FlushDecompress::Finish) + .expect_err("decompression should fail due to requiring a dictionary"); + + let required_adler = decompress_error.needs_dictionary() + .expect("the first call to decompress should indicate a dictionary is required along with the required Adler-32 checksum"); + + assert_eq!(required_adler, dictionary_adler, + "the Adler-32 checksum should match the value when the dictionary was set on the compressor"); + + let actual_adler = decoder.set_dictionary(&dictionary).unwrap(); + + assert_eq!(required_adler, actual_adler); + + // Decompress the rest of the input to the remainder of the output buffer + let total_in = decoder.total_in(); + let total_out = decoder.total_out(); + + let decompress_result = decoder.decompress( + &encoded[total_in as usize..], + &mut decoded[total_out as usize..], + FlushDecompress::Finish, + ); + assert!(decompress_result.is_ok()); + + assert_eq!(&decoded[..decoder.total_out() as usize], string); + } + + #[cfg(feature = "zlib")] + #[test] + fn set_dictionary_raw() { + let string = "hello, hello!".as_bytes(); + let dictionary = "hello".as_bytes(); + + let mut encoded = Vec::with_capacity(1024); + + let mut encoder = Compress::new(Compression::default(), false); + + encoder.set_dictionary(&dictionary).unwrap(); + + encoder + .compress_vec(string, &mut encoded, FlushCompress::Finish) + .unwrap(); + + assert_eq!(encoder.total_in(), string.len() as u64); + assert_eq!(encoder.total_out(), encoded.len() as u64); + + let mut decoder = Decompress::new(false); + + decoder.set_dictionary(&dictionary).unwrap(); + + let mut decoded = [0; 1024]; + let decompress_result = decoder.decompress(&encoded, &mut decoded, FlushDecompress::Finish); + + assert!(decompress_result.is_ok()); + + assert_eq!(&decoded[..decoder.total_out() as usize], string); + } + +} diff --git a/flate2/src/zio.rs b/flate2/src/zio.rs new file mode 100644 index 000000000..1222a6c3e --- /dev/null +++ b/flate2/src/zio.rs @@ -0,0 +1,290 @@ +use std::io; +use std::io::prelude::*; +use std::mem; + +use {Compress, Decompress, DecompressError, FlushCompress, FlushDecompress, Status}; + +#[derive(Debug)] +pub struct Writer { + obj: Option, + pub data: D, + buf: Vec, +} + +pub trait Ops { + type Flush: Flush; + fn total_in(&self) -> u64; + fn total_out(&self) -> u64; + fn run( + &mut self, + input: &[u8], + output: &mut [u8], + flush: Self::Flush, + ) -> Result; + fn run_vec( + &mut self, + input: &[u8], + output: &mut Vec, + flush: Self::Flush, + ) -> Result; +} + +impl Ops for Compress { + type Flush = 
FlushCompress; + fn total_in(&self) -> u64 { + self.total_in() + } + fn total_out(&self) -> u64 { + self.total_out() + } + fn run( + &mut self, + input: &[u8], + output: &mut [u8], + flush: FlushCompress, + ) -> Result { + Ok(self.compress(input, output, flush).unwrap()) + } + fn run_vec( + &mut self, + input: &[u8], + output: &mut Vec, + flush: FlushCompress, + ) -> Result { + Ok(self.compress_vec(input, output, flush).unwrap()) + } +} + +impl Ops for Decompress { + type Flush = FlushDecompress; + fn total_in(&self) -> u64 { + self.total_in() + } + fn total_out(&self) -> u64 { + self.total_out() + } + fn run( + &mut self, + input: &[u8], + output: &mut [u8], + flush: FlushDecompress, + ) -> Result { + self.decompress(input, output, flush) + } + fn run_vec( + &mut self, + input: &[u8], + output: &mut Vec, + flush: FlushDecompress, + ) -> Result { + self.decompress_vec(input, output, flush) + } +} + +pub trait Flush { + fn none() -> Self; + fn sync() -> Self; + fn finish() -> Self; +} + +impl Flush for FlushCompress { + fn none() -> Self { + FlushCompress::None + } + + fn sync() -> Self { + FlushCompress::Sync + } + + fn finish() -> Self { + FlushCompress::Finish + } +} + +impl Flush for FlushDecompress { + fn none() -> Self { + FlushDecompress::None + } + + fn sync() -> Self { + FlushDecompress::Sync + } + + fn finish() -> Self { + FlushDecompress::Finish + } +} + +pub fn read(obj: &mut R, data: &mut D, dst: &mut [u8]) -> io::Result +where + R: BufRead, + D: Ops, +{ + loop { + let (read, consumed, ret, eof); + { + let input = obj.fill_buf()?; + eof = input.is_empty(); + let before_out = data.total_out(); + let before_in = data.total_in(); + let flush = if eof { + D::Flush::finish() + } else { + D::Flush::none() + }; + ret = data.run(input, dst, flush); + read = (data.total_out() - before_out) as usize; + consumed = (data.total_in() - before_in) as usize; + } + obj.consume(consumed); + + match ret { + // If we haven't ready any data and we haven't hit EOF yet, + // then we need to keep asking for more data because if we + // return that 0 bytes of data have been read then it will + // be interpreted as EOF. + Ok(Status::Ok) | Ok(Status::BufError) if read == 0 && !eof && dst.len() > 0 => continue, + Ok(Status::Ok) | Ok(Status::BufError) | Ok(Status::StreamEnd) => return Ok(read), + + Err(..) => { + return Err(io::Error::new( + io::ErrorKind::InvalidInput, + "corrupt deflate stream", + )) + } + } + } +} + +impl Writer { + pub fn new(w: W, d: D) -> Writer { + Writer { + obj: Some(w), + data: d, + buf: Vec::with_capacity(32 * 1024), + } + } + + pub fn finish(&mut self) -> io::Result<()> { + loop { + self.dump()?; + + let before = self.data.total_out(); + self.data.run_vec(&[], &mut self.buf, D::Flush::finish())?; + if before == self.data.total_out() { + return Ok(()); + } + } + } + + pub fn replace(&mut self, w: W) -> W { + self.buf.truncate(0); + mem::replace(self.get_mut(), w) + } + + pub fn get_ref(&self) -> &W { + self.obj.as_ref().unwrap() + } + + pub fn get_mut(&mut self) -> &mut W { + self.obj.as_mut().unwrap() + } + + // Note that this should only be called if the outer object is just about + // to be consumed! + // + // (e.g. 
an implementation of `into_inner`) + pub fn take_inner(&mut self) -> W { + self.obj.take().unwrap() + } + + pub fn is_present(&self) -> bool { + self.obj.is_some() + } + + // Returns total written bytes and status of underlying codec + pub(crate) fn write_with_status(&mut self, buf: &[u8]) -> io::Result<(usize, Status)> { + // miniz isn't guaranteed to actually write any of the buffer provided, + // it may be in a flushing mode where it's just giving us data before + // we're actually giving it any data. We don't want to spuriously return + // `Ok(0)` when possible as it will cause calls to write_all() to fail. + // As a result we execute this in a loop to ensure that we try our + // darndest to write the data. + loop { + self.dump()?; + + let before_in = self.data.total_in(); + let ret = self.data.run_vec(buf, &mut self.buf, D::Flush::none()); + let written = (self.data.total_in() - before_in) as usize; + + let is_stream_end = match ret { + Ok(Status::StreamEnd) => true, + _ => false, + }; + + if buf.len() > 0 && written == 0 && ret.is_ok() && !is_stream_end { + continue; + } + return match ret { + Ok(st) => match st { + Status::Ok | Status::BufError | Status::StreamEnd => Ok((written, st)), + }, + Err(..) => Err(io::Error::new( + io::ErrorKind::InvalidInput, + "corrupt deflate stream", + )), + }; + } + } + + fn dump(&mut self) -> io::Result<()> { + // TODO: should manage this buffer not with `drain` but probably more of + // a deque-like strategy. + while self.buf.len() > 0 { + let n = try!(self.obj.as_mut().unwrap().write(&self.buf)); + if n == 0 { + return Err(io::ErrorKind::WriteZero.into()); + } + self.buf.drain(..n); + } + Ok(()) + } +} + +impl Write for Writer { + fn write(&mut self, buf: &[u8]) -> io::Result { + self.write_with_status(buf).map(|res| res.0) + } + + fn flush(&mut self) -> io::Result<()> { + self.data + .run_vec(&[], &mut self.buf, D::Flush::sync()) + .unwrap(); + + // Unfortunately miniz doesn't actually tell us when we're done with + // pulling out all the data from the internal stream. To remedy this we + // have to continually ask the stream for more memory until it doesn't + // give us a chunk of memory the same size as our own internal buffer, + // at which point we assume it's reached the end. + loop { + self.dump()?; + let before = self.data.total_out(); + self.data + .run_vec(&[], &mut self.buf, D::Flush::none()) + .unwrap(); + if before == self.data.total_out() { + break; + } + } + + self.obj.as_mut().unwrap().flush() + } +} + +impl Drop for Writer { + fn drop(&mut self) { + if self.obj.is_some() { + let _ = self.finish(); + } + } +} diff --git a/flate2/src/zlib/bufread.rs b/flate2/src/zlib/bufread.rs new file mode 100644 index 000000000..9556e2506 --- /dev/null +++ b/flate2/src/zlib/bufread.rs @@ -0,0 +1,258 @@ +use std::io::prelude::*; +use std::io; +use std::mem; + +#[cfg(feature = "tokio")] +use futures::Poll; +#[cfg(feature = "tokio")] +use tokio_io::{AsyncRead, AsyncWrite}; + +use zio; +use {Compress, Decompress}; + +/// A ZLIB encoder, or compressor. +/// +/// This structure implements a [`BufRead`] interface and will read uncompressed +/// data from an underlying stream and emit a stream of compressed data. 
+/// +/// [`BufRead`]: https://doc.rust-lang.org/std/io/trait.BufRead.html +/// +/// # Examples +/// +/// ``` +/// use std::io::prelude::*; +/// use flate2::Compression; +/// use flate2::bufread::ZlibEncoder; +/// use std::fs::File; +/// use std::io::BufReader; +/// +/// // Use a buffered file to compress contents into a Vec +/// +/// # fn open_hello_world() -> std::io::Result> { +/// let f = File::open("examples/hello_world.txt")?; +/// let b = BufReader::new(f); +/// let mut z = ZlibEncoder::new(b, Compression::fast()); +/// let mut buffer = Vec::new(); +/// z.read_to_end(&mut buffer)?; +/// # Ok(buffer) +/// # } +/// ``` +#[derive(Debug)] +pub struct ZlibEncoder { + obj: R, + data: Compress, +} + +impl ZlibEncoder { + /// Creates a new encoder which will read uncompressed data from the given + /// stream and emit the compressed stream. + pub fn new(r: R, level: ::Compression) -> ZlibEncoder { + ZlibEncoder { + obj: r, + data: Compress::new(level, true), + } + } +} + +pub fn reset_encoder_data(zlib: &mut ZlibEncoder) { + zlib.data.reset() +} + +impl ZlibEncoder { + /// Resets the state of this encoder entirely, swapping out the input + /// stream for another. + /// + /// This function will reset the internal state of this encoder and replace + /// the input stream with the one provided, returning the previous input + /// stream. Future data read from this encoder will be the compressed + /// version of `r`'s data. + pub fn reset(&mut self, r: R) -> R { + reset_encoder_data(self); + mem::replace(&mut self.obj, r) + } + + /// Acquires a reference to the underlying reader + pub fn get_ref(&self) -> &R { + &self.obj + } + + /// Acquires a mutable reference to the underlying stream + /// + /// Note that mutation of the stream may result in surprising results if + /// this encoder is continued to be used. + pub fn get_mut(&mut self) -> &mut R { + &mut self.obj + } + + /// Consumes this encoder, returning the underlying reader. + pub fn into_inner(self) -> R { + self.obj + } + + /// Returns the number of bytes that have been read into this compressor. + /// + /// Note that not all bytes read from the underlying object may be accounted + /// for, there may still be some active buffering. + pub fn total_in(&self) -> u64 { + self.data.total_in() + } + + /// Returns the number of bytes that the compressor has produced. + /// + /// Note that not all bytes may have been read yet, some may still be + /// buffered. + pub fn total_out(&self) -> u64 { + self.data.total_out() + } +} + +impl Read for ZlibEncoder { + fn read(&mut self, buf: &mut [u8]) -> io::Result { + zio::read(&mut self.obj, &mut self.data, buf) + } +} + +#[cfg(feature = "tokio")] +impl AsyncRead for ZlibEncoder {} + +impl Write for ZlibEncoder { + fn write(&mut self, buf: &[u8]) -> io::Result { + self.get_mut().write(buf) + } + + fn flush(&mut self) -> io::Result<()> { + self.get_mut().flush() + } +} + +#[cfg(feature = "tokio")] +impl AsyncWrite for ZlibEncoder { + fn shutdown(&mut self) -> Poll<(), io::Error> { + self.get_mut().shutdown() + } +} + +/// A ZLIB decoder, or decompressor. +/// +/// This structure implements a [`BufRead`] interface and takes a stream of +/// compressed data as input, providing the decompressed data when read from. 
+/// +/// [`BufRead`]: https://doc.rust-lang.org/std/io/trait.BufRead.html +/// +/// # Examples +/// +/// ``` +/// use std::io::prelude::*; +/// use std::io; +/// # use flate2::Compression; +/// # use flate2::write::ZlibEncoder; +/// use flate2::bufread::ZlibDecoder; +/// +/// # fn main() { +/// # let mut e = ZlibEncoder::new(Vec::new(), Compression::default()); +/// # e.write_all(b"Hello World").unwrap(); +/// # let bytes = e.finish().unwrap(); +/// # println!("{}", decode_bufreader(bytes).unwrap()); +/// # } +/// # +/// // Uncompresses a Zlib Encoded vector of bytes and returns a string or error +/// // Here &[u8] implements BufRead +/// +/// fn decode_bufreader(bytes: Vec) -> io::Result { +/// let mut z = ZlibDecoder::new(&bytes[..]); +/// let mut s = String::new(); +/// z.read_to_string(&mut s)?; +/// Ok(s) +/// } +/// ``` +#[derive(Debug)] +pub struct ZlibDecoder { + obj: R, + data: Decompress, +} + +impl ZlibDecoder { + /// Creates a new decoder which will decompress data read from the given + /// stream. + pub fn new(r: R) -> ZlibDecoder { + ZlibDecoder { + obj: r, + data: Decompress::new(true), + } + } +} + +pub fn reset_decoder_data(zlib: &mut ZlibDecoder) { + zlib.data = Decompress::new(true); +} + +impl ZlibDecoder { + /// Resets the state of this decoder entirely, swapping out the input + /// stream for another. + /// + /// This will reset the internal state of this decoder and replace the + /// input stream with the one provided, returning the previous input + /// stream. Future data read from this decoder will be the decompressed + /// version of `r`'s data. + pub fn reset(&mut self, r: R) -> R { + reset_decoder_data(self); + mem::replace(&mut self.obj, r) + } + + /// Acquires a reference to the underlying stream + pub fn get_ref(&self) -> &R { + &self.obj + } + + /// Acquires a mutable reference to the underlying stream + /// + /// Note that mutation of the stream may result in surprising results if + /// this encoder is continued to be used. + pub fn get_mut(&mut self) -> &mut R { + &mut self.obj + } + + /// Consumes this decoder, returning the underlying reader. + pub fn into_inner(self) -> R { + self.obj + } + + /// Returns the number of bytes that the decompressor has consumed. + /// + /// Note that this will likely be smaller than what the decompressor + /// actually read from the underlying stream due to buffering. + pub fn total_in(&self) -> u64 { + self.data.total_in() + } + + /// Returns the number of bytes that the decompressor has produced. 
+ pub fn total_out(&self) -> u64 { + self.data.total_out() + } +} + +impl Read for ZlibDecoder { + fn read(&mut self, into: &mut [u8]) -> io::Result { + zio::read(&mut self.obj, &mut self.data, into) + } +} + +#[cfg(feature = "tokio")] +impl AsyncRead for ZlibDecoder {} + +impl Write for ZlibDecoder { + fn write(&mut self, buf: &[u8]) -> io::Result { + self.get_mut().write(buf) + } + + fn flush(&mut self) -> io::Result<()> { + self.get_mut().flush() + } +} + +#[cfg(feature = "tokio")] +impl AsyncWrite for ZlibDecoder { + fn shutdown(&mut self) -> Poll<(), io::Error> { + self.get_mut().shutdown() + } +} diff --git a/flate2/src/zlib/mod.rs b/flate2/src/zlib/mod.rs new file mode 100644 index 000000000..c729df1c2 --- /dev/null +++ b/flate2/src/zlib/mod.rs @@ -0,0 +1,159 @@ +pub mod bufread; +pub mod read; +pub mod write; + +#[cfg(test)] +mod tests { + use std::io::prelude::*; + use std::io; + + use rand::{thread_rng, Rng}; + + use zlib::{read, write}; + use Compression; + + #[test] + fn roundtrip() { + let mut real = Vec::new(); + let mut w = write::ZlibEncoder::new(Vec::new(), Compression::default()); + let v = ::random_bytes().take(1024).collect::>(); + for _ in 0..200 { + let to_write = &v[..thread_rng().gen_range(0, v.len())]; + real.extend(to_write.iter().map(|x| *x)); + w.write_all(to_write).unwrap(); + } + let result = w.finish().unwrap(); + let mut r = read::ZlibDecoder::new(&result[..]); + let mut ret = Vec::new(); + r.read_to_end(&mut ret).unwrap(); + assert!(ret == real); + } + + #[test] + fn drop_writes() { + let mut data = Vec::new(); + write::ZlibEncoder::new(&mut data, Compression::default()) + .write_all(b"foo") + .unwrap(); + let mut r = read::ZlibDecoder::new(&data[..]); + let mut ret = Vec::new(); + r.read_to_end(&mut ret).unwrap(); + assert!(ret == b"foo"); + } + + #[test] + fn total_in() { + let mut real = Vec::new(); + let mut w = write::ZlibEncoder::new(Vec::new(), Compression::default()); + let v = ::random_bytes().take(1024).collect::>(); + for _ in 0..200 { + let to_write = &v[..thread_rng().gen_range(0, v.len())]; + real.extend(to_write.iter().map(|x| *x)); + w.write_all(to_write).unwrap(); + } + let mut result = w.finish().unwrap(); + + let result_len = result.len(); + + for _ in 0..200 { + result.extend(v.iter().map(|x| *x)); + } + + let mut r = read::ZlibDecoder::new(&result[..]); + let mut ret = Vec::new(); + r.read_to_end(&mut ret).unwrap(); + assert!(ret == real); + assert_eq!(r.total_in(), result_len as u64); + } + + #[test] + fn roundtrip2() { + let v = ::random_bytes().take(1024 * 1024).collect::>(); + let mut r = read::ZlibDecoder::new(read::ZlibEncoder::new(&v[..], Compression::default())); + let mut ret = Vec::new(); + r.read_to_end(&mut ret).unwrap(); + assert_eq!(ret, v); + } + + #[test] + fn roundtrip3() { + let v = ::random_bytes().take(1024 * 1024).collect::>(); + let mut w = + write::ZlibEncoder::new(write::ZlibDecoder::new(Vec::new()), Compression::default()); + w.write_all(&v).unwrap(); + let w = w.finish().unwrap().finish().unwrap(); + assert!(w == v); + } + + #[test] + fn reset_decoder() { + let v = ::random_bytes().take(1024 * 1024).collect::>(); + let mut w = write::ZlibEncoder::new(Vec::new(), Compression::default()); + w.write_all(&v).unwrap(); + let data = w.finish().unwrap(); + + { + let (mut a, mut b, mut c) = (Vec::new(), Vec::new(), Vec::new()); + let mut r = read::ZlibDecoder::new(&data[..]); + r.read_to_end(&mut a).unwrap(); + r.reset(&data); + r.read_to_end(&mut b).unwrap(); + + let mut r = read::ZlibDecoder::new(&data[..]); + 
r.read_to_end(&mut c).unwrap(); + assert!(a == b && b == c && c == v); + } + + { + let mut w = write::ZlibDecoder::new(Vec::new()); + w.write_all(&data).unwrap(); + let a = w.reset(Vec::new()).unwrap(); + w.write_all(&data).unwrap(); + let b = w.finish().unwrap(); + + let mut w = write::ZlibDecoder::new(Vec::new()); + w.write_all(&data).unwrap(); + let c = w.finish().unwrap(); + assert!(a == b && b == c && c == v); + } + } + + #[test] + fn bad_input() { + // regress tests: previously caused a panic on drop + let mut out: Vec = Vec::new(); + let data: Vec = (0..255).cycle().take(1024).collect(); + let mut w = write::ZlibDecoder::new(&mut out); + match w.write_all(&data[..]) { + Ok(_) => panic!("Expected an error to be returned!"), + Err(e) => assert_eq!(e.kind(), io::ErrorKind::InvalidInput), + } + } + + #[test] + fn qc_reader() { + ::quickcheck::quickcheck(test as fn(_) -> _); + + fn test(v: Vec) -> bool { + let mut r = + read::ZlibDecoder::new(read::ZlibEncoder::new(&v[..], Compression::default())); + let mut v2 = Vec::new(); + r.read_to_end(&mut v2).unwrap(); + v == v2 + } + } + + #[test] + fn qc_writer() { + ::quickcheck::quickcheck(test as fn(_) -> _); + + fn test(v: Vec) -> bool { + let mut w = write::ZlibEncoder::new( + write::ZlibDecoder::new(Vec::new()), + Compression::default(), + ); + w.write_all(&v).unwrap(); + v == w.finish().unwrap().finish().unwrap() + } + } +} diff --git a/flate2/src/zlib/read.rs b/flate2/src/zlib/read.rs new file mode 100644 index 000000000..4b6bec8dd --- /dev/null +++ b/flate2/src/zlib/read.rs @@ -0,0 +1,265 @@ +use std::io::prelude::*; +use std::io; + +#[cfg(feature = "tokio")] +use futures::Poll; +#[cfg(feature = "tokio")] +use tokio_io::{AsyncRead, AsyncWrite}; + +use bufreader::BufReader; +use super::bufread; + +/// A ZLIB encoder, or compressor. +/// +/// This structure implements a [`Read`] interface and will read uncompressed +/// data from an underlying stream and emit a stream of compressed data. +/// +/// [`Read`]: https://doc.rust-lang.org/std/io/trait.Read.html +/// +/// # Examples +/// +/// ``` +/// use std::io::prelude::*; +/// use flate2::Compression; +/// use flate2::read::ZlibEncoder; +/// use std::fs::File; +/// +/// // Open example file and compress the contents using Read interface +/// +/// # fn open_hello_world() -> std::io::Result> { +/// let f = File::open("examples/hello_world.txt")?; +/// let mut z = ZlibEncoder::new(f, Compression::fast()); +/// let mut buffer = [0;50]; +/// let byte_count = z.read(&mut buffer)?; +/// # Ok(buffer[0..byte_count].to_vec()) +/// # } +/// ``` +#[derive(Debug)] +pub struct ZlibEncoder { + inner: bufread::ZlibEncoder>, +} + +impl ZlibEncoder { + /// Creates a new encoder which will read uncompressed data from the given + /// stream and emit the compressed stream. + pub fn new(r: R, level: ::Compression) -> ZlibEncoder { + ZlibEncoder { + inner: bufread::ZlibEncoder::new(BufReader::new(r), level), + } + } +} + +impl ZlibEncoder { + /// Resets the state of this encoder entirely, swapping out the input + /// stream for another. + /// + /// This function will reset the internal state of this encoder and replace + /// the input stream with the one provided, returning the previous input + /// stream. Future data read from this encoder will be the compressed + /// version of `r`'s data. + /// + /// Note that there may be currently buffered data when this function is + /// called, and in that case the buffered data is discarded. 
+ pub fn reset(&mut self, r: R) -> R { + super::bufread::reset_encoder_data(&mut self.inner); + self.inner.get_mut().reset(r) + } + + /// Acquires a reference to the underlying stream + pub fn get_ref(&self) -> &R { + self.inner.get_ref().get_ref() + } + + /// Acquires a mutable reference to the underlying stream + /// + /// Note that mutation of the stream may result in surprising results if + /// this encoder is continued to be used. + pub fn get_mut(&mut self) -> &mut R { + self.inner.get_mut().get_mut() + } + + /// Consumes this encoder, returning the underlying reader. + /// + /// Note that there may be buffered bytes which are not re-acquired as part + /// of this transition. It's recommended to only call this function after + /// EOF has been reached. + pub fn into_inner(self) -> R { + self.inner.into_inner().into_inner() + } + + /// Returns the number of bytes that have been read into this compressor. + /// + /// Note that not all bytes read from the underlying object may be accounted + /// for, there may still be some active buffering. + pub fn total_in(&self) -> u64 { + self.inner.total_in() + } + + /// Returns the number of bytes that the compressor has produced. + /// + /// Note that not all bytes may have been read yet, some may still be + /// buffered. + pub fn total_out(&self) -> u64 { + self.inner.total_out() + } +} + +impl Read for ZlibEncoder { + fn read(&mut self, buf: &mut [u8]) -> io::Result { + self.inner.read(buf) + } +} + +#[cfg(feature = "tokio")] +impl AsyncRead for ZlibEncoder {} + +impl Write for ZlibEncoder { + fn write(&mut self, buf: &[u8]) -> io::Result { + self.get_mut().write(buf) + } + + fn flush(&mut self) -> io::Result<()> { + self.get_mut().flush() + } +} + +#[cfg(feature = "tokio")] +impl AsyncWrite for ZlibEncoder { + fn shutdown(&mut self) -> Poll<(), io::Error> { + self.get_mut().shutdown() + } +} + +/// A ZLIB decoder, or decompressor. +/// +/// This structure implements a [`Read`] interface and takes a stream of +/// compressed data as input, providing the decompressed data when read from. +/// +/// [`Read`]: https://doc.rust-lang.org/std/io/trait.Read.html +/// +/// # Examples +/// +/// ``` +/// use std::io::prelude::*; +/// use std::io; +/// # use flate2::Compression; +/// # use flate2::write::ZlibEncoder; +/// use flate2::read::ZlibDecoder; +/// +/// # fn main() { +/// # let mut e = ZlibEncoder::new(Vec::new(), Compression::default()); +/// # e.write_all(b"Hello World").unwrap(); +/// # let bytes = e.finish().unwrap(); +/// # println!("{}", decode_reader(bytes).unwrap()); +/// # } +/// # +/// // Uncompresses a Zlib Encoded vector of bytes and returns a string or error +/// // Here &[u8] implements Read +/// +/// fn decode_reader(bytes: Vec) -> io::Result { +/// let mut z = ZlibDecoder::new(&bytes[..]); +/// let mut s = String::new(); +/// z.read_to_string(&mut s)?; +/// Ok(s) +/// } +/// ``` +#[derive(Debug)] +pub struct ZlibDecoder { + inner: bufread::ZlibDecoder>, +} + +impl ZlibDecoder { + /// Creates a new decoder which will decompress data read from the given + /// stream. + pub fn new(r: R) -> ZlibDecoder { + ZlibDecoder::new_with_buf(r, vec![0; 32 * 1024]) + } + + /// Same as `new`, but the intermediate buffer for data is specified. + /// + /// Note that the specified buffer will only be used up to its current + /// length. The buffer's capacity will also not grow over time. 
+ pub fn new_with_buf(r: R, buf: Vec) -> ZlibDecoder { + ZlibDecoder { + inner: bufread::ZlibDecoder::new(BufReader::with_buf(buf, r)), + } + } +} + +impl ZlibDecoder { + /// Resets the state of this decoder entirely, swapping out the input + /// stream for another. + /// + /// This will reset the internal state of this decoder and replace the + /// input stream with the one provided, returning the previous input + /// stream. Future data read from this decoder will be the decompressed + /// version of `r`'s data. + /// + /// Note that there may be currently buffered data when this function is + /// called, and in that case the buffered data is discarded. + pub fn reset(&mut self, r: R) -> R { + super::bufread::reset_decoder_data(&mut self.inner); + self.inner.get_mut().reset(r) + } + + /// Acquires a reference to the underlying stream + pub fn get_ref(&self) -> &R { + self.inner.get_ref().get_ref() + } + + /// Acquires a mutable reference to the underlying stream + /// + /// Note that mutation of the stream may result in surprising results if + /// this encoder is continued to be used. + pub fn get_mut(&mut self) -> &mut R { + self.inner.get_mut().get_mut() + } + + /// Consumes this decoder, returning the underlying reader. + /// + /// Note that there may be buffered bytes which are not re-acquired as part + /// of this transition. It's recommended to only call this function after + /// EOF has been reached. + pub fn into_inner(self) -> R { + self.inner.into_inner().into_inner() + } + + /// Returns the number of bytes that the decompressor has consumed. + /// + /// Note that this will likely be smaller than what the decompressor + /// actually read from the underlying stream due to buffering. + pub fn total_in(&self) -> u64 { + self.inner.total_in() + } + + /// Returns the number of bytes that the decompressor has produced. + pub fn total_out(&self) -> u64 { + self.inner.total_out() + } +} + +impl Read for ZlibDecoder { + fn read(&mut self, into: &mut [u8]) -> io::Result { + self.inner.read(into) + } +} + +#[cfg(feature = "tokio")] +impl AsyncRead for ZlibDecoder {} + +impl Write for ZlibDecoder { + fn write(&mut self, buf: &[u8]) -> io::Result { + self.get_mut().write(buf) + } + + fn flush(&mut self) -> io::Result<()> { + self.get_mut().flush() + } +} + +#[cfg(feature = "tokio")] +impl AsyncWrite for ZlibDecoder { + fn shutdown(&mut self) -> Poll<(), io::Error> { + self.get_mut().shutdown() + } +} diff --git a/flate2/src/zlib/write.rs b/flate2/src/zlib/write.rs new file mode 100644 index 000000000..1ea9887b5 --- /dev/null +++ b/flate2/src/zlib/write.rs @@ -0,0 +1,348 @@ +use std::io::prelude::*; +use std::io; + +#[cfg(feature = "tokio")] +use futures::Poll; +#[cfg(feature = "tokio")] +use tokio_io::{AsyncRead, AsyncWrite}; + +use zio; +use {Compress, Decompress}; + +/// A ZLIB encoder, or compressor. +/// +/// This structure implements a [`Write`] interface and takes a stream of +/// uncompressed data, writing the compressed data to the wrapped writer. 
+/// +/// [`Write`]: https://doc.rust-lang.org/std/io/trait.Write.html +/// +/// # Examples +/// +/// ``` +/// use std::io::prelude::*; +/// use flate2::Compression; +/// use flate2::write::ZlibEncoder; +/// +/// // Vec implements Write, assigning the compressed bytes of sample string +/// +/// # fn zlib_encoding() -> std::io::Result<()> { +/// let mut e = ZlibEncoder::new(Vec::new(), Compression::default()); +/// e.write_all(b"Hello World")?; +/// let compressed = e.finish()?; +/// # Ok(()) +/// # } +/// ``` +#[derive(Debug)] +pub struct ZlibEncoder { + inner: zio::Writer, +} + +impl ZlibEncoder { + /// Creates a new encoder which will write compressed data to the stream + /// given at the given compression level. + /// + /// When this encoder is dropped or unwrapped the final pieces of data will + /// be flushed. + pub fn new(w: W, level: ::Compression) -> ZlibEncoder { + ZlibEncoder { + inner: zio::Writer::new(w, Compress::new(level, true)), + } + } + + /// Acquires a reference to the underlying writer. + pub fn get_ref(&self) -> &W { + self.inner.get_ref() + } + + /// Acquires a mutable reference to the underlying writer. + /// + /// Note that mutating the output/input state of the stream may corrupt this + /// object, so care must be taken when using this method. + pub fn get_mut(&mut self) -> &mut W { + self.inner.get_mut() + } + + /// Resets the state of this encoder entirely, swapping out the output + /// stream for another. + /// + /// This function will finish encoding the current stream into the current + /// output stream before swapping out the two output streams. + /// + /// After the current stream has been finished, this will reset the internal + /// state of this encoder and replace the output stream with the one + /// provided, returning the previous output stream. Future data written to + /// this encoder will be the compressed into the stream `w` provided. + /// + /// # Errors + /// + /// This function will perform I/O to complete this stream, and any I/O + /// errors which occur will be returned from this function. + pub fn reset(&mut self, w: W) -> io::Result { + self.inner.finish()?; + self.inner.data.reset(); + Ok(self.inner.replace(w)) + } + + /// Attempt to finish this output stream, writing out final chunks of data. + /// + /// Note that this function can only be used once data has finished being + /// written to the output stream. After this function is called then further + /// calls to `write` may result in a panic. + /// + /// # Panics + /// + /// Attempts to write data to this stream may result in a panic after this + /// function is called. + /// + /// # Errors + /// + /// This function will perform I/O to complete this stream, and any I/O + /// errors which occur will be returned from this function. + pub fn try_finish(&mut self) -> io::Result<()> { + self.inner.finish() + } + + /// Consumes this encoder, flushing the output stream. + /// + /// This will flush the underlying data stream, close off the compressed + /// stream and, if successful, return the contained writer. + /// + /// Note that this function may not be suitable to call in a situation where + /// the underlying stream is an asynchronous I/O stream. To finish a stream + /// the `try_finish` (or `shutdown`) method should be used instead. To + /// re-acquire ownership of a stream it is safe to call this method after + /// `try_finish` or `shutdown` has returned `Ok`. 
+ /// + /// # Errors + /// + /// This function will perform I/O to complete this stream, and any I/O + /// errors which occur will be returned from this function. + pub fn finish(mut self) -> io::Result { + self.inner.finish()?; + Ok(self.inner.take_inner()) + } + + /// Consumes this encoder, flushing the output stream. + /// + /// This will flush the underlying data stream and then return the contained + /// writer if the flush succeeded. + /// The compressed stream will not closed but only flushed. This + /// means that obtained byte array can by extended by another deflated + /// stream. To close the stream add the two bytes 0x3 and 0x0. + /// + /// # Errors + /// + /// This function will perform I/O to complete this stream, and any I/O + /// errors which occur will be returned from this function. + pub fn flush_finish(mut self) -> io::Result { + self.inner.flush()?; + Ok(self.inner.take_inner()) + } + + /// Returns the number of bytes that have been written to this compresor. + /// + /// Note that not all bytes written to this object may be accounted for, + /// there may still be some active buffering. + pub fn total_in(&self) -> u64 { + self.inner.data.total_in() + } + + /// Returns the number of bytes that the compressor has produced. + /// + /// Note that not all bytes may have been written yet, some may still be + /// buffered. + pub fn total_out(&self) -> u64 { + self.inner.data.total_out() + } +} + +impl Write for ZlibEncoder { + fn write(&mut self, buf: &[u8]) -> io::Result { + self.inner.write(buf) + } + + fn flush(&mut self) -> io::Result<()> { + self.inner.flush() + } +} + +#[cfg(feature = "tokio")] +impl AsyncWrite for ZlibEncoder { + fn shutdown(&mut self) -> Poll<(), io::Error> { + try_nb!(self.try_finish()); + self.get_mut().shutdown() + } +} + +impl Read for ZlibEncoder { + fn read(&mut self, buf: &mut [u8]) -> io::Result { + self.get_mut().read(buf) + } +} + +#[cfg(feature = "tokio")] +impl AsyncRead for ZlibEncoder {} + +/// A ZLIB decoder, or decompressor. +/// +/// This structure implements a [`Write`] and will emit a stream of decompressed +/// data when fed a stream of compressed data. +/// +/// [`Write`]: https://doc.rust-lang.org/std/io/trait.Write.html +/// +/// # Examples +/// +/// ``` +/// use std::io::prelude::*; +/// use std::io; +/// # use flate2::Compression; +/// # use flate2::write::ZlibEncoder; +/// use flate2::write::ZlibDecoder; +/// +/// # fn main() { +/// # let mut e = ZlibEncoder::new(Vec::new(), Compression::default()); +/// # e.write_all(b"Hello World").unwrap(); +/// # let bytes = e.finish().unwrap(); +/// # println!("{}", decode_reader(bytes).unwrap()); +/// # } +/// # +/// // Uncompresses a Zlib Encoded vector of bytes and returns a string or error +/// // Here Vec implements Write +/// +/// fn decode_reader(bytes: Vec) -> io::Result { +/// let mut writer = Vec::new(); +/// let mut z = ZlibDecoder::new(writer); +/// z.write_all(&bytes[..])?; +/// writer = z.finish()?; +/// let return_string = String::from_utf8(writer).expect("String parsing error"); +/// Ok(return_string) +/// } +/// ``` +#[derive(Debug)] +pub struct ZlibDecoder { + inner: zio::Writer, +} + +impl ZlibDecoder { + /// Creates a new decoder which will write uncompressed data to the stream. + /// + /// When this decoder is dropped or unwrapped the final pieces of data will + /// be flushed. + pub fn new(w: W) -> ZlibDecoder { + ZlibDecoder { + inner: zio::Writer::new(w, Decompress::new(true)), + } + } + + /// Acquires a reference to the underlying writer. 
+ pub fn get_ref(&self) -> &W { + self.inner.get_ref() + } + + /// Acquires a mutable reference to the underlying writer. + /// + /// Note that mutating the output/input state of the stream may corrupt this + /// object, so care must be taken when using this method. + pub fn get_mut(&mut self) -> &mut W { + self.inner.get_mut() + } + + /// Resets the state of this decoder entirely, swapping out the output + /// stream for another. + /// + /// This will reset the internal state of this decoder and replace the + /// output stream with the one provided, returning the previous output + /// stream. Future data written to this decoder will be decompressed into + /// the output stream `w`. + /// + /// # Errors + /// + /// This function will perform I/O to complete this stream, and any I/O + /// errors which occur will be returned from this function. + pub fn reset(&mut self, w: W) -> io::Result { + self.inner.finish()?; + self.inner.data = Decompress::new(true); + Ok(self.inner.replace(w)) + } + + /// Attempt to finish this output stream, writing out final chunks of data. + /// + /// Note that this function can only be used once data has finished being + /// written to the output stream. After this function is called then further + /// calls to `write` may result in a panic. + /// + /// # Panics + /// + /// Attempts to write data to this stream may result in a panic after this + /// function is called. + /// + /// # Errors + /// + /// This function will perform I/O to complete this stream, and any I/O + /// errors which occur will be returned from this function. + pub fn try_finish(&mut self) -> io::Result<()> { + self.inner.finish() + } + + /// Consumes this encoder, flushing the output stream. + /// + /// This will flush the underlying data stream and then return the contained + /// writer if the flush succeeded. + /// + /// Note that this function may not be suitable to call in a situation where + /// the underlying stream is an asynchronous I/O stream. To finish a stream + /// the `try_finish` (or `shutdown`) method should be used instead. To + /// re-acquire ownership of a stream it is safe to call this method after + /// `try_finish` or `shutdown` has returned `Ok`. + /// + /// # Errors + /// + /// This function will perform I/O to complete this stream, and any I/O + /// errors which occur will be returned from this function. + pub fn finish(mut self) -> io::Result { + self.inner.finish()?; + Ok(self.inner.take_inner()) + } + + /// Returns the number of bytes that the decompressor has consumed for + /// decompression. + /// + /// Note that this will likely be smaller than the number of bytes + /// successfully written to this stream due to internal buffering. + pub fn total_in(&self) -> u64 { + self.inner.data.total_in() + } + + /// Returns the number of bytes that the decompressor has written to its + /// output stream. 
+ pub fn total_out(&self) -> u64 { + self.inner.data.total_out() + } +} + +impl Write for ZlibDecoder { + fn write(&mut self, buf: &[u8]) -> io::Result { + self.inner.write(buf) + } + + fn flush(&mut self) -> io::Result<()> { + self.inner.flush() + } +} + +#[cfg(feature = "tokio")] +impl AsyncWrite for ZlibDecoder { + fn shutdown(&mut self) -> Poll<(), io::Error> { + try_nb!(self.inner.finish()); + self.inner.get_mut().shutdown() + } +} + +impl Read for ZlibDecoder { + fn read(&mut self, buf: &mut [u8]) -> io::Result { + self.inner.get_mut().read(buf) + } +} + +#[cfg(feature = "tokio")] +impl AsyncRead for ZlibDecoder {} diff --git a/flate2/tests/corrupt-file.gz b/flate2/tests/corrupt-file.gz new file mode 100644 index 0000000000000000000000000000000000000000..159333b032751969263eeb152e7e94782ab2cd39 GIT binary patch literal 7128 zcmeH}3`T8Bd^E~*{sR93UWTs`tIH^whgwc1l%>Czl5mp~aWyj6i8WXLt$movJ+e~8!|2g5T~UafXuTIJu# zoS~3m6PA$4Q;uFDI8m7>q+XZ&N^Gjzdn>6v%OFv?Go}67YjH_mY)Uz#0=%;*6Dqa? z?t{I-75TdNvQ}L~-ONn8cU`PXs{TyB{yv!8>iJf!Mw89_!sc@KR`fbp5@X0!>iZ}E zYU7oHq?2~Nqnbz5>o@hi;+AB%A0ZXE_WY~7dEqw&4lqG@d->r>wd4=; z_frA~ie+z1GGz$%NHbFmqzZ;Zze*!6^g;4%q)jqg+Pi9@i+61o#ucZn*BT|$yF=?n zI(#41LL^L85hEST#S{Ul8y5^~C_dtxGu%Vf%`QdZ1+nISvfpcr5x%!uN!98-In?; z3fFx^x(n8gMKWrpOZArTc=w&9$UJ!Y0p|Nj7B<3J2=mswps}|MMm&;^SQEYYn5xD!=rxs7sS`MxHsb6xrd7*UOdwC zan!!BbV|cCVea9d$fT_s^Zg&A|HtV6az^bG`rbnOYI*L7wz+)T)WX#YfJ93Jj5fl& zS0bt*EfBpC7)nF13u-+2opxJ zmoG1tJWIpk1HB&#KMXZrn329jVpn`EB~2e|6Nc?ymZ-pjzQU6Fx|7jFWYleZ@8s)! z7K$HI5y>px!wb)u6~hrYar4SjhrDM^;^t4#2QygOu~w2a zJR7mAb!$X;S_vCs!AwmB+|5PvS(igNfgT(oYb~V4E0S0xf7U9UjrMt0 z%+}w!%GY%tt?CUA&@(&Y+TPCMW5Z2DglRF}sgkg0cI@edoUkuPmzIy=_Xvv_qvKi`y5Us;0F?Br zGFciYoHX26hX2)0;V}h&W4f@hyo8>z()gpJtp^|RMn|m|@fnH9ZGgnACk%3N=>_J) z6nZx|pL4>qEeRgKmO+(Yh-~TeJ*4%1gogF|u7EoJJ5k z9abxxtAP}LUe`ddQGx_NJ7suR@}1HdsW{B=i$RRXBZ`&Prw(iegqQbUy6?ZyD%)x$ zQax!hmr0f|W=FafQP{Pz0>E3X-!kk`BlEx86iHDSWk_nAiHpRH&DEC;cGR&h>-Tcf z?VYuDYYCbkt>kt-Hd@hNYB%b}-`L)ljeetz(ZRb& zvVUcpY(xMGf6@G%mQ|E^B@}A33uh}r>8%cmyDcY&3`MiCN`o z*nz~wRPFAf9NVoa` zMUCVE$tLl!(Py!({W$r}&{IdAEI-1I1Duqrs<$SZ(=L?_(%VNrU0%Hu;if+CU|_VD z&4`;IZ|3v~36GK2Ze|^6TXcEYtTGBP2zebNKO~v_0+;=%U*-7%>GA}4CLmZ^1Dj>R zcDGz2d3g)}Y84ubi-8gLTDt$Fp-GojwmHuLlGHC4Z=uar6jHlNzW|V`hGqLSA8&yK zj}{>ZRBZbXU;eEUN@k#5ESk3=9-T(ZRk_^U@9CCa6QegMF+kI`3MX@z36@39ET(8| zaCntuc*d3l+T6G#RyM%HI2~$QoBh-rl~v}=g&gXOv+qvDaZFfcqT>MfkRK`z?N#2c zPnXN(&0K05QDOAk97_rWL@}&`$bXV+rl?ZHw@{x0P$F4rMImDlV!Mc2fk#8SfzSwD0G;ShnlK?Ei-PTybHhVDw=-qP&3!7Jcd1*ws`YANBfnu$!J@G)) z%l;wHSObFhxB(hSlN*Aq#U)p_>TLtPtbUN8(`Q3|)t@=Pf|>d&SK;dVc4KW`z%>lh z1D6f`dNgWW|O1iRD_&F;;S|n_|+d$TH3pxmfwsY7*fG)I_&$#jK72o)^3$V z<%X5(^fw$Wm58T5<4U(8-=smD3Y6D5(QL&@g&P3SpOYq}>Cu5ystYC6zciA1W6`U0 zqaR38UH1q6DCN`GX$BgE!}x(HM-ry*i(k#Tg}h$eH`0`k<qq${E>m!BjEW zTl=KdJ&a`eI&Ds3^&UpYe`GnAd@FF7vU8c^!~_->k-#;9@9kZ zbhBxQ#s3d0ACFaE$t~pu7bn29;^L7?#6iB{J65VS*7@>#HS(Oq1S=Qr!HjC)vrU3s zdADdKB(}#TVl~C(g(~uxcrKC{7o0xOdvGK8{YOs|8=x&D`m+#ulC5&DU(+PqRqS-} z!ByF14mlMEbds>!glW4Q6$lb9i&s(fK$~Ax-C0v9&&KjhaMBSPnz71IAnl0>XRFPt z)zx4?q3)2_b8d?28haZ54wLg70B;TJ^xWN;-KEd76#&|vB?8kS4K6-cDO+!tYmTbl z_B=_GW{tX;$+*Ln#{BL-1NjBT0=Q(F6~>O$&K!nGCCV@tG_NoNzrb^PK%MBZv@}PS zvrZJPcXhVx6f2!DEfuH^R8IqlsPohoC%unUFMQ!GR85Ax?#cseqDNy}6-L3>KwcS* ze57)@uOhaH0&n@>=SrYombbDxSWGBz^UoP*Vp?4S1`6UZ_8mmYyc@}76j@^vo$pm0 zb*bZI^ogAE+v5NTJHMoBD#-iJ5yhA`Yw^>bx)`PU>Lf%v`+f`dAoN-FC+-!`ysW~t z`^c?gB{N_BDd6Vy&vvS*U3Zgg4;hgT+!2)%Hip6SX7BydSJ%}E$^G5u=yX!Gji;H| zir)rN)h++kVmlvpAON;tdoS)Myur}@gFv2vOz7y{q~%QbL+VIype{pkAsG$rL6@nv zUGvT9a>qYuAMSe}y?hs|!LKnO(zfF`EPXS*7NJNJ1rAK?kjgSvN&{`0I7x1f^@KN& 
zVT}ez)@)!?ZucD1gYpaBG;=oNmjJGJ&X764I>u+iEesH-zW#0r)EzrBlYrqIo3|dD z_?(E5SQ@Y%!<%A zTw67eF_U;Ye5iWun6BtXDi-3Aw-fO@2F&TS_Rpl9_Y*Ciikp9Vs~&UYHHbPz0;#Iy zK?=Wh!l}FySFiSaKVP{MuYt3fWY{pSxru!2Pf~1GZq(6g!=3B4p&6%kdTS^- z)(?Na{IMI(QOze5IW;#?`W_?qQ!L5$Ijik*`_uAC%x=a`0Wld11rt#b^2~ zhrY-ycsx*m4!vM4!=fU-Y%o9)KY>W*=t9*oOL4JZcP;7IPN zv+_PYO>Z7|7ew}4SdjXd?=+iM;WdkKjXYDbsKLIugwQT-&X&h%mS8uPE@r4XOK@O& zYZJlIdPzXcS-to0=ury*F+n~4RY$$hG96B?Y?{q$#O)$61Sl-U7^!inPi>o`Uh8Qm zy{}Kb2@VP$EKc2{D;~jKxFYt3FcrH**2dXiCY7WxfiL56#CW_ zfDjntb2K%%(Hcmj!|h<%L3G~NGXHbi&3~&c&*lYu^KCgYaYQgwJ77$f9+OW5*h)-x ziUknR!aIHf#4j8kbB=|?8WkRdw$AuwS`IKImREzK-w6z_qR71rQP@4i8r*x@3eN#E1?i7+pm1^ud3V*FKH**wPf8@W}cz z)~~Wk=M6n<(Wex?6MX+!f__J&A^IsWWa7(|%^2q*U8(A52|B7LP;=LBbtY)$K2<;R zj#USgu~n+t<_n6EKbdRIr7O2e?X=*zyf;HO#nb06JBQCAXcN%1t%(@eO~z z*;cm$Q5FIe3>~(cL=}=VFM_|E%M5qH3E~eqIH)41%{KBJS&1J+$#6*EDC%tEl8=_@ zvWzM;r@|qWw@8TF+8cT!1qGYsKB);p@>}0Bf^PWBUo}sjJyJFbL=fQikll@tYWX~Q z@rAE*X3RFRuAGjv4SBMb{QbQL3W0kl`Z7Rsv4SwdBoRfs6rCjEb+@T%kE_;yV%WU< zo#P#vSv`y8Aq z8*X>ztqX);Ffmt z1o*B~%hjB%bU!?lGB1HH$CN*84aM~6c_p<6s9k0OQr{OGd zNxyZQ;+_QjCnb)vrVH`~K78Is3|rG!ph)gStC>K;Ovn=<8y*llPUG$f*eVgv(v$Bt z7IM4M@(MEf0dyN#r14C)Bj_QC2wrE@Jsq;Mu3Nl+(cUD4W|YJ6T>O*DxP?9OWF^(#!Ob+-EC%bHeisX2K`|~;B>p^I`r`OuxmZ975&80 zCceZo?YLoWvISWZxi+?-Z9t0yOE>XWCQiv&rS~+B{m`bnh+lf0QX-A;G9fM}7azE` zE?ZG4+h6uqY~+@UtVX6&gH2Ijjni`{7M*X6_Nl3~5=&YSbR6 zzvmcFD6N56{3LOdxg^7~LgRg>l;*9)xAX)$eF|?QXs-*Zf#yBBOWLOS8z;vR+shu9 zB<_qLG-h~9$SJ>06J63(zL$o~A z>zAX+)N;!t6r>WV1>9VUI!kd}O}aRC{_&u8-6x?gQ9Byw>+ajtvEr_w8>_g_G>0P> zvZVUggA}WZ-vxU0Z3dTlywC>OYIGU52PTQW6Ose!BvVvZ#;ND;tKI$XtK~!TA zm71(3{tX|x(k-M;7Fvx|$7pnXABLlO+~u3EbNrvTb1Ur1iE^7!l;2R3*5bFl#Ta^| zDF*$OifQakLq7ilRUJ$F%q=J+>MtQLw0~i&6FD#sT+!uo>)lELS>@!ypZd{t(wK$R zJPrI-*wVk$NZ3x+w%Jx9!os{o`zLxHj+m2)kJ>yODHD|71ifL%i1+_N)!d>_wYgnr z5DSbfK%m6t0w1!qZ+q_sslBpIlrK3PunDEhPaVDkR@)$%Lc`!M$Su}6Qql`Trp*C(~rdCId~1;;f?28aCcFVxBT|gpd(4CR{1^lVJ_WQ{|8`uM_fH)`BKH)i18)lVjMZnVD?|)u`HlQ2u z?zne}#@nT0El~fAqS=k)omrtV6OP;i-!qAjk%i(7rIW4N*RY8 zv+xu4N?D*qbsf!;Xwm4M+B`Lm^a;m5sWmk3_%-fny>M0>_XBHsWFV@=DE03P#J@dd zwSL^YEsSYfdm4ZsG32i~xf|45QD+Ux;unIol(f_clHUyj_?mwW;yZ@#H4 zXz$K1RyzXfoN%mvSxnP~5&Un)(iEb0$=-51EI@PXxj_vzflSsEv0ht#qA@VZ_sy!# zdsY734fuyd-f}zGudC7xMIh< zKhwwdw7E%xNskh~(&rw7t9Cx=jm)bLWa-#h>l?qz>SOWM?A#_+SsAbOx>TQ{Fm;BH zlN$10+O(#c9J2e0Y&`bcmKVP)?N+w}WE)0rJTWB3`Og1*P+g&%b$83cWWK;mOq|5E z_WtaiPks#ille$Zx!BBe`$TnqebIq@;n4Ej1lcY&=iQ0$EZ|k-81`{+l~7BQP(@qQ#Bg-$mdb)J>nF% zUOVO}tdb1;#3-iVIp-209%K|l@ugP=n~cE^OFTlrTOa0KqWa4`d&as3Vrtha{xDjW LalHo10|5UIqgUPt literal 0 HcmV?d00001 diff --git a/flate2/tests/early-flush.rs b/flate2/tests/early-flush.rs new file mode 100644 index 000000000..537e9e9ac --- /dev/null +++ b/flate2/tests/early-flush.rs @@ -0,0 +1,20 @@ +extern crate flate2; + +use std::io::{Read, Write}; + +use flate2::write::GzEncoder; +use flate2::read::GzDecoder; + +#[test] +fn smoke() { + let mut w = GzEncoder::new(Vec::new(), flate2::Compression::default()); + w.flush().unwrap(); + w.write_all(b"hello").unwrap(); + + let bytes = w.finish().unwrap(); + + let mut r = GzDecoder::new(&bytes[..]); + let mut s = String::new(); + r.read_to_string(&mut s).unwrap(); + assert_eq!(s, "hello"); +} diff --git a/flate2/tests/empty-read.rs b/flate2/tests/empty-read.rs new file mode 100644 index 000000000..755123833 --- /dev/null +++ b/flate2/tests/empty-read.rs @@ -0,0 +1,82 @@ +extern crate flate2; + +use std::io::{Read, Write}; + +#[test] +fn deflate_decoder_empty_read() { + let original: &[u8] = b"Lorem ipsum dolor sit amet."; + let mut encoder = + 
flate2::write::DeflateEncoder::new(Vec::new(), flate2::Compression::default()); + encoder.write_all(original).unwrap(); + let encoded: Vec = encoder.finish().unwrap(); + let mut decoder = flate2::read::DeflateDecoder::new(encoded.as_slice()); + assert_eq!(decoder.read(&mut []).unwrap(), 0); + let mut decoded = Vec::new(); + decoder.read_to_end(&mut decoded).unwrap(); + assert_eq!(decoded.as_slice(), original); +} + +#[test] +fn deflate_encoder_empty_read() { + let original: &[u8] = b"Lorem ipsum dolor sit amet."; + let mut encoder = flate2::read::DeflateEncoder::new(original, flate2::Compression::default()); + assert_eq!(encoder.read(&mut []).unwrap(), 0); + let mut encoded = Vec::new(); + encoder.read_to_end(&mut encoded).unwrap(); + let mut decoder = flate2::read::DeflateDecoder::new(encoded.as_slice()); + let mut decoded = Vec::new(); + decoder.read_to_end(&mut decoded).unwrap(); + assert_eq!(decoded.as_slice(), original); +} + +#[test] +fn gzip_decoder_empty_read() { + let original: &[u8] = b"Lorem ipsum dolor sit amet."; + let mut encoder = flate2::write::GzEncoder::new(Vec::new(), flate2::Compression::default()); + encoder.write_all(original).unwrap(); + let encoded: Vec = encoder.finish().unwrap(); + let mut decoder = flate2::read::GzDecoder::new(encoded.as_slice()); + assert_eq!(decoder.read(&mut []).unwrap(), 0); + let mut decoded = Vec::new(); + decoder.read_to_end(&mut decoded).unwrap(); + assert_eq!(decoded.as_slice(), original); +} + +#[test] +fn gzip_encoder_empty_read() { + let original: &[u8] = b"Lorem ipsum dolor sit amet."; + let mut encoder = flate2::read::GzEncoder::new(original, flate2::Compression::default()); + assert_eq!(encoder.read(&mut []).unwrap(), 0); + let mut encoded = Vec::new(); + encoder.read_to_end(&mut encoded).unwrap(); + let mut decoder = flate2::read::GzDecoder::new(encoded.as_slice()); + let mut decoded = Vec::new(); + decoder.read_to_end(&mut decoded).unwrap(); + assert_eq!(decoded.as_slice(), original); +} + +#[test] +fn zlib_decoder_empty_read() { + let original: &[u8] = b"Lorem ipsum dolor sit amet."; + let mut encoder = flate2::write::ZlibEncoder::new(Vec::new(), flate2::Compression::default()); + encoder.write_all(original).unwrap(); + let encoded: Vec = encoder.finish().unwrap(); + let mut decoder = flate2::read::ZlibDecoder::new(encoded.as_slice()); + assert_eq!(decoder.read(&mut []).unwrap(), 0); + let mut decoded = Vec::new(); + decoder.read_to_end(&mut decoded).unwrap(); + assert_eq!(decoded.as_slice(), original); +} + +#[test] +fn zlib_encoder_empty_read() { + let original: &[u8] = b"Lorem ipsum dolor sit amet."; + let mut encoder = flate2::read::ZlibEncoder::new(original, flate2::Compression::default()); + assert_eq!(encoder.read(&mut []).unwrap(), 0); + let mut encoded = Vec::new(); + encoder.read_to_end(&mut encoded).unwrap(); + let mut decoder = flate2::read::ZlibDecoder::new(encoded.as_slice()); + let mut decoded = Vec::new(); + decoder.read_to_end(&mut decoded).unwrap(); + assert_eq!(decoded.as_slice(), original); +} diff --git a/flate2/tests/good-file.gz b/flate2/tests/good-file.gz new file mode 100644 index 0000000000000000000000000000000000000000..f968689cce01efba445a1d33eb4a21aabf551981 GIT binary patch literal 6766 zcmV-!8j4BrBZ80f#~(j`{PFzy`#=8t>(`(E{Q2we|MR%S-so-X+CFp76$KCKQf!GXY>F2 zD7LX!-%n+!rR~wD@zCr~WwE8M?%wpBUG!87Q!T|XS<_Q2xD>Z;T}vLL+EaVj%xht?Phd?WN4ohCb7(_4d@2#?goIcInlhx?iJLZM1G5 z^J(Qi8ArT|KdpW?9dlhe?WeVCTzmc0ID}^RlT)9sKDKVCzs6}gVO@#tpp9$X+G8E2 z18aVbUi_!(AA0g^NS~TI+S*%fPdgjhzN^bzID8*cO6xo 
zx;mTTqqCLC8rph{<+tiuDI4I%L_VhS*YimrGY07zf z9D4QbT^7aD)APhvuS0j&w$<{CJn>#O5xB}&&&VUMdlO-prgYke6?>XI%H`4K&cNJ>&HrnOxD##XsYQLme(Z+gAEB zZaCD{aDBPk<2M|1DY|rhV?2K2q3)&YnhMLu&xZ6wBOcszhn^b}J}rSk9XgH2b3by? zBJhx7O}(7k(PApL&Mmp8Pmb2Z>lKJT6Hs&m`1j-&Vk`1ti!kWIDo=3Qx<;NdZDYnZ zsQM9n5<0s9Q3xHJ zQs2A*QKn8?S6r58?v04G46hLMiAu`Z(;h<5aKsHIwgbo>A831Gw1ddjbe^>hNUQV{ z0JFm|I@l@Wxge2g14;$KwRpmE34)L-v!sM7mk$EDf?3&;vBh@aGL)^0P6q?_6bZVr z8l)`@Sc~O(t1{I9P#qwf(@DsbDBHs45f(C8K=BZG>aMhKnR=M|273h9I+fsTy%=zW z3XsVY`8&Qe%VH5p52M22iduxjItaZ1+Q~z48S+7efoyy0Tgla?YghpJmb2F6YGI0) z35?cJ9Hw4Eb^TYx2Qum~wGga78gIj*(0PKnSIE6e)&mPAG+EShPR zm#ar*#X7OVv#)mvQ~0qaVwd%GuVE{MN=4CFxz^cHDvcwuH)MeJMRYWtePd>f5{Nyv zuEy+IxAv>};6&|>8euH4Fso!yhM;lZww_On)0usr&*i|JqWYd1N&R}x|K@Fj0(j&6Pjs~=g39k)8moqNBR&VN> zg3)!|-Pm7`7)Vanae?N-3oR~4UF+t41c8gE!qr3)x&|Ep4iltaB{)PWz%k-;C&r_E zz%iM|5#kPt@kyCNic5$Nw8w5bqagJPV8G~;XirLCgT14r7jO2TqoxHA3J(EqO+p6{ zwd^(r5tV!{hvh~jfY7YtP@qAm2psJy>0I&MU;qb<>tS=LYUu@>mOwsiE~JpiMpxtX zhs~ST&E7g5aL8P{^UWFbg4nhf%y&o@T{PC~SPCJ~yyFA@;<5Z>01*Ki$Li`n6fVHJ~#^16Xg+NO9pl zi|@{}H|H4fDXFA7`>WAJua4lr(`If>KWe&W1tHEb{X_>*q^lwi$!(1(-qdpqVy^SO zDq=_;C=UtV7v^YrpuFR{$^7!OAqW++O0U#I{UVm?A;`7Dl@2H2ifl(1*5H1mJv7ud zkrv!9)B&F4)=TSHiOh~D<`8+2Ef_q_eT{f=h6$NMc2!u#$`?aS3jSF{NOwI`@KWQV zck;>xX0--s(;BP~`^ewK5}l{wDjXpv{1#y7V@df6DlMPIU`pvS#g}0E0KO03FX_c# z4d*lt-wV@I>o-W^@I9DbP*KXZWK8F(xH!=_^s%#~w*92Q2Fi8)0nczzaWi!$wHDfvC>2H*7pmTF14XR&Ll#goYe@ zVLf3pcd4OyTru9tei3~Jhm!=f`lg09z)?|)R!~oNxH@|T7nOmYti33>KzSwovROj9 z5^*wB{a4vCLFD8jTnyVtNS|Ccd8!U7^LDw4^gf^-w%E>AG_q_QU$zvK_8}6_q_7MZ|JB)0iel1T;&GZ zXv=JIy5l~$cT4hD=BBI=5bpeBtlx16l#OUwww0j1_`!${$)IBQYXpphRAA%>>FB~TFDU6wZrw=T$YB0(6Ak< z*Y1`>fE%}F88E1VmE4repX<>Ci@K(Z`AY_&bYX@?k}V(iwR8UX!?9Hy+zzv zKD%B_A0beLNHXTmjLH}QzNM?%$?nI3-(~Cs=y>l6ttqq=cD79L<$R6h*NBA_16s~9 zV12~9DF#N1_#iZ8OP6z^NLW>$7E(a}aSc+2O{9hrkPpXiipu%T1m{hGLfi#V?lPN1g z0;ysl)U}`FeIZ3xU~-;3mqJDW*AefIHy+N{m8?+e@-rFti|PUOx;MGUVf+Tz1gaCf zhwv+Qve!;|^yB-4YArz@ou5di3Z~fNa_0xiVpatY!8xyYDMJC&ji0)ZoKR8Zu(IB` z8D3MS^X!cq=g1Z2Mpf^|jfWVRsO+|G;LI8!A}W=&UQj0#264CA3+ZEoNEcPM_2!mx zZYqt4@3#62=_4vB%Kx7BCjXKkL=4G#%j{o%xT#%Q_|H0JFl$jb!8cNXyqdPs1@ zZSS}4GPx?lRrXQ*2G1J#U<>+Jmy0kT9+h)T>X#bbP`ob09!uiaVN-yV z7f3c)(taNj&}bd<087@_;}cJuLr7=I`C7Y<7eJ?Ynv9Q_3Y8A_+2MQPzLn4+g%2Jr zDS=6(31M8uBV&Y3;jc({J7f=EdC2t2Z|@h8o#E9aTph?y*uQZTeXRp=PPsfBbl?dSrU(keh*MZCUaZL{y!sXg6 z*pJCBVfQ8Lej#1p2Iv*7_O00O9k~;{s&$>3eG~%lTxDr8wu9(O83RY-eLLYX`61y0 z51zX!QBi3qQ_DI4m6;iZ?M~S|fo+N=0r|l>y+XiI=dvw+BVA7ZLY<}1r3oFN7^ifk zm9V^!0rX{}B1uAQmcYC>gyF)^$3g}e)hG~s;2a?Ww5x$vHrU=m0_a731w@}tR+%g_GsW1++%_cEeT= zqytn4b#aRw%I^@8JakdvvBUU+(vd+(d*LPgy|Z?zQ|0|z?b|FwA) z(0=_(AXOuc;k#dUIVZY@GK=+#$+!WzY}2oeFLWrF)Ezf7zU)%61DqOii1B5Yi-L5N z|80Ds(=B^NSkK<3_%5#3I-1<7XKYUL5*-HZ}DHNTp+V9 zL`yi;f9liW%nf2W)d1=REDN%h(NB0#InfYR>oB4>I>mAQu)Z>2r=7kjTJ=NvVsUD_LiNtC_(An< ziRyHA59e2Iydito?nn89dbb31eDq5%bsXZI)`{HJX#Mxo}iZUP;N}Uh9(+6vn)an!K(P>%%#z76L zpZdS_BTBv?vl5`B6#%3CI#Pbv!qufL>`akg^F3%WC-w4#Y|OsmIJ{?!^K0 zGqw_adMd&)rR>kHrLbFH3I!lYpJ4icIeRwm%h=JPM2Gbxe+9*&GBO!(IqmzN<01j+ zJ$b)JKQZbotSn<)%f^)^a;+!lD}GSWqA8h@@GBDzf#^iRp?lX>=x&A%>4)t_Pf#Ys z7oM~i(Kq>~s8NePTrYV^{ZsBb7(;lwFG^oYSZPo`{dVz3u@23=CG%A781#X;DfKo9s zn+ilceV*IOCan%#o6Smqa{3Zo((ta-0ytO9vn)(01q2tQ1C=UU&j!B^eMKn;q|6S# z*Jw9uCE7^{0cn%Sd33v&0-HsjG!R@@_w!j^BtX;oGYT*& z5g!78`a{+Yl^pae^^X;Jw~+}wOa0=!l-n#dnco?sX@I(88wu+2_l<39mnYR1_HU+J zv^Ux23+kI>bRz)!p?s>rDB>m2^Yp{`R9}d3U##X3KGheCIWC;}H@=VIm6#sr`?r!G z^~nmf?5`vfwHG{jfAv1Pj>Ht!3&`eCd@(%ccKa399x}Z!pmXC!{@w$kwAK9^*D$~m zK=#%Do$?QOFF)K2gK^Wb6yqJ?PBS5N25YUmdfE;N6DAqkA%K??~rxi7I zY&~I}`bdEKNHL%jjc!#mMaNMC68k%<>;k8ejsGGzL*#GnDI5Q^GH8_MlB1KUTiCfb 
z=Ij!jm1Nh`niwO;5T~QwH$9>eLO`Y!@&>OA6~L_HUNM=-d!hOzq@$z`zKtyOOH4%l zMVoy_R{BenFqnPO`6Ty8Yr4jqi&me^Z$`ZgP;aa?rOc$GZc;M(+F|{rnygh1>-mu=dkun`I`J&a{27punmq@o% z)_0g;#z2iTP%}>bUv3mmCryO$3?A0O2t8WHARPmpVWc5uO@C162db>BmyeJvD}sDwJ;s?8fJJ#RCuEF)B1HnUwo>zppkqG7Y<2Upfbk;goepGC#RdjcPeQpUkgRXE8#+tg>v$ zkfM^2vovz0eWpZbj5>M@%pT5nvO0GE599oJ&PwzxZ7{X zU-(M7LA=bf^ICfp4&?M+ZrGG;s!Uy@m;G5ADJP{WzfFK%f>@M+=*MmoV2vorNc%qi z_5c`=?J>{DlV_aQM*y`nCm%ht`f-d*5lU!yeFu7_qEPDcR2k1*Idgs5kpjuH6hMWF z)|i3@TVflN%`1i%wV|U1Zn>J4KDfgi*!i{51ZfX4RM$^^gLMaEUR6$wPt7 zO6G$aX?=MJ0VRel`yHYT4;dg+l=bQiQ>cFT9pj9Hq300&G3z&#V?Umi{!ycEuJGX7 zM~E$_Nvd??@#K9uz7GrOu3g)ihnqE!U+P}A)G`nr~JGKvJcIDQ-c)@e95=As|J z9e=D9jmURz$KU#hF)%7(emnlCs2TI4SZ~yL3#Hz;tK0Dx{6*R-7?<@%jmLL= zKq+>i)9Dl%{4@Y)A(1!_rd<8h|D_|Nw#dYxj3>Qh3X#PKH>f|vJ9N&xmOkGusz9`OQi`~$iCmD%JQcoG6z$Yx?k3)Q% zA##`!RVDuzSt{>6I;G019v}N)dUQ$|{B_TQOt_>V9ZQP-PP|CVnvOL1pE&{UpT9JD zJRAOk6#!4{rmrXUuMV~%qJgl}@u%O~qlCijz9*zd$toe;ll7%R(D;+dSogz@7JtN` zJd$61$46(>>`}RX_}=*gk(g#At`6PXh}2N2IDa7Usu&NSxA@`1^#b?eQA*|S2Hsx3 z6bvNY5@v3*U%$YFJ=HW>uZk&@l_f$pzjcFVEs#$M2wzyAwMMZ}$8nqeO4>q$H1Pa3 z`(^c^s*Vf%QW+IJJKDKc%eT154YHuFCSG3S8Y}?{D<^lD^c&TK1;mK1 io::Result> { + let mut v = Vec::new(); + let f = File::open(path_compressed)?; + GzDecoder::new(f).read_to_end(&mut v)?; + Ok(v) +} + +// Tries to extract path into memory (decompressing all members in case +// of a multi member .gz file). +fn extract_file_multi(path_compressed: &Path) -> io::Result> { + let mut v = Vec::new(); + let f = File::open(path_compressed)?; + MultiGzDecoder::new(f).read_to_end(&mut v)?; + Ok(v) +} + +#[test] +fn empty_error_once() { + let data: &[u8] = &[]; + let cbjson = GzDecoder::new(data); + let reader = BufReader::new(cbjson); + let mut stream = reader.lines(); + assert!(stream.next().unwrap().is_err()); + assert!(stream.next().is_none()); +} diff --git a/flate2/tests/multi.gz b/flate2/tests/multi.gz new file mode 100644 index 0000000000000000000000000000000000000000..cabc89630fe00d6a611e16fdad958ecfd479cd8d GIT binary patch literal 53 zcmb2|=3qED)ij)e+55DPmfk}q2CdCr$JrPd803L6C%`h=e!l0() + .take(1024 * 1024) + .collect::>(); + let mut core = Core::new().unwrap(); + let listener = TcpListener::bind("127.0.0.1:0").unwrap(); + let addr = listener.local_addr().unwrap(); + let v2 = v.clone(); + let t = thread::spawn(move || { + let a = listener.accept().unwrap().0; + let b = a.try_clone().unwrap(); + + let mut v3 = v2.clone(); + let t = thread::spawn(move || { + let mut b = read::DeflateDecoder::new(b); + let mut buf = [0; 1024]; + while v3.len() > 0 { + let n = b.read(&mut buf).unwrap(); + for (actual, expected) in buf[..n].iter().zip(&v3) { + assert_eq!(*actual, *expected); + } + v3.drain(..n); + } + + assert_eq!(b.read(&mut buf).unwrap(), 0); + }); + + let mut a = write::ZlibEncoder::new(a, Compression::default()); + a.write_all(&v2).unwrap(); + a.finish().unwrap().shutdown(Shutdown::Write).unwrap(); + + t.join().unwrap(); + }); + + let handle = core.handle(); + let stream = TcpStream::connect(&addr, &handle); + let copy = stream + .and_then(|s| { + let (a, b) = s.split(); + let a = read::ZlibDecoder::new(a); + let b = write::DeflateEncoder::new(b, Compression::default()); + copy(a, b) + }) + .then(|result| { + let (amt, _a, b) = result.unwrap(); + assert_eq!(amt, v.len() as u64); + shutdown(b).map(|_| ()) + }); + + core.run(copy).unwrap(); + t.join().unwrap(); +} diff --git a/flate2/tests/zero-write.rs b/flate2/tests/zero-write.rs new file mode 100644 index 000000000..f0db86cb8 --- /dev/null +++ b/flate2/tests/zero-write.rs @@ -0,0 +1,8 @@ 
+extern crate flate2; + +#[test] +fn zero_write_is_error() { + let mut buf = [0u8]; + let writer = flate2::write::DeflateEncoder::new(&mut buf[..], flate2::Compression::default()); + assert!(writer.finish().is_err()); +} diff --git a/fnv/.cargo-checksum.json b/fnv/.cargo-checksum.json new file mode 100644 index 000000000..19ad74848 --- /dev/null +++ b/fnv/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{},"package":"2fad85553e09a6f881f739c29f0b00b0f01357c743266d478b68951ce23285f3"} \ No newline at end of file diff --git a/fnv/Cargo.toml b/fnv/Cargo.toml new file mode 100644 index 000000000..115779985 --- /dev/null +++ b/fnv/Cargo.toml @@ -0,0 +1,25 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "fnv" +version = "1.0.6" +authors = ["Alex Crichton "] +description = "Fowler–Noll–Vo hash function" +documentation = "https://doc.servo.org/fnv/" +readme = "README.md" +license = "Apache-2.0 / MIT" +repository = "https://github.com/servo/rust-fnv" + +[lib] +name = "fnv" +path = "lib.rs" diff --git a/fnv/LICENSE-APACHE b/fnv/LICENSE-APACHE new file mode 100644 index 000000000..16fe87b06 --- /dev/null +++ b/fnv/LICENSE-APACHE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/fnv/LICENSE-MIT b/fnv/LICENSE-MIT new file mode 100644 index 000000000..bc976a272 --- /dev/null +++ b/fnv/LICENSE-MIT @@ -0,0 +1,25 @@ +Copyright (c) 2017 Contributors + +Permission is hereby granted, free of charge, to any +person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without +limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software +is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions +of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/fnv/README.md b/fnv/README.md new file mode 100644 index 000000000..6a4c4aeeb --- /dev/null +++ b/fnv/README.md @@ -0,0 +1,81 @@ +# rust-fnv + +An implementation of the [Fowler–Noll–Vo hash function][chongo]. + +### [Read the documentation](https://doc.servo.org/fnv/) + + +## About + +The FNV hash function is a custom `Hasher` implementation that is more +efficient for smaller hash keys. + +[The Rust FAQ states that][faq] while the default `Hasher` implementation, +SipHash, is good in many cases, it is notably slower than other algorithms +with short keys, such as when you have a map of integers to other values. +In cases like these, [FNV is demonstrably faster][graphs]. + +Its disadvantages are that it performs badly on larger inputs, and +provides no protection against collision attacks, where a malicious user +can craft specific keys designed to slow a hasher down. Thus, it is +important to profile your program to ensure that you are using small hash +keys, and be certain that your program could not be exposed to malicious +inputs (including being a networked server). + +The Rust compiler itself uses FNV, as it is not worried about +denial-of-service attacks, and can assume that its inputs are going to be +small—a perfect use case for FNV. + + +## Usage + +To include this crate in your program, add the following to your `Cargo.toml`: + +```toml +[dependencies] +fnv = "1.0.3" +``` + + +## Using FNV in a HashMap + +The `FnvHashMap` type alias is the easiest way to use the standard library’s +`HashMap` with FNV. + +```rust +use fnv::FnvHashMap; + +let mut map = FnvHashMap::default(); +map.insert(1, "one"); +map.insert(2, "two"); + +map = FnvHashMap::with_capacity_and_hasher(10, Default::default()); +map.insert(1, "one"); +map.insert(2, "two"); +``` + +Note, the standard library’s `HashMap::new` and `HashMap::with_capacity` +are only implemented for the `RandomState` hasher, so using `Default` to +get the hasher is the next best option. + + +## Using FNV in a HashSet + +Similarly, `FnvHashSet` is a type alias for the standard library’s `HashSet` +with FNV. + +```rust +use fnv::FnvHashSet; + +let mut set = FnvHashSet::default(); +set.insert(1); +set.insert(2); + +set = FnvHashSet::with_capacity_and_hasher(10, Default::default()); +set.insert(1); +set.insert(2); +``` + +[chongo]: http://www.isthe.com/chongo/tech/comp/fnv/index.html +[faq]: https://www.rust-lang.org/en-US/faq.html#why-are-rusts-hashmaps-slow +[graphs]: http://cglab.ca/~abeinges/blah/hash-rs/ diff --git a/fnv/lib.rs b/fnv/lib.rs new file mode 100644 index 000000000..eaf3d44a3 --- /dev/null +++ b/fnv/lib.rs @@ -0,0 +1,349 @@ +//! An implementation of the [Fowler–Noll–Vo hash function][chongo]. +//! +//! ## About +//! +//! The FNV hash function is a custom `Hasher` implementation that is more +//! efficient for smaller hash keys. +//! +//! [The Rust FAQ states that][faq] while the default `Hasher` implementation, +//! SipHash, is good in many cases, it is notably slower than other algorithms +//! with short keys, such as when you have a map of integers to other values. +//! In cases like these, [FNV is demonstrably faster][graphs]. +//! +//! Its disadvantages are that it performs badly on larger inputs, and +//! 
provides no protection against collision attacks, where a malicious user +//! can craft specific keys designed to slow a hasher down. Thus, it is +//! important to profile your program to ensure that you are using small hash +//! keys, and be certain that your program could not be exposed to malicious +//! inputs (including being a networked server). +//! +//! The Rust compiler itself uses FNV, as it is not worried about +//! denial-of-service attacks, and can assume that its inputs are going to be +//! small—a perfect use case for FNV. +//! +//! +//! ## Using FNV in a `HashMap` +//! +//! The `FnvHashMap` type alias is the easiest way to use the standard library’s +//! `HashMap` with FNV. +//! +//! ```rust +//! use fnv::FnvHashMap; +//! +//! let mut map = FnvHashMap::default(); +//! map.insert(1, "one"); +//! map.insert(2, "two"); +//! +//! map = FnvHashMap::with_capacity_and_hasher(10, Default::default()); +//! map.insert(1, "one"); +//! map.insert(2, "two"); +//! ``` +//! +//! Note, the standard library’s `HashMap::new` and `HashMap::with_capacity` +//! are only implemented for the `RandomState` hasher, so using `Default` to +//! get the hasher is the next best option. +//! +//! ## Using FNV in a `HashSet` +//! +//! Similarly, `FnvHashSet` is a type alias for the standard library’s `HashSet` +//! with FNV. +//! +//! ```rust +//! use fnv::FnvHashSet; +//! +//! let mut set = FnvHashSet::default(); +//! set.insert(1); +//! set.insert(2); +//! +//! set = FnvHashSet::with_capacity_and_hasher(10, Default::default()); +//! set.insert(1); +//! set.insert(2); +//! ``` +//! +//! [chongo]: http://www.isthe.com/chongo/tech/comp/fnv/index.html +//! [faq]: https://www.rust-lang.org/en-US/faq.html#why-are-rusts-hashmaps-slow +//! [graphs]: http://cglab.ca/~abeinges/blah/hash-rs/ + + +use std::default::Default; +use std::hash::{Hasher, BuildHasherDefault}; +use std::collections::{HashMap, HashSet}; + +/// An implementation of the Fowler–Noll–Vo hash function. +/// +/// See the [crate documentation](index.html) for more details. +#[allow(missing_copy_implementations)] +pub struct FnvHasher(u64); + +impl Default for FnvHasher { + + #[inline] + fn default() -> FnvHasher { + FnvHasher(0xcbf29ce484222325) + } +} + +impl FnvHasher { + /// Create an FNV hasher starting with a state corresponding + /// to the hash `key`. + #[inline] + pub fn with_key(key: u64) -> FnvHasher { + FnvHasher(key) + } +} + +impl Hasher for FnvHasher { + #[inline] + fn finish(&self) -> u64 { + self.0 + } + + #[inline] + fn write(&mut self, bytes: &[u8]) { + let FnvHasher(mut hash) = *self; + + for byte in bytes.iter() { + hash = hash ^ (*byte as u64); + hash = hash.wrapping_mul(0x100000001b3); + } + + *self = FnvHasher(hash); + } +} + +/// A builder for default FNV hashers. +pub type FnvBuildHasher = BuildHasherDefault; + +/// A `HashMap` using a default FNV hasher. +pub type FnvHashMap = HashMap; + +/// A `HashSet` using a default FNV hasher. 
+pub type FnvHashSet = HashSet; + + +#[cfg(test)] +mod test { + use super::*; + use std::hash::Hasher; + + fn fnv1a(bytes: &[u8]) -> u64 { + let mut hasher = FnvHasher::default(); + hasher.write(bytes); + hasher.finish() + } + + fn repeat_10(bytes: &[u8]) -> Vec { + (0..10).flat_map(|_| bytes.iter().cloned()).collect() + } + + fn repeat_500(bytes: &[u8]) -> Vec { + (0..500).flat_map(|_| bytes.iter().cloned()).collect() + } + + #[test] + fn basic_tests() { + assert_eq!(fnv1a(b""), 0xcbf29ce484222325); + assert_eq!(fnv1a(b"a"), 0xaf63dc4c8601ec8c); + assert_eq!(fnv1a(b"b"), 0xaf63df4c8601f1a5); + assert_eq!(fnv1a(b"c"), 0xaf63de4c8601eff2); + assert_eq!(fnv1a(b"d"), 0xaf63d94c8601e773); + assert_eq!(fnv1a(b"e"), 0xaf63d84c8601e5c0); + assert_eq!(fnv1a(b"f"), 0xaf63db4c8601ead9); + assert_eq!(fnv1a(b"fo"), 0x08985907b541d342); + assert_eq!(fnv1a(b"foo"), 0xdcb27518fed9d577); + assert_eq!(fnv1a(b"foob"), 0xdd120e790c2512af); + assert_eq!(fnv1a(b"fooba"), 0xcac165afa2fef40a); + assert_eq!(fnv1a(b"foobar"), 0x85944171f73967e8); + assert_eq!(fnv1a(b"\0"), 0xaf63bd4c8601b7df); + assert_eq!(fnv1a(b"a\0"), 0x089be207b544f1e4); + assert_eq!(fnv1a(b"b\0"), 0x08a61407b54d9b5f); + assert_eq!(fnv1a(b"c\0"), 0x08a2ae07b54ab836); + assert_eq!(fnv1a(b"d\0"), 0x0891b007b53c4869); + assert_eq!(fnv1a(b"e\0"), 0x088e4a07b5396540); + assert_eq!(fnv1a(b"f\0"), 0x08987c07b5420ebb); + assert_eq!(fnv1a(b"fo\0"), 0xdcb28a18fed9f926); + assert_eq!(fnv1a(b"foo\0"), 0xdd1270790c25b935); + assert_eq!(fnv1a(b"foob\0"), 0xcac146afa2febf5d); + assert_eq!(fnv1a(b"fooba\0"), 0x8593d371f738acfe); + assert_eq!(fnv1a(b"foobar\0"), 0x34531ca7168b8f38); + assert_eq!(fnv1a(b"ch"), 0x08a25607b54a22ae); + assert_eq!(fnv1a(b"cho"), 0xf5faf0190cf90df3); + assert_eq!(fnv1a(b"chon"), 0xf27397910b3221c7); + assert_eq!(fnv1a(b"chong"), 0x2c8c2b76062f22e0); + assert_eq!(fnv1a(b"chongo"), 0xe150688c8217b8fd); + assert_eq!(fnv1a(b"chongo "), 0xf35a83c10e4f1f87); + assert_eq!(fnv1a(b"chongo w"), 0xd1edd10b507344d0); + assert_eq!(fnv1a(b"chongo wa"), 0x2a5ee739b3ddb8c3); + assert_eq!(fnv1a(b"chongo was"), 0xdcfb970ca1c0d310); + assert_eq!(fnv1a(b"chongo was "), 0x4054da76daa6da90); + assert_eq!(fnv1a(b"chongo was h"), 0xf70a2ff589861368); + assert_eq!(fnv1a(b"chongo was he"), 0x4c628b38aed25f17); + assert_eq!(fnv1a(b"chongo was her"), 0x9dd1f6510f78189f); + assert_eq!(fnv1a(b"chongo was here"), 0xa3de85bd491270ce); + assert_eq!(fnv1a(b"chongo was here!"), 0x858e2fa32a55e61d); + assert_eq!(fnv1a(b"chongo was here!\n"), 0x46810940eff5f915); + assert_eq!(fnv1a(b"ch\0"), 0xf5fadd190cf8edaa); + assert_eq!(fnv1a(b"cho\0"), 0xf273ed910b32b3e9); + assert_eq!(fnv1a(b"chon\0"), 0x2c8c5276062f6525); + assert_eq!(fnv1a(b"chong\0"), 0xe150b98c821842a0); + assert_eq!(fnv1a(b"chongo\0"), 0xf35aa3c10e4f55e7); + assert_eq!(fnv1a(b"chongo \0"), 0xd1ed680b50729265); + assert_eq!(fnv1a(b"chongo w\0"), 0x2a5f0639b3dded70); + assert_eq!(fnv1a(b"chongo wa\0"), 0xdcfbaa0ca1c0f359); + assert_eq!(fnv1a(b"chongo was\0"), 0x4054ba76daa6a430); + assert_eq!(fnv1a(b"chongo was \0"), 0xf709c7f5898562b0); + assert_eq!(fnv1a(b"chongo was h\0"), 0x4c62e638aed2f9b8); + assert_eq!(fnv1a(b"chongo was he\0"), 0x9dd1a8510f779415); + assert_eq!(fnv1a(b"chongo was her\0"), 0xa3de2abd4911d62d); + assert_eq!(fnv1a(b"chongo was here\0"), 0x858e0ea32a55ae0a); + assert_eq!(fnv1a(b"chongo was here!\0"), 0x46810f40eff60347); + assert_eq!(fnv1a(b"chongo was here!\n\0"), 0xc33bce57bef63eaf); + assert_eq!(fnv1a(b"cu"), 0x08a24307b54a0265); + assert_eq!(fnv1a(b"cur"), 0xf5b9fd190cc18d15); + 
assert_eq!(fnv1a(b"curd"), 0x4c968290ace35703); + assert_eq!(fnv1a(b"curds"), 0x07174bd5c64d9350); + assert_eq!(fnv1a(b"curds "), 0x5a294c3ff5d18750); + assert_eq!(fnv1a(b"curds a"), 0x05b3c1aeb308b843); + assert_eq!(fnv1a(b"curds an"), 0xb92a48da37d0f477); + assert_eq!(fnv1a(b"curds and"), 0x73cdddccd80ebc49); + assert_eq!(fnv1a(b"curds and "), 0xd58c4c13210a266b); + assert_eq!(fnv1a(b"curds and w"), 0xe78b6081243ec194); + assert_eq!(fnv1a(b"curds and wh"), 0xb096f77096a39f34); + assert_eq!(fnv1a(b"curds and whe"), 0xb425c54ff807b6a3); + assert_eq!(fnv1a(b"curds and whey"), 0x23e520e2751bb46e); + assert_eq!(fnv1a(b"curds and whey\n"), 0x1a0b44ccfe1385ec); + assert_eq!(fnv1a(b"cu\0"), 0xf5ba4b190cc2119f); + assert_eq!(fnv1a(b"cur\0"), 0x4c962690ace2baaf); + assert_eq!(fnv1a(b"curd\0"), 0x0716ded5c64cda19); + assert_eq!(fnv1a(b"curds\0"), 0x5a292c3ff5d150f0); + assert_eq!(fnv1a(b"curds \0"), 0x05b3e0aeb308ecf0); + assert_eq!(fnv1a(b"curds a\0"), 0xb92a5eda37d119d9); + assert_eq!(fnv1a(b"curds an\0"), 0x73ce41ccd80f6635); + assert_eq!(fnv1a(b"curds and\0"), 0xd58c2c132109f00b); + assert_eq!(fnv1a(b"curds and \0"), 0xe78baf81243f47d1); + assert_eq!(fnv1a(b"curds and w\0"), 0xb0968f7096a2ee7c); + assert_eq!(fnv1a(b"curds and wh\0"), 0xb425a84ff807855c); + assert_eq!(fnv1a(b"curds and whe\0"), 0x23e4e9e2751b56f9); + assert_eq!(fnv1a(b"curds and whey\0"), 0x1a0b4eccfe1396ea); + assert_eq!(fnv1a(b"curds and whey\n\0"), 0x54abd453bb2c9004); + assert_eq!(fnv1a(b"hi"), 0x08ba5f07b55ec3da); + assert_eq!(fnv1a(b"hi\0"), 0x337354193006cb6e); + assert_eq!(fnv1a(b"hello"), 0xa430d84680aabd0b); + assert_eq!(fnv1a(b"hello\0"), 0xa9bc8acca21f39b1); + assert_eq!(fnv1a(b"\xff\x00\x00\x01"), 0x6961196491cc682d); + assert_eq!(fnv1a(b"\x01\x00\x00\xff"), 0xad2bb1774799dfe9); + assert_eq!(fnv1a(b"\xff\x00\x00\x02"), 0x6961166491cc6314); + assert_eq!(fnv1a(b"\x02\x00\x00\xff"), 0x8d1bb3904a3b1236); + assert_eq!(fnv1a(b"\xff\x00\x00\x03"), 0x6961176491cc64c7); + assert_eq!(fnv1a(b"\x03\x00\x00\xff"), 0xed205d87f40434c7); + assert_eq!(fnv1a(b"\xff\x00\x00\x04"), 0x6961146491cc5fae); + assert_eq!(fnv1a(b"\x04\x00\x00\xff"), 0xcd3baf5e44f8ad9c); + assert_eq!(fnv1a(b"\x40\x51\x4e\x44"), 0xe3b36596127cd6d8); + assert_eq!(fnv1a(b"\x44\x4e\x51\x40"), 0xf77f1072c8e8a646); + assert_eq!(fnv1a(b"\x40\x51\x4e\x4a"), 0xe3b36396127cd372); + assert_eq!(fnv1a(b"\x4a\x4e\x51\x40"), 0x6067dce9932ad458); + assert_eq!(fnv1a(b"\x40\x51\x4e\x54"), 0xe3b37596127cf208); + assert_eq!(fnv1a(b"\x54\x4e\x51\x40"), 0x4b7b10fa9fe83936); + assert_eq!(fnv1a(b"127.0.0.1"), 0xaabafe7104d914be); + assert_eq!(fnv1a(b"127.0.0.1\0"), 0xf4d3180b3cde3eda); + assert_eq!(fnv1a(b"127.0.0.2"), 0xaabafd7104d9130b); + assert_eq!(fnv1a(b"127.0.0.2\0"), 0xf4cfb20b3cdb5bb1); + assert_eq!(fnv1a(b"127.0.0.3"), 0xaabafc7104d91158); + assert_eq!(fnv1a(b"127.0.0.3\0"), 0xf4cc4c0b3cd87888); + assert_eq!(fnv1a(b"64.81.78.68"), 0xe729bac5d2a8d3a7); + assert_eq!(fnv1a(b"64.81.78.68\0"), 0x74bc0524f4dfa4c5); + assert_eq!(fnv1a(b"64.81.78.74"), 0xe72630c5d2a5b352); + assert_eq!(fnv1a(b"64.81.78.74\0"), 0x6b983224ef8fb456); + assert_eq!(fnv1a(b"64.81.78.84"), 0xe73042c5d2ae266d); + assert_eq!(fnv1a(b"64.81.78.84\0"), 0x8527e324fdeb4b37); + assert_eq!(fnv1a(b"feedface"), 0x0a83c86fee952abc); + assert_eq!(fnv1a(b"feedface\0"), 0x7318523267779d74); + assert_eq!(fnv1a(b"feedfacedaffdeed"), 0x3e66d3d56b8caca1); + assert_eq!(fnv1a(b"feedfacedaffdeed\0"), 0x956694a5c0095593); + assert_eq!(fnv1a(b"feedfacedeadbeef"), 0xcac54572bb1a6fc8); + assert_eq!(fnv1a(b"feedfacedeadbeef\0"), 
0xa7a4c9f3edebf0d8); + assert_eq!(fnv1a(b"line 1\nline 2\nline 3"), 0x7829851fac17b143); + assert_eq!(fnv1a(b"chongo /\\../\\"), 0x2c8f4c9af81bcf06); + assert_eq!(fnv1a(b"chongo /\\../\\\0"), 0xd34e31539740c732); + assert_eq!(fnv1a(b"chongo (Landon Curt Noll) /\\../\\"), 0x3605a2ac253d2db1); + assert_eq!(fnv1a(b"chongo (Landon Curt Noll) /\\../\\\0"), 0x08c11b8346f4a3c3); + assert_eq!(fnv1a(b"http://antwrp.gsfc.nasa.gov/apod/astropix.html"), 0x6be396289ce8a6da); + assert_eq!(fnv1a(b"http://en.wikipedia.org/wiki/Fowler_Noll_Vo_hash"), 0xd9b957fb7fe794c5); + assert_eq!(fnv1a(b"http://epod.usra.edu/"), 0x05be33da04560a93); + assert_eq!(fnv1a(b"http://exoplanet.eu/"), 0x0957f1577ba9747c); + assert_eq!(fnv1a(b"http://hvo.wr.usgs.gov/cam3/"), 0xda2cc3acc24fba57); + assert_eq!(fnv1a(b"http://hvo.wr.usgs.gov/cams/HMcam/"), 0x74136f185b29e7f0); + assert_eq!(fnv1a(b"http://hvo.wr.usgs.gov/kilauea/update/deformation.html"), 0xb2f2b4590edb93b2); + assert_eq!(fnv1a(b"http://hvo.wr.usgs.gov/kilauea/update/images.html"), 0xb3608fce8b86ae04); + assert_eq!(fnv1a(b"http://hvo.wr.usgs.gov/kilauea/update/maps.html"), 0x4a3a865079359063); + assert_eq!(fnv1a(b"http://hvo.wr.usgs.gov/volcanowatch/current_issue.html"), 0x5b3a7ef496880a50); + assert_eq!(fnv1a(b"http://neo.jpl.nasa.gov/risk/"), 0x48fae3163854c23b); + assert_eq!(fnv1a(b"http://norvig.com/21-days.html"), 0x07aaa640476e0b9a); + assert_eq!(fnv1a(b"http://primes.utm.edu/curios/home.php"), 0x2f653656383a687d); + assert_eq!(fnv1a(b"http://slashdot.org/"), 0xa1031f8e7599d79c); + assert_eq!(fnv1a(b"http://tux.wr.usgs.gov/Maps/155.25-19.5.html"), 0xa31908178ff92477); + assert_eq!(fnv1a(b"http://volcano.wr.usgs.gov/kilaueastatus.php"), 0x097edf3c14c3fb83); + assert_eq!(fnv1a(b"http://www.avo.alaska.edu/activity/Redoubt.php"), 0xb51ca83feaa0971b); + assert_eq!(fnv1a(b"http://www.dilbert.com/fast/"), 0xdd3c0d96d784f2e9); + assert_eq!(fnv1a(b"http://www.fourmilab.ch/gravitation/orbits/"), 0x86cd26a9ea767d78); + assert_eq!(fnv1a(b"http://www.fpoa.net/"), 0xe6b215ff54a30c18); + assert_eq!(fnv1a(b"http://www.ioccc.org/index.html"), 0xec5b06a1c5531093); + assert_eq!(fnv1a(b"http://www.isthe.com/cgi-bin/number.cgi"), 0x45665a929f9ec5e5); + assert_eq!(fnv1a(b"http://www.isthe.com/chongo/bio.html"), 0x8c7609b4a9f10907); + assert_eq!(fnv1a(b"http://www.isthe.com/chongo/index.html"), 0x89aac3a491f0d729); + assert_eq!(fnv1a(b"http://www.isthe.com/chongo/src/calc/lucas-calc"), 0x32ce6b26e0f4a403); + assert_eq!(fnv1a(b"http://www.isthe.com/chongo/tech/astro/venus2004.html"), 0x614ab44e02b53e01); + assert_eq!(fnv1a(b"http://www.isthe.com/chongo/tech/astro/vita.html"), 0xfa6472eb6eef3290); + assert_eq!(fnv1a(b"http://www.isthe.com/chongo/tech/comp/c/expert.html"), 0x9e5d75eb1948eb6a); + assert_eq!(fnv1a(b"http://www.isthe.com/chongo/tech/comp/calc/index.html"), 0xb6d12ad4a8671852); + assert_eq!(fnv1a(b"http://www.isthe.com/chongo/tech/comp/fnv/index.html"), 0x88826f56eba07af1); + assert_eq!(fnv1a(b"http://www.isthe.com/chongo/tech/math/number/howhigh.html"), 0x44535bf2645bc0fd); + assert_eq!(fnv1a(b"http://www.isthe.com/chongo/tech/math/number/number.html"), 0x169388ffc21e3728); + assert_eq!(fnv1a(b"http://www.isthe.com/chongo/tech/math/prime/mersenne.html"), 0xf68aac9e396d8224); + assert_eq!(fnv1a(b"http://www.isthe.com/chongo/tech/math/prime/mersenne.html#largest"), 0x8e87d7e7472b3883); + assert_eq!(fnv1a(b"http://www.lavarnd.org/cgi-bin/corpspeak.cgi"), 0x295c26caa8b423de); + assert_eq!(fnv1a(b"http://www.lavarnd.org/cgi-bin/haiku.cgi"), 0x322c814292e72176); + 
assert_eq!(fnv1a(b"http://www.lavarnd.org/cgi-bin/rand-none.cgi"), 0x8a06550eb8af7268); + assert_eq!(fnv1a(b"http://www.lavarnd.org/cgi-bin/randdist.cgi"), 0xef86d60e661bcf71); + assert_eq!(fnv1a(b"http://www.lavarnd.org/index.html"), 0x9e5426c87f30ee54); + assert_eq!(fnv1a(b"http://www.lavarnd.org/what/nist-test.html"), 0xf1ea8aa826fd047e); + assert_eq!(fnv1a(b"http://www.macosxhints.com/"), 0x0babaf9a642cb769); + assert_eq!(fnv1a(b"http://www.mellis.com/"), 0x4b3341d4068d012e); + assert_eq!(fnv1a(b"http://www.nature.nps.gov/air/webcams/parks/havoso2alert/havoalert.cfm"), 0xd15605cbc30a335c); + assert_eq!(fnv1a(b"http://www.nature.nps.gov/air/webcams/parks/havoso2alert/timelines_24.cfm"), 0x5b21060aed8412e5); + assert_eq!(fnv1a(b"http://www.paulnoll.com/"), 0x45e2cda1ce6f4227); + assert_eq!(fnv1a(b"http://www.pepysdiary.com/"), 0x50ae3745033ad7d4); + assert_eq!(fnv1a(b"http://www.sciencenews.org/index/home/activity/view"), 0xaa4588ced46bf414); + assert_eq!(fnv1a(b"http://www.skyandtelescope.com/"), 0xc1b0056c4a95467e); + assert_eq!(fnv1a(b"http://www.sput.nl/~rob/sirius.html"), 0x56576a71de8b4089); + assert_eq!(fnv1a(b"http://www.systemexperts.com/"), 0xbf20965fa6dc927e); + assert_eq!(fnv1a(b"http://www.tq-international.com/phpBB3/index.php"), 0x569f8383c2040882); + assert_eq!(fnv1a(b"http://www.travelquesttours.com/index.htm"), 0xe1e772fba08feca0); + assert_eq!(fnv1a(b"http://www.wunderground.com/global/stations/89606.html"), 0x4ced94af97138ac4); + assert_eq!(fnv1a(&repeat_10(b"21701")), 0xc4112ffb337a82fb); + assert_eq!(fnv1a(&repeat_10(b"M21701")), 0xd64a4fd41de38b7d); + assert_eq!(fnv1a(&repeat_10(b"2^21701-1")), 0x4cfc32329edebcbb); + assert_eq!(fnv1a(&repeat_10(b"\x54\xc5")), 0x0803564445050395); + assert_eq!(fnv1a(&repeat_10(b"\xc5\x54")), 0xaa1574ecf4642ffd); + assert_eq!(fnv1a(&repeat_10(b"23209")), 0x694bc4e54cc315f9); + assert_eq!(fnv1a(&repeat_10(b"M23209")), 0xa3d7cb273b011721); + assert_eq!(fnv1a(&repeat_10(b"2^23209-1")), 0x577c2f8b6115bfa5); + assert_eq!(fnv1a(&repeat_10(b"\x5a\xa9")), 0xb7ec8c1a769fb4c1); + assert_eq!(fnv1a(&repeat_10(b"\xa9\x5a")), 0x5d5cfce63359ab19); + assert_eq!(fnv1a(&repeat_10(b"391581216093")), 0x33b96c3cd65b5f71); + assert_eq!(fnv1a(&repeat_10(b"391581*2^216093-1")), 0xd845097780602bb9); + assert_eq!(fnv1a(&repeat_10(b"\x05\xf9\x9d\x03\x4c\x81")), 0x84d47645d02da3d5); + assert_eq!(fnv1a(&repeat_10(b"FEDCBA9876543210")), 0x83544f33b58773a5); + assert_eq!(fnv1a(&repeat_10(b"\xfe\xdc\xba\x98\x76\x54\x32\x10")), 0x9175cbb2160836c5); + assert_eq!(fnv1a(&repeat_10(b"EFCDAB8967452301")), 0xc71b3bc175e72bc5); + assert_eq!(fnv1a(&repeat_10(b"\xef\xcd\xab\x89\x67\x45\x23\x01")), 0x636806ac222ec985); + assert_eq!(fnv1a(&repeat_10(b"0123456789ABCDEF")), 0xb6ef0e6950f52ed5); + assert_eq!(fnv1a(&repeat_10(b"\x01\x23\x45\x67\x89\xab\xcd\xef")), 0xead3d8a0f3dfdaa5); + assert_eq!(fnv1a(&repeat_10(b"1032547698BADCFE")), 0x922908fe9a861ba5); + assert_eq!(fnv1a(&repeat_10(b"\x10\x32\x54\x76\x98\xba\xdc\xfe")), 0x6d4821de275fd5c5); + assert_eq!(fnv1a(&repeat_500(b"\x00")), 0x1fe3fce62bd816b5); + assert_eq!(fnv1a(&repeat_500(b"\x07")), 0xc23e9fccd6f70591); + assert_eq!(fnv1a(&repeat_500(b"~")), 0xc1af12bdfe16b5b5); + assert_eq!(fnv1a(&repeat_500(b"\x7f")), 0x39e9f18f2f85e221); + } +} diff --git a/foreign-types-shared/.cargo-checksum.json b/foreign-types-shared/.cargo-checksum.json new file mode 100644 index 000000000..5fc4da45f --- /dev/null +++ b/foreign-types-shared/.cargo-checksum.json @@ -0,0 +1 @@ 
+{"files":{},"package":"00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b"} \ No newline at end of file diff --git a/foreign-types-shared/Cargo.toml b/foreign-types-shared/Cargo.toml new file mode 100644 index 000000000..e2cb783b9 --- /dev/null +++ b/foreign-types-shared/Cargo.toml @@ -0,0 +1,21 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "foreign-types-shared" +version = "0.1.1" +authors = ["Steven Fackler "] +description = "An internal crate used by foreign-types" +license = "MIT/Apache-2.0" +repository = "https://github.com/sfackler/foreign-types" + +[dependencies] diff --git a/foreign-types-shared/LICENSE-APACHE b/foreign-types-shared/LICENSE-APACHE new file mode 100644 index 000000000..8f71f43fe --- /dev/null +++ b/foreign-types-shared/LICENSE-APACHE @@ -0,0 +1,202 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + diff --git a/foreign-types-shared/LICENSE-MIT b/foreign-types-shared/LICENSE-MIT new file mode 100644 index 000000000..bb76d0146 --- /dev/null +++ b/foreign-types-shared/LICENSE-MIT @@ -0,0 +1,19 @@ +Copyright (c) 2017 The foreign-types Developers + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/foreign-types-shared/src/lib.rs b/foreign-types-shared/src/lib.rs new file mode 100644 index 000000000..cbebc3353 --- /dev/null +++ b/foreign-types-shared/src/lib.rs @@ -0,0 +1,51 @@ +//! Internal crate used by foreign-types + +#![no_std] +#![warn(missing_docs)] +#![doc(html_root_url="https://docs.rs/foreign-types-shared/0.1")] + +use core::cell::UnsafeCell; + +/// An opaque type used to define `ForeignTypeRef` types. +/// +/// A type implementing `ForeignTypeRef` should simply be a newtype wrapper around this type. +pub struct Opaque(UnsafeCell<()>); + +/// A type implemented by wrappers over foreign types. +pub trait ForeignType: Sized { + /// The raw C type. + type CType; + + /// The type representing a reference to this type. + type Ref: ForeignTypeRef; + + /// Constructs an instance of this type from its raw type. + unsafe fn from_ptr(ptr: *mut Self::CType) -> Self; + + /// Returns a raw pointer to the wrapped value. + fn as_ptr(&self) -> *mut Self::CType; +} + +/// A trait implemented by types which reference borrowed foreign types. +pub trait ForeignTypeRef: Sized { + /// The raw C type. + type CType; + + /// Constructs a shared instance of this type from its raw type. + #[inline] + unsafe fn from_ptr<'a>(ptr: *mut Self::CType) -> &'a Self { + &*(ptr as *mut _) + } + + /// Constructs a mutable reference of this type from its raw type. + #[inline] + unsafe fn from_ptr_mut<'a>(ptr: *mut Self::CType) -> &'a mut Self { + &mut *(ptr as *mut _) + } + + /// Returns a raw pointer to the wrapped value. + #[inline] + fn as_ptr(&self) -> *mut Self::CType { + self as *const _ as *mut _ + } +} diff --git a/foreign-types/.cargo-checksum.json b/foreign-types/.cargo-checksum.json new file mode 100644 index 000000000..4ba7de062 --- /dev/null +++ b/foreign-types/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{},"package":"f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1"} \ No newline at end of file diff --git a/foreign-types/Cargo.toml b/foreign-types/Cargo.toml new file mode 100644 index 000000000..6ff5567de --- /dev/null +++ b/foreign-types/Cargo.toml @@ -0,0 +1,22 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. 
If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "foreign-types" +version = "0.3.2" +authors = ["Steven Fackler "] +description = "A framework for Rust wrappers over C APIs" +readme = "README.md" +license = "MIT/Apache-2.0" +repository = "https://github.com/sfackler/foreign-types" +[dependencies.foreign-types-shared] +version = "0.1" diff --git a/foreign-types/LICENSE-APACHE b/foreign-types/LICENSE-APACHE new file mode 100644 index 000000000..8f71f43fe --- /dev/null +++ b/foreign-types/LICENSE-APACHE @@ -0,0 +1,202 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + diff --git a/foreign-types/LICENSE-MIT b/foreign-types/LICENSE-MIT new file mode 100644 index 000000000..bb76d0146 --- /dev/null +++ b/foreign-types/LICENSE-MIT @@ -0,0 +1,19 @@ +Copyright (c) 2017 The foreign-types Developers + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/foreign-types/README.md b/foreign-types/README.md new file mode 100644 index 000000000..cd15965fa --- /dev/null +++ b/foreign-types/README.md @@ -0,0 +1,23 @@ +# foreign-types + +[![CircleCI](https://circleci.com/gh/sfackler/foreign-types.svg?style=shield)](https://circleci.com/gh/sfackler/foreign-types) + +[Documentation](https://docs.rs/foreign-types) + +A framework for Rust wrappers over C APIs. + +## License + +Licensed under either of + + * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0) + * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT) + +at your option. + +### Contribution + +Unless you explicitly state otherwise, any contribution intentionally +submitted for inclusion in the work by you, as defined in the Apache-2.0 +license, shall be dual licensed as above, without any additional terms or +conditions. 
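
The README above only states what the crate is for. As a quick illustration (not part of the vendored sources), the sketch below wires up the owned/borrowed pair by hand using the `ForeignType` and `ForeignTypeRef` traits re-exported from foreign-types-shared, whose definitions appear earlier in this patch. The `widget_sys` module, the `WIDGET` type and `WIDGET_free` are hypothetical stand-ins for a real `*-sys` crate; in real code `WIDGET_free` would be an `extern "C"` destructor rather than a Rust stub. The src/lib.rs documentation that follows walks through the same pattern in full and shows the `foreign_type!` macro that generates this boilerplate.

```rust
// Minimal sketch of the owned/borrowed pattern encoded by foreign-types.
// `widget_sys`, `WIDGET` and `WIDGET_free` are hypothetical stand-ins for a
// real -sys crate; the stub below only exists so the sketch links on its own.
extern crate foreign_types;

use foreign_types::{ForeignType, ForeignTypeRef, Opaque};
use std::ops::Deref;

mod widget_sys {
    pub enum WIDGET {}
    // Stand-in for the C destructor (would be `extern "C"` in real use).
    pub unsafe fn WIDGET_free(_: *mut WIDGET) {}
}

// Borrowed view: a newtype around `Opaque`. Values of this type are only ever
// seen behind references created from raw pointers.
pub struct WidgetRef(Opaque);

impl ForeignTypeRef for WidgetRef {
    type CType = widget_sys::WIDGET;
}

// Owned handle: frees the underlying C value on drop.
pub struct Widget(*mut widget_sys::WIDGET);

impl Drop for Widget {
    fn drop(&mut self) {
        unsafe { widget_sys::WIDGET_free(self.0) }
    }
}

impl ForeignType for Widget {
    type CType = widget_sys::WIDGET;
    type Ref = WidgetRef;

    unsafe fn from_ptr(ptr: *mut widget_sys::WIDGET) -> Widget {
        Widget(ptr)
    }

    fn as_ptr(&self) -> *mut widget_sys::WIDGET {
        self.0
    }
}

// Deref lets APIs take `&WidgetRef` and accept both owned and borrowed values.
impl Deref for Widget {
    type Target = WidgetRef;

    fn deref(&self) -> &WidgetRef {
        unsafe { WidgetRef::from_ptr(self.0) }
    }
}

fn main() {}
```

With this in place, functions can take `&WidgetRef` and work equally with an owned `Widget` (via deref) or with a reference produced from a raw pointer that the C library still owns, which is the ownership distinction the crate is designed to express.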
diff --git a/foreign-types/src/lib.rs b/foreign-types/src/lib.rs new file mode 100644 index 000000000..6ce959207 --- /dev/null +++ b/foreign-types/src/lib.rs @@ -0,0 +1,306 @@ +//! A framework for Rust wrappers over C APIs. +//! +//! Ownership is as important in C as it is in Rust, but the semantics are often implicit. In +//! particular, pointer-to-value is commonly used to pass C values both when transferring ownership +//! or a borrow. +//! +//! This crate provides a framework to define a Rust wrapper over these kinds of raw C APIs in a way +//! that allows ownership semantics to be expressed in an ergonomic manner. The framework takes a +//! dual-type approach similar to APIs in the standard library such as `PathBuf`/`Path` or `String`/ +//! `str`. One type represents an owned value and references to the other represent borrowed +//! values. +//! +//! # Examples +//! +//! ``` +//! use foreign_types::{ForeignType, ForeignTypeRef, Opaque}; +//! use std::ops::{Deref, DerefMut}; +//! +//! mod foo_sys { +//! pub enum FOO {} +//! +//! extern { +//! pub fn FOO_free(foo: *mut FOO); +//! } +//! } +//! +//! // The borrowed type is a newtype wrapper around an `Opaque` value. +//! // +//! // `FooRef` values never exist; we instead create references to `FooRef`s +//! // from raw C pointers. +//! pub struct FooRef(Opaque); +//! +//! impl ForeignTypeRef for FooRef { +//! type CType = foo_sys::FOO; +//! } +//! +//! // The owned type is simply a newtype wrapper around the raw C type. +//! // +//! // It dereferences to `FooRef`, so methods that do not require ownership +//! // should be defined there. +//! pub struct Foo(*mut foo_sys::FOO); +//! +//! impl Drop for Foo { +//! fn drop(&mut self) { +//! unsafe { foo_sys::FOO_free(self.0) } +//! } +//! } +//! +//! impl ForeignType for Foo { +//! type CType = foo_sys::FOO; +//! type Ref = FooRef; +//! +//! unsafe fn from_ptr(ptr: *mut foo_sys::FOO) -> Foo { +//! Foo(ptr) +//! } +//! +//! fn as_ptr(&self) -> *mut foo_sys::FOO { +//! self.0 +//! } +//! } +//! +//! impl Deref for Foo { +//! type Target = FooRef; +//! +//! fn deref(&self) -> &FooRef { +//! unsafe { FooRef::from_ptr(self.0) } +//! } +//! } +//! +//! impl DerefMut for Foo { +//! fn deref_mut(&mut self) -> &mut FooRef { +//! unsafe { FooRef::from_ptr_mut(self.0) } +//! } +//! } +//! ``` +//! +//! The `foreign_type!` macro can generate this boilerplate for you: +//! +//! ``` +//! #[macro_use] +//! extern crate foreign_types; +//! +//! mod foo_sys { +//! pub enum FOO {} +//! +//! extern { +//! pub fn FOO_free(foo: *mut FOO); +//! pub fn FOO_duplicate(foo: *mut FOO) -> *mut FOO; // Optional +//! } +//! } +//! +//! foreign_type! { +//! type CType = foo_sys::FOO; +//! fn drop = foo_sys::FOO_free; +//! fn clone = foo_sys::FOO_duplicate; // Optional +//! /// A Foo. +//! pub struct Foo; +//! /// A borrowed Foo. +//! pub struct FooRef; +//! } +//! +//! # fn main() {} +//! ``` +//! +//! If `fn clone` is specified, then it must take `CType` as an argument and return a copy of it as `CType`. +//! It will be used to implement `ToOwned` and `Clone`. +//! +//! `#[derive(…)] is permitted before the lines with `pub struct`. +//! `#[doc(hidden)]` before the `type CType` line will hide the `foreign_type!` implementations from documentation. +//! +//! Say we then have a separate type in our C API that contains a `FOO`: +//! +//! ``` +//! mod foo_sys { +//! pub enum FOO {} +//! pub enum BAR {} +//! +//! extern { +//! pub fn FOO_free(foo: *mut FOO); +//! pub fn BAR_free(bar: *mut BAR); +//! 
pub fn BAR_get_foo(bar: *mut BAR) -> *mut FOO; +//! } +//! } +//! ``` +//! +//! The documentation for the C library states that `BAR_get_foo` returns a reference into the `BAR` +//! passed to it, which translates into a reference in Rust. It also says that we're allowed to +//! modify the `FOO`, so we'll define a pair of accessor methods, one immutable and one mutable: +//! +//! ``` +//! #[macro_use] +//! extern crate foreign_types; +//! +//! use foreign_types::ForeignTypeRef; +//! +//! mod foo_sys { +//! pub enum FOO {} +//! pub enum BAR {} +//! +//! extern { +//! pub fn FOO_free(foo: *mut FOO); +//! pub fn BAR_free(bar: *mut BAR); +//! pub fn BAR_get_foo(bar: *mut BAR) -> *mut FOO; +//! } +//! } +//! +//! foreign_type! { +//! #[doc(hidden)] +//! type CType = foo_sys::FOO; +//! fn drop = foo_sys::FOO_free; +//! /// A Foo. +//! pub struct Foo; +//! /// A borrowed Foo. +//! pub struct FooRef; +//! } +//! +//! foreign_type! { +//! type CType = foo_sys::BAR; +//! fn drop = foo_sys::BAR_free; +//! /// A Foo. +//! pub struct Bar; +//! /// A borrowed Bar. +//! pub struct BarRef; +//! } +//! +//! impl BarRef { +//! fn foo(&self) -> &FooRef { +//! unsafe { FooRef::from_ptr(foo_sys::BAR_get_foo(self.as_ptr())) } +//! } +//! +//! fn foo_mut(&mut self) -> &mut FooRef { +//! unsafe { FooRef::from_ptr_mut(foo_sys::BAR_get_foo(self.as_ptr())) } +//! } +//! } +//! +//! # fn main() {} +//! ``` +#![no_std] +#![warn(missing_docs)] +#![doc(html_root_url="https://docs.rs/foreign-types/0.3")] +extern crate foreign_types_shared; + +#[doc(inline)] +pub use foreign_types_shared::*; + +/// A macro to easily define wrappers for foreign types. +/// +/// # Examples +/// +/// ``` +/// #[macro_use] +/// extern crate foreign_types; +/// +/// # mod openssl_sys { pub type SSL = (); pub unsafe fn SSL_free(_: *mut SSL) {} pub unsafe fn SSL_dup(x: *mut SSL) -> *mut SSL {x} } +/// foreign_type! { +/// type CType = openssl_sys::SSL; +/// fn drop = openssl_sys::SSL_free; +/// fn clone = openssl_sys::SSL_dup; +/// /// Documentation for the owned type. +/// pub struct Ssl; +/// /// Documentation for the borrowed type. +/// pub struct SslRef; +/// } +/// +/// # fn main() {} +/// ``` +#[macro_export] +macro_rules! 
foreign_type { + ( + $(#[$impl_attr:meta])* + type CType = $ctype:ty; + fn drop = $drop:expr; + $(fn clone = $clone:expr;)* + $(#[$owned_attr:meta])* + pub struct $owned:ident; + $(#[$borrowed_attr:meta])* + pub struct $borrowed:ident; + ) => { + $(#[$owned_attr])* + pub struct $owned(*mut $ctype); + + $(#[$impl_attr])* + impl $crate::ForeignType for $owned { + type CType = $ctype; + type Ref = $borrowed; + + #[inline] + unsafe fn from_ptr(ptr: *mut $ctype) -> $owned { + $owned(ptr) + } + + #[inline] + fn as_ptr(&self) -> *mut $ctype { + self.0 + } + } + + impl Drop for $owned { + #[inline] + fn drop(&mut self) { + unsafe { $drop(self.0) } + } + } + + $( + impl Clone for $owned { + #[inline] + fn clone(&self) -> $owned { + unsafe { + let handle: *mut $ctype = $clone(self.0); + $crate::ForeignType::from_ptr(handle) + } + } + } + + impl ::std::borrow::ToOwned for $borrowed { + type Owned = $owned; + #[inline] + fn to_owned(&self) -> $owned { + unsafe { + let handle: *mut $ctype = $clone($crate::ForeignTypeRef::as_ptr(self)); + $crate::ForeignType::from_ptr(handle) + } + } + } + )* + + impl ::std::ops::Deref for $owned { + type Target = $borrowed; + + #[inline] + fn deref(&self) -> &$borrowed { + unsafe { $crate::ForeignTypeRef::from_ptr(self.0) } + } + } + + impl ::std::ops::DerefMut for $owned { + #[inline] + fn deref_mut(&mut self) -> &mut $borrowed { + unsafe { $crate::ForeignTypeRef::from_ptr_mut(self.0) } + } + } + + impl ::std::borrow::Borrow<$borrowed> for $owned { + #[inline] + fn borrow(&self) -> &$borrowed { + &**self + } + } + + impl ::std::convert::AsRef<$borrowed> for $owned { + #[inline] + fn as_ref(&self) -> &$borrowed { + &**self + } + } + + $(#[$borrowed_attr])* + pub struct $borrowed($crate::Opaque); + + $(#[$impl_attr])* + impl $crate::ForeignTypeRef for $borrowed { + type CType = $ctype; + } + } +} diff --git a/fs2/.cargo-checksum.json b/fs2/.cargo-checksum.json new file mode 100644 index 000000000..b5f1e1d84 --- /dev/null +++ b/fs2/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{},"package":"9564fc758e15025b46aa6643b1b77d047d1a56a1aea6e01002ac0c7026876213"} \ No newline at end of file diff --git a/fs2/Cargo.toml b/fs2/Cargo.toml new file mode 100644 index 000000000..4c6aded51 --- /dev/null +++ b/fs2/Cargo.toml @@ -0,0 +1,33 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "fs2" +version = "0.4.3" +authors = ["Dan Burkert "] +description = "Cross-platform file locks and file duplication." 
+documentation = "https://docs.rs/fs2" +keywords = ["file", "file-system", "lock", "duplicate", "flock"] +license = "MIT/Apache-2.0" +repository = "https://github.com/danburkert/fs2-rs" +[dev-dependencies.tempdir] +version = "0.3" +[target."cfg(unix)".dependencies.libc] +version = "0.2.30" +[target."cfg(windows)".dependencies.winapi] +version = "0.3" +features = ["handleapi", "processthreadsapi", "winerror", "fileapi", "winbase", "std"] +[badges.appveyor] +repository = "danburkert/fs2-rs" + +[badges.travis-ci] +repository = "danburkert/fs2-rs" diff --git a/fs2/LICENSE-APACHE b/fs2/LICENSE-APACHE new file mode 100644 index 000000000..16fe87b06 --- /dev/null +++ b/fs2/LICENSE-APACHE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/fs2/LICENSE-MIT b/fs2/LICENSE-MIT new file mode 100644 index 000000000..e69282e38 --- /dev/null +++ b/fs2/LICENSE-MIT @@ -0,0 +1,25 @@ +Copyright (c) 2015 The Rust Project Developers + +Permission is hereby granted, free of charge, to any +person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without +limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software +is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions +of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/fs2/README.md b/fs2/README.md new file mode 100644 index 000000000..c10459a68 --- /dev/null +++ b/fs2/README.md @@ -0,0 +1,50 @@ +# fs2 + +Extended utilities for working with files and filesystems in Rust. `fs2` +requires Rust stable 1.8 or greater. + +[![Build Status](https://travis-ci.org/danburkert/fs2-rs.svg?branch=master)](https://travis-ci.org/danburkert/fs2-rs) +[![Windows Build status](https://ci.appveyor.com/api/projects/status/iuvjv1aaaml0rntt/branch/master?svg=true)](https://ci.appveyor.com/project/danburkert/fs2-rs/branch/master) +[![Documentation](https://docs.rs/fs2/badge.svg)](https://docs.rs/memmap) +[![Crate](https://img.shields.io/crates/v/fs2.svg)](https://crates.io/crates/memmap) + +## Features + +- [x] file descriptor duplication. +- [x] file locks. +- [x] file (pre)allocation. +- [x] file allocation information. +- [x] filesystem space usage information. + +## Platforms + +`fs2` should work on any platform supported by +[`libc`](https://github.com/rust-lang-nursery/libc#platforms-and-documentation). 
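+
+A minimal usage sketch of the `FileExt` extension trait defined in `src/lib.rs`
+(the lock file name here is illustrative and errors are simply unwrapped):
+
+```rust
+extern crate fs2;
+
+use fs2::FileExt;
+use std::fs::File;
+
+fn main() {
+    let file = File::create("fs2-example.lock").unwrap();
+
+    // Take a whole-file exclusive (read-write) advisory lock, then release it.
+    file.lock_exclusive().unwrap();
+    file.unlock().unwrap();
+
+    // Ensure at least 1 MiB of disk space is allocated for the file.
+    file.allocate(1024 * 1024).unwrap();
+}
+```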
+ +`fs2` is continuously tested on: + * `x86_64-unknown-linux-gnu` (Linux) + * `i686-unknown-linux-gnu` + * `x86_64-apple-darwin` (OSX) + * `i686-apple-darwin` + * `x86_64-pc-windows-msvc` (Windows) + * `i686-pc-windows-msvc` + * `x86_64-pc-windows-gnu` + * `i686-pc-windows-gnu` + +## Benchmarks + +Simple benchmarks are provided for the methods provided. Many of these +benchmarks use files in a temporary directory. On many modern Linux distros the +default temporary directory, `/tmp`, is mounted on a tempfs filesystem, which +will have different performance characteristics than a disk-backed filesystem. +The temporary directory is configurable at runtime through the environment (see +[`env::temp_dir`](https://doc.rust-lang.org/stable/std/env/fn.temp_dir.html)). + +## License + +`fs2` is primarily distributed under the terms of both the MIT license and the +Apache License (Version 2.0). + +See [LICENSE-APACHE](LICENSE-APACHE), [LICENSE-MIT](LICENSE-MIT) for details. + +Copyright (c) 2015 Dan Burkert. diff --git a/fs2/src/lib.rs b/fs2/src/lib.rs new file mode 100644 index 000000000..51b22db6a --- /dev/null +++ b/fs2/src/lib.rs @@ -0,0 +1,458 @@ +//! Extended utilities for working with files and filesystems in Rust. + +#![doc(html_root_url = "https://docs.rs/fs2/0.4.3")] + +#![cfg_attr(test, feature(test))] + +#[cfg(windows)] +extern crate winapi; + +#[cfg(unix)] +mod unix; +#[cfg(unix)] +use unix as sys; + +#[cfg(windows)] +mod windows; +#[cfg(windows)] +use windows as sys; + +use std::fs::File; +use std::io::{Error, Result}; +use std::path::Path; + +/// Extension trait for `std::fs::File` which provides allocation, duplication and locking methods. +/// +/// ## Notes on File Locks +/// +/// This library provides whole-file locks in both shared (read) and exclusive +/// (read-write) varieties. +/// +/// File locks are a cross-platform hazard since the file lock APIs exposed by +/// operating system kernels vary in subtle and not-so-subtle ways. +/// +/// The API exposed by this library can be safely used across platforms as long +/// as the following rules are followed: +/// +/// * Multiple locks should not be created on an individual `File` instance +/// concurrently. +/// * Duplicated files should not be locked without great care. +/// * Files to be locked should be opened with at least read or write +/// permissions. +/// * File locks may only be relied upon to be advisory. +/// +/// See the tests in `lib.rs` for cross-platform lock behavior that may be +/// relied upon; see the tests in `unix.rs` and `windows.rs` for examples of +/// platform-specific behavior. File locks are implemented with +/// [`flock(2)`](http://man7.org/linux/man-pages/man2/flock.2.html) on Unix and +/// [`LockFile`](https://msdn.microsoft.com/en-us/library/windows/desktop/aa365202(v=vs.85).aspx) +/// on Windows. +pub trait FileExt { + + /// Returns a duplicate instance of the file. + /// + /// The returned file will share the same file position as the original + /// file. + /// + /// If using rustc version 1.9 or later, prefer using `File::try_clone` to this. + /// + /// # Notes + /// + /// This is implemented with + /// [`dup(2)`](http://man7.org/linux/man-pages/man2/dup.2.html) on Unix and + /// [`DuplicateHandle`](https://msdn.microsoft.com/en-us/library/windows/desktop/ms724251(v=vs.85).aspx) + /// on Windows. + fn duplicate(&self) -> Result; + + /// Returns the amount of physical space allocated for a file. 
+ fn allocated_size(&self) -> Result; + + /// Ensures that at least `len` bytes of disk space are allocated for the + /// file, and the file size is at least `len` bytes. After a successful call + /// to `allocate`, subsequent writes to the file within the specified length + /// are guaranteed not to fail because of lack of disk space. + fn allocate(&self, len: u64) -> Result<()>; + + /// Locks the file for shared usage, blocking if the file is currently + /// locked exclusively. + fn lock_shared(&self) -> Result<()>; + + /// Locks the file for exclusive usage, blocking if the file is currently + /// locked. + fn lock_exclusive(&self) -> Result<()>; + + /// Locks the file for shared usage, or returns a an error if the file is + /// currently locked (see `lock_contended_error`). + fn try_lock_shared(&self) -> Result<()>; + + /// Locks the file for shared usage, or returns a an error if the file is + /// currently locked (see `lock_contended_error`). + fn try_lock_exclusive(&self) -> Result<()>; + + /// Unlocks the file. + fn unlock(&self) -> Result<()>; +} + +impl FileExt for File { + fn duplicate(&self) -> Result { + sys::duplicate(self) + } + fn allocated_size(&self) -> Result { + sys::allocated_size(self) + } + fn allocate(&self, len: u64) -> Result<()> { + sys::allocate(self, len) + } + fn lock_shared(&self) -> Result<()> { + sys::lock_shared(self) + } + fn lock_exclusive(&self) -> Result<()> { + sys::lock_exclusive(self) + } + fn try_lock_shared(&self) -> Result<()> { + sys::try_lock_shared(self) + } + fn try_lock_exclusive(&self) -> Result<()> { + sys::try_lock_exclusive(self) + } + fn unlock(&self) -> Result<()> { + sys::unlock(self) + } +} + +/// Returns the error that a call to a try lock method on a contended file will +/// return. +pub fn lock_contended_error() -> Error { + sys::lock_error() +} + +/// `FsStats` contains some common stats about a file system. +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub struct FsStats { + free_space: u64, + available_space: u64, + total_space: u64, + allocation_granularity: u64, +} + +impl FsStats { + /// Returns the number of free bytes in the file system containing the provided + /// path. + pub fn free_space(&self) -> u64 { + self.free_space + } + + /// Returns the available space in bytes to non-priveleged users in the file + /// system containing the provided path. + pub fn available_space(&self) -> u64 { + self.available_space + } + + /// Returns the total space in bytes in the file system containing the provided + /// path. + pub fn total_space(&self) -> u64 { + self.total_space + } + + /// Returns the filesystem's disk space allocation granularity in bytes. + /// The provided path may be for any file in the filesystem. + /// + /// On Posix, this is equivalent to the filesystem's block size. + /// On Windows, this is equivalent to the filesystem's cluster size. + pub fn allocation_granularity(&self) -> u64 { + self.allocation_granularity + } +} + +/// Get the stats of the file system containing the provided path. +pub fn statvfs

<P>(path: P) -> Result<FsStats> where P: AsRef<Path> {
+    sys::statvfs(path.as_ref())
+}
+
+/// Returns the number of free bytes in the file system containing the provided
+/// path.
+pub fn free_space

<P>(path: P) -> Result<u64> where P: AsRef<Path> {
+    statvfs(path).map(|stat| stat.free_space)
+}
+
+/// Returns the available space in bytes to non-privileged users in the file
+/// system containing the provided path.
+pub fn available_space

<P>(path: P) -> Result<u64> where P: AsRef<Path> {
+    statvfs(path).map(|stat| stat.available_space)
+}
+
+/// Returns the total space in bytes in the file system containing the provided
+/// path.
+pub fn total_space

<P>(path: P) -> Result<u64> where P: AsRef<Path> {
+    statvfs(path).map(|stat| stat.total_space)
+}
+
+/// Returns the filesystem's disk space allocation granularity in bytes.
+/// The provided path may be for any file in the filesystem.
+///
+/// On Posix, this is equivalent to the filesystem's block size.
+/// On Windows, this is equivalent to the filesystem's cluster size.
+pub fn allocation_granularity

<P>(path: P) -> Result<u64> where P: AsRef<Path> {
+    statvfs(path).map(|stat| stat.allocation_granularity)
+}
+
+#[cfg(test)]
+mod test {
+
+    extern crate tempdir;
+    extern crate test;
+
+    use std::fs;
+    use super::*;
+    use std::io::{Read, Seek, SeekFrom, Write};
+
+    /// Tests file duplication.
+    #[test]
+    fn duplicate() {
+        let tempdir = tempdir::TempDir::new("fs2").unwrap();
+        let path = tempdir.path().join("fs2");
+        let mut file1 =
+            fs::OpenOptions::new().read(true).write(true).create(true).open(&path).unwrap();
+        let mut file2 = file1.duplicate().unwrap();
+
+        // Write into the first file and then drop it.
+        file1.write_all(b"foo").unwrap();
+        drop(file1);
+
+        let mut buf = vec![];
+
+        // Read from the second file; since the position is shared it will already be at EOF.
+        file2.read_to_end(&mut buf).unwrap();
+        assert_eq!(0, buf.len());
+
+        // Rewind and read.
+        file2.seek(SeekFrom::Start(0)).unwrap();
+        file2.read_to_end(&mut buf).unwrap();
+        assert_eq!(&buf, &b"foo");
+    }
+
+    /// Tests shared file lock operations.
+    #[test]
+    fn lock_shared() {
+        let tempdir = tempdir::TempDir::new("fs2").unwrap();
+        let path = tempdir.path().join("fs2");
+        let file1 = fs::OpenOptions::new().read(true).write(true).create(true).open(&path).unwrap();
+        let file2 = fs::OpenOptions::new().read(true).write(true).create(true).open(&path).unwrap();
+        let file3 = fs::OpenOptions::new().read(true).write(true).create(true).open(&path).unwrap();
+
+        // Concurrent shared access is OK, but not shared and exclusive.
+        file1.lock_shared().unwrap();
+        file2.lock_shared().unwrap();
+        assert_eq!(file3.try_lock_exclusive().unwrap_err().kind(),
+                   lock_contended_error().kind());
+        file1.unlock().unwrap();
+        assert_eq!(file3.try_lock_exclusive().unwrap_err().kind(),
+                   lock_contended_error().kind());
+
+        // Once all shared file locks are dropped, an exclusive lock may be created;
+        file2.unlock().unwrap();
+        file3.lock_exclusive().unwrap();
+    }
+
+    /// Tests exclusive file lock operations.
+    #[test]
+    fn lock_exclusive() {
+        let tempdir = tempdir::TempDir::new("fs2").unwrap();
+        let path = tempdir.path().join("fs2");
+        let file1 = fs::OpenOptions::new().read(true).write(true).create(true).open(&path).unwrap();
+        let file2 = fs::OpenOptions::new().read(true).write(true).create(true).open(&path).unwrap();
+
+        // No other access is possible once an exclusive lock is created.
+        file1.lock_exclusive().unwrap();
+        assert_eq!(file2.try_lock_exclusive().unwrap_err().kind(),
+                   lock_contended_error().kind());
+        assert_eq!(file2.try_lock_shared().unwrap_err().kind(),
+                   lock_contended_error().kind());
+
+        // Once the exclusive lock is dropped, the second file is able to create a lock.
+        file1.unlock().unwrap();
+        file2.lock_exclusive().unwrap();
+    }
+
+    /// Tests that a lock is released after the file that owns it is dropped.
+    #[test]
+    fn lock_cleanup() {
+        let tempdir = tempdir::TempDir::new("fs2").unwrap();
+        let path = tempdir.path().join("fs2");
+        let file1 = fs::OpenOptions::new().read(true).write(true).create(true).open(&path).unwrap();
+        let file2 = fs::OpenOptions::new().read(true).write(true).create(true).open(&path).unwrap();
+
+        file1.lock_exclusive().unwrap();
+        assert_eq!(file2.try_lock_shared().unwrap_err().kind(),
+                   lock_contended_error().kind());
+
+        // Drop file1; the lock should be released.
+        drop(file1);
+        file2.lock_shared().unwrap();
+    }
+
+    /// Tests file allocation.
+ #[test] + fn allocate() { + let tempdir = tempdir::TempDir::new("fs2").unwrap(); + let path = tempdir.path().join("fs2"); + let file = fs::OpenOptions::new().write(true).create(true).open(&path).unwrap(); + let blksize = allocation_granularity(&path).unwrap(); + + // New files are created with no allocated size. + assert_eq!(0, file.allocated_size().unwrap()); + assert_eq!(0, file.metadata().unwrap().len()); + + // Allocate space for the file, checking that the allocated size steps + // up by block size, and the file length matches the allocated size. + + file.allocate(2 * blksize - 1).unwrap(); + assert_eq!(2 * blksize, file.allocated_size().unwrap()); + assert_eq!(2 * blksize - 1, file.metadata().unwrap().len()); + + // Truncate the file, checking that the allocated size steps down by + // block size. + + file.set_len(blksize + 1).unwrap(); + assert_eq!(2 * blksize, file.allocated_size().unwrap()); + assert_eq!(blksize + 1, file.metadata().unwrap().len()); + } + + /// Checks filesystem space methods. + #[test] + fn filesystem_space() { + let tempdir = tempdir::TempDir::new("fs2").unwrap(); + let total_space = total_space(&tempdir.path()).unwrap(); + let free_space = free_space(&tempdir.path()).unwrap(); + let available_space = available_space(&tempdir.path()).unwrap(); + + assert!(total_space > free_space); + assert!(total_space > available_space); + assert!(available_space <= free_space); + } + + /// Benchmarks creating and removing a file. This is a baseline benchmark + /// for comparing against the truncate and allocate benchmarks. + #[bench] + fn bench_file_create(b: &mut test::Bencher) { + let tempdir = tempdir::TempDir::new("fs2").unwrap(); + let path = tempdir.path().join("file"); + + b.iter(|| { + fs::OpenOptions::new() + .read(true) + .write(true) + .create(true) + .open(&path) + .unwrap(); + fs::remove_file(&path).unwrap(); + }); + } + + /// Benchmarks creating a file, truncating it to 32MiB, and deleting it. + #[bench] + fn bench_file_truncate(b: &mut test::Bencher) { + let size = 32 * 1024 * 1024; + let tempdir = tempdir::TempDir::new("fs2").unwrap(); + let path = tempdir.path().join("file"); + + b.iter(|| { + let file = fs::OpenOptions::new() + .read(true) + .write(true) + .create(true) + .open(&path) + .unwrap(); + file.set_len(size).unwrap(); + fs::remove_file(&path).unwrap(); + }); + } + + /// Benchmarks creating a file, allocating 32MiB for it, and deleting it. + #[bench] + fn bench_file_allocate(b: &mut test::Bencher) { + let size = 32 * 1024 * 1024; + let tempdir = tempdir::TempDir::new("fs2").unwrap(); + let path = tempdir.path().join("file"); + + b.iter(|| { + let file = fs::OpenOptions::new() + .read(true) + .write(true) + .create(true) + .open(&path) + .unwrap(); + file.allocate(size).unwrap(); + fs::remove_file(&path).unwrap(); + }); + } + + /// Benchmarks creating a file, allocating 32MiB for it, and deleting it. + #[bench] + fn bench_allocated_size(b: &mut test::Bencher) { + let size = 32 * 1024 * 1024; + let tempdir = tempdir::TempDir::new("fs2").unwrap(); + let path = tempdir.path().join("file"); + let file = fs::OpenOptions::new() + .read(true) + .write(true) + .create(true) + .open(&path) + .unwrap(); + file.allocate(size).unwrap(); + + b.iter(|| { + file.allocated_size().unwrap(); + }); + } + + /// Benchmarks duplicating a file descriptor or handle. 
+ #[bench] + fn bench_duplicate(b: &mut test::Bencher) { + let tempdir = tempdir::TempDir::new("fs2").unwrap(); + let path = tempdir.path().join("fs2"); + let file = fs::OpenOptions::new().read(true).write(true).create(true).open(&path).unwrap(); + + b.iter(|| test::black_box(file.duplicate().unwrap())); + } + + /// Benchmarks locking and unlocking a file lock. + #[bench] + fn bench_lock_unlock(b: &mut test::Bencher) { + let tempdir = tempdir::TempDir::new("fs2").unwrap(); + let path = tempdir.path().join("fs2"); + let file = fs::OpenOptions::new().read(true).write(true).create(true).open(&path).unwrap(); + + b.iter(|| { + file.lock_exclusive().unwrap(); + file.unlock().unwrap(); + }); + } + + /// Benchmarks the free space method. + #[bench] + fn bench_free_space(b: &mut test::Bencher) { + let tempdir = tempdir::TempDir::new("fs2").unwrap(); + b.iter(|| { + test::black_box(free_space(&tempdir.path()).unwrap()); + }); + } + + /// Benchmarks the available space method. + #[bench] + fn bench_available_space(b: &mut test::Bencher) { + let tempdir = tempdir::TempDir::new("fs2").unwrap(); + b.iter(|| { + test::black_box(available_space(&tempdir.path()).unwrap()); + }); + } + + /// Benchmarks the total space method. + #[bench] + fn bench_total_space(b: &mut test::Bencher) { + let tempdir = tempdir::TempDir::new("fs2").unwrap(); + b.iter(|| { + test::black_box(total_space(&tempdir.path()).unwrap()); + }); + } +} diff --git a/fs2/src/unix.rs b/fs2/src/unix.rs new file mode 100644 index 000000000..91867f7e3 --- /dev/null +++ b/fs2/src/unix.rs @@ -0,0 +1,250 @@ +extern crate libc; + +use std::ffi::CString; +use std::fs::File; +use std::io::{Error, ErrorKind, Result}; +use std::mem; +use std::os::unix::ffi::OsStrExt; +use std::os::unix::fs::MetadataExt; +use std::os::unix::io::{AsRawFd, FromRawFd}; +use std::path::Path; + +use FsStats; + +pub fn duplicate(file: &File) -> Result { + unsafe { + let fd = libc::dup(file.as_raw_fd()); + + if fd < 0 { + Err(Error::last_os_error()) + } else { + Ok(File::from_raw_fd(fd)) + } + } +} + +pub fn lock_shared(file: &File) -> Result<()> { + flock(file, libc::LOCK_SH) +} + +pub fn lock_exclusive(file: &File) -> Result<()> { + flock(file, libc::LOCK_EX) +} + +pub fn try_lock_shared(file: &File) -> Result<()> { + flock(file, libc::LOCK_SH | libc::LOCK_NB) +} + +pub fn try_lock_exclusive(file: &File) -> Result<()> { + flock(file, libc::LOCK_EX | libc::LOCK_NB) +} + +pub fn unlock(file: &File) -> Result<()> { + flock(file, libc::LOCK_UN) +} + +pub fn lock_error() -> Error { + Error::from_raw_os_error(libc::EWOULDBLOCK) +} + +#[cfg(not(target_os = "solaris"))] +fn flock(file: &File, flag: libc::c_int) -> Result<()> { + let ret = unsafe { libc::flock(file.as_raw_fd(), flag) }; + if ret < 0 { Err(Error::last_os_error()) } else { Ok(()) } +} + +/// Simulate flock() using fcntl(); primarily for Oracle Solaris. +#[cfg(target_os = "solaris")] +fn flock(file: &File, flag: libc::c_int) -> Result<()> { + let mut fl = libc::flock { + l_whence: 0, + l_start: 0, + l_len: 0, + l_type: 0, + l_pad: [0; 4], + l_pid: 0, + l_sysid: 0, + }; + + // In non-blocking mode, use F_SETLK for cmd, F_SETLKW otherwise, and don't forget to clear + // LOCK_NB. 
+ let (cmd, operation) = match flag & libc::LOCK_NB { + 0 => (libc::F_SETLKW, flag), + _ => (libc::F_SETLK, flag & !libc::LOCK_NB), + }; + + match operation { + libc::LOCK_SH => fl.l_type |= libc::F_RDLCK, + libc::LOCK_EX => fl.l_type |= libc::F_WRLCK, + libc::LOCK_UN => fl.l_type |= libc::F_UNLCK, + _ => return Err(Error::from_raw_os_error(libc::EINVAL)), + } + + let ret = unsafe { libc::fcntl(file.as_raw_fd(), cmd, &fl) }; + match ret { + // Translate EACCES to EWOULDBLOCK + -1 => match Error::last_os_error().raw_os_error() { + Some(libc::EACCES) => return Err(lock_error()), + _ => return Err(Error::last_os_error()) + }, + _ => Ok(()) + } +} + +pub fn allocated_size(file: &File) -> Result { + file.metadata().map(|m| m.blocks() as u64 * 512) +} + +#[cfg(any(target_os = "linux", + target_os = "freebsd", + target_os = "android", + target_os = "nacl"))] +pub fn allocate(file: &File, len: u64) -> Result<()> { + let ret = unsafe { libc::posix_fallocate(file.as_raw_fd(), 0, len as libc::off_t) }; + if ret == 0 { Ok(()) } else { Err(Error::last_os_error()) } +} + +#[cfg(any(target_os = "macos", target_os = "ios"))] +pub fn allocate(file: &File, len: u64) -> Result<()> { + let stat = try!(file.metadata()); + + if len > stat.blocks() as u64 * 512 { + let mut fstore = libc::fstore_t { + fst_flags: libc::F_ALLOCATECONTIG, + fst_posmode: libc::F_PEOFPOSMODE, + fst_offset: 0, + fst_length: len as libc::off_t, + fst_bytesalloc: 0, + }; + + let ret = unsafe { libc::fcntl(file.as_raw_fd(), libc::F_PREALLOCATE, &fstore) }; + if ret == -1 { + // Unable to allocate contiguous disk space; attempt to allocate non-contiguously. + fstore.fst_flags = libc::F_ALLOCATEALL; + let ret = unsafe { libc::fcntl(file.as_raw_fd(), libc::F_PREALLOCATE, &fstore) }; + if ret == -1 { + return Err(Error::last_os_error()); + } + } + } + + if len > stat.size() as u64 { + file.set_len(len) + } else { + Ok(()) + } +} + +#[cfg(any(target_os = "openbsd", + target_os = "netbsd", + target_os = "dragonfly", + target_os = "solaris", + target_os = "haiku"))] +pub fn allocate(file: &File, len: u64) -> Result<()> { + // No file allocation API available, just set the length if necessary. + if len > try!(file.metadata()).len() as u64 { + file.set_len(len) + } else { + Ok(()) + } +} + +pub fn statvfs(path: &Path) -> Result { + let cstr = match CString::new(path.as_os_str().as_bytes()) { + Ok(cstr) => cstr, + Err(..) => return Err(Error::new(ErrorKind::InvalidInput, "path contained a null")), + }; + + unsafe { + let mut stat: libc::statvfs = mem::zeroed(); + // danburkert/fs2-rs#1: cast is necessary for platforms where c_char != u8. + if libc::statvfs(cstr.as_ptr() as *const _, &mut stat) != 0 { + Err(Error::last_os_error()) + } else { + Ok(FsStats { + free_space: stat.f_frsize as u64 * stat.f_bfree as u64, + available_space: stat.f_frsize as u64 * stat.f_bavail as u64, + total_space: stat.f_frsize as u64 * stat.f_blocks as u64, + allocation_granularity: stat.f_frsize as u64, + }) + } + } +} + +#[cfg(test)] +mod test { + extern crate tempdir; + extern crate libc; + + use std::fs::{self, File}; + use std::os::unix::io::AsRawFd; + + use {FileExt, lock_contended_error}; + + /// The duplicate method returns a file with a new file descriptor. 
+ #[test] + fn duplicate_new_fd() { + let tempdir = tempdir::TempDir::new("fs2").unwrap(); + let path = tempdir.path().join("fs2"); + let file1 = fs::OpenOptions::new().write(true).create(true).open(&path).unwrap(); + let file2 = file1.duplicate().unwrap(); + assert!(file1.as_raw_fd() != file2.as_raw_fd()); + } + + /// The duplicate method should preservesthe close on exec flag. + #[test] + fn duplicate_cloexec() { + + fn flags(file: &File) -> libc::c_int { + unsafe { libc::fcntl(file.as_raw_fd(), libc::F_GETFL, 0) } + } + + let tempdir = tempdir::TempDir::new("fs2").unwrap(); + let path = tempdir.path().join("fs2"); + let file1 = fs::OpenOptions::new().write(true).create(true).open(&path).unwrap(); + let file2 = file1.duplicate().unwrap(); + + assert_eq!(flags(&file1), flags(&file2)); + } + + /// Tests that locking a file descriptor will replace any existing locks + /// held on the file descriptor. + #[test] + fn lock_replace() { + let tempdir = tempdir::TempDir::new("fs2").unwrap(); + let path = tempdir.path().join("fs2"); + let file1 = fs::OpenOptions::new().write(true).create(true).open(&path).unwrap(); + let file2 = fs::OpenOptions::new().write(true).create(true).open(&path).unwrap(); + + // Creating a shared lock will drop an exclusive lock. + file1.lock_exclusive().unwrap(); + file1.lock_shared().unwrap(); + file2.lock_shared().unwrap(); + + // Attempting to replace a shared lock with an exclusive lock will fail + // with multiple lock holders, and remove the original shared lock. + assert_eq!(file2.try_lock_exclusive().unwrap_err().raw_os_error(), + lock_contended_error().raw_os_error()); + file1.lock_shared().unwrap(); + } + + /// Tests that locks are shared among duplicated file descriptors. + #[test] + fn lock_duplicate() { + let tempdir = tempdir::TempDir::new("fs2").unwrap(); + let path = tempdir.path().join("fs2"); + let file1 = fs::OpenOptions::new().write(true).create(true).open(&path).unwrap(); + let file2 = file1.duplicate().unwrap(); + let file3 = fs::OpenOptions::new().write(true).create(true).open(&path).unwrap(); + + // Create a lock through fd1, then replace it through fd2. + file1.lock_shared().unwrap(); + file2.lock_exclusive().unwrap(); + assert_eq!(file3.try_lock_shared().unwrap_err().raw_os_error(), + lock_contended_error().raw_os_error()); + + // Either of the file descriptors should be able to unlock. 
+ file1.unlock().unwrap(); + file3.lock_shared().unwrap(); + } +} diff --git a/fs2/src/windows.rs b/fs2/src/windows.rs new file mode 100644 index 000000000..0e37c6657 --- /dev/null +++ b/fs2/src/windows.rs @@ -0,0 +1,279 @@ +use std::fs::File; +use std::io::{Error, Result}; +use std::mem; +use std::os::windows::ffi::OsStrExt; +use std::os::windows::io::{AsRawHandle, FromRawHandle}; +use std::path::Path; +use std::ptr; + +use winapi::shared::minwindef::{BOOL, DWORD}; +use winapi::shared::winerror::ERROR_LOCK_VIOLATION; +use winapi::um::fileapi::{FILE_ALLOCATION_INFO, FILE_STANDARD_INFO, GetDiskFreeSpaceW}; +use winapi::um::fileapi::{GetVolumePathNameW, LockFileEx, UnlockFile, SetFileInformationByHandle}; +use winapi::um::handleapi::DuplicateHandle; +use winapi::um::minwinbase::{FileAllocationInfo, FileStandardInfo}; +use winapi::um::minwinbase::{LOCKFILE_FAIL_IMMEDIATELY, LOCKFILE_EXCLUSIVE_LOCK}; +use winapi::um::processthreadsapi::GetCurrentProcess; +use winapi::um::winbase::GetFileInformationByHandleEx; +use winapi::um::winnt::DUPLICATE_SAME_ACCESS; + +use FsStats; + +pub fn duplicate(file: &File) -> Result { + unsafe { + let mut handle = ptr::null_mut(); + let current_process = GetCurrentProcess(); + let ret = DuplicateHandle(current_process, + file.as_raw_handle(), + current_process, + &mut handle, + 0, + true as BOOL, + DUPLICATE_SAME_ACCESS); + if ret == 0 { + Err(Error::last_os_error()) + } else { + Ok(File::from_raw_handle(handle)) + } + } +} + +pub fn allocated_size(file: &File) -> Result { + unsafe { + let mut info: FILE_STANDARD_INFO = mem::zeroed(); + + let ret = GetFileInformationByHandleEx( + file.as_raw_handle(), + FileStandardInfo, + &mut info as *mut _ as *mut _, + mem::size_of::() as DWORD); + + if ret == 0 { + Err(Error::last_os_error()) + } else { + Ok(*info.AllocationSize.QuadPart() as u64) + } + } +} + +pub fn allocate(file: &File, len: u64) -> Result<()> { + if try!(allocated_size(file)) < len { + unsafe { + let mut info: FILE_ALLOCATION_INFO = mem::zeroed(); + *info.AllocationSize.QuadPart_mut() = len as i64; + let ret = SetFileInformationByHandle( + file.as_raw_handle(), + FileAllocationInfo, + &mut info as *mut _ as *mut _, + mem::size_of::() as DWORD); + if ret == 0 { + return Err(Error::last_os_error()); + } + } + } + if try!(file.metadata()).len() < len { + file.set_len(len) + } else { + Ok(()) + } +} + +pub fn lock_shared(file: &File) -> Result<()> { + lock_file(file, 0) +} + +pub fn lock_exclusive(file: &File) -> Result<()> { + lock_file(file, LOCKFILE_EXCLUSIVE_LOCK) +} + +pub fn try_lock_shared(file: &File) -> Result<()> { + lock_file(file, LOCKFILE_FAIL_IMMEDIATELY) +} + +pub fn try_lock_exclusive(file: &File) -> Result<()> { + lock_file(file, LOCKFILE_EXCLUSIVE_LOCK | LOCKFILE_FAIL_IMMEDIATELY) +} + +pub fn unlock(file: &File) -> Result<()> { + unsafe { + let ret = UnlockFile(file.as_raw_handle(), 0, 0, !0, !0); + if ret == 0 { Err(Error::last_os_error()) } else { Ok(()) } + } +} + +pub fn lock_error() -> Error { + Error::from_raw_os_error(ERROR_LOCK_VIOLATION as i32) +} + +fn lock_file(file: &File, flags: DWORD) -> Result<()> { + unsafe { + let mut overlapped = mem::zeroed(); + let ret = LockFileEx(file.as_raw_handle(), flags, 0, !0, !0, &mut overlapped); + if ret == 0 { Err(Error::last_os_error()) } else { Ok(()) } + } +} + +fn volume_path(path: &Path, volume_path: &mut [u16]) -> Result<()> { + let path_utf8: Vec = path.as_os_str().encode_wide().chain(Some(0)).collect(); + unsafe { + let ret = GetVolumePathNameW(path_utf8.as_ptr(), + 
volume_path.as_mut_ptr(), + volume_path.len() as DWORD); + if ret == 0 { Err(Error::last_os_error()) } else { Ok(()) + } + } +} + +pub fn statvfs(path: &Path) -> Result { + let root_path: &mut [u16] = &mut [0; 261]; + try!(volume_path(path, root_path)); + unsafe { + + let mut sectors_per_cluster = 0; + let mut bytes_per_sector = 0; + let mut number_of_free_clusters = 0; + let mut total_number_of_clusters = 0; + let ret = GetDiskFreeSpaceW(root_path.as_ptr(), + &mut sectors_per_cluster, + &mut bytes_per_sector, + &mut number_of_free_clusters, + &mut total_number_of_clusters); + if ret == 0 { + Err(Error::last_os_error()) + } else { + let bytes_per_cluster = sectors_per_cluster as u64 * bytes_per_sector as u64; + let free_space = bytes_per_cluster * number_of_free_clusters as u64; + let total_space = bytes_per_cluster * total_number_of_clusters as u64; + Ok(FsStats { + free_space: free_space, + available_space: free_space, + total_space: total_space, + allocation_granularity: bytes_per_cluster, + }) + } + } +} + +#[cfg(test)] +mod test { + + extern crate tempdir; + + use std::fs; + use std::os::windows::io::AsRawHandle; + + use {FileExt, lock_contended_error}; + + /// The duplicate method returns a file with a new file handle. + #[test] + fn duplicate_new_handle() { + let tempdir = tempdir::TempDir::new("fs2").unwrap(); + let path = tempdir.path().join("fs2"); + let file1 = fs::OpenOptions::new().write(true).create(true).open(&path).unwrap(); + let file2 = file1.duplicate().unwrap(); + assert!(file1.as_raw_handle() != file2.as_raw_handle()); + } + + /// A duplicated file handle does not have access to the original handle's locks. + #[test] + fn lock_duplicate_handle_independence() { + let tempdir = tempdir::TempDir::new("fs2").unwrap(); + let path = tempdir.path().join("fs2"); + let file1 = fs::OpenOptions::new().read(true).write(true).create(true).open(&path).unwrap(); + let file2 = file1.duplicate().unwrap(); + + // Locking the original file handle will block the duplicate file handle from opening a lock. + file1.lock_shared().unwrap(); + assert_eq!(file2.try_lock_exclusive().unwrap_err().raw_os_error(), + lock_contended_error().raw_os_error()); + + // Once the original file handle is unlocked, the duplicate handle can proceed with a lock. + file1.unlock().unwrap(); + file2.lock_exclusive().unwrap(); + } + + /// A file handle may not be exclusively locked multiple times, or exclusively locked and then + /// shared locked. + #[test] + fn lock_non_reentrant() { + let tempdir = tempdir::TempDir::new("fs2").unwrap(); + let path = tempdir.path().join("fs2"); + let file = fs::OpenOptions::new().read(true).write(true).create(true).open(&path).unwrap(); + + // Multiple exclusive locks fails. + file.lock_exclusive().unwrap(); + assert_eq!(file.try_lock_exclusive().unwrap_err().raw_os_error(), + lock_contended_error().raw_os_error()); + file.unlock().unwrap(); + + // Shared then Exclusive locks fails. + file.lock_shared().unwrap(); + assert_eq!(file.try_lock_exclusive().unwrap_err().raw_os_error(), + lock_contended_error().raw_os_error()); + } + + /// A file handle can hold an exclusive lock and any number of shared locks, all of which must + /// be unlocked independently. + #[test] + fn lock_layering() { + let tempdir = tempdir::TempDir::new("fs2").unwrap(); + let path = tempdir.path().join("fs2"); + let file = fs::OpenOptions::new().read(true).write(true).create(true).open(&path).unwrap(); + + // Open two shared locks on the file, and then try and fail to open an exclusive lock. 
+ file.lock_exclusive().unwrap(); + file.lock_shared().unwrap(); + file.lock_shared().unwrap(); + assert_eq!(file.try_lock_exclusive().unwrap_err().raw_os_error(), + lock_contended_error().raw_os_error()); + + // Pop one of the shared locks and try again. + file.unlock().unwrap(); + assert_eq!(file.try_lock_exclusive().unwrap_err().raw_os_error(), + lock_contended_error().raw_os_error()); + + // Pop the second shared lock and try again. + file.unlock().unwrap(); + assert_eq!(file.try_lock_exclusive().unwrap_err().raw_os_error(), + lock_contended_error().raw_os_error()); + + // Pop the exclusive lock and finally succeed. + file.unlock().unwrap(); + file.lock_exclusive().unwrap(); + } + + /// A file handle with multiple open locks will have all locks closed on drop. + #[test] + fn lock_layering_cleanup() { + let tempdir = tempdir::TempDir::new("fs2").unwrap(); + let path = tempdir.path().join("fs2"); + let file1 = fs::OpenOptions::new().read(true).write(true).create(true).open(&path).unwrap(); + let file2 = fs::OpenOptions::new().read(true).write(true).create(true).open(&path).unwrap(); + + // Open two shared locks on the file, and then try and fail to open an exclusive lock. + file1.lock_shared().unwrap(); + assert_eq!(file2.try_lock_exclusive().unwrap_err().raw_os_error(), + lock_contended_error().raw_os_error()); + + drop(file1); + file2.lock_exclusive().unwrap(); + } + + /// A file handle's locks will not be released until the original handle and all of its + /// duplicates have been closed. This on really smells like a bug in Windows. + #[test] + fn lock_duplicate_cleanup() { + let tempdir = tempdir::TempDir::new("fs2").unwrap(); + let path = tempdir.path().join("fs2"); + let file1 = fs::OpenOptions::new().read(true).write(true).create(true).open(&path).unwrap(); + let file2 = file1.duplicate().unwrap(); + + // Open a lock on the original handle, then close it. + file1.lock_shared().unwrap(); + drop(file1); + + // Attempting to create a lock on the file with the duplicate handle will fail. + assert_eq!(file2.try_lock_exclusive().unwrap_err().raw_os_error(), + lock_contended_error().raw_os_error()); + } +} diff --git a/fwdansi/.cargo-checksum.json b/fwdansi/.cargo-checksum.json new file mode 100644 index 000000000..e20d120fe --- /dev/null +++ b/fwdansi/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{},"package":"34dd4c507af68d37ffef962063dfa1944ce0dd4d5b82043dbab1dabe088610c3"} \ No newline at end of file diff --git a/fwdansi/Cargo.toml b/fwdansi/Cargo.toml new file mode 100644 index 000000000..03f35a7db --- /dev/null +++ b/fwdansi/Cargo.toml @@ -0,0 +1,36 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. 
If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "fwdansi" +version = "1.0.1" +authors = ["kennytm "] +description = "Forwards a byte string with ANSI escape code to a termcolor terminal" +keywords = ["ansi", "windows", "console", "terminal", "color"] +categories = ["command-line-interface"] +license = "MIT" +repository = "https://github.com/kennytm/fwdansi" +[dependencies.memchr] +version = "2" + +[dependencies.termcolor] +version = "1" +[dev-dependencies.proptest] +version = "0.8" +[badges.appveyor] +repository = "kennytm/fwdansi" + +[badges.maintenance] +status = "passively-maintained" + +[badges.travis-ci] +repository = "kennytm/fwdansi" diff --git a/fwdansi/appveyor.yml b/fwdansi/appveyor.yml new file mode 100644 index 000000000..dc3d893c8 --- /dev/null +++ b/fwdansi/appveyor.yml @@ -0,0 +1,20 @@ +environment: + matrix: + - TOOLCHAIN: 1.27.2 + - TOOLCHAIN: stable + - TOOLCHAIN: beta + - TOOLCHAIN: nightly + +install: + - appveyor DownloadFile https://win.rustup.rs/x86_64 -FileName rustup-init.exe + - rustup-init -y --default-toolchain %TOOLCHAIN% + - SET PATH=%PATH%;C:\Users\appveyor\.cargo\bin + - rustc -vV + - cargo -vV + +test_script: + - cargo build + - cargo test + - cargo run --example run-rustc + +build: false diff --git a/fwdansi/examples/run-rustc.rs b/fwdansi/examples/run-rustc.rs new file mode 100644 index 000000000..3bc303a0d --- /dev/null +++ b/fwdansi/examples/run-rustc.rs @@ -0,0 +1,17 @@ +extern crate termcolor; +extern crate fwdansi; + +use termcolor::*; +use std::io; +use std::process::Command; +use fwdansi::write_ansi; + +fn main() -> io::Result<()> { + let output = Command::new("rustc").args(&["--color", "always"]).output()?; + + let mut stderr = StandardStream::stderr(ColorChoice::Always); + write_ansi(&mut stderr, &output.stderr)?; + //^ should print "error: no input filename given" with appropriate color everywhere. + + Ok(()) +} diff --git a/fwdansi/src/lib.rs b/fwdansi/src/lib.rs new file mode 100644 index 000000000..d5dcdbc5c --- /dev/null +++ b/fwdansi/src/lib.rs @@ -0,0 +1,235 @@ +//! Write colored strings with ANSI escape code into a `termcolor` terminal. +//! +//! This package provides a single function, [`write_ansi`], which parses ANSI +//! escape codes in the provided byte string and transforms them into the +//! corresponding `termcolor` commands. The colors will be supported even on a +//! Windows console. +//! +//! The main purpose of this package is to forward colored output from a child +//! process. +//! +//! ```rust +// #![doc(include = "../examples/rustc.rs")] // still unstable, see issue 44732 +//! extern crate termcolor; +//! extern crate fwdansi; +//! +//! use termcolor::*; +//! use std::io; +//! use std::process::Command; +//! use fwdansi::write_ansi; +//! +//! fn main() -> io::Result<()> { +//! let output = Command::new("rustc").args(&["--color", "always"]).output()?; +//! +//! let mut stderr = StandardStream::stderr(ColorChoice::Always); +//! write_ansi(&mut stderr, &output.stderr)?; +//! //^ should print "error: no input filename given" with appropriate color everywhere. +//! +//! Ok(()) +//! } +//! ``` + +extern crate termcolor; +extern crate memchr; + +use memchr::memchr; +use termcolor::{Color, ColorSpec, WriteColor}; + +use std::io; +use std::mem; + +/// Writes a string with ANSI escape code into the colored output stream. +/// +/// Only SGR (`\x1b[…m`) is supported. Other input will be printed as-is. 
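+///
+/// A minimal sketch of forwarding a colored byte string (the byte string below is
+/// just an example SGR sequence):
+///
+/// ```
+/// # extern crate termcolor; extern crate fwdansi;
+/// use termcolor::{ColorChoice, StandardStream};
+/// use fwdansi::write_ansi;
+///
+/// # fn main() {
+/// let mut out = StandardStream::stdout(ColorChoice::Always);
+/// // "\x1b[31m" switches the foreground to red and "\x1b[0m" resets it;
+/// // the text in between is forwarded unchanged.
+/// write_ansi(&mut out, b"\x1b[31merror\x1b[0m: example message").unwrap();
+/// # }
+/// ```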
+pub fn write_ansi<W: WriteColor>(mut writer: W, mut ansi: &[u8]) -> io::Result<()> {
+    while let Some(index) = memchr(0x1b, ansi) {
+        let (left, right) = ansi.split_at(index);
+        writer.write_all(left)?;
+        if right.is_empty() {
+            return Ok(());
+        }
+
+        let mut parser = ColorSpecParser::new(right);
+        parser.parse();
+        if parser.ansi.as_ptr() == right.as_ptr() {
+            writer.write_all(&right[..1])?;
+            ansi = &right[1..];
+        } else {
+            writer.set_color(&parser.spec)?;
+            ansi = parser.ansi;
+        }
+    }
+    writer.write_all(ansi)
+}
+
+#[derive(Copy, Clone, PartialEq, Eq, Debug)]
+enum State {
+    Normal,
+    PrepareCustomColor,
+    Ansi256,
+    Rgb,
+}
+struct ColorSpecParser<'a> {
+    spec: ColorSpec,
+    ansi: &'a [u8],
+    reset: bool,
+    state: State,
+    is_bg: bool,
+    red: Option<u8>,
+    green: Option<u8>,
+}
+impl<'a> ColorSpecParser<'a> {
+    fn new(ansi: &'a [u8]) -> Self {
+        Self {
+            spec: ColorSpec::new(),
+            ansi,
+            reset: false,
+            state: State::Normal,
+            is_bg: false,
+            red: None,
+            green: None,
+        }
+    }
+
+    fn parse(&mut self) {
+        #[derive(PartialEq, Eq)]
+        enum Expected {
+            Escape,
+            OpenBracket,
+            Number(u8),
+        }
+
+        while !self.ansi.is_empty() {
+            let mut expected = Expected::Escape;
+            let mut it = self.ansi.iter();
+            for b in &mut it {
+                match (*b, expected) {
+                    (0x1b, Expected::Escape) => {
+                        expected = Expected::OpenBracket;
+                        continue;
+                    }
+                    (b'[', Expected::OpenBracket) => {
+                        expected = Expected::Number(0);
+                        continue;
+                    }
+                    (b'0'..=b'9', Expected::Number(number)) => {
+                        if let Some(n) = number.checked_mul(10).and_then(|n| n.checked_add(b - b'0')) {
+                            expected = Expected::Number(n);
+                            continue;
+                        }
+                    }
+                    (b':', Expected::Number(number))
+                    | (b';', Expected::Number(number))
+                    | (b'm', Expected::Number(number)) => {
+                        if self.apply_number(number) {
+                            if *b == b'm' {
+                                expected = Expected::Escape;
+                                break;
+                            } else {
+                                expected = Expected::Number(0);
+                                continue;
+                            }
+                        }
+                    }
+                    _ => {}
+                }
+                return;
+            }
+            if let Expected::Escape = expected {
+                self.ansi = it.as_slice();
+            } else {
+                break;
+            }
+        }
+    }
+
+    fn set_color(&mut self, color: Color) {
+        if self.is_bg {
+            self.spec.set_bg(Some(color));
+        } else {
+            self.spec.set_fg(Some(color));
+        }
+    }
+
+    fn apply_number(&mut self, number: u8) -> bool {
+        match (number, self.state) {
+            (0, State::Normal) => {
+                if mem::replace(&mut self.reset, true) {
+                    return false;
+                }
+            }
+            (1, State::Normal) => {
+                self.spec.set_bold(true);
+            }
+            (4, State::Normal) => {
+                self.spec.set_underline(true);
+            }
+            (21, State::Normal) => {
+                self.spec.set_bold(false);
+            }
+            (24, State::Normal) => {
+                self.spec.set_underline(false);
+            }
+            (38, State::Normal) | (48, State::Normal) => {
+                self.is_bg = number == 48;
+                self.state = State::PrepareCustomColor;
+            }
+            (30..=39, State::Normal) => {
+                self.spec.set_fg(parse_color(number - 30));
+            }
+            (40..=49, State::Normal) => {
+                self.spec.set_bg(parse_color(number - 40));
+            }
+            (90..=97, State::Normal) => {
+                self.spec.set_intense(true).set_fg(parse_color(number - 90));
+            }
+            (100..=107, State::Normal) => {
+                self.spec.set_intense(true).set_bg(parse_color(number - 100));
+            }
+            (5, State::PrepareCustomColor) => {
+                self.state = State::Ansi256;
+            }
+            (2, State::PrepareCustomColor) => {
+                self.state = State::Rgb;
+                self.red = None;
+                self.green = None;
+            }
+            (n, State::Ansi256) => {
+                self.set_color(Color::Ansi256(n));
+                self.state = State::Normal;
+            }
+            (b, State::Rgb) => {
+                match (self.red, self.green) {
+                    (None, _) => {
+                        self.red = Some(b);
+                    }
+                    (Some(_), None) => {
+                        self.green = Some(b);
+                    }
+                    (Some(r), Some(g)) => {
+                        self.set_color(Color::Rgb(r, g, b));
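+                        // All three RGB components are now known: emit the color,
+                        // then drop back to normal SGR parsing.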
self.state = State::Normal; + } + } + } + _ => { + self.state = State::Normal; + } + } + true + } +} + +fn parse_color(digit: u8) -> Option { + match digit { + 0 => Some(Color::Black), + 1 => Some(Color::Red), + 2 => Some(Color::Green), + 3 => Some(Color::Yellow), + 4 => Some(Color::Blue), + 5 => Some(Color::Magenta), + 6 => Some(Color::Cyan), + 7 => Some(Color::White), + _ => None, + } +} diff --git a/fwdansi/tests/tests.proptest-regressions b/fwdansi/tests/tests.proptest-regressions new file mode 100644 index 000000000..ad5349d13 --- /dev/null +++ b/fwdansi/tests/tests.proptest-regressions @@ -0,0 +1,11 @@ +# Seeds for failure cases proptest has generated in the past. It is +# automatically read and these particular cases re-run before any +# novel cases are generated. +# +# It is recommended to check this file in to source control so that +# everyone who runs the test benefits from these saved cases. +xs 3335351554 386647235 3584534272 266266105 # shrinks to ref elements = [Reset] +xs 1928044643 2253320966 2424427606 448028530 # shrinks to ref ansi_string = [27] +xs 1984785545 2043478319 191885995 1442765875 # shrinks to ref elements = [Text([27])] +xs 4274884889 8924921 2258884976 859679705 # shrinks to ref elements = [Text([27, 91, 109])] +xs 428132133 722552347 3851126282 1913029940 # shrinks to ref elements = [Text([27, 91, 58])] diff --git a/fwdansi/tests/tests.rs b/fwdansi/tests/tests.rs new file mode 100644 index 000000000..5ecaecee0 --- /dev/null +++ b/fwdansi/tests/tests.rs @@ -0,0 +1,99 @@ +#[macro_use] +extern crate proptest; +extern crate fwdansi; +extern crate termcolor; + +use fwdansi::write_ansi; +use proptest::prelude::*; +use std::io; +use termcolor::{Ansi, Color, ColorSpec, WriteColor}; + +proptest! { + #[test] + fn check_no_crash(ref ansi_string in any::>()) { + let mut ansi = Ansi::new(Vec::with_capacity(ansi_string.len())); + prop_assert!(write_ansi(&mut ansi, &ansi_string).is_ok()); + } + + #[test] + fn ansi_idempotent(ref elements in prop::collection::vec(Element::any(), 0..100)) { + // first, write the test string into an ANSI buffer. + let mut original = Ansi::new(Vec::new()); + for e in elements { + e.write(&mut original).unwrap(); + } + + // recover the original string, and forward it using `write_ansi`. + let original = original.into_inner(); + let mut forwarded = Ansi::new(Vec::with_capacity(original.len())); + prop_assert!(write_ansi(&mut forwarded, &original).is_ok()); + prop_assert_eq!(original, forwarded.into_inner()); + } +} + +#[derive(Debug, Clone)] +enum Element { + ColorSpec(ColorSpec), + Reset, + Text(Vec), +} + +fn any_opt_color() -> impl Strategy> { + let color = prop_oneof![ + Just(Color::Black), + Just(Color::Red), + Just(Color::Green), + Just(Color::Yellow), + Just(Color::Blue), + Just(Color::Magenta), + Just(Color::Cyan), + Just(Color::White), + any::().prop_map(Color::Ansi256), + any::<[u8; 3]>().prop_map(|[r, g, b]| Color::Rgb(r, g, b)), + ]; + prop::option::weighted(0.9, color) +} + +prop_compose! 
{ + fn any_color_spec()( + bold in any::(), + underline in any::(), + intense in any::(), + fg_color in any_opt_color(), + bg_color in any_opt_color(), + ) -> ColorSpec { + let mut spec = ColorSpec::new(); + spec.set_bold(bold) + .set_underline(underline) + .set_intense(intense) + .set_fg(fg_color) + .set_bg(bg_color); + spec + } +} + +impl Element { + fn any() -> impl Strategy { + prop_oneof![ + Just(Element::Reset), + any_color_spec().prop_map(Element::ColorSpec), + any::>() + .prop_filter_map( + "ignored empty SGR", + |v| if v.windows(3).find(|w| w == b"\x1b[m").is_some() { + None + } else { + Some(Element::Text(v)) + } + ), + ] + } + + fn write(&self, mut w: W) -> io::Result<()> { + match self { + Element::ColorSpec(cs) => w.set_color(cs), + Element::Reset => w.reset(), + Element::Text(text) => w.write_all(text), + } + } +} diff --git a/git2-curl/.cargo-checksum.json b/git2-curl/.cargo-checksum.json new file mode 100644 index 000000000..4e5b7249a --- /dev/null +++ b/git2-curl/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{},"package":"0173e317f8ba21f3fff0f71549fead5e42e67961dbd402bf69f42775f3cc78b4"} \ No newline at end of file diff --git a/git2-curl/Cargo.toml b/git2-curl/Cargo.toml new file mode 100644 index 000000000..552c52d3f --- /dev/null +++ b/git2-curl/Cargo.toml @@ -0,0 +1,48 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "git2-curl" +version = "0.8.2" +authors = ["Alex Crichton "] +description = "Backend for an HTTP transport in libgit2 powered by libcurl.\n\nIntended to be used with the git2 crate.\n" +homepage = "https://github.com/alexcrichton/git2-rs" +documentation = "https://docs.rs/git2-curl" +license = "MIT/Apache-2.0" +repository = "https://github.com/alexcrichton/git2-rs" + +[[test]] +name = "all" +harness = false +[dependencies.curl] +version = "0.4" + +[dependencies.git2] +version = "0.7" +default-features = false + +[dependencies.log] +version = "0.4" + +[dependencies.url] +version = "1.0" +[dev-dependencies.civet] +version = "0.11" + +[dev-dependencies.conduit] +version = "0.8" + +[dev-dependencies.conduit-git-http-backend] +version = "0.8" + +[dev-dependencies.tempfile] +version = "3.0" diff --git a/git2-curl/LICENSE-APACHE b/git2-curl/LICENSE-APACHE new file mode 100644 index 000000000..16fe87b06 --- /dev/null +++ b/git2-curl/LICENSE-APACHE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
diff --git a/git2-curl/LICENSE-MIT b/git2-curl/LICENSE-MIT new file mode 100644 index 000000000..39e0ed660 --- /dev/null +++ b/git2-curl/LICENSE-MIT @@ -0,0 +1,25 @@ +Copyright (c) 2014 Alex Crichton + +Permission is hereby granted, free of charge, to any +person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without +limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software +is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions +of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/git2-curl/src/lib.rs b/git2-curl/src/lib.rs new file mode 100644 index 000000000..90fe412b7 --- /dev/null +++ b/git2-curl/src/lib.rs @@ -0,0 +1,280 @@ +//! A crate for using libcurl as a backend for HTTP git requests with git2-rs. +//! +//! This crate provides one public function, `register`, which will register +//! a custom HTTP transport with libcurl for any HTTP requests made by libgit2. +//! At this time the `register` function is unsafe for the same reasons that +//! `git2::transport::register` is also unsafe. +//! +//! It is not recommended to use this crate wherever possible. The current +//! libcurl backend used, `curl-rust`, only supports executing a request in one +//! method call implying no streaming support. This consequently means that +//! when a repository is cloned the entire contents of the repo are downloaded +//! into memory, and *then* written off to disk by libgit2 afterwards. It +//! should be possible to alleviate this problem in the future. +//! +//! > **NOTE**: At this time this crate likely does not support a `git push` +//! > operation, only clones. + +#![doc(html_root_url = "http://alexcrichton.com/git2-rs")] + +extern crate git2; +extern crate curl; +extern crate url; +#[macro_use] extern crate log; + +use std::error; +use std::io::prelude::*; +use std::io::{self, Cursor}; +use std::str; +use std::sync::{Once, ONCE_INIT, Arc, Mutex}; + +use curl::easy::{Easy, List}; +use git2::Error; +use git2::transport::{SmartSubtransportStream}; +use git2::transport::{Transport, SmartSubtransport, Service}; +use url::Url; + +struct CurlTransport { + handle: Arc>, + /// The URL of the remote server, e.g. "https://github.com/user/repo" + /// + /// This is an empty string until the first action is performed. + /// If there is an HTTP redirect, this will be updated with the new URL. + base_url: Arc> +} + +struct CurlSubtransport { + handle: Arc>, + service: &'static str, + url_path: &'static str, + base_url: Arc>, + method: &'static str, + reader: Option>>, + sent_request: bool, +} + +/// Register the libcurl backend for HTTP requests made by libgit2. +/// +/// This function takes one parameter, a `handle`, which is used to perform all +/// future HTTP requests. 
The handle can be previously configured with
+/// information such as proxies, SSL information, etc.
+///
+/// This function is unsafe largely for the same reasons as
+/// `git2::transport::register`:
+///
+/// * The function needs to be synchronized against all other creations of
+///   transport (any API calls to libgit2).
+/// * The function will leak `handle` as once registered it is not currently
+///   possible to unregister the backend.
+///
+/// This function may be called concurrently, but only the first `handle` will
+/// be used. All others will be discarded.
+pub unsafe fn register(handle: Easy) {
+    static INIT: Once = ONCE_INIT;
+
+    let handle = Arc::new(Mutex::new(handle));
+    let handle2 = handle.clone();
+    INIT.call_once(move || {
+        git2::transport::register("http", move |remote| {
+            factory(remote, handle.clone())
+        }).unwrap();
+        git2::transport::register("https", move |remote| {
+            factory(remote, handle2.clone())
+        }).unwrap();
+    });
+}
+
+fn factory(remote: &git2::Remote, handle: Arc<Mutex<Easy>>)
+           -> Result<Transport, Error> {
+    Transport::smart(remote, true, CurlTransport {
+        handle: handle,
+        base_url: Arc::new(Mutex::new(String::new()))
+    })
+}
+
+impl SmartSubtransport for CurlTransport {
+    fn action(&self, url: &str, action: Service)
+              -> Result<Box<SmartSubtransportStream>, Error> {
+        let mut base_url = self.base_url.lock().unwrap();
+        if base_url.len() == 0 {
+            *base_url = url.to_string();
+        }
+        let (service, path, method) = match action {
+            Service::UploadPackLs => {
+                ("upload-pack", "/info/refs?service=git-upload-pack", "GET")
+            }
+            Service::UploadPack => {
+                ("upload-pack", "/git-upload-pack", "POST")
+            }
+            Service::ReceivePackLs => {
+                ("receive-pack", "/info/refs?service=git-receive-pack", "GET")
+            }
+            Service::ReceivePack => {
+                ("receive-pack", "/git-receive-pack", "POST")
+            }
+        };
+        info!("action {} {}", service, path);
+        Ok(Box::new(CurlSubtransport {
+            handle: self.handle.clone(),
+            service: service,
+            url_path: path,
+            base_url: self.base_url.clone(),
+            method: method,
+            reader: None,
+            sent_request: false,
+        }))
+    }
+
+    fn close(&self) -> Result<(), Error> {
+        Ok(()) // ...
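+        // (nothing to tear down here: the shared curl handle is kept for reuse
+        // by later requests)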
+ } +} + +impl CurlSubtransport { + fn err>>(&self, err: E) -> io::Error { + io::Error::new(io::ErrorKind::Other, err) + } + + fn execute(&mut self, data: &[u8]) -> io::Result<()> { + if self.sent_request { + return Err(self.err("already sent HTTP request")) + } + let agent = format!("git/1.0 (git2-curl {})", env!("CARGO_PKG_VERSION")); + + // Parse our input URL to figure out the host + let url = format!("{}{}", self.base_url.lock().unwrap(), self.url_path); + let parsed = try!(Url::parse(&url).map_err(|_| { + self.err("invalid url, failed to parse") + })); + let host = match parsed.host_str() { + Some(host) => host, + None => return Err(self.err("invalid url, did not have a host")), + }; + + // Prep the request + debug!("request to {}", url); + let mut h = self.handle.lock().unwrap(); + try!(h.url(&url)); + try!(h.useragent(&agent)); + try!(h.follow_location(true)); + match self.method { + "GET" => try!(h.get(true)), + "PUT" => try!(h.put(true)), + "POST" => try!(h.post(true)), + other => try!(h.custom_request(other)), + } + + let mut headers = List::new(); + try!(headers.append(&format!("Host: {}", host))); + if data.len() > 0 { + try!(h.post_fields_copy(data)); + try!(headers.append(&format!("Accept: application/x-git-{}-result", + self.service))); + try!(headers.append(&format!("Content-Type: \ + application/x-git-{}-request", + self.service))); + } else { + try!(headers.append("Accept: */*")); + } + try!(headers.append("Expect:")); + try!(h.http_headers(headers)); + + let mut content_type = None; + let mut data = Vec::new(); + { + let mut h = h.transfer(); + + // Look for the Content-Type header + try!(h.header_function(|header| { + let header = match str::from_utf8(header) { + Ok(s) => s, + Err(..) => return true, + }; + let mut parts = header.splitn(2, ": "); + let name = parts.next().unwrap(); + let value = match parts.next() { + Some(value) => value, + None => return true, + }; + if name.eq_ignore_ascii_case("Content-Type") { + content_type = Some(value.trim().to_string()); + } + + true + })); + + // Collect the request's response in-memory + try!(h.write_function(|buf| { + data.extend_from_slice(buf); + Ok(buf.len()) + })); + + // Send the request + try!(h.perform()); + } + + let code = try!(h.response_code()); + if code != 200 { + return Err(self.err(&format!("failed to receive HTTP 200 response: \ + got {}", code)[..])) + } + + // Check returned headers + let expected = match self.method { + "GET" => format!("application/x-git-{}-advertisement", + self.service), + _ => format!("application/x-git-{}-result", self.service), + }; + match content_type { + Some(ref content_type) if *content_type != expected => { + return Err(self.err(&format!("expected a Content-Type header \ + with `{}` but found `{}`", + expected, content_type)[..])) + } + Some(..) => {} + None => { + return Err(self.err(&format!("expected a Content-Type header \ + with `{}` but didn't find one", + expected)[..])) + } + } + + // Ok, time to read off some data. + let rdr = Cursor::new(data); + self.reader = Some(rdr); + + // If there was a redirect, update the `CurlTransport` with the new base. + if let Ok(Some(effective_url)) = h.effective_url() { + let new_base = if effective_url.ends_with(self.url_path) { + // Strip the action from the end. + &effective_url[..effective_url.len() - self.url_path.len()] + } else { + // I'm not sure if this code path makes sense, but it's what + // libgit does. 
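+                // In that case the effective URL itself becomes the new base.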
+ effective_url + }; + *self.base_url.lock().unwrap() = new_base.to_string(); + } + + Ok(()) + } +} + +impl Read for CurlSubtransport { + fn read(&mut self, buf: &mut [u8]) -> io::Result { + if self.reader.is_none() { + try!(self.execute(&[])); + } + self.reader.as_mut().unwrap().read(buf) + } +} + +impl Write for CurlSubtransport { + fn write(&mut self, data: &[u8]) -> io::Result { + if self.reader.is_none() { + try!(self.execute(data)); + } + Ok(data.len()) + } + fn flush(&mut self) -> io::Result<()> { Ok(()) } +} diff --git a/git2-curl/tests/all.rs b/git2-curl/tests/all.rs new file mode 100644 index 000000000..941020e18 --- /dev/null +++ b/git2-curl/tests/all.rs @@ -0,0 +1,68 @@ +extern crate conduit_git_http_backend as git_backend; +extern crate git2_curl; +extern crate civet; +extern crate conduit; +extern crate curl; +extern crate git2; +extern crate tempfile; + +use civet::{Server, Config}; +use std::fs::File; +use std::path::Path; +use tempfile::TempDir; + +const PORT: u16 = 7848; + +fn main() { + unsafe { + git2_curl::register(curl::easy::Easy::new()); + } + + // Spin up a server for git-http-backend + let td = TempDir::new().unwrap(); + let mut cfg = Config::new(); + cfg.port(PORT).threads(1); + let _a = Server::start(cfg, git_backend::Serve(td.path().to_path_buf())); + + // Prep a repo with one file called `foo` + let sig = git2::Signature::now("foo", "bar").unwrap(); + let r1 = git2::Repository::init(td.path()).unwrap(); + File::create(&td.path().join(".git").join("git-daemon-export-ok")).unwrap(); + { + let mut index = r1.index().unwrap(); + File::create(&td.path().join("foo")).unwrap(); + index.add_path(Path::new("foo")).unwrap(); + index.write().unwrap(); + let tree_id = index.write_tree().unwrap(); + r1.commit(Some("HEAD"), &sig, &sig, "test", + &r1.find_tree(tree_id).unwrap(), + &[]).unwrap(); + } + + // Clone through the git-http-backend + let td2 = TempDir::new().unwrap(); + let r = git2::Repository::clone(&format!("http://localhost:{}", PORT), + td2.path()).unwrap(); + assert!(File::open(&td2.path().join("foo")).is_ok()); + { + File::create(&td.path().join("bar")).unwrap(); + let mut index = r1.index().unwrap(); + index.add_path(&Path::new("bar")).unwrap(); + index.write().unwrap(); + let tree_id = index.write_tree().unwrap(); + let parent = r1.head().ok().and_then(|h| h.target()).unwrap(); + let parent = r1.find_commit(parent).unwrap(); + r1.commit(Some("HEAD"), &sig, &sig, "test", + &r1.find_tree(tree_id).unwrap(), + &[&parent]).unwrap(); + } + + let mut remote = r.find_remote("origin").unwrap(); + remote.fetch(&["refs/heads/*:refs/heads/*"], None, None).unwrap(); + let b = r.find_branch("master", git2::BranchType::Local).unwrap(); + let id = b.get().target().unwrap(); + let obj = r.find_object(id, None).unwrap(); + r.reset(&obj, git2::ResetType::Hard, None).unwrap();; + + assert!(File::open(&td2.path().join("bar")).is_ok()); +} diff --git a/git2/.cargo-checksum.json b/git2/.cargo-checksum.json new file mode 100644 index 000000000..6859a82fb --- /dev/null +++ b/git2/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{},"package":"591f8be1674b421644b6c030969520bc3fa12114d2eb467471982ed3e9584e71"} \ No newline at end of file diff --git a/git2/Cargo.toml b/git2/Cargo.toml new file mode 100644 index 000000000..84699d7b4 --- /dev/null +++ b/git2/Cargo.toml @@ -0,0 +1,75 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and 
also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "git2" +version = "0.7.5" +authors = ["Alex Crichton "] +description = "Bindings to libgit2 for interoperating with git repositories. This library is\nboth threadsafe and memory safe and allows both reading and writing git\nrepositories.\n" +homepage = "https://github.com/alexcrichton/git2-rs" +documentation = "https://docs.rs/git2" +readme = "README.md" +keywords = ["git"] +categories = ["api-bindings"] +license = "MIT/Apache-2.0" +repository = "https://github.com/alexcrichton/git2-rs" +[dependencies.bitflags] +version = "1.0" + +[dependencies.libc] +version = "0.2" + +[dependencies.libgit2-sys] +version = "0.7.7" + +[dependencies.log] +version = "0.4" + +[dependencies.url] +version = "1.0" +[dev-dependencies.docopt] +version = "1.0" + +[dev-dependencies.serde] +version = "1.0" + +[dev-dependencies.serde_derive] +version = "1.0" + +[dev-dependencies.tempdir] +version = "0.3.7" + +[dev-dependencies.thread-id] +version = "3.3.0" + +[dev-dependencies.time] +version = "0.1.39" + +[features] +curl = ["libgit2-sys/curl"] +default = ["ssh", "https", "curl", "ssh_key_from_memory"] +https = ["libgit2-sys/https", "openssl-sys", "openssl-probe"] +ssh = ["libgit2-sys/ssh"] +ssh_key_from_memory = ["libgit2-sys/ssh_key_from_memory"] +unstable = [] +[target."cfg(all(unix, not(target_os = \"macos\")))".dependencies.openssl-probe] +version = "0.1" +optional = true + +[target."cfg(all(unix, not(target_os = \"macos\")))".dependencies.openssl-sys] +version = "0.9.0" +optional = true +[badges.appveyor] +repository = "alexcrichton/git2-rs" + +[badges.travis-ci] +repository = "alexcrichton/git2-rs" diff --git a/git2/LICENSE-APACHE b/git2/LICENSE-APACHE new file mode 100644 index 000000000..16fe87b06 --- /dev/null +++ b/git2/LICENSE-APACHE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/git2/LICENSE-MIT b/git2/LICENSE-MIT new file mode 100644 index 000000000..39e0ed660 --- /dev/null +++ b/git2/LICENSE-MIT @@ -0,0 +1,25 @@ +Copyright (c) 2014 Alex Crichton + +Permission is hereby granted, free of charge, to any +person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without +limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software +is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions +of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/git2/README.md b/git2/README.md new file mode 100644 index 000000000..b060602d3 --- /dev/null +++ b/git2/README.md @@ -0,0 +1,71 @@ +# git2-rs + +[![Build Status](https://travis-ci.org/alexcrichton/git2-rs.svg?branch=master)](https://travis-ci.org/alexcrichton/git2-rs) +[![Build Status](https://ci.appveyor.com/api/projects/status/6vem3xgno2kuxnfm?svg=true)](https://ci.appveyor.com/project/alexcrichton/git2-rs) + +[Documentation](https://docs.rs/git2) + +libgit2 bindings for Rust + +```toml +[dependencies] +git2 = "0.7" +``` + +## Version of libgit2 + +Currently this library requires libgit2 0.25.1. The source for libgit2 is +included in the libgit2-sys crate so there's no need to pre-install the libgit2 +library, the libgit2-sys crate will figure that and/or build that for you. + +## Building git2-rs + +First, you'll need to install _CMake_. Afterwards, just run: + +```sh +$ git clone https://github.com/alexcrichton/git2-rs +$ cd git2-rs +$ cargo build +``` + +### Automating Testing + +Running tests and handling all of the associated edge cases on every commit +proves tedious very quickly. To automate tests and handle proper stashing and +unstashing of unstaged changes and thus avoid nasty surprises, use the +pre-commit hook found [here][pre-commit-hook] and place it into the +`.git/hooks/` with the name `pre-commit`. You may need to add execution +permissions with `chmod +x`. + + +To skip tests on a simple commit or doc-fixes, use `git commit --no-verify`. + +## Building on OSX 10.10+ + +Currently libssh2 requires linking against OpenSSL, and to compile libssh2 it +also needs to find the OpenSSL headers. On OSX 10.10+ the OpenSSL headers have +been removed, but if you're using Homebrew you can install them via: + +```sh +brew install openssl +``` + + +# License + +This project is licensed under either of + + * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or + http://www.apache.org/licenses/LICENSE-2.0) + * MIT license ([LICENSE-MIT](LICENSE-MIT) or + http://opensource.org/licenses/MIT) + +at your option. + +### Contribution + +Unless you explicitly state otherwise, any contribution intentionally submitted +for inclusion in git2-rs by you, as defined in the Apache-2.0 license, shall be +dual licensed as above, without any additional terms or conditions. 
+ +[pre-commit-hook]: https://gist.github.com/glfmn/0c5e9e2b41b48007ed3497d11e3dbbfa diff --git a/git2/appveyor.yml b/git2/appveyor.yml new file mode 100644 index 000000000..0182bfd8b --- /dev/null +++ b/git2/appveyor.yml @@ -0,0 +1,19 @@ +environment: + matrix: + - TARGET: x86_64-pc-windows-msvc + - TARGET: i686-pc-windows-msvc +install: + - ps: Start-FileDownload "https://static.rust-lang.org/dist/rust-nightly-${env:TARGET}.exe" + - rust-nightly-%TARGET%.exe /VERYSILENT /NORESTART /DIR="C:\Program Files (x86)\Rust" + - set PATH=%PATH%;C:\Program Files (x86)\Rust\bin + - if defined MSYS_BITS set PATH=C:\msys64\mingw%MSYS_BITS%\bin;C:\msys64\usr\bin;%PATH% + - set CARGO_TARGET_DIR=%APPVEYOR_BUILD_FOLDER%\target + - rustc -V + - cargo -V + +build: false + +test_script: + - cargo test --target %TARGET% + - cargo test --no-default-features --target %TARGET% + - cargo run --manifest-path systest/Cargo.toml --target %TARGET% diff --git a/git2/examples/add.rs b/git2/examples/add.rs new file mode 100644 index 000000000..3167cb074 --- /dev/null +++ b/git2/examples/add.rs @@ -0,0 +1,85 @@ +/* + * libgit2 "add" example - shows how to modify the index + * + * Written by the libgit2 contributors + * + * To the extent possible under law, the author(s) have dedicated all copyright + * and related and neighboring rights to this software to the public domain + * worldwide. This software is distributed without any warranty. + * + * You should have received a copy of the CC0 Public Domain Dedication along + * with this software. If not, see + * . + */ + +#![deny(warnings)] +#![allow(trivial_casts)] + +extern crate git2; +extern crate docopt; +#[macro_use] +extern crate serde_derive; + +use std::path::Path; +use docopt::Docopt; +use git2::Repository; + +#[derive(Deserialize)] +struct Args { + arg_spec: Vec, + flag_dry_run: bool, + flag_verbose: bool, + flag_update: bool, +} + +fn run(args: &Args) -> Result<(), git2::Error> { + let repo = try!(Repository::open(&Path::new("."))); + let mut index = try!(repo.index()); + + let cb = &mut |path: &Path, _matched_spec: &[u8]| -> i32 { + let status = repo.status_file(path).unwrap(); + + let ret = if status.contains(git2::Status::WT_MODIFIED) || + status.contains(git2::Status::WT_NEW) { + println!("add '{}'", path.display()); + 0 + } else { + 1 + }; + + if args.flag_dry_run {1} else {ret} + }; + let cb = if args.flag_verbose || args.flag_update { + Some(cb as &mut git2::IndexMatchedPath) + } else { + None + }; + + if args.flag_update { + try!(index.update_all(args.arg_spec.iter(), cb)); + } else { + try!(index.add_all(args.arg_spec.iter(), git2::IndexAddOption::DEFAULT, cb)); + } + + try!(index.write()); + Ok(()) +} + +fn main() { + const USAGE: &'static str = " +usage: add [options] [--] [..] + +Options: + -n, --dry-run dry run + -v, --verbose be verbose + -u, --update update tracked files + -h, --help show this message +"; + + let args = Docopt::new(USAGE).and_then(|d| d.deserialize()) + .unwrap_or_else(|e| e.exit()); + match run(&args) { + Ok(()) => {} + Err(e) => println!("error: {}", e), + } +} diff --git a/git2/examples/blame.rs b/git2/examples/blame.rs new file mode 100644 index 000000000..5f7bee368 --- /dev/null +++ b/git2/examples/blame.rs @@ -0,0 +1,106 @@ +/* + * libgit2 "blame" example - shows how to use the blame API + * + * Written by the libgit2 contributors + * + * To the extent possible under law, the author(s) have dedicated all copyright + * and related and neighboring rights to this software to the public domain + * worldwide. 
This software is distributed without any warranty. + * + * You should have received a copy of the CC0 Public Domain Dedication along + * with this software. If not, see + * . + */ + +extern crate git2; +extern crate docopt; +#[macro_use] +extern crate serde_derive; + +use docopt::Docopt; +use git2::{Repository, BlameOptions}; +use std::path::Path; +use std::io::{BufReader, BufRead}; + +#[derive(Deserialize)] #[allow(non_snake_case)] +struct Args { + arg_path: String, + arg_spec: Option, + flag_M: bool, + flag_C: bool, + flag_F: bool, +} + +fn run(args: &Args) -> Result<(), git2::Error> { + let repo = try!(Repository::open(".")); + let path = Path::new(&args.arg_path[..]); + + // Prepare our blame options + let mut opts = BlameOptions::new(); + opts.track_copies_same_commit_moves(args.flag_M) + .track_copies_same_commit_copies(args.flag_C) + .first_parent(args.flag_F); + + let mut commit_id = "HEAD".to_string(); + + // Parse spec + if let Some(spec) = args.arg_spec.as_ref() { + + let revspec = try!(repo.revparse(spec)); + + let (oldest, newest) = if revspec.mode().contains(git2::RevparseMode::SINGLE) { + (None, revspec.from()) + } else if revspec.mode().contains(git2::RevparseMode::RANGE) { + (revspec.from(), revspec.to()) + } else { + (None, None) + }; + + if let Some(commit) = oldest { + opts.oldest_commit(commit.id()); + } + + if let Some(commit) = newest { + opts.newest_commit(commit.id()); + if !commit.id().is_zero() { + commit_id = format!("{}", commit.id()) + } + } + + } + + let spec = format!("{}:{}", commit_id, path.display()); + let blame = try!(repo.blame_file(path, Some(&mut opts))); + let object = try!(repo.revparse_single(&spec[..])); + let blob = try!(repo.find_blob(object.id())); + let reader = BufReader::new(blob.content()); + + for (i, line) in reader.lines().enumerate() { + if let (Ok(line), Some(hunk)) = (line, blame.get_line(i+1)) { + let sig = hunk.final_signature(); + println!("{} {} <{}> {}", hunk.final_commit_id(), + String::from_utf8_lossy(sig.name_bytes()), + String::from_utf8_lossy(sig.email_bytes()), line); + } + } + + Ok(()) +} + +fn main() { + const USAGE: &'static str = " +usage: blame [options] [] + +Options: + -M find line moves within and across files + -C find line copies within and across files + -F follow only the first parent commits +"; + + let args = Docopt::new(USAGE).and_then(|d| d.deserialize()) + .unwrap_or_else(|e| e.exit()); + match run(&args) { + Ok(()) => {} + Err(e) => println!("error: {}", e), + } +} diff --git a/git2/examples/cat-file.rs b/git2/examples/cat-file.rs new file mode 100644 index 000000000..68117bd88 --- /dev/null +++ b/git2/examples/cat-file.rs @@ -0,0 +1,142 @@ +/* + * libgit2 "cat-file" example - shows how to print data from the ODB + * + * Written by the libgit2 contributors + * + * To the extent possible under law, the author(s) have dedicated all copyright + * and related and neighboring rights to this software to the public domain + * worldwide. This software is distributed without any warranty. + * + * You should have received a copy of the CC0 Public Domain Dedication along + * with this software. If not, see + * . 
+ */ + +#![deny(warnings)] + +extern crate git2; +extern crate docopt; +#[macro_use] +extern crate serde_derive; + +use std::io::{self, Write}; + +use docopt::Docopt; +use git2::{Repository, ObjectType, Blob, Commit, Signature, Tag, Tree}; + +#[derive(Deserialize)] +struct Args { + arg_object: String, + flag_t: bool, + flag_s: bool, + flag_e: bool, + flag_p: bool, + flag_q: bool, + flag_v: bool, + flag_git_dir: Option, +} + +fn run(args: &Args) -> Result<(), git2::Error> { + let path = args.flag_git_dir.as_ref().map(|s| &s[..]).unwrap_or("."); + let repo = try!(Repository::open(path)); + + let obj = try!(repo.revparse_single(&args.arg_object)); + if args.flag_v && !args.flag_q { + println!("{} {}\n--", obj.kind().unwrap().str(), obj.id()); + } + + if args.flag_t { + println!("{}", obj.kind().unwrap().str()); + } else if args.flag_s || args.flag_e { + /* ... */ + } else if args.flag_p { + match obj.kind() { + Some(ObjectType::Blob) => { + show_blob(obj.as_blob().unwrap()); + } + Some(ObjectType::Commit) => { + show_commit(obj.as_commit().unwrap()); + } + Some(ObjectType::Tag) => { + show_tag(obj.as_tag().unwrap()); + } + Some(ObjectType::Tree) => { + show_tree(obj.as_tree().unwrap()); + } + Some(ObjectType::Any) | None => { + println!("unknown {}", obj.id()) + } + } + } + Ok(()) +} + +fn show_blob(blob: &Blob) { + io::stdout().write_all(blob.content()).unwrap(); +} + +fn show_commit(commit: &Commit) { + println!("tree {}", commit.tree_id()); + for parent in commit.parent_ids() { + println!("parent {}", parent); + } + show_sig("author", Some(commit.author())); + show_sig("committer", Some(commit.committer())); + if let Some(msg) = commit.message() { + println!("\n{}", msg); + } +} + +fn show_tag(tag: &Tag) { + println!("object {}", tag.target_id()); + println!("type {}", tag.target_type().unwrap().str()); + println!("tag {}", tag.name().unwrap()); + show_sig("tagger", tag.tagger()); + + if let Some(msg) = tag.message() { + println!("\n{}", msg); + } +} + +fn show_tree(tree: &Tree) { + for entry in tree.iter() { + println!("{:06o} {} {}\t{}", + entry.filemode(), + entry.kind().unwrap().str(), + entry.id(), + entry.name().unwrap()); + } +} + +fn show_sig(header: &str, sig: Option) { + let sig = match sig { Some(s) => s, None => return }; + let offset = sig.when().offset_minutes(); + let (sign, offset) = if offset < 0 {('-', -offset)} else {('+', offset)}; + let (hours, minutes) = (offset / 60, offset % 60); + println!("{} {} {} {}{:02}{:02}", + header, sig, sig.when().seconds(), sign, hours, minutes); + +} + +fn main() { + const USAGE: &'static str = " +usage: cat-file (-t | -s | -e | -p) [options] + +Options: + -t show the object type + -s show the object size + -e suppress all output + -p pretty print the contents of the object + -q suppress output + -v use verbose output + --git-dir use the specified directory as the base directory + -h, --help show this message +"; + + let args = Docopt::new(USAGE).and_then(|d| d.deserialize()) + .unwrap_or_else(|e| e.exit()); + match run(&args) { + Ok(()) => {} + Err(e) => println!("error: {}", e), + } +} diff --git a/git2/examples/clone.rs b/git2/examples/clone.rs new file mode 100644 index 000000000..3bd72bc2e --- /dev/null +++ b/git2/examples/clone.rs @@ -0,0 +1,124 @@ +/* + * libgit2 "clone" example + * + * Written by the libgit2 contributors + * + * To the extent possible under law, the author(s) have dedicated all copyright + * and related and neighboring rights to this software to the public domain + * worldwide. 
This software is distributed without any warranty. + * + * You should have received a copy of the CC0 Public Domain Dedication along + * with this software. If not, see + * . + */ + +#![deny(warnings)] + +extern crate git2; +extern crate docopt; +#[macro_use] +extern crate serde_derive; + +use docopt::Docopt; +use git2::build::{RepoBuilder, CheckoutBuilder}; +use git2::{RemoteCallbacks, Progress, FetchOptions}; +use std::cell::RefCell; +use std::io::{self, Write}; +use std::path::{Path, PathBuf}; + +#[derive(Deserialize)] +struct Args { + arg_url: String, + arg_path: String, +} + +struct State { + progress: Option>, + total: usize, + current: usize, + path: Option, + newline: bool, +} + +fn print(state: &mut State) { + let stats = state.progress.as_ref().unwrap(); + let network_pct = (100 * stats.received_objects()) / stats.total_objects(); + let index_pct = (100 * stats.indexed_objects()) / stats.total_objects(); + let co_pct = if state.total > 0 { + (100 * state.current) / state.total + } else { + 0 + }; + let kbytes = stats.received_bytes() / 1024; + if stats.received_objects() == stats.total_objects() { + if !state.newline { + println!(""); + state.newline = true; + } + print!("Resolving deltas {}/{}\r", stats.indexed_deltas(), + stats.total_deltas()); + } else { + print!("net {:3}% ({:4} kb, {:5}/{:5}) / idx {:3}% ({:5}/{:5}) \ + / chk {:3}% ({:4}/{:4}) {}\r", + network_pct, kbytes, stats.received_objects(), + stats.total_objects(), + index_pct, stats.indexed_objects(), stats.total_objects(), + co_pct, state.current, state.total, + state.path + .as_ref() + .map(|s| s.to_string_lossy().into_owned()) + .unwrap_or_default()) + } + io::stdout().flush().unwrap(); +} + +fn run(args: &Args) -> Result<(), git2::Error> { + let state = RefCell::new(State { + progress: None, + total: 0, + current: 0, + path: None, + newline: false, + }); + let mut cb = RemoteCallbacks::new(); + cb.transfer_progress(|stats| { + let mut state = state.borrow_mut(); + state.progress = Some(stats.to_owned()); + print(&mut *state); + true + }); + + let mut co = CheckoutBuilder::new(); + co.progress(|path, cur, total| { + let mut state = state.borrow_mut(); + state.path = path.map(|p| p.to_path_buf()); + state.current = cur; + state.total = total; + print(&mut *state); + }); + + let mut fo = FetchOptions::new(); + fo.remote_callbacks(cb); + try!(RepoBuilder::new().fetch_options(fo).with_checkout(co) + .clone(&args.arg_url, Path::new(&args.arg_path))); + println!(""); + + Ok(()) +} + +fn main() { + const USAGE: &'static str = " +usage: add [options] + +Options: + -h, --help show this message +"; + + let args = Docopt::new(USAGE).and_then(|d| d.deserialize()) + .unwrap_or_else(|e| e.exit()); + match run(&args) { + Ok(()) => {} + Err(e) => println!("error: {}", e), + } +} + diff --git a/git2/examples/diff.rs b/git2/examples/diff.rs new file mode 100644 index 000000000..8664a0e30 --- /dev/null +++ b/git2/examples/diff.rs @@ -0,0 +1,284 @@ +/* + * libgit2 "diff" example - shows how to use the diff API + * + * Written by the libgit2 contributors + * + * To the extent possible under law, the author(s) have dedicated all copyright + * and related and neighboring rights to this software to the public domain + * worldwide. This software is distributed without any warranty. + * + * You should have received a copy of the CC0 Public Domain Dedication along + * with this software. If not, see + * . 
+ */ + +#![deny(warnings)] + +extern crate git2; +extern crate docopt; +#[macro_use] +extern crate serde_derive; + +use std::str; + +use docopt::Docopt; +use git2::{Repository, Error, Object, ObjectType, DiffOptions, Diff}; +use git2::{DiffFindOptions, DiffFormat}; + +#[derive(Deserialize)] #[allow(non_snake_case)] +struct Args { + arg_from_oid: Option, + arg_to_oid: Option, + flag_patch: bool, + flag_cached: bool, + flag_nocached: bool, + flag_name_only: bool, + flag_name_status: bool, + flag_raw: bool, + flag_format: Option, + flag_color: bool, + flag_no_color: bool, + flag_R: bool, + flag_text: bool, + flag_ignore_space_at_eol: bool, + flag_ignore_space_change: bool, + flag_ignore_all_space: bool, + flag_ignored: bool, + flag_untracked: bool, + flag_patience: bool, + flag_minimal: bool, + flag_stat: bool, + flag_numstat: bool, + flag_shortstat: bool, + flag_summary: bool, + flag_find_renames: Option, + flag_find_copies: Option, + flag_find_copies_harder: bool, + flag_break_rewrites: bool, + flag_unified: Option, + flag_inter_hunk_context: Option, + flag_abbrev: Option, + flag_src_prefix: Option, + flag_dst_prefix: Option, + flag_git_dir: Option, +} + +const RESET: &'static str = "\u{1b}[m"; +const BOLD: &'static str = "\u{1b}[1m"; +const RED: &'static str = "\u{1b}[31m"; +const GREEN: &'static str = "\u{1b}[32m"; +const CYAN: &'static str = "\u{1b}[36m"; + +#[derive(PartialEq, Eq, Copy, Clone)] +enum Cache { Normal, Only, None } + +fn run(args: &Args) -> Result<(), Error> { + let path = args.flag_git_dir.as_ref().map(|s| &s[..]).unwrap_or("."); + let repo = try!(Repository::open(path)); + + // Prepare our diff options based on the arguments given + let mut opts = DiffOptions::new(); + opts.reverse(args.flag_R) + .force_text(args.flag_text) + .ignore_whitespace_eol(args.flag_ignore_space_at_eol) + .ignore_whitespace_change(args.flag_ignore_space_change) + .ignore_whitespace(args.flag_ignore_all_space) + .include_ignored(args.flag_ignored) + .include_untracked(args.flag_untracked) + .patience(args.flag_patience) + .minimal(args.flag_minimal); + if let Some(amt) = args.flag_unified { opts.context_lines(amt); } + if let Some(amt) = args.flag_inter_hunk_context { opts.interhunk_lines(amt); } + if let Some(amt) = args.flag_abbrev { opts.id_abbrev(amt); } + if let Some(ref s) = args.flag_src_prefix { opts.old_prefix(&s); } + if let Some(ref s) = args.flag_dst_prefix { opts.new_prefix(&s); } + if let Some("diff-index") = args.flag_format.as_ref().map(|s| &s[..]) { + opts.id_abbrev(40); + } + + // Prepare the diff to inspect + let t1 = try!(tree_to_treeish(&repo, args.arg_from_oid.as_ref())); + let t2 = try!(tree_to_treeish(&repo, args.arg_to_oid.as_ref())); + let head = try!(tree_to_treeish(&repo, Some(&"HEAD".to_string()))).unwrap(); + let mut diff = match (t1, t2, args.cache()) { + (Some(t1), Some(t2), _) => { + try!(repo.diff_tree_to_tree(t1.as_tree(), t2.as_tree(), + Some(&mut opts))) + } + (t1, None, Cache::None) => { + let t1 = t1.unwrap_or(head); + try!(repo.diff_tree_to_workdir(t1.as_tree(), Some(&mut opts))) + } + (t1, None, Cache::Only) => { + let t1 = t1.unwrap_or(head); + try!(repo.diff_tree_to_index(t1.as_tree(), None, Some(&mut opts))) + } + (Some(t1), None, _) => { + try!(repo.diff_tree_to_workdir_with_index(t1.as_tree(), + Some(&mut opts))) + } + (None, None, _) => { + try!(repo.diff_index_to_workdir(None, Some(&mut opts))) + } + (None, Some(_), _) => unreachable!(), + }; + + // Apply rename and copy detection if requested + if args.flag_break_rewrites || 
args.flag_find_copies_harder || + args.flag_find_renames.is_some() || args.flag_find_copies.is_some() + { + let mut opts = DiffFindOptions::new(); + if let Some(t) = args.flag_find_renames { + opts.rename_threshold(t); + opts.renames(true); + } + if let Some(t) = args.flag_find_copies { + opts.copy_threshold(t); + opts.copies(true); + } + opts.copies_from_unmodified(args.flag_find_copies_harder) + .rewrites(args.flag_break_rewrites); + try!(diff.find_similar(Some(&mut opts))); + } + + // Generate simple output + let stats = args.flag_stat | args.flag_numstat | args.flag_shortstat | + args.flag_summary; + if stats { + try!(print_stats(&diff, args)); + } + if args.flag_patch || !stats { + if args.color() { print!("{}", RESET); } + let mut last_color = None; + try!(diff.print(args.diff_format(), |_delta, _hunk, line| { + if args.color() { + let next = match line.origin() { + '+' => Some(GREEN), + '-' => Some(RED), + '>' => Some(GREEN), + '<' => Some(RED), + 'F' => Some(BOLD), + 'H' => Some(CYAN), + _ => None + }; + if args.color() && next != last_color { + if last_color == Some(BOLD) || next == Some(BOLD) { + print!("{}", RESET); + } + print!("{}", next.unwrap_or(RESET)); + last_color = next; + } + } + + match line.origin() { + '+' | '-' | ' ' => print!("{}", line.origin()), + _ => {} + } + print!("{}", str::from_utf8(line.content()).unwrap()); + true + })); + if args.color() { print!("{}", RESET); } + } + + Ok(()) +} + +fn print_stats(diff: &Diff, args: &Args) -> Result<(), Error> { + let stats = try!(diff.stats()); + let mut format = git2::DiffStatsFormat::NONE; + if args.flag_stat { + format |= git2::DiffStatsFormat::FULL; + } + if args.flag_shortstat { + format |= git2::DiffStatsFormat::SHORT; + } + if args.flag_numstat { + format |= git2::DiffStatsFormat::NUMBER; + } + if args.flag_summary { + format |= git2::DiffStatsFormat::INCLUDE_SUMMARY; + } + let buf = try!(stats.to_buf(format, 80)); + print!("{}", str::from_utf8(&*buf).unwrap()); + Ok(()) +} + +fn tree_to_treeish<'a>(repo: &'a Repository, arg: Option<&String>) + -> Result>, Error> { + let arg = match arg { Some(s) => s, None => return Ok(None) }; + let obj = try!(repo.revparse_single(arg)); + let tree = try!(obj.peel(ObjectType::Tree)); + Ok(Some(tree)) +} + +impl Args { + fn cache(&self) -> Cache { + if self.flag_cached {Cache::Only} + else if self.flag_nocached {Cache::None} + else {Cache::Normal} + } + fn color(&self) -> bool { self.flag_color && !self.flag_no_color } + fn diff_format(&self) -> DiffFormat { + if self.flag_patch {DiffFormat::Patch} + else if self.flag_name_only {DiffFormat::NameOnly} + else if self.flag_name_status {DiffFormat::NameStatus} + else if self.flag_raw {DiffFormat::Raw} + else { + match self.flag_format.as_ref().map(|s| &s[..]) { + Some("name") => DiffFormat::NameOnly, + Some("name-status") => DiffFormat::NameStatus, + Some("raw") => DiffFormat::Raw, + Some("diff-index") => DiffFormat::Raw, + _ => DiffFormat::Patch, + } + } + } +} + +fn main() { + const USAGE: &'static str = " +usage: diff [options] [ []] + +Options: + -p, --patch show output in patch format + --cached use staged changes as diff + --nocached do not use staged changes + --name-only show only names of changed files + --name-status show only names and status changes + --raw generate the raw format + --format= specify format for stat summary + --color use color output + --no-color never use color output + -R swap two inputs + -a, --text treat all files as text + --ignore-space-at-eol ignore changes in whitespace at EOL + -b, 
--ignore-space-change ignore changes in amount of whitespace + -w, --ignore-all-space ignore whitespace when comparing lines + --ignored show ignored files as well + --untracked show untracked files + --patience generate diff using the patience algorithm + --minimal spend extra time to find smallest diff + --stat generate a diffstat + --numstat similar to --stat, but more machine friendly + --shortstat only output last line of --stat + --summary output condensed summary of header info + -M, --find-renames set threshold for finding renames (default 50) + -C, --find-copies set threshold for finding copies (default 50) + --find-copies-harder inspect unmodified files for sources of copies + -B, --break-rewrites break complete rewrite changes into pairs + -U, --unified lines of context to show + --inter-hunk-context maximum lines of change between hunks + --abbrev length to abbreviate commits to + --src-prefix show given source prefix instead of 'a/' + --dst-prefix show given destination prefix instead of 'b/' + --git-dir path to git repository to use + -h, --help show this message +"; + + let args = Docopt::new(USAGE).and_then(|d| d.deserialize()) + .unwrap_or_else(|e| e.exit()); + match run(&args) { + Ok(()) => {} + Err(e) => println!("error: {}", e), + } +} diff --git a/git2/examples/fetch.rs b/git2/examples/fetch.rs new file mode 100644 index 000000000..c667d7b63 --- /dev/null +++ b/git2/examples/fetch.rs @@ -0,0 +1,128 @@ +/* + * libgit2 "fetch" example - shows how to fetch remote data + * + * Written by the libgit2 contributors + * + * To the extent possible under law, the author(s) have dedicated all copyright + * and related and neighboring rights to this software to the public domain + * worldwide. This software is distributed without any warranty. + * + * You should have received a copy of the CC0 Public Domain Dedication along + * with this software. If not, see + * . + */ + +#![deny(warnings)] + +extern crate git2; +extern crate docopt; +#[macro_use] +extern crate serde_derive; + +use docopt::Docopt; +use git2::{Repository, RemoteCallbacks, AutotagOption, FetchOptions}; +use std::io::{self, Write}; +use std::str; + +#[derive(Deserialize)] +struct Args { + arg_remote: Option<String>, +} + +fn run(args: &Args) -> Result<(), git2::Error> { + let repo = try!(Repository::open(".")); + let remote = args.arg_remote.as_ref().map(|s| &s[..]).unwrap_or("origin"); + + // Figure out whether it's a named remote or a URL + println!("Fetching {} for repo", remote); + let mut cb = RemoteCallbacks::new(); + let mut remote = try!(repo.find_remote(remote).or_else(|_| { + repo.remote_anonymous(remote) + })); + cb.sideband_progress(|data| { + print!("remote: {}", str::from_utf8(data).unwrap()); + io::stdout().flush().unwrap(); + true + }); + + // This callback gets called for each remote-tracking branch that gets + // updated. The message we output depends on whether it's a new one or an + // update. + cb.update_tips(|refname, a, b| { + if a.is_zero() { + println!("[new] {:20} {}", b, refname); + } else { + println!("[updated] {:10}..{:10} {}", a, b, refname); + } + true + }); + + // Here we show processed and total objects in the pack and the amount of + // received data. Most frontends will probably want to show a percentage and + // the download rate.
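+ // (Illustrative note: a percentage can be derived from these counters the
+ // same way the clone example above does, e.g.
+ // `100 * stats.received_objects() / stats.total_objects()`, taking care
+ // that `total_objects()` is non-zero before dividing.)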
+ cb.transfer_progress(|stats| { + if stats.received_objects() == stats.total_objects() { + print!("Resolving deltas {}/{}\r", stats.indexed_deltas(), + stats.total_deltas()); + } else if stats.total_objects() > 0 { + print!("Received {}/{} objects ({}) in {} bytes\r", + stats.received_objects(), + stats.total_objects(), + stats.indexed_objects(), + stats.received_bytes()); + } + io::stdout().flush().unwrap(); + true + }); + + // Download the packfile and index it. This function updates the amount of + // received data and the indexer stats which lets you inform the user about + // progress. + let mut fo = FetchOptions::new(); + fo.remote_callbacks(cb); + try!(remote.download(&[], Some(&mut fo))); + + { + // If there are local objects (we got a thin pack), then tell the user + // how many objects we saved from having to cross the network. + let stats = remote.stats(); + if stats.local_objects() > 0 { + println!("\rReceived {}/{} objects in {} bytes (used {} local \ + objects)", stats.indexed_objects(), + stats.total_objects(), stats.received_bytes(), + stats.local_objects()); + } else { + println!("\rReceived {}/{} objects in {} bytes", + stats.indexed_objects(), stats.total_objects(), + stats.received_bytes()); + } + } + + // Disconnect the underlying connection to prevent from idling. + remote.disconnect(); + + // Update the references in the remote's namespace to point to the right + // commits. This may be needed even if there was no packfile to download, + // which can happen e.g. when the branches have been changed but all the + // needed objects are available locally. + try!(remote.update_tips(None, true, + AutotagOption::Unspecified, None)); + + Ok(()) +} + +fn main() { + const USAGE: &'static str = " +usage: fetch [options] [] + +Options: + -h, --help show this message +"; + + let args = Docopt::new(USAGE).and_then(|d| d.deserialize()) + .unwrap_or_else(|e| e.exit()); + match run(&args) { + Ok(()) => {} + Err(e) => println!("error: {}", e), + } +} diff --git a/git2/examples/init.rs b/git2/examples/init.rs new file mode 100644 index 000000000..0f6b83809 --- /dev/null +++ b/git2/examples/init.rs @@ -0,0 +1,152 @@ +/* + * libgit2 "init" example - shows how to initialize a new repo + * + * Written by the libgit2 contributors + * + * To the extent possible under law, the author(s) have dedicated all copyright + * and related and neighboring rights to this software to the public domain + * worldwide. This software is distributed without any warranty. + * + * You should have received a copy of the CC0 Public Domain Dedication along + * with this software. If not, see + * . 
+ */ + +#![deny(warnings)] + +extern crate git2; +extern crate docopt; +#[macro_use] +extern crate serde_derive; + +use docopt::Docopt; +use git2::{Repository, RepositoryInitOptions, RepositoryInitMode, Error}; +use std::path::{PathBuf, Path}; + +#[derive(Deserialize)] +struct Args { + arg_directory: String, + flag_quiet: bool, + flag_bare: bool, + flag_template: Option, + flag_separate_git_dir: Option, + flag_initial_commit: bool, + flag_shared: Option, +} + +fn run(args: &Args) -> Result<(), Error> { + let mut path = PathBuf::from(&args.arg_directory); + let repo = if !args.flag_bare && args.flag_template.is_none() && + args.flag_shared.is_none() && + args.flag_separate_git_dir.is_none() { + try!(Repository::init(&path)) + } else { + let mut opts = RepositoryInitOptions::new(); + opts.bare(args.flag_bare); + if let Some(ref s) = args.flag_template { + opts.template_path(Path::new(s)); + } + + // If you specified a separate git directory, then initialize + // the repository at that path and use the second path as the + // working directory of the repository (with a git-link file) + if let Some(ref s) = args.flag_separate_git_dir { + opts.workdir_path(&path); + path = PathBuf::from(s); + } + + if let Some(ref s) = args.flag_shared { + opts.mode(try!(parse_shared(s))); + } + try!(Repository::init_opts(&path, &opts)) + }; + + // Print a message to stdout like "git init" does + if !args.flag_quiet { + if args.flag_bare || args.flag_separate_git_dir.is_some() { + path = repo.path().to_path_buf(); + } else { + path = repo.workdir().unwrap().to_path_buf(); + } + println!("Initialized empty Git repository in {}", path.display()); + } + + if args.flag_initial_commit { + try!(create_initial_commit(&repo)); + println!("Created empty initial commit"); + } + + Ok(()) +} + +/// Unlike regular "git init", this example shows how to create an initial empty +/// commit in the repository. This is the helper function that does that. +fn create_initial_commit(repo: &Repository) -> Result<(), Error> { + // First use the config to initialize a commit signature for the user. + let sig = try!(repo.signature()); + + // Now let's create an empty tree for this commit + let tree_id = { + let mut index = try!(repo.index()); + + // Outside of this example, you could call index.add_path() + // here to put actual files into the index. For our purposes, we'll + // leave it empty for now. + + try!(index.write_tree()) + }; + + let tree = try!(repo.find_tree(tree_id)); + + // Ready to create the initial commit. + // + // Normally creating a commit would involve looking up the current HEAD + // commit and making that be the parent of the initial commit, but here this + // is the first commit so there will be no parent. 
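+ // (The empty `&[]` slice passed as the final argument below is the list of
+ // parent commits; leaving it empty is what makes this a root commit.)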
+ try!(repo.commit(Some("HEAD"), &sig, &sig, "Initial commit", &tree, &[])); + + Ok(()) +} + +fn parse_shared(shared: &str) -> Result { + match shared { + "false" | "umask" => Ok(git2::RepositoryInitMode::SHARED_UMASK), + "true" | "group" => Ok(git2::RepositoryInitMode::SHARED_GROUP), + "all" | "world" => Ok(git2::RepositoryInitMode::SHARED_ALL), + _ => { + if shared.starts_with('0') { + match u32::from_str_radix(&shared[1..], 8).ok() { + Some(n) => { + Ok(RepositoryInitMode::from_bits_truncate(n)) + } + None => { + Err(Error::from_str("invalid octal value for --shared")) + } + } + } else { + Err(Error::from_str("unknown value for --shared")) + } + } + } +} + +fn main() { + const USAGE: &'static str = " +usage: init [options] + +Options: + -q, --quiet don't print information to stdout + --bare initialize a new bare repository + --template use as an initialization template + --separate-git-dir use as the .git directory + --initial-commit create an initial empty commit + --shared permissions to create the repository with +"; + + let args = Docopt::new(USAGE).and_then(|d| d.deserialize()) + .unwrap_or_else(|e| e.exit()); + match run(&args) { + Ok(()) => {} + Err(e) => println!("error: {}", e), + } +} diff --git a/git2/examples/log.rs b/git2/examples/log.rs new file mode 100644 index 000000000..70e8145da --- /dev/null +++ b/git2/examples/log.rs @@ -0,0 +1,263 @@ +/* + * libgit2 "log" example - shows how to walk history and get commit info + * + * Written by the libgit2 contributors + * + * To the extent possible under law, the author(s) have dedicated all copyright + * and related and neighboring rights to this software to the public domain + * worldwide. This software is distributed without any warranty. + * + * You should have received a copy of the CC0 Public Domain Dedication along + * with this software. If not, see + * . 
+ */ + +#![deny(warnings)] + +#[macro_use] +extern crate serde_derive; +extern crate docopt; +extern crate git2; +extern crate time; + +use std::str; +use docopt::Docopt; +use git2::{Repository, Signature, Commit, ObjectType, Time, DiffOptions}; +use git2::{Pathspec, Error, DiffFormat}; + +#[derive(Deserialize)] +struct Args { + arg_commit: Vec, + arg_spec: Vec, + flag_topo_order: bool, + flag_date_order: bool, + flag_reverse: bool, + flag_author: Option, + flag_committer: Option, + flag_grep: Option, + flag_git_dir: Option, + flag_skip: Option, + flag_max_count: Option, + flag_merges: bool, + flag_no_merges: bool, + flag_no_min_parents: bool, + flag_no_max_parents: bool, + flag_max_parents: Option, + flag_min_parents: Option, + flag_patch: bool, +} + +fn run(args: &Args) -> Result<(), Error> { + let path = args.flag_git_dir.as_ref().map(|s| &s[..]).unwrap_or("."); + let repo = try!(Repository::open(path)); + let mut revwalk = try!(repo.revwalk()); + + // Prepare the revwalk based on CLI parameters + let base = if args.flag_reverse {git2::Sort::REVERSE} else {git2::Sort::NONE}; + revwalk.set_sorting(base | if args.flag_topo_order { + git2::Sort::TOPOLOGICAL + } else if args.flag_date_order { + git2::Sort::TIME + } else { + git2::Sort::NONE + }); + for commit in &args.arg_commit { + if commit.starts_with('^') { + let obj = try!(repo.revparse_single(&commit[1..])); + try!(revwalk.hide(obj.id())); + continue + } + let revspec = try!(repo.revparse(commit)); + if revspec.mode().contains(git2::RevparseMode::SINGLE) { + try!(revwalk.push(revspec.from().unwrap().id())); + } else { + let from = revspec.from().unwrap().id(); + let to = revspec.to().unwrap().id(); + try!(revwalk.push(to)); + if revspec.mode().contains(git2::RevparseMode::MERGE_BASE) { + let base = try!(repo.merge_base(from, to)); + let o = try!(repo.find_object(base, Some(ObjectType::Commit))); + try!(revwalk.push(o.id())); + } + try!(revwalk.hide(from)); + } + } + if args.arg_commit.is_empty() { + try!(revwalk.push_head()); + } + + // Prepare our diff options and pathspec matcher + let (mut diffopts, mut diffopts2) = (DiffOptions::new(), DiffOptions::new()); + for spec in &args.arg_spec { + diffopts.pathspec(spec); + diffopts2.pathspec(spec); + } + let ps = try!(Pathspec::new(args.arg_spec.iter())); + + // Filter our revwalk based on the CLI parameters + macro_rules! filter_try { + ($e:expr) => (match $e { Ok(t) => t, Err(e) => return Some(Err(e)) }) + } + let revwalk = revwalk.filter_map(|id| { + let id = filter_try!(id); + let commit = filter_try!(repo.find_commit(id)); + let parents = commit.parents().len(); + if parents < args.min_parents() { return None } + if let Some(n) = args.max_parents() { + if parents >= n { return None } + } + if !args.arg_spec.is_empty() { + match commit.parents().len() { + 0 => { + let tree = filter_try!(commit.tree()); + let flags = git2::PathspecFlags::NO_MATCH_ERROR; + if ps.match_tree(&tree, flags).is_err() { return None } + } + _ => { + let m = commit.parents().all(|parent| { + match_with_parent(&repo, &commit, &parent, &mut diffopts) + .unwrap_or(false) + }); + if !m { return None } + } + } + } + if !sig_matches(&commit.author(), &args.flag_author) { return None } + if !sig_matches(&commit.committer(), &args.flag_committer) { return None } + if !log_message_matches(commit.message(), &args.flag_grep) { return None } + Some(Ok(commit)) + }).skip(args.flag_skip.unwrap_or(0)).take(args.flag_max_count.unwrap_or(!0)); + + // print! 
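+ // Walk the filtered commits, printing each one; with `--patch`, also print
+ // the diff against the commit's single parent (merge commits are not diffed).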
+ for commit in revwalk { + let commit = try!(commit); + print_commit(&commit); + if !args.flag_patch || commit.parents().len() > 1 { continue } + let a = if commit.parents().len() == 1 { + let parent = try!(commit.parent(0)); + Some(try!(parent.tree())) + } else { + None + }; + let b = try!(commit.tree()); + let diff = try!(repo.diff_tree_to_tree(a.as_ref(), Some(&b), + Some(&mut diffopts2))); + try!(diff.print(DiffFormat::Patch, |_delta, _hunk, line| { + match line.origin() { + ' ' | '+' | '-' => print!("{}", line.origin()), + _ => {} + } + print!("{}", str::from_utf8(line.content()).unwrap()); + true + })); + } + + Ok(()) +} + +fn sig_matches(sig: &Signature, arg: &Option) -> bool { + match *arg { + Some(ref s) => { + sig.name().map(|n| n.contains(s)).unwrap_or(false) || + sig.email().map(|n| n.contains(s)).unwrap_or(false) + } + None => true + } +} + +fn log_message_matches(msg: Option<&str>, grep: &Option) -> bool { + match (grep, msg) { + (&None, _) => true, + (&Some(_), None) => false, + (&Some(ref s), Some(msg)) => msg.contains(s), + } +} + +fn print_commit(commit: &Commit) { + println!("commit {}", commit.id()); + + if commit.parents().len() > 1 { + print!("Merge:"); + for id in commit.parent_ids() { + print!(" {:.8}", id); + } + println!(""); + } + + let author = commit.author(); + println!("Author: {}", author); + print_time(&author.when(), "Date: "); + println!(""); + + for line in String::from_utf8_lossy(commit.message_bytes()).lines() { + println!(" {}", line); + } + println!(""); +} + +fn print_time(time: &Time, prefix: &str) { + let (offset, sign) = match time.offset_minutes() { + n if n < 0 => (-n, '-'), + n => (n, '+'), + }; + let (hours, minutes) = (offset / 60, offset % 60); + let ts = time::Timespec::new(time.seconds() + + (time.offset_minutes() as i64) * 60, 0); + let time = time::at(ts); + + println!("{}{} {}{:02}{:02}", prefix, + time.strftime("%a %b %e %T %Y").unwrap(), sign, hours, minutes); + +} + +fn match_with_parent(repo: &Repository, commit: &Commit, parent: &Commit, + opts: &mut DiffOptions) -> Result { + let a = try!(parent.tree()); + let b = try!(commit.tree()); + let diff = try!(repo.diff_tree_to_tree(Some(&a), Some(&b), Some(opts))); + Ok(diff.deltas().len() > 0) +} + +impl Args { + fn min_parents(&self) -> usize { + if self.flag_no_min_parents { return 0 } + self.flag_min_parents.unwrap_or(if self.flag_merges {2} else {0}) + } + + fn max_parents(&self) -> Option { + if self.flag_no_max_parents { return None } + self.flag_max_parents.or(if self.flag_no_merges {Some(1)} else {None}) + } +} + +fn main() { + const USAGE: &'static str = " +usage: log [options] [..] [--] [..] 
+ +Options: + --topo-order sort commits in topological order + --date-order sort commits in date order + --reverse sort commits in reverse + --author author to sort by + --committer committer to sort by + --grep pattern to filter commit messages by + --git-dir alternative git directory to use + --skip number of commits to skip + -n, --max-count maximum number of commits to show + --merges only show merge commits + --no-merges don't show merge commits + --no-min-parents don't require a minimum number of parents + --no-max-parents don't require a maximum number of parents + --max-parents specify a maximum number of parents for a commit + --min-parents specify a minimum number of parents for a commit + -p, --patch show commit diff + -h, --help show this message +"; + + let args = Docopt::new(USAGE).and_then(|d| d.deserialize()) + .unwrap_or_else(|e| e.exit()); + match run(&args) { + Ok(()) => {} + Err(e) => println!("error: {}", e), + } +} diff --git a/git2/examples/ls-remote.rs b/git2/examples/ls-remote.rs new file mode 100644 index 000000000..5da9575f7 --- /dev/null +++ b/git2/examples/ls-remote.rs @@ -0,0 +1,63 @@ +/* + * libgit2 "ls-remote" example + * + * Written by the libgit2 contributors + * + * To the extent possible under law, the author(s) have dedicated all copyright + * and related and neighboring rights to this software to the public domain + * worldwide. This software is distributed without any warranty. + * + * You should have received a copy of the CC0 Public Domain Dedication along + * with this software. If not, see + * . + */ + +#![deny(warnings)] + +extern crate git2; +extern crate docopt; +#[macro_use] +extern crate serde_derive; + +use docopt::Docopt; +use git2::{Repository, Direction}; + +#[derive(Deserialize)] +struct Args { + arg_remote: String, +} + +fn run(args: &Args) -> Result<(), git2::Error> { + let repo = try!(Repository::open(".")); + let remote = &args.arg_remote; + let mut remote = try!(repo.find_remote(remote).or_else(|_| { + repo.remote_anonymous(remote) + })); + + // Connect to the remote and call the printing function for each of the + // remote references. + let connection = try!(remote.connect_auth(Direction::Fetch, None, None)); + + // Get the list of references on the remote and print out their name next to + // what they point to. + for head in try!(connection.list()).iter() { + println!("{}\t{}", head.oid(), head.name()); + } + Ok(()) +} + +fn main() { + const USAGE: &'static str = " +usage: ls-remote [option] + +Options: + -h, --help show this message +"; + + let args = Docopt::new(USAGE).and_then(|d| d.deserialize()) + .unwrap_or_else(|e| e.exit()); + match run(&args) { + Ok(()) => {} + Err(e) => println!("error: {}", e), + } +} diff --git a/git2/examples/rev-list.rs b/git2/examples/rev-list.rs new file mode 100644 index 000000000..db9bd82ca --- /dev/null +++ b/git2/examples/rev-list.rs @@ -0,0 +1,97 @@ +/* + * libgit2 "rev-list" example - shows how to transform a rev-spec into a list + * of commit ids + * + * Written by the libgit2 contributors + * + * To the extent possible under law, the author(s) have dedicated all copyright + * and related and neighboring rights to this software to the public domain + * worldwide. This software is distributed without any warranty. + * + * You should have received a copy of the CC0 Public Domain Dedication along + * with this software. If not, see + * . 
+ */ + +#![deny(warnings)] + +extern crate git2; +extern crate docopt; +#[macro_use] +extern crate serde_derive; + +use docopt::Docopt; +use git2::{Repository, Error, Revwalk, Oid}; + +#[derive(Deserialize)] +struct Args { + arg_spec: Vec, + flag_topo_order: bool, + flag_date_order: bool, + flag_reverse: bool, + flag_not: Vec, +} + +fn run(args: &Args) -> Result<(), git2::Error> { + let repo = try!(Repository::open(".")); + let mut revwalk = try!(repo.revwalk()); + + let base = if args.flag_reverse {git2::Sort::REVERSE} else {git2::Sort::NONE}; + revwalk.set_sorting(base | if args.flag_topo_order { + git2::Sort::TOPOLOGICAL + } else if args.flag_date_order { + git2::Sort::TIME + } else { + git2::Sort::NONE + }); + + let specs = args.flag_not.iter().map(|s| (s, true)) + .chain(args.arg_spec.iter().map(|s| (s, false))) + .map(|(spec, hide)| { + if spec.starts_with('^') {(&spec[1..], !hide)} else {(&spec[..], hide)} + }); + for (spec, hide) in specs { + let id = if spec.contains("..") { + let revspec = try!(repo.revparse(spec)); + if revspec.mode().contains(git2::RevparseMode::MERGE_BASE) { + return Err(Error::from_str("merge bases not implemented")) + } + try!(push(&mut revwalk, revspec.from().unwrap().id(), !hide)); + revspec.to().unwrap().id() + } else { + try!(repo.revparse_single(spec)).id() + }; + try!(push(&mut revwalk, id, hide)); + } + + for id in revwalk { + let id = try!(id); + println!("{}", id); + } + Ok(()) +} + +fn push(revwalk: &mut Revwalk, id: Oid, hide: bool) -> Result<(), Error> { + if hide {revwalk.hide(id)} else {revwalk.push(id)} +} + +fn main() { + const USAGE: &'static str = " +usage: rev-list [options] [--] ... + +Options: + --topo-order sort commits in topological order + --date-order sort commits in date order + --reverse sort commits in reverse + --not don't show + -h, --help show this message +"; + + let args = Docopt::new(USAGE).and_then(|d| d.deserialize()) + .unwrap_or_else(|e| e.exit()); + match run(&args) { + Ok(()) => {} + Err(e) => println!("error: {}", e), + } +} + diff --git a/git2/examples/rev-parse.rs b/git2/examples/rev-parse.rs new file mode 100644 index 000000000..f2416f7b4 --- /dev/null +++ b/git2/examples/rev-parse.rs @@ -0,0 +1,70 @@ +/* + * libgit2 "rev-parse" example - shows how to parse revspecs + * + * Written by the libgit2 contributors + * + * To the extent possible under law, the author(s) have dedicated all copyright + * and related and neighboring rights to this software to the public domain + * worldwide. This software is distributed without any warranty. + * + * You should have received a copy of the CC0 Public Domain Dedication along + * with this software. If not, see + * . 
+ */ + +#![deny(warnings)] + +extern crate git2; +extern crate docopt; +#[macro_use] +extern crate serde_derive; + +use docopt::Docopt; +use git2::Repository; + +#[derive(Deserialize)] +struct Args { + arg_spec: String, + flag_git_dir: Option, +} + +fn run(args: &Args) -> Result<(), git2::Error> { + let path = args.flag_git_dir.as_ref().map(|s| &s[..]).unwrap_or("."); + let repo = try!(Repository::open(path)); + + let revspec = try!(repo.revparse(&args.arg_spec)); + + if revspec.mode().contains(git2::RevparseMode::SINGLE) { + println!("{}", revspec.from().unwrap().id()); + } else if revspec.mode().contains(git2::RevparseMode::RANGE) { + let to = revspec.to().unwrap(); + let from = revspec.from().unwrap(); + println!("{}", to.id()); + + if revspec.mode().contains(git2::RevparseMode::MERGE_BASE) { + let base = try!(repo.merge_base(from.id(), to.id())); + println!("{}", base); + } + + println!("^{}", from.id()); + } else { + return Err(git2::Error::from_str("invalid results from revparse")) + } + Ok(()) +} + +fn main() { + const USAGE: &'static str = " +usage: rev-parse [options] + +Options: + --git-dir directory for the git repository to check +"; + + let args = Docopt::new(USAGE).and_then(|d| d.deserialize()) + .unwrap_or_else(|e| e.exit()); + match run(&args) { + Ok(()) => {} + Err(e) => println!("error: {}", e), + } +} diff --git a/git2/examples/status.rs b/git2/examples/status.rs new file mode 100644 index 000000000..87ca6ec31 --- /dev/null +++ b/git2/examples/status.rs @@ -0,0 +1,369 @@ +/* + * libgit2 "status" example - shows how to use the status APIs + * + * Written by the libgit2 contributors + * + * To the extent possible under law, the author(s) have dedicated all copyright + * and related and neighboring rights to this software to the public domain + * worldwide. This software is distributed without any warranty. + * + * You should have received a copy of the CC0 Public Domain Dedication along + * with this software. If not, see + * . 
+ */ + +#![deny(warnings)] + +extern crate git2; +extern crate docopt; +#[macro_use] +extern crate serde_derive; + +use std::str; +use std::time::Duration; +use docopt::Docopt; +use git2::{Repository, Error, StatusOptions, ErrorCode, SubmoduleIgnore}; + +#[derive(Deserialize)] +struct Args { + arg_spec: Vec, + flag_short: bool, + flag_porcelain: bool, + flag_branch: bool, + flag_z: bool, + flag_ignored: bool, + flag_untracked_files: Option, + flag_ignore_submodules: Option, + flag_git_dir: Option, + flag_repeat: bool, + flag_list_submodules: bool, +} + +#[derive(Eq, PartialEq)] +enum Format { Long, Short, Porcelain } + +fn run(args: &Args) -> Result<(), Error> { + let path = args.flag_git_dir.clone().unwrap_or_else(|| ".".to_string()); + let repo = try!(Repository::open(&path)); + if repo.is_bare() { + return Err(Error::from_str("cannot report status on bare repository")) + } + + let mut opts = StatusOptions::new(); + opts.include_ignored(args.flag_ignored); + match args.flag_untracked_files.as_ref().map(|s| &s[..]) { + Some("no") => { opts.include_untracked(false); } + Some("normal") => { opts.include_untracked(true); } + Some("all") => { + opts.include_untracked(true).recurse_untracked_dirs(true); + } + Some(_) => return Err(Error::from_str("invalid untracked-files value")), + None => {} + } + match args.flag_ignore_submodules.as_ref().map(|s| &s[..]) { + Some("all") => { opts.exclude_submodules(true); } + Some(_) => return Err(Error::from_str("invalid ignore-submodules value")), + None => {} + } + opts.include_untracked(!args.flag_ignored); + for spec in &args.arg_spec { + opts.pathspec(spec); + } + + loop { + if args.flag_repeat { + println!("\u{1b}[H\u{1b}[2J"); + } + + let statuses = try!(repo.statuses(Some(&mut opts))); + + if args.flag_branch { + try!(show_branch(&repo, &args.format())); + } + if args.flag_list_submodules { + try!(print_submodules(&repo)); + } + + if args.format() == Format::Long { + print_long(&statuses); + } else { + print_short(&repo, &statuses); + } + + if args.flag_repeat { + std::thread::sleep(Duration::new(10, 0)); + } else { + return Ok(()) + } + } +} + +fn show_branch(repo: &Repository, format: &Format) -> Result<(), Error> { + let head = match repo.head() { + Ok(head) => Some(head), + Err(ref e) if e.code() == ErrorCode::UnbornBranch || + e.code() == ErrorCode::NotFound => None, + Err(e) => return Err(e), + }; + let head = head.as_ref().and_then(|h| h.shorthand()); + + if format == &Format::Long { + println!("# On branch {}", + head.unwrap_or("Not currently on any branch")); + } else { + println!("## {}", head.unwrap_or("HEAD (no branch)")); + } + Ok(()) +} + +fn print_submodules(repo: &Repository) -> Result<(), Error> { + let modules = try!(repo.submodules()); + println!("# Submodules"); + for sm in &modules { + println!("# - submodule '{}' at {}", sm.name().unwrap(), + sm.path().display()); + } + Ok(()) +} + +// This function print out an output similar to git's status command in long +// form, including the command-line hints. 
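+// (Illustrative sketch of the long format produced below, assuming one staged
+// new file and one untracked file; the hint lines are omitted here:
+//   # Changes to be committed:
+//   #       new file:  src/lib.rs
+//   #
+//   # Untracked files
+//   #       notes.txt
+// )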
+fn print_long(statuses: &git2::Statuses) { + let mut header = false; + let mut rm_in_workdir = false; + let mut changes_in_index = false; + let mut changed_in_workdir = false; + + // Print index changes + for entry in statuses.iter().filter(|e| e.status() != git2::Status::CURRENT) { + if entry.status().contains(git2::Status::WT_DELETED) { + rm_in_workdir = true; + } + let istatus = match entry.status() { + s if s.contains(git2::Status::INDEX_NEW) => "new file: ", + s if s.contains(git2::Status::INDEX_MODIFIED) => "modified: ", + s if s.contains(git2::Status::INDEX_DELETED) => "deleted: ", + s if s.contains(git2::Status::INDEX_RENAMED) => "renamed: ", + s if s.contains(git2::Status::INDEX_TYPECHANGE) => "typechange:", + _ => continue, + }; + if !header { + println!("\ +# Changes to be committed: +# (use \"git reset HEAD ...\" to unstage) +#"); + header = true; + } + + let old_path = entry.head_to_index().unwrap().old_file().path(); + let new_path = entry.head_to_index().unwrap().new_file().path(); + match (old_path, new_path) { + (Some(old), Some(new)) if old != new => { + println!("#\t{} {} -> {}", istatus, old.display(), + new.display()); + } + (old, new) => { + println!("#\t{} {}", istatus, old.or(new).unwrap().display()); + } + } + } + + if header { + changes_in_index = true; + println!("#"); + } + header = false; + + // Print workdir changes to tracked files + for entry in statuses.iter() { + // With `Status::OPT_INCLUDE_UNMODIFIED` (not used in this example) + // `index_to_workdir` may not be `None` even if there are no differences, + // in which case it will be a `Delta::Unmodified`. + if entry.status() == git2::Status::CURRENT || + entry.index_to_workdir().is_none() { + continue + } + + let istatus = match entry.status() { + s if s.contains(git2::Status::WT_MODIFIED) => "modified: ", + s if s.contains(git2::Status::WT_DELETED) => "deleted: ", + s if s.contains(git2::Status::WT_RENAMED) => "renamed: ", + s if s.contains(git2::Status::WT_TYPECHANGE) => "typechange:", + _ => continue, + }; + + if !header { + println!("\ +# Changes not staged for commit: +# (use \"git add{} ...\" to update what will be committed) +# (use \"git checkout -- ...\" to discard changes in working directory) +#\ + ", if rm_in_workdir {"/rm"} else {""}); + header = true; + } + + let old_path = entry.index_to_workdir().unwrap().old_file().path(); + let new_path = entry.index_to_workdir().unwrap().new_file().path(); + match (old_path, new_path) { + (Some(old), Some(new)) if old != new => { + println!("#\t{} {} -> {}", istatus, old.display(), + new.display()); + } + (old, new) => { + println!("#\t{} {}", istatus, old.or(new).unwrap().display()); + } + } + } + + if header { + changed_in_workdir = true; + println!("#"); + } + header = false; + + // Print untracked files + for entry in statuses.iter().filter(|e| e.status() == git2::Status::WT_NEW) { + if !header { + println!("\ +# Untracked files +# (use \"git add ...\" to include in what will be committed) +#"); + header = true; + } + let file = entry.index_to_workdir().unwrap().old_file().path().unwrap(); + println!("#\t{}", file.display()); + } + header = false; + + // Print ignored files + for entry in statuses.iter().filter(|e| e.status() == git2::Status::IGNORED) { + if !header { + println!("\ +# Ignored files +# (use \"git add -f ...\" to include in what will be committed) +#"); + header = true; + } + let file = entry.index_to_workdir().unwrap().old_file().path().unwrap(); + println!("#\t{}", file.display()); + } + + if !changes_in_index && changed_in_workdir 
{ + println!("no changes added to commit (use \"git add\" and/or \ + \"git commit -a\")"); + } +} + +// This version of the output prefixes each path with two status columns and +// shows submodule status information. +fn print_short(repo: &Repository, statuses: &git2::Statuses) { + for entry in statuses.iter().filter(|e| e.status() != git2::Status::CURRENT) { + let mut istatus = match entry.status() { + s if s.contains(git2::Status::INDEX_NEW) => 'A', + s if s.contains(git2::Status::INDEX_MODIFIED) => 'M', + s if s.contains(git2::Status::INDEX_DELETED) => 'D', + s if s.contains(git2::Status::INDEX_RENAMED) => 'R', + s if s.contains(git2::Status::INDEX_TYPECHANGE) => 'T', + _ => ' ', + }; + let mut wstatus = match entry.status() { + s if s.contains(git2::Status::WT_NEW) => { + if istatus == ' ' { istatus = '?'; } '?' + } + s if s.contains(git2::Status::WT_MODIFIED) => 'M', + s if s.contains(git2::Status::WT_DELETED) => 'D', + s if s.contains(git2::Status::WT_RENAMED) => 'R', + s if s.contains(git2::Status::WT_TYPECHANGE) => 'T', + _ => ' ', + }; + + if entry.status().contains(git2::Status::IGNORED) { + istatus = '!'; + wstatus = '!'; + } + if istatus == '?' && wstatus == '?' { continue } + let mut extra = ""; + + // A commit in a tree is how submodules are stored, so let's go take a + // look at its status. + // + // TODO: check for GIT_FILEMODE_COMMIT + let status = entry.index_to_workdir().and_then(|diff| { + let ignore = SubmoduleIgnore::Unspecified; + diff.new_file().path_bytes() + .and_then(|s| str::from_utf8(s).ok()) + .and_then(|name| repo.submodule_status(name, ignore).ok()) + }); + if let Some(status) = status { + if status.contains(git2::SubmoduleStatus::WD_MODIFIED) { + extra = " (new commits)"; + } else if status.contains(git2::SubmoduleStatus::WD_INDEX_MODIFIED) || status.contains(git2::SubmoduleStatus::WD_WD_MODIFIED) { + extra = " (modified content)"; + } else if status.contains(git2::SubmoduleStatus::WD_UNTRACKED) { + extra = " (untracked content)"; + } + } + + let (mut a, mut b, mut c) = (None, None, None); + if let Some(diff) = entry.head_to_index() { + a = diff.old_file().path(); + b = diff.new_file().path(); + } + if let Some(diff) = entry.index_to_workdir() { + a = a.or_else(|| diff.old_file().path()); + b = b.or_else(|| diff.old_file().path()); + c = diff.new_file().path(); + } + + match (istatus, wstatus) { + ('R', 'R') => println!("RR {} {} {}{}", a.unwrap().display(), + b.unwrap().display(), c.unwrap().display(), + extra), + ('R', w) => println!("R{} {} {}{}", w, a.unwrap().display(), + b.unwrap().display(), extra), + (i, 'R') => println!("{}R {} {}{}", i, a.unwrap().display(), + c.unwrap().display(), extra), + (i, w) => println!("{}{} {}{}", i, w, a.unwrap().display(), extra), + } + } + + for entry in statuses.iter().filter(|e| e.status() == git2::Status::WT_NEW) { + println!("?? {}", entry.index_to_workdir().unwrap().old_file() + .path().unwrap().display()); + } +} + +impl Args { + fn format(&self) -> Format { + if self.flag_short { Format::Short } + else if self.flag_porcelain || self.flag_z { Format::Porcelain } + else { Format::Long } + } +} + +fn main() { + const USAGE: &'static str = " +usage: status [options] [--] [..] + +Options: + -s, --short show short statuses + --long show longer statuses (default) + --porcelain ?? + -b, --branch show branch information + -z ?? 
+ --ignored show ignored files as well + --untracked-files setting for showing untracked files [no|normal|all] + --ignore-submodules setting for ignoring submodules [all] + --git-dir git directory to analyze + --repeat repeatedly show status, sleeping inbetween + --list-submodules show submodules + -h, --help show this message +"; + + let args = Docopt::new(USAGE).and_then(|d| d.deserialize()) + .unwrap_or_else(|e| e.exit()); + match run(&args) { + Ok(()) => {} + Err(e) => println!("error: {}", e), + } +} diff --git a/git2/examples/tag.rs b/git2/examples/tag.rs new file mode 100644 index 000000000..f0ff3a219 --- /dev/null +++ b/git2/examples/tag.rs @@ -0,0 +1,134 @@ +/* + * libgit2 "tag" example - shows how to list, create and delete tags + * + * Written by the libgit2 contributors + * + * To the extent possible under law, the author(s) have dedicated all copyright + * and related and neighboring rights to this software to the public domain + * worldwide. This software is distributed without any warranty. + * + * You should have received a copy of the CC0 Public Domain Dedication along + * with this software. If not, see + * . + */ + +#![deny(warnings)] + +extern crate git2; +extern crate docopt; +#[macro_use] +extern crate serde_derive; + +use std::str; +use docopt::Docopt; +use git2::{Repository, Error, Tag, Commit}; + +#[derive(Deserialize)] +struct Args { + arg_tagname: Option, + arg_object: Option, + arg_pattern: Option, + flag_n: Option, + flag_force: bool, + flag_list: bool, + flag_delete: Option, + flag_message: Option, +} + +fn run(args: &Args) -> Result<(), Error> { + let repo = try!(Repository::open(".")); + + if let Some(ref name) = args.arg_tagname { + let target = args.arg_object.as_ref().map(|s| &s[..]).unwrap_or("HEAD"); + let obj = try!(repo.revparse_single(target)); + + if let Some(ref message) = args.flag_message { + let sig = try!(repo.signature()); + try!(repo.tag(name, &obj, &sig, message, args.flag_force)); + } else { + try!(repo.tag_lightweight(name, &obj, args.flag_force)); + } + + } else if let Some(ref name) = args.flag_delete { + let obj = try!(repo.revparse_single(name)); + let id = try!(obj.short_id()); + try!(repo.tag_delete(name)); + println!("Deleted tag '{}' (was {})", name, + str::from_utf8(&*id).unwrap()); + + } else if args.flag_list { + let pattern = args.arg_pattern.as_ref().map(|s| &s[..]).unwrap_or("*"); + for name in try!(repo.tag_names(Some(pattern))).iter() { + let name = name.unwrap(); + let obj = try!(repo.revparse_single(name)); + + if let Some(tag) = obj.as_tag() { + print_tag(tag, args); + } else if let Some(commit) = obj.as_commit() { + print_commit(commit, name, args); + } else { + print_name(name); + } + } + } + Ok(()) +} + +fn print_tag(tag: &Tag, args: &Args) { + print!("{:<16}", tag.name().unwrap()); + if args.flag_n.is_some() { + print_list_lines(tag.message(), args); + } else { + println!(""); + } +} + +fn print_commit(commit: &Commit, name: &str, args: &Args) { + print!("{:<16}", name); + if args.flag_n.is_some() { + print_list_lines(commit.message(), args); + } else { + println!(""); + } +} + +fn print_name(name: &str) { + println!("{}", name); +} + +fn print_list_lines(message: Option<&str>, args: &Args) { + let message = match message { Some(s) => s, None => return }; + let mut lines = message.lines().filter(|l| !l.trim().is_empty()); + if let Some(first) = lines.next() { + print!("{}", first); + } + println!(""); + + for line in lines.take(args.flag_n.unwrap_or(0) as usize) { + print!(" {}", line); + } +} + +fn main() { + const 
USAGE: &'static str = " +usage: + tag [-a] [-f] [-m ] [] + tag -d + tag [-n ] -l [] + +Options: + -n specify number of lines from the annotation to print + -f, --force replace an existing tag with the given name + -l, --list list tags with names matching the pattern given + -d, --delete delete the tag specified + -m, --message message for a new tag + -h, --help show this message +"; + + let args = Docopt::new(USAGE).and_then(|d| d.deserialize()) + .unwrap_or_else(|e| e.exit()); + match run(&args) { + Ok(()) => {} + Err(e) => println!("error: {}", e), + } +} diff --git a/git2/src/blame.rs b/git2/src/blame.rs new file mode 100644 index 000000000..9f6b15571 --- /dev/null +++ b/git2/src/blame.rs @@ -0,0 +1,315 @@ +use std::marker; +use {raw, Repository, Oid, signature, Signature}; +use util::{self, Binding}; +use std::path::Path; +use std::ops::Range; +use std::mem; + +/// Opaque structure to hold blame results. +pub struct Blame<'repo> { + raw: *mut raw::git_blame, + _marker: marker::PhantomData<&'repo Repository>, +} + +/// Structure that represents a blame hunk. +pub struct BlameHunk<'blame> { + raw: *mut raw::git_blame_hunk, + _marker: marker::PhantomData<&'blame raw::git_blame>, +} + +/// Blame options +pub struct BlameOptions { + raw: raw::git_blame_options, +} + +/// An iterator over the hunks in a blame. +pub struct BlameIter<'blame> { + range: Range<usize>, + blame: &'blame Blame<'blame>, +} + +impl<'repo> Blame<'repo> { + + /// Gets the number of hunks that exist in the blame structure. + pub fn len(&self) -> usize { + unsafe { raw::git_blame_get_hunk_count(self.raw) as usize } + } + + /// Return `true` if there is no hunk in the blame structure. + pub fn is_empty(&self) -> bool { + self.len() == 0 + } + + /// Gets the blame hunk at the given index. + pub fn get_index(&self, index: usize) -> Option<BlameHunk> { + unsafe { + let ptr = raw::git_blame_get_hunk_byindex(self.raw(), index as u32); + if ptr.is_null() { + None + } else { + Some(BlameHunk::from_raw_const(ptr)) + } + } + } + + /// Gets the hunk that relates to the given line number in the newest + /// commit. + pub fn get_line(&self, lineno: usize) -> Option<BlameHunk> { + unsafe { + let ptr = raw::git_blame_get_hunk_byline(self.raw(), lineno); + if ptr.is_null() { + None + } else { + Some(BlameHunk::from_raw_const(ptr)) + } + } + } + + /// Returns an iterator over the hunks in this blame. + pub fn iter(&self) -> BlameIter { + BlameIter { range: 0..self.len(), blame: self } + } + +} + +impl<'blame> BlameHunk<'blame> { + + unsafe fn from_raw_const(raw: *const raw::git_blame_hunk) + -> BlameHunk<'blame> { + BlameHunk { + raw: raw as *mut raw::git_blame_hunk, + _marker: marker::PhantomData, + } + } + + /// Returns OID of the commit where this line was last changed + pub fn final_commit_id(&self) -> Oid { + unsafe { Oid::from_raw(&(*self.raw).final_commit_id) } + } + + /// Returns signature of the commit. + pub fn final_signature(&self) -> Signature { + unsafe { signature::from_raw_const(self, (*self.raw).final_signature) } + } + + /// Returns line number where this hunk begins. + /// + /// Note that the start line is counted from 1. + pub fn final_start_line(&self) -> usize { + unsafe { (*self.raw).final_start_line_number } + } + + /// Returns the OID of the commit where this hunk was found.
+ /// + /// This will usually be the same as `final_commit_id`, + /// except when `BlameOptions::track_copies_any_commit_copies` has been + /// turned on. + pub fn orig_commit_id(&self) -> Oid { + unsafe { Oid::from_raw(&(*self.raw).orig_commit_id) } + } + + /// Returns signature of the commit. + pub fn orig_signature(&self) -> Signature { + unsafe { signature::from_raw_const(self, (*self.raw).orig_signature) } + } + + /// Returns line number where this hunk begins. + /// + /// Note that the start line is counted from 1. + pub fn orig_start_line(&self) -> usize { + unsafe { (*self.raw).orig_start_line_number} + } + + /// Returns path to the file where this hunk originated. + /// + /// Note: `None` could be returned for non-unicode paths on Windows. + pub fn path(&self) -> Option<&Path> { + unsafe { + if let Some(bytes) = ::opt_bytes(self, (*self.raw).orig_path) { + Some(util::bytes2path(bytes)) + } else { + None + } + } + } + + /// Tests whether this hunk has been tracked to a boundary commit + /// (the root, or the commit specified in git_blame_options.oldest_commit). + pub fn is_boundary(&self) -> bool { + unsafe { (*self.raw).boundary == 1 } + } + + /// Returns number of lines in this hunk. + pub fn lines_in_hunk(&self) -> usize { + unsafe { (*self.raw).lines_in_hunk as usize } + } +} + + +impl Default for BlameOptions { + fn default() -> Self { + Self::new() + } +} + +impl BlameOptions { + + /// Initialize options + pub fn new() -> BlameOptions { + unsafe { + let mut raw: raw::git_blame_options = mem::zeroed(); + assert_eq!( + raw::git_blame_init_options(&mut raw, + raw::GIT_BLAME_OPTIONS_VERSION) + , 0); + + Binding::from_raw(&raw as *const _ as *mut _) + } + } + + fn flag(&mut self, opt: u32, val: bool) -> &mut BlameOptions { + if val { + self.raw.flags |= opt; + } else { + self.raw.flags &= !opt; + } + self + } + + /// Track lines that have moved within a file. + pub fn track_copies_same_file(&mut self, opt: bool) -> &mut BlameOptions { + self.flag(raw::GIT_BLAME_TRACK_COPIES_SAME_FILE, opt) + } + + /// Track lines that have moved across files in the same commit. + pub fn track_copies_same_commit_moves(&mut self, opt: bool) -> &mut BlameOptions { + self.flag(raw::GIT_BLAME_TRACK_COPIES_SAME_COMMIT_MOVES, opt) + } + + /// Track lines that have been copied from another file that exists + /// in the same commit. + pub fn track_copies_same_commit_copies(&mut self, opt: bool) -> &mut BlameOptions { + self.flag(raw::GIT_BLAME_TRACK_COPIES_SAME_COMMIT_COPIES, opt) + } + + /// Track lines that have been copied from another file that exists + /// in any commit. + pub fn track_copies_any_commit_copies(&mut self, opt: bool) -> &mut BlameOptions { + self.flag(raw::GIT_BLAME_TRACK_COPIES_ANY_COMMIT_COPIES, opt) + } + + /// Restrict the search of commits to those reachable following only + /// the first parents. + pub fn first_parent(&mut self, opt: bool) -> &mut BlameOptions { + self.flag(raw::GIT_BLAME_FIRST_PARENT, opt) + } + + /// Setter for the id of the newest commit to consider. + pub fn newest_commit(&mut self, id: Oid) -> &mut BlameOptions { + unsafe { self.raw.newest_commit = *id.raw(); } + self + } + + /// Setter for the id of the oldest commit to consider.
+ pub fn oldest_commit(&mut self, id: Oid) -> &mut BlameOptions { + unsafe { self.raw.oldest_commit = *id.raw(); } + self + } + +} + +impl<'repo> Binding for Blame<'repo> { + type Raw = *mut raw::git_blame; + + unsafe fn from_raw(raw: *mut raw::git_blame) -> Blame<'repo> { + Blame { raw: raw, _marker: marker::PhantomData } + } + + fn raw(&self) -> *mut raw::git_blame { self.raw } +} + +impl<'repo> Drop for Blame<'repo> { + fn drop(&mut self) { + unsafe { raw::git_blame_free(self.raw) } + } +} + +impl<'blame> Binding for BlameHunk<'blame> { + type Raw = *mut raw::git_blame_hunk; + + unsafe fn from_raw(raw: *mut raw::git_blame_hunk) -> BlameHunk<'blame> { + BlameHunk { raw: raw, _marker: marker::PhantomData } + } + + fn raw(&self) -> *mut raw::git_blame_hunk { self.raw } +} + +impl Binding for BlameOptions { + type Raw = *mut raw::git_blame_options; + + unsafe fn from_raw(opts: *mut raw::git_blame_options) -> BlameOptions { + BlameOptions { raw: *opts } + } + + fn raw(&self) -> *mut raw::git_blame_options { + &self.raw as *const _ as *mut _ + } +} + +impl<'blame> Iterator for BlameIter<'blame> { + type Item = BlameHunk<'blame>; + fn next(&mut self) -> Option> { + self.range.next().and_then(|i| self.blame.get_index(i)) + } + + fn size_hint(&self) -> (usize, Option) { self.range.size_hint() } +} + +impl<'blame> DoubleEndedIterator for BlameIter<'blame> { + fn next_back(&mut self) -> Option> { + self.range.next_back().and_then(|i| self.blame.get_index(i)) + } +} + +impl<'blame> ExactSizeIterator for BlameIter<'blame> {} + +#[cfg(test)] +mod tests { + use std::fs::{self, File}; + use std::path::Path; + + #[test] + fn smoke() { + let (_td, repo) = ::test::repo_init(); + let mut index = repo.index().unwrap(); + + let root = repo.path().parent().unwrap(); + fs::create_dir(&root.join("foo")).unwrap(); + File::create(&root.join("foo/bar")).unwrap(); + index.add_path(Path::new("foo/bar")).unwrap(); + + let id = index.write_tree().unwrap(); + let tree = repo.find_tree(id).unwrap(); + let sig = repo.signature().unwrap(); + let id = repo.refname_to_id("HEAD").unwrap(); + let parent = repo.find_commit(id).unwrap(); + let commit = repo.commit(Some("HEAD"), &sig, &sig, "commit", + &tree, &[&parent]).unwrap(); + + let blame = repo.blame_file(Path::new("foo/bar"), None).unwrap(); + + assert_eq!(blame.len(), 1); + assert_eq!(blame.iter().count(), 1); + + let hunk = blame.get_index(0).unwrap(); + assert_eq!(hunk.final_commit_id(), commit); + assert_eq!(hunk.final_signature().name(), sig.name()); + assert_eq!(hunk.final_signature().email(), sig.email()); + assert_eq!(hunk.final_start_line(), 1); + assert_eq!(hunk.path(), Some(Path::new("foo/bar"))); + assert_eq!(hunk.lines_in_hunk(), 0); + assert!(!hunk.is_boundary()) + } + +} + diff --git a/git2/src/blob.rs b/git2/src/blob.rs new file mode 100644 index 000000000..5e255eb6d --- /dev/null +++ b/git2/src/blob.rs @@ -0,0 +1,186 @@ +use std::marker; +use std::mem; +use std::slice; +use std::io; + +use {raw, Oid, Object, Error}; +use util::Binding; + +/// A structure to represent a git [blob][1] +/// +/// [1]: http://git-scm.com/book/en/Git-Internals-Git-Objects +pub struct Blob<'repo> { + raw: *mut raw::git_blob, + _marker: marker::PhantomData>, +} + +impl<'repo> Blob<'repo> { + /// Get the id (SHA1) of a repository blob + pub fn id(&self) -> Oid { + unsafe { Binding::from_raw(raw::git_blob_id(&*self.raw)) } + } + + /// Determine if the blob content is most certainly binary or not. 
+ pub fn is_binary(&self) -> bool { + unsafe { raw::git_blob_is_binary(&*self.raw) == 1 } + } + + /// Get the content of this blob. + pub fn content(&self) -> &[u8] { + unsafe { + let data = raw::git_blob_rawcontent(&*self.raw) as *const u8; + let len = raw::git_blob_rawsize(&*self.raw) as usize; + slice::from_raw_parts(data, len) + } + } + + /// Casts this Blob to be usable as an `Object` + pub fn as_object(&self) -> &Object<'repo> { + unsafe { + &*(self as *const _ as *const Object<'repo>) + } + } + + /// Consumes Blob to be returned as an `Object` + pub fn into_object(self) -> Object<'repo> { + assert_eq!(mem::size_of_val(&self), mem::size_of::()); + unsafe { + mem::transmute(self) + } + } +} + +impl<'repo> Binding for Blob<'repo> { + type Raw = *mut raw::git_blob; + + unsafe fn from_raw(raw: *mut raw::git_blob) -> Blob<'repo> { + Blob { + raw: raw, + _marker: marker::PhantomData, + } + } + fn raw(&self) -> *mut raw::git_blob { self.raw } +} + +impl<'repo> ::std::fmt::Debug for Blob<'repo> { + fn fmt(&self, f: &mut ::std::fmt::Formatter) -> Result<(), ::std::fmt::Error> { + f.debug_struct("Blob").field("id", &self.id()).finish() + } +} + +impl<'repo> Clone for Blob<'repo> { + fn clone(&self) -> Self { + self.as_object().clone().into_blob().ok().unwrap() + } +} + +impl<'repo> Drop for Blob<'repo> { + fn drop(&mut self) { + unsafe { raw::git_blob_free(self.raw) } + } +} + +/// A structure to represent a git writestream for blobs +pub struct BlobWriter<'repo> { + raw: *mut raw::git_writestream, + need_cleanup: bool, + _marker: marker::PhantomData>, +} + +impl<'repo> BlobWriter<'repo> { + /// Finalize blob writing stream and write the blob to the object db + pub fn commit(mut self) -> Result { + // After commit we already doesn't need cleanup on drop + self.need_cleanup = false; + let mut raw = raw::git_oid { id: [0; raw::GIT_OID_RAWSZ] }; + unsafe { + try_call!(raw::git_blob_create_fromstream_commit(&mut raw, self.raw)); + Ok(Binding::from_raw(&raw as *const _)) + } + } +} + +impl<'repo> Binding for BlobWriter<'repo> { + type Raw = *mut raw::git_writestream; + + unsafe fn from_raw(raw: *mut raw::git_writestream) -> BlobWriter<'repo> { + BlobWriter { + raw: raw, + need_cleanup: true, + _marker: marker::PhantomData, + } + } + fn raw(&self) -> *mut raw::git_writestream { self.raw } +} + +impl<'repo> Drop for BlobWriter<'repo> { + fn drop(&mut self) { + // We need cleanup in case the stream has not been committed + if self.need_cleanup { + unsafe { ((*self.raw).free)(self.raw) } + } + } +} + +impl<'repo> io::Write for BlobWriter<'repo> { + fn write(&mut self, buf: &[u8]) -> io::Result { + unsafe { + let res = ((*self.raw).write)(self.raw, buf.as_ptr() as *const _, buf.len()); + if res < 0 { + Err(io::Error::new(io::ErrorKind::Other, "Write error")) + } else { + Ok(buf.len()) + } + } + } + fn flush(&mut self) -> io::Result<()> { Ok(()) } +} + +#[cfg(test)] +mod tests { + use std::io::prelude::*; + use std::fs::File; + use std::path::Path; + use tempdir::TempDir; + use Repository; + + #[test] + fn buffer() { + let td = TempDir::new("test").unwrap(); + let repo = Repository::init(td.path()).unwrap(); + let id = repo.blob(&[5, 4, 6]).unwrap(); + let blob = repo.find_blob(id).unwrap(); + + assert_eq!(blob.id(), id); + assert_eq!(blob.content(), [5, 4, 6]); + assert!(blob.is_binary()); + + repo.find_object(id, None).unwrap().as_blob().unwrap(); + repo.find_object(id, None).unwrap().into_blob().ok().unwrap(); + } + + #[test] + fn path() { + let td = TempDir::new("test").unwrap(); + let path = 
td.path().join("foo"); + File::create(&path).unwrap().write_all(&[7, 8, 9]).unwrap(); + let repo = Repository::init(td.path()).unwrap(); + let id = repo.blob_path(&path).unwrap(); + let blob = repo.find_blob(id).unwrap(); + assert_eq!(blob.content(), [7, 8, 9]); + blob.into_object(); + } + + #[test] + fn stream() { + let td = TempDir::new("test").unwrap(); + let repo = Repository::init(td.path()).unwrap(); + let mut ws = repo.blob_writer(Some(Path::new("foo"))).unwrap(); + let wl = ws.write(&[10, 11, 12]).unwrap(); + assert_eq!(wl, 3); + let id = ws.commit().unwrap(); + let blob = repo.find_blob(id).unwrap(); + assert_eq!(blob.content(), [10, 11, 12]); + blob.into_object(); + } +} diff --git a/git2/src/branch.rs b/git2/src/branch.rs new file mode 100644 index 000000000..3f035ed0d --- /dev/null +++ b/git2/src/branch.rs @@ -0,0 +1,162 @@ +use std::ffi::CString; +use std::marker; +use std::ptr; +use std::str; + +use {raw, Error, Reference, BranchType, References}; +use util::Binding; + +/// A structure to represent a git [branch][1] +/// +/// A branch is currently just a wrapper to an underlying `Reference`. The +/// reference can be accessed through the `get` and `unwrap` methods. +/// +/// [1]: http://git-scm.com/book/en/Git-Branching-What-a-Branch-Is +pub struct Branch<'repo> { + inner: Reference<'repo>, +} + +/// An iterator over the branches inside of a repository. +pub struct Branches<'repo> { + raw: *mut raw::git_branch_iterator, + _marker: marker::PhantomData>, +} + +impl<'repo> Branch<'repo> { + /// Creates Branch type from a Reference + pub fn wrap(reference: Reference) -> Branch { Branch { inner: reference } } + + /// Gain access to the reference that is this branch + pub fn get(&self) -> &Reference<'repo> { &self.inner } + + /// Take ownership of the underlying reference. + pub fn into_reference(self) -> Reference<'repo> { self.inner } + + /// Delete an existing branch reference. + pub fn delete(&mut self) -> Result<(), Error> { + unsafe { try_call!(raw::git_branch_delete(self.get().raw())); } + Ok(()) + } + + /// Determine if the current local branch is pointed at by HEAD. + pub fn is_head(&self) -> bool { + unsafe { raw::git_branch_is_head(&*self.get().raw()) == 1 } + } + + /// Move/rename an existing local branch reference. + pub fn rename(&mut self, new_branch_name: &str, force: bool) + -> Result, Error> { + let mut ret = ptr::null_mut(); + let new_branch_name = try!(CString::new(new_branch_name)); + unsafe { + try_call!(raw::git_branch_move(&mut ret, self.get().raw(), + new_branch_name, force)); + Ok(Branch::wrap(Binding::from_raw(ret))) + } + } + + /// Return the name of the given local or remote branch. + /// + /// May return `Ok(None)` if the name is not valid utf-8. + pub fn name(&self) -> Result, Error> { + self.name_bytes().map(|s| str::from_utf8(s).ok()) + } + + /// Return the name of the given local or remote branch. + pub fn name_bytes(&self) -> Result<&[u8], Error> { + let mut ret = ptr::null(); + unsafe { + try_call!(raw::git_branch_name(&mut ret, &*self.get().raw())); + Ok(::opt_bytes(self, ret).unwrap()) + } + } + + /// Return the reference supporting the remote tracking branch, given a + /// local branch reference. + pub fn upstream<'a>(&'a self) -> Result, Error> { + let mut ret = ptr::null_mut(); + unsafe { + try_call!(raw::git_branch_upstream(&mut ret, &*self.get().raw())); + Ok(Branch::wrap(Binding::from_raw(ret))) + } + } + + /// Set the upstream configuration for a given local branch. + /// + /// If `None` is specified, then the upstream branch is unset. 
The name + /// provided is the name of the branch to set as upstream. + pub fn set_upstream(&mut self, + upstream_name: Option<&str>) -> Result<(), Error> { + let upstream_name = try!(::opt_cstr(upstream_name)); + unsafe { + try_call!(raw::git_branch_set_upstream(self.get().raw(), + upstream_name)); + Ok(()) + } + } +} + +impl<'repo> Branches<'repo> { + /// Creates a new iterator from the raw pointer given. + /// + /// This function is unsafe as it is not guaranteed that `raw` is a valid + /// pointer. + pub unsafe fn from_raw(raw: *mut raw::git_branch_iterator) + -> Branches<'repo> { + Branches { + raw: raw, + _marker: marker::PhantomData, + } + } +} + +impl<'repo> Iterator for Branches<'repo> { + type Item = Result<(Branch<'repo>, BranchType), Error>; + fn next(&mut self) -> Option, BranchType), Error>> { + let mut ret = ptr::null_mut(); + let mut typ = raw::GIT_BRANCH_LOCAL; + unsafe { + try_call_iter!(raw::git_branch_next(&mut ret, &mut typ, self.raw)); + let typ = match typ { + raw::GIT_BRANCH_LOCAL => BranchType::Local, + raw::GIT_BRANCH_REMOTE => BranchType::Remote, + n => panic!("unexected branch type: {}", n), + }; + Some(Ok((Branch::wrap(Binding::from_raw(ret)), typ))) + } + } +} + +impl<'repo> Drop for Branches<'repo> { + fn drop(&mut self) { + unsafe { raw::git_branch_iterator_free(self.raw) } + } +} + +#[cfg(test)] +mod tests { + use BranchType; + + #[test] + fn smoke() { + let (_td, repo) = ::test::repo_init(); + let head = repo.head().unwrap(); + let target = head.target().unwrap(); + let commit = repo.find_commit(target).unwrap(); + + let mut b1 = repo.branch("foo", &commit, false).unwrap(); + assert!(!b1.is_head()); + repo.branch("foo2", &commit, false).unwrap(); + + assert_eq!(repo.branches(None).unwrap().count(), 3); + repo.find_branch("foo", BranchType::Local).unwrap(); + let mut b1 = b1.rename("bar", false).unwrap(); + assert_eq!(b1.name().unwrap(), Some("bar")); + assert!(b1.upstream().is_err()); + b1.set_upstream(Some("master")).unwrap(); + b1.upstream().unwrap(); + b1.set_upstream(None).unwrap(); + + b1.delete().unwrap(); + } +} diff --git a/git2/src/buf.rs b/git2/src/buf.rs new file mode 100644 index 000000000..78e958e2e --- /dev/null +++ b/git2/src/buf.rs @@ -0,0 +1,73 @@ +use std::slice; +use std::str; +use std::ptr; +use std::ops::{Deref, DerefMut}; + +use raw; +use util::Binding; + +/// A structure to wrap an intermediate buffer used by libgit2. +/// +/// A buffer can be thought of a `Vec`, but the `Vec` type is not used to +/// avoid copying data back and forth. +pub struct Buf { + raw: raw::git_buf, +} + +impl Default for Buf { + fn default() -> Self { + Self::new() + } +} + +impl Buf { + /// Creates a new empty buffer. + pub fn new() -> Buf { + ::init(); + unsafe { + Binding::from_raw(&mut raw::git_buf { + ptr: ptr::null_mut(), + size: 0, + asize: 0, + } as *mut _) + } + } + + /// Attempt to view this buffer as a string slice. + /// + /// Returns `None` if the buffer is not valid utf-8. 
+ pub fn as_str(&self) -> Option<&str> { str::from_utf8(&**self).ok() } +} + +impl Deref for Buf { + type Target = [u8]; + fn deref(&self) -> &[u8] { + unsafe { + slice::from_raw_parts(self.raw.ptr as *const u8, + self.raw.size as usize) + } + } +} + +impl DerefMut for Buf { + fn deref_mut(&mut self) -> &mut [u8] { + unsafe { + slice::from_raw_parts_mut(self.raw.ptr as *mut u8, + self.raw.size as usize) + } + } +} + +impl Binding for Buf { + type Raw = *mut raw::git_buf; + unsafe fn from_raw(raw: *mut raw::git_buf) -> Buf { + Buf { raw: *raw } + } + fn raw(&self) -> *mut raw::git_buf { &self.raw as *const _ as *mut _ } +} + +impl Drop for Buf { + fn drop(&mut self) { + unsafe { raw::git_buf_free(&mut self.raw) } + } +} diff --git a/git2/src/build.rs b/git2/src/build.rs new file mode 100644 index 000000000..4e4d2959a --- /dev/null +++ b/git2/src/build.rs @@ -0,0 +1,638 @@ +//! Builder-pattern objects for configuration various git operations. + +use std::ffi::{CStr, CString}; +use std::mem; +use std::path::Path; +use std::ptr; +use libc::{c_char, size_t, c_void, c_uint, c_int}; + +use {raw, panic, Error, Repository, FetchOptions, IntoCString}; +use {CheckoutNotificationType, DiffFile, Remote}; +use util::{self, Binding}; + +/// A builder struct which is used to build configuration for cloning a new git +/// repository. +pub struct RepoBuilder<'cb> { + bare: bool, + branch: Option, + local: bool, + hardlinks: bool, + checkout: Option>, + fetch_opts: Option>, + clone_local: Option, + remote_create: Option>>, +} + +/// Type of callback passed to `RepoBuilder::remote_create`. +/// +/// The second and third arguments are the remote's name and the remote's url. +pub type RemoteCreate<'cb> = for<'a> FnMut(&'a Repository, &str, &str) + -> Result, Error> + 'cb; + +/// A builder struct for configuring checkouts of a repository. +pub struct CheckoutBuilder<'cb> { + their_label: Option, + our_label: Option, + ancestor_label: Option, + target_dir: Option, + paths: Vec, + path_ptrs: Vec<*const c_char>, + file_perm: Option, + dir_perm: Option, + disable_filters: bool, + checkout_opts: u32, + progress: Option>>, + notify: Option>>, + notify_flags: CheckoutNotificationType, +} + +/// Checkout progress notification callback. +/// +/// The first argument is the path for the notification, the next is the numver +/// of completed steps so far, and the final is the total number of steps. +pub type Progress<'a> = FnMut(Option<&Path>, usize, usize) + 'a; + +/// Checkout notifications callback. +/// +/// The first argument is the notification type, the next is the path for the +/// the notification, followed by the baseline diff, target diff, and workdir diff. +/// +/// The callback must return a bool specifying whether the checkout should +/// continue. +pub type Notify<'a> = FnMut(CheckoutNotificationType, Option<&Path>, DiffFile, + DiffFile, DiffFile) -> bool + 'a; + + +impl<'cb> Default for RepoBuilder<'cb> { + fn default() -> Self { + Self::new() + } +} + +/// Options that can be passed to `RepoBuilder::clone_local`. +#[derive(Clone, Copy)] +pub enum CloneLocal { + /// Auto-detect (default) + /// + /// Here libgit2 will bypass the git-aware transport for local paths, but + /// use a normal fetch for `file://` urls. + Auto = raw::GIT_CLONE_LOCAL_AUTO as isize, + + /// Bypass the git-aware transport even for `file://` urls. 
+ Local = raw::GIT_CLONE_LOCAL as isize, + + /// Never bypass the git-aware transport + None = raw::GIT_CLONE_NO_LOCAL as isize, + + /// Bypass the git-aware transport, but don't try to use hardlinks. + NoLinks = raw::GIT_CLONE_LOCAL_NO_LINKS as isize, + + #[doc(hidden)] + __Nonexhaustive = 0xff, +} + +impl<'cb> RepoBuilder<'cb> { + /// Creates a new repository builder with all of the default configuration. + /// + /// When ready, the `clone()` method can be used to clone a new repository + /// using this configuration. + pub fn new() -> RepoBuilder<'cb> { + ::init(); + RepoBuilder { + bare: false, + branch: None, + local: true, + clone_local: None, + hardlinks: true, + checkout: None, + fetch_opts: None, + remote_create: None, + } + } + + /// Indicate whether the repository will be cloned as a bare repository or + /// not. + pub fn bare(&mut self, bare: bool) -> &mut RepoBuilder<'cb> { + self.bare = bare; + self + } + + /// Specify the name of the branch to check out after the clone. + /// + /// If not specified, the remote's default branch will be used. + pub fn branch(&mut self, branch: &str) -> &mut RepoBuilder<'cb> { + self.branch = Some(CString::new(branch).unwrap()); + self + } + + /// Configures options for bypassing the git-aware transport on clone. + /// + /// Bypassing it means that instead of a fetch libgit2 will copy the object + /// database directory instead of figuring out what it needs, which is + /// faster. If possible, it will hardlink the files to save space. + pub fn clone_local(&mut self, clone_local: CloneLocal) -> &mut RepoBuilder<'cb> { + self.clone_local = Some(clone_local); + self + } + + /// Set the flag for bypassing the git aware transport mechanism for local + /// paths. + /// + /// If `true`, the git-aware transport will be bypassed for local paths. If + /// `false`, the git-aware transport will not be bypassed. + #[deprecated(note = "use `clone_local` instead")] + #[doc(hidden)] + pub fn local(&mut self, local: bool) -> &mut RepoBuilder<'cb> { + self.local = local; + self + } + + /// Set the flag for whether hardlinks are used when using a local git-aware + /// transport mechanism. + #[deprecated(note = "use `clone_local` instead")] + #[doc(hidden)] + pub fn hardlinks(&mut self, links: bool) -> &mut RepoBuilder<'cb> { + self.hardlinks = links; + self + } + + /// Configure the checkout which will be performed by consuming a checkout + /// builder. + pub fn with_checkout(&mut self, checkout: CheckoutBuilder<'cb>) + -> &mut RepoBuilder<'cb> { + self.checkout = Some(checkout); + self + } + + /// Options which control the fetch, including callbacks. + /// + /// The callbacks are used for reporting fetch progress, and for acquiring + /// credentials in the event they are needed. + pub fn fetch_options(&mut self, fetch_opts: FetchOptions<'cb>) + -> &mut RepoBuilder<'cb> { + self.fetch_opts = Some(fetch_opts); + self + } + + /// Configures a callback used to create the git remote, prior to its being + /// used to perform the clone operation. + pub fn remote_create(&mut self, f: F) -> &mut RepoBuilder<'cb> + where F: for<'a> FnMut(&'a Repository, &str, &str) + -> Result, Error> + 'cb, + { + self.remote_create = Some(Box::new(f)); + self + } + + /// Clone a remote repository. + /// + /// This will use the options configured so far to clone the specified url + /// into the specified local path. 
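+ ///
+ /// # Example
+ ///
+ /// A hedged sketch, not part of the original documentation; the url and the
+ /// destination directory are placeholders:
+ ///
+ /// ```no_run
+ /// use git2::build::RepoBuilder;
+ /// use std::path::Path;
+ ///
+ /// let repo = RepoBuilder::new()
+ ///     .bare(false)
+ ///     .branch("master")
+ ///     .clone("https://example.com/some/repo.git", Path::new("./repo"))
+ ///     .unwrap();
+ /// ```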
+ pub fn clone(&mut self, url: &str, into: &Path) -> Result { + let mut opts: raw::git_clone_options = unsafe { mem::zeroed() }; + unsafe { + try_call!(raw::git_clone_init_options(&mut opts, + raw::GIT_CLONE_OPTIONS_VERSION)); + } + opts.bare = self.bare as c_int; + opts.checkout_branch = self.branch.as_ref().map(|s| { + s.as_ptr() + }).unwrap_or(ptr::null()); + + if let Some(ref local) = self.clone_local { + opts.local = *local as raw::git_clone_local_t; + } else { + opts.local = match (self.local, self.hardlinks) { + (true, false) => raw::GIT_CLONE_LOCAL_NO_LINKS, + (false, _) => raw::GIT_CLONE_NO_LOCAL, + (true, _) => raw::GIT_CLONE_LOCAL_AUTO, + }; + } + + if let Some(ref mut cbs) = self.fetch_opts { + opts.fetch_opts = cbs.raw(); + } + + if let Some(ref mut c) = self.checkout { + unsafe { + c.configure(&mut opts.checkout_opts); + } + } + + if let Some(ref mut callback) = self.remote_create { + opts.remote_cb = Some(remote_create_cb); + opts.remote_cb_payload = callback as *mut _ as *mut _; + } + + let url = try!(CString::new(url)); + let into = try!(into.into_c_string()); + let mut raw = ptr::null_mut(); + unsafe { + try_call!(raw::git_clone(&mut raw, url, into, &opts)); + Ok(Binding::from_raw(raw)) + } + } +} + +extern fn remote_create_cb(out: *mut *mut raw::git_remote, + repo: *mut raw::git_repository, + name: *const c_char, + url: *const c_char, + payload: *mut c_void) -> c_int { + unsafe { + let repo = Repository::from_raw(repo); + let code = panic::wrap(|| { + let name = CStr::from_ptr(name).to_str().unwrap(); + let url = CStr::from_ptr(url).to_str().unwrap(); + let f = payload as *mut Box; + match (*f)(&repo, name, url) { + Ok(remote) => { + *out = ::remote::remote_into_raw(remote); + 0 + } + Err(e) => e.raw_code(), + } + }); + mem::forget(repo); + code.unwrap_or(-1) + } +} + +impl<'cb> Default for CheckoutBuilder<'cb> { + fn default() -> Self { + Self::new() + } +} + +impl<'cb> CheckoutBuilder<'cb> { + /// Creates a new builder for checkouts with all of its default + /// configuration. + pub fn new() -> CheckoutBuilder<'cb> { + ::init(); + CheckoutBuilder { + disable_filters: false, + dir_perm: None, + file_perm: None, + path_ptrs: Vec::new(), + paths: Vec::new(), + target_dir: None, + ancestor_label: None, + our_label: None, + their_label: None, + checkout_opts: raw::GIT_CHECKOUT_SAFE as u32, + progress: None, + notify: None, + notify_flags: CheckoutNotificationType::empty(), + } + } + + /// Indicate that this checkout should perform a dry run by checking for + /// conflicts but not make any actual changes. + pub fn dry_run(&mut self) -> &mut CheckoutBuilder<'cb> { + self.checkout_opts &= !((1 << 4) - 1); + self.checkout_opts |= raw::GIT_CHECKOUT_NONE as u32; + self + } + + /// Take any action necessary to get the working directory to match the + /// target including potentially discarding modified files. + pub fn force(&mut self) -> &mut CheckoutBuilder<'cb> { + self.checkout_opts &= !((1 << 4) - 1); + self.checkout_opts |= raw::GIT_CHECKOUT_FORCE as u32; + self + } + + /// Indicate that the checkout should be performed safely, allowing new + /// files to be created but not overwriting extisting files or changes. + /// + /// This is the default. 
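+ ///
+ /// # Example
+ ///
+ /// A hedged sketch, not part of the original documentation, of performing a
+ /// safe checkout of `HEAD`; the repository path is a placeholder and
+ /// `Repository::checkout_head` is assumed to accept this builder:
+ ///
+ /// ```no_run
+ /// use git2::Repository;
+ /// use git2::build::CheckoutBuilder;
+ ///
+ /// let repo = Repository::open("/path/to/repo").unwrap();
+ /// let mut checkout = CheckoutBuilder::new();
+ /// // Safe is already the default strategy; set it explicitly and also
+ /// // recreate files that are missing from the working directory.
+ /// checkout.safe().recreate_missing(true);
+ /// repo.checkout_head(Some(&mut checkout)).unwrap();
+ /// ```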
+ pub fn safe(&mut self) -> &mut CheckoutBuilder<'cb> { + self.checkout_opts &= !((1 << 4) - 1); + self.checkout_opts |= raw::GIT_CHECKOUT_SAFE as u32; + self + } + + fn flag(&mut self, bit: raw::git_checkout_strategy_t, + on: bool) -> &mut CheckoutBuilder<'cb> { + if on { + self.checkout_opts |= bit as u32; + } else { + self.checkout_opts &= !(bit as u32); + } + self + } + + /// In safe mode, create files that don't exist. + /// + /// Defaults to false. + pub fn recreate_missing(&mut self, allow: bool) -> &mut CheckoutBuilder<'cb> { + self.flag(raw::GIT_CHECKOUT_RECREATE_MISSING, allow) + } + + /// In safe mode, apply safe file updates even when there are conflicts + /// instead of canceling the checkout. + /// + /// Defaults to false. + pub fn allow_conflicts(&mut self, allow: bool) -> &mut CheckoutBuilder<'cb> { + self.flag(raw::GIT_CHECKOUT_ALLOW_CONFLICTS, allow) + } + + /// Remove untracked files from the working dir. + /// + /// Defaults to false. + pub fn remove_untracked(&mut self, remove: bool) + -> &mut CheckoutBuilder<'cb> { + self.flag(raw::GIT_CHECKOUT_REMOVE_UNTRACKED, remove) + } + + /// Remove ignored files from the working dir. + /// + /// Defaults to false. + pub fn remove_ignored(&mut self, remove: bool) -> &mut CheckoutBuilder<'cb> { + self.flag(raw::GIT_CHECKOUT_REMOVE_IGNORED, remove) + } + + /// Only update the contents of files that already exist. + /// + /// If set, files will not be created or deleted. + /// + /// Defaults to false. + pub fn update_only(&mut self, update: bool) -> &mut CheckoutBuilder<'cb> { + self.flag(raw::GIT_CHECKOUT_UPDATE_ONLY, update) + } + + /// Prevents checkout from writing the updated files' information to the + /// index. + /// + /// Defaults to true. + pub fn update_index(&mut self, update: bool) -> &mut CheckoutBuilder<'cb> { + self.flag(raw::GIT_CHECKOUT_DONT_UPDATE_INDEX, !update) + } + + /// Indicate whether the index and git attributes should be refreshed from + /// disk before any operations. + /// + /// Defaults to true, + pub fn refresh(&mut self, refresh: bool) -> &mut CheckoutBuilder<'cb> { + self.flag(raw::GIT_CHECKOUT_NO_REFRESH, !refresh) + } + + /// Skip files with unmerged index entries. + /// + /// Defaults to false. + pub fn skip_unmerged(&mut self, skip: bool) -> &mut CheckoutBuilder<'cb> { + self.flag(raw::GIT_CHECKOUT_SKIP_UNMERGED, skip) + } + + /// Indicate whether the checkout should proceed on conflicts by using the + /// stage 2 version of the file ("ours"). + /// + /// Defaults to false. + pub fn use_ours(&mut self, ours: bool) -> &mut CheckoutBuilder<'cb> { + self.flag(raw::GIT_CHECKOUT_USE_OURS, ours) + } + + /// Indicate whether the checkout should proceed on conflicts by using the + /// stage 3 version of the file ("theirs"). + /// + /// Defaults to false. + pub fn use_theirs(&mut self, theirs: bool) -> &mut CheckoutBuilder<'cb> { + self.flag(raw::GIT_CHECKOUT_USE_THEIRS, theirs) + } + + /// Indicate whether ignored files should be overwritten during the checkout. + /// + /// Defaults to true. + pub fn overwrite_ignored(&mut self, overwrite: bool) + -> &mut CheckoutBuilder<'cb> { + self.flag(raw::GIT_CHECKOUT_DONT_OVERWRITE_IGNORED, !overwrite) + } + + /// Indicate whether a normal merge file should be written for conflicts. + /// + /// Defaults to false. + pub fn conflict_style_merge(&mut self, on: bool) + -> &mut CheckoutBuilder<'cb> { + self.flag(raw::GIT_CHECKOUT_CONFLICT_STYLE_MERGE, on) + } + + /// Specify for which notification types to invoke the notification + /// callback. 
+ /// + /// Defaults to none. + pub fn notify_on(&mut self, notification_types: CheckoutNotificationType) + -> &mut CheckoutBuilder<'cb> { + self.notify_flags = notification_types; + self + } + + /// Indicates whether to include common ancestor data in diff3 format files + /// for conflicts. + /// + /// Defaults to false. + pub fn conflict_style_diff3(&mut self, on: bool) + -> &mut CheckoutBuilder<'cb> { + self.flag(raw::GIT_CHECKOUT_CONFLICT_STYLE_DIFF3, on) + } + + /// Indicate whether to apply filters like CRLF conversion. + pub fn disable_filters(&mut self, disable: bool) + -> &mut CheckoutBuilder<'cb> { + self.disable_filters = disable; + self + } + + /// Set the mode with which new directories are created. + /// + /// Default is 0755 + pub fn dir_perm(&mut self, perm: i32) -> &mut CheckoutBuilder<'cb> { + self.dir_perm = Some(perm); + self + } + + /// Set the mode with which new files are created. + /// + /// The default is 0644 or 0755 as dictated by the blob. + pub fn file_perm(&mut self, perm: i32) -> &mut CheckoutBuilder<'cb> { + self.file_perm = Some(perm); + self + } + + /// Add a path to be checked out. + /// + /// If no paths are specified, then all files are checked out. Otherwise + /// only these specified paths are checked out. + pub fn path(&mut self, path: T) + -> &mut CheckoutBuilder<'cb> { + let path = path.into_c_string().unwrap(); + self.path_ptrs.push(path.as_ptr()); + self.paths.push(path); + self + } + + /// Set the directory to check out to + pub fn target_dir(&mut self, dst: &Path) -> &mut CheckoutBuilder<'cb> { + self.target_dir = Some(dst.into_c_string().unwrap()); + self + } + + /// The name of the common ancestor side of conflicts + pub fn ancestor_label(&mut self, label: &str) -> &mut CheckoutBuilder<'cb> { + self.ancestor_label = Some(CString::new(label).unwrap()); + self + } + + /// The name of the common our side of conflicts + pub fn our_label(&mut self, label: &str) -> &mut CheckoutBuilder<'cb> { + self.our_label = Some(CString::new(label).unwrap()); + self + } + + /// The name of the common their side of conflicts + pub fn their_label(&mut self, label: &str) -> &mut CheckoutBuilder<'cb> { + self.their_label = Some(CString::new(label).unwrap()); + self + } + + /// Set a callback to receive notifications of checkout progress. + pub fn progress(&mut self, cb: F) -> &mut CheckoutBuilder<'cb> + where F: FnMut(Option<&Path>, usize, usize) + 'cb { + self.progress = Some(Box::new(cb) as Box>); + self + } + + /// Set a callback to receive checkout notifications. + /// + /// Callbacks are invoked prior to modifying any files on disk. + /// Returning `false` from the callback will cancel the checkout. + pub fn notify(&mut self, cb: F) -> &mut CheckoutBuilder<'cb> + where F: FnMut(CheckoutNotificationType, Option<&Path>, DiffFile, + DiffFile, DiffFile) -> bool + 'cb + { + self.notify = Some(Box::new(cb) as Box>); + self + } + + /// Configure a raw checkout options based on this configuration. + /// + /// This method is unsafe as there is no guarantee that this structure will + /// outlive the provided checkout options. 
+ pub unsafe fn configure(&mut self, opts: &mut raw::git_checkout_options) { + opts.version = raw::GIT_CHECKOUT_OPTIONS_VERSION; + opts.disable_filters = self.disable_filters as c_int; + opts.dir_mode = self.dir_perm.unwrap_or(0) as c_uint; + opts.file_mode = self.file_perm.unwrap_or(0) as c_uint; + + if !self.path_ptrs.is_empty() { + opts.paths.strings = self.path_ptrs.as_ptr() as *mut _; + opts.paths.count = self.path_ptrs.len() as size_t; + } + + if let Some(ref c) = self.target_dir { + opts.target_directory = c.as_ptr(); + } + if let Some(ref c) = self.ancestor_label { + opts.ancestor_label = c.as_ptr(); + } + if let Some(ref c) = self.our_label { + opts.our_label = c.as_ptr(); + } + if let Some(ref c) = self.their_label { + opts.their_label = c.as_ptr(); + } + if self.progress.is_some() { + let f: raw::git_checkout_progress_cb = progress_cb; + opts.progress_cb = Some(f); + opts.progress_payload = self as *mut _ as *mut _; + } + if self.notify.is_some() { + let f: raw::git_checkout_notify_cb = notify_cb; + opts.notify_cb = Some(f); + opts.notify_payload = self as *mut _ as *mut _; + opts.notify_flags = self.notify_flags.bits() as c_uint; + } + opts.checkout_strategy = self.checkout_opts as c_uint; + } +} + +extern fn progress_cb(path: *const c_char, + completed: size_t, + total: size_t, + data: *mut c_void) { + panic::wrap(|| unsafe { + let payload = &mut *(data as *mut CheckoutBuilder); + let callback = match payload.progress { + Some(ref mut c) => c, + None => return, + }; + let path = if path.is_null() { + None + } else { + Some(util::bytes2path(CStr::from_ptr(path).to_bytes())) + }; + callback(path, completed as usize, total as usize) + }); +} + +extern fn notify_cb(why: raw::git_checkout_notify_t, + path: *const c_char, + baseline: *const raw::git_diff_file, + target: *const raw::git_diff_file, + workdir: *const raw::git_diff_file, + data: *mut c_void) -> c_int { + // pack callback etc + panic::wrap(|| unsafe { + let payload = &mut *(data as *mut CheckoutBuilder); + let callback = match payload.notify { + Some(ref mut c) => c, + None => return 0, + }; + let path = if path.is_null() { + None + } else { + Some(util::bytes2path(CStr::from_ptr(path).to_bytes())) + }; + + let why = CheckoutNotificationType::from_bits_truncate(why as u32); + let keep_going = callback(why, + path, + DiffFile::from_raw(baseline), + DiffFile::from_raw(target), + DiffFile::from_raw(workdir)); + if keep_going {0} else {1} + }).unwrap_or(2) +} + +#[cfg(test)] +mod tests { + use std::fs; + use std::path::Path; + use tempdir::TempDir; + use super::RepoBuilder; + use Repository; + + #[test] + fn smoke() { + let r = RepoBuilder::new().clone("/path/to/nowhere", Path::new("foo")); + assert!(r.is_err()); + } + + #[test] + fn smoke2() { + let td = TempDir::new("test").unwrap(); + Repository::init_bare(&td.path().join("bare")).unwrap(); + let url = if cfg!(unix) { + format!("file://{}/bare", td.path().display()) + } else { + format!("file:///{}/bare", td.path().display().to_string() + .replace("\\", "/")) + }; + + let dst = td.path().join("foo"); + RepoBuilder::new().clone(&url, &dst).unwrap(); + fs::remove_dir_all(&dst).unwrap(); + assert!(RepoBuilder::new().branch("foo") + .clone(&url, &dst).is_err()); + } + +} diff --git a/git2/src/call.rs b/git2/src/call.rs new file mode 100644 index 000000000..3367275bf --- /dev/null +++ b/git2/src/call.rs @@ -0,0 +1,217 @@ +#![macro_use] +use libc; + +use Error; + +macro_rules! 
call { + (raw::$p:ident ($($e:expr),*)) => ( + raw::$p($(::call::convert(&$e)),*) + ) +} + +macro_rules! try_call { + (raw::$p:ident ($($e:expr),*)) => ({ + match ::call::try(raw::$p($(::call::convert(&$e)),*)) { + Ok(o) => o, + Err(e) => { ::panic::check(); return Err(e) } + } + }) +} + +macro_rules! try_call_iter { + ($($f:tt)*) => { + match call!($($f)*) { + 0 => {} + raw::GIT_ITEROVER => return None, + e => return Some(Err(::call::last_error(e))) + } + } +} + +#[doc(hidden)] +pub trait Convert { + fn convert(&self) -> T; +} + +pub fn convert>(u: &U) -> T { u.convert() } + +pub fn try(ret: libc::c_int) -> Result { + match ret { + n if n < 0 => Err(last_error(n)), + n => Ok(n), + } +} + +pub fn last_error(code: libc::c_int) -> Error { + // nowadays this unwrap is safe as `Error::last_error` always returns + // `Some`. + Error::last_error(code).unwrap() +} + +mod impls { + use std::ffi::CString; + use std::ptr; + + use libc; + + use {raw, ConfigLevel, ResetType, ObjectType, BranchType, Direction}; + use {DiffFormat, FileFavor, SubmoduleIgnore, AutotagOption, FetchPrune}; + use call::Convert; + + impl Convert for T { + fn convert(&self) -> T { *self } + } + + impl Convert for bool { + fn convert(&self) -> libc::c_int { *self as libc::c_int } + } + impl<'a, T> Convert<*const T> for &'a T { + fn convert(&self) -> *const T { *self as *const T } + } + impl<'a, T> Convert<*mut T> for &'a mut T { + fn convert(&self) -> *mut T { &**self as *const T as *mut T } + } + impl Convert<*const T> for *mut T { + fn convert(&self) -> *const T { *self as *const T } + } + + impl Convert<*const libc::c_char> for CString { + fn convert(&self) -> *const libc::c_char { self.as_ptr() } + } + + impl> Convert<*const T> for Option { + fn convert(&self) -> *const T { + self.as_ref().map(|s| s.convert()).unwrap_or(ptr::null()) + } + } + + impl> Convert<*mut T> for Option { + fn convert(&self) -> *mut T { + self.as_ref().map(|s| s.convert()).unwrap_or(ptr::null_mut()) + } + } + + impl Convert for ResetType { + fn convert(&self) -> raw::git_reset_t { + match *self { + ResetType::Soft => raw::GIT_RESET_SOFT, + ResetType::Hard => raw::GIT_RESET_HARD, + ResetType::Mixed => raw::GIT_RESET_MIXED, + } + } + } + + impl Convert for Direction { + fn convert(&self) -> raw::git_direction { + match *self { + Direction::Push => raw::GIT_DIRECTION_PUSH, + Direction::Fetch => raw::GIT_DIRECTION_FETCH, + } + } + } + + impl Convert for ObjectType { + fn convert(&self) -> raw::git_otype { + match *self { + ObjectType::Any => raw::GIT_OBJ_ANY, + ObjectType::Commit => raw::GIT_OBJ_COMMIT, + ObjectType::Tree => raw::GIT_OBJ_TREE, + ObjectType::Blob => raw::GIT_OBJ_BLOB, + ObjectType::Tag => raw::GIT_OBJ_TAG, + } + } + } + + impl Convert for Option { + fn convert(&self) -> raw::git_otype { + self.unwrap_or(ObjectType::Any).convert() + } + } + + impl Convert for BranchType { + fn convert(&self) -> raw::git_branch_t { + match *self { + BranchType::Remote => raw::GIT_BRANCH_REMOTE, + BranchType::Local => raw::GIT_BRANCH_LOCAL, + } + } + } + + impl Convert for Option { + fn convert(&self) -> raw::git_branch_t { + self.map(|s| s.convert()).unwrap_or(raw::GIT_BRANCH_ALL) + } + } + + impl Convert for ConfigLevel { + fn convert(&self) -> raw::git_config_level_t { + match *self { + ConfigLevel::ProgramData => raw::GIT_CONFIG_LEVEL_PROGRAMDATA, + ConfigLevel::System => raw::GIT_CONFIG_LEVEL_SYSTEM, + ConfigLevel::XDG => raw::GIT_CONFIG_LEVEL_XDG, + ConfigLevel::Global => raw::GIT_CONFIG_LEVEL_GLOBAL, + ConfigLevel::Local => 
raw::GIT_CONFIG_LEVEL_LOCAL, + ConfigLevel::App => raw::GIT_CONFIG_LEVEL_APP, + ConfigLevel::Highest => raw::GIT_CONFIG_HIGHEST_LEVEL, + } + } + } + + impl Convert for DiffFormat { + fn convert(&self) -> raw::git_diff_format_t { + match *self { + DiffFormat::Patch => raw::GIT_DIFF_FORMAT_PATCH, + DiffFormat::PatchHeader => raw::GIT_DIFF_FORMAT_PATCH_HEADER, + DiffFormat::Raw => raw::GIT_DIFF_FORMAT_RAW, + DiffFormat::NameOnly => raw::GIT_DIFF_FORMAT_NAME_ONLY, + DiffFormat::NameStatus => raw::GIT_DIFF_FORMAT_NAME_STATUS, + } + } + } + + impl Convert for FileFavor { + fn convert(&self) -> raw::git_merge_file_favor_t { + match *self { + FileFavor::Normal => raw::GIT_MERGE_FILE_FAVOR_NORMAL, + FileFavor::Ours => raw::GIT_MERGE_FILE_FAVOR_OURS, + FileFavor::Theirs => raw::GIT_MERGE_FILE_FAVOR_THEIRS, + FileFavor::Union => raw::GIT_MERGE_FILE_FAVOR_UNION, + } + } + } + + impl Convert for SubmoduleIgnore { + fn convert(&self) -> raw::git_submodule_ignore_t { + match *self { + SubmoduleIgnore::Unspecified => + raw::GIT_SUBMODULE_IGNORE_UNSPECIFIED, + SubmoduleIgnore::None => raw::GIT_SUBMODULE_IGNORE_NONE, + SubmoduleIgnore::Untracked => raw::GIT_SUBMODULE_IGNORE_UNTRACKED, + SubmoduleIgnore::Dirty => raw::GIT_SUBMODULE_IGNORE_DIRTY, + SubmoduleIgnore::All => raw::GIT_SUBMODULE_IGNORE_ALL, + } + } + } + + impl Convert for AutotagOption { + fn convert(&self) -> raw::git_remote_autotag_option_t { + match *self { + AutotagOption::Unspecified => + raw::GIT_REMOTE_DOWNLOAD_TAGS_UNSPECIFIED, + AutotagOption::None => raw::GIT_REMOTE_DOWNLOAD_TAGS_NONE, + AutotagOption::Auto => raw::GIT_REMOTE_DOWNLOAD_TAGS_AUTO, + AutotagOption::All => raw::GIT_REMOTE_DOWNLOAD_TAGS_ALL, + } + } + } + + impl Convert for FetchPrune { + fn convert(&self) -> raw::git_fetch_prune_t { + match *self { + FetchPrune::Unspecified => raw::GIT_FETCH_PRUNE_UNSPECIFIED, + FetchPrune::On => raw::GIT_FETCH_PRUNE, + FetchPrune::Off => raw::GIT_FETCH_NO_PRUNE, + } + } + } +} diff --git a/git2/src/cert.rs b/git2/src/cert.rs new file mode 100644 index 000000000..70ab9498f --- /dev/null +++ b/git2/src/cert.rs @@ -0,0 +1,97 @@ +//! Certificate types which are passed to `CertificateCheck` in +//! `RemoteCallbacks`. + +use std::marker; +use std::mem; +use std::slice; + +use raw; +use util::Binding; + +/// A certificate for a remote connection, viewable as one of `CertHostkey` or +/// `CertX509` currently. +pub struct Cert<'a> { + raw: *mut raw::git_cert, + _marker: marker::PhantomData<&'a raw::git_cert>, +} + +/// Hostkey information taken from libssh2 +pub struct CertHostkey<'a> { + raw: *mut raw::git_cert_hostkey, + _marker: marker::PhantomData<&'a raw::git_cert>, +} + +/// X.509 certificate information +pub struct CertX509<'a> { + raw: *mut raw::git_cert_x509, + _marker: marker::PhantomData<&'a raw::git_cert>, +} + +impl<'a> Cert<'a> { + /// Attempt to view this certificate as an SSH hostkey. + /// + /// Returns `None` if this is not actually an SSH hostkey. + pub fn as_hostkey(&self) -> Option<&CertHostkey<'a>> { + self.cast(raw::GIT_CERT_HOSTKEY_LIBSSH2) + } + + /// Attempt to view this certificate as an X.509 certificate. + /// + /// Returns `None` if this is not actually an X.509 certificate. 
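+ ///
+ /// # Example
+ ///
+ /// A hedged sketch, not part of the original documentation, of inspecting a
+ /// certificate handed to a certificate-check callback:
+ ///
+ /// ```
+ /// use git2::Cert;
+ ///
+ /// fn describe(cert: &Cert) {
+ ///     if let Some(hostkey) = cert.as_hostkey() {
+ ///         println!("ssh hostkey (md5 available: {})", hostkey.hash_md5().is_some());
+ ///     } else if let Some(x509) = cert.as_x509() {
+ ///         println!("x509 certificate, {} bytes of data", x509.data().len());
+ ///     }
+ /// }
+ /// ```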
+ pub fn as_x509(&self) -> Option<&CertX509<'a>> { + self.cast(raw::GIT_CERT_X509) + } + + fn cast(&self, kind: raw::git_cert_t) -> Option<&T> { + assert_eq!(mem::size_of::>(), mem::size_of::()); + unsafe { + if kind == (*self.raw).cert_type { + Some(&*(self as *const Cert<'a> as *const T)) + } else { + None + } + } + } +} + +impl<'a> CertHostkey<'a> { + /// Returns the md5 hash of the hostkey, if available. + pub fn hash_md5(&self) -> Option<&[u8; 16]> { + unsafe { + if (*self.raw).kind as u32 & raw::GIT_CERT_SSH_MD5 as u32 == 0 { + None + } else { + Some(&(*self.raw).hash_md5) + } + } + } + + /// Returns the SHA-1 hash of the hostkey, if available. + pub fn hash_sha1(&self) -> Option<&[u8; 20]> { + unsafe { + if (*self.raw).kind as u32 & raw::GIT_CERT_SSH_SHA1 as u32 == 0 { + None + } else { + Some(&(*self.raw).hash_sha1) + } + } + } +} + +impl<'a> CertX509<'a> { + /// Return the X.509 certificate data as a byte slice + pub fn data(&self) -> &[u8] { + unsafe { + slice::from_raw_parts((*self.raw).data as *const u8, + (*self.raw).len as usize) + } + } +} + +impl<'a> Binding for Cert<'a> { + type Raw = *mut raw::git_cert; + unsafe fn from_raw(raw: *mut raw::git_cert) -> Cert<'a> { + Cert { raw: raw, _marker: marker::PhantomData } + } + fn raw(&self) -> *mut raw::git_cert { self.raw } +} diff --git a/git2/src/commit.rs b/git2/src/commit.rs new file mode 100644 index 000000000..f6086309c --- /dev/null +++ b/git2/src/commit.rs @@ -0,0 +1,359 @@ +use std::marker; +use std::mem; +use std::ops::Range; +use std::ptr; +use std::str; +use libc; + +use {raw, signature, Oid, Error, Signature, Tree, Time, Object}; +use util::Binding; + +/// A structure to represent a git [commit][1] +/// +/// [1]: http://git-scm.com/book/en/Git-Internals-Git-Objects +pub struct Commit<'repo> { + raw: *mut raw::git_commit, + _marker: marker::PhantomData>, +} + +/// An iterator over the parent commits of a commit. +pub struct Parents<'commit, 'repo: 'commit> { + range: Range, + commit: &'commit Commit<'repo>, +} + +/// An iterator over the parent commits' ids of a commit. +pub struct ParentIds<'commit> { + range: Range, + commit: &'commit Commit<'commit>, +} + +impl<'repo> Commit<'repo> { + /// Get the id (SHA1) of a repository commit + pub fn id(&self) -> Oid { + unsafe { Binding::from_raw(raw::git_commit_id(&*self.raw)) } + } + + /// Get the id of the tree pointed to by this commit. + /// + /// No attempts are made to fetch an object from the ODB. + pub fn tree_id(&self) -> Oid { + unsafe { Binding::from_raw(raw::git_commit_tree_id(&*self.raw)) } + } + + /// Get the tree pointed to by a commit. + pub fn tree(&self) -> Result, Error> { + let mut ret = ptr::null_mut(); + unsafe { + try_call!(raw::git_commit_tree(&mut ret, &*self.raw)); + Ok(Binding::from_raw(ret)) + } + } + + /// Get access to the underlying raw pointer. + pub fn raw(&self) -> *mut raw::git_commit { self.raw } + + /// Get the full message of a commit. + /// + /// The returned message will be slightly prettified by removing any + /// potential leading newlines. + /// + /// `None` will be returned if the message is not valid utf-8 + pub fn message(&self) -> Option<&str> { + str::from_utf8(self.message_bytes()).ok() + } + + /// Get the full message of a commit as a byte slice. + /// + /// The returned message will be slightly prettified by removing any + /// potential leading newlines. 
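+ ///
+ /// # Example
+ ///
+ /// A hedged sketch, not part of the original documentation; the repository
+ /// path is a placeholder:
+ ///
+ /// ```no_run
+ /// use git2::Repository;
+ ///
+ /// let repo = Repository::open("/path/to/repo").unwrap();
+ /// let head = repo.head().unwrap();
+ /// let commit = repo.find_commit(head.target().unwrap()).unwrap();
+ /// match commit.message() {
+ ///     Some(msg) => print!("{}", msg),
+ ///     None => println!("non-utf8 message ({} bytes)", commit.message_bytes().len()),
+ /// }
+ /// ```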
+ pub fn message_bytes(&self) -> &[u8] { + unsafe { + ::opt_bytes(self, raw::git_commit_message(&*self.raw)).unwrap() + } + } + + /// Get the encoding for the message of a commit, as a string representing a + /// standard encoding name. + /// + /// `None` will be returned if the encoding is not known + pub fn message_encoding(&self) -> Option<&str> { + let bytes = unsafe { + ::opt_bytes(self, raw::git_commit_message(&*self.raw)) + }; + bytes.map(|b| str::from_utf8(b).unwrap()) + } + + /// Get the full raw message of a commit. + /// + /// `None` will be returned if the message is not valid utf-8 + pub fn message_raw(&self) -> Option<&str> { + str::from_utf8(self.message_raw_bytes()).ok() + } + + /// Get the full raw message of a commit. + pub fn message_raw_bytes(&self) -> &[u8] { + unsafe { + ::opt_bytes(self, raw::git_commit_message_raw(&*self.raw)).unwrap() + } + } + + /// Get the full raw text of the commit header. + /// + /// `None` will be returned if the message is not valid utf-8 + pub fn raw_header(&self) -> Option<&str> { + str::from_utf8(self.raw_header_bytes()).ok() + } + + /// Get the full raw text of the commit header. + pub fn raw_header_bytes(&self) -> &[u8] { + unsafe { + ::opt_bytes(self, raw::git_commit_raw_header(&*self.raw)).unwrap() + } + } + + /// Get the short "summary" of the git commit message. + /// + /// The returned message is the summary of the commit, comprising the first + /// paragraph of the message with whitespace trimmed and squashed. + /// + /// `None` may be returned if an error occurs or if the summary is not valid + /// utf-8. + pub fn summary(&self) -> Option<&str> { + self.summary_bytes().and_then(|s| str::from_utf8(s).ok()) + } + + /// Get the short "summary" of the git commit message. + /// + /// The returned message is the summary of the commit, comprising the first + /// paragraph of the message with whitespace trimmed and squashed. + /// + /// `None` may be returned if an error occurs + pub fn summary_bytes(&self) -> Option<&[u8]> { + unsafe { ::opt_bytes(self, raw::git_commit_summary(self.raw)) } + } + + /// Get the commit time (i.e. committer time) of a commit. + /// + /// The first element of the tuple is the time, in seconds, since the epoch. + /// The second element is the offset, in minutes, of the time zone of the + /// committer's preferred time zone. + pub fn time(&self) -> Time { + unsafe { + Time::new(raw::git_commit_time(&*self.raw) as i64, + raw::git_commit_time_offset(&*self.raw) as i32) + } + } + + /// Creates a new iterator over the parents of this commit. + pub fn parents<'a>(&'a self) -> Parents<'a, 'repo> { + let max = unsafe { raw::git_commit_parentcount(&*self.raw) as usize }; + Parents { range: 0..max, commit: self } + } + + /// Creates a new iterator over the parents of this commit. + pub fn parent_ids(&self) -> ParentIds { + let max = unsafe { raw::git_commit_parentcount(&*self.raw) as usize }; + ParentIds { range: 0..max, commit: self } + } + + /// Get the author of this commit. + pub fn author(&self) -> Signature { + unsafe { + let ptr = raw::git_commit_author(&*self.raw); + signature::from_raw_const(self, ptr) + } + } + + /// Get the committer of this commit. + pub fn committer(&self) -> Signature { + unsafe { + let ptr = raw::git_commit_committer(&*self.raw); + signature::from_raw_const(self, ptr) + } + } + + /// Amend this existing commit with all non-`None` values + /// + /// This creates a new commit that is exactly the same as the old commit, + /// except that any non-`None` values will be updated. 
The new commit has + /// the same parents as the old commit. + /// + /// For information about `update_ref`, see [`Repository::commit`]. + /// + /// [`Repository::commit`]: struct.Repository.html#method.commit + pub fn amend(&self, + update_ref: Option<&str>, + author: Option<&Signature>, + committer: Option<&Signature>, + message_encoding: Option<&str>, + message: Option<&str>, + tree: Option<&Tree<'repo>>) -> Result { + let mut raw = raw::git_oid { id: [0; raw::GIT_OID_RAWSZ] }; + let update_ref = try!(::opt_cstr(update_ref)); + let encoding = try!(::opt_cstr(message_encoding)); + let message = try!(::opt_cstr(message)); + unsafe { + try_call!(raw::git_commit_amend(&mut raw, + self.raw(), + update_ref, + author.map(|s| s.raw()), + committer.map(|s| s.raw()), + encoding, + message, + tree.map(|t| t.raw()))); + Ok(Binding::from_raw(&raw as *const _)) + } + } + + /// Get the specified parent of the commit. + /// + /// Use the `parents` iterator to return an iterator over all parents. + pub fn parent(&self, i: usize) -> Result, Error> { + unsafe { + let mut raw = ptr::null_mut(); + try_call!(raw::git_commit_parent(&mut raw, &*self.raw, + i as libc::c_uint)); + Ok(Binding::from_raw(raw)) + } + } + + /// Get the specified parent id of the commit. + /// + /// This is different from `parent`, which will attempt to load the + /// parent commit from the ODB. + /// + /// Use the `parent_ids` iterator to return an iterator over all parents. + pub fn parent_id(&self, i: usize) -> Result { + unsafe { + let id = raw::git_commit_parent_id(self.raw, i as libc::c_uint); + if id.is_null() { + Err(Error::from_str("parent index out of bounds")) + } else { + Ok(Binding::from_raw(id)) + } + } + } + + /// Casts this Commit to be usable as an `Object` + pub fn as_object(&self) -> &Object<'repo> { + unsafe { + &*(self as *const _ as *const Object<'repo>) + } + } + + /// Consumes Commit to be returned as an `Object` + pub fn into_object(self) -> Object<'repo> { + assert_eq!(mem::size_of_val(&self), mem::size_of::()); + unsafe { + mem::transmute(self) + } + } +} + +impl<'repo> Binding for Commit<'repo> { + type Raw = *mut raw::git_commit; + unsafe fn from_raw(raw: *mut raw::git_commit) -> Commit<'repo> { + Commit { + raw: raw, + _marker: marker::PhantomData, + } + } + fn raw(&self) -> *mut raw::git_commit { self.raw } +} + +impl<'repo> ::std::fmt::Debug for Commit<'repo> { + fn fmt(&self, f: &mut ::std::fmt::Formatter) -> Result<(), ::std::fmt::Error> { + let mut ds = f.debug_struct("Commit"); + ds.field("id", &self.id()); + if let Some(summary) = self.summary() { + ds.field("summary", &summary); + } + ds.finish() + } +} + +impl<'repo, 'commit> Iterator for Parents<'commit, 'repo> { + type Item = Commit<'repo>; + fn next(&mut self) -> Option> { + self.range.next().map(|i| self.commit.parent(i).unwrap()) + } + fn size_hint(&self) -> (usize, Option) { self.range.size_hint() } +} + +impl<'repo, 'commit> DoubleEndedIterator for Parents<'commit, 'repo> { + fn next_back(&mut self) -> Option> { + self.range.next_back().map(|i| self.commit.parent(i).unwrap()) + } +} + +impl<'repo, 'commit> ExactSizeIterator for Parents<'commit, 'repo> {} + +impl<'commit> Iterator for ParentIds<'commit> { + type Item = Oid; + fn next(&mut self) -> Option { + self.range.next().map(|i| self.commit.parent_id(i).unwrap()) + } + fn size_hint(&self) -> (usize, Option) { self.range.size_hint() } +} + +impl<'commit> DoubleEndedIterator for ParentIds<'commit> { + fn next_back(&mut self) -> Option { + self.range.next_back().map(|i| 
self.commit.parent_id(i).unwrap()) + } +} + +impl<'commit> ExactSizeIterator for ParentIds<'commit> {} + +impl<'repo> Clone for Commit<'repo> { + fn clone(&self) -> Self { + self.as_object().clone().into_commit().ok().unwrap() + } +} + +impl<'repo> Drop for Commit<'repo> { + fn drop(&mut self) { + unsafe { raw::git_commit_free(self.raw) } + } +} + +#[cfg(test)] +mod tests { + #[test] + fn smoke() { + let (_td, repo) = ::test::repo_init(); + let head = repo.head().unwrap(); + let target = head.target().unwrap(); + let commit = repo.find_commit(target).unwrap(); + assert_eq!(commit.message(), Some("initial")); + assert_eq!(commit.id(), target); + commit.message_raw().unwrap(); + commit.raw_header().unwrap(); + commit.message_encoding(); + commit.summary().unwrap(); + commit.tree_id(); + commit.tree().unwrap(); + assert_eq!(commit.parents().count(), 0); + + assert_eq!(commit.author().name(), Some("name")); + assert_eq!(commit.author().email(), Some("email")); + assert_eq!(commit.committer().name(), Some("name")); + assert_eq!(commit.committer().email(), Some("email")); + + let sig = repo.signature().unwrap(); + let tree = repo.find_tree(commit.tree_id()).unwrap(); + let id = repo.commit(Some("HEAD"), &sig, &sig, "bar", &tree, + &[&commit]).unwrap(); + let head = repo.find_commit(id).unwrap(); + + let new_head = head.amend(Some("HEAD"), None, None, None, + Some("new message"), None).unwrap(); + let new_head = repo.find_commit(new_head).unwrap(); + assert_eq!(new_head.message(), Some("new message")); + new_head.into_object(); + + repo.find_object(target, None).unwrap().as_commit().unwrap(); + repo.find_object(target, None).unwrap().into_commit().ok().unwrap(); + } +} + diff --git a/git2/src/config.rs b/git2/src/config.rs new file mode 100644 index 000000000..b01729aeb --- /dev/null +++ b/git2/src/config.rs @@ -0,0 +1,628 @@ +use std::ffi::CString; +use std::marker; +use std::path::{Path, PathBuf}; +use std::ptr; +use std::str; +use libc; + +use {raw, Error, ConfigLevel, Buf, IntoCString}; +use util::{self, Binding}; + +/// A structure representing a git configuration key/value store +pub struct Config { + raw: *mut raw::git_config, +} + +/// A struct representing a certain entry owned by a `Config` instance. +/// +/// An entry has a name, a value, and a level it applies to. +pub struct ConfigEntry<'cfg> { + raw: *mut raw::git_config_entry, + _marker: marker::PhantomData<&'cfg Config>, + owned: bool, +} + +/// An iterator over the `ConfigEntry` values of a `Config` structure. +pub struct ConfigEntries<'cfg> { + raw: *mut raw::git_config_iterator, + _marker: marker::PhantomData<&'cfg Config>, +} + +impl Config { + /// Allocate a new configuration object + /// + /// This object is empty, so you have to add a file to it before you can do + /// anything with it. + pub fn new() -> Result { + ::init(); + let mut raw = ptr::null_mut(); + unsafe { + try_call!(raw::git_config_new(&mut raw)); + Ok(Binding::from_raw(raw)) + } + } + + /// Create a new config instance containing a single on-disk file + pub fn open(path: &Path) -> Result { + ::init(); + let mut raw = ptr::null_mut(); + let path = try!(path.into_c_string()); + unsafe { + try_call!(raw::git_config_open_ondisk(&mut raw, path)); + Ok(Binding::from_raw(raw)) + } + } + + /// Open the global, XDG and system configuration files + /// + /// Utility wrapper that finds the global, XDG and system configuration + /// files and opens them into a single prioritized config object that can + /// be used when accessing default config data outside a repository. 
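+ ///
+ /// # Example
+ ///
+ /// A hedged sketch, not part of the original documentation; `user.name` may
+ /// well be unset on a given machine:
+ ///
+ /// ```no_run
+ /// use git2::Config;
+ ///
+ /// let cfg = Config::open_default().unwrap();
+ /// if let Ok(name) = cfg.get_string("user.name") {
+ ///     println!("user.name = {}", name);
+ /// }
+ /// ```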
+ pub fn open_default() -> Result { + ::init(); + let mut raw = ptr::null_mut(); + unsafe { + try_call!(raw::git_config_open_default(&mut raw)); + Ok(Binding::from_raw(raw)) + } + } + + /// Locate the path to the global configuration file + /// + /// The user or global configuration file is usually located in + /// `$HOME/.gitconfig`. + /// + /// This method will try to guess the full path to that file, if the file + /// exists. The returned path may be used on any method call to load + /// the global configuration file. + /// + /// This method will not guess the path to the xdg compatible config file + /// (`.config/git/config`). + pub fn find_global() -> Result { + ::init(); + let buf = Buf::new(); + unsafe { try_call!(raw::git_config_find_global(buf.raw())); } + Ok(util::bytes2path(&buf).to_path_buf()) + } + + /// Locate the path to the system configuration file + /// + /// If /etc/gitconfig doesn't exist, it will look for %PROGRAMFILES% + pub fn find_system() -> Result { + ::init(); + let buf = Buf::new(); + unsafe { try_call!(raw::git_config_find_system(buf.raw())); } + Ok(util::bytes2path(&buf).to_path_buf()) + } + + /// Locate the path to the global xdg compatible configuration file + /// + /// The xdg compatible configuration file is usually located in + /// `$HOME/.config/git/config`. + pub fn find_xdg() -> Result { + ::init(); + let buf = Buf::new(); + unsafe { try_call!(raw::git_config_find_xdg(buf.raw())); } + Ok(util::bytes2path(&buf).to_path_buf()) + } + + /// Add an on-disk config file instance to an existing config + /// + /// The on-disk file pointed at by path will be opened and parsed; it's + /// expected to be a native Git config file following the default Git config + /// syntax (see man git-config). + /// + /// Further queries on this config object will access each of the config + /// file instances in order (instances with a higher priority level will be + /// accessed first). + pub fn add_file(&mut self, path: &Path, level: ConfigLevel, + force: bool) -> Result<(), Error> { + let path = try!(path.into_c_string()); + unsafe { + try_call!(raw::git_config_add_file_ondisk(self.raw, path, level, + ptr::null(), force)); + Ok(()) + } + } + + /// Delete a config variable from the config file with the highest level + /// (usually the local one). + pub fn remove(&mut self, name: &str) -> Result<(), Error> { + let name = try!(CString::new(name)); + unsafe { + try_call!(raw::git_config_delete_entry(self.raw, name)); + Ok(()) + } + } + + /// Remove multivar config variables in the config file with the highest level (usually the + /// local one). + pub fn remove_multivar(&mut self, name: &str, regexp: &str) -> Result<(), Error> { + let name = try!(CString::new(name)); + let regexp = try!(CString::new(regexp)); + unsafe { + try_call!(raw::git_config_delete_multivar(self.raw, name, regexp)); + } + Ok(()) + } + + /// Get the value of a boolean config variable. + /// + /// All config files will be looked into, in the order of their defined + /// level. A higher level means a higher priority. The first occurrence of + /// the variable will be returned here. + pub fn get_bool(&self, name: &str) -> Result { + let mut out = 0 as libc::c_int; + let name = try!(CString::new(name)); + unsafe { + try_call!(raw::git_config_get_bool(&mut out, &*self.raw, name)); + + } + Ok(!(out == 0)) + } + + /// Get the value of an integer config variable. + /// + /// All config files will be looked into, in the order of their defined + /// level. A higher level means a higher priority. 
The first occurrence of + /// the variable will be returned here. + pub fn get_i32(&self, name: &str) -> Result { + let mut out = 0i32; + let name = try!(CString::new(name)); + unsafe { + try_call!(raw::git_config_get_int32(&mut out, &*self.raw, name)); + + } + Ok(out) + } + + /// Get the value of an integer config variable. + /// + /// All config files will be looked into, in the order of their defined + /// level. A higher level means a higher priority. The first occurrence of + /// the variable will be returned here. + pub fn get_i64(&self, name: &str) -> Result { + let mut out = 0i64; + let name = try!(CString::new(name)); + unsafe { + try_call!(raw::git_config_get_int64(&mut out, &*self.raw, name)); + } + Ok(out) + } + + /// Get the value of a string config variable. + /// + /// This is the same as `get_bytes` except that it may return `Err` if + /// the bytes are not valid utf-8. + pub fn get_str(&self, name: &str) -> Result<&str, Error> { + str::from_utf8(try!(self.get_bytes(name))).map_err(|_| { + Error::from_str("configuration value is not valid utf8") + }) + } + + /// Get the value of a string config variable as a byte slice. + /// + /// This method will return an error if this `Config` is not a snapshot. + pub fn get_bytes(&self, name: &str) -> Result<&[u8], Error> { + let mut ret = ptr::null(); + let name = try!(CString::new(name)); + unsafe { + try_call!(raw::git_config_get_string(&mut ret, &*self.raw, name)); + Ok(::opt_bytes(self, ret).unwrap()) + } + } + + /// Get the value of a string config variable as an owned string. + /// + /// An error will be returned if the config value is not valid utf-8. + pub fn get_string(&self, name: &str) -> Result { + let ret = Buf::new(); + let name = try!(CString::new(name)); + unsafe { + try_call!(raw::git_config_get_string_buf(ret.raw(), self.raw, name)); + } + str::from_utf8(&ret).map(|s| s.to_string()).map_err(|_| { + Error::from_str("configuration value is not valid utf8") + }) + } + + /// Get the value of a path config variable as an owned . + pub fn get_path(&self, name: &str) -> Result { + let ret = Buf::new(); + let name = try!(CString::new(name)); + unsafe { + try_call!(raw::git_config_get_path(ret.raw(), self.raw, name)); + } + Ok(::util::bytes2path(&ret).to_path_buf()) + } + + /// Get the ConfigEntry for a config variable. + pub fn get_entry(&self, name: &str) -> Result { + let mut ret = ptr::null_mut(); + let name = try!(CString::new(name)); + unsafe { + try_call!(raw::git_config_get_entry(&mut ret, self.raw, name)); + Ok(Binding::from_raw(ret)) + } + } + + /// Iterate over all the config variables + /// + /// If `glob` is `Some`, then the iterator will only iterate over all + /// variables whose name matches the pattern. 
+     ///
+     /// # Example
+     ///
+     /// ```
+     /// # #![allow(unstable)]
+     /// use git2::Config;
+     ///
+     /// let cfg = Config::new().unwrap();
+     ///
+     /// for entry in &cfg.entries(None).unwrap() {
+     ///     let entry = entry.unwrap();
+     ///     println!("{} => {}", entry.name().unwrap(), entry.value().unwrap());
+     /// }
+     /// ```
+     pub fn entries(&self, glob: Option<&str>) -> Result<ConfigEntries, Error> {
+         let mut ret = ptr::null_mut();
+         unsafe {
+             match glob {
+                 Some(s) => {
+                     let s = try!(CString::new(s));
+                     try_call!(raw::git_config_iterator_glob_new(&mut ret,
+                                                                 &*self.raw,
+                                                                 s));
+                 }
+                 None => {
+                     try_call!(raw::git_config_iterator_new(&mut ret, &*self.raw));
+                 }
+             }
+             Ok(Binding::from_raw(ret))
+         }
+     }
+
+     /// Open the global/XDG configuration file according to git's rules
+     ///
+     /// Git allows you to store your global configuration at `$HOME/.config` or
+     /// `$XDG_CONFIG_HOME/git/config`. For backwards compatibility, the XDG file
+     /// shouldn't be used unless the user has created it explicitly. With this
+     /// function you'll open the correct one to write to.
+     pub fn open_global(&mut self) -> Result<Config, Error> {
+         let mut raw = ptr::null_mut();
+         unsafe {
+             try_call!(raw::git_config_open_global(&mut raw, self.raw));
+             Ok(Binding::from_raw(raw))
+         }
+     }
+
+     /// Build a single-level focused config object from a multi-level one.
+     ///
+     /// The returned config object can be used to perform get/set/delete
+     /// operations on a single specific level.
+     pub fn open_level(&self, level: ConfigLevel) -> Result<Config, Error> {
+         let mut raw = ptr::null_mut();
+         unsafe {
+             try_call!(raw::git_config_open_level(&mut raw, &*self.raw, level));
+             Ok(Binding::from_raw(raw))
+         }
+     }
+
+     /// Set the value of a boolean config variable in the config file with the
+     /// highest level (usually the local one).
+     pub fn set_bool(&mut self, name: &str, value: bool) -> Result<(), Error> {
+         let name = try!(CString::new(name));
+         unsafe {
+             try_call!(raw::git_config_set_bool(self.raw, name, value));
+         }
+         Ok(())
+     }
+
+     /// Set the value of an integer config variable in the config file with the
+     /// highest level (usually the local one).
+     pub fn set_i32(&mut self, name: &str, value: i32) -> Result<(), Error> {
+         let name = try!(CString::new(name));
+         unsafe {
+             try_call!(raw::git_config_set_int32(self.raw, name, value));
+         }
+         Ok(())
+     }
+
+     /// Set the value of an integer config variable in the config file with the
+     /// highest level (usually the local one).
+     pub fn set_i64(&mut self, name: &str, value: i64) -> Result<(), Error> {
+         let name = try!(CString::new(name));
+         unsafe {
+             try_call!(raw::git_config_set_int64(self.raw, name, value));
+         }
+         Ok(())
+     }
+
+     /// Set the value of a multivar config variable in the config file with the
+     /// highest level (usually the local one).
+     pub fn set_multivar(&mut self, name: &str, regexp: &str, value: &str) -> Result<(), Error> {
+         let name = try!(CString::new(name));
+         let regexp = try!(CString::new(regexp));
+         let value = try!(CString::new(value));
+         unsafe {
+             try_call!(raw::git_config_set_multivar(self.raw, name, regexp, value));
+         }
+         Ok(())
+     }
+
+     /// Set the value of a string config variable in the config file with the
+     /// highest level (usually the local one).
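+     ///
+     /// # Example
+     ///
+     /// A minimal sketch; the on-disk config path used below is only a
+     /// placeholder, not a file assumed to exist:
+     ///
+     /// ```no_run
+     /// use std::path::Path;
+     /// use git2::Config;
+     ///
+     /// let mut cfg = Config::open(Path::new("/tmp/example-gitconfig")).unwrap();
+     /// cfg.set_str("user.name", "Example User").unwrap();
+     /// assert_eq!(cfg.get_string("user.name").unwrap(), "Example User");
+     /// ```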
+ pub fn set_str(&mut self, name: &str, value: &str) -> Result<(), Error> { + let name = try!(CString::new(name)); + let value = try!(CString::new(value)); + unsafe { + try_call!(raw::git_config_set_string(self.raw, name, value)); + } + Ok(()) + } + + /// Create a snapshot of the configuration + /// + /// Create a snapshot of the current state of a configuration, which allows + /// you to look into a consistent view of the configuration for looking up + /// complex values (e.g. a remote, submodule). + pub fn snapshot(&mut self) -> Result { + let mut ret = ptr::null_mut(); + unsafe { + try_call!(raw::git_config_snapshot(&mut ret, self.raw)); + Ok(Binding::from_raw(ret)) + } + } + + /// Parse a string as a bool. + /// Interprets "true", "yes", "on", 1, or any non-zero number as true. + /// Interprets "false", "no", "off", 0, or an empty string as false. + pub fn parse_bool(s: S) -> Result { + let s = try!(s.into_c_string()); + let mut out = 0; + ::init(); + unsafe { + try_call!(raw::git_config_parse_bool(&mut out, s)); + } + Ok(out != 0) + } + + /// Parse a string as an i32; handles suffixes like k, M, or G, and + /// multiplies by the appropriate power of 1024. + pub fn parse_i32(s: S) -> Result { + let s = try!(s.into_c_string()); + let mut out = 0; + ::init(); + unsafe { + try_call!(raw::git_config_parse_int32(&mut out, s)); + } + Ok(out) + } + + /// Parse a string as an i64; handles suffixes like k, M, or G, and + /// multiplies by the appropriate power of 1024. + pub fn parse_i64(s: S) -> Result { + let s = try!(s.into_c_string()); + let mut out = 0; + ::init(); + unsafe { + try_call!(raw::git_config_parse_int64(&mut out, s)); + } + Ok(out) + } +} + +impl Binding for Config { + type Raw = *mut raw::git_config; + unsafe fn from_raw(raw: *mut raw::git_config) -> Config { + Config { raw: raw } + } + fn raw(&self) -> *mut raw::git_config { self.raw } +} + +impl Drop for Config { + fn drop(&mut self) { + unsafe { raw::git_config_free(self.raw) } + } +} + +impl<'cfg> ConfigEntry<'cfg> { + /// Gets the name of this entry. + /// + /// May return `None` if the name is not valid utf-8 + pub fn name(&self) -> Option<&str> { str::from_utf8(self.name_bytes()).ok() } + + /// Gets the name of this entry as a byte slice. + pub fn name_bytes(&self) -> &[u8] { + unsafe { ::opt_bytes(self, (*self.raw).name).unwrap() } + } + + /// Gets the value of this entry. + /// + /// May return `None` if the value is not valid utf-8 + pub fn value(&self) -> Option<&str> { str::from_utf8(self.value_bytes()).ok() } + + /// Gets the value of this entry as a byte slice. + pub fn value_bytes(&self) -> &[u8] { + unsafe { ::opt_bytes(self, (*self.raw).value).unwrap() } + } + + /// Gets the configuration level of this entry. 
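+ ///
+ /// # Example
+ ///
+ /// A minimal sketch, assuming the variable `user.name` is set in one of the
+ /// discovered configuration files:
+ ///
+ /// ```no_run
+ /// use git2::Config;
+ ///
+ /// let cfg = Config::open_default().unwrap();
+ /// let entry = cfg.get_entry("user.name").unwrap();
+ /// let level = entry.level(); // e.g. ConfigLevel::Global for ~/.gitconfig
+ /// println!("{:?}", entry.value());
+ /// ```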
+ pub fn level(&self) -> ConfigLevel { + unsafe { ConfigLevel::from_raw((*self.raw).level) } + } + + /// Depth of includes where this variable was found + pub fn include_depth(&self) -> u32 { + unsafe { (*self.raw).include_depth as u32 } + } +} + +impl<'cfg> Binding for ConfigEntry<'cfg> { + type Raw = *mut raw::git_config_entry; + + unsafe fn from_raw(raw: *mut raw::git_config_entry) + -> ConfigEntry<'cfg> { + ConfigEntry { + raw: raw, + _marker: marker::PhantomData, + owned: true, + } + } + fn raw(&self) -> *mut raw::git_config_entry { self.raw } +} + +impl<'cfg> Binding for ConfigEntries<'cfg> { + type Raw = *mut raw::git_config_iterator; + + unsafe fn from_raw(raw: *mut raw::git_config_iterator) + -> ConfigEntries<'cfg> { + ConfigEntries { + raw: raw, + _marker: marker::PhantomData, + } + } + fn raw(&self) -> *mut raw::git_config_iterator { self.raw } +} + +// entries are only valid until the iterator is freed, so this impl is for +// `&'b T` instead of `T` to have a lifetime to tie them to. +// +// It's also not implemented for `&'b mut T` so we can have multiple entries +// (ok). +impl<'cfg, 'b> Iterator for &'b ConfigEntries<'cfg> { + type Item = Result, Error>; + fn next(&mut self) -> Option, Error>> { + let mut raw = ptr::null_mut(); + unsafe { + try_call_iter!(raw::git_config_next(&mut raw, self.raw)); + Some(Ok(ConfigEntry { + owned: false, + raw: raw, + _marker: marker::PhantomData, + })) + } + } +} + +impl<'cfg> Drop for ConfigEntries<'cfg> { + fn drop(&mut self) { + unsafe { raw::git_config_iterator_free(self.raw) } + } +} + +impl<'cfg> Drop for ConfigEntry<'cfg> { + fn drop(&mut self) { + if self.owned { + unsafe { raw::git_config_entry_free(self.raw) } + } + } +} + +#[cfg(test)] +mod tests { + use std::fs::File; + use tempdir::TempDir; + + use Config; + + #[test] + fn smoke() { + let _cfg = Config::new().unwrap(); + let _ = Config::find_global(); + let _ = Config::find_system(); + let _ = Config::find_xdg(); + } + + #[test] + fn persisted() { + let td = TempDir::new("test").unwrap(); + let path = td.path().join("foo"); + File::create(&path).unwrap(); + + let mut cfg = Config::open(&path).unwrap(); + assert!(cfg.get_bool("foo.bar").is_err()); + cfg.set_bool("foo.k1", true).unwrap(); + cfg.set_i32("foo.k2", 1).unwrap(); + cfg.set_i64("foo.k3", 2).unwrap(); + cfg.set_str("foo.k4", "bar").unwrap(); + cfg.snapshot().unwrap(); + drop(cfg); + + let cfg = Config::open(&path).unwrap().snapshot().unwrap(); + assert_eq!(cfg.get_bool("foo.k1").unwrap(), true); + assert_eq!(cfg.get_i32("foo.k2").unwrap(), 1); + assert_eq!(cfg.get_i64("foo.k3").unwrap(), 2); + assert_eq!(cfg.get_str("foo.k4").unwrap(), "bar"); + + for entry in &cfg.entries(None).unwrap() { + let entry = entry.unwrap(); + entry.name(); + entry.value(); + entry.level(); + } + } + + #[test] + fn multivar() { + let td = TempDir::new("test").unwrap(); + let path = td.path().join("foo"); + File::create(&path).unwrap(); + + let mut cfg = Config::open(&path).unwrap(); + cfg.set_multivar("foo.bar", "^$", "baz").unwrap(); + cfg.set_multivar("foo.bar", "^$", "qux").unwrap(); + + let mut values: Vec = cfg.entries(None) + .unwrap() + .into_iter() + .map(|entry| entry.unwrap().value().unwrap().into()) + .collect(); + values.sort(); + assert_eq!(values, ["baz", "qux"]); + + cfg.remove_multivar("foo.bar", ".*").unwrap(); + + assert_eq!(cfg.entries(None).unwrap().count(), 0); + } + + #[test] + fn parse() { + assert_eq!(Config::parse_bool("").unwrap(), false); + assert_eq!(Config::parse_bool("false").unwrap(), false); + 
assert_eq!(Config::parse_bool("no").unwrap(), false); + assert_eq!(Config::parse_bool("off").unwrap(), false); + assert_eq!(Config::parse_bool("0").unwrap(), false); + + assert_eq!(Config::parse_bool("true").unwrap(), true); + assert_eq!(Config::parse_bool("yes").unwrap(), true); + assert_eq!(Config::parse_bool("on").unwrap(), true); + assert_eq!(Config::parse_bool("1").unwrap(), true); + assert_eq!(Config::parse_bool("42").unwrap(), true); + + assert!(Config::parse_bool(" ").is_err()); + assert!(Config::parse_bool("some-string").is_err()); + assert!(Config::parse_bool("-").is_err()); + + assert_eq!(Config::parse_i32("0").unwrap(), 0); + assert_eq!(Config::parse_i32("1").unwrap(), 1); + assert_eq!(Config::parse_i32("100").unwrap(), 100); + assert_eq!(Config::parse_i32("-1").unwrap(), -1); + assert_eq!(Config::parse_i32("-100").unwrap(), -100); + assert_eq!(Config::parse_i32("1k").unwrap(), 1024); + assert_eq!(Config::parse_i32("4k").unwrap(), 4096); + assert_eq!(Config::parse_i32("1M").unwrap(), 1048576); + assert_eq!(Config::parse_i32("1G").unwrap(), 1024*1024*1024); + + assert_eq!(Config::parse_i64("0").unwrap(), 0); + assert_eq!(Config::parse_i64("1").unwrap(), 1); + assert_eq!(Config::parse_i64("100").unwrap(), 100); + assert_eq!(Config::parse_i64("-1").unwrap(), -1); + assert_eq!(Config::parse_i64("-100").unwrap(), -100); + assert_eq!(Config::parse_i64("1k").unwrap(), 1024); + assert_eq!(Config::parse_i64("4k").unwrap(), 4096); + assert_eq!(Config::parse_i64("1M").unwrap(), 1048576); + assert_eq!(Config::parse_i64("1G").unwrap(), 1024*1024*1024); + assert_eq!(Config::parse_i64("100G").unwrap(), 100*1024*1024*1024); + } +} diff --git a/git2/src/cred.rs b/git2/src/cred.rs new file mode 100644 index 000000000..29d514885 --- /dev/null +++ b/git2/src/cred.rs @@ -0,0 +1,577 @@ +use std::ffi::CString; +use std::io::Write; +use std::mem; +use std::path::Path; +use std::process::{Command, Stdio}; +use std::ptr; +use url; + +use {raw, Error, Config, IntoCString}; +use util::Binding; + +/// A structure to represent git credentials in libgit2. +pub struct Cred { + raw: *mut raw::git_cred, +} + +/// Management of the gitcredentials(7) interface. +pub struct CredentialHelper { + /// A public field representing the currently discovered username from + /// configuration. + pub username: Option, + protocol: Option, + host: Option, + url: String, + commands: Vec, +} + +impl Cred { + /// Create a "default" credential usable for Negotiate mechanisms like NTLM + /// or Kerberos authentication. + pub fn default() -> Result { + ::init(); + let mut out = ptr::null_mut(); + unsafe { + try_call!(raw::git_cred_default_new(&mut out)); + Ok(Binding::from_raw(out)) + } + } + + /// Create a new ssh key credential object used for querying an ssh-agent. + /// + /// The username specified is the username to authenticate. + pub fn ssh_key_from_agent(username: &str) -> Result { + ::init(); + let mut out = ptr::null_mut(); + let username = try!(CString::new(username)); + unsafe { + try_call!(raw::git_cred_ssh_key_from_agent(&mut out, username)); + Ok(Binding::from_raw(out)) + } + } + + /// Create a new passphrase-protected ssh key credential object. 
+     pub fn ssh_key(username: &str,
+                    publickey: Option<&Path>,
+                    privatekey: &Path,
+                    passphrase: Option<&str>) -> Result<Cred, Error> {
+         ::init();
+         let username = try!(CString::new(username));
+         let publickey = try!(::opt_cstr(publickey));
+         let privatekey = try!(privatekey.into_c_string());
+         let passphrase = try!(::opt_cstr(passphrase));
+         let mut out = ptr::null_mut();
+         unsafe {
+             try_call!(raw::git_cred_ssh_key_new(&mut out, username, publickey,
+                                                 privatekey, passphrase));
+             Ok(Binding::from_raw(out))
+         }
+     }
+
+     /// Create a new ssh key credential object reading the keys from memory.
+     pub fn ssh_key_from_memory(username: &str,
+                                publickey: Option<&str>,
+                                privatekey: &str,
+                                passphrase: Option<&str>) -> Result<Cred, Error> {
+         ::init();
+         let username = try!(CString::new(username));
+         let publickey = try!(::opt_cstr(publickey));
+         let privatekey = try!(CString::new(privatekey));
+         let passphrase = try!(::opt_cstr(passphrase));
+         let mut out = ptr::null_mut();
+         unsafe {
+             try_call!(raw::git_cred_ssh_key_memory_new(&mut out, username, publickey,
+                                                        privatekey, passphrase));
+             Ok(Binding::from_raw(out))
+         }
+     }
+
+     /// Create a new plain-text username and password credential object.
+     pub fn userpass_plaintext(username: &str,
+                               password: &str) -> Result<Cred, Error> {
+         ::init();
+         let username = try!(CString::new(username));
+         let password = try!(CString::new(password));
+         let mut out = ptr::null_mut();
+         unsafe {
+             try_call!(raw::git_cred_userpass_plaintext_new(&mut out, username,
+                                                            password));
+             Ok(Binding::from_raw(out))
+         }
+     }
+
+     /// Attempt to read `credential.helper` according to gitcredentials(7) [1]
+     ///
+     /// This function will attempt to parse the user's `credential.helper`
+     /// configuration, invoke the necessary processes, and read off what the
+     /// username/password should be for a particular url.
+     ///
+     /// The returned credential type will be a username/password credential if
+     /// successful.
+     ///
+     /// [1]: https://www.kernel.org/pub/software/scm/git/docs/gitcredentials.html
+     pub fn credential_helper(config: &Config,
+                              url: &str,
+                              username: Option<&str>)
+                              -> Result<Cred, Error> {
+         match CredentialHelper::new(url).config(config).username(username)
+                                 .execute() {
+             Some((username, password)) => {
+                 Cred::userpass_plaintext(&username, &password)
+             }
+             None => Err(Error::from_str("failed to acquire username/password \
+                                          from local configuration"))
+         }
+     }
+
+     /// Create a credential to specify a username.
+     ///
+     /// This is used with ssh authentication to query for the username if none
+     /// is specified in the url.
+     pub fn username(username: &str) -> Result<Cred, Error> {
+         ::init();
+         let username = try!(CString::new(username));
+         let mut out = ptr::null_mut();
+         unsafe {
+             try_call!(raw::git_cred_username_new(&mut out, username));
+             Ok(Binding::from_raw(out))
+         }
+     }
+
+     /// Check whether a credential object contains username information.
+     pub fn has_username(&self) -> bool {
+         unsafe { raw::git_cred_has_username(self.raw) == 1 }
+     }
+
+     /// Return the type of credentials that this object represents.
+ pub fn credtype(&self) -> raw::git_credtype_t { + unsafe { (*self.raw).credtype } + } + + /// Unwrap access to the underlying raw pointer, canceling the destructor + pub unsafe fn unwrap(mut self) -> *mut raw::git_cred { + mem::replace(&mut self.raw, ptr::null_mut()) + } +} + +impl Binding for Cred { + type Raw = *mut raw::git_cred; + + unsafe fn from_raw(raw: *mut raw::git_cred) -> Cred { + Cred { raw: raw } + } + fn raw(&self) -> *mut raw::git_cred { self.raw } +} + +impl Drop for Cred { + fn drop(&mut self) { + if !self.raw.is_null() { + unsafe { ((*self.raw).free)(self.raw) } + } + } +} + +impl CredentialHelper { + /// Create a new credential helper object which will be used to probe git's + /// local credential configuration. + /// + /// The url specified is the namespace on which this will query credentials. + /// Invalid urls are currently ignored. + pub fn new(url: &str) -> CredentialHelper { + let mut ret = CredentialHelper { + protocol: None, + host: None, + username: None, + url: url.to_string(), + commands: Vec::new(), + }; + + // Parse out the (protocol, host) if one is available + if let Ok(url) = url::Url::parse(url) { + if let Some(url::Host::Domain(s)) = url.host() { + ret.host = Some(s.to_string()); + } + ret.protocol = Some(url.scheme().to_string()) + } + ret + } + + /// Set the username that this credential helper will query with. + /// + /// By default the username is `None`. + pub fn username(&mut self, username: Option<&str>) -> &mut CredentialHelper { + self.username = username.map(|s| s.to_string()); + self + } + + /// Query the specified configuration object to discover commands to + /// execute, usernames to query, etc. + pub fn config(&mut self, config: &Config) -> &mut CredentialHelper { + // Figure out the configured username/helper program. + // + // see http://git-scm.com/docs/gitcredentials.html#_configuration_options + // + // TODO: implement useHttpPath + if self.username.is_none() { + self.config_username(config); + } + self.config_helper(config); + self + } + + // Configure the queried username from `config` + fn config_username(&mut self, config: &Config) { + let key = self.exact_key("username"); + self.username = config.get_string(&key).ok().or_else(|| { + self.url_key("username").and_then(|s| { + config.get_string(&s).ok() + }) + }).or_else(|| { + config.get_string("credential.username").ok() + }) + } + + // Discover all `helper` directives from `config` + fn config_helper(&mut self, config: &Config) { + let exact = config.get_string(&self.exact_key("helper")); + self.add_command(exact.as_ref().ok().map(|s| &s[..])); + if let Some(key) = self.url_key("helper") { + let url = config.get_string(&key); + self.add_command(url.as_ref().ok().map(|s| &s[..])); + } + let global = config.get_string("credential.helper"); + self.add_command(global.as_ref().ok().map(|s| &s[..])); + } + + // Add a `helper` configured command to the list of commands to execute. 
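+ //
+ // For reference, the three forms handled below look like this (illustrative
+ // values only; `my-helper` is a hypothetical program):
+ //
+ //   helper = !f() { echo password=...; }; f    shell snippet, leading `!` stripped
+ //   helper = /usr/local/bin/my-helper          absolute path, quoted and run directly
+ //   helper = store                             expanded to `git credential-store`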
+ // + // see https://www.kernel.org/pub/software/scm/git/docs/technical + // /api-credentials.html#_credential_helpers + fn add_command(&mut self, cmd: Option<&str>) { + let cmd = match cmd { + Some("") | None => return, + Some(s) => s, + }; + + if cmd.starts_with('!') { + self.commands.push(cmd[1..].to_string()); + } else if cmd.starts_with('/') || cmd.starts_with('\\') || + cmd[1..].starts_with(":\\") { + self.commands.push(format!("\"{}\"", cmd)); + } else { + self.commands.push(format!("git credential-{}", cmd)); + } + } + + fn exact_key(&self, name: &str) -> String { + format!("credential.{}.{}", self.url, name) + } + + fn url_key(&self, name: &str) -> Option { + match (&self.host, &self.protocol) { + (&Some(ref host), &Some(ref protocol)) => { + Some(format!("credential.{}://{}.{}", protocol, host, name)) + } + _ => None + } + } + + /// Execute this helper, attempting to discover a username/password pair. + /// + /// All I/O errors are ignored, (to match git behavior), and this function + /// only succeeds if both a username and a password were found + pub fn execute(&self) -> Option<(String, String)> { + let mut username = self.username.clone(); + let mut password = None; + for cmd in &self.commands { + let (u, p) = self.execute_cmd(cmd, &username); + if u.is_some() && username.is_none() { + username = u; + } + if p.is_some() && password.is_none() { + password = p; + } + if username.is_some() && password.is_some() { break } + } + + match (username, password) { + (Some(u), Some(p)) => Some((u, p)), + _ => None, + } + } + + // Execute the given `cmd`, providing the appropriate variables on stdin and + // then afterwards parsing the output into the username/password on stdout. + fn execute_cmd(&self, cmd: &str, username: &Option) + -> (Option, Option) { + macro_rules! my_try( ($e:expr) => ( + match $e { + Ok(e) => e, + Err(e) => { + debug!("{} failed with {}", stringify!($e), e); + return (None, None) + } + } + ) ); + + // It looks like the `cmd` specification is typically bourne-shell-like + // syntax, so try that first. If that fails, though, we may be on a + // Windows machine for example where `sh` isn't actually available by + // default. Most credential helper configurations though are pretty + // simple (aka one or two space-separated strings) so also try to invoke + // the process directly. + // + // If that fails then it's up to the user to put `sh` in path and make + // sure it works. 
+ let mut c = Command::new("sh"); + c.arg("-c") + .arg(&format!("{} get", cmd)) + .stdin(Stdio::piped()) + .stdout(Stdio::piped()) + .stderr(Stdio::piped()); + let mut p = match c.spawn() { + Ok(p) => p, + Err(e) => { + debug!("`sh` failed to spawn: {}", e); + let mut parts = cmd.split_whitespace(); + let mut c = Command::new(parts.next().unwrap()); + for arg in parts { + c.arg(arg); + } + c.arg("get") + .stdin(Stdio::piped()) + .stdout(Stdio::piped()) + .stderr(Stdio::piped()); + match c.spawn() { + Ok(p) => p, + Err(e) => { + debug!("fallback of {:?} failed with {}", cmd, e); + return (None, None); + } + } + } + }; + + // Ignore write errors as the command may not actually be listening for + // stdin + { + let stdin = p.stdin.as_mut().unwrap(); + if let Some(ref p) = self.protocol { + let _ = writeln!(stdin, "protocol={}", p); + } + if let Some(ref p) = self.host { + let _ = writeln!(stdin, "host={}", p); + } + if let Some(ref p) = *username { + let _ = writeln!(stdin, "username={}", p); + } + } + let output = my_try!(p.wait_with_output()); + if !output.status.success() { + debug!("credential helper failed: {}\nstdout ---\n{}\nstdout ---\n{}", + output.status, + String::from_utf8_lossy(&output.stdout), + String::from_utf8_lossy(&output.stderr)); + return (None, None) + } + trace!("credential helper stderr ---\n{}", + String::from_utf8_lossy(&output.stderr)); + self.parse_output(output.stdout) + } + + // Parse the output of a command into the username/password found + fn parse_output(&self, output: Vec) -> (Option, Option) { + // Parse the output of the command, looking for username/password + let mut username = None; + let mut password = None; + for line in output.split(|t| *t == b'\n') { + let mut parts = line.splitn(2, |t| *t == b'='); + let key = parts.next().unwrap(); + let value = match parts.next() { + Some(s) => s, + None => { + trace!("ignoring output line: {}", String::from_utf8_lossy(line)); + continue + } + }; + let value = match String::from_utf8(value.to_vec()) { + Ok(s) => s, + Err(..) => continue, + }; + match key { + b"username" => username = Some(value), + b"password" => password = Some(value), + _ => {} + } + } + (username, password) + } +} + +#[cfg(all(test, feature = "unstable"))] +mod test { + use std::env; + use std::fs::File; + use std::io::prelude::*; + use std::path::Path; + use tempdir::TempDir; + + use {Cred, Config, CredentialHelper, ConfigLevel}; + + macro_rules! cfg( ($($k:expr => $v:expr),*) => ({ + let td = TempDir::new("git2-rs").unwrap(); + let mut cfg = Config::new().unwrap(); + cfg.add_file(&td.path().join("cfg"), ConfigLevel::Highest, false).unwrap(); + $(cfg.set_str($k, $v).unwrap();)* + cfg + }) ); + + #[test] + fn smoke() { + Cred::default().unwrap(); + } + + #[test] + fn credential_helper1() { + let cfg = cfg! { + "credential.helper" => "!f() { echo username=a; echo password=b; }; f" + }; + let (u, p) = CredentialHelper::new("https://example.com/foo/bar") + .config(&cfg) + .execute().unwrap(); + assert_eq!(u, "a"); + assert_eq!(p, "b"); + } + + #[test] + fn credential_helper2() { + let cfg = cfg! {}; + assert!(CredentialHelper::new("https://example.com/foo/bar") + .config(&cfg) + .execute().is_none()); + } + + #[test] + fn credential_helper3() { + let cfg = cfg! 
{ + "credential.https://example.com.helper" => + "!f() { echo username=c; }; f", + "credential.helper" => "!f() { echo username=a; echo password=b; }; f" + }; + let (u, p) = CredentialHelper::new("https://example.com/foo/bar") + .config(&cfg) + .execute().unwrap(); + assert_eq!(u, "c"); + assert_eq!(p, "b"); + } + + #[test] + fn credential_helper4() { + let td = TempDir::new("git2-rs").unwrap(); + let path = td.path().join("script"); + File::create(&path).unwrap().write(br"\ +#!/bin/sh +echo username=c +").unwrap(); + chmod(&path); + let cfg = cfg! { + "credential.https://example.com.helper" => + &path.display().to_string()[..], + "credential.helper" => "!f() { echo username=a; echo password=b; }; f" + }; + let (u, p) = CredentialHelper::new("https://example.com/foo/bar") + .config(&cfg) + .execute().unwrap(); + assert_eq!(u, "c"); + assert_eq!(p, "b"); + } + + #[test] + fn credential_helper5() { + let td = TempDir::new("git2-rs").unwrap(); + let path = td.path().join("git-credential-script"); + File::create(&path).unwrap().write(br"\ +#!/bin/sh +echo username=c +").unwrap(); + chmod(&path); + + let paths = env::var("PATH").unwrap(); + let paths = env::split_paths(&paths) + .chain(path.parent().map(|p| p.to_path_buf()).into_iter()); + env::set_var("PATH", &env::join_paths(paths).unwrap()); + + let cfg = cfg! { + "credential.https://example.com.helper" => "script", + "credential.helper" => "!f() { echo username=a; echo password=b; }; f" + }; + let (u, p) = CredentialHelper::new("https://example.com/foo/bar") + .config(&cfg) + .execute().unwrap(); + assert_eq!(u, "c"); + assert_eq!(p, "b"); + } + + #[test] + fn credential_helper6() { + let cfg = cfg! { + "credential.helper" => "" + }; + assert!(CredentialHelper::new("https://example.com/foo/bar") + .config(&cfg) + .execute().is_none()); + } + + #[test] + fn ssh_key_from_memory() { + let cred = Cred::ssh_key_from_memory( + "test", + Some("ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDByAO8uj+kXicj6C2ODMspgmUoVyl5eaw8vR6a1yEnFuJFzevabNlN6Ut+CPT3TRnYk5BW73pyXBtnSL2X95BOnbjMDXc4YIkgs3YYHWnxbqsD4Pj/RoGqhf+gwhOBtL0poh8tT8WqXZYxdJQKLQC7oBqf3ykCEYulE4oeRUmNh4IzEE+skD/zDkaJ+S1HRD8D8YCiTO01qQnSmoDFdmIZTi8MS8Cw+O/Qhym1271ThMlhD6PubSYJXfE6rVbE7A9RzH73A6MmKBlzK8VTb4SlNSrr/DOk+L0uq+wPkv+pm+D9WtxoqQ9yl6FaK1cPawa3+7yRNle3m+72KCtyMkQv"), + r#" + -----BEGIN RSA PRIVATE KEY----- + Proc-Type: 4,ENCRYPTED + DEK-Info: AES-128-CBC,818C7722D3B01F2161C2ACF6A5BBAAE8 + + 3Cht4QB3PcoQ0I55j1B3m2ZzIC/mrh+K5nQeA1Vy2GBTMyM7yqGHqTOv7qLhJscd + H+cB0Pm6yCr3lYuNrcKWOCUto+91P7ikyARruHVwyIxKdNx15uNulOzQJHQWNbA4 + RQHlhjON4atVo2FyJ6n+ujK6QiBg2PR5Vbbw/AtV6zBCFW3PhzDn+qqmHjpBFqj2 + vZUUe+MkDQcaF5J45XMHahhSdo/uKCDhfbylExp/+ACWkvxdPpsvcARM6X434ucD + aPY+4i0/JyLkdbm0GFN9/q3i53qf4kCBhojFl4AYJdGI0AzAgbdTXZ7EJHbAGZHS + os5K0oTwDVXMI0sSE2I/qHxaZZsDP1dOKq6di6SFPUp8liYimm7rNintRX88Gl2L + g1ko9abp/NlgD0YY/3mad+NNAISDL/YfXq2fklH3En3/7ZrOVZFKfZXwQwas5g+p + VQPKi3+ae74iOjLyuPDSc1ePmhUNYeP+9rLSc0wiaiHqls+2blPPDxAGMEo63kbz + YPVjdmuVX4VWnyEsfTxxJdFDYGSNh6rlrrO1RFrex7kJvpg5gTX4M/FT8TfCd7Hn + M6adXsLMqwu5tz8FuDmAtVdq8zdSrgZeAbpJ9D3EDOmZ70xz4XBL19ImxDp+Qqs2 + kQX7kobRzeeP2URfRoGr7XZikQWyQ2UASfPcQULY8R58QoZWWsQ4w51GZHg7TDnw + 1DRo/0OgkK7Gqf215nFmMpB4uyi58cq3WFwWQa1IqslkObpVgBQZcNZb/hKUYPGk + g4zehfIgAfCdnQHwZvQ6Fdzhcs3SZeO+zVyuiZN3Gsi9HU0/1vpAKiuuOzcG02vF + b6Y6hwsAA9yphF3atI+ARD4ZwXdDfzuGb3yJglMT3Fr/xuLwAvdchRo1spANKA0E + tT5okLrK0H4wnHvf2SniVVWRhmJis0lQo9LjGGwRIdsPpVnJSDvaISIVF+fHT90r + HvxN8zXI93x9jcPtwp7puQ1C7ehKJK10sZ71OLIZeuUgwt+5DRunqg6evPco9Go7 + 
UOGwcVhLY200KT+1k7zWzCS0yVQp2HRm6cxsZXAp4ClBSwIx15eIoLIrjZdJRjCq + COp6pZx1fnvJ9ERIvl5hon+Ty+renMcFKz2HmchC7egpcqIxW9Dsv6zjhHle6pxb + 37GaEKHF2KA3RN+dSV/K8n+C9Yent5tx5Y9a/pMcgRGtgu+G+nyFmkPKn5Zt39yX + qDpyM0LtbRVZPs+MgiqoGIwYc/ujoCq7GL38gezsBQoHaTt79yYBqCp6UR0LMuZ5 + f/7CtWqffgySfJ/0wjGidDAumDv8CK45AURpL/Z+tbFG3M9ar/LZz/Y6EyBcLtGY + Wwb4zs8zXIA0qHrjNTnPqHDvezziArYfgPjxCIHMZzms9Yn8+N02p39uIytqg434 + BAlCqZ7GYdDFfTpWIwX+segTK9ux0KdBqcQv+9Fwwjkq9KySnRKqNl7ZJcefFZJq + c6PA1iinZWBjuaO1HKx3PFulrl0bcpR9Kud1ZIyfnh5rwYN8UQkkcR/wZPla04TY + 8l5dq/LI/3G5sZXwUHKOcuQWTj7Saq7Q6gkKoMfqt0wC5bpZ1m17GHPoMz6GtX9O + -----END RSA PRIVATE KEY----- + "#, + Some("test123")); + assert!(cred.is_ok()); + } + + + #[cfg(unix)] + fn chmod(path: &Path) { + use std::os::unix::prelude::*; + use std::fs; + let mut perms = fs::metadata(path).unwrap().permissions(); + perms.set_mode(0o755); + fs::set_permissions(path, perms).unwrap(); + } + #[cfg(windows)] + fn chmod(_path: &Path) {} +} diff --git a/git2/src/describe.rs b/git2/src/describe.rs new file mode 100644 index 000000000..c06208871 --- /dev/null +++ b/git2/src/describe.rs @@ -0,0 +1,199 @@ +use std::marker; +use std::mem; +use std::ffi::CString; +use std::ptr; + +use libc::{c_uint, c_int}; + +use {raw, Repository, Error, Buf}; +use util::Binding; + +/// The result of a `describe` operation on either an `Describe` or a +/// `Repository`. +pub struct Describe<'repo> { + raw: *mut raw::git_describe_result, + _marker: marker::PhantomData<&'repo Repository>, +} + +/// Options which indicate how a `Describe` is created. +pub struct DescribeOptions { + raw: raw::git_describe_options, + pattern: CString, +} + +/// Options which can be used to customize how a description is formatted. +pub struct DescribeFormatOptions { + raw: raw::git_describe_format_options, + dirty_suffix: CString, +} + +impl<'repo> Describe<'repo> { + /// Prints this describe result, returning the result as a string. + pub fn format(&self, opts: Option<&DescribeFormatOptions>) + -> Result { + let buf = Buf::new(); + let opts = opts.map(|o| &o.raw as *const _).unwrap_or(ptr::null()); + unsafe { + try_call!(raw::git_describe_format(buf.raw(), self.raw, opts)); + } + Ok(String::from_utf8(buf.to_vec()).unwrap()) + } +} + +impl<'repo> Binding for Describe<'repo> { + type Raw = *mut raw::git_describe_result; + + unsafe fn from_raw(raw: *mut raw::git_describe_result) -> Describe<'repo> { + Describe { raw: raw, _marker: marker::PhantomData, } + } + fn raw(&self) -> *mut raw::git_describe_result { self.raw } +} + +impl<'repo> Drop for Describe<'repo> { + fn drop(&mut self) { + unsafe { raw::git_describe_result_free(self.raw) } + } +} + +impl Default for DescribeFormatOptions { + fn default() -> Self { + Self::new() + } +} + +impl DescribeFormatOptions { + /// Creates a new blank set of formatting options for a description. + pub fn new() -> DescribeFormatOptions { + let mut opts = DescribeFormatOptions { + raw: unsafe { mem::zeroed() }, + dirty_suffix: CString::new(Vec::new()).unwrap(), + }; + opts.raw.version = 1; + opts.raw.abbreviated_size = 7; + opts + } + + /// Sets the size of the abbreviated commit id to use. + /// + /// The value is the lower bound for the length of the abbreviated string, + /// and the default is 7. + pub fn abbreviated_size(&mut self, size: u32) -> &mut Self { + self.raw.abbreviated_size = size as c_uint; + self + } + + /// Sets whether or not the long format is used even when a shorter name + /// could be used. 
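+     ///
+     /// # Example
+     ///
+     /// A minimal sketch of building format options (the suffix value is
+     /// arbitrary); pass `Some(&fmt)` to `Describe::format` to apply them:
+     ///
+     /// ```no_run
+     /// use git2::DescribeFormatOptions;
+     ///
+     /// let mut fmt = DescribeFormatOptions::new();
+     /// fmt.always_use_long_format(true).dirty_suffix("-dirty");
+     /// ```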
+     pub fn always_use_long_format(&mut self, long: bool) -> &mut Self {
+         self.raw.always_use_long_format = long as c_int;
+         self
+     }
+
+     /// If the workdir is dirty and this is set, this string will be appended to
+     /// the description string.
+     pub fn dirty_suffix(&mut self, suffix: &str) -> &mut Self {
+         self.dirty_suffix = CString::new(suffix).unwrap();
+         self.raw.dirty_suffix = self.dirty_suffix.as_ptr();
+         self
+     }
+ }
+
+ impl Default for DescribeOptions {
+     fn default() -> Self {
+         Self::new()
+     }
+ }
+
+ impl DescribeOptions {
+     /// Creates a new blank set of options for a describe operation.
+     pub fn new() -> DescribeOptions {
+         let mut opts = DescribeOptions {
+             raw: unsafe { mem::zeroed() },
+             pattern: CString::new(Vec::new()).unwrap(),
+         };
+         opts.raw.version = 1;
+         opts.raw.max_candidates_tags = 10;
+         opts
+     }
+
+     #[allow(missing_docs)]
+     pub fn max_candidates_tags(&mut self, max: u32) -> &mut Self {
+         self.raw.max_candidates_tags = max as c_uint;
+         self
+     }
+
+     /// Sets the reference lookup strategy
+     ///
+     /// This behaves like the `--tags` option to git-describe.
+     pub fn describe_tags(&mut self) -> &mut Self {
+         self.raw.describe_strategy = raw::GIT_DESCRIBE_TAGS as c_uint;
+         self
+     }
+
+     /// Sets the reference lookup strategy
+     ///
+     /// This behaves like the `--all` option to git-describe.
+     pub fn describe_all(&mut self) -> &mut Self {
+         self.raw.describe_strategy = raw::GIT_DESCRIBE_ALL as c_uint;
+         self
+     }
+
+     /// Indicates when calculating the distance from the matching tag or
+     /// reference whether to only walk down the first-parent ancestry.
+     pub fn only_follow_first_parent(&mut self, follow: bool) -> &mut Self {
+         self.raw.only_follow_first_parent = follow as c_int;
+         self
+     }
+
+     /// If no matching tag or reference is found, a describe operation would
+     /// normally fail. This option indicates that it should instead fall back
+     /// to showing the full id of the commit.
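+     ///
+     /// # Example
+     ///
+     /// A minimal sketch of describing `HEAD` with this fallback enabled (the
+     /// repository path `"."` is just an assumption):
+     ///
+     /// ```no_run
+     /// use git2::{DescribeOptions, Repository};
+     ///
+     /// let repo = Repository::open(".").unwrap();
+     /// let mut opts = DescribeOptions::new();
+     /// opts.show_commit_oid_as_fallback(true);
+     /// let d = repo.describe(&opts).unwrap();
+     /// println!("{}", d.format(None).unwrap());
+     /// ```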
+ pub fn show_commit_oid_as_fallback(&mut self, show: bool) -> &mut Self { + self.raw.show_commit_oid_as_fallback = show as c_int; + self + } + + #[allow(missing_docs)] + pub fn pattern(&mut self, pattern: &str) -> &mut Self { + self.pattern = CString::new(pattern).unwrap(); + self.raw.pattern = self.pattern.as_ptr(); + self + } +} + +impl Binding for DescribeOptions { + type Raw = *mut raw::git_describe_options; + + unsafe fn from_raw(_raw: *mut raw::git_describe_options) + -> DescribeOptions { + panic!("unimplemened") + } + fn raw(&self) -> *mut raw::git_describe_options { + &self.raw as *const _ as *mut _ + } +} + +#[cfg(test)] +mod tests { + use DescribeOptions; + + #[test] + fn smoke() { + let (_td, repo) = ::test::repo_init(); + let head = t!(repo.head()).target().unwrap(); + + let d = t!(repo.describe(DescribeOptions::new() + .show_commit_oid_as_fallback(true))); + let id = head.to_string(); + assert_eq!(t!(d.format(None)), &id[..7]); + + let obj = t!(repo.find_object(head, None)); + let sig = t!(repo.signature()); + t!(repo.tag("foo", &obj, &sig, "message", true)); + let d = t!(repo.describe(&DescribeOptions::new())); + assert_eq!(t!(d.format(None)), "foo"); + + let d = t!(obj.describe(&DescribeOptions::new())); + assert_eq!(t!(d.format(None)), "foo"); + } +} diff --git a/git2/src/diff.rs b/git2/src/diff.rs new file mode 100644 index 000000000..dffd6117c --- /dev/null +++ b/git2/src/diff.rs @@ -0,0 +1,1258 @@ +use std::ffi::CString; +use std::marker; +use std::mem; +use std::ops::Range; +use std::path::Path; +use std::ptr; +use std::slice; +use libc::{c_char, size_t, c_void, c_int}; + +use {raw, panic, Buf, Delta, Oid, Repository, Error, DiffFormat}; +use {DiffStatsFormat, IntoCString}; +use util::{self, Binding}; + +/// The diff object that contains all individual file deltas. +/// +/// This is an opaque structure which will be allocated by one of the diff +/// generator functions on the `Repository` structure (e.g. `diff_tree_to_tree` +/// or other `diff_*` functions). +pub struct Diff<'repo> { + raw: *mut raw::git_diff, + _marker: marker::PhantomData<&'repo Repository>, +} + +unsafe impl<'repo> Send for Diff<'repo> {} + +/// Description of changes to one entry. +pub struct DiffDelta<'a> { + raw: *mut raw::git_diff_delta, + _marker: marker::PhantomData<&'a raw::git_diff_delta>, +} + +/// Description of one side of a delta. +/// +/// Although this is called a "file" it could represent a file, a symbolic +/// link, a submodule commit id, or even a tree (although that only happens if +/// you are tracking type changes or ignored/untracked directories). +pub struct DiffFile<'a> { + raw: *const raw::git_diff_file, + _marker: marker::PhantomData<&'a raw::git_diff_file>, +} + +/// Structure describing options about how the diff should be executed. +pub struct DiffOptions { + pathspec: Vec, + pathspec_ptrs: Vec<*const c_char>, + old_prefix: Option, + new_prefix: Option, + raw: raw::git_diff_options, +} + +/// Control behavior of rename and copy detection +pub struct DiffFindOptions { + raw: raw::git_diff_find_options, +} + +/// An iterator over the diffs in a delta +pub struct Deltas<'diff> { + range: Range, + diff: &'diff Diff<'diff>, +} + +/// Structure describing a line (or data span) of a diff. +pub struct DiffLine<'a> { + raw: *const raw::git_diff_line, + _marker: marker::PhantomData<&'a raw::git_diff_line>, +} + +/// Structure describing a hunk of a diff. 
+pub struct DiffHunk<'a> { + raw: *const raw::git_diff_hunk, + _marker: marker::PhantomData<&'a raw::git_diff_hunk>, +} + +/// Structure describing a hunk of a diff. +pub struct DiffStats { + raw: *mut raw::git_diff_stats, +} + +/// Structure describing the binary contents of a diff. +pub struct DiffBinary<'a> { + raw: *const raw::git_diff_binary, + _marker: marker::PhantomData<&'a raw::git_diff_binary>, +} + +/// The contents of one of the files in a binary diff. +pub struct DiffBinaryFile<'a> { + raw: *const raw::git_diff_binary_file, + _marker: marker::PhantomData<&'a raw::git_diff_binary_file>, +} + +/// When producing a binary diff, the binary data returned will be +/// either the deflated full ("literal") contents of the file, or +/// the deflated binary delta between the two sides (whichever is +/// smaller). +#[derive(Copy, Clone, Debug)] +pub enum DiffBinaryKind { + /// There is no binary delta + None, + /// The binary data is the literal contents of the file + Literal, + /// The binary data is the delta from one side to the other + Delta, +} + +type PrintCb<'a> = FnMut(DiffDelta, Option, DiffLine) -> bool + 'a; + +pub type FileCb<'a> = FnMut(DiffDelta, f32) -> bool + 'a; +pub type BinaryCb<'a> = FnMut(DiffDelta, DiffBinary) -> bool + 'a; +pub type HunkCb<'a> = FnMut(DiffDelta, DiffHunk) -> bool + 'a; +pub type LineCb<'a> = FnMut(DiffDelta, Option, DiffLine) -> bool + 'a; + +struct ForeachCallbacks<'a, 'b: 'a, 'c, 'd: 'c, 'e, 'f: 'e, 'g, 'h: 'g> { + file: &'a mut FileCb<'b>, + binary: Option<&'c mut BinaryCb<'d>>, + hunk: Option<&'e mut HunkCb<'f>>, + line: Option<&'g mut LineCb<'h>>, +} + +impl<'repo> Diff<'repo> { + /// Merge one diff into another. + /// + /// This merges items from the "from" list into the "self" list. The + /// resulting diff will have all items that appear in either list. + /// If an item appears in both lists, then it will be "merged" to appear + /// as if the old version was from the "onto" list and the new version + /// is from the "from" list (with the exception that if the item has a + /// pending DELETE in the middle, then it will show as deleted). + pub fn merge(&mut self, from: &Diff<'repo>) -> Result<(), Error> { + unsafe { try_call!(raw::git_diff_merge(self.raw, &*from.raw)); } + Ok(()) + } + + /// Returns an iterator over the deltas in this diff. + pub fn deltas(&self) -> Deltas { + let num_deltas = unsafe { raw::git_diff_num_deltas(&*self.raw) }; + Deltas { range: 0..(num_deltas as usize), diff: self } + } + + /// Return the diff delta for an entry in the diff list. + pub fn get_delta(&self, i: usize) -> Option { + unsafe { + let ptr = raw::git_diff_get_delta(&*self.raw, i as size_t); + Binding::from_raw_opt(ptr as *mut _) + } + } + + /// Check if deltas are sorted case sensitively or insensitively. + pub fn is_sorted_icase(&self) -> bool { + unsafe { raw::git_diff_is_sorted_icase(&*self.raw) == 1 } + } + + /// Iterate over a diff generating formatted text output. + /// + /// Returning `false` from the callback will terminate the iteration and + /// return an error from this function. + pub fn print(&self, format: DiffFormat, mut cb: F) -> Result<(), Error> + where F: FnMut(DiffDelta, + Option, + DiffLine) -> bool { + let mut cb: &mut PrintCb = &mut cb; + let ptr = &mut cb as *mut _; + unsafe { + try_call!(raw::git_diff_print(self.raw, format, print_cb, + ptr as *mut _)); + Ok(()) + } + } + + /// Loop over all deltas in a diff issuing callbacks. 
+ /// + /// Returning `false` from any callback will terminate the iteration and + /// return an error from this function. + pub fn foreach(&self, + file_cb: &mut FileCb, + binary_cb: Option<&mut BinaryCb>, + hunk_cb: Option<&mut HunkCb>, + line_cb: Option<&mut LineCb>) -> Result<(), Error> { + let mut cbs = ForeachCallbacks { + file: file_cb, + binary: binary_cb, + hunk: hunk_cb, + line: line_cb, + }; + let ptr = &mut cbs as *mut _; + unsafe { + let binary_cb_c = if cbs.binary.is_some() { + Some(binary_cb_c as raw::git_diff_binary_cb) + } else { + None + }; + let hunk_cb_c = if cbs.hunk.is_some() { + Some(hunk_cb_c as raw::git_diff_hunk_cb) + } else { + None + }; + let line_cb_c = if cbs.line.is_some() { + Some(line_cb_c as raw::git_diff_line_cb) + } else { + None + }; + try_call!(raw::git_diff_foreach(self.raw, file_cb_c, binary_cb_c, + hunk_cb_c, line_cb_c, + ptr as *mut _)); + Ok(()) + } + } + + /// Accumulate diff statistics for all patches. + pub fn stats(&self) -> Result { + let mut ret = ptr::null_mut(); + unsafe { + try_call!(raw::git_diff_get_stats(&mut ret, self.raw)); + Ok(Binding::from_raw(ret)) + } + } + + /// Transform a diff marking file renames, copies, etc. + /// + /// This modifies a diff in place, replacing old entries that look like + /// renames or copies with new entries reflecting those changes. This also + /// will, if requested, break modified files into add/remove pairs if the + /// amount of change is above a threshold. + pub fn find_similar(&mut self, opts: Option<&mut DiffFindOptions>) + -> Result<(), Error> { + let opts = opts.map(|opts| &opts.raw); + unsafe { try_call!(raw::git_diff_find_similar(self.raw, opts)); } + Ok(()) + } + + // TODO: num_deltas_of_type, format_email, find_similar +} + +pub extern fn print_cb(delta: *const raw::git_diff_delta, + hunk: *const raw::git_diff_hunk, + line: *const raw::git_diff_line, + data: *mut c_void) -> c_int { + unsafe { + let delta = Binding::from_raw(delta as *mut _); + let hunk = Binding::from_raw_opt(hunk); + let line = Binding::from_raw(line); + + let r = panic::wrap(|| { + let data = data as *mut &mut PrintCb; + (*data)(delta, hunk, line) + }); + if r == Some(true) {0} else {-1} + } +} + +extern fn file_cb_c(delta: *const raw::git_diff_delta, + progress: f32, + data: *mut c_void) -> c_int { + unsafe { + let delta = Binding::from_raw(delta as *mut _); + + let r = panic::wrap(|| { + let cbs = data as *mut ForeachCallbacks; + ((*cbs).file)(delta, progress) + }); + if r == Some(true) {0} else {-1} + } +} + +extern fn binary_cb_c(delta: *const raw::git_diff_delta, + binary: *const raw::git_diff_binary, + data: *mut c_void) -> c_int { + unsafe { + let delta = Binding::from_raw(delta as *mut _); + let binary = Binding::from_raw(binary); + + let r = panic::wrap(|| { + let cbs = data as *mut ForeachCallbacks; + match (*cbs).binary { + Some(ref mut cb) => cb(delta, binary), + None => false, + } + }); + if r == Some(true) {0} else {-1} + } +} + +extern fn hunk_cb_c(delta: *const raw::git_diff_delta, + hunk: *const raw::git_diff_hunk, + data: *mut c_void) -> c_int { + unsafe { + let delta = Binding::from_raw(delta as *mut _); + let hunk = Binding::from_raw(hunk); + + let r = panic::wrap(|| { + let cbs = data as *mut ForeachCallbacks; + match (*cbs).hunk { + Some(ref mut cb) => cb(delta, hunk), + None => false, + } + }); + if r == Some(true) {0} else {-1} + } +} + +extern fn line_cb_c(delta: *const raw::git_diff_delta, + hunk: *const raw::git_diff_hunk, + line: *const raw::git_diff_line, + data: *mut c_void) -> c_int { + 
unsafe { + let delta = Binding::from_raw(delta as *mut _); + let hunk = Binding::from_raw_opt(hunk); + let line = Binding::from_raw(line); + + let r = panic::wrap(|| { + let cbs = data as *mut ForeachCallbacks; + match (*cbs).line { + Some(ref mut cb) => cb(delta, hunk, line), + None => false, + } + }); + if r == Some(true) {0} else {-1} + } +} + + +impl<'repo> Binding for Diff<'repo> { + type Raw = *mut raw::git_diff; + unsafe fn from_raw(raw: *mut raw::git_diff) -> Diff<'repo> { + Diff { + raw: raw, + _marker: marker::PhantomData, + } + } + fn raw(&self) -> *mut raw::git_diff { self.raw } +} + +impl<'repo> Drop for Diff<'repo> { + fn drop(&mut self) { + unsafe { raw::git_diff_free(self.raw) } + } +} + +impl<'a> DiffDelta<'a> { + // TODO: expose when diffs are more exposed + // pub fn similarity(&self) -> u16 { + // unsafe { (*self.raw).similarity } + // } + + /// Returns the number of files in this delta. + pub fn nfiles(&self) -> u16 { + unsafe { (*self.raw).nfiles } + } + + /// Returns the status of this entry + /// + /// For more information, see `Delta`'s documentation + pub fn status(&self) -> Delta { + match unsafe { (*self.raw).status } { + raw::GIT_DELTA_UNMODIFIED => Delta::Unmodified, + raw::GIT_DELTA_ADDED => Delta::Added, + raw::GIT_DELTA_DELETED => Delta::Deleted, + raw::GIT_DELTA_MODIFIED => Delta::Modified, + raw::GIT_DELTA_RENAMED => Delta::Renamed, + raw::GIT_DELTA_COPIED => Delta::Copied, + raw::GIT_DELTA_IGNORED => Delta::Ignored, + raw::GIT_DELTA_UNTRACKED => Delta::Untracked, + raw::GIT_DELTA_TYPECHANGE => Delta::Typechange, + raw::GIT_DELTA_UNREADABLE => Delta::Unreadable, + raw::GIT_DELTA_CONFLICTED => Delta::Conflicted, + n => panic!("unknown diff status: {}", n), + } + } + + /// Return the file which represents the "from" side of the diff. + /// + /// What side this means depends on the function that was used to generate + /// the diff and will be documented on the function itself. + pub fn old_file(&self) -> DiffFile<'a> { + unsafe { Binding::from_raw(&(*self.raw).old_file as *const _) } + } + + /// Return the file which represents the "to" side of the diff. + /// + /// What side this means depends on the function that was used to generate + /// the diff and will be documented on the function itself. + pub fn new_file(&self) -> DiffFile<'a> { + unsafe { Binding::from_raw(&(*self.raw).new_file as *const _) } + } +} + +impl<'a> Binding for DiffDelta<'a> { + type Raw = *mut raw::git_diff_delta; + unsafe fn from_raw(raw: *mut raw::git_diff_delta) -> DiffDelta<'a> { + DiffDelta { + raw: raw, + _marker: marker::PhantomData, + } + } + fn raw(&self) -> *mut raw::git_diff_delta { self.raw } +} + +impl<'a> DiffFile<'a> { + /// Returns the Oid of this item. + /// + /// If this entry represents an absent side of a diff (e.g. the `old_file` + /// of a `Added` delta), then the oid returned will be zeroes. + pub fn id(&self) -> Oid { + unsafe { Binding::from_raw(&(*self.raw).id as *const _) } + } + + /// Returns the path, in bytes, of the entry relative to the working + /// directory of the repository. + pub fn path_bytes(&self) -> Option<&'a [u8]> { + static FOO: () = (); + unsafe { ::opt_bytes(&FOO, (*self.raw).path) } + } + + /// Returns the path of the entry relative to the working directory of the + /// repository. 
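+     ///
+     /// # Example
+     ///
+     /// A minimal sketch that lists the new-side path of every delta in a
+     /// workdir diff (the repository path `"."` is just an assumption):
+     ///
+     /// ```no_run
+     /// use git2::Repository;
+     ///
+     /// let repo = Repository::open(".").unwrap();
+     /// let diff = repo.diff_index_to_workdir(None, None).unwrap();
+     /// for delta in diff.deltas() {
+     ///     if let Some(path) = delta.new_file().path() {
+     ///         println!("{}", path.display());
+     ///     }
+     /// }
+     /// ```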
+     pub fn path(&self) -> Option<&'a Path> {
+         self.path_bytes().map(util::bytes2path)
+     }
+
+     /// Returns the size of this entry, in bytes
+     pub fn size(&self) -> u64 { unsafe { (*self.raw).size as u64 } }
+
+     // TODO: expose flags/mode
+ }
+
+ impl<'a> Binding for DiffFile<'a> {
+     type Raw = *const raw::git_diff_file;
+     unsafe fn from_raw(raw: *const raw::git_diff_file) -> DiffFile<'a> {
+         DiffFile {
+             raw: raw,
+             _marker: marker::PhantomData,
+         }
+     }
+     fn raw(&self) -> *const raw::git_diff_file { self.raw }
+ }
+
+ impl Default for DiffOptions {
+     fn default() -> Self {
+         Self::new()
+     }
+ }
+
+ impl DiffOptions {
+     /// Creates a new set of empty diff options.
+     ///
+     /// All flags and other options are defaulted to false or their otherwise
+     /// zero equivalents.
+     pub fn new() -> DiffOptions {
+         let mut opts = DiffOptions {
+             pathspec: Vec::new(),
+             pathspec_ptrs: Vec::new(),
+             raw: unsafe { mem::zeroed() },
+             old_prefix: None,
+             new_prefix: None,
+         };
+         assert_eq!(unsafe {
+             raw::git_diff_init_options(&mut opts.raw, 1)
+         }, 0);
+         opts
+     }
+
+     fn flag(&mut self, opt: i32, val: bool) -> &mut DiffOptions {
+         let opt = opt as u32;
+         if val {
+             self.raw.flags |= opt;
+         } else {
+             self.raw.flags &= !opt;
+         }
+         self
+     }
+
+     /// Flag indicating whether the sides of the diff will be reversed.
+     pub fn reverse(&mut self, reverse: bool) -> &mut DiffOptions {
+         self.flag(raw::GIT_DIFF_REVERSE, reverse)
+     }
+
+     /// Flag indicating whether ignored files are included.
+     pub fn include_ignored(&mut self, include: bool) -> &mut DiffOptions {
+         self.flag(raw::GIT_DIFF_INCLUDE_IGNORED, include)
+     }
+
+     /// Flag indicating whether ignored directories are traversed deeply or not.
+     pub fn recurse_ignored_dirs(&mut self, recurse: bool) -> &mut DiffOptions {
+         self.flag(raw::GIT_DIFF_RECURSE_IGNORED_DIRS, recurse)
+     }
+
+     /// Flag indicating whether untracked files are in the diff
+     pub fn include_untracked(&mut self, include: bool) -> &mut DiffOptions {
+         self.flag(raw::GIT_DIFF_INCLUDE_UNTRACKED, include)
+     }
+
+     /// Flag indicating whether untracked directories are deeply traversed or
+     /// not.
+     pub fn recurse_untracked_dirs(&mut self, recurse: bool) -> &mut DiffOptions {
+         self.flag(raw::GIT_DIFF_RECURSE_UNTRACKED_DIRS, recurse)
+     }
+
+     /// Flag indicating whether unmodified files are in the diff.
+     pub fn include_unmodified(&mut self, include: bool) -> &mut DiffOptions {
+         self.flag(raw::GIT_DIFF_INCLUDE_UNMODIFIED, include)
+     }
+
+     /// If enabled, then Typechange delta records are generated.
+     pub fn include_typechange(&mut self, include: bool) -> &mut DiffOptions {
+         self.flag(raw::GIT_DIFF_INCLUDE_TYPECHANGE, include)
+     }
+
+     /// Even with `include_typechange`, the tree returned generally shows a
+     /// deleted blob. This flag correctly labels the tree transitions as a
+     /// typechange record with the `new_file`'s mode set to tree.
+     ///
+     /// Note that the tree SHA will not be available.
+     pub fn include_typechange_trees(&mut self, include: bool) -> &mut DiffOptions {
+         self.flag(raw::GIT_DIFF_INCLUDE_TYPECHANGE_TREES, include)
+     }
+
+     /// Flag indicating whether file mode changes are ignored.
+     pub fn ignore_filemode(&mut self, ignore: bool) -> &mut DiffOptions {
+         self.flag(raw::GIT_DIFF_IGNORE_FILEMODE, ignore)
+     }
+
+     /// Flag indicating whether all submodules should be treated as unmodified.
+     pub fn ignore_submodules(&mut self, ignore: bool) -> &mut DiffOptions {
+         self.flag(raw::GIT_DIFF_IGNORE_SUBMODULES, ignore)
+     }
+
+     /// Flag indicating whether case insensitive filenames should be used.
+ pub fn ignore_case(&mut self, ignore: bool) -> &mut DiffOptions { + self.flag(raw::GIT_DIFF_IGNORE_CASE, ignore) + } + + /// If pathspecs are specified, this flag means that they should be applied + /// as an exact match instead of a fnmatch pattern. + pub fn disable_pathspec_match(&mut self, disable: bool) -> &mut DiffOptions { + self.flag(raw::GIT_DIFF_DISABLE_PATHSPEC_MATCH, disable) + } + + /// Disable updating the `binary` flag in delta records. This is useful when + /// iterating over a diff if you don't need hunk and data callbacks and want + /// to avoid having to load a file completely. + pub fn skip_binary_check(&mut self, skip: bool) -> &mut DiffOptions { + self.flag(raw::GIT_DIFF_SKIP_BINARY_CHECK, skip) + } + + /// When diff finds an untracked directory, to match the behavior of core + /// Git, it scans the contents for ignored and untracked files. If all + /// contents are ignored, then the directory is ignored; if any contents are + /// not ignored, then the directory is untracked. This is extra work that + /// may not matter in many cases. + /// + /// This flag turns off that scan and immediately labels an untracked + /// directory as untracked (changing the behavior to not match core git). + pub fn enable_fast_untracked_dirs(&mut self, enable: bool) + -> &mut DiffOptions { + self.flag(raw::GIT_DIFF_ENABLE_FAST_UNTRACKED_DIRS, enable) + } + + /// When diff finds a file in the working directory with stat information + /// different from the index, but the OID ends up being the same, write the + /// correct stat information into the index. Note: without this flag, diff + /// will always leave the index untouched. + pub fn update_index(&mut self, update: bool) -> &mut DiffOptions { + self.flag(raw::GIT_DIFF_UPDATE_INDEX, update) + } + + /// Include unreadable files in the diff + pub fn include_unreadable(&mut self, include: bool) -> &mut DiffOptions { + self.flag(raw::GIT_DIFF_INCLUDE_UNREADABLE, include) + } + + /// Include unreadable files in the diff + pub fn include_unreadable_as_untracked(&mut self, include: bool) + -> &mut DiffOptions { + self.flag(raw::GIT_DIFF_INCLUDE_UNREADABLE_AS_UNTRACKED, include) + } + + /// Treat all files as text, disabling binary attributes and detection. + pub fn force_text(&mut self, force: bool) -> &mut DiffOptions { + self.flag(raw::GIT_DIFF_FORCE_TEXT, force) + } + + /// Treat all files as binary, disabling text diffs + pub fn force_binary(&mut self, force: bool) -> &mut DiffOptions { + self.flag(raw::GIT_DIFF_FORCE_TEXT, force) + } + + /// Ignore all whitespace + pub fn ignore_whitespace(&mut self, ignore: bool) -> &mut DiffOptions { + self.flag(raw::GIT_DIFF_IGNORE_WHITESPACE, ignore) + } + + /// Ignore changes in the amount of whitespace + pub fn ignore_whitespace_change(&mut self, ignore: bool) -> &mut DiffOptions { + self.flag(raw::GIT_DIFF_IGNORE_WHITESPACE_CHANGE, ignore) + } + + /// Ignore whitespace at tend of line + pub fn ignore_whitespace_eol(&mut self, ignore: bool) -> &mut DiffOptions { + self.flag(raw::GIT_DIFF_IGNORE_WHITESPACE_EOL, ignore) + } + + /// When generating patch text, include the content of untracked files. + /// + /// This automatically turns on `include_untracked` but it does not turn on + /// `recurse_untracked_dirs`. Add that flag if you want the content of every + /// single untracked file. 
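+ ///
+ /// # Example
+ ///
+ /// A minimal sketch of a workdir diff that also shows untracked content
+ /// (the repository path `"."` is just an assumption):
+ ///
+ /// ```no_run
+ /// use git2::{DiffOptions, Repository};
+ ///
+ /// let repo = Repository::open(".").unwrap();
+ /// let mut opts = DiffOptions::new();
+ /// opts.show_untracked_content(true).recurse_untracked_dirs(true);
+ /// let diff = repo.diff_index_to_workdir(None, Some(&mut opts)).unwrap();
+ /// println!("{} file(s) changed", diff.stats().unwrap().files_changed());
+ /// ```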
+ pub fn show_untracked_content(&mut self, show: bool) -> &mut DiffOptions { + self.flag(raw::GIT_DIFF_SHOW_UNTRACKED_CONTENT, show) + } + + /// When generating output, include the names of unmodified files if they + /// are included in the `Diff`. Normally these are skipped in the formats + /// that list files (e.g. name-only, name-status, raw). Even with this these + /// will not be included in the patch format. + pub fn show_unmodified(&mut self, show: bool) -> &mut DiffOptions { + self.flag(raw::GIT_DIFF_SHOW_UNMODIFIED, show) + } + + /// Use the "patience diff" algorithm + pub fn patience(&mut self, patience: bool) -> &mut DiffOptions { + self.flag(raw::GIT_DIFF_PATIENCE, patience) + } + + /// Take extra time to find the minimal diff + pub fn minimal(&mut self, minimal: bool) -> &mut DiffOptions { + self.flag(raw::GIT_DIFF_MINIMAL, minimal) + } + + /// Include the necessary deflate/delta information so that `git-apply` can + /// apply given diff information to binary files. + pub fn show_binary(&mut self, show: bool) -> &mut DiffOptions { + self.flag(raw::GIT_DIFF_SHOW_BINARY, show) + } + + /// Use a heuristic that takes indentation and whitespace into account + /// which generally can produce better diffs when dealing with ambiguous + /// diff hunks. + pub fn indent_heuristic(&mut self, heuristic: bool) -> &mut DiffOptions { + self.flag(raw::GIT_DIFF_INDENT_HEURISTIC, heuristic) + } + + /// Set the number of unchanged lines that define the boundary of a hunk + /// (and to display before and after). + /// + /// The default value for this is 3. + pub fn context_lines(&mut self, lines: u32) -> &mut DiffOptions { + self.raw.context_lines = lines; + self + } + + /// Set the maximum number of unchanged lines between hunk boundaries before + /// the hunks will be merged into one. + /// + /// The default value for this is 0. + pub fn interhunk_lines(&mut self, lines: u32) -> &mut DiffOptions { + self.raw.interhunk_lines = lines; + self + } + + /// The default value for this is `core.abbrev` or 7 if unset. + pub fn id_abbrev(&mut self, abbrev: u16) -> &mut DiffOptions { + self.raw.id_abbrev = abbrev; + self + } + + /// Maximum size (in bytes) above which a blob will be marked as binary + /// automatically. + /// + /// A negative value will disable this entirely. + /// + /// The default value for this is 512MB. + pub fn max_size(&mut self, size: i64) -> &mut DiffOptions { + self.raw.max_size = size as raw::git_off_t; + self + } + + /// The virtual "directory" to prefix old file names with in hunk headers. + /// + /// The default value for this is "a". + pub fn old_prefix(&mut self, t: T) -> &mut DiffOptions { + self.old_prefix = Some(t.into_c_string().unwrap()); + self + } + + /// The virtual "directory" to prefix new file names with in hunk headers. + /// + /// The default value for this is "b". + pub fn new_prefix(&mut self, t: T) -> &mut DiffOptions { + self.new_prefix = Some(t.into_c_string().unwrap()); + self + } + + /// Add to the array of paths/fnmatch patterns to constrain the diff. + pub fn pathspec(&mut self, pathspec: T) + -> &mut DiffOptions { + let s = pathspec.into_c_string().unwrap(); + self.pathspec_ptrs.push(s.as_ptr()); + self.pathspec.push(s); + self + } + + /// Acquire a pointer to the underlying raw options. + /// + /// This function is unsafe as the pointer is only valid so long as this + /// structure is not moved, modified, or used elsewhere. 
+ pub unsafe fn raw(&mut self) -> *const raw::git_diff_options { + self.raw.old_prefix = self.old_prefix.as_ref().map(|s| s.as_ptr()) + .unwrap_or(ptr::null()); + self.raw.new_prefix = self.new_prefix.as_ref().map(|s| s.as_ptr()) + .unwrap_or(ptr::null()); + self.raw.pathspec.count = self.pathspec_ptrs.len() as size_t; + self.raw.pathspec.strings = self.pathspec_ptrs.as_ptr() as *mut _; + &self.raw as *const _ + } + + // TODO: expose ignore_submodules, notify_cb/notify_payload +} + +impl<'diff> Iterator for Deltas<'diff> { + type Item = DiffDelta<'diff>; + fn next(&mut self) -> Option> { + self.range.next().and_then(|i| self.diff.get_delta(i)) + } + fn size_hint(&self) -> (usize, Option) { self.range.size_hint() } +} +impl<'diff> DoubleEndedIterator for Deltas<'diff> { + fn next_back(&mut self) -> Option> { + self.range.next_back().and_then(|i| self.diff.get_delta(i)) + } +} +impl<'diff> ExactSizeIterator for Deltas<'diff> {} + +impl<'a> DiffLine<'a> { + /// Line number in old file or `None` for added line + pub fn old_lineno(&self) -> Option { + match unsafe { (*self.raw).old_lineno } { + n if n < 0 => None, + n => Some(n as u32), + } + } + + /// Line number in new file or `None` for deleted line + pub fn new_lineno(&self) -> Option { + match unsafe { (*self.raw).new_lineno } { + n if n < 0 => None, + n => Some(n as u32), + } + } + + /// Number of newline characters in content + pub fn num_lines(&self) -> u32 { + unsafe { (*self.raw).num_lines as u32 } + } + + /// Offset in the original file to the content + pub fn content_offset(&self) -> i64 { + unsafe { (*self.raw).content_offset as i64 } + } + + /// Content of this line as bytes. + pub fn content(&self) -> &[u8] { + unsafe { + slice::from_raw_parts((*self.raw).content as *const u8, + (*self.raw).content_len as usize) + } + } + + /// Sigil showing the origin of this `DiffLine`. 
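// Sketch of consuming the iterator and callback APIs defined here: `deltas()`
// yields per-file `DiffDelta` records, while line-level `DiffLine` values only
// arrive through the `foreach` line callback (as in the tests below). The
// helper name is hypothetical; `diff` is any previously generated `Diff`.
fn dump_diff(diff: &git2::Diff) -> Result<(), git2::Error> {
    for delta in diff.deltas() {
        if let Some(path) = delta.new_file().path() {
            println!("changed: {}", path.display());
        }
    }
    diff.foreach(
        &mut |_delta, _progress| true,      // file callback: keep iterating
        None,                               // no binary callback
        None,                               // no hunk callback
        Some(&mut |_delta, _hunk, line| {
            // origin() is the sigil documented above; content() is raw bytes.
            print!("{}{}", line.origin(), String::from_utf8_lossy(line.content()));
            true
        }),
    )
}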
+ /// + /// * ` ` - Line context + /// * `+` - Line addition + /// * `-` - Line deletion + /// * `=` - Context (End of file) + /// * `>` - Add (End of file) + /// * `<` - Remove (End of file) + /// * `F` - File header + /// * `H` - Hunk header + /// * `B` - Line binary + pub fn origin(&self) -> char { + match unsafe { (*self.raw).origin as raw::git_diff_line_t } { + raw::GIT_DIFF_LINE_CONTEXT => ' ', + raw::GIT_DIFF_LINE_ADDITION => '+', + raw::GIT_DIFF_LINE_DELETION => '-', + raw::GIT_DIFF_LINE_CONTEXT_EOFNL => '=', + raw::GIT_DIFF_LINE_ADD_EOFNL => '>', + raw::GIT_DIFF_LINE_DEL_EOFNL => '<', + raw::GIT_DIFF_LINE_FILE_HDR => 'F', + raw::GIT_DIFF_LINE_HUNK_HDR => 'H', + raw::GIT_DIFF_LINE_BINARY => 'B', + _ => ' ', + } + } +} + +impl<'a> Binding for DiffLine<'a> { + type Raw = *const raw::git_diff_line; + unsafe fn from_raw(raw: *const raw::git_diff_line) -> DiffLine<'a> { + DiffLine { + raw: raw, + _marker: marker::PhantomData, + } + } + fn raw(&self) -> *const raw::git_diff_line { self.raw } +} + +impl<'a> DiffHunk<'a> { + /// Starting line number in old_file + pub fn old_start(&self) -> u32 { + unsafe { (*self.raw).old_start as u32 } + } + + /// Number of lines in old_file + pub fn old_lines(&self) -> u32 { + unsafe { (*self.raw).old_lines as u32 } + } + + /// Starting line number in new_file + pub fn new_start(&self) -> u32 { + unsafe { (*self.raw).new_start as u32 } + } + + /// Number of lines in new_file + pub fn new_lines(&self) -> u32 { + unsafe { (*self.raw).new_lines as u32 } + } + + /// Header text + pub fn header(&self) -> &[u8] { + unsafe { + slice::from_raw_parts((*self.raw).header.as_ptr() as *const u8, + (*self.raw).header_len as usize) + } + } +} + +impl<'a> Binding for DiffHunk<'a> { + type Raw = *const raw::git_diff_hunk; + unsafe fn from_raw(raw: *const raw::git_diff_hunk) -> DiffHunk<'a> { + DiffHunk { + raw: raw, + _marker: marker::PhantomData, + } + } + fn raw(&self) -> *const raw::git_diff_hunk { self.raw } +} + +impl DiffStats { + /// Get the total number of files chaned in a diff. + pub fn files_changed(&self) -> usize { + unsafe { raw::git_diff_stats_files_changed(&*self.raw) as usize } + } + + /// Get the total number of insertions in a diff + pub fn insertions(&self) -> usize { + unsafe { raw::git_diff_stats_insertions(&*self.raw) as usize } + } + + /// Get the total number of deletions in a diff + pub fn deletions(&self) -> usize { + unsafe { raw::git_diff_stats_deletions(&*self.raw) as usize } + } + + /// Print diff statistics to a Buf + pub fn to_buf(&self, format: DiffStatsFormat, width: usize) + -> Result { + let buf = Buf::new(); + unsafe { + try_call!(raw::git_diff_stats_to_buf(buf.raw(), self.raw, + format.bits(), + width as size_t)); + } + Ok(buf) + } +} + +impl Binding for DiffStats { + type Raw = *mut raw::git_diff_stats; + + unsafe fn from_raw(raw: *mut raw::git_diff_stats) -> DiffStats { + DiffStats { raw: raw } + } + fn raw(&self) -> *mut raw::git_diff_stats { self.raw } +} + +impl Drop for DiffStats { + fn drop(&mut self) { + unsafe { raw::git_diff_stats_free(self.raw) } + } +} + +impl<'a> DiffBinary<'a> { + /// Returns whether there is data in this binary structure or not. + /// + /// If this is `true`, then this was produced and included binary content. + /// If this is `false` then this was generated knowing only that a binary + /// file changed but without providing the data, probably from a patch that + /// said `Binary files a/file.txt and b/file.txt differ`. 
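// A short sketch of the `DiffStats` accessors above, mirroring the smoke test
// at the end of this file; `diff` is any previously generated `Diff` and the
// helper name is illustrative.
fn print_shortstat(diff: &git2::Diff) -> Result<(), git2::Error> {
    let stats = diff.stats()?;
    println!("{} files changed, {} insertions(+), {} deletions(-)",
             stats.files_changed(),
             stats.insertions(),
             stats.deletions());
    Ok(())
}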
+ pub fn contains_data(&self) -> bool { + unsafe { (*self.raw).contains_data == 1 } + } + + /// The contents of the old file. + pub fn old_file(&self) -> DiffBinaryFile<'a> { + unsafe { Binding::from_raw(&(*self.raw).old_file as *const _) } + } + + /// The contents of the new file. + pub fn new_file(&self) -> DiffBinaryFile<'a> { + unsafe { Binding::from_raw(&(*self.raw).new_file as *const _) } + } +} + +impl<'a> Binding for DiffBinary<'a> { + type Raw = *const raw::git_diff_binary; + unsafe fn from_raw(raw: *const raw::git_diff_binary) -> DiffBinary<'a> { + DiffBinary { + raw: raw, + _marker: marker::PhantomData, + } + } + fn raw(&self) -> *const raw::git_diff_binary { self.raw } +} + +impl<'a> DiffBinaryFile<'a> { + /// The type of binary data for this file + pub fn kind(&self) -> DiffBinaryKind { + unsafe { Binding::from_raw((*self.raw).kind) } + } + + /// The binary data, deflated + pub fn data(&self) -> &[u8] { + unsafe { + slice::from_raw_parts((*self.raw).data as *const u8, + (*self.raw).datalen as usize) + } + } + + /// The length of the binary data after inflation + pub fn inflated_len(&self) -> usize { + unsafe { (*self.raw).inflatedlen as usize } + } + +} + +impl<'a> Binding for DiffBinaryFile<'a> { + type Raw = *const raw::git_diff_binary_file; + unsafe fn from_raw(raw: *const raw::git_diff_binary_file) -> DiffBinaryFile<'a> { + DiffBinaryFile { + raw: raw, + _marker: marker::PhantomData, + } + } + fn raw(&self) -> *const raw::git_diff_binary_file { self.raw } +} + +impl Binding for DiffBinaryKind { + type Raw = raw::git_diff_binary_t; + unsafe fn from_raw(raw: raw::git_diff_binary_t) -> DiffBinaryKind { + match raw { + raw::GIT_DIFF_BINARY_NONE => DiffBinaryKind::None, + raw::GIT_DIFF_BINARY_LITERAL => DiffBinaryKind::Literal, + raw::GIT_DIFF_BINARY_DELTA => DiffBinaryKind::Delta, + _ => panic!("Unknown git diff binary kind"), + } + } + fn raw(&self) -> raw::git_diff_binary_t { + match *self { + DiffBinaryKind::None => raw::GIT_DIFF_BINARY_NONE, + DiffBinaryKind::Literal => raw::GIT_DIFF_BINARY_LITERAL, + DiffBinaryKind::Delta => raw::GIT_DIFF_BINARY_DELTA, + } + } +} + +impl Default for DiffFindOptions { + fn default() -> Self { + Self::new() + } +} + +impl DiffFindOptions { + /// Creates a new set of empty diff find options. + /// + /// All flags and other options are defaulted to false or their otherwise + /// zero equivalents. + pub fn new() -> DiffFindOptions { + let mut opts = DiffFindOptions { + raw: unsafe { mem::zeroed() }, + }; + assert_eq!(unsafe { + raw::git_diff_find_init_options(&mut opts.raw, 1) + }, 0); + opts + } + + fn flag(&mut self, opt: u32, val: bool) -> &mut DiffFindOptions { + if val { + self.raw.flags |= opt; + } else { + self.raw.flags &= !opt; + } + self + } + + /// Reset all flags back to their unset state, indicating that + /// `diff.renames` should be used instead. This is overridden once any flag + /// is set. + pub fn by_config(&mut self) -> &mut DiffFindOptions { + self.flag(0xffffffff, false) + } + + /// Look for renames? + pub fn renames(&mut self, find: bool) -> &mut DiffFindOptions { + self.flag(raw::GIT_DIFF_FIND_RENAMES, find) + } + + /// Consider old side of modified for renames? + pub fn renames_from_rewrites(&mut self, find: bool) -> &mut DiffFindOptions { + self.flag(raw::GIT_DIFF_FIND_RENAMES_FROM_REWRITES, find) + } + + /// Look for copies? + pub fn copies(&mut self, find: bool) -> &mut DiffFindOptions { + self.flag(raw::GIT_DIFF_FIND_COPIES, find) + } + + /// Consider unmodified as copy sources? 
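// Sketch of reading `DiffBinary` payloads through the binary callback of
// `foreach`, as the last test in this file does. The diff must have been
// generated with `DiffOptions::show_binary(true)`; the helper name is made up.
fn collect_binary_payloads(diff: &git2::Diff) -> Result<Vec<Vec<u8>>, git2::Error> {
    let mut payloads = Vec::new();
    diff.foreach(
        &mut |_delta, _progress| true,
        Some(&mut |_delta, binary| {
            if binary.contains_data() {
                // data() is the deflated content; inflated_len() reports the
                // size it will have once inflated.
                payloads.push(binary.new_file().data().to_owned());
            }
            true
        }),
        None,
        None,
    )?;
    Ok(payloads)
}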
+ /// + /// For this to work correctly, use `include_unmodified` when the initial + /// diff is being generated. + pub fn copies_from_unmodified(&mut self, find: bool) + -> &mut DiffFindOptions { + self.flag(raw::GIT_DIFF_FIND_COPIES_FROM_UNMODIFIED, find) + } + + /// Mark significant rewrites for split. + pub fn rewrites(&mut self, find: bool) -> &mut DiffFindOptions { + self.flag(raw::GIT_DIFF_FIND_REWRITES, find) + } + + /// Actually split large rewrites into delete/add pairs + pub fn break_rewrites(&mut self, find: bool) -> &mut DiffFindOptions { + self.flag(raw::GIT_DIFF_BREAK_REWRITES, find) + } + + #[doc(hidden)] + pub fn break_rewries(&mut self, find: bool) -> &mut DiffFindOptions { + self.break_rewrites(find) + } + + /// Find renames/copies for untracked items in working directory. + /// + /// For this to work correctly use the `include_untracked` option when the + /// initial diff is being generated. + pub fn for_untracked(&mut self, find: bool) -> &mut DiffFindOptions { + self.flag(raw::GIT_DIFF_FIND_FOR_UNTRACKED, find) + } + + /// Turn on all finding features. + pub fn all(&mut self, find: bool) -> &mut DiffFindOptions { + self.flag(raw::GIT_DIFF_FIND_ALL, find) + } + + /// Measure similarity ignoring leading whitespace (default) + pub fn ignore_leading_whitespace(&mut self, ignore: bool) + -> &mut DiffFindOptions { + self.flag(raw::GIT_DIFF_FIND_IGNORE_LEADING_WHITESPACE, ignore) + } + + /// Measure similarity ignoring all whitespace + pub fn ignore_whitespace(&mut self, ignore: bool) -> &mut DiffFindOptions { + self.flag(raw::GIT_DIFF_FIND_IGNORE_WHITESPACE, ignore) + } + + /// Measure similarity including all data + pub fn dont_ignore_whitespace(&mut self, dont: bool) -> &mut DiffFindOptions { + self.flag(raw::GIT_DIFF_FIND_DONT_IGNORE_WHITESPACE, dont) + } + + /// Measure similarity only by comparing SHAs (fast and cheap) + pub fn exact_match_only(&mut self, exact: bool) -> &mut DiffFindOptions { + self.flag(raw::GIT_DIFF_FIND_EXACT_MATCH_ONLY, exact) + } + + /// Do not break rewrites unless they contribute to a rename. + /// + /// Normally, `break_rewrites` and `rewrites` will measure the + /// self-similarity of modified files and split the ones that have changed a + /// lot into a delete/add pair. Then the sides of that pair will be + /// considered candidates for rename and copy detection + /// + /// If you add this flag in and the split pair is not used for an actual + /// rename or copy, then the modified record will be restored to a regular + /// modified record instead of being split. + pub fn break_rewrites_for_renames_only(&mut self, b: bool) + -> &mut DiffFindOptions { + self.flag(raw::GIT_DIFF_BREAK_REWRITES_FOR_RENAMES_ONLY, b) + } + + /// Remove any unmodified deltas after find_similar is done. + /// + /// Using `copies_from_unmodified` to emulate the `--find-copies-harder` + /// behavior requires building a diff with the `include_unmodified` flag. If + /// you do not want unmodified records in the final result, pas this flag to + /// have them removed. 
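// Hedged sketch of wiring these options up. `DiffFindOptions` is consumed by
// `Diff::find_similar`, which is defined elsewhere in this module (assumed
// here); it rewrites the delta list in place so matching add/delete pairs
// become rename or copy records. Helper name and thresholds are illustrative.
fn detect_renames_and_copies(diff: &mut git2::Diff) -> Result<(), git2::Error> {
    let mut find = git2::DiffFindOptions::new();
    find.renames(true)
        .copies(true)
        .rename_threshold(60)   // default similarity is 50, see below
        .copy_threshold(60);
    diff.find_similar(Some(&mut find))
}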
+ pub fn remove_unmodified(&mut self, remove: bool) -> &mut DiffFindOptions { + self.flag(raw::GIT_DIFF_FIND_REMOVE_UNMODIFIED, remove) + } + + /// Similarity to consider a file renamed (default 50) + pub fn rename_threshold(&mut self, thresh: u16) -> &mut DiffFindOptions { + self.raw.rename_threshold = thresh; + self + } + + /// Similarity of modified to be glegible rename source (default 50) + pub fn rename_from_rewrite_threshold(&mut self, thresh: u16) + -> &mut DiffFindOptions { + self.raw.rename_from_rewrite_threshold = thresh; + self + } + + /// Similarity to consider a file copy (default 50) + pub fn copy_threshold(&mut self, thresh: u16) -> &mut DiffFindOptions { + self.raw.copy_threshold = thresh; + self + } + + /// Similarity to split modify into delete/add pair (default 60) + pub fn break_rewrite_threshold(&mut self, thresh: u16) + -> &mut DiffFindOptions { + self.raw.break_rewrite_threshold = thresh; + self + } + + /// Maximum similarity sources to examine for a file (somewhat like + /// git-diff's `-l` option or `diff.renameLimit` config) + /// + /// Defaults to 200 + pub fn rename_limit(&mut self, limit: usize) -> &mut DiffFindOptions { + self.raw.rename_limit = limit as size_t; + self + } + + // TODO: expose git_diff_similarity_metric +} + +#[cfg(test)] +mod tests { + use DiffOptions; + use std::fs::File; + use std::path::Path; + use std::borrow::Borrow; + use std::io::Write; + + #[test] + fn smoke() { + let (_td, repo) = ::test::repo_init(); + let diff = repo.diff_tree_to_workdir(None, None).unwrap(); + assert_eq!(diff.deltas().len(), 0); + let stats = diff.stats().unwrap(); + assert_eq!(stats.insertions(), 0); + assert_eq!(stats.deletions(), 0); + assert_eq!(stats.files_changed(), 0); + } + + #[test] + fn foreach_smoke() { + let (_td, repo) = ::test::repo_init(); + let diff = t!(repo.diff_tree_to_workdir(None, None)); + let mut count = 0; + t!(diff.foreach(&mut |_file, _progress| { count = count + 1; true }, + None, None, None)); + assert_eq!(count, 0); + } + + #[test] + fn foreach_file_only() { + let path = Path::new("foo"); + let (td, repo) = ::test::repo_init(); + t!(t!(File::create(&td.path().join(path))).write_all(b"bar")); + let mut opts = DiffOptions::new(); + opts.include_untracked(true); + let diff = t!(repo.diff_tree_to_workdir(None, Some(&mut opts))); + let mut count = 0; + let mut result = None; + t!(diff.foreach(&mut |file, _progress| { + count = count + 1; + result = file.new_file().path().map(ToOwned::to_owned); + true + }, None, None, None)); + assert_eq!(result.as_ref().map(Borrow::borrow), Some(path)); + assert_eq!(count, 1); + } + + #[test] + fn foreach_file_and_hunk() { + let path = Path::new("foo"); + let (td, repo) = ::test::repo_init(); + t!(t!(File::create(&td.path().join(path))).write_all(b"bar")); + let mut index = t!(repo.index()); + t!(index.add_path(path)); + let mut opts = DiffOptions::new(); + opts.include_untracked(true); + let diff = t!(repo.diff_tree_to_index(None, Some(&index), + Some(&mut opts))); + let mut new_lines = 0; + t!(diff.foreach( + &mut |_file, _progress| { true }, + None, + Some(&mut |_file, hunk| { + new_lines = hunk.new_lines(); + true + }), + None)); + assert_eq!(new_lines, 1); + } + + #[test] + fn foreach_all_callbacks() { + let fib = vec![0, 1, 1, 2, 3, 5, 8]; + // Verified with a node implementation of deflate, might be worth + // adding a deflate lib to do this inline here. 
+ let deflated_fib = vec![120, 156, 99, 96, 100, 100, 98, 102, 229, 0, 0, + 0, 53, 0, 21]; + let foo_path = Path::new("foo"); + let bin_path = Path::new("bin"); + let (td, repo) = ::test::repo_init(); + t!(t!(File::create(&td.path().join(foo_path))).write_all(b"bar\n")); + t!(t!(File::create(&td.path().join(bin_path))).write_all(&fib)); + let mut index = t!(repo.index()); + t!(index.add_path(foo_path)); + t!(index.add_path(bin_path)); + let mut opts = DiffOptions::new(); + opts.include_untracked(true).show_binary(true); + let diff = t!(repo.diff_tree_to_index(None, Some(&index), + Some(&mut opts))); + let mut bin_content = None; + let mut new_lines = 0; + let mut line_content = None; + t!(diff.foreach( + &mut |_file, _progress| { true }, + Some(&mut |_file, binary| { + bin_content = Some(binary.new_file().data().to_owned()); + true + }), + Some(&mut |_file, hunk| { + new_lines = hunk.new_lines(); + true + }), + Some(&mut |_file, _hunk, line| { + line_content = String::from_utf8(line.content().into()).ok(); + true + }))); + assert_eq!(bin_content, Some(deflated_fib)); + assert_eq!(new_lines, 1); + assert_eq!(line_content, Some("bar\n".to_string())); + } +} diff --git a/git2/src/error.rs b/git2/src/error.rs new file mode 100644 index 000000000..1e92b23a0 --- /dev/null +++ b/git2/src/error.rs @@ -0,0 +1,284 @@ +use std::env::JoinPathsError; +use std::ffi::{CStr, NulError}; +use std::error; +use std::fmt; +use std::str; +use libc::c_int; + +use {raw, ErrorClass, ErrorCode}; + +/// A structure to represent errors coming out of libgit2. +#[derive(Debug,PartialEq)] +pub struct Error { + code: c_int, + klass: c_int, + message: String, +} + +impl Error { + /// Returns the last error that happened with the code specified by `code`. + /// + /// The `code` argument typically comes from the return value of a function + /// call. This code will later be returned from the `code` function. + /// + /// Historically this function returned `Some` or `None` based on the return + /// value of `giterr_last` but nowadays it always returns `Some` so it's + /// safe to unwrap the return value. This API will change in the next major + /// version. + pub fn last_error(code: c_int) -> Option { + ::init(); + unsafe { + // Note that whenever libgit2 returns an error any negative value + // indicates that an error happened. Auxiliary information is + // *usually* in `giterr_last` but unfortunately that's not always + // the case. Sometimes a negative error code is returned from + // libgit2 *without* calling `giterr_set` internally to configure + // the error. + // + // To handle this case and hopefully provide better error messages + // on our end we unconditionally call `giterr_clear` when we're done + // with an error. This is an attempt to clear it as aggressively as + // possible when we can to ensure that error information from one + // api invocation doesn't leak over to the next api invocation. + // + // Additionally if `giterr_last` returns null then we returned a + // canned error out. 
+ let ptr = raw::giterr_last(); + let err = if ptr.is_null() { + let mut error = Error::from_str("an unknown git error occurred"); + error.code = code; + error + } else { + Error::from_raw(code, ptr) + }; + raw::giterr_clear(); + Some(err) + } + } + + unsafe fn from_raw(code: c_int, ptr: *const raw::git_error) -> Error { + let msg = CStr::from_ptr((*ptr).message as *const _).to_bytes(); + let msg = String::from_utf8_lossy(msg).into_owned(); + Error { code: code, klass: (*ptr).klass, message: msg } + } + + /// Creates a new error from the given string as the error. + /// + /// The error returned will have the code `GIT_ERROR` and the class + /// `GITERR_NONE`. + pub fn from_str(s: &str) -> Error { + Error { + code: raw::GIT_ERROR as c_int, + klass: raw::GITERR_NONE as c_int, + message: s.to_string(), + } + } + + /// Return the error code associated with this error. + /// + /// An error code is intended to be programmatically actionable most of the + /// time. For example the code `GIT_EAGAIN` indicates that an error could be + /// fixed by trying again, while the code `GIT_ERROR` is more bland and + /// doesn't convey anything in particular. + pub fn code(&self) -> ErrorCode { + match self.raw_code() { + raw::GIT_OK => super::ErrorCode::GenericError, + raw::GIT_ERROR => super::ErrorCode::GenericError, + raw::GIT_ENOTFOUND => super::ErrorCode::NotFound, + raw::GIT_EEXISTS => super::ErrorCode::Exists, + raw::GIT_EAMBIGUOUS => super::ErrorCode::Ambiguous, + raw::GIT_EBUFS => super::ErrorCode::BufSize, + raw::GIT_EUSER => super::ErrorCode::User, + raw::GIT_EBAREREPO => super::ErrorCode::BareRepo, + raw::GIT_EUNBORNBRANCH => super::ErrorCode::UnbornBranch, + raw::GIT_EUNMERGED => super::ErrorCode::Unmerged, + raw::GIT_ENONFASTFORWARD => super::ErrorCode::NotFastForward, + raw::GIT_EINVALIDSPEC => super::ErrorCode::InvalidSpec, + raw::GIT_ECONFLICT => super::ErrorCode::Conflict, + raw::GIT_ELOCKED => super::ErrorCode::Locked, + raw::GIT_EMODIFIED => super::ErrorCode::Modified, + raw::GIT_PASSTHROUGH => super::ErrorCode::GenericError, + raw::GIT_ITEROVER => super::ErrorCode::GenericError, + raw::GIT_EAUTH => super::ErrorCode::Auth, + raw::GIT_ECERTIFICATE => super::ErrorCode::Certificate, + raw::GIT_EAPPLIED => super::ErrorCode::Applied, + raw::GIT_EPEEL => super::ErrorCode::Peel, + raw::GIT_EEOF => super::ErrorCode::Eof, + raw::GIT_EINVALID => super::ErrorCode::Invalid, + raw::GIT_EUNCOMMITTED => super::ErrorCode::Uncommitted, + raw::GIT_EDIRECTORY => super::ErrorCode::Directory, + _ => super::ErrorCode::GenericError, + } + } + + /// Return the error class associated with this error. + /// + /// Error classes are in general mostly just informative. For example the + /// class will show up in the error message but otherwise an error class is + /// typically not directly actionable. 
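// Sketch of acting on an `Error`, following the test at the bottom of this
// file: a missing submodule reports `ErrorCode::NotFound` in the
// `ErrorClass::Submodule` class. `repo` is assumed to be an open repository;
// the helper name is illustrative.
fn report_submodule_lookup(repo: &git2::Repository) {
    match repo.find_submodule("does_not_exist") {
        Ok(_) => println!("submodule present"),
        Err(e) => match e.code() {
            git2::ErrorCode::NotFound => println!("no such submodule: {}", e.message()),
            _ => println!("lookup failed ({:?}): {}", e.class(), e),
        },
    }
}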
+ pub fn class(&self) -> ErrorClass { + match self.raw_class() { + raw::GITERR_NONE => super::ErrorClass::None, + raw::GITERR_NOMEMORY => super::ErrorClass::NoMemory, + raw::GITERR_OS => super::ErrorClass::Os, + raw::GITERR_INVALID => super::ErrorClass::Invalid, + raw::GITERR_REFERENCE => super::ErrorClass::Reference, + raw::GITERR_ZLIB => super::ErrorClass::Zlib, + raw::GITERR_REPOSITORY => super::ErrorClass::Repository, + raw::GITERR_CONFIG => super::ErrorClass::Config, + raw::GITERR_REGEX => super::ErrorClass::Regex, + raw::GITERR_ODB => super::ErrorClass::Odb, + raw::GITERR_INDEX => super::ErrorClass::Index, + raw::GITERR_OBJECT => super::ErrorClass::Object, + raw::GITERR_NET => super::ErrorClass::Net, + raw::GITERR_TAG => super::ErrorClass::Tag, + raw::GITERR_TREE => super::ErrorClass::Tree, + raw::GITERR_INDEXER => super::ErrorClass::Indexer, + raw::GITERR_SSL => super::ErrorClass::Ssl, + raw::GITERR_SUBMODULE => super::ErrorClass::Submodule, + raw::GITERR_THREAD => super::ErrorClass::Thread, + raw::GITERR_STASH => super::ErrorClass::Stash, + raw::GITERR_CHECKOUT => super::ErrorClass::Checkout, + raw::GITERR_FETCHHEAD => super::ErrorClass::FetchHead, + raw::GITERR_MERGE => super::ErrorClass::Merge, + raw::GITERR_SSH => super::ErrorClass::Ssh, + raw::GITERR_FILTER => super::ErrorClass::Filter, + raw::GITERR_REVERT => super::ErrorClass::Revert, + raw::GITERR_CALLBACK => super::ErrorClass::Callback, + raw::GITERR_CHERRYPICK => super::ErrorClass::CherryPick, + raw::GITERR_DESCRIBE => super::ErrorClass::Describe, + raw::GITERR_REBASE => super::ErrorClass::Rebase, + raw::GITERR_FILESYSTEM => super::ErrorClass::Filesystem, + _ => super::ErrorClass::None, + } + } + + /// Return the raw error code associated with this error. + pub fn raw_code(&self) -> raw::git_error_code { + macro_rules! check( ($($e:ident,)*) => ( + $(if self.code == raw::$e as c_int { raw::$e }) else * + else { + raw::GIT_ERROR + } + ) ); + check!( + GIT_OK, + GIT_ERROR, + GIT_ENOTFOUND, + GIT_EEXISTS, + GIT_EAMBIGUOUS, + GIT_EBUFS, + GIT_EUSER, + GIT_EBAREREPO, + GIT_EUNBORNBRANCH, + GIT_EUNMERGED, + GIT_ENONFASTFORWARD, + GIT_EINVALIDSPEC, + GIT_ECONFLICT, + GIT_ELOCKED, + GIT_EMODIFIED, + GIT_EAUTH, + GIT_ECERTIFICATE, + GIT_EAPPLIED, + GIT_EPEEL, + GIT_EEOF, + GIT_EINVALID, + GIT_EUNCOMMITTED, + GIT_PASSTHROUGH, + GIT_ITEROVER, + ) + } + + /// Return the raw error class associated with this error. + pub fn raw_class(&self) -> raw::git_error_t { + macro_rules! 
check( ($($e:ident,)*) => ( + $(if self.klass == raw::$e as c_int { raw::$e }) else * + else { + raw::GITERR_NONE + } + ) ); + check!( + GITERR_NONE, + GITERR_NOMEMORY, + GITERR_OS, + GITERR_INVALID, + GITERR_REFERENCE, + GITERR_ZLIB, + GITERR_REPOSITORY, + GITERR_CONFIG, + GITERR_REGEX, + GITERR_ODB, + GITERR_INDEX, + GITERR_OBJECT, + GITERR_NET, + GITERR_TAG, + GITERR_TREE, + GITERR_INDEXER, + GITERR_SSL, + GITERR_SUBMODULE, + GITERR_THREAD, + GITERR_STASH, + GITERR_CHECKOUT, + GITERR_FETCHHEAD, + GITERR_MERGE, + GITERR_SSH, + GITERR_FILTER, + GITERR_REVERT, + GITERR_CALLBACK, + GITERR_CHERRYPICK, + GITERR_DESCRIBE, + GITERR_REBASE, + GITERR_FILESYSTEM, + ) + } + + /// Return the message associated with this error + pub fn message(&self) -> &str { &self.message } +} + +impl error::Error for Error { + fn description(&self) -> &str { &self.message } +} + +impl fmt::Display for Error { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}", self.message)?; + match self.class() { + ErrorClass::None => {} + other => write!(f, "; class={:?} ({})", other, self.klass)?, + } + match self.code() { + ErrorCode::GenericError => {} + other => write!(f, "; code={:?} ({})", other, self.code)?, + } + Ok(()) + } +} + +impl From for Error { + fn from(_: NulError) -> Error { + Error::from_str("data contained a nul byte that could not be \ + represented as a string") + } +} + +impl From for Error { + fn from(e: JoinPathsError) -> Error { + Error::from_str(error::Error::description(&e)) + } +} + + +#[cfg(test)] +mod tests { + use {ErrorClass, ErrorCode}; + + #[test] + fn smoke() { + let (_td, repo) = ::test::repo_init(); + + let err = repo.find_submodule("does_not_exist").err().unwrap(); + assert_eq!(err.code(), ErrorCode::NotFound); + assert_eq!(err.class(), ErrorClass::Submodule); + } +} diff --git a/git2/src/index.rs b/git2/src/index.rs new file mode 100644 index 000000000..0f603a5b0 --- /dev/null +++ b/git2/src/index.rs @@ -0,0 +1,634 @@ +use std::ffi::{CStr, OsString, CString}; +use std::ops::Range; +use std::path::Path; +use std::ptr; +use std::slice; + +use libc::{c_int, c_uint, size_t, c_void, c_char}; + +use {raw, panic, Repository, Error, Tree, Oid, IndexAddOption, IndexTime}; +use IntoCString; +use util::{self, Binding}; + +/// A structure to represent a git [index][1] +/// +/// [1]: http://git-scm.com/book/en/Git-Internals-Git-Objects +pub struct Index { + raw: *mut raw::git_index, +} + +/// An iterator over the entries in an index +pub struct IndexEntries<'index> { + range: Range, + index: &'index Index, +} + +/// A callback function to filter index matches. +/// +/// Used by `Index::{add_all,remove_all,update_all}`. The first argument is the +/// path, and the second is the patchspec that matched it. Return 0 to confirm +/// the operation on the item, > 0 to skip the item, and < 0 to abort the scan. +pub type IndexMatchedPath<'a> = FnMut(&Path, &[u8]) -> i32 + 'a; + +/// A structure to represent an entry or a file inside of an index. +/// +/// All fields of an entry are public for modification and inspection. This is +/// also how a new index entry is created. +#[allow(missing_docs)] +pub struct IndexEntry { + pub ctime: IndexTime, + pub mtime: IndexTime, + pub dev: u32, + pub ino: u32, + pub mode: u32, + pub uid: u32, + pub gid: u32, + pub file_size: u32, + pub id: Oid, + pub flags: u16, + pub flags_extended: u16, + pub path: Vec, +} + +impl Index { + /// Creates a new in-memory index. 
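// Sketch of filling in an `IndexEntry` by hand, modeled on the `entry()`
// helper in the tests at the end of this file. Assumes `oid` already names a
// blob in the object database; the helper name is hypothetical.
fn stage_blob(index: &mut git2::Index, oid: git2::Oid, path: &str) -> Result<(), git2::Error> {
    let entry = git2::IndexEntry {
        ctime: git2::IndexTime::new(0, 0),
        mtime: git2::IndexTime::new(0, 0),
        dev: 0,
        ino: 0,
        mode: 0o100644,               // regular, non-executable file
        uid: 0,
        gid: 0,
        file_size: 0,
        id: oid,
        flags: 0,                     // add(), just below, recomputes the path-length bits
        flags_extended: 0,
        path: path.as_bytes().to_vec(),
    };
    index.add(&entry)
}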
+ /// + /// This index object cannot be read/written to the filesystem, but may be + /// used to perform in-memory index operations. + pub fn new() -> Result { + ::init(); + let mut raw = ptr::null_mut(); + unsafe { + try_call!(raw::git_index_new(&mut raw)); + Ok(Binding::from_raw(raw)) + } + } + + /// Create a new bare Git index object as a memory representation of the Git + /// index file in 'index_path', without a repository to back it. + /// + /// Since there is no ODB or working directory behind this index, any Index + /// methods which rely on these (e.g. add_path) will fail. + /// + /// If you need an index attached to a repository, use the `index()` method + /// on `Repository`. + pub fn open(index_path: &Path) -> Result { + ::init(); + let mut raw = ptr::null_mut(); + let index_path = try!(index_path.into_c_string()); + unsafe { + try_call!(raw::git_index_open(&mut raw, index_path)); + Ok(Binding::from_raw(raw)) + } + } + + /// Add or update an index entry from an in-memory struct + /// + /// If a previous index entry exists that has the same path and stage as the + /// given 'source_entry', it will be replaced. Otherwise, the 'source_entry' + /// will be added. + pub fn add(&mut self, entry: &IndexEntry) -> Result<(), Error> { + let path = try!(CString::new(&entry.path[..])); + + // libgit2 encodes the length of the path in the lower bits of the + // `flags` entry, so mask those out and recalculate here to ensure we + // don't corrupt anything. + let mut flags = entry.flags & !raw::GIT_IDXENTRY_NAMEMASK; + + if entry.path.len() < raw::GIT_IDXENTRY_NAMEMASK as usize { + flags |= entry.path.len() as u16; + } else { + flags |= raw::GIT_IDXENTRY_NAMEMASK; + } + + unsafe { + let raw = raw::git_index_entry { + dev: entry.dev, + ino: entry.ino, + mode: entry.mode, + uid: entry.uid, + gid: entry.gid, + file_size: entry.file_size, + id: *entry.id.raw(), + flags: flags, + flags_extended: entry.flags_extended, + path: path.as_ptr(), + mtime: raw::git_index_time { + seconds: entry.mtime.seconds(), + nanoseconds: entry.mtime.nanoseconds(), + }, + ctime: raw::git_index_time { + seconds: entry.ctime.seconds(), + nanoseconds: entry.ctime.nanoseconds(), + }, + }; + try_call!(raw::git_index_add(self.raw, &raw)); + Ok(()) + } + } + + /// Add or update an index entry from a file on disk + /// + /// The file path must be relative to the repository's working folder and + /// must be readable. + /// + /// This method will fail in bare index instances. + /// + /// This forces the file to be added to the index, not looking at gitignore + /// rules. + /// + /// If this file currently is the result of a merge conflict, this file will + /// no longer be marked as conflicting. The data about the conflict will be + /// moved to the "resolve undo" (REUC) section. + pub fn add_path(&mut self, path: &Path) -> Result<(), Error> { + // Git apparently expects '/' to be separators for paths + let mut posix_path = OsString::new(); + for (i, comp) in path.components().enumerate() { + if i != 0 { posix_path.push("/"); } + posix_path.push(comp.as_os_str()); + } + let posix_path = try!(posix_path.into_c_string()); + unsafe { + try_call!(raw::git_index_add_bypath(self.raw, posix_path)); + Ok(()) + } + } + + /// Add or update index entries matching files in the working directory. + /// + /// This method will fail in bare index instances. + /// + /// The `pathspecs` are a list of file names or shell glob patterns that + /// will matched against files in the repository's working directory. 
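// The common path for staging a working-tree file, as in the `smoke_add` test
// below: add_path() takes a path relative to the repository's working
// directory and ignores gitignore rules. Helper name is illustrative.
fn stage_file(repo: &git2::Repository, rel: &std::path::Path) -> Result<(), git2::Error> {
    let mut index = repo.index()?;
    index.add_path(rel)?;   // fails on a bare index
    index.write()           // persist the in-memory index back to disk
}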
Each + /// file that matches will be added to the index (either updating an + /// existing entry or adding a new entry). You can disable glob expansion + /// and force exact matching with the `AddDisablePathspecMatch` flag. + /// + /// Files that are ignored will be skipped (unlike `add_path`). If a file is + /// already tracked in the index, then it will be updated even if it is + /// ignored. Pass the `AddForce` flag to skip the checking of ignore rules. + /// + /// To emulate `git add -A` and generate an error if the pathspec contains + /// the exact path of an ignored file (when not using `AddForce`), add the + /// `AddCheckPathspec` flag. This checks that each entry in `pathspecs` + /// that is an exact match to a filename on disk is either not ignored or + /// already in the index. If this check fails, the function will return + /// an error. + /// + /// To emulate `git add -A` with the "dry-run" option, just use a callback + /// function that always returns a positive value. See below for details. + /// + /// If any files are currently the result of a merge conflict, those files + /// will no longer be marked as conflicting. The data about the conflicts + /// will be moved to the "resolve undo" (REUC) section. + /// + /// If you provide a callback function, it will be invoked on each matching + /// item in the working directory immediately before it is added to / + /// updated in the index. Returning zero will add the item to the index, + /// greater than zero will skip the item, and less than zero will abort the + /// scan an return an error to the caller. + pub fn add_all(&mut self, + pathspecs: I, + flag: IndexAddOption, + mut cb: Option<&mut IndexMatchedPath>) + -> Result<(), Error> + where T: IntoCString, I: IntoIterator, + { + let (_a, _b, raw_strarray) = try!(::util::iter2cstrs(pathspecs)); + let ptr = cb.as_mut(); + let callback = ptr.as_ref().map(|_| { + index_matched_path_cb as raw::git_index_matched_path_cb + }); + unsafe { + try_call!(raw::git_index_add_all(self.raw, + &raw_strarray, + flag.bits() as c_uint, + callback, + ptr.map(|p| p as *mut _) + .unwrap_or(ptr::null_mut()) + as *mut c_void)); + } + Ok(()) + } + + /// Clear the contents (all the entries) of an index object. + /// + /// This clears the index object in memory; changes must be explicitly + /// written to disk for them to take effect persistently via `write_*`. + pub fn clear(&mut self) -> Result<(), Error> { + unsafe { try_call!(raw::git_index_clear(self.raw)); } + Ok(()) + } + + /// Get the count of entries currently in the index + pub fn len(&self) -> usize { + unsafe { raw::git_index_entrycount(&*self.raw) as usize } + } + + /// Return `true` is there is no entry in the index + pub fn is_empty(&self) -> bool { + self.len() == 0 + } + + /// Get one of the entries in the index by its position. + pub fn get(&self, n: usize) -> Option { + unsafe { + let ptr = raw::git_index_get_byindex(self.raw, n as size_t); + if ptr.is_null() {None} else {Some(Binding::from_raw(*ptr))} + } + } + + /// Get an iterator over the entries in this index. + pub fn iter(&self) -> IndexEntries { + IndexEntries { range: 0..self.len(), index: self } + } + + /// Get one of the entries in the index by its path. + pub fn get_path(&self, path: &Path, stage: i32) -> Option { + let path = path.into_c_string().unwrap(); + unsafe { + let ptr = call!(raw::git_index_get_bypath(self.raw, path, + stage as c_int)); + if ptr.is_null() {None} else {Some(Binding::from_raw(*ptr))} + } + } + + /// Does this index have conflicts? 
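// Sketch of add_all() with a match callback, mirroring the `add_all` test at
// the end of this file: returning 0 confirms the item, a positive value skips
// it, a negative value aborts. The "*" pathspec and helper name are illustrative.
fn stage_everything(repo: &git2::Repository) -> Result<(), git2::Error> {
    let mut index = repo.index()?;
    index.add_all(
        ["*"].iter(),
        git2::IndexAddOption::DEFAULT,
        Some(&mut |path: &std::path::Path, _spec: &[u8]| {
            println!("adding {}", path.display());
            0
        }),
    )?;
    index.write()
}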
+ /// + /// Returns `true` if the index contains conflicts, `false` if it does not. + pub fn has_conflicts(&self) -> bool { + unsafe { + raw::git_index_has_conflicts(self.raw) == 1 + } + } + + /// Get the full path to the index file on disk. + /// + /// Returns `None` if this is an in-memory index. + pub fn path(&self) -> Option<&Path> { + unsafe { + ::opt_bytes(self, raw::git_index_path(&*self.raw)).map(util::bytes2path) + } + } + + /// Update the contents of an existing index object in memory by reading + /// from the hard disk. + /// + /// If force is true, this performs a "hard" read that discards in-memory + /// changes and always reloads the on-disk index data. If there is no + /// on-disk version, the index will be cleared. + /// + /// If force is false, this does a "soft" read that reloads the index data + /// from disk only if it has changed since the last time it was loaded. + /// Purely in-memory index data will be untouched. Be aware: if there are + /// changes on disk, unwritten in-memory changes are discarded. + pub fn read(&mut self, force: bool) -> Result<(), Error> { + unsafe { try_call!(raw::git_index_read(self.raw, force)); } + Ok(()) + } + + /// Read a tree into the index file with stats + /// + /// The current index contents will be replaced by the specified tree. + pub fn read_tree(&mut self, tree: &Tree) -> Result<(), Error> { + unsafe { try_call!(raw::git_index_read_tree(self.raw, &*tree.raw())); } + Ok(()) + } + + /// Remove an entry from the index + pub fn remove(&mut self, path: &Path, stage: i32) -> Result<(), Error> { + let path = try!(path.into_c_string()); + unsafe { + try_call!(raw::git_index_remove(self.raw, path, stage as c_int)); + } + Ok(()) + } + + /// Remove an index entry corresponding to a file on disk. + /// + /// The file path must be relative to the repository's working folder. It + /// may exist. + /// + /// If this file currently is the result of a merge conflict, this file will + /// no longer be marked as conflicting. The data about the conflict will be + /// moved to the "resolve undo" (REUC) section. + pub fn remove_path(&mut self, path: &Path) -> Result<(), Error> { + let path = try!(path.into_c_string()); + unsafe { + try_call!(raw::git_index_remove_bypath(self.raw, path)); + } + Ok(()) + } + + /// Remove all entries from the index under a given directory. + pub fn remove_dir(&mut self, path: &Path, stage: i32) -> Result<(), Error> { + let path = try!(path.into_c_string()); + unsafe { + try_call!(raw::git_index_remove_directory(self.raw, path, + stage as c_int)); + } + Ok(()) + } + + /// Remove all matching index entries. + /// + /// If you provide a callback function, it will be invoked on each matching + /// item in the index immediately before it is removed. Return 0 to remove + /// the item, > 0 to skip the item, and < 0 to abort the scan. + pub fn remove_all(&mut self, + pathspecs: I, + mut cb: Option<&mut IndexMatchedPath>) + -> Result<(), Error> + where T: IntoCString, I: IntoIterator, + { + let (_a, _b, raw_strarray) = try!(::util::iter2cstrs(pathspecs)); + let ptr = cb.as_mut(); + let callback = ptr.as_ref().map(|_| { + index_matched_path_cb as raw::git_index_matched_path_cb + }); + unsafe { + try_call!(raw::git_index_remove_all(self.raw, + &raw_strarray, + callback, + ptr.map(|p| p as *mut _) + .unwrap_or(ptr::null_mut()) + as *mut c_void)); + } + Ok(()) + } + + /// Update all index entries to match the working directory + /// + /// This method will fail in bare index instances. 
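// Counterpart sketch to the staging helpers above: remove a single entry by
// path and persist the result, using remove_path() as documented here; the
// working-tree file itself is left alone. Helper name is illustrative.
fn unstage_file(repo: &git2::Repository, rel: &std::path::Path) -> Result<(), git2::Error> {
    let mut index = repo.index()?;
    index.remove_path(rel)?;
    index.write()
}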
+ /// + /// This scans the existing index entries and synchronizes them with the + /// working directory, deleting them if the corresponding working directory + /// file no longer exists otherwise updating the information (including + /// adding the latest version of file to the ODB if needed). + /// + /// If you provide a callback function, it will be invoked on each matching + /// item in the index immediately before it is updated (either refreshed or + /// removed depending on working directory state). Return 0 to proceed with + /// updating the item, > 0 to skip the item, and < 0 to abort the scan. + pub fn update_all(&mut self, + pathspecs: I, + mut cb: Option<&mut IndexMatchedPath>) + -> Result<(), Error> + where T: IntoCString, I: IntoIterator, + { + let (_a, _b, raw_strarray) = try!(::util::iter2cstrs(pathspecs)); + let ptr = cb.as_mut(); + let callback = ptr.as_ref().map(|_| { + index_matched_path_cb as raw::git_index_matched_path_cb + }); + unsafe { + try_call!(raw::git_index_update_all(self.raw, + &raw_strarray, + callback, + ptr.map(|p| p as *mut _) + .unwrap_or(ptr::null_mut()) + as *mut c_void)); + } + Ok(()) + } + + /// Write an existing index object from memory back to disk using an atomic + /// file lock. + pub fn write(&mut self) -> Result<(), Error> { + unsafe { try_call!(raw::git_index_write(self.raw)); } + Ok(()) + } + + /// Write the index as a tree. + /// + /// This method will scan the index and write a representation of its + /// current state back to disk; it recursively creates tree objects for each + /// of the subtrees stored in the index, but only returns the OID of the + /// root tree. This is the OID that can be used e.g. to create a commit. + /// + /// The index instance cannot be bare, and needs to be associated to an + /// existing repository. + /// + /// The index must not contain any file in conflict. + pub fn write_tree(&mut self) -> Result { + let mut raw = raw::git_oid { id: [0; raw::GIT_OID_RAWSZ] }; + unsafe { + try_call!(raw::git_index_write_tree(&mut raw, self.raw)); + Ok(Binding::from_raw(&raw as *const _)) + } + } + + /// Write the index as a tree to the given repository + /// + /// This is the same as `write_tree` except that the destination repository + /// can be chosen. 
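// Sketch of turning the staged index into a commit, following the sequence in
// the `smoke_add` test below: write_tree() yields the root tree OID, which is
// then committed on top of the current HEAD. Assumes HEAD already points at a
// commit; the helper name is hypothetical.
fn commit_index(repo: &git2::Repository, message: &str) -> Result<git2::Oid, git2::Error> {
    let mut index = repo.index()?;
    let tree_id = index.write_tree()?;
    let tree = repo.find_tree(tree_id)?;
    let sig = repo.signature()?;
    let parent_id = repo.refname_to_id("HEAD")?;
    let parent = repo.find_commit(parent_id)?;
    repo.commit(Some("HEAD"), &sig, &sig, message, &tree, &[&parent])
}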
+ pub fn write_tree_to(&mut self, repo: &Repository) -> Result { + let mut raw = raw::git_oid { id: [0; raw::GIT_OID_RAWSZ] }; + unsafe { + try_call!(raw::git_index_write_tree_to(&mut raw, self.raw, + repo.raw())); + Ok(Binding::from_raw(&raw as *const _)) + } + } +} + +impl Binding for Index { + type Raw = *mut raw::git_index; + unsafe fn from_raw(raw: *mut raw::git_index) -> Index { + Index { raw: raw } + } + fn raw(&self) -> *mut raw::git_index { self.raw } +} + +extern fn index_matched_path_cb(path: *const c_char, + matched_pathspec: *const c_char, + payload: *mut c_void) -> c_int { + unsafe { + let path = CStr::from_ptr(path).to_bytes(); + let matched_pathspec = CStr::from_ptr(matched_pathspec).to_bytes(); + + panic::wrap(|| { + let payload = payload as *mut &mut IndexMatchedPath; + (*payload)(util::bytes2path(path), matched_pathspec) as c_int + }).unwrap_or(-1) + } +} + +impl Drop for Index { + fn drop(&mut self) { + unsafe { raw::git_index_free(self.raw) } + } +} + +impl<'index> Iterator for IndexEntries<'index> { + type Item = IndexEntry; + fn next(&mut self) -> Option { + self.range.next().map(|i| self.index.get(i).unwrap()) + } +} + +impl Binding for IndexEntry { + type Raw = raw::git_index_entry; + + unsafe fn from_raw(raw: raw::git_index_entry) -> IndexEntry { + let raw::git_index_entry { + ctime, mtime, dev, ino, mode, uid, gid, file_size, id, flags, + flags_extended, path + } = raw; + + // libgit2 encodes the length of the path in the lower bits of `flags`, + // but if the length exceeds the number of bits then the path is + // nul-terminated. + let mut pathlen = (flags & raw::GIT_IDXENTRY_NAMEMASK) as usize; + if pathlen == raw::GIT_IDXENTRY_NAMEMASK as usize { + pathlen = CStr::from_ptr(path).to_bytes().len(); + } + + let path = slice::from_raw_parts(path as *const u8, pathlen); + + IndexEntry { + dev: dev, + ino: ino, + mode: mode, + uid: uid, + gid: gid, + file_size: file_size, + id: Binding::from_raw(&id as *const _), + flags: flags, + flags_extended: flags_extended, + path: path.to_vec(), + mtime: Binding::from_raw(mtime), + ctime: Binding::from_raw(ctime), + } + } + + fn raw(&self) -> raw::git_index_entry { + // not implemented, may require a CString in storage + panic!() + } +} + +#[cfg(test)] +mod tests { + use std::fs::{self, File}; + use std::path::Path; + use tempdir::TempDir; + + use {Index, IndexEntry, Repository, ResetType, Oid, IndexTime}; + + #[test] + fn smoke() { + let mut index = Index::new().unwrap(); + assert!(index.add_path(&Path::new(".")).is_err()); + index.clear().unwrap(); + assert_eq!(index.len(), 0); + assert!(index.get(0).is_none()); + assert!(index.path().is_none()); + assert!(index.read(true).is_err()); + } + + #[test] + fn smoke_from_repo() { + let (_td, repo) = ::test::repo_init(); + let mut index = repo.index().unwrap(); + assert_eq!(index.path().map(|s| s.to_path_buf()), + Some(repo.path().join("index"))); + Index::open(&repo.path().join("index")).unwrap(); + + index.clear().unwrap(); + index.read(true).unwrap(); + index.write().unwrap(); + index.write_tree().unwrap(); + index.write_tree_to(&repo).unwrap(); + } + + #[test] + fn add_all() { + let (_td, repo) = ::test::repo_init(); + let mut index = repo.index().unwrap(); + + let root = repo.path().parent().unwrap(); + fs::create_dir(&root.join("foo")).unwrap(); + File::create(&root.join("foo/bar")).unwrap(); + let mut called = false; + index.add_all(["foo"].iter(), ::IndexAddOption::DEFAULT, + Some(&mut |a: &Path, b: &[u8]| { + assert!(!called); + called = true; + assert_eq!(b, b"foo"); + 
assert_eq!(a, Path::new("foo/bar")); + 0 + })).unwrap(); + assert!(called); + + called = false; + index.remove_all(["."].iter(), Some(&mut |a: &Path, b: &[u8]| { + assert!(!called); + called = true; + assert_eq!(b, b"."); + assert_eq!(a, Path::new("foo/bar")); + 0 + })).unwrap(); + assert!(called); + } + + #[test] + fn smoke_add() { + let (_td, repo) = ::test::repo_init(); + let mut index = repo.index().unwrap(); + + let root = repo.path().parent().unwrap(); + fs::create_dir(&root.join("foo")).unwrap(); + File::create(&root.join("foo/bar")).unwrap(); + index.add_path(Path::new("foo/bar")).unwrap(); + index.write().unwrap(); + assert_eq!(index.iter().count(), 1); + + // Make sure we can use this repo somewhere else now. + let id = index.write_tree().unwrap(); + let tree = repo.find_tree(id).unwrap(); + let sig = repo.signature().unwrap(); + let id = repo.refname_to_id("HEAD").unwrap(); + let parent = repo.find_commit(id).unwrap(); + let commit = repo.commit(Some("HEAD"), &sig, &sig, "commit", + &tree, &[&parent]).unwrap(); + let obj = repo.find_object(commit, None).unwrap(); + repo.reset(&obj, ResetType::Hard, None).unwrap(); + + let td2 = TempDir::new("git").unwrap(); + let url = ::test::path2url(&root); + let repo = Repository::clone(&url, td2.path()).unwrap(); + let obj = repo.find_object(commit, None).unwrap(); + repo.reset(&obj, ResetType::Hard, None).unwrap(); + } + + #[test] + fn add_then_read() { + let mut index = Index::new().unwrap(); + assert!(index.add(&entry()).is_err()); + + let mut index = Index::new().unwrap(); + let mut e = entry(); + e.path = b"foobar".to_vec(); + index.add(&e).unwrap(); + let e = index.get(0).unwrap(); + assert_eq!(e.path.len(), 6); + } + + fn entry() -> IndexEntry { + IndexEntry { + ctime: IndexTime::new(0, 0), + mtime: IndexTime::new(0, 0), + dev: 0, + ino: 0, + mode: 0o100644, + uid: 0, + gid: 0, + file_size: 0, + id: Oid::from_bytes(&[0; 20]).unwrap(), + flags: 0, + flags_extended: 0, + path: Vec::new(), + } + } +} diff --git a/git2/src/lib.rs b/git2/src/lib.rs new file mode 100644 index 000000000..8a6c64b94 --- /dev/null +++ b/git2/src/lib.rs @@ -0,0 +1,1337 @@ +//! # libgit2 bindings for Rust +//! +//! This library contains bindings to the [libgit2][1] C library which is used +//! to manage git repositories. The library itself is a work in progress and is +//! likely lacking some bindings here and there, so be warned. +//! +//! [1]: https://libgit2.github.com/ +//! +//! The git2-rs library strives to be as close to libgit2 as possible, but also +//! strives to make using libgit2 as safe as possible. All resource management +//! is automatic as well as adding strong types to all interfaces (including +//! `Result`) +//! +//! ## Creating a `Repository` +//! +//! The `Repository` is the source from which almost all other objects in git-rs +//! are spawned. A repository can be created through opening, initializing, or +//! cloning. +//! +//! ### Initializing a new repository +//! +//! The `init` method will create a new repository, assuming one does not +//! already exist. +//! +//! ```no_run +//! # #![allow(unstable)] +//! use git2::Repository; +//! +//! let repo = match Repository::init("/path/to/a/repo") { +//! Ok(repo) => repo, +//! Err(e) => panic!("failed to init: {}", e), +//! }; +//! ``` +//! +//! ### Opening an existing repository +//! +//! ```no_run +//! # #![allow(unstable)] +//! use git2::Repository; +//! +//! let repo = match Repository::open("/path/to/a/repo") { +//! Ok(repo) => repo, +//! Err(e) => panic!("failed to open: {}", e), +//! 
}; +//! ``` +//! +//! ### Cloning an existing repository +//! +//! ```no_run +//! # #![allow(unstable)] +//! use git2::Repository; +//! +//! let url = "https://github.com/alexcrichton/git2-rs"; +//! let repo = match Repository::clone(url, "/path/to/a/repo") { +//! Ok(repo) => repo, +//! Err(e) => panic!("failed to clone: {}", e), +//! }; +//! ``` +//! +//! ## Working with a `Repository` +//! +//! All deriviative objects, references, etc are attached to the lifetime of the +//! source `Repository`, to ensure that they do not outlive the repository +//! itself. + +#![doc(html_root_url = "https://docs.rs/git2/0.6")] +#![allow(trivial_numeric_casts, trivial_casts)] +#![deny(missing_docs)] +#![cfg_attr(test, deny(warnings))] + +extern crate libc; +extern crate url; +extern crate libgit2_sys as raw; +#[macro_use] extern crate bitflags; +#[macro_use] extern crate log; +#[cfg(test)] extern crate tempdir; + +use std::ffi::{CStr, CString}; +use std::fmt; +use std::str; +use std::sync::{Once, ONCE_INIT}; + +pub use blame::{Blame, BlameHunk, BlameIter, BlameOptions}; +pub use blob::{Blob, BlobWriter}; +pub use branch::{Branch, Branches}; +pub use buf::Buf; +pub use commit::{Commit, Parents}; +pub use config::{Config, ConfigEntry, ConfigEntries}; +pub use cred::{Cred, CredentialHelper}; +pub use describe::{Describe, DescribeFormatOptions, DescribeOptions}; +pub use diff::{Diff, DiffDelta, DiffFile, DiffOptions, Deltas}; +pub use diff::{DiffBinary, DiffBinaryFile, DiffBinaryKind}; +pub use diff::{DiffLine, DiffHunk, DiffStats, DiffFindOptions}; +pub use error::Error; +pub use index::{Index, IndexEntry, IndexEntries, IndexMatchedPath}; +pub use merge::{AnnotatedCommit, MergeOptions}; +pub use message::{message_prettify, DEFAULT_COMMENT_CHAR}; +pub use note::{Note, Notes}; +pub use object::Object; +pub use oid::Oid; +pub use packbuilder::{PackBuilder, PackBuilderStage}; +pub use pathspec::{Pathspec, PathspecMatchList, PathspecFailedEntries}; +pub use pathspec::{PathspecDiffEntries, PathspecEntries}; +pub use patch::Patch; +pub use proxy_options::ProxyOptions; +pub use reference::{Reference, References, ReferenceNames}; +pub use reflog::{Reflog, ReflogEntry, ReflogIter}; +pub use refspec::Refspec; +pub use remote::{Remote, RemoteConnection, Refspecs, RemoteHead, FetchOptions, PushOptions}; +pub use remote_callbacks::{RemoteCallbacks, Credentials, TransferProgress}; +pub use remote_callbacks::{TransportMessage, Progress, UpdateTips}; +pub use repo::{Repository, RepositoryInitOptions}; +pub use revspec::Revspec; +pub use revwalk::Revwalk; +pub use signature::Signature; +pub use status::{StatusOptions, Statuses, StatusIter, StatusEntry, StatusShow}; +pub use stash::{StashApplyOptions, StashCb, StashApplyProgressCb}; +pub use submodule::{Submodule, SubmoduleUpdateOptions}; +pub use tag::Tag; +pub use time::{Time, IndexTime}; +pub use tree::{Tree, TreeEntry, TreeIter}; +pub use treebuilder::TreeBuilder; +pub use odb::{Odb, OdbObject, OdbReader, OdbWriter}; +pub use util::IntoCString; + +// Create a convinience method on bitflag struct which checks the given flag +macro_rules! is_bit_set { + ($name:ident, $flag:expr) => ( + #[allow(missing_docs)] + pub fn $name(&self) -> bool { + self.intersects($flag) + } + ) +} + +/// An enumeration of possible errors that can happen when working with a git +/// repository. 
+#[derive(PartialEq, Eq, Clone, Debug, Copy)] +pub enum ErrorCode { + /// Generic error + GenericError, + /// Requested object could not be found + NotFound, + /// Object exists preventing operation + Exists, + /// More than one object matches + Ambiguous, + /// Output buffer too short to hold data + BufSize, + /// User-generated error + User, + /// Operation not allowed on bare repository + BareRepo, + /// HEAD refers to branch with no commits + UnbornBranch, + /// Merge in progress prevented operation + Unmerged, + /// Reference was not fast-forwardable + NotFastForward, + /// Name/ref spec was not in a valid format + InvalidSpec, + /// Checkout conflicts prevented operation + Conflict, + /// Lock file prevented operation + Locked, + /// Reference value does not match expected + Modified, + /// Authentication error + Auth, + /// Server certificate is invalid + Certificate, + /// Patch/merge has already been applied + Applied, + /// The requested peel operation is not possible + Peel, + /// Unexpected EOF + Eof, + /// Invalid operation or input + Invalid, + /// Uncommitted changes in index prevented operation + Uncommitted, + /// Operation was not valid for a directory, + Directory, +} + +/// An enumeration of possible categories of things that can have +/// errors when working with a git repository. +#[derive(PartialEq, Eq, Clone, Debug, Copy)] +pub enum ErrorClass { + /// Uncategorized + None, + /// Out of memory or insufficient allocated space + NoMemory, + /// Syscall or standard system library error + Os, + /// Invalid input + Invalid, + /// Error resolving or manipulating a reference + Reference, + /// ZLib failure + Zlib, + /// Bad repository state + Repository, + /// Bad configuration + Config, + /// Regex failure + Regex, + /// Bad object + Odb, + /// Invalid index data + Index, + /// Error creating or obtaining an object + Object, + /// Network error + Net, + /// Error manpulating a tag + Tag, + /// Invalid value in tree + Tree, + /// Hashing or packing error + Indexer, + /// Error from SSL + Ssl, + /// Error involing submodules + Submodule, + /// Threading error + Thread, + /// Error manipulating a stash + Stash, + /// Checkout failure + Checkout, + /// Invalid FETCH_HEAD + FetchHead, + /// Merge failure + Merge, + /// SSH failure + Ssh, + /// Error manipulating filters + Filter, + /// Error reverting commit + Revert, + /// Error from a user callback + Callback, + /// Error cherry-picking commit + CherryPick, + /// Can't describe object + Describe, + /// Error during rebase + Rebase, + /// Filesystem-related error + Filesystem, +} + +/// A listing of the possible states that a repository can be in. +#[derive(PartialEq, Eq, Clone, Debug, Copy)] +#[allow(missing_docs)] +pub enum RepositoryState { + Clean, + Merge, + Revert, + RevertSequence, + CherryPick, + CherryPickSequence, + Bisect, + Rebase, + RebaseInteractive, + RebaseMerge, + ApplyMailbox, + ApplyMailboxOrRebase, +} + +/// An enumeration of the possible directions for a remote. +#[derive(Copy, Clone)] +pub enum Direction { + /// Data will be fetched (read) from this remote. + Fetch, + /// Data will be pushed (written) to this remote. + Push, +} + +/// An enumeration of the operations that can be performed for the `reset` +/// method on a `Repository`. +#[derive(Copy, Clone)] +pub enum ResetType { + /// Move the head to the given commit. + Soft, + /// Soft plus reset the index to the commit. + Mixed, + /// Mixed plus changes in the working tree are discarded. 
+ Hard, +} + +/// An enumeration all possible kinds objects may have. +#[derive(PartialEq, Eq, Copy, Clone, Debug)] +pub enum ObjectType { + /// Any kind of git object + Any, + /// An object which corresponds to a git commit + Commit, + /// An object which corresponds to a git tree + Tree, + /// An object which corresponds to a git blob + Blob, + /// An object which corresponds to a git tag + Tag, +} + +/// An enumeration of all possile kinds of references. +#[derive(PartialEq, Eq, Copy, Clone, Debug)] +pub enum ReferenceType { + /// A reference which points at an object id. + Oid, + + /// A reference which points at another reference. + Symbolic, +} + +/// An enumeration for the possible types of branches +#[derive(PartialEq, Eq, Debug, Copy, Clone)] +pub enum BranchType { + /// A local branch not on a remote. + Local, + /// A branch for a remote. + Remote, +} + +/// An enumeration of the possible priority levels of a config file. +/// +/// The levels corresponding to the escalation logic (higher to lower) when +/// searching for config entries. +#[derive(PartialEq, Eq, Debug, Copy, Clone)] +pub enum ConfigLevel { + /// System-wide on Windows, for compatibility with portable git + ProgramData, + /// System-wide configuration file, e.g. /etc/gitconfig + System, + /// XDG-compatible configuration file, e.g. ~/.config/git/config + XDG, + /// User-specific configuration, e.g. ~/.gitconfig + Global, + /// Repository specific config, e.g. $PWD/.git/config + Local, + /// Application specific configuration file + App, + /// Highest level available + Highest, +} + +/// Merge file favor options for `MergeOptions` instruct the file-level +/// merging functionality how to deal with conflicting regions of the files. +#[derive(PartialEq, Eq, Debug, Copy, Clone)] +pub enum FileFavor { + /// When a region of a file is changed in both branches, a conflict will be + /// recorded in the index so that git_checkout can produce a merge file with + /// conflict markers in the working directory. This is the default. + Normal, + /// When a region of a file is changed in both branches, the file created + /// in the index will contain the "ours" side of any conflicting region. + /// The index will not record a conflict. + Ours, + /// When a region of a file is changed in both branches, the file created + /// in the index will contain the "theirs" side of any conflicting region. + /// The index will not record a conflict. + Theirs, + /// When a region of a file is changed in both branches, the file created + /// in the index will contain each unique line from each side, which has + /// the result of combining both files. The index will not record a conflict. + Union, +} + +bitflags! { + /// Orderings that may be specified for Revwalk iteration. + pub struct Sort: u32 { + /// Sort the repository contents in no particular ordering. + /// + /// This sorting is arbitrary, implementation-specific, and subject to + /// change at any time. This is the default sorting for new walkers. + const NONE = raw::GIT_SORT_NONE as u32; + + /// Sort the repository contents in topological order (parents before + /// children). + /// + /// This sorting mode can be combined with time sorting. + const TOPOLOGICAL = raw::GIT_SORT_TOPOLOGICAL as u32; + + /// Sort the repository contents by commit time. + /// + /// This sorting mode can be combined with topological sorting. + const TIME = raw::GIT_SORT_TIME as u32; + + /// Iterate through the repository contents in reverse order. + /// + /// This sorting mode can be combined with any others. 
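// Sketch of a hard reset using the `ResetType` variants above, matching the
// calls made in the index tests (`repo.reset(&obj, ResetType::Hard, None)`).
// Assumes `repo` has at least one commit; the helper name is illustrative.
fn discard_local_changes(repo: &git2::Repository) -> Result<(), git2::Error> {
    let head_id = repo.refname_to_id("HEAD")?;
    let head = repo.find_object(head_id, None)?;
    repo.reset(&head, git2::ResetType::Hard, None)
}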
+ const REVERSE = raw::GIT_SORT_REVERSE as u32; + } +} + +impl Sort { + is_bit_set!(is_none, Sort::NONE); + is_bit_set!(is_topological, Sort::TOPOLOGICAL); + is_bit_set!(is_time, Sort::TIME); + is_bit_set!(is_reverse, Sort::REVERSE); +} + +bitflags! { + /// Types of credentials that can be requested by a credential callback. + pub struct CredentialType: u32 { + #[allow(missing_docs)] + const USER_PASS_PLAINTEXT = raw::GIT_CREDTYPE_USERPASS_PLAINTEXT as u32; + #[allow(missing_docs)] + const SSH_KEY = raw::GIT_CREDTYPE_SSH_KEY as u32; + #[allow(missing_docs)] + const SSH_MEMORY = raw::GIT_CREDTYPE_SSH_MEMORY as u32; + #[allow(missing_docs)] + const SSH_CUSTOM = raw::GIT_CREDTYPE_SSH_CUSTOM as u32; + #[allow(missing_docs)] + const DEFAULT = raw::GIT_CREDTYPE_DEFAULT as u32; + #[allow(missing_docs)] + const SSH_INTERACTIVE = raw::GIT_CREDTYPE_SSH_INTERACTIVE as u32; + #[allow(missing_docs)] + const USERNAME = raw::GIT_CREDTYPE_USERNAME as u32; + } +} + +impl CredentialType { + is_bit_set!(is_user_pass_plaintext, CredentialType::USER_PASS_PLAINTEXT); + is_bit_set!(is_ssh_key, CredentialType::SSH_KEY); + is_bit_set!(is_ssh_memory, CredentialType::SSH_MEMORY); + is_bit_set!(is_ssh_custom, CredentialType::SSH_CUSTOM); + is_bit_set!(is_default, CredentialType::DEFAULT); + is_bit_set!(is_ssh_interactive, CredentialType::SSH_INTERACTIVE); + is_bit_set!(is_username, CredentialType::USERNAME); +} + +impl Default for CredentialType { + fn default() -> Self { + CredentialType::DEFAULT + } +} + +bitflags! { + /// Flags for the `flags` field of an IndexEntry. + pub struct IndexEntryFlag: u16 { + /// Set when the `extended_flags` field is valid. + const EXTENDED = raw::GIT_IDXENTRY_EXTENDED as u16; + /// "Assume valid" flag + const VALID = raw::GIT_IDXENTRY_VALID as u16; + } +} + +impl IndexEntryFlag { + is_bit_set!(is_extended, IndexEntryFlag::EXTENDED); + is_bit_set!(is_valid, IndexEntryFlag::VALID); +} + +bitflags! { + /// Flags for the `extended_flags` field of an IndexEntry. 
+ pub struct IndexEntryExtendedFlag: u16 { + /// An "intent to add" entry from "git add -N" + const INTENT_TO_ADD = raw::GIT_IDXENTRY_INTENT_TO_ADD as u16; + /// Skip the associated worktree file, for sparse checkouts + const SKIP_WORKTREE = raw::GIT_IDXENTRY_SKIP_WORKTREE as u16; + /// Reserved for a future on-disk extended flag + const EXTENDED2 = raw::GIT_IDXENTRY_EXTENDED2 as u16; + + #[allow(missing_docs)] + const UPDATE = raw::GIT_IDXENTRY_UPDATE as u16; + #[allow(missing_docs)] + const REMOVE = raw::GIT_IDXENTRY_REMOVE as u16; + #[allow(missing_docs)] + const UPTODATE = raw::GIT_IDXENTRY_UPTODATE as u16; + #[allow(missing_docs)] + const ADDED = raw::GIT_IDXENTRY_ADDED as u16; + + #[allow(missing_docs)] + const HASHED = raw::GIT_IDXENTRY_HASHED as u16; + #[allow(missing_docs)] + const UNHASHED = raw::GIT_IDXENTRY_UNHASHED as u16; + #[allow(missing_docs)] + const WT_REMOVE = raw::GIT_IDXENTRY_WT_REMOVE as u16; + #[allow(missing_docs)] + const CONFLICTED = raw::GIT_IDXENTRY_CONFLICTED as u16; + + #[allow(missing_docs)] + const UNPACKED = raw::GIT_IDXENTRY_UNPACKED as u16; + #[allow(missing_docs)] + const NEW_SKIP_WORKTREE = raw::GIT_IDXENTRY_NEW_SKIP_WORKTREE as u16; + } +} + +impl IndexEntryExtendedFlag { + is_bit_set!(is_intent_to_add, IndexEntryExtendedFlag::INTENT_TO_ADD); + is_bit_set!(is_skip_worktree, IndexEntryExtendedFlag::SKIP_WORKTREE); + is_bit_set!(is_extended2, IndexEntryExtendedFlag::EXTENDED2); + is_bit_set!(is_update, IndexEntryExtendedFlag::UPDATE); + is_bit_set!(is_remove, IndexEntryExtendedFlag::REMOVE); + is_bit_set!(is_up_to_date, IndexEntryExtendedFlag::UPTODATE); + is_bit_set!(is_added, IndexEntryExtendedFlag::ADDED); + is_bit_set!(is_hashed, IndexEntryExtendedFlag::HASHED); + is_bit_set!(is_unhashed, IndexEntryExtendedFlag::UNHASHED); + is_bit_set!(is_wt_remove, IndexEntryExtendedFlag::WT_REMOVE); + is_bit_set!(is_conflicted, IndexEntryExtendedFlag::CONFLICTED); + is_bit_set!(is_unpacked, IndexEntryExtendedFlag::UNPACKED); + is_bit_set!(is_new_skip_worktree, IndexEntryExtendedFlag::NEW_SKIP_WORKTREE); +} + +bitflags! { + /// Flags for APIs that add files matching pathspec + pub struct IndexAddOption: u32 { + #[allow(missing_docs)] + const DEFAULT = raw::GIT_INDEX_ADD_DEFAULT as u32; + #[allow(missing_docs)] + const FORCE = raw::GIT_INDEX_ADD_FORCE as u32; + #[allow(missing_docs)] + const DISABLE_PATHSPEC_MATCH = + raw::GIT_INDEX_ADD_DISABLE_PATHSPEC_MATCH as u32; + #[allow(missing_docs)] + const CHECK_PATHSPEC = raw::GIT_INDEX_ADD_CHECK_PATHSPEC as u32; + } +} + +impl IndexAddOption { + is_bit_set!(is_default, IndexAddOption::DEFAULT); + is_bit_set!(is_force, IndexAddOption::FORCE); + is_bit_set!(is_disable_pathspec_match, IndexAddOption::DISABLE_PATHSPEC_MATCH); + is_bit_set!(is_check_pathspec, IndexAddOption::CHECK_PATHSPEC); +} + +impl Default for IndexAddOption { + fn default() -> Self { + IndexAddOption::DEFAULT + } +} + +bitflags! { + /// Flags for `Repository::open_ext` + pub struct RepositoryOpenFlags: u32 { + /// Only open the specified path; don't walk upward searching. + const NO_SEARCH = raw::GIT_REPOSITORY_OPEN_NO_SEARCH as u32; + /// Search across filesystem boundaries. + const CROSS_FS = raw::GIT_REPOSITORY_OPEN_CROSS_FS as u32; + /// Force opening as bare repository, and defer loading its config. + const BARE = raw::GIT_REPOSITORY_OPEN_BARE as u32; + /// Don't try appending `/.git` to the specified repository path. + const NO_DOTGIT = raw::GIT_REPOSITORY_OPEN_NO_DOTGIT as u32; + /// Respect environment variables like `$GIT_DIR`. 
+ const FROM_ENV = raw::GIT_REPOSITORY_OPEN_FROM_ENV as u32; + } +} + +impl RepositoryOpenFlags { + is_bit_set!(is_no_search, RepositoryOpenFlags::NO_SEARCH); + is_bit_set!(is_cross_fs, RepositoryOpenFlags::CROSS_FS); + is_bit_set!(is_bare, RepositoryOpenFlags::BARE); + is_bit_set!(is_no_dotgit, RepositoryOpenFlags::NO_DOTGIT); + is_bit_set!(is_from_env, RepositoryOpenFlags::FROM_ENV); +} + +bitflags! { + /// Flags for the return value of `Repository::revparse` + pub struct RevparseMode: u32 { + /// The spec targeted a single object + const SINGLE = raw::GIT_REVPARSE_SINGLE as u32; + /// The spec targeted a range of commits + const RANGE = raw::GIT_REVPARSE_RANGE as u32; + /// The spec used the `...` operator, which invokes special semantics. + const MERGE_BASE = raw::GIT_REVPARSE_MERGE_BASE as u32; + } +} + +impl RevparseMode { + is_bit_set!(is_no_single, RevparseMode::SINGLE); + is_bit_set!(is_range, RevparseMode::RANGE); + is_bit_set!(is_merge_base, RevparseMode::MERGE_BASE); +} + +bitflags! { + /// The results of `merge_analysis` indicating the merge opportunities. + pub struct MergeAnalysis: u32 { + /// No merge is possible. + const ANALYSIS_NONE = raw::GIT_MERGE_ANALYSIS_NONE as u32; + /// A "normal" merge; both HEAD and the given merge input have diverged + /// from their common ancestor. The divergent commits must be merged. + const ANALYSIS_NORMAL = raw::GIT_MERGE_ANALYSIS_NORMAL as u32; + /// All given merge inputs are reachable from HEAD, meaning the + /// repository is up-to-date and no merge needs to be performed. + const ANALYSIS_UP_TO_DATE = raw::GIT_MERGE_ANALYSIS_UP_TO_DATE as u32; + /// The given merge input is a fast-forward from HEAD and no merge + /// needs to be performed. Instead, the client can check out the + /// given merge input. + const ANALYSIS_FASTFORWARD = raw::GIT_MERGE_ANALYSIS_FASTFORWARD as u32; + /// The HEAD of the current repository is "unborn" and does not point to + /// a valid commit. No merge can be performed, but the caller may wish + /// to simply set HEAD to the target commit(s). + const ANALYSIS_UNBORN = raw::GIT_MERGE_ANALYSIS_UNBORN as u32; + } +} + +impl MergeAnalysis { + is_bit_set!(is_none, MergeAnalysis::ANALYSIS_NONE); + is_bit_set!(is_normal, MergeAnalysis::ANALYSIS_NORMAL); + is_bit_set!(is_up_to_date, MergeAnalysis::ANALYSIS_UP_TO_DATE); + is_bit_set!(is_fast_forward, MergeAnalysis::ANALYSIS_FASTFORWARD); + is_bit_set!(is_unborn, MergeAnalysis::ANALYSIS_UNBORN); +} + +bitflags! { + /// The user's stated preference for merges. + pub struct MergePreference: u32 { + /// No configuration was found that suggests a preferred behavior for + /// merge. + const NONE = raw::GIT_MERGE_PREFERENCE_NONE as u32; + /// There is a `merge.ff=false` configuration setting, suggesting that + /// the user does not want to allow a fast-forward merge. + const NO_FAST_FORWARD = raw::GIT_MERGE_PREFERENCE_NO_FASTFORWARD as u32; + /// There is a `merge.ff=only` configuration setting, suggesting that + /// the user only wants fast-forward merges. 
+ const FASTFORWARD_ONLY = raw::GIT_MERGE_PREFERENCE_FASTFORWARD_ONLY as u32; + } +} + +impl MergePreference { + is_bit_set!(is_none, MergePreference::NONE); + is_bit_set!(is_no_fast_forward, MergePreference::NO_FAST_FORWARD); + is_bit_set!(is_fastforward_only, MergePreference::FASTFORWARD_ONLY); +} + +#[cfg(test)] #[macro_use] mod test; +#[macro_use] mod panic; +mod call; +mod util; + +pub mod build; +pub mod cert; +pub mod string_array; +pub mod oid_array; +pub mod transport; + +mod blame; +mod blob; +mod branch; +mod buf; +mod commit; +mod config; +mod cred; +mod describe; +mod diff; +mod error; +mod index; +mod merge; +mod message; +mod note; +mod object; +mod odb; +mod oid; +mod packbuilder; +mod pathspec; +mod patch; +mod proxy_options; +mod reference; +mod reflog; +mod refspec; +mod remote; +mod remote_callbacks; +mod repo; +mod revspec; +mod revwalk; +mod signature; +mod status; +mod submodule; +mod stash; +mod tag; +mod time; +mod tree; +mod treebuilder; + +fn init() { + static INIT: Once = ONCE_INIT; + + INIT.call_once(|| { + openssl_env_init(); + }); + + raw::init(); +} + +#[cfg(all(unix, not(target_os = "macos"), not(target_os = "ios"), feature = "https"))] +fn openssl_env_init() { + extern crate openssl_probe; + + // Currently, libgit2 leverages OpenSSL for SSL support when cloning + // repositories over HTTPS. This means that we're picking up an OpenSSL + // dependency on non-Windows platforms (where it has its own HTTPS + // subsystem). As a result, we need to link to OpenSSL. + // + // Now actually *linking* to OpenSSL isn't so hard. We just need to make + // sure to use pkg-config to discover any relevant system dependencies for + // differences between distributions like CentOS and Ubuntu. The actual + // trickiness comes about when we start *distributing* the resulting + // binaries. Currently Cargo is distributed in binary form as nightlies, + // which means we're distributing a binary with OpenSSL linked in. + // + // For historical reasons, the Linux nightly builder is running a CentOS + // distribution in order to have as much ABI compatibility with other + // distributions as possible. Sadly, however, this compatibility does not + // extend to OpenSSL. Currently OpenSSL has two major versions, 0.9 and 1.0, + // which are incompatible (many ABI differences). The CentOS builder we + // build on has version 1.0, as do most distributions today. Some still have + // 0.9, however. This means that if we are to distribute the binaries built + // by the CentOS machine, we would only be compatible with OpenSSL 1.0 and + // we would fail to run (a dynamic linker error at runtime) on systems with + // only 9.8 installed (hopefully). + // + // But wait, the plot thickens! Apparently CentOS has dubbed their OpenSSL + // library as `libssl.so.10`, notably the `10` is included at the end. On + // the other hand Ubuntu, for example, only distributes `libssl.so`. This + // means that the binaries created at CentOS are hard-wired to probe for a + // file called `libssl.so.10` at runtime (using the LD_LIBRARY_PATH), which + // will not be found on ubuntu. The conclusion of this is that binaries + // built on CentOS cannot be distributed to Ubuntu and run successfully. + // + // There are a number of sneaky things we could do, including, but not + // limited to: + // + // 1. Create a shim program which runs "just before" cargo runs. 
The + // responsibility of this shim program would be to locate `libssl.so`, + // whatever it's called, on the current system, make sure there's a + // symlink *somewhere* called `libssl.so.10`, and then set up + // LD_LIBRARY_PATH and run the actual cargo. + // + // This approach definitely seems unconventional, and is borderline + // overkill for this problem. It's also dubious if we can find a + // libssl.so reliably on the target system. + // + // 2. Somehow re-work the CentOS installation so that the linked-against + // library is called libssl.so instead of libssl.so.10 + // + // The problem with this approach is that systems with 0.9 installed will + // start to silently fail, due to also having libraries called libssl.so + // (probably symlinked under a more appropriate version). + // + // 3. Compile Cargo against both OpenSSL 1.0 *and* OpenSSL 0.9, and + // distribute both. Also make sure that the linked-against name of the + // library is `libssl.so`. At runtime we determine which version is + // installed, and we then the appropriate binary. + // + // This approach clearly has drawbacks in terms of infrastructure and + // feasibility. + // + // 4. Build a nightly of Cargo for each distribution we'd like to support. + // You would then pick the appropriate Cargo nightly to install locally. + // + // So, with all this in mind, the decision was made to *statically* link + // OpenSSL. This solves any problem of relying on a downstream OpenSSL + // version being available. This does, however, open a can of worms related + // to security issues. It's generally a good idea to dynamically link + // OpenSSL as you'll get security updates over time without having to do + // anything (the system administrator will update the local openssl + // package). By statically linking, we're forfeiting this feature. + // + // The conclusion was made it is likely appropriate for the Cargo nightlies + // to statically link OpenSSL, but highly encourage distributions and + // packagers of Cargo to dynamically link OpenSSL. Packagers are targeting + // one system and are distributing to only that system, so none of the + // problems mentioned above would arise. + // + // In order to support this, a new package was made: openssl-static-sys. + // This package currently performs a fairly simple task: + // + // 1. Run pkg-config to discover where openssl is installed. + // 2. If openssl is installed in a nonstandard location, *and* static copies + // of the libraries are available, copy them to $OUT_DIR. + // + // This library will bring in libssl.a and libcrypto.a into the local build, + // allowing them to be picked up by this crate. This allows us to configure + // our own buildbots to have pkg-config point to these local pre-built + // copies of a static OpenSSL (with very few dependencies) while allowing + // most other builds of Cargo to naturally dynamically link OpenSSL. + // + // So in summary, if you're with me so far, we've statically linked OpenSSL + // to the Cargo binary (or any binary, for that matter) and we're ready to + // distribute it to *all* linux distributions. Remember that our original + // intent for openssl was for HTTPS support, which implies that we need some + // for of CA certificate store to validate certificates. This is normally + // installed in a standard system location. + // + // Unfortunately, as one might imagine, OpenSSL is configured for where this + // standard location is at *build time*, but it often varies widely + // per-system. 
Consequently, it was discovered that OpenSSL will respect the + // SSL_CERT_FILE and SSL_CERT_DIR environment variables in order to assist + // in discovering the location of this file (hurray!). + // + // So, finally getting to the point, this function solely exists to support + // our static builds of OpenSSL by probing for the "standard system + // location" of certificates and setting relevant environment variable to + // point to them. + // + // Ah, and as a final note, this is only a problem on Linux, not on OS X. On + // OS X the OpenSSL binaries are stable enough that we can just rely on + // dynamic linkage (plus they have some weird modifications to OpenSSL which + // means we wouldn't want to link statically). + openssl_probe::init_ssl_cert_env_vars(); +} + +#[cfg(any(windows, target_os = "macos", target_os = "ios", not(feature = "https")))] +fn openssl_env_init() {} + +unsafe fn opt_bytes<'a, T>(_anchor: &'a T, + c: *const libc::c_char) -> Option<&'a [u8]> { + if c.is_null() { + None + } else { + Some(CStr::from_ptr(c).to_bytes()) + } +} + +fn opt_cstr(o: Option) -> Result, Error> { + match o { + Some(s) => s.into_c_string().map(Some), + None => Ok(None) + } +} + +impl ObjectType { + /// Convert an object type to its string representation. + pub fn str(&self) -> &'static str { + unsafe { + let ptr = call!(raw::git_object_type2string(*self)) as *const _; + let data = CStr::from_ptr(ptr).to_bytes(); + str::from_utf8(data).unwrap() + } + } + + /// Determine if the given git_otype is a valid loose object type. + pub fn is_loose(&self) -> bool { + unsafe { (call!(raw::git_object_typeisloose(*self)) == 1) } + } + + /// Convert a raw git_otype to an ObjectType + pub fn from_raw(raw: raw::git_otype) -> Option { + match raw { + raw::GIT_OBJ_ANY => Some(ObjectType::Any), + raw::GIT_OBJ_COMMIT => Some(ObjectType::Commit), + raw::GIT_OBJ_TREE => Some(ObjectType::Tree), + raw::GIT_OBJ_BLOB => Some(ObjectType::Blob), + raw::GIT_OBJ_TAG => Some(ObjectType::Tag), + _ => None, + } + } + + /// Convert this kind into its raw representation + pub fn raw(&self) -> raw::git_otype { + call::convert(self) + } + + /// Convert a string object type representation to its object type. + pub fn from_str(s: &str) -> Option { + let raw = unsafe { call!(raw::git_object_string2type(CString::new(s).unwrap())) }; + ObjectType::from_raw(raw) + } +} + +impl fmt::Display for ObjectType { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + self.str().fmt(f) + } +} + +impl ReferenceType { + /// Convert an object type to its string representation. + pub fn str(&self) -> &'static str { + match self { + &ReferenceType::Oid => "oid", + &ReferenceType::Symbolic => "symbolic", + } + } + + /// Convert a raw git_ref_t to a ReferenceType. 
+ pub fn from_raw(raw: raw::git_ref_t) -> Option { + match raw { + raw::GIT_REF_OID => Some(ReferenceType::Oid), + raw::GIT_REF_SYMBOLIC => Some(ReferenceType::Symbolic), + _ => None, + } + } +} + +impl fmt::Display for ReferenceType { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + self.str().fmt(f) + } +} + +impl ConfigLevel { + /// Converts a raw configuration level to a ConfigLevel + pub fn from_raw(raw: raw::git_config_level_t) -> ConfigLevel { + match raw { + raw::GIT_CONFIG_LEVEL_PROGRAMDATA => ConfigLevel::ProgramData, + raw::GIT_CONFIG_LEVEL_SYSTEM => ConfigLevel::System, + raw::GIT_CONFIG_LEVEL_XDG => ConfigLevel::XDG, + raw::GIT_CONFIG_LEVEL_GLOBAL => ConfigLevel::Global, + raw::GIT_CONFIG_LEVEL_LOCAL => ConfigLevel::Local, + raw::GIT_CONFIG_LEVEL_APP => ConfigLevel::App, + raw::GIT_CONFIG_HIGHEST_LEVEL => ConfigLevel::Highest, + n => panic!("unknown config level: {}", n), + } + } +} + +bitflags! { + /// Status flags for a single file + /// + /// A combination of these values will be returned to indicate the status of + /// a file. Status compares the working directory, the index, and the + /// current HEAD of the repository. The `STATUS_INDEX_*` set of flags + /// represents the status of file in the index relative to the HEAD, and the + /// `STATUS_WT_*` set of flags represent the status of the file in the + /// working directory relative to the index. + pub struct Status: u32 { + #[allow(missing_docs)] + const CURRENT = raw::GIT_STATUS_CURRENT as u32; + + #[allow(missing_docs)] + const INDEX_NEW = raw::GIT_STATUS_INDEX_NEW as u32; + #[allow(missing_docs)] + const INDEX_MODIFIED = raw::GIT_STATUS_INDEX_MODIFIED as u32; + #[allow(missing_docs)] + const INDEX_DELETED = raw::GIT_STATUS_INDEX_DELETED as u32; + #[allow(missing_docs)] + const INDEX_RENAMED = raw::GIT_STATUS_INDEX_RENAMED as u32; + #[allow(missing_docs)] + const INDEX_TYPECHANGE = raw::GIT_STATUS_INDEX_TYPECHANGE as u32; + + #[allow(missing_docs)] + const WT_NEW = raw::GIT_STATUS_WT_NEW as u32; + #[allow(missing_docs)] + const WT_MODIFIED = raw::GIT_STATUS_WT_MODIFIED as u32; + #[allow(missing_docs)] + const WT_DELETED = raw::GIT_STATUS_WT_DELETED as u32; + #[allow(missing_docs)] + const WT_TYPECHANGE = raw::GIT_STATUS_WT_TYPECHANGE as u32; + #[allow(missing_docs)] + const WT_RENAMED = raw::GIT_STATUS_WT_RENAMED as u32; + + #[allow(missing_docs)] + const IGNORED = raw::GIT_STATUS_IGNORED as u32; + #[allow(missing_docs)] + const CONFLICTED = raw::GIT_STATUS_CONFLICTED as u32; + } +} + +impl Status { + is_bit_set!(is_index_new, Status::INDEX_NEW); + is_bit_set!(is_index_modified, Status::INDEX_MODIFIED); + is_bit_set!(is_index_deleted, Status::INDEX_DELETED); + is_bit_set!(is_index_renamed, Status::INDEX_RENAMED); + is_bit_set!(is_index_typechange, Status::INDEX_TYPECHANGE); + is_bit_set!(is_wt_new, Status::WT_NEW); + is_bit_set!(is_wt_modified, Status::WT_MODIFIED); + is_bit_set!(is_wt_deleted, Status::WT_DELETED); + is_bit_set!(is_wt_typechange, Status::WT_TYPECHANGE); + is_bit_set!(is_wt_renamed, Status::WT_RENAMED); + is_bit_set!(is_ignored, Status::IGNORED); + is_bit_set!(is_conflicted, Status::CONFLICTED); +} + +bitflags! 
{ + /// Mode options for RepositoryInitOptions + pub struct RepositoryInitMode: u32 { + /// Use permissions configured by umask - the default + const SHARED_UMASK = raw::GIT_REPOSITORY_INIT_SHARED_UMASK as u32; + /// Use `--shared=group` behavior, chmod'ing the new repo to be + /// group writable and \"g+sx\" for sticky group assignment + const SHARED_GROUP = raw::GIT_REPOSITORY_INIT_SHARED_GROUP as u32; + /// Use `--shared=all` behavior, adding world readability. + const SHARED_ALL = raw::GIT_REPOSITORY_INIT_SHARED_ALL as u32; + } +} + +impl RepositoryInitMode { + is_bit_set!(is_shared_umask, RepositoryInitMode::SHARED_UMASK); + is_bit_set!(is_shared_group, RepositoryInitMode::SHARED_GROUP); + is_bit_set!(is_shared_all, RepositoryInitMode::SHARED_ALL); +} + +/// What type of change is described by a `DiffDelta`? +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub enum Delta { + /// No changes + Unmodified, + /// Entry does not exist in old version + Added, + /// Entry does not exist in new version + Deleted, + /// Entry content changed between old and new + Modified, + /// Entry was renamed between old and new + Renamed, + /// Entry was copied from another old entry + Copied, + /// Entry is ignored item in workdir + Ignored, + /// Entry is untracked item in workdir + Untracked, + /// Type of entry changed between old and new + Typechange, + /// Entry is unreadable + Unreadable, + /// Entry in the index is conflicted + Conflicted, +} + +bitflags! { + /// Return codes for submodule status. + /// + /// A combination of these flags will be returned to describe the status of a + /// submodule. Depending on the "ignore" property of the submodule, some of + /// the flags may never be returned because they indicate changes that are + /// supposed to be ignored. + /// + /// Submodule info is contained in 4 places: the HEAD tree, the index, config + /// files (both .git/config and .gitmodules), and the working directory. Any + /// or all of those places might be missing information about the submodule + /// depending on what state the repo is in. We consider all four places to + /// build the combination of status flags. + /// + /// There are four values that are not really status, but give basic info + /// about what sources of submodule data are available. These will be + /// returned even if ignore is set to "ALL". + /// + /// * IN_HEAD - superproject head contains submodule + /// * IN_INDEX - superproject index contains submodule + /// * IN_CONFIG - superproject gitmodules has submodule + /// * IN_WD - superproject workdir has submodule + /// + /// The following values will be returned so long as ignore is not "ALL". + /// + /// * INDEX_ADDED - in index, not in head + /// * INDEX_DELETED - in head, not in index + /// * INDEX_MODIFIED - index and head don't match + /// * WD_UNINITIALIZED - workdir contains empty directory + /// * WD_ADDED - in workdir, not index + /// * WD_DELETED - in index, not workdir + /// * WD_MODIFIED - index and workdir head don't match + /// + /// The following can only be returned if ignore is "NONE" or "UNTRACKED". + /// + /// * WD_INDEX_MODIFIED - submodule workdir index is dirty + /// * WD_WD_MODIFIED - submodule workdir has modified files + /// + /// Lastly, the following will only be returned for ignore "NONE". 
+ /// + /// * WD_UNTRACKED - wd contains untracked files + pub struct SubmoduleStatus: u32 { + #[allow(missing_docs)] + const IN_HEAD = raw::GIT_SUBMODULE_STATUS_IN_HEAD as u32; + #[allow(missing_docs)] + const IN_INDEX = raw::GIT_SUBMODULE_STATUS_IN_INDEX as u32; + #[allow(missing_docs)] + const IN_CONFIG = raw::GIT_SUBMODULE_STATUS_IN_CONFIG as u32; + #[allow(missing_docs)] + const IN_WD = raw::GIT_SUBMODULE_STATUS_IN_WD as u32; + #[allow(missing_docs)] + const INDEX_ADDED = raw::GIT_SUBMODULE_STATUS_INDEX_ADDED as u32; + #[allow(missing_docs)] + const INDEX_DELETED = raw::GIT_SUBMODULE_STATUS_INDEX_DELETED as u32; + #[allow(missing_docs)] + const INDEX_MODIFIED = raw::GIT_SUBMODULE_STATUS_INDEX_MODIFIED as u32; + #[allow(missing_docs)] + const WD_UNINITIALIZED = + raw::GIT_SUBMODULE_STATUS_WD_UNINITIALIZED as u32; + #[allow(missing_docs)] + const WD_ADDED = raw::GIT_SUBMODULE_STATUS_WD_ADDED as u32; + #[allow(missing_docs)] + const WD_DELETED = raw::GIT_SUBMODULE_STATUS_WD_DELETED as u32; + #[allow(missing_docs)] + const WD_MODIFIED = raw::GIT_SUBMODULE_STATUS_WD_MODIFIED as u32; + #[allow(missing_docs)] + const WD_INDEX_MODIFIED = + raw::GIT_SUBMODULE_STATUS_WD_INDEX_MODIFIED as u32; + #[allow(missing_docs)] + const WD_WD_MODIFIED = raw::GIT_SUBMODULE_STATUS_WD_WD_MODIFIED as u32; + #[allow(missing_docs)] + const WD_UNTRACKED = raw::GIT_SUBMODULE_STATUS_WD_UNTRACKED as u32; + } +} + +impl SubmoduleStatus { + is_bit_set!(is_in_head, SubmoduleStatus::IN_HEAD); + is_bit_set!(is_in_index, SubmoduleStatus::IN_INDEX); + is_bit_set!(is_in_config, SubmoduleStatus::IN_CONFIG); + is_bit_set!(is_in_wd, SubmoduleStatus::IN_WD); + is_bit_set!(is_index_added, SubmoduleStatus::INDEX_ADDED); + is_bit_set!(is_index_deleted, SubmoduleStatus::INDEX_DELETED); + is_bit_set!(is_index_modified, SubmoduleStatus::INDEX_MODIFIED); + is_bit_set!(is_wd_uninitialized, SubmoduleStatus::WD_UNINITIALIZED); + is_bit_set!(is_wd_added, SubmoduleStatus::WD_ADDED); + is_bit_set!(is_wd_deleted, SubmoduleStatus::WD_DELETED); + is_bit_set!(is_wd_modified, SubmoduleStatus::WD_MODIFIED); + is_bit_set!(is_wd_wd_modified, SubmoduleStatus::WD_WD_MODIFIED); + is_bit_set!(is_wd_untracked, SubmoduleStatus::WD_UNTRACKED); +} + +/// Submodule ignore values +/// +/// These values represent settings for the `submodule.$name.ignore` +/// configuration value which says how deeply to look at the working +/// directory when getting the submodule status. +pub enum SubmoduleIgnore { + /// Use the submodule's configuration + Unspecified, + /// Any change or untracked file is considered dirty + None, + /// Only dirty if tracked files have changed + Untracked, + /// Only dirty if HEAD has moved + Dirty, + /// Never dirty + All, +} + +bitflags! { + /// ... + pub struct PathspecFlags: u32 { + /// Use the default pathspec matching configuration. + const DEFAULT = raw::GIT_PATHSPEC_DEFAULT as u32; + /// Force matching to ignore case, otherwise matching will use native + /// case sensitivity fo the platform filesystem. + const IGNORE_CASE = raw::GIT_PATHSPEC_IGNORE_CASE as u32; + /// Force case sensitive matches, otherwise match will use the native + /// case sensitivity of the platform filesystem. + const USE_CASE = raw::GIT_PATHSPEC_USE_CASE as u32; + /// Disable glob patterns and just use simple string comparison for + /// matching. + const NO_GLOB = raw::GIT_PATHSPEC_NO_GLOB as u32; + /// Means that match functions return the error code `NotFound` if no + /// matches are found. By default no matches is a success. 
+ const NO_MATCH_ERROR = raw::GIT_PATHSPEC_NO_MATCH_ERROR as u32; + /// Means that the list returned should track which patterns matched + /// which files so that at the end of the match we can identify patterns + /// that did not match any files. + const FIND_FAILURES = raw::GIT_PATHSPEC_FIND_FAILURES as u32; + /// Means that the list returned does not need to keep the actual + /// matching filenames. Use this to just test if there were any matches + /// at all or in combination with `PATHSPEC_FAILURES` to validate a + /// pathspec. + const FAILURES_ONLY = raw::GIT_PATHSPEC_FAILURES_ONLY as u32; + } +} + +impl PathspecFlags { + is_bit_set!(is_default, PathspecFlags::DEFAULT); + is_bit_set!(is_ignore_case, PathspecFlags::IGNORE_CASE); + is_bit_set!(is_use_case, PathspecFlags::USE_CASE); + is_bit_set!(is_no_glob, PathspecFlags::NO_GLOB); + is_bit_set!(is_no_match_error, PathspecFlags::NO_MATCH_ERROR); + is_bit_set!(is_find_failures, PathspecFlags::FIND_FAILURES); + is_bit_set!(is_failures_only, PathspecFlags::FAILURES_ONLY); +} + +impl Default for PathspecFlags { + fn default() -> Self { + PathspecFlags::DEFAULT + } +} + +bitflags! { + /// Types of notifications emitted from checkouts. + pub struct CheckoutNotificationType: u32 { + /// Notification about a conflict. + const CONFLICT = raw::GIT_CHECKOUT_NOTIFY_CONFLICT as u32; + /// Notification about a dirty file. + const DIRTY = raw::GIT_CHECKOUT_NOTIFY_DIRTY as u32; + /// Notification about an updated file. + const UPDATED = raw::GIT_CHECKOUT_NOTIFY_UPDATED as u32; + /// Notification about an untracked file. + const UNTRACKED = raw::GIT_CHECKOUT_NOTIFY_UNTRACKED as u32; + /// Notification about an ignored file. + const IGNORED = raw::GIT_CHECKOUT_NOTIFY_IGNORED as u32; + } +} + +impl CheckoutNotificationType { + is_bit_set!(is_conflict, CheckoutNotificationType::CONFLICT); + is_bit_set!(is_dirty, CheckoutNotificationType::DIRTY); + is_bit_set!(is_updated, CheckoutNotificationType::UPDATED); + is_bit_set!(is_untracked, CheckoutNotificationType::UNTRACKED); + is_bit_set!(is_ignored, CheckoutNotificationType::IGNORED); +} + +/// Possible output formats for diff data +#[derive(Copy, Clone)] +pub enum DiffFormat { + /// full git diff + Patch, + /// just the headers of the patch + PatchHeader, + /// like git diff --raw + Raw, + /// like git diff --name-only + NameOnly, + /// like git diff --name-status + NameStatus, +} + +bitflags! { + /// Formatting options for diff stats + pub struct DiffStatsFormat: raw::git_diff_stats_format_t { + /// Don't generate any stats + const NONE = raw::GIT_DIFF_STATS_NONE; + /// Equivalent of `--stat` in git + const FULL = raw::GIT_DIFF_STATS_FULL; + /// Equivalent of `--shortstat` in git + const SHORT = raw::GIT_DIFF_STATS_SHORT; + /// Equivalent of `--numstat` in git + const NUMBER = raw::GIT_DIFF_STATS_NUMBER; + /// Extended header information such as creations, renames and mode + /// changes, equivalent of `--summary` in git + const INCLUDE_SUMMARY = raw::GIT_DIFF_STATS_INCLUDE_SUMMARY; + } +} + +impl DiffStatsFormat { + is_bit_set!(is_none, DiffStatsFormat::NONE); + is_bit_set!(is_full, DiffStatsFormat::FULL); + is_bit_set!(is_short, DiffStatsFormat::SHORT); + is_bit_set!(is_number, DiffStatsFormat::NUMBER); + is_bit_set!(is_include_summary, DiffStatsFormat::INCLUDE_SUMMARY); +} + +/// Automatic tag following options. 
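+///
+/// These values are consumed by the fetch machinery elsewhere in this
+/// crate. A minimal sketch, assuming the `FetchOptions::download_tags`
+/// builder method from the remote module:
+///
+/// ```no_run
+/// let mut opts = git2::FetchOptions::new();
+/// // Ask the server for all tags, regardless of the refspecs being fetched.
+/// opts.download_tags(git2::AutotagOption::All);
+/// ```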
+pub enum AutotagOption { + /// Use the setting from the remote's configuration + Unspecified, + /// Ask the server for tags pointing to objects we're already downloading + Auto, + /// Don't ask for any tags beyond the refspecs + None, + /// Ask for all the tags + All, +} + +/// Configuration for how pruning is done on a fetch +pub enum FetchPrune { + /// Use the setting from the configuration + Unspecified, + /// Force pruning on + On, + /// Force pruning off + Off, +} + +#[allow(missing_docs)] +#[derive(Debug)] +pub enum StashApplyProgress { + /// None + None, + /// Loading the stashed data from the object database + LoadingStash, + /// The stored index is being analyzed + AnalyzeIndex, + /// The modified files are being analyzed + AnalyzeModified, + /// The untracked and ignored files are being analyzed + AnalyzeUntracked, + /// The untracked files are being written to disk + CheckoutUntracked, + /// The modified files are being written to disk + CheckoutModified, + /// The stash was applied successfully + Done, +} + +bitflags! { + #[allow(missing_docs)] + pub struct StashApplyFlags: u32 { + #[allow(missing_docs)] + const DEFAULT = raw::GIT_STASH_APPLY_DEFAULT as u32; + /// Try to reinstate not only the working tree's changes, + /// but also the index's changes. + const REINSTATE_INDEX = raw::GIT_STASH_APPLY_REINSTATE_INDEX as u32; + } +} + +impl StashApplyFlags { + is_bit_set!(is_default, StashApplyFlags::DEFAULT); + is_bit_set!(is_reinstate_index, StashApplyFlags::REINSTATE_INDEX); +} + +impl Default for StashApplyFlags { + fn default() -> Self { + StashApplyFlags::DEFAULT + } +} + +bitflags! { + #[allow(missing_docs)] + pub struct StashFlags: u32 { + #[allow(missing_docs)] + const DEFAULT = raw::GIT_STASH_DEFAULT as u32; + /// All changes already added to the index are left intact in + /// the working directory + const KEEP_INDEX = raw::GIT_STASH_KEEP_INDEX as u32; + /// All untracked files are also stashed and then cleaned up + /// from the working directory + const INCLUDE_UNTRACKED = raw::GIT_STASH_INCLUDE_UNTRACKED as u32; + /// All ignored files are also stashed and then cleaned up from + /// the working directory + const INCLUDE_IGNORED = raw::GIT_STASH_INCLUDE_IGNORED as u32; + } +} + +impl StashFlags { + is_bit_set!(is_default, StashFlags::DEFAULT); + is_bit_set!(is_keep_index, StashFlags::KEEP_INDEX); + is_bit_set!(is_include_untracked, StashFlags::INCLUDE_UNTRACKED); + is_bit_set!(is_include_ignored, StashFlags::INCLUDE_IGNORED); +} + +impl Default for StashFlags { + fn default() -> Self { + StashFlags::DEFAULT + } +} + +#[cfg(test)] +mod tests { + use super::ObjectType; + + #[test] + fn convert() { + assert_eq!(ObjectType::Blob.str(), "blob"); + assert_eq!(ObjectType::from_str("blob"), Some(ObjectType::Blob)); + assert!(ObjectType::Blob.is_loose()); + } + +} diff --git a/git2/src/merge.rs b/git2/src/merge.rs new file mode 100644 index 000000000..15089403e --- /dev/null +++ b/git2/src/merge.rs @@ -0,0 +1,160 @@ +use std::marker; +use std::mem; +use libc::c_uint; + +use {raw, Oid, Commit, FileFavor}; +use util::Binding; +use call::Convert; + +/// A structure to represent an annotated commit, the input to merge and rebase. +/// +/// An annotated commit contains information about how it was looked up, which +/// may be useful for functions like merge or rebase to provide context to the +/// operation. +pub struct AnnotatedCommit<'repo> { + raw: *mut raw::git_annotated_commit, + _marker: marker::PhantomData>, +} + +/// Options to specify when merging. 
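+///
+/// The setters below follow a builder style and may be chained. A minimal
+/// sketch of configuring a merge that favors the "ours" side of any
+/// conflicting region:
+///
+/// ```no_run
+/// use git2::{FileFavor, MergeOptions};
+///
+/// let mut opts = MergeOptions::new();
+/// opts.file_favor(FileFavor::Ours)
+///     .find_renames(true)
+///     .rename_threshold(60);
+/// ```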
+pub struct MergeOptions { + raw: raw::git_merge_options, +} + +impl<'repo> AnnotatedCommit<'repo> { + /// Gets the commit ID that the given git_annotated_commit refers to + pub fn id(&self) -> Oid { + unsafe { Binding::from_raw(raw::git_annotated_commit_id(self.raw)) } + } +} + +impl Default for MergeOptions { + fn default() -> Self { + Self::new() + } +} + +impl MergeOptions { + /// Creates a default set of merge options. + pub fn new() -> MergeOptions { + let mut opts = MergeOptions { + raw: unsafe { mem::zeroed() }, + }; + assert_eq!(unsafe { + raw::git_merge_init_options(&mut opts.raw, 1) + }, 0); + opts + } + + /// Detect file renames + pub fn find_renames(&mut self, find: bool) -> &mut MergeOptions { + if find { + self.raw.flags |= raw::GIT_MERGE_FIND_RENAMES; + } else { + self.raw.flags &= !raw::GIT_MERGE_FIND_RENAMES; + } + self + } + + /// Similarity to consider a file renamed (default 50) + pub fn rename_threshold(&mut self, thresh: u32) -> &mut MergeOptions { + self.raw.rename_threshold = thresh; + self + } + + /// Maximum similarity sources to examine for renames (default 200). + /// If the number of rename candidates (add / delete pairs) is greater + /// than this value, inexact rename detection is aborted. This setting + /// overrides the `merge.renameLimit` configuration value. + pub fn target_limit(&mut self, limit: u32) -> &mut MergeOptions { + self.raw.target_limit = limit as c_uint; + self + } + + /// Maximum number of times to merge common ancestors to build a + /// virtual merge base when faced with criss-cross merges. When + /// this limit is reached, the next ancestor will simply be used + /// instead of attempting to merge it. The default is unlimited. + pub fn recursion_limit(&mut self, limit: u32) -> &mut MergeOptions { + self.raw.recursion_limit = limit as c_uint; + self + } + + /// Specify a side to favor for resolving conflicts + pub fn file_favor(&mut self, favor: FileFavor) -> &mut MergeOptions { + self.raw.file_favor = favor.convert(); + self + } + + fn flag(&mut self, opt: raw::git_merge_file_flag_t, val: bool) -> &mut MergeOptions { + if val { + self.raw.file_flags |= opt; + } else { + self.raw.file_flags &= !opt; + } + self + } + + /// Create standard conflicted merge files + pub fn standard_style(&mut self, standard: bool) -> &mut MergeOptions { + self.flag(raw::GIT_MERGE_FILE_STYLE_MERGE, standard) + } + + /// Create diff3-style file + pub fn diff3_style(&mut self, diff3: bool) -> &mut MergeOptions { + self.flag(raw::GIT_MERGE_FILE_STYLE_DIFF3, diff3) + } + + /// Condense non-alphanumeric regions for simplified diff file + pub fn simplify_alnum(&mut self, simplify: bool) -> &mut MergeOptions { + self.flag(raw::GIT_MERGE_FILE_SIMPLIFY_ALNUM, simplify) + } + + /// Ignore all whitespace + pub fn ignore_whitespace(&mut self, ignore: bool) -> &mut MergeOptions { + self.flag(raw::GIT_MERGE_FILE_IGNORE_WHITESPACE, ignore) + } + + /// Ignore changes in amount of whitespace + pub fn ignore_whitespace_change(&mut self, ignore: bool) -> &mut MergeOptions { + self.flag(raw::GIT_MERGE_FILE_IGNORE_WHITESPACE_CHANGE, ignore) + } + + /// Ignore whitespace at end of line + pub fn ignore_whitespace_eol(&mut self, ignore: bool) -> &mut MergeOptions { + self.flag(raw::GIT_MERGE_FILE_IGNORE_WHITESPACE_EOL, ignore) + } + + /// Use the "patience diff" algorithm + pub fn patience(&mut self, patience: bool) -> &mut MergeOptions { + self.flag(raw::GIT_MERGE_FILE_DIFF_PATIENCE, patience) + } + + /// Take extra time to find minimal diff + pub fn minimal(&mut self, minimal: bool) 
-> &mut MergeOptions { + self.flag(raw::GIT_MERGE_FILE_DIFF_MINIMAL, minimal) + } + + /// Acquire a pointer to the underlying raw options. + pub unsafe fn raw(&self) -> *const raw::git_merge_options { + &self.raw as *const _ + } +} + +impl<'repo> Binding for AnnotatedCommit<'repo> { + type Raw = *mut raw::git_annotated_commit; + unsafe fn from_raw(raw: *mut raw::git_annotated_commit) + -> AnnotatedCommit<'repo> { + AnnotatedCommit { + raw: raw, + _marker: marker::PhantomData, + } + } + fn raw(&self) -> *mut raw::git_annotated_commit { self.raw } +} + +impl<'repo> Drop for AnnotatedCommit<'repo> { + fn drop(&mut self) { + unsafe { raw::git_annotated_commit_free(self.raw) } + } +} diff --git a/git2/src/message.rs b/git2/src/message.rs new file mode 100644 index 000000000..96cdd589c --- /dev/null +++ b/git2/src/message.rs @@ -0,0 +1,52 @@ +use std::ffi::CString; + +use libc::{c_char, c_int}; + +use {raw, Buf, Error, IntoCString}; +use util::Binding; + +/// Clean up a message, removing extraneous whitespace, and ensure that the +/// message ends with a newline. If `comment_char` is `Some`, also remove comment +/// lines starting with that character. +pub fn message_prettify(message: T, comment_char: Option) + -> Result { + _message_prettify(try!(message.into_c_string()), comment_char) +} + +fn _message_prettify(message: CString, comment_char: Option) + -> Result { + let ret = Buf::new(); + unsafe { + try_call!(raw::git_message_prettify(ret.raw(), message, + comment_char.is_some() as c_int, + comment_char.unwrap_or(0) as c_char)); + } + Ok(ret.as_str().unwrap().to_string()) +} + +/// The default comment character for `message_prettify` ('#') +pub const DEFAULT_COMMENT_CHAR: Option = Some(b'#'); + +#[cfg(test)] +mod tests { + use {message_prettify, DEFAULT_COMMENT_CHAR}; + + #[test] + fn prettify() { + // This does not attempt to duplicate the extensive tests for + // git_message_prettify in libgit2, just a few representative values to + // make sure the interface works as expected. + assert_eq!(message_prettify("1\n\n\n2", None).unwrap(), + "1\n\n2\n"); + assert_eq!(message_prettify("1\n\n\n2\n\n\n3", None).unwrap(), + "1\n\n2\n\n3\n"); + assert_eq!(message_prettify("1\n# comment\n# more", None).unwrap(), + "1\n# comment\n# more\n"); + assert_eq!(message_prettify("1\n# comment\n# more", + DEFAULT_COMMENT_CHAR).unwrap(), + "1\n"); + assert_eq!(message_prettify("1\n; comment\n; more", + Some(';' as u8)).unwrap(), + "1\n"); + } +} diff --git a/git2/src/note.rs b/git2/src/note.rs new file mode 100644 index 000000000..5295e5988 --- /dev/null +++ b/git2/src/note.rs @@ -0,0 +1,130 @@ +use std::marker; +use std::str; + +use {raw, signature, Signature, Oid, Repository, Error}; +use util::Binding; + +/// A structure representing a [note][note] in git. +/// +/// [note]: http://git-scm.com/blog/2010/08/25/notes.html +pub struct Note<'repo> { + raw: *mut raw::git_note, + + // Hmm, the current libgit2 version does not have this inside of it, but + // perhaps it's a good idea to keep it around? Can always remove it later I + // suppose... + _marker: marker::PhantomData<&'repo Repository>, +} + +/// An iterator over all of the notes within a repository. 
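+///
+/// Each item yielded is a pair of ids: the note object itself and the
+/// annotated object it is attached to. A minimal sketch, assuming a
+/// repository that already contains notes:
+///
+/// ```no_run
+/// let repo = git2::Repository::open(".").unwrap();
+/// for item in repo.notes(None).unwrap() {
+///     let (note_id, annotated_id) = item.unwrap();
+///     println!("note {} annotates {}", note_id, annotated_id);
+/// }
+/// ```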
+pub struct Notes<'repo> { + raw: *mut raw::git_note_iterator, + _marker: marker::PhantomData<&'repo Repository>, +} + +impl<'repo> Note<'repo> { + /// Get the note author + pub fn author(&self) -> Signature { + unsafe { + signature::from_raw_const(self, raw::git_note_author(&*self.raw)) + } + } + + /// Get the note committer + pub fn committer(&self) -> Signature { + unsafe { + signature::from_raw_const(self, raw::git_note_committer(&*self.raw)) + } + } + + /// Get the note message, in bytes. + pub fn message_bytes(&self) -> &[u8] { + unsafe { ::opt_bytes(self, raw::git_note_message(&*self.raw)).unwrap() } + } + + /// Get the note message as a string, returning `None` if it is not UTF-8. + pub fn message(&self) -> Option<&str> { + str::from_utf8(self.message_bytes()).ok() + } + + /// Get the note object's id + pub fn id(&self) -> Oid { + unsafe { Binding::from_raw(raw::git_note_id(&*self.raw)) } + } +} + +impl<'repo> Binding for Note<'repo> { + type Raw = *mut raw::git_note; + unsafe fn from_raw(raw: *mut raw::git_note) -> Note<'repo> { + Note { raw: raw, _marker: marker::PhantomData, } + } + fn raw(&self) -> *mut raw::git_note { self.raw } +} + +impl<'repo> ::std::fmt::Debug for Note<'repo> { + fn fmt(&self, f: &mut ::std::fmt::Formatter) -> Result<(), ::std::fmt::Error> { + f.debug_struct("Note").field("id", &self.id()).finish() + } +} + +impl<'repo> Drop for Note<'repo> { + fn drop(&mut self) { + unsafe { raw::git_note_free(self.raw); } + } +} + +impl<'repo> Binding for Notes<'repo> { + type Raw = *mut raw::git_note_iterator; + unsafe fn from_raw(raw: *mut raw::git_note_iterator) -> Notes<'repo> { + Notes { raw: raw, _marker: marker::PhantomData, } + } + fn raw(&self) -> *mut raw::git_note_iterator { self.raw } +} + +impl<'repo> Iterator for Notes<'repo> { + type Item = Result<(Oid, Oid), Error>; + fn next(&mut self) -> Option> { + let mut note_id = raw::git_oid { id: [0; raw::GIT_OID_RAWSZ] }; + let mut annotated_id = note_id; + unsafe { + try_call_iter!(raw::git_note_next(&mut note_id, &mut annotated_id, + self.raw)); + Some(Ok((Binding::from_raw(¬e_id as *const _), + Binding::from_raw(&annotated_id as *const _)))) + } + } +} + +impl<'repo> Drop for Notes<'repo> { + fn drop(&mut self) { + unsafe { raw::git_note_iterator_free(self.raw); } + } +} + +#[cfg(test)] +mod tests { + #[test] + fn smoke() { + let (_td, repo) = ::test::repo_init(); + assert!(repo.notes(None).is_err()); + + let sig = repo.signature().unwrap(); + let head = repo.head().unwrap().target().unwrap(); + let note = repo.note(&sig, &sig, None, head, "foo", false).unwrap(); + assert_eq!(repo.notes(None).unwrap().count(), 1); + + let note_obj = repo.find_note(None, head).unwrap(); + assert_eq!(note_obj.id(), note); + assert_eq!(note_obj.message(), Some("foo")); + + let (a, b) = repo.notes(None).unwrap().next().unwrap().unwrap(); + assert_eq!(a, note); + assert_eq!(b, head); + + assert_eq!(repo.note_default_ref().unwrap(), "refs/notes/commits"); + + assert_eq!(sig.name(), note_obj.author().name()); + assert_eq!(sig.name(), note_obj.committer().name()); + assert!(sig.when() == note_obj.committer().when()); + } +} diff --git a/git2/src/object.rs b/git2/src/object.rs new file mode 100644 index 000000000..725e4c77f --- /dev/null +++ b/git2/src/object.rs @@ -0,0 +1,234 @@ +use std::marker; +use std::mem; +use std::ptr; + +use {raw, Oid, ObjectType, Error, Buf, Commit, Tag, Blob, Tree, Repository}; +use {Describe, DescribeOptions}; +use util::Binding; + +/// A structure to represent a git [object][1] +/// +/// [1]: 
http://git-scm.com/book/en/Git-Internals-Git-Objects +pub struct Object<'repo> { + raw: *mut raw::git_object, + _marker: marker::PhantomData<&'repo Repository>, +} + +impl<'repo> Object<'repo> { + /// Get the id (SHA1) of a repository object + pub fn id(&self) -> Oid { + unsafe { + Binding::from_raw(raw::git_object_id(&*self.raw)) + } + } + + /// Get the object type of an object. + /// + /// If the type is unknown, then `None` is returned. + pub fn kind(&self) -> Option { + ObjectType::from_raw(unsafe { raw::git_object_type(&*self.raw) }) + } + + /// Recursively peel an object until an object of the specified type is met. + /// + /// If you pass `Any` as the target type, then the object will be + /// peeled until the type changes (e.g. a tag will be chased until the + /// referenced object is no longer a tag). + pub fn peel(&self, kind: ObjectType) -> Result, Error> { + let mut raw = ptr::null_mut(); + unsafe { + try_call!(raw::git_object_peel(&mut raw, &*self.raw(), kind)); + Ok(Binding::from_raw(raw)) + } + } + + /// Recursively peel an object until a blob is found + pub fn peel_to_blob(&self) -> Result, Error> { + self.peel(ObjectType::Blob).map(|o| o.cast_or_panic(ObjectType::Blob)) + } + + /// Recursively peel an object until a commit is found + pub fn peel_to_commit(&self) -> Result, Error> { + self.peel(ObjectType::Commit).map(|o| o.cast_or_panic(ObjectType::Commit)) + } + + /// Recursively peel an object until a tag is found + pub fn peel_to_tag(&self) -> Result, Error> { + self.peel(ObjectType::Tag).map(|o| o.cast_or_panic(ObjectType::Tag)) + } + + /// Recursively peel an object until a tree is found + pub fn peel_to_tree(&self) -> Result, Error> { + self.peel(ObjectType::Tree).map(|o| o.cast_or_panic(ObjectType::Tree)) + } + + /// Get a short abbreviated OID string for the object + /// + /// This starts at the "core.abbrev" length (default 7 characters) and + /// iteratively extends to a longer string if that length is ambiguous. The + /// result will be unambiguous (at least until new objects are added to the + /// repository). + pub fn short_id(&self) -> Result { + unsafe { + let buf = Buf::new(); + try_call!(raw::git_object_short_id(buf.raw(), &*self.raw())); + Ok(buf) + } + } + + /// Attempt to view this object as a commit. + /// + /// Returns `None` if the object is not actually a commit. + pub fn as_commit(&self) -> Option<&Commit<'repo>> { + self.cast(ObjectType::Commit) + } + + /// Attempt to consume this object and return a commit. + /// + /// Returns `Err(self)` if this object is not actually a commit. + pub fn into_commit(self) -> Result, Object<'repo>> { + self.cast_into(ObjectType::Commit) + } + + /// Attempt to view this object as a tag. + /// + /// Returns `None` if the object is not actually a tag. + pub fn as_tag(&self) -> Option<&Tag<'repo>> { + self.cast(ObjectType::Tag) + } + + /// Attempt to consume this object and return a tag. + /// + /// Returns `Err(self)` if this object is not actually a tag. + pub fn into_tag(self) -> Result, Object<'repo>> { + self.cast_into(ObjectType::Tag) + } + + /// Attempt to view this object as a tree. + /// + /// Returns `None` if the object is not actually a tree. + pub fn as_tree(&self) -> Option<&Tree<'repo>> { + self.cast(ObjectType::Tree) + } + + /// Attempt to consume this object and return a tree. + /// + /// Returns `Err(self)` if this object is not actually a tree. + pub fn into_tree(self) -> Result, Object<'repo>> { + self.cast_into(ObjectType::Tree) + } + + /// Attempt to view this object as a blob. 
+ /// + /// Returns `None` if the object is not actually a blob. + pub fn as_blob(&self) -> Option<&Blob<'repo>> { + self.cast(ObjectType::Blob) + } + + /// Attempt to consume this object and return a blob. + /// + /// Returns `Err(self)` if this object is not actually a blob. + pub fn into_blob(self) -> Result, Object<'repo>> { + self.cast_into(ObjectType::Blob) + } + + /// Describes a commit + /// + /// Performs a describe operation on this commitish object. + pub fn describe(&self, opts: &DescribeOptions) + -> Result { + let mut ret = ptr::null_mut(); + unsafe { + try_call!(raw::git_describe_commit(&mut ret, self.raw, opts.raw())); + Ok(Binding::from_raw(ret)) + } + } + + fn cast(&self, kind: ObjectType) -> Option<&T> { + assert_eq!(mem::size_of::(), mem::size_of::()); + if self.kind() == Some(kind) { + unsafe { Some(&*(self as *const _ as *const T)) } + } else { + None + } + } + + fn cast_into(self, kind: ObjectType) -> Result> { + assert_eq!(mem::size_of_val(&self), mem::size_of::()); + if self.kind() == Some(kind) { + Ok(unsafe { + let other = ptr::read(&self as *const _ as *const T); + mem::forget(self); + other + }) + } else { + Err(self) + } + } +} + +/// This trait is useful to export cast_or_panic into crate but not outside +pub trait CastOrPanic { + fn cast_or_panic(self, kind: ObjectType) -> T; +} + +impl<'repo> CastOrPanic for Object<'repo> { + fn cast_or_panic(self, kind: ObjectType) -> T { + assert_eq!(mem::size_of_val(&self), mem::size_of::()); + if self.kind() == Some(kind) { + unsafe { + let other = ptr::read(&self as *const _ as *const T); + mem::forget(self); + other + } + } else { + let buf; + let akind = match self.kind() { + Some(akind) => akind.str(), + None => { + buf = format!("unknown ({})", unsafe { raw::git_object_type(&*self.raw) }); + &buf + } + }; + panic!("Expected object {} to be {} but it is {}", self.id(), kind.str(), akind) + } + } +} + +impl<'repo> Clone for Object<'repo> { + fn clone(&self) -> Object<'repo> { + let mut raw = ptr::null_mut(); + unsafe { + let rc = raw::git_object_dup(&mut raw, self.raw); + assert_eq!(rc, 0); + Binding::from_raw(raw) + } + } +} + +impl<'repo> ::std::fmt::Debug for Object<'repo> { + fn fmt(&self, f: &mut ::std::fmt::Formatter) -> Result<(), ::std::fmt::Error> { + let mut ds = f.debug_struct("Object"); + match self.kind() { + Some(kind) => ds.field("kind", &kind), + None => ds.field("kind", &format!("Unknow ({})", unsafe { raw::git_object_type(&*self.raw) })) + }; + ds.field("id", &self.id()); + ds.finish() + } +} + +impl<'repo> Binding for Object<'repo> { + type Raw = *mut raw::git_object; + + unsafe fn from_raw(raw: *mut raw::git_object) -> Object<'repo> { + Object { raw: raw, _marker: marker::PhantomData, } + } + fn raw(&self) -> *mut raw::git_object { self.raw } +} + +impl<'repo> Drop for Object<'repo> { + fn drop(&mut self) { + unsafe { raw::git_object_free(self.raw) } + } +} diff --git a/git2/src/odb.rs b/git2/src/odb.rs new file mode 100644 index 000000000..c3a07a474 --- /dev/null +++ b/git2/src/odb.rs @@ -0,0 +1,419 @@ +use std::marker; +use std::io; +use std::ptr; +use std::slice; + +use std::ffi::CString; + +use libc::{c_char, c_int, c_void, size_t}; + +use {raw, Oid, Object, ObjectType, Error}; +use panic; +use util::Binding; + +/// A structure to represent a git object database +pub struct Odb<'repo> { + raw: *mut raw::git_odb, + _marker: marker::PhantomData>, +} + +impl<'repo> Binding for Odb<'repo> { + type Raw = *mut raw::git_odb; + + unsafe fn from_raw(raw: *mut raw::git_odb) -> Odb<'repo> { + Odb { + 
raw: raw, + _marker: marker::PhantomData, + } + } + fn raw(&self) -> *mut raw::git_odb { self.raw } +} + +impl<'repo> Drop for Odb<'repo> { + fn drop(&mut self) { + unsafe { raw::git_odb_free(self.raw) } + } +} + +impl<'repo> Odb<'repo> { + /// Creates an object database without any backends. + pub fn new<'a>() -> Result, Error> { + unsafe { + let mut out = ptr::null_mut(); + try_call!(raw::git_odb_new(&mut out)); + Ok(Odb::from_raw(out)) + } + } + + /// Create object database reading stream. + /// + /// Note that most backends do not support streaming reads because they store their objects as compressed/delta'ed blobs. + /// If the backend does not support streaming reads, use the `read` method instead. + pub fn reader(&self, oid: Oid) -> Result<(OdbReader, usize, ObjectType), Error> { + let mut out = ptr::null_mut(); + let mut size = 0usize; + let mut otype: raw::git_otype = ObjectType::Any.raw(); + unsafe { + try_call!(raw::git_odb_open_rstream(&mut out, &mut size, &mut otype, self.raw, oid.raw())); + Ok((OdbReader::from_raw(out), size, ObjectType::from_raw(otype).unwrap())) + } + } + + /// Create object database writing stream. + /// + /// The type and final length of the object must be specified when opening the stream. + /// If the backend does not support streaming writes, use the `write` method instead. + pub fn writer(&self, size: usize, obj_type: ObjectType) -> Result { + let mut out = ptr::null_mut(); + unsafe { + try_call!(raw::git_odb_open_wstream(&mut out, self.raw, size as raw::git_off_t, obj_type.raw())); + Ok(OdbWriter::from_raw(out)) + } + } + + /// Iterate over all objects in the object database.s + pub fn foreach(&self, mut callback: C) -> Result<(), Error> + where C: FnMut(&Oid) -> bool + { + unsafe { + let mut data = ForeachCbData { callback: &mut callback }; + try_call!(raw::git_odb_foreach(self.raw(), + foreach_cb, + &mut data as *mut _ as *mut _)); + Ok(()) + } + } + + /// Read an object from the database. + pub fn read(&self, oid: Oid) -> Result { + let mut out = ptr::null_mut(); + unsafe { + try_call!(raw::git_odb_read(&mut out, self.raw, oid.raw())); + Ok(OdbObject::from_raw(out)) + } + } + + /// Reads the header of an object from the database + /// without reading the full content. + pub fn read_header(&self, oid: Oid) -> Result<(usize, ObjectType), Error> { + let mut size: usize = 0; + let mut kind_id: i32 = ObjectType::Any.raw(); + + unsafe { + try_call!(raw::git_odb_read_header(&mut size + as *mut size_t, + &mut kind_id + as *mut raw::git_otype, + self.raw, + oid.raw())); + + Ok((size, ObjectType::from_raw(kind_id).unwrap())) + } + } + + /// Write an object to the database. + pub fn write(&self, kind: ObjectType, data: &[u8]) -> Result { + unsafe { + let mut out = raw::git_oid { id: [0; raw::GIT_OID_RAWSZ] }; + try_call!(raw::git_odb_write(&mut out, + self.raw, + data.as_ptr() + as *const c_void, + data.len(), + kind.raw())); + Ok(Oid::from_raw(&mut out)) + } + } + + /// Checks if the object database has an object. + pub fn exists(&self, oid: Oid) -> bool { + unsafe { raw::git_odb_exists(self.raw, oid.raw()) != -1 } + } + + /// Potentially finds an object that starts with the given prefix. + pub fn exists_prefix(&self, short_oid: Oid, len: usize) -> Result { + unsafe { + let mut out = raw::git_oid { id: [0; raw::GIT_OID_RAWSZ] }; + try_call!(raw::git_odb_exists_prefix(&mut out, + self.raw, + short_oid.raw(), + len)); + Ok(Oid::from_raw(&out)) + } + } + + /// Refresh the object database. 
+ /// This should never be needed, and is + /// provided purely for convenience. + /// The object database will automatically + /// refresh when an object is not found when + /// requested. + pub fn refresh(&self) -> Result<(), Error> { + unsafe { + try_call!(raw::git_odb_refresh(self.raw)); + Ok(()) + } + } + + /// Adds an alternate disk backend to the object database. + pub fn add_disk_alternate(&self, path: &str) -> Result<(), Error> { + unsafe { + let path = try!(CString::new(path)); + try_call!(raw::git_odb_add_disk_alternate(self.raw, path)); + Ok(()) + } + } +} + +/// An object from the Object Database. +pub struct OdbObject<'a> { + raw: *mut raw::git_odb_object, + _marker: marker::PhantomData>, +} + +impl<'a> Binding for OdbObject<'a> { + type Raw = *mut raw::git_odb_object; + + unsafe fn from_raw(raw: *mut raw::git_odb_object) -> OdbObject<'a> { + OdbObject { + raw: raw, + _marker: marker::PhantomData, + } + } + + fn raw(&self) -> *mut raw::git_odb_object { self.raw } +} + +impl<'a> Drop for OdbObject<'a> { + fn drop(&mut self) { + unsafe { raw::git_odb_object_free(self.raw) } + } +} + +impl<'a> OdbObject<'a> { + /// Get the object type. + pub fn kind(&self) -> ObjectType { + unsafe { ObjectType::from_raw(raw::git_odb_object_type(self.raw)).unwrap() } + } + + /// Get the object size. + pub fn len(&self) -> usize { + unsafe { raw::git_odb_object_size(self.raw) } + } + + /// Get the object data. + pub fn data(&self) -> &[u8] { + unsafe { + let size = self.len(); + let ptr : *const u8 = raw::git_odb_object_data(self.raw) as *const u8; + let buffer = slice::from_raw_parts(ptr, size); + return buffer; + } + } + + /// Get the object id. + pub fn id(&self) -> Oid { + unsafe { Oid::from_raw(raw::git_odb_object_id(self.raw)) } + } +} + +/// A structure to represent a git ODB rstream +pub struct OdbReader<'repo> { + raw: *mut raw::git_odb_stream, + _marker: marker::PhantomData>, +} + +impl<'repo> Binding for OdbReader<'repo> { + type Raw = *mut raw::git_odb_stream; + + unsafe fn from_raw(raw: *mut raw::git_odb_stream) -> OdbReader<'repo> { + OdbReader { + raw: raw, + _marker: marker::PhantomData, + } + } + fn raw(&self) -> *mut raw::git_odb_stream { self.raw } +} + +impl<'repo> Drop for OdbReader<'repo> { + fn drop(&mut self) { + unsafe { raw::git_odb_stream_free(self.raw) } + } +} + +impl<'repo> io::Read for OdbReader<'repo> { + fn read(&mut self, buf: &mut [u8]) -> io::Result { + unsafe { + let ptr = buf.as_ptr() as *mut c_char; + let len = buf.len(); + let res = raw::git_odb_stream_read(self.raw, ptr, len); + if res < 0 { + Err(io::Error::new(io::ErrorKind::Other, "Read error")) + } else { + Ok(len) + } + } + } +} + +/// A structure to represent a git ODB wstream +pub struct OdbWriter<'repo> { + raw: *mut raw::git_odb_stream, + _marker: marker::PhantomData>, +} + +impl<'repo> OdbWriter<'repo> { + /// Finish writing to an ODB stream + /// + /// This method can be used to finalize writing object to the database and get an identifier. + /// The object will take its final name and will be available to the odb. + /// This method will fail if the total number of received bytes differs from the size declared with odb_writer() + /// Attepting write after finishing will be ignored. 
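+ ///
+ /// A minimal sketch of streaming a blob into the database and then
+ /// finalizing it, assuming an already-opened `repo`:
+ ///
+ /// ```no_run
+ /// use std::io::Write;
+ /// # let repo = git2::Repository::open(".").unwrap();
+ /// let odb = repo.odb().unwrap();
+ /// let mut writer = odb.writer(5, git2::ObjectType::Blob).unwrap();
+ /// writer.write_all(b"hello").unwrap();
+ /// let _oid = writer.finalize().unwrap();
+ /// ```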
+ pub fn finalize(&mut self) -> Result { + let mut raw = raw::git_oid { id: [0; raw::GIT_OID_RAWSZ] }; + unsafe { + try_call!(raw::git_odb_stream_finalize_write(&mut raw, self.raw)); + Ok(Binding::from_raw(&raw as *const _)) + } + } +} + +impl<'repo> Binding for OdbWriter<'repo> { + type Raw = *mut raw::git_odb_stream; + + unsafe fn from_raw(raw: *mut raw::git_odb_stream) -> OdbWriter<'repo> { + OdbWriter { + raw: raw, + _marker: marker::PhantomData, + } + } + fn raw(&self) -> *mut raw::git_odb_stream { self.raw } +} + +impl<'repo> Drop for OdbWriter<'repo> { + fn drop(&mut self) { + unsafe { raw::git_odb_stream_free(self.raw) } + } +} + +impl<'repo> io::Write for OdbWriter<'repo> { + fn write(&mut self, buf: &[u8]) -> io::Result { + unsafe { + let ptr = buf.as_ptr() as *const c_char; + let len = buf.len(); + let res = raw::git_odb_stream_write(self.raw, ptr, len); + if res < 0 { + Err(io::Error::new(io::ErrorKind::Other, "Write error")) + } else { + Ok(buf.len()) + } + } + } + fn flush(&mut self) -> io::Result<()> { Ok(()) } +} + +pub type ForeachCb<'a> = FnMut(&Oid) -> bool + 'a; + +struct ForeachCbData<'a> { + pub callback: &'a mut ForeachCb<'a> +} + +extern fn foreach_cb(id: *const raw::git_oid, + payload: *mut c_void) + -> c_int +{ + panic::wrap(|| unsafe { + let data = &mut *(payload as *mut ForeachCbData); + let res = { + let callback = &mut data.callback; + callback(&Binding::from_raw(id)) + }; + + if res { 0 } else { 1 } + }).unwrap_or(1) +} + +#[cfg(test)] +mod tests { + use std::io::prelude::*; + use tempdir::TempDir; + use {Repository, ObjectType, Oid}; + + #[test] + fn read() { + let td = TempDir::new("test").unwrap(); + let repo = Repository::init(td.path()).unwrap(); + let dat = [4, 3, 5, 6, 9]; + let id = repo.blob(&dat).unwrap(); + let db = repo.odb().unwrap(); + let obj = db.read(id).unwrap(); + let data = obj.data(); + let size = obj.len(); + assert_eq!(size, 5); + assert_eq!(dat, data); + assert_eq!(id, obj.id()); + } + + #[test] + fn read_header() { + let td = TempDir::new("test").unwrap(); + let repo = Repository::init(td.path()).unwrap(); + let dat = [4, 3, 5, 6, 9]; + let id = repo.blob(&dat).unwrap(); + let db = repo.odb().unwrap(); + let (size, kind) = db.read_header(id).unwrap(); + + assert_eq!(size, 5); + assert_eq!(kind, ObjectType::Blob); + } + + #[test] + fn write() { + let td = TempDir::new("test").unwrap(); + let repo = Repository::init(td.path()).unwrap(); + let dat = [4, 3, 5, 6, 9]; + let db = repo.odb().unwrap(); + let id = db.write(ObjectType::Blob, &dat).unwrap(); + let blob = repo.find_blob(id).unwrap(); + assert_eq!(blob.content(), dat); + } + + #[test] + fn writer() { + let td = TempDir::new("test").unwrap(); + let repo = Repository::init(td.path()).unwrap(); + let dat = [4, 3, 5, 6, 9]; + let db = repo.odb().unwrap(); + let mut ws = db.writer(dat.len(), ObjectType::Blob).unwrap(); + let wl = ws.write(&dat[0..3]).unwrap(); + assert_eq!(wl, 3); + let wl = ws.write(&dat[3..5]).unwrap(); + assert_eq!(wl, 2); + let id = ws.finalize().unwrap(); + let blob = repo.find_blob(id).unwrap(); + assert_eq!(blob.content(), dat); + } + + #[test] + fn exists() { + let td = TempDir::new("test").unwrap(); + let repo = Repository::init(td.path()).unwrap(); + let dat = [4, 3, 5, 6, 9]; + let db = repo.odb().unwrap(); + let id = db.write(ObjectType::Blob, &dat).unwrap(); + assert!(db.exists(id)); + } + + #[test] + fn exists_prefix() { + let td = TempDir::new("test").unwrap(); + let repo = Repository::init(td.path()).unwrap(); + let dat = [4, 3, 5, 6, 9]; + let db = 
repo.odb().unwrap(); + let id = db.write(ObjectType::Blob, &dat).unwrap(); + let id_prefix_str = &id.to_string()[0..10]; + let id_prefix = Oid::from_str(id_prefix_str).unwrap(); + let found_oid = db.exists_prefix(id_prefix, 10).unwrap(); + assert_eq!(found_oid, id); + } +} diff --git a/git2/src/oid.rs b/git2/src/oid.rs new file mode 100644 index 000000000..b63fe3451 --- /dev/null +++ b/git2/src/oid.rs @@ -0,0 +1,214 @@ +use std::fmt; +use std::cmp::Ordering; +use std::hash::{Hasher, Hash}; +use std::str; +use std::path::Path; +use libc; + +use {raw, Error, ObjectType, IntoCString}; + +use util::Binding; + +/// Unique identity of any object (commit, tree, blob, tag). +#[derive(Copy, Clone)] +pub struct Oid { + raw: raw::git_oid +} + +impl Oid { + /// Parse a hex-formatted object id into an Oid structure. + /// + /// # Errors + /// + /// Returns an error if the string is empty, is longer than 40 hex + /// characters, or contains any non-hex characters. + pub fn from_str(s: &str) -> Result { + ::init(); + let mut raw = raw::git_oid { id: [0; raw::GIT_OID_RAWSZ] }; + unsafe { + try_call!(raw::git_oid_fromstrn(&mut raw, + s.as_bytes().as_ptr() + as *const libc::c_char, + s.len() as libc::size_t)); + } + Ok(Oid { raw: raw }) + } + + /// Parse a raw object id into an Oid structure. + /// + /// If the array given is not 20 bytes in length, an error is returned. + pub fn from_bytes(bytes: &[u8]) -> Result { + ::init(); + let mut raw = raw::git_oid { id: [0; raw::GIT_OID_RAWSZ] }; + if bytes.len() != raw::GIT_OID_RAWSZ { + Err(Error::from_str("raw byte array must be 20 bytes")) + } else { + unsafe { raw::git_oid_fromraw(&mut raw, bytes.as_ptr()) } + Ok(Oid { raw: raw }) + } + } + + /// Creates an all zero Oid structure. + pub fn zero() -> Oid { + let out = raw::git_oid { id: [0; raw::GIT_OID_RAWSZ] }; + Oid { raw: out } + } + + /// Hashes the provided data as an object of the provided type, and returns + /// an Oid corresponding to the result. This does not store the object + /// inside any object database or repository. + pub fn hash_object(kind: ObjectType, bytes: &[u8]) -> Result { + ::init(); + + let mut out = raw::git_oid { id: [0; raw::GIT_OID_RAWSZ] }; + unsafe { + try_call!(raw::git_odb_hash(&mut out, + bytes.as_ptr() + as *const libc::c_void, + bytes.len(), + kind.raw())); + } + + Ok(Oid { raw: out }) + } + + /// Hashes the content of the provided file as an object of the provided type, + /// and returns an Oid corresponding to the result. This does not store the object + /// inside any object database or repository. + pub fn hash_file>(kind: ObjectType, path: P) -> Result { + ::init(); + + let rpath = try!(path.as_ref().into_c_string()); + + let mut out = raw::git_oid { id: [0; raw::GIT_OID_RAWSZ] }; + unsafe { + try_call!(raw::git_odb_hashfile(&mut out, + rpath, + kind.raw())); + } + + Ok(Oid { raw: out }) + } + + /// View this OID as a byte-slice 20 bytes in length. + pub fn as_bytes(&self) -> &[u8] { &self.raw.id } + + /// Test if this OID is all zeros. + pub fn is_zero(&self) -> bool { + unsafe { raw::git_oid_iszero(&self.raw) == 1 } + } +} + +impl Binding for Oid { + type Raw = *const raw::git_oid; + + unsafe fn from_raw(oid: *const raw::git_oid) -> Oid { + Oid { raw: *oid } + } + fn raw(&self) -> *const raw::git_oid { &self.raw as *const _ } +} + +impl fmt::Debug for Oid { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::Display::fmt(self, f) + } +} + +impl fmt::Display for Oid { + /// Hex-encode this Oid into a formatter. 
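A short sketch of the Oid helpers defined in this file (hash_object together with the Display and FromStr implementations that follow); the input bytes are arbitrary:

use git2::{ObjectType, Oid};

fn oid_round_trip() {
    // Compute the id a blob would receive, without touching any repository.
    let oid = Oid::hash_object(ObjectType::Blob, b"hello world\n").unwrap();
    // Display renders the 40-character hex form; FromStr parses it back.
    let hex = oid.to_string();
    let parsed: Oid = hex.parse().unwrap();
    assert_eq!(oid, parsed);
}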
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let mut dst = [0u8; raw::GIT_OID_HEXSZ + 1]; + unsafe { + raw::git_oid_tostr(dst.as_mut_ptr() as *mut libc::c_char, + dst.len() as libc::size_t, &self.raw); + } + let s = &dst[..dst.iter().position(|&a| a == 0).unwrap()]; + str::from_utf8(s).unwrap().fmt(f) + } +} + +impl str::FromStr for Oid { + type Err = Error; + + /// Parse a hex-formatted object id into an Oid structure. + /// + /// # Errors + /// + /// Returns an error if the string is empty, is longer than 40 hex + /// characters, or contains any non-hex characters. + fn from_str(s: &str) -> Result { + Oid::from_str(s) + } +} + +impl PartialEq for Oid { + fn eq(&self, other: &Oid) -> bool { + unsafe { raw::git_oid_equal(&self.raw, &other.raw) != 0 } + } +} +impl Eq for Oid {} + +impl PartialOrd for Oid { + fn partial_cmp(&self, other: &Oid) -> Option { + Some(self.cmp(other)) + } +} + +impl Ord for Oid { + fn cmp(&self, other: &Oid) -> Ordering { + match unsafe { raw::git_oid_cmp(&self.raw, &other.raw) } { + 0 => Ordering::Equal, + n if n < 0 => Ordering::Less, + _ => Ordering::Greater, + } + } +} + +impl Hash for Oid { + fn hash(&self, into: &mut H) { + self.raw.id.hash(into) + } +} + +impl AsRef<[u8]> for Oid { + fn as_ref(&self) -> &[u8] { self.as_bytes() } +} + +#[cfg(test)] +mod tests { + use std::io::prelude::*; + use std::fs::File; + + use tempdir::TempDir; + use {ObjectType}; + use super::Oid; + + #[test] + fn conversions() { + assert!(Oid::from_str("foo").is_err()); + assert!(Oid::from_str("decbf2be529ab6557d5429922251e5ee36519817").is_ok()); + assert!(Oid::from_bytes(b"foo").is_err()); + assert!(Oid::from_bytes(b"00000000000000000000").is_ok()); + } + + #[test] + fn zero_is_zero() { + assert!(Oid::zero().is_zero()); + } + + #[test] + fn hash_object() { + let bytes = "Hello".as_bytes(); + assert!(Oid::hash_object(ObjectType::Blob, bytes).is_ok()); + } + + #[test] + fn hash_file() { + let td = TempDir::new("test").unwrap(); + let path = td.path().join("hello.txt"); + let mut file = File::create(&path).unwrap(); + file.write_all("Hello".as_bytes()).unwrap(); + assert!(Oid::hash_file(ObjectType::Blob, &path).is_ok()); + } +} + diff --git a/git2/src/oid_array.rs b/git2/src/oid_array.rs new file mode 100644 index 000000000..d1108e6c5 --- /dev/null +++ b/git2/src/oid_array.rs @@ -0,0 +1,50 @@ +//! Bindings to libgit2's raw `git_oidarray` type + +use std::ops::Deref; + +use oid::Oid; +use raw; +use util::Binding; +use std::slice; +use std::mem; + +/// An oid array structure used by libgit2 +/// +/// Some apis return arrays of oids which originate from libgit2. This +/// wrapper type behaves a little like `Vec<&Oid>` but does so without copying +/// the underlying Oids until necessary. 
+pub struct OidArray { + raw: raw::git_oidarray, +} + +impl Deref for OidArray { + type Target = [Oid]; + + fn deref(&self) -> &[Oid] { + unsafe { + debug_assert_eq!(mem::size_of::(), mem::size_of_val(&*self.raw.ids)); + + slice::from_raw_parts(self.raw.ids as *const Oid, self.raw.count as usize) + } + } +} + +impl Binding for OidArray { + type Raw = raw::git_oidarray; + unsafe fn from_raw(raw: raw::git_oidarray) -> OidArray { + OidArray { raw: raw } + } + fn raw(&self) -> raw::git_oidarray { self.raw } +} + +impl<'repo> ::std::fmt::Debug for OidArray { + fn fmt(&self, f: &mut ::std::fmt::Formatter) -> Result<(), ::std::fmt::Error> { + f.debug_tuple("OidArray").field(&self.deref()).finish() + } +} + +impl Drop for OidArray { + fn drop(&mut self) { + unsafe { raw::git_oidarray_free(&mut self.raw) } + } +} diff --git a/git2/src/packbuilder.rs b/git2/src/packbuilder.rs new file mode 100644 index 000000000..e3ed5132b --- /dev/null +++ b/git2/src/packbuilder.rs @@ -0,0 +1,386 @@ +use std::marker; +use std::ptr; +use std::slice; +use libc::{c_int, c_uint, c_void, size_t}; + +use {raw, panic, Repository, Error, Oid, Revwalk, Buf}; +use util::Binding; + +/// Stages that are reported by the `PackBuilder` progress callback. +pub enum PackBuilderStage { + /// Adding objects to the pack + AddingObjects, + /// Deltafication of the pack + Deltafication, +} + +pub type ProgressCb<'a> = FnMut(PackBuilderStage, u32, u32) -> bool + 'a; +pub type ForEachCb<'a> = FnMut(&[u8]) -> bool + 'a; + +/// A builder for creating a packfile +pub struct PackBuilder<'repo> { + raw: *mut raw::git_packbuilder, + progress: Option>>>, + _marker: marker::PhantomData<&'repo Repository>, +} + +impl<'repo> PackBuilder<'repo> { + /// Insert a single object. For an optimal pack it's mandatory to insert + /// objects in recency order, commits followed by trees and blobs. + pub fn insert_object(&mut self, id: Oid, name: Option<&str>) + -> Result<(), Error> { + let name = try!(::opt_cstr(name)); + unsafe { + try_call!(raw::git_packbuilder_insert(self.raw, id.raw(), name)); + } + Ok(()) + } + + /// Insert a root tree object. This will add the tree as well as all + /// referenced trees and blobs. + pub fn insert_tree(&mut self, id: Oid) -> Result<(), Error> { + unsafe { + try_call!(raw::git_packbuilder_insert_tree(self.raw, id.raw())); + } + Ok(()) + } + + /// Insert a commit object. This will add a commit as well as the completed + /// referenced tree. + pub fn insert_commit(&mut self, id: Oid) -> Result<(), Error> { + unsafe { + try_call!(raw::git_packbuilder_insert_commit(self.raw, id.raw())); + } + Ok(()) + } + + /// Insert objects as given by the walk. Those commits and all objects they + /// reference will be inserted into the packbuilder. + pub fn insert_walk(&mut self, walk: &mut Revwalk) -> Result<(), Error> { + unsafe { + try_call!(raw::git_packbuilder_insert_walk(self.raw, walk.raw())); + } + Ok(()) + } + + /// Recursively insert an object and its referenced objects. Insert the + /// object as well as any object it references. + pub fn insert_recursive(&mut self, id: Oid, name: Option<&str>) + -> Result<(), Error> { + let name = try!(::opt_cstr(name)); + unsafe { + try_call!(raw::git_packbuilder_insert_recur(self.raw, + id.raw(), + name)); + } + Ok(()) + } + + /// Write the contents of the packfile to an in-memory buffer. The contents + /// of the buffer will become a valid packfile, even though there will be + /// no attached index. 
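A hedged sketch of driving the PackBuilder insertion and write_buf APIs described above: insert HEAD's commit and collect the resulting packfile in an in-memory Buf. It assumes the repository already has at least one commit:

use git2::{Buf, Repository};

fn pack_head(repo: &Repository) -> Result<Buf, git2::Error> {
    let head = repo.refname_to_id("HEAD")?;
    let mut builder = repo.packbuilder()?;
    // Inserts the commit, its tree, and the referenced blobs.
    builder.insert_commit(head)?;
    let mut buf = Buf::new();
    builder.write_buf(&mut buf)?;
    Ok(buf)
}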
+ pub fn write_buf(&mut self, buf: &mut Buf) -> Result<(), Error> { + unsafe { + try_call!(raw::git_packbuilder_write_buf(buf.raw(), self.raw)); + } + Ok(()) + } + + /// Create the new pack and pass each object to the callback. + pub fn foreach(&mut self, mut cb: F) -> Result<(), Error> + where F: FnMut(&[u8]) -> bool + { + let mut cb = &mut cb as &mut ForEachCb; + let ptr = &mut cb as *mut _; + unsafe { + try_call!(raw::git_packbuilder_foreach(self.raw, + foreach_c, + ptr as *mut _)); + } + Ok(()) + } + + /// `progress` will be called with progress information during pack + /// building. Be aware that this is called inline with pack building + /// operations, so performance may be affected. + /// + /// There can only be one progress callback attached, this will replace any + /// existing one. See `unset_progress_callback` to remove the current + /// progress callback without attaching a new one. + pub fn set_progress_callback(&mut self, progress: F) -> Result<(), Error> + where F: FnMut(PackBuilderStage, u32, u32) -> bool + 'repo + { + let mut progress = Box::new(Box::new(progress) as Box); + let ptr = &mut *progress as *mut _; + let progress_c = Some(progress_c as raw::git_packbuilder_progress); + unsafe { + try_call!(raw::git_packbuilder_set_callbacks(self.raw, + progress_c, + ptr as *mut _)); + } + self.progress = Some(progress); + Ok(()) + } + + /// Remove the current progress callback. See `set_progress_callback` to + /// set the progress callback. + pub fn unset_progress_callback(&mut self) -> Result<(), Error> { + unsafe { + try_call!(raw::git_packbuilder_set_callbacks(self.raw, + None, + ptr::null_mut())); + self.progress = None; + } + Ok(()) + } + + /// Get the total number of objects the packbuilder will write out. + pub fn object_count(&self) -> usize { + unsafe { raw::git_packbuilder_object_count(self.raw) } + } + + /// Get the number of objects the packbuilder has already written out. + pub fn written(&self) -> usize { + unsafe { raw::git_packbuilder_written(self.raw) } + } + + /// Get the packfile's hash. A packfile's name is derived from the sorted + /// hashing of all object names. This is only correct after the packfile + /// has been written. 
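An illustrative (not authoritative) sketch of the progress callback described above, reporting to stderr; the stage labels simply mirror the PackBuilderStage variants defined in this file:

use git2::{Buf, PackBuilderStage, Repository};

fn pack_with_progress(repo: &Repository) -> Result<(), git2::Error> {
    let mut builder = repo.packbuilder()?;
    builder.set_progress_callback(|stage, current, total| {
        let label = match stage {
            PackBuilderStage::AddingObjects => "adding objects",
            PackBuilderStage::Deltafication => "deltafication",
        };
        eprintln!("{}: {}/{}", label, current, total);
        true // returning false aborts the pack build
    })?;
    builder.insert_commit(repo.refname_to_id("HEAD")?)?;
    builder.write_buf(&mut Buf::new())?;
    Ok(())
}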
+ pub fn hash(&self) -> Option { + if self.object_count() == 0 { + unsafe { + Some(Binding::from_raw(raw::git_packbuilder_hash(self.raw))) + } + } else { + None + } + } +} + +impl<'repo> Binding for PackBuilder<'repo> { + type Raw = *mut raw::git_packbuilder; + unsafe fn from_raw(ptr: *mut raw::git_packbuilder) -> PackBuilder<'repo> { + PackBuilder { + raw: ptr, + progress: None, + _marker: marker::PhantomData, + } + } + fn raw(&self) -> *mut raw::git_packbuilder { + self.raw + } +} + +impl<'repo> Drop for PackBuilder<'repo> { + fn drop(&mut self) { + unsafe { + raw::git_packbuilder_set_callbacks(self.raw, None, ptr::null_mut()); + raw::git_packbuilder_free(self.raw); + } + } +} + +impl Binding for PackBuilderStage { + type Raw = raw::git_packbuilder_stage_t; + unsafe fn from_raw(raw: raw::git_packbuilder_stage_t) -> PackBuilderStage { + match raw { + raw::GIT_PACKBUILDER_ADDING_OBJECTS => PackBuilderStage::AddingObjects, + raw::GIT_PACKBUILDER_DELTAFICATION => PackBuilderStage::Deltafication, + _ => panic!("Unknown git diff binary kind"), + } + } + fn raw(&self) -> raw::git_packbuilder_stage_t { + match *self { + PackBuilderStage::AddingObjects => raw::GIT_PACKBUILDER_ADDING_OBJECTS, + PackBuilderStage::Deltafication => raw::GIT_PACKBUILDER_DELTAFICATION, + } + } +} + +extern fn foreach_c(buf: *const c_void, + size: size_t, + data: *mut c_void) + -> c_int { + unsafe { + let buf = slice::from_raw_parts(buf as *const u8, size as usize); + + let r = panic::wrap(|| { + let data = data as *mut &mut ForEachCb; + (*data)(buf) + }); + if r == Some(true) { + 0 + } else { + -1 + } + } +} + +extern fn progress_c(stage: raw::git_packbuilder_stage_t, + current: c_uint, + total: c_uint, + data: *mut c_void) + -> c_int { + unsafe { + let stage = Binding::from_raw(stage); + + let r = panic::wrap(|| { + let data = data as *mut Box; + (*data)(stage, current, total) + }); + if r == Some(true) { + 0 + } else { + -1 + } + } +} + +#[cfg(test)] +mod tests { + use std::fs::File; + use std::path::Path; + use {Buf, Repository, Oid}; + + fn commit(repo: &Repository) -> (Oid, Oid) { + let mut index = t!(repo.index()); + let root = repo.path().parent().unwrap(); + t!(File::create(&root.join("foo"))); + t!(index.add_path(Path::new("foo"))); + + let tree_id = t!(index.write_tree()); + let tree = t!(repo.find_tree(tree_id)); + let sig = t!(repo.signature()); + let head_id = t!(repo.refname_to_id("HEAD")); + let parent = t!(repo.find_commit(head_id)); + let commit = t!(repo.commit(Some("HEAD"), + &sig, + &sig, + "commit", + &tree, + &[&parent])); + (commit, tree_id) + } + + fn pack_header(len: u8) -> Vec { + [].into_iter() + .chain(b"PACK") // signature + .chain(&[0, 0, 0, 2]) // version number + .chain(&[0, 0, 0, len]) // number of objects + .cloned().collect::>() + } + + fn empty_pack_header() -> Vec { + pack_header(0).iter() + .chain(&[0x02, 0x9d, 0x08, 0x82, 0x3b, // ^ + 0xd8, 0xa8, 0xea, 0xb5, 0x10, // | SHA-1 of the zero + 0xad, 0x6a, 0xc7, 0x5c, 0x82, // | object pack header + 0x3c, 0xfd, 0x3e, 0xd3, 0x1e]) // v + .cloned().collect::>() + } + + #[test] + fn smoke() { + let (_td, repo) = ::test::repo_init(); + let _builder = t!(repo.packbuilder()); + } + + #[test] + fn smoke_write_buf() { + let (_td, repo) = ::test::repo_init(); + let mut builder = t!(repo.packbuilder()); + let mut buf = Buf::new(); + t!(builder.write_buf(&mut buf)); + assert!(builder.hash().unwrap().is_zero()); + assert_eq!(&*buf, &*empty_pack_header()); + } + + #[test] + fn smoke_foreach() { + let (_td, repo) = ::test::repo_init(); + let mut 
builder = t!(repo.packbuilder()); + let mut buf = Vec::::new(); + t!(builder.foreach(|bytes| { + buf.extend(bytes); + true + })); + assert_eq!(&*buf, &*empty_pack_header()); + } + + #[test] + fn insert_write_buf() { + let (_td, repo) = ::test::repo_init(); + let mut builder = t!(repo.packbuilder()); + let mut buf = Buf::new(); + let (commit, _tree) = commit(&repo); + t!(builder.insert_object(commit, None)); + assert_eq!(builder.object_count(), 1); + t!(builder.write_buf(&mut buf)); + // Just check that the correct number of objects are written + assert_eq!(&buf[0..12], &*pack_header(1)); + } + + #[test] + fn insert_tree_write_buf() { + let (_td, repo) = ::test::repo_init(); + let mut builder = t!(repo.packbuilder()); + let mut buf = Buf::new(); + let (_commit, tree) = commit(&repo); + // will insert the tree itself and the blob, 2 objects + t!(builder.insert_tree(tree)); + assert_eq!(builder.object_count(), 2); + t!(builder.write_buf(&mut buf)); + // Just check that the correct number of objects are written + assert_eq!(&buf[0..12], &*pack_header(2)); + } + + #[test] + fn insert_commit_write_buf() { + let (_td, repo) = ::test::repo_init(); + let mut builder = t!(repo.packbuilder()); + let mut buf = Buf::new(); + let (commit, _tree) = commit(&repo); + // will insert the commit, its tree and the blob, 3 objects + t!(builder.insert_commit(commit)); + assert_eq!(builder.object_count(), 3); + t!(builder.write_buf(&mut buf)); + // Just check that the correct number of objects are written + assert_eq!(&buf[0..12], &*pack_header(3)); + } + + #[test] + fn progress_callback() { + let mut progress_called = false; + { + let (_td, repo) = ::test::repo_init(); + let mut builder = t!(repo.packbuilder()); + let (commit, _tree) = commit(&repo); + t!(builder.set_progress_callback(|_, _, _| { + progress_called = true; + true + })); + t!(builder.insert_commit(commit)); + t!(builder.write_buf(&mut Buf::new())); + } + assert_eq!(progress_called, true); + } + + #[test] + fn clear_progress_callback() { + let mut progress_called = false; + { + let (_td, repo) = ::test::repo_init(); + let mut builder = t!(repo.packbuilder()); + let (commit, _tree) = commit(&repo); + t!(builder.set_progress_callback(|_, _, _| { + progress_called = true; + true + })); + t!(builder.unset_progress_callback()); + t!(builder.insert_commit(commit)); + t!(builder.write_buf(&mut Buf::new())); + } + assert_eq!(progress_called, false); + } +} diff --git a/git2/src/panic.rs b/git2/src/panic.rs new file mode 100644 index 000000000..35f2c0939 --- /dev/null +++ b/git2/src/panic.rs @@ -0,0 +1,55 @@ +use std::any::Any; +use std::cell::RefCell; + +thread_local!(static LAST_ERROR: RefCell>> = { + RefCell::new(None) +}); + +#[cfg(feature = "unstable")] +pub fn wrap T + ::std::panic::UnwindSafe>(f: F) -> Option { + use std::panic; + if LAST_ERROR.with(|slot| slot.borrow().is_some()) { + return None + } + match panic::catch_unwind(f) { + Ok(ret) => Some(ret), + Err(e) => { + LAST_ERROR.with(move |slot| { + *slot.borrow_mut() = Some(e); + }); + None + } + } +} + +#[cfg(not(feature = "unstable"))] +pub fn wrap T>(f: F) -> Option { + struct Bomb { + enabled: bool, + } + impl Drop for Bomb { + fn drop(&mut self) { + if !self.enabled { + return + } + panic!("callback has panicked, and continuing to unwind into C \ + is not safe, so aborting the process"); + + } + } + let mut bomb = Bomb { enabled: true }; + let ret = Some(f()); + bomb.enabled = false; + ret +} + +pub fn check() { + let err = LAST_ERROR.with(|slot| slot.borrow_mut().take()); + if let 
Some(err) = err { + panic!(err) + } +} + +pub fn panicked() -> bool { + LAST_ERROR.with(|slot| slot.borrow().is_some()) +} diff --git a/git2/src/patch.rs b/git2/src/patch.rs new file mode 100644 index 000000000..af7e74ac1 --- /dev/null +++ b/git2/src/patch.rs @@ -0,0 +1,202 @@ +use std::path::Path; +use std::ptr; +use libc::{c_int, c_void}; + +use {raw, Blob, Buf, Diff, DiffDelta, DiffHunk, DiffLine, DiffOptions, Error}; +use diff::{LineCb, print_cb}; +use util::{Binding, into_opt_c_string}; + +/// A structure representing the text changes in a single diff delta. +/// +/// This is an opaque structure. +pub struct Patch { + raw: *mut raw::git_patch, +} + +unsafe impl Send for Patch {} + +impl Binding for Patch { + type Raw = *mut raw::git_patch; + unsafe fn from_raw(raw: Self::Raw) -> Patch { + Patch { raw: raw } + } + fn raw(&self) -> Self::Raw { self.raw } +} + +impl Drop for Patch { + fn drop(&mut self) { + unsafe { raw::git_patch_free(self.raw) } + } +} + +impl Patch { + /// Return a Patch for one file in a Diff. + /// + /// Returns Ok(None) for an unchanged or binary file. + pub fn from_diff(diff: &Diff, idx: usize) -> Result, Error> { + let mut ret = ptr::null_mut(); + unsafe { + try_call!(raw::git_patch_from_diff(&mut ret, diff.raw(), idx)); + Ok(Binding::from_raw_opt(ret)) + } + } + + /// Generate a Patch by diffing two blobs. + pub fn from_blobs(old_blob: &Blob, + old_path: Option<&Path>, + new_blob: &Blob, + new_path: Option<&Path>, + opts: Option<&mut DiffOptions>) + -> Result + { + let mut ret = ptr::null_mut(); + let old_path = try!(into_opt_c_string(old_path)); + let new_path = try!(into_opt_c_string(new_path)); + unsafe { + try_call!(raw::git_patch_from_blobs(&mut ret, + old_blob.raw(), + old_path, + new_blob.raw(), + new_path, + opts.map(|s| s.raw()))); + Ok(Binding::from_raw(ret)) + } + } + + /// Generate a Patch by diffing a blob and a buffer. + pub fn from_blob_and_buffer(old_blob: &Blob, + old_path: Option<&Path>, + new_buffer: &[u8], + new_path: Option<&Path>, + opts: Option<&mut DiffOptions>) + -> Result + { + let mut ret = ptr::null_mut(); + let old_path = try!(into_opt_c_string(old_path)); + let new_path = try!(into_opt_c_string(new_path)); + unsafe { + try_call!(raw::git_patch_from_blob_and_buffer(&mut ret, + old_blob.raw(), + old_path, + new_buffer.as_ptr() as *const c_void, + new_buffer.len(), + new_path, + opts.map(|s| s.raw()))); + Ok(Binding::from_raw(ret)) + } + } + + /// Generate a Patch by diffing two buffers. + pub fn from_buffers(old_buffer: &[u8], + old_path: Option<&Path>, + new_buffer: &[u8], + new_path: Option<&Path>, + opts: Option<&mut DiffOptions>) + -> Result + { + let mut ret = ptr::null_mut(); + let old_path = try!(into_opt_c_string(old_path)); + let new_path = try!(into_opt_c_string(new_path)); + unsafe { + try_call!(raw::git_patch_from_buffers(&mut ret, + old_buffer.as_ptr() as *const c_void, + old_buffer.len(), + old_path, + new_buffer.as_ptr() as *const c_void, + new_buffer.len(), + new_path, + opts.map(|s| s.raw()))); + Ok(Binding::from_raw(ret)) + } + } + + /// Get the DiffDelta associated with the Patch. + pub fn delta(&self) -> DiffDelta { + unsafe { + Binding::from_raw(raw::git_patch_get_delta(self.raw) as *mut _) + } + } + + /// Get the number of hunks in the Patch. + pub fn num_hunks(&self) -> usize { + unsafe { + raw::git_patch_num_hunks(self.raw) + } + } + + /// Get the number of lines of context, additions, and deletions in the Patch. 
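A sketch of pulling per-file Patch objects out of a Diff and printing the line statistics exposed below. The Diff itself is assumed to come from elsewhere (for example Repository::diff_tree_to_workdir, which is not part of this hunk):

use git2::{Diff, Patch};

fn summarize(diff: &Diff) -> Result<(), git2::Error> {
    for (idx, _delta) in diff.deltas().enumerate() {
        // from_diff returns Ok(None) for unchanged or binary files.
        if let Some(patch) = Patch::from_diff(diff, idx)? {
            let (context, additions, deletions) = patch.line_stats()?;
            println!("#{}: +{} -{} ({} context lines)", idx, additions, deletions, context);
        }
    }
    Ok(())
}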
+ pub fn line_stats(&self) -> Result<(usize, usize, usize), Error> { + let mut context = 0; + let mut additions = 0; + let mut deletions = 0; + unsafe { + try_call!(raw::git_patch_line_stats(&mut context, + &mut additions, + &mut deletions, + self.raw)); + } + Ok((context, additions, deletions)) + } + + /// Get a DiffHunk and its total line count from the Patch. + pub fn hunk(&self, hunk_idx: usize) -> Result<(DiffHunk, usize), Error> { + let mut ret = ptr::null(); + let mut lines = 0; + unsafe { + try_call!(raw::git_patch_get_hunk(&mut ret, &mut lines, self.raw, hunk_idx)); + Ok((Binding::from_raw(ret), lines)) + } + } + + /// Get the number of lines in a hunk. + pub fn num_lines_in_hunk(&self, hunk_idx: usize) -> Result { + unsafe { + Ok(try_call!(raw::git_patch_num_lines_in_hunk(self.raw, hunk_idx)) as usize) + } + } + + /// Get a DiffLine from a hunk of the Patch. + pub fn line_in_hunk(&self, + hunk_idx: usize, + line_of_hunk: usize) -> Result { + let mut ret = ptr::null(); + unsafe { + try_call!(raw::git_patch_get_line_in_hunk(&mut ret, + self.raw, + hunk_idx, + line_of_hunk)); + Ok(Binding::from_raw(ret)) + } + } + + /// Get the size of a Patch's diff data in bytes. + pub fn size(&self, + include_context: bool, + include_hunk_headers: bool, + include_file_headers: bool) -> usize { + unsafe { + raw::git_patch_size(self.raw, + include_context as c_int, + include_hunk_headers as c_int, + include_file_headers as c_int) + } + } + + /// Print the Patch to text via a callback. + pub fn print(&mut self, mut line_cb: &mut LineCb) -> Result<(), Error> { + let ptr = &mut line_cb as *mut _ as *mut c_void; + unsafe { + try_call!(raw::git_patch_print(self.raw, print_cb, ptr)); + Ok(()) + } + } + + /// Get the Patch text as a Buf. + pub fn to_buf(&mut self) -> Result { + let buf = Buf::new(); + unsafe { + try_call!(raw::git_patch_to_buf(buf.raw(), self.raw)); + } + Ok(buf) + } +} diff --git a/git2/src/pathspec.rs b/git2/src/pathspec.rs new file mode 100644 index 000000000..5cc03ab3b --- /dev/null +++ b/git2/src/pathspec.rs @@ -0,0 +1,301 @@ +use std::iter::IntoIterator; +use std::marker; +use std::ops::Range; +use std::path::Path; +use std::ptr; +use libc::size_t; + +use {raw, Error, Diff, Tree, PathspecFlags, Index, Repository, DiffDelta, IntoCString}; +use util::Binding; + +/// Structure representing a compiled pathspec used for matching against various +/// structures. +pub struct Pathspec { + raw: *mut raw::git_pathspec, +} + +/// List of filenames matching a pathspec. +pub struct PathspecMatchList<'ps> { + raw: *mut raw::git_pathspec_match_list, + _marker: marker::PhantomData<&'ps Pathspec>, +} + +/// Iterator over the matched paths in a pathspec. +pub struct PathspecEntries<'list> { + range: Range, + list: &'list PathspecMatchList<'list>, +} + +/// Iterator over the matching diff deltas. +pub struct PathspecDiffEntries<'list> { + range: Range, + list: &'list PathspecMatchList<'list>, +} + +/// Iterator over the failed list of pathspec items that did not match. +pub struct PathspecFailedEntries<'list> { + range: Range, + list: &'list PathspecMatchList<'list>, +} + +impl Pathspec { + /// Creates a new pathspec from a list of specs to match against. + pub fn new(specs: I) -> Result + where T: IntoCString, I: IntoIterator { + let (_a, _b, arr) = try!(::util::iter2cstrs(specs)); + unsafe { + let mut ret = ptr::null_mut(); + try_call!(raw::git_pathspec_new(&mut ret, &arr)); + Ok(Binding::from_raw(ret)) + } + } + + /// Match a pathspec against files in a diff. 
+ /// + /// The list returned contains the list of all matched filenames (unless you + /// pass `PATHSPEC_FAILURES_ONLY` in the flags) and may also contain the + /// list of pathspecs with no match if the `PATHSPEC_FIND_FAILURES` flag is + /// specified. + pub fn match_diff(&self, diff: &Diff, flags: PathspecFlags) + -> Result { + let mut ret = ptr::null_mut(); + unsafe { + try_call!(raw::git_pathspec_match_diff(&mut ret, diff.raw(), + flags.bits(), self.raw)); + Ok(Binding::from_raw(ret)) + } + } + + /// Match a pathspec against files in a tree. + /// + /// The list returned contains the list of all matched filenames (unless you + /// pass `PATHSPEC_FAILURES_ONLY` in the flags) and may also contain the + /// list of pathspecs with no match if the `PATHSPEC_FIND_FAILURES` flag is + /// specified. + pub fn match_tree(&self, tree: &Tree, flags: PathspecFlags) + -> Result { + let mut ret = ptr::null_mut(); + unsafe { + try_call!(raw::git_pathspec_match_tree(&mut ret, tree.raw(), + flags.bits(), self.raw)); + Ok(Binding::from_raw(ret)) + } + } + + /// This matches the pathspec against the files in the repository index. + /// + /// The list returned contains the list of all matched filenames (unless you + /// pass `PATHSPEC_FAILURES_ONLY` in the flags) and may also contain the + /// list of pathspecs with no match if the `PATHSPEC_FIND_FAILURES` flag is + /// specified. + pub fn match_index(&self, index: &Index, flags: PathspecFlags) + -> Result { + let mut ret = ptr::null_mut(); + unsafe { + try_call!(raw::git_pathspec_match_index(&mut ret, index.raw(), + flags.bits(), self.raw)); + Ok(Binding::from_raw(ret)) + } + } + + /// Match a pathspec against the working directory of a repository. + /// + /// This matches the pathspec against the current files in the working + /// directory of the repository. It is an error to invoke this on a bare + /// repo. This handles git ignores (i.e. ignored files will not be + /// considered to match the pathspec unless the file is tracked in the + /// index). + /// + /// The list returned contains the list of all matched filenames (unless you + /// pass `PATHSPEC_FAILURES_ONLY` in the flags) and may also contain the + /// list of pathspecs with no match if the `PATHSPEC_FIND_FAILURES` flag is + /// specified. + pub fn match_workdir(&self, repo: &Repository, flags: PathspecFlags) + -> Result { + let mut ret = ptr::null_mut(); + unsafe { + try_call!(raw::git_pathspec_match_workdir(&mut ret, repo.raw(), + flags.bits(), self.raw)); + Ok(Binding::from_raw(ret)) + } + } + + /// Try to match a path against a pathspec + /// + /// Unlike most of the other pathspec matching functions, this will not fall + /// back on the native case-sensitivity for your platform. You must + /// explicitly pass flags to control case sensitivity or else this will fall + /// back on being case sensitive. 
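A small sketch of compiling a pathspec and matching individual paths against it, mirroring the smoke test further down in this file; the patterns are arbitrary examples:

use std::path::Path;
use git2::{Pathspec, PathspecFlags};

fn pathspec_demo() -> Result<(), git2::Error> {
    let ps = Pathspec::new(["src/*.rs"].iter())?;
    assert!(ps.matches_path(Path::new("src/lib.rs"), PathspecFlags::DEFAULT));
    assert!(!ps.matches_path(Path::new("README.md"), PathspecFlags::DEFAULT));
    Ok(())
}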
+ pub fn matches_path(&self, path: &Path, flags: PathspecFlags) -> bool { + let path = path.into_c_string().unwrap(); + unsafe { + raw::git_pathspec_matches_path(&*self.raw, flags.bits(), + path.as_ptr()) == 1 + } + } +} + +impl Binding for Pathspec { + type Raw = *mut raw::git_pathspec; + + unsafe fn from_raw(raw: *mut raw::git_pathspec) -> Pathspec { + Pathspec { raw: raw } + } + fn raw(&self) -> *mut raw::git_pathspec { self.raw } +} + +impl Drop for Pathspec { + fn drop(&mut self) { + unsafe { raw::git_pathspec_free(self.raw) } + } +} + +impl<'ps> PathspecMatchList<'ps> { + fn entrycount(&self) -> usize { + unsafe { raw::git_pathspec_match_list_entrycount(&*self.raw) as usize } + } + + fn failed_entrycount(&self) -> usize { + unsafe { raw::git_pathspec_match_list_failed_entrycount(&*self.raw) as usize } + } + + /// Returns an iterator over the matching filenames in this list. + pub fn entries(&self) -> PathspecEntries { + let n = self.entrycount(); + let n = if n > 0 && self.entry(0).is_none() {0} else {n}; + PathspecEntries { range: 0..n, list: self } + } + + /// Get a matching filename by position. + /// + /// If this list was generated from a diff, then the return value will + /// always be `None. + pub fn entry(&self, i: usize) -> Option<&[u8]> { + unsafe { + let ptr = raw::git_pathspec_match_list_entry(&*self.raw, i as size_t); + ::opt_bytes(self, ptr) + } + } + + /// Returns an iterator over the matching diff entries in this list. + pub fn diff_entries(&self) -> PathspecDiffEntries { + let n = self.entrycount(); + let n = if n > 0 && self.diff_entry(0).is_none() {0} else {n}; + PathspecDiffEntries { range: 0..n, list: self } + } + + /// Get a matching diff delta by position. + /// + /// If the list was not generated from a diff, then the return value will + /// always be `None`. + pub fn diff_entry(&self, i: usize) -> Option { + unsafe { + let ptr = raw::git_pathspec_match_list_diff_entry(&*self.raw, + i as size_t); + Binding::from_raw_opt(ptr as *mut _) + } + } + + /// Returns an iterator over the non-matching entries in this list. + pub fn failed_entries(&self) -> PathspecFailedEntries { + let n = self.failed_entrycount(); + let n = if n > 0 && self.failed_entry(0).is_none() {0} else {n}; + PathspecFailedEntries { range: 0..n, list: self } + } + + /// Get an original pathspec string that had no matches. 
+ pub fn failed_entry(&self, i: usize) -> Option<&[u8]> { + unsafe { + let ptr = raw::git_pathspec_match_list_failed_entry(&*self.raw, + i as size_t); + ::opt_bytes(self, ptr) + } + } +} + +impl<'ps> Binding for PathspecMatchList<'ps> { + type Raw = *mut raw::git_pathspec_match_list; + + unsafe fn from_raw(raw: *mut raw::git_pathspec_match_list) + -> PathspecMatchList<'ps> { + PathspecMatchList { raw: raw, _marker: marker::PhantomData } + } + fn raw(&self) -> *mut raw::git_pathspec_match_list { self.raw } +} + +impl<'ps> Drop for PathspecMatchList<'ps> { + fn drop(&mut self) { + unsafe { raw::git_pathspec_match_list_free(self.raw) } + } +} + +impl<'list> Iterator for PathspecEntries<'list> { + type Item = &'list [u8]; + fn next(&mut self) -> Option<&'list [u8]> { + self.range.next().and_then(|i| self.list.entry(i)) + } + fn size_hint(&self) -> (usize, Option) { self.range.size_hint() } +} +impl<'list> DoubleEndedIterator for PathspecEntries<'list> { + fn next_back(&mut self) -> Option<&'list [u8]> { + self.range.next_back().and_then(|i| self.list.entry(i)) + } +} +impl<'list> ExactSizeIterator for PathspecEntries<'list> {} + +impl<'list> Iterator for PathspecDiffEntries<'list> { + type Item = DiffDelta<'list>; + fn next(&mut self) -> Option> { + self.range.next().and_then(|i| self.list.diff_entry(i)) + } + fn size_hint(&self) -> (usize, Option) { self.range.size_hint() } +} +impl<'list> DoubleEndedIterator for PathspecDiffEntries<'list> { + fn next_back(&mut self) -> Option> { + self.range.next_back().and_then(|i| self.list.diff_entry(i)) + } +} +impl<'list> ExactSizeIterator for PathspecDiffEntries<'list> {} + +impl<'list> Iterator for PathspecFailedEntries<'list> { + type Item = &'list [u8]; + fn next(&mut self) -> Option<&'list [u8]> { + self.range.next().and_then(|i| self.list.failed_entry(i)) + } + fn size_hint(&self) -> (usize, Option) { self.range.size_hint() } +} +impl<'list> DoubleEndedIterator for PathspecFailedEntries<'list> { + fn next_back(&mut self) -> Option<&'list [u8]> { + self.range.next_back().and_then(|i| self.list.failed_entry(i)) + } +} +impl<'list> ExactSizeIterator for PathspecFailedEntries<'list> {} + +#[cfg(test)] +mod tests { + use PathspecFlags; + use super::Pathspec; + use std::fs::File; + use std::path::Path; + + #[test] + fn smoke() { + let ps = Pathspec::new(["a"].iter()).unwrap(); + assert!(ps.matches_path(Path::new("a"), PathspecFlags::DEFAULT)); + assert!(ps.matches_path(Path::new("a/b"), PathspecFlags::DEFAULT)); + assert!(!ps.matches_path(Path::new("b"), PathspecFlags::DEFAULT)); + assert!(!ps.matches_path(Path::new("ab/c"), PathspecFlags::DEFAULT)); + + let (td, repo) = ::test::repo_init(); + let list = ps.match_workdir(&repo, PathspecFlags::DEFAULT).unwrap(); + assert_eq!(list.entries().len(), 0); + assert_eq!(list.diff_entries().len(), 0); + assert_eq!(list.failed_entries().len(), 0); + + File::create(&td.path().join("a")).unwrap(); + + let list = ps.match_workdir(&repo, ::PathspecFlags::FIND_FAILURES).unwrap(); + assert_eq!(list.entries().len(), 1); + assert_eq!(list.entries().next(), Some("a".as_bytes())); + } +} diff --git a/git2/src/proxy_options.rs b/git2/src/proxy_options.rs new file mode 100644 index 000000000..e1601749b --- /dev/null +++ b/git2/src/proxy_options.rs @@ -0,0 +1,56 @@ +use std::ffi::CString; +use std::marker; +use std::ptr; + +use raw; +use util::Binding; + +/// Options which can be specified to various fetch operations. 
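A hedged sketch of wiring the ProxyOptions type defined below into a fetch. It assumes the FetchOptions::proxy_options setter and Remote::fetch/Repository::find_remote (none of which appear in this hunk); the proxy URL and remote name are placeholders:

use git2::{FetchOptions, ProxyOptions, Repository};

fn fetch_through_proxy(repo: &Repository) -> Result<(), git2::Error> {
    let mut proxy = ProxyOptions::new();
    proxy.url("http://proxy.example.com:3128"); // placeholder proxy URL

    let mut opts = FetchOptions::new();
    opts.proxy_options(proxy);

    let mut remote = repo.find_remote("origin")?;
    remote.fetch(&["master"], Some(&mut opts), None)
}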
+#[derive(Default)] +pub struct ProxyOptions<'a> { + url: Option, + proxy_kind: raw::git_proxy_t, + _marker: marker::PhantomData<&'a i32>, +} + +impl<'a> ProxyOptions<'a> { + /// Creates a new set of proxy options ready to be configured. + pub fn new() -> ProxyOptions<'a> { + Default::default() + } + + /// Try to auto-detect the proxy from the git configuration. + /// + /// Note that this will override `url` specified before. + pub fn auto(&mut self) -> &mut Self { + self.proxy_kind = raw::GIT_PROXY_AUTO; + self + } + + /// Specify the exact URL of the proxy to use. + /// + /// Note that this will override `auto` specified before. + pub fn url(&mut self, url: &str) -> &mut Self { + self.proxy_kind = raw::GIT_PROXY_SPECIFIED; + self.url = Some(CString::new(url).unwrap()); + self + } +} + +impl<'a> Binding for ProxyOptions<'a> { + type Raw = raw::git_proxy_options; + unsafe fn from_raw(_raw: raw::git_proxy_options) -> ProxyOptions<'a> { + panic!("can't create proxy from raw options") + } + + fn raw(&self) -> raw::git_proxy_options { + raw::git_proxy_options { + version: raw::GIT_PROXY_OPTIONS_VERSION, + kind: self.proxy_kind, + url: self.url.as_ref().map(|s| s.as_ptr()).unwrap_or(ptr::null()), + credentials: None, + certificate_check: None, + payload: ptr::null_mut(), + } + } +} diff --git a/git2/src/reference.rs b/git2/src/reference.rs new file mode 100644 index 000000000..265d76614 --- /dev/null +++ b/git2/src/reference.rs @@ -0,0 +1,401 @@ +use std::cmp::Ordering; +use std::ffi::CString; +use std::marker; +use std::mem; +use std::ptr; +use std::str; + +use {raw, Error, Oid, Repository, ReferenceType, Object, ObjectType, Blob, Commit, Tree, Tag}; +use object::CastOrPanic; +use util::Binding; + +struct Refdb<'repo>(&'repo Repository); + +/// A structure to represent a git [reference][1]. +/// +/// [1]: http://git-scm.com/book/en/Git-Internals-Git-References +pub struct Reference<'repo> { + raw: *mut raw::git_reference, + _marker: marker::PhantomData>, +} + +/// An iterator over the references in a repository. +pub struct References<'repo> { + raw: *mut raw::git_reference_iterator, + _marker: marker::PhantomData>, +} + +/// An iterator over the names of references in a repository. +pub struct ReferenceNames<'repo: 'references, 'references> { + inner: &'references mut References<'repo>, +} + +impl<'repo> Reference<'repo> { + /// Ensure the reference name is well-formed. + pub fn is_valid_name(refname: &str) -> bool { + ::init(); + let refname = CString::new(refname).unwrap(); + unsafe { raw::git_reference_is_valid_name(refname.as_ptr()) == 1 } + } + + /// Get access to the underlying raw pointer. + pub fn raw(&self) -> *mut raw::git_reference { self.raw } + + /// Delete an existing reference. + /// + /// This method works for both direct and symbolic references. The reference + /// will be immediately removed on disk. + /// + /// This function will return an error if the reference has changed from the + /// time it was looked up. + pub fn delete(&mut self) -> Result<(), Error> { + unsafe { try_call!(raw::git_reference_delete(self.raw)); } + Ok(()) + } + + /// Check if a reference is a local branch. + pub fn is_branch(&self) -> bool { + unsafe { raw::git_reference_is_branch(&*self.raw) == 1 } + } + + /// Check if a reference is a note. 
+ pub fn is_note(&self) -> bool { + unsafe { raw::git_reference_is_note(&*self.raw) == 1 } + } + + /// Check if a reference is a remote tracking branch + pub fn is_remote(&self) -> bool { + unsafe { raw::git_reference_is_remote(&*self.raw) == 1 } + } + + /// Check if a reference is a tag + pub fn is_tag(&self) -> bool { + unsafe { raw::git_reference_is_tag(&*self.raw) == 1 } + } + + /// Get the reference type of a reference. + /// + /// If the type is unknown, then `None` is returned. + pub fn kind(&self) -> Option { + ReferenceType::from_raw(unsafe { raw::git_reference_type(&*self.raw) }) + } + + /// Get the full name of a reference. + /// + /// Returns `None` if the name is not valid utf-8. + pub fn name(&self) -> Option<&str> { str::from_utf8(self.name_bytes()).ok() } + + /// Get the full name of a reference. + pub fn name_bytes(&self) -> &[u8] { + unsafe { ::opt_bytes(self, raw::git_reference_name(&*self.raw)).unwrap() } + } + + /// Get the full shorthand of a reference. + /// + /// This will transform the reference name into a name "human-readable" + /// version. If no shortname is appropriate, it will return the full name. + /// + /// Returns `None` if the shorthand is not valid utf-8. + pub fn shorthand(&self) -> Option<&str> { + str::from_utf8(self.shorthand_bytes()).ok() + } + + /// Get the full shorthand of a reference. + pub fn shorthand_bytes(&self) -> &[u8] { + unsafe { + ::opt_bytes(self, raw::git_reference_shorthand(&*self.raw)).unwrap() + } + } + + /// Get the OID pointed to by a direct reference. + /// + /// Only available if the reference is direct (i.e. an object id reference, + /// not a symbolic one). + pub fn target(&self) -> Option { + unsafe { + Binding::from_raw_opt(raw::git_reference_target(&*self.raw)) + } + } + + /// Return the peeled OID target of this reference. + /// + /// This peeled OID only applies to direct references that point to a hard + /// Tag object: it is the result of peeling such Tag. + pub fn target_peel(&self) -> Option { + unsafe { + Binding::from_raw_opt(raw::git_reference_target_peel(&*self.raw)) + } + } + + /// Get full name to the reference pointed to by a symbolic reference. + /// + /// May return `None` if the reference is either not symbolic or not a + /// valid utf-8 string. + pub fn symbolic_target(&self) -> Option<&str> { + self.symbolic_target_bytes().and_then(|s| str::from_utf8(s).ok()) + } + + /// Get full name to the reference pointed to by a symbolic reference. + /// + /// Only available if the reference is symbolic. + pub fn symbolic_target_bytes(&self) -> Option<&[u8]> { + unsafe { ::opt_bytes(self, raw::git_reference_symbolic_target(&*self.raw)) } + } + + /// Resolve a symbolic reference to a direct reference. + /// + /// This method iteratively peels a symbolic reference until it resolves to + /// a direct reference to an OID. + /// + /// If a direct reference is passed as an argument, a copy of that + /// reference is returned. + pub fn resolve(&self) -> Result, Error> { + let mut raw = ptr::null_mut(); + unsafe { + try_call!(raw::git_reference_resolve(&mut raw, &*self.raw)); + Ok(Binding::from_raw(raw)) + } + } + + /// Peel a reference to an object + /// + /// This method recursively peels the reference until it reaches + /// an object of the specified type. 
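A short sketch of resolving HEAD and peeling it with the helpers declared below; it assumes the repository has at least one commit:

use git2::{Oid, Repository};

fn head_tree_id(repo: &Repository) -> Result<Oid, git2::Error> {
    let head = repo.head()?;
    // Follows the reference through commits/tags until a tree is reached.
    let tree = head.peel_to_tree()?;
    Ok(tree.id())
}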
+ pub fn peel(&self, kind: ObjectType) -> Result, Error> { + let mut raw = ptr::null_mut(); + unsafe { + try_call!(raw::git_reference_peel(&mut raw, self.raw, kind)); + Ok(Binding::from_raw(raw)) + } + } + + /// Peel a reference to a blob + /// + /// This method recursively peels the reference until it reaches + /// a blob. + pub fn peel_to_blob(&self) -> Result, Error> { + Ok(try!(self.peel(ObjectType::Blob)).cast_or_panic(ObjectType::Blob)) + } + + /// Peel a reference to a commit + /// + /// This method recursively peels the reference until it reaches + /// a blob. + pub fn peel_to_commit(&self) -> Result, Error> { + Ok(try!(self.peel(ObjectType::Commit)).cast_or_panic(ObjectType::Commit)) + } + + /// Peel a reference to a tree + /// + /// This method recursively peels the reference until it reaches + /// a blob. + pub fn peel_to_tree(&self) -> Result, Error> { + Ok(try!(self.peel(ObjectType::Tree)).cast_or_panic(ObjectType::Tree)) + } + + /// Peel a reference to a tag + /// + /// This method recursively peels the reference until it reaches + /// a tag. + pub fn peel_to_tag(&self) -> Result, Error> { + Ok(try!(self.peel(ObjectType::Tag)).cast_or_panic(ObjectType::Tag)) + } + + /// Rename an existing reference. + /// + /// This method works for both direct and symbolic references. + /// + /// If the force flag is not enabled, and there's already a reference with + /// the given name, the renaming will fail. + pub fn rename(&mut self, new_name: &str, force: bool, + msg: &str) -> Result, Error> { + let mut raw = ptr::null_mut(); + let new_name = try!(CString::new(new_name)); + let msg = try!(CString::new(msg)); + unsafe { + try_call!(raw::git_reference_rename(&mut raw, self.raw, new_name, + force, msg)); + Ok(Binding::from_raw(raw)) + } + } + + /// Conditionally create a new reference with the same name as the given + /// reference but a different OID target. The reference must be a direct + /// reference, otherwise this will fail. + /// + /// The new reference will be written to disk, overwriting the given + /// reference. + pub fn set_target(&mut self, id: Oid, reflog_msg: &str) + -> Result, Error> { + let mut raw = ptr::null_mut(); + let msg = try!(CString::new(reflog_msg)); + unsafe { + try_call!(raw::git_reference_set_target(&mut raw, self.raw, + id.raw(), msg)); + Ok(Binding::from_raw(raw)) + } + } + +} + +impl<'repo> PartialOrd for Reference<'repo> { + fn partial_cmp(&self, other: &Reference<'repo>) -> Option { + Some(self.cmp(other)) + } +} + +impl<'repo> Ord for Reference<'repo> { + fn cmp(&self, other: &Reference<'repo>) -> Ordering { + match unsafe { raw::git_reference_cmp(&*self.raw, &*other.raw) } { + 0 => Ordering::Equal, + n if n < 0 => Ordering::Less, + _ => Ordering::Greater, + } + } +} + +impl<'repo> PartialEq for Reference<'repo> { + fn eq(&self, other: &Reference<'repo>) -> bool { + self.cmp(other) == Ordering::Equal + } +} + +impl<'repo> Eq for Reference<'repo> {} + +impl<'repo> Binding for Reference<'repo> { + type Raw = *mut raw::git_reference; + unsafe fn from_raw(raw: *mut raw::git_reference) -> Reference<'repo> { + Reference { raw: raw, _marker: marker::PhantomData } + } + fn raw(&self) -> *mut raw::git_reference { self.raw } +} + +impl<'repo> Drop for Reference<'repo> { + fn drop(&mut self) { + unsafe { raw::git_reference_free(self.raw) } + } +} + +impl<'repo> References<'repo> { + /// Consumes a `References` iterator to create an iterator over just the + /// name of some references. 
+ /// + /// This is more efficient if only the names are desired of references as + /// the references themselves don't have to be allocated and deallocated. + /// + /// The returned iterator will yield strings as opposed to a `Reference`. + pub fn names<'a>(&'a mut self) -> ReferenceNames<'repo, 'a> { + ReferenceNames { inner: self } + } +} + +impl<'repo> Binding for References<'repo> { + type Raw = *mut raw::git_reference_iterator; + unsafe fn from_raw(raw: *mut raw::git_reference_iterator) + -> References<'repo> { + References { raw: raw, _marker: marker::PhantomData } + } + fn raw(&self) -> *mut raw::git_reference_iterator { self.raw } +} + +impl<'repo> Iterator for References<'repo> { + type Item = Result, Error>; + fn next(&mut self) -> Option, Error>> { + let mut out = ptr::null_mut(); + unsafe { + try_call_iter!(raw::git_reference_next(&mut out, self.raw)); + Some(Ok(Binding::from_raw(out))) + } + } +} + +impl<'repo> Drop for References<'repo> { + fn drop(&mut self) { + unsafe { raw::git_reference_iterator_free(self.raw) } + } +} + +impl<'repo, 'references> Iterator for ReferenceNames<'repo, 'references> { + type Item = Result<&'references str, Error>; + fn next(&mut self) -> Option> { + let mut out = ptr::null(); + unsafe { + try_call_iter!(raw::git_reference_next_name(&mut out, + self.inner.raw)); + let bytes = ::opt_bytes(self, out).unwrap(); + let s = str::from_utf8(bytes).unwrap(); + Some(Ok(mem::transmute::<&str, &'references str>(s))) + } + } +} + +#[cfg(test)] +mod tests { + use {Reference, ObjectType, ReferenceType}; + + #[test] + fn smoke() { + assert!(Reference::is_valid_name("refs/foo")); + assert!(!Reference::is_valid_name("foo")); + } + + #[test] + fn smoke2() { + let (_td, repo) = ::test::repo_init(); + let mut head = repo.head().unwrap(); + assert!(head.is_branch()); + assert!(!head.is_remote()); + assert!(!head.is_tag()); + assert!(!head.is_note()); + + // HEAD is a symbolic reference but git_repository_head resolves it + // so it is a GIT_REF_OID. 
+ assert_eq!(head.kind().unwrap(), ReferenceType::Oid); + + assert!(head == repo.head().unwrap()); + assert_eq!(head.name(), Some("refs/heads/master")); + + assert!(head == repo.find_reference("refs/heads/master").unwrap()); + assert_eq!(repo.refname_to_id("refs/heads/master").unwrap(), + head.target().unwrap()); + + assert!(head.symbolic_target().is_none()); + assert!(head.target_peel().is_none()); + + assert_eq!(head.shorthand(), Some("master")); + assert!(head.resolve().unwrap() == head); + + let mut tag1 = repo.reference("refs/tags/tag1", + head.target().unwrap(), + false, "test").unwrap(); + assert!(tag1.is_tag()); + assert_eq!(tag1.kind().unwrap(), ReferenceType::Oid); + + let peeled_commit = tag1.peel(ObjectType::Commit).unwrap(); + assert_eq!(ObjectType::Commit, peeled_commit.kind().unwrap()); + assert_eq!(tag1.target().unwrap(), peeled_commit.id()); + + tag1.delete().unwrap(); + + let mut sym1 = repo.reference_symbolic("refs/tags/tag1", + "refs/heads/master", false, + "test").unwrap(); + assert_eq!(sym1.kind().unwrap(), ReferenceType::Symbolic); + sym1.delete().unwrap(); + + { + assert!(repo.references().unwrap().count() == 1); + assert!(repo.references().unwrap().next().unwrap().unwrap() == head); + let mut names = repo.references().unwrap(); + let mut names = names.names(); + assert_eq!(names.next().unwrap().unwrap(), "refs/heads/master"); + assert!(names.next().is_none()); + assert!(repo.references_glob("foo").unwrap().count() == 0); + assert!(repo.references_glob("refs/heads/*").unwrap().count() == 1); + } + + let mut head = head.rename("refs/foo", true, "test").unwrap(); + head.delete().unwrap(); + + } +} diff --git a/git2/src/reflog.rs b/git2/src/reflog.rs new file mode 100644 index 000000000..6996a89da --- /dev/null +++ b/git2/src/reflog.rs @@ -0,0 +1,172 @@ +use std::ops::Range; +use std::marker; +use std::str; +use libc::size_t; + +use {raw, signature, Oid, Error, Signature}; +use util::Binding; + +/// A reference log of a git repository. +pub struct Reflog { + raw: *mut raw::git_reflog, +} + +/// An entry inside the reflog of a repository +pub struct ReflogEntry<'reflog> { + raw: *const raw::git_reflog_entry, + _marker: marker::PhantomData<&'reflog Reflog>, +} + +/// An iterator over the entries inside of a reflog. +pub struct ReflogIter<'reflog> { + range: Range, + reflog: &'reflog Reflog, +} + +impl Reflog { + /// Add a new entry to the in-memory reflog. + pub fn append(&mut self, new_oid: Oid, committer: &Signature, + msg: Option<&str>) -> Result<(), Error> { + let msg = try!(::opt_cstr(msg)); + unsafe { + try_call!(raw::git_reflog_append(self.raw, new_oid.raw(), + committer.raw(), msg)); + } + Ok(()) + } + + /// Remove an entry from the reflog by its index + /// + /// To ensure there's no gap in the log history, set rewrite_previous_entry + /// param value to `true`. When deleting entry n, member old_oid of entry + /// n-1 (if any) will be updated with the value of member new_oid of entry + /// n+1. + pub fn remove(&mut self, i: usize, rewrite_previous_entry: bool) + -> Result<(), Error> { + unsafe { + try_call!(raw::git_reflog_drop(self.raw, i as size_t, + rewrite_previous_entry)); + } + Ok(()) + } + + /// Lookup an entry by its index + /// + /// Requesting the reflog entry with an index of 0 (zero) will return the + /// most recently created entry. 
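A sketch that walks the HEAD reflog using the accessors defined here (get, len, iter and the ReflogEntry getters below); the output formatting is arbitrary:

use git2::Repository;

fn print_head_reflog(repo: &Repository) -> Result<(), git2::Error> {
    let reflog = repo.reflog("HEAD")?;
    // Index 0 is the most recently created entry.
    for (i, entry) in reflog.iter().enumerate() {
        println!("{}: {} -> {} {:?}", i, entry.id_old(), entry.id_new(), entry.message());
    }
    Ok(())
}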
+    pub fn get(&self, i: usize) -> Option<ReflogEntry> {
+        unsafe {
+            let ptr = raw::git_reflog_entry_byindex(self.raw, i as size_t);
+            Binding::from_raw_opt(ptr)
+        }
+    }
+
+    /// Get the number of log entries in a reflog
+    pub fn len(&self) -> usize {
+        unsafe { raw::git_reflog_entrycount(self.raw) as usize }
+    }
+
+    /// Return `true` if there is no log entry in a reflog
+    pub fn is_empty(&self) -> bool {
+        self.len() == 0
+    }
+
+    /// Get an iterator to all entries inside of this reflog
+    pub fn iter(&self) -> ReflogIter {
+        ReflogIter { range: 0..self.len(), reflog: self }
+    }
+
+    /// Write an existing in-memory reflog object back to disk using an atomic
+    /// file lock.
+    pub fn write(&mut self) -> Result<(), Error> {
+        unsafe { try_call!(raw::git_reflog_write(self.raw)); }
+        Ok(())
+    }
+}
+
+impl Binding for Reflog {
+    type Raw = *mut raw::git_reflog;
+
+    unsafe fn from_raw(raw: *mut raw::git_reflog) -> Reflog {
+        Reflog { raw: raw }
+    }
+    fn raw(&self) -> *mut raw::git_reflog { self.raw }
+}
+
+impl Drop for Reflog {
+    fn drop(&mut self) {
+        unsafe { raw::git_reflog_free(self.raw) }
+    }
+}
+
+impl<'reflog> ReflogEntry<'reflog> {
+    /// Get the committer of this entry
+    pub fn committer(&self) -> Signature {
+        unsafe {
+            let ptr = raw::git_reflog_entry_committer(self.raw);
+            signature::from_raw_const(self, ptr)
+        }
+    }
+
+    /// Get the new oid
+    pub fn id_new(&self) -> Oid {
+        unsafe { Binding::from_raw(raw::git_reflog_entry_id_new(self.raw)) }
+    }
+
+    /// Get the old oid
+    pub fn id_old(&self) -> Oid {
+        unsafe { Binding::from_raw(raw::git_reflog_entry_id_old(self.raw)) }
+    }
+
+    /// Get the log message, returning `None` on invalid UTF-8.
+    pub fn message(&self) -> Option<&str> {
+        self.message_bytes().and_then(|s| str::from_utf8(s).ok())
+    }
+
+    /// Get the log message as a byte array.
+    pub fn message_bytes(&self) -> Option<&[u8]> {
+        unsafe {
+            ::opt_bytes(self, raw::git_reflog_entry_message(self.raw))
+        }
+    }
+}
+
+impl<'reflog> Binding for ReflogEntry<'reflog> {
+    type Raw = *const raw::git_reflog_entry;
+
+    unsafe fn from_raw(raw: *const raw::git_reflog_entry) -> ReflogEntry<'reflog> {
+        ReflogEntry { raw: raw, _marker: marker::PhantomData }
+    }
+    fn raw(&self) -> *const raw::git_reflog_entry { self.raw }
+}
+
+impl<'reflog> Iterator for ReflogIter<'reflog> {
+    type Item = ReflogEntry<'reflog>;
+    fn next(&mut self) -> Option<ReflogEntry<'reflog>> {
+        self.range.next().and_then(|i| self.reflog.get(i))
+    }
+    fn size_hint(&self) -> (usize, Option<usize>) { self.range.size_hint() }
+}
+impl<'reflog> DoubleEndedIterator for ReflogIter<'reflog> {
+    fn next_back(&mut self) -> Option<ReflogEntry<'reflog>> {
+        self.range.next_back().and_then(|i| self.reflog.get(i))
+    }
+}
+impl<'reflog> ExactSizeIterator for ReflogIter<'reflog> {}
+
+#[cfg(test)]
+mod tests {
+    #[test]
+    fn smoke() {
+        let (_td, repo) = ::test::repo_init();
+        let mut reflog = repo.reflog("HEAD").unwrap();
+        assert_eq!(reflog.iter().len(), 1);
+        reflog.write().unwrap();
+
+        let entry = reflog.iter().next().unwrap();
+        assert!(entry.message().is_some());
+
+        repo.reflog_rename("HEAD", "refs/heads/foo").unwrap();
+        repo.reflog_delete("refs/heads/foo").unwrap();
+    }
+}
diff --git a/git2/src/refspec.rs b/git2/src/refspec.rs
new file mode 100644
index 000000000..c814d23e5
--- /dev/null
+++ b/git2/src/refspec.rs
@@ -0,0 +1,89 @@
+use std::ffi::CString;
+use std::marker;
+use std::str;
+
+use {raw, Direction};
+use util::Binding;
+
+/// A structure to represent a git [refspec][1].
+///
+/// Refspecs are currently mainly accessed/created through a `Remote`.
+/// +/// [1]: http://git-scm.com/book/en/Git-Internals-The-Refspec +pub struct Refspec<'remote> { + raw: *const raw::git_refspec, + _marker: marker::PhantomData<&'remote raw::git_remote>, +} + +impl<'remote> Refspec<'remote> { + /// Get the refspec's direction. + pub fn direction(&self) -> Direction { + match unsafe { raw::git_refspec_direction(self.raw) } { + raw::GIT_DIRECTION_FETCH => Direction::Fetch, + raw::GIT_DIRECTION_PUSH => Direction::Push, + n => panic!("unknown refspec direction: {}", n), + } + } + + /// Get the destination specifier. + /// + /// If the destination is not utf-8, None is returned. + pub fn dst(&self) -> Option<&str> { + str::from_utf8(self.dst_bytes()).ok() + } + + /// Get the destination specifier, in bytes. + pub fn dst_bytes(&self) -> &[u8] { + unsafe { ::opt_bytes(self, raw::git_refspec_dst(self.raw)).unwrap() } + } + + /// Check if a refspec's destination descriptor matches a reference + pub fn dst_matches(&self, refname: &str) -> bool { + let refname = CString::new(refname).unwrap(); + unsafe { raw::git_refspec_dst_matches(self.raw, refname.as_ptr()) == 1 } + } + + /// Get the source specifier. + /// + /// If the source is not utf-8, None is returned. + pub fn src(&self) -> Option<&str> { + str::from_utf8(self.src_bytes()).ok() + } + + /// Get the source specifier, in bytes. + pub fn src_bytes(&self) -> &[u8] { + unsafe { ::opt_bytes(self, raw::git_refspec_src(self.raw)).unwrap() } + } + + /// Check if a refspec's source descriptor matches a reference + pub fn src_matches(&self, refname: &str) -> bool { + let refname = CString::new(refname).unwrap(); + unsafe { raw::git_refspec_src_matches(self.raw, refname.as_ptr()) == 1 } + } + + /// Get the force update setting. + pub fn is_force(&self) -> bool { + unsafe { raw::git_refspec_force(self.raw) == 1 } + } + + /// Get the refspec's string. + /// + /// Returns None if the string is not valid utf8. + pub fn str(&self) -> Option<&str> { + str::from_utf8(self.bytes()).ok() + } + + /// Get the refspec's string as a byte array + pub fn bytes(&self) -> &[u8] { + unsafe { ::opt_bytes(self, raw::git_refspec_string(self.raw)).unwrap() } + } +} + +impl<'remote> Binding for Refspec<'remote> { + type Raw = *const raw::git_refspec; + + unsafe fn from_raw(raw: *const raw::git_refspec) -> Refspec<'remote> { + Refspec { raw: raw, _marker: marker::PhantomData } + } + fn raw(&self) -> *const raw::git_refspec { self.raw } +} diff --git a/git2/src/remote.rs b/git2/src/remote.rs new file mode 100644 index 000000000..b385a1dfa --- /dev/null +++ b/git2/src/remote.rs @@ -0,0 +1,753 @@ +use std::ffi::CString; +use std::ops::Range; +use std::marker; +use std::mem; +use std::ptr; +use std::slice; +use std::str; +use libc; + +use {raw, Direction, Error, Refspec, Oid, FetchPrune, ProxyOptions}; +use {RemoteCallbacks, Progress, Repository, AutotagOption}; +use string_array::StringArray; +use util::Binding; + +/// A structure representing a [remote][1] of a git repository. +/// +/// [1]: http://git-scm.com/book/en/Git-Basics-Working-with-Remotes +/// +/// The lifetime is the lifetime of the repository that it is attached to. The +/// remote is used to manage fetches and pushes as well as refspecs. +pub struct Remote<'repo> { + raw: *mut raw::git_remote, + _marker: marker::PhantomData<&'repo Repository>, +} + +/// An iterator over the refspecs that a remote contains. 
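+///
+/// A short sketch (the remote name "origin" is illustrative) of listing the
+/// refspecs configured for a remote:
+///
+/// ```no_run
+/// let repo = git2::Repository::open(".").unwrap();
+/// let remote = repo.find_remote("origin").unwrap();
+/// for spec in remote.refspecs() {
+///     println!("{:?} -> {:?}", spec.src(), spec.dst());
+/// }
+/// ```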
+pub struct Refspecs<'remote> { + range: Range, + remote: &'remote Remote<'remote>, +} + +/// Description of a reference advertised bya remote server, given out on calls +/// to `list`. +pub struct RemoteHead<'remote> { + raw: *const raw::git_remote_head, + _marker: marker::PhantomData<&'remote str>, +} + +/// Options which can be specified to various fetch operations. +pub struct FetchOptions<'cb> { + callbacks: Option>, + proxy: Option>, + prune: FetchPrune, + update_fetchhead: bool, + download_tags: AutotagOption, +} + +/// Options to control the behavior of a git push. +pub struct PushOptions<'cb> { + callbacks: Option>, + proxy: Option>, + pb_parallelism: u32, +} + +/// Holds callbacks for a connection to a `Remote`. Disconnects when dropped +pub struct RemoteConnection<'repo, 'connection, 'cb> where 'repo: 'connection { + _callbacks: Box>, + _proxy: ProxyOptions<'cb>, + remote: &'connection mut Remote<'repo>, +} + +pub fn remote_into_raw(remote: Remote) -> *mut raw::git_remote { + let ret = remote.raw; + mem::forget(remote); + return ret +} + +impl<'repo> Remote<'repo> { + /// Ensure the remote name is well-formed. + pub fn is_valid_name(remote_name: &str) -> bool { + ::init(); + let remote_name = CString::new(remote_name).unwrap(); + unsafe { raw::git_remote_is_valid_name(remote_name.as_ptr()) == 1 } + } + + /// Get the remote's name. + /// + /// Returns `None` if this remote has not yet been named or if the name is + /// not valid utf-8 + pub fn name(&self) -> Option<&str> { + self.name_bytes().and_then(|s| str::from_utf8(s).ok()) + } + + /// Get the remote's name, in bytes. + /// + /// Returns `None` if this remote has not yet been named + pub fn name_bytes(&self) -> Option<&[u8]> { + unsafe { ::opt_bytes(self, raw::git_remote_name(&*self.raw)) } + } + + /// Get the remote's url. + /// + /// Returns `None` if the url is not valid utf-8 + pub fn url(&self) -> Option<&str> { + str::from_utf8(self.url_bytes()).ok() + } + + /// Get the remote's url as a byte array. + pub fn url_bytes(&self) -> &[u8] { + unsafe { ::opt_bytes(self, raw::git_remote_url(&*self.raw)).unwrap() } + } + + /// Get the remote's pushurl. + /// + /// Returns `None` if the pushurl is not valid utf-8 + pub fn pushurl(&self) -> Option<&str> { + self.pushurl_bytes().and_then(|s| str::from_utf8(s).ok()) + } + + /// Get the remote's pushurl as a byte array. + pub fn pushurl_bytes(&self) -> Option<&[u8]> { + unsafe { ::opt_bytes(self, raw::git_remote_pushurl(&*self.raw)) } + } + + /// Open a connection to a remote. + pub fn connect(&mut self, dir: Direction) -> Result<(), Error> { + // TODO: can callbacks be exposed safely? 
+ unsafe { + try_call!(raw::git_remote_connect(self.raw, dir, + ptr::null(), + ptr::null(), + ptr::null())); + } + Ok(()) + } + + /// Open a connection to a remote with callbacks and proxy settings + /// + /// Returns a `RemoteConnection` that will disconnect once dropped + pub fn connect_auth<'connection, 'cb>(&'connection mut self, + dir: Direction, + cb: Option>, + proxy_options: Option>) + -> Result, Error> { + + let cb = Box::new(cb.unwrap_or_else(RemoteCallbacks::new)); + let proxy_options = proxy_options.unwrap_or_else(ProxyOptions::new); + unsafe { + try_call!(raw::git_remote_connect(self.raw, dir, + &cb.raw(), + &proxy_options.raw(), + ptr::null())); + } + + Ok(RemoteConnection { + _callbacks: cb, + _proxy: proxy_options, + remote: self, + }) + } + + /// Check whether the remote is connected + pub fn connected(&mut self) -> bool { + unsafe { raw::git_remote_connected(self.raw) == 1 } + } + + /// Disconnect from the remote + pub fn disconnect(&mut self) { + unsafe { raw::git_remote_disconnect(self.raw) } + } + + /// Download and index the packfile + /// + /// Connect to the remote if it hasn't been done yet, negotiate with the + /// remote git which objects are missing, download and index the packfile. + /// + /// The .idx file will be created and both it and the packfile with be + /// renamed to their final name. + /// + /// The `specs` argument is a list of refspecs to use for this negotiation + /// and download. Use an empty array to use the base refspecs. + pub fn download(&mut self, specs: &[&str], opts: Option<&mut FetchOptions>) + -> Result<(), Error> { + let (_a, _b, arr) = try!(::util::iter2cstrs(specs.iter())); + let raw = opts.map(|o| o.raw()); + unsafe { + try_call!(raw::git_remote_download(self.raw, &arr, raw.as_ref())); + } + Ok(()) + } + + /// Get the number of refspecs for a remote + pub fn refspecs<'a>(&'a self) -> Refspecs<'a> { + let cnt = unsafe { raw::git_remote_refspec_count(&*self.raw) as usize }; + Refspecs { range: 0..cnt, remote: self } + } + + /// Get the `nth` refspec from this remote. + /// + /// The `refspecs` iterator can be used to iterate over all refspecs. + pub fn get_refspec(&self, i: usize) -> Option> { + unsafe { + let ptr = raw::git_remote_get_refspec(&*self.raw, + i as libc::size_t); + Binding::from_raw_opt(ptr) + } + } + + /// Download new data and update tips + /// + /// Convenience function to connect to a remote, download the data, + /// disconnect and update the remote-tracking branches. 
+ /// + /// # Examples + /// + /// Example of functionality similar to `git fetch origin/master`: + /// + /// ```no_run + /// fn fetch_origin_master(repo: git2::Repository) -> Result<(), git2::Error> { + /// repo.find_remote("origin")?.fetch(&["master"], None, None) + /// } + /// + /// let repo = git2::Repository::discover("rust").unwrap(); + /// fetch_origin_master(repo).unwrap(); + /// ``` + pub fn fetch(&mut self, + refspecs: &[&str], + opts: Option<&mut FetchOptions>, + reflog_msg: Option<&str>) -> Result<(), Error> { + let (_a, _b, arr) = try!(::util::iter2cstrs(refspecs.iter())); + let msg = try!(::opt_cstr(reflog_msg)); + let raw = opts.map(|o| o.raw()); + unsafe { + try_call!(raw::git_remote_fetch(self.raw, &arr, raw.as_ref(), msg)); + } + Ok(()) + } + + /// Update the tips to the new state + pub fn update_tips(&mut self, + callbacks: Option<&mut RemoteCallbacks>, + update_fetchhead: bool, + download_tags: AutotagOption, + msg: Option<&str>) -> Result<(), Error> { + let msg = try!(::opt_cstr(msg)); + let cbs = callbacks.map(|cb| cb.raw()); + unsafe { + try_call!(raw::git_remote_update_tips(self.raw, cbs.as_ref(), + update_fetchhead, + download_tags, msg)); + } + Ok(()) + } + + /// Perform a push + /// + /// Perform all the steps for a push. If no refspecs are passed then the + /// configured refspecs will be used. + /// + /// Note that you'll likely want to use `RemoteCallbacks` and set + /// `push_update_reference` to test whether all the references were pushed + /// successfully. + pub fn push(&mut self, + refspecs: &[&str], + opts: Option<&mut PushOptions>) -> Result<(), Error> { + let (_a, _b, arr) = try!(::util::iter2cstrs(refspecs.iter())); + let raw = opts.map(|o| o.raw()); + unsafe { + try_call!(raw::git_remote_push(self.raw, &arr, raw.as_ref())); + } + Ok(()) + } + + /// Get the statistics structure that is filled in by the fetch operation. + pub fn stats(&self) -> Progress { + unsafe { + Binding::from_raw(raw::git_remote_stats(self.raw)) + } + } + + /// Get the remote repository's reference advertisement list. + /// + /// Get the list of references with which the server responds to a new + /// connection. + /// + /// The remote (or more exactly its transport) must have connected to the + /// remote repository. This list is available as soon as the connection to + /// the remote is initiated and it remains available after disconnecting. 
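+    ///
+    /// A hedged sketch (the remote name "origin" is illustrative) of listing
+    /// the advertised references after connecting:
+    ///
+    /// ```no_run
+    /// let repo = git2::Repository::open(".").unwrap();
+    /// let mut remote = repo.find_remote("origin").unwrap();
+    /// remote.connect(git2::Direction::Fetch).unwrap();
+    /// for head in remote.list().unwrap() {
+    ///     println!("{} {}", head.oid(), head.name());
+    /// }
+    /// ```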
+ pub fn list(&self) -> Result<&[RemoteHead], Error> { + let mut size = 0; + let mut base = ptr::null_mut(); + unsafe { + try_call!(raw::git_remote_ls(&mut base, &mut size, self.raw)); + assert_eq!(mem::size_of::(), + mem::size_of::<*const raw::git_remote_head>()); + let slice = slice::from_raw_parts(base as *const _, size as usize); + Ok(mem::transmute::<&[*const raw::git_remote_head], + &[RemoteHead]>(slice)) + } + } + + /// Get the remote's list of fetch refspecs + pub fn fetch_refspecs(&self) -> Result { + unsafe { + let mut raw: raw::git_strarray = mem::zeroed(); + try_call!(raw::git_remote_get_fetch_refspecs(&mut raw, self.raw)); + Ok(StringArray::from_raw(raw)) + } + } + + /// Get the remote's list of push refspecs + pub fn push_refspecs(&self) -> Result { + unsafe { + let mut raw: raw::git_strarray = mem::zeroed(); + try_call!(raw::git_remote_get_push_refspecs(&mut raw, self.raw)); + Ok(StringArray::from_raw(raw)) + } + } +} + +impl<'repo> Clone for Remote<'repo> { + fn clone(&self) -> Remote<'repo> { + let mut ret = ptr::null_mut(); + let rc = unsafe { call!(raw::git_remote_dup(&mut ret, self.raw)) }; + assert_eq!(rc, 0); + Remote { + raw: ret, + _marker: marker::PhantomData, + } + } +} + +impl<'repo> Binding for Remote<'repo> { + type Raw = *mut raw::git_remote; + + unsafe fn from_raw(raw: *mut raw::git_remote) -> Remote<'repo> { + Remote { + raw: raw, + _marker: marker::PhantomData, + } + } + fn raw(&self) -> *mut raw::git_remote { self.raw } +} + +impl<'repo> Drop for Remote<'repo> { + fn drop(&mut self) { + unsafe { raw::git_remote_free(self.raw) } + } +} + +impl<'repo> Iterator for Refspecs<'repo> { + type Item = Refspec<'repo>; + fn next(&mut self) -> Option> { + self.range.next().and_then(|i| self.remote.get_refspec(i)) + } + fn size_hint(&self) -> (usize, Option) { self.range.size_hint() } +} +impl<'repo> DoubleEndedIterator for Refspecs<'repo> { + fn next_back(&mut self) -> Option> { + self.range.next_back().and_then(|i| self.remote.get_refspec(i)) + } +} +impl<'repo> ExactSizeIterator for Refspecs<'repo> {} + +#[allow(missing_docs)] // not documented in libgit2 :( +impl<'remote> RemoteHead<'remote> { + /// Flag if this is available locally. + pub fn is_local(&self) -> bool { + unsafe { (*self.raw).local != 0 } + } + + pub fn oid(&self) -> Oid { + unsafe { Binding::from_raw(&(*self.raw).oid as *const _) } + } + pub fn loid(&self) -> Oid { + unsafe { Binding::from_raw(&(*self.raw).loid as *const _) } + } + + pub fn name(&self) -> &str { + let b = unsafe { ::opt_bytes(self, (*self.raw).name).unwrap() }; + str::from_utf8(b).unwrap() + } + + pub fn symref_target(&self) -> Option<&str> { + let b = unsafe { ::opt_bytes(self, (*self.raw).symref_target) }; + b.map(|b| str::from_utf8(b).unwrap()) + } +} + +impl<'cb> Default for FetchOptions<'cb> { + fn default() -> Self { + Self::new() + } +} + +impl<'cb> FetchOptions<'cb> { + /// Creates a new blank set of fetch options + pub fn new() -> FetchOptions<'cb> { + FetchOptions { + callbacks: None, + proxy: None, + prune: FetchPrune::Unspecified, + update_fetchhead: true, + download_tags: AutotagOption::Unspecified, + } + } + + /// Set the callbacks to use for the fetch operation. + pub fn remote_callbacks(&mut self, cbs: RemoteCallbacks<'cb>) -> &mut Self { + self.callbacks = Some(cbs); + self + } + + /// Set the proxy options to use for the fetch operation. + pub fn proxy_options(&mut self, opts: ProxyOptions<'cb>) -> &mut Self { + self.proxy = Some(opts); + self + } + + /// Set whether to perform a prune after the fetch. 
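+    ///
+    /// A brief builder sketch (the remote name "origin" and the "master"
+    /// refspec are illustrative):
+    ///
+    /// ```no_run
+    /// let repo = git2::Repository::open(".").unwrap();
+    /// let mut opts = git2::FetchOptions::new();
+    /// opts.prune(git2::FetchPrune::On);
+    /// let mut remote = repo.find_remote("origin").unwrap();
+    /// remote.fetch(&["master"], Some(&mut opts), None).unwrap();
+    /// ```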
+ pub fn prune(&mut self, prune: FetchPrune) -> &mut Self { + self.prune = prune; + self + } + + /// Set whether to write the results to FETCH_HEAD. + /// + /// Defaults to `true`. + pub fn update_fetchhead(&mut self, update: bool) -> &mut Self { + self.update_fetchhead = update; + self + } + + /// Set how to behave regarding tags on the remote, such as auto-downloading + /// tags for objects we're downloading or downloading all of them. + /// + /// The default is to auto-follow tags. + pub fn download_tags(&mut self, opt: AutotagOption) -> &mut Self { + self.download_tags = opt; + self + } +} + +impl<'cb> Binding for FetchOptions<'cb> { + type Raw = raw::git_fetch_options; + + unsafe fn from_raw(_raw: raw::git_fetch_options) -> FetchOptions<'cb> { + panic!("unimplemented"); + } + fn raw(&self) -> raw::git_fetch_options { + raw::git_fetch_options { + version: 1, + callbacks: self.callbacks.as_ref().map(|m| m.raw()) + .unwrap_or_else(|| RemoteCallbacks::new().raw()), + proxy_opts: self.proxy.as_ref().map(|m| m.raw()) + .unwrap_or_else(|| ProxyOptions::new().raw()), + prune: ::call::convert(&self.prune), + update_fetchhead: ::call::convert(&self.update_fetchhead), + download_tags: ::call::convert(&self.download_tags), + // TODO: expose this as a builder option + custom_headers: raw::git_strarray { + count: 0, + strings: ptr::null_mut(), + }, + } + } +} + + +impl<'cb> Default for PushOptions<'cb> { + fn default() -> Self { + Self::new() + } +} + +impl<'cb> PushOptions<'cb> { + /// Creates a new blank set of push options + pub fn new() -> PushOptions<'cb> { + PushOptions { + callbacks: None, + proxy: None, + pb_parallelism: 1, + } + } + + /// Set the callbacks to use for the fetch operation. + pub fn remote_callbacks(&mut self, cbs: RemoteCallbacks<'cb>) -> &mut Self { + self.callbacks = Some(cbs); + self + } + + /// Set the proxy options to use for the fetch operation. + pub fn proxy_options(&mut self, opts: ProxyOptions<'cb>) -> &mut Self { + self.proxy = Some(opts); + self + } + + /// If the transport being used to push to the remote requires the creation + /// of a pack file, this controls the number of worker threads used by the + /// packbuilder when creating that pack file to be sent to the remote. + /// + /// if set to 0 the packbuilder will auto-detect the number of threads to + /// create, and the default value is 1. + pub fn packbuilder_parallelism(&mut self, parallel: u32) -> &mut Self { + self.pb_parallelism = parallel; + self + } +} + +impl<'cb> Binding for PushOptions<'cb> { + type Raw = raw::git_push_options; + + unsafe fn from_raw(_raw: raw::git_push_options) -> PushOptions<'cb> { + panic!("unimplemented"); + } + fn raw(&self) -> raw::git_push_options { + raw::git_push_options { + version: 1, + callbacks: self.callbacks.as_ref() + .map(|m| m.raw()) + .unwrap_or_else(|| RemoteCallbacks::new().raw()), + proxy_opts: self.proxy.as_ref().map(|m| m.raw()) + .unwrap_or_else(|| ProxyOptions::new().raw()), + pb_parallelism: self.pb_parallelism as libc::c_uint, + // TODO: expose this as a builder option + custom_headers: raw::git_strarray { + count: 0, + strings: ptr::null_mut(), + }, + } + } +} + +impl<'repo, 'connection, 'cb> RemoteConnection<'repo, 'connection, 'cb> { + /// Check whether the remote is (still) connected + pub fn connected(&mut self) -> bool { + self.remote.connected() + } + + /// Get the remote repository's reference advertisement list. 
+ /// + /// This list is available as soon as the connection to + /// the remote is initiated and it remains available after disconnecting. + pub fn list(&self) -> Result<&[RemoteHead], Error> { + self.remote.list() + } +} + +impl<'repo, 'connection, 'cb> Drop for RemoteConnection<'repo, 'connection, 'cb> { + fn drop(&mut self) { + self.remote.disconnect() + } +} + +#[cfg(test)] +mod tests { + use std::cell::Cell; + use tempdir::TempDir; + use {Repository, Remote, RemoteCallbacks, Direction, FetchOptions}; + use {AutotagOption, PushOptions}; + + #[test] + fn smoke() { + let (td, repo) = ::test::repo_init(); + t!(repo.remote("origin", "/path/to/nowhere")); + drop(repo); + + let repo = t!(Repository::init(td.path())); + let origin = t!(repo.find_remote("origin")); + assert_eq!(origin.name(), Some("origin")); + assert_eq!(origin.url(), Some("/path/to/nowhere")); + assert_eq!(origin.pushurl(), None); + + t!(repo.remote_set_url("origin", "/path/to/elsewhere")); + t!(repo.remote_set_pushurl("origin", Some("/path/to/elsewhere"))); + + let stats = origin.stats(); + assert_eq!(stats.total_objects(), 0); + } + + #[test] + fn create_remote() { + let td = TempDir::new("test").unwrap(); + let remote = td.path().join("remote"); + Repository::init_bare(&remote).unwrap(); + + let (_td, repo) = ::test::repo_init(); + let url = if cfg!(unix) { + format!("file://{}", remote.display()) + } else { + format!("file:///{}", remote.display().to_string() + .replace("\\", "/")) + }; + + let mut origin = repo.remote("origin", &url).unwrap(); + assert_eq!(origin.name(), Some("origin")); + assert_eq!(origin.url(), Some(&url[..])); + assert_eq!(origin.pushurl(), None); + + { + let mut specs = origin.refspecs(); + let spec = specs.next().unwrap(); + assert!(specs.next().is_none()); + assert_eq!(spec.str(), Some("+refs/heads/*:refs/remotes/origin/*")); + assert_eq!(spec.dst(), Some("refs/remotes/origin/*")); + assert_eq!(spec.src(), Some("refs/heads/*")); + assert!(spec.is_force()); + } + assert!(origin.refspecs().next_back().is_some()); + { + let remotes = repo.remotes().unwrap(); + assert_eq!(remotes.len(), 1); + assert_eq!(remotes.get(0), Some("origin")); + assert_eq!(remotes.iter().count(), 1); + assert_eq!(remotes.iter().next().unwrap(), Some("origin")); + } + + origin.connect(Direction::Push).unwrap(); + assert!(origin.connected()); + origin.disconnect(); + + origin.connect(Direction::Fetch).unwrap(); + assert!(origin.connected()); + origin.download(&[], None).unwrap(); + origin.disconnect(); + + { + let mut connection = origin.connect_auth(Direction::Push, None, None).unwrap(); + assert!(connection.connected()); + } + assert!(!origin.connected()); + + { + let mut connection = origin.connect_auth(Direction::Fetch, None, None).unwrap(); + assert!(connection.connected()); + } + assert!(!origin.connected()); + + origin.fetch(&[], None, None).unwrap(); + origin.fetch(&[], None, Some("foo")).unwrap(); + origin.update_tips(None, true, AutotagOption::Unspecified, None).unwrap(); + origin.update_tips(None, true, AutotagOption::All, Some("foo")).unwrap(); + + t!(repo.remote_add_fetch("origin", "foo")); + t!(repo.remote_add_fetch("origin", "bar")); + } + + #[test] + fn rename_remote() { + let (_td, repo) = ::test::repo_init(); + repo.remote("origin", "foo").unwrap(); + drop(repo.remote_rename("origin", "foo")); + drop(repo.remote_delete("foo")); + } + + #[test] + fn create_remote_anonymous() { + let td = TempDir::new("test").unwrap(); + let repo = Repository::init(td.path()).unwrap(); + + let origin = 
repo.remote_anonymous("/path/to/nowhere").unwrap(); + assert_eq!(origin.name(), None); + drop(origin.clone()); + } + + #[test] + fn is_valid() { + assert!(Remote::is_valid_name("foobar")); + assert!(!Remote::is_valid_name("\x01")); + } + + #[test] + fn transfer_cb() { + let (td, _repo) = ::test::repo_init(); + let td2 = TempDir::new("git").unwrap(); + let url = ::test::path2url(&td.path()); + + let repo = Repository::init(td2.path()).unwrap(); + let progress_hit = Cell::new(false); + { + let mut callbacks = RemoteCallbacks::new(); + let mut origin = repo.remote("origin", &url).unwrap(); + + callbacks.transfer_progress(|_progress| { + progress_hit.set(true); + true + }); + origin.fetch(&[], + Some(FetchOptions::new().remote_callbacks(callbacks)), + None).unwrap(); + + let list = t!(origin.list()); + assert_eq!(list.len(), 2); + assert_eq!(list[0].name(), "HEAD"); + assert!(!list[0].is_local()); + assert_eq!(list[1].name(), "refs/heads/master"); + assert!(!list[1].is_local()); + } + assert!(progress_hit.get()); + } + + /// This test is meant to assure that the callbacks provided to connect will not cause + /// segfaults + #[test] + fn connect_list() { + let (td, _repo) = ::test::repo_init(); + let td2 = TempDir::new("git").unwrap(); + let url = ::test::path2url(&td.path()); + + let repo = Repository::init(td2.path()).unwrap(); + let mut callbacks = RemoteCallbacks::new(); + callbacks.sideband_progress(|_progress| { + // no-op + true + }); + + let mut origin = repo.remote("origin", &url).unwrap(); + + { + let mut connection = origin.connect_auth(Direction::Fetch, Some(callbacks), None).unwrap(); + assert!(connection.connected()); + + let list = t!(connection.list()); + assert_eq!(list.len(), 2); + assert_eq!(list[0].name(), "HEAD"); + assert!(!list[0].is_local()); + assert_eq!(list[1].name(), "refs/heads/master"); + assert!(!list[1].is_local()); + } + assert!(!origin.connected()); + } + + #[test] + fn push() { + let (_td, repo) = ::test::repo_init(); + let td2 = TempDir::new("git1").unwrap(); + let td3 = TempDir::new("git2").unwrap(); + let url = ::test::path2url(&td2.path()); + + Repository::init_bare(td2.path()).unwrap(); + // git push + let mut remote = repo.remote("origin", &url).unwrap(); + let mut updated = false; + { + let mut callbacks = RemoteCallbacks::new(); + callbacks.push_update_reference(|refname, status| { + updated = true; + assert_eq!(refname, "refs/heads/master"); + assert_eq!(status, None); + Ok(()) + }); + let mut options = PushOptions::new(); + options.remote_callbacks(callbacks); + remote.push(&["refs/heads/master"], Some(&mut options)).unwrap(); + } + assert!(updated); + + let repo = Repository::clone(&url, td3.path()).unwrap(); + let commit = repo.head().unwrap().target().unwrap(); + let commit = repo.find_commit(commit).unwrap(); + assert_eq!(commit.message(), Some("initial")); + } +} diff --git a/git2/src/remote_callbacks.rs b/git2/src/remote_callbacks.rs new file mode 100644 index 000000000..fdeaa83f6 --- /dev/null +++ b/git2/src/remote_callbacks.rs @@ -0,0 +1,391 @@ +use std::ffi::{CStr, CString}; +use std::marker; +use std::mem; +use std::slice; +use std::ptr; +use std::str; +use libc::{c_void, c_int, c_char, c_uint}; + +use {raw, panic, Error, Cred, CredentialType, Oid}; +use cert::Cert; +use util::Binding; + +/// A structure to contain the callbacks which are invoked when a repository is +/// being updated or downloaded. +/// +/// These callbacks are used to manage facilities such as authentication, +/// transfer progress, etc. 
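+///
+/// A minimal, illustrative sketch of wiring an authentication callback into a
+/// fetch; asking the SSH agent for credentials is only one possible choice:
+///
+/// ```no_run
+/// let mut callbacks = git2::RemoteCallbacks::new();
+/// callbacks.credentials(|_url, username_from_url, _allowed_types| {
+///     git2::Cred::ssh_key_from_agent(username_from_url.unwrap_or("git"))
+/// });
+/// let mut opts = git2::FetchOptions::new();
+/// opts.remote_callbacks(callbacks);
+/// ```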
+pub struct RemoteCallbacks<'a> { + progress: Option>>, + credentials: Option>>, + sideband_progress: Option>>, + update_tips: Option>>, + certificate_check: Option>>, + push_update_reference: Option>>, +} + +/// Struct representing the progress by an in-flight transfer. +pub struct Progress<'a> { + raw: ProgressState, + _marker: marker::PhantomData<&'a raw::git_transfer_progress>, +} + +enum ProgressState { + Borrowed(*const raw::git_transfer_progress), + Owned(raw::git_transfer_progress), +} + +/// Callback used to acquire credentials for when a remote is fetched. +/// +/// * `url` - the resource for which the credentials are required. +/// * `username_from_url` - the username that was embedded in the url, or `None` +/// if it was not included. +/// * `allowed_types` - a bitmask stating which cred types are ok to return. +pub type Credentials<'a> = FnMut(&str, Option<&str>, CredentialType) + -> Result + 'a; + +/// Callback to be invoked while a transfer is in progress. +/// +/// This callback will be periodically called with updates to the progress of +/// the transfer so far. The return value indicates whether the transfer should +/// continue. A return value of `false` will cancel the transfer. +/// +/// * `progress` - the progress being made so far. +pub type TransferProgress<'a> = FnMut(Progress) -> bool + 'a; + +/// Callback for receiving messages delivered by the transport. +/// +/// The return value indicates whether the network operation should continue. +pub type TransportMessage<'a> = FnMut(&[u8]) -> bool + 'a; + +/// Callback for whenever a reference is updated locally. +pub type UpdateTips<'a> = FnMut(&str, Oid, Oid) -> bool + 'a; + +/// Callback for a custom certificate check. +/// +/// The first argument is the certificate receved on the connection. +/// Certificates are typically either an SSH or X509 certificate. +/// +/// The second argument is the hostname for the connection is passed as the last +/// argument. +pub type CertificateCheck<'a> = FnMut(&Cert, &str) -> bool + 'a; + +/// Callback for each updated reference on push. +/// +/// The first argument here is the `refname` of the reference, and the second is +/// the status message sent by a server. If the status is `Some` then the update +/// was rejected by the remote server with a reason why. +pub type PushUpdateReference<'a> = FnMut(&str, Option<&str>) -> Result<(), Error> + 'a; + +impl<'a> Default for RemoteCallbacks<'a> { + fn default() -> Self { + Self::new() + } +} + +impl<'a> RemoteCallbacks<'a> { + /// Creates a new set of empty callbacks + pub fn new() -> RemoteCallbacks<'a> { + RemoteCallbacks { + credentials: None, + progress: None, + sideband_progress: None, + update_tips: None, + certificate_check: None, + push_update_reference: None, + } + } + + /// The callback through which to fetch credentials if required. + pub fn credentials(&mut self, cb: F) -> &mut RemoteCallbacks<'a> + where F: FnMut(&str, Option<&str>, CredentialType) + -> Result + 'a + { + self.credentials = Some(Box::new(cb) as Box>); + self + } + + /// The callback through which progress is monitored. + pub fn transfer_progress(&mut self, cb: F) -> &mut RemoteCallbacks<'a> + where F: FnMut(Progress) -> bool + 'a { + self.progress = Some(Box::new(cb) as Box>); + self + } + + /// Textual progress from the remote. + /// + /// Text sent over the progress side-band will be passed to this function + /// (this is the 'counting objects' output. 
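+    ///
+    /// A small sketch of printing the server's side-band text; the lossy
+    /// UTF-8 conversion is a choice of this example, not of the API:
+    ///
+    /// ```no_run
+    /// let mut callbacks = git2::RemoteCallbacks::new();
+    /// callbacks.sideband_progress(|data| {
+    ///     print!("remote: {}", String::from_utf8_lossy(data));
+    ///     true
+    /// });
+    /// ```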
+ pub fn sideband_progress(&mut self, cb: F) -> &mut RemoteCallbacks<'a> + where F: FnMut(&[u8]) -> bool + 'a { + self.sideband_progress = Some(Box::new(cb) as Box>); + self + } + + /// Each time a reference is updated locally, the callback will be called + /// with information about it. + pub fn update_tips(&mut self, cb: F) -> &mut RemoteCallbacks<'a> + where F: FnMut(&str, Oid, Oid) -> bool + 'a { + self.update_tips = Some(Box::new(cb) as Box>); + self + } + + /// If certificate verification fails, then this callback will be invoked to + /// let the caller make the final decision of whether to allow the + /// connection to proceed. + pub fn certificate_check(&mut self, cb: F) -> &mut RemoteCallbacks<'a> + where F: FnMut(&Cert, &str) -> bool + 'a + { + self.certificate_check = Some(Box::new(cb) as Box>); + self + } + + /// Set a callback to get invoked for each updated reference on a push. + /// + /// The first argument to the callback is the name of the reference and the + /// second is a status message sent by the server. If the status is `Some` + /// then the push was rejected. + pub fn push_update_reference(&mut self, cb: F) -> &mut RemoteCallbacks<'a> + where F: FnMut(&str, Option<&str>) -> Result<(), Error> + 'a, + { + self.push_update_reference = Some(Box::new(cb) as Box>); + self + } +} + +impl<'a> Binding for RemoteCallbacks<'a> { + type Raw = raw::git_remote_callbacks; + unsafe fn from_raw(_raw: raw::git_remote_callbacks) -> RemoteCallbacks<'a> { + panic!("unimplemented"); + } + + fn raw(&self) -> raw::git_remote_callbacks { + unsafe { + let mut callbacks: raw::git_remote_callbacks = mem::zeroed(); + assert_eq!(raw::git_remote_init_callbacks(&mut callbacks, + raw::GIT_REMOTE_CALLBACKS_VERSION), 0); + if self.progress.is_some() { + let f: raw::git_transfer_progress_cb = transfer_progress_cb; + callbacks.transfer_progress = Some(f); + } + if self.credentials.is_some() { + let f: raw::git_cred_acquire_cb = credentials_cb; + callbacks.credentials = Some(f); + } + if self.sideband_progress.is_some() { + let f: raw::git_transport_message_cb = sideband_progress_cb; + callbacks.sideband_progress = Some(f); + } + if self.certificate_check.is_some() { + let f: raw::git_transport_certificate_check_cb = + certificate_check_cb; + callbacks.certificate_check = Some(f); + } + if self.push_update_reference.is_some() { + let f: extern fn(_, _, _) -> c_int = push_update_reference_cb; + callbacks.push_update_reference = Some(f); + } + if self.update_tips.is_some() { + let f: extern fn(*const c_char, *const raw::git_oid, + *const raw::git_oid, *mut c_void) -> c_int + = update_tips_cb; + callbacks.update_tips = Some(f); + } + callbacks.payload = self as *const _ as *mut _; + callbacks + } + } +} + +impl<'a> Progress<'a> { + /// Number of objects in the packfile being downloaded + pub fn total_objects(&self) -> usize { + unsafe { (*self.raw()).total_objects as usize } + } + /// Received objects that have been hashed + pub fn indexed_objects(&self) -> usize { + unsafe { (*self.raw()).indexed_objects as usize } + } + /// Objects which have been downloaded + pub fn received_objects(&self) -> usize { + unsafe { (*self.raw()).received_objects as usize } + } + /// Locally-available objects that have been injected in order to fix a thin + /// pack. 
+ pub fn local_objects(&self) -> usize { + unsafe { (*self.raw()).local_objects as usize } + } + /// Number of deltas in the packfile being downloaded + pub fn total_deltas(&self) -> usize { + unsafe { (*self.raw()).total_deltas as usize } + } + /// Received deltas that have been hashed. + pub fn indexed_deltas(&self) -> usize { + unsafe { (*self.raw()).indexed_deltas as usize } + } + /// Size of the packfile received up to now + pub fn received_bytes(&self) -> usize { + unsafe { (*self.raw()).received_bytes as usize } + } + + /// Convert this to an owned version of `Progress`. + pub fn to_owned(&self) -> Progress<'static> { + Progress { + raw: ProgressState::Owned(unsafe { *self.raw() }), + _marker: marker::PhantomData, + } + } +} + +impl<'a> Binding for Progress<'a> { + type Raw = *const raw::git_transfer_progress; + unsafe fn from_raw(raw: *const raw::git_transfer_progress) + -> Progress<'a> { + Progress { + raw: ProgressState::Borrowed(raw), + _marker: marker::PhantomData, + } + } + + fn raw(&self) -> *const raw::git_transfer_progress { + match self.raw { + ProgressState::Borrowed(raw) => raw, + ProgressState::Owned(ref raw) => raw as *const _, + } + } +} + +extern fn credentials_cb(ret: *mut *mut raw::git_cred, + url: *const c_char, + username_from_url: *const c_char, + allowed_types: c_uint, + payload: *mut c_void) -> c_int { + unsafe { + let ok = panic::wrap(|| { + let payload = &mut *(payload as *mut RemoteCallbacks); + let callback = try!(payload.credentials.as_mut() + .ok_or(raw::GIT_PASSTHROUGH as c_int)); + *ret = ptr::null_mut(); + let url = try!(str::from_utf8(CStr::from_ptr(url).to_bytes()) + .map_err(|_| raw::GIT_PASSTHROUGH as c_int)); + let username_from_url = match ::opt_bytes(&url, username_from_url) { + Some(username) => { + Some(try!(str::from_utf8(username) + .map_err(|_| raw::GIT_PASSTHROUGH as c_int))) + } + None => None, + }; + + let cred_type = CredentialType::from_bits_truncate(allowed_types as u32); + + callback(url, username_from_url, cred_type).map_err(|e| { + let s = CString::new(e.to_string()).unwrap(); + raw::giterr_set_str(e.raw_code() as c_int, s.as_ptr()); + e.raw_code() as c_int + }) + }); + match ok { + Some(Ok(cred)) => { + // Turns out it's a memory safety issue if we pass through any + // and all credentials into libgit2 + if allowed_types & (cred.credtype() as c_uint) != 0 { + *ret = cred.unwrap(); + 0 + } else { + raw::GIT_PASSTHROUGH as c_int + } + } + Some(Err(e)) => e, + None => -1, + } + } +} + +extern fn transfer_progress_cb(stats: *const raw::git_transfer_progress, + payload: *mut c_void) -> c_int { + let ok = panic::wrap(|| unsafe { + let payload = &mut *(payload as *mut RemoteCallbacks); + let callback = match payload.progress { + Some(ref mut c) => c, + None => return true, + }; + let progress = Binding::from_raw(stats); + callback(progress) + }); + if ok == Some(true) {0} else {-1} +} + +extern fn sideband_progress_cb(str: *const c_char, + len: c_int, + payload: *mut c_void) -> c_int { + let ok = panic::wrap(|| unsafe { + let payload = &mut *(payload as *mut RemoteCallbacks); + let callback = match payload.sideband_progress { + Some(ref mut c) => c, + None => return true, + }; + let buf = slice::from_raw_parts(str as *const u8, len as usize); + callback(buf) + }); + if ok == Some(true) {0} else {-1} +} + +extern fn update_tips_cb(refname: *const c_char, + a: *const raw::git_oid, + b: *const raw::git_oid, + data: *mut c_void) -> c_int { + let ok = panic::wrap(|| unsafe { + let payload = &mut *(data as *mut RemoteCallbacks); + let 
callback = match payload.update_tips { + Some(ref mut c) => c, + None => return true, + }; + let refname = str::from_utf8(CStr::from_ptr(refname).to_bytes()) + .unwrap(); + let a = Binding::from_raw(a); + let b = Binding::from_raw(b); + callback(refname, a, b) + }); + if ok == Some(true) {0} else {-1} +} + +extern fn certificate_check_cb(cert: *mut raw::git_cert, + _valid: c_int, + hostname: *const c_char, + data: *mut c_void) -> c_int { + let ok = panic::wrap(|| unsafe { + let payload = &mut *(data as *mut RemoteCallbacks); + let callback = match payload.certificate_check { + Some(ref mut c) => c, + None => return true, + }; + let cert = Binding::from_raw(cert); + let hostname = str::from_utf8(CStr::from_ptr(hostname).to_bytes()) + .unwrap(); + callback(&cert, hostname) + }); + if ok == Some(true) {0} else {-1} +} + +extern fn push_update_reference_cb(refname: *const c_char, + status: *const c_char, + data: *mut c_void) -> c_int { + panic::wrap(|| unsafe { + let payload = &mut *(data as *mut RemoteCallbacks); + let callback = match payload.push_update_reference { + Some(ref mut c) => c, + None => return 0, + }; + let refname = str::from_utf8(CStr::from_ptr(refname).to_bytes()) + .unwrap(); + let status = if status.is_null() { + None + } else { + Some(str::from_utf8(CStr::from_ptr(status).to_bytes()).unwrap()) + }; + match callback(refname, status) { + Ok(()) => 0, + Err(e) => e.raw_code(), + } + }).unwrap_or(-1) +} diff --git a/git2/src/repo.rs b/git2/src/repo.rs new file mode 100644 index 000000000..8b8edf48a --- /dev/null +++ b/git2/src/repo.rs @@ -0,0 +1,2562 @@ +use std::env; +use std::ffi::{CStr, CString, OsStr}; +use std::iter::IntoIterator; +use std::mem; +use std::path::Path; +use std::ptr; +use std::str; +use libc::{c_int, c_char, size_t, c_void, c_uint}; + +use {raw, Revspec, Error, init, Object, RepositoryOpenFlags, RepositoryState, Remote, Buf, StashFlags}; +use {ResetType, Signature, Reference, References, Submodule, Blame, BlameOptions}; +use {Branches, BranchType, Index, Config, Oid, Blob, BlobWriter, Branch, Commit, Tree}; +use {AnnotatedCommit, MergeOptions, SubmoduleIgnore, SubmoduleStatus, MergeAnalysis, MergePreference}; +use {ObjectType, Tag, Note, Notes, StatusOptions, Statuses, Status, Revwalk}; +use {RevparseMode, RepositoryInitMode, Reflog, IntoCString, Describe}; +use {DescribeOptions, TreeBuilder, Diff, DiffOptions, PackBuilder, Odb}; +use build::{RepoBuilder, CheckoutBuilder}; +use stash::{StashApplyOptions, StashCbData, stash_cb}; +use string_array::StringArray; +use oid_array::OidArray; +use util::{self, Binding}; + +/// An owned git repository, representing all state associated with the +/// underlying filesystem. +/// +/// This structure corresponds to a `git_repository` in libgit2. Many other +/// types in git2-rs are derivative from this structure and are attached to its +/// lifetime. +/// +/// When a repository goes out of scope it is freed in memory but not deleted +/// from the filesystem. +pub struct Repository { + raw: *mut raw::git_repository, +} + +// It is the current belief that a `Repository` can be sent among threads, or +// even shared among threads in a mutex. +unsafe impl Send for Repository {} + +/// Options which can be used to configure how a repository is initialized +pub struct RepositoryInitOptions { + flags: u32, + mode: u32, + workdir_path: Option, + description: Option, + template_path: Option, + initial_head: Option, + origin_url: Option, +} + +impl Repository { + /// Attempt to open an already-existing repository at `path`. 
+ /// + /// The path can point to either a normal or bare repository. + pub fn open>(path: P) -> Result { + init(); + let path = try!(path.as_ref().into_c_string()); + let mut ret = ptr::null_mut(); + unsafe { + try_call!(raw::git_repository_open(&mut ret, path)); + Ok(Binding::from_raw(ret)) + } + } + + /// Attempt to open an already-existing bare repository at `path`. + /// + /// The path can point to only a bare repository. + pub fn open_bare>(path: P) -> Result { + init(); + let path = try!(path.as_ref().into_c_string()); + let mut ret = ptr::null_mut(); + unsafe { + try_call!(raw::git_repository_open_bare(&mut ret, path)); + Ok(Binding::from_raw(ret)) + } + } + + /// Find and open an existing repository, respecting git environment + /// variables. This acts like `open_ext` with the + /// `REPOSITORY_OPEN_FROM_ENV` flag, but additionally respects `$GIT_DIR`. + /// With `$GIT_DIR` unset, this will search for a repository starting in + /// the current directory. + pub fn open_from_env() -> Result { + init(); + let mut ret = ptr::null_mut(); + let flags = raw::GIT_REPOSITORY_OPEN_FROM_ENV; + unsafe { + try_call!(raw::git_repository_open_ext(&mut ret, + ptr::null(), + flags as c_uint, + ptr::null())); + Ok(Binding::from_raw(ret)) + } + } + + /// Find and open an existing repository, with additional options. + /// + /// If flags contains REPOSITORY_OPEN_NO_SEARCH, the path must point + /// directly to a repository; otherwise, this may point to a subdirectory + /// of a repository, and `open_ext` will search up through parent + /// directories. + /// + /// If flags contains REPOSITORY_OPEN_CROSS_FS, the search through parent + /// directories will not cross a filesystem boundary (detected when the + /// stat st_dev field changes). + /// + /// If flags contains REPOSITORY_OPEN_BARE, force opening the repository as + /// bare even if it isn't, ignoring any working directory, and defer + /// loading the repository configuration for performance. + /// + /// If flags contains REPOSITORY_OPEN_NO_DOTGIT, don't try appending + /// `/.git` to `path`. + /// + /// If flags contains REPOSITORY_OPEN_FROM_ENV, `open_ext` will ignore + /// other flags and `ceiling_dirs`, and respect the same environment + /// variables git does. Note, however, that `path` overrides `$GIT_DIR`; to + /// respect `$GIT_DIR` as well, use `open_from_env`. + /// + /// ceiling_dirs specifies a list of paths that the search through parent + /// directories will stop before entering. Use the functions in std::env + /// to construct or manipulate such a path list. + pub fn open_ext(path: P, + flags: RepositoryOpenFlags, + ceiling_dirs: I) + -> Result + where P: AsRef, O: AsRef, I: IntoIterator + { + init(); + let path = try!(path.as_ref().into_c_string()); + let ceiling_dirs_os = try!(env::join_paths(ceiling_dirs)); + let ceiling_dirs = try!(ceiling_dirs_os.into_c_string()); + let mut ret = ptr::null_mut(); + unsafe { + try_call!(raw::git_repository_open_ext(&mut ret, + path, + flags.bits() as c_uint, + ceiling_dirs)); + Ok(Binding::from_raw(ret)) + } + } + + /// Attempt to open an already-existing repository at or above `path` + /// + /// This starts at `path` and looks up the filesystem hierarchy + /// until it finds a repository. 
+ pub fn discover>(path: P) -> Result { + // TODO: this diverges significantly from the libgit2 API + init(); + let buf = Buf::new(); + let path = try!(path.as_ref().into_c_string()); + unsafe { + try_call!(raw::git_repository_discover(buf.raw(), path, 1, + ptr::null())); + } + Repository::open(util::bytes2path(&*buf)) + } + + /// Creates a new repository in the specified folder. + /// + /// This by default will create any necessary directories to create the + /// repository, and it will read any user-specified templates when creating + /// the repository. This behavior can be configured through `init_opts`. + pub fn init>(path: P) -> Result { + Repository::init_opts(path, &RepositoryInitOptions::new()) + } + + /// Creates a new `--bare` repository in the specified folder. + /// + /// The folder must exist prior to invoking this function. + pub fn init_bare>(path: P) -> Result { + Repository::init_opts(path, RepositoryInitOptions::new().bare(true)) + } + + /// Creates a new `--bare` repository in the specified folder. + /// + /// The folder must exist prior to invoking this function. + pub fn init_opts>(path: P, opts: &RepositoryInitOptions) + -> Result { + init(); + let path = try!(path.as_ref().into_c_string()); + let mut ret = ptr::null_mut(); + unsafe { + let mut opts = opts.raw(); + try_call!(raw::git_repository_init_ext(&mut ret, path, &mut opts)); + Ok(Binding::from_raw(ret)) + } + } + + /// Clone a remote repository. + /// + /// See the `RepoBuilder` struct for more information. This function will + /// delegate to a fresh `RepoBuilder` + pub fn clone>(url: &str, into: P) + -> Result { + ::init(); + RepoBuilder::new().clone(url, into.as_ref()) + } + + /// Clone a remote repository, initialize and update its submodules + /// recursively. + /// + /// This is similar to `git clone --recursive`. + pub fn clone_recurse>(url: &str, into: P) + -> Result { + let repo = Repository::clone(url, into)?; + repo.update_submodules()?; + Ok(repo) + } + + /// Attempt to wrap an object database as a repository. + pub fn from_odb(odb: Odb) -> Result { + init(); + let mut ret = ptr::null_mut(); + unsafe { + try_call!(raw::git_repository_wrap_odb(&mut ret, odb.raw())); + Ok(Binding::from_raw(ret)) + } + } + + /// Update submodules recursively. + /// + /// Uninitialized submodules will be initialized. + fn update_submodules(&self) -> Result<(), Error> { + + fn add_subrepos(repo: &Repository, list: &mut Vec) + -> Result<(), Error> { + for mut subm in repo.submodules()? { + subm.update(true, None)?; + list.push(subm.open()?); + } + Ok(()) + } + + let mut repos = Vec::new(); + add_subrepos(self, &mut repos)?; + while let Some(repo) = repos.pop() { + add_subrepos(&repo, &mut repos)?; + } + Ok(()) + } + + /// Execute a rev-parse operation against the `spec` listed. + /// + /// The resulting revision specification is returned, or an error is + /// returned if one occurs. + pub fn revparse(&self, spec: &str) -> Result { + let mut raw = raw::git_revspec { + from: ptr::null_mut(), + to: ptr::null_mut(), + flags: 0, + }; + let spec = try!(CString::new(spec)); + unsafe { + try_call!(raw::git_revparse(&mut raw, self.raw, spec)); + let to = Binding::from_raw_opt(raw.to); + let from = Binding::from_raw_opt(raw.from); + let mode = RevparseMode::from_bits_truncate(raw.flags as u32); + Ok(Revspec::from_objects(from, to, mode)) + } + } + + /// Find a single object, as specified by a revision string. 
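+    ///
+    /// A minimal sketch of resolving a revision string to an object id:
+    ///
+    /// ```no_run
+    /// let repo = git2::Repository::open(".").unwrap();
+    /// let obj = repo.revparse_single("HEAD").unwrap();
+    /// println!("{}", obj.id());
+    /// ```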
+ pub fn revparse_single(&self, spec: &str) -> Result { + let spec = try!(CString::new(spec)); + let mut obj = ptr::null_mut(); + unsafe { + try_call!(raw::git_revparse_single(&mut obj, self.raw, spec)); + assert!(!obj.is_null()); + Ok(Binding::from_raw(obj)) + } + } + + /// Find a single object and intermediate reference by a revision string. + /// + /// See `man gitrevisions`, or + /// http://git-scm.com/docs/git-rev-parse.html#_specifying_revisions for + /// information on the syntax accepted. + /// + /// In some cases (`@{<-n>}` or `@{upstream}`), the expression + /// may point to an intermediate reference. When such expressions are being + /// passed in, this intermediate reference is returned. + pub fn revparse_ext(&self, spec: &str) + -> Result<(Object, Option), Error> { + let spec = try!(CString::new(spec)); + let mut git_obj = ptr::null_mut(); + let mut git_ref = ptr::null_mut(); + unsafe { + try_call!(raw::git_revparse_ext(&mut git_obj, &mut git_ref, + self.raw, spec)); + assert!(!git_obj.is_null()); + Ok((Binding::from_raw(git_obj), Binding::from_raw_opt(git_ref))) + } + } + + /// Tests whether this repository is a bare repository or not. + pub fn is_bare(&self) -> bool { + unsafe { raw::git_repository_is_bare(self.raw) == 1 } + } + + /// Tests whether this repository is a shallow clone. + pub fn is_shallow(&self) -> bool { + unsafe { raw::git_repository_is_shallow(self.raw) == 1 } + } + + /// Tests whether this repository is a worktree. + pub fn is_worktree(&self) -> bool { + unsafe { raw::git_repository_is_worktree(self.raw) == 1 } + } + + /// Tests whether this repository is empty. + pub fn is_empty(&self) -> Result { + let empty = unsafe { + try_call!(raw::git_repository_is_empty(self.raw)) + }; + Ok(empty == 1) + } + + /// Returns the path to the `.git` folder for normal repositories or the + /// repository itself for bare repositories. + pub fn path(&self) -> &Path { + unsafe { + let ptr = raw::git_repository_path(self.raw); + util::bytes2path(::opt_bytes(self, ptr).unwrap()) + } + } + + /// Returns the current state of this repository + pub fn state(&self) -> RepositoryState { + let state = unsafe { raw::git_repository_state(self.raw) }; + macro_rules! check( ($($raw:ident => $real:ident),*) => ( + $(if state == raw::$raw as c_int { + super::RepositoryState::$real + }) else * + else { + panic!("unknown repository state: {}", state) + } + ) ); + + check!( + GIT_REPOSITORY_STATE_NONE => Clean, + GIT_REPOSITORY_STATE_MERGE => Merge, + GIT_REPOSITORY_STATE_REVERT => Revert, + GIT_REPOSITORY_STATE_REVERT_SEQUENCE => RevertSequence, + GIT_REPOSITORY_STATE_CHERRYPICK => CherryPick, + GIT_REPOSITORY_STATE_CHERRYPICK_SEQUENCE => CherryPickSequence, + GIT_REPOSITORY_STATE_BISECT => Bisect, + GIT_REPOSITORY_STATE_REBASE => Rebase, + GIT_REPOSITORY_STATE_REBASE_INTERACTIVE => RebaseInteractive, + GIT_REPOSITORY_STATE_REBASE_MERGE => RebaseMerge, + GIT_REPOSITORY_STATE_APPLY_MAILBOX => ApplyMailbox, + GIT_REPOSITORY_STATE_APPLY_MAILBOX_OR_REBASE => ApplyMailboxOrRebase + ) + } + + /// Get the path of the working directory for this repository. + /// + /// If this repository is bare, then `None` is returned. + pub fn workdir(&self) -> Option<&Path> { + unsafe { + let ptr = raw::git_repository_workdir(self.raw); + if ptr.is_null() { + None + } else { + Some(util::bytes2path(CStr::from_ptr(ptr).to_bytes())) + } + } + } + + /// Set the path to the working directory for this repository. 
+ /// + /// If `update_link` is true, create/update the gitlink file in the workdir + /// and set config "core.worktree" (if workdir is not the parent of the .git + /// directory). + pub fn set_workdir(&self, path: &Path, update_gitlink: bool) + -> Result<(), Error> { + let path = try!(path.into_c_string()); + unsafe { + try_call!(raw::git_repository_set_workdir(self.raw(), path, + update_gitlink)); + } + Ok(()) + } + + /// Get the currently active namespace for this repository. + /// + /// If there is no namespace, or the namespace is not a valid utf8 string, + /// `None` is returned. + pub fn namespace(&self) -> Option<&str> { + self.namespace_bytes().and_then(|s| str::from_utf8(s).ok()) + } + + /// Get the currently active namespace for this repository as a byte array. + /// + /// If there is no namespace, `None` is returned. + pub fn namespace_bytes(&self) -> Option<&[u8]> { + unsafe { ::opt_bytes(self, raw::git_repository_get_namespace(self.raw)) } + } + + /// Set the active namespace for this repository. + pub fn set_namespace(&self, namespace: &str) -> Result<(), Error> { + self.set_namespace_bytes(namespace.as_bytes()) + } + + /// Set the active namespace for this repository as a byte array. + pub fn set_namespace_bytes(&self, namespace: &[u8]) -> Result<(), Error> { + unsafe { + let namespace = try!(CString::new(namespace)); + try_call!(raw::git_repository_set_namespace(self.raw, + namespace)); + Ok(()) + } + } + + /// Remove the active namespace for this repository. + pub fn remove_namespace(&self) -> Result<(), Error> { + unsafe { + try_call!(raw::git_repository_set_namespace(self.raw, + ptr::null())); + Ok(()) + } + } + + /// Retrieves the Git merge message. + /// Remember to remove the message when finished. + pub fn message(&self) -> Result { + unsafe { + let buf = Buf::new(); + try_call!(raw::git_repository_message(buf.raw(), self.raw)); + Ok(str::from_utf8(&buf).unwrap().to_string()) + } + } + + /// Remove the Git merge message. + pub fn remove_message(&self) -> Result<(), Error> { + unsafe { + try_call!(raw::git_repository_message_remove(self.raw)); + Ok(()) + } + } + + /// List all remotes for a given repository + pub fn remotes(&self) -> Result { + let mut arr = raw::git_strarray { + strings: 0 as *mut *mut c_char, + count: 0, + }; + unsafe { + try_call!(raw::git_remote_list(&mut arr, self.raw)); + Ok(Binding::from_raw(arr)) + } + } + + /// Get the information for a particular remote + pub fn find_remote(&self, name: &str) -> Result { + let mut ret = ptr::null_mut(); + let name = try!(CString::new(name)); + unsafe { + try_call!(raw::git_remote_lookup(&mut ret, self.raw, name)); + Ok(Binding::from_raw(ret)) + } + } + + /// Add a remote with the default fetch refspec to the repository's + /// configuration. + pub fn remote(&self, name: &str, url: &str) -> Result { + let mut ret = ptr::null_mut(); + let name = try!(CString::new(name)); + let url = try!(CString::new(url)); + unsafe { + try_call!(raw::git_remote_create(&mut ret, self.raw, name, url)); + Ok(Binding::from_raw(ret)) + } + } + + /// Create an anonymous remote + /// + /// Create a remote with the given url and refspec in memory. You can use + /// this when you have a URL instead of a remote's name. Note that anonymous + /// remotes cannot be converted to persisted remotes. 
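+    ///
+    /// A short sketch (the URL and refspec are placeholders) of fetching
+    /// through an in-memory remote:
+    ///
+    /// ```no_run
+    /// let repo = git2::Repository::open(".").unwrap();
+    /// let mut remote = repo.remote_anonymous("https://example.com/repo.git").unwrap();
+    /// remote.fetch(&["refs/heads/*:refs/remotes/anon/*"], None, None).unwrap();
+    /// ```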
+ pub fn remote_anonymous(&self, url: &str) -> Result { + let mut ret = ptr::null_mut(); + let url = try!(CString::new(url)); + unsafe { + try_call!(raw::git_remote_create_anonymous(&mut ret, self.raw, url)); + Ok(Binding::from_raw(ret)) + } + } + + /// Give a remote a new name + /// + /// All remote-tracking branches and configuration settings for the remote + /// are updated. + /// + /// A temporary in-memory remote cannot be given a name with this method. + /// + /// No loaded instances of the remote with the old name will change their + /// name or their list of refspecs. + /// + /// The returned array of strings is a list of the non-default refspecs + /// which cannot be renamed and are returned for further processing by the + /// caller. + pub fn remote_rename(&self, name: &str, + new_name: &str) -> Result { + let name = try!(CString::new(name)); + let new_name = try!(CString::new(new_name)); + let mut problems = raw::git_strarray { + count: 0, + strings: 0 as *mut *mut c_char, + }; + unsafe { + try_call!(raw::git_remote_rename(&mut problems, self.raw, name, + new_name)); + Ok(Binding::from_raw(problems)) + } + } + + /// Delete an existing persisted remote. + /// + /// All remote-tracking branches and configuration settings for the remote + /// will be removed. + pub fn remote_delete(&self, name: &str) -> Result<(), Error> { + let name = try!(CString::new(name)); + unsafe { try_call!(raw::git_remote_delete(self.raw, name)); } + Ok(()) + } + + /// Add a fetch refspec to the remote's configuration + /// + /// Add the given refspec to the fetch list in the configuration. No loaded + /// remote instances will be affected. + pub fn remote_add_fetch(&self, name: &str, spec: &str) + -> Result<(), Error> { + let name = try!(CString::new(name)); + let spec = try!(CString::new(spec)); + unsafe { + try_call!(raw::git_remote_add_fetch(self.raw, name, spec)); + } + Ok(()) + } + + /// Add a push refspec to the remote's configuration. + /// + /// Add the given refspec to the push list in the configuration. No + /// loaded remote instances will be affected. + pub fn remote_add_push(&self, name: &str, spec: &str) + -> Result<(), Error> { + let name = try!(CString::new(name)); + let spec = try!(CString::new(spec)); + unsafe { + try_call!(raw::git_remote_add_push(self.raw, name, spec)); + } + Ok(()) + } + + /// Set the remote's url in the configuration + /// + /// Remote objects already in memory will not be affected. This assumes + /// the common case of a single-url remote and will otherwise return an + /// error. + pub fn remote_set_url(&self, name: &str, url: &str) -> Result<(), Error> { + let name = try!(CString::new(name)); + let url = try!(CString::new(url)); + unsafe { try_call!(raw::git_remote_set_url(self.raw, name, url)); } + Ok(()) + } + + /// Set the remote's url for pushing in the configuration. + /// + /// Remote objects already in memory will not be affected. This assumes + /// the common case of a single-url remote and will otherwise return an + /// error. + /// + /// `None` indicates that it should be cleared. + pub fn remote_set_pushurl(&self, name: &str, pushurl: Option<&str>) + -> Result<(), Error> { + let name = try!(CString::new(name)); + let pushurl = try!(::opt_cstr(pushurl)); + unsafe { + try_call!(raw::git_remote_set_pushurl(self.raw, name, pushurl)); + } + Ok(()) + } + + /// Sets the current head to the specified object and optionally resets + /// the index and working tree to match. + /// + /// A soft reset means the head will be moved to the commit. 
+ /// + /// A mixed reset will trigger a soft reset, plus the index will be + /// replaced with the content of the commit tree. + /// + /// A hard reset will trigger a mixed reset and the working directory will + /// be replaced with the content of the index. (Untracked and ignored files + /// will be left alone, however.) + /// + /// The `target` is a commit-ish to which the head should be moved to. The + /// object can either be a commit or a tag, but tags must be dereferenceable + /// to a commit. + /// + /// The `checkout` options will only be used for a hard reset. + pub fn reset(&self, + target: &Object, + kind: ResetType, + checkout: Option<&mut CheckoutBuilder>) + -> Result<(), Error> { + unsafe { + let mut opts: raw::git_checkout_options = mem::zeroed(); + try_call!(raw::git_checkout_init_options(&mut opts, + raw::GIT_CHECKOUT_OPTIONS_VERSION)); + let opts = checkout.map(|c| { + c.configure(&mut opts); &mut opts + }); + try_call!(raw::git_reset(self.raw, target.raw(), kind, opts)); + } + Ok(()) + } + + /// Updates some entries in the index from the target commit tree. + /// + /// The scope of the updated entries is determined by the paths being + /// in the iterator provided. + /// + /// Passing a `None` target will result in removing entries in the index + /// matching the provided pathspecs. + pub fn reset_default(&self, + target: Option<&Object>, + paths: I) -> Result<(), Error> + where T: IntoCString, I: IntoIterator, + { + let (_a, _b, mut arr) = try!(::util::iter2cstrs(paths)); + let target = target.map(|t| t.raw()); + unsafe { + try_call!(raw::git_reset_default(self.raw, target, &mut arr)); + } + Ok(()) + } + + /// Retrieve and resolve the reference pointed at by HEAD. + pub fn head(&self) -> Result { + let mut ret = ptr::null_mut(); + unsafe { + try_call!(raw::git_repository_head(&mut ret, self.raw)); + Ok(Binding::from_raw(ret)) + } + } + + /// Make the repository HEAD point to the specified reference. + /// + /// If the provided reference points to a tree or a blob, the HEAD is + /// unaltered and an error is returned. + /// + /// If the provided reference points to a branch, the HEAD will point to + /// that branch, staying attached, or become attached if it isn't yet. If + /// the branch doesn't exist yet, no error will be returned. The HEAD will + /// then be attached to an unborn branch. + /// + /// Otherwise, the HEAD will be detached and will directly point to the + /// commit. + pub fn set_head(&self, refname: &str) -> Result<(), Error> { + let refname = try!(CString::new(refname)); + unsafe { + try_call!(raw::git_repository_set_head(self.raw, refname)); + } + Ok(()) + } + + /// Determines whether the repository HEAD is detached. + pub fn head_detached(&self) -> Result { + unsafe { + let value = raw::git_repository_head_detached(self.raw); + match value { + 0 => Ok(false), + 1 => Ok(true), + _ => Err(Error::last_error(value).unwrap()) + } + } + } + + /// Make the repository HEAD directly point to the commit. + /// + /// If the provided committish cannot be found in the repository, the HEAD + /// is unaltered and an error is returned. + /// + /// If the provided commitish cannot be peeled into a commit, the HEAD is + /// unaltered and an error is returned. + /// + /// Otherwise, the HEAD will eventually be detached and will directly point + /// to the peeled commit. 
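+    ///
+    /// A minimal sketch of detaching HEAD at the commit the current branch
+    /// points to:
+    ///
+    /// ```no_run
+    /// let repo = git2::Repository::open(".").unwrap();
+    /// let oid = repo.head().unwrap().target().unwrap();
+    /// repo.set_head_detached(oid).unwrap();
+    /// ```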
+ pub fn set_head_detached(&self, commitish: Oid) -> Result<(), Error> { + unsafe { + try_call!(raw::git_repository_set_head_detached(self.raw, + commitish.raw())); + } + Ok(()) + } + + /// Create an iterator for the repo's references + pub fn references(&self) -> Result { + let mut ret = ptr::null_mut(); + unsafe { + try_call!(raw::git_reference_iterator_new(&mut ret, self.raw)); + Ok(Binding::from_raw(ret)) + } + } + + /// Create an iterator for the repo's references that match the specified + /// glob + pub fn references_glob(&self, glob: &str) -> Result { + let mut ret = ptr::null_mut(); + let glob = try!(CString::new(glob)); + unsafe { + try_call!(raw::git_reference_iterator_glob_new(&mut ret, self.raw, + glob)); + + Ok(Binding::from_raw(ret)) + } + } + + /// Load all submodules for this repository and return them. + pub fn submodules(&self) -> Result, Error> { + struct Data<'a, 'b:'a> { + repo: &'b Repository, + ret: &'a mut Vec>, + } + let mut ret = Vec::new(); + + unsafe { + let mut data = Data { + repo: self, + ret: &mut ret, + }; + try_call!(raw::git_submodule_foreach(self.raw, append, + &mut data as *mut _ + as *mut c_void)); + } + + return Ok(ret); + + extern fn append(_repo: *mut raw::git_submodule, + name: *const c_char, + data: *mut c_void) -> c_int { + unsafe { + let data = &mut *(data as *mut Data); + let mut raw = ptr::null_mut(); + let rc = raw::git_submodule_lookup(&mut raw, data.repo.raw(), + name); + assert_eq!(rc, 0); + data.ret.push(Binding::from_raw(raw)); + } + 0 + } + } + + /// Gather file status information and populate the returned structure. + /// + /// Note that if a pathspec is given in the options to filter the + /// status, then the results from rename detection (if you enable it) may + /// not be accurate. To do rename detection properly, this must be called + /// with no pathspec so that all files can be considered. + pub fn statuses(&self, options: Option<&mut StatusOptions>) + -> Result { + let mut ret = ptr::null_mut(); + unsafe { + try_call!(raw::git_status_list_new(&mut ret, self.raw, + options.map(|s| s.raw()) + .unwrap_or(ptr::null()))); + Ok(Binding::from_raw(ret)) + } + } + + /// Test if the ignore rules apply to a given file. + /// + /// This function checks the ignore rules to see if they would apply to the + /// given file. This indicates if the file would be ignored regardless of + /// whether the file is already in the index or committed to the repository. + /// + /// One way to think of this is if you were to do "git add ." on the + /// directory containing the file, would it be added or not? + pub fn status_should_ignore(&self, path: &Path) -> Result { + let mut ret = 0 as c_int; + let path = try!(path.into_c_string()); + unsafe { + try_call!(raw::git_status_should_ignore(&mut ret, self.raw, + path)); + } + Ok(ret != 0) + } + + /// Get file status for a single file. + /// + /// This tries to get status for the filename that you give. If no files + /// match that name (in either the HEAD, index, or working directory), this + /// returns NotFound. + /// + /// If the name matches multiple files (for example, if the path names a + /// directory or if running on a case- insensitive filesystem and yet the + /// HEAD has two entries that both match the path), then this returns + /// Ambiguous because it cannot give correct results. + /// + /// This does not do any sort of rename detection. Renames require a set of + /// targets and because of the path filtering, there is not enough + /// information to check renames correctly. 
To check file status with rename + /// detection, there is no choice but to do a full `statuses` and scan + /// through looking for the path that you are interested in. + pub fn status_file(&self, path: &Path) -> Result { + let mut ret = 0 as c_uint; + let path = if cfg!(windows) { + // `git_status_file` dose not work with windows path separator + // so we convert \ to / + try!(::std::ffi::CString::new(path.to_string_lossy().replace('\\', "/"))) + } else { + try!(path.into_c_string()) + }; + unsafe { + try_call!(raw::git_status_file(&mut ret, self.raw, + path)); + } + Ok(Status::from_bits_truncate(ret as u32)) + } + + /// Create an iterator which loops over the requested branches. + pub fn branches(&self, filter: Option) + -> Result { + let mut raw = ptr::null_mut(); + unsafe { + try_call!(raw::git_branch_iterator_new(&mut raw, self.raw(), filter)); + Ok(Branches::from_raw(raw)) + } + } + + /// Get the Index file for this repository. + /// + /// If a custom index has not been set, the default index for the repository + /// will be returned (the one located in .git/index). + pub fn index(&self) -> Result { + let mut raw = ptr::null_mut(); + unsafe { + try_call!(raw::git_repository_index(&mut raw, self.raw())); + Ok(Binding::from_raw(raw)) + } + } + + /// Set the Index file for this repository. + pub fn set_index(&self, index: &mut Index) { + unsafe { + raw::git_repository_set_index(self.raw(), index.raw()); + } + } + + /// Get the configuration file for this repository. + /// + /// If a configuration file has not been set, the default config set for the + /// repository will be returned, including global and system configurations + /// (if they are available). + pub fn config(&self) -> Result { + let mut raw = ptr::null_mut(); + unsafe { + try_call!(raw::git_repository_config(&mut raw, self.raw())); + Ok(Binding::from_raw(raw)) + } + } + + /// Write an in-memory buffer to the ODB as a blob. + /// + /// The Oid returned can in turn be passed to `find_blob` to get a handle to + /// the blob. + pub fn blob(&self, data: &[u8]) -> Result { + let mut raw = raw::git_oid { id: [0; raw::GIT_OID_RAWSZ] }; + unsafe { + let ptr = data.as_ptr() as *const c_void; + let len = data.len() as size_t; + try_call!(raw::git_blob_create_frombuffer(&mut raw, self.raw(), + ptr, len)); + Ok(Binding::from_raw(&raw as *const _)) + } + } + + /// Read a file from the filesystem and write its content to the Object + /// Database as a loose blob + /// + /// The Oid returned can in turn be passed to `find_blob` to get a handle to + /// the blob. + pub fn blob_path(&self, path: &Path) -> Result { + let path = try!(path.into_c_string()); + let mut raw = raw::git_oid { id: [0; raw::GIT_OID_RAWSZ] }; + unsafe { + try_call!(raw::git_blob_create_fromdisk(&mut raw, self.raw(), + path)); + Ok(Binding::from_raw(&raw as *const _)) + } + } + + /// Create a stream to write blob + /// + /// This function may need to buffer the data on disk and will in general + /// not be the right choice if you know the size of the data to write. + /// + /// Use `BlobWriter::commit()` to commit the write to the object db + /// and get the object id. + /// + /// If the `hintpath` parameter is filled, it will be used to determine + /// what git filters should be applied to the object before it is written + /// to the object database. 
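+    ///
+    /// # Example
+    ///
+    /// Illustrative sketch: stream some bytes into the object database and
+    /// commit them as a blob. Assumes `BlobWriter` implements
+    /// `std::io::Write`; the repository path is a placeholder.
+    ///
+    /// ```no_run
+    /// use std::io::Write;
+    /// use git2::Repository;
+    /// let repo = Repository::open("/path/to/repo").unwrap();
+    /// let mut writer = repo.blob_writer(None).unwrap();
+    /// writer.write_all(b"example contents").unwrap();
+    /// let oid = writer.commit().unwrap();
+    /// let blob = repo.find_blob(oid).unwrap();
+    /// assert_eq!(blob.content(), b"example contents");
+    /// ```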
+ pub fn blob_writer(&self, hintpath: Option<&Path>) -> Result { + let path_str = match hintpath { + Some(path) => Some(try!(path.into_c_string())), + None => None, + }; + let path = match path_str { + Some(ref path) => path.as_ptr(), + None => ptr::null(), + }; + let mut out = ptr::null_mut(); + unsafe { + try_call!(raw::git_blob_create_fromstream(&mut out, self.raw(), path)); + Ok(BlobWriter::from_raw(out)) + } + } + + /// Lookup a reference to one of the objects in a repository. + pub fn find_blob(&self, oid: Oid) -> Result { + let mut raw = ptr::null_mut(); + unsafe { + try_call!(raw::git_blob_lookup(&mut raw, self.raw(), oid.raw())); + Ok(Binding::from_raw(raw)) + } + } + + /// Get the object database for this repository + pub fn odb(&self) -> Result { + let mut odb = ptr::null_mut(); + unsafe { + try_call!(raw::git_repository_odb(&mut odb, self.raw())); + Ok(Odb::from_raw(odb)) + } + } + + /// Create a new branch pointing at a target commit + /// + /// A new direct reference will be created pointing to this target commit. + /// If `force` is true and a reference already exists with the given name, + /// it'll be replaced. + pub fn branch(&self, + branch_name: &str, + target: &Commit, + force: bool) -> Result { + let branch_name = try!(CString::new(branch_name)); + let mut raw = ptr::null_mut(); + unsafe { + try_call!(raw::git_branch_create(&mut raw, + self.raw(), + branch_name, + target.raw(), + force)); + Ok(Branch::wrap(Binding::from_raw(raw))) + } + } + + /// Lookup a branch by its name in a repository. + pub fn find_branch(&self, name: &str, branch_type: BranchType) + -> Result { + let name = try!(CString::new(name)); + let mut ret = ptr::null_mut(); + unsafe { + try_call!(raw::git_branch_lookup(&mut ret, self.raw(), name, + branch_type)); + Ok(Branch::wrap(Binding::from_raw(ret))) + } + } + + /// Create new commit in the repository + /// + /// If the `update_ref` is not `None`, name of the reference that will be + /// updated to point to this commit. If the reference is not direct, it will + /// be resolved to a direct reference. Use "HEAD" to update the HEAD of the + /// current branch and make it point to this commit. If the reference + /// doesn't exist yet, it will be created. If it does exist, the first + /// parent must be the tip of this branch. + pub fn commit(&self, + update_ref: Option<&str>, + author: &Signature, + committer: &Signature, + message: &str, + tree: &Tree, + parents: &[&Commit]) -> Result { + let update_ref = try!(::opt_cstr(update_ref)); + let mut parent_ptrs = parents.iter().map(|p| { + p.raw() as *const raw::git_commit + }).collect::>(); + let message = try!(CString::new(message)); + let mut raw = raw::git_oid { id: [0; raw::GIT_OID_RAWSZ] }; + unsafe { + try_call!(raw::git_commit_create(&mut raw, + self.raw(), + update_ref, + author.raw(), + committer.raw(), + ptr::null(), + message, + tree.raw(), + parents.len() as size_t, + parent_ptrs.as_mut_ptr())); + Ok(Binding::from_raw(&raw as *const _)) + } + } + + /// Create a commit object from the given buffer and signature + /// + /// Given the unsigned commit object's contents, its signature and the + /// header field in which to store the signature, attach the signature to + /// the commit and write it into the given repository. + /// + /// Use `None` in `signature_field` to use the default of `gpgsig`, which is + /// almost certainly what you want. + /// + /// Returns the resulting (signed) commit id. 
+ pub fn commit_signed(&self, + commit_content: &str, + signature: &str, + signature_field: Option<&str>) -> Result { + let commit_content = try!(CString::new(commit_content)); + let signature = try!(CString::new(signature)); + let signature_field = try!(::opt_cstr(signature_field)); + let mut raw = raw::git_oid { id: [0; raw::GIT_OID_RAWSZ] }; + unsafe { + try_call!(raw::git_commit_create_with_signature(&mut raw, + self.raw(), + commit_content, + signature, + signature_field)); + Ok(Binding::from_raw(&raw as *const _)) + } + } + + + /// Extract the signature from a commit + /// + /// Returns a tuple containing the signature in the first value and the + /// signed data in the second. + pub fn extract_signature(&self, + commit_id: &Oid, + signature_field: Option<&str>) + -> Result<(Buf, Buf), Error> { + let signature_field = try!(::opt_cstr(signature_field)); + let signature = Buf::new(); + let content = Buf::new(); + unsafe { + try_call!(raw::git_commit_extract_signature(signature.raw(), + content.raw(), + self.raw(), + commit_id.raw() as *mut _, + signature_field)); + Ok((signature, content)) + } + } + + + /// Lookup a reference to one of the commits in a repository. + pub fn find_commit(&self, oid: Oid) -> Result { + let mut raw = ptr::null_mut(); + unsafe { + try_call!(raw::git_commit_lookup(&mut raw, self.raw(), oid.raw())); + Ok(Binding::from_raw(raw)) + } + } + + /// Creates a `AnnotatedCommit` from the given commit id. + pub fn find_annotated_commit(&self, id: Oid) -> Result { + unsafe { + let mut raw = 0 as *mut raw::git_annotated_commit; + try_call!(raw::git_annotated_commit_lookup(&mut raw, self.raw(), id.raw())); + Ok(Binding::from_raw(raw)) + } + } + + /// Lookup a reference to one of the objects in a repository. + pub fn find_object(&self, oid: Oid, + kind: Option) -> Result { + let mut raw = ptr::null_mut(); + unsafe { + try_call!(raw::git_object_lookup(&mut raw, self.raw(), oid.raw(), + kind)); + Ok(Binding::from_raw(raw)) + } + } + + /// Create a new direct reference. + /// + /// This function will return an error if a reference already exists with + /// the given name unless force is true, in which case it will be + /// overwritten. + pub fn reference(&self, name: &str, id: Oid, force: bool, + log_message: &str) -> Result { + let name = try!(CString::new(name)); + let log_message = try!(CString::new(log_message)); + let mut raw = ptr::null_mut(); + unsafe { + try_call!(raw::git_reference_create(&mut raw, self.raw(), name, + id.raw(), force, + log_message)); + Ok(Binding::from_raw(raw)) + } + } + + /// Conditionally create new direct reference. + /// + /// A direct reference (also called an object id reference) refers directly + /// to a specific object id (a.k.a. OID or SHA) in the repository. The id + /// permanently refers to the object (although the reference itself can be + /// moved). For example, in libgit2 the direct ref "refs/tags/v0.17.0" + /// refers to OID 5b9fac39d8a76b9139667c26a63e6b3f204b3977. + /// + /// The direct reference will be created in the repository and written to + /// the disk. + /// + /// Valid reference names must follow one of two patterns: + /// + /// 1. Top-level names must contain only capital letters and underscores, + /// and must begin and end with a letter. (e.g. "HEAD", "ORIG_HEAD"). + /// 2. Names prefixed with "refs/" can be almost anything. You must avoid + /// the characters `~`, `^`, `:`, `\\`, `?`, `[`, and `*`, and the + /// sequences ".." and "@{" which have special meaning to revparse. 
+ /// + /// This function will return an error if a reference already exists with + /// the given name unless `force` is true, in which case it will be + /// overwritten. + /// + /// The message for the reflog will be ignored if the reference does not + /// belong in the standard set (HEAD, branches and remote-tracking + /// branches) and it does not have a reflog. + /// + /// It will return GIT_EMODIFIED if the reference's value at the time of + /// updating does not match the one passed through `current_id` (i.e. if the + /// ref has changed since the user read it). + pub fn reference_matching(&self, + name: &str, + id: Oid, + force: bool, + current_id: Oid, + log_message: &str) -> Result { + let name = try!(CString::new(name)); + let log_message = try!(CString::new(log_message)); + let mut raw = ptr::null_mut(); + unsafe { + try_call!(raw::git_reference_create_matching(&mut raw, + self.raw(), + name, + id.raw(), + force, + current_id.raw(), + log_message)); + Ok(Binding::from_raw(raw)) + } + } + + /// Create a new symbolic reference. + /// + /// This function will return an error if a reference already exists with + /// the given name unless force is true, in which case it will be + /// overwritten. + pub fn reference_symbolic(&self, name: &str, target: &str, + force: bool, + log_message: &str) + -> Result { + let name = try!(CString::new(name)); + let target = try!(CString::new(target)); + let log_message = try!(CString::new(log_message)); + let mut raw = ptr::null_mut(); + unsafe { + try_call!(raw::git_reference_symbolic_create(&mut raw, self.raw(), + name, target, force, + log_message)); + Ok(Binding::from_raw(raw)) + } + } + + /// Create a new symbolic reference. + /// + /// This function will return an error if a reference already exists with + /// the given name unless force is true, in which case it will be + /// overwritten. + /// + /// It will return GIT_EMODIFIED if the reference's value at the time of + /// updating does not match the one passed through current_value (i.e. if + /// the ref has changed since the user read it). + pub fn reference_symbolic_matching(&self, + name: &str, + target: &str, + force: bool, + current_value: &str, + log_message: &str) + -> Result { + let name = try!(CString::new(name)); + let target = try!(CString::new(target)); + let current_value = try!(CString::new(current_value)); + let log_message = try!(CString::new(log_message)); + let mut raw = ptr::null_mut(); + unsafe { + try_call!(raw::git_reference_symbolic_create_matching(&mut raw, + self.raw(), + name, + target, + force, + current_value, + log_message)); + Ok(Binding::from_raw(raw)) + } + } + + /// Lookup a reference to one of the objects in a repository. + pub fn find_reference(&self, name: &str) -> Result { + let name = try!(CString::new(name)); + let mut raw = ptr::null_mut(); + unsafe { + try_call!(raw::git_reference_lookup(&mut raw, self.raw(), name)); + Ok(Binding::from_raw(raw)) + } + } + + /// Lookup a reference by name and resolve immediately to OID. + /// + /// This function provides a quick way to resolve a reference name straight + /// through to the object id that it refers to. This avoids having to + /// allocate or free any `Reference` objects for simple situations. 
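+    ///
+    /// # Example
+    ///
+    /// Illustrative sketch; assumes a repository at a placeholder path with a
+    /// `master` branch.
+    ///
+    /// ```no_run
+    /// use git2::Repository;
+    /// let repo = Repository::open("/path/to/repo").unwrap();
+    /// let oid = repo.refname_to_id("refs/heads/master").unwrap();
+    /// println!("master points at {}", oid);
+    /// ```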
+ pub fn refname_to_id(&self, name: &str) -> Result { + let name = try!(CString::new(name)); + let mut ret = raw::git_oid { id: [0; raw::GIT_OID_RAWSZ] }; + unsafe { + try_call!(raw::git_reference_name_to_id(&mut ret, self.raw(), name)); + Ok(Binding::from_raw(&ret as *const _)) + } + } + + /// Creates a git_annotated_commit from the given reference. + pub fn reference_to_annotated_commit(&self, reference: &Reference) + -> Result { + let mut ret = ptr::null_mut(); + unsafe { + try_call!(raw::git_annotated_commit_from_ref(&mut ret, + self.raw(), + reference.raw())); + Ok(AnnotatedCommit::from_raw(ret)) + } + } + + /// Create a new action signature with default user and now timestamp. + /// + /// This looks up the user.name and user.email from the configuration and + /// uses the current time as the timestamp, and creates a new signature + /// based on that information. It will return `NotFound` if either the + /// user.name or user.email are not set. + pub fn signature(&self) -> Result, Error> { + let mut ret = ptr::null_mut(); + unsafe { + try_call!(raw::git_signature_default(&mut ret, self.raw())); + Ok(Binding::from_raw(ret)) + } + } + + /// Set up a new git submodule for checkout. + /// + /// This does "git submodule add" up to the fetch and checkout of the + /// submodule contents. It preps a new submodule, creates an entry in + /// `.gitmodules` and creates an empty initialized repository either at the + /// given path in the working directory or in `.git/modules` with a gitlink + /// from the working directory to the new repo. + /// + /// To fully emulate "git submodule add" call this function, then `open()` + /// the submodule repo and perform the clone step as needed. Lastly, call + /// `add_finalize()` to wrap up adding the new submodule and `.gitmodules` + /// to the index to be ready to commit. + pub fn submodule(&self, url: &str, path: &Path, + use_gitlink: bool) -> Result { + let url = try!(CString::new(url)); + let path = try!(path.into_c_string()); + let mut raw = ptr::null_mut(); + unsafe { + try_call!(raw::git_submodule_add_setup(&mut raw, self.raw(), + url, path, use_gitlink)); + Ok(Binding::from_raw(raw)) + } + } + + /// Lookup submodule information by name or path. + /// + /// Given either the submodule name or path (they are usually the same), + /// this returns a structure describing the submodule. + pub fn find_submodule(&self, name: &str) -> Result { + let name = try!(CString::new(name)); + let mut raw = ptr::null_mut(); + unsafe { + try_call!(raw::git_submodule_lookup(&mut raw, self.raw(), name)); + Ok(Binding::from_raw(raw)) + } + } + + /// Get the status for a submodule. + /// + /// This looks at a submodule and tries to determine the status. It + /// will return a combination of the `SubmoduleStatus` values. + pub fn submodule_status(&self, name: &str, ignore: SubmoduleIgnore) + -> Result { + let mut ret = 0; + let name = try!(CString::new(name)); + unsafe { + try_call!(raw::git_submodule_status(&mut ret, self.raw, name, + ignore)); + } + Ok(SubmoduleStatus::from_bits_truncate(ret as u32)) + } + + /// Lookup a reference to one of the objects in a repository. + pub fn find_tree(&self, oid: Oid) -> Result { + let mut raw = ptr::null_mut(); + unsafe { + try_call!(raw::git_tree_lookup(&mut raw, self.raw(), oid.raw())); + Ok(Binding::from_raw(raw)) + } + } + + /// Create a new TreeBuilder, optionally initialized with the + /// entries of the given Tree. 
+ /// + /// The tree builder can be used to create or modify trees in memory and + /// write them as tree objects to the database. + pub fn treebuilder(&self, tree: Option<&Tree>) -> Result { + unsafe { + let mut ret = ptr::null_mut(); + let tree = match tree { + Some(tree) => tree.raw(), + None => ptr::null_mut(), + }; + try_call!(raw::git_treebuilder_new(&mut ret, self.raw, tree)); + Ok(Binding::from_raw(ret)) + } + } + + + /// Create a new tag in the repository from an object + /// + /// A new reference will also be created pointing to this tag object. If + /// `force` is true and a reference already exists with the given name, + /// it'll be replaced. + /// + /// The message will not be cleaned up. + /// + /// The tag name will be checked for validity. You must avoid the characters + /// '~', '^', ':', ' \ ', '?', '[', and '*', and the sequences ".." and " @ + /// {" which have special meaning to revparse. + pub fn tag(&self, name: &str, target: &Object, + tagger: &Signature, message: &str, + force: bool) -> Result { + let name = try!(CString::new(name)); + let message = try!(CString::new(message)); + let mut raw = raw::git_oid { id: [0; raw::GIT_OID_RAWSZ] }; + unsafe { + try_call!(raw::git_tag_create(&mut raw, self.raw, name, + target.raw(), tagger.raw(), + message, force)); + Ok(Binding::from_raw(&raw as *const _)) + } + } + + /// Create a new lightweight tag pointing at a target object + /// + /// A new direct reference will be created pointing to this target object. + /// If force is true and a reference already exists with the given name, + /// it'll be replaced. + pub fn tag_lightweight(&self, + name: &str, + target: &Object, + force: bool) -> Result { + let name = try!(CString::new(name)); + let mut raw = raw::git_oid { id: [0; raw::GIT_OID_RAWSZ] }; + unsafe { + try_call!(raw::git_tag_create_lightweight(&mut raw, self.raw, name, + target.raw(), force)); + Ok(Binding::from_raw(&raw as *const _)) + } + } + + /// Lookup a tag object from the repository. + pub fn find_tag(&self, id: Oid) -> Result { + let mut raw = ptr::null_mut(); + unsafe { + try_call!(raw::git_tag_lookup(&mut raw, self.raw, id.raw())); + Ok(Binding::from_raw(raw)) + } + } + + /// Delete an existing tag reference. + /// + /// The tag name will be checked for validity, see `tag` for some rules + /// about valid names. + pub fn tag_delete(&self, name: &str) -> Result<(), Error> { + let name = try!(CString::new(name)); + unsafe { + try_call!(raw::git_tag_delete(self.raw, name)); + Ok(()) + } + } + + /// Get a list with all the tags in the repository. + /// + /// An optional fnmatch pattern can also be specified. + pub fn tag_names(&self, pattern: Option<&str>) -> Result { + let mut arr = raw::git_strarray { + strings: 0 as *mut *mut c_char, + count: 0, + }; + unsafe { + match pattern { + Some(s) => { + let s = try!(CString::new(s)); + try_call!(raw::git_tag_list_match(&mut arr, s, self.raw)); + } + None => { try_call!(raw::git_tag_list(&mut arr, self.raw)); } + } + Ok(Binding::from_raw(arr)) + } + } + + /// Updates files in the index and the working tree to match the content of + /// the commit pointed at by HEAD. 
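+    ///
+    /// # Example
+    ///
+    /// Illustrative sketch: force-update the working tree from HEAD.
+    /// `CheckoutBuilder::force` is assumed from the checkout builder API;
+    /// passing `None` instead performs a default checkout, as in the tests
+    /// below.
+    ///
+    /// ```no_run
+    /// use git2::Repository;
+    /// use git2::build::CheckoutBuilder;
+    /// let repo = Repository::open("/path/to/repo").unwrap();
+    /// let mut opts = CheckoutBuilder::new();
+    /// opts.force();
+    /// repo.checkout_head(Some(&mut opts)).unwrap();
+    /// ```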
+ pub fn checkout_head(&self, opts: Option<&mut CheckoutBuilder>) + -> Result<(), Error> { + unsafe { + let mut raw_opts = mem::zeroed(); + try_call!(raw::git_checkout_init_options(&mut raw_opts, + raw::GIT_CHECKOUT_OPTIONS_VERSION)); + if let Some(c) = opts { + c.configure(&mut raw_opts); + } + + try_call!(raw::git_checkout_head(self.raw, &raw_opts)); + } + Ok(()) + } + + /// Updates files in the working tree to match the content of the index. + /// + /// If the index is `None`, the repository's index will be used. + pub fn checkout_index(&self, + index: Option<&mut Index>, + opts: Option<&mut CheckoutBuilder>) -> Result<(), Error> { + unsafe { + let mut raw_opts = mem::zeroed(); + try_call!(raw::git_checkout_init_options(&mut raw_opts, + raw::GIT_CHECKOUT_OPTIONS_VERSION)); + if let Some(c) = opts { + c.configure(&mut raw_opts); + } + + try_call!(raw::git_checkout_index(self.raw, + index.map(|i| &mut *i.raw()), + &raw_opts)); + } + Ok(()) + } + + /// Updates files in the index and working tree to match the content of the + /// tree pointed at by the treeish. + pub fn checkout_tree(&self, + treeish: &Object, + opts: Option<&mut CheckoutBuilder>) -> Result<(), Error> { + unsafe { + let mut raw_opts = mem::zeroed(); + try_call!(raw::git_checkout_init_options(&mut raw_opts, + raw::GIT_CHECKOUT_OPTIONS_VERSION)); + if let Some(c) = opts { + c.configure(&mut raw_opts); + } + + try_call!(raw::git_checkout_tree(self.raw, &*treeish.raw(), + &raw_opts)); + } + Ok(()) + } + + /// Merges the given commit(s) into HEAD, writing the results into the + /// working directory. Any changes are staged for commit and any conflicts + /// are written to the index. Callers should inspect the repository's index + /// after this completes, resolve any conflicts and prepare a commit. + /// + /// For compatibility with git, the repository is put into a merging state. + /// Once the commit is done (or if the uses wishes to abort), you should + /// clear this state by calling git_repository_state_cleanup(). + pub fn merge(&self, + annotated_commits: &[&AnnotatedCommit], + merge_opts: Option<&mut MergeOptions>, + checkout_opts: Option<&mut CheckoutBuilder>) + -> Result<(), Error> + { + unsafe { + let mut raw_checkout_opts = mem::zeroed(); + try_call!(raw::git_checkout_init_options(&mut raw_checkout_opts, + raw::GIT_CHECKOUT_OPTIONS_VERSION)); + if let Some(c) = checkout_opts { + c.configure(&mut raw_checkout_opts); + } + + let mut commit_ptrs = annotated_commits.iter().map(|c| { + c.raw() as *const raw::git_annotated_commit + }).collect::>(); + + try_call!(raw::git_merge(self.raw, + commit_ptrs.as_mut_ptr(), + annotated_commits.len() as size_t, + merge_opts.map(|o| o.raw()) + .unwrap_or(ptr::null()), + &raw_checkout_opts)); + } + Ok(()) + } + + /// Merge two commits, producing an index that reflects the result of + /// the merge. The index may be written as-is to the working directory or + /// checked out. If the index is to be converted to a tree, the caller + /// should resolve any conflicts that arose as part of the merge. + pub fn merge_commits(&self, our_commit: &Commit, their_commit: &Commit, + opts: Option<&MergeOptions>) -> Result { + let mut raw = ptr::null_mut(); + unsafe { + try_call!(raw::git_merge_commits(&mut raw, self.raw, + our_commit.raw(), + their_commit.raw(), + opts.map(|o| o.raw()))); + Ok(Binding::from_raw(raw)) + } + } + + /// Merge two trees, producing an index that reflects the result of + /// the merge. The index may be written as-is to the working directory or + /// checked out. 
If the index is to be converted to a tree, the caller + /// should resolve any conflicts that arose as part of the merge. + pub fn merge_trees(&self, ancestor_tree: &Tree, our_tree: &Tree, + their_tree: &Tree, opts: Option<&MergeOptions>) -> Result { + let mut raw = ptr::null_mut(); + unsafe { + try_call!(raw::git_merge_trees(&mut raw, self.raw, ancestor_tree.raw(), + our_tree.raw(), their_tree.raw(), + opts.map(|o| o.raw()))); + Ok(Binding::from_raw(raw)) + } + } + + /// Remove all the metadata associated with an ongoing command like merge, + /// revert, cherry-pick, etc. For example: MERGE_HEAD, MERGE_MSG, etc. + pub fn cleanup_state(&self) -> Result<(), Error> { + unsafe { + try_call!(raw::git_repository_state_cleanup(self.raw)); + } + Ok(()) + } + + /// Analyzes the given branch(es) and determines the opportunities for + /// merging them into the HEAD of the repository. + pub fn merge_analysis(&self, + their_heads: &[&AnnotatedCommit]) + -> Result<(MergeAnalysis, MergePreference), Error> { + unsafe { + let mut raw_merge_analysis = 0 as raw::git_merge_analysis_t; + let mut raw_merge_preference = 0 as raw::git_merge_preference_t; + let mut their_heads = their_heads + .iter() + .map(|v| v.raw() as *const _) + .collect::>(); + try_call!(raw::git_merge_analysis(&mut raw_merge_analysis, + &mut raw_merge_preference, + self.raw, + their_heads.as_mut_ptr() as *mut _, + their_heads.len())); + Ok((MergeAnalysis::from_bits_truncate(raw_merge_analysis as u32), MergePreference::from_bits_truncate(raw_merge_preference as u32))) + } + } + + /// Add a note for an object + /// + /// The `notes_ref` argument is the canonical name of the reference to use, + /// defaulting to "refs/notes/commits". If `force` is specified then + /// previous notes are overwritten. + pub fn note(&self, + author: &Signature, + committer: &Signature, + notes_ref: Option<&str>, + oid: Oid, + note: &str, + force: bool) -> Result { + let notes_ref = try!(::opt_cstr(notes_ref)); + let note = try!(CString::new(note)); + let mut ret = raw::git_oid { id: [0; raw::GIT_OID_RAWSZ] }; + unsafe { + try_call!(raw::git_note_create(&mut ret, + self.raw, + notes_ref, + author.raw(), + committer.raw(), + oid.raw(), + note, + force)); + Ok(Binding::from_raw(&ret as *const _)) + } + } + + /// Get the default notes reference for this repository + pub fn note_default_ref(&self) -> Result { + let ret = Buf::new(); + unsafe { + try_call!(raw::git_note_default_ref(ret.raw(), self.raw)); + } + Ok(str::from_utf8(&ret).unwrap().to_string()) + } + + /// Creates a new iterator for notes in this repository. + /// + /// The `notes_ref` argument is the canonical name of the reference to use, + /// defaulting to "refs/notes/commits". + /// + /// The iterator returned yields pairs of (Oid, Oid) where the first element + /// is the id of the note and the second id is the id the note is + /// annotating. + pub fn notes(&self, notes_ref: Option<&str>) -> Result { + let notes_ref = try!(::opt_cstr(notes_ref)); + let mut ret = ptr::null_mut(); + unsafe { + try_call!(raw::git_note_iterator_new(&mut ret, self.raw, notes_ref)); + Ok(Binding::from_raw(ret)) + } + } + + /// Read the note for an object. + /// + /// The `notes_ref` argument is the canonical name of the reference to use, + /// defaulting to "refs/notes/commits". + /// + /// The id specified is the Oid of the git object to read the note from. 
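+    ///
+    /// # Example
+    ///
+    /// Illustrative sketch: attach a note to the current HEAD commit and read
+    /// it back. `Note::message` is assumed from the notes API; the repository
+    /// path is a placeholder.
+    ///
+    /// ```no_run
+    /// use git2::Repository;
+    /// let repo = Repository::open("/path/to/repo").unwrap();
+    /// let sig = repo.signature().unwrap();
+    /// let head = repo.refname_to_id("HEAD").unwrap();
+    /// repo.note(&sig, &sig, None, head, "reviewed", false).unwrap();
+    /// let note = repo.find_note(None, head).unwrap();
+    /// assert_eq!(note.message(), Some("reviewed"));
+    /// ```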
+ pub fn find_note(&self, notes_ref: Option<&str>, id: Oid) + -> Result { + let notes_ref = try!(::opt_cstr(notes_ref)); + let mut ret = ptr::null_mut(); + unsafe { + try_call!(raw::git_note_read(&mut ret, self.raw, notes_ref, + id.raw())); + Ok(Binding::from_raw(ret)) + } + } + + /// Remove the note for an object. + /// + /// The `notes_ref` argument is the canonical name of the reference to use, + /// defaulting to "refs/notes/commits". + /// + /// The id specified is the Oid of the git object to remove the note from. + pub fn note_delete(&self, + id: Oid, + notes_ref: Option<&str>, + author: &Signature, + committer: &Signature) -> Result<(), Error> { + let notes_ref = try!(::opt_cstr(notes_ref)); + unsafe { + try_call!(raw::git_note_remove(self.raw, notes_ref, author.raw(), + committer.raw(), id.raw())); + Ok(()) + } + } + + /// Create a revwalk that can be used to traverse the commit graph. + pub fn revwalk(&self) -> Result { + let mut raw = ptr::null_mut(); + unsafe { + try_call!(raw::git_revwalk_new(&mut raw, self.raw())); + Ok(Binding::from_raw(raw)) + } + } + + /// Get the blame for a single file. + pub fn blame_file(&self, path: &Path, opts: Option<&mut BlameOptions>) + -> Result { + let path = try!(path.into_c_string()); + let mut raw = ptr::null_mut(); + + unsafe { + try_call!(raw::git_blame_file(&mut raw, + self.raw(), + path, + opts.map(|s| s.raw()))); + Ok(Binding::from_raw(raw)) + } + } + + /// Find a merge base between two commits + pub fn merge_base(&self, one: Oid, two: Oid) -> Result { + let mut raw = raw::git_oid { id: [0; raw::GIT_OID_RAWSZ] }; + unsafe { + try_call!(raw::git_merge_base(&mut raw, self.raw, + one.raw(), two.raw())); + Ok(Binding::from_raw(&raw as *const _)) + } + } + + /// Find all merge bases between two commits + pub fn merge_bases(&self, one: Oid, two: Oid) -> Result { + let mut arr = raw::git_oidarray { + ids: ptr::null_mut(), + count: 0, + }; + unsafe { + try_call!(raw::git_merge_bases(&mut arr, self.raw, + one.raw(), two.raw())); + Ok(Binding::from_raw(arr)) + } + } + + /// Count the number of unique commits between two commit objects + /// + /// There is no need for branches containing the commits to have any + /// upstream relationship, but it helps to think of one as a branch and the + /// other as its upstream, the ahead and behind values will be what git + /// would report for the branches. + pub fn graph_ahead_behind(&self, local: Oid, upstream: Oid) + -> Result<(usize, usize), Error> { + unsafe { + let mut ahead: size_t = 0; + let mut behind: size_t = 0; + try_call!(raw::git_graph_ahead_behind(&mut ahead, &mut behind, + self.raw(), local.raw(), + upstream.raw())); + Ok((ahead as usize, behind as usize)) + } + } + + /// Determine if a commit is the descendant of another commit + pub fn graph_descendant_of(&self, commit: Oid, ancestor: Oid) + -> Result { + unsafe { + let rv = try_call!(raw::git_graph_descendant_of(self.raw(), + commit.raw(), + ancestor.raw())); + Ok(rv != 0) + } + } + + /// Read the reflog for the given reference + /// + /// If there is no reflog file for the given reference yet, an empty reflog + /// object will be returned. 
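+    ///
+    /// # Example
+    ///
+    /// Illustrative sketch: print the messages recorded in the HEAD reflog.
+    /// `Reflog::iter` and `ReflogEntry::message` are assumed from the reflog
+    /// API; the repository path is a placeholder.
+    ///
+    /// ```no_run
+    /// use git2::Repository;
+    /// let repo = Repository::open("/path/to/repo").unwrap();
+    /// let reflog = repo.reflog("HEAD").unwrap();
+    /// for entry in reflog.iter() {
+    ///     println!("{}", entry.message().unwrap_or(""));
+    /// }
+    /// ```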
+ pub fn reflog(&self, name: &str) -> Result { + let name = try!(CString::new(name)); + let mut ret = ptr::null_mut(); + unsafe { + try_call!(raw::git_reflog_read(&mut ret, self.raw, name)); + Ok(Binding::from_raw(ret)) + } + } + + /// Delete the reflog for the given reference + pub fn reflog_delete(&self, name: &str) -> Result<(), Error> { + let name = try!(CString::new(name)); + unsafe { try_call!(raw::git_reflog_delete(self.raw, name)); } + Ok(()) + } + + /// Rename a reflog + /// + /// The reflog to be renamed is expected to already exist. + pub fn reflog_rename(&self, old_name: &str, new_name: &str) + -> Result<(), Error> { + let old_name = try!(CString::new(old_name)); + let new_name = try!(CString::new(new_name)); + unsafe { + try_call!(raw::git_reflog_rename(self.raw, old_name, new_name)); + } + Ok(()) + } + + /// Check if the given reference has a reflog. + pub fn reference_has_log(&self, name: &str) -> Result { + let name = try!(CString::new(name)); + let ret = unsafe { + try_call!(raw::git_reference_has_log(self.raw, name)) + }; + Ok(ret != 0) + } + + /// Ensure that the given reference has a reflog. + pub fn reference_ensure_log(&self, name: &str) -> Result<(), Error> { + let name = try!(CString::new(name)); + unsafe { + try_call!(raw::git_reference_ensure_log(self.raw, name)); + } + Ok(()) + } + + /// Describes a commit + /// + /// Performs a describe operation on the current commit and the worktree. + /// After performing a describe on HEAD, a status is run and description is + /// considered to be dirty if there are. + pub fn describe(&self, opts: &DescribeOptions) -> Result { + let mut ret = ptr::null_mut(); + unsafe { + try_call!(raw::git_describe_workdir(&mut ret, self.raw, opts.raw())); + Ok(Binding::from_raw(ret)) + } + } + + /// Create a diff with the difference between two tree objects. + /// + /// This is equivalent to `git diff ` + /// + /// The first tree will be used for the "old_file" side of the delta and the + /// second tree will be used for the "new_file" side of the delta. You can + /// pass `None` to indicate an empty tree, although it is an error to pass + /// `None` for both the `old_tree` and `new_tree`. + pub fn diff_tree_to_tree(&self, + old_tree: Option<&Tree>, + new_tree: Option<&Tree>, + opts: Option<&mut DiffOptions>) + -> Result { + let mut ret = ptr::null_mut(); + unsafe { + try_call!(raw::git_diff_tree_to_tree(&mut ret, + self.raw(), + old_tree.map(|s| s.raw()), + new_tree.map(|s| s.raw()), + opts.map(|s| s.raw()))); + Ok(Binding::from_raw(ret)) + } + } + + /// Create a diff between a tree and repository index. + /// + /// This is equivalent to `git diff --cached ` or if you pass + /// the HEAD tree, then like `git diff --cached`. + /// + /// The tree you pass will be used for the "old_file" side of the delta, and + /// the index will be used for the "new_file" side of the delta. + /// + /// If you pass `None` for the index, then the existing index of the `repo` + /// will be used. In this case, the index will be refreshed from disk + /// (if it has changed) before the diff is generated. + /// + /// If the tree is `None`, then it is considered an empty tree. 
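+    ///
+    /// # Example
+    ///
+    /// Illustrative sketch: diff the HEAD tree against the index, roughly what
+    /// `git diff --cached` shows. `Commit::tree` and `Diff::deltas` are
+    /// assumed from the commit and diff APIs; the repository path is a
+    /// placeholder.
+    ///
+    /// ```no_run
+    /// use git2::Repository;
+    /// let repo = Repository::open("/path/to/repo").unwrap();
+    /// let head = repo.find_commit(repo.refname_to_id("HEAD").unwrap()).unwrap();
+    /// let tree = head.tree().unwrap();
+    /// let diff = repo.diff_tree_to_index(Some(&tree), None, None).unwrap();
+    /// println!("{} staged change(s)", diff.deltas().count());
+    /// ```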
+ pub fn diff_tree_to_index(&self, + old_tree: Option<&Tree>, + index: Option<&Index>, + opts: Option<&mut DiffOptions>) + -> Result { + let mut ret = ptr::null_mut(); + unsafe { + try_call!(raw::git_diff_tree_to_index(&mut ret, + self.raw(), + old_tree.map(|s| s.raw()), + index.map(|s| s.raw()), + opts.map(|s| s.raw()))); + Ok(Binding::from_raw(ret)) + } + } + + /// Create a diff between two index objects. + /// + /// The first index will be used for the "old_file" side of the delta, and + /// the second index will be used for the "new_file" side of the delta. + pub fn diff_index_to_index(&self, + old_index: &Index, + new_index: &Index, + opts: Option<&mut DiffOptions>) + -> Result { + let mut ret = ptr::null_mut(); + unsafe { + try_call!(raw::git_diff_index_to_index(&mut ret, + self.raw(), + old_index.raw(), + new_index.raw(), + opts.map(|s| s.raw()))); + Ok(Binding::from_raw(ret)) + } + } + + /// Create a diff between the repository index and the workdir directory. + /// + /// This matches the `git diff` command. See the note below on + /// `tree_to_workdir` for a discussion of the difference between + /// `git diff` and `git diff HEAD` and how to emulate a `git diff ` + /// using libgit2. + /// + /// The index will be used for the "old_file" side of the delta, and the + /// working directory will be used for the "new_file" side of the delta. + /// + /// If you pass `None` for the index, then the existing index of the `repo` + /// will be used. In this case, the index will be refreshed from disk + /// (if it has changed) before the diff is generated. + pub fn diff_index_to_workdir(&self, + index: Option<&Index>, + opts: Option<&mut DiffOptions>) + -> Result { + let mut ret = ptr::null_mut(); + unsafe { + try_call!(raw::git_diff_index_to_workdir(&mut ret, + self.raw(), + index.map(|s| s.raw()), + opts.map(|s| s.raw()))); + Ok(Binding::from_raw(ret)) + } + } + + /// Create a diff between a tree and the working directory. + /// + /// The tree you provide will be used for the "old_file" side of the delta, + /// and the working directory will be used for the "new_file" side. + /// + /// This is not the same as `git diff ` or `git diff-index + /// `. Those commands use information from the index, whereas this + /// function strictly returns the differences between the tree and the files + /// in the working directory, regardless of the state of the index. Use + /// `tree_to_workdir_with_index` to emulate those commands. + /// + /// To see difference between this and `tree_to_workdir_with_index`, + /// consider the example of a staged file deletion where the file has then + /// been put back into the working dir and further modified. The + /// tree-to-workdir diff for that file is 'modified', but `git diff` would + /// show status 'deleted' since there is a staged delete. + /// + /// If `None` is passed for `tree`, then an empty tree is used. + pub fn diff_tree_to_workdir(&self, + old_tree: Option<&Tree>, + opts: Option<&mut DiffOptions>) + -> Result { + let mut ret = ptr::null_mut(); + unsafe { + try_call!(raw::git_diff_tree_to_workdir(&mut ret, + self.raw(), + old_tree.map(|s| s.raw()), + opts.map(|s| s.raw()))); + Ok(Binding::from_raw(ret)) + } + } + + /// Create a diff between a tree and the working directory using index data + /// to account for staged deletes, tracked files, etc. + /// + /// This emulates `git diff ` by diffing the tree to the index and + /// the index to the working directory and blending the results into a + /// single diff that includes staged deleted, etc. 
+ pub fn diff_tree_to_workdir_with_index(&self, + old_tree: Option<&Tree>, + opts: Option<&mut DiffOptions>) + -> Result { + let mut ret = ptr::null_mut(); + unsafe { + try_call!(raw::git_diff_tree_to_workdir_with_index(&mut ret, + self.raw(), old_tree.map(|s| s.raw()), opts.map(|s| s.raw()))); + Ok(Binding::from_raw(ret)) + } + } + + /// Create a PackBuilder + pub fn packbuilder(&self) -> Result { + let mut ret = ptr::null_mut(); + unsafe { + try_call!(raw::git_packbuilder_new(&mut ret, self.raw())); + Ok(Binding::from_raw(ret)) + } + } + + /// Save the local modifications to a new stash. + pub fn stash_save(&mut self, + stasher: &Signature, + message: &str, + flags: Option) + -> Result { + unsafe { + let mut raw_oid = raw::git_oid { id: [0; raw::GIT_OID_RAWSZ] }; + let message = try!(CString::new(message)); + let flags = flags.unwrap_or_else(StashFlags::empty); + try_call!(raw::git_stash_save(&mut raw_oid, + self.raw(), + stasher.raw(), + message, + flags.bits() as c_uint)); + Ok(Binding::from_raw(&raw_oid as *const _)) + } + } + + /// Apply a single stashed state from the stash list. + pub fn stash_apply(&mut self, + index: usize, + opts: Option<&mut StashApplyOptions>) + -> Result<(), Error> { + unsafe { + let opts = opts.map(|opts| opts.raw()); + try_call!(raw::git_stash_apply(self.raw(), index, opts)); + Ok(()) + } + } + + /// Loop over all the stashed states and issue a callback for each one. + /// + /// Return `true` to continue iterating or `false` to stop. + pub fn stash_foreach(&mut self, mut callback: C) -> Result<(), Error> + where C: FnMut(usize, &str, &Oid) -> bool + { + unsafe { + let mut data = StashCbData { callback: &mut callback }; + try_call!(raw::git_stash_foreach(self.raw(), + stash_cb, + &mut data as *mut _ as *mut _)); + Ok(()) + } + } + + /// Remove a single stashed state from the stash list. + pub fn stash_drop(&mut self, index: usize) -> Result<(), Error> { + unsafe { + try_call!(raw::git_stash_drop(self.raw(), index)); + Ok(()) + } + } + + /// Apply a single stashed state from the stash list and remove it from the list if successful. + pub fn stash_pop(&mut self, + index: usize, + opts: Option<&mut StashApplyOptions>) + -> Result<(), Error> { + unsafe { + let opts = opts.map(|opts| opts.raw()); + try_call!(raw::git_stash_pop(self.raw(), index, opts)); + Ok(()) + } + } + + /// Add ignore rules for a repository. + /// + /// The format of the rules is the same one of the .gitignore file. + pub fn add_ignore_rule(&self, rules: &str) -> Result<(), Error> { + let rules = CString::new(rules)?; + unsafe { + try_call!(raw::git_ignore_add_rule(self.raw, rules)); + } + Ok(()) + } + + /// Clear ignore rules that were explicitly added. + pub fn clear_ignore_rules(&self) -> Result<(), Error> { + unsafe { + try_call!(raw::git_ignore_clear_internal_rules(self.raw)); + } + Ok(()) + } + + /// Test if the ignore rules apply to a given path. 
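+    ///
+    /// # Example
+    ///
+    /// Illustrative sketch: add an in-memory ignore rule and check a path
+    /// against it. The repository path is a placeholder.
+    ///
+    /// ```no_run
+    /// use git2::Repository;
+    /// let repo = Repository::open("/path/to/repo").unwrap();
+    /// repo.add_ignore_rule("*.o").unwrap();
+    /// assert!(repo.is_path_ignored("hello.o").unwrap());
+    /// ```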
+    pub fn is_path_ignored<P: AsRef<Path>>(&self, path: P) -> Result<bool, Error> {
+        let path = if cfg!(windows) {
+            // `git_ignore_path_is_ignored` does not work with windows path separator
+            // so we convert \ to /
+            try!(::std::ffi::CString::new(path.as_ref().to_string_lossy().replace('\\', "/")))
+        } else {
+            try!(path.as_ref().into_c_string())
+        };
+        let mut ignored: c_int = 0;
+        unsafe {
+            try_call!(raw::git_ignore_path_is_ignored(&mut ignored, self.raw, path));
+        }
+        Ok(ignored == 1)
+    }
+}
+
+impl Binding for Repository {
+    type Raw = *mut raw::git_repository;
+    unsafe fn from_raw(ptr: *mut raw::git_repository) -> Repository {
+        Repository { raw: ptr }
+    }
+    fn raw(&self) -> *mut raw::git_repository { self.raw }
+}
+
+impl Drop for Repository {
+    fn drop(&mut self) {
+        unsafe { raw::git_repository_free(self.raw) }
+    }
+}
+
+impl RepositoryInitOptions {
+    /// Creates a default set of initialization options.
+    ///
+    /// By default this will set flags for creating all necessary directories
+    /// and initializing a directory from the user-configured templates path.
+    pub fn new() -> RepositoryInitOptions {
+        RepositoryInitOptions {
+            flags: raw::GIT_REPOSITORY_INIT_MKDIR as u32 |
+                   raw::GIT_REPOSITORY_INIT_MKPATH as u32 |
+                   raw::GIT_REPOSITORY_INIT_EXTERNAL_TEMPLATE as u32,
+            mode: 0,
+            workdir_path: None,
+            description: None,
+            template_path: None,
+            initial_head: None,
+            origin_url: None,
+        }
+    }
+
+    /// Create a bare repository with no working directory.
+    ///
+    /// Defaults to false.
+    pub fn bare(&mut self, bare: bool) -> &mut RepositoryInitOptions {
+        self.flag(raw::GIT_REPOSITORY_INIT_BARE, bare)
+    }
+
+    /// Return an error if the repository path appears to already be a git
+    /// repository.
+    ///
+    /// Defaults to false.
+    pub fn no_reinit(&mut self, enabled: bool) -> &mut RepositoryInitOptions {
+        self.flag(raw::GIT_REPOSITORY_INIT_NO_REINIT, enabled)
+    }
+
+    /// Normally a '/.git/' will be appended to the repo path for non-bare repos
+    /// (if it is not already there), but passing this flag prevents that
+    /// behavior.
+    ///
+    /// Defaults to false.
+    pub fn no_dotgit_dir(&mut self, enabled: bool) -> &mut RepositoryInitOptions {
+        self.flag(raw::GIT_REPOSITORY_INIT_NO_DOTGIT_DIR, enabled)
+    }
+
+    /// Make the repo path (and workdir path) as needed. The ".git" directory
+    /// will always be created regardless of this flag.
+    ///
+    /// Defaults to true.
+    pub fn mkdir(&mut self, enabled: bool) -> &mut RepositoryInitOptions {
+        self.flag(raw::GIT_REPOSITORY_INIT_MKDIR, enabled)
+    }
+
+    /// Recursively make all components of the repo and workdir paths as
+    /// necessary.
+    ///
+    /// Defaults to true.
+    pub fn mkpath(&mut self, enabled: bool) -> &mut RepositoryInitOptions {
+        self.flag(raw::GIT_REPOSITORY_INIT_MKPATH, enabled)
+    }
+
+    /// Set to one of the `RepositoryInit` constants, or a custom value.
+    pub fn mode(&mut self, mode: RepositoryInitMode)
+                -> &mut RepositoryInitOptions {
+        self.mode = mode.bits();
+        self
+    }
+
+    /// Enable or disable using external templates.
+    ///
+    /// If enabled, then the `template_path` option will be queried first, then
+    /// `init.templatedir` from the global config, and finally
+    /// `/usr/share/git-core-templates` will be used (if it exists).
+    ///
+    /// Defaults to true.
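+    ///
+    /// # Example
+    ///
+    /// Illustrative sketch: initialize a bare repository with external
+    /// templates disabled. `Repository::init_opts` is assumed as the entry
+    /// point that consumes these options; the target path is a placeholder.
+    ///
+    /// ```no_run
+    /// use git2::{Repository, RepositoryInitOptions};
+    /// let mut opts = RepositoryInitOptions::new();
+    /// opts.bare(true).external_template(false);
+    /// let repo = Repository::init_opts("/path/to/new/repo", &opts).unwrap();
+    /// assert!(repo.is_bare());
+    /// ```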
+    pub fn external_template(&mut self, enabled: bool)
+                             -> &mut RepositoryInitOptions {
+        self.flag(raw::GIT_REPOSITORY_INIT_EXTERNAL_TEMPLATE, enabled)
+    }
+
+    fn flag(&mut self, flag: raw::git_repository_init_flag_t, on: bool)
+            -> &mut RepositoryInitOptions {
+        if on {
+            self.flags |= flag as u32;
+        } else {
+            self.flags &= !(flag as u32);
+        }
+        self
+    }
+
+    /// The path to the working directory.
+    ///
+    /// If this is a relative path it will be evaluated relative to the repo
+    /// path. If this is not the "natural" working directory, a .git gitlink
+    /// file will be created here linking to the repo path.
+    pub fn workdir_path(&mut self, path: &Path) -> &mut RepositoryInitOptions {
+        self.workdir_path = Some(path.into_c_string().unwrap());
+        self
+    }
+
+    /// If set, this will be used to initialize the "description" file in the
+    /// repository instead of using the template content.
+    pub fn description(&mut self, desc: &str) -> &mut RepositoryInitOptions {
+        self.description = Some(CString::new(desc).unwrap());
+        self
+    }
+
+    /// When the `external_template` option is set, this is the first location
+    /// to check for the template directory.
+    ///
+    /// If this is not configured, then the default locations will be searched
+    /// instead.
+    pub fn template_path(&mut self, path: &Path) -> &mut RepositoryInitOptions {
+        self.template_path = Some(path.into_c_string().unwrap());
+        self
+    }
+
+    /// The name of the head to point HEAD at.
+    ///
+    /// If not configured, this will be treated as `master` and the HEAD ref
+    /// will be set to `refs/heads/master`. If this begins with `refs/` it will
+    /// be used verbatim; otherwise `refs/heads/` will be prefixed.
+    pub fn initial_head(&mut self, head: &str) -> &mut RepositoryInitOptions {
+        self.initial_head = Some(CString::new(head).unwrap());
+        self
+    }
+
+    /// If set, then after the rest of the repository initialization is
+    /// completed an `origin` remote will be added pointing to this URL.
+    pub fn origin_url(&mut self, url: &str) -> &mut RepositoryInitOptions {
+        self.origin_url = Some(CString::new(url).unwrap());
+        self
+    }
+
+    /// Creates a set of raw init options to be used with
+    /// `git_repository_init_ext`.
+    ///
+    /// This method is unsafe as the returned value may have pointers to the
+    /// interior of this structure.
+ pub unsafe fn raw(&self) -> raw::git_repository_init_options { + let mut opts = mem::zeroed(); + assert_eq!(raw::git_repository_init_init_options(&mut opts, + raw::GIT_REPOSITORY_INIT_OPTIONS_VERSION), 0); + opts.flags = self.flags; + opts.mode = self.mode; + opts.workdir_path = ::call::convert(&self.workdir_path); + opts.description = ::call::convert(&self.description); + opts.template_path = ::call::convert(&self.template_path); + opts.initial_head = ::call::convert(&self.initial_head); + opts.origin_url = ::call::convert(&self.origin_url); + opts + } +} + +#[cfg(test)] +mod tests { + use std::ffi::OsStr; + use std::fs; + use std::path::Path; + use tempdir::TempDir; + use {Repository, Oid, ObjectType, ResetType}; + use build::CheckoutBuilder; + + #[test] + fn smoke_init() { + let td = TempDir::new("test").unwrap(); + let path = td.path(); + + let repo = Repository::init(path).unwrap(); + assert!(!repo.is_bare()); + } + + #[test] + fn smoke_init_bare() { + let td = TempDir::new("test").unwrap(); + let path = td.path(); + + let repo = Repository::init_bare(path).unwrap(); + assert!(repo.is_bare()); + assert!(repo.namespace().is_none()); + } + + #[test] + fn smoke_open() { + let td = TempDir::new("test").unwrap(); + let path = td.path(); + Repository::init(td.path()).unwrap(); + let repo = Repository::open(path).unwrap(); + assert!(!repo.is_bare()); + assert!(!repo.is_shallow()); + assert!(repo.is_empty().unwrap()); + assert_eq!(::test::realpath(&repo.path()).unwrap(), + ::test::realpath(&td.path().join(".git/")).unwrap()); + assert_eq!(repo.state(), ::RepositoryState::Clean); + } + + #[test] + fn smoke_open_bare() { + let td = TempDir::new("test").unwrap(); + let path = td.path(); + Repository::init_bare(td.path()).unwrap(); + + let repo = Repository::open(path).unwrap(); + assert!(repo.is_bare()); + assert_eq!(::test::realpath(&repo.path()).unwrap(), + ::test::realpath(&td.path().join("")).unwrap()); + } + + #[test] + fn smoke_checkout() { + let (_td, repo) = ::test::repo_init(); + repo.checkout_head(None).unwrap(); + } + + #[test] + fn smoke_revparse() { + let (_td, repo) = ::test::repo_init(); + let rev = repo.revparse("HEAD").unwrap(); + assert!(rev.to().is_none()); + let from = rev.from().unwrap(); + assert!(rev.from().is_some()); + + assert_eq!(repo.revparse_single("HEAD").unwrap().id(), from.id()); + let obj = repo.find_object(from.id(), None).unwrap().clone(); + obj.peel(ObjectType::Any).unwrap(); + obj.short_id().unwrap(); + repo.reset(&obj, ResetType::Hard, None).unwrap(); + let mut opts = CheckoutBuilder::new(); + t!(repo.reset(&obj, ResetType::Soft, Some(&mut opts))); + } + + #[test] + fn makes_dirs() { + let td = TempDir::new("foo").unwrap(); + Repository::init(&td.path().join("a/b/c/d")).unwrap(); + } + + #[test] + fn smoke_discover() { + let td = TempDir::new("test").unwrap(); + let subdir = td.path().join("subdi"); + fs::create_dir(&subdir).unwrap(); + Repository::init_bare(td.path()).unwrap(); + let repo = Repository::discover(&subdir).unwrap(); + assert_eq!(::test::realpath(&repo.path()).unwrap(), + ::test::realpath(&td.path().join("")).unwrap()); + } + + #[test] + fn smoke_open_ext() { + let td = TempDir::new("test").unwrap(); + let subdir = td.path().join("subdir"); + fs::create_dir(&subdir).unwrap(); + Repository::init(td.path()).unwrap(); + + let repo = Repository::open_ext(&subdir, ::RepositoryOpenFlags::empty(), &[] as &[&OsStr]).unwrap(); + assert!(!repo.is_bare()); + assert_eq!(::test::realpath(&repo.path()).unwrap(), + 
::test::realpath(&td.path().join(".git")).unwrap()); + + let repo = Repository::open_ext(&subdir, ::RepositoryOpenFlags::BARE, &[] as &[&OsStr]).unwrap(); + assert!(repo.is_bare()); + assert_eq!(::test::realpath(&repo.path()).unwrap(), + ::test::realpath(&td.path().join(".git")).unwrap()); + + let err = Repository::open_ext(&subdir, ::RepositoryOpenFlags::NO_SEARCH, &[] as &[&OsStr]).err().unwrap(); + assert_eq!(err.code(), ::ErrorCode::NotFound); + + assert!(Repository::open_ext(&subdir, + ::RepositoryOpenFlags::empty(), + &[&subdir]).is_ok()); + } + + fn graph_repo_init() -> (TempDir, Repository) { + let (_td, repo) = ::test::repo_init(); + { + let head = repo.head().unwrap().target().unwrap(); + let head = repo.find_commit(head).unwrap(); + + let mut index = repo.index().unwrap(); + let id = index.write_tree().unwrap(); + + let tree = repo.find_tree(id).unwrap(); + let sig = repo.signature().unwrap(); + repo.commit(Some("HEAD"), &sig, &sig, "second", + &tree, &[&head]).unwrap(); + } + (_td, repo) + } + + #[test] + fn smoke_graph_ahead_behind() { + let (_td, repo) = graph_repo_init(); + let head = repo.head().unwrap().target().unwrap(); + let head = repo.find_commit(head).unwrap(); + let head_id = head.id(); + let head_parent_id = head.parent(0).unwrap().id(); + let (ahead, behind) = repo.graph_ahead_behind(head_id, + head_parent_id).unwrap(); + assert_eq!(ahead, 1); + assert_eq!(behind, 0); + let (ahead, behind) = repo.graph_ahead_behind(head_parent_id, + head_id).unwrap(); + assert_eq!(ahead, 0); + assert_eq!(behind, 1); + } + + #[test] + fn smoke_graph_descendant_of() { + let (_td, repo) = graph_repo_init(); + let head = repo.head().unwrap().target().unwrap(); + let head = repo.find_commit(head).unwrap(); + let head_id = head.id(); + let head_parent_id = head.parent(0).unwrap().id(); + assert!(repo.graph_descendant_of(head_id, head_parent_id).unwrap()); + assert!(!repo.graph_descendant_of(head_parent_id, head_id).unwrap()); + } + + #[test] + fn smoke_reference_has_log_ensure_log() { + let (_td, repo) = ::test::repo_init(); + + assert_eq!(repo.reference_has_log("HEAD").unwrap(), true); + assert_eq!(repo.reference_has_log("refs/heads/master").unwrap(), true); + assert_eq!(repo.reference_has_log("NOT_HEAD").unwrap(), false); + let master_oid = repo.revparse_single("master").unwrap().id(); + assert!(repo.reference("NOT_HEAD", master_oid, false, "creating a new branch").is_ok()); + assert_eq!(repo.reference_has_log("NOT_HEAD").unwrap(), false); + assert!(repo.reference_ensure_log("NOT_HEAD").is_ok()); + assert_eq!(repo.reference_has_log("NOT_HEAD").unwrap(), true); + } + + #[test] + fn smoke_set_head() { + let (_td, repo) = ::test::repo_init(); + + assert!(repo.set_head("refs/heads/does-not-exist").is_ok()); + assert!(repo.head().is_err()); + + assert!(repo.set_head("refs/heads/master").is_ok()); + assert!(repo.head().is_ok()); + + assert!(repo.set_head("*").is_err()); + } + + #[test] + fn smoke_set_head_detached() { + let (_td, repo) = ::test::repo_init(); + + let void_oid = Oid::from_bytes(b"00000000000000000000").unwrap(); + assert!(repo.set_head_detached(void_oid).is_err()); + + let master_oid = repo.revparse_single("master").unwrap().id(); + assert!(repo.set_head_detached(master_oid).is_ok()); + assert_eq!(repo.head().unwrap().target().unwrap(), master_oid); + } + + /// create an octopus: + /// /---o2-o4 + /// o1 X + /// \---o3-o5 + /// and checks that the merge bases of (o4,o5) are (o2,o3) + #[test] + fn smoke_merge_bases() { + let (_td, repo) = graph_repo_init(); + let sig = 
repo.signature().unwrap(); + + // let oid1 = head + let oid1 = repo.head().unwrap().target().unwrap(); + let commit1 = repo.find_commit(oid1).unwrap(); + println!("created oid1 {:?}", oid1); + + repo.branch("branch_a", &commit1, true).unwrap(); + repo.branch("branch_b", &commit1, true).unwrap(); + + // create commit oid2 on branchA + let mut index = repo.index().unwrap(); + let p = Path::new(repo.workdir().unwrap()).join("file_a"); + println!("using path {:?}", p); + fs::File::create(&p).unwrap(); + index.add_path(Path::new("file_a")).unwrap(); + let id_a = index.write_tree().unwrap(); + let tree_a = repo.find_tree(id_a).unwrap(); + let oid2 = repo.commit(Some("refs/heads/branch_a"), &sig, &sig, + "commit 2", &tree_a, &[&commit1]).unwrap(); + let commit2 = repo.find_commit(oid2).unwrap(); + println!("created oid2 {:?}", oid2); + + t!(repo.reset(commit1.as_object(), ResetType::Hard, None)); + + // create commit oid3 on branchB + let mut index = repo.index().unwrap(); + let p = Path::new(repo.workdir().unwrap()).join("file_b"); + fs::File::create(&p).unwrap(); + index.add_path(Path::new("file_b")).unwrap(); + let id_b = index.write_tree().unwrap(); + let tree_b = repo.find_tree(id_b).unwrap(); + let oid3 = repo.commit(Some("refs/heads/branch_b"), &sig, &sig, + "commit 3", &tree_b, &[&commit1]).unwrap(); + let commit3 = repo.find_commit(oid3).unwrap(); + println!("created oid3 {:?}", oid3); + + // create merge commit oid4 on branchA with parents oid2 and oid3 + //let mut index4 = repo.merge_commits(&commit2, &commit3, None).unwrap(); + repo.set_head("refs/heads/branch_a").unwrap(); + repo.checkout_head(None).unwrap(); + let oid4 = repo.commit(Some("refs/heads/branch_a"), &sig, &sig, + "commit 4", &tree_a, + &[&commit2, &commit3]).unwrap(); + //index4.write_tree_to(&repo).unwrap(); + println!("created oid4 {:?}", oid4); + + // create merge commit oid5 on branchB with parents oid2 and oid3 + //let mut index5 = repo.merge_commits(&commit3, &commit2, None).unwrap(); + repo.set_head("refs/heads/branch_b").unwrap(); + repo.checkout_head(None).unwrap(); + let oid5 = repo.commit(Some("refs/heads/branch_b"), &sig, &sig, + "commit 5", &tree_a, + &[&commit3, &commit2]).unwrap(); + //index5.write_tree_to(&repo).unwrap(); + println!("created oid5 {:?}", oid5); + + // merge bases of (oid4,oid5) should be (oid2,oid3) + let merge_bases = repo.merge_bases(oid4, oid5).unwrap(); + let mut found_oid2 = false; + let mut found_oid3 = false; + for mg in merge_bases.iter() { + println!("found merge base {:?}", mg); + if mg == &oid2 { + found_oid2 = true; + } else if mg == &oid3 { + found_oid3 = true; + } else { + assert!(false); + } + } + assert!(found_oid2); + assert!(found_oid3); + assert_eq!(merge_bases.len(), 2); + } + + #[test] + fn smoke_revparse_ext() { + let (_td, repo) = graph_repo_init(); + + { + let short_refname = "master"; + let expected_refname = "refs/heads/master"; + let (obj, reference) = repo.revparse_ext(short_refname).unwrap(); + let expected_obj = repo.revparse_single(expected_refname).unwrap(); + assert_eq!(obj.id(), expected_obj.id()); + assert_eq!(reference.unwrap().name().unwrap(), expected_refname); + } + { + let missing_refname = "refs/heads/does-not-exist"; + assert!(repo.revparse_ext(missing_refname).is_err()); + } + { + let (_obj, reference) = repo.revparse_ext("HEAD^").unwrap(); + assert!(reference.is_none()); + } + } + + #[test] + fn smoke_is_path_ignored() { + let (_td, repo) = graph_repo_init(); + + assert!(!repo.is_path_ignored(Path::new("/foo")).unwrap()); + + let _ = 
repo.add_ignore_rule("/foo"); + assert!(repo.is_path_ignored(Path::new("/foo")).unwrap()); + if cfg!(windows){ + assert!(repo.is_path_ignored(Path::new("\\foo\\thing")).unwrap()); + } + + + let _ = repo.clear_ignore_rules(); + assert!(!repo.is_path_ignored(Path::new("/foo")).unwrap()); + if cfg!(windows){ + assert!(!repo.is_path_ignored(Path::new("\\foo\\thing")).unwrap()); + } + } +} diff --git a/git2/src/revspec.rs b/git2/src/revspec.rs new file mode 100644 index 000000000..eb18492ef --- /dev/null +++ b/git2/src/revspec.rs @@ -0,0 +1,26 @@ +use {Object, RevparseMode}; + +/// A revspec represents a range of revisions within a repository. +pub struct Revspec<'repo> { + from: Option>, + to: Option>, + mode: RevparseMode, +} + +impl<'repo> Revspec<'repo> { + /// Assembles a new revspec from the from/to components. + pub fn from_objects(from: Option>, + to: Option>, + mode: RevparseMode) -> Revspec<'repo> { + Revspec { from: from, to: to, mode: mode } + } + + /// Access the `from` range of this revspec. + pub fn from(&self) -> Option<&Object<'repo>> { self.from.as_ref() } + + /// Access the `to` range of this revspec. + pub fn to(&self) -> Option<&Object<'repo>> { self.to.as_ref() } + + /// Returns the intent of the revspec. + pub fn mode(&self) -> RevparseMode { self.mode } +} diff --git a/git2/src/revwalk.rs b/git2/src/revwalk.rs new file mode 100644 index 000000000..1e661db52 --- /dev/null +++ b/git2/src/revwalk.rs @@ -0,0 +1,203 @@ +use std::marker; +use std::ffi::CString; +use libc::c_uint; + +use {raw, Error, Sort, Oid, Repository}; +use util::Binding; + +/// A revwalk allows traversal of the commit graph defined by including one or +/// more leaves and excluding one or more roots. +pub struct Revwalk<'repo> { + raw: *mut raw::git_revwalk, + _marker: marker::PhantomData<&'repo Repository>, +} + +impl<'repo> Revwalk<'repo> { + /// Reset a revwalk to allow re-configuring it. + /// + /// The revwalk is automatically reset when iteration of its commits + /// completes. + pub fn reset(&mut self) { + unsafe { raw::git_revwalk_reset(self.raw()) } + } + + /// Set the order in which commits are visited. + pub fn set_sorting(&mut self, sort_mode: Sort) { + unsafe { + raw::git_revwalk_sorting(self.raw(), sort_mode.bits() as c_uint) + } + } + + /// Simplify the history by first-parent + /// + /// No parents other than the first for each commit will be enqueued. + pub fn simplify_first_parent(&mut self) { + unsafe { raw::git_revwalk_simplify_first_parent(self.raw) } + } + + /// Mark a commit to start traversal from. + /// + /// The given OID must belong to a committish on the walked repository. + /// + /// The given commit will be used as one of the roots when starting the + /// revision walk. At least one commit must be pushed onto the walker before + /// a walk can be started. + pub fn push(&mut self, oid: Oid) -> Result<(), Error> { + unsafe { + try_call!(raw::git_revwalk_push(self.raw(), oid.raw())); + } + Ok(()) + } + + /// Push the repository's HEAD + /// + /// For more information, see `push`. + pub fn push_head(&mut self) -> Result<(), Error> { + unsafe { + try_call!(raw::git_revwalk_push_head(self.raw())); + } + Ok(()) + } + + /// Push matching references + /// + /// The OIDs pointed to by the references that match the given glob pattern + /// will be pushed to the revision walker. + /// + /// A leading 'refs/' is implied if not present as well as a trailing `/ \ + /// *` if the glob lacks '?', ' \ *' or '['. 
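// A hedged usage sketch, not part of the vendored file: resolving a range
// spec with Repository::revparse and reading it back through the Revspec
// accessors defined above. `repo` is assumed to be an already-opened
// git2::Repository with at least two commits.
fn show_range(repo: &git2::Repository) -> Result<(), git2::Error> {
    let spec = repo.revparse("HEAD~1..HEAD")?;
    if spec.mode().contains(git2::RevparseMode::RANGE) {
        println!("from {:?} to {:?}",
                 spec.from().map(|o| o.id()),
                 spec.to().map(|o| o.id()));
    }
    Ok(())
}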
+ /// + /// Any references matching this glob which do not point to a committish + /// will be ignored. + pub fn push_glob(&mut self, glob: &str) -> Result<(), Error> { + let glob = try!(CString::new(glob)); + unsafe { + try_call!(raw::git_revwalk_push_glob(self.raw, glob)); + } + Ok(()) + } + + /// Push and hide the respective endpoints of the given range. + /// + /// The range should be of the form `..` where each + /// `` is in the form accepted by `revparse_single`. The left-hand + /// commit will be hidden and the right-hand commit pushed. + pub fn push_range(&mut self, range: &str) -> Result<(), Error> { + let range = try!(CString::new(range)); + unsafe { + try_call!(raw::git_revwalk_push_range(self.raw, range)); + } + Ok(()) + } + + /// Push the OID pointed to by a reference + /// + /// The reference must point to a committish. + pub fn push_ref(&mut self, reference: &str) -> Result<(), Error> { + let reference = try!(CString::new(reference)); + unsafe { + try_call!(raw::git_revwalk_push_ref(self.raw, reference)); + } + Ok(()) + } + + /// Mark a commit as not of interest to this revwalk. + pub fn hide(&mut self, oid: Oid) -> Result<(), Error> { + unsafe { + try_call!(raw::git_revwalk_hide(self.raw(), oid.raw())); + } + Ok(()) + } + + /// Hide the repository's HEAD + /// + /// For more information, see `hide`. + pub fn hide_head(&mut self) -> Result<(), Error> { + unsafe { + try_call!(raw::git_revwalk_hide_head(self.raw())); + } + Ok(()) + } + + /// Hide matching references. + /// + /// The OIDs pointed to by the references that match the given glob pattern + /// and their ancestors will be hidden from the output on the revision walk. + /// + /// A leading 'refs/' is implied if not present as well as a trailing `/ \ + /// *` if the glob lacks '?', ' \ *' or '['. + /// + /// Any references matching this glob which do not point to a committish + /// will be ignored. + pub fn hide_glob(&mut self, glob: &str) -> Result<(), Error> { + let glob = try!(CString::new(glob)); + unsafe { + try_call!(raw::git_revwalk_hide_glob(self.raw, glob)); + } + Ok(()) + } + + /// Hide the OID pointed to by a reference. + /// + /// The reference must point to a committish. 
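// A hedged usage sketch, not part of the vendored file: push_range() hides the
// left-hand commit and pushes the right-hand one, so the walk below yields the
// commits reachable from HEAD but not from HEAD~3. `repo` is assumed to be an
// already-opened git2::Repository with enough history for the range to resolve.
fn last_three(repo: &git2::Repository) -> Result<Vec<git2::Oid>, git2::Error> {
    let mut walk = repo.revwalk()?;
    walk.set_sorting(git2::Sort::TOPOLOGICAL | git2::Sort::TIME);
    walk.push_range("HEAD~3..HEAD")?;
    walk.collect() // each iterator item is a Result<Oid, Error>
}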
+ pub fn hide_ref(&mut self, reference: &str) -> Result<(), Error> { + let reference = try!(CString::new(reference)); + unsafe { + try_call!(raw::git_revwalk_hide_ref(self.raw, reference)); + } + Ok(()) + } +} + +impl<'repo> Binding for Revwalk<'repo> { + type Raw = *mut raw::git_revwalk; + unsafe fn from_raw(raw: *mut raw::git_revwalk) -> Revwalk<'repo> { + Revwalk { raw: raw, _marker: marker::PhantomData } + } + fn raw(&self) -> *mut raw::git_revwalk { self.raw } +} + +impl<'repo> Drop for Revwalk<'repo> { + fn drop(&mut self) { + unsafe { raw::git_revwalk_free(self.raw) } + } +} + +impl<'repo> Iterator for Revwalk<'repo> { + type Item = Result; + fn next(&mut self) -> Option> { + let mut out: raw::git_oid = raw::git_oid{ id: [0; raw::GIT_OID_RAWSZ] }; + unsafe { + try_call_iter!(raw::git_revwalk_next(&mut out, self.raw())); + Some(Ok(Binding::from_raw(&out as *const _))) + } + } +} + +#[cfg(test)] +mod tests { + #[test] + fn smoke() { + let (_td, repo) = ::test::repo_init(); + let head = repo.head().unwrap(); + let target = head.target().unwrap(); + + let mut walk = repo.revwalk().unwrap(); + walk.push(target).unwrap(); + + let oids: Vec<::Oid> = walk.by_ref().collect::, _>>() + .unwrap(); + + assert_eq!(oids.len(), 1); + assert_eq!(oids[0], target); + + walk.reset(); + walk.push_head().unwrap(); + assert_eq!(walk.by_ref().count(), 1); + + walk.reset(); + walk.push_head().unwrap(); + walk.hide_head().unwrap(); + assert_eq!(walk.by_ref().count(), 0); + } +} diff --git a/git2/src/signature.rs b/git2/src/signature.rs new file mode 100644 index 000000000..61e7a8948 --- /dev/null +++ b/git2/src/signature.rs @@ -0,0 +1,175 @@ +use std::ffi::CString; +use std::marker; +use std::mem; +use std::ptr; +use std::str; +use std::fmt; +use libc; + +use {raw, Error, Time}; +use util::Binding; + +/// A Signature is used to indicate authorship of various actions throughout the +/// library. +/// +/// Signatures contain a name, email, and timestamp. All fields can be specified +/// with `new` while the `now` constructor omits the timestamp. The +/// [`Repository::signature`] method can be used to create a default signature +/// with name and email values read from the configuration. +/// +/// [`Repository::signature`]: struct.Repository.html#method.signature +pub struct Signature<'a> { + raw: *mut raw::git_signature, + _marker: marker::PhantomData<&'a str>, + owned: bool, +} + +impl<'a> Signature<'a> { + /// Create a new action signature with a timestamp of 'now'. + /// + /// See `new` for more information + pub fn now(name: &str, email: &str) -> Result, Error> { + ::init(); + let mut ret = ptr::null_mut(); + let name = try!(CString::new(name)); + let email = try!(CString::new(email)); + unsafe { + try_call!(raw::git_signature_now(&mut ret, name, email)); + Ok(Binding::from_raw(ret)) + } + } + + /// Create a new action signature. + /// + /// The `time` specified is in seconds since the epoch, and the `offset` is + /// the time zone offset in minutes. + /// + /// Returns error if either `name` or `email` contain angle brackets. + pub fn new(name: &str, email: &str, time: &Time) + -> Result, Error> { + ::init(); + let mut ret = ptr::null_mut(); + let name = try!(CString::new(name)); + let email = try!(CString::new(email)); + unsafe { + try_call!(raw::git_signature_new(&mut ret, name, email, + time.seconds() as raw::git_time_t, + time.offset_minutes() as libc::c_int)); + Ok(Binding::from_raw(ret)) + } + } + + /// Gets the name on the signature. 
+ /// + /// Returns `None` if the name is not valid utf-8 + pub fn name(&self) -> Option<&str> { + str::from_utf8(self.name_bytes()).ok() + } + + /// Gets the name on the signature as a byte slice. + pub fn name_bytes(&self) -> &[u8] { + unsafe { ::opt_bytes(self, (*self.raw).name).unwrap() } + } + + /// Gets the email on the signature. + /// + /// Returns `None` if the email is not valid utf-8 + pub fn email(&self) -> Option<&str> { + str::from_utf8(self.email_bytes()).ok() + } + + /// Gets the email on the signature as a byte slice. + pub fn email_bytes(&self) -> &[u8] { + unsafe { ::opt_bytes(self, (*self.raw).email).unwrap() } + } + + /// Get the `when` of this signature. + pub fn when(&self) -> Time { + unsafe { Binding::from_raw((*self.raw).when) } + } + + /// Convert a signature of any lifetime into an owned signature with a + /// static lifetime. + pub fn to_owned(&self) -> Signature<'static> { + unsafe { + let me = mem::transmute::<&Signature<'a>, &Signature<'static>>(self); + me.clone() + } + } +} + +impl<'a> Binding for Signature<'a> { + type Raw = *mut raw::git_signature; + unsafe fn from_raw(raw: *mut raw::git_signature) -> Signature<'a> { + Signature { + raw: raw, + _marker: marker::PhantomData, + owned: true, + } + } + fn raw(&self) -> *mut raw::git_signature { self.raw } +} + +/// Creates a new signature from the give raw pointer, tied to the lifetime +/// of the given object. +/// +/// This function is unsafe as there is no guarantee that `raw` is valid for +/// `'a` nor if it's a valid pointer. +pub unsafe fn from_raw_const<'b, T>(_lt: &'b T, + raw: *const raw::git_signature) + -> Signature<'b> { + Signature { + raw: raw as *mut raw::git_signature, + _marker: marker::PhantomData, + owned: false, + } +} + +impl Clone for Signature<'static> { + fn clone(&self) -> Signature<'static> { + // TODO: can this be defined for 'a and just do a plain old copy if the + // lifetime isn't static? + let mut raw = ptr::null_mut(); + let rc = unsafe { raw::git_signature_dup(&mut raw, &*self.raw) }; + assert_eq!(rc, 0); + unsafe { Binding::from_raw(raw) } + } +} + +impl<'a> Drop for Signature<'a> { + fn drop(&mut self) { + if self.owned { + unsafe { raw::git_signature_free(self.raw) } + } + } +} + +impl<'a> fmt::Display for Signature<'a> { + + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{} <{}>", + String::from_utf8_lossy(self.name_bytes()), + String::from_utf8_lossy(self.email_bytes())) + } + +} + +#[cfg(test)] +mod tests { + use {Signature, Time}; + + #[test] + fn smoke() { + Signature::new("foo", "bar", &Time::new(89, 0)).unwrap(); + Signature::now("foo", "bar").unwrap(); + assert!(Signature::new("", "bar", &Time::new(89, 0)).is_err()); + assert!(Signature::now("", "bar").is_err()); + + let s = Signature::now("foo", "bar").unwrap(); + assert_eq!(s.name(), Some("foo")); + assert_eq!(s.email(), Some("bar")); + + drop(s.clone()); + drop(s.to_owned()); + } +} diff --git a/git2/src/stash.rs b/git2/src/stash.rs new file mode 100644 index 000000000..3d07465a3 --- /dev/null +++ b/git2/src/stash.rs @@ -0,0 +1,210 @@ +use {raw, panic, Oid, StashApplyProgress}; +use std::ffi::{CStr}; +use util::{Binding}; +use libc::{c_int, c_char, size_t, c_void}; +use build::{CheckoutBuilder}; +use std::mem; + +/// Stash application progress notification function. +/// +/// Return `true` to continue processing, or `false` to +/// abort the stash application. 
+pub type StashApplyProgressCb<'a> = FnMut(StashApplyProgress) -> bool + 'a; + +/// This is a callback function you can provide to iterate over all the +/// stashed states that will be invoked per entry. +pub type StashCb<'a> = FnMut(usize, &str, &Oid) -> bool + 'a; + +#[allow(unused)] +/// Stash application options structure +pub struct StashApplyOptions<'cb> { + progress: Option>>, + checkout_options: Option>, + raw_opts: raw::git_stash_apply_options +} + +impl<'cb> Default for StashApplyOptions<'cb> { + fn default() -> Self { + Self::new() + } +} + +impl<'cb> StashApplyOptions<'cb> { + /// Creates a default set of merge options. + pub fn new() -> StashApplyOptions<'cb> { + let mut opts = StashApplyOptions { + progress: None, + checkout_options: None, + raw_opts: unsafe { mem::zeroed() }, + }; + assert_eq!(unsafe { + raw::git_stash_apply_init_options(&mut opts.raw_opts, 1) + }, 0); + opts + } + + /// Set stash application flag to GIT_STASH_APPLY_REINSTATE_INDEX + pub fn reinstantiate_index(&mut self) -> &mut StashApplyOptions<'cb> { + self.raw_opts.flags = raw::GIT_STASH_APPLY_REINSTATE_INDEX; + self + } + + /// Options to use when writing files to the working directory + pub fn checkout_options(&mut self, opts: CheckoutBuilder<'cb>) -> &mut StashApplyOptions<'cb> { + self.checkout_options = Some(opts); + self + } + + /// Optional callback to notify the consumer of application progress. + /// + /// Return `true` to continue processing, or `false` to + /// abort the stash application. + pub fn progress_cb(&mut self, callback: C) -> &mut StashApplyOptions<'cb> + where C: FnMut(StashApplyProgress) -> bool + 'cb + { + self.progress = Some(Box::new(callback) as Box>); + self.raw_opts.progress_cb = stash_apply_progress_cb; + self.raw_opts.progress_payload = self as *mut _ as *mut _; + self + } + + /// Pointer to a raw git_stash_apply_options + pub fn raw(&mut self) -> &raw::git_stash_apply_options { + unsafe { + if let Some(opts) = self.checkout_options.as_mut() { + opts.configure(&mut self.raw_opts.checkout_options); + } + } + &self.raw_opts + } +} + +#[allow(unused)] +pub struct StashCbData<'a> { + pub callback: &'a mut StashCb<'a> +} + +#[allow(unused)] +pub extern fn stash_cb(index: size_t, + message: *const c_char, + stash_id: *const raw::git_oid, + payload: *mut c_void) + -> c_int +{ + panic::wrap(|| unsafe { + let mut data = &mut *(payload as *mut StashCbData); + let res = { + let mut callback = &mut data.callback; + callback(index, + CStr::from_ptr(message).to_str().unwrap(), + &Binding::from_raw(stash_id)) + }; + + if res { 0 } else { 1 } + }).unwrap_or(1) +} + +fn convert_progress(progress: raw::git_stash_apply_progress_t) -> StashApplyProgress { + match progress { + raw::GIT_STASH_APPLY_PROGRESS_NONE => StashApplyProgress::None, + raw::GIT_STASH_APPLY_PROGRESS_LOADING_STASH => StashApplyProgress::LoadingStash, + raw::GIT_STASH_APPLY_PROGRESS_ANALYZE_INDEX => StashApplyProgress::AnalyzeIndex, + raw::GIT_STASH_APPLY_PROGRESS_ANALYZE_MODIFIED => StashApplyProgress::AnalyzeModified, + raw::GIT_STASH_APPLY_PROGRESS_ANALYZE_UNTRACKED => StashApplyProgress::AnalyzeUntracked, + raw::GIT_STASH_APPLY_PROGRESS_CHECKOUT_UNTRACKED => StashApplyProgress::CheckoutUntracked, + raw::GIT_STASH_APPLY_PROGRESS_CHECKOUT_MODIFIED => StashApplyProgress::CheckoutModified, + raw::GIT_STASH_APPLY_PROGRESS_DONE => StashApplyProgress::Done, + + _ => StashApplyProgress::None + } +} + +#[allow(unused)] +extern fn stash_apply_progress_cb(progress: raw::git_stash_apply_progress_t, + payload: *mut c_void) + -> 
c_int +{ + panic::wrap(|| unsafe { + let mut options = &mut *(payload as *mut StashApplyOptions); + let res = { + let mut callback = options.progress.as_mut().unwrap(); + callback(convert_progress(progress)) + }; + + if res { 0 } else { -1 } + }).unwrap_or(-1) +} + +#[cfg(test)] +mod tests { + use stash::{StashApplyOptions}; + use std::io::{Write}; + use std::fs; + use std::path::Path; + use test::{repo_init}; + use {Repository, Status, StashFlags}; + + fn make_stash(next: C) where C: FnOnce(&mut Repository) { + let (_td, mut repo) = repo_init(); + let signature = repo.signature().unwrap(); + + let p = Path::new(repo.workdir().unwrap()).join("file_b.txt"); + println!("using path {:?}", p); + fs::File::create(&p).unwrap() + .write("data".as_bytes()).unwrap(); + + let rel_p = Path::new("file_b.txt"); + assert!(repo.status_file(&rel_p).unwrap() == Status::WT_NEW); + + repo.stash_save(&signature, "msg1", Some(StashFlags::INCLUDE_UNTRACKED)).unwrap(); + + assert!(repo.status_file(&rel_p).is_err()); + + let mut count = 0; + repo.stash_foreach(|index, name, _oid| { + count += 1; + assert!(index == 0); + assert!(name == "On master: msg1"); + true + }).unwrap(); + + assert!(count == 1); + next(&mut repo); + } + + fn count_stash(repo: &mut Repository) -> usize { + let mut count = 0; + repo.stash_foreach(|_, _, _| { count += 1; true }).unwrap(); + count + } + + #[test] + fn smoke_stash_save_drop() { + make_stash(|repo| { + repo.stash_drop(0).unwrap(); + assert!(count_stash(repo) == 0) + }) + } + + #[test] + fn smoke_stash_save_pop() { + make_stash(|repo| { + repo.stash_pop(0, None).unwrap(); + assert!(count_stash(repo) == 0) + }) + } + + #[test] + fn smoke_stash_save_apply() { + make_stash(|repo| { + let mut options = StashApplyOptions::new(); + options.progress_cb(|progress| { + println!("{:?}", progress); + true + }); + + repo.stash_apply(0, Some(&mut options)).unwrap(); + assert!(count_stash(repo) == 1) + }) + } +} diff --git a/git2/src/status.rs b/git2/src/status.rs new file mode 100644 index 000000000..cb67305e4 --- /dev/null +++ b/git2/src/status.rs @@ -0,0 +1,418 @@ +use std::ffi::CString; +use std::ops::Range; +use std::marker; +use std::mem; +use std::str; +use libc::{c_char, size_t, c_uint}; + +use {raw, Status, DiffDelta, IntoCString, Repository}; +use util::Binding; + +/// Options that can be provided to `repo.statuses()` to control how the status +/// information is gathered. +pub struct StatusOptions { + raw: raw::git_status_options, + pathspec: Vec, + ptrs: Vec<*const c_char>, +} + +/// Enumeration of possible methods of what can be shown through a status +/// operation. +#[derive(Copy, Clone)] +pub enum StatusShow { + /// Only gives status based on HEAD to index comparison, not looking at + /// working directory changes. + Index, + + /// Only gives status based on index to working directory comparison, not + /// comparing the index to the HEAD. + Workdir, + + /// The default, this roughly matches `git status --porcelain` regarding + /// which files are included and in what order. + IndexAndWorkdir, +} + +/// A container for a list of status information about a repository. +/// +/// Each instance appears as if it were a collection, having a length and +/// allowing indexing, as well as providing an iterator. +pub struct Statuses<'repo> { + raw: *mut raw::git_status_list, + + // Hm, not currently present, but can't hurt? + _marker: marker::PhantomData<&'repo Repository>, +} + +/// An iterator over the statuses in a `Statuses` instance. 
+pub struct StatusIter<'statuses> { + statuses: &'statuses Statuses<'statuses>, + range: Range, +} + +/// A structure representing an entry in the `Statuses` structure. +/// +/// Instances are created through the `.iter()` method or the `.get()` method. +pub struct StatusEntry<'statuses> { + raw: *const raw::git_status_entry, + _marker: marker::PhantomData<&'statuses DiffDelta<'statuses>>, +} + +impl Default for StatusOptions { + fn default() -> Self { + Self::new() + } +} + +impl StatusOptions { + /// Creates a new blank set of status options. + pub fn new() -> StatusOptions { + unsafe { + let mut raw = mem::zeroed(); + let r = raw::git_status_init_options(&mut raw, + raw::GIT_STATUS_OPTIONS_VERSION); + assert_eq!(r, 0); + StatusOptions { + raw: raw, + pathspec: Vec::new(), + ptrs: Vec::new(), + } + } + } + + /// Select the files on which to report status. + /// + /// The default, if unspecified, is to show the index and the working + /// directory. + pub fn show(&mut self, show: StatusShow) -> &mut StatusOptions { + self.raw.show = match show { + StatusShow::Index => raw::GIT_STATUS_SHOW_INDEX_ONLY, + StatusShow::Workdir => raw::GIT_STATUS_SHOW_WORKDIR_ONLY, + StatusShow::IndexAndWorkdir => raw::GIT_STATUS_SHOW_INDEX_AND_WORKDIR, + }; + self + } + + /// Add a path pattern to match (using fnmatch-style matching). + /// + /// If the `disable_pathspec_match` option is given, then this is a literal + /// path to match. If this is not called, then there will be no patterns to + /// match and the entire directory will be used. + pub fn pathspec(&mut self, pathspec: T) + -> &mut StatusOptions { + let s = pathspec.into_c_string().unwrap(); + self.ptrs.push(s.as_ptr()); + self.pathspec.push(s); + self + } + + fn flag(&mut self, flag: raw::git_status_opt_t, val: bool) + -> &mut StatusOptions { + if val { + self.raw.flags |= flag as c_uint; + } else { + self.raw.flags &= !(flag as c_uint); + } + self + } + + /// Flag whether untracked files will be included. + /// + /// Untracked files will only be included if the workdir files are included + /// in the status "show" option. + pub fn include_untracked(&mut self, include: bool) -> &mut StatusOptions { + self.flag(raw::GIT_STATUS_OPT_INCLUDE_UNTRACKED, include) + } + + /// Flag whether ignored files will be included. + /// + /// The files will only be included if the workdir files are included + /// in the status "show" option. + pub fn include_ignored(&mut self, include: bool) -> &mut StatusOptions { + self.flag(raw::GIT_STATUS_OPT_INCLUDE_IGNORED, include) + } + + /// Flag to include unmodified files. + pub fn include_unmodified(&mut self, include: bool) -> &mut StatusOptions { + self.flag(raw::GIT_STATUS_OPT_INCLUDE_UNMODIFIED, include) + } + + /// Flag that submodules should be skipped. + /// + /// This only applies if there are no pending typechanges to the submodule + /// (either from or to another type). + pub fn exclude_submodules(&mut self, exclude: bool) -> &mut StatusOptions { + self.flag(raw::GIT_STATUS_OPT_EXCLUDE_SUBMODULES, exclude) + } + + /// Flag that all files in untracked directories should be included. + /// + /// Normally if an entire directory is new then just the top-level directory + /// is included (with a trailing slash on the entry name). + pub fn recurse_untracked_dirs(&mut self, include: bool) + -> &mut StatusOptions { + self.flag(raw::GIT_STATUS_OPT_RECURSE_UNTRACKED_DIRS, include) + } + + /// Indicates that the given paths should be treated as literals paths, note + /// patterns. 
+ pub fn disable_pathspec_match(&mut self, include: bool) + -> &mut StatusOptions { + self.flag(raw::GIT_STATUS_OPT_DISABLE_PATHSPEC_MATCH, include) + } + + /// Indicates that the contents of ignored directories should be included in + /// the status. + pub fn recurse_ignored_dirs(&mut self, include: bool) + -> &mut StatusOptions { + self.flag(raw::GIT_STATUS_OPT_RECURSE_IGNORED_DIRS, include) + } + + /// Indicates that rename detection should be processed between the head. + pub fn renames_head_to_index(&mut self, include: bool) + -> &mut StatusOptions { + self.flag(raw::GIT_STATUS_OPT_RENAMES_HEAD_TO_INDEX, include) + } + + /// Indicates that rename detection should be run between the index and the + /// working directory. + pub fn renames_index_to_workdir(&mut self, include: bool) + -> &mut StatusOptions { + self.flag(raw::GIT_STATUS_OPT_RENAMES_INDEX_TO_WORKDIR, include) + } + + /// Override the native case sensitivity for the file system and force the + /// output to be in case sensitive order. + pub fn sort_case_sensitively(&mut self, include: bool) + -> &mut StatusOptions { + self.flag(raw::GIT_STATUS_OPT_SORT_CASE_SENSITIVELY, include) + } + + /// Override the native case sensitivity for the file system and force the + /// output to be in case-insensitive order. + pub fn sort_case_insensitively(&mut self, include: bool) + -> &mut StatusOptions { + self.flag(raw::GIT_STATUS_OPT_SORT_CASE_INSENSITIVELY, include) + } + + /// Indicates that rename detection should include rewritten files. + pub fn renames_from_rewrites(&mut self, include: bool) + -> &mut StatusOptions { + self.flag(raw::GIT_STATUS_OPT_RENAMES_FROM_REWRITES, include) + } + + /// Bypasses the default status behavior of doing a "soft" index reload. + pub fn no_refresh(&mut self, include: bool) -> &mut StatusOptions { + self.flag(raw::GIT_STATUS_OPT_NO_REFRESH, include) + } + + /// Refresh the stat cache in the index for files are unchanged but have + /// out of date stat information in the index. + /// + /// This will result in less work being done on subsequent calls to fetching + /// the status. + pub fn update_index(&mut self, include: bool) -> &mut StatusOptions { + self.flag(raw::GIT_STATUS_OPT_UPDATE_INDEX, include) + } + + // erm... + #[allow(missing_docs)] + pub fn include_unreadable(&mut self, include: bool) -> &mut StatusOptions { + self.flag(raw::GIT_STATUS_OPT_INCLUDE_UNREADABLE, include) + } + + // erm... + #[allow(missing_docs)] + pub fn include_unreadable_as_untracked(&mut self, include: bool) + -> &mut StatusOptions { + self.flag(raw::GIT_STATUS_OPT_INCLUDE_UNREADABLE_AS_UNTRACKED, include) + } + + /// Get a pointer to the inner list of status options. + /// + /// This function is unsafe as the returned structure has interior pointers + /// and may no longer be valid if these options continue to be mutated. + pub unsafe fn raw(&mut self) -> *const raw::git_status_options { + self.raw.pathspec.strings = self.ptrs.as_ptr() as *mut _; + self.raw.pathspec.count = self.ptrs.len() as size_t; + &self.raw + } +} + +impl<'repo> Statuses<'repo> { + /// Gets a status entry from this list at the specified index. + /// + /// Returns `None` if the index is out of bounds. + pub fn get(&self, index: usize) -> Option { + unsafe { + let p = raw::git_status_byindex(self.raw, index as size_t); + Binding::from_raw_opt(p) + } + } + + /// Gets the count of status entries in this list. + /// + /// If there are no changes in status (according to the options given + /// when the status list was created), this should return 0. 
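// A hedged usage sketch, not part of the vendored file: combining the show()
// selector with a couple of the flag setters above and collecting the paths of
// the resulting entries. `repo` is assumed to be an already-opened
// git2::Repository with a work tree.
fn dirty_paths(repo: &git2::Repository) -> Result<Vec<String>, git2::Error> {
    let mut opts = git2::StatusOptions::new();
    opts.show(git2::StatusShow::IndexAndWorkdir)
        .include_untracked(true)
        .recurse_untracked_dirs(true);
    let statuses = repo.statuses(Some(&mut opts))?;
    Ok(statuses.iter()
               .filter_map(|entry| entry.path().map(|p| p.to_string()))
               .collect())
}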
+ pub fn len(&self) -> usize { + unsafe { raw::git_status_list_entrycount(self.raw) as usize } + } + + /// Return `true` if there is no status entry in this list. + pub fn is_empty(&self) -> bool { + self.len() == 0 + } + + /// Returns an iterator over the statuses in this list. + pub fn iter(&self) -> StatusIter { + StatusIter { + statuses: self, + range: 0..self.len(), + } + } +} + +impl<'repo> Binding for Statuses<'repo> { + type Raw = *mut raw::git_status_list; + unsafe fn from_raw(raw: *mut raw::git_status_list) -> Statuses<'repo> { + Statuses { raw: raw, _marker: marker::PhantomData } + } + fn raw(&self) -> *mut raw::git_status_list { self.raw } +} + +impl<'repo> Drop for Statuses<'repo> { + fn drop(&mut self) { + unsafe { raw::git_status_list_free(self.raw); } + } +} + +impl<'a> Iterator for StatusIter<'a> { + type Item = StatusEntry<'a>; + fn next(&mut self) -> Option> { + self.range.next().and_then(|i| self.statuses.get(i)) + } + fn size_hint(&self) -> (usize, Option) { self.range.size_hint() } +} +impl<'a> DoubleEndedIterator for StatusIter<'a> { + fn next_back(&mut self) -> Option> { + self.range.next_back().and_then(|i| self.statuses.get(i)) + } +} +impl<'a> ExactSizeIterator for StatusIter<'a> {} + +impl<'statuses> StatusEntry<'statuses> { + /// Access the bytes for this entry's corresponding pathname + pub fn path_bytes(&self) -> &[u8] { + unsafe { + if (*self.raw).head_to_index.is_null() { + ::opt_bytes(self, (*(*self.raw).index_to_workdir).old_file.path) + } else { + ::opt_bytes(self, (*(*self.raw).head_to_index).old_file.path) + }.unwrap() + } + } + + /// Access this entry's path name as a string. + /// + /// Returns `None` if the path is not valid utf-8. + pub fn path(&self) -> Option<&str> { str::from_utf8(self.path_bytes()).ok() } + + /// Access the status flags for this file + pub fn status(&self) -> Status { + Status::from_bits_truncate(unsafe { (*self.raw).status as u32 }) + } + + /// Access detailed information about the differences between the file in + /// HEAD and the file in the index. + pub fn head_to_index(&self) -> Option> { + unsafe { + Binding::from_raw_opt((*self.raw).head_to_index) + } + } + + /// Access detailed information about the differences between the file in + /// the index and the file in the working directory. 
+ pub fn index_to_workdir(&self) -> Option> { + unsafe { + Binding::from_raw_opt((*self.raw).index_to_workdir) + } + } +} + +impl<'statuses> Binding for StatusEntry<'statuses> { + type Raw = *const raw::git_status_entry; + + unsafe fn from_raw(raw: *const raw::git_status_entry) + -> StatusEntry<'statuses> { + StatusEntry { raw: raw, _marker: marker::PhantomData } + } + fn raw(&self) -> *const raw::git_status_entry { self.raw } +} + +#[cfg(test)] +mod tests { + use std::fs::File; + use std::path::Path; + use std::io::prelude::*; + use super::StatusOptions; + + #[test] + fn smoke() { + let (td, repo) = ::test::repo_init(); + assert_eq!(repo.statuses(None).unwrap().len(), 0); + File::create(&td.path().join("foo")).unwrap(); + let statuses = repo.statuses(None).unwrap(); + assert_eq!(statuses.iter().count(), 1); + let status = statuses.iter().next().unwrap(); + assert_eq!(status.path(), Some("foo")); + assert!(status.status().contains(::Status::WT_NEW)); + assert!(!status.status().contains(::Status::INDEX_NEW)); + assert!(status.head_to_index().is_none()); + let diff = status.index_to_workdir().unwrap(); + assert_eq!(diff.old_file().path_bytes().unwrap(), b"foo"); + assert_eq!(diff.new_file().path_bytes().unwrap(), b"foo"); + } + + #[test] + fn filter() { + let (td, repo) = ::test::repo_init(); + t!(File::create(&td.path().join("foo"))); + t!(File::create(&td.path().join("bar"))); + let mut opts = StatusOptions::new(); + opts.include_untracked(true) + .pathspec("foo"); + + let statuses = t!(repo.statuses(Some(&mut opts))); + assert_eq!(statuses.iter().count(), 1); + let status = statuses.iter().next().unwrap(); + assert_eq!(status.path(), Some("foo")); + } + + #[test] + fn gitignore() { + let (td, repo) = ::test::repo_init(); + t!(t!(File::create(td.path().join(".gitignore"))).write_all(b"foo\n")); + assert!(!t!(repo.status_should_ignore(Path::new("bar")))); + assert!(t!(repo.status_should_ignore(Path::new("foo")))); + } + + #[test] + fn status_file() { + let (td, repo) = ::test::repo_init(); + assert!(repo.status_file(Path::new("foo")).is_err()); + if cfg!(windows) { + assert!(repo.status_file(Path::new("bar\\foo.txt")).is_err()); + } + t!(File::create(td.path().join("foo"))); + if cfg!(windows) { + t!(::std::fs::create_dir_all(td.path().join("bar"))); + t!(File::create(td.path().join("bar").join("foo.txt"))); + } + let status = t!(repo.status_file(Path::new("foo"))); + assert!(status.contains(::Status::WT_NEW)); + if cfg!(windows) { + let status = t!(repo.status_file(Path::new("bar\\foo.txt"))); + assert!(status.contains(::Status::WT_NEW)); + } + } +} diff --git a/git2/src/string_array.rs b/git2/src/string_array.rs new file mode 100644 index 000000000..97af2090e --- /dev/null +++ b/git2/src/string_array.rs @@ -0,0 +1,117 @@ +//! Bindings to libgit2's raw `git_strarray` type + +use std::str; +use std::ops::Range; + +use raw; +use util::Binding; + +/// A string array structure used by libgit2 +/// +/// Some apis return arrays of strings which originate from libgit2. This +/// wrapper type behaves a little like `Vec<&str>` but does so without copying +/// the underlying strings until necessary. +pub struct StringArray { + raw: raw::git_strarray, +} + +/// A forward iterator over the strings of an array, casted to `&str`. +pub struct Iter<'a> { + range: Range, + arr: &'a StringArray, +} + +/// A forward iterator over the strings of an array, casted to `&[u8]`. 
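// A hedged usage sketch, not part of the vendored file: Repository::tag_names
// is one of the APIs that returns a StringArray; iterating it yields
// Option<&str> because individual entries may not be valid utf-8.
fn print_tags(repo: &git2::Repository) -> Result<(), git2::Error> {
    let tags = repo.tag_names(None)?; // StringArray
    for name in tags.iter() {
        match name {
            Some(n) => println!("{}", n),
            None => println!("(non-utf8 tag name)"),
        }
    }
    Ok(())
}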
+pub struct IterBytes<'a> { + range: Range, + arr: &'a StringArray, +} + +impl StringArray { + /// Returns None if the i'th string is not utf8 or if i is out of bounds. + pub fn get(&self, i: usize) -> Option<&str> { + self.get_bytes(i).and_then(|s| str::from_utf8(s).ok()) + } + + /// Returns None if `i` is out of bounds. + pub fn get_bytes(&self, i: usize) -> Option<&[u8]> { + if i < self.raw.count as usize { + unsafe { + let ptr = *self.raw.strings.offset(i as isize) as *const _; + Some(::opt_bytes(self, ptr).unwrap()) + } + } else { + None + } + } + + /// Returns an iterator over the strings contained within this array. + /// + /// The iterator yields `Option<&str>` as it is unknown whether the contents + /// are utf-8 or not. + pub fn iter(&self) -> Iter { + Iter { range: 0..self.len(), arr: self } + } + + /// Returns an iterator over the strings contained within this array, + /// yielding byte slices. + pub fn iter_bytes(&self) -> IterBytes { + IterBytes { range: 0..self.len(), arr: self } + } + + /// Returns the number of strings in this array. + pub fn len(&self) -> usize { self.raw.count as usize } + + /// Return `true` if this array is empty. + pub fn is_empty(&self) -> bool { self.len() == 0 } +} + +impl Binding for StringArray { + type Raw = raw::git_strarray; + unsafe fn from_raw(raw: raw::git_strarray) -> StringArray { + StringArray { raw: raw } + } + fn raw(&self) -> raw::git_strarray { self.raw } +} + +impl<'a> IntoIterator for &'a StringArray { + type Item = Option<&'a str>; + type IntoIter = Iter<'a>; + fn into_iter(self) -> Self::IntoIter { + self.iter() + } +} + +impl<'a> Iterator for Iter<'a> { + type Item = Option<&'a str>; + fn next(&mut self) -> Option> { + self.range.next().map(|i| self.arr.get(i)) + } + fn size_hint(&self) -> (usize, Option) { self.range.size_hint() } +} +impl<'a> DoubleEndedIterator for Iter<'a> { + fn next_back(&mut self) -> Option> { + self.range.next_back().map(|i| self.arr.get(i)) + } +} +impl<'a> ExactSizeIterator for Iter<'a> {} + +impl<'a> Iterator for IterBytes<'a> { + type Item = &'a [u8]; + fn next(&mut self) -> Option<&'a [u8]> { + self.range.next().and_then(|i| self.arr.get_bytes(i)) + } + fn size_hint(&self) -> (usize, Option) { self.range.size_hint() } +} +impl<'a> DoubleEndedIterator for IterBytes<'a> { + fn next_back(&mut self) -> Option<&'a [u8]> { + self.range.next_back().and_then(|i| self.arr.get_bytes(i)) + } +} +impl<'a> ExactSizeIterator for IterBytes<'a> {} + +impl Drop for StringArray { + fn drop(&mut self) { + unsafe { raw::git_strarray_free(&mut self.raw) } + } +} diff --git a/git2/src/submodule.rs b/git2/src/submodule.rs new file mode 100644 index 000000000..9cdb3566f --- /dev/null +++ b/git2/src/submodule.rs @@ -0,0 +1,344 @@ +use std::marker; +use std::mem; +use std::ptr; +use std::str; +use std::os::raw::c_int; +use std::path::Path; + +use {raw, Oid, Repository, Error, FetchOptions}; +use build::CheckoutBuilder; +use util::{self, Binding}; + +/// A structure to represent a git [submodule][1] +/// +/// [1]: http://git-scm.com/book/en/Git-Tools-Submodules +pub struct Submodule<'repo> { + raw: *mut raw::git_submodule, + _marker: marker::PhantomData<&'repo Repository>, +} + +impl<'repo> Submodule<'repo> { + /// Get the submodule's branch. + /// + /// Returns `None` if the branch is not valid utf-8 or if the branch is not + /// yet available. + pub fn branch(&self) -> Option<&str> { + self.branch_bytes().and_then(|s| str::from_utf8(s).ok()) + } + + /// Get the branch for the submodule. 
+ /// + /// Returns `None` if the branch is not yet available. + pub fn branch_bytes(&self) -> Option<&[u8]> { + unsafe { + ::opt_bytes(self, raw::git_submodule_branch(self.raw)) + } + } + + /// Get the submodule's url. + /// + /// Returns `None` if the url is not valid utf-8 + pub fn url(&self) -> Option<&str> { str::from_utf8(self.url_bytes()).ok() } + + /// Get the url for the submodule. + pub fn url_bytes(&self) -> &[u8] { + unsafe { + ::opt_bytes(self, raw::git_submodule_url(self.raw)).unwrap() + } + } + + /// Get the submodule's name. + /// + /// Returns `None` if the name is not valid utf-8 + pub fn name(&self) -> Option<&str> { str::from_utf8(self.name_bytes()).ok() } + + /// Get the name for the submodule. + pub fn name_bytes(&self) -> &[u8] { + unsafe { + ::opt_bytes(self, raw::git_submodule_name(self.raw)).unwrap() + } + } + + /// Get the path for the submodule. + pub fn path(&self) -> &Path { + util::bytes2path(unsafe { + ::opt_bytes(self, raw::git_submodule_path(self.raw)).unwrap() + }) + } + + /// Get the OID for the submodule in the current HEAD tree. + pub fn head_id(&self) -> Option { + unsafe { + Binding::from_raw_opt(raw::git_submodule_head_id(self.raw)) + } + } + + /// Get the OID for the submodule in the index. + pub fn index_id(&self) -> Option { + unsafe { + Binding::from_raw_opt(raw::git_submodule_index_id(self.raw)) + } + } + + /// Get the OID for the submodule in the current working directory. + /// + /// This returns the OID that corresponds to looking up 'HEAD' in the + /// checked out submodule. If there are pending changes in the index or + /// anything else, this won't notice that. + pub fn workdir_id(&self) -> Option { + unsafe { + Binding::from_raw_opt(raw::git_submodule_wd_id(self.raw)) + } + } + + /// Copy submodule info into ".git/config" file. + /// + /// Just like "git submodule init", this copies information about the + /// submodule into ".git/config". You can use the accessor functions above + /// to alter the in-memory git_submodule object and control what is written + /// to the config, overriding what is in .gitmodules. + /// + /// By default, existing entries will not be overwritten, but passing `true` + /// for `overwrite` forces them to be updated. + pub fn init(&mut self, overwrite: bool) -> Result<(), Error> { + unsafe { + try_call!(raw::git_submodule_init(self.raw, overwrite)); + } + Ok(()) + } + + /// Open the repository for a submodule. + /// + /// This will only work if the submodule is checked out into the working + /// directory. + pub fn open(&self) -> Result { + let mut raw = ptr::null_mut(); + unsafe { + try_call!(raw::git_submodule_open(&mut raw, self.raw)); + Ok(Binding::from_raw(raw)) + } + } + + /// Reread submodule info from config, index, and HEAD. + /// + /// Call this to reread cached submodule information for this submodule if + /// you have reason to believe that it has changed. + /// + /// If `force` is `true`, then data will be reloaded even if it doesn't seem + /// out of date + pub fn reload(&mut self, force: bool) -> Result<(), Error> { + unsafe { + try_call!(raw::git_submodule_reload(self.raw, force)); + } + Ok(()) + } + + /// Copy submodule remote info into submodule repo. + /// + /// This copies the information about the submodules URL into the checked + /// out submodule config, acting like "git submodule sync". This is useful + /// if you have altered the URL for the submodule (or it has been altered + /// by a fetch of upstream changes) and you need to update your local repo. 
+ pub fn sync(&mut self) -> Result<(), Error> { + unsafe { try_call!(raw::git_submodule_sync(self.raw)); } + Ok(()) + } + + /// Add current submodule HEAD commit to index of superproject. + /// + /// If `write_index` is true, then the index file will be immediately + /// written. Otherwise you must explicitly call `write()` on an `Index` + /// later on. + pub fn add_to_index(&mut self, write_index: bool) -> Result<(), Error> { + unsafe { + try_call!(raw::git_submodule_add_to_index(self.raw, write_index)); + } + Ok(()) + } + + /// Resolve the setup of a new git submodule. + /// + /// This should be called on a submodule once you have called add setup and + /// done the clone of the submodule. This adds the .gitmodules file and the + /// newly cloned submodule to the index to be ready to be committed (but + /// doesn't actually do the commit). + pub fn add_finalize(&mut self) -> Result<(), Error> { + unsafe { try_call!(raw::git_submodule_add_finalize(self.raw)); } + Ok(()) + } + + /// Update submodule. + /// + /// This will clone a missing submodule and check out the subrepository to + /// the commit specified in the index of the containing repository. If + /// the submodule repository doesn't contain the target commit, then the + /// submodule is fetched using the fetch options supplied in `opts`. + /// + /// `init` indicates if the submodule should be initialized first if it has + /// not been initialized yet. + pub fn update(&mut self, init: bool, + opts: Option<&mut SubmoduleUpdateOptions>) + -> Result<(), Error> { + unsafe { + let mut raw_opts = opts.map(|o| o.raw()); + try_call!(raw::git_submodule_update(self.raw, init as c_int, + raw_opts.as_mut().map_or(ptr::null_mut(), |o| o))); + } + Ok(()) + } +} + +impl<'repo> Binding for Submodule<'repo> { + type Raw = *mut raw::git_submodule; + unsafe fn from_raw(raw: *mut raw::git_submodule) -> Submodule<'repo> { + Submodule { raw: raw, _marker: marker::PhantomData } + } + fn raw(&self) -> *mut raw::git_submodule { self.raw } +} + +impl<'repo> Drop for Submodule<'repo> { + fn drop(&mut self) { + unsafe { raw::git_submodule_free(self.raw) } + } +} + +/// Options to update a submodule. +pub struct SubmoduleUpdateOptions<'cb> { + checkout_builder: CheckoutBuilder<'cb>, + fetch_opts: FetchOptions<'cb>, + allow_fetch: bool, +} + +impl<'cb> SubmoduleUpdateOptions<'cb> { + /// Return default options. + pub fn new() -> Self { + SubmoduleUpdateOptions { + checkout_builder: CheckoutBuilder::new(), + fetch_opts: FetchOptions::new(), + allow_fetch: true, + } + } + + unsafe fn raw(&mut self) -> raw::git_submodule_update_options { + let mut checkout_opts: raw::git_checkout_options = mem::zeroed(); + let init_res = raw::git_checkout_init_options(&mut checkout_opts, + raw::GIT_CHECKOUT_OPTIONS_VERSION); + assert_eq!(0, init_res); + self.checkout_builder.configure(&mut checkout_opts); + let opts = raw::git_submodule_update_options { + version: raw::GIT_SUBMODULE_UPDATE_OPTIONS_VERSION, + checkout_opts, + fetch_opts: self.fetch_opts.raw(), + allow_fetch: self.allow_fetch as c_int, + }; + opts + } + + /// Set checkout options. + pub fn checkout(&mut self, opts: CheckoutBuilder<'cb>) -> &mut Self { + self.checkout_builder = opts; + self + } + + /// Set fetch options and allow fetching. + pub fn fetch(&mut self, opts: FetchOptions<'cb>) -> &mut Self { + self.fetch_opts = opts; + self.allow_fetch = true; + self + } + + /// Allow or disallow fetching. 
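// A hedged usage sketch, not part of the vendored file: the rough equivalent
// of `git submodule update --init` for every submodule of `repo`, using the
// update() method and SubmoduleUpdateOptions defined above.
fn update_all(repo: &git2::Repository) -> Result<(), git2::Error> {
    for mut sm in repo.submodules()? {
        let mut opts = git2::SubmoduleUpdateOptions::new();
        sm.update(true, Some(&mut opts))?; // init if needed, then clone/checkout
    }
    Ok(())
}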
+ pub fn allow_fetch(&mut self, b: bool) -> &mut Self { + self.allow_fetch = b; + self + } +} + +impl<'cb> Default for SubmoduleUpdateOptions<'cb> { + fn default() -> Self { + Self::new() + } +} + +#[cfg(test)] +mod tests { + use std::path::Path; + use std::fs; + use tempdir::TempDir; + use url::Url; + + use Repository; + use SubmoduleUpdateOptions; + + #[test] + fn smoke() { + let td = TempDir::new("test").unwrap(); + let repo = Repository::init(td.path()).unwrap(); + let mut s1 = repo.submodule("/path/to/nowhere", + Path::new("foo"), true).unwrap(); + s1.init(false).unwrap(); + s1.sync().unwrap(); + + let s2 = repo.submodule("/path/to/nowhere", + Path::new("bar"), true).unwrap(); + drop((s1, s2)); + + let mut submodules = repo.submodules().unwrap(); + assert_eq!(submodules.len(), 2); + let mut s = submodules.remove(0); + assert_eq!(s.name(), Some("bar")); + assert_eq!(s.url(), Some("/path/to/nowhere")); + assert_eq!(s.branch(), None); + assert!(s.head_id().is_none()); + assert!(s.index_id().is_none()); + assert!(s.workdir_id().is_none()); + + repo.find_submodule("bar").unwrap(); + s.open().unwrap(); + assert!(s.path() == Path::new("bar")); + s.reload(true).unwrap(); + } + + #[test] + fn add_a_submodule() { + let (_td, repo1) = ::test::repo_init(); + let (td, repo2) = ::test::repo_init(); + + let url = Url::from_file_path(&repo1.workdir().unwrap()).unwrap(); + let mut s = repo2.submodule(&url.to_string(), Path::new("bar"), + true).unwrap(); + t!(fs::remove_dir_all(td.path().join("bar"))); + t!(Repository::clone(&url.to_string(), + td.path().join("bar"))); + t!(s.add_to_index(false)); + t!(s.add_finalize()); + } + + #[test] + fn update_submodule() { + // ----------------------------------- + // Same as `add_a_submodule()` + let (_td, repo1) = ::test::repo_init(); + let (td, repo2) = ::test::repo_init(); + + let url = Url::from_file_path(&repo1.workdir().unwrap()).unwrap(); + let mut s = repo2.submodule(&url.to_string(), Path::new("bar"), + true).unwrap(); + t!(fs::remove_dir_all(td.path().join("bar"))); + t!(Repository::clone(&url.to_string(), + td.path().join("bar"))); + t!(s.add_to_index(false)); + t!(s.add_finalize()); + // ----------------------------------- + + // Attempt to update submodule + let submodules = t!(repo1.submodules()); + for mut submodule in submodules { + let mut submodule_options = SubmoduleUpdateOptions::new(); + let init = true; + let opts = Some(&mut submodule_options); + + t!(submodule.update(init, opts)); + } + } +} diff --git a/git2/src/tag.rs b/git2/src/tag.rs new file mode 100644 index 000000000..8041f3546 --- /dev/null +++ b/git2/src/tag.rs @@ -0,0 +1,191 @@ +use std::marker; +use std::mem; +use std::ptr; +use std::str; + +use {raw, signature, Error, Oid, Object, Signature, ObjectType}; +use util::Binding; + +/// A structure to represent a git [tag][1] +/// +/// [1]: http://git-scm.com/book/en/Git-Basics-Tagging +pub struct Tag<'repo> { + raw: *mut raw::git_tag, + _marker: marker::PhantomData>, +} + +impl<'repo> Tag<'repo> { + /// Get the id (SHA1) of a repository tag + pub fn id(&self) -> Oid { + unsafe { Binding::from_raw(raw::git_tag_id(&*self.raw)) } + } + + /// Get the message of a tag + /// + /// Returns None if there is no message or if it is not valid utf8 + pub fn message(&self) -> Option<&str> { + self.message_bytes().and_then(|s| str::from_utf8(s).ok()) + } + + /// Get the message of a tag + /// + /// Returns None if there is no message + pub fn message_bytes(&self) -> Option<&[u8]> { + unsafe { ::opt_bytes(self, raw::git_tag_message(&*self.raw)) } 
+ } + + /// Get the name of a tag + /// + /// Returns None if it is not valid utf8 + pub fn name(&self) -> Option<&str> { + str::from_utf8(self.name_bytes()).ok() + } + + /// Get the name of a tag + pub fn name_bytes(&self) -> &[u8] { + unsafe { ::opt_bytes(self, raw::git_tag_name(&*self.raw)).unwrap() } + } + + /// Recursively peel a tag until a non tag git_object is found + pub fn peel(&self) -> Result, Error> { + let mut ret = ptr::null_mut(); + unsafe { + try_call!(raw::git_tag_peel(&mut ret, &*self.raw)); + Ok(Binding::from_raw(ret)) + } + } + + /// Get the tagger (author) of a tag + /// + /// If the author is unspecified, then `None` is returned. + pub fn tagger(&self) -> Option { + unsafe { + let ptr = raw::git_tag_tagger(&*self.raw); + if ptr.is_null() { + None + } else { + Some(signature::from_raw_const(self, ptr)) + } + } + } + + /// Get the tagged object of a tag + /// + /// This method performs a repository lookup for the given object and + /// returns it + pub fn target(&self) -> Result, Error> { + let mut ret = ptr::null_mut(); + unsafe { + try_call!(raw::git_tag_target(&mut ret, &*self.raw)); + Ok(Binding::from_raw(ret)) + } + } + + /// Get the OID of the tagged object of a tag + pub fn target_id(&self) -> Oid { + unsafe { Binding::from_raw(raw::git_tag_target_id(&*self.raw)) } + } + + /// Get the OID of the tagged object of a tag + pub fn target_type(&self) -> Option { + unsafe { ObjectType::from_raw(raw::git_tag_target_type(&*self.raw)) } + } + + /// Casts this Tag to be usable as an `Object` + pub fn as_object(&self) -> &Object<'repo> { + unsafe { + &*(self as *const _ as *const Object<'repo>) + } + } + + /// Consumes Tag to be returned as an `Object` + pub fn into_object(self) -> Object<'repo> { + assert_eq!(mem::size_of_val(&self), mem::size_of::()); + unsafe { + mem::transmute(self) + } + } +} + +impl<'repo> ::std::fmt::Debug for Tag<'repo> { + fn fmt(&self, f: &mut ::std::fmt::Formatter) -> Result<(), ::std::fmt::Error> { + let mut ds = f.debug_struct("Tag"); + if let Some(name) = self.name() { + ds.field("name", &name); + } + ds.field("id", &self.id()); + ds.finish() + } +} + +impl<'repo> Binding for Tag<'repo> { + type Raw = *mut raw::git_tag; + unsafe fn from_raw(raw: *mut raw::git_tag) -> Tag<'repo> { + Tag { raw: raw, _marker: marker::PhantomData } + } + fn raw(&self) -> *mut raw::git_tag { self.raw } +} + +impl<'repo> Clone for Tag<'repo> { + fn clone(&self) -> Self { + self.as_object().clone().into_tag().ok().unwrap() + } +} + +impl<'repo> Drop for Tag<'repo> { + fn drop(&mut self) { + unsafe { raw::git_tag_free(self.raw) } + } +} + +#[cfg(test)] +mod tests { + #[test] + fn smoke() { + let (_td, repo) = ::test::repo_init(); + let head = repo.head().unwrap(); + let id = head.target().unwrap(); + assert!(repo.find_tag(id).is_err()); + + let obj = repo.find_object(id, None).unwrap(); + let sig = repo.signature().unwrap(); + let tag_id = repo.tag("foo", &obj, &sig, "msg", false).unwrap(); + let tag = repo.find_tag(tag_id).unwrap(); + assert_eq!(tag.id(), tag_id); + + let tags = repo.tag_names(None).unwrap(); + assert_eq!(tags.len(), 1); + assert_eq!(tags.get(0), Some("foo")); + + assert_eq!(tag.name(), Some("foo")); + assert_eq!(tag.message(), Some("msg")); + assert_eq!(tag.peel().unwrap().id(), obj.id()); + assert_eq!(tag.target_id(), obj.id()); + assert_eq!(tag.target_type(), Some(::ObjectType::Commit)); + + assert_eq!(tag.tagger().unwrap().name(), sig.name()); + tag.target().unwrap(); + tag.into_object(); + + repo.find_object(tag_id, 
None).unwrap().as_tag().unwrap(); + repo.find_object(tag_id, None).unwrap().into_tag().ok().unwrap(); + + repo.tag_delete("foo").unwrap(); + } + + #[test] + fn lite() { + let (_td, repo) = ::test::repo_init(); + let head = t!(repo.head()); + let id = head.target().unwrap(); + let obj = t!(repo.find_object(id, None)); + let tag_id = t!(repo.tag_lightweight("foo", &obj, false)); + assert!(repo.find_tag(tag_id).is_err()); + assert_eq!(t!(repo.refname_to_id("refs/tags/foo")), id); + + let tags = t!(repo.tag_names(Some("f*"))); + assert_eq!(tags.len(), 1); + let tags = t!(repo.tag_names(Some("b*"))); + assert_eq!(tags.len(), 0); + } +} diff --git a/git2/src/test.rs b/git2/src/test.rs new file mode 100644 index 000000000..4f8813fda --- /dev/null +++ b/git2/src/test.rs @@ -0,0 +1,61 @@ +use std::path::{Path, PathBuf}; +use std::io; +#[cfg(unix)] +use std::ptr; +use tempdir::TempDir; +use url::Url; + +use Repository; + +macro_rules! t { + ($e:expr) => (match $e { + Ok(e) => e, + Err(e) => panic!("{} failed with {}", stringify!($e), e), + }) +} + +pub fn repo_init() -> (TempDir, Repository) { + let td = TempDir::new("test").unwrap(); + let repo = Repository::init(td.path()).unwrap(); + { + let mut config = repo.config().unwrap(); + config.set_str("user.name", "name").unwrap(); + config.set_str("user.email", "email").unwrap(); + let mut index = repo.index().unwrap(); + let id = index.write_tree().unwrap(); + + let tree = repo.find_tree(id).unwrap(); + let sig = repo.signature().unwrap(); + repo.commit(Some("HEAD"), &sig, &sig, "initial", + &tree, &[]).unwrap(); + } + (td, repo) +} + +pub fn path2url(path: &Path) -> String { + Url::from_file_path(path).unwrap().to_string() +} + +#[cfg(windows)] +pub fn realpath(original: &Path) -> io::Result { + Ok(original.to_path_buf()) +} +#[cfg(unix)] +pub fn realpath(original: &Path) -> io::Result { + use std::ffi::{CStr, OsString, CString}; + use std::os::unix::prelude::*; + use libc::{self, c_char}; + extern { + fn realpath(name: *const c_char, resolved: *mut c_char) -> *mut c_char; + } + unsafe { + let cstr = try!(CString::new(original.as_os_str().as_bytes())); + let ptr = realpath(cstr.as_ptr(), ptr::null_mut()); + if ptr.is_null() { + return Err(io::Error::last_os_error()) + } + let bytes = CStr::from_ptr(ptr).to_bytes().to_vec(); + libc::free(ptr as *mut _); + Ok(PathBuf::from(OsString::from_vec(bytes))) + } +} diff --git a/git2/src/time.rs b/git2/src/time.rs new file mode 100644 index 000000000..57a5a70a3 --- /dev/null +++ b/git2/src/time.rs @@ -0,0 +1,100 @@ +use std::cmp::Ordering; + +use libc::{c_int, c_char}; + +use raw; +use util::Binding; + +/// Time in a signature +#[derive(Copy, Clone, Eq, PartialEq)] +pub struct Time { + raw: raw::git_time, +} + +/// Time structure used in a git index entry. +#[derive(Copy, Clone, Eq, PartialEq)] +pub struct IndexTime { + raw: raw::git_index_time, +} + +impl Time { + /// Creates a new time structure from its components. + pub fn new(time: i64, offset: i32) -> Time { + unsafe { + Binding::from_raw(raw::git_time { + time: time as raw::git_time_t, + offset: offset as c_int, + sign: if offset < 0 { '-' } else { '+' } as c_char, + }) + } + } + + /// Return the time, in seconds, from epoch + pub fn seconds(&self) -> i64 { self.raw.time as i64 } + + /// Return the timezone offset, in minutes + pub fn offset_minutes(&self) -> i32 { self.raw.offset as i32 } + + /// Return whether the offset was positive or negative. Primarily useful + /// in case the offset is specified as a negative zero. 
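// A hedged usage sketch, not part of the vendored file: building a signature
// with an explicit Time; the second argument to Time::new is the timezone
// offset in minutes, so -300 below means UTC-5.
fn backdated_sig() -> Result<git2::Signature<'static>, git2::Error> {
    let when = git2::Time::new(1_500_000_000, -300);
    git2::Signature::new("Example Author", "author@example.com", &when)
}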
+ pub fn sign(&self) -> char { self.raw.offset as u8 as char } +} + +impl PartialOrd for Time { + fn partial_cmp(&self, other: &Time) -> Option { + Some(self.cmp(other)) + } +} + +impl Ord for Time { + fn cmp(&self, other: &Time) -> Ordering { + (self.raw.time, self.raw.offset).cmp(&(other.raw.time, other.raw.offset)) + } +} + +impl Binding for Time { + type Raw = raw::git_time; + unsafe fn from_raw(raw: raw::git_time) -> Time { + Time { raw: raw } + } + fn raw(&self) -> raw::git_time { self.raw } +} + +impl IndexTime { + /// Creates a new time structure from its components. + pub fn new(seconds: i32, nanoseconds: u32) -> IndexTime { + unsafe { + Binding::from_raw(raw::git_index_time { + seconds: seconds, + nanoseconds: nanoseconds, + }) + } + } + + /// Returns the number of seconds in the second component of this time. + pub fn seconds(&self) -> i32 { self.raw.seconds } + /// Returns the nanosecond component of this time. + pub fn nanoseconds(&self) -> u32 { self.raw.nanoseconds } +} + +impl Binding for IndexTime { + type Raw = raw::git_index_time; + unsafe fn from_raw(raw: raw::git_index_time) -> IndexTime { + IndexTime { raw: raw } + } + fn raw(&self) -> raw::git_index_time { self.raw } +} + +impl PartialOrd for IndexTime { + fn partial_cmp(&self, other: &IndexTime) -> Option { + Some(self.cmp(other)) + } +} + +impl Ord for IndexTime { + fn cmp(&self, other: &IndexTime) -> Ordering { + let me = (self.raw.seconds, self.raw.nanoseconds); + let other = (other.raw.seconds, other.raw.nanoseconds); + me.cmp(&other) + } +} diff --git a/git2/src/transport.rs b/git2/src/transport.rs new file mode 100644 index 000000000..d34db9f88 --- /dev/null +++ b/git2/src/transport.rs @@ -0,0 +1,326 @@ +//! Interfaces for adding custom transports to libgit2 + +use std::ffi::{CStr, CString}; +use std::io::prelude::*; +use std::io; +use std::mem; +use std::slice; +use std::ptr; +use std::str; +use libc::{c_int, c_void, c_uint, c_char, size_t}; + +use {raw, panic, Error, Remote}; +use util::Binding; + +/// A transport is a structure which knows how to transfer data to and from a +/// remote. +/// +/// This transport is a representation of the raw transport underneath it, which +/// is similar to a trait object in Rust. +#[allow(missing_copy_implementations)] +pub struct Transport { + raw: *mut raw::git_transport, + owned: bool, +} + +/// Interface used by smart transports. +/// +/// The full-fledged definiton of transports has to deal with lots of +/// nitty-gritty details of the git protocol, but "smart transports" largely +/// only need to deal with read() and write() of data over a channel. +/// +/// A smart subtransport is contained within an instance of a smart transport +/// and is delegated to in order to actually conduct network activity to push or +/// pull data from a remote. +pub trait SmartSubtransport: Send + 'static { + /// Indicates that this subtransport will be performing the specified action + /// on the specified URL. + /// + /// This function is responsible for making any network connections and + /// returns a stream which can be read and written from in order to + /// negotiate the git protocol. + fn action(&self, url: &str, action: Service) + -> Result, Error>; + + /// Terminates a connection with the remote. + /// + /// Each subtransport is guaranteed a call to close() between calls to + /// action(), except for the following two natural progressions of actions + /// against a constant URL. + /// + /// 1. UploadPackLs -> UploadPack + /// 2. 
ReceivePackLs -> ReceivePack + fn close(&self) -> Result<(), Error>; +} + +/// Actions that a smart transport can ask a subtransport to perform +#[derive(Copy, Clone)] +#[allow(missing_docs)] +pub enum Service { + UploadPackLs, + UploadPack, + ReceivePackLs, + ReceivePack, +} + +/// An instance of a stream over which a smart transport will communicate with a +/// remote. +/// +/// Currently this only requires the standard `Read` and `Write` traits. This +/// trait also does not need to be implemented manually as long as the `Read` +/// and `Write` traits are implemented. +pub trait SmartSubtransportStream: Read + Write + Send + 'static {} + +impl SmartSubtransportStream for T {} + +type TransportFactory = Fn(&Remote) -> Result + Send + Sync + + 'static; + +/// Boxed data payload used for registering new transports. +/// +/// Currently only contains a field which knows how to create transports. +struct TransportData { + factory: Box, +} + +/// Instance of a `git_smart_subtransport`, must use `#[repr(C)]` to ensure that +/// the C fields come first. +#[repr(C)] +struct RawSmartSubtransport { + raw: raw::git_smart_subtransport, + obj: Box, +} + +/// Instance of a `git_smart_subtransport_stream`, must use `#[repr(C)]` to +/// ensure that the C fields come first. +#[repr(C)] +struct RawSmartSubtransportStream { + raw: raw::git_smart_subtransport_stream, + obj: Box, +} + +/// Add a custom transport definition, to be used in addition to the built-in +/// set of transports that come with libgit2. +/// +/// This function is unsafe as it needs to be externally synchronized with calls +/// to creation of other transports. +pub unsafe fn register(prefix: &str, factory: F) -> Result<(), Error> + where F: Fn(&Remote) -> Result + Send + Sync + 'static +{ + let mut data = Box::new(TransportData { + factory: Box::new(factory), + }); + let prefix = try!(CString::new(prefix)); + let datap = (&mut *data) as *mut TransportData as *mut c_void; + try_call!(raw::git_transport_register(prefix, + transport_factory, + datap)); + mem::forget(data); + Ok(()) +} + +impl Transport { + /// Creates a new transport which will use the "smart" transport protocol + /// for transferring data. + /// + /// A smart transport requires a *subtransport* over which data is actually + /// communicated, but this subtransport largely just needs to be able to + /// read() and write(). The subtransport provided will be used to make + /// connections which can then be read/written from. + /// + /// The `rpc` argument is `true` if the protocol is stateless, false + /// otherwise. For example `http://` is stateless but `git://` is not. + pub fn smart(remote: &Remote, + rpc: bool, + subtransport: S) -> Result + where S: SmartSubtransport + { + let mut ret = ptr::null_mut(); + + let mut raw = Box::new(RawSmartSubtransport { + raw: raw::git_smart_subtransport { + action: subtransport_action, + close: subtransport_close, + free: subtransport_free, + }, + obj: Box::new(subtransport), + }); + let mut defn = raw::git_smart_subtransport_definition { + callback: smart_factory, + rpc: rpc as c_uint, + param: &mut *raw as *mut _ as *mut _, + }; + + // Currently there's no way to pass a payload via the + // git_smart_subtransport_definition structure, but it's only used as a + // configuration for the initial creation of the smart transport (verified + // by reading the current code, hopefully it doesn't change!). + // + // We, however, need some state (gotta pass in our + // `RawSmartSubtransport`). 
This also means that this block must be + // entirely synchronized with a lock (boo!) + unsafe { + try_call!(raw::git_transport_smart(&mut ret, remote.raw(), + &mut defn as *mut _ as *mut _)); + mem::forget(raw); // ownership transport to `ret` + } + return Ok(Transport { raw: ret, owned: true }); + + extern fn smart_factory(out: *mut *mut raw::git_smart_subtransport, + _owner: *mut raw::git_transport, + ptr: *mut c_void) -> c_int { + unsafe { + *out = ptr as *mut raw::git_smart_subtransport; + 0 + } + } + } +} + +impl Drop for Transport { + fn drop(&mut self) { + if self.owned { + unsafe { + ((*self.raw).free)(self.raw) + } + } + } +} + +// callback used by register() to create new transports +extern fn transport_factory(out: *mut *mut raw::git_transport, + owner: *mut raw::git_remote, + param: *mut c_void) -> c_int { + struct Bomb<'a> { remote: Option> } + impl<'a> Drop for Bomb<'a> { + fn drop(&mut self) { + // TODO: maybe a method instead? + mem::forget(self.remote.take()); + } + } + + panic::wrap(|| unsafe { + let remote = Bomb { remote: Some(Binding::from_raw(owner)) }; + let data = &mut *(param as *mut TransportData); + match (data.factory)(remote.remote.as_ref().unwrap()) { + Ok(mut transport) => { + *out = transport.raw; + transport.owned = false; + 0 + } + Err(e) => e.raw_code() as c_int, + } + }).unwrap_or(-1) +} + +// callback used by smart transports to delegate an action to a +// `SmartSubtransport` trait object. +extern fn subtransport_action(stream: *mut *mut raw::git_smart_subtransport_stream, + raw_transport: *mut raw::git_smart_subtransport, + url: *const c_char, + action: raw::git_smart_service_t) -> c_int { + panic::wrap(|| unsafe { + let url = CStr::from_ptr(url).to_bytes(); + let url = match str::from_utf8(url).ok() { + Some(s) => s, + None => return -1, + }; + let action = match action { + raw::GIT_SERVICE_UPLOADPACK_LS => Service::UploadPackLs, + raw::GIT_SERVICE_UPLOADPACK => Service::UploadPack, + raw::GIT_SERVICE_RECEIVEPACK_LS => Service::ReceivePackLs, + raw::GIT_SERVICE_RECEIVEPACK => Service::ReceivePack, + n => panic!("unknown action: {}", n), + }; + let transport = &mut *(raw_transport as *mut RawSmartSubtransport); + let obj = match transport.obj.action(url, action) { + Ok(s) => s, + Err(e) => return e.raw_code() as c_int, + }; + *stream = mem::transmute(Box::new(RawSmartSubtransportStream { + raw: raw::git_smart_subtransport_stream { + subtransport: raw_transport, + read: stream_read, + write: stream_write, + free: stream_free, + }, + obj: obj, + })); + 0 + }).unwrap_or(-1) +} + +// callback used by smart transports to close a `SmartSubtransport` trait +// object. +extern fn subtransport_close(transport: *mut raw::git_smart_subtransport) + -> c_int { + let ret = panic::wrap(|| unsafe { + let transport = &mut *(transport as *mut RawSmartSubtransport); + transport.obj.close() + }); + match ret { + Some(Ok(())) => 0, + Some(Err(e)) => e.raw_code() as c_int, + None => -1, + } +} + +// callback used by smart transports to free a `SmartSubtransport` trait +// object. +extern fn subtransport_free(transport: *mut raw::git_smart_subtransport) { + let _ = panic::wrap(|| unsafe { + mem::transmute::<_, Box>(transport); + }); +} + +// callback used by smart transports to read from a `SmartSubtransportStream` +// object. 
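The `register` and `Transport::smart` interfaces documented earlier in this file carry no example in the vendored sources. The sketch below is a hedged illustration of how a custom subtransport might be wired up: `NullStream`, `NullSubtransport`, and the `"null"` URL scheme are invented names, and it assumes this module is exposed as `git2::transport`, as in the published crate. The stream performs no real I/O, so it only demonstrates the shape of the API.

```rust
extern crate git2;

use std::io::{self, Read, Write};

use git2::{Error, Remote};
use git2::transport::{register, Service, SmartSubtransport, SmartSubtransportStream, Transport};

// A do-nothing stream: reads report EOF, writes are discarded.
struct NullStream;

impl Read for NullStream {
    fn read(&mut self, _buf: &mut [u8]) -> io::Result<usize> { Ok(0) }
}

impl Write for NullStream {
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> { Ok(buf.len()) }
    fn flush(&mut self) -> io::Result<()> { Ok(()) }
}

// A subtransport that hands back a NullStream for every action.
struct NullSubtransport;

impl SmartSubtransport for NullSubtransport {
    fn action(&self, _url: &str, _action: Service)
              -> Result<Box<SmartSubtransportStream>, Error> {
        Ok(Box::new(NullStream))
    }

    fn close(&self) -> Result<(), Error> { Ok(()) }
}

fn main() {
    // Register the subtransport for a made-up "null://" URL scheme. The scheme is
    // stateless, so `rpc` is passed as `true` to `Transport::smart`.
    unsafe {
        register("null", |remote: &Remote| {
            Transport::smart(remote, true, NullSubtransport)
        }).unwrap();
    }
}
```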
+extern fn stream_read(stream: *mut raw::git_smart_subtransport_stream, + buffer: *mut c_char, + buf_size: size_t, + bytes_read: *mut size_t) -> c_int { + let ret = panic::wrap(|| unsafe { + let transport = &mut *(stream as *mut RawSmartSubtransportStream); + let buf = slice::from_raw_parts_mut(buffer as *mut u8, + buf_size as usize); + match transport.obj.read(buf) { + Ok(n) => { *bytes_read = n as size_t; Ok(n) } + e => e, + } + }); + match ret { + Some(Ok(_)) => 0, + Some(Err(e)) => unsafe { set_err(&e); -2 }, + None => -1, + } +} + +// callback used by smart transports to write to a `SmartSubtransportStream` +// object. +extern fn stream_write(stream: *mut raw::git_smart_subtransport_stream, + buffer: *const c_char, + len: size_t) -> c_int { + let ret = panic::wrap(|| unsafe { + let transport = &mut *(stream as *mut RawSmartSubtransportStream); + let buf = slice::from_raw_parts(buffer as *const u8, len as usize); + transport.obj.write_all(buf) + }); + match ret { + Some(Ok(())) => 0, + Some(Err(e)) => unsafe { set_err(&e); -2 }, + None => -1, + } +} + +unsafe fn set_err(e: &io::Error) { + let s = CString::new(e.to_string()).unwrap(); + raw::giterr_set_str(raw::GITERR_NET as c_int, s.as_ptr()) +} + +// callback used by smart transports to free a `SmartSubtransportStream` +// object. +extern fn stream_free(stream: *mut raw::git_smart_subtransport_stream) { + let _ = panic::wrap(|| unsafe { + mem::transmute::<_, Box>(stream); + }); +} diff --git a/git2/src/tree.rs b/git2/src/tree.rs new file mode 100644 index 000000000..a2006f3c5 --- /dev/null +++ b/git2/src/tree.rs @@ -0,0 +1,406 @@ +use std::mem; +use std::cmp::Ordering; +use std::ffi::CString; +use std::ops::Range; +use std::marker; +use std::path::Path; +use std::ptr; +use std::str; +use libc; + +use {raw, Oid, Repository, Error, Object, ObjectType}; +use util::{Binding, IntoCString}; + +/// A structure to represent a git [tree][1] +/// +/// [1]: http://git-scm.com/book/en/Git-Internals-Git-Objects +pub struct Tree<'repo> { + raw: *mut raw::git_tree, + _marker: marker::PhantomData>, +} + +/// A structure representing an entry inside of a tree. An entry is borrowed +/// from a tree. +pub struct TreeEntry<'tree> { + raw: *mut raw::git_tree_entry, + owned: bool, + _marker: marker::PhantomData<&'tree raw::git_tree_entry>, +} + +/// An iterator over the entries in a tree. +pub struct TreeIter<'tree> { + range: Range, + tree: &'tree Tree<'tree>, +} + +impl<'repo> Tree<'repo> { + /// Get the id (SHA1) of a repository object + pub fn id(&self) -> Oid { + unsafe { Binding::from_raw(raw::git_tree_id(&*self.raw)) } + } + + /// Get the number of entries listed in this tree. + pub fn len(&self) -> usize { + unsafe { raw::git_tree_entrycount(&*self.raw) as usize } + } + + /// Return `true` if there is not entry + pub fn is_empty(&self) -> bool { + self.len() == 0 + } + + /// Returns an iterator over the entries in this tree. + pub fn iter(&self) -> TreeIter { + TreeIter { range: 0..self.len(), tree: self } + } + + /// Lookup a tree entry by SHA value. 
+ pub fn get_id(&self, id: Oid) -> Option { + unsafe { + let ptr = raw::git_tree_entry_byid(&*self.raw(), &*id.raw()); + if ptr.is_null() { + None + } else { + Some(entry_from_raw_const(ptr)) + } + } + } + + /// Lookup a tree entry by its position in the tree + pub fn get(&self, n: usize) -> Option { + unsafe { + let ptr = raw::git_tree_entry_byindex(&*self.raw(), + n as libc::size_t); + if ptr.is_null() { + None + } else { + Some(entry_from_raw_const(ptr)) + } + } + } + + /// Lookup a tree entry by its filename + pub fn get_name(&self, filename: &str) -> Option { + let filename = CString::new(filename).unwrap(); + unsafe { + let ptr = call!(raw::git_tree_entry_byname(&*self.raw(), filename)); + if ptr.is_null() { + None + } else { + Some(entry_from_raw_const(ptr)) + } + } + } + + /// Retrieve a tree entry contained in a tree or in any of its subtrees, + /// given its relative path. + pub fn get_path(&self, path: &Path) -> Result, Error> { + let path = try!(path.into_c_string()); + let mut ret = ptr::null_mut(); + unsafe { + try_call!(raw::git_tree_entry_bypath(&mut ret, &*self.raw(), path)); + Ok(Binding::from_raw(ret)) + } + } + + /// Casts this Tree to be usable as an `Object` + pub fn as_object(&self) -> &Object<'repo> { + unsafe { + &*(self as *const _ as *const Object<'repo>) + } + } + + /// Consumes Commit to be returned as an `Object` + pub fn into_object(self) -> Object<'repo> { + assert_eq!(mem::size_of_val(&self), mem::size_of::()); + unsafe { + mem::transmute(self) + } + } +} + +impl<'repo> Binding for Tree<'repo> { + type Raw = *mut raw::git_tree; + + unsafe fn from_raw(raw: *mut raw::git_tree) -> Tree<'repo> { + Tree { raw: raw, _marker: marker::PhantomData } + } + fn raw(&self) -> *mut raw::git_tree { self.raw } +} + +impl<'repo> ::std::fmt::Debug for Tree<'repo> { + fn fmt(&self, f: &mut ::std::fmt::Formatter) -> Result<(), ::std::fmt::Error> { + f.debug_struct("Tree").field("id", &self.id()).finish() + } +} + +impl<'repo> Clone for Tree<'repo> { + fn clone(&self) -> Self { + self.as_object().clone().into_tree().ok().unwrap() + } +} + +impl<'repo> Drop for Tree<'repo> { + fn drop(&mut self) { + unsafe { raw::git_tree_free(self.raw) } + } +} + +impl<'repo, 'iter> IntoIterator for &'iter Tree<'repo> { + type Item = TreeEntry<'iter>; + type IntoIter = TreeIter<'iter>; + fn into_iter(self) -> Self::IntoIter { + self.iter() + } +} + +/// Create a new tree entry from the raw pointer provided. +/// +/// The lifetime of the entry is tied to the tree provided and the function +/// is unsafe because the validity of the pointer cannot be guaranteed. +pub unsafe fn entry_from_raw_const<'tree>(raw: *const raw::git_tree_entry) + -> TreeEntry<'tree> { + TreeEntry { + raw: raw as *mut raw::git_tree_entry, + owned: false, + _marker: marker::PhantomData, + } +} + +impl<'tree> TreeEntry<'tree> { + /// Get the id of the object pointed by the entry + pub fn id(&self) -> Oid { + unsafe { Binding::from_raw(raw::git_tree_entry_id(&*self.raw)) } + } + + /// Get the filename of a tree entry + /// + /// Returns `None` if the name is not valid utf-8 + pub fn name(&self) -> Option<&str> { + str::from_utf8(self.name_bytes()).ok() + } + + /// Get the filename of a tree entry + pub fn name_bytes(&self) -> &[u8] { + unsafe { + ::opt_bytes(self, raw::git_tree_entry_name(&*self.raw())).unwrap() + } + } + + /// Convert a tree entry to the object it points to. 
+ pub fn to_object<'a>(&self, repo: &'a Repository) + -> Result, Error> { + let mut ret = ptr::null_mut(); + unsafe { + try_call!(raw::git_tree_entry_to_object(&mut ret, repo.raw(), + &*self.raw())); + Ok(Binding::from_raw(ret)) + } + } + + /// Get the type of the object pointed by the entry + pub fn kind(&self) -> Option { + ObjectType::from_raw(unsafe { raw::git_tree_entry_type(&*self.raw) }) + } + + /// Get the UNIX file attributes of a tree entry + pub fn filemode(&self) -> i32 { + unsafe { raw::git_tree_entry_filemode(&*self.raw) as i32 } + } + + /// Get the raw UNIX file attributes of a tree entry + pub fn filemode_raw(&self) -> i32 { + unsafe { raw::git_tree_entry_filemode_raw(&*self.raw) as i32 } + } + + /// Convert this entry of any lifetime into an owned signature with a static + /// lifetime. + /// + /// This will use the `Clone::clone` implementation under the hood. + pub fn to_owned(&self) -> TreeEntry<'static> { + unsafe { + let me = mem::transmute::<&TreeEntry<'tree>, &TreeEntry<'static>>(self); + me.clone() + } + } +} + +impl<'a> Binding for TreeEntry<'a> { + type Raw = *mut raw::git_tree_entry; + unsafe fn from_raw(raw: *mut raw::git_tree_entry) -> TreeEntry<'a> { + TreeEntry { + raw: raw, + owned: true, + _marker: marker::PhantomData, + } + } + fn raw(&self) -> *mut raw::git_tree_entry { self.raw } +} + +impl<'a> Clone for TreeEntry<'a> { + fn clone(&self) -> TreeEntry<'a> { + let mut ret = ptr::null_mut(); + unsafe { + assert_eq!(raw::git_tree_entry_dup(&mut ret, &*self.raw()), 0); + Binding::from_raw(ret) + } + } +} + +impl<'a> PartialOrd for TreeEntry<'a> { + fn partial_cmp(&self, other: &TreeEntry<'a>) -> Option { + Some(self.cmp(other)) + } +} +impl<'a> Ord for TreeEntry<'a> { + fn cmp(&self, other: &TreeEntry<'a>) -> Ordering { + match unsafe { raw::git_tree_entry_cmp(&*self.raw(), &*other.raw()) } { + 0 => Ordering::Equal, + n if n < 0 => Ordering::Less, + _ => Ordering::Greater, + } + } +} + +impl<'a> PartialEq for TreeEntry<'a> { + fn eq(&self, other: &TreeEntry<'a>) -> bool { + self.cmp(other) == Ordering::Equal + } +} +impl<'a> Eq for TreeEntry<'a> {} + +impl<'a> Drop for TreeEntry<'a> { + fn drop(&mut self) { + if self.owned { + unsafe { raw::git_tree_entry_free(self.raw) } + } + } +} + +impl<'tree> Iterator for TreeIter<'tree> { + type Item = TreeEntry<'tree>; + fn next(&mut self) -> Option> { + self.range.next().and_then(|i| self.tree.get(i)) + } + fn size_hint(&self) -> (usize, Option) { self.range.size_hint() } +} +impl<'tree> DoubleEndedIterator for TreeIter<'tree> { + fn next_back(&mut self) -> Option> { + self.range.next_back().and_then(|i| self.tree.get(i)) + } +} +impl<'tree> ExactSizeIterator for TreeIter<'tree> {} + +#[cfg(test)] +mod tests { + use {Repository,Tree,TreeEntry,ObjectType,Object}; + use tempdir::TempDir; + use std::fs::File; + use std::io::prelude::*; + use std::path::Path; + + pub struct TestTreeIter<'a> { + entries: Vec>, + repo: &'a Repository, + } + + impl<'a> Iterator for TestTreeIter<'a> { + type Item = TreeEntry<'a>; + + fn next(&mut self) -> Option > { + if self.entries.is_empty() { + None + } else { + let entry = self.entries.remove(0); + + match entry.kind() { + Some(ObjectType::Tree) => { + let obj: Object<'a> = entry.to_object(self.repo).unwrap(); + + let tree: &Tree<'a> = obj.as_tree().unwrap(); + + for entry in tree.iter() { + self.entries.push(entry.to_owned()); + } + } + _ => {} + } + + Some(entry) + } + } + } + + fn tree_iter<'repo>(tree: &Tree<'repo>, repo: &'repo Repository) + -> TestTreeIter<'repo> { + let mut 
initial = vec![]; + + for entry in tree.iter() { + initial.push(entry.to_owned()); + } + + TestTreeIter { + entries: initial, + repo: repo, + } + } + + #[test] + fn smoke_tree_iter() { + let (td, repo) = ::test::repo_init(); + + setup_repo(&td, &repo); + + let head = repo.head().unwrap(); + let target = head.target().unwrap(); + let commit = repo.find_commit(target).unwrap(); + + let tree = repo.find_tree(commit.tree_id()).unwrap(); + assert_eq!(tree.id(), commit.tree_id()); + assert_eq!(tree.len(), 1); + + for entry in tree_iter(&tree, &repo) { + println!("iter entry {:?}", entry.name()); + } + } + + fn setup_repo(td: &TempDir, repo: &Repository) { + let mut index = repo.index().unwrap(); + File::create(&td.path().join("foo")).unwrap().write_all(b"foo").unwrap(); + index.add_path(Path::new("foo")).unwrap(); + let id = index.write_tree().unwrap(); + let sig = repo.signature().unwrap(); + let tree = repo.find_tree(id).unwrap(); + let parent = repo.find_commit(repo.head().unwrap().target() + .unwrap()).unwrap(); + repo.commit(Some("HEAD"), &sig, &sig, "another commit", + &tree, &[&parent]).unwrap(); + } + + #[test] + fn smoke() { + let (td, repo) = ::test::repo_init(); + + setup_repo(&td, &repo); + + let head = repo.head().unwrap(); + let target = head.target().unwrap(); + let commit = repo.find_commit(target).unwrap(); + + let tree = repo.find_tree(commit.tree_id()).unwrap(); + assert_eq!(tree.id(), commit.tree_id()); + assert_eq!(tree.len(), 1); + { + let e1 = tree.get(0).unwrap(); + assert!(e1 == tree.get_id(e1.id()).unwrap()); + assert!(e1 == tree.get_name("foo").unwrap()); + assert!(e1 == tree.get_path(Path::new("foo")).unwrap()); + assert_eq!(e1.name(), Some("foo")); + e1.to_object(&repo).unwrap(); + } + tree.into_object(); + + repo.find_object(commit.tree_id(), None).unwrap().as_tree().unwrap(); + repo.find_object(commit.tree_id(), None).unwrap().into_tree().ok().unwrap(); + } +} diff --git a/git2/src/treebuilder.rs b/git2/src/treebuilder.rs new file mode 100644 index 000000000..e8ea1057c --- /dev/null +++ b/git2/src/treebuilder.rs @@ -0,0 +1,199 @@ +use std::marker; +use std::ptr; + +use libc::{c_int, c_void}; + +use {panic, raw, tree, Error, Oid, Repository, TreeEntry}; +use util::{Binding, IntoCString}; + +/// Constructor for in-memory trees +pub struct TreeBuilder<'repo> { + raw: *mut raw::git_treebuilder, + _marker: marker::PhantomData<&'repo Repository>, +} + +impl<'repo> TreeBuilder<'repo> { + /// Clear all the entries in the builder + pub fn clear(&mut self) { + unsafe { raw::git_treebuilder_clear(self.raw) } + } + + /// Get the number of entries + pub fn len(&self) -> usize { + unsafe { raw::git_treebuilder_entrycount(self.raw) as usize } + } + + /// Return `true` if there is no entry + pub fn is_empty(&self) -> bool { + self.len() == 0 + } + + /// Get en entry from the builder from its filename + pub fn get
<P>
(&self, filename: P) -> Result, Error> + where P: IntoCString + { + let filename = try!(filename.into_c_string()); + unsafe { + let ret = raw::git_treebuilder_get(self.raw, filename.as_ptr()); + if ret.is_null() { + Ok(None) + } else { + Ok(Some(tree::entry_from_raw_const(ret))) + } + } + } + + /// Add or update an entry in the builder + /// + /// No attempt is made to ensure that the provided Oid points to + /// an object of a reasonable type (or any object at all). + /// + /// The mode given must be one of 0o040000, 0o100644, 0o100755, 0o120000 or + /// 0o160000 currently. + pub fn insert(&mut self, filename: P, oid: Oid, + filemode: i32) -> Result { + let filename = try!(filename.into_c_string()); + let filemode = filemode as raw::git_filemode_t; + + let mut ret = ptr::null(); + unsafe { + try_call!(raw::git_treebuilder_insert(&mut ret, self.raw, filename, + oid.raw(), filemode)); + Ok(tree::entry_from_raw_const(ret)) + } + } + + /// Remove an entry from the builder by its filename + pub fn remove(&mut self, filename: P) -> Result<(), Error> { + let filename = try!(filename.into_c_string()); + unsafe { + try_call!(raw::git_treebuilder_remove(self.raw, filename)); + } + Ok(()) + } + + /// Selectively remove entries from the tree + /// + /// Values for which the filter returns `true` will be kept. Note + /// that this behavior is different from the libgit2 C interface. + pub fn filter(&mut self, mut filter: F) + where F: FnMut(&TreeEntry) -> bool + { + let mut cb: &mut FilterCb = &mut filter; + let ptr = &mut cb as *mut _; + unsafe { + raw::git_treebuilder_filter(self.raw, filter_cb, ptr as *mut _); + panic::check(); + } + } + + /// Write the contents of the TreeBuilder as a Tree object and + /// return its Oid + pub fn write(&self) -> Result { + let mut raw = raw::git_oid { id: [0; raw::GIT_OID_RAWSZ] }; + unsafe { + try_call!(raw::git_treebuilder_write(&mut raw, self.raw())); + Ok(Binding::from_raw(&raw as *const _)) + } + } +} + +type FilterCb<'a> = FnMut(&TreeEntry) -> bool + 'a; + +extern fn filter_cb(entry: *const raw::git_tree_entry, + payload: *mut c_void) -> c_int { + let ret = panic::wrap(|| unsafe { + // There's no way to return early from git_treebuilder_filter. 
+ if panic::panicked() { + true + } else { + let entry = tree::entry_from_raw_const(entry); + let payload = payload as *mut &mut FilterCb; + (*payload)(&entry) + } + }); + if ret == Some(false) {1} else {0} +} + +impl<'repo> Binding for TreeBuilder<'repo> { + type Raw = *mut raw::git_treebuilder; + + unsafe fn from_raw(raw: *mut raw::git_treebuilder) -> TreeBuilder<'repo> { + TreeBuilder { raw: raw, _marker: marker::PhantomData } + } + fn raw(&self) -> *mut raw::git_treebuilder { self.raw } +} + +impl<'repo> Drop for TreeBuilder<'repo> { + fn drop(&mut self) { + unsafe { raw::git_treebuilder_free(self.raw) } + } +} + +#[cfg(test)] +mod tests { + use ObjectType; + + #[test] + fn smoke() { + let (_td, repo) = ::test::repo_init(); + + let mut builder = repo.treebuilder(None).unwrap(); + assert_eq!(builder.len(), 0); + let blob = repo.blob(b"data").unwrap(); + { + let entry = builder.insert("a", blob, 0o100644).unwrap(); + assert_eq!(entry.kind(), Some(ObjectType::Blob)); + } + builder.insert("b", blob, 0o100644).unwrap(); + assert_eq!(builder.len(), 2); + builder.remove("a").unwrap(); + assert_eq!(builder.len(), 1); + assert_eq!(builder.get("b").unwrap().unwrap().id(), blob); + builder.clear(); + assert_eq!(builder.len(), 0); + } + + #[test] + fn write() { + let (_td, repo) = ::test::repo_init(); + + let mut builder = repo.treebuilder(None).unwrap(); + let data = repo.blob(b"data").unwrap(); + builder.insert("name", data, 0o100644).unwrap(); + let tree = builder.write().unwrap(); + let tree = repo.find_tree(tree).unwrap(); + let entry = tree.get(0).unwrap(); + assert_eq!(entry.name(), Some("name")); + let blob = entry.to_object(&repo).unwrap(); + let blob = blob.as_blob().unwrap(); + assert_eq!(blob.content(), b"data"); + + let builder = repo.treebuilder(Some(&tree)).unwrap(); + assert_eq!(builder.len(), 1); + } + + #[test] + fn filter() { + let (_td, repo) = ::test::repo_init(); + + let mut builder = repo.treebuilder(None).unwrap(); + let blob = repo.blob(b"data").unwrap(); + let tree = { + let head = repo.head().unwrap() + .peel(ObjectType::Commit).unwrap(); + let head = head.as_commit().unwrap(); + head.tree_id() + }; + builder.insert("blob", blob, 0o100644).unwrap(); + builder.insert("dir", tree, 0o040000).unwrap(); + builder.insert("dir2", tree, 0o040000).unwrap(); + + builder.filter(|_| true); + assert_eq!(builder.len(), 3); + builder.filter(|e| e.kind().unwrap() != ObjectType::Blob); + assert_eq!(builder.len(), 2); + builder.filter(|_| false); + assert_eq!(builder.len(), 0); + } +} diff --git a/git2/src/util.rs b/git2/src/util.rs new file mode 100644 index 000000000..e111628ea --- /dev/null +++ b/git2/src/util.rs @@ -0,0 +1,152 @@ +use std::ffi::{CString, OsStr, OsString}; +use std::iter::IntoIterator; +use std::path::{Path, PathBuf}; +use libc::{c_char, size_t}; + +use {raw, Error}; + +#[doc(hidden)] +pub trait IsNull { + fn is_ptr_null(&self) -> bool; +} +impl IsNull for *const T { + fn is_ptr_null(&self) -> bool { + self.is_null() + } +} +impl IsNull for *mut T { + fn is_ptr_null(&self) -> bool { + self.is_null() + } +} + +#[doc(hidden)] +pub trait Binding: Sized { + type Raw; + + unsafe fn from_raw(raw: Self::Raw) -> Self; + fn raw(&self) -> Self::Raw; + + unsafe fn from_raw_opt(raw: T) -> Option + where T: Copy + IsNull, Self: Binding + { + if raw.is_ptr_null() { + None + } else { + Some(Binding::from_raw(raw)) + } + } +} + +pub fn iter2cstrs(iter: I) -> Result<(Vec, Vec<*const c_char>, + raw::git_strarray), Error> + where T: IntoCString, I: IntoIterator +{ + let cstrs: Vec<_> = 
try!(iter.into_iter().map(|i| i.into_c_string()).collect()); + let ptrs = cstrs.iter().map(|i| i.as_ptr()).collect::>(); + let raw = raw::git_strarray { + strings: ptrs.as_ptr() as *mut _, + count: ptrs.len() as size_t, + }; + Ok((cstrs, ptrs, raw)) +} + +#[cfg(unix)] +pub fn bytes2path(b: &[u8]) -> &Path { + use std::os::unix::prelude::*; + Path::new(OsStr::from_bytes(b)) +} +#[cfg(windows)] +pub fn bytes2path(b: &[u8]) -> &Path { + use std::str; + Path::new(str::from_utf8(b).unwrap()) +} + +/// A class of types that can be converted to C strings. +/// +/// These types are represented internally as byte slices and it is quite rare +/// for them to contain an interior 0 byte. +pub trait IntoCString { + /// Consume this container, converting it into a CString + fn into_c_string(self) -> Result; +} + +impl<'a, T: IntoCString + Clone> IntoCString for &'a T { + fn into_c_string(self) -> Result { + self.clone().into_c_string() + } +} + +impl<'a> IntoCString for &'a str { + fn into_c_string(self) -> Result { + Ok(try!(CString::new(self))) + } +} + +impl IntoCString for String { + fn into_c_string(self) -> Result { + Ok(try!(CString::new(self.into_bytes()))) + } +} + +impl IntoCString for CString { + fn into_c_string(self) -> Result { Ok(self) } +} + +impl<'a> IntoCString for &'a Path { + fn into_c_string(self) -> Result { + let s: &OsStr = self.as_ref(); + s.into_c_string() + } +} + +impl IntoCString for PathBuf { + fn into_c_string(self) -> Result { + let s: OsString = self.into(); + s.into_c_string() + } +} + +impl<'a> IntoCString for &'a OsStr { + fn into_c_string(self) -> Result { + self.to_os_string().into_c_string() + } +} + +impl IntoCString for OsString { + #[cfg(unix)] + fn into_c_string(self) -> Result { + use std::os::unix::prelude::*; + let s: &OsStr = self.as_ref(); + Ok(try!(CString::new(s.as_bytes()))) + } + #[cfg(windows)] + fn into_c_string(self) -> Result { + match self.to_str() { + Some(s) => s.into_c_string(), + None => Err(Error::from_str("only valid unicode paths are accepted \ + on windows")), + } + } +} + +impl<'a> IntoCString for &'a [u8] { + fn into_c_string(self) -> Result { + Ok(try!(CString::new(self))) + } +} + +impl IntoCString for Vec { + fn into_c_string(self) -> Result { + Ok(try!(CString::new(self))) + } +} + +pub fn into_opt_c_string(opt_s: Option) -> Result, Error> + where S: IntoCString +{ + match opt_s { + None => Ok(None), + Some(s) => Ok(Some(try!(s.into_c_string()))), + } +} diff --git a/glob/.cargo-checksum.json b/glob/.cargo-checksum.json new file mode 100644 index 000000000..9a6ec5217 --- /dev/null +++ b/glob/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{},"package":"8be18de09a56b60ed0edf84bc9df007e30040691af7acd1c41874faac5895bfb"} \ No newline at end of file diff --git a/glob/Cargo.toml b/glob/Cargo.toml new file mode 100644 index 000000000..48ba71906 --- /dev/null +++ b/glob/Cargo.toml @@ -0,0 +1,15 @@ +[package] + +name = "glob" +version = "0.2.11" +authors = ["The Rust Project Developers"] +license = "MIT/Apache-2.0" +homepage = "https://github.com/rust-lang/glob" +repository = "https://github.com/rust-lang/glob" +documentation = "https://doc.rust-lang.org/glob" +description = """ +Support for matching file paths against Unix shell style patterns. 
+""" + +[dev-dependencies] +tempdir = "0.3" diff --git a/glob/LICENSE-APACHE b/glob/LICENSE-APACHE new file mode 100644 index 000000000..16fe87b06 --- /dev/null +++ b/glob/LICENSE-APACHE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/glob/LICENSE-MIT b/glob/LICENSE-MIT new file mode 100644 index 000000000..39d4bdb5a --- /dev/null +++ b/glob/LICENSE-MIT @@ -0,0 +1,25 @@ +Copyright (c) 2014 The Rust Project Developers + +Permission is hereby granted, free of charge, to any +person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without +limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software +is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions +of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/glob/README.md b/glob/README.md new file mode 100644 index 000000000..86b112da6 --- /dev/null +++ b/glob/README.md @@ -0,0 +1,24 @@ +glob +==== + +Support for matching file paths against Unix shell style patterns. + +[![Build Status](https://travis-ci.org/rust-lang-nursery/glob.svg?branch=master)](https://travis-ci.org/rust-lang-nursery/glob) + +[Documentation](https://doc.rust-lang.org/glob) + +## Usage + +To use `glob`, add this to your `Cargo.toml`: + +```toml +[dependencies] +glob = "*" +``` + +And add this to your crate root: + +```rust +extern crate glob; +``` + diff --git a/glob/src/lib.rs b/glob/src/lib.rs new file mode 100644 index 000000000..a26b99689 --- /dev/null +++ b/glob/src/lib.rs @@ -0,0 +1,1312 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! Support for matching file paths against Unix shell style patterns. +//! +//! The `glob` and `glob_with` functions, in concert with the `Paths` +//! type, allow querying the filesystem for all files that match a particular +//! pattern - just like the libc `glob` function (for an example see the `glob` +//! documentation). The methods on the `Pattern` type provide functionality +//! for checking if individual paths match a particular pattern - in a similar +//! manner to the libc `fnmatch` function +//! For consistency across platforms, and for Windows support, this module +//! is implemented entirely in Rust rather than deferring to the libc +//! `glob`/`fnmatch` functions. 
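As a quick illustration of the matching API this module documentation describes, the sketch below matches paths against a `Pattern`, both with the default options and with case-insensitive `MatchOptions`. It is not part of the vendored crate, and it assumes the `MatchOptions` fields referenced later in this file (`case_sensitive`, `require_literal_separator`, `require_literal_leading_dot`) are public, as they are in the published crate:

```rust
extern crate glob;

use glob::{MatchOptions, Pattern};

fn main() {
    // Default options: `*` matches any sequence within one path component.
    assert!(Pattern::new("*.rs").unwrap().matches("lib.rs"));

    // Case-insensitive matching through explicit MatchOptions.
    let options = MatchOptions {
        case_sensitive: false,
        require_literal_separator: false,
        require_literal_leading_dot: false,
    };
    assert!(Pattern::new("readme.MD").unwrap().matches_with("README.md", &options));
}
```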
+ +#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", + html_favicon_url = "https://www.rust-lang.org/favicon.ico", + html_root_url = "https://doc.rust-lang.org/glob/")] +#![cfg_attr(all(test, windows), feature(std_misc))] + +use std::ascii::AsciiExt; +use std::cmp; +use std::fmt; +use std::fs; +use std::io::prelude::*; +use std::io; +use std::path::{self, Path, PathBuf, Component}; +use std::str::FromStr; +use std::error::Error; + +use PatternToken::{Char, AnyChar, AnySequence, AnyRecursiveSequence, AnyWithin}; +use PatternToken::AnyExcept; +use CharSpecifier::{SingleChar, CharRange}; +use MatchResult::{Match, SubPatternDoesntMatch, EntirePatternDoesntMatch}; + +/// An iterator that yields `Path`s from the filesystem that match a particular +/// pattern. +/// +/// Note that it yields `GlobResult` in order to report any `IoErrors` that may +/// arise during iteration. If a directory matches but is unreadable, +/// thereby preventing its contents from being checked for matches, a +/// `GlobError` is returned to express this. +/// +/// See the `glob` function for more details. +pub struct Paths { + dir_patterns: Vec, + require_dir: bool, + options: MatchOptions, + todo: Vec>, + scope: Option, +} + +/// Return an iterator that produces all the Paths that match the given pattern, +/// which may be absolute or relative to the current working directory. +/// +/// This may return an error if the pattern is invalid. +/// +/// This method uses the default match options and is equivalent to calling +/// `glob_with(pattern, MatchOptions::new())`. Use `glob_with` directly if you +/// want to use non-default match options. +/// +/// When iterating, each result is a `GlobResult` which expresses the +/// possibility that there was an `IoError` when attempting to read the contents +/// of the matched path. In other words, each item returned by the iterator +/// will either be an `Ok(Path)` if the path matched, or an `Err(GlobError)` if +/// the path (partially) matched _but_ its contents could not be read in order +/// to determine if its contents matched. +/// +/// See the `Paths` documentation for more information. +/// +/// # Example +/// +/// Consider a directory `/media/pictures` containing only the files +/// `kittens.jpg`, `puppies.jpg` and `hamsters.gif`: +/// +/// ```rust +/// use glob::glob; +/// +/// for entry in glob("/media/pictures/*.jpg").unwrap() { +/// match entry { +/// Ok(path) => println!("{:?}", path.display()), +/// +/// // if the path matched but was unreadable, +/// // thereby preventing its contents from matching +/// Err(e) => println!("{:?}", e), +/// } +/// } +/// ``` +/// +/// The above code will print: +/// +/// ```ignore +/// /media/pictures/kittens.jpg +/// /media/pictures/puppies.jpg +/// ``` +/// +/// If you want to ignore unreadable paths, you can use something like +/// `filter_map`: +/// +/// ```rust +/// use glob::glob; +/// use std::result::Result; +/// +/// for path in glob("/media/pictures/*.jpg").unwrap().filter_map(Result::ok) { +/// println!("{}", path.display()); +/// } +/// ``` +/// +pub fn glob(pattern: &str) -> Result { + glob_with(pattern, &MatchOptions::new()) +} + +/// Return an iterator that produces all the Paths that match the given pattern, +/// which may be absolute or relative to the current working directory. +/// +/// This may return an error if the pattern is invalid. +/// +/// This function accepts Unix shell style patterns as described by +/// `Pattern::new(..)`. 
The options given are passed through unchanged to +/// `Pattern::matches_with(..)` with the exception that +/// `require_literal_separator` is always set to `true` regardless of the value +/// passed to this function. +/// +/// Paths are yielded in alphabetical order. +pub fn glob_with(pattern: &str, options: &MatchOptions) -> Result { + // make sure that the pattern is valid first, else early return with error + let _compiled = try!(Pattern::new(pattern)); + + #[cfg(windows)] + fn check_windows_verbatim(p: &Path) -> bool { + use std::path::Prefix; + match p.components().next() { + Some(Component::Prefix(ref p)) => p.kind().is_verbatim(), + _ => false, + } + } + #[cfg(not(windows))] + fn check_windows_verbatim(_: &Path) -> bool { + false + } + + #[cfg(windows)] + fn to_scope(p: &Path) -> PathBuf { + // FIXME handle volume relative paths here + p.to_path_buf() + } + #[cfg(not(windows))] + fn to_scope(p: &Path) -> PathBuf { + p.to_path_buf() + } + + let mut components = Path::new(pattern).components().peekable(); + loop { + match components.peek() { + Some(&Component::Prefix(..)) | + Some(&Component::RootDir) => { + components.next(); + } + _ => break, + } + } + let rest = components.map(|s| s.as_os_str()).collect::(); + let normalized_pattern = Path::new(pattern).iter().collect::(); + let root_len = normalized_pattern.to_str().unwrap().len() - rest.to_str().unwrap().len(); + let root = if root_len > 0 { + Some(Path::new(&pattern[..root_len])) + } else { + None + }; + + if root_len > 0 && check_windows_verbatim(root.unwrap()) { + // FIXME: How do we want to handle verbatim paths? I'm inclined to + // return nothing, since we can't very well find all UNC shares with a + // 1-letter server name. + return Ok(Paths { + dir_patterns: Vec::new(), + require_dir: false, + options: options.clone(), + todo: Vec::new(), + scope: None, + }); + } + + let scope = root.map(to_scope).unwrap_or_else(|| PathBuf::from(".")); + + let mut dir_patterns = Vec::new(); + let components = pattern[cmp::min(root_len, pattern.len())..] + .split_terminator(path::is_separator); + + for component in components { + let compiled = try!(Pattern::new(component)); + dir_patterns.push(compiled); + } + + if root_len == pattern.len() { + dir_patterns.push(Pattern { + original: "".to_string(), + tokens: Vec::new(), + is_recursive: false, + }); + } + + let require_dir = pattern.chars().next_back().map(path::is_separator) == Some(true); + let todo = Vec::new(); + + Ok(Paths { + dir_patterns: dir_patterns, + require_dir: require_dir, + options: options.clone(), + todo: todo, + scope: Some(scope), + }) +} + +/// A glob iteration error. +/// +/// This is typically returned when a particular path cannot be read +/// to determine if its contents match the glob pattern. This is possible +/// if the program lacks the permissions, for example. +#[derive(Debug)] +pub struct GlobError { + path: PathBuf, + error: io::Error, +} + +impl GlobError { + /// The Path that the error corresponds to. + pub fn path(&self) -> &Path { + &self.path + } + + /// The error in question. 
+ pub fn error(&self) -> &io::Error { + &self.error + } +} + +impl Error for GlobError { + fn description(&self) -> &str { + self.error.description() + } + fn cause(&self) -> Option<&Error> { + Some(&self.error) + } +} + +impl fmt::Display for GlobError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, + "attempting to read `{}` resulted in an error: {}", + self.path.display(), + self.error) + } +} + +fn is_dir(p: &Path) -> bool { + fs::metadata(p).map(|m| m.is_dir()).unwrap_or(false) +} + +/// An alias for a glob iteration result. +/// +/// This represents either a matched path or a glob iteration error, +/// such as failing to read a particular directory's contents. +pub type GlobResult = Result; + +impl Iterator for Paths { + type Item = GlobResult; + + fn next(&mut self) -> Option { + // the todo buffer hasn't been initialized yet, so it's done at this + // point rather than in glob() so that the errors are unified that is, + // failing to fill the buffer is an iteration error construction of the + // iterator (i.e. glob()) only fails if it fails to compile the Pattern + if let Some(scope) = self.scope.take() { + if self.dir_patterns.len() > 0 { + // Shouldn't happen, but we're using -1 as a special index. + assert!(self.dir_patterns.len() < !0 as usize); + + fill_todo(&mut self.todo, &self.dir_patterns, 0, &scope, &self.options); + } + } + + loop { + if self.dir_patterns.is_empty() || self.todo.is_empty() { + return None; + } + + let (path, mut idx) = match self.todo.pop().unwrap() { + Ok(pair) => pair, + Err(e) => return Some(Err(e)), + }; + + // idx -1: was already checked by fill_todo, maybe path was '.' or + // '..' that we can't match here because of normalization. + if idx == !0 as usize { + if self.require_dir && !is_dir(&path) { + continue; + } + return Some(Ok(path)); + } + + if self.dir_patterns[idx].is_recursive { + let mut next = idx; + + // collapse consecutive recursive patterns + while (next + 1) < self.dir_patterns.len() && + self.dir_patterns[next + 1].is_recursive { + next += 1; + } + + if is_dir(&path) { + // the path is a directory, so it's a match + + // push this directory's contents + fill_todo(&mut self.todo, + &self.dir_patterns, + next, + &path, + &self.options); + + if next == self.dir_patterns.len() - 1 { + // pattern ends in recursive pattern, so return this + // directory as a result + return Some(Ok(path)); + } else { + // advanced to the next pattern for this path + idx = next + 1; + } + } else if next != self.dir_patterns.len() - 1 { + // advanced to the next pattern for this path + idx = next + 1; + } else { + // not a directory and it's the last pattern, meaning no match + continue; + } + } + + // not recursive, so match normally + if self.dir_patterns[idx].matches_with({ + match path.file_name().and_then(|s| s.to_str()) { + // FIXME (#9639): How do we handle non-utf8 filenames? + // Ignore them for now Ideally we'd still match them + // against a * + None => continue, + Some(x) => x + } + }, &self.options) { + if idx == self.dir_patterns.len() - 1 { + // it is not possible for a pattern to match a directory + // *AND* its children so we don't need to check the + // children + + if !self.require_dir || is_dir(&path) { + return Some(Ok(path)); + } + } else { + fill_todo(&mut self.todo, &self.dir_patterns, + idx + 1, &path, &self.options); + } + } + } + } +} + +/// A pattern parsing error. 
+#[derive(Debug)] +#[allow(missing_copy_implementations)] +pub struct PatternError { + /// The approximate character index of where the error occurred. + pub pos: usize, + + /// A message describing the error. + pub msg: &'static str, +} + +impl Error for PatternError { + fn description(&self) -> &str { + self.msg + } +} + +impl fmt::Display for PatternError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, + "Pattern syntax error near position {}: {}", + self.pos, + self.msg) + } +} + +/// A compiled Unix shell style pattern. +/// +/// `?` matches any single character +/// +/// `*` matches any (possibly empty) sequence of characters +/// +/// `**` matches the current directory and arbitrary subdirectories. This +/// sequence **must** form a single path component, so both `**a` and `b**` are +/// invalid and will result in an error. A sequence of more than two +/// consecutive `*` characters is also invalid. +/// +/// `[...]` matches any character inside the brackets. +/// Character sequences can also specify ranges +/// of characters, as ordered by Unicode, so e.g. `[0-9]` specifies any +/// character between 0 and 9 inclusive. An unclosed bracket is invalid. +/// +/// `[!...]` is the negation of `[...]`, i.e. it matches any characters **not** +/// in the brackets. +/// +/// The metacharacters `?`, `*`, `[`, `]` can be matched by using brackets +/// (e.g. `[?]`). When a `]` occurs immediately following `[` or `[!` then +/// it is interpreted as being part of, rather then ending, the character +/// set, so `]` and NOT `]` can be matched by `[]]` and `[!]]` respectively. +/// The `-` character can be specified inside a character sequence pattern by +/// placing it at the start or the end, e.g. `[abc-]`. +#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Default, Debug)] +pub struct Pattern { + original: String, + tokens: Vec, + is_recursive: bool, +} + +/// Show the original glob pattern. +impl fmt::Display for Pattern { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + self.original.fmt(f) + } +} + +impl FromStr for Pattern { + type Err = PatternError; + + fn from_str(s: &str) -> Result { + Pattern::new(s) + } +} + +#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] +enum PatternToken { + Char(char), + AnyChar, + AnySequence, + AnyRecursiveSequence, + AnyWithin(Vec), + AnyExcept(Vec), +} + +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] +enum CharSpecifier { + SingleChar(char), + CharRange(char, char), +} + +#[derive(Copy, Clone, PartialEq)] +enum MatchResult { + Match, + SubPatternDoesntMatch, + EntirePatternDoesntMatch, +} + +const ERROR_WILDCARDS: &'static str = "wildcards are either regular `*` or recursive `**`"; +const ERROR_RECURSIVE_WILDCARDS: &'static str = "recursive wildcards must form a single path \ + component"; +const ERROR_INVALID_RANGE: &'static str = "invalid range pattern"; + +impl Pattern { + /// This function compiles Unix shell style patterns. + /// + /// An invalid glob pattern will yield an error. + pub fn new(pattern: &str) -> Result { + + let chars = pattern.chars().collect::>(); + let mut tokens = Vec::new(); + let mut is_recursive = false; + let mut i = 0; + + while i < chars.len() { + match chars[i] { + '?' 
=> { + tokens.push(AnyChar); + i += 1; + } + '*' => { + let old = i; + + while i < chars.len() && chars[i] == '*' { + i += 1; + } + + let count = i - old; + + if count > 2 { + return Err(PatternError { + pos: old + 2, + msg: ERROR_WILDCARDS, + }); + } else if count == 2 { + // ** can only be an entire path component + // i.e. a/**/b is valid, but a**/b or a/**b is not + // invalid matches are treated literally + let is_valid = if i == 2 || path::is_separator(chars[i - count - 1]) { + // it ends in a '/' + if i < chars.len() && path::is_separator(chars[i]) { + i += 1; + true + // or the pattern ends here + // this enables the existing globbing mechanism + } else if i == chars.len() { + true + // `**` ends in non-separator + } else { + return Err(PatternError { + pos: i, + msg: ERROR_RECURSIVE_WILDCARDS, + }); + } + // `**` begins with non-separator + } else { + return Err(PatternError { + pos: old - 1, + msg: ERROR_RECURSIVE_WILDCARDS, + }); + }; + + let tokens_len = tokens.len(); + + if is_valid { + // collapse consecutive AnyRecursiveSequence to a + // single one + if !(tokens_len > 1 && tokens[tokens_len - 1] == AnyRecursiveSequence) { + is_recursive = true; + tokens.push(AnyRecursiveSequence); + } + } + } else { + tokens.push(AnySequence); + } + } + '[' => { + + if i + 4 <= chars.len() && chars[i + 1] == '!' { + match chars[i + 3..].iter().position(|x| *x == ']') { + None => (), + Some(j) => { + let chars = &chars[i + 2..i + 3 + j]; + let cs = parse_char_specifiers(chars); + tokens.push(AnyExcept(cs)); + i += j + 4; + continue; + } + } + } else if i + 3 <= chars.len() && chars[i + 1] != '!' { + match chars[i + 2..].iter().position(|x| *x == ']') { + None => (), + Some(j) => { + let cs = parse_char_specifiers(&chars[i + 1..i + 2 + j]); + tokens.push(AnyWithin(cs)); + i += j + 3; + continue; + } + } + } + + // if we get here then this is not a valid range pattern + return Err(PatternError { + pos: i, + msg: ERROR_INVALID_RANGE, + }); + } + c => { + tokens.push(Char(c)); + i += 1; + } + } + } + + Ok(Pattern { + tokens: tokens, + original: pattern.to_string(), + is_recursive: is_recursive, + }) + } + + /// Escape metacharacters within the given string by surrounding them in + /// brackets. The resulting string will, when compiled into a `Pattern`, + /// match the input string and nothing else. + pub fn escape(s: &str) -> String { + let mut escaped = String::new(); + for c in s.chars() { + match c { + // note that ! does not need escaping because it is only special + // inside brackets + '?' | '*' | '[' | ']' => { + escaped.push('['); + escaped.push(c); + escaped.push(']'); + } + c => { + escaped.push(c); + } + } + } + escaped + } + + /// Return if the given `str` matches this `Pattern` using the default + /// match options (i.e. `MatchOptions::new()`). + /// + /// # Example + /// + /// ```rust + /// use glob::Pattern; + /// + /// assert!(Pattern::new("c?t").unwrap().matches("cat")); + /// assert!(Pattern::new("k[!e]tteh").unwrap().matches("kitteh")); + /// assert!(Pattern::new("d*g").unwrap().matches("doog")); + /// ``` + pub fn matches(&self, str: &str) -> bool { + self.matches_with(str, &MatchOptions::new()) + } + + /// Return if the given `Path`, when converted to a `str`, matches this + /// `Pattern` using the default match options (i.e. `MatchOptions::new()`). 
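`Pattern::escape` above has no inline example; the following brief one (not part of the vendored source) is based on the bracket-wrapping behaviour its documentation describes:

```rust
extern crate glob;

use glob::Pattern;

fn main() {
    // Metacharacters are wrapped in brackets so they only match themselves.
    let escaped = Pattern::escape("one*two?[three]");
    assert_eq!(escaped, "one[*]two[?][[]three[]]");

    // The escaped pattern matches the original string literally.
    assert!(Pattern::new(&escaped).unwrap().matches("one*two?[three]"));
}
```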
+ pub fn matches_path(&self, path: &Path) -> bool { + // FIXME (#9639): This needs to handle non-utf8 paths + path.to_str().map_or(false, |s| self.matches(s)) + } + + /// Return if the given `str` matches this `Pattern` using the specified + /// match options. + pub fn matches_with(&self, str: &str, options: &MatchOptions) -> bool { + self.matches_from(true, str.chars(), 0, options) == Match + } + + /// Return if the given `Path`, when converted to a `str`, matches this + /// `Pattern` using the specified match options. + pub fn matches_path_with(&self, path: &Path, options: &MatchOptions) -> bool { + // FIXME (#9639): This needs to handle non-utf8 paths + path.to_str().map_or(false, |s| self.matches_with(s, options)) + } + + /// Access the original glob pattern. + pub fn as_str<'a>(&'a self) -> &'a str { + &self.original + } + + fn matches_from(&self, + mut follows_separator: bool, + mut file: std::str::Chars, + i: usize, + options: &MatchOptions) + -> MatchResult { + + for (ti, token) in self.tokens[i..].iter().enumerate() { + match *token { + AnySequence | AnyRecursiveSequence => { + // ** must be at the start. + debug_assert!(match *token { + AnyRecursiveSequence => follows_separator, + _ => true, + }); + + // Empty match + match self.matches_from(follows_separator, file.clone(), i + ti + 1, options) { + SubPatternDoesntMatch => (), // keep trying + m => return m, + }; + + while let Some(c) = file.next() { + if follows_separator && options.require_literal_leading_dot && c == '.' { + return SubPatternDoesntMatch; + } + follows_separator = path::is_separator(c); + match *token { + AnyRecursiveSequence if !follows_separator => continue, + AnySequence if options.require_literal_separator && + follows_separator => return SubPatternDoesntMatch, + _ => (), + } + match self.matches_from(follows_separator, + file.clone(), + i + ti + 1, + options) { + SubPatternDoesntMatch => (), // keep trying + m => return m, + } + } + } + _ => { + let c = match file.next() { + Some(c) => c, + None => return EntirePatternDoesntMatch, + }; + + let is_sep = path::is_separator(c); + + if !match *token { + AnyChar | AnyWithin(..) | AnyExcept(..) + if (options.require_literal_separator && is_sep) || + (follows_separator && options.require_literal_leading_dot && + c == '.') => false, + AnyChar => true, + AnyWithin(ref specifiers) => in_char_specifiers(&specifiers, c, options), + AnyExcept(ref specifiers) => !in_char_specifiers(&specifiers, c, options), + Char(c2) => chars_eq(c, c2, options.case_sensitive), + AnySequence | AnyRecursiveSequence => unreachable!(), + } { + return SubPatternDoesntMatch; + } + follows_separator = is_sep; + } + } + } + + // Iter is fused. + if file.next().is_none() { + Match + } else { + SubPatternDoesntMatch + } + } +} + +// Fills `todo` with paths under `path` to be matched by `patterns[idx]`, +// special-casing patterns to match `.` and `..`, and avoiding `readdir()` +// calls when there are no metacharacters in the pattern. +fn fill_todo(todo: &mut Vec>, + patterns: &[Pattern], + idx: usize, + path: &Path, + options: &MatchOptions) { + // convert a pattern that's just many Char(_) to a string + fn pattern_as_str(pattern: &Pattern) -> Option { + let mut s = String::new(); + for token in pattern.tokens.iter() { + match *token { + Char(c) => s.push(c), + _ => return None, + } + } + return Some(s); + } + + let add = |todo: &mut Vec<_>, next_path: PathBuf| { + if idx + 1 == patterns.len() { + // We know it's good, so don't make the iterator match this path + // against the pattern again. 
In particular, it can't match + // . or .. globs since these never show up as path components. + todo.push(Ok((next_path, !0 as usize))); + } else { + fill_todo(todo, patterns, idx + 1, &next_path, options); + } + }; + + let pattern = &patterns[idx]; + let is_dir = is_dir(path); + let curdir = path == Path::new("."); + match pattern_as_str(pattern) { + Some(s) => { + // This pattern component doesn't have any metacharacters, so we + // don't need to read the current directory to know where to + // continue. So instead of passing control back to the iterator, + // we can just check for that one entry and potentially recurse + // right away. + let special = "." == s || ".." == s; + let next_path = if curdir { + PathBuf::from(s) + } else { + path.join(&s) + }; + if (special && is_dir) || (!special && fs::metadata(&next_path).is_ok()) { + add(todo, next_path); + } + } + None if is_dir => { + let dirs = fs::read_dir(path).and_then(|d| { + d.map(|e| { + e.map(|e| { + if curdir { + PathBuf::from(e.path().file_name().unwrap()) + } else { + e.path() + } + }) + }) + .collect::, _>>() + }); + match dirs { + Ok(mut children) => { + children.sort_by(|p1, p2| p2.file_name().cmp(&p1.file_name())); + todo.extend(children.into_iter().map(|x| Ok((x, idx)))); + + // Matching the special directory entries . and .. that + // refer to the current and parent directory respectively + // requires that the pattern has a leading dot, even if the + // `MatchOptions` field `require_literal_leading_dot` is not + // set. + if pattern.tokens.len() > 0 && pattern.tokens[0] == Char('.') { + for &special in [".", ".."].iter() { + if pattern.matches_with(special, options) { + add(todo, path.join(special)); + } + } + } + } + Err(e) => { + todo.push(Err(GlobError { + path: path.to_path_buf(), + error: e, + })); + } + } + } + None => { + // not a directory, nothing more to find + } + } +} + +fn parse_char_specifiers(s: &[char]) -> Vec { + let mut cs = Vec::new(); + let mut i = 0; + while i < s.len() { + if i + 3 <= s.len() && s[i + 1] == '-' { + cs.push(CharRange(s[i], s[i + 2])); + i += 3; + } else { + cs.push(SingleChar(s[i])); + i += 1; + } + } + cs +} + +fn in_char_specifiers(specifiers: &[CharSpecifier], c: char, options: &MatchOptions) -> bool { + + for &specifier in specifiers.iter() { + match specifier { + SingleChar(sc) => { + if chars_eq(c, sc, options.case_sensitive) { + return true; + } + } + CharRange(start, end) => { + + // FIXME: work with non-ascii chars properly (issue #1347) + if !options.case_sensitive && c.is_ascii() && start.is_ascii() && end.is_ascii() { + + let start = start.to_ascii_lowercase(); + let end = end.to_ascii_lowercase(); + + let start_up = start.to_uppercase().next().unwrap(); + let end_up = end.to_uppercase().next().unwrap(); + + // only allow case insensitive matching when + // both start and end are within a-z or A-Z + if start != start_up && end != end_up { + let c = c.to_ascii_lowercase(); + if c >= start && c <= end { + return true; + } + } + } + + if c >= start && c <= end { + return true; + } + } + } + } + + false +} + +/// A helper function to determine if two chars are (possibly case-insensitively) equal. 
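+///
+/// A small illustrative sketch of the intended behaviour (not exhaustive):
+/// with `case_sensitive == false`, ASCII letters compare equal ignoring case,
+/// and on Windows any two path separator characters compare equal.
+///
+/// ```rust,ignore
+/// assert!(chars_eq('a', 'A', false));
+/// assert!(!chars_eq('a', 'A', true));
+/// ```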
+fn chars_eq(a: char, b: char, case_sensitive: bool) -> bool {
+    if cfg!(windows) && path::is_separator(a) && path::is_separator(b) {
+        true
+    } else if !case_sensitive && a.is_ascii() && b.is_ascii() {
+        // FIXME: work with non-ascii chars properly (issue #9084)
+        a.to_ascii_lowercase() == b.to_ascii_lowercase()
+    } else {
+        a == b
+    }
+}
+
+
+/// Configuration options to modify the behaviour of `Pattern::matches_with(..)`.
+#[allow(missing_copy_implementations)]
+#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+pub struct MatchOptions {
+    /// Whether or not patterns should be matched in a case-sensitive manner.
+    /// This currently only considers upper/lower case relationships between
+    /// ASCII characters, but in future this might be extended to work with
+    /// Unicode.
+    pub case_sensitive: bool,
+
+    /// If this is true then path-component separator characters (e.g. `/` on
+    /// Posix) must be matched by a literal `/`, rather than by `*` or `?` or
+    /// `[...]`.
+    pub require_literal_separator: bool,
+
+    /// If this is true then paths that contain components that start with a `.`
+    /// will not match unless the `.` appears literally in the pattern: `*`, `?`, `**`,
+    /// or `[...]` will not match. This is useful because such files are
+    /// conventionally considered hidden on Unix systems and it might be
+    /// desirable to skip them when listing files.
+    pub require_literal_leading_dot: bool,
+}
+
+impl MatchOptions {
+    /// Constructs a new `MatchOptions` with default field values. This is used
+    /// when calling functions that do not take an explicit `MatchOptions`
+    /// parameter.
+    ///
+    /// This function always returns this value:
+    ///
+    /// ```rust,ignore
+    /// MatchOptions {
+    ///     case_sensitive: true,
+    ///     require_literal_separator: false,
+ /// require_literal_leading_dot: false + /// } + /// ``` + pub fn new() -> MatchOptions { + MatchOptions { + case_sensitive: true, + require_literal_separator: false, + require_literal_leading_dot: false, + } + } +} + +#[cfg(test)] +mod test { + use std::path::Path; + use super::{glob, Pattern, MatchOptions}; + + #[test] + fn test_pattern_from_str() { + assert!("a*b".parse::().unwrap().matches("a_b")); + assert!("a/**b".parse::().unwrap_err().pos == 4); + } + + #[test] + fn test_wildcard_errors() { + assert!(Pattern::new("a/**b").unwrap_err().pos == 4); + assert!(Pattern::new("a/bc**").unwrap_err().pos == 3); + assert!(Pattern::new("a/*****").unwrap_err().pos == 4); + assert!(Pattern::new("a/b**c**d").unwrap_err().pos == 2); + assert!(Pattern::new("a**b").unwrap_err().pos == 0); + } + + #[test] + fn test_unclosed_bracket_errors() { + assert!(Pattern::new("abc[def").unwrap_err().pos == 3); + assert!(Pattern::new("abc[!def").unwrap_err().pos == 3); + assert!(Pattern::new("abc[").unwrap_err().pos == 3); + assert!(Pattern::new("abc[!").unwrap_err().pos == 3); + assert!(Pattern::new("abc[d").unwrap_err().pos == 3); + assert!(Pattern::new("abc[!d").unwrap_err().pos == 3); + assert!(Pattern::new("abc[]").unwrap_err().pos == 3); + assert!(Pattern::new("abc[!]").unwrap_err().pos == 3); + } + + #[test] + fn test_glob_errors() { + assert!(glob("a/**b").err().unwrap().pos == 4); + assert!(glob("abc[def").err().unwrap().pos == 3); + } + + // this test assumes that there is a /root directory and that + // the user running this test is not root or otherwise doesn't + // have permission to read its contents + #[cfg(unix)] + #[test] + fn test_iteration_errors() { + use std::io; + let mut iter = glob("/root/*").unwrap(); + + // GlobErrors shouldn't halt iteration + let next = iter.next(); + assert!(next.is_some()); + + let err = next.unwrap(); + assert!(err.is_err()); + + let err = err.err().unwrap(); + assert!(err.path() == Path::new("/root")); + assert!(err.error().kind() == io::ErrorKind::PermissionDenied); + } + + #[test] + fn test_absolute_pattern() { + assert!(glob("/").unwrap().next().is_some()); + assert!(glob("//").unwrap().next().is_some()); + + // assume that the filesystem is not empty! 
+ assert!(glob("/*").unwrap().next().is_some()); + + #[cfg(not(windows))] + fn win() {} + + #[cfg(windows)] + fn win() { + use std::env::current_dir; + use std::ffi::AsOsStr; + + // check windows absolute paths with host/device components + let root_with_device = current_dir() + .ok() + .and_then(|p| p.prefix().map(|p| p.join("*"))) + .unwrap(); + // FIXME (#9639): This needs to handle non-utf8 paths + assert!(glob(root_with_device.as_os_str().to_str().unwrap()).unwrap().next().is_some()); + } + win() + } + + #[test] + fn test_wildcards() { + assert!(Pattern::new("a*b").unwrap().matches("a_b")); + assert!(Pattern::new("a*b*c").unwrap().matches("abc")); + assert!(!Pattern::new("a*b*c").unwrap().matches("abcd")); + assert!(Pattern::new("a*b*c").unwrap().matches("a_b_c")); + assert!(Pattern::new("a*b*c").unwrap().matches("a___b___c")); + assert!(Pattern::new("abc*abc*abc").unwrap().matches("abcabcabcabcabcabcabc")); + assert!(!Pattern::new("abc*abc*abc").unwrap().matches("abcabcabcabcabcabcabca")); + assert!(Pattern::new("a*a*a*a*a*a*a*a*a") + .unwrap() + .matches("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa")); + assert!(Pattern::new("a*b[xyz]c*d").unwrap().matches("abxcdbxcddd")); + } + + #[test] + fn test_recursive_wildcards() { + let pat = Pattern::new("some/**/needle.txt").unwrap(); + assert!(pat.matches("some/needle.txt")); + assert!(pat.matches("some/one/needle.txt")); + assert!(pat.matches("some/one/two/needle.txt")); + assert!(pat.matches("some/other/needle.txt")); + assert!(!pat.matches("some/other/notthis.txt")); + + // a single ** should be valid, for globs + // Should accept anything + let pat = Pattern::new("**").unwrap(); + assert!(pat.is_recursive); + assert!(pat.matches("abcde")); + assert!(pat.matches("")); + assert!(pat.matches(".asdf")); + assert!(pat.matches("/x/.asdf")); + + + // collapse consecutive wildcards + let pat = Pattern::new("some/**/**/needle.txt").unwrap(); + assert!(pat.matches("some/needle.txt")); + assert!(pat.matches("some/one/needle.txt")); + assert!(pat.matches("some/one/two/needle.txt")); + assert!(pat.matches("some/other/needle.txt")); + assert!(!pat.matches("some/other/notthis.txt")); + + // ** can begin the pattern + let pat = Pattern::new("**/test").unwrap(); + assert!(pat.matches("one/two/test")); + assert!(pat.matches("one/test")); + assert!(pat.matches("test")); + + // /** can begin the pattern + let pat = Pattern::new("/**/test").unwrap(); + assert!(pat.matches("/one/two/test")); + assert!(pat.matches("/one/test")); + assert!(pat.matches("/test")); + assert!(!pat.matches("/one/notthis")); + assert!(!pat.matches("/notthis")); + + // Only start sub-patterns on start of path segment. 
+ let pat = Pattern::new("**/.*").unwrap(); + assert!(pat.matches(".abc")); + assert!(pat.matches("abc/.abc")); + assert!(!pat.matches("ab.c")); + assert!(!pat.matches("abc/ab.c")); + } + + #[test] + fn test_lots_of_files() { + // this is a good test because it touches lots of differently named files + glob("/*/*/*/*").unwrap().skip(10000).next(); + } + + #[test] + fn test_range_pattern() { + + let pat = Pattern::new("a[0-9]b").unwrap(); + for i in 0..10 { + assert!(pat.matches(&format!("a{}b", i))); + } + assert!(!pat.matches("a_b")); + + let pat = Pattern::new("a[!0-9]b").unwrap(); + for i in 0..10 { + assert!(!pat.matches(&format!("a{}b", i))); + } + assert!(pat.matches("a_b")); + + let pats = ["[a-z123]", "[1a-z23]", "[123a-z]"]; + for &p in pats.iter() { + let pat = Pattern::new(p).unwrap(); + for c in "abcdefghijklmnopqrstuvwxyz".chars() { + assert!(pat.matches(&c.to_string())); + } + for c in "ABCDEFGHIJKLMNOPQRSTUVWXYZ".chars() { + let options = MatchOptions { case_sensitive: false, ..MatchOptions::new() }; + assert!(pat.matches_with(&c.to_string(), &options)); + } + assert!(pat.matches("1")); + assert!(pat.matches("2")); + assert!(pat.matches("3")); + } + + let pats = ["[abc-]", "[-abc]", "[a-c-]"]; + for &p in pats.iter() { + let pat = Pattern::new(p).unwrap(); + assert!(pat.matches("a")); + assert!(pat.matches("b")); + assert!(pat.matches("c")); + assert!(pat.matches("-")); + assert!(!pat.matches("d")); + } + + let pat = Pattern::new("[2-1]").unwrap(); + assert!(!pat.matches("1")); + assert!(!pat.matches("2")); + + assert!(Pattern::new("[-]").unwrap().matches("-")); + assert!(!Pattern::new("[!-]").unwrap().matches("-")); + } + + #[test] + fn test_pattern_matches() { + let txt_pat = Pattern::new("*hello.txt").unwrap(); + assert!(txt_pat.matches("hello.txt")); + assert!(txt_pat.matches("gareth_says_hello.txt")); + assert!(txt_pat.matches("some/path/to/hello.txt")); + assert!(txt_pat.matches("some\\path\\to\\hello.txt")); + assert!(txt_pat.matches("/an/absolute/path/to/hello.txt")); + assert!(!txt_pat.matches("hello.txt-and-then-some")); + assert!(!txt_pat.matches("goodbye.txt")); + + let dir_pat = Pattern::new("*some/path/to/hello.txt").unwrap(); + assert!(dir_pat.matches("some/path/to/hello.txt")); + assert!(dir_pat.matches("a/bigger/some/path/to/hello.txt")); + assert!(!dir_pat.matches("some/path/to/hello.txt-and-then-some")); + assert!(!dir_pat.matches("some/other/path/to/hello.txt")); + } + + #[test] + fn test_pattern_escape() { + let s = "_[_]_?_*_!_"; + assert_eq!(Pattern::escape(s), "_[[]_[]]_[?]_[*]_!_".to_string()); + assert!(Pattern::new(&Pattern::escape(s)).unwrap().matches(s)); + } + + #[test] + fn test_pattern_matches_case_insensitive() { + + let pat = Pattern::new("aBcDeFg").unwrap(); + let options = MatchOptions { + case_sensitive: false, + require_literal_separator: false, + require_literal_leading_dot: false, + }; + + assert!(pat.matches_with("aBcDeFg", &options)); + assert!(pat.matches_with("abcdefg", &options)); + assert!(pat.matches_with("ABCDEFG", &options)); + assert!(pat.matches_with("AbCdEfG", &options)); + } + + #[test] + fn test_pattern_matches_case_insensitive_range() { + + let pat_within = Pattern::new("[a]").unwrap(); + let pat_except = Pattern::new("[!a]").unwrap(); + + let options_case_insensitive = MatchOptions { + case_sensitive: false, + require_literal_separator: false, + require_literal_leading_dot: false, + }; + let options_case_sensitive = MatchOptions { + case_sensitive: true, + require_literal_separator: false, + require_literal_leading_dot: 
false, + }; + + assert!(pat_within.matches_with("a", &options_case_insensitive)); + assert!(pat_within.matches_with("A", &options_case_insensitive)); + assert!(!pat_within.matches_with("A", &options_case_sensitive)); + + assert!(!pat_except.matches_with("a", &options_case_insensitive)); + assert!(!pat_except.matches_with("A", &options_case_insensitive)); + assert!(pat_except.matches_with("A", &options_case_sensitive)); + } + + #[test] + fn test_pattern_matches_require_literal_separator() { + + let options_require_literal = MatchOptions { + case_sensitive: true, + require_literal_separator: true, + require_literal_leading_dot: false, + }; + let options_not_require_literal = MatchOptions { + case_sensitive: true, + require_literal_separator: false, + require_literal_leading_dot: false, + }; + + assert!(Pattern::new("abc/def").unwrap().matches_with("abc/def", &options_require_literal)); + assert!(!Pattern::new("abc?def") + .unwrap() + .matches_with("abc/def", &options_require_literal)); + assert!(!Pattern::new("abc*def") + .unwrap() + .matches_with("abc/def", &options_require_literal)); + assert!(!Pattern::new("abc[/]def") + .unwrap() + .matches_with("abc/def", &options_require_literal)); + + assert!(Pattern::new("abc/def") + .unwrap() + .matches_with("abc/def", &options_not_require_literal)); + assert!(Pattern::new("abc?def") + .unwrap() + .matches_with("abc/def", &options_not_require_literal)); + assert!(Pattern::new("abc*def") + .unwrap() + .matches_with("abc/def", &options_not_require_literal)); + assert!(Pattern::new("abc[/]def") + .unwrap() + .matches_with("abc/def", &options_not_require_literal)); + } + + #[test] + fn test_pattern_matches_require_literal_leading_dot() { + + let options_require_literal_leading_dot = MatchOptions { + case_sensitive: true, + require_literal_separator: false, + require_literal_leading_dot: true, + }; + let options_not_require_literal_leading_dot = MatchOptions { + case_sensitive: true, + require_literal_separator: false, + require_literal_leading_dot: false, + }; + + let f = |options| Pattern::new("*.txt").unwrap().matches_with(".hello.txt", options); + assert!(f(&options_not_require_literal_leading_dot)); + assert!(!f(&options_require_literal_leading_dot)); + + let f = |options| Pattern::new(".*.*").unwrap().matches_with(".hello.txt", options); + assert!(f(&options_not_require_literal_leading_dot)); + assert!(f(&options_require_literal_leading_dot)); + + let f = |options| Pattern::new("aaa/bbb/*").unwrap().matches_with("aaa/bbb/.ccc", options); + assert!(f(&options_not_require_literal_leading_dot)); + assert!(!f(&options_require_literal_leading_dot)); + + let f = |options| { + Pattern::new("aaa/bbb/*").unwrap().matches_with("aaa/bbb/c.c.c.", options) + }; + assert!(f(&options_not_require_literal_leading_dot)); + assert!(f(&options_require_literal_leading_dot)); + + let f = |options| Pattern::new("aaa/bbb/.*").unwrap().matches_with("aaa/bbb/.ccc", options); + assert!(f(&options_not_require_literal_leading_dot)); + assert!(f(&options_require_literal_leading_dot)); + + let f = |options| Pattern::new("aaa/?bbb").unwrap().matches_with("aaa/.bbb", options); + assert!(f(&options_not_require_literal_leading_dot)); + assert!(!f(&options_require_literal_leading_dot)); + + let f = |options| Pattern::new("aaa/[.]bbb").unwrap().matches_with("aaa/.bbb", options); + assert!(f(&options_not_require_literal_leading_dot)); + assert!(!f(&options_require_literal_leading_dot)); + + let f = |options| Pattern::new("**/*").unwrap().matches_with(".bbb", options); + 
assert!(f(&options_not_require_literal_leading_dot)); + assert!(!f(&options_require_literal_leading_dot)); + } + + #[test] + fn test_matches_path() { + // on windows, (Path::new("a/b").as_str().unwrap() == "a\\b"), so this + // tests that / and \ are considered equivalent on windows + assert!(Pattern::new("a/b").unwrap().matches_path(&Path::new("a/b"))); + } + + #[test] + fn test_path_join() { + let pattern = Path::new("one").join(&Path::new("**/*.rs")); + assert!(Pattern::new(pattern.to_str().unwrap()).is_ok()); + } +} diff --git a/glob/tests/glob-std.rs b/glob/tests/glob-std.rs new file mode 100644 index 000000000..c42641850 --- /dev/null +++ b/glob/tests/glob-std.rs @@ -0,0 +1,278 @@ +// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// ignore-windows TempDir may cause IoError on windows: #10462 + +#![cfg_attr(test, deny(warnings))] + +extern crate glob; +extern crate tempdir; + +use glob::glob; +use std::env; +use std::path::PathBuf; +use std::fs; +use tempdir::TempDir; + +#[test] +fn main() { + fn mk_file(path: &str, directory: bool) { + if directory { + fs::create_dir(path).unwrap(); + } else { + fs::File::create(path).unwrap(); + } + } + + fn glob_vec(pattern: &str) -> Vec { + glob(pattern).unwrap().map(|r| r.unwrap()).collect() + } + + let root = TempDir::new("glob-tests"); + let root = root.ok().expect("Should have created a temp directory"); + assert!(env::set_current_dir(root.path()).is_ok()); + + mk_file("aaa", true); + mk_file("aaa/apple", true); + mk_file("aaa/orange", true); + mk_file("aaa/tomato", true); + mk_file("aaa/tomato/tomato.txt", false); + mk_file("aaa/tomato/tomoto.txt", false); + mk_file("bbb", true); + mk_file("bbb/specials", true); + mk_file("bbb/specials/!", false); + + // windows does not allow `*` or `?` characters to exist in filenames + if env::consts::FAMILY != "windows" { + mk_file("bbb/specials/*", false); + mk_file("bbb/specials/?", false); + } + + mk_file("bbb/specials/[", false); + mk_file("bbb/specials/]", false); + mk_file("ccc", true); + mk_file("xyz", true); + mk_file("xyz/x", false); + mk_file("xyz/y", false); + mk_file("xyz/z", false); + + mk_file("r", true); + mk_file("r/current_dir.md", false); + mk_file("r/one", true); + mk_file("r/one/a.md", false); + mk_file("r/one/another", true); + mk_file("r/one/another/a.md", false); + mk_file("r/one/another/deep", true); + mk_file("r/one/another/deep/spelunking.md", false); + mk_file("r/another", true); + mk_file("r/another/a.md", false); + mk_file("r/two", true); + mk_file("r/two/b.md", false); + mk_file("r/three", true); + mk_file("r/three/c.md", false); + + // all recursive entities + assert_eq!(glob_vec("r/**"), vec!( + PathBuf::from("r/another"), + PathBuf::from("r/one"), + PathBuf::from("r/one/another"), + PathBuf::from("r/one/another/deep"), + PathBuf::from("r/three"), + PathBuf::from("r/two"))); + + // collapse consecutive recursive patterns + assert_eq!(glob_vec("r/**/**"), vec!( + PathBuf::from("r/another"), + PathBuf::from("r/one"), + PathBuf::from("r/one/another"), + PathBuf::from("r/one/another/deep"), + PathBuf::from("r/three"), + PathBuf::from("r/two"))); + + assert_eq!(glob_vec("r/**/*"), vec!( + PathBuf::from("r/another"), + PathBuf::from("r/another/a.md"), + 
PathBuf::from("r/current_dir.md"), + PathBuf::from("r/one"), + PathBuf::from("r/one/a.md"), + PathBuf::from("r/one/another"), + PathBuf::from("r/one/another/a.md"), + PathBuf::from("r/one/another/deep"), + PathBuf::from("r/one/another/deep/spelunking.md"), + PathBuf::from("r/three"), + PathBuf::from("r/three/c.md"), + PathBuf::from("r/two"), + PathBuf::from("r/two/b.md"))); + + // followed by a wildcard + assert_eq!(glob_vec("r/**/*.md"), vec!( + PathBuf::from("r/another/a.md"), + PathBuf::from("r/current_dir.md"), + PathBuf::from("r/one/a.md"), + PathBuf::from("r/one/another/a.md"), + PathBuf::from("r/one/another/deep/spelunking.md"), + PathBuf::from("r/three/c.md"), + PathBuf::from("r/two/b.md"))); + + // followed by a precise pattern + assert_eq!(glob_vec("r/one/**/a.md"), vec!( + PathBuf::from("r/one/a.md"), + PathBuf::from("r/one/another/a.md"))); + + // followed by another recursive pattern + // collapses consecutive recursives into one + assert_eq!(glob_vec("r/one/**/**/a.md"), vec!( + PathBuf::from("r/one/a.md"), + PathBuf::from("r/one/another/a.md"))); + + // followed by two precise patterns + assert_eq!(glob_vec("r/**/another/a.md"), vec!( + PathBuf::from("r/another/a.md"), + PathBuf::from("r/one/another/a.md"))); + + assert_eq!(glob_vec(""), Vec::::new()); + assert_eq!(glob_vec("."), vec!(PathBuf::from("."))); + assert_eq!(glob_vec(".."), vec!(PathBuf::from(".."))); + + assert_eq!(glob_vec("aaa"), vec!(PathBuf::from("aaa"))); + assert_eq!(glob_vec("aaa/"), vec!(PathBuf::from("aaa"))); + assert_eq!(glob_vec("a"), Vec::::new()); + assert_eq!(glob_vec("aa"), Vec::::new()); + assert_eq!(glob_vec("aaaa"), Vec::::new()); + + assert_eq!(glob_vec("aaa/apple"), vec!(PathBuf::from("aaa/apple"))); + assert_eq!(glob_vec("aaa/apple/nope"), Vec::::new()); + + // windows should support both / and \ as directory separators + if env::consts::FAMILY == "windows" { + assert_eq!(glob_vec("aaa\\apple"), vec!(PathBuf::from("aaa/apple"))); + } + + assert_eq!(glob_vec("???/"), vec!( + PathBuf::from("aaa"), + PathBuf::from("bbb"), + PathBuf::from("ccc"), + PathBuf::from("xyz"))); + + assert_eq!(glob_vec("aaa/tomato/tom?to.txt"), vec!( + PathBuf::from("aaa/tomato/tomato.txt"), + PathBuf::from("aaa/tomato/tomoto.txt"))); + + assert_eq!(glob_vec("xyz/?"), vec!( + PathBuf::from("xyz/x"), + PathBuf::from("xyz/y"), + PathBuf::from("xyz/z"))); + + assert_eq!(glob_vec("a*"), vec!(PathBuf::from("aaa"))); + assert_eq!(glob_vec("*a*"), vec!(PathBuf::from("aaa"))); + assert_eq!(glob_vec("a*a"), vec!(PathBuf::from("aaa"))); + assert_eq!(glob_vec("aaa*"), vec!(PathBuf::from("aaa"))); + assert_eq!(glob_vec("*aaa"), vec!(PathBuf::from("aaa"))); + assert_eq!(glob_vec("*aaa*"), vec!(PathBuf::from("aaa"))); + assert_eq!(glob_vec("*a*a*a*"), vec!(PathBuf::from("aaa"))); + assert_eq!(glob_vec("aaa*/"), vec!(PathBuf::from("aaa"))); + + assert_eq!(glob_vec("aaa/*"), vec!( + PathBuf::from("aaa/apple"), + PathBuf::from("aaa/orange"), + PathBuf::from("aaa/tomato"))); + + assert_eq!(glob_vec("aaa/*a*"), vec!( + PathBuf::from("aaa/apple"), + PathBuf::from("aaa/orange"), + PathBuf::from("aaa/tomato"))); + + assert_eq!(glob_vec("*/*/*.txt"), vec!( + PathBuf::from("aaa/tomato/tomato.txt"), + PathBuf::from("aaa/tomato/tomoto.txt"))); + + assert_eq!(glob_vec("*/*/t[aob]m?to[.]t[!y]t"), vec!( + PathBuf::from("aaa/tomato/tomato.txt"), + PathBuf::from("aaa/tomato/tomoto.txt"))); + + assert_eq!(glob_vec("./aaa"), vec!(PathBuf::from("aaa"))); + assert_eq!(glob_vec("./*"), glob_vec("*")); + assert_eq!(glob_vec("*/..").pop().unwrap(), 
PathBuf::from("xyz/..")); + assert_eq!(glob_vec("aaa/../bbb"), vec!(PathBuf::from("aaa/../bbb"))); + assert_eq!(glob_vec("nonexistent/../bbb"), Vec::::new()); + assert_eq!(glob_vec("aaa/tomato/tomato.txt/.."), Vec::::new()); + + assert_eq!(glob_vec("aaa/tomato/tomato.txt/"), Vec::::new()); + + assert_eq!(glob_vec("aa[a]"), vec!(PathBuf::from("aaa"))); + assert_eq!(glob_vec("aa[abc]"), vec!(PathBuf::from("aaa"))); + assert_eq!(glob_vec("a[bca]a"), vec!(PathBuf::from("aaa"))); + assert_eq!(glob_vec("aa[b]"), Vec::::new()); + assert_eq!(glob_vec("aa[xyz]"), Vec::::new()); + assert_eq!(glob_vec("aa[]]"), Vec::::new()); + + assert_eq!(glob_vec("aa[!b]"), vec!(PathBuf::from("aaa"))); + assert_eq!(glob_vec("aa[!bcd]"), vec!(PathBuf::from("aaa"))); + assert_eq!(glob_vec("a[!bcd]a"), vec!(PathBuf::from("aaa"))); + assert_eq!(glob_vec("aa[!a]"), Vec::::new()); + assert_eq!(glob_vec("aa[!abc]"), Vec::::new()); + + assert_eq!(glob_vec("bbb/specials/[[]"), vec!(PathBuf::from("bbb/specials/["))); + assert_eq!(glob_vec("bbb/specials/!"), vec!(PathBuf::from("bbb/specials/!"))); + assert_eq!(glob_vec("bbb/specials/[]]"), vec!(PathBuf::from("bbb/specials/]"))); + + if env::consts::FAMILY != "windows" { + assert_eq!(glob_vec("bbb/specials/[*]"), vec!(PathBuf::from("bbb/specials/*"))); + assert_eq!(glob_vec("bbb/specials/[?]"), vec!(PathBuf::from("bbb/specials/?"))); + } + + if env::consts::FAMILY == "windows" { + + assert_eq!(glob_vec("bbb/specials/[![]"), vec!( + PathBuf::from("bbb/specials/!"), + PathBuf::from("bbb/specials/]"))); + + assert_eq!(glob_vec("bbb/specials/[!]]"), vec!( + PathBuf::from("bbb/specials/!"), + PathBuf::from("bbb/specials/["))); + + assert_eq!(glob_vec("bbb/specials/[!!]"), vec!( + PathBuf::from("bbb/specials/["), + PathBuf::from("bbb/specials/]"))); + + } else { + + assert_eq!(glob_vec("bbb/specials/[![]"), vec!( + PathBuf::from("bbb/specials/!"), + PathBuf::from("bbb/specials/*"), + PathBuf::from("bbb/specials/?"), + PathBuf::from("bbb/specials/]"))); + + assert_eq!(glob_vec("bbb/specials/[!]]"), vec!( + PathBuf::from("bbb/specials/!"), + PathBuf::from("bbb/specials/*"), + PathBuf::from("bbb/specials/?"), + PathBuf::from("bbb/specials/["))); + + assert_eq!(glob_vec("bbb/specials/[!!]"), vec!( + PathBuf::from("bbb/specials/*"), + PathBuf::from("bbb/specials/?"), + PathBuf::from("bbb/specials/["), + PathBuf::from("bbb/specials/]"))); + + assert_eq!(glob_vec("bbb/specials/[!*]"), vec!( + PathBuf::from("bbb/specials/!"), + PathBuf::from("bbb/specials/?"), + PathBuf::from("bbb/specials/["), + PathBuf::from("bbb/specials/]"))); + + assert_eq!(glob_vec("bbb/specials/[!?]"), vec!( + PathBuf::from("bbb/specials/!"), + PathBuf::from("bbb/specials/*"), + PathBuf::from("bbb/specials/["), + PathBuf::from("bbb/specials/]"))); + + } +} diff --git a/globset/.cargo-checksum.json b/globset/.cargo-checksum.json new file mode 100644 index 000000000..8d47891bf --- /dev/null +++ b/globset/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{},"package":"4743617a7464bbda3c8aec8558ff2f9429047e025771037df561d383337ff865"} \ No newline at end of file diff --git a/globset/COPYING b/globset/COPYING new file mode 100644 index 000000000..bb9c20a09 --- /dev/null +++ b/globset/COPYING @@ -0,0 +1,3 @@ +This project is dual-licensed under the Unlicense and MIT licenses. + +You may use this code under the terms of either license. 
diff --git a/globset/Cargo.toml b/globset/Cargo.toml new file mode 100644 index 000000000..16b710ac8 --- /dev/null +++ b/globset/Cargo.toml @@ -0,0 +1,46 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "globset" +version = "0.4.2" +authors = ["Andrew Gallant "] +description = "Cross platform single glob and glob set matching. Glob set matching is the\nprocess of matching one or more glob patterns against a single candidate path\nsimultaneously, and returning all of the globs that matched.\n" +homepage = "https://github.com/BurntSushi/ripgrep/tree/master/globset" +documentation = "https://docs.rs/globset" +readme = "README.md" +keywords = ["regex", "glob", "multiple", "set", "pattern"] +license = "Unlicense/MIT" +repository = "https://github.com/BurntSushi/ripgrep/tree/master/globset" + +[lib] +name = "globset" +bench = false +[dependencies.aho-corasick] +version = "0.6.8" + +[dependencies.fnv] +version = "1.0.6" + +[dependencies.log] +version = "0.4.5" + +[dependencies.memchr] +version = "2.0.2" + +[dependencies.regex] +version = "1.0.5" +[dev-dependencies.glob] +version = "0.2.11" + +[features] +simd-accel = [] diff --git a/globset/LICENSE-MIT b/globset/LICENSE-MIT new file mode 100644 index 000000000..3b0a5dc09 --- /dev/null +++ b/globset/LICENSE-MIT @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015 Andrew Gallant + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/globset/README.md b/globset/README.md new file mode 100644 index 000000000..5d54172aa --- /dev/null +++ b/globset/README.md @@ -0,0 +1,122 @@ +globset +======= +Cross platform single glob and glob set matching. Glob set matching is the +process of matching one or more glob patterns against a single candidate path +simultaneously, and returning all of the globs that matched. 
+ +[![Linux build status](https://api.travis-ci.org/BurntSushi/ripgrep.svg)](https://travis-ci.org/BurntSushi/ripgrep) +[![Windows build status](https://ci.appveyor.com/api/projects/status/github/BurntSushi/ripgrep?svg=true)](https://ci.appveyor.com/project/BurntSushi/ripgrep) +[![](https://img.shields.io/crates/v/globset.svg)](https://crates.io/crates/globset) + +Dual-licensed under MIT or the [UNLICENSE](http://unlicense.org). + +### Documentation + +[https://docs.rs/globset](https://docs.rs/globset) + +### Usage + +Add this to your `Cargo.toml`: + +```toml +[dependencies] +globset = "0.3" +``` + +and this to your crate root: + +```rust +extern crate globset; +``` + +### Example: one glob + +This example shows how to match a single glob against a single file path. + +```rust +use globset::Glob; + +let glob = Glob::new("*.rs")?.compile_matcher(); + +assert!(glob.is_match("foo.rs")); +assert!(glob.is_match("foo/bar.rs")); +assert!(!glob.is_match("Cargo.toml")); +``` + +### Example: configuring a glob matcher + +This example shows how to use a `GlobBuilder` to configure aspects of match +semantics. In this example, we prevent wildcards from matching path separators. + +```rust +use globset::GlobBuilder; + +let glob = GlobBuilder::new("*.rs") + .literal_separator(true).build()?.compile_matcher(); + +assert!(glob.is_match("foo.rs")); +assert!(!glob.is_match("foo/bar.rs")); // no longer matches +assert!(!glob.is_match("Cargo.toml")); +``` + +### Example: match multiple globs at once + +This example shows how to match multiple glob patterns at once. + +```rust +use globset::{Glob, GlobSetBuilder}; + +let mut builder = GlobSetBuilder::new(); +// A GlobBuilder can be used to configure each glob's match semantics +// independently. +builder.add(Glob::new("*.rs")?); +builder.add(Glob::new("src/lib.rs")?); +builder.add(Glob::new("src/**/foo.rs")?); +let set = builder.build()?; + +assert_eq!(set.matches("src/bar/baz/foo.rs"), vec![0, 2]); +``` + +### Performance + +This crate implements globs by converting them to regular expressions, and +executing them with the +[`regex`](https://github.com/rust-lang-nursery/regex) +crate. + +For single glob matching, performance of this crate should be roughly on par +with the performance of the +[`glob`](https://github.com/rust-lang-nursery/glob) +crate. (`*_regex` correspond to benchmarks for this library while `*_glob` +correspond to benchmarks for the `glob` library.) +Optimizations in the `regex` crate may propel this library past `glob`, +particularly when matching longer paths. + +``` +test ext_glob ... bench: 425 ns/iter (+/- 21) +test ext_regex ... bench: 175 ns/iter (+/- 10) +test long_glob ... bench: 182 ns/iter (+/- 11) +test long_regex ... bench: 173 ns/iter (+/- 10) +test short_glob ... bench: 69 ns/iter (+/- 4) +test short_regex ... bench: 83 ns/iter (+/- 2) +``` + +The primary performance advantage of this crate is when matching multiple +globs against a single path. With the `glob` crate, one must match each glob +synchronously, one after the other. In this crate, many can be matched +simultaneously. For example: + +``` +test many_short_glob ... bench: 1,063 ns/iter (+/- 47) +test many_short_regex_set ... bench: 186 ns/iter (+/- 11) +``` + +### Comparison with the [`glob`](https://github.com/rust-lang-nursery/glob) crate + +* Supports alternate "or" globs, e.g., `*.{foo,bar}`. +* Can match non-UTF-8 file paths correctly. +* Supports matching multiple globs at once. 
+* Doesn't provide a recursive directory iterator of matching file paths, + although I believe this crate should grow one eventually. +* Supports case insensitive and require-literal-separator match options, but + **doesn't** support the require-literal-leading-dot option. diff --git a/globset/UNLICENSE b/globset/UNLICENSE new file mode 100644 index 000000000..68a49daad --- /dev/null +++ b/globset/UNLICENSE @@ -0,0 +1,24 @@ +This is free and unencumbered software released into the public domain. + +Anyone is free to copy, modify, publish, use, compile, sell, or +distribute this software, either in source code form or as a compiled +binary, for any purpose, commercial or non-commercial, and by any +means. + +In jurisdictions that recognize copyright laws, the author or authors +of this software dedicate any and all copyright interest in the +software to the public domain. We make this dedication for the benefit +of the public at large and to the detriment of our heirs and +successors. We intend this dedication to be an overt act of +relinquishment in perpetuity of all present and future rights to this +software under copyright law. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR +OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. + +For more information, please refer to diff --git a/globset/benches/bench.rs b/globset/benches/bench.rs new file mode 100644 index 000000000..e142ed72e --- /dev/null +++ b/globset/benches/bench.rs @@ -0,0 +1,121 @@ +/*! +This module benchmarks the glob implementation. For benchmarks on the ripgrep +tool itself, see the benchsuite directory. 
+*/ +#![feature(test)] + +extern crate glob; +extern crate globset; +#[macro_use] +extern crate lazy_static; +extern crate regex; +extern crate test; + +use std::ffi::OsStr; +use std::path::Path; + +use globset::{Candidate, Glob, GlobMatcher, GlobSet, GlobSetBuilder}; + +const EXT: &'static str = "some/a/bigger/path/to/the/crazy/needle.txt"; +const EXT_PAT: &'static str = "*.txt"; + +const SHORT: &'static str = "some/needle.txt"; +const SHORT_PAT: &'static str = "some/**/needle.txt"; + +const LONG: &'static str = "some/a/bigger/path/to/the/crazy/needle.txt"; +const LONG_PAT: &'static str = "some/**/needle.txt"; + +fn new_glob(pat: &str) -> glob::Pattern { + glob::Pattern::new(pat).unwrap() +} + +fn new_reglob(pat: &str) -> GlobMatcher { + Glob::new(pat).unwrap().compile_matcher() +} + +fn new_reglob_many(pats: &[&str]) -> GlobSet { + let mut builder = GlobSetBuilder::new(); + for pat in pats { + builder.add(Glob::new(pat).unwrap()); + } + builder.build().unwrap() +} + +#[bench] +fn ext_glob(b: &mut test::Bencher) { + let pat = new_glob(EXT_PAT); + b.iter(|| assert!(pat.matches(EXT))); +} + +#[bench] +fn ext_regex(b: &mut test::Bencher) { + let set = new_reglob(EXT_PAT); + let cand = Candidate::new(EXT); + b.iter(|| assert!(set.is_match_candidate(&cand))); +} + +#[bench] +fn short_glob(b: &mut test::Bencher) { + let pat = new_glob(SHORT_PAT); + b.iter(|| assert!(pat.matches(SHORT))); +} + +#[bench] +fn short_regex(b: &mut test::Bencher) { + let set = new_reglob(SHORT_PAT); + let cand = Candidate::new(SHORT); + b.iter(|| assert!(set.is_match_candidate(&cand))); +} + +#[bench] +fn long_glob(b: &mut test::Bencher) { + let pat = new_glob(LONG_PAT); + b.iter(|| assert!(pat.matches(LONG))); +} + +#[bench] +fn long_regex(b: &mut test::Bencher) { + let set = new_reglob(LONG_PAT); + let cand = Candidate::new(LONG); + b.iter(|| assert!(set.is_match_candidate(&cand))); +} + +const MANY_SHORT_GLOBS: &'static [&'static str] = &[ + // Taken from a random .gitignore on my system. + ".*.swp", + "tags", + "target", + "*.lock", + "tmp", + "*.csv", + "*.fst", + "*-got", + "*.csv.idx", + "words", + "98m*", + "dict", + "test", + "months", +]; + +const MANY_SHORT_SEARCH: &'static str = "98m-blah.csv.idx"; + +#[bench] +fn many_short_glob(b: &mut test::Bencher) { + let pats: Vec<_> = MANY_SHORT_GLOBS.iter().map(|&s| new_glob(s)).collect(); + b.iter(|| { + let mut count = 0; + for pat in &pats { + if pat.matches(MANY_SHORT_SEARCH) { + count += 1; + } + } + assert_eq!(2, count); + }) +} + +#[bench] +fn many_short_regex_set(b: &mut test::Bencher) { + let set = new_reglob_many(MANY_SHORT_GLOBS); + b.iter(|| assert_eq!(2, set.matches(MANY_SHORT_SEARCH).iter().count())); +} diff --git a/globset/src/glob.rs b/globset/src/glob.rs new file mode 100644 index 000000000..cbbc7bad3 --- /dev/null +++ b/globset/src/glob.rs @@ -0,0 +1,1452 @@ +use std::fmt; +use std::hash; +use std::iter; +use std::ops::{Deref, DerefMut}; +use std::path::{Path, is_separator}; +use std::str; + +use regex; +use regex::bytes::Regex; + +use {Candidate, Error, ErrorKind, new_regex}; + +/// Describes a matching strategy for a particular pattern. +/// +/// This provides a way to more quickly determine whether a pattern matches +/// a particular file path in a way that scales with a large number of +/// patterns. For example, if many patterns are of the form `*.ext`, then it's +/// possible to test whether any of those patterns matches by looking up a +/// file path's extension in a hash table. 
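+///
+/// As a rough illustration (assuming default `GlobOptions`), a simple
+/// extension pattern reduces to an extension lookup rather than a regex:
+///
+/// ```rust,ignore
+/// let glob = Glob::new("*.rs").unwrap();
+/// assert_eq!(MatchStrategy::new(&glob), MatchStrategy::Extension(".rs".to_string()));
+/// ```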
+#[derive(Clone, Debug, Eq, PartialEq)] +pub enum MatchStrategy { + /// A pattern matches if and only if the entire file path matches this + /// literal string. + Literal(String), + /// A pattern matches if and only if the file path's basename matches this + /// literal string. + BasenameLiteral(String), + /// A pattern matches if and only if the file path's extension matches this + /// literal string. + Extension(String), + /// A pattern matches if and only if this prefix literal is a prefix of the + /// candidate file path. + Prefix(String), + /// A pattern matches if and only if this prefix literal is a prefix of the + /// candidate file path. + /// + /// An exception: if `component` is true, then `suffix` must appear at the + /// beginning of a file path or immediately following a `/`. + Suffix { + /// The actual suffix. + suffix: String, + /// Whether this must start at the beginning of a path component. + component: bool, + }, + /// A pattern matches only if the given extension matches the file path's + /// extension. Note that this is a necessary but NOT sufficient criterion. + /// Namely, if the extension matches, then a full regex search is still + /// required. + RequiredExtension(String), + /// A regex needs to be used for matching. + Regex, +} + +impl MatchStrategy { + /// Returns a matching strategy for the given pattern. + pub fn new(pat: &Glob) -> MatchStrategy { + if let Some(lit) = pat.basename_literal() { + MatchStrategy::BasenameLiteral(lit) + } else if let Some(lit) = pat.literal() { + MatchStrategy::Literal(lit) + } else if let Some(ext) = pat.ext() { + MatchStrategy::Extension(ext) + } else if let Some(prefix) = pat.prefix() { + MatchStrategy::Prefix(prefix) + } else if let Some((suffix, component)) = pat.suffix() { + MatchStrategy::Suffix { suffix: suffix, component: component } + } else if let Some(ext) = pat.required_ext() { + MatchStrategy::RequiredExtension(ext) + } else { + MatchStrategy::Regex + } + } +} + +/// Glob represents a successfully parsed shell glob pattern. +/// +/// It cannot be used directly to match file paths, but it can be converted +/// to a regular expression string or a matcher. +#[derive(Clone, Debug, Eq)] +pub struct Glob { + glob: String, + re: String, + opts: GlobOptions, + tokens: Tokens, +} + +impl PartialEq for Glob { + fn eq(&self, other: &Glob) -> bool { + self.glob == other.glob && self.opts == other.opts + } +} + +impl hash::Hash for Glob { + fn hash(&self, state: &mut H) { + self.glob.hash(state); + self.opts.hash(state); + } +} + +impl fmt::Display for Glob { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + self.glob.fmt(f) + } +} + +/// A matcher for a single pattern. +#[derive(Clone, Debug)] +pub struct GlobMatcher { + /// The underlying pattern. + pat: Glob, + /// The pattern, as a compiled regex. + re: Regex, +} + +impl GlobMatcher { + /// Tests whether the given path matches this pattern or not. + pub fn is_match>(&self, path: P) -> bool { + self.is_match_candidate(&Candidate::new(path.as_ref())) + } + + /// Tests whether the given path matches this pattern or not. + pub fn is_match_candidate(&self, path: &Candidate) -> bool { + self.re.is_match(&path.path) + } +} + +/// A strategic matcher for a single pattern. +#[cfg(test)] +#[derive(Clone, Debug)] +struct GlobStrategic { + /// The match strategy to use. + strategy: MatchStrategy, + /// The underlying pattern. + pat: Glob, + /// The pattern, as a compiled regex. 
+    re: Regex,
+}
+
+#[cfg(test)]
+impl GlobStrategic {
+    /// Tests whether the given path matches this pattern or not.
+    fn is_match<P: AsRef<Path>>(&self, path: P) -> bool {
+        self.is_match_candidate(&Candidate::new(path.as_ref()))
+    }
+
+    /// Tests whether the given path matches this pattern or not.
+    fn is_match_candidate(&self, candidate: &Candidate) -> bool {
+        let byte_path = &*candidate.path;
+
+        match self.strategy {
+            MatchStrategy::Literal(ref lit) => lit.as_bytes() == byte_path,
+            MatchStrategy::BasenameLiteral(ref lit) => {
+                lit.as_bytes() == &*candidate.basename
+            }
+            MatchStrategy::Extension(ref ext) => {
+                ext.as_bytes() == &*candidate.ext
+            }
+            MatchStrategy::Prefix(ref pre) => {
+                starts_with(pre.as_bytes(), byte_path)
+            }
+            MatchStrategy::Suffix { ref suffix, component } => {
+                if component && byte_path == &suffix.as_bytes()[1..] {
+                    return true;
+                }
+                ends_with(suffix.as_bytes(), byte_path)
+            }
+            MatchStrategy::RequiredExtension(ref ext) => {
+                let ext = ext.as_bytes();
+                &*candidate.ext == ext && self.re.is_match(byte_path)
+            }
+            MatchStrategy::Regex => self.re.is_match(byte_path),
+        }
+    }
+}
+
+/// A builder for a pattern.
+///
+/// This builder enables configuring the match semantics of a pattern. For
+/// example, one can make matching case insensitive.
+///
+/// The lifetime `'a` refers to the lifetime of the pattern string.
+#[derive(Clone, Debug)]
+pub struct GlobBuilder<'a> {
+    /// The glob pattern to compile.
+    glob: &'a str,
+    /// Options for the pattern.
+    opts: GlobOptions,
+}
+
+#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
+struct GlobOptions {
+    /// Whether to match case insensitively.
+    case_insensitive: bool,
+    /// Whether to require a literal separator to match a separator in a file
+    /// path. e.g., when enabled, `*` won't match `/`.
+    literal_separator: bool,
+    /// Whether or not to use `\` to escape special characters.
+    /// e.g., when enabled, `\*` will match a literal `*`.
+    backslash_escape: bool,
+}
+
+impl GlobOptions {
+    fn default() -> GlobOptions {
+        GlobOptions {
+            case_insensitive: false,
+            literal_separator: false,
+            backslash_escape: !is_separator('\\'),
+        }
+    }
+}
+
+#[derive(Clone, Debug, Default, Eq, PartialEq)]
+struct Tokens(Vec<Token>);
+
+impl Deref for Tokens {
+    type Target = Vec<Token>;
+    fn deref(&self) -> &Vec<Token> { &self.0 }
+}
+
+impl DerefMut for Tokens {
+    fn deref_mut(&mut self) -> &mut Vec<Token> { &mut self.0 }
+}
+
+#[derive(Clone, Debug, Eq, PartialEq)]
+enum Token {
+    Literal(char),
+    Any,
+    ZeroOrMore,
+    RecursivePrefix,
+    RecursiveSuffix,
+    RecursiveZeroOrMore,
+    Class {
+        negated: bool,
+        ranges: Vec<(char, char)>,
+    },
+    Alternates(Vec<Tokens>),
+}
+
+impl Glob {
+    /// Builds a new pattern with default options.
+    pub fn new(glob: &str) -> Result<Glob, Error> {
+        GlobBuilder::new(glob).build()
+    }
+
+    /// Returns a matcher for this pattern.
+    pub fn compile_matcher(&self) -> GlobMatcher {
+        let re = new_regex(&self.re)
+            .expect("regex compilation shouldn't fail");
+        GlobMatcher {
+            pat: self.clone(),
+            re: re,
+        }
+    }
+
+    /// Returns a strategic matcher.
+    ///
+    /// This isn't exposed because it's not clear whether it's actually
+    /// faster than just running a regex for a *single* pattern. If it
+    /// is faster, then GlobMatcher should do it automatically.
+ #[cfg(test)] + fn compile_strategic_matcher(&self) -> GlobStrategic { + let strategy = MatchStrategy::new(self); + let re = new_regex(&self.re) + .expect("regex compilation shouldn't fail"); + GlobStrategic { + strategy: strategy, + pat: self.clone(), + re: re, + } + } + + /// Returns the original glob pattern used to build this pattern. + pub fn glob(&self) -> &str { + &self.glob + } + + /// Returns the regular expression string for this glob. + /// + /// Note that regular expressions for globs are intended to be matched on + /// arbitrary bytes (`&[u8]`) instead of Unicode strings (`&str`). In + /// particular, globs are frequently used on file paths, where there is no + /// general guarantee that file paths are themselves valid UTF-8. As a + /// result, callers will need to ensure that they are using a regex API + /// that can match on arbitrary bytes. For example, the + /// [`regex`](https://crates.io/regex) + /// crate's + /// [`Regex`](https://docs.rs/regex/*/regex/struct.Regex.html) + /// API is not suitable for this since it matches on `&str`, but its + /// [`bytes::Regex`](https://docs.rs/regex/*/regex/bytes/struct.Regex.html) + /// API is suitable for this. + pub fn regex(&self) -> &str { + &self.re + } + + /// Returns the pattern as a literal if and only if the pattern must match + /// an entire path exactly. + /// + /// The basic format of these patterns is `{literal}`. + fn literal(&self) -> Option { + if self.opts.case_insensitive { + return None; + } + let mut lit = String::new(); + for t in &*self.tokens { + match *t { + Token::Literal(c) => lit.push(c), + _ => return None, + } + } + if lit.is_empty() { + None + } else { + Some(lit) + } + } + + /// Returns an extension if this pattern matches a file path if and only + /// if the file path has the extension returned. + /// + /// Note that this extension returned differs from the extension that + /// std::path::Path::extension returns. Namely, this extension includes + /// the '.'. Also, paths like `.rs` are considered to have an extension + /// of `.rs`. + fn ext(&self) -> Option { + if self.opts.case_insensitive { + return None; + } + let start = match self.tokens.get(0) { + Some(&Token::RecursivePrefix) => 1, + Some(_) => 0, + _ => return None, + }; + match self.tokens.get(start) { + Some(&Token::ZeroOrMore) => { + // If there was no recursive prefix, then we only permit + // `*` if `*` can match a `/`. For example, if `*` can't + // match `/`, then `*.c` doesn't match `foo/bar.c`. + if start == 0 && self.opts.literal_separator { + return None; + } + } + _ => return None, + } + match self.tokens.get(start + 1) { + Some(&Token::Literal('.')) => {} + _ => return None, + } + let mut lit = ".".to_string(); + for t in self.tokens[start + 2..].iter() { + match *t { + Token::Literal('.') | Token::Literal('/') => return None, + Token::Literal(c) => lit.push(c), + _ => return None, + } + } + if lit.is_empty() { + None + } else { + Some(lit) + } + } + + /// This is like `ext`, but returns an extension even if it isn't sufficent + /// to imply a match. Namely, if an extension is returned, then it is + /// necessary but not sufficient for a match. + fn required_ext(&self) -> Option { + if self.opts.case_insensitive { + return None; + } + // We don't care at all about the beginning of this pattern. All we + // need to check for is if it ends with a literal of the form `.ext`. 
+ let mut ext: Vec = vec![]; // built in reverse + for t in self.tokens.iter().rev() { + match *t { + Token::Literal('/') => return None, + Token::Literal(c) => { + ext.push(c); + if c == '.' { + break; + } + } + _ => return None, + } + } + if ext.last() != Some(&'.') { + None + } else { + ext.reverse(); + Some(ext.into_iter().collect()) + } + } + + /// Returns a literal prefix of this pattern if the entire pattern matches + /// if the literal prefix matches. + fn prefix(&self) -> Option { + if self.opts.case_insensitive { + return None; + } + let end = match self.tokens.last() { + Some(&Token::ZeroOrMore) => { + if self.opts.literal_separator { + // If a trailing `*` can't match a `/`, then we can't + // assume a match of the prefix corresponds to a match + // of the overall pattern. e.g., `foo/*` with + // `literal_separator` enabled matches `foo/bar` but not + // `foo/bar/baz`, even though `foo/bar/baz` has a `foo/` + // literal prefix. + return None; + } + self.tokens.len() - 1 + } + _ => self.tokens.len(), + }; + let mut lit = String::new(); + for t in &self.tokens[0..end] { + match *t { + Token::Literal(c) => lit.push(c), + _ => return None, + } + } + if lit.is_empty() { + None + } else { + Some(lit) + } + } + + /// Returns a literal suffix of this pattern if the entire pattern matches + /// if the literal suffix matches. + /// + /// If a literal suffix is returned and it must match either the entire + /// file path or be preceded by a `/`, then also return true. This happens + /// with a pattern like `**/foo/bar`. Namely, this pattern matches + /// `foo/bar` and `baz/foo/bar`, but not `foofoo/bar`. In this case, the + /// suffix returned is `/foo/bar` (but should match the entire path + /// `foo/bar`). + /// + /// When this returns true, the suffix literal is guaranteed to start with + /// a `/`. + fn suffix(&self) -> Option<(String, bool)> { + if self.opts.case_insensitive { + return None; + } + let mut lit = String::new(); + let (start, entire) = match self.tokens.get(0) { + Some(&Token::RecursivePrefix) => { + // We only care if this follows a path component if the next + // token is a literal. + if let Some(&Token::Literal(_)) = self.tokens.get(1) { + lit.push('/'); + (1, true) + } else { + (1, false) + } + } + _ => (0, false), + }; + let start = match self.tokens.get(start) { + Some(&Token::ZeroOrMore) => { + // If literal_separator is enabled, then a `*` can't + // necessarily match everything, so reporting a suffix match + // as a match of the pattern would be a false positive. + if self.opts.literal_separator { + return None; + } + start + 1 + } + _ => start, + }; + for t in &self.tokens[start..] { + match *t { + Token::Literal(c) => lit.push(c), + _ => return None, + } + } + if lit.is_empty() || lit == "/" { + None + } else { + Some((lit, entire)) + } + } + + /// If this pattern only needs to inspect the basename of a file path, + /// then the tokens corresponding to only the basename match are returned. + /// + /// For example, given a pattern of `**/*.foo`, only the tokens + /// corresponding to `*.foo` are returned. + /// + /// Note that this will return None if any match of the basename tokens + /// doesn't correspond to a match of the entire pattern. For example, the + /// glob `foo` only matches when a file path has a basename of `foo`, but + /// doesn't *always* match when a file path has a basename of `foo`. e.g., + /// `foo` doesn't match `abc/foo`. 
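+    ///
+    /// A rough sketch of when this applies (illustrative only, and assuming
+    /// `literal_separator` is enabled so `*`/`?` cannot match `/`):
+    ///
+    /// ```rust,ignore
+    /// // "**/*.foo"  -> Some(tokens for "*.foo")
+    /// // "*.foo"     -> None (nothing gobbles up the parent portion of the path)
+    /// // "**/a/b"    -> None (the basename portion contains a separator)
+    /// ```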
+ fn basename_tokens(&self) -> Option<&[Token]> { + if self.opts.case_insensitive { + return None; + } + let start = match self.tokens.get(0) { + Some(&Token::RecursivePrefix) => 1, + _ => { + // With nothing to gobble up the parent portion of a path, + // we can't assume that matching on only the basename is + // correct. + return None; + } + }; + if self.tokens[start..].is_empty() { + return None; + } + for t in &self.tokens[start..] { + match *t { + Token::Literal('/') => return None, + Token::Literal(_) => {} // OK + Token::Any | Token::ZeroOrMore => { + if !self.opts.literal_separator { + // In this case, `*` and `?` can match a path + // separator, which means this could reach outside + // the basename. + return None; + } + } + Token::RecursivePrefix + | Token::RecursiveSuffix + | Token::RecursiveZeroOrMore => { + return None; + } + Token::Class{..} | Token::Alternates(..) => { + // We *could* be a little smarter here, but either one + // of these is going to prevent our literal optimizations + // anyway, so give up. + return None; + } + } + } + Some(&self.tokens[start..]) + } + + /// Returns the pattern as a literal if and only if the pattern exclusively + /// matches the basename of a file path *and* is a literal. + /// + /// The basic format of these patterns is `**/{literal}`, where `{literal}` + /// does not contain a path separator. + fn basename_literal(&self) -> Option { + let tokens = match self.basename_tokens() { + None => return None, + Some(tokens) => tokens, + }; + let mut lit = String::new(); + for t in tokens { + match *t { + Token::Literal(c) => lit.push(c), + _ => return None, + } + } + Some(lit) + } +} + +impl<'a> GlobBuilder<'a> { + /// Create a new builder for the pattern given. + /// + /// The pattern is not compiled until `build` is called. + pub fn new(glob: &'a str) -> GlobBuilder<'a> { + GlobBuilder { + glob: glob, + opts: GlobOptions::default(), + } + } + + /// Parses and builds the pattern. + pub fn build(&self) -> Result { + let mut p = Parser { + glob: &self.glob, + stack: vec![Tokens::default()], + chars: self.glob.chars().peekable(), + prev: None, + cur: None, + opts: &self.opts, + }; + p.parse()?; + if p.stack.is_empty() { + Err(Error { + glob: Some(self.glob.to_string()), + kind: ErrorKind::UnopenedAlternates, + }) + } else if p.stack.len() > 1 { + Err(Error { + glob: Some(self.glob.to_string()), + kind: ErrorKind::UnclosedAlternates, + }) + } else { + let tokens = p.stack.pop().unwrap(); + Ok(Glob { + glob: self.glob.to_string(), + re: tokens.to_regex_with(&self.opts), + opts: self.opts, + tokens: tokens, + }) + } + } + + /// Toggle whether the pattern matches case insensitively or not. + /// + /// This is disabled by default. + pub fn case_insensitive(&mut self, yes: bool) -> &mut GlobBuilder<'a> { + self.opts.case_insensitive = yes; + self + } + + /// Toggle whether a literal `/` is required to match a path separator. + pub fn literal_separator(&mut self, yes: bool) -> &mut GlobBuilder<'a> { + self.opts.literal_separator = yes; + self + } + + /// When enabled, a back slash (`\`) may be used to escape + /// special characters in a glob pattern. Additionally, this will + /// prevent `\` from being interpreted as a path separator on all + /// platforms. + /// + /// This is enabled by default on platforms where `\` is not a + /// path separator and disabled by default on platforms where `\` + /// is a path separator. 
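+    ///
+    /// For example (an illustrative sketch): with escaping enabled, `\*`
+    /// matches a literal `*` instead of acting as a wildcard:
+    ///
+    /// ```rust,ignore
+    /// use globset::GlobBuilder;
+    ///
+    /// let matcher = GlobBuilder::new(r"foo\*")
+    ///     .backslash_escape(true)
+    ///     .build()
+    ///     .unwrap()
+    ///     .compile_matcher();
+    /// assert!(matcher.is_match("foo*"));
+    /// assert!(!matcher.is_match("foobar"));
+    /// ```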
+ pub fn backslash_escape(&mut self, yes: bool) -> &mut GlobBuilder<'a> { + self.opts.backslash_escape = yes; + self + } +} + +impl Tokens { + /// Convert this pattern to a string that is guaranteed to be a valid + /// regular expression and will represent the matching semantics of this + /// glob pattern and the options given. + fn to_regex_with(&self, options: &GlobOptions) -> String { + let mut re = String::new(); + re.push_str("(?-u)"); + if options.case_insensitive { + re.push_str("(?i)"); + } + re.push('^'); + // Special case. If the entire glob is just `**`, then it should match + // everything. + if self.len() == 1 && self[0] == Token::RecursivePrefix { + re.push_str(".*"); + re.push('$'); + return re; + } + self.tokens_to_regex(options, &self, &mut re); + re.push('$'); + re + } + + fn tokens_to_regex( + &self, + options: &GlobOptions, + tokens: &[Token], + re: &mut String, + ) { + for tok in tokens { + match *tok { + Token::Literal(c) => { + re.push_str(&char_to_escaped_literal(c)); + } + Token::Any => { + if options.literal_separator { + re.push_str("[^/]"); + } else { + re.push_str("."); + } + } + Token::ZeroOrMore => { + if options.literal_separator { + re.push_str("[^/]*"); + } else { + re.push_str(".*"); + } + } + Token::RecursivePrefix => { + re.push_str("(?:/?|.*/)"); + } + Token::RecursiveSuffix => { + re.push_str("(?:/?|/.*)"); + } + Token::RecursiveZeroOrMore => { + re.push_str("(?:/|/.*/)"); + } + Token::Class { negated, ref ranges } => { + re.push('['); + if negated { + re.push('^'); + } + for r in ranges { + if r.0 == r.1 { + // Not strictly necessary, but nicer to look at. + re.push_str(&char_to_escaped_literal(r.0)); + } else { + re.push_str(&char_to_escaped_literal(r.0)); + re.push('-'); + re.push_str(&char_to_escaped_literal(r.1)); + } + } + re.push(']'); + } + Token::Alternates(ref patterns) => { + let mut parts = vec![]; + for pat in patterns { + let mut altre = String::new(); + self.tokens_to_regex(options, &pat, &mut altre); + if !altre.is_empty() { + parts.push(altre); + } + } + + // It is possible to have an empty set in which case the + // resulting alternation '()' would be an error. + if !parts.is_empty() { + re.push('('); + re.push_str(&parts.join("|")); + re.push(')'); + } + } + } + } + } +} + +/// Convert a Unicode scalar value to an escaped string suitable for use as +/// a literal in a non-Unicode regex. +fn char_to_escaped_literal(c: char) -> String { + bytes_to_escaped_literal(&c.to_string().into_bytes()) +} + +/// Converts an arbitrary sequence of bytes to a UTF-8 string. All non-ASCII +/// code units are converted to their escaped form. +fn bytes_to_escaped_literal(bs: &[u8]) -> String { + let mut s = String::with_capacity(bs.len()); + for &b in bs { + if b <= 0x7F { + s.push_str(®ex::escape(&(b as char).to_string())); + } else { + s.push_str(&format!("\\x{:02x}", b)); + } + } + s +} + +struct Parser<'a> { + glob: &'a str, + stack: Vec, + chars: iter::Peekable>, + prev: Option, + cur: Option, + opts: &'a GlobOptions, +} + +impl<'a> Parser<'a> { + fn error(&self, kind: ErrorKind) -> Error { + Error { glob: Some(self.glob.to_string()), kind: kind } + } + + fn parse(&mut self) -> Result<(), Error> { + while let Some(c) = self.bump() { + match c { + '?' 
=> self.push_token(Token::Any)?, + '*' => self.parse_star()?, + '[' => self.parse_class()?, + '{' => self.push_alternate()?, + '}' => self.pop_alternate()?, + ',' => self.parse_comma()?, + '\\' => self.parse_backslash()?, + c => self.push_token(Token::Literal(c))?, + } + } + Ok(()) + } + + fn push_alternate(&mut self) -> Result<(), Error> { + if self.stack.len() > 1 { + return Err(self.error(ErrorKind::NestedAlternates)); + } + Ok(self.stack.push(Tokens::default())) + } + + fn pop_alternate(&mut self) -> Result<(), Error> { + let mut alts = vec![]; + while self.stack.len() >= 2 { + alts.push(self.stack.pop().unwrap()); + } + self.push_token(Token::Alternates(alts)) + } + + fn push_token(&mut self, tok: Token) -> Result<(), Error> { + if let Some(ref mut pat) = self.stack.last_mut() { + return Ok(pat.push(tok)); + } + Err(self.error(ErrorKind::UnopenedAlternates)) + } + + fn pop_token(&mut self) -> Result { + if let Some(ref mut pat) = self.stack.last_mut() { + return Ok(pat.pop().unwrap()); + } + Err(self.error(ErrorKind::UnopenedAlternates)) + } + + fn have_tokens(&self) -> Result { + match self.stack.last() { + None => Err(self.error(ErrorKind::UnopenedAlternates)), + Some(ref pat) => Ok(!pat.is_empty()), + } + } + + fn parse_comma(&mut self) -> Result<(), Error> { + // If we aren't inside a group alternation, then don't + // treat commas specially. Otherwise, we need to start + // a new alternate. + if self.stack.len() <= 1 { + self.push_token(Token::Literal(',')) + } else { + Ok(self.stack.push(Tokens::default())) + } + } + + fn parse_backslash(&mut self) -> Result<(), Error> { + if self.opts.backslash_escape { + match self.bump() { + None => Err(self.error(ErrorKind::DanglingEscape)), + Some(c) => self.push_token(Token::Literal(c)), + } + } else if is_separator('\\') { + // Normalize all patterns to use / as a separator. + self.push_token(Token::Literal('/')) + } else { + self.push_token(Token::Literal('\\')) + } + } + + fn parse_star(&mut self) -> Result<(), Error> { + let prev = self.prev; + if self.chars.peek() != Some(&'*') { + self.push_token(Token::ZeroOrMore)?; + return Ok(()); + } + assert!(self.bump() == Some('*')); + if !self.have_tokens()? 
{ + self.push_token(Token::RecursivePrefix)?; + let next = self.bump(); + if !next.map(is_separator).unwrap_or(true) { + return Err(self.error(ErrorKind::InvalidRecursive)); + } + return Ok(()); + } + self.pop_token()?; + if !prev.map(is_separator).unwrap_or(false) { + if self.stack.len() <= 1 + || (prev != Some(',') && prev != Some('{')) { + return Err(self.error(ErrorKind::InvalidRecursive)); + } + } + match self.chars.peek() { + None => { + assert!(self.bump().is_none()); + self.push_token(Token::RecursiveSuffix) + } + Some(&',') | Some(&'}') if self.stack.len() >= 2 => { + self.push_token(Token::RecursiveSuffix) + } + Some(&c) if is_separator(c) => { + assert!(self.bump().map(is_separator).unwrap_or(false)); + self.push_token(Token::RecursiveZeroOrMore) + } + _ => Err(self.error(ErrorKind::InvalidRecursive)), + } + } + + fn parse_class(&mut self) -> Result<(), Error> { + fn add_to_last_range( + glob: &str, + r: &mut (char, char), + add: char, + ) -> Result<(), Error> { + r.1 = add; + if r.1 < r.0 { + Err(Error { + glob: Some(glob.to_string()), + kind: ErrorKind::InvalidRange(r.0, r.1), + }) + } else { + Ok(()) + } + } + let mut ranges = vec![]; + let negated = match self.chars.peek() { + Some(&'!') | Some(&'^') => { + let bump = self.bump(); + assert!(bump == Some('!') || bump == Some('^')); + true + } + _ => false, + }; + let mut first = true; + let mut in_range = false; + loop { + let c = match self.bump() { + Some(c) => c, + // The only way to successfully break this loop is to observe + // a ']'. + None => return Err(self.error(ErrorKind::UnclosedClass)), + }; + match c { + ']' => { + if first { + ranges.push((']', ']')); + } else { + break; + } + } + '-' => { + if first { + ranges.push(('-', '-')); + } else if in_range { + // invariant: in_range is only set when there is + // already at least one character seen. + let r = ranges.last_mut().unwrap(); + add_to_last_range(&self.glob, r, '-')?; + in_range = false; + } else { + assert!(!ranges.is_empty()); + in_range = true; + } + } + c => { + if in_range { + // invariant: in_range is only set when there is + // already at least one character seen. + add_to_last_range( + &self.glob, ranges.last_mut().unwrap(), c)?; + } else { + ranges.push((c, c)); + } + in_range = false; + } + } + first = false; + } + if in_range { + // Means that the last character in the class was a '-', so add + // it as a literal. + ranges.push(('-', '-')); + } + self.push_token(Token::Class { + negated: negated, + ranges: ranges, + }) + } + + fn bump(&mut self) -> Option { + self.prev = self.cur; + self.cur = self.chars.next(); + self.cur + } +} + +#[cfg(test)] +fn starts_with(needle: &[u8], haystack: &[u8]) -> bool { + needle.len() <= haystack.len() && needle == &haystack[..needle.len()] +} + +#[cfg(test)] +fn ends_with(needle: &[u8], haystack: &[u8]) -> bool { + if needle.len() > haystack.len() { + return false; + } + needle == &haystack[haystack.len() - needle.len()..] +} + +#[cfg(test)] +mod tests { + use {GlobSetBuilder, ErrorKind}; + use super::{Glob, GlobBuilder, Token}; + use super::Token::*; + + #[derive(Clone, Copy, Debug, Default)] + struct Options { + casei: Option, + litsep: Option, + bsesc: Option, + } + + macro_rules! syntax { + ($name:ident, $pat:expr, $tokens:expr) => { + #[test] + fn $name() { + let pat = Glob::new($pat).unwrap(); + assert_eq!($tokens, pat.tokens.0); + } + } + } + + macro_rules! 
syntaxerr { + ($name:ident, $pat:expr, $err:expr) => { + #[test] + fn $name() { + let err = Glob::new($pat).unwrap_err(); + assert_eq!(&$err, err.kind()); + } + } + } + + macro_rules! toregex { + ($name:ident, $pat:expr, $re:expr) => { + toregex!($name, $pat, $re, Options::default()); + }; + ($name:ident, $pat:expr, $re:expr, $options:expr) => { + #[test] + fn $name() { + let mut builder = GlobBuilder::new($pat); + if let Some(casei) = $options.casei { + builder.case_insensitive(casei); + } + if let Some(litsep) = $options.litsep { + builder.literal_separator(litsep); + } + if let Some(bsesc) = $options.bsesc { + builder.backslash_escape(bsesc); + } + let pat = builder.build().unwrap(); + assert_eq!(format!("(?-u){}", $re), pat.regex()); + } + }; + } + + macro_rules! matches { + ($name:ident, $pat:expr, $path:expr) => { + matches!($name, $pat, $path, Options::default()); + }; + ($name:ident, $pat:expr, $path:expr, $options:expr) => { + #[test] + fn $name() { + let mut builder = GlobBuilder::new($pat); + if let Some(casei) = $options.casei { + builder.case_insensitive(casei); + } + if let Some(litsep) = $options.litsep { + builder.literal_separator(litsep); + } + if let Some(bsesc) = $options.bsesc { + builder.backslash_escape(bsesc); + } + let pat = builder.build().unwrap(); + let matcher = pat.compile_matcher(); + let strategic = pat.compile_strategic_matcher(); + let set = GlobSetBuilder::new().add(pat).build().unwrap(); + assert!(matcher.is_match($path)); + assert!(strategic.is_match($path)); + assert!(set.is_match($path)); + } + }; + } + + macro_rules! nmatches { + ($name:ident, $pat:expr, $path:expr) => { + nmatches!($name, $pat, $path, Options::default()); + }; + ($name:ident, $pat:expr, $path:expr, $options:expr) => { + #[test] + fn $name() { + let mut builder = GlobBuilder::new($pat); + if let Some(casei) = $options.casei { + builder.case_insensitive(casei); + } + if let Some(litsep) = $options.litsep { + builder.literal_separator(litsep); + } + if let Some(bsesc) = $options.bsesc { + builder.backslash_escape(bsesc); + } + let pat = builder.build().unwrap(); + let matcher = pat.compile_matcher(); + let strategic = pat.compile_strategic_matcher(); + let set = GlobSetBuilder::new().add(pat).build().unwrap(); + assert!(!matcher.is_match($path)); + assert!(!strategic.is_match($path)); + assert!(!set.is_match($path)); + } + }; + } + + fn s(string: &str) -> String { string.to_string() } + + fn class(s: char, e: char) -> Token { + Class { negated: false, ranges: vec![(s, e)] } + } + + fn classn(s: char, e: char) -> Token { + Class { negated: true, ranges: vec![(s, e)] } + } + + fn rclass(ranges: &[(char, char)]) -> Token { + Class { negated: false, ranges: ranges.to_vec() } + } + + fn rclassn(ranges: &[(char, char)]) -> Token { + Class { negated: true, ranges: ranges.to_vec() } + } + + syntax!(literal1, "a", vec![Literal('a')]); + syntax!(literal2, "ab", vec![Literal('a'), Literal('b')]); + syntax!(any1, "?", vec![Any]); + syntax!(any2, "a?b", vec![Literal('a'), Any, Literal('b')]); + syntax!(seq1, "*", vec![ZeroOrMore]); + syntax!(seq2, "a*b", vec![Literal('a'), ZeroOrMore, Literal('b')]); + syntax!(seq3, "*a*b*", vec![ + ZeroOrMore, Literal('a'), ZeroOrMore, Literal('b'), ZeroOrMore, + ]); + syntax!(rseq1, "**", vec![RecursivePrefix]); + syntax!(rseq2, "**/", vec![RecursivePrefix]); + syntax!(rseq3, "/**", vec![RecursiveSuffix]); + syntax!(rseq4, "/**/", vec![RecursiveZeroOrMore]); + syntax!(rseq5, "a/**/b", vec![ + Literal('a'), RecursiveZeroOrMore, Literal('b'), + ]); + syntax!(cls1, 
"[a]", vec![class('a', 'a')]); + syntax!(cls2, "[!a]", vec![classn('a', 'a')]); + syntax!(cls3, "[a-z]", vec![class('a', 'z')]); + syntax!(cls4, "[!a-z]", vec![classn('a', 'z')]); + syntax!(cls5, "[-]", vec![class('-', '-')]); + syntax!(cls6, "[]]", vec![class(']', ']')]); + syntax!(cls7, "[*]", vec![class('*', '*')]); + syntax!(cls8, "[!!]", vec![classn('!', '!')]); + syntax!(cls9, "[a-]", vec![rclass(&[('a', 'a'), ('-', '-')])]); + syntax!(cls10, "[-a-z]", vec![rclass(&[('-', '-'), ('a', 'z')])]); + syntax!(cls11, "[a-z-]", vec![rclass(&[('a', 'z'), ('-', '-')])]); + syntax!(cls12, "[-a-z-]", vec![ + rclass(&[('-', '-'), ('a', 'z'), ('-', '-')]), + ]); + syntax!(cls13, "[]-z]", vec![class(']', 'z')]); + syntax!(cls14, "[--z]", vec![class('-', 'z')]); + syntax!(cls15, "[ --]", vec![class(' ', '-')]); + syntax!(cls16, "[0-9a-z]", vec![rclass(&[('0', '9'), ('a', 'z')])]); + syntax!(cls17, "[a-z0-9]", vec![rclass(&[('a', 'z'), ('0', '9')])]); + syntax!(cls18, "[!0-9a-z]", vec![rclassn(&[('0', '9'), ('a', 'z')])]); + syntax!(cls19, "[!a-z0-9]", vec![rclassn(&[('a', 'z'), ('0', '9')])]); + syntax!(cls20, "[^a]", vec![classn('a', 'a')]); + syntax!(cls21, "[^a-z]", vec![classn('a', 'z')]); + + syntaxerr!(err_rseq1, "a**", ErrorKind::InvalidRecursive); + syntaxerr!(err_rseq2, "**a", ErrorKind::InvalidRecursive); + syntaxerr!(err_rseq3, "a**b", ErrorKind::InvalidRecursive); + syntaxerr!(err_rseq4, "***", ErrorKind::InvalidRecursive); + syntaxerr!(err_rseq5, "/a**", ErrorKind::InvalidRecursive); + syntaxerr!(err_rseq6, "/**a", ErrorKind::InvalidRecursive); + syntaxerr!(err_rseq7, "/a**b", ErrorKind::InvalidRecursive); + syntaxerr!(err_unclosed1, "[", ErrorKind::UnclosedClass); + syntaxerr!(err_unclosed2, "[]", ErrorKind::UnclosedClass); + syntaxerr!(err_unclosed3, "[!", ErrorKind::UnclosedClass); + syntaxerr!(err_unclosed4, "[!]", ErrorKind::UnclosedClass); + syntaxerr!(err_range1, "[z-a]", ErrorKind::InvalidRange('z', 'a')); + syntaxerr!(err_range2, "[z--]", ErrorKind::InvalidRange('z', '-')); + + const CASEI: Options = Options { + casei: Some(true), + litsep: None, + bsesc: None, + }; + const SLASHLIT: Options = Options { + casei: None, + litsep: Some(true), + bsesc: None, + }; + const NOBSESC: Options = Options { + casei: None, + litsep: None, + bsesc: Some(false), + }; + const BSESC: Options = Options { + casei: None, + litsep: None, + bsesc: Some(true), + }; + + toregex!(re_casei, "a", "(?i)^a$", &CASEI); + + toregex!(re_slash1, "?", r"^[^/]$", SLASHLIT); + toregex!(re_slash2, "*", r"^[^/]*$", SLASHLIT); + + toregex!(re1, "a", "^a$"); + toregex!(re2, "?", "^.$"); + toregex!(re3, "*", "^.*$"); + toregex!(re4, "a?", "^a.$"); + toregex!(re5, "?a", "^.a$"); + toregex!(re6, "a*", "^a.*$"); + toregex!(re7, "*a", "^.*a$"); + toregex!(re8, "[*]", r"^[\*]$"); + toregex!(re9, "[+]", r"^[\+]$"); + toregex!(re10, "+", r"^\+$"); + toregex!(re11, "**", r"^.*$"); + toregex!(re12, "☃", r"^\xe2\x98\x83$"); + + matches!(match1, "a", "a"); + matches!(match2, "a*b", "a_b"); + matches!(match3, "a*b*c", "abc"); + matches!(match4, "a*b*c", "a_b_c"); + matches!(match5, "a*b*c", "a___b___c"); + matches!(match6, "abc*abc*abc", "abcabcabcabcabcabcabc"); + matches!(match7, "a*a*a*a*a*a*a*a*a", "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"); + matches!(match8, "a*b[xyz]c*d", "abxcdbxcddd"); + matches!(match9, "*.rs", ".rs"); + matches!(match10, "☃", "☃"); + + matches!(matchrec1, "some/**/needle.txt", "some/needle.txt"); + matches!(matchrec2, "some/**/needle.txt", "some/one/needle.txt"); + matches!(matchrec3, "some/**/needle.txt", 
"some/one/two/needle.txt"); + matches!(matchrec4, "some/**/needle.txt", "some/other/needle.txt"); + matches!(matchrec5, "**", "abcde"); + matches!(matchrec6, "**", ""); + matches!(matchrec7, "**", ".asdf"); + matches!(matchrec8, "**", "/x/.asdf"); + matches!(matchrec9, "some/**/**/needle.txt", "some/needle.txt"); + matches!(matchrec10, "some/**/**/needle.txt", "some/one/needle.txt"); + matches!(matchrec11, "some/**/**/needle.txt", "some/one/two/needle.txt"); + matches!(matchrec12, "some/**/**/needle.txt", "some/other/needle.txt"); + matches!(matchrec13, "**/test", "one/two/test"); + matches!(matchrec14, "**/test", "one/test"); + matches!(matchrec15, "**/test", "test"); + matches!(matchrec16, "/**/test", "/one/two/test"); + matches!(matchrec17, "/**/test", "/one/test"); + matches!(matchrec18, "/**/test", "/test"); + matches!(matchrec19, "**/.*", ".abc"); + matches!(matchrec20, "**/.*", "abc/.abc"); + matches!(matchrec21, ".*/**", ".abc"); + matches!(matchrec22, ".*/**", ".abc/abc"); + matches!(matchrec23, "foo/**", "foo"); + matches!(matchrec24, "**/foo/bar", "foo/bar"); + matches!(matchrec25, "some/*/needle.txt", "some/one/needle.txt"); + + matches!(matchrange1, "a[0-9]b", "a0b"); + matches!(matchrange2, "a[0-9]b", "a9b"); + matches!(matchrange3, "a[!0-9]b", "a_b"); + matches!(matchrange4, "[a-z123]", "1"); + matches!(matchrange5, "[1a-z23]", "1"); + matches!(matchrange6, "[123a-z]", "1"); + matches!(matchrange7, "[abc-]", "-"); + matches!(matchrange8, "[-abc]", "-"); + matches!(matchrange9, "[-a-c]", "b"); + matches!(matchrange10, "[a-c-]", "b"); + matches!(matchrange11, "[-]", "-"); + matches!(matchrange12, "a[^0-9]b", "a_b"); + + matches!(matchpat1, "*hello.txt", "hello.txt"); + matches!(matchpat2, "*hello.txt", "gareth_says_hello.txt"); + matches!(matchpat3, "*hello.txt", "some/path/to/hello.txt"); + matches!(matchpat4, "*hello.txt", "some\\path\\to\\hello.txt"); + matches!(matchpat5, "*hello.txt", "/an/absolute/path/to/hello.txt"); + matches!(matchpat6, "*some/path/to/hello.txt", "some/path/to/hello.txt"); + matches!(matchpat7, "*some/path/to/hello.txt", + "a/bigger/some/path/to/hello.txt"); + + matches!(matchescape, "_[[]_[]]_[?]_[*]_!_", "_[_]_?_*_!_"); + + matches!(matchcasei1, "aBcDeFg", "aBcDeFg", CASEI); + matches!(matchcasei2, "aBcDeFg", "abcdefg", CASEI); + matches!(matchcasei3, "aBcDeFg", "ABCDEFG", CASEI); + matches!(matchcasei4, "aBcDeFg", "AbCdEfG", CASEI); + + matches!(matchalt1, "a,b", "a,b"); + matches!(matchalt2, ",", ","); + matches!(matchalt3, "{a,b}", "a"); + matches!(matchalt4, "{a,b}", "b"); + matches!(matchalt5, "{**/src/**,foo}", "abc/src/bar"); + matches!(matchalt6, "{**/src/**,foo}", "foo"); + matches!(matchalt7, "{[}],foo}", "}"); + matches!(matchalt8, "{foo}", "foo"); + matches!(matchalt9, "{}", ""); + matches!(matchalt10, "{,}", ""); + matches!(matchalt11, "{*.foo,*.bar,*.wat}", "test.foo"); + matches!(matchalt12, "{*.foo,*.bar,*.wat}", "test.bar"); + matches!(matchalt13, "{*.foo,*.bar,*.wat}", "test.wat"); + + matches!(matchslash1, "abc/def", "abc/def", SLASHLIT); + #[cfg(unix)] + nmatches!(matchslash2, "abc?def", "abc/def", SLASHLIT); + #[cfg(not(unix))] + nmatches!(matchslash2, "abc?def", "abc\\def", SLASHLIT); + nmatches!(matchslash3, "abc*def", "abc/def", SLASHLIT); + matches!(matchslash4, "abc[/]def", "abc/def", SLASHLIT); // differs + #[cfg(unix)] + nmatches!(matchslash5, "abc\\def", "abc/def", SLASHLIT); + #[cfg(not(unix))] + matches!(matchslash5, "abc\\def", "abc/def", SLASHLIT); + + matches!(matchbackslash1, "\\[", "[", BSESC); + 
matches!(matchbackslash2, "\\?", "?", BSESC); + matches!(matchbackslash3, "\\*", "*", BSESC); + matches!(matchbackslash4, "\\[a-z]", "\\a", NOBSESC); + matches!(matchbackslash5, "\\?", "\\a", NOBSESC); + matches!(matchbackslash6, "\\*", "\\\\", NOBSESC); + #[cfg(unix)] + matches!(matchbackslash7, "\\a", "a"); + #[cfg(not(unix))] + matches!(matchbackslash8, "\\a", "/a"); + + nmatches!(matchnot1, "a*b*c", "abcd"); + nmatches!(matchnot2, "abc*abc*abc", "abcabcabcabcabcabcabca"); + nmatches!(matchnot3, "some/**/needle.txt", "some/other/notthis.txt"); + nmatches!(matchnot4, "some/**/**/needle.txt", "some/other/notthis.txt"); + nmatches!(matchnot5, "/**/test", "test"); + nmatches!(matchnot6, "/**/test", "/one/notthis"); + nmatches!(matchnot7, "/**/test", "/notthis"); + nmatches!(matchnot8, "**/.*", "ab.c"); + nmatches!(matchnot9, "**/.*", "abc/ab.c"); + nmatches!(matchnot10, ".*/**", "a.bc"); + nmatches!(matchnot11, ".*/**", "abc/a.bc"); + nmatches!(matchnot12, "a[0-9]b", "a_b"); + nmatches!(matchnot13, "a[!0-9]b", "a0b"); + nmatches!(matchnot14, "a[!0-9]b", "a9b"); + nmatches!(matchnot15, "[!-]", "-"); + nmatches!(matchnot16, "*hello.txt", "hello.txt-and-then-some"); + nmatches!(matchnot17, "*hello.txt", "goodbye.txt"); + nmatches!(matchnot18, "*some/path/to/hello.txt", + "some/path/to/hello.txt-and-then-some"); + nmatches!(matchnot19, "*some/path/to/hello.txt", + "some/other/path/to/hello.txt"); + nmatches!(matchnot20, "a", "foo/a"); + nmatches!(matchnot21, "./foo", "foo"); + nmatches!(matchnot22, "**/foo", "foofoo"); + nmatches!(matchnot23, "**/foo/bar", "foofoo/bar"); + nmatches!(matchnot24, "/*.c", "mozilla-sha1/sha1.c"); + nmatches!(matchnot25, "*.c", "mozilla-sha1/sha1.c", SLASHLIT); + nmatches!(matchnot26, "**/m4/ltoptions.m4", + "csharp/src/packages/repositories.config", SLASHLIT); + nmatches!(matchnot27, "a[^0-9]b", "a0b"); + nmatches!(matchnot28, "a[^0-9]b", "a9b"); + nmatches!(matchnot29, "[^-]", "-"); + nmatches!(matchnot30, "some/*/needle.txt", "some/needle.txt"); + nmatches!( + matchrec31, + "some/*/needle.txt", "some/one/two/needle.txt", SLASHLIT); + nmatches!( + matchrec32, + "some/*/needle.txt", "some/one/two/three/needle.txt", SLASHLIT); + + macro_rules! extract { + ($which:ident, $name:ident, $pat:expr, $expect:expr) => { + extract!($which, $name, $pat, $expect, Options::default()); + }; + ($which:ident, $name:ident, $pat:expr, $expect:expr, $options:expr) => { + #[test] + fn $name() { + let mut builder = GlobBuilder::new($pat); + if let Some(casei) = $options.casei { + builder.case_insensitive(casei); + } + if let Some(litsep) = $options.litsep { + builder.literal_separator(litsep); + } + if let Some(bsesc) = $options.bsesc { + builder.backslash_escape(bsesc); + } + let pat = builder.build().unwrap(); + assert_eq!($expect, pat.$which()); + } + }; + } + + macro_rules! literal { + ($($tt:tt)*) => { extract!(literal, $($tt)*); } + } + + macro_rules! basetokens { + ($($tt:tt)*) => { extract!(basename_tokens, $($tt)*); } + } + + macro_rules! ext { + ($($tt:tt)*) => { extract!(ext, $($tt)*); } + } + + macro_rules! required_ext { + ($($tt:tt)*) => { extract!(required_ext, $($tt)*); } + } + + macro_rules! prefix { + ($($tt:tt)*) => { extract!(prefix, $($tt)*); } + } + + macro_rules! suffix { + ($($tt:tt)*) => { extract!(suffix, $($tt)*); } + } + + macro_rules! 
baseliteral { + ($($tt:tt)*) => { extract!(basename_literal, $($tt)*); } + } + + literal!(extract_lit1, "foo", Some(s("foo"))); + literal!(extract_lit2, "foo", None, CASEI); + literal!(extract_lit3, "/foo", Some(s("/foo"))); + literal!(extract_lit4, "/foo/", Some(s("/foo/"))); + literal!(extract_lit5, "/foo/bar", Some(s("/foo/bar"))); + literal!(extract_lit6, "*.foo", None); + literal!(extract_lit7, "foo/bar", Some(s("foo/bar"))); + literal!(extract_lit8, "**/foo/bar", None); + + basetokens!(extract_basetoks1, "**/foo", Some(&*vec![ + Literal('f'), Literal('o'), Literal('o'), + ])); + basetokens!(extract_basetoks2, "**/foo", None, CASEI); + basetokens!(extract_basetoks3, "**/foo", Some(&*vec![ + Literal('f'), Literal('o'), Literal('o'), + ]), SLASHLIT); + basetokens!(extract_basetoks4, "*foo", None, SLASHLIT); + basetokens!(extract_basetoks5, "*foo", None); + basetokens!(extract_basetoks6, "**/fo*o", None); + basetokens!(extract_basetoks7, "**/fo*o", Some(&*vec![ + Literal('f'), Literal('o'), ZeroOrMore, Literal('o'), + ]), SLASHLIT); + + ext!(extract_ext1, "**/*.rs", Some(s(".rs"))); + ext!(extract_ext2, "**/*.rs.bak", None); + ext!(extract_ext3, "*.rs", Some(s(".rs"))); + ext!(extract_ext4, "a*.rs", None); + ext!(extract_ext5, "/*.c", None); + ext!(extract_ext6, "*.c", None, SLASHLIT); + ext!(extract_ext7, "*.c", Some(s(".c"))); + + required_ext!(extract_req_ext1, "*.rs", Some(s(".rs"))); + required_ext!(extract_req_ext2, "/foo/bar/*.rs", Some(s(".rs"))); + required_ext!(extract_req_ext3, "/foo/bar/*.rs", Some(s(".rs"))); + required_ext!(extract_req_ext4, "/foo/bar/.rs", Some(s(".rs"))); + required_ext!(extract_req_ext5, ".rs", Some(s(".rs"))); + required_ext!(extract_req_ext6, "./rs", None); + required_ext!(extract_req_ext7, "foo", None); + required_ext!(extract_req_ext8, ".foo/", None); + required_ext!(extract_req_ext9, "foo/", None); + + prefix!(extract_prefix1, "/foo", Some(s("/foo"))); + prefix!(extract_prefix2, "/foo/*", Some(s("/foo/"))); + prefix!(extract_prefix3, "**/foo", None); + prefix!(extract_prefix4, "foo/**", None); + + suffix!(extract_suffix1, "**/foo/bar", Some((s("/foo/bar"), true))); + suffix!(extract_suffix2, "*/foo/bar", Some((s("/foo/bar"), false))); + suffix!(extract_suffix3, "*/foo/bar", None, SLASHLIT); + suffix!(extract_suffix4, "foo/bar", Some((s("foo/bar"), false))); + suffix!(extract_suffix5, "*.foo", Some((s(".foo"), false))); + suffix!(extract_suffix6, "*.foo", None, SLASHLIT); + suffix!(extract_suffix7, "**/*_test", Some((s("_test"), false))); + + baseliteral!(extract_baselit1, "**/foo", Some(s("foo"))); + baseliteral!(extract_baselit2, "foo", None); + baseliteral!(extract_baselit3, "*foo", None); + baseliteral!(extract_baselit4, "*/foo", None); +} diff --git a/globset/src/lib.rs b/globset/src/lib.rs new file mode 100644 index 000000000..8d26e1875 --- /dev/null +++ b/globset/src/lib.rs @@ -0,0 +1,866 @@ +/*! +The globset crate provides cross platform single glob and glob set matching. + +Glob set matching is the process of matching one or more glob patterns against +a single candidate path simultaneously, and returning all of the globs that +matched. For example, given this set of globs: + +```ignore +*.rs +src/lib.rs +src/**/foo.rs +``` + +and a path `src/bar/baz/foo.rs`, then the set would report the first and third +globs as matching. + +# Example: one glob + +This example shows how to match a single glob against a single file path. 
+ +``` +# fn example() -> Result<(), globset::Error> { +use globset::Glob; + +let glob = Glob::new("*.rs")?.compile_matcher(); + +assert!(glob.is_match("foo.rs")); +assert!(glob.is_match("foo/bar.rs")); +assert!(!glob.is_match("Cargo.toml")); +# Ok(()) } example().unwrap(); +``` + +# Example: configuring a glob matcher + +This example shows how to use a `GlobBuilder` to configure aspects of match +semantics. In this example, we prevent wildcards from matching path separators. + +``` +# fn example() -> Result<(), globset::Error> { +use globset::GlobBuilder; + +let glob = GlobBuilder::new("*.rs") + .literal_separator(true).build()?.compile_matcher(); + +assert!(glob.is_match("foo.rs")); +assert!(!glob.is_match("foo/bar.rs")); // no longer matches +assert!(!glob.is_match("Cargo.toml")); +# Ok(()) } example().unwrap(); +``` + +# Example: match multiple globs at once + +This example shows how to match multiple glob patterns at once. + +``` +# fn example() -> Result<(), globset::Error> { +use globset::{Glob, GlobSetBuilder}; + +let mut builder = GlobSetBuilder::new(); +// A GlobBuilder can be used to configure each glob's match semantics +// independently. +builder.add(Glob::new("*.rs")?); +builder.add(Glob::new("src/lib.rs")?); +builder.add(Glob::new("src/**/foo.rs")?); +let set = builder.build()?; + +assert_eq!(set.matches("src/bar/baz/foo.rs"), vec![0, 2]); +# Ok(()) } example().unwrap(); +``` + +# Syntax + +Standard Unix-style glob syntax is supported: + +* `?` matches any single character. (If the `literal_separator` option is + enabled, then `?` can never match a path separator.) +* `*` matches zero or more characters. (If the `literal_separator` option is + enabled, then `*` can never match a path separator.) +* `**` recursively matches directories but are only legal in three situations. + First, if the glob starts with \*\*/, then it matches + all directories. For example, \*\*/foo matches `foo` + and `bar/foo` but not `foo/bar`. Secondly, if the glob ends with + /\*\*, then it matches all sub-entries. For example, + foo/\*\* matches `foo/a` and `foo/a/b`, but not `foo`. + Thirdly, if the glob contains /\*\*/ anywhere within + the pattern, then it matches zero or more directories. Using `**` anywhere + else is illegal (N.B. the glob `**` is allowed and means "match everything"). +* `{a,b}` matches `a` or `b` where `a` and `b` are arbitrary glob patterns. + (N.B. Nesting `{...}` is not currently allowed.) +* `[ab]` matches `a` or `b` where `a` and `b` are characters. Use + `[!ab]` to match any character except for `a` and `b`. +* Metacharacters such as `*` and `?` can be escaped with character class + notation. e.g., `[*]` matches `*`. +* When backslash escapes are enabled, a backslash (`\`) will escape all meta + characters in a glob. If it precedes a non-meta character, then the slash is + ignored. A `\\` will match a literal `\\`. Note that this mode is only + enabled on Unix platforms by default, but can be enabled on any platform + via the `backslash_escape` setting on `Glob`. + +A `GlobBuilder` can be used to prevent wildcards from matching path separators, +or to enable case insensitive matching. 
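The alternation and character-class rules listed above are not covered by the examples earlier in this module documentation; the sketch below illustrates them using only patterns that this crate's own test suite exercises (the `matchalt`, `matchrange` and `matchescape` cases further down in this patch):

```
use globset::Glob;

// `{a,b}` alternation: any of the listed sub-patterns may match.
let m = Glob::new("{*.foo,*.bar,*.wat}").unwrap().compile_matcher();
assert!(m.is_match("test.foo"));
assert!(m.is_match("test.bar"));
assert!(!m.is_match("test.baz"));

// Character classes, with and without negation.
let digits = Glob::new("a[0-9]b").unwrap().compile_matcher();
assert!(digits.is_match("a0b"));
let not_digits = Glob::new("a[!0-9]b").unwrap().compile_matcher();
assert!(!not_digits.is_match("a0b"));

// Escaping a metacharacter with class notation: `[*]` matches a literal `*`.
let star = Glob::new("[*]").unwrap().compile_matcher();
assert!(star.is_match("*"));
```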
+*/ + +#![deny(missing_docs)] + +extern crate aho_corasick; +extern crate fnv; +#[macro_use] +extern crate log; +extern crate memchr; +extern crate regex; + +use std::borrow::Cow; +use std::collections::{BTreeMap, HashMap}; +use std::error::Error as StdError; +use std::ffi::OsStr; +use std::fmt; +use std::hash; +use std::path::Path; +use std::str; + +use aho_corasick::{Automaton, AcAutomaton, FullAcAutomaton}; +use regex::bytes::{Regex, RegexBuilder, RegexSet}; + +use pathutil::{ + file_name, file_name_ext, normalize_path, os_str_bytes, path_bytes, +}; +use glob::MatchStrategy; +pub use glob::{Glob, GlobBuilder, GlobMatcher}; + +mod glob; +mod pathutil; + +/// Represents an error that can occur when parsing a glob pattern. +#[derive(Clone, Debug, Eq, PartialEq)] +pub struct Error { + /// The original glob provided by the caller. + glob: Option, + /// The kind of error. + kind: ErrorKind, +} + +/// The kind of error that can occur when parsing a glob pattern. +#[derive(Clone, Debug, Eq, PartialEq)] +pub enum ErrorKind { + /// Occurs when a use of `**` is invalid. Namely, `**` can only appear + /// adjacent to a path separator, or the beginning/end of a glob. + InvalidRecursive, + /// Occurs when a character class (e.g., `[abc]`) is not closed. + UnclosedClass, + /// Occurs when a range in a character (e.g., `[a-z]`) is invalid. For + /// example, if the range starts with a lexicographically larger character + /// than it ends with. + InvalidRange(char, char), + /// Occurs when a `}` is found without a matching `{`. + UnopenedAlternates, + /// Occurs when a `{` is found without a matching `}`. + UnclosedAlternates, + /// Occurs when an alternating group is nested inside another alternating + /// group, e.g., `{{a,b},{c,d}}`. + NestedAlternates, + /// Occurs when an unescaped '\' is found at the end of a glob. + DanglingEscape, + /// An error associated with parsing or compiling a regex. + Regex(String), + /// Hints that destructuring should not be exhaustive. + /// + /// This enum may grow additional variants, so this makes sure clients + /// don't count on exhaustive matching. (Otherwise, adding a new variant + /// could break existing code.) + #[doc(hidden)] + __Nonexhaustive, +} + +impl StdError for Error { + fn description(&self) -> &str { + self.kind.description() + } +} + +impl Error { + /// Return the glob that caused this error, if one exists. + pub fn glob(&self) -> Option<&str> { + self.glob.as_ref().map(|s| &**s) + } + + /// Return the kind of this error. 
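As a quick illustration of how these kinds surface to callers of `kind()`, the following sketch is assembled from the `syntaxerr!` test cases found later in this patch:

```
use globset::{ErrorKind, Glob};

// `**` must form its own path component, so `a**` is rejected.
let err = Glob::new("a**").unwrap_err();
assert_eq!(&ErrorKind::InvalidRecursive, err.kind());

// An unterminated character class is reported as `UnclosedClass`.
let err = Glob::new("[a-z").unwrap_err();
assert_eq!(&ErrorKind::UnclosedClass, err.kind());
```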
+ pub fn kind(&self) -> &ErrorKind { + &self.kind + } +} + +impl ErrorKind { + fn description(&self) -> &str { + match *self { + ErrorKind::InvalidRecursive => { + "invalid use of **; must be one path component" + } + ErrorKind::UnclosedClass => { + "unclosed character class; missing ']'" + } + ErrorKind::InvalidRange(_, _) => { + "invalid character range" + } + ErrorKind::UnopenedAlternates => { + "unopened alternate group; missing '{' \ + (maybe escape '}' with '[}]'?)" + } + ErrorKind::UnclosedAlternates => { + "unclosed alternate group; missing '}' \ + (maybe escape '{' with '[{]'?)" + } + ErrorKind::NestedAlternates => { + "nested alternate groups are not allowed" + } + ErrorKind::DanglingEscape => { + "dangling '\\'" + } + ErrorKind::Regex(ref err) => err, + ErrorKind::__Nonexhaustive => unreachable!(), + } + } +} + +impl fmt::Display for Error { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self.glob { + None => self.kind.fmt(f), + Some(ref glob) => { + write!(f, "error parsing glob '{}': {}", glob, self.kind) + } + } + } +} + +impl fmt::Display for ErrorKind { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match *self { + ErrorKind::InvalidRecursive + | ErrorKind::UnclosedClass + | ErrorKind::UnopenedAlternates + | ErrorKind::UnclosedAlternates + | ErrorKind::NestedAlternates + | ErrorKind::DanglingEscape + | ErrorKind::Regex(_) => { + write!(f, "{}", self.description()) + } + ErrorKind::InvalidRange(s, e) => { + write!(f, "invalid range; '{}' > '{}'", s, e) + } + ErrorKind::__Nonexhaustive => unreachable!(), + } + } +} + +fn new_regex(pat: &str) -> Result { + RegexBuilder::new(pat) + .dot_matches_new_line(true) + .size_limit(10 * (1 << 20)) + .dfa_size_limit(10 * (1 << 20)) + .build() + .map_err(|err| { + Error { + glob: Some(pat.to_string()), + kind: ErrorKind::Regex(err.to_string()), + } + }) +} + +fn new_regex_set(pats: I) -> Result + where S: AsRef, I: IntoIterator { + RegexSet::new(pats).map_err(|err| { + Error { + glob: None, + kind: ErrorKind::Regex(err.to_string()), + } + }) +} + +type Fnv = hash::BuildHasherDefault; + +/// GlobSet represents a group of globs that can be matched together in a +/// single pass. +#[derive(Clone, Debug)] +pub struct GlobSet { + len: usize, + strats: Vec, +} + +impl GlobSet { + /// Create an empty `GlobSet`. An empty set matches nothing. + pub fn empty() -> GlobSet { + GlobSet { + len: 0, + strats: vec![], + } + } + + /// Returns true if this set is empty, and therefore matches nothing. + pub fn is_empty(&self) -> bool { + self.len == 0 + } + + /// Returns the number of globs in this set. + pub fn len(&self) -> usize { + self.len + } + + /// Returns true if any glob in this set matches the path given. + pub fn is_match>(&self, path: P) -> bool { + self.is_match_candidate(&Candidate::new(path.as_ref())) + } + + /// Returns true if any glob in this set matches the path given. + /// + /// This takes a Candidate as input, which can be used to amortize the + /// cost of preparing a path for matching. + pub fn is_match_candidate(&self, path: &Candidate) -> bool { + if self.is_empty() { + return false; + } + for strat in &self.strats { + if strat.is_match(path) { + return true; + } + } + false + } + + /// Returns the sequence number of every glob pattern that matches the + /// given path. + pub fn matches>(&self, path: P) -> Vec { + self.matches_candidate(&Candidate::new(path.as_ref())) + } + + /// Returns the sequence number of every glob pattern that matches the + /// given path. 
+ /// + /// This takes a Candidate as input, which can be used to amortize the + /// cost of preparing a path for matching. + pub fn matches_candidate(&self, path: &Candidate) -> Vec { + let mut into = vec![]; + if self.is_empty() { + return into; + } + self.matches_candidate_into(path, &mut into); + into + } + + /// Adds the sequence number of every glob pattern that matches the given + /// path to the vec given. + /// + /// `into` is is cleared before matching begins, and contains the set of + /// sequence numbers (in ascending order) after matching ends. If no globs + /// were matched, then `into` will be empty. + pub fn matches_into>( + &self, + path: P, + into: &mut Vec, + ) { + self.matches_candidate_into(&Candidate::new(path.as_ref()), into); + } + + /// Adds the sequence number of every glob pattern that matches the given + /// path to the vec given. + /// + /// `into` is is cleared before matching begins, and contains the set of + /// sequence numbers (in ascending order) after matching ends. If no globs + /// were matched, then `into` will be empty. + /// + /// This takes a Candidate as input, which can be used to amortize the + /// cost of preparing a path for matching. + pub fn matches_candidate_into( + &self, + path: &Candidate, + into: &mut Vec, + ) { + into.clear(); + if self.is_empty() { + return; + } + for strat in &self.strats { + strat.matches_into(path, into); + } + into.sort(); + into.dedup(); + } + + fn new(pats: &[Glob]) -> Result { + if pats.is_empty() { + return Ok(GlobSet { len: 0, strats: vec![] }); + } + let mut lits = LiteralStrategy::new(); + let mut base_lits = BasenameLiteralStrategy::new(); + let mut exts = ExtensionStrategy::new(); + let mut prefixes = MultiStrategyBuilder::new(); + let mut suffixes = MultiStrategyBuilder::new(); + let mut required_exts = RequiredExtensionStrategyBuilder::new(); + let mut regexes = MultiStrategyBuilder::new(); + for (i, p) in pats.iter().enumerate() { + match MatchStrategy::new(p) { + MatchStrategy::Literal(lit) => { + lits.add(i, lit); + } + MatchStrategy::BasenameLiteral(lit) => { + base_lits.add(i, lit); + } + MatchStrategy::Extension(ext) => { + exts.add(i, ext); + } + MatchStrategy::Prefix(prefix) => { + prefixes.add(i, prefix); + } + MatchStrategy::Suffix { suffix, component } => { + if component { + lits.add(i, suffix[1..].to_string()); + } + suffixes.add(i, suffix); + } + MatchStrategy::RequiredExtension(ext) => { + required_exts.add(i, ext, p.regex().to_owned()); + } + MatchStrategy::Regex => { + debug!("glob converted to regex: {:?}", p); + regexes.add(i, p.regex().to_owned()); + } + } + } + debug!("built glob set; {} literals, {} basenames, {} extensions, \ + {} prefixes, {} suffixes, {} required extensions, {} regexes", + lits.0.len(), base_lits.0.len(), exts.0.len(), + prefixes.literals.len(), suffixes.literals.len(), + required_exts.0.len(), regexes.literals.len()); + Ok(GlobSet { + len: pats.len(), + strats: vec![ + GlobSetMatchStrategy::Extension(exts), + GlobSetMatchStrategy::BasenameLiteral(base_lits), + GlobSetMatchStrategy::Literal(lits), + GlobSetMatchStrategy::Suffix(suffixes.suffix()), + GlobSetMatchStrategy::Prefix(prefixes.prefix()), + GlobSetMatchStrategy::RequiredExtension( + required_exts.build()?), + GlobSetMatchStrategy::Regex(regexes.regex_set()?), + ], + }) + } +} + +/// GlobSetBuilder builds a group of patterns that can be used to +/// simultaneously match a file path. 
+#[derive(Clone, Debug)] +pub struct GlobSetBuilder { + pats: Vec, +} + +impl GlobSetBuilder { + /// Create a new GlobSetBuilder. A GlobSetBuilder can be used to add new + /// patterns. Once all patterns have been added, `build` should be called + /// to produce a `GlobSet`, which can then be used for matching. + pub fn new() -> GlobSetBuilder { + GlobSetBuilder { pats: vec![] } + } + + /// Builds a new matcher from all of the glob patterns added so far. + /// + /// Once a matcher is built, no new patterns can be added to it. + pub fn build(&self) -> Result { + GlobSet::new(&self.pats) + } + + /// Add a new pattern to this set. + pub fn add(&mut self, pat: Glob) -> &mut GlobSetBuilder { + self.pats.push(pat); + self + } +} + +/// A candidate path for matching. +/// +/// All glob matching in this crate operates on `Candidate` values. +/// Constructing candidates has a very small cost associated with it, so +/// callers may find it beneficial to amortize that cost when matching a single +/// path against multiple globs or sets of globs. +#[derive(Clone, Debug)] +pub struct Candidate<'a> { + path: Cow<'a, [u8]>, + basename: Cow<'a, [u8]>, + ext: Cow<'a, [u8]>, +} + +impl<'a> Candidate<'a> { + /// Create a new candidate for matching from the given path. + pub fn new + ?Sized>(path: &'a P) -> Candidate<'a> { + let path = path.as_ref(); + let basename = file_name(path).unwrap_or(OsStr::new("")); + Candidate { + path: normalize_path(path_bytes(path)), + basename: os_str_bytes(basename), + ext: file_name_ext(basename).unwrap_or(Cow::Borrowed(b"")), + } + } + + fn path_prefix(&self, max: usize) -> &[u8] { + if self.path.len() <= max { + &*self.path + } else { + &self.path[..max] + } + } + + fn path_suffix(&self, max: usize) -> &[u8] { + if self.path.len() <= max { + &*self.path + } else { + &self.path[self.path.len() - max..] 
+ } + } +} + +#[derive(Clone, Debug)] +enum GlobSetMatchStrategy { + Literal(LiteralStrategy), + BasenameLiteral(BasenameLiteralStrategy), + Extension(ExtensionStrategy), + Prefix(PrefixStrategy), + Suffix(SuffixStrategy), + RequiredExtension(RequiredExtensionStrategy), + Regex(RegexSetStrategy), +} + +impl GlobSetMatchStrategy { + fn is_match(&self, candidate: &Candidate) -> bool { + use self::GlobSetMatchStrategy::*; + match *self { + Literal(ref s) => s.is_match(candidate), + BasenameLiteral(ref s) => s.is_match(candidate), + Extension(ref s) => s.is_match(candidate), + Prefix(ref s) => s.is_match(candidate), + Suffix(ref s) => s.is_match(candidate), + RequiredExtension(ref s) => s.is_match(candidate), + Regex(ref s) => s.is_match(candidate), + } + } + + fn matches_into(&self, candidate: &Candidate, matches: &mut Vec) { + use self::GlobSetMatchStrategy::*; + match *self { + Literal(ref s) => s.matches_into(candidate, matches), + BasenameLiteral(ref s) => s.matches_into(candidate, matches), + Extension(ref s) => s.matches_into(candidate, matches), + Prefix(ref s) => s.matches_into(candidate, matches), + Suffix(ref s) => s.matches_into(candidate, matches), + RequiredExtension(ref s) => s.matches_into(candidate, matches), + Regex(ref s) => s.matches_into(candidate, matches), + } + } +} + +#[derive(Clone, Debug)] +struct LiteralStrategy(BTreeMap, Vec>); + +impl LiteralStrategy { + fn new() -> LiteralStrategy { + LiteralStrategy(BTreeMap::new()) + } + + fn add(&mut self, global_index: usize, lit: String) { + self.0.entry(lit.into_bytes()).or_insert(vec![]).push(global_index); + } + + fn is_match(&self, candidate: &Candidate) -> bool { + self.0.contains_key(&*candidate.path) + } + + #[inline(never)] + fn matches_into(&self, candidate: &Candidate, matches: &mut Vec) { + if let Some(hits) = self.0.get(&*candidate.path) { + matches.extend(hits); + } + } +} + +#[derive(Clone, Debug)] +struct BasenameLiteralStrategy(BTreeMap, Vec>); + +impl BasenameLiteralStrategy { + fn new() -> BasenameLiteralStrategy { + BasenameLiteralStrategy(BTreeMap::new()) + } + + fn add(&mut self, global_index: usize, lit: String) { + self.0.entry(lit.into_bytes()).or_insert(vec![]).push(global_index); + } + + fn is_match(&self, candidate: &Candidate) -> bool { + if candidate.basename.is_empty() { + return false; + } + self.0.contains_key(&*candidate.basename) + } + + #[inline(never)] + fn matches_into(&self, candidate: &Candidate, matches: &mut Vec) { + if candidate.basename.is_empty() { + return; + } + if let Some(hits) = self.0.get(&*candidate.basename) { + matches.extend(hits); + } + } +} + +#[derive(Clone, Debug)] +struct ExtensionStrategy(HashMap, Vec, Fnv>); + +impl ExtensionStrategy { + fn new() -> ExtensionStrategy { + ExtensionStrategy(HashMap::with_hasher(Fnv::default())) + } + + fn add(&mut self, global_index: usize, ext: String) { + self.0.entry(ext.into_bytes()).or_insert(vec![]).push(global_index); + } + + fn is_match(&self, candidate: &Candidate) -> bool { + if candidate.ext.is_empty() { + return false; + } + self.0.contains_key(&*candidate.ext) + } + + #[inline(never)] + fn matches_into(&self, candidate: &Candidate, matches: &mut Vec) { + if candidate.ext.is_empty() { + return; + } + if let Some(hits) = self.0.get(&*candidate.ext) { + matches.extend(hits); + } + } +} + +#[derive(Clone, Debug)] +struct PrefixStrategy { + matcher: FullAcAutomaton>, + map: Vec, + longest: usize, +} + +impl PrefixStrategy { + fn is_match(&self, candidate: &Candidate) -> bool { + let path = candidate.path_prefix(self.longest); 
+ for m in self.matcher.find_overlapping(path) { + if m.start == 0 { + return true; + } + } + false + } + + fn matches_into(&self, candidate: &Candidate, matches: &mut Vec) { + let path = candidate.path_prefix(self.longest); + for m in self.matcher.find_overlapping(path) { + if m.start == 0 { + matches.push(self.map[m.pati]); + } + } + } +} + +#[derive(Clone, Debug)] +struct SuffixStrategy { + matcher: FullAcAutomaton>, + map: Vec, + longest: usize, +} + +impl SuffixStrategy { + fn is_match(&self, candidate: &Candidate) -> bool { + let path = candidate.path_suffix(self.longest); + for m in self.matcher.find_overlapping(path) { + if m.end == path.len() { + return true; + } + } + false + } + + fn matches_into(&self, candidate: &Candidate, matches: &mut Vec) { + let path = candidate.path_suffix(self.longest); + for m in self.matcher.find_overlapping(path) { + if m.end == path.len() { + matches.push(self.map[m.pati]); + } + } + } +} + +#[derive(Clone, Debug)] +struct RequiredExtensionStrategy(HashMap, Vec<(usize, Regex)>, Fnv>); + +impl RequiredExtensionStrategy { + fn is_match(&self, candidate: &Candidate) -> bool { + if candidate.ext.is_empty() { + return false; + } + match self.0.get(&*candidate.ext) { + None => false, + Some(regexes) => { + for &(_, ref re) in regexes { + if re.is_match(&*candidate.path) { + return true; + } + } + false + } + } + } + + #[inline(never)] + fn matches_into(&self, candidate: &Candidate, matches: &mut Vec) { + if candidate.ext.is_empty() { + return; + } + if let Some(regexes) = self.0.get(&*candidate.ext) { + for &(global_index, ref re) in regexes { + if re.is_match(&*candidate.path) { + matches.push(global_index); + } + } + } + } +} + +#[derive(Clone, Debug)] +struct RegexSetStrategy { + matcher: RegexSet, + map: Vec, +} + +impl RegexSetStrategy { + fn is_match(&self, candidate: &Candidate) -> bool { + self.matcher.is_match(&*candidate.path) + } + + fn matches_into(&self, candidate: &Candidate, matches: &mut Vec) { + for i in self.matcher.matches(&*candidate.path) { + matches.push(self.map[i]); + } + } +} + +#[derive(Clone, Debug)] +struct MultiStrategyBuilder { + literals: Vec, + map: Vec, + longest: usize, +} + +impl MultiStrategyBuilder { + fn new() -> MultiStrategyBuilder { + MultiStrategyBuilder { + literals: vec![], + map: vec![], + longest: 0, + } + } + + fn add(&mut self, global_index: usize, literal: String) { + if literal.len() > self.longest { + self.longest = literal.len(); + } + self.map.push(global_index); + self.literals.push(literal); + } + + fn prefix(self) -> PrefixStrategy { + let it = self.literals.into_iter().map(|s| s.into_bytes()); + PrefixStrategy { + matcher: AcAutomaton::new(it).into_full(), + map: self.map, + longest: self.longest, + } + } + + fn suffix(self) -> SuffixStrategy { + let it = self.literals.into_iter().map(|s| s.into_bytes()); + SuffixStrategy { + matcher: AcAutomaton::new(it).into_full(), + map: self.map, + longest: self.longest, + } + } + + fn regex_set(self) -> Result { + Ok(RegexSetStrategy { + matcher: new_regex_set(self.literals)?, + map: self.map, + }) + } +} + +#[derive(Clone, Debug)] +struct RequiredExtensionStrategyBuilder( + HashMap, Vec<(usize, String)>>, +); + +impl RequiredExtensionStrategyBuilder { + fn new() -> RequiredExtensionStrategyBuilder { + RequiredExtensionStrategyBuilder(HashMap::new()) + } + + fn add(&mut self, global_index: usize, ext: String, regex: String) { + self.0 + .entry(ext.into_bytes()) + .or_insert(vec![]) + .push((global_index, regex)); + } + + fn build(self) -> Result { + let mut 
exts = HashMap::with_hasher(Fnv::default()); + for (ext, regexes) in self.0.into_iter() { + exts.insert(ext.clone(), vec![]); + for (global_index, regex) in regexes { + let compiled = new_regex(®ex)?; + exts.get_mut(&ext).unwrap().push((global_index, compiled)); + } + } + Ok(RequiredExtensionStrategy(exts)) + } +} + +#[cfg(test)] +mod tests { + use super::GlobSetBuilder; + use glob::Glob; + + #[test] + fn set_works() { + let mut builder = GlobSetBuilder::new(); + builder.add(Glob::new("src/**/*.rs").unwrap()); + builder.add(Glob::new("*.c").unwrap()); + builder.add(Glob::new("src/lib.rs").unwrap()); + let set = builder.build().unwrap(); + + assert!(set.is_match("foo.c")); + assert!(set.is_match("src/foo.c")); + assert!(!set.is_match("foo.rs")); + assert!(!set.is_match("tests/foo.rs")); + assert!(set.is_match("src/foo.rs")); + assert!(set.is_match("src/grep/src/main.rs")); + + let matches = set.matches("src/lib.rs"); + assert_eq!(2, matches.len()); + assert_eq!(0, matches[0]); + assert_eq!(2, matches[1]); + } + + #[test] + fn empty_set_works() { + let set = GlobSetBuilder::new().build().unwrap(); + assert!(!set.is_match("")); + assert!(!set.is_match("a")); + } +} diff --git a/globset/src/pathutil.rs b/globset/src/pathutil.rs new file mode 100644 index 000000000..4b808e86d --- /dev/null +++ b/globset/src/pathutil.rs @@ -0,0 +1,172 @@ +use std::borrow::Cow; +use std::ffi::OsStr; +use std::path::Path; + +/// The final component of the path, if it is a normal file. +/// +/// If the path terminates in ., .., or consists solely of a root of prefix, +/// file_name will return None. +#[cfg(unix)] +pub fn file_name<'a, P: AsRef + ?Sized>( + path: &'a P, +) -> Option<&'a OsStr> { + use std::os::unix::ffi::OsStrExt; + use memchr::memrchr; + + let path = path.as_ref().as_os_str().as_bytes(); + if path.is_empty() { + return None; + } else if path.len() == 1 && path[0] == b'.' { + return None; + } else if path.last() == Some(&b'.') { + return None; + } else if path.len() >= 2 && &path[path.len() - 2..] == &b".."[..] { + return None; + } + let last_slash = memrchr(b'/', path).map(|i| i + 1).unwrap_or(0); + Some(OsStr::from_bytes(&path[last_slash..])) +} + +/// The final component of the path, if it is a normal file. +/// +/// If the path terminates in ., .., or consists solely of a root of prefix, +/// file_name will return None. +#[cfg(not(unix))] +pub fn file_name<'a, P: AsRef + ?Sized>( + path: &'a P, +) -> Option<&'a OsStr> { + path.as_ref().file_name() +} + +/// Return a file extension given a path's file name. +/// +/// Note that this does NOT match the semantics of std::path::Path::extension. +/// Namely, the extension includes the `.` and matching is otherwise more +/// liberal. Specifically, the extenion is: +/// +/// * None, if the file name given is empty; +/// * None, if there is no embedded `.`; +/// * Otherwise, the portion of the file name starting with the final `.`. +/// +/// e.g., A file name of `.rs` has an extension `.rs`. +/// +/// N.B. This is done to make certain glob match optimizations easier. Namely, +/// a pattern like `*.rs` is obviously trying to match files with a `rs` +/// extension, but it also matches files like `.rs`, which doesn't have an +/// extension according to std::path::Path::extension. 
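The non-standard extension semantics described above are easiest to see on concrete inputs. This sketch mirrors the `ext!` tests at the bottom of this file (note that `file_name_ext` is private, so these calls only compile from inside this module):

```
use std::borrow::Cow;
use std::ffi::OsStr;

// Unlike std::path::Path::extension, the leading `.` is kept and a bare
// dotfile such as `.rs` is considered to have the extension `.rs`.
assert_eq!(Some(Cow::Borrowed(&b".rs"[..])), file_name_ext(OsStr::new("foo.rs")));
assert_eq!(Some(Cow::Borrowed(&b".rs"[..])), file_name_ext(OsStr::new(".rs")));
assert_eq!(None::<Cow<[u8]>>, file_name_ext(OsStr::new("foo")));
```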
+pub fn file_name_ext(name: &OsStr) -> Option> { + if name.is_empty() { + return None; + } + let name = os_str_bytes(name); + let last_dot_at = { + let result = name + .iter().enumerate().rev() + .find(|&(_, &b)| b == b'.') + .map(|(i, _)| i); + match result { + None => return None, + Some(i) => i, + } + }; + Some(match name { + Cow::Borrowed(name) => Cow::Borrowed(&name[last_dot_at..]), + Cow::Owned(mut name) => { + name.drain(..last_dot_at); + Cow::Owned(name) + } + }) +} + +/// Return raw bytes of a path, transcoded to UTF-8 if necessary. +pub fn path_bytes(path: &Path) -> Cow<[u8]> { + os_str_bytes(path.as_os_str()) +} + +/// Return the raw bytes of the given OS string, possibly transcoded to UTF-8. +#[cfg(unix)] +pub fn os_str_bytes(s: &OsStr) -> Cow<[u8]> { + use std::os::unix::ffi::OsStrExt; + Cow::Borrowed(s.as_bytes()) +} + +/// Return the raw bytes of the given OS string, possibly transcoded to UTF-8. +#[cfg(not(unix))] +pub fn os_str_bytes(s: &OsStr) -> Cow<[u8]> { + // TODO(burntsushi): On Windows, OS strings are WTF-8, which is a superset + // of UTF-8, so even if we could get at the raw bytes, they wouldn't + // be useful. We *must* convert to UTF-8 before doing path matching. + // Unfortunate, but necessary. + match s.to_string_lossy() { + Cow::Owned(s) => Cow::Owned(s.into_bytes()), + Cow::Borrowed(s) => Cow::Borrowed(s.as_bytes()), + } +} + +/// Normalizes a path to use `/` as a separator everywhere, even on platforms +/// that recognize other characters as separators. +#[cfg(unix)] +pub fn normalize_path(path: Cow<[u8]>) -> Cow<[u8]> { + // UNIX only uses /, so we're good. + path +} + +/// Normalizes a path to use `/` as a separator everywhere, even on platforms +/// that recognize other characters as separators. +#[cfg(not(unix))] +pub fn normalize_path(mut path: Cow<[u8]>) -> Cow<[u8]> { + use std::path::is_separator; + + for i in 0..path.len() { + if path[i] == b'/' || !is_separator(path[i] as char) { + continue; + } + path.to_mut()[i] = b'/'; + } + path +} + +#[cfg(test)] +mod tests { + use std::borrow::Cow; + use std::ffi::OsStr; + + use super::{file_name_ext, normalize_path}; + + macro_rules! ext { + ($name:ident, $file_name:expr, $ext:expr) => { + #[test] + fn $name() { + let got = file_name_ext(OsStr::new($file_name)); + assert_eq!($ext.map(|s| Cow::Borrowed(s.as_bytes())), got); + } + }; + } + + ext!(ext1, "foo.rs", Some(".rs")); + ext!(ext2, ".rs", Some(".rs")); + ext!(ext3, "..rs", Some(".rs")); + ext!(ext4, "", None::<&str>); + ext!(ext5, "foo", None::<&str>); + + macro_rules! 
normalize { + ($name:ident, $path:expr, $expected:expr) => { + #[test] + fn $name() { + let got = normalize_path(Cow::Owned($path.to_vec())); + assert_eq!($expected.to_vec(), got.into_owned()); + } + }; + } + + normalize!(normal1, b"foo", b"foo"); + normalize!(normal2, b"foo/bar", b"foo/bar"); + #[cfg(unix)] + normalize!(normal3, b"foo\\bar", b"foo\\bar"); + #[cfg(not(unix))] + normalize!(normal3, b"foo\\bar", b"foo/bar"); + #[cfg(unix)] + normalize!(normal4, b"foo\\bar/baz", b"foo\\bar/baz"); + #[cfg(not(unix))] + normalize!(normal4, b"foo\\bar/baz", b"foo/bar/baz"); +} diff --git a/hex/.cargo-checksum.json b/hex/.cargo-checksum.json new file mode 100644 index 000000000..88fd6de0d --- /dev/null +++ b/hex/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{},"package":"805026a5d0141ffc30abb3be3173848ad46a1b1664fe632428479619a3644d77"} \ No newline at end of file diff --git a/hex/Cargo.toml b/hex/Cargo.toml new file mode 100644 index 000000000..0b19db4b2 --- /dev/null +++ b/hex/Cargo.toml @@ -0,0 +1,23 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "hex" +version = "0.3.2" +authors = ["KokaKiwi "] +description = "Encoding and decoding data into/from hexadecimal representation." +documentation = "https://docs.rs/hex/" +license = "MIT OR Apache-2.0" +repository = "https://github.com/KokaKiwi/rust-hex" + +[features] +benchmarks = [] diff --git a/hex/Dockerfile b/hex/Dockerfile new file mode 100644 index 000000000..63970aa41 --- /dev/null +++ b/hex/Dockerfile @@ -0,0 +1 @@ +FROM rust:latest diff --git a/hex/LICENSE-APACHE b/hex/LICENSE-APACHE new file mode 100644 index 000000000..8f71f43fe --- /dev/null +++ b/hex/LICENSE-APACHE @@ -0,0 +1,202 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + diff --git a/hex/LICENSE-MIT b/hex/LICENSE-MIT new file mode 100644 index 000000000..d4568f5ca --- /dev/null +++ b/hex/LICENSE-MIT @@ -0,0 +1,20 @@ +Copyright (c) 2013-2014 The Rust Project Developers. +Copyright (c) 2015-2016 The rust-hex Developers + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/hex/README.md b/hex/README.md
new file mode 100644
index 000000000..b86e2b551
--- /dev/null
+++ b/hex/README.md
@@ -0,0 +1,17 @@
+Documentation: https://docs.rs/hex
+
+## License
+
+Licensed under either of
+
+ * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)
+ * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)
+
+at your option.
+
+### Contribution
+
+Unless you explicitly state otherwise, any contribution intentionally
+submitted for inclusion in the work by you, as defined in the Apache-2.0
+license, shall be dual licensed as above, without any additional terms or
+conditions.
diff --git a/hex/src/lib.rs b/hex/src/lib.rs
new file mode 100644
index 000000000..3b51e242e
--- /dev/null
+++ b/hex/src/lib.rs
@@ -0,0 +1,406 @@
+#![cfg_attr(feature = "benchmarks", feature(test))]
+
+// Copyright (c) 2013-2014 The Rust Project Developers.
+// Copyright (c) 2015-2018 The rust-hex Developers.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+//! Encoding and decoding hex strings.
+//!
+//! For most cases, you can simply use the `decode()`, `encode()` and
+//! `encode_upper()` functions. If you need a bit more control, use the traits
+//! `ToHex` and `FromHex` instead.
+//!
+//! # Example
+//!
+//! ```
+//! extern crate hex;
+//!
+//! fn main() {
+//!     let hex_string = hex::encode("Hello world!");
+//!     println!("{}", hex_string); // Prints '48656c6c6f20776f726c6421'
+//! }
+//! ```
+
+use std::error;
+use std::fmt;
+
+/// Encoding values as hex string.
+///
+/// This trait is implemented for all `T` which implement `AsRef<[u8]>`. This
+/// includes `String`, `str`, `Vec<u8>` and `[u8]`.
+///
+/// # Example
+///
+/// ```
+/// use hex::ToHex;
+///
+/// let mut s = String::new();
+/// "Hello world!".write_hex(&mut s).unwrap();
+/// println!("{}", s);
+/// ```
+///
+/// *Note*: instead of using this trait, you might want to use `encode()`.
+pub trait ToHex {
+    /// Writes the hex string representing `self` into `w`. Lower case letters
+    /// are used (e.g. `f9b4ca`).
+    fn write_hex<W: fmt::Write>(&self, w: &mut W) -> fmt::Result;
+
+    /// Writes the hex string representing `self` into `w`. Upper case letters
+    /// are used (e.g. `F9B4CA`).
+    fn write_hex_upper<W: fmt::Write>(&self, w: &mut W) -> fmt::Result;
+}
+
+fn hex_write<W: fmt::Write>(table: &[u8; 16], src: &[u8], w: &mut W) -> fmt::Result {
+    let hex = |byte: u8| table[byte as usize];
+
+    for &b in src.iter() {
+        w.write_char(hex((b >> 4) & 0xf) as char)?;
+        w.write_char(hex(b & 0xf) as char)?;
+    }
+
+    Ok(())
+}
+
+impl<T: AsRef<[u8]>> ToHex for T {
+    fn write_hex<W: fmt::Write>(&self, w: &mut W) -> fmt::Result {
+        static CHARS: &'static [u8; 16] = b"0123456789abcdef";
+
+        hex_write(&CHARS, self.as_ref(), w)
+    }
+
+    fn write_hex_upper<W: fmt::Write>(&self, w: &mut W) -> fmt::Result {
+        static CHARS: &'static [u8; 16] = b"0123456789ABCDEF";
+
+        hex_write(&CHARS, self.as_ref(), w)
+    }
+}
+
+/// The error type for decoding a hex string into `Vec<u8>` or `[u8; N]`.
+#[derive(Debug, Clone, Copy, PartialEq)]
+pub enum FromHexError {
+    /// An invalid character was found. Valid ones are: `0...9`, `a...f`
+    /// or `A...F`.
+    InvalidHexCharacter {
+        c: char,
+        index: usize,
+    },
+
+    /// A hex string's length needs to be even, as two digits correspond to
+    /// one byte.
+    OddLength,
+
+    /// If the hex string is decoded into a fixed sized container, such as an
+    /// array, the hex string's length * 2 has to match the container's
+    /// length.
+    InvalidStringLength,
+}
+
+impl error::Error for FromHexError {
+    fn description(&self) -> &str {
+        match *self {
+            FromHexError::InvalidHexCharacter { .. } => "invalid character",
+            FromHexError::OddLength => "odd number of digits",
+            FromHexError::InvalidStringLength => "invalid string length",
+
+        }
+    }
+}
+
+impl fmt::Display for FromHexError {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        match *self {
+            FromHexError::InvalidHexCharacter { c, index } =>
+                write!(f, "Invalid character '{}' at position {}", c, index),
+            FromHexError::OddLength =>
+                write!(f, "Odd number of digits"),
+            FromHexError::InvalidStringLength =>
+                write!(f, "Invalid string length"),
+        }
+    }
+}
+
+/// Types that can be decoded from a hex string.
+///
+/// This trait is implemented for `Vec<u8>` and small `u8`-arrays.
+///
+/// # Example
+///
+/// ```
+/// use hex::FromHex;
+///
+/// match Vec::from_hex("48656c6c6f20776f726c6421") {
+///     Ok(vec) => {
+///         for b in vec {
+///             println!("{}", b as char);
+///         }
+///     }
+///     Err(e) => {
+///         // Deal with the error ...
+///     }
+/// }
+/// ```
+pub trait FromHex: Sized {
+    type Error;
+
+    /// Creates an instance of type `Self` from the given hex string, or fails
+    /// with a custom error type.
+    ///
+    /// Both, upper and lower case characters are valid and can even be
+    /// mixed (e.g. `f9b4ca`, `F9B4CA` and `f9B4Ca` are all valid strings).
+    fn from_hex<T: AsRef<[u8]>>(hex: T) -> Result<Self, Self::Error>;
+}
+
+fn val(c: u8, idx: usize) -> Result<u8, FromHexError> {
+    match c {
+        b'A'...b'F' => Ok(c - b'A' + 10),
+        b'a'...b'f' => Ok(c - b'a' + 10),
+        b'0'...b'9' => Ok(c - b'0'),
+        _ => {
+            Err(FromHexError::InvalidHexCharacter {
+                c: c as char,
+                index: idx,
+            })
+        }
+    }
+}
+
+impl FromHex for Vec<u8> {
+    type Error = FromHexError;
+
+    fn from_hex<T: AsRef<[u8]>>(hex: T) -> Result<Self, Self::Error> {
+        let hex = hex.as_ref();
+        if hex.len() % 2 != 0 {
+            return Err(FromHexError::OddLength);
+        }
+
+        hex.chunks(2).enumerate().map(|(i, pair)| {
+            Ok(val(pair[0], 2 * i)? << 4 | val(pair[1], 2 * i + 1)?)
+        }).collect()
+    }
+}
+
+// Helper macro to implement the trait for a few fixed sized arrays. Once Rust
+// has type level integers, this should be removed.
+macro_rules!
from_hex_array_impl {
+    ($($len:expr)+) => {$(
+        impl FromHex for [u8; $len] {
+            type Error = FromHexError;
+
+            fn from_hex<T: AsRef<[u8]>>(hex: T) -> Result<Self, Self::Error> {
+                let hex = hex.as_ref();
+                if hex.len() % 2 != 0 {
+                    return Err(FromHexError::OddLength);
+                }
+                if hex.len() / 2 != $len {
+                    return Err(FromHexError::InvalidStringLength);
+                }
+
+                let mut out = [0; $len];
+                for (i, byte) in out.iter_mut().enumerate() {
+                    *byte = val(hex[2 * i], 2 * i)? << 4
+                        | val(hex[2 * i + 1], 2 * i + 1)?;
+                }
+
+                Ok(out)
+            }
+        }
+    )+}
+}
+
+from_hex_array_impl! {
+    1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
+    17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32
+    33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48
+    49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64
+    65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80
+    81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96
+    97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112
+    113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128
+    160 192 200 224 256 384 512 768 1024 2048 4096 8192 16384 32768
+}
+
+#[cfg(any(target_pointer_width = "32", target_pointer_width = "64"))]
+from_hex_array_impl! {
+    65536 131072 262144 524288 1048576 2097152 4194304 8388608
+    16777216 33554432 67108864 134217728 268435456 536870912
+    1073741824 2147483648
+}
+
+#[cfg(target_pointer_width = "64")]
+from_hex_array_impl! {
+    4294967296
+}
+
+/// Encodes `data` as hex string using lowercase characters.
+///
+/// Lowercase characters are used (e.g. `f9b4ca`). The resulting string's
+/// length is always even, each byte in `data` is always encoded using two hex
+/// digits. Thus, the resulting string contains exactly twice as many bytes as
+/// the input data.
+///
+/// # Example
+///
+/// ```
+/// assert_eq!(hex::encode("Hello world!"), "48656c6c6f20776f726c6421");
+/// assert_eq!(hex::encode(vec![1, 2, 3, 15, 16]), "0102030f10");
+/// ```
+pub fn encode<T: AsRef<[u8]>>(data: T) -> String {
+    let mut s = String::with_capacity(data.as_ref().len() * 2);
+
+    // Writing to a string never errors, so we can unwrap here.
+    data.write_hex(&mut s).unwrap();
+    s
+}
+
+/// Encodes `data` as hex string using uppercase characters.
+///
+/// Apart from the characters' casing, this works exactly like `encode()`.
+///
+/// # Example
+///
+/// ```
+/// assert_eq!(hex::encode_upper("Hello world!"), "48656C6C6F20776F726C6421");
+/// assert_eq!(hex::encode_upper(vec![1, 2, 3, 15, 16]), "0102030F10");
+/// ```
+pub fn encode_upper<T: AsRef<[u8]>>(data: T) -> String {
+    let mut s = String::with_capacity(data.as_ref().len() * 2);
+
+    // Writing to a string never errors, so we can unwrap here.
+    data.write_hex_upper(&mut s).unwrap();
+    s
+}
+
+/// Decodes a hex string into raw bytes.
+///
+/// Both, upper and lower case characters are valid in the input string and can
+/// even be mixed (e.g. `f9b4ca`, `F9B4CA` and `f9B4Ca` are all valid strings).
+///
+/// # Example
+/// ```
+/// assert_eq!(
+///     hex::decode("48656c6c6f20776f726c6421"),
+///     Ok("Hello world!".to_owned().into_bytes())
+/// );
+///
+/// assert_eq!(hex::decode("123"), Err(hex::FromHexError::OddLength));
+/// assert!(hex::decode("foo").is_err());
+/// ```
+pub fn decode<T: AsRef<[u8]>>(data: T) -> Result<Vec<u8>, FromHexError> {
+    FromHex::from_hex(data)
+}
+
+
+#[cfg(test)]
+mod test {
+    use super::{encode, decode, FromHex, FromHexError};
+
+    #[test]
+    fn test_encode() {
+        assert_eq!(encode("foobar"), "666f6f626172");
+    }
+
+    #[test]
+    fn test_decode() {
+        assert_eq!(decode("666f6f626172"), Ok("foobar".to_owned().into_bytes()));
+    }
+
+    #[test]
+    pub fn test_from_hex_okay_str() {
+        assert_eq!(
+            Vec::from_hex("666f6f626172").unwrap(),
+            b"foobar"
+        );
+        assert_eq!(
+            Vec::from_hex("666F6F626172").unwrap(),
+            b"foobar"
+        );
+    }
+
+    #[test]
+    pub fn test_from_hex_okay_bytes() {
+        assert_eq!(
+            Vec::from_hex(b"666f6f626172").unwrap(),
+            b"foobar"
+        );
+        assert_eq!(
+            Vec::from_hex(b"666F6F626172").unwrap(),
+            b"foobar"
+        );
+    }
+
+    #[test]
+    pub fn test_invalid_length() {
+        assert_eq!(
+            Vec::from_hex("1").unwrap_err(),
+            FromHexError::OddLength
+        );
+        assert_eq!(
+            Vec::from_hex("666f6f6261721").unwrap_err(),
+            FromHexError::OddLength
+        );
+    }
+
+    #[test]
+    pub fn test_invalid_char() {
+        assert_eq!(
+            Vec::from_hex("66ag").unwrap_err(),
+            FromHexError::InvalidHexCharacter {
+                c: 'g',
+                index: 3
+            }
+        );
+    }
+
+    #[test]
+    pub fn test_empty() {
+        assert_eq!(Vec::from_hex("").unwrap(), b"");
+    }
+
+    #[test]
+    pub fn test_from_hex_whitespace() {
+        assert_eq!(
+            Vec::from_hex("666f 6f62617").unwrap_err(),
+            FromHexError::InvalidHexCharacter {
+                c: ' ',
+                index: 4
+            }
+        );
+    }
+
+    #[test]
+    pub fn test_from_hex_array() {
+        assert_eq!(
+            <[u8; 6] as FromHex>::from_hex("666f6f626172"),
+            Ok([0x66, 0x6f, 0x6f, 0x62, 0x61, 0x72])
+        );
+
+        assert_eq!(
+            <[u8; 5] as FromHex>::from_hex("666f6f626172"),
+            Err(FromHexError::InvalidStringLength)
+        );
+    }
+}
+
+
+#[cfg(all(feature = "benchmarks", test))]
+mod bench {
+    extern crate test;
+    use self::test::Bencher;
+
+    use super::*;
+
+    const MY_OWN_SOURCE: &[u8] = include_bytes!("lib.rs");
+
+    #[bench]
+    fn a_bench(b: &mut Bencher) {
+        b.bytes = MY_OWN_SOURCE.len() as u64;
+
+        b.iter(|| {
+            encode(MY_OWN_SOURCE)
+        });
+    }
+}
diff --git a/home/.cargo-checksum.json b/home/.cargo-checksum.json
new file mode 100644
index 000000000..01b092d05
--- /dev/null
+++ b/home/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{},"package":"80dff82fb58cfbbc617fb9a9184b010be0529201553cda50ad04372bc2333aff"}
\ No newline at end of file
diff --git a/home/Cargo.toml b/home/Cargo.toml
new file mode 100644
index 000000000..f163b3ae7
--- /dev/null
+++ b/home/Cargo.toml
@@ -0,0 +1,26 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g. crates.io) dependencies
+#
+# If you believe there's an error in this file please file an
+# issue against the rust-lang/cargo repository.
If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "home" +version = "0.3.3" +authors = ["Brian Anderson "] +description = "Shared definitions of home directories" +documentation = "https://docs.rs/home" +license = "MIT/Apache-2.0" +repository = "https://github.com/brson/home" +[target."cfg(windows)".dependencies.scopeguard] +version = "0.3" + +[target."cfg(windows)".dependencies.winapi] +version = "0.3" +features = ["errhandlingapi", "handleapi", "processthreadsapi", "std", "winerror", "winnt", "userenv"] diff --git a/home/README.md b/home/README.md new file mode 100644 index 000000000..2ff22423e --- /dev/null +++ b/home/README.md @@ -0,0 +1,24 @@ +Canonical definitions of `home_dir`, `cargo_home`, and `rustup_home`. + +This provides the definition of `home_dir` used by Cargo and rustup, +as well functions to find the correct value of `CARGO_HOME` and +`RUSTUP_HOME`. + +The definition of `home_dir` provided by the standard library is +incorrect because it considers the `HOME` environment variable on +Windows. This causes surprising situations where a Rust program will +behave differently depending on whether it is run under a Unix +emulation environment like Cygwin or MinGW. Neither Cargo nor rustup +use the standard libraries definition - they use the definition here. + +This crate further provides two functions, `cargo_home` and +`rustup_home`, which are the canonical way to determine the location +that Cargo and rustup store their data. + +See [rust-lang/rust#43321]. + +[rust-lang/rust#43321]: https://github.com/rust-lang/rust/issues/43321 + +## License + +MIT/Apache-2.0 diff --git a/home/src/lib.rs b/home/src/lib.rs new file mode 100644 index 000000000..3880f7837 --- /dev/null +++ b/home/src/lib.rs @@ -0,0 +1,297 @@ +/// Canonical definitions of `home_dir`, `cargo_home`, and `rustup_home`. +/// +/// This provides the definition of `home_dir` used by Cargo and +/// rustup, as well functions to find the correct value of +/// `CARGO_HOME` and `RUSTUP_HOME`. +/// +/// The definition of `home_dir` provided by the standard library is +/// incorrect because it considers the `HOME` environment variable on +/// Windows. This causes surprising situations where a Rust program +/// will behave differently depending on whether it is run under a +/// Unix emulation environment like Cygwin or MinGW. Neither Cargo nor +/// rustup use the standard libraries definition - they use the +/// definition here. +/// +/// This crate further provides two functions, `cargo_home` and +/// `rustup_home`, which are the canonical way to determine the +/// location that Cargo and rustup store their data. +/// +/// See [rust-lang/rust#43321]. +/// +/// [rust-lang/rust#43321]: https://github.com/rust-lang/rust/issues/43321 + +#[cfg(windows)] +extern crate scopeguard; +#[cfg(windows)] +extern crate winapi; + +#[cfg(windows)] +use winapi::shared::minwindef::DWORD; +use std::path::{PathBuf, Path}; +use std::io; +use std::env; + +/// Returns the path of the current user's home directory if known. +/// +/// # Unix +/// +/// Returns the value of the 'HOME' environment variable if it is set +/// and not equal to the empty string. Otherwise, it tries to determine the +/// home directory by invoking the `getpwuid_r` function on the UID of the +/// current user. +/// +/// # Windows +/// +/// Returns the value of the 'USERPROFILE' environment variable if it +/// is set and not equal to the empty string. 
If both do not exist, +/// [`GetUserProfileDirectory`][msdn] is used to return the +/// appropriate path. +/// +/// [msdn]: https://msdn.microsoft.com/en-us/library/windows/desktop/bb762280(v=vs.85).aspx +/// +/// # Examples +/// +/// ``` +/// use std::env; +/// +/// match env::home_dir() { +/// Some(path) => println!("{}", path.display()), +/// None => println!("Impossible to get your home dir!"), +/// } +/// ``` +pub fn home_dir() -> Option { + home_dir_() +} + +#[cfg(windows)] +fn home_dir_() -> Option { + use std::ptr; + use winapi::um::userenv::GetUserProfileDirectoryW; + use winapi::shared::winerror::ERROR_INSUFFICIENT_BUFFER; + use winapi::um::errhandlingapi::GetLastError; + use winapi::um::handleapi::CloseHandle; + use winapi::um::processthreadsapi::{GetCurrentProcess, OpenProcessToken}; + use winapi::um::winnt::TOKEN_READ; + use scopeguard; + + ::std::env::var_os("USERPROFILE").map(PathBuf::from).or_else(|| unsafe { + let me = GetCurrentProcess(); + let mut token = ptr::null_mut(); + if OpenProcessToken(me, TOKEN_READ, &mut token) == 0 { + return None; + } + let _g = scopeguard::guard(token, |h| { let _ = CloseHandle(*h); }); + fill_utf16_buf(|buf, mut sz| { + match GetUserProfileDirectoryW(token, buf, &mut sz) { + 0 if GetLastError() != ERROR_INSUFFICIENT_BUFFER => 0, + 0 => sz, + _ => sz - 1, // sz includes the null terminator + } + }, os2path).ok() + }) +} + +#[cfg(windows)] +fn os2path(s: &[u16]) -> PathBuf { + use std::ffi::OsString; + use std::os::windows::ffi::OsStringExt; + PathBuf::from(OsString::from_wide(s)) +} + +#[cfg(windows)] +fn fill_utf16_buf(mut f1: F1, f2: F2) -> io::Result + where F1: FnMut(*mut u16, DWORD) -> DWORD, + F2: FnOnce(&[u16]) -> T +{ + use winapi::um::errhandlingapi::{GetLastError, SetLastError}; + use winapi::shared::winerror::ERROR_INSUFFICIENT_BUFFER; + + // Start off with a stack buf but then spill over to the heap if we end up + // needing more space. + let mut stack_buf = [0u16; 512]; + let mut heap_buf = Vec::new(); + unsafe { + let mut n = stack_buf.len(); + loop { + let buf = if n <= stack_buf.len() { + &mut stack_buf[..] + } else { + let extra = n - heap_buf.len(); + heap_buf.reserve(extra); + heap_buf.set_len(n); + &mut heap_buf[..] + }; + + // This function is typically called on windows API functions which + // will return the correct length of the string, but these functions + // also return the `0` on error. In some cases, however, the + // returned "correct length" may actually be 0! + // + // To handle this case we call `SetLastError` to reset it to 0 and + // then check it again if we get the "0 error value". If the "last + // error" is still 0 then we interpret it as a 0 length buffer and + // not an actual error. + SetLastError(0); + let k = match f1(buf.as_mut_ptr(), n as DWORD) { + 0 if GetLastError() == 0 => 0, + 0 => return Err(io::Error::last_os_error()), + n => n, + } as usize; + if k == n && GetLastError() == ERROR_INSUFFICIENT_BUFFER { + n *= 2; + } else if k >= n { + n = k; + } else { + return Ok(f2(&buf[..k])) + } + } + } +} + +#[cfg(unix)] +fn home_dir_() -> Option { + ::std::env::home_dir() +} + +/// Returns the storage directory used by Cargo, often knowns as +/// `.cargo` or `CARGO_HOME`. +/// +/// It returns one of the following values, in this order of +/// preference: +/// +/// - The value of the `CARGO_HOME` environment variable, if it is +/// an absolute path. 
+/// - The value of the current working directory joined with the value +/// of the `CARGO_HOME` environment variable, if `CARGO_HOME` is a +/// relative directory. +/// - The `.cargo` directory in the user's home directory, as reported +/// by the `home_dir` function. +/// +/// # Errors +/// +/// This function fails if it fails to retrieve the current directory, +/// or if the home directory cannot be determined. +pub fn cargo_home() -> io::Result { + let cwd = env::current_dir()?; + cargo_home_with_cwd(&cwd) +} + +pub fn cargo_home_with_cwd(cwd: &Path) -> io::Result { + let env_var = env::var_os("CARGO_HOME"); + + // NB: During the multirust-rs -> rustup transition the install + // dir changed from ~/.multirust/bin to ~/.cargo/bin. Because + // multirust used to explicitly set CARGO_HOME it's possible to + // get here when e.g. installing under `cargo run` and decide to + // install to the wrong place. This check is to make the + // multirust-rs to rustup upgrade seamless. + let env_var = if let Some(v) = env_var { + let vv = v.to_string_lossy().to_string(); + if vv.contains(".multirust/cargo") || + vv.contains(r".multirust\cargo") || + vv.trim().is_empty() { + None + } else { + Some(v) + } + } else { + None + }; + + let env_cargo_home = env_var.map(|home| cwd.join(home)); + let home_dir = home_dir() + .ok_or(io::Error::new(io::ErrorKind::Other, "couldn't find home dir")); + let user_home = home_dir.map(|p| p.join(".cargo")); + + // Compatibility with old cargo that used the std definition of home_dir + let compat_home_dir = ::std::env::home_dir(); + let compat_user_home = compat_home_dir.map(|p| p.join(".cargo")); + + if let Some(p) = env_cargo_home { + Ok(p) + } else { + if let Some(d) = compat_user_home { + if d.exists() { + Ok(d) + } else { + user_home + } + } else { + user_home + } + } +} + +/// Returns the storage directory used by rustup, often knowns as +/// `.rustup` or `RUSTUP_HOME`. +/// +/// It returns one of the following values, in this order of +/// preference: +/// +/// - The value of the `RUSTUP_HOME` environment variable, if it is +/// an absolute path. +/// - The value of the current working directory joined with the value +/// of the `RUSTUP_HOME` environment variable, if `RUSTUP_HOME` is a +/// relative directory. +/// - The `.rustup` directory in the user's home directory, as reported +/// by the `home_dir` function. +/// +/// As a matter of backwards compatibility, this function _may_ return +/// the `.multirust` directory in the user's home directory, only if +/// it determines that the user is running an old version of rustup +/// where that is necessary. +/// +/// # Errors +/// +/// This function fails if it fails to retrieve the current directory, +/// or if the home directory cannot be determined. 
+pub fn rustup_home() -> io::Result { + let cwd = env::current_dir()?; + rustup_home_with_cwd(&cwd) +} + +pub fn rustup_home_with_cwd(cwd: &Path) -> io::Result { + let env_var = env::var_os("RUSTUP_HOME"); + let env_rustup_home = env_var.map(|home| cwd.join(home)); + let home_dir = home_dir() + .ok_or(io::Error::new(io::ErrorKind::Other, "couldn't find home dir")); + + let user_home = if use_rustup_dir() { + home_dir.map(|d| d.join(".rustup")) + } else { + home_dir.map(|d| d.join(".multirust")) + }; + + if let Some(p) = env_rustup_home { + Ok(p) + } else { + user_home + } +} + +fn use_rustup_dir() -> bool { + fn rustup_dir() -> Option { + home_dir().map(|p| p.join(".rustup")) + } + + fn multirust_dir() -> Option { + home_dir().map(|p| p.join(".multirust")) + } + + fn rustup_dir_exists() -> bool { + rustup_dir().map(|p| p.exists()).unwrap_or(false) + } + + fn multirust_dir_exists() -> bool { + multirust_dir().map(|p| p.exists()).unwrap_or(false) + } + + fn rustup_old_version_exists() -> bool { + rustup_dir() + .map(|p| p.join("rustup-version").exists()) + .unwrap_or(false) + } + + !rustup_old_version_exists() + && (rustup_dir_exists() || !multirust_dir_exists()) +} diff --git a/humantime/.cargo-checksum.json b/humantime/.cargo-checksum.json new file mode 100644 index 000000000..cda8ab3d4 --- /dev/null +++ b/humantime/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{},"package":"0484fda3e7007f2a4a0d9c3a703ca38c71c54c55602ce4660c419fd32e188c9e"} \ No newline at end of file diff --git a/humantime/Cargo.toml b/humantime/Cargo.toml new file mode 100644 index 000000000..3ffe0ee6f --- /dev/null +++ b/humantime/Cargo.toml @@ -0,0 +1,37 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "humantime" +version = "1.1.1" +authors = ["Paul Colomiets "] +description = " A parser and formatter for std::time::{Duration, SystemTime}\n" +homepage = "https://github.com/tailhook/humantime" +documentation = "https://docs.rs/humantime" +readme = "README.md" +keywords = ["time", "human", "human-friendly", "parser", "duration"] +categories = ["date-and-time"] +license = "MIT/Apache-2.0" + +[lib] +name = "humantime" +path = "src/lib.rs" +[dependencies.quick-error] +version = "1.0.0" +[dev-dependencies.chrono] +version = "0.4.0" + +[dev-dependencies.rand] +version = "0.4.2" + +[dev-dependencies.time] +version = "0.1.39" diff --git a/humantime/LICENSE-APACHE b/humantime/LICENSE-APACHE new file mode 100644 index 000000000..8f71f43fe --- /dev/null +++ b/humantime/LICENSE-APACHE @@ -0,0 +1,202 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ diff --git a/humantime/LICENSE-MIT b/humantime/LICENSE-MIT new file mode 100644 index 000000000..a099fbade --- /dev/null +++ b/humantime/LICENSE-MIT @@ -0,0 +1,26 @@ +Copyright (c) 2016 The humantime Developers + +Includes parts of http date with the following copyright: +Copyright (c) 2016 Pyfisch + +Includes portions of musl libc with the following copyright: +Copyright © 2005-2013 Rich Felker + + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/humantime/README.md b/humantime/README.md new file mode 100644 index 000000000..703fc4c54 --- /dev/null +++ b/humantime/README.md @@ -0,0 +1,67 @@ +Human Time +========== + +**Status: stable** + +[Documentation](https://docs.rs/humantime) | +[Github](https://github.com/tailhook/humantime) | +[Crate](https://crates.io/crates/humantime) + + +Features: + +* Parses durations in free form like `15days 2min 2s` +* Formats durations in similar form `2years 2min 12us` +* Parses and formats timestamp in `rfc3339` format: `2018-01-01T12:53:00Z` +* Parses timestamps in a weaker format: `2018-01-01 12:53:00` + +Timestamp parsing/formatting is super-fast because format is basically +fixed. + +Here are some micro-benchmarks: + +``` +test result: ok. 0 passed; 0 failed; 26 ignored; 0 measured; 0 filtered out + + Running target/release/deps/datetime_format-8facb4ac832d9770 + +running 2 tests +test rfc3339_chrono ... bench: 737 ns/iter (+/- 37) +test rfc3339_humantime_seconds ... bench: 73 ns/iter (+/- 2) + +test result: ok. 0 passed; 0 failed; 0 ignored; 2 measured; 0 filtered out + + Running target/release/deps/datetime_parse-342628f877d7867c + +running 6 tests +test datetime_utc_parse_millis ... bench: 228 ns/iter (+/- 11) +test datetime_utc_parse_nanos ... bench: 236 ns/iter (+/- 10) +test datetime_utc_parse_seconds ... bench: 204 ns/iter (+/- 18) +test rfc3339_humantime_millis ... bench: 28 ns/iter (+/- 1) +test rfc3339_humantime_nanos ... bench: 36 ns/iter (+/- 2) +test rfc3339_humantime_seconds ... bench: 24 ns/iter (+/- 1) + +test result: ok. 0 passed; 0 failed; 0 ignored; 6 measured; 0 filtered out +``` + +See [serde-humantime] for serde integration. + +[serde-humantime]: https://docs.rs/serde-humantime/0.1.1/serde_humantime/ + +License +======= + +Licensed under either of + +* Apache License, Version 2.0, (./LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0) +* MIT license (./LICENSE-MIT or http://opensource.org/licenses/MIT) + +at your option. 
+ +Contribution +------------ + +Unless you explicitly state otherwise, any contribution intentionally +submitted for inclusion in the work by you, as defined in the Apache-2.0 +license, shall be dual licensed as above, without any additional terms or +conditions. diff --git a/humantime/benches/datetime_format.rs b/humantime/benches/datetime_format.rs new file mode 100644 index 000000000..7f6dca79c --- /dev/null +++ b/humantime/benches/datetime_format.rs @@ -0,0 +1,58 @@ +#![feature(test)] +extern crate chrono; +extern crate humantime; +extern crate test; + +use std::io::Write; +use std::time::{Duration, UNIX_EPOCH}; +use humantime::format_rfc3339; + + +#[bench] +fn rfc3339_humantime_seconds(b: &mut test::Bencher) { + let time = UNIX_EPOCH + Duration::new(1483228799, 0); + let mut buf = Vec::with_capacity(100); + b.iter(|| { + buf.truncate(0); + write!(&mut buf, "{}", format_rfc3339(time)).unwrap() + }); +} + +#[bench] +fn rfc3339_chrono(b: &mut test::Bencher) { + use chrono::{DateTime, NaiveDateTime, Utc}; + use chrono::format::Item; + use chrono::format::Item::*; + use chrono::format::Numeric::*; + use chrono::format::Fixed::*; + use chrono::format::Pad::*; + + let time = DateTime::::from_utc( + NaiveDateTime::from_timestamp(1483228799, 0), Utc); + let mut buf = Vec::with_capacity(100); + + // formatting code from env_logger + const ITEMS: &'static [Item<'static>] = { + &[ + Numeric(Year, Zero), + Literal("-"), + Numeric(Month, Zero), + Literal("-"), + Numeric(Day, Zero), + Literal("T"), + Numeric(Hour, Zero), + Literal(":"), + Numeric(Minute, Zero), + Literal(":"), + Numeric(Second, Zero), + Fixed(TimezoneOffsetZ), + ] + }; + + + b.iter(|| { + buf.truncate(0); + write!(&mut buf, "{}", time.format_with_items(ITEMS.iter().cloned())) + .unwrap() + }); +} diff --git a/humantime/benches/datetime_parse.rs b/humantime/benches/datetime_parse.rs new file mode 100644 index 000000000..785d713d3 --- /dev/null +++ b/humantime/benches/datetime_parse.rs @@ -0,0 +1,50 @@ +#![feature(test)] +extern crate chrono; +extern crate humantime; +extern crate test; + +use chrono::{DateTime}; +use humantime::parse_rfc3339; + + +#[bench] +fn rfc3339_humantime_seconds(b: &mut test::Bencher) { + b.iter(|| { + parse_rfc3339("2018-02-13T23:08:32Z").unwrap() + }); +} + +#[bench] +fn datetime_utc_parse_seconds(b: &mut test::Bencher) { + b.iter(|| { + DateTime::parse_from_rfc3339("2018-02-13T23:08:32Z").unwrap() + }); +} + +#[bench] +fn rfc3339_humantime_millis(b: &mut test::Bencher) { + b.iter(|| { + parse_rfc3339("2018-02-13T23:08:32.123Z").unwrap() + }); +} + +#[bench] +fn datetime_utc_parse_millis(b: &mut test::Bencher) { + b.iter(|| { + DateTime::parse_from_rfc3339("2018-02-13T23:08:32.123Z").unwrap() + }); +} + +#[bench] +fn rfc3339_humantime_nanos(b: &mut test::Bencher) { + b.iter(|| { + parse_rfc3339("2018-02-13T23:08:32.123456983Z").unwrap() + }); +} + +#[bench] +fn datetime_utc_parse_nanos(b: &mut test::Bencher) { + b.iter(|| { + DateTime::parse_from_rfc3339("2018-02-13T23:08:32.123456983Z").unwrap() + }); +} diff --git a/humantime/bulk.yaml b/humantime/bulk.yaml new file mode 100644 index 000000000..cdb9763b6 --- /dev/null +++ b/humantime/bulk.yaml @@ -0,0 +1,8 @@ +minimum-bulk: v0.4.5 + +versions: + +- file: Cargo.toml + block-start: ^\[package\] + block-end: ^\[.*\] + regex: ^version\s*=\s*"(\S+)" diff --git a/humantime/src/date.rs b/humantime/src/date.rs new file mode 100644 index 000000000..540872256 --- /dev/null +++ b/humantime/src/date.rs @@ -0,0 +1,530 @@ +use std::fmt; +use std::str; +use 
std::time::{SystemTime, Duration, UNIX_EPOCH}; + +#[cfg(target_os="cloudabi")] +mod max { + pub const SECONDS: u64 = ::std::u64::MAX / 1_000_000_000; + #[allow(unused)] + pub const TIMESTAMP: &'static str = "2554-07-21T23:34:33Z"; +} +#[cfg(all( + target_pointer_width="32", + not(target_os="cloudabi"), + not(target_os="windows"), + not(all(target_arch="wasm32", not(target_os="emscripten"))) +))] +mod max { + pub const SECONDS: u64 = ::std::i32::MAX as u64; + #[allow(unused)] + pub const TIMESTAMP: &'static str = "2038-01-19T03:14:07Z"; +} + +#[cfg(any( + target_pointer_width="64", + target_os="windows", + all(target_arch="wasm32", not(target_os="emscripten")), +))] +mod max { + pub const SECONDS: u64 = 253402300800-1; // last second of year 9999 + #[allow(unused)] + pub const TIMESTAMP: &'static str = "9999-12-31T23:59:59Z"; +} + +quick_error! { + /// Error parsing datetime (timestamp) + #[derive(Debug, PartialEq, Clone, Copy)] + pub enum Error { + /// Numeric component is out of range + OutOfRange { + display("numeric component is out of range") + } + /// Bad character where digit is expected + InvalidDigit { + display("bad character where digit is expected") + } + /// Other formatting errors + InvalidFormat { + display("timestamp format is invalid") + } + } +} + +#[derive(Debug, PartialEq, Eq)] +enum Precision { + Smart, + Seconds, + Nanos, +} + +/// A wrapper type that allows you to Display a SystemTime +#[derive(Debug)] +pub struct Rfc3339Timestamp(SystemTime, Precision); + +#[inline] +fn two_digits(b1: u8, b2: u8) -> Result { + if b1 < b'0' || b2 < b'0' || b1 > b'9' || b2 > b'9' { + return Err(Error::InvalidDigit); + } + Ok(((b1 - b'0')*10 + (b2 - b'0')) as u64) +} + +/// Parse RFC3339 timestamp `2018-02-14T00:28:07Z` +/// +/// Supported feature: any precision of fractional +/// digits `2018-02-14T00:28:07.133Z`. +/// +/// Unsupported feature: localized timestamps. Only UTC is supported. +pub fn parse_rfc3339(s: &str) -> Result { + if s.len() < "2018-02-14T00:28:07Z".len() { + return Err(Error::InvalidFormat); + } + let b = s.as_bytes(); + if b[10] != b'T' || b[b.len()-1] != b'Z' { + return Err(Error::InvalidFormat); + } + return parse_rfc3339_weak(s); +} + +/// Parse RFC3339-like timestamp `2018-02-14 00:28:07` +/// +/// Supported features: +/// +/// 1. Any precision of fractional digits `2018-02-14 00:28:07.133`. +/// 2. Supports timestamp with or without either of `T` or `Z` +/// 3. Anything valid for `parse_3339` is valid for this function +/// +/// Unsupported feature: localized timestamps. Only UTC is supported, even if +/// `Z` is not specified. +/// +/// This function is intended to use for parsing human input. Whereas +/// `parse_rfc3339` is for strings generated programmatically. +pub fn parse_rfc3339_weak(s: &str) -> Result { + if s.len() < "2018-02-14T00:28:07".len() { + return Err(Error::InvalidFormat); + } + let b = s.as_bytes(); // for careless slicing + if b[4] != b'-' || b[7] != b'-' || (b[10] != b'T' && b[10] != b' ') || + b[13] != b':' || b[16] != b':' + { + return Err(Error::InvalidFormat); + } + let year = two_digits(b[0], b[1])? * 100 + two_digits(b[2], b[3])?; + let month = two_digits(b[5], b[6])?; + let day = two_digits(b[8], b[9])?; + let hour = two_digits(b[11], b[12])?; + let minute = two_digits(b[14], b[15])?; + let mut second = two_digits(b[17], b[18])?; + + if year < 1970 || hour > 23 || minute > 59 || second > 60 { + return Err(Error::OutOfRange); + } + // TODO(tailhook) should we check that leaps second is only on midnight ? 
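+    // NOTE: explanatory comments added for clarity; they describe the
+    // arithmetic that follows and do not change behaviour.
+    //
+    // A leap second (":60") is folded into ":59", since SystemTime cannot
+    // represent it. The civil date is then converted to whole days since the
+    // Unix epoch: `leap_years` counts the leap days contributed by completed
+    // years before `year` (Gregorian rule: every 4th year, minus century
+    // years, plus every 400th year); the `match` on `month` yields the
+    // day-of-year offset and the month length (February is 29 days in a leap
+    // year), with `ydays += 1` adding the current year's Feb 29 for months
+    // after February. Finally `days * 86400` is combined with the
+    // seconds-of-day value `time` further below.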
+ if second == 60 { + second = 59 + }; + let leap_years = ((year - 1) - 1968) / 4 - ((year - 1) - 1900) / 100 + + ((year - 1) - 1600) / 400; + let leap = is_leap_year(year); + let (mut ydays, mdays) = match month { + 1 => (0, 31), + 2 if leap => (31, 29), + 2 => (31, 28), + 3 => (59, 31), + 4 => (90, 30), + 5 => (120, 31), + 6 => (151, 30), + 7 => (181, 31), + 8 => (212, 31), + 9 => (243, 30), + 10 => (273, 31), + 11 => (304, 30), + 12 => (334, 31), + _ => return Err(Error::OutOfRange), + }; + if day > mdays || day == 0 { + return Err(Error::OutOfRange); + } + ydays += day - 1; + if leap && month > 2 { + ydays += 1; + } + let days = (year - 1970) * 365 + leap_years + ydays; + + let time = second + minute * 60 + hour * 3600; + + let mut nanos = 0; + let mut mult = 100_000_000; + if b.get(19) == Some(&b'.') { + for idx in 20..b.len() { + if b[idx] == b'Z' { + if idx == b.len()-1 { + break; + } else { + return Err(Error::InvalidDigit); + } + } + if b[idx] < b'0' || b[idx] > b'9' { + return Err(Error::InvalidDigit); + } + nanos += mult * (b[idx] - b'0') as u32; + mult /= 10; + } + } else { + if b.len() != 19 && (b.len() > 20 || b[19] != b'Z') { + return Err(Error::InvalidFormat); + } + } + + let total_seconds = time + days * 86400; + if total_seconds > max::SECONDS { + return Err(Error::OutOfRange); + } + + return Ok(UNIX_EPOCH + Duration::new(total_seconds, nanos)); +} + +fn is_leap_year(y: u64) -> bool { + y % 4 == 0 && (!(y % 100 == 0) || y % 400 == 0) +} + +/// Format an RFC3339 timestamp `2018-02-14T00:28:07Z` +/// +/// This function formats timestamp with smart precision: i.e. if it has no +/// fractional seconds, they aren't written at all. And up to nine digits if +/// they are. +/// +/// The value is always UTC and ignores system timezone. +pub fn format_rfc3339(system_time: SystemTime) -> Rfc3339Timestamp { + return Rfc3339Timestamp(system_time, Precision::Smart); +} + +/// Format an RFC3339 timestamp `2018-02-14T00:28:07Z` +/// +/// This format always shows timestamp without fractional seconds. +/// +/// The value is always UTC and ignores system timezone. +pub fn format_rfc3339_seconds(system_time: SystemTime) -> Rfc3339Timestamp { + return Rfc3339Timestamp(system_time, Precision::Seconds); +} + +/// Format an RFC3339 timestamp `2018-02-14T00:28:07.000000000Z` +/// +/// This format always shows nanoseconds even if nanosecond value is zero. +/// +/// The value is always UTC and ignores system timezone. 
+pub fn format_rfc3339_nanos(system_time: SystemTime) -> Rfc3339Timestamp { + return Rfc3339Timestamp(system_time, Precision::Nanos); +} + +impl fmt::Display for Rfc3339Timestamp { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + use self::Precision::*; + + let dur = self.0.duration_since(UNIX_EPOCH) + .expect("all times should be after the epoch"); + let secs_since_epoch = dur.as_secs(); + let nanos = dur.subsec_nanos(); + + if secs_since_epoch >= 253402300800 { // year 9999 + return Err(fmt::Error); + } + + /* 2000-03-01 (mod 400 year, immediately after feb29 */ + const LEAPOCH: i64 = 11017; + const DAYS_PER_400Y: i64 = 365*400 + 97; + const DAYS_PER_100Y: i64 = 365*100 + 24; + const DAYS_PER_4Y: i64 = 365*4 + 1; + + let days = (secs_since_epoch / 86400) as i64 - LEAPOCH; + let secs_of_day = secs_since_epoch % 86400; + + let mut qc_cycles = days / DAYS_PER_400Y; + let mut remdays = days % DAYS_PER_400Y; + + if remdays < 0 { + remdays += DAYS_PER_400Y; + qc_cycles -= 1; + } + + let mut c_cycles = remdays / DAYS_PER_100Y; + if c_cycles == 4 { c_cycles -= 1; } + remdays -= c_cycles * DAYS_PER_100Y; + + let mut q_cycles = remdays / DAYS_PER_4Y; + if q_cycles == 25 { q_cycles -= 1; } + remdays -= q_cycles * DAYS_PER_4Y; + + let mut remyears = remdays / 365; + if remyears == 4 { remyears -= 1; } + remdays -= remyears * 365; + + let mut year = 2000 + + remyears + 4*q_cycles + 100*c_cycles + 400*qc_cycles; + + let months = [31,30,31,30,31,31,30,31,30,31,31,29]; + let mut mon = 0; + for mon_len in months.iter() { + mon += 1; + if remdays < *mon_len { + break; + } + remdays -= *mon_len; + } + let mday = remdays+1; + let mon = if mon + 2 > 12 { + year += 1; + mon - 10 + } else { + mon + 2 + }; + + let mut buf: [u8; 30] = [ + // Too long to write as: b"0000-00-00T00:00:00.000000000Z" + b'0', b'0', b'0', b'0', b'-', b'0', b'0', b'-', b'0', b'0', b'T', + b'0', b'0', b':', b'0', b'0', b':', b'0', b'0', + b'.', b'0', b'0', b'0', b'0', b'0', b'0', b'0', b'0', b'0', b'Z', + ]; + buf[0] = b'0' + (year / 1000) as u8; + buf[1] = b'0' + (year / 100 % 10) as u8; + buf[2] = b'0' + (year / 10 % 10) as u8; + buf[3] = b'0' + (year % 10) as u8; + buf[5] = b'0' + (mon / 10) as u8; + buf[6] = b'0' + (mon % 10) as u8; + buf[8] = b'0' + (mday / 10) as u8; + buf[9] = b'0' + (mday % 10) as u8; + buf[11] = b'0' + (secs_of_day / 3600 / 10) as u8; + buf[12] = b'0' + (secs_of_day / 3600 % 10) as u8; + buf[14] = b'0' + (secs_of_day / 60 / 10 % 6) as u8; + buf[15] = b'0' + (secs_of_day / 60 % 10) as u8; + buf[17] = b'0' + (secs_of_day / 10 % 6) as u8; + buf[18] = b'0' + (secs_of_day % 10) as u8; + + if self.1 == Seconds || nanos == 0 && self.1 == Smart { + buf[19] = b'Z'; + f.write_str(unsafe { str::from_utf8_unchecked(&buf[..20]) }) + } else { + buf[20] = b'0' + (nanos / 100_000_000) as u8; + buf[21] = b'0' + (nanos / 10_000_000 % 10) as u8; + buf[22] = b'0' + (nanos / 1_000_000 % 10) as u8; + buf[23] = b'0' + (nanos / 100_000 % 10) as u8; + buf[24] = b'0' + (nanos / 10_000 % 10) as u8; + buf[25] = b'0' + (nanos / 1_000 % 10) as u8; + buf[26] = b'0' + (nanos / 100 % 10) as u8; + buf[27] = b'0' + (nanos / 10 % 10) as u8; + buf[28] = b'0' + (nanos / 1 % 10) as u8; + // we know our chars are all ascii + f.write_str(unsafe { str::from_utf8_unchecked(&buf[..]) }) + } + } +} + +#[cfg(test)] +mod test { + extern crate time; + extern crate rand; + + use std::str::from_utf8; + use self::rand::Rng; + use std::time::{UNIX_EPOCH, SystemTime, Duration}; + use super::{parse_rfc3339, parse_rfc3339_weak, format_rfc3339}; + use 
super::max; + + fn from_sec(sec: u64) -> (String, SystemTime) { + let s = time::at_utc(time::Timespec { sec: sec as i64, nsec: 0 }) + .rfc3339().to_string(); + let time = UNIX_EPOCH + Duration::new(sec, 0); + return (s, time) + } + + #[test] + #[cfg(all(target_pointer_width="32", target_os="linux"))] + fn year_after_2038_fails_gracefully() { + // next second + assert_eq!(parse_rfc3339("2038-01-19T03:14:08Z").unwrap_err(), + super::Error::OutOfRange); + assert_eq!(parse_rfc3339("9999-12-31T23:59:59Z").unwrap_err(), + super::Error::OutOfRange); + } + + #[test] + fn smoke_tests_parse() { + assert_eq!(parse_rfc3339("1970-01-01T00:00:00Z").unwrap(), + UNIX_EPOCH + Duration::new(0, 0)); + assert_eq!(parse_rfc3339("1970-01-01T00:00:01Z").unwrap(), + UNIX_EPOCH + Duration::new(1, 0)); + assert_eq!(parse_rfc3339("2018-02-13T23:08:32Z").unwrap(), + UNIX_EPOCH + Duration::new(1518563312, 0)); + assert_eq!(parse_rfc3339("2012-01-01T00:00:00Z").unwrap(), + UNIX_EPOCH + Duration::new(1325376000, 0)); + } + + #[test] + fn smoke_tests_format() { + assert_eq!( + format_rfc3339(UNIX_EPOCH + Duration::new(0, 0)).to_string(), + "1970-01-01T00:00:00Z"); + assert_eq!( + format_rfc3339(UNIX_EPOCH + Duration::new(1, 0)).to_string(), + "1970-01-01T00:00:01Z"); + assert_eq!( + format_rfc3339(UNIX_EPOCH + Duration::new(1518563312, 0)).to_string(), + "2018-02-13T23:08:32Z"); + assert_eq!( + format_rfc3339(UNIX_EPOCH + Duration::new(1325376000, 0)).to_string(), + "2012-01-01T00:00:00Z"); + } + + #[test] + fn upper_bound() { + let max = UNIX_EPOCH + Duration::new(max::SECONDS, 0); + assert_eq!(parse_rfc3339(&max::TIMESTAMP).unwrap(), max); + assert_eq!(format_rfc3339(max).to_string(), max::TIMESTAMP); + } + + #[test] + fn leap_second() { + assert_eq!(parse_rfc3339("2016-12-31T23:59:60Z").unwrap(), + UNIX_EPOCH + Duration::new(1483228799, 0)); + } + + #[test] + fn first_731_days() { + let year_start = 0; // 1970 + for day in 0.. (365 * 2 + 1) { // scan leap year and non-leap year + let (s, time) = from_sec(year_start + day * 86400); + assert_eq!(parse_rfc3339(&s).unwrap(), time); + assert_eq!(format_rfc3339(time).to_string(), s); + } + } + + #[test] + fn the_731_consecutive_days() { + let year_start = 1325376000; // 2012 + for day in 0.. 
(365 * 2 + 1) { // scan leap year and non-leap year + let (s, time) = from_sec(year_start + day * 86400); + assert_eq!(parse_rfc3339(&s).unwrap(), time); + assert_eq!(format_rfc3339(time).to_string(), s); + } + } + + #[test] + fn all_86400_seconds() { + let day_start = 1325376000; + for second in 0..86400 { // scan leap year and non-leap year + let (s, time) = from_sec(day_start + second); + assert_eq!(parse_rfc3339(&s).unwrap(), time); + assert_eq!(format_rfc3339(time).to_string(), s); + } + } + + #[test] + fn random_past() { + let upper = SystemTime::now().duration_since(UNIX_EPOCH).unwrap() + .as_secs(); + for _ in 0..10000 { + let sec = rand::thread_rng().gen_range(0, upper); + let (s, time) = from_sec(sec); + assert_eq!(parse_rfc3339(&s).unwrap(), time); + assert_eq!(format_rfc3339(time).to_string(), s); + } + } + + #[test] + fn random_wide_range() { + for _ in 0..100000 { + let sec = rand::thread_rng().gen_range(0, max::SECONDS); + let (s, time) = from_sec(sec); + assert_eq!(parse_rfc3339(&s).unwrap(), time); + assert_eq!(format_rfc3339(time).to_string(), s); + } + } + + #[test] + fn milliseconds() { + assert_eq!(parse_rfc3339("1970-01-01T00:00:00.123Z").unwrap(), + UNIX_EPOCH + Duration::new(0, 123000000)); + assert_eq!(format_rfc3339(UNIX_EPOCH + Duration::new(0, 123000000)) + .to_string(), "1970-01-01T00:00:00.123000000Z"); + } + + #[test] + #[should_panic(expected="OutOfRange")] + fn zero_month() { + parse_rfc3339("1970-00-01T00:00:00Z").unwrap(); + } + + #[test] + #[should_panic(expected="OutOfRange")] + fn big_month() { + parse_rfc3339("1970-32-01T00:00:00Z").unwrap(); + } + + #[test] + #[should_panic(expected="OutOfRange")] + fn zero_day() { + parse_rfc3339("1970-01-00T00:00:00Z").unwrap(); + } + + #[test] + #[should_panic(expected="OutOfRange")] + fn big_day() { + parse_rfc3339("1970-12-35T00:00:00Z").unwrap(); + } + + #[test] + #[should_panic(expected="OutOfRange")] + fn big_day2() { + parse_rfc3339("1970-02-30T00:00:00Z").unwrap(); + } + + #[test] + #[should_panic(expected="OutOfRange")] + fn big_second() { + parse_rfc3339("1970-12-30T00:00:78Z").unwrap(); + } + + #[test] + #[should_panic(expected="OutOfRange")] + fn big_minute() { + parse_rfc3339("1970-12-30T00:78:00Z").unwrap(); + } + + #[test] + #[should_panic(expected="OutOfRange")] + fn big_hour() { + parse_rfc3339("1970-12-30T24:00:00Z").unwrap(); + } + + #[test] + fn break_data() { + for pos in 0.."2016-12-31T23:59:60Z".len() { + let mut s = b"2016-12-31T23:59:60Z".to_vec(); + s[pos] = b'x'; + parse_rfc3339(from_utf8(&s).unwrap()).unwrap_err(); + } + } + + #[test] + fn weak_smoke_tests() { + assert_eq!(parse_rfc3339_weak("1970-01-01 00:00:00").unwrap(), + UNIX_EPOCH + Duration::new(0, 0)); + parse_rfc3339("1970-01-01 00:00:00").unwrap_err(); + + assert_eq!(parse_rfc3339_weak("1970-01-01 00:00:00.000123").unwrap(), + UNIX_EPOCH + Duration::new(0, 123000)); + parse_rfc3339("1970-01-01 00:00:00.000123").unwrap_err(); + + assert_eq!(parse_rfc3339_weak("1970-01-01T00:00:00.000123").unwrap(), + UNIX_EPOCH + Duration::new(0, 123000)); + parse_rfc3339("1970-01-01T00:00:00.000123").unwrap_err(); + + assert_eq!(parse_rfc3339_weak("1970-01-01 00:00:00.000123Z").unwrap(), + UNIX_EPOCH + Duration::new(0, 123000)); + parse_rfc3339("1970-01-01 00:00:00.000123Z").unwrap_err(); + + assert_eq!(parse_rfc3339_weak("1970-01-01 00:00:00Z").unwrap(), + UNIX_EPOCH + Duration::new(0, 0)); + parse_rfc3339("1970-01-01 00:00:00Z").unwrap_err(); + } +} diff --git a/humantime/src/duration.rs b/humantime/src/duration.rs new file mode 100644 index 
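[Editorial aside, not part of the vendored sources: a small usage sketch of the `date.rs` API imported above. It assumes the `humantime` crate from this patch is available as a dependency; the expected values are taken from the crate's own tests and doc comments.]

```rust
// Editorial sketch: exercising the RFC3339 helpers from humantime/src/date.rs.
extern crate humantime;

use std::time::{Duration, UNIX_EPOCH};

fn main() {
    // Strict parser: requires the `T` separator and the trailing `Z`.
    let t = humantime::parse_rfc3339("2018-02-13T23:08:32Z").unwrap();
    assert_eq!(t, UNIX_EPOCH + Duration::new(1518563312, 0));

    // Weak parser: also accepts a space separator and a missing `Z`.
    let t2 = humantime::parse_rfc3339_weak("2018-02-13 23:08:32").unwrap();
    assert_eq!(t, t2);

    // Smart formatting omits fractional seconds when they are zero...
    assert_eq!(humantime::format_rfc3339(t).to_string(),
               "2018-02-13T23:08:32Z");
    // ...while the nanosecond formatter always prints nine digits.
    assert_eq!(humantime::format_rfc3339_nanos(t).to_string(),
               "2018-02-13T23:08:32.000000000Z");
}
```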
000000000..b6e5aecc1 --- /dev/null +++ b/humantime/src/duration.rs @@ -0,0 +1,411 @@ +use std::fmt; +use std::str::Chars; +use std::time::Duration; +use std::error::Error as StdError; + +quick_error! { + /// Error parsing human-friendly duration + #[derive(Debug, PartialEq, Clone, Copy)] + pub enum Error { + /// Invalid character during parsing + /// + /// More specifically anything that is not alphanumeric is prohibited + /// + /// The field is an byte offset of the character in the string. + InvalidCharacter(offset: usize) { + display("invalid character at {}", offset) + description("invalid character") + } + /// Non-numeric value where number is expected + /// + /// This usually means that either time unit is broken into words, + /// e.g. `m sec` instead of `msec`, or just number is omitted, + /// for example `2 hours min` instead of `2 hours 1 min` + /// + /// The field is an byte offset of the errorneous character + /// in the string. + NumberExpected(offset: usize) { + display("expected number at {}", offset) + description("expected number") + } + /// Unit in the number is not one of allowed units + /// + /// See documentation of `parse_duration` for the list of supported + /// time units. + /// + /// The two fields are start and end (exclusive) of the slice from + /// the original string, containing errorneous value + UnknownUnit(start: usize, end: usize) { + display("unknown unit at {}-{}", start, end) + description("unknown unit") + } + /// The numeric value is too large + /// + /// Usually this means value is too large to be useful. If user writes + /// data in subsecond units, then the maximum is about 3k years. When + /// using seconds, or larger units, the limit is even larger. + NumberOverflow { + display(self_) -> ("{}", self_.description()) + description("number is too large") + } + /// The value was an empty string (or consists only whitespace) + Empty { + display(self_) -> ("{}", self_.description()) + description("value was empty") + } + } + +} + +/// A wrapper type that allows you to Display a Duration +#[derive(Debug)] +pub struct FormattedDuration(Duration); + +trait OverflowOp: Sized { + fn mul(self, other: Self) -> Result; + fn add(self, other: Self) -> Result; +} + +impl OverflowOp for u64 { + fn mul(self, other: Self) -> Result { + self.checked_mul(other).ok_or(Error::NumberOverflow) + } + fn add(self, other: Self) -> Result { + self.checked_add(other).ok_or(Error::NumberOverflow) + } +} + +struct Parser<'a> { + iter: Chars<'a>, + src: &'a str, + current: (u64, u64), +} + +impl<'a> Parser<'a> { + fn off(&self) -> usize { + self.src.len() - self.iter.as_str().len() + } + + fn parse_first_char(&mut self) -> Result, Error> { + let off = self.off(); + for c in self.iter.by_ref() { + match c { + '0'...'9' => { + return Ok(Some(c as u64 - '0' as u64)); + } + c if c.is_whitespace() => continue, + _ => { + return Err(Error::NumberExpected(off)); + } + } + } + return Ok(None); + } + fn parse_unit(&mut self, n: u64, start: usize, end: usize) + -> Result<(), Error> + { + let (mut sec, nsec) = match &self.src[start..end] { + "nanos" | "nsec" | "ns" => (0u64, n), + "usec" | "us" => (0u64, try!(n.mul(1000))), + "millis" | "msec" | "ms" => (0u64, try!(n.mul(1000_000))), + "seconds" | "second" | "secs" | "sec" | "s" => (n, 0), + "minutes" | "minute" | "min" | "mins" | "m" + => (try!(n.mul(60)), 0), + "hours" | "hour" | "hr" | "hrs" | "h" => (try!(n.mul(3600)), 0), + "days" | "day" | "d" => (try!(n.mul(86400)), 0), + "weeks" | "week" | "w" => (try!(n.mul(86400*7)), 0), + "months" 
| "month" | "M" => (try!(n.mul(2630016)), 0), // 30.44d + "years" | "year" | "y" => (try!(n.mul(31557600)), 0), // 365.25d + _ => return Err(Error::UnknownUnit(start, end)), + }; + let mut nsec = try!(self.current.1.add(nsec)); + if nsec > 1000_000_000 { + sec = try!(sec.add(nsec / 1000_000_000)); + nsec %= 1000_000_000; + } + sec = try!(self.current.0.add(sec)); + self.current = (sec, nsec); + Ok(()) + } + + fn parse(mut self) -> Result { + let mut n = try!(try!(self.parse_first_char()).ok_or(Error::Empty)); + 'outer: loop { + let mut off = self.off(); + while let Some(c) = self.iter.next() { + match c { + '0'...'9' => { + n = try!(n.checked_mul(10) + .and_then(|x| x.checked_add(c as u64 - '0' as u64)) + .ok_or(Error::NumberOverflow)); + } + c if c.is_whitespace() => {} + 'a'...'z' | 'A'...'Z' => { + break; + } + _ => { + return Err(Error::InvalidCharacter(off)); + } + } + off = self.off(); + } + let start = off; + let mut off = self.off(); + while let Some(c) = self.iter.next() { + match c { + '0'...'9' => { + try!(self.parse_unit(n, start, off)); + n = c as u64 - '0' as u64; + continue 'outer; + } + c if c.is_whitespace() => break, + 'a'...'z' | 'A'...'Z' => {} + _ => { + return Err(Error::InvalidCharacter(off)); + } + } + off = self.off(); + } + try!(self.parse_unit(n, start, off)); + n = match try!(self.parse_first_char()) { + Some(n) => n, + None => return Ok( + Duration::new(self.current.0, self.current.1 as u32)), + }; + } + } + +} + +/// Parse duration object `1hour 12min 5s` +/// +/// The duration object is a concatenation of time spans. Where each time +/// span is an integer number and a suffix. Supported suffixes: +/// +/// * `nsec`, `ns` -- microseconds +/// * `usec`, `us` -- microseconds +/// * `msec`, `ms` -- milliseconds +/// * `seconds`, `second`, `sec`, `s` +/// * `minutes`, `minute`, `min`, `m` +/// * `hours`, `hour`, `hr`, `h` +/// * `days`, `day`, `d` +/// * `weeks`, `week`, `w` +/// * `months`, `month`, `M` -- defined as 30.44 days +/// * `years`, `year`, `y` -- defined as 365.25 days +/// +/// # Examples +/// +/// ``` +/// use std::time::Duration; +/// use humantime::parse_duration; +/// +/// assert_eq!(parse_duration("2h 37min"), Ok(Duration::new(9420, 0))); +/// assert_eq!(parse_duration("32ms"), Ok(Duration::new(0, 32_000_000))); +/// ``` +pub fn parse_duration(s: &str) -> Result { + Parser { + iter: s.chars(), + src: s, + current: (0, 0), + }.parse() +} + +/// Formats duration into a human-readable string +/// +/// Note: this format is guaranteed to have same value when using +/// parse_duration, but we can change some details of the exact composition +/// of the value. 
+/// +/// # Examples +/// +/// ``` +/// use std::time::Duration; +/// use humantime::format_duration; +/// +/// let val1 = Duration::new(9420, 0); +/// assert_eq!(format_duration(val1).to_string(), "2h 37m"); +/// let val2 = Duration::new(0, 32_000_000); +/// assert_eq!(format_duration(val2).to_string(), "32ms"); +/// ``` +pub fn format_duration(val: Duration) -> FormattedDuration { + FormattedDuration(val) +} + +fn item_plural(f: &mut fmt::Formatter, started: &mut bool, + name: &str, value: u64) + -> fmt::Result +{ + if value > 0 { + if *started { + f.write_str(" ")?; + } + write!(f, "{}{}", value, name)?; + if value > 1 { + f.write_str("s")?; + } + *started = true; + } + Ok(()) +} +fn item(f: &mut fmt::Formatter, started: &mut bool, name: &str, value: u32) + -> fmt::Result +{ + if value > 0 { + if *started { + f.write_str(" ")?; + } + write!(f, "{}{}", value, name)?; + *started = true; + } + Ok(()) +} + +impl fmt::Display for FormattedDuration { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let secs = self.0.as_secs(); + let nanos = self.0.subsec_nanos(); + + if secs == 0 && nanos == 0 { + f.write_str("0s")?; + return Ok(()); + } + + let years = secs / 31557600; // 365.25d + let ydays = secs % 31557600; + let months = ydays / 2630016; // 30.44d + let mdays = ydays % 2630016; + let days = mdays / 86400; + let day_secs = mdays % 86400; + let hours = day_secs / 3600; + let minutes = day_secs % 3600 / 60; + let seconds = day_secs % 60; + + let millis = nanos / 1_000_000; + let micros = nanos / 1000 % 1000; + let nanosec = nanos % 1000; + + let ref mut started = false; + item_plural(f, started, "year", years)?; + item_plural(f, started, "month", months)?; + item_plural(f, started, "day", days)?; + item(f, started, "h", hours as u32)?; + item(f, started, "m", minutes as u32)?; + item(f, started, "s", seconds as u32)?; + item(f, started, "ms", millis)?; + item(f, started, "us", micros)?; + item(f, started, "ns", nanosec)?; + Ok(()) + } +} + +#[cfg(test)] +mod test { + extern crate rand; + + use std::time::Duration; + use self::rand::Rng; + use super::{parse_duration, format_duration}; + use super::Error; + + #[test] + fn test_units() { + assert_eq!(parse_duration("17nsec"), Ok(Duration::new(0, 17))); + assert_eq!(parse_duration("17nanos"), Ok(Duration::new(0, 17))); + assert_eq!(parse_duration("33ns"), Ok(Duration::new(0, 33))); + assert_eq!(parse_duration("3usec"), Ok(Duration::new(0, 3000))); + assert_eq!(parse_duration("78us"), Ok(Duration::new(0, 78000))); + assert_eq!(parse_duration("31msec"), Ok(Duration::new(0, 31000000))); + assert_eq!(parse_duration("31millis"), Ok(Duration::new(0, 31000000))); + assert_eq!(parse_duration("6ms"), Ok(Duration::new(0, 6000000))); + assert_eq!(parse_duration("3000s"), Ok(Duration::new(3000, 0))); + assert_eq!(parse_duration("300sec"), Ok(Duration::new(300, 0))); + assert_eq!(parse_duration("300secs"), Ok(Duration::new(300, 0))); + assert_eq!(parse_duration("50seconds"), Ok(Duration::new(50, 0))); + assert_eq!(parse_duration("1second"), Ok(Duration::new(1, 0))); + assert_eq!(parse_duration("100m"), Ok(Duration::new(6000, 0))); + assert_eq!(parse_duration("12min"), Ok(Duration::new(720, 0))); + assert_eq!(parse_duration("12mins"), Ok(Duration::new(720, 0))); + assert_eq!(parse_duration("1minute"), Ok(Duration::new(60, 0))); + assert_eq!(parse_duration("7minutes"), Ok(Duration::new(420, 0))); + assert_eq!(parse_duration("2h"), Ok(Duration::new(7200, 0))); + assert_eq!(parse_duration("7hr"), Ok(Duration::new(25200, 0))); + 
assert_eq!(parse_duration("7hrs"), Ok(Duration::new(25200, 0))); + assert_eq!(parse_duration("1hour"), Ok(Duration::new(3600, 0))); + assert_eq!(parse_duration("24hours"), Ok(Duration::new(86400, 0))); + assert_eq!(parse_duration("1day"), Ok(Duration::new(86400, 0))); + assert_eq!(parse_duration("2days"), Ok(Duration::new(172800, 0))); + assert_eq!(parse_duration("365d"), Ok(Duration::new(31536000, 0))); + assert_eq!(parse_duration("1week"), Ok(Duration::new(604800, 0))); + assert_eq!(parse_duration("7weeks"), Ok(Duration::new(4233600, 0))); + assert_eq!(parse_duration("52w"), Ok(Duration::new(31449600, 0))); + assert_eq!(parse_duration("1month"), Ok(Duration::new(2630016, 0))); + assert_eq!(parse_duration("3months"), Ok(Duration::new(3*2630016, 0))); + assert_eq!(parse_duration("12M"), Ok(Duration::new(31560192, 0))); + assert_eq!(parse_duration("1year"), Ok(Duration::new(31557600, 0))); + assert_eq!(parse_duration("7years"), Ok(Duration::new(7*31557600, 0))); + assert_eq!(parse_duration("17y"), Ok(Duration::new(536479200, 0))); + } + + #[test] + fn test_combo() { + assert_eq!(parse_duration("20 min 17 nsec "), Ok(Duration::new(1200, 17))); + assert_eq!(parse_duration("2h 15m"), Ok(Duration::new(8100, 0))); + } + + #[test] + fn all_86400_seconds() { + for second in 0..86400 { // scan leap year and non-leap year + let d = Duration::new(second, 0); + assert_eq!(d, + parse_duration(&format_duration(d).to_string()).unwrap()); + } + } + + #[test] + fn random_second() { + for _ in 0..10000 { + let sec = rand::thread_rng().gen_range(0, 253370764800); + let d = Duration::new(sec, 0); + assert_eq!(d, + parse_duration(&format_duration(d).to_string()).unwrap()); + } + } + + #[test] + fn random_any() { + for _ in 0..10000 { + let sec = rand::thread_rng().gen_range(0, 253370764800); + let nanos = rand::thread_rng().gen_range(0, 1_000_000_000); + let d = Duration::new(sec, nanos); + assert_eq!(d, + parse_duration(&format_duration(d).to_string()).unwrap()); + } + } + + #[test] + fn test_overlow() { + // Overflow on subseconds is earlier because of how we do conversion + // we could fix it, but I don't see any good reason for this + assert_eq!(parse_duration("100000000000000000000ns"), + Err(Error::NumberOverflow)); + assert_eq!(parse_duration("100000000000000000us"), + Err(Error::NumberOverflow)); + assert_eq!(parse_duration("100000000000000ms"), + Err(Error::NumberOverflow)); + + assert_eq!(parse_duration("100000000000000000000s"), + Err(Error::NumberOverflow)); + assert_eq!(parse_duration("10000000000000000000m"), + Err(Error::NumberOverflow)); + assert_eq!(parse_duration("1000000000000000000h"), + Err(Error::NumberOverflow)); + assert_eq!(parse_duration("100000000000000000d"), + Err(Error::NumberOverflow)); + assert_eq!(parse_duration("10000000000000000w"), + Err(Error::NumberOverflow)); + assert_eq!(parse_duration("1000000000000000M"), + Err(Error::NumberOverflow)); + assert_eq!(parse_duration("10000000000000y"), + Err(Error::NumberOverflow)); + } +} diff --git a/humantime/src/lib.rs b/humantime/src/lib.rs new file mode 100644 index 000000000..1ab7cf4ea --- /dev/null +++ b/humantime/src/lib.rs @@ -0,0 +1,30 @@ +//! Human-friendly time parser and formatter +//! +//! Features: +//! +//! * Parses durations in free form like `15days 2min 2s` +//! * Formats durations in similar form `2years 2min 12us` +//! * Parses and formats timestamp in `rfc3339` format: `2018-01-01T12:53:00Z` +//! * Parses timestamps in a weaker format: `2018-01-01 12:53:00` +//! +//! 
Timestamp parsing/formatting is super-fast because format is basically +//! fixed. +//! +//! See [serde-humantime] for serde integration. +//! +//! [serde-humantime]: https://docs.rs/serde-humantime/0.1.1/serde_humantime/ +#![warn(missing_debug_implementations)] +#![warn(missing_docs)] + +#[macro_use] extern crate quick_error; + +mod duration; +mod wrapper; +mod date; + +pub use duration::{parse_duration, Error as DurationError}; +pub use duration::{format_duration, FormattedDuration}; +pub use wrapper::{Duration, Timestamp}; +pub use date::{parse_rfc3339, parse_rfc3339_weak, Error as TimestampError}; +pub use date::{format_rfc3339, format_rfc3339_seconds, format_rfc3339_nanos}; +pub use date::{Rfc3339Timestamp}; diff --git a/humantime/src/wrapper.rs b/humantime/src/wrapper.rs new file mode 100644 index 000000000..df01d791a --- /dev/null +++ b/humantime/src/wrapper.rs @@ -0,0 +1,107 @@ +use std::str::FromStr; +use std::ops::Deref; +use std::fmt; +use std::time::{Duration as StdDuration, SystemTime}; + +use duration::{self, parse_duration, format_duration}; +use date::{self, parse_rfc3339_weak, format_rfc3339}; + +/// A wrapper for duration that has `FromStr` implementation +/// +/// This is useful if you want to use it somewhere where `FromStr` is +/// expected. +/// +/// See `parse_duration` for the description of the format. +/// +/// # Example +/// +/// ``` +/// use std::time::Duration; +/// let x: Duration; +/// x = "12h 5min 2ns".parse::().unwrap().into(); +/// assert_eq!(x, Duration::new(12*3600 + 5*60, 2)) +/// ``` +/// +#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)] +pub struct Duration(StdDuration); + +/// A wrapper for SystemTime that has `FromStr` implementation +/// +/// This is useful if you want to use it somewhere where `FromStr` is +/// expected. +/// +/// See `parse_rfc3339_weak` for the description of the format. The "weak" +/// format is used as it's more pemissive for human input as this is the +/// expected use of the type (e.g. command-line parsing). 
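[Editorial aside, not part of the vendored sources: a sketch of the command-line-style use these wrapper types are aimed at, assuming the `humantime` crate from this patch as a dependency. It uses the permissive "weak" timestamp syntax (space separator, no trailing `Z`), which the strict `parse_rfc3339` would reject.]

```rust
// Editorial sketch: the wrapper types give the std types a FromStr entry point,
// e.g. for values coming from command-line arguments or config files.
extern crate humantime;

use std::time::{Duration, SystemTime};

fn main() {
    // humantime::Duration wraps std Duration and parses via parse_duration.
    let d: Duration = "12h 5min 2ns".parse::<humantime::Duration>()
        .unwrap()
        .into();
    assert_eq!(d, Duration::new(12 * 3600 + 5 * 60, 2));

    // humantime::Timestamp wraps SystemTime and parses via parse_rfc3339_weak,
    // so the "weak" syntax (space separator, no `Z`) is accepted here.
    let t: SystemTime = "2018-02-16 00:31:37".parse::<humantime::Timestamp>()
        .unwrap()
        .into();
    assert_eq!(humantime::format_rfc3339(t).to_string(),
               "2018-02-16T00:31:37Z");
}
```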
+/// +/// # Example +/// +/// ``` +/// use std::time::SystemTime; +/// let x: SystemTime; +/// x = "2018-02-16T00:31:37Z".parse::().unwrap().into(); +/// assert_eq!(humantime::format_rfc3339(x).to_string(), "2018-02-16T00:31:37Z"); +/// ``` +/// +#[derive(Debug, PartialEq, Eq, Clone)] +pub struct Timestamp(SystemTime); + +impl AsRef for Duration { + fn as_ref(&self) -> &StdDuration { &self.0 } +} + +impl Deref for Duration { + type Target = StdDuration; + fn deref(&self) -> &StdDuration { &self.0 } +} + +impl Into for Duration { + fn into(self) -> StdDuration { self.0 } +} + +impl From for Duration { + fn from(dur: StdDuration) -> Duration { Duration(dur) } +} + +impl FromStr for Duration { + type Err = duration::Error; + fn from_str(s: &str) -> Result { + parse_duration(s).map(Duration) + } +} + +impl fmt::Display for Duration { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + format_duration(self.0).fmt(f) + } +} + +impl AsRef for Timestamp { + fn as_ref(&self) -> &SystemTime { &self.0 } +} + +impl Deref for Timestamp { + type Target = SystemTime; + fn deref(&self) -> &SystemTime { &self.0 } +} + +impl Into for Timestamp { + fn into(self) -> SystemTime { self.0 } +} + +impl From for Timestamp { + fn from(dur: SystemTime) -> Timestamp { Timestamp(dur) } +} + +impl FromStr for Timestamp { + type Err = date::Error; + fn from_str(s: &str) -> Result { + parse_rfc3339_weak(s).map(Timestamp) + } +} + +impl fmt::Display for Timestamp { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + format_rfc3339(self.0).fmt(f) + } +} diff --git a/humantime/vagga.yaml b/humantime/vagga.yaml new file mode 100644 index 000000000..b5d6b8b7c --- /dev/null +++ b/humantime/vagga.yaml @@ -0,0 +1,92 @@ +commands: + + cargo: !Command + description: Run any cargo command + container: ubuntu + run: [cargo] + + make: !Command + description: Build the library + container: ubuntu + run: [cargo, build] + + test64: !Command + description: Test the 64bit library + container: ubuntu + environ: { RUST_BACKTRACE: 1 } + run: [cargo, test] + + test32: !Command + description: Test the 32bit library + container: ubuntu32 + environ: { RUST_BACKTRACE: 1 } + run: [cargo, test] + + test: !Command + description: Test the 64bit library + container: ubuntu + environ: { RUST_BACKTRACE: 1 } + prerequisites: [test64, test32] + run: [echo, okay] + + bench: !Command + description: Run benchmarks + container: bench + environ: { RUST_BACKTRACE: 1 } + run: [cargo, bench] + + _bulk: !Command + description: Run `bulk` command (for version bookkeeping) + container: ubuntu + run: [bulk] + +containers: + + ubuntu: + setup: + - !Ubuntu xenial + - !UbuntuUniverse + - !Install [ca-certificates, build-essential, vim] + + - !TarInstall + url: "https://static.rust-lang.org/dist/rust-1.24.0-x86_64-unknown-linux-gnu.tar.gz" + script: "./install.sh --prefix=/usr \ + --components=rustc,rust-std-x86_64-unknown-linux-gnu,cargo" + - &bulk !Tar + url: "https://github.com/tailhook/bulk/releases/download/v0.4.10/bulk-v0.4.10.tar.gz" + sha256: 481513f8a0306a9857d045497fb5b50b50a51e9ff748909ecf7d2bda1de275ab + path: / + + environ: + HOME: /work/target + USER: pc + + ubuntu32: + setup: + - !UbuntuRelease + codename: xenial + arch: i386 + - !UbuntuUniverse + - !Install [ca-certificates, build-essential, vim] + + - !TarInstall + url: "https://static.rust-lang.org/dist/rust-1.24.0-i686-unknown-linux-gnu.tar.gz" + script: "./install.sh --prefix=/usr \ + --components=rustc,rust-std-i686-unknown-linux-gnu,cargo" + + environ: + HOME: /work/target + USER: pc + 
+ bench: + setup: + - !Ubuntu xenial + - !Install [ca-certificates, wget, build-essential] + - !TarInstall + url: https://static.rust-lang.org/dist/rust-nightly-x86_64-unknown-linux-gnu.tar.gz + script: | + ./install.sh --prefix=/usr \ + --components=rustc,rust-std-x86_64-unknown-linux-gnu,cargo + environ: + HOME: /work/target + USER: pc diff --git a/idna/.cargo-checksum.json b/idna/.cargo-checksum.json new file mode 100644 index 000000000..220d3a920 --- /dev/null +++ b/idna/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{},"package":"38f09e0f0b1fb55fdee1f17470ad800da77af5186a1a76c026b679358b7e844e"} \ No newline at end of file diff --git a/idna/Cargo.toml b/idna/Cargo.toml new file mode 100644 index 000000000..8e6c0215e --- /dev/null +++ b/idna/Cargo.toml @@ -0,0 +1,43 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "idna" +version = "0.1.5" +authors = ["The rust-url developers"] +description = "IDNA (Internationalizing Domain Names in Applications) and Punycode." +license = "MIT/Apache-2.0" +repository = "https://github.com/servo/rust-url/" + +[lib] +test = false +doctest = false + +[[test]] +name = "tests" +harness = false + +[[test]] +name = "unit" +[dependencies.matches] +version = "0.1" + +[dependencies.unicode-bidi] +version = "0.3" + +[dependencies.unicode-normalization] +version = "0.1.5" +[dev-dependencies.rustc-serialize] +version = "0.3" + +[dev-dependencies.rustc-test] +version = "0.3" diff --git a/idna/LICENSE-APACHE b/idna/LICENSE-APACHE new file mode 100644 index 000000000..16fe87b06 --- /dev/null +++ b/idna/LICENSE-APACHE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/idna/LICENSE-MIT b/idna/LICENSE-MIT new file mode 100644 index 000000000..24de6b418 --- /dev/null +++ b/idna/LICENSE-MIT @@ -0,0 +1,25 @@ +Copyright (c) 2013-2016 The rust-url developers + +Permission is hereby granted, free of charge, to any +person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without +limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software +is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions +of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/idna/src/IdnaMappingTable.txt b/idna/src/IdnaMappingTable.txt new file mode 100644 index 000000000..295606447 --- /dev/null +++ b/idna/src/IdnaMappingTable.txt @@ -0,0 +1,8405 @@ +# IdnaMappingTable-10.0.0.txt +# Date: 2017-02-23, 14:18:32 GMT +# © 2017 Unicode®, Inc. +# Unicode and the Unicode Logo are registered trademarks of Unicode, Inc. in the U.S. and other countries. +# For terms of use, see http://www.unicode.org/terms_of_use.html +# +# Unicode IDNA Compatible Preprocessing (UTS #46) +# For documentation, see http://www.unicode.org/reports/tr46/ + +0000..002C ; disallowed_STD3_valid # 1.1 ..COMMA +002D..002E ; valid # 1.1 HYPHEN-MINUS..FULL STOP +002F ; disallowed_STD3_valid # 1.1 SOLIDUS +0030..0039 ; valid # 1.1 DIGIT ZERO..DIGIT NINE +003A..0040 ; disallowed_STD3_valid # 1.1 COLON..COMMERCIAL AT +0041 ; mapped ; 0061 # 1.1 LATIN CAPITAL LETTER A +0042 ; mapped ; 0062 # 1.1 LATIN CAPITAL LETTER B +0043 ; mapped ; 0063 # 1.1 LATIN CAPITAL LETTER C +0044 ; mapped ; 0064 # 1.1 LATIN CAPITAL LETTER D +0045 ; mapped ; 0065 # 1.1 LATIN CAPITAL LETTER E +0046 ; mapped ; 0066 # 1.1 LATIN CAPITAL LETTER F +0047 ; mapped ; 0067 # 1.1 LATIN CAPITAL LETTER G +0048 ; mapped ; 0068 # 1.1 LATIN CAPITAL LETTER H +0049 ; mapped ; 0069 # 1.1 LATIN CAPITAL LETTER I +004A ; mapped ; 006A # 1.1 LATIN CAPITAL LETTER J +004B ; mapped ; 006B # 1.1 LATIN CAPITAL LETTER K +004C ; mapped ; 006C # 1.1 LATIN CAPITAL LETTER L +004D ; mapped ; 006D # 1.1 LATIN CAPITAL LETTER M +004E ; mapped ; 006E # 1.1 LATIN CAPITAL LETTER N +004F ; mapped ; 006F # 1.1 LATIN CAPITAL LETTER O +0050 ; mapped ; 0070 # 1.1 LATIN CAPITAL LETTER P +0051 ; mapped ; 0071 # 1.1 LATIN CAPITAL LETTER Q +0052 ; mapped ; 0072 # 1.1 LATIN CAPITAL LETTER R +0053 ; mapped ; 0073 # 1.1 LATIN CAPITAL LETTER S +0054 ; mapped ; 0074 # 1.1 LATIN CAPITAL LETTER T +0055 ; mapped ; 0075 # 1.1 LATIN CAPITAL LETTER U +0056 ; mapped ; 0076 # 1.1 LATIN CAPITAL LETTER V +0057 ; mapped ; 0077 # 1.1 LATIN CAPITAL LETTER W +0058 ; mapped ; 0078 # 1.1 LATIN CAPITAL LETTER X +0059 ; mapped ; 0079 # 1.1 LATIN CAPITAL LETTER Y +005A ; mapped ; 007A # 1.1 LATIN CAPITAL LETTER Z +005B..0060 ; disallowed_STD3_valid # 1.1 LEFT SQUARE BRACKET..GRAVE ACCENT +0061..007A ; valid # 1.1 LATIN SMALL LETTER A..LATIN SMALL LETTER Z +007B..007F ; disallowed_STD3_valid # 1.1 LEFT CURLY BRACKET.. +0080..009F ; disallowed # 1.1 .. 
+00A0 ; disallowed_STD3_mapped ; 0020 # 1.1 NO-BREAK SPACE +00A1..00A7 ; valid ; ; NV8 # 1.1 INVERTED EXCLAMATION MARK..SECTION SIGN +00A8 ; disallowed_STD3_mapped ; 0020 0308 # 1.1 DIAERESIS +00A9 ; valid ; ; NV8 # 1.1 COPYRIGHT SIGN +00AA ; mapped ; 0061 # 1.1 FEMININE ORDINAL INDICATOR +00AB..00AC ; valid ; ; NV8 # 1.1 LEFT-POINTING DOUBLE ANGLE QUOTATION MARK..NOT SIGN +00AD ; ignored # 1.1 SOFT HYPHEN +00AE ; valid ; ; NV8 # 1.1 REGISTERED SIGN +00AF ; disallowed_STD3_mapped ; 0020 0304 # 1.1 MACRON +00B0..00B1 ; valid ; ; NV8 # 1.1 DEGREE SIGN..PLUS-MINUS SIGN +00B2 ; mapped ; 0032 # 1.1 SUPERSCRIPT TWO +00B3 ; mapped ; 0033 # 1.1 SUPERSCRIPT THREE +00B4 ; disallowed_STD3_mapped ; 0020 0301 # 1.1 ACUTE ACCENT +00B5 ; mapped ; 03BC # 1.1 MICRO SIGN +00B6 ; valid ; ; NV8 # 1.1 PILCROW SIGN +00B7 ; valid # 1.1 MIDDLE DOT +00B8 ; disallowed_STD3_mapped ; 0020 0327 # 1.1 CEDILLA +00B9 ; mapped ; 0031 # 1.1 SUPERSCRIPT ONE +00BA ; mapped ; 006F # 1.1 MASCULINE ORDINAL INDICATOR +00BB ; valid ; ; NV8 # 1.1 RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK +00BC ; mapped ; 0031 2044 0034 #1.1 VULGAR FRACTION ONE QUARTER +00BD ; mapped ; 0031 2044 0032 #1.1 VULGAR FRACTION ONE HALF +00BE ; mapped ; 0033 2044 0034 #1.1 VULGAR FRACTION THREE QUARTERS +00BF ; valid ; ; NV8 # 1.1 INVERTED QUESTION MARK +00C0 ; mapped ; 00E0 # 1.1 LATIN CAPITAL LETTER A WITH GRAVE +00C1 ; mapped ; 00E1 # 1.1 LATIN CAPITAL LETTER A WITH ACUTE +00C2 ; mapped ; 00E2 # 1.1 LATIN CAPITAL LETTER A WITH CIRCUMFLEX +00C3 ; mapped ; 00E3 # 1.1 LATIN CAPITAL LETTER A WITH TILDE +00C4 ; mapped ; 00E4 # 1.1 LATIN CAPITAL LETTER A WITH DIAERESIS +00C5 ; mapped ; 00E5 # 1.1 LATIN CAPITAL LETTER A WITH RING ABOVE +00C6 ; mapped ; 00E6 # 1.1 LATIN CAPITAL LETTER AE +00C7 ; mapped ; 00E7 # 1.1 LATIN CAPITAL LETTER C WITH CEDILLA +00C8 ; mapped ; 00E8 # 1.1 LATIN CAPITAL LETTER E WITH GRAVE +00C9 ; mapped ; 00E9 # 1.1 LATIN CAPITAL LETTER E WITH ACUTE +00CA ; mapped ; 00EA # 1.1 LATIN CAPITAL LETTER E WITH CIRCUMFLEX +00CB ; mapped ; 00EB # 1.1 LATIN CAPITAL LETTER E WITH DIAERESIS +00CC ; mapped ; 00EC # 1.1 LATIN CAPITAL LETTER I WITH GRAVE +00CD ; mapped ; 00ED # 1.1 LATIN CAPITAL LETTER I WITH ACUTE +00CE ; mapped ; 00EE # 1.1 LATIN CAPITAL LETTER I WITH CIRCUMFLEX +00CF ; mapped ; 00EF # 1.1 LATIN CAPITAL LETTER I WITH DIAERESIS +00D0 ; mapped ; 00F0 # 1.1 LATIN CAPITAL LETTER ETH +00D1 ; mapped ; 00F1 # 1.1 LATIN CAPITAL LETTER N WITH TILDE +00D2 ; mapped ; 00F2 # 1.1 LATIN CAPITAL LETTER O WITH GRAVE +00D3 ; mapped ; 00F3 # 1.1 LATIN CAPITAL LETTER O WITH ACUTE +00D4 ; mapped ; 00F4 # 1.1 LATIN CAPITAL LETTER O WITH CIRCUMFLEX +00D5 ; mapped ; 00F5 # 1.1 LATIN CAPITAL LETTER O WITH TILDE +00D6 ; mapped ; 00F6 # 1.1 LATIN CAPITAL LETTER O WITH DIAERESIS +00D7 ; valid ; ; NV8 # 1.1 MULTIPLICATION SIGN +00D8 ; mapped ; 00F8 # 1.1 LATIN CAPITAL LETTER O WITH STROKE +00D9 ; mapped ; 00F9 # 1.1 LATIN CAPITAL LETTER U WITH GRAVE +00DA ; mapped ; 00FA # 1.1 LATIN CAPITAL LETTER U WITH ACUTE +00DB ; mapped ; 00FB # 1.1 LATIN CAPITAL LETTER U WITH CIRCUMFLEX +00DC ; mapped ; 00FC # 1.1 LATIN CAPITAL LETTER U WITH DIAERESIS +00DD ; mapped ; 00FD # 1.1 LATIN CAPITAL LETTER Y WITH ACUTE +00DE ; mapped ; 00FE # 1.1 LATIN CAPITAL LETTER THORN +00DF ; deviation ; 0073 0073 # 1.1 LATIN SMALL LETTER SHARP S +00E0..00F6 ; valid # 1.1 LATIN SMALL LETTER A WITH GRAVE..LATIN SMALL LETTER O WITH DIAERESIS +00F7 ; valid ; ; NV8 # 1.1 DIVISION SIGN +00F8..00FF ; valid # 1.1 LATIN SMALL LETTER O WITH STROKE..LATIN SMALL LETTER Y WITH DIAERESIS +0100 ; 
mapped ; 0101 # 1.1 LATIN CAPITAL LETTER A WITH MACRON +0101 ; valid # 1.1 LATIN SMALL LETTER A WITH MACRON +0102 ; mapped ; 0103 # 1.1 LATIN CAPITAL LETTER A WITH BREVE +0103 ; valid # 1.1 LATIN SMALL LETTER A WITH BREVE +0104 ; mapped ; 0105 # 1.1 LATIN CAPITAL LETTER A WITH OGONEK +0105 ; valid # 1.1 LATIN SMALL LETTER A WITH OGONEK +0106 ; mapped ; 0107 # 1.1 LATIN CAPITAL LETTER C WITH ACUTE +0107 ; valid # 1.1 LATIN SMALL LETTER C WITH ACUTE +0108 ; mapped ; 0109 # 1.1 LATIN CAPITAL LETTER C WITH CIRCUMFLEX +0109 ; valid # 1.1 LATIN SMALL LETTER C WITH CIRCUMFLEX +010A ; mapped ; 010B # 1.1 LATIN CAPITAL LETTER C WITH DOT ABOVE +010B ; valid # 1.1 LATIN SMALL LETTER C WITH DOT ABOVE +010C ; mapped ; 010D # 1.1 LATIN CAPITAL LETTER C WITH CARON +010D ; valid # 1.1 LATIN SMALL LETTER C WITH CARON +010E ; mapped ; 010F # 1.1 LATIN CAPITAL LETTER D WITH CARON +010F ; valid # 1.1 LATIN SMALL LETTER D WITH CARON +0110 ; mapped ; 0111 # 1.1 LATIN CAPITAL LETTER D WITH STROKE +0111 ; valid # 1.1 LATIN SMALL LETTER D WITH STROKE +0112 ; mapped ; 0113 # 1.1 LATIN CAPITAL LETTER E WITH MACRON +0113 ; valid # 1.1 LATIN SMALL LETTER E WITH MACRON +0114 ; mapped ; 0115 # 1.1 LATIN CAPITAL LETTER E WITH BREVE +0115 ; valid # 1.1 LATIN SMALL LETTER E WITH BREVE +0116 ; mapped ; 0117 # 1.1 LATIN CAPITAL LETTER E WITH DOT ABOVE +0117 ; valid # 1.1 LATIN SMALL LETTER E WITH DOT ABOVE +0118 ; mapped ; 0119 # 1.1 LATIN CAPITAL LETTER E WITH OGONEK +0119 ; valid # 1.1 LATIN SMALL LETTER E WITH OGONEK +011A ; mapped ; 011B # 1.1 LATIN CAPITAL LETTER E WITH CARON +011B ; valid # 1.1 LATIN SMALL LETTER E WITH CARON +011C ; mapped ; 011D # 1.1 LATIN CAPITAL LETTER G WITH CIRCUMFLEX +011D ; valid # 1.1 LATIN SMALL LETTER G WITH CIRCUMFLEX +011E ; mapped ; 011F # 1.1 LATIN CAPITAL LETTER G WITH BREVE +011F ; valid # 1.1 LATIN SMALL LETTER G WITH BREVE +0120 ; mapped ; 0121 # 1.1 LATIN CAPITAL LETTER G WITH DOT ABOVE +0121 ; valid # 1.1 LATIN SMALL LETTER G WITH DOT ABOVE +0122 ; mapped ; 0123 # 1.1 LATIN CAPITAL LETTER G WITH CEDILLA +0123 ; valid # 1.1 LATIN SMALL LETTER G WITH CEDILLA +0124 ; mapped ; 0125 # 1.1 LATIN CAPITAL LETTER H WITH CIRCUMFLEX +0125 ; valid # 1.1 LATIN SMALL LETTER H WITH CIRCUMFLEX +0126 ; mapped ; 0127 # 1.1 LATIN CAPITAL LETTER H WITH STROKE +0127 ; valid # 1.1 LATIN SMALL LETTER H WITH STROKE +0128 ; mapped ; 0129 # 1.1 LATIN CAPITAL LETTER I WITH TILDE +0129 ; valid # 1.1 LATIN SMALL LETTER I WITH TILDE +012A ; mapped ; 012B # 1.1 LATIN CAPITAL LETTER I WITH MACRON +012B ; valid # 1.1 LATIN SMALL LETTER I WITH MACRON +012C ; mapped ; 012D # 1.1 LATIN CAPITAL LETTER I WITH BREVE +012D ; valid # 1.1 LATIN SMALL LETTER I WITH BREVE +012E ; mapped ; 012F # 1.1 LATIN CAPITAL LETTER I WITH OGONEK +012F ; valid # 1.1 LATIN SMALL LETTER I WITH OGONEK +0130 ; mapped ; 0069 0307 # 1.1 LATIN CAPITAL LETTER I WITH DOT ABOVE +0131 ; valid # 1.1 LATIN SMALL LETTER DOTLESS I +0132..0133 ; mapped ; 0069 006A # 1.1 LATIN CAPITAL LIGATURE IJ..LATIN SMALL LIGATURE IJ +0134 ; mapped ; 0135 # 1.1 LATIN CAPITAL LETTER J WITH CIRCUMFLEX +0135 ; valid # 1.1 LATIN SMALL LETTER J WITH CIRCUMFLEX +0136 ; mapped ; 0137 # 1.1 LATIN CAPITAL LETTER K WITH CEDILLA +0137..0138 ; valid # 1.1 LATIN SMALL LETTER K WITH CEDILLA..LATIN SMALL LETTER KRA +0139 ; mapped ; 013A # 1.1 LATIN CAPITAL LETTER L WITH ACUTE +013A ; valid # 1.1 LATIN SMALL LETTER L WITH ACUTE +013B ; mapped ; 013C # 1.1 LATIN CAPITAL LETTER L WITH CEDILLA +013C ; valid # 1.1 LATIN SMALL LETTER L WITH CEDILLA +013D ; mapped ; 013E # 1.1 LATIN 
CAPITAL LETTER L WITH CARON +013E ; valid # 1.1 LATIN SMALL LETTER L WITH CARON +013F..0140 ; mapped ; 006C 00B7 # 1.1 LATIN CAPITAL LETTER L WITH MIDDLE DOT..LATIN SMALL LETTER L WITH MIDDLE DOT +0141 ; mapped ; 0142 # 1.1 LATIN CAPITAL LETTER L WITH STROKE +0142 ; valid # 1.1 LATIN SMALL LETTER L WITH STROKE +0143 ; mapped ; 0144 # 1.1 LATIN CAPITAL LETTER N WITH ACUTE +0144 ; valid # 1.1 LATIN SMALL LETTER N WITH ACUTE +0145 ; mapped ; 0146 # 1.1 LATIN CAPITAL LETTER N WITH CEDILLA +0146 ; valid # 1.1 LATIN SMALL LETTER N WITH CEDILLA +0147 ; mapped ; 0148 # 1.1 LATIN CAPITAL LETTER N WITH CARON +0148 ; valid # 1.1 LATIN SMALL LETTER N WITH CARON +0149 ; mapped ; 02BC 006E # 1.1 LATIN SMALL LETTER N PRECEDED BY APOSTROPHE +014A ; mapped ; 014B # 1.1 LATIN CAPITAL LETTER ENG +014B ; valid # 1.1 LATIN SMALL LETTER ENG +014C ; mapped ; 014D # 1.1 LATIN CAPITAL LETTER O WITH MACRON +014D ; valid # 1.1 LATIN SMALL LETTER O WITH MACRON +014E ; mapped ; 014F # 1.1 LATIN CAPITAL LETTER O WITH BREVE +014F ; valid # 1.1 LATIN SMALL LETTER O WITH BREVE +0150 ; mapped ; 0151 # 1.1 LATIN CAPITAL LETTER O WITH DOUBLE ACUTE +0151 ; valid # 1.1 LATIN SMALL LETTER O WITH DOUBLE ACUTE +0152 ; mapped ; 0153 # 1.1 LATIN CAPITAL LIGATURE OE +0153 ; valid # 1.1 LATIN SMALL LIGATURE OE +0154 ; mapped ; 0155 # 1.1 LATIN CAPITAL LETTER R WITH ACUTE +0155 ; valid # 1.1 LATIN SMALL LETTER R WITH ACUTE +0156 ; mapped ; 0157 # 1.1 LATIN CAPITAL LETTER R WITH CEDILLA +0157 ; valid # 1.1 LATIN SMALL LETTER R WITH CEDILLA +0158 ; mapped ; 0159 # 1.1 LATIN CAPITAL LETTER R WITH CARON +0159 ; valid # 1.1 LATIN SMALL LETTER R WITH CARON +015A ; mapped ; 015B # 1.1 LATIN CAPITAL LETTER S WITH ACUTE +015B ; valid # 1.1 LATIN SMALL LETTER S WITH ACUTE +015C ; mapped ; 015D # 1.1 LATIN CAPITAL LETTER S WITH CIRCUMFLEX +015D ; valid # 1.1 LATIN SMALL LETTER S WITH CIRCUMFLEX +015E ; mapped ; 015F # 1.1 LATIN CAPITAL LETTER S WITH CEDILLA +015F ; valid # 1.1 LATIN SMALL LETTER S WITH CEDILLA +0160 ; mapped ; 0161 # 1.1 LATIN CAPITAL LETTER S WITH CARON +0161 ; valid # 1.1 LATIN SMALL LETTER S WITH CARON +0162 ; mapped ; 0163 # 1.1 LATIN CAPITAL LETTER T WITH CEDILLA +0163 ; valid # 1.1 LATIN SMALL LETTER T WITH CEDILLA +0164 ; mapped ; 0165 # 1.1 LATIN CAPITAL LETTER T WITH CARON +0165 ; valid # 1.1 LATIN SMALL LETTER T WITH CARON +0166 ; mapped ; 0167 # 1.1 LATIN CAPITAL LETTER T WITH STROKE +0167 ; valid # 1.1 LATIN SMALL LETTER T WITH STROKE +0168 ; mapped ; 0169 # 1.1 LATIN CAPITAL LETTER U WITH TILDE +0169 ; valid # 1.1 LATIN SMALL LETTER U WITH TILDE +016A ; mapped ; 016B # 1.1 LATIN CAPITAL LETTER U WITH MACRON +016B ; valid # 1.1 LATIN SMALL LETTER U WITH MACRON +016C ; mapped ; 016D # 1.1 LATIN CAPITAL LETTER U WITH BREVE +016D ; valid # 1.1 LATIN SMALL LETTER U WITH BREVE +016E ; mapped ; 016F # 1.1 LATIN CAPITAL LETTER U WITH RING ABOVE +016F ; valid # 1.1 LATIN SMALL LETTER U WITH RING ABOVE +0170 ; mapped ; 0171 # 1.1 LATIN CAPITAL LETTER U WITH DOUBLE ACUTE +0171 ; valid # 1.1 LATIN SMALL LETTER U WITH DOUBLE ACUTE +0172 ; mapped ; 0173 # 1.1 LATIN CAPITAL LETTER U WITH OGONEK +0173 ; valid # 1.1 LATIN SMALL LETTER U WITH OGONEK +0174 ; mapped ; 0175 # 1.1 LATIN CAPITAL LETTER W WITH CIRCUMFLEX +0175 ; valid # 1.1 LATIN SMALL LETTER W WITH CIRCUMFLEX +0176 ; mapped ; 0177 # 1.1 LATIN CAPITAL LETTER Y WITH CIRCUMFLEX +0177 ; valid # 1.1 LATIN SMALL LETTER Y WITH CIRCUMFLEX +0178 ; mapped ; 00FF # 1.1 LATIN CAPITAL LETTER Y WITH DIAERESIS +0179 ; mapped ; 017A # 1.1 LATIN CAPITAL LETTER Z WITH ACUTE +017A ; valid # 
1.1 LATIN SMALL LETTER Z WITH ACUTE +017B ; mapped ; 017C # 1.1 LATIN CAPITAL LETTER Z WITH DOT ABOVE +017C ; valid # 1.1 LATIN SMALL LETTER Z WITH DOT ABOVE +017D ; mapped ; 017E # 1.1 LATIN CAPITAL LETTER Z WITH CARON +017E ; valid # 1.1 LATIN SMALL LETTER Z WITH CARON +017F ; mapped ; 0073 # 1.1 LATIN SMALL LETTER LONG S +0180 ; valid # 1.1 LATIN SMALL LETTER B WITH STROKE +0181 ; mapped ; 0253 # 1.1 LATIN CAPITAL LETTER B WITH HOOK +0182 ; mapped ; 0183 # 1.1 LATIN CAPITAL LETTER B WITH TOPBAR +0183 ; valid # 1.1 LATIN SMALL LETTER B WITH TOPBAR +0184 ; mapped ; 0185 # 1.1 LATIN CAPITAL LETTER TONE SIX +0185 ; valid # 1.1 LATIN SMALL LETTER TONE SIX +0186 ; mapped ; 0254 # 1.1 LATIN CAPITAL LETTER OPEN O +0187 ; mapped ; 0188 # 1.1 LATIN CAPITAL LETTER C WITH HOOK +0188 ; valid # 1.1 LATIN SMALL LETTER C WITH HOOK +0189 ; mapped ; 0256 # 1.1 LATIN CAPITAL LETTER AFRICAN D +018A ; mapped ; 0257 # 1.1 LATIN CAPITAL LETTER D WITH HOOK +018B ; mapped ; 018C # 1.1 LATIN CAPITAL LETTER D WITH TOPBAR +018C..018D ; valid # 1.1 LATIN SMALL LETTER D WITH TOPBAR..LATIN SMALL LETTER TURNED DELTA +018E ; mapped ; 01DD # 1.1 LATIN CAPITAL LETTER REVERSED E +018F ; mapped ; 0259 # 1.1 LATIN CAPITAL LETTER SCHWA +0190 ; mapped ; 025B # 1.1 LATIN CAPITAL LETTER OPEN E +0191 ; mapped ; 0192 # 1.1 LATIN CAPITAL LETTER F WITH HOOK +0192 ; valid # 1.1 LATIN SMALL LETTER F WITH HOOK +0193 ; mapped ; 0260 # 1.1 LATIN CAPITAL LETTER G WITH HOOK +0194 ; mapped ; 0263 # 1.1 LATIN CAPITAL LETTER GAMMA +0195 ; valid # 1.1 LATIN SMALL LETTER HV +0196 ; mapped ; 0269 # 1.1 LATIN CAPITAL LETTER IOTA +0197 ; mapped ; 0268 # 1.1 LATIN CAPITAL LETTER I WITH STROKE +0198 ; mapped ; 0199 # 1.1 LATIN CAPITAL LETTER K WITH HOOK +0199..019B ; valid # 1.1 LATIN SMALL LETTER K WITH HOOK..LATIN SMALL LETTER LAMBDA WITH STROKE +019C ; mapped ; 026F # 1.1 LATIN CAPITAL LETTER TURNED M +019D ; mapped ; 0272 # 1.1 LATIN CAPITAL LETTER N WITH LEFT HOOK +019E ; valid # 1.1 LATIN SMALL LETTER N WITH LONG RIGHT LEG +019F ; mapped ; 0275 # 1.1 LATIN CAPITAL LETTER O WITH MIDDLE TILDE +01A0 ; mapped ; 01A1 # 1.1 LATIN CAPITAL LETTER O WITH HORN +01A1 ; valid # 1.1 LATIN SMALL LETTER O WITH HORN +01A2 ; mapped ; 01A3 # 1.1 LATIN CAPITAL LETTER OI +01A3 ; valid # 1.1 LATIN SMALL LETTER OI +01A4 ; mapped ; 01A5 # 1.1 LATIN CAPITAL LETTER P WITH HOOK +01A5 ; valid # 1.1 LATIN SMALL LETTER P WITH HOOK +01A6 ; mapped ; 0280 # 1.1 LATIN LETTER YR +01A7 ; mapped ; 01A8 # 1.1 LATIN CAPITAL LETTER TONE TWO +01A8 ; valid # 1.1 LATIN SMALL LETTER TONE TWO +01A9 ; mapped ; 0283 # 1.1 LATIN CAPITAL LETTER ESH +01AA..01AB ; valid # 1.1 LATIN LETTER REVERSED ESH LOOP..LATIN SMALL LETTER T WITH PALATAL HOOK +01AC ; mapped ; 01AD # 1.1 LATIN CAPITAL LETTER T WITH HOOK +01AD ; valid # 1.1 LATIN SMALL LETTER T WITH HOOK +01AE ; mapped ; 0288 # 1.1 LATIN CAPITAL LETTER T WITH RETROFLEX HOOK +01AF ; mapped ; 01B0 # 1.1 LATIN CAPITAL LETTER U WITH HORN +01B0 ; valid # 1.1 LATIN SMALL LETTER U WITH HORN +01B1 ; mapped ; 028A # 1.1 LATIN CAPITAL LETTER UPSILON +01B2 ; mapped ; 028B # 1.1 LATIN CAPITAL LETTER V WITH HOOK +01B3 ; mapped ; 01B4 # 1.1 LATIN CAPITAL LETTER Y WITH HOOK +01B4 ; valid # 1.1 LATIN SMALL LETTER Y WITH HOOK +01B5 ; mapped ; 01B6 # 1.1 LATIN CAPITAL LETTER Z WITH STROKE +01B6 ; valid # 1.1 LATIN SMALL LETTER Z WITH STROKE +01B7 ; mapped ; 0292 # 1.1 LATIN CAPITAL LETTER EZH +01B8 ; mapped ; 01B9 # 1.1 LATIN CAPITAL LETTER EZH REVERSED +01B9..01BB ; valid # 1.1 LATIN SMALL LETTER EZH REVERSED..LATIN LETTER TWO WITH STROKE +01BC ; mapped ; 
01BD # 1.1 LATIN CAPITAL LETTER TONE FIVE +01BD..01C3 ; valid # 1.1 LATIN SMALL LETTER TONE FIVE..LATIN LETTER RETROFLEX CLICK +01C4..01C6 ; mapped ; 0064 017E # 1.1 LATIN CAPITAL LETTER DZ WITH CARON..LATIN SMALL LETTER DZ WITH CARON +01C7..01C9 ; mapped ; 006C 006A # 1.1 LATIN CAPITAL LETTER LJ..LATIN SMALL LETTER LJ +01CA..01CC ; mapped ; 006E 006A # 1.1 LATIN CAPITAL LETTER NJ..LATIN SMALL LETTER NJ +01CD ; mapped ; 01CE # 1.1 LATIN CAPITAL LETTER A WITH CARON +01CE ; valid # 1.1 LATIN SMALL LETTER A WITH CARON +01CF ; mapped ; 01D0 # 1.1 LATIN CAPITAL LETTER I WITH CARON +01D0 ; valid # 1.1 LATIN SMALL LETTER I WITH CARON +01D1 ; mapped ; 01D2 # 1.1 LATIN CAPITAL LETTER O WITH CARON +01D2 ; valid # 1.1 LATIN SMALL LETTER O WITH CARON +01D3 ; mapped ; 01D4 # 1.1 LATIN CAPITAL LETTER U WITH CARON +01D4 ; valid # 1.1 LATIN SMALL LETTER U WITH CARON +01D5 ; mapped ; 01D6 # 1.1 LATIN CAPITAL LETTER U WITH DIAERESIS AND MACRON +01D6 ; valid # 1.1 LATIN SMALL LETTER U WITH DIAERESIS AND MACRON +01D7 ; mapped ; 01D8 # 1.1 LATIN CAPITAL LETTER U WITH DIAERESIS AND ACUTE +01D8 ; valid # 1.1 LATIN SMALL LETTER U WITH DIAERESIS AND ACUTE +01D9 ; mapped ; 01DA # 1.1 LATIN CAPITAL LETTER U WITH DIAERESIS AND CARON +01DA ; valid # 1.1 LATIN SMALL LETTER U WITH DIAERESIS AND CARON +01DB ; mapped ; 01DC # 1.1 LATIN CAPITAL LETTER U WITH DIAERESIS AND GRAVE +01DC..01DD ; valid # 1.1 LATIN SMALL LETTER U WITH DIAERESIS AND GRAVE..LATIN SMALL LETTER TURNED E +01DE ; mapped ; 01DF # 1.1 LATIN CAPITAL LETTER A WITH DIAERESIS AND MACRON +01DF ; valid # 1.1 LATIN SMALL LETTER A WITH DIAERESIS AND MACRON +01E0 ; mapped ; 01E1 # 1.1 LATIN CAPITAL LETTER A WITH DOT ABOVE AND MACRON +01E1 ; valid # 1.1 LATIN SMALL LETTER A WITH DOT ABOVE AND MACRON +01E2 ; mapped ; 01E3 # 1.1 LATIN CAPITAL LETTER AE WITH MACRON +01E3 ; valid # 1.1 LATIN SMALL LETTER AE WITH MACRON +01E4 ; mapped ; 01E5 # 1.1 LATIN CAPITAL LETTER G WITH STROKE +01E5 ; valid # 1.1 LATIN SMALL LETTER G WITH STROKE +01E6 ; mapped ; 01E7 # 1.1 LATIN CAPITAL LETTER G WITH CARON +01E7 ; valid # 1.1 LATIN SMALL LETTER G WITH CARON +01E8 ; mapped ; 01E9 # 1.1 LATIN CAPITAL LETTER K WITH CARON +01E9 ; valid # 1.1 LATIN SMALL LETTER K WITH CARON +01EA ; mapped ; 01EB # 1.1 LATIN CAPITAL LETTER O WITH OGONEK +01EB ; valid # 1.1 LATIN SMALL LETTER O WITH OGONEK +01EC ; mapped ; 01ED # 1.1 LATIN CAPITAL LETTER O WITH OGONEK AND MACRON +01ED ; valid # 1.1 LATIN SMALL LETTER O WITH OGONEK AND MACRON +01EE ; mapped ; 01EF # 1.1 LATIN CAPITAL LETTER EZH WITH CARON +01EF..01F0 ; valid # 1.1 LATIN SMALL LETTER EZH WITH CARON..LATIN SMALL LETTER J WITH CARON +01F1..01F3 ; mapped ; 0064 007A # 1.1 LATIN CAPITAL LETTER DZ..LATIN SMALL LETTER DZ +01F4 ; mapped ; 01F5 # 1.1 LATIN CAPITAL LETTER G WITH ACUTE +01F5 ; valid # 1.1 LATIN SMALL LETTER G WITH ACUTE +01F6 ; mapped ; 0195 # 3.0 LATIN CAPITAL LETTER HWAIR +01F7 ; mapped ; 01BF # 3.0 LATIN CAPITAL LETTER WYNN +01F8 ; mapped ; 01F9 # 3.0 LATIN CAPITAL LETTER N WITH GRAVE +01F9 ; valid # 3.0 LATIN SMALL LETTER N WITH GRAVE +01FA ; mapped ; 01FB # 1.1 LATIN CAPITAL LETTER A WITH RING ABOVE AND ACUTE +01FB ; valid # 1.1 LATIN SMALL LETTER A WITH RING ABOVE AND ACUTE +01FC ; mapped ; 01FD # 1.1 LATIN CAPITAL LETTER AE WITH ACUTE +01FD ; valid # 1.1 LATIN SMALL LETTER AE WITH ACUTE +01FE ; mapped ; 01FF # 1.1 LATIN CAPITAL LETTER O WITH STROKE AND ACUTE +01FF ; valid # 1.1 LATIN SMALL LETTER O WITH STROKE AND ACUTE +0200 ; mapped ; 0201 # 1.1 LATIN CAPITAL LETTER A WITH DOUBLE GRAVE +0201 ; valid # 1.1 LATIN SMALL 
LETTER A WITH DOUBLE GRAVE +0202 ; mapped ; 0203 # 1.1 LATIN CAPITAL LETTER A WITH INVERTED BREVE +0203 ; valid # 1.1 LATIN SMALL LETTER A WITH INVERTED BREVE +0204 ; mapped ; 0205 # 1.1 LATIN CAPITAL LETTER E WITH DOUBLE GRAVE +0205 ; valid # 1.1 LATIN SMALL LETTER E WITH DOUBLE GRAVE +0206 ; mapped ; 0207 # 1.1 LATIN CAPITAL LETTER E WITH INVERTED BREVE +0207 ; valid # 1.1 LATIN SMALL LETTER E WITH INVERTED BREVE +0208 ; mapped ; 0209 # 1.1 LATIN CAPITAL LETTER I WITH DOUBLE GRAVE +0209 ; valid # 1.1 LATIN SMALL LETTER I WITH DOUBLE GRAVE +020A ; mapped ; 020B # 1.1 LATIN CAPITAL LETTER I WITH INVERTED BREVE +020B ; valid # 1.1 LATIN SMALL LETTER I WITH INVERTED BREVE +020C ; mapped ; 020D # 1.1 LATIN CAPITAL LETTER O WITH DOUBLE GRAVE +020D ; valid # 1.1 LATIN SMALL LETTER O WITH DOUBLE GRAVE +020E ; mapped ; 020F # 1.1 LATIN CAPITAL LETTER O WITH INVERTED BREVE +020F ; valid # 1.1 LATIN SMALL LETTER O WITH INVERTED BREVE +0210 ; mapped ; 0211 # 1.1 LATIN CAPITAL LETTER R WITH DOUBLE GRAVE +0211 ; valid # 1.1 LATIN SMALL LETTER R WITH DOUBLE GRAVE +0212 ; mapped ; 0213 # 1.1 LATIN CAPITAL LETTER R WITH INVERTED BREVE +0213 ; valid # 1.1 LATIN SMALL LETTER R WITH INVERTED BREVE +0214 ; mapped ; 0215 # 1.1 LATIN CAPITAL LETTER U WITH DOUBLE GRAVE +0215 ; valid # 1.1 LATIN SMALL LETTER U WITH DOUBLE GRAVE +0216 ; mapped ; 0217 # 1.1 LATIN CAPITAL LETTER U WITH INVERTED BREVE +0217 ; valid # 1.1 LATIN SMALL LETTER U WITH INVERTED BREVE +0218 ; mapped ; 0219 # 3.0 LATIN CAPITAL LETTER S WITH COMMA BELOW +0219 ; valid # 3.0 LATIN SMALL LETTER S WITH COMMA BELOW +021A ; mapped ; 021B # 3.0 LATIN CAPITAL LETTER T WITH COMMA BELOW +021B ; valid # 3.0 LATIN SMALL LETTER T WITH COMMA BELOW +021C ; mapped ; 021D # 3.0 LATIN CAPITAL LETTER YOGH +021D ; valid # 3.0 LATIN SMALL LETTER YOGH +021E ; mapped ; 021F # 3.0 LATIN CAPITAL LETTER H WITH CARON +021F ; valid # 3.0 LATIN SMALL LETTER H WITH CARON +0220 ; mapped ; 019E # 3.2 LATIN CAPITAL LETTER N WITH LONG RIGHT LEG +0221 ; valid # 4.0 LATIN SMALL LETTER D WITH CURL +0222 ; mapped ; 0223 # 3.0 LATIN CAPITAL LETTER OU +0223 ; valid # 3.0 LATIN SMALL LETTER OU +0224 ; mapped ; 0225 # 3.0 LATIN CAPITAL LETTER Z WITH HOOK +0225 ; valid # 3.0 LATIN SMALL LETTER Z WITH HOOK +0226 ; mapped ; 0227 # 3.0 LATIN CAPITAL LETTER A WITH DOT ABOVE +0227 ; valid # 3.0 LATIN SMALL LETTER A WITH DOT ABOVE +0228 ; mapped ; 0229 # 3.0 LATIN CAPITAL LETTER E WITH CEDILLA +0229 ; valid # 3.0 LATIN SMALL LETTER E WITH CEDILLA +022A ; mapped ; 022B # 3.0 LATIN CAPITAL LETTER O WITH DIAERESIS AND MACRON +022B ; valid # 3.0 LATIN SMALL LETTER O WITH DIAERESIS AND MACRON +022C ; mapped ; 022D # 3.0 LATIN CAPITAL LETTER O WITH TILDE AND MACRON +022D ; valid # 3.0 LATIN SMALL LETTER O WITH TILDE AND MACRON +022E ; mapped ; 022F # 3.0 LATIN CAPITAL LETTER O WITH DOT ABOVE +022F ; valid # 3.0 LATIN SMALL LETTER O WITH DOT ABOVE +0230 ; mapped ; 0231 # 3.0 LATIN CAPITAL LETTER O WITH DOT ABOVE AND MACRON +0231 ; valid # 3.0 LATIN SMALL LETTER O WITH DOT ABOVE AND MACRON +0232 ; mapped ; 0233 # 3.0 LATIN CAPITAL LETTER Y WITH MACRON +0233 ; valid # 3.0 LATIN SMALL LETTER Y WITH MACRON +0234..0236 ; valid # 4.0 LATIN SMALL LETTER L WITH CURL..LATIN SMALL LETTER T WITH CURL +0237..0239 ; valid # 4.1 LATIN SMALL LETTER DOTLESS J..LATIN SMALL LETTER QP DIGRAPH +023A ; mapped ; 2C65 # 4.1 LATIN CAPITAL LETTER A WITH STROKE +023B ; mapped ; 023C # 4.1 LATIN CAPITAL LETTER C WITH STROKE +023C ; valid # 4.1 LATIN SMALL LETTER C WITH STROKE +023D ; mapped ; 019A # 4.1 LATIN CAPITAL 
LETTER L WITH BAR +023E ; mapped ; 2C66 # 4.1 LATIN CAPITAL LETTER T WITH DIAGONAL STROKE +023F..0240 ; valid # 4.1 LATIN SMALL LETTER S WITH SWASH TAIL..LATIN SMALL LETTER Z WITH SWASH TAIL +0241 ; mapped ; 0242 # 4.1 LATIN CAPITAL LETTER GLOTTAL STOP +0242 ; valid # 5.0 LATIN SMALL LETTER GLOTTAL STOP +0243 ; mapped ; 0180 # 5.0 LATIN CAPITAL LETTER B WITH STROKE +0244 ; mapped ; 0289 # 5.0 LATIN CAPITAL LETTER U BAR +0245 ; mapped ; 028C # 5.0 LATIN CAPITAL LETTER TURNED V +0246 ; mapped ; 0247 # 5.0 LATIN CAPITAL LETTER E WITH STROKE +0247 ; valid # 5.0 LATIN SMALL LETTER E WITH STROKE +0248 ; mapped ; 0249 # 5.0 LATIN CAPITAL LETTER J WITH STROKE +0249 ; valid # 5.0 LATIN SMALL LETTER J WITH STROKE +024A ; mapped ; 024B # 5.0 LATIN CAPITAL LETTER SMALL Q WITH HOOK TAIL +024B ; valid # 5.0 LATIN SMALL LETTER Q WITH HOOK TAIL +024C ; mapped ; 024D # 5.0 LATIN CAPITAL LETTER R WITH STROKE +024D ; valid # 5.0 LATIN SMALL LETTER R WITH STROKE +024E ; mapped ; 024F # 5.0 LATIN CAPITAL LETTER Y WITH STROKE +024F ; valid # 5.0 LATIN SMALL LETTER Y WITH STROKE +0250..02A8 ; valid # 1.1 LATIN SMALL LETTER TURNED A..LATIN SMALL LETTER TC DIGRAPH WITH CURL +02A9..02AD ; valid # 3.0 LATIN SMALL LETTER FENG DIGRAPH..LATIN LETTER BIDENTAL PERCUSSIVE +02AE..02AF ; valid # 4.0 LATIN SMALL LETTER TURNED H WITH FISHHOOK..LATIN SMALL LETTER TURNED H WITH FISHHOOK AND TAIL +02B0 ; mapped ; 0068 # 1.1 MODIFIER LETTER SMALL H +02B1 ; mapped ; 0266 # 1.1 MODIFIER LETTER SMALL H WITH HOOK +02B2 ; mapped ; 006A # 1.1 MODIFIER LETTER SMALL J +02B3 ; mapped ; 0072 # 1.1 MODIFIER LETTER SMALL R +02B4 ; mapped ; 0279 # 1.1 MODIFIER LETTER SMALL TURNED R +02B5 ; mapped ; 027B # 1.1 MODIFIER LETTER SMALL TURNED R WITH HOOK +02B6 ; mapped ; 0281 # 1.1 MODIFIER LETTER SMALL CAPITAL INVERTED R +02B7 ; mapped ; 0077 # 1.1 MODIFIER LETTER SMALL W +02B8 ; mapped ; 0079 # 1.1 MODIFIER LETTER SMALL Y +02B9..02C1 ; valid # 1.1 MODIFIER LETTER PRIME..MODIFIER LETTER REVERSED GLOTTAL STOP +02C2..02C5 ; valid ; ; NV8 # 1.1 MODIFIER LETTER LEFT ARROWHEAD..MODIFIER LETTER DOWN ARROWHEAD +02C6..02D1 ; valid # 1.1 MODIFIER LETTER CIRCUMFLEX ACCENT..MODIFIER LETTER HALF TRIANGULAR COLON +02D2..02D7 ; valid ; ; NV8 # 1.1 MODIFIER LETTER CENTRED RIGHT HALF RING..MODIFIER LETTER MINUS SIGN +02D8 ; disallowed_STD3_mapped ; 0020 0306 # 1.1 BREVE +02D9 ; disallowed_STD3_mapped ; 0020 0307 # 1.1 DOT ABOVE +02DA ; disallowed_STD3_mapped ; 0020 030A # 1.1 RING ABOVE +02DB ; disallowed_STD3_mapped ; 0020 0328 # 1.1 OGONEK +02DC ; disallowed_STD3_mapped ; 0020 0303 # 1.1 SMALL TILDE +02DD ; disallowed_STD3_mapped ; 0020 030B # 1.1 DOUBLE ACUTE ACCENT +02DE ; valid ; ; NV8 # 1.1 MODIFIER LETTER RHOTIC HOOK +02DF ; valid ; ; NV8 # 3.0 MODIFIER LETTER CROSS ACCENT +02E0 ; mapped ; 0263 # 1.1 MODIFIER LETTER SMALL GAMMA +02E1 ; mapped ; 006C # 1.1 MODIFIER LETTER SMALL L +02E2 ; mapped ; 0073 # 1.1 MODIFIER LETTER SMALL S +02E3 ; mapped ; 0078 # 1.1 MODIFIER LETTER SMALL X +02E4 ; mapped ; 0295 # 1.1 MODIFIER LETTER SMALL REVERSED GLOTTAL STOP +02E5..02E9 ; valid ; ; NV8 # 1.1 MODIFIER LETTER EXTRA-HIGH TONE BAR..MODIFIER LETTER EXTRA-LOW TONE BAR +02EA..02EB ; valid ; ; NV8 # 3.0 MODIFIER LETTER YIN DEPARTING TONE MARK..MODIFIER LETTER YANG DEPARTING TONE MARK +02EC ; valid # 3.0 MODIFIER LETTER VOICING +02ED ; valid ; ; NV8 # 3.0 MODIFIER LETTER UNASPIRATED +02EE ; valid # 3.0 MODIFIER LETTER DOUBLE APOSTROPHE +02EF..02FF ; valid ; ; NV8 # 4.0 MODIFIER LETTER LOW DOWN ARROWHEAD..MODIFIER LETTER LOW LEFT ARROW +0300..033F ; valid # 1.1 COMBINING 
GRAVE ACCENT..COMBINING DOUBLE OVERLINE +0340 ; mapped ; 0300 # 1.1 COMBINING GRAVE TONE MARK +0341 ; mapped ; 0301 # 1.1 COMBINING ACUTE TONE MARK +0342 ; valid # 1.1 COMBINING GREEK PERISPOMENI +0343 ; mapped ; 0313 # 1.1 COMBINING GREEK KORONIS +0344 ; mapped ; 0308 0301 # 1.1 COMBINING GREEK DIALYTIKA TONOS +0345 ; mapped ; 03B9 # 1.1 COMBINING GREEK YPOGEGRAMMENI +0346..034E ; valid # 3.0 COMBINING BRIDGE ABOVE..COMBINING UPWARDS ARROW BELOW +034F ; ignored # 3.2 COMBINING GRAPHEME JOINER +0350..0357 ; valid # 4.0 COMBINING RIGHT ARROWHEAD ABOVE..COMBINING RIGHT HALF RING ABOVE +0358..035C ; valid # 4.1 COMBINING DOT ABOVE RIGHT..COMBINING DOUBLE BREVE BELOW +035D..035F ; valid # 4.0 COMBINING DOUBLE BREVE..COMBINING DOUBLE MACRON BELOW +0360..0361 ; valid # 1.1 COMBINING DOUBLE TILDE..COMBINING DOUBLE INVERTED BREVE +0362 ; valid # 3.0 COMBINING DOUBLE RIGHTWARDS ARROW BELOW +0363..036F ; valid # 3.2 COMBINING LATIN SMALL LETTER A..COMBINING LATIN SMALL LETTER X +0370 ; mapped ; 0371 # 5.1 GREEK CAPITAL LETTER HETA +0371 ; valid # 5.1 GREEK SMALL LETTER HETA +0372 ; mapped ; 0373 # 5.1 GREEK CAPITAL LETTER ARCHAIC SAMPI +0373 ; valid # 5.1 GREEK SMALL LETTER ARCHAIC SAMPI +0374 ; mapped ; 02B9 # 1.1 GREEK NUMERAL SIGN +0375 ; valid # 1.1 GREEK LOWER NUMERAL SIGN +0376 ; mapped ; 0377 # 5.1 GREEK CAPITAL LETTER PAMPHYLIAN DIGAMMA +0377 ; valid # 5.1 GREEK SMALL LETTER PAMPHYLIAN DIGAMMA +0378..0379 ; disallowed # NA .. +037A ; disallowed_STD3_mapped ; 0020 03B9 # 1.1 GREEK YPOGEGRAMMENI +037B..037D ; valid # 5.0 GREEK SMALL REVERSED LUNATE SIGMA SYMBOL..GREEK SMALL REVERSED DOTTED LUNATE SIGMA SYMBOL +037E ; disallowed_STD3_mapped ; 003B # 1.1 GREEK QUESTION MARK +037F ; mapped ; 03F3 # 7.0 GREEK CAPITAL LETTER YOT +0380..0383 ; disallowed # NA .. 
+0384 ; disallowed_STD3_mapped ; 0020 0301 # 1.1 GREEK TONOS +0385 ; disallowed_STD3_mapped ; 0020 0308 0301 #1.1 GREEK DIALYTIKA TONOS +0386 ; mapped ; 03AC # 1.1 GREEK CAPITAL LETTER ALPHA WITH TONOS +0387 ; mapped ; 00B7 # 1.1 GREEK ANO TELEIA +0388 ; mapped ; 03AD # 1.1 GREEK CAPITAL LETTER EPSILON WITH TONOS +0389 ; mapped ; 03AE # 1.1 GREEK CAPITAL LETTER ETA WITH TONOS +038A ; mapped ; 03AF # 1.1 GREEK CAPITAL LETTER IOTA WITH TONOS +038B ; disallowed # NA +038C ; mapped ; 03CC # 1.1 GREEK CAPITAL LETTER OMICRON WITH TONOS +038D ; disallowed # NA +038E ; mapped ; 03CD # 1.1 GREEK CAPITAL LETTER UPSILON WITH TONOS +038F ; mapped ; 03CE # 1.1 GREEK CAPITAL LETTER OMEGA WITH TONOS +0390 ; valid # 1.1 GREEK SMALL LETTER IOTA WITH DIALYTIKA AND TONOS +0391 ; mapped ; 03B1 # 1.1 GREEK CAPITAL LETTER ALPHA +0392 ; mapped ; 03B2 # 1.1 GREEK CAPITAL LETTER BETA +0393 ; mapped ; 03B3 # 1.1 GREEK CAPITAL LETTER GAMMA +0394 ; mapped ; 03B4 # 1.1 GREEK CAPITAL LETTER DELTA +0395 ; mapped ; 03B5 # 1.1 GREEK CAPITAL LETTER EPSILON +0396 ; mapped ; 03B6 # 1.1 GREEK CAPITAL LETTER ZETA +0397 ; mapped ; 03B7 # 1.1 GREEK CAPITAL LETTER ETA +0398 ; mapped ; 03B8 # 1.1 GREEK CAPITAL LETTER THETA +0399 ; mapped ; 03B9 # 1.1 GREEK CAPITAL LETTER IOTA +039A ; mapped ; 03BA # 1.1 GREEK CAPITAL LETTER KAPPA +039B ; mapped ; 03BB # 1.1 GREEK CAPITAL LETTER LAMDA +039C ; mapped ; 03BC # 1.1 GREEK CAPITAL LETTER MU +039D ; mapped ; 03BD # 1.1 GREEK CAPITAL LETTER NU +039E ; mapped ; 03BE # 1.1 GREEK CAPITAL LETTER XI +039F ; mapped ; 03BF # 1.1 GREEK CAPITAL LETTER OMICRON +03A0 ; mapped ; 03C0 # 1.1 GREEK CAPITAL LETTER PI +03A1 ; mapped ; 03C1 # 1.1 GREEK CAPITAL LETTER RHO +03A2 ; disallowed # NA +03A3 ; mapped ; 03C3 # 1.1 GREEK CAPITAL LETTER SIGMA +03A4 ; mapped ; 03C4 # 1.1 GREEK CAPITAL LETTER TAU +03A5 ; mapped ; 03C5 # 1.1 GREEK CAPITAL LETTER UPSILON +03A6 ; mapped ; 03C6 # 1.1 GREEK CAPITAL LETTER PHI +03A7 ; mapped ; 03C7 # 1.1 GREEK CAPITAL LETTER CHI +03A8 ; mapped ; 03C8 # 1.1 GREEK CAPITAL LETTER PSI +03A9 ; mapped ; 03C9 # 1.1 GREEK CAPITAL LETTER OMEGA +03AA ; mapped ; 03CA # 1.1 GREEK CAPITAL LETTER IOTA WITH DIALYTIKA +03AB ; mapped ; 03CB # 1.1 GREEK CAPITAL LETTER UPSILON WITH DIALYTIKA +03AC..03C1 ; valid # 1.1 GREEK SMALL LETTER ALPHA WITH TONOS..GREEK SMALL LETTER RHO +03C2 ; deviation ; 03C3 # 1.1 GREEK SMALL LETTER FINAL SIGMA +03C3..03CE ; valid # 1.1 GREEK SMALL LETTER SIGMA..GREEK SMALL LETTER OMEGA WITH TONOS +03CF ; mapped ; 03D7 # 5.1 GREEK CAPITAL KAI SYMBOL +03D0 ; mapped ; 03B2 # 1.1 GREEK BETA SYMBOL +03D1 ; mapped ; 03B8 # 1.1 GREEK THETA SYMBOL +03D2 ; mapped ; 03C5 # 1.1 GREEK UPSILON WITH HOOK SYMBOL +03D3 ; mapped ; 03CD # 1.1 GREEK UPSILON WITH ACUTE AND HOOK SYMBOL +03D4 ; mapped ; 03CB # 1.1 GREEK UPSILON WITH DIAERESIS AND HOOK SYMBOL +03D5 ; mapped ; 03C6 # 1.1 GREEK PHI SYMBOL +03D6 ; mapped ; 03C0 # 1.1 GREEK PI SYMBOL +03D7 ; valid # 3.0 GREEK KAI SYMBOL +03D8 ; mapped ; 03D9 # 3.2 GREEK LETTER ARCHAIC KOPPA +03D9 ; valid # 3.2 GREEK SMALL LETTER ARCHAIC KOPPA +03DA ; mapped ; 03DB # 1.1 GREEK LETTER STIGMA +03DB ; valid # 3.0 GREEK SMALL LETTER STIGMA +03DC ; mapped ; 03DD # 1.1 GREEK LETTER DIGAMMA +03DD ; valid # 3.0 GREEK SMALL LETTER DIGAMMA +03DE ; mapped ; 03DF # 1.1 GREEK LETTER KOPPA +03DF ; valid # 3.0 GREEK SMALL LETTER KOPPA +03E0 ; mapped ; 03E1 # 1.1 GREEK LETTER SAMPI +03E1 ; valid # 3.0 GREEK SMALL LETTER SAMPI +03E2 ; mapped ; 03E3 # 1.1 COPTIC CAPITAL LETTER SHEI +03E3 ; valid # 1.1 COPTIC SMALL LETTER SHEI +03E4 ; mapped ; 03E5 # 1.1 COPTIC 
CAPITAL LETTER FEI +03E5 ; valid # 1.1 COPTIC SMALL LETTER FEI +03E6 ; mapped ; 03E7 # 1.1 COPTIC CAPITAL LETTER KHEI +03E7 ; valid # 1.1 COPTIC SMALL LETTER KHEI +03E8 ; mapped ; 03E9 # 1.1 COPTIC CAPITAL LETTER HORI +03E9 ; valid # 1.1 COPTIC SMALL LETTER HORI +03EA ; mapped ; 03EB # 1.1 COPTIC CAPITAL LETTER GANGIA +03EB ; valid # 1.1 COPTIC SMALL LETTER GANGIA +03EC ; mapped ; 03ED # 1.1 COPTIC CAPITAL LETTER SHIMA +03ED ; valid # 1.1 COPTIC SMALL LETTER SHIMA +03EE ; mapped ; 03EF # 1.1 COPTIC CAPITAL LETTER DEI +03EF ; valid # 1.1 COPTIC SMALL LETTER DEI +03F0 ; mapped ; 03BA # 1.1 GREEK KAPPA SYMBOL +03F1 ; mapped ; 03C1 # 1.1 GREEK RHO SYMBOL +03F2 ; mapped ; 03C3 # 1.1 GREEK LUNATE SIGMA SYMBOL +03F3 ; valid # 1.1 GREEK LETTER YOT +03F4 ; mapped ; 03B8 # 3.1 GREEK CAPITAL THETA SYMBOL +03F5 ; mapped ; 03B5 # 3.1 GREEK LUNATE EPSILON SYMBOL +03F6 ; valid ; ; NV8 # 3.2 GREEK REVERSED LUNATE EPSILON SYMBOL +03F7 ; mapped ; 03F8 # 4.0 GREEK CAPITAL LETTER SHO +03F8 ; valid # 4.0 GREEK SMALL LETTER SHO +03F9 ; mapped ; 03C3 # 4.0 GREEK CAPITAL LUNATE SIGMA SYMBOL +03FA ; mapped ; 03FB # 4.0 GREEK CAPITAL LETTER SAN +03FB ; valid # 4.0 GREEK SMALL LETTER SAN +03FC ; valid # 4.1 GREEK RHO WITH STROKE SYMBOL +03FD ; mapped ; 037B # 4.1 GREEK CAPITAL REVERSED LUNATE SIGMA SYMBOL +03FE ; mapped ; 037C # 4.1 GREEK CAPITAL DOTTED LUNATE SIGMA SYMBOL +03FF ; mapped ; 037D # 4.1 GREEK CAPITAL REVERSED DOTTED LUNATE SIGMA SYMBOL +0400 ; mapped ; 0450 # 3.0 CYRILLIC CAPITAL LETTER IE WITH GRAVE +0401 ; mapped ; 0451 # 1.1 CYRILLIC CAPITAL LETTER IO +0402 ; mapped ; 0452 # 1.1 CYRILLIC CAPITAL LETTER DJE +0403 ; mapped ; 0453 # 1.1 CYRILLIC CAPITAL LETTER GJE +0404 ; mapped ; 0454 # 1.1 CYRILLIC CAPITAL LETTER UKRAINIAN IE +0405 ; mapped ; 0455 # 1.1 CYRILLIC CAPITAL LETTER DZE +0406 ; mapped ; 0456 # 1.1 CYRILLIC CAPITAL LETTER BYELORUSSIAN-UKRAINIAN I +0407 ; mapped ; 0457 # 1.1 CYRILLIC CAPITAL LETTER YI +0408 ; mapped ; 0458 # 1.1 CYRILLIC CAPITAL LETTER JE +0409 ; mapped ; 0459 # 1.1 CYRILLIC CAPITAL LETTER LJE +040A ; mapped ; 045A # 1.1 CYRILLIC CAPITAL LETTER NJE +040B ; mapped ; 045B # 1.1 CYRILLIC CAPITAL LETTER TSHE +040C ; mapped ; 045C # 1.1 CYRILLIC CAPITAL LETTER KJE +040D ; mapped ; 045D # 3.0 CYRILLIC CAPITAL LETTER I WITH GRAVE +040E ; mapped ; 045E # 1.1 CYRILLIC CAPITAL LETTER SHORT U +040F ; mapped ; 045F # 1.1 CYRILLIC CAPITAL LETTER DZHE +0410 ; mapped ; 0430 # 1.1 CYRILLIC CAPITAL LETTER A +0411 ; mapped ; 0431 # 1.1 CYRILLIC CAPITAL LETTER BE +0412 ; mapped ; 0432 # 1.1 CYRILLIC CAPITAL LETTER VE +0413 ; mapped ; 0433 # 1.1 CYRILLIC CAPITAL LETTER GHE +0414 ; mapped ; 0434 # 1.1 CYRILLIC CAPITAL LETTER DE +0415 ; mapped ; 0435 # 1.1 CYRILLIC CAPITAL LETTER IE +0416 ; mapped ; 0436 # 1.1 CYRILLIC CAPITAL LETTER ZHE +0417 ; mapped ; 0437 # 1.1 CYRILLIC CAPITAL LETTER ZE +0418 ; mapped ; 0438 # 1.1 CYRILLIC CAPITAL LETTER I +0419 ; mapped ; 0439 # 1.1 CYRILLIC CAPITAL LETTER SHORT I +041A ; mapped ; 043A # 1.1 CYRILLIC CAPITAL LETTER KA +041B ; mapped ; 043B # 1.1 CYRILLIC CAPITAL LETTER EL +041C ; mapped ; 043C # 1.1 CYRILLIC CAPITAL LETTER EM +041D ; mapped ; 043D # 1.1 CYRILLIC CAPITAL LETTER EN +041E ; mapped ; 043E # 1.1 CYRILLIC CAPITAL LETTER O +041F ; mapped ; 043F # 1.1 CYRILLIC CAPITAL LETTER PE +0420 ; mapped ; 0440 # 1.1 CYRILLIC CAPITAL LETTER ER +0421 ; mapped ; 0441 # 1.1 CYRILLIC CAPITAL LETTER ES +0422 ; mapped ; 0442 # 1.1 CYRILLIC CAPITAL LETTER TE +0423 ; mapped ; 0443 # 1.1 CYRILLIC CAPITAL LETTER U +0424 ; mapped ; 0444 # 1.1 CYRILLIC CAPITAL LETTER EF 
+0425 ; mapped ; 0445 # 1.1 CYRILLIC CAPITAL LETTER HA +0426 ; mapped ; 0446 # 1.1 CYRILLIC CAPITAL LETTER TSE +0427 ; mapped ; 0447 # 1.1 CYRILLIC CAPITAL LETTER CHE +0428 ; mapped ; 0448 # 1.1 CYRILLIC CAPITAL LETTER SHA +0429 ; mapped ; 0449 # 1.1 CYRILLIC CAPITAL LETTER SHCHA +042A ; mapped ; 044A # 1.1 CYRILLIC CAPITAL LETTER HARD SIGN +042B ; mapped ; 044B # 1.1 CYRILLIC CAPITAL LETTER YERU +042C ; mapped ; 044C # 1.1 CYRILLIC CAPITAL LETTER SOFT SIGN +042D ; mapped ; 044D # 1.1 CYRILLIC CAPITAL LETTER E +042E ; mapped ; 044E # 1.1 CYRILLIC CAPITAL LETTER YU +042F ; mapped ; 044F # 1.1 CYRILLIC CAPITAL LETTER YA +0430..044F ; valid # 1.1 CYRILLIC SMALL LETTER A..CYRILLIC SMALL LETTER YA +0450 ; valid # 3.0 CYRILLIC SMALL LETTER IE WITH GRAVE +0451..045C ; valid # 1.1 CYRILLIC SMALL LETTER IO..CYRILLIC SMALL LETTER KJE +045D ; valid # 3.0 CYRILLIC SMALL LETTER I WITH GRAVE +045E..045F ; valid # 1.1 CYRILLIC SMALL LETTER SHORT U..CYRILLIC SMALL LETTER DZHE +0460 ; mapped ; 0461 # 1.1 CYRILLIC CAPITAL LETTER OMEGA +0461 ; valid # 1.1 CYRILLIC SMALL LETTER OMEGA +0462 ; mapped ; 0463 # 1.1 CYRILLIC CAPITAL LETTER YAT +0463 ; valid # 1.1 CYRILLIC SMALL LETTER YAT +0464 ; mapped ; 0465 # 1.1 CYRILLIC CAPITAL LETTER IOTIFIED E +0465 ; valid # 1.1 CYRILLIC SMALL LETTER IOTIFIED E +0466 ; mapped ; 0467 # 1.1 CYRILLIC CAPITAL LETTER LITTLE YUS +0467 ; valid # 1.1 CYRILLIC SMALL LETTER LITTLE YUS +0468 ; mapped ; 0469 # 1.1 CYRILLIC CAPITAL LETTER IOTIFIED LITTLE YUS +0469 ; valid # 1.1 CYRILLIC SMALL LETTER IOTIFIED LITTLE YUS +046A ; mapped ; 046B # 1.1 CYRILLIC CAPITAL LETTER BIG YUS +046B ; valid # 1.1 CYRILLIC SMALL LETTER BIG YUS +046C ; mapped ; 046D # 1.1 CYRILLIC CAPITAL LETTER IOTIFIED BIG YUS +046D ; valid # 1.1 CYRILLIC SMALL LETTER IOTIFIED BIG YUS +046E ; mapped ; 046F # 1.1 CYRILLIC CAPITAL LETTER KSI +046F ; valid # 1.1 CYRILLIC SMALL LETTER KSI +0470 ; mapped ; 0471 # 1.1 CYRILLIC CAPITAL LETTER PSI +0471 ; valid # 1.1 CYRILLIC SMALL LETTER PSI +0472 ; mapped ; 0473 # 1.1 CYRILLIC CAPITAL LETTER FITA +0473 ; valid # 1.1 CYRILLIC SMALL LETTER FITA +0474 ; mapped ; 0475 # 1.1 CYRILLIC CAPITAL LETTER IZHITSA +0475 ; valid # 1.1 CYRILLIC SMALL LETTER IZHITSA +0476 ; mapped ; 0477 # 1.1 CYRILLIC CAPITAL LETTER IZHITSA WITH DOUBLE GRAVE ACCENT +0477 ; valid # 1.1 CYRILLIC SMALL LETTER IZHITSA WITH DOUBLE GRAVE ACCENT +0478 ; mapped ; 0479 # 1.1 CYRILLIC CAPITAL LETTER UK +0479 ; valid # 1.1 CYRILLIC SMALL LETTER UK +047A ; mapped ; 047B # 1.1 CYRILLIC CAPITAL LETTER ROUND OMEGA +047B ; valid # 1.1 CYRILLIC SMALL LETTER ROUND OMEGA +047C ; mapped ; 047D # 1.1 CYRILLIC CAPITAL LETTER OMEGA WITH TITLO +047D ; valid # 1.1 CYRILLIC SMALL LETTER OMEGA WITH TITLO +047E ; mapped ; 047F # 1.1 CYRILLIC CAPITAL LETTER OT +047F ; valid # 1.1 CYRILLIC SMALL LETTER OT +0480 ; mapped ; 0481 # 1.1 CYRILLIC CAPITAL LETTER KOPPA +0481 ; valid # 1.1 CYRILLIC SMALL LETTER KOPPA +0482 ; valid ; ; NV8 # 1.1 CYRILLIC THOUSANDS SIGN +0483..0486 ; valid # 1.1 COMBINING CYRILLIC TITLO..COMBINING CYRILLIC PSILI PNEUMATA +0487 ; valid # 5.1 COMBINING CYRILLIC POKRYTIE +0488..0489 ; valid ; ; NV8 # 3.0 COMBINING CYRILLIC HUNDRED THOUSANDS SIGN..COMBINING CYRILLIC MILLIONS SIGN +048A ; mapped ; 048B # 3.2 CYRILLIC CAPITAL LETTER SHORT I WITH TAIL +048B ; valid # 3.2 CYRILLIC SMALL LETTER SHORT I WITH TAIL +048C ; mapped ; 048D # 3.0 CYRILLIC CAPITAL LETTER SEMISOFT SIGN +048D ; valid # 3.0 CYRILLIC SMALL LETTER SEMISOFT SIGN +048E ; mapped ; 048F # 3.0 CYRILLIC CAPITAL LETTER ER WITH TICK +048F ; valid # 3.0 
CYRILLIC SMALL LETTER ER WITH TICK +0490 ; mapped ; 0491 # 1.1 CYRILLIC CAPITAL LETTER GHE WITH UPTURN +0491 ; valid # 1.1 CYRILLIC SMALL LETTER GHE WITH UPTURN +0492 ; mapped ; 0493 # 1.1 CYRILLIC CAPITAL LETTER GHE WITH STROKE +0493 ; valid # 1.1 CYRILLIC SMALL LETTER GHE WITH STROKE +0494 ; mapped ; 0495 # 1.1 CYRILLIC CAPITAL LETTER GHE WITH MIDDLE HOOK +0495 ; valid # 1.1 CYRILLIC SMALL LETTER GHE WITH MIDDLE HOOK +0496 ; mapped ; 0497 # 1.1 CYRILLIC CAPITAL LETTER ZHE WITH DESCENDER +0497 ; valid # 1.1 CYRILLIC SMALL LETTER ZHE WITH DESCENDER +0498 ; mapped ; 0499 # 1.1 CYRILLIC CAPITAL LETTER ZE WITH DESCENDER +0499 ; valid # 1.1 CYRILLIC SMALL LETTER ZE WITH DESCENDER +049A ; mapped ; 049B # 1.1 CYRILLIC CAPITAL LETTER KA WITH DESCENDER +049B ; valid # 1.1 CYRILLIC SMALL LETTER KA WITH DESCENDER +049C ; mapped ; 049D # 1.1 CYRILLIC CAPITAL LETTER KA WITH VERTICAL STROKE +049D ; valid # 1.1 CYRILLIC SMALL LETTER KA WITH VERTICAL STROKE +049E ; mapped ; 049F # 1.1 CYRILLIC CAPITAL LETTER KA WITH STROKE +049F ; valid # 1.1 CYRILLIC SMALL LETTER KA WITH STROKE +04A0 ; mapped ; 04A1 # 1.1 CYRILLIC CAPITAL LETTER BASHKIR KA +04A1 ; valid # 1.1 CYRILLIC SMALL LETTER BASHKIR KA +04A2 ; mapped ; 04A3 # 1.1 CYRILLIC CAPITAL LETTER EN WITH DESCENDER +04A3 ; valid # 1.1 CYRILLIC SMALL LETTER EN WITH DESCENDER +04A4 ; mapped ; 04A5 # 1.1 CYRILLIC CAPITAL LIGATURE EN GHE +04A5 ; valid # 1.1 CYRILLIC SMALL LIGATURE EN GHE +04A6 ; mapped ; 04A7 # 1.1 CYRILLIC CAPITAL LETTER PE WITH MIDDLE HOOK +04A7 ; valid # 1.1 CYRILLIC SMALL LETTER PE WITH MIDDLE HOOK +04A8 ; mapped ; 04A9 # 1.1 CYRILLIC CAPITAL LETTER ABKHASIAN HA +04A9 ; valid # 1.1 CYRILLIC SMALL LETTER ABKHASIAN HA +04AA ; mapped ; 04AB # 1.1 CYRILLIC CAPITAL LETTER ES WITH DESCENDER +04AB ; valid # 1.1 CYRILLIC SMALL LETTER ES WITH DESCENDER +04AC ; mapped ; 04AD # 1.1 CYRILLIC CAPITAL LETTER TE WITH DESCENDER +04AD ; valid # 1.1 CYRILLIC SMALL LETTER TE WITH DESCENDER +04AE ; mapped ; 04AF # 1.1 CYRILLIC CAPITAL LETTER STRAIGHT U +04AF ; valid # 1.1 CYRILLIC SMALL LETTER STRAIGHT U +04B0 ; mapped ; 04B1 # 1.1 CYRILLIC CAPITAL LETTER STRAIGHT U WITH STROKE +04B1 ; valid # 1.1 CYRILLIC SMALL LETTER STRAIGHT U WITH STROKE +04B2 ; mapped ; 04B3 # 1.1 CYRILLIC CAPITAL LETTER HA WITH DESCENDER +04B3 ; valid # 1.1 CYRILLIC SMALL LETTER HA WITH DESCENDER +04B4 ; mapped ; 04B5 # 1.1 CYRILLIC CAPITAL LIGATURE TE TSE +04B5 ; valid # 1.1 CYRILLIC SMALL LIGATURE TE TSE +04B6 ; mapped ; 04B7 # 1.1 CYRILLIC CAPITAL LETTER CHE WITH DESCENDER +04B7 ; valid # 1.1 CYRILLIC SMALL LETTER CHE WITH DESCENDER +04B8 ; mapped ; 04B9 # 1.1 CYRILLIC CAPITAL LETTER CHE WITH VERTICAL STROKE +04B9 ; valid # 1.1 CYRILLIC SMALL LETTER CHE WITH VERTICAL STROKE +04BA ; mapped ; 04BB # 1.1 CYRILLIC CAPITAL LETTER SHHA +04BB ; valid # 1.1 CYRILLIC SMALL LETTER SHHA +04BC ; mapped ; 04BD # 1.1 CYRILLIC CAPITAL LETTER ABKHASIAN CHE +04BD ; valid # 1.1 CYRILLIC SMALL LETTER ABKHASIAN CHE +04BE ; mapped ; 04BF # 1.1 CYRILLIC CAPITAL LETTER ABKHASIAN CHE WITH DESCENDER +04BF ; valid # 1.1 CYRILLIC SMALL LETTER ABKHASIAN CHE WITH DESCENDER +04C0 ; disallowed # 1.1 CYRILLIC LETTER PALOCHKA +04C1 ; mapped ; 04C2 # 1.1 CYRILLIC CAPITAL LETTER ZHE WITH BREVE +04C2 ; valid # 1.1 CYRILLIC SMALL LETTER ZHE WITH BREVE +04C3 ; mapped ; 04C4 # 1.1 CYRILLIC CAPITAL LETTER KA WITH HOOK +04C4 ; valid # 1.1 CYRILLIC SMALL LETTER KA WITH HOOK +04C5 ; mapped ; 04C6 # 3.2 CYRILLIC CAPITAL LETTER EL WITH TAIL +04C6 ; valid # 3.2 CYRILLIC SMALL LETTER EL WITH TAIL +04C7 ; mapped ; 04C8 # 1.1 
CYRILLIC CAPITAL LETTER EN WITH HOOK +04C8 ; valid # 1.1 CYRILLIC SMALL LETTER EN WITH HOOK +04C9 ; mapped ; 04CA # 3.2 CYRILLIC CAPITAL LETTER EN WITH TAIL +04CA ; valid # 3.2 CYRILLIC SMALL LETTER EN WITH TAIL +04CB ; mapped ; 04CC # 1.1 CYRILLIC CAPITAL LETTER KHAKASSIAN CHE +04CC ; valid # 1.1 CYRILLIC SMALL LETTER KHAKASSIAN CHE +04CD ; mapped ; 04CE # 3.2 CYRILLIC CAPITAL LETTER EM WITH TAIL +04CE ; valid # 3.2 CYRILLIC SMALL LETTER EM WITH TAIL +04CF ; valid # 5.0 CYRILLIC SMALL LETTER PALOCHKA +04D0 ; mapped ; 04D1 # 1.1 CYRILLIC CAPITAL LETTER A WITH BREVE +04D1 ; valid # 1.1 CYRILLIC SMALL LETTER A WITH BREVE +04D2 ; mapped ; 04D3 # 1.1 CYRILLIC CAPITAL LETTER A WITH DIAERESIS +04D3 ; valid # 1.1 CYRILLIC SMALL LETTER A WITH DIAERESIS +04D4 ; mapped ; 04D5 # 1.1 CYRILLIC CAPITAL LIGATURE A IE +04D5 ; valid # 1.1 CYRILLIC SMALL LIGATURE A IE +04D6 ; mapped ; 04D7 # 1.1 CYRILLIC CAPITAL LETTER IE WITH BREVE +04D7 ; valid # 1.1 CYRILLIC SMALL LETTER IE WITH BREVE +04D8 ; mapped ; 04D9 # 1.1 CYRILLIC CAPITAL LETTER SCHWA +04D9 ; valid # 1.1 CYRILLIC SMALL LETTER SCHWA +04DA ; mapped ; 04DB # 1.1 CYRILLIC CAPITAL LETTER SCHWA WITH DIAERESIS +04DB ; valid # 1.1 CYRILLIC SMALL LETTER SCHWA WITH DIAERESIS +04DC ; mapped ; 04DD # 1.1 CYRILLIC CAPITAL LETTER ZHE WITH DIAERESIS +04DD ; valid # 1.1 CYRILLIC SMALL LETTER ZHE WITH DIAERESIS +04DE ; mapped ; 04DF # 1.1 CYRILLIC CAPITAL LETTER ZE WITH DIAERESIS +04DF ; valid # 1.1 CYRILLIC SMALL LETTER ZE WITH DIAERESIS +04E0 ; mapped ; 04E1 # 1.1 CYRILLIC CAPITAL LETTER ABKHASIAN DZE +04E1 ; valid # 1.1 CYRILLIC SMALL LETTER ABKHASIAN DZE +04E2 ; mapped ; 04E3 # 1.1 CYRILLIC CAPITAL LETTER I WITH MACRON +04E3 ; valid # 1.1 CYRILLIC SMALL LETTER I WITH MACRON +04E4 ; mapped ; 04E5 # 1.1 CYRILLIC CAPITAL LETTER I WITH DIAERESIS +04E5 ; valid # 1.1 CYRILLIC SMALL LETTER I WITH DIAERESIS +04E6 ; mapped ; 04E7 # 1.1 CYRILLIC CAPITAL LETTER O WITH DIAERESIS +04E7 ; valid # 1.1 CYRILLIC SMALL LETTER O WITH DIAERESIS +04E8 ; mapped ; 04E9 # 1.1 CYRILLIC CAPITAL LETTER BARRED O +04E9 ; valid # 1.1 CYRILLIC SMALL LETTER BARRED O +04EA ; mapped ; 04EB # 1.1 CYRILLIC CAPITAL LETTER BARRED O WITH DIAERESIS +04EB ; valid # 1.1 CYRILLIC SMALL LETTER BARRED O WITH DIAERESIS +04EC ; mapped ; 04ED # 3.0 CYRILLIC CAPITAL LETTER E WITH DIAERESIS +04ED ; valid # 3.0 CYRILLIC SMALL LETTER E WITH DIAERESIS +04EE ; mapped ; 04EF # 1.1 CYRILLIC CAPITAL LETTER U WITH MACRON +04EF ; valid # 1.1 CYRILLIC SMALL LETTER U WITH MACRON +04F0 ; mapped ; 04F1 # 1.1 CYRILLIC CAPITAL LETTER U WITH DIAERESIS +04F1 ; valid # 1.1 CYRILLIC SMALL LETTER U WITH DIAERESIS +04F2 ; mapped ; 04F3 # 1.1 CYRILLIC CAPITAL LETTER U WITH DOUBLE ACUTE +04F3 ; valid # 1.1 CYRILLIC SMALL LETTER U WITH DOUBLE ACUTE +04F4 ; mapped ; 04F5 # 1.1 CYRILLIC CAPITAL LETTER CHE WITH DIAERESIS +04F5 ; valid # 1.1 CYRILLIC SMALL LETTER CHE WITH DIAERESIS +04F6 ; mapped ; 04F7 # 4.1 CYRILLIC CAPITAL LETTER GHE WITH DESCENDER +04F7 ; valid # 4.1 CYRILLIC SMALL LETTER GHE WITH DESCENDER +04F8 ; mapped ; 04F9 # 1.1 CYRILLIC CAPITAL LETTER YERU WITH DIAERESIS +04F9 ; valid # 1.1 CYRILLIC SMALL LETTER YERU WITH DIAERESIS +04FA ; mapped ; 04FB # 5.0 CYRILLIC CAPITAL LETTER GHE WITH STROKE AND HOOK +04FB ; valid # 5.0 CYRILLIC SMALL LETTER GHE WITH STROKE AND HOOK +04FC ; mapped ; 04FD # 5.0 CYRILLIC CAPITAL LETTER HA WITH HOOK +04FD ; valid # 5.0 CYRILLIC SMALL LETTER HA WITH HOOK +04FE ; mapped ; 04FF # 5.0 CYRILLIC CAPITAL LETTER HA WITH STROKE +04FF ; valid # 5.0 CYRILLIC SMALL LETTER HA WITH STROKE +0500 ; 
mapped ; 0501 # 3.2 CYRILLIC CAPITAL LETTER KOMI DE +0501 ; valid # 3.2 CYRILLIC SMALL LETTER KOMI DE +0502 ; mapped ; 0503 # 3.2 CYRILLIC CAPITAL LETTER KOMI DJE +0503 ; valid # 3.2 CYRILLIC SMALL LETTER KOMI DJE +0504 ; mapped ; 0505 # 3.2 CYRILLIC CAPITAL LETTER KOMI ZJE +0505 ; valid # 3.2 CYRILLIC SMALL LETTER KOMI ZJE +0506 ; mapped ; 0507 # 3.2 CYRILLIC CAPITAL LETTER KOMI DZJE +0507 ; valid # 3.2 CYRILLIC SMALL LETTER KOMI DZJE +0508 ; mapped ; 0509 # 3.2 CYRILLIC CAPITAL LETTER KOMI LJE +0509 ; valid # 3.2 CYRILLIC SMALL LETTER KOMI LJE +050A ; mapped ; 050B # 3.2 CYRILLIC CAPITAL LETTER KOMI NJE +050B ; valid # 3.2 CYRILLIC SMALL LETTER KOMI NJE +050C ; mapped ; 050D # 3.2 CYRILLIC CAPITAL LETTER KOMI SJE +050D ; valid # 3.2 CYRILLIC SMALL LETTER KOMI SJE +050E ; mapped ; 050F # 3.2 CYRILLIC CAPITAL LETTER KOMI TJE +050F ; valid # 3.2 CYRILLIC SMALL LETTER KOMI TJE +0510 ; mapped ; 0511 # 5.0 CYRILLIC CAPITAL LETTER REVERSED ZE +0511 ; valid # 5.0 CYRILLIC SMALL LETTER REVERSED ZE +0512 ; mapped ; 0513 # 5.0 CYRILLIC CAPITAL LETTER EL WITH HOOK +0513 ; valid # 5.0 CYRILLIC SMALL LETTER EL WITH HOOK +0514 ; mapped ; 0515 # 5.1 CYRILLIC CAPITAL LETTER LHA +0515 ; valid # 5.1 CYRILLIC SMALL LETTER LHA +0516 ; mapped ; 0517 # 5.1 CYRILLIC CAPITAL LETTER RHA +0517 ; valid # 5.1 CYRILLIC SMALL LETTER RHA +0518 ; mapped ; 0519 # 5.1 CYRILLIC CAPITAL LETTER YAE +0519 ; valid # 5.1 CYRILLIC SMALL LETTER YAE +051A ; mapped ; 051B # 5.1 CYRILLIC CAPITAL LETTER QA +051B ; valid # 5.1 CYRILLIC SMALL LETTER QA +051C ; mapped ; 051D # 5.1 CYRILLIC CAPITAL LETTER WE +051D ; valid # 5.1 CYRILLIC SMALL LETTER WE +051E ; mapped ; 051F # 5.1 CYRILLIC CAPITAL LETTER ALEUT KA +051F ; valid # 5.1 CYRILLIC SMALL LETTER ALEUT KA +0520 ; mapped ; 0521 # 5.1 CYRILLIC CAPITAL LETTER EL WITH MIDDLE HOOK +0521 ; valid # 5.1 CYRILLIC SMALL LETTER EL WITH MIDDLE HOOK +0522 ; mapped ; 0523 # 5.1 CYRILLIC CAPITAL LETTER EN WITH MIDDLE HOOK +0523 ; valid # 5.1 CYRILLIC SMALL LETTER EN WITH MIDDLE HOOK +0524 ; mapped ; 0525 # 5.2 CYRILLIC CAPITAL LETTER PE WITH DESCENDER +0525 ; valid # 5.2 CYRILLIC SMALL LETTER PE WITH DESCENDER +0526 ; mapped ; 0527 # 6.0 CYRILLIC CAPITAL LETTER SHHA WITH DESCENDER +0527 ; valid # 6.0 CYRILLIC SMALL LETTER SHHA WITH DESCENDER +0528 ; mapped ; 0529 # 7.0 CYRILLIC CAPITAL LETTER EN WITH LEFT HOOK +0529 ; valid # 7.0 CYRILLIC SMALL LETTER EN WITH LEFT HOOK +052A ; mapped ; 052B # 7.0 CYRILLIC CAPITAL LETTER DZZHE +052B ; valid # 7.0 CYRILLIC SMALL LETTER DZZHE +052C ; mapped ; 052D # 7.0 CYRILLIC CAPITAL LETTER DCHE +052D ; valid # 7.0 CYRILLIC SMALL LETTER DCHE +052E ; mapped ; 052F # 7.0 CYRILLIC CAPITAL LETTER EL WITH DESCENDER +052F ; valid # 7.0 CYRILLIC SMALL LETTER EL WITH DESCENDER +0530 ; disallowed # NA +0531 ; mapped ; 0561 # 1.1 ARMENIAN CAPITAL LETTER AYB +0532 ; mapped ; 0562 # 1.1 ARMENIAN CAPITAL LETTER BEN +0533 ; mapped ; 0563 # 1.1 ARMENIAN CAPITAL LETTER GIM +0534 ; mapped ; 0564 # 1.1 ARMENIAN CAPITAL LETTER DA +0535 ; mapped ; 0565 # 1.1 ARMENIAN CAPITAL LETTER ECH +0536 ; mapped ; 0566 # 1.1 ARMENIAN CAPITAL LETTER ZA +0537 ; mapped ; 0567 # 1.1 ARMENIAN CAPITAL LETTER EH +0538 ; mapped ; 0568 # 1.1 ARMENIAN CAPITAL LETTER ET +0539 ; mapped ; 0569 # 1.1 ARMENIAN CAPITAL LETTER TO +053A ; mapped ; 056A # 1.1 ARMENIAN CAPITAL LETTER ZHE +053B ; mapped ; 056B # 1.1 ARMENIAN CAPITAL LETTER INI +053C ; mapped ; 056C # 1.1 ARMENIAN CAPITAL LETTER LIWN +053D ; mapped ; 056D # 1.1 ARMENIAN CAPITAL LETTER XEH +053E ; mapped ; 056E # 1.1 ARMENIAN CAPITAL LETTER CA +053F 
; mapped ; 056F # 1.1 ARMENIAN CAPITAL LETTER KEN +0540 ; mapped ; 0570 # 1.1 ARMENIAN CAPITAL LETTER HO +0541 ; mapped ; 0571 # 1.1 ARMENIAN CAPITAL LETTER JA +0542 ; mapped ; 0572 # 1.1 ARMENIAN CAPITAL LETTER GHAD +0543 ; mapped ; 0573 # 1.1 ARMENIAN CAPITAL LETTER CHEH +0544 ; mapped ; 0574 # 1.1 ARMENIAN CAPITAL LETTER MEN +0545 ; mapped ; 0575 # 1.1 ARMENIAN CAPITAL LETTER YI +0546 ; mapped ; 0576 # 1.1 ARMENIAN CAPITAL LETTER NOW +0547 ; mapped ; 0577 # 1.1 ARMENIAN CAPITAL LETTER SHA +0548 ; mapped ; 0578 # 1.1 ARMENIAN CAPITAL LETTER VO +0549 ; mapped ; 0579 # 1.1 ARMENIAN CAPITAL LETTER CHA +054A ; mapped ; 057A # 1.1 ARMENIAN CAPITAL LETTER PEH +054B ; mapped ; 057B # 1.1 ARMENIAN CAPITAL LETTER JHEH +054C ; mapped ; 057C # 1.1 ARMENIAN CAPITAL LETTER RA +054D ; mapped ; 057D # 1.1 ARMENIAN CAPITAL LETTER SEH +054E ; mapped ; 057E # 1.1 ARMENIAN CAPITAL LETTER VEW +054F ; mapped ; 057F # 1.1 ARMENIAN CAPITAL LETTER TIWN +0550 ; mapped ; 0580 # 1.1 ARMENIAN CAPITAL LETTER REH +0551 ; mapped ; 0581 # 1.1 ARMENIAN CAPITAL LETTER CO +0552 ; mapped ; 0582 # 1.1 ARMENIAN CAPITAL LETTER YIWN +0553 ; mapped ; 0583 # 1.1 ARMENIAN CAPITAL LETTER PIWR +0554 ; mapped ; 0584 # 1.1 ARMENIAN CAPITAL LETTER KEH +0555 ; mapped ; 0585 # 1.1 ARMENIAN CAPITAL LETTER OH +0556 ; mapped ; 0586 # 1.1 ARMENIAN CAPITAL LETTER FEH +0557..0558 ; disallowed # NA .. +0559 ; valid # 1.1 ARMENIAN MODIFIER LETTER LEFT HALF RING +055A..055F ; valid ; ; NV8 # 1.1 ARMENIAN APOSTROPHE..ARMENIAN ABBREVIATION MARK +0560 ; disallowed # NA +0561..0586 ; valid # 1.1 ARMENIAN SMALL LETTER AYB..ARMENIAN SMALL LETTER FEH +0587 ; mapped ; 0565 0582 # 1.1 ARMENIAN SMALL LIGATURE ECH YIWN +0588 ; disallowed # NA +0589 ; valid ; ; NV8 # 1.1 ARMENIAN FULL STOP +058A ; valid ; ; NV8 # 3.0 ARMENIAN HYPHEN +058B..058C ; disallowed # NA .. +058D..058E ; valid ; ; NV8 # 7.0 RIGHT-FACING ARMENIAN ETERNITY SIGN..LEFT-FACING ARMENIAN ETERNITY SIGN +058F ; valid ; ; NV8 # 6.1 ARMENIAN DRAM SIGN +0590 ; disallowed # NA +0591..05A1 ; valid # 2.0 HEBREW ACCENT ETNAHTA..HEBREW ACCENT PAZER +05A2 ; valid # 4.1 HEBREW ACCENT ATNAH HAFUKH +05A3..05AF ; valid # 2.0 HEBREW ACCENT MUNAH..HEBREW MARK MASORA CIRCLE +05B0..05B9 ; valid # 1.1 HEBREW POINT SHEVA..HEBREW POINT HOLAM +05BA ; valid # 5.0 HEBREW POINT HOLAM HASER FOR VAV +05BB..05BD ; valid # 1.1 HEBREW POINT QUBUTS..HEBREW POINT METEG +05BE ; valid ; ; NV8 # 1.1 HEBREW PUNCTUATION MAQAF +05BF ; valid # 1.1 HEBREW POINT RAFE +05C0 ; valid ; ; NV8 # 1.1 HEBREW PUNCTUATION PASEQ +05C1..05C2 ; valid # 1.1 HEBREW POINT SHIN DOT..HEBREW POINT SIN DOT +05C3 ; valid ; ; NV8 # 1.1 HEBREW PUNCTUATION SOF PASUQ +05C4 ; valid # 2.0 HEBREW MARK UPPER DOT +05C5 ; valid # 4.1 HEBREW MARK LOWER DOT +05C6 ; valid ; ; NV8 # 4.1 HEBREW PUNCTUATION NUN HAFUKHA +05C7 ; valid # 4.1 HEBREW POINT QAMATS QATAN +05C8..05CF ; disallowed # NA .. +05D0..05EA ; valid # 1.1 HEBREW LETTER ALEF..HEBREW LETTER TAV +05EB..05EF ; disallowed # NA .. +05F0..05F4 ; valid # 1.1 HEBREW LIGATURE YIDDISH DOUBLE VAV..HEBREW PUNCTUATION GERSHAYIM +05F5..05FF ; disallowed # NA .. 
+0600..0603 ; disallowed # 4.0 ARABIC NUMBER SIGN..ARABIC SIGN SAFHA +0604 ; disallowed # 6.1 ARABIC SIGN SAMVAT +0605 ; disallowed # 7.0 ARABIC NUMBER MARK ABOVE +0606..060A ; valid ; ; NV8 # 5.1 ARABIC-INDIC CUBE ROOT..ARABIC-INDIC PER TEN THOUSAND SIGN +060B ; valid ; ; NV8 # 4.1 AFGHANI SIGN +060C ; valid ; ; NV8 # 1.1 ARABIC COMMA +060D..060F ; valid ; ; NV8 # 4.0 ARABIC DATE SEPARATOR..ARABIC SIGN MISRA +0610..0615 ; valid # 4.0 ARABIC SIGN SALLALLAHOU ALAYHE WASSALLAM..ARABIC SMALL HIGH TAH +0616..061A ; valid # 5.1 ARABIC SMALL HIGH LIGATURE ALEF WITH LAM WITH YEH..ARABIC SMALL KASRA +061B ; valid ; ; NV8 # 1.1 ARABIC SEMICOLON +061C ; disallowed # 6.3 ARABIC LETTER MARK +061D ; disallowed # NA +061E ; valid ; ; NV8 # 4.1 ARABIC TRIPLE DOT PUNCTUATION MARK +061F ; valid ; ; NV8 # 1.1 ARABIC QUESTION MARK +0620 ; valid # 6.0 ARABIC LETTER KASHMIRI YEH +0621..063A ; valid # 1.1 ARABIC LETTER HAMZA..ARABIC LETTER GHAIN +063B..063F ; valid # 5.1 ARABIC LETTER KEHEH WITH TWO DOTS ABOVE..ARABIC LETTER FARSI YEH WITH THREE DOTS ABOVE +0640 ; valid ; ; NV8 # 1.1 ARABIC TATWEEL +0641..0652 ; valid # 1.1 ARABIC LETTER FEH..ARABIC SUKUN +0653..0655 ; valid # 3.0 ARABIC MADDAH ABOVE..ARABIC HAMZA BELOW +0656..0658 ; valid # 4.0 ARABIC SUBSCRIPT ALEF..ARABIC MARK NOON GHUNNA +0659..065E ; valid # 4.1 ARABIC ZWARAKAY..ARABIC FATHA WITH TWO DOTS +065F ; valid # 6.0 ARABIC WAVY HAMZA BELOW +0660..0669 ; valid # 1.1 ARABIC-INDIC DIGIT ZERO..ARABIC-INDIC DIGIT NINE +066A..066D ; valid ; ; NV8 # 1.1 ARABIC PERCENT SIGN..ARABIC FIVE POINTED STAR +066E..066F ; valid # 3.2 ARABIC LETTER DOTLESS BEH..ARABIC LETTER DOTLESS QAF +0670..0674 ; valid # 1.1 ARABIC LETTER SUPERSCRIPT ALEF..ARABIC LETTER HIGH HAMZA +0675 ; mapped ; 0627 0674 # 1.1 ARABIC LETTER HIGH HAMZA ALEF +0676 ; mapped ; 0648 0674 # 1.1 ARABIC LETTER HIGH HAMZA WAW +0677 ; mapped ; 06C7 0674 # 1.1 ARABIC LETTER U WITH HAMZA ABOVE +0678 ; mapped ; 064A 0674 # 1.1 ARABIC LETTER HIGH HAMZA YEH +0679..06B7 ; valid # 1.1 ARABIC LETTER TTEH..ARABIC LETTER LAM WITH THREE DOTS ABOVE +06B8..06B9 ; valid # 3.0 ARABIC LETTER LAM WITH THREE DOTS BELOW..ARABIC LETTER NOON WITH DOT BELOW +06BA..06BE ; valid # 1.1 ARABIC LETTER NOON GHUNNA..ARABIC LETTER HEH DOACHASHMEE +06BF ; valid # 3.0 ARABIC LETTER TCHEH WITH DOT ABOVE +06C0..06CE ; valid # 1.1 ARABIC LETTER HEH WITH YEH ABOVE..ARABIC LETTER YEH WITH SMALL V +06CF ; valid # 3.0 ARABIC LETTER WAW WITH DOT ABOVE +06D0..06D3 ; valid # 1.1 ARABIC LETTER E..ARABIC LETTER YEH BARREE WITH HAMZA ABOVE +06D4 ; valid ; ; NV8 # 1.1 ARABIC FULL STOP +06D5..06DC ; valid # 1.1 ARABIC LETTER AE..ARABIC SMALL HIGH SEEN +06DD ; disallowed # 1.1 ARABIC END OF AYAH +06DE ; valid ; ; NV8 # 1.1 ARABIC START OF RUB EL HIZB +06DF..06E8 ; valid # 1.1 ARABIC SMALL HIGH ROUNDED ZERO..ARABIC SMALL HIGH NOON +06E9 ; valid ; ; NV8 # 1.1 ARABIC PLACE OF SAJDAH +06EA..06ED ; valid # 1.1 ARABIC EMPTY CENTRE LOW STOP..ARABIC SMALL LOW MEEM +06EE..06EF ; valid # 4.0 ARABIC LETTER DAL WITH INVERTED V..ARABIC LETTER REH WITH INVERTED V +06F0..06F9 ; valid # 1.1 EXTENDED ARABIC-INDIC DIGIT ZERO..EXTENDED ARABIC-INDIC DIGIT NINE +06FA..06FE ; valid # 3.0 ARABIC LETTER SHEEN WITH DOT BELOW..ARABIC SIGN SINDHI POSTPOSITION MEN +06FF ; valid # 4.0 ARABIC LETTER HEH WITH INVERTED V +0700..070D ; valid ; ; NV8 # 3.0 SYRIAC END OF PARAGRAPH..SYRIAC HARKLEAN ASTERISCUS +070E ; disallowed # NA +070F ; disallowed # 3.0 SYRIAC ABBREVIATION MARK +0710..072C ; valid # 3.0 SYRIAC LETTER ALAPH..SYRIAC LETTER TAW +072D..072F ; valid # 4.0 SYRIAC 
LETTER PERSIAN BHETH..SYRIAC LETTER PERSIAN DHALATH +0730..074A ; valid # 3.0 SYRIAC PTHAHA ABOVE..SYRIAC BARREKH +074B..074C ; disallowed # NA .. +074D..074F ; valid # 4.0 SYRIAC LETTER SOGDIAN ZHAIN..SYRIAC LETTER SOGDIAN FE +0750..076D ; valid # 4.1 ARABIC LETTER BEH WITH THREE DOTS HORIZONTALLY BELOW..ARABIC LETTER SEEN WITH TWO DOTS VERTICALLY ABOVE +076E..077F ; valid # 5.1 ARABIC LETTER HAH WITH SMALL ARABIC LETTER TAH BELOW..ARABIC LETTER KAF WITH TWO DOTS ABOVE +0780..07B0 ; valid # 3.0 THAANA LETTER HAA..THAANA SUKUN +07B1 ; valid # 3.2 THAANA LETTER NAA +07B2..07BF ; disallowed # NA .. +07C0..07F5 ; valid # 5.0 NKO DIGIT ZERO..NKO LOW TONE APOSTROPHE +07F6..07FA ; valid ; ; NV8 # 5.0 NKO SYMBOL OO DENNEN..NKO LAJANYALAN +07FB..07FF ; disallowed # NA .. +0800..082D ; valid # 5.2 SAMARITAN LETTER ALAF..SAMARITAN MARK NEQUDAA +082E..082F ; disallowed # NA .. +0830..083E ; valid ; ; NV8 # 5.2 SAMARITAN PUNCTUATION NEQUDAA..SAMARITAN PUNCTUATION ANNAAU +083F ; disallowed # NA +0840..085B ; valid # 6.0 MANDAIC LETTER HALQA..MANDAIC GEMINATION MARK +085C..085D ; disallowed # NA .. +085E ; valid ; ; NV8 # 6.0 MANDAIC PUNCTUATION +085F ; disallowed # NA +0860..086A ; valid # 10.0 SYRIAC LETTER MALAYALAM NGA..SYRIAC LETTER MALAYALAM SSA +086B..089F ; disallowed # NA .. +08A0 ; valid # 6.1 ARABIC LETTER BEH WITH SMALL V BELOW +08A1 ; valid # 7.0 ARABIC LETTER BEH WITH HAMZA ABOVE +08A2..08AC ; valid # 6.1 ARABIC LETTER JEEM WITH TWO DOTS ABOVE..ARABIC LETTER ROHINGYA YEH +08AD..08B2 ; valid # 7.0 ARABIC LETTER LOW ALEF..ARABIC LETTER ZAIN WITH INVERTED V ABOVE +08B3..08B4 ; valid # 8.0 ARABIC LETTER AIN WITH THREE DOTS BELOW..ARABIC LETTER KAF WITH DOT BELOW +08B5 ; disallowed # NA +08B6..08BD ; valid # 9.0 ARABIC LETTER BEH WITH SMALL MEEM ABOVE..ARABIC LETTER AFRICAN NOON +08BE..08D3 ; disallowed # NA .. 
+08D4..08E1 ; valid # 9.0 ARABIC SMALL HIGH WORD AR-RUB..ARABIC SMALL HIGH SIGN SAFHA +08E2 ; disallowed # 9.0 ARABIC DISPUTED END OF AYAH +08E3 ; valid # 8.0 ARABIC TURNED DAMMA BELOW +08E4..08FE ; valid # 6.1 ARABIC CURLY FATHA..ARABIC DAMMA WITH DOT +08FF ; valid # 7.0 ARABIC MARK SIDEWAYS NOON GHUNNA +0900 ; valid # 5.2 DEVANAGARI SIGN INVERTED CANDRABINDU +0901..0903 ; valid # 1.1 DEVANAGARI SIGN CANDRABINDU..DEVANAGARI SIGN VISARGA +0904 ; valid # 4.0 DEVANAGARI LETTER SHORT A +0905..0939 ; valid # 1.1 DEVANAGARI LETTER A..DEVANAGARI LETTER HA +093A..093B ; valid # 6.0 DEVANAGARI VOWEL SIGN OE..DEVANAGARI VOWEL SIGN OOE +093C..094D ; valid # 1.1 DEVANAGARI SIGN NUKTA..DEVANAGARI SIGN VIRAMA +094E ; valid # 5.2 DEVANAGARI VOWEL SIGN PRISHTHAMATRA E +094F ; valid # 6.0 DEVANAGARI VOWEL SIGN AW +0950..0954 ; valid # 1.1 DEVANAGARI OM..DEVANAGARI ACUTE ACCENT +0955 ; valid # 5.2 DEVANAGARI VOWEL SIGN CANDRA LONG E +0956..0957 ; valid # 6.0 DEVANAGARI VOWEL SIGN UE..DEVANAGARI VOWEL SIGN UUE +0958 ; mapped ; 0915 093C # 1.1 DEVANAGARI LETTER QA +0959 ; mapped ; 0916 093C # 1.1 DEVANAGARI LETTER KHHA +095A ; mapped ; 0917 093C # 1.1 DEVANAGARI LETTER GHHA +095B ; mapped ; 091C 093C # 1.1 DEVANAGARI LETTER ZA +095C ; mapped ; 0921 093C # 1.1 DEVANAGARI LETTER DDDHA +095D ; mapped ; 0922 093C # 1.1 DEVANAGARI LETTER RHA +095E ; mapped ; 092B 093C # 1.1 DEVANAGARI LETTER FA +095F ; mapped ; 092F 093C # 1.1 DEVANAGARI LETTER YYA +0960..0963 ; valid # 1.1 DEVANAGARI LETTER VOCALIC RR..DEVANAGARI VOWEL SIGN VOCALIC LL +0964..0965 ; valid ; ; NV8 # 1.1 DEVANAGARI DANDA..DEVANAGARI DOUBLE DANDA +0966..096F ; valid # 1.1 DEVANAGARI DIGIT ZERO..DEVANAGARI DIGIT NINE +0970 ; valid ; ; NV8 # 1.1 DEVANAGARI ABBREVIATION SIGN +0971..0972 ; valid # 5.1 DEVANAGARI SIGN HIGH SPACING DOT..DEVANAGARI LETTER CANDRA A +0973..0977 ; valid # 6.0 DEVANAGARI LETTER OE..DEVANAGARI LETTER UUE +0978 ; valid # 7.0 DEVANAGARI LETTER MARWARI DDA +0979..097A ; valid # 5.2 DEVANAGARI LETTER ZHA..DEVANAGARI LETTER HEAVY YA +097B..097C ; valid # 5.0 DEVANAGARI LETTER GGA..DEVANAGARI LETTER JJA +097D ; valid # 4.1 DEVANAGARI LETTER GLOTTAL STOP +097E..097F ; valid # 5.0 DEVANAGARI LETTER DDDA..DEVANAGARI LETTER BBA +0980 ; valid # 7.0 BENGALI ANJI +0981..0983 ; valid # 1.1 BENGALI SIGN CANDRABINDU..BENGALI SIGN VISARGA +0984 ; disallowed # NA +0985..098C ; valid # 1.1 BENGALI LETTER A..BENGALI LETTER VOCALIC L +098D..098E ; disallowed # NA .. +098F..0990 ; valid # 1.1 BENGALI LETTER E..BENGALI LETTER AI +0991..0992 ; disallowed # NA .. +0993..09A8 ; valid # 1.1 BENGALI LETTER O..BENGALI LETTER NA +09A9 ; disallowed # NA +09AA..09B0 ; valid # 1.1 BENGALI LETTER PA..BENGALI LETTER RA +09B1 ; disallowed # NA +09B2 ; valid # 1.1 BENGALI LETTER LA +09B3..09B5 ; disallowed # NA .. +09B6..09B9 ; valid # 1.1 BENGALI LETTER SHA..BENGALI LETTER HA +09BA..09BB ; disallowed # NA .. +09BC ; valid # 1.1 BENGALI SIGN NUKTA +09BD ; valid # 4.0 BENGALI SIGN AVAGRAHA +09BE..09C4 ; valid # 1.1 BENGALI VOWEL SIGN AA..BENGALI VOWEL SIGN VOCALIC RR +09C5..09C6 ; disallowed # NA .. +09C7..09C8 ; valid # 1.1 BENGALI VOWEL SIGN E..BENGALI VOWEL SIGN AI +09C9..09CA ; disallowed # NA .. +09CB..09CD ; valid # 1.1 BENGALI VOWEL SIGN O..BENGALI SIGN VIRAMA +09CE ; valid # 4.1 BENGALI LETTER KHANDA TA +09CF..09D6 ; disallowed # NA .. +09D7 ; valid # 1.1 BENGALI AU LENGTH MARK +09D8..09DB ; disallowed # NA .. 
+09DC ; mapped ; 09A1 09BC # 1.1 BENGALI LETTER RRA +09DD ; mapped ; 09A2 09BC # 1.1 BENGALI LETTER RHA +09DE ; disallowed # NA +09DF ; mapped ; 09AF 09BC # 1.1 BENGALI LETTER YYA +09E0..09E3 ; valid # 1.1 BENGALI LETTER VOCALIC RR..BENGALI VOWEL SIGN VOCALIC LL +09E4..09E5 ; disallowed # NA .. +09E6..09F1 ; valid # 1.1 BENGALI DIGIT ZERO..BENGALI LETTER RA WITH LOWER DIAGONAL +09F2..09FA ; valid ; ; NV8 # 1.1 BENGALI RUPEE MARK..BENGALI ISSHAR +09FB ; valid ; ; NV8 # 5.2 BENGALI GANDA MARK +09FC ; valid # 10.0 BENGALI LETTER VEDIC ANUSVARA +09FD ; valid ; ; NV8 # 10.0 BENGALI ABBREVIATION SIGN +09FE..0A00 ; disallowed # NA .. +0A01 ; valid # 4.0 GURMUKHI SIGN ADAK BINDI +0A02 ; valid # 1.1 GURMUKHI SIGN BINDI +0A03 ; valid # 4.0 GURMUKHI SIGN VISARGA +0A04 ; disallowed # NA +0A05..0A0A ; valid # 1.1 GURMUKHI LETTER A..GURMUKHI LETTER UU +0A0B..0A0E ; disallowed # NA .. +0A0F..0A10 ; valid # 1.1 GURMUKHI LETTER EE..GURMUKHI LETTER AI +0A11..0A12 ; disallowed # NA .. +0A13..0A28 ; valid # 1.1 GURMUKHI LETTER OO..GURMUKHI LETTER NA +0A29 ; disallowed # NA +0A2A..0A30 ; valid # 1.1 GURMUKHI LETTER PA..GURMUKHI LETTER RA +0A31 ; disallowed # NA +0A32 ; valid # 1.1 GURMUKHI LETTER LA +0A33 ; mapped ; 0A32 0A3C # 1.1 GURMUKHI LETTER LLA +0A34 ; disallowed # NA +0A35 ; valid # 1.1 GURMUKHI LETTER VA +0A36 ; mapped ; 0A38 0A3C # 1.1 GURMUKHI LETTER SHA +0A37 ; disallowed # NA +0A38..0A39 ; valid # 1.1 GURMUKHI LETTER SA..GURMUKHI LETTER HA +0A3A..0A3B ; disallowed # NA .. +0A3C ; valid # 1.1 GURMUKHI SIGN NUKTA +0A3D ; disallowed # NA +0A3E..0A42 ; valid # 1.1 GURMUKHI VOWEL SIGN AA..GURMUKHI VOWEL SIGN UU +0A43..0A46 ; disallowed # NA .. +0A47..0A48 ; valid # 1.1 GURMUKHI VOWEL SIGN EE..GURMUKHI VOWEL SIGN AI +0A49..0A4A ; disallowed # NA .. +0A4B..0A4D ; valid # 1.1 GURMUKHI VOWEL SIGN OO..GURMUKHI SIGN VIRAMA +0A4E..0A50 ; disallowed # NA .. +0A51 ; valid # 5.1 GURMUKHI SIGN UDAAT +0A52..0A58 ; disallowed # NA .. +0A59 ; mapped ; 0A16 0A3C # 1.1 GURMUKHI LETTER KHHA +0A5A ; mapped ; 0A17 0A3C # 1.1 GURMUKHI LETTER GHHA +0A5B ; mapped ; 0A1C 0A3C # 1.1 GURMUKHI LETTER ZA +0A5C ; valid # 1.1 GURMUKHI LETTER RRA +0A5D ; disallowed # NA +0A5E ; mapped ; 0A2B 0A3C # 1.1 GURMUKHI LETTER FA +0A5F..0A65 ; disallowed # NA .. +0A66..0A74 ; valid # 1.1 GURMUKHI DIGIT ZERO..GURMUKHI EK ONKAR +0A75 ; valid # 5.1 GURMUKHI SIGN YAKASH +0A76..0A80 ; disallowed # NA .. +0A81..0A83 ; valid # 1.1 GUJARATI SIGN CANDRABINDU..GUJARATI SIGN VISARGA +0A84 ; disallowed # NA +0A85..0A8B ; valid # 1.1 GUJARATI LETTER A..GUJARATI LETTER VOCALIC R +0A8C ; valid # 4.0 GUJARATI LETTER VOCALIC L +0A8D ; valid # 1.1 GUJARATI VOWEL CANDRA E +0A8E ; disallowed # NA +0A8F..0A91 ; valid # 1.1 GUJARATI LETTER E..GUJARATI VOWEL CANDRA O +0A92 ; disallowed # NA +0A93..0AA8 ; valid # 1.1 GUJARATI LETTER O..GUJARATI LETTER NA +0AA9 ; disallowed # NA +0AAA..0AB0 ; valid # 1.1 GUJARATI LETTER PA..GUJARATI LETTER RA +0AB1 ; disallowed # NA +0AB2..0AB3 ; valid # 1.1 GUJARATI LETTER LA..GUJARATI LETTER LLA +0AB4 ; disallowed # NA +0AB5..0AB9 ; valid # 1.1 GUJARATI LETTER VA..GUJARATI LETTER HA +0ABA..0ABB ; disallowed # NA .. +0ABC..0AC5 ; valid # 1.1 GUJARATI SIGN NUKTA..GUJARATI VOWEL SIGN CANDRA E +0AC6 ; disallowed # NA +0AC7..0AC9 ; valid # 1.1 GUJARATI VOWEL SIGN E..GUJARATI VOWEL SIGN CANDRA O +0ACA ; disallowed # NA +0ACB..0ACD ; valid # 1.1 GUJARATI VOWEL SIGN O..GUJARATI SIGN VIRAMA +0ACE..0ACF ; disallowed # NA .. +0AD0 ; valid # 1.1 GUJARATI OM +0AD1..0ADF ; disallowed # NA .. 
+0AE0 ; valid # 1.1 GUJARATI LETTER VOCALIC RR +0AE1..0AE3 ; valid # 4.0 GUJARATI LETTER VOCALIC LL..GUJARATI VOWEL SIGN VOCALIC LL +0AE4..0AE5 ; disallowed # NA .. +0AE6..0AEF ; valid # 1.1 GUJARATI DIGIT ZERO..GUJARATI DIGIT NINE +0AF0 ; valid ; ; NV8 # 6.1 GUJARATI ABBREVIATION SIGN +0AF1 ; valid ; ; NV8 # 4.0 GUJARATI RUPEE SIGN +0AF2..0AF8 ; disallowed # NA .. +0AF9 ; valid # 8.0 GUJARATI LETTER ZHA +0AFA..0AFF ; valid # 10.0 GUJARATI SIGN SUKUN..GUJARATI SIGN TWO-CIRCLE NUKTA ABOVE +0B00 ; disallowed # NA +0B01..0B03 ; valid # 1.1 ORIYA SIGN CANDRABINDU..ORIYA SIGN VISARGA +0B04 ; disallowed # NA +0B05..0B0C ; valid # 1.1 ORIYA LETTER A..ORIYA LETTER VOCALIC L +0B0D..0B0E ; disallowed # NA .. +0B0F..0B10 ; valid # 1.1 ORIYA LETTER E..ORIYA LETTER AI +0B11..0B12 ; disallowed # NA .. +0B13..0B28 ; valid # 1.1 ORIYA LETTER O..ORIYA LETTER NA +0B29 ; disallowed # NA +0B2A..0B30 ; valid # 1.1 ORIYA LETTER PA..ORIYA LETTER RA +0B31 ; disallowed # NA +0B32..0B33 ; valid # 1.1 ORIYA LETTER LA..ORIYA LETTER LLA +0B34 ; disallowed # NA +0B35 ; valid # 4.0 ORIYA LETTER VA +0B36..0B39 ; valid # 1.1 ORIYA LETTER SHA..ORIYA LETTER HA +0B3A..0B3B ; disallowed # NA .. +0B3C..0B43 ; valid # 1.1 ORIYA SIGN NUKTA..ORIYA VOWEL SIGN VOCALIC R +0B44 ; valid # 5.1 ORIYA VOWEL SIGN VOCALIC RR +0B45..0B46 ; disallowed # NA .. +0B47..0B48 ; valid # 1.1 ORIYA VOWEL SIGN E..ORIYA VOWEL SIGN AI +0B49..0B4A ; disallowed # NA .. +0B4B..0B4D ; valid # 1.1 ORIYA VOWEL SIGN O..ORIYA SIGN VIRAMA +0B4E..0B55 ; disallowed # NA .. +0B56..0B57 ; valid # 1.1 ORIYA AI LENGTH MARK..ORIYA AU LENGTH MARK +0B58..0B5B ; disallowed # NA .. +0B5C ; mapped ; 0B21 0B3C # 1.1 ORIYA LETTER RRA +0B5D ; mapped ; 0B22 0B3C # 1.1 ORIYA LETTER RHA +0B5E ; disallowed # NA +0B5F..0B61 ; valid # 1.1 ORIYA LETTER YYA..ORIYA LETTER VOCALIC LL +0B62..0B63 ; valid # 5.1 ORIYA VOWEL SIGN VOCALIC L..ORIYA VOWEL SIGN VOCALIC LL +0B64..0B65 ; disallowed # NA .. +0B66..0B6F ; valid # 1.1 ORIYA DIGIT ZERO..ORIYA DIGIT NINE +0B70 ; valid ; ; NV8 # 1.1 ORIYA ISSHAR +0B71 ; valid # 4.0 ORIYA LETTER WA +0B72..0B77 ; valid ; ; NV8 # 6.0 ORIYA FRACTION ONE QUARTER..ORIYA FRACTION THREE SIXTEENTHS +0B78..0B81 ; disallowed # NA .. +0B82..0B83 ; valid # 1.1 TAMIL SIGN ANUSVARA..TAMIL SIGN VISARGA +0B84 ; disallowed # NA +0B85..0B8A ; valid # 1.1 TAMIL LETTER A..TAMIL LETTER UU +0B8B..0B8D ; disallowed # NA .. +0B8E..0B90 ; valid # 1.1 TAMIL LETTER E..TAMIL LETTER AI +0B91 ; disallowed # NA +0B92..0B95 ; valid # 1.1 TAMIL LETTER O..TAMIL LETTER KA +0B96..0B98 ; disallowed # NA .. +0B99..0B9A ; valid # 1.1 TAMIL LETTER NGA..TAMIL LETTER CA +0B9B ; disallowed # NA +0B9C ; valid # 1.1 TAMIL LETTER JA +0B9D ; disallowed # NA +0B9E..0B9F ; valid # 1.1 TAMIL LETTER NYA..TAMIL LETTER TTA +0BA0..0BA2 ; disallowed # NA .. +0BA3..0BA4 ; valid # 1.1 TAMIL LETTER NNA..TAMIL LETTER TA +0BA5..0BA7 ; disallowed # NA .. +0BA8..0BAA ; valid # 1.1 TAMIL LETTER NA..TAMIL LETTER PA +0BAB..0BAD ; disallowed # NA .. +0BAE..0BB5 ; valid # 1.1 TAMIL LETTER MA..TAMIL LETTER VA +0BB6 ; valid # 4.1 TAMIL LETTER SHA +0BB7..0BB9 ; valid # 1.1 TAMIL LETTER SSA..TAMIL LETTER HA +0BBA..0BBD ; disallowed # NA .. +0BBE..0BC2 ; valid # 1.1 TAMIL VOWEL SIGN AA..TAMIL VOWEL SIGN UU +0BC3..0BC5 ; disallowed # NA .. +0BC6..0BC8 ; valid # 1.1 TAMIL VOWEL SIGN E..TAMIL VOWEL SIGN AI +0BC9 ; disallowed # NA +0BCA..0BCD ; valid # 1.1 TAMIL VOWEL SIGN O..TAMIL SIGN VIRAMA +0BCE..0BCF ; disallowed # NA .. +0BD0 ; valid # 5.1 TAMIL OM +0BD1..0BD6 ; disallowed # NA .. 
+0BD7 ; valid # 1.1 TAMIL AU LENGTH MARK +0BD8..0BE5 ; disallowed # NA .. +0BE6 ; valid # 4.1 TAMIL DIGIT ZERO +0BE7..0BEF ; valid # 1.1 TAMIL DIGIT ONE..TAMIL DIGIT NINE +0BF0..0BF2 ; valid ; ; NV8 # 1.1 TAMIL NUMBER TEN..TAMIL NUMBER ONE THOUSAND +0BF3..0BFA ; valid ; ; NV8 # 4.0 TAMIL DAY SIGN..TAMIL NUMBER SIGN +0BFB..0BFF ; disallowed # NA .. +0C00 ; valid # 7.0 TELUGU SIGN COMBINING CANDRABINDU ABOVE +0C01..0C03 ; valid # 1.1 TELUGU SIGN CANDRABINDU..TELUGU SIGN VISARGA +0C04 ; disallowed # NA +0C05..0C0C ; valid # 1.1 TELUGU LETTER A..TELUGU LETTER VOCALIC L +0C0D ; disallowed # NA +0C0E..0C10 ; valid # 1.1 TELUGU LETTER E..TELUGU LETTER AI +0C11 ; disallowed # NA +0C12..0C28 ; valid # 1.1 TELUGU LETTER O..TELUGU LETTER NA +0C29 ; disallowed # NA +0C2A..0C33 ; valid # 1.1 TELUGU LETTER PA..TELUGU LETTER LLA +0C34 ; valid # 7.0 TELUGU LETTER LLLA +0C35..0C39 ; valid # 1.1 TELUGU LETTER VA..TELUGU LETTER HA +0C3A..0C3C ; disallowed # NA .. +0C3D ; valid # 5.1 TELUGU SIGN AVAGRAHA +0C3E..0C44 ; valid # 1.1 TELUGU VOWEL SIGN AA..TELUGU VOWEL SIGN VOCALIC RR +0C45 ; disallowed # NA +0C46..0C48 ; valid # 1.1 TELUGU VOWEL SIGN E..TELUGU VOWEL SIGN AI +0C49 ; disallowed # NA +0C4A..0C4D ; valid # 1.1 TELUGU VOWEL SIGN O..TELUGU SIGN VIRAMA +0C4E..0C54 ; disallowed # NA .. +0C55..0C56 ; valid # 1.1 TELUGU LENGTH MARK..TELUGU AI LENGTH MARK +0C57 ; disallowed # NA +0C58..0C59 ; valid # 5.1 TELUGU LETTER TSA..TELUGU LETTER DZA +0C5A ; valid # 8.0 TELUGU LETTER RRRA +0C5B..0C5F ; disallowed # NA .. +0C60..0C61 ; valid # 1.1 TELUGU LETTER VOCALIC RR..TELUGU LETTER VOCALIC LL +0C62..0C63 ; valid # 5.1 TELUGU VOWEL SIGN VOCALIC L..TELUGU VOWEL SIGN VOCALIC LL +0C64..0C65 ; disallowed # NA .. +0C66..0C6F ; valid # 1.1 TELUGU DIGIT ZERO..TELUGU DIGIT NINE +0C70..0C77 ; disallowed # NA .. +0C78..0C7F ; valid ; ; NV8 # 5.1 TELUGU FRACTION DIGIT ZERO FOR ODD POWERS OF FOUR..TELUGU SIGN TUUMU +0C80 ; valid # 9.0 KANNADA SIGN SPACING CANDRABINDU +0C81 ; valid # 7.0 KANNADA SIGN CANDRABINDU +0C82..0C83 ; valid # 1.1 KANNADA SIGN ANUSVARA..KANNADA SIGN VISARGA +0C84 ; disallowed # NA +0C85..0C8C ; valid # 1.1 KANNADA LETTER A..KANNADA LETTER VOCALIC L +0C8D ; disallowed # NA +0C8E..0C90 ; valid # 1.1 KANNADA LETTER E..KANNADA LETTER AI +0C91 ; disallowed # NA +0C92..0CA8 ; valid # 1.1 KANNADA LETTER O..KANNADA LETTER NA +0CA9 ; disallowed # NA +0CAA..0CB3 ; valid # 1.1 KANNADA LETTER PA..KANNADA LETTER LLA +0CB4 ; disallowed # NA +0CB5..0CB9 ; valid # 1.1 KANNADA LETTER VA..KANNADA LETTER HA +0CBA..0CBB ; disallowed # NA .. +0CBC..0CBD ; valid # 4.0 KANNADA SIGN NUKTA..KANNADA SIGN AVAGRAHA +0CBE..0CC4 ; valid # 1.1 KANNADA VOWEL SIGN AA..KANNADA VOWEL SIGN VOCALIC RR +0CC5 ; disallowed # NA +0CC6..0CC8 ; valid # 1.1 KANNADA VOWEL SIGN E..KANNADA VOWEL SIGN AI +0CC9 ; disallowed # NA +0CCA..0CCD ; valid # 1.1 KANNADA VOWEL SIGN O..KANNADA SIGN VIRAMA +0CCE..0CD4 ; disallowed # NA .. +0CD5..0CD6 ; valid # 1.1 KANNADA LENGTH MARK..KANNADA AI LENGTH MARK +0CD7..0CDD ; disallowed # NA .. +0CDE ; valid # 1.1 KANNADA LETTER FA +0CDF ; disallowed # NA +0CE0..0CE1 ; valid # 1.1 KANNADA LETTER VOCALIC RR..KANNADA LETTER VOCALIC LL +0CE2..0CE3 ; valid # 5.0 KANNADA VOWEL SIGN VOCALIC L..KANNADA VOWEL SIGN VOCALIC LL +0CE4..0CE5 ; disallowed # NA .. +0CE6..0CEF ; valid # 1.1 KANNADA DIGIT ZERO..KANNADA DIGIT NINE +0CF0 ; disallowed # NA +0CF1..0CF2 ; valid # 5.0 KANNADA SIGN JIHVAMULIYA..KANNADA SIGN UPADHMANIYA +0CF3..0CFF ; disallowed # NA .. 
+0D00 ; valid # 10.0 MALAYALAM SIGN COMBINING ANUSVARA ABOVE +0D01 ; valid # 7.0 MALAYALAM SIGN CANDRABINDU +0D02..0D03 ; valid # 1.1 MALAYALAM SIGN ANUSVARA..MALAYALAM SIGN VISARGA +0D04 ; disallowed # NA +0D05..0D0C ; valid # 1.1 MALAYALAM LETTER A..MALAYALAM LETTER VOCALIC L +0D0D ; disallowed # NA +0D0E..0D10 ; valid # 1.1 MALAYALAM LETTER E..MALAYALAM LETTER AI +0D11 ; disallowed # NA +0D12..0D28 ; valid # 1.1 MALAYALAM LETTER O..MALAYALAM LETTER NA +0D29 ; valid # 6.0 MALAYALAM LETTER NNNA +0D2A..0D39 ; valid # 1.1 MALAYALAM LETTER PA..MALAYALAM LETTER HA +0D3A ; valid # 6.0 MALAYALAM LETTER TTTA +0D3B..0D3C ; valid # 10.0 MALAYALAM SIGN VERTICAL BAR VIRAMA..MALAYALAM SIGN CIRCULAR VIRAMA +0D3D ; valid # 5.1 MALAYALAM SIGN AVAGRAHA +0D3E..0D43 ; valid # 1.1 MALAYALAM VOWEL SIGN AA..MALAYALAM VOWEL SIGN VOCALIC R +0D44 ; valid # 5.1 MALAYALAM VOWEL SIGN VOCALIC RR +0D45 ; disallowed # NA +0D46..0D48 ; valid # 1.1 MALAYALAM VOWEL SIGN E..MALAYALAM VOWEL SIGN AI +0D49 ; disallowed # NA +0D4A..0D4D ; valid # 1.1 MALAYALAM VOWEL SIGN O..MALAYALAM SIGN VIRAMA +0D4E ; valid # 6.0 MALAYALAM LETTER DOT REPH +0D4F ; valid ; ; NV8 # 9.0 MALAYALAM SIGN PARA +0D50..0D53 ; disallowed # NA .. +0D54..0D56 ; valid # 9.0 MALAYALAM LETTER CHILLU M..MALAYALAM LETTER CHILLU LLL +0D57 ; valid # 1.1 MALAYALAM AU LENGTH MARK +0D58..0D5E ; valid ; ; NV8 # 9.0 MALAYALAM FRACTION ONE ONE-HUNDRED-AND-SIXTIETH..MALAYALAM FRACTION ONE FIFTH +0D5F ; valid # 8.0 MALAYALAM LETTER ARCHAIC II +0D60..0D61 ; valid # 1.1 MALAYALAM LETTER VOCALIC RR..MALAYALAM LETTER VOCALIC LL +0D62..0D63 ; valid # 5.1 MALAYALAM VOWEL SIGN VOCALIC L..MALAYALAM VOWEL SIGN VOCALIC LL +0D64..0D65 ; disallowed # NA .. +0D66..0D6F ; valid # 1.1 MALAYALAM DIGIT ZERO..MALAYALAM DIGIT NINE +0D70..0D75 ; valid ; ; NV8 # 5.1 MALAYALAM NUMBER TEN..MALAYALAM FRACTION THREE QUARTERS +0D76..0D78 ; valid ; ; NV8 # 9.0 MALAYALAM FRACTION ONE SIXTEENTH..MALAYALAM FRACTION THREE SIXTEENTHS +0D79 ; valid ; ; NV8 # 5.1 MALAYALAM DATE MARK +0D7A..0D7F ; valid # 5.1 MALAYALAM LETTER CHILLU NN..MALAYALAM LETTER CHILLU K +0D80..0D81 ; disallowed # NA .. +0D82..0D83 ; valid # 3.0 SINHALA SIGN ANUSVARAYA..SINHALA SIGN VISARGAYA +0D84 ; disallowed # NA +0D85..0D96 ; valid # 3.0 SINHALA LETTER AYANNA..SINHALA LETTER AUYANNA +0D97..0D99 ; disallowed # NA .. +0D9A..0DB1 ; valid # 3.0 SINHALA LETTER ALPAPRAANA KAYANNA..SINHALA LETTER DANTAJA NAYANNA +0DB2 ; disallowed # NA +0DB3..0DBB ; valid # 3.0 SINHALA LETTER SANYAKA DAYANNA..SINHALA LETTER RAYANNA +0DBC ; disallowed # NA +0DBD ; valid # 3.0 SINHALA LETTER DANTAJA LAYANNA +0DBE..0DBF ; disallowed # NA .. +0DC0..0DC6 ; valid # 3.0 SINHALA LETTER VAYANNA..SINHALA LETTER FAYANNA +0DC7..0DC9 ; disallowed # NA .. +0DCA ; valid # 3.0 SINHALA SIGN AL-LAKUNA +0DCB..0DCE ; disallowed # NA .. +0DCF..0DD4 ; valid # 3.0 SINHALA VOWEL SIGN AELA-PILLA..SINHALA VOWEL SIGN KETTI PAA-PILLA +0DD5 ; disallowed # NA +0DD6 ; valid # 3.0 SINHALA VOWEL SIGN DIGA PAA-PILLA +0DD7 ; disallowed # NA +0DD8..0DDF ; valid # 3.0 SINHALA VOWEL SIGN GAETTA-PILLA..SINHALA VOWEL SIGN GAYANUKITTA +0DE0..0DE5 ; disallowed # NA .. +0DE6..0DEF ; valid # 7.0 SINHALA LITH DIGIT ZERO..SINHALA LITH DIGIT NINE +0DF0..0DF1 ; disallowed # NA .. +0DF2..0DF3 ; valid # 3.0 SINHALA VOWEL SIGN DIGA GAETTA-PILLA..SINHALA VOWEL SIGN DIGA GAYANUKITTA +0DF4 ; valid ; ; NV8 # 3.0 SINHALA PUNCTUATION KUNDDALIYA +0DF5..0E00 ; disallowed # NA .. 
+0E01..0E32 ; valid # 1.1 THAI CHARACTER KO KAI..THAI CHARACTER SARA AA +0E33 ; mapped ; 0E4D 0E32 # 1.1 THAI CHARACTER SARA AM +0E34..0E3A ; valid # 1.1 THAI CHARACTER SARA I..THAI CHARACTER PHINTHU +0E3B..0E3E ; disallowed # NA .. +0E3F ; valid ; ; NV8 # 1.1 THAI CURRENCY SYMBOL BAHT +0E40..0E4E ; valid # 1.1 THAI CHARACTER SARA E..THAI CHARACTER YAMAKKAN +0E4F ; valid ; ; NV8 # 1.1 THAI CHARACTER FONGMAN +0E50..0E59 ; valid # 1.1 THAI DIGIT ZERO..THAI DIGIT NINE +0E5A..0E5B ; valid ; ; NV8 # 1.1 THAI CHARACTER ANGKHANKHU..THAI CHARACTER KHOMUT +0E5C..0E80 ; disallowed # NA .. +0E81..0E82 ; valid # 1.1 LAO LETTER KO..LAO LETTER KHO SUNG +0E83 ; disallowed # NA +0E84 ; valid # 1.1 LAO LETTER KHO TAM +0E85..0E86 ; disallowed # NA .. +0E87..0E88 ; valid # 1.1 LAO LETTER NGO..LAO LETTER CO +0E89 ; disallowed # NA +0E8A ; valid # 1.1 LAO LETTER SO TAM +0E8B..0E8C ; disallowed # NA .. +0E8D ; valid # 1.1 LAO LETTER NYO +0E8E..0E93 ; disallowed # NA .. +0E94..0E97 ; valid # 1.1 LAO LETTER DO..LAO LETTER THO TAM +0E98 ; disallowed # NA +0E99..0E9F ; valid # 1.1 LAO LETTER NO..LAO LETTER FO SUNG +0EA0 ; disallowed # NA +0EA1..0EA3 ; valid # 1.1 LAO LETTER MO..LAO LETTER LO LING +0EA4 ; disallowed # NA +0EA5 ; valid # 1.1 LAO LETTER LO LOOT +0EA6 ; disallowed # NA +0EA7 ; valid # 1.1 LAO LETTER WO +0EA8..0EA9 ; disallowed # NA .. +0EAA..0EAB ; valid # 1.1 LAO LETTER SO SUNG..LAO LETTER HO SUNG +0EAC ; disallowed # NA +0EAD..0EB2 ; valid # 1.1 LAO LETTER O..LAO VOWEL SIGN AA +0EB3 ; mapped ; 0ECD 0EB2 # 1.1 LAO VOWEL SIGN AM +0EB4..0EB9 ; valid # 1.1 LAO VOWEL SIGN I..LAO VOWEL SIGN UU +0EBA ; disallowed # NA +0EBB..0EBD ; valid # 1.1 LAO VOWEL SIGN MAI KON..LAO SEMIVOWEL SIGN NYO +0EBE..0EBF ; disallowed # NA .. +0EC0..0EC4 ; valid # 1.1 LAO VOWEL SIGN E..LAO VOWEL SIGN AI +0EC5 ; disallowed # NA +0EC6 ; valid # 1.1 LAO KO LA +0EC7 ; disallowed # NA +0EC8..0ECD ; valid # 1.1 LAO TONE MAI EK..LAO NIGGAHITA +0ECE..0ECF ; disallowed # NA .. +0ED0..0ED9 ; valid # 1.1 LAO DIGIT ZERO..LAO DIGIT NINE +0EDA..0EDB ; disallowed # NA .. +0EDC ; mapped ; 0EAB 0E99 # 1.1 LAO HO NO +0EDD ; mapped ; 0EAB 0EA1 # 1.1 LAO HO MO +0EDE..0EDF ; valid # 6.1 LAO LETTER KHMU GO..LAO LETTER KHMU NYO +0EE0..0EFF ; disallowed # NA .. 
+0F00 ; valid # 2.0 TIBETAN SYLLABLE OM +0F01..0F0A ; valid ; ; NV8 # 2.0 TIBETAN MARK GTER YIG MGO TRUNCATED A..TIBETAN MARK BKA- SHOG YIG MGO +0F0B ; valid # 2.0 TIBETAN MARK INTERSYLLABIC TSHEG +0F0C ; mapped ; 0F0B # 2.0 TIBETAN MARK DELIMITER TSHEG BSTAR +0F0D..0F17 ; valid ; ; NV8 # 2.0 TIBETAN MARK SHAD..TIBETAN ASTROLOGICAL SIGN SGRA GCAN -CHAR RTAGS +0F18..0F19 ; valid # 2.0 TIBETAN ASTROLOGICAL SIGN -KHYUD PA..TIBETAN ASTROLOGICAL SIGN SDONG TSHUGS +0F1A..0F1F ; valid ; ; NV8 # 2.0 TIBETAN SIGN RDEL DKAR GCIG..TIBETAN SIGN RDEL DKAR RDEL NAG +0F20..0F29 ; valid # 2.0 TIBETAN DIGIT ZERO..TIBETAN DIGIT NINE +0F2A..0F34 ; valid ; ; NV8 # 2.0 TIBETAN DIGIT HALF ONE..TIBETAN MARK BSDUS RTAGS +0F35 ; valid # 2.0 TIBETAN MARK NGAS BZUNG NYI ZLA +0F36 ; valid ; ; NV8 # 2.0 TIBETAN MARK CARET -DZUD RTAGS BZHI MIG CAN +0F37 ; valid # 2.0 TIBETAN MARK NGAS BZUNG SGOR RTAGS +0F38 ; valid ; ; NV8 # 2.0 TIBETAN MARK CHE MGO +0F39 ; valid # 2.0 TIBETAN MARK TSA -PHRU +0F3A..0F3D ; valid ; ; NV8 # 2.0 TIBETAN MARK GUG RTAGS GYON..TIBETAN MARK ANG KHANG GYAS +0F3E..0F42 ; valid # 2.0 TIBETAN SIGN YAR TSHES..TIBETAN LETTER GA +0F43 ; mapped ; 0F42 0FB7 # 2.0 TIBETAN LETTER GHA +0F44..0F47 ; valid # 2.0 TIBETAN LETTER NGA..TIBETAN LETTER JA +0F48 ; disallowed # NA +0F49..0F4C ; valid # 2.0 TIBETAN LETTER NYA..TIBETAN LETTER DDA +0F4D ; mapped ; 0F4C 0FB7 # 2.0 TIBETAN LETTER DDHA +0F4E..0F51 ; valid # 2.0 TIBETAN LETTER NNA..TIBETAN LETTER DA +0F52 ; mapped ; 0F51 0FB7 # 2.0 TIBETAN LETTER DHA +0F53..0F56 ; valid # 2.0 TIBETAN LETTER NA..TIBETAN LETTER BA +0F57 ; mapped ; 0F56 0FB7 # 2.0 TIBETAN LETTER BHA +0F58..0F5B ; valid # 2.0 TIBETAN LETTER MA..TIBETAN LETTER DZA +0F5C ; mapped ; 0F5B 0FB7 # 2.0 TIBETAN LETTER DZHA +0F5D..0F68 ; valid # 2.0 TIBETAN LETTER WA..TIBETAN LETTER A +0F69 ; mapped ; 0F40 0FB5 # 2.0 TIBETAN LETTER KSSA +0F6A ; valid # 3.0 TIBETAN LETTER FIXED-FORM RA +0F6B..0F6C ; valid # 5.1 TIBETAN LETTER KKA..TIBETAN LETTER RRA +0F6D..0F70 ; disallowed # NA .. 
+0F71..0F72 ; valid # 2.0 TIBETAN VOWEL SIGN AA..TIBETAN VOWEL SIGN I +0F73 ; mapped ; 0F71 0F72 # 2.0 TIBETAN VOWEL SIGN II +0F74 ; valid # 2.0 TIBETAN VOWEL SIGN U +0F75 ; mapped ; 0F71 0F74 # 2.0 TIBETAN VOWEL SIGN UU +0F76 ; mapped ; 0FB2 0F80 # 2.0 TIBETAN VOWEL SIGN VOCALIC R +0F77 ; mapped ; 0FB2 0F71 0F80 #2.0 TIBETAN VOWEL SIGN VOCALIC RR +0F78 ; mapped ; 0FB3 0F80 # 2.0 TIBETAN VOWEL SIGN VOCALIC L +0F79 ; mapped ; 0FB3 0F71 0F80 #2.0 TIBETAN VOWEL SIGN VOCALIC LL +0F7A..0F80 ; valid # 2.0 TIBETAN VOWEL SIGN E..TIBETAN VOWEL SIGN REVERSED I +0F81 ; mapped ; 0F71 0F80 # 2.0 TIBETAN VOWEL SIGN REVERSED II +0F82..0F84 ; valid # 2.0 TIBETAN SIGN NYI ZLA NAA DA..TIBETAN MARK HALANTA +0F85 ; valid ; ; NV8 # 2.0 TIBETAN MARK PALUTA +0F86..0F8B ; valid # 2.0 TIBETAN SIGN LCI RTAGS..TIBETAN SIGN GRU MED RGYINGS +0F8C..0F8F ; valid # 6.0 TIBETAN SIGN INVERTED MCHU CAN..TIBETAN SUBJOINED SIGN INVERTED MCHU CAN +0F90..0F92 ; valid # 2.0 TIBETAN SUBJOINED LETTER KA..TIBETAN SUBJOINED LETTER GA +0F93 ; mapped ; 0F92 0FB7 # 2.0 TIBETAN SUBJOINED LETTER GHA +0F94..0F95 ; valid # 2.0 TIBETAN SUBJOINED LETTER NGA..TIBETAN SUBJOINED LETTER CA +0F96 ; valid # 3.0 TIBETAN SUBJOINED LETTER CHA +0F97 ; valid # 2.0 TIBETAN SUBJOINED LETTER JA +0F98 ; disallowed # NA +0F99..0F9C ; valid # 2.0 TIBETAN SUBJOINED LETTER NYA..TIBETAN SUBJOINED LETTER DDA +0F9D ; mapped ; 0F9C 0FB7 # 2.0 TIBETAN SUBJOINED LETTER DDHA +0F9E..0FA1 ; valid # 2.0 TIBETAN SUBJOINED LETTER NNA..TIBETAN SUBJOINED LETTER DA +0FA2 ; mapped ; 0FA1 0FB7 # 2.0 TIBETAN SUBJOINED LETTER DHA +0FA3..0FA6 ; valid # 2.0 TIBETAN SUBJOINED LETTER NA..TIBETAN SUBJOINED LETTER BA +0FA7 ; mapped ; 0FA6 0FB7 # 2.0 TIBETAN SUBJOINED LETTER BHA +0FA8..0FAB ; valid # 2.0 TIBETAN SUBJOINED LETTER MA..TIBETAN SUBJOINED LETTER DZA +0FAC ; mapped ; 0FAB 0FB7 # 2.0 TIBETAN SUBJOINED LETTER DZHA +0FAD ; valid # 2.0 TIBETAN SUBJOINED LETTER WA +0FAE..0FB0 ; valid # 3.0 TIBETAN SUBJOINED LETTER ZHA..TIBETAN SUBJOINED LETTER -A +0FB1..0FB7 ; valid # 2.0 TIBETAN SUBJOINED LETTER YA..TIBETAN SUBJOINED LETTER HA +0FB8 ; valid # 3.0 TIBETAN SUBJOINED LETTER A +0FB9 ; mapped ; 0F90 0FB5 # 2.0 TIBETAN SUBJOINED LETTER KSSA +0FBA..0FBC ; valid # 3.0 TIBETAN SUBJOINED LETTER FIXED-FORM WA..TIBETAN SUBJOINED LETTER FIXED-FORM RA +0FBD ; disallowed # NA +0FBE..0FC5 ; valid ; ; NV8 # 3.0 TIBETAN KU RU KHA..TIBETAN SYMBOL RDO RJE +0FC6 ; valid # 3.0 TIBETAN SYMBOL PADMA GDAN +0FC7..0FCC ; valid ; ; NV8 # 3.0 TIBETAN SYMBOL RDO RJE RGYA GRAM..TIBETAN SYMBOL NOR BU BZHI -KHYIL +0FCD ; disallowed # NA +0FCE ; valid ; ; NV8 # 5.1 TIBETAN SIGN RDEL NAG RDEL DKAR +0FCF ; valid ; ; NV8 # 3.0 TIBETAN SIGN RDEL NAG GSUM +0FD0..0FD1 ; valid ; ; NV8 # 4.1 TIBETAN MARK BSKA- SHOG GI MGO RGYAN..TIBETAN MARK MNYAM YIG GI MGO RGYAN +0FD2..0FD4 ; valid ; ; NV8 # 5.1 TIBETAN MARK NYIS TSHEG..TIBETAN MARK CLOSING BRDA RNYING YIG MGO SGAB MA +0FD5..0FD8 ; valid ; ; NV8 # 5.2 RIGHT-FACING SVASTI SIGN..LEFT-FACING SVASTI SIGN WITH DOTS +0FD9..0FDA ; valid ; ; NV8 # 6.0 TIBETAN MARK LEADING MCHAN RTAGS..TIBETAN MARK TRAILING MCHAN RTAGS +0FDB..0FFF ; disallowed # NA .. 
+1000..1021 ; valid # 3.0 MYANMAR LETTER KA..MYANMAR LETTER A +1022 ; valid # 5.1 MYANMAR LETTER SHAN A +1023..1027 ; valid # 3.0 MYANMAR LETTER I..MYANMAR LETTER E +1028 ; valid # 5.1 MYANMAR LETTER MON E +1029..102A ; valid # 3.0 MYANMAR LETTER O..MYANMAR LETTER AU +102B ; valid # 5.1 MYANMAR VOWEL SIGN TALL AA +102C..1032 ; valid # 3.0 MYANMAR VOWEL SIGN AA..MYANMAR VOWEL SIGN AI +1033..1035 ; valid # 5.1 MYANMAR VOWEL SIGN MON II..MYANMAR VOWEL SIGN E ABOVE +1036..1039 ; valid # 3.0 MYANMAR SIGN ANUSVARA..MYANMAR SIGN VIRAMA +103A..103F ; valid # 5.1 MYANMAR SIGN ASAT..MYANMAR LETTER GREAT SA +1040..1049 ; valid # 3.0 MYANMAR DIGIT ZERO..MYANMAR DIGIT NINE +104A..104F ; valid ; ; NV8 # 3.0 MYANMAR SIGN LITTLE SECTION..MYANMAR SYMBOL GENITIVE +1050..1059 ; valid # 3.0 MYANMAR LETTER SHA..MYANMAR VOWEL SIGN VOCALIC LL +105A..1099 ; valid # 5.1 MYANMAR LETTER MON NGA..MYANMAR SHAN DIGIT NINE +109A..109D ; valid # 5.2 MYANMAR SIGN KHAMTI TONE-1..MYANMAR VOWEL SIGN AITON AI +109E..109F ; valid ; ; NV8 # 5.1 MYANMAR SYMBOL SHAN ONE..MYANMAR SYMBOL SHAN EXCLAMATION +10A0..10C5 ; disallowed # 1.1 GEORGIAN CAPITAL LETTER AN..GEORGIAN CAPITAL LETTER HOE +10C6 ; disallowed # NA +10C7 ; mapped ; 2D27 # 6.1 GEORGIAN CAPITAL LETTER YN +10C8..10CC ; disallowed # NA .. +10CD ; mapped ; 2D2D # 6.1 GEORGIAN CAPITAL LETTER AEN +10CE..10CF ; disallowed # NA .. +10D0..10F6 ; valid # 1.1 GEORGIAN LETTER AN..GEORGIAN LETTER FI +10F7..10F8 ; valid # 3.2 GEORGIAN LETTER YN..GEORGIAN LETTER ELIFI +10F9..10FA ; valid # 4.1 GEORGIAN LETTER TURNED GAN..GEORGIAN LETTER AIN +10FB ; valid ; ; NV8 # 1.1 GEORGIAN PARAGRAPH SEPARATOR +10FC ; mapped ; 10DC # 4.1 MODIFIER LETTER GEORGIAN NAR +10FD..10FF ; valid # 6.1 GEORGIAN LETTER AEN..GEORGIAN LETTER LABIAL SIGN +1100..1159 ; valid ; ; NV8 # 1.1 HANGUL CHOSEONG KIYEOK..HANGUL CHOSEONG YEORINHIEUH +115A..115E ; valid ; ; NV8 # 5.2 HANGUL CHOSEONG KIYEOK-TIKEUT..HANGUL CHOSEONG TIKEUT-RIEUL +115F..1160 ; disallowed # 1.1 HANGUL CHOSEONG FILLER..HANGUL JUNGSEONG FILLER +1161..11A2 ; valid ; ; NV8 # 1.1 HANGUL JUNGSEONG A..HANGUL JUNGSEONG SSANGARAEA +11A3..11A7 ; valid ; ; NV8 # 5.2 HANGUL JUNGSEONG A-EU..HANGUL JUNGSEONG O-YAE +11A8..11F9 ; valid ; ; NV8 # 1.1 HANGUL JONGSEONG KIYEOK..HANGUL JONGSEONG YEORINHIEUH +11FA..11FF ; valid ; ; NV8 # 5.2 HANGUL JONGSEONG KIYEOK-NIEUN..HANGUL JONGSEONG SSANGNIEUN +1200..1206 ; valid # 3.0 ETHIOPIC SYLLABLE HA..ETHIOPIC SYLLABLE HO +1207 ; valid # 4.1 ETHIOPIC SYLLABLE HOA +1208..1246 ; valid # 3.0 ETHIOPIC SYLLABLE LA..ETHIOPIC SYLLABLE QO +1247 ; valid # 4.1 ETHIOPIC SYLLABLE QOA +1248 ; valid # 3.0 ETHIOPIC SYLLABLE QWA +1249 ; disallowed # NA +124A..124D ; valid # 3.0 ETHIOPIC SYLLABLE QWI..ETHIOPIC SYLLABLE QWE +124E..124F ; disallowed # NA .. +1250..1256 ; valid # 3.0 ETHIOPIC SYLLABLE QHA..ETHIOPIC SYLLABLE QHO +1257 ; disallowed # NA +1258 ; valid # 3.0 ETHIOPIC SYLLABLE QHWA +1259 ; disallowed # NA +125A..125D ; valid # 3.0 ETHIOPIC SYLLABLE QHWI..ETHIOPIC SYLLABLE QHWE +125E..125F ; disallowed # NA .. +1260..1286 ; valid # 3.0 ETHIOPIC SYLLABLE BA..ETHIOPIC SYLLABLE XO +1287 ; valid # 4.1 ETHIOPIC SYLLABLE XOA +1288 ; valid # 3.0 ETHIOPIC SYLLABLE XWA +1289 ; disallowed # NA +128A..128D ; valid # 3.0 ETHIOPIC SYLLABLE XWI..ETHIOPIC SYLLABLE XWE +128E..128F ; disallowed # NA .. 
+1290..12AE ; valid # 3.0 ETHIOPIC SYLLABLE NA..ETHIOPIC SYLLABLE KO +12AF ; valid # 4.1 ETHIOPIC SYLLABLE KOA +12B0 ; valid # 3.0 ETHIOPIC SYLLABLE KWA +12B1 ; disallowed # NA +12B2..12B5 ; valid # 3.0 ETHIOPIC SYLLABLE KWI..ETHIOPIC SYLLABLE KWE +12B6..12B7 ; disallowed # NA .. +12B8..12BE ; valid # 3.0 ETHIOPIC SYLLABLE KXA..ETHIOPIC SYLLABLE KXO +12BF ; disallowed # NA +12C0 ; valid # 3.0 ETHIOPIC SYLLABLE KXWA +12C1 ; disallowed # NA +12C2..12C5 ; valid # 3.0 ETHIOPIC SYLLABLE KXWI..ETHIOPIC SYLLABLE KXWE +12C6..12C7 ; disallowed # NA .. +12C8..12CE ; valid # 3.0 ETHIOPIC SYLLABLE WA..ETHIOPIC SYLLABLE WO +12CF ; valid # 4.1 ETHIOPIC SYLLABLE WOA +12D0..12D6 ; valid # 3.0 ETHIOPIC SYLLABLE PHARYNGEAL A..ETHIOPIC SYLLABLE PHARYNGEAL O +12D7 ; disallowed # NA +12D8..12EE ; valid # 3.0 ETHIOPIC SYLLABLE ZA..ETHIOPIC SYLLABLE YO +12EF ; valid # 4.1 ETHIOPIC SYLLABLE YOA +12F0..130E ; valid # 3.0 ETHIOPIC SYLLABLE DA..ETHIOPIC SYLLABLE GO +130F ; valid # 4.1 ETHIOPIC SYLLABLE GOA +1310 ; valid # 3.0 ETHIOPIC SYLLABLE GWA +1311 ; disallowed # NA +1312..1315 ; valid # 3.0 ETHIOPIC SYLLABLE GWI..ETHIOPIC SYLLABLE GWE +1316..1317 ; disallowed # NA .. +1318..131E ; valid # 3.0 ETHIOPIC SYLLABLE GGA..ETHIOPIC SYLLABLE GGO +131F ; valid # 4.1 ETHIOPIC SYLLABLE GGWAA +1320..1346 ; valid # 3.0 ETHIOPIC SYLLABLE THA..ETHIOPIC SYLLABLE TZO +1347 ; valid # 4.1 ETHIOPIC SYLLABLE TZOA +1348..135A ; valid # 3.0 ETHIOPIC SYLLABLE FA..ETHIOPIC SYLLABLE FYA +135B..135C ; disallowed # NA .. +135D..135E ; valid # 6.0 ETHIOPIC COMBINING GEMINATION AND VOWEL LENGTH MARK..ETHIOPIC COMBINING VOWEL LENGTH MARK +135F ; valid # 4.1 ETHIOPIC COMBINING GEMINATION MARK +1360 ; valid ; ; NV8 # 4.1 ETHIOPIC SECTION MARK +1361..137C ; valid ; ; NV8 # 3.0 ETHIOPIC WORDSPACE..ETHIOPIC NUMBER TEN THOUSAND +137D..137F ; disallowed # NA .. +1380..138F ; valid # 4.1 ETHIOPIC SYLLABLE SEBATBEIT MWA..ETHIOPIC SYLLABLE PWE +1390..1399 ; valid ; ; NV8 # 4.1 ETHIOPIC TONAL MARK YIZET..ETHIOPIC TONAL MARK KURT +139A..139F ; disallowed # NA .. +13A0..13F4 ; valid # 3.0 CHEROKEE LETTER A..CHEROKEE LETTER YV +13F5 ; valid # 8.0 CHEROKEE LETTER MV +13F6..13F7 ; disallowed # NA .. +13F8 ; mapped ; 13F0 # 8.0 CHEROKEE SMALL LETTER YE +13F9 ; mapped ; 13F1 # 8.0 CHEROKEE SMALL LETTER YI +13FA ; mapped ; 13F2 # 8.0 CHEROKEE SMALL LETTER YO +13FB ; mapped ; 13F3 # 8.0 CHEROKEE SMALL LETTER YU +13FC ; mapped ; 13F4 # 8.0 CHEROKEE SMALL LETTER YV +13FD ; mapped ; 13F5 # 8.0 CHEROKEE SMALL LETTER MV +13FE..13FF ; disallowed # NA .. +1400 ; valid ; ; NV8 # 5.2 CANADIAN SYLLABICS HYPHEN +1401..166C ; valid # 3.0 CANADIAN SYLLABICS E..CANADIAN SYLLABICS CARRIER TTSA +166D..166E ; valid ; ; NV8 # 3.0 CANADIAN SYLLABICS CHI SIGN..CANADIAN SYLLABICS FULL STOP +166F..1676 ; valid # 3.0 CANADIAN SYLLABICS QAI..CANADIAN SYLLABICS NNGAA +1677..167F ; valid # 5.2 CANADIAN SYLLABICS WOODS-CREE THWEE..CANADIAN SYLLABICS BLACKFOOT W +1680 ; disallowed # 3.0 OGHAM SPACE MARK +1681..169A ; valid # 3.0 OGHAM LETTER BEITH..OGHAM LETTER PEITH +169B..169C ; valid ; ; NV8 # 3.0 OGHAM FEATHER MARK..OGHAM REVERSED FEATHER MARK +169D..169F ; disallowed # NA .. +16A0..16EA ; valid # 3.0 RUNIC LETTER FEHU FEOH FE F..RUNIC LETTER X +16EB..16F0 ; valid ; ; NV8 # 3.0 RUNIC SINGLE PUNCTUATION..RUNIC BELGTHOR SYMBOL +16F1..16F8 ; valid # 7.0 RUNIC LETTER K..RUNIC LETTER FRANKS CASKET AESC +16F9..16FF ; disallowed # NA .. 
+1700..170C ; valid # 3.2 TAGALOG LETTER A..TAGALOG LETTER YA +170D ; disallowed # NA +170E..1714 ; valid # 3.2 TAGALOG LETTER LA..TAGALOG SIGN VIRAMA +1715..171F ; disallowed # NA .. +1720..1734 ; valid # 3.2 HANUNOO LETTER A..HANUNOO SIGN PAMUDPOD +1735..1736 ; valid ; ; NV8 # 3.2 PHILIPPINE SINGLE PUNCTUATION..PHILIPPINE DOUBLE PUNCTUATION +1737..173F ; disallowed # NA .. +1740..1753 ; valid # 3.2 BUHID LETTER A..BUHID VOWEL SIGN U +1754..175F ; disallowed # NA .. +1760..176C ; valid # 3.2 TAGBANWA LETTER A..TAGBANWA LETTER YA +176D ; disallowed # NA +176E..1770 ; valid # 3.2 TAGBANWA LETTER LA..TAGBANWA LETTER SA +1771 ; disallowed # NA +1772..1773 ; valid # 3.2 TAGBANWA VOWEL SIGN I..TAGBANWA VOWEL SIGN U +1774..177F ; disallowed # NA .. +1780..17B3 ; valid # 3.0 KHMER LETTER KA..KHMER INDEPENDENT VOWEL QAU +17B4..17B5 ; disallowed # 3.0 KHMER VOWEL INHERENT AQ..KHMER VOWEL INHERENT AA +17B6..17D3 ; valid # 3.0 KHMER VOWEL SIGN AA..KHMER SIGN BATHAMASAT +17D4..17D6 ; valid ; ; NV8 # 3.0 KHMER SIGN KHAN..KHMER SIGN CAMNUC PII KUUH +17D7 ; valid # 3.0 KHMER SIGN LEK TOO +17D8..17DB ; valid ; ; NV8 # 3.0 KHMER SIGN BEYYAL..KHMER CURRENCY SYMBOL RIEL +17DC ; valid # 3.0 KHMER SIGN AVAKRAHASANYA +17DD ; valid # 4.0 KHMER SIGN ATTHACAN +17DE..17DF ; disallowed # NA .. +17E0..17E9 ; valid # 3.0 KHMER DIGIT ZERO..KHMER DIGIT NINE +17EA..17EF ; disallowed # NA .. +17F0..17F9 ; valid ; ; NV8 # 4.0 KHMER SYMBOL LEK ATTAK SON..KHMER SYMBOL LEK ATTAK PRAM-BUON +17FA..17FF ; disallowed # NA .. +1800..1805 ; valid ; ; NV8 # 3.0 MONGOLIAN BIRGA..MONGOLIAN FOUR DOTS +1806 ; disallowed # 3.0 MONGOLIAN TODO SOFT HYPHEN +1807..180A ; valid ; ; NV8 # 3.0 MONGOLIAN SIBE SYLLABLE BOUNDARY MARKER..MONGOLIAN NIRUGU +180B..180D ; ignored # 3.0 MONGOLIAN FREE VARIATION SELECTOR ONE..MONGOLIAN FREE VARIATION SELECTOR THREE +180E ; disallowed # 3.0 MONGOLIAN VOWEL SEPARATOR +180F ; disallowed # NA +1810..1819 ; valid # 3.0 MONGOLIAN DIGIT ZERO..MONGOLIAN DIGIT NINE +181A..181F ; disallowed # NA .. +1820..1877 ; valid # 3.0 MONGOLIAN LETTER A..MONGOLIAN LETTER MANCHU ZHA +1878..187F ; disallowed # NA .. +1880..18A9 ; valid # 3.0 MONGOLIAN LETTER ALI GALI ANUSVARA ONE..MONGOLIAN LETTER ALI GALI DAGALGA +18AA ; valid # 5.1 MONGOLIAN LETTER MANCHU ALI GALI LHA +18AB..18AF ; disallowed # NA .. +18B0..18F5 ; valid # 5.2 CANADIAN SYLLABICS OY..CANADIAN SYLLABICS CARRIER DENTAL S +18F6..18FF ; disallowed # NA .. +1900..191C ; valid # 4.0 LIMBU VOWEL-CARRIER LETTER..LIMBU LETTER HA +191D..191E ; valid # 7.0 LIMBU LETTER GYAN..LIMBU LETTER TRA +191F ; disallowed # NA +1920..192B ; valid # 4.0 LIMBU VOWEL SIGN A..LIMBU SUBJOINED LETTER WA +192C..192F ; disallowed # NA .. +1930..193B ; valid # 4.0 LIMBU SMALL LETTER KA..LIMBU SIGN SA-I +193C..193F ; disallowed # NA .. +1940 ; valid ; ; NV8 # 4.0 LIMBU SIGN LOO +1941..1943 ; disallowed # NA .. +1944..1945 ; valid ; ; NV8 # 4.0 LIMBU EXCLAMATION MARK..LIMBU QUESTION MARK +1946..196D ; valid # 4.0 LIMBU DIGIT ZERO..TAI LE LETTER AI +196E..196F ; disallowed # NA .. +1970..1974 ; valid # 4.0 TAI LE LETTER TONE-2..TAI LE LETTER TONE-6 +1975..197F ; disallowed # NA .. +1980..19A9 ; valid # 4.1 NEW TAI LUE LETTER HIGH QA..NEW TAI LUE LETTER LOW XVA +19AA..19AB ; valid # 5.2 NEW TAI LUE LETTER HIGH SUA..NEW TAI LUE LETTER LOW SUA +19AC..19AF ; disallowed # NA .. +19B0..19C9 ; valid # 4.1 NEW TAI LUE VOWEL SIGN VOWEL SHORTENER..NEW TAI LUE TONE MARK-2 +19CA..19CF ; disallowed # NA .. 
+19D0..19D9 ; valid # 4.1 NEW TAI LUE DIGIT ZERO..NEW TAI LUE DIGIT NINE +19DA ; valid ; ; XV8 # 5.2 NEW TAI LUE THAM DIGIT ONE +19DB..19DD ; disallowed # NA .. +19DE..19DF ; valid ; ; NV8 # 4.1 NEW TAI LUE SIGN LAE..NEW TAI LUE SIGN LAEV +19E0..19FF ; valid ; ; NV8 # 4.0 KHMER SYMBOL PATHAMASAT..KHMER SYMBOL DAP-PRAM ROC +1A00..1A1B ; valid # 4.1 BUGINESE LETTER KA..BUGINESE VOWEL SIGN AE +1A1C..1A1D ; disallowed # NA .. +1A1E..1A1F ; valid ; ; NV8 # 4.1 BUGINESE PALLAWA..BUGINESE END OF SECTION +1A20..1A5E ; valid # 5.2 TAI THAM LETTER HIGH KA..TAI THAM CONSONANT SIGN SA +1A5F ; disallowed # NA +1A60..1A7C ; valid # 5.2 TAI THAM SIGN SAKOT..TAI THAM SIGN KHUEN-LUE KARAN +1A7D..1A7E ; disallowed # NA .. +1A7F..1A89 ; valid # 5.2 TAI THAM COMBINING CRYPTOGRAMMIC DOT..TAI THAM HORA DIGIT NINE +1A8A..1A8F ; disallowed # NA .. +1A90..1A99 ; valid # 5.2 TAI THAM THAM DIGIT ZERO..TAI THAM THAM DIGIT NINE +1A9A..1A9F ; disallowed # NA .. +1AA0..1AA6 ; valid ; ; NV8 # 5.2 TAI THAM SIGN WIANG..TAI THAM SIGN REVERSED ROTATED RANA +1AA7 ; valid # 5.2 TAI THAM SIGN MAI YAMOK +1AA8..1AAD ; valid ; ; NV8 # 5.2 TAI THAM SIGN KAAN..TAI THAM SIGN CAANG +1AAE..1AAF ; disallowed # NA .. +1AB0..1ABD ; valid # 7.0 COMBINING DOUBLED CIRCUMFLEX ACCENT..COMBINING PARENTHESES BELOW +1ABE ; valid ; ; NV8 # 7.0 COMBINING PARENTHESES OVERLAY +1ABF..1AFF ; disallowed # NA .. +1B00..1B4B ; valid # 5.0 BALINESE SIGN ULU RICEM..BALINESE LETTER ASYURA SASAK +1B4C..1B4F ; disallowed # NA .. +1B50..1B59 ; valid # 5.0 BALINESE DIGIT ZERO..BALINESE DIGIT NINE +1B5A..1B6A ; valid ; ; NV8 # 5.0 BALINESE PANTI..BALINESE MUSICAL SYMBOL DANG GEDE +1B6B..1B73 ; valid # 5.0 BALINESE MUSICAL SYMBOL COMBINING TEGEH..BALINESE MUSICAL SYMBOL COMBINING GONG +1B74..1B7C ; valid ; ; NV8 # 5.0 BALINESE MUSICAL SYMBOL RIGHT-HAND OPEN DUG..BALINESE MUSICAL SYMBOL LEFT-HAND OPEN PING +1B7D..1B7F ; disallowed # NA .. +1B80..1BAA ; valid # 5.1 SUNDANESE SIGN PANYECEK..SUNDANESE SIGN PAMAAEH +1BAB..1BAD ; valid # 6.1 SUNDANESE SIGN VIRAMA..SUNDANESE CONSONANT SIGN PASANGAN WA +1BAE..1BB9 ; valid # 5.1 SUNDANESE LETTER KHA..SUNDANESE DIGIT NINE +1BBA..1BBF ; valid # 6.1 SUNDANESE AVAGRAHA..SUNDANESE LETTER FINAL M +1BC0..1BF3 ; valid # 6.0 BATAK LETTER A..BATAK PANONGONAN +1BF4..1BFB ; disallowed # NA .. +1BFC..1BFF ; valid ; ; NV8 # 6.0 BATAK SYMBOL BINDU NA METEK..BATAK SYMBOL BINDU PANGOLAT +1C00..1C37 ; valid # 5.1 LEPCHA LETTER KA..LEPCHA SIGN NUKTA +1C38..1C3A ; disallowed # NA .. +1C3B..1C3F ; valid ; ; NV8 # 5.1 LEPCHA PUNCTUATION TA-ROL..LEPCHA PUNCTUATION TSHOOK +1C40..1C49 ; valid # 5.1 LEPCHA DIGIT ZERO..LEPCHA DIGIT NINE +1C4A..1C4C ; disallowed # NA .. +1C4D..1C7D ; valid # 5.1 LEPCHA LETTER TTA..OL CHIKI AHAD +1C7E..1C7F ; valid ; ; NV8 # 5.1 OL CHIKI PUNCTUATION MUCAAD..OL CHIKI PUNCTUATION DOUBLE MUCAAD +1C80 ; mapped ; 0432 # 9.0 CYRILLIC SMALL LETTER ROUNDED VE +1C81 ; mapped ; 0434 # 9.0 CYRILLIC SMALL LETTER LONG-LEGGED DE +1C82 ; mapped ; 043E # 9.0 CYRILLIC SMALL LETTER NARROW O +1C83 ; mapped ; 0441 # 9.0 CYRILLIC SMALL LETTER WIDE ES +1C84..1C85 ; mapped ; 0442 # 9.0 CYRILLIC SMALL LETTER TALL TE..CYRILLIC SMALL LETTER THREE-LEGGED TE +1C86 ; mapped ; 044A # 9.0 CYRILLIC SMALL LETTER TALL HARD SIGN +1C87 ; mapped ; 0463 # 9.0 CYRILLIC SMALL LETTER TALL YAT +1C88 ; mapped ; A64B # 9.0 CYRILLIC SMALL LETTER UNBLENDED UK +1C89..1CBF ; disallowed # NA .. +1CC0..1CC7 ; valid ; ; NV8 # 6.1 SUNDANESE PUNCTUATION BINDU SURYA..SUNDANESE PUNCTUATION BINDU BA SATANGA +1CC8..1CCF ; disallowed # NA .. 
+1CD0..1CD2 ; valid # 5.2 VEDIC TONE KARSHANA..VEDIC TONE PRENKHA +1CD3 ; valid ; ; NV8 # 5.2 VEDIC SIGN NIHSHVASA +1CD4..1CF2 ; valid # 5.2 VEDIC SIGN YAJURVEDIC MIDLINE SVARITA..VEDIC SIGN ARDHAVISARGA +1CF3..1CF6 ; valid # 6.1 VEDIC SIGN ROTATED ARDHAVISARGA..VEDIC SIGN UPADHMANIYA +1CF7 ; valid # 10.0 VEDIC SIGN ATIKRAMA +1CF8..1CF9 ; valid # 7.0 VEDIC TONE RING ABOVE..VEDIC TONE DOUBLE RING ABOVE +1CFA..1CFF ; disallowed # NA .. +1D00..1D2B ; valid # 4.0 LATIN LETTER SMALL CAPITAL A..CYRILLIC LETTER SMALL CAPITAL EL +1D2C ; mapped ; 0061 # 4.0 MODIFIER LETTER CAPITAL A +1D2D ; mapped ; 00E6 # 4.0 MODIFIER LETTER CAPITAL AE +1D2E ; mapped ; 0062 # 4.0 MODIFIER LETTER CAPITAL B +1D2F ; valid # 4.0 MODIFIER LETTER CAPITAL BARRED B +1D30 ; mapped ; 0064 # 4.0 MODIFIER LETTER CAPITAL D +1D31 ; mapped ; 0065 # 4.0 MODIFIER LETTER CAPITAL E +1D32 ; mapped ; 01DD # 4.0 MODIFIER LETTER CAPITAL REVERSED E +1D33 ; mapped ; 0067 # 4.0 MODIFIER LETTER CAPITAL G +1D34 ; mapped ; 0068 # 4.0 MODIFIER LETTER CAPITAL H +1D35 ; mapped ; 0069 # 4.0 MODIFIER LETTER CAPITAL I +1D36 ; mapped ; 006A # 4.0 MODIFIER LETTER CAPITAL J +1D37 ; mapped ; 006B # 4.0 MODIFIER LETTER CAPITAL K +1D38 ; mapped ; 006C # 4.0 MODIFIER LETTER CAPITAL L +1D39 ; mapped ; 006D # 4.0 MODIFIER LETTER CAPITAL M +1D3A ; mapped ; 006E # 4.0 MODIFIER LETTER CAPITAL N +1D3B ; valid # 4.0 MODIFIER LETTER CAPITAL REVERSED N +1D3C ; mapped ; 006F # 4.0 MODIFIER LETTER CAPITAL O +1D3D ; mapped ; 0223 # 4.0 MODIFIER LETTER CAPITAL OU +1D3E ; mapped ; 0070 # 4.0 MODIFIER LETTER CAPITAL P +1D3F ; mapped ; 0072 # 4.0 MODIFIER LETTER CAPITAL R +1D40 ; mapped ; 0074 # 4.0 MODIFIER LETTER CAPITAL T +1D41 ; mapped ; 0075 # 4.0 MODIFIER LETTER CAPITAL U +1D42 ; mapped ; 0077 # 4.0 MODIFIER LETTER CAPITAL W +1D43 ; mapped ; 0061 # 4.0 MODIFIER LETTER SMALL A +1D44 ; mapped ; 0250 # 4.0 MODIFIER LETTER SMALL TURNED A +1D45 ; mapped ; 0251 # 4.0 MODIFIER LETTER SMALL ALPHA +1D46 ; mapped ; 1D02 # 4.0 MODIFIER LETTER SMALL TURNED AE +1D47 ; mapped ; 0062 # 4.0 MODIFIER LETTER SMALL B +1D48 ; mapped ; 0064 # 4.0 MODIFIER LETTER SMALL D +1D49 ; mapped ; 0065 # 4.0 MODIFIER LETTER SMALL E +1D4A ; mapped ; 0259 # 4.0 MODIFIER LETTER SMALL SCHWA +1D4B ; mapped ; 025B # 4.0 MODIFIER LETTER SMALL OPEN E +1D4C ; mapped ; 025C # 4.0 MODIFIER LETTER SMALL TURNED OPEN E +1D4D ; mapped ; 0067 # 4.0 MODIFIER LETTER SMALL G +1D4E ; valid # 4.0 MODIFIER LETTER SMALL TURNED I +1D4F ; mapped ; 006B # 4.0 MODIFIER LETTER SMALL K +1D50 ; mapped ; 006D # 4.0 MODIFIER LETTER SMALL M +1D51 ; mapped ; 014B # 4.0 MODIFIER LETTER SMALL ENG +1D52 ; mapped ; 006F # 4.0 MODIFIER LETTER SMALL O +1D53 ; mapped ; 0254 # 4.0 MODIFIER LETTER SMALL OPEN O +1D54 ; mapped ; 1D16 # 4.0 MODIFIER LETTER SMALL TOP HALF O +1D55 ; mapped ; 1D17 # 4.0 MODIFIER LETTER SMALL BOTTOM HALF O +1D56 ; mapped ; 0070 # 4.0 MODIFIER LETTER SMALL P +1D57 ; mapped ; 0074 # 4.0 MODIFIER LETTER SMALL T +1D58 ; mapped ; 0075 # 4.0 MODIFIER LETTER SMALL U +1D59 ; mapped ; 1D1D # 4.0 MODIFIER LETTER SMALL SIDEWAYS U +1D5A ; mapped ; 026F # 4.0 MODIFIER LETTER SMALL TURNED M +1D5B ; mapped ; 0076 # 4.0 MODIFIER LETTER SMALL V +1D5C ; mapped ; 1D25 # 4.0 MODIFIER LETTER SMALL AIN +1D5D ; mapped ; 03B2 # 4.0 MODIFIER LETTER SMALL BETA +1D5E ; mapped ; 03B3 # 4.0 MODIFIER LETTER SMALL GREEK GAMMA +1D5F ; mapped ; 03B4 # 4.0 MODIFIER LETTER SMALL DELTA +1D60 ; mapped ; 03C6 # 4.0 MODIFIER LETTER SMALL GREEK PHI +1D61 ; mapped ; 03C7 # 4.0 MODIFIER LETTER SMALL CHI +1D62 ; mapped ; 0069 # 4.0 LATIN SUBSCRIPT SMALL 
LETTER I +1D63 ; mapped ; 0072 # 4.0 LATIN SUBSCRIPT SMALL LETTER R +1D64 ; mapped ; 0075 # 4.0 LATIN SUBSCRIPT SMALL LETTER U +1D65 ; mapped ; 0076 # 4.0 LATIN SUBSCRIPT SMALL LETTER V +1D66 ; mapped ; 03B2 # 4.0 GREEK SUBSCRIPT SMALL LETTER BETA +1D67 ; mapped ; 03B3 # 4.0 GREEK SUBSCRIPT SMALL LETTER GAMMA +1D68 ; mapped ; 03C1 # 4.0 GREEK SUBSCRIPT SMALL LETTER RHO +1D69 ; mapped ; 03C6 # 4.0 GREEK SUBSCRIPT SMALL LETTER PHI +1D6A ; mapped ; 03C7 # 4.0 GREEK SUBSCRIPT SMALL LETTER CHI +1D6B ; valid # 4.0 LATIN SMALL LETTER UE +1D6C..1D77 ; valid # 4.1 LATIN SMALL LETTER B WITH MIDDLE TILDE..LATIN SMALL LETTER TURNED G +1D78 ; mapped ; 043D # 4.1 MODIFIER LETTER CYRILLIC EN +1D79..1D9A ; valid # 4.1 LATIN SMALL LETTER INSULAR G..LATIN SMALL LETTER EZH WITH RETROFLEX HOOK +1D9B ; mapped ; 0252 # 4.1 MODIFIER LETTER SMALL TURNED ALPHA +1D9C ; mapped ; 0063 # 4.1 MODIFIER LETTER SMALL C +1D9D ; mapped ; 0255 # 4.1 MODIFIER LETTER SMALL C WITH CURL +1D9E ; mapped ; 00F0 # 4.1 MODIFIER LETTER SMALL ETH +1D9F ; mapped ; 025C # 4.1 MODIFIER LETTER SMALL REVERSED OPEN E +1DA0 ; mapped ; 0066 # 4.1 MODIFIER LETTER SMALL F +1DA1 ; mapped ; 025F # 4.1 MODIFIER LETTER SMALL DOTLESS J WITH STROKE +1DA2 ; mapped ; 0261 # 4.1 MODIFIER LETTER SMALL SCRIPT G +1DA3 ; mapped ; 0265 # 4.1 MODIFIER LETTER SMALL TURNED H +1DA4 ; mapped ; 0268 # 4.1 MODIFIER LETTER SMALL I WITH STROKE +1DA5 ; mapped ; 0269 # 4.1 MODIFIER LETTER SMALL IOTA +1DA6 ; mapped ; 026A # 4.1 MODIFIER LETTER SMALL CAPITAL I +1DA7 ; mapped ; 1D7B # 4.1 MODIFIER LETTER SMALL CAPITAL I WITH STROKE +1DA8 ; mapped ; 029D # 4.1 MODIFIER LETTER SMALL J WITH CROSSED-TAIL +1DA9 ; mapped ; 026D # 4.1 MODIFIER LETTER SMALL L WITH RETROFLEX HOOK +1DAA ; mapped ; 1D85 # 4.1 MODIFIER LETTER SMALL L WITH PALATAL HOOK +1DAB ; mapped ; 029F # 4.1 MODIFIER LETTER SMALL CAPITAL L +1DAC ; mapped ; 0271 # 4.1 MODIFIER LETTER SMALL M WITH HOOK +1DAD ; mapped ; 0270 # 4.1 MODIFIER LETTER SMALL TURNED M WITH LONG LEG +1DAE ; mapped ; 0272 # 4.1 MODIFIER LETTER SMALL N WITH LEFT HOOK +1DAF ; mapped ; 0273 # 4.1 MODIFIER LETTER SMALL N WITH RETROFLEX HOOK +1DB0 ; mapped ; 0274 # 4.1 MODIFIER LETTER SMALL CAPITAL N +1DB1 ; mapped ; 0275 # 4.1 MODIFIER LETTER SMALL BARRED O +1DB2 ; mapped ; 0278 # 4.1 MODIFIER LETTER SMALL PHI +1DB3 ; mapped ; 0282 # 4.1 MODIFIER LETTER SMALL S WITH HOOK +1DB4 ; mapped ; 0283 # 4.1 MODIFIER LETTER SMALL ESH +1DB5 ; mapped ; 01AB # 4.1 MODIFIER LETTER SMALL T WITH PALATAL HOOK +1DB6 ; mapped ; 0289 # 4.1 MODIFIER LETTER SMALL U BAR +1DB7 ; mapped ; 028A # 4.1 MODIFIER LETTER SMALL UPSILON +1DB8 ; mapped ; 1D1C # 4.1 MODIFIER LETTER SMALL CAPITAL U +1DB9 ; mapped ; 028B # 4.1 MODIFIER LETTER SMALL V WITH HOOK +1DBA ; mapped ; 028C # 4.1 MODIFIER LETTER SMALL TURNED V +1DBB ; mapped ; 007A # 4.1 MODIFIER LETTER SMALL Z +1DBC ; mapped ; 0290 # 4.1 MODIFIER LETTER SMALL Z WITH RETROFLEX HOOK +1DBD ; mapped ; 0291 # 4.1 MODIFIER LETTER SMALL Z WITH CURL +1DBE ; mapped ; 0292 # 4.1 MODIFIER LETTER SMALL EZH +1DBF ; mapped ; 03B8 # 4.1 MODIFIER LETTER SMALL THETA +1DC0..1DC3 ; valid # 4.1 COMBINING DOTTED GRAVE ACCENT..COMBINING SUSPENSION MARK +1DC4..1DCA ; valid # 5.0 COMBINING MACRON-ACUTE..COMBINING LATIN SMALL LETTER R BELOW +1DCB..1DE6 ; valid # 5.1 COMBINING BREVE-MACRON..COMBINING LATIN SMALL LETTER Z +1DE7..1DF5 ; valid # 7.0 COMBINING LATIN SMALL LETTER ALPHA..COMBINING UP TACK ABOVE +1DF6..1DF9 ; valid # 10.0 COMBINING KAVYKA ABOVE RIGHT..COMBINING WIDE INVERTED BRIDGE BELOW +1DFA ; disallowed # NA +1DFB ; valid # 9.0 
COMBINING DELETION MARK +1DFC ; valid # 6.0 COMBINING DOUBLE INVERTED BREVE BELOW +1DFD ; valid # 5.2 COMBINING ALMOST EQUAL TO BELOW +1DFE..1DFF ; valid # 5.0 COMBINING LEFT ARROWHEAD ABOVE..COMBINING RIGHT ARROWHEAD AND DOWN ARROWHEAD BELOW +1E00 ; mapped ; 1E01 # 1.1 LATIN CAPITAL LETTER A WITH RING BELOW +1E01 ; valid # 1.1 LATIN SMALL LETTER A WITH RING BELOW +1E02 ; mapped ; 1E03 # 1.1 LATIN CAPITAL LETTER B WITH DOT ABOVE +1E03 ; valid # 1.1 LATIN SMALL LETTER B WITH DOT ABOVE +1E04 ; mapped ; 1E05 # 1.1 LATIN CAPITAL LETTER B WITH DOT BELOW +1E05 ; valid # 1.1 LATIN SMALL LETTER B WITH DOT BELOW +1E06 ; mapped ; 1E07 # 1.1 LATIN CAPITAL LETTER B WITH LINE BELOW +1E07 ; valid # 1.1 LATIN SMALL LETTER B WITH LINE BELOW +1E08 ; mapped ; 1E09 # 1.1 LATIN CAPITAL LETTER C WITH CEDILLA AND ACUTE +1E09 ; valid # 1.1 LATIN SMALL LETTER C WITH CEDILLA AND ACUTE +1E0A ; mapped ; 1E0B # 1.1 LATIN CAPITAL LETTER D WITH DOT ABOVE +1E0B ; valid # 1.1 LATIN SMALL LETTER D WITH DOT ABOVE +1E0C ; mapped ; 1E0D # 1.1 LATIN CAPITAL LETTER D WITH DOT BELOW +1E0D ; valid # 1.1 LATIN SMALL LETTER D WITH DOT BELOW +1E0E ; mapped ; 1E0F # 1.1 LATIN CAPITAL LETTER D WITH LINE BELOW +1E0F ; valid # 1.1 LATIN SMALL LETTER D WITH LINE BELOW +1E10 ; mapped ; 1E11 # 1.1 LATIN CAPITAL LETTER D WITH CEDILLA +1E11 ; valid # 1.1 LATIN SMALL LETTER D WITH CEDILLA +1E12 ; mapped ; 1E13 # 1.1 LATIN CAPITAL LETTER D WITH CIRCUMFLEX BELOW +1E13 ; valid # 1.1 LATIN SMALL LETTER D WITH CIRCUMFLEX BELOW +1E14 ; mapped ; 1E15 # 1.1 LATIN CAPITAL LETTER E WITH MACRON AND GRAVE +1E15 ; valid # 1.1 LATIN SMALL LETTER E WITH MACRON AND GRAVE +1E16 ; mapped ; 1E17 # 1.1 LATIN CAPITAL LETTER E WITH MACRON AND ACUTE +1E17 ; valid # 1.1 LATIN SMALL LETTER E WITH MACRON AND ACUTE +1E18 ; mapped ; 1E19 # 1.1 LATIN CAPITAL LETTER E WITH CIRCUMFLEX BELOW +1E19 ; valid # 1.1 LATIN SMALL LETTER E WITH CIRCUMFLEX BELOW +1E1A ; mapped ; 1E1B # 1.1 LATIN CAPITAL LETTER E WITH TILDE BELOW +1E1B ; valid # 1.1 LATIN SMALL LETTER E WITH TILDE BELOW +1E1C ; mapped ; 1E1D # 1.1 LATIN CAPITAL LETTER E WITH CEDILLA AND BREVE +1E1D ; valid # 1.1 LATIN SMALL LETTER E WITH CEDILLA AND BREVE +1E1E ; mapped ; 1E1F # 1.1 LATIN CAPITAL LETTER F WITH DOT ABOVE +1E1F ; valid # 1.1 LATIN SMALL LETTER F WITH DOT ABOVE +1E20 ; mapped ; 1E21 # 1.1 LATIN CAPITAL LETTER G WITH MACRON +1E21 ; valid # 1.1 LATIN SMALL LETTER G WITH MACRON +1E22 ; mapped ; 1E23 # 1.1 LATIN CAPITAL LETTER H WITH DOT ABOVE +1E23 ; valid # 1.1 LATIN SMALL LETTER H WITH DOT ABOVE +1E24 ; mapped ; 1E25 # 1.1 LATIN CAPITAL LETTER H WITH DOT BELOW +1E25 ; valid # 1.1 LATIN SMALL LETTER H WITH DOT BELOW +1E26 ; mapped ; 1E27 # 1.1 LATIN CAPITAL LETTER H WITH DIAERESIS +1E27 ; valid # 1.1 LATIN SMALL LETTER H WITH DIAERESIS +1E28 ; mapped ; 1E29 # 1.1 LATIN CAPITAL LETTER H WITH CEDILLA +1E29 ; valid # 1.1 LATIN SMALL LETTER H WITH CEDILLA +1E2A ; mapped ; 1E2B # 1.1 LATIN CAPITAL LETTER H WITH BREVE BELOW +1E2B ; valid # 1.1 LATIN SMALL LETTER H WITH BREVE BELOW +1E2C ; mapped ; 1E2D # 1.1 LATIN CAPITAL LETTER I WITH TILDE BELOW +1E2D ; valid # 1.1 LATIN SMALL LETTER I WITH TILDE BELOW +1E2E ; mapped ; 1E2F # 1.1 LATIN CAPITAL LETTER I WITH DIAERESIS AND ACUTE +1E2F ; valid # 1.1 LATIN SMALL LETTER I WITH DIAERESIS AND ACUTE +1E30 ; mapped ; 1E31 # 1.1 LATIN CAPITAL LETTER K WITH ACUTE +1E31 ; valid # 1.1 LATIN SMALL LETTER K WITH ACUTE +1E32 ; mapped ; 1E33 # 1.1 LATIN CAPITAL LETTER K WITH DOT BELOW +1E33 ; valid # 1.1 LATIN SMALL LETTER K WITH DOT BELOW +1E34 ; mapped ; 1E35 # 1.1 LATIN 
CAPITAL LETTER K WITH LINE BELOW +1E35 ; valid # 1.1 LATIN SMALL LETTER K WITH LINE BELOW +1E36 ; mapped ; 1E37 # 1.1 LATIN CAPITAL LETTER L WITH DOT BELOW +1E37 ; valid # 1.1 LATIN SMALL LETTER L WITH DOT BELOW +1E38 ; mapped ; 1E39 # 1.1 LATIN CAPITAL LETTER L WITH DOT BELOW AND MACRON +1E39 ; valid # 1.1 LATIN SMALL LETTER L WITH DOT BELOW AND MACRON +1E3A ; mapped ; 1E3B # 1.1 LATIN CAPITAL LETTER L WITH LINE BELOW +1E3B ; valid # 1.1 LATIN SMALL LETTER L WITH LINE BELOW +1E3C ; mapped ; 1E3D # 1.1 LATIN CAPITAL LETTER L WITH CIRCUMFLEX BELOW +1E3D ; valid # 1.1 LATIN SMALL LETTER L WITH CIRCUMFLEX BELOW +1E3E ; mapped ; 1E3F # 1.1 LATIN CAPITAL LETTER M WITH ACUTE +1E3F ; valid # 1.1 LATIN SMALL LETTER M WITH ACUTE +1E40 ; mapped ; 1E41 # 1.1 LATIN CAPITAL LETTER M WITH DOT ABOVE +1E41 ; valid # 1.1 LATIN SMALL LETTER M WITH DOT ABOVE +1E42 ; mapped ; 1E43 # 1.1 LATIN CAPITAL LETTER M WITH DOT BELOW +1E43 ; valid # 1.1 LATIN SMALL LETTER M WITH DOT BELOW +1E44 ; mapped ; 1E45 # 1.1 LATIN CAPITAL LETTER N WITH DOT ABOVE +1E45 ; valid # 1.1 LATIN SMALL LETTER N WITH DOT ABOVE +1E46 ; mapped ; 1E47 # 1.1 LATIN CAPITAL LETTER N WITH DOT BELOW +1E47 ; valid # 1.1 LATIN SMALL LETTER N WITH DOT BELOW +1E48 ; mapped ; 1E49 # 1.1 LATIN CAPITAL LETTER N WITH LINE BELOW +1E49 ; valid # 1.1 LATIN SMALL LETTER N WITH LINE BELOW +1E4A ; mapped ; 1E4B # 1.1 LATIN CAPITAL LETTER N WITH CIRCUMFLEX BELOW +1E4B ; valid # 1.1 LATIN SMALL LETTER N WITH CIRCUMFLEX BELOW +1E4C ; mapped ; 1E4D # 1.1 LATIN CAPITAL LETTER O WITH TILDE AND ACUTE +1E4D ; valid # 1.1 LATIN SMALL LETTER O WITH TILDE AND ACUTE +1E4E ; mapped ; 1E4F # 1.1 LATIN CAPITAL LETTER O WITH TILDE AND DIAERESIS +1E4F ; valid # 1.1 LATIN SMALL LETTER O WITH TILDE AND DIAERESIS +1E50 ; mapped ; 1E51 # 1.1 LATIN CAPITAL LETTER O WITH MACRON AND GRAVE +1E51 ; valid # 1.1 LATIN SMALL LETTER O WITH MACRON AND GRAVE +1E52 ; mapped ; 1E53 # 1.1 LATIN CAPITAL LETTER O WITH MACRON AND ACUTE +1E53 ; valid # 1.1 LATIN SMALL LETTER O WITH MACRON AND ACUTE +1E54 ; mapped ; 1E55 # 1.1 LATIN CAPITAL LETTER P WITH ACUTE +1E55 ; valid # 1.1 LATIN SMALL LETTER P WITH ACUTE +1E56 ; mapped ; 1E57 # 1.1 LATIN CAPITAL LETTER P WITH DOT ABOVE +1E57 ; valid # 1.1 LATIN SMALL LETTER P WITH DOT ABOVE +1E58 ; mapped ; 1E59 # 1.1 LATIN CAPITAL LETTER R WITH DOT ABOVE +1E59 ; valid # 1.1 LATIN SMALL LETTER R WITH DOT ABOVE +1E5A ; mapped ; 1E5B # 1.1 LATIN CAPITAL LETTER R WITH DOT BELOW +1E5B ; valid # 1.1 LATIN SMALL LETTER R WITH DOT BELOW +1E5C ; mapped ; 1E5D # 1.1 LATIN CAPITAL LETTER R WITH DOT BELOW AND MACRON +1E5D ; valid # 1.1 LATIN SMALL LETTER R WITH DOT BELOW AND MACRON +1E5E ; mapped ; 1E5F # 1.1 LATIN CAPITAL LETTER R WITH LINE BELOW +1E5F ; valid # 1.1 LATIN SMALL LETTER R WITH LINE BELOW +1E60 ; mapped ; 1E61 # 1.1 LATIN CAPITAL LETTER S WITH DOT ABOVE +1E61 ; valid # 1.1 LATIN SMALL LETTER S WITH DOT ABOVE +1E62 ; mapped ; 1E63 # 1.1 LATIN CAPITAL LETTER S WITH DOT BELOW +1E63 ; valid # 1.1 LATIN SMALL LETTER S WITH DOT BELOW +1E64 ; mapped ; 1E65 # 1.1 LATIN CAPITAL LETTER S WITH ACUTE AND DOT ABOVE +1E65 ; valid # 1.1 LATIN SMALL LETTER S WITH ACUTE AND DOT ABOVE +1E66 ; mapped ; 1E67 # 1.1 LATIN CAPITAL LETTER S WITH CARON AND DOT ABOVE +1E67 ; valid # 1.1 LATIN SMALL LETTER S WITH CARON AND DOT ABOVE +1E68 ; mapped ; 1E69 # 1.1 LATIN CAPITAL LETTER S WITH DOT BELOW AND DOT ABOVE +1E69 ; valid # 1.1 LATIN SMALL LETTER S WITH DOT BELOW AND DOT ABOVE +1E6A ; mapped ; 1E6B # 1.1 LATIN CAPITAL LETTER T WITH DOT ABOVE +1E6B ; valid # 1.1 LATIN SMALL 
LETTER T WITH DOT ABOVE +1E6C ; mapped ; 1E6D # 1.1 LATIN CAPITAL LETTER T WITH DOT BELOW +1E6D ; valid # 1.1 LATIN SMALL LETTER T WITH DOT BELOW +1E6E ; mapped ; 1E6F # 1.1 LATIN CAPITAL LETTER T WITH LINE BELOW +1E6F ; valid # 1.1 LATIN SMALL LETTER T WITH LINE BELOW +1E70 ; mapped ; 1E71 # 1.1 LATIN CAPITAL LETTER T WITH CIRCUMFLEX BELOW +1E71 ; valid # 1.1 LATIN SMALL LETTER T WITH CIRCUMFLEX BELOW +1E72 ; mapped ; 1E73 # 1.1 LATIN CAPITAL LETTER U WITH DIAERESIS BELOW +1E73 ; valid # 1.1 LATIN SMALL LETTER U WITH DIAERESIS BELOW +1E74 ; mapped ; 1E75 # 1.1 LATIN CAPITAL LETTER U WITH TILDE BELOW +1E75 ; valid # 1.1 LATIN SMALL LETTER U WITH TILDE BELOW +1E76 ; mapped ; 1E77 # 1.1 LATIN CAPITAL LETTER U WITH CIRCUMFLEX BELOW +1E77 ; valid # 1.1 LATIN SMALL LETTER U WITH CIRCUMFLEX BELOW +1E78 ; mapped ; 1E79 # 1.1 LATIN CAPITAL LETTER U WITH TILDE AND ACUTE +1E79 ; valid # 1.1 LATIN SMALL LETTER U WITH TILDE AND ACUTE +1E7A ; mapped ; 1E7B # 1.1 LATIN CAPITAL LETTER U WITH MACRON AND DIAERESIS +1E7B ; valid # 1.1 LATIN SMALL LETTER U WITH MACRON AND DIAERESIS +1E7C ; mapped ; 1E7D # 1.1 LATIN CAPITAL LETTER V WITH TILDE +1E7D ; valid # 1.1 LATIN SMALL LETTER V WITH TILDE +1E7E ; mapped ; 1E7F # 1.1 LATIN CAPITAL LETTER V WITH DOT BELOW +1E7F ; valid # 1.1 LATIN SMALL LETTER V WITH DOT BELOW +1E80 ; mapped ; 1E81 # 1.1 LATIN CAPITAL LETTER W WITH GRAVE +1E81 ; valid # 1.1 LATIN SMALL LETTER W WITH GRAVE +1E82 ; mapped ; 1E83 # 1.1 LATIN CAPITAL LETTER W WITH ACUTE +1E83 ; valid # 1.1 LATIN SMALL LETTER W WITH ACUTE +1E84 ; mapped ; 1E85 # 1.1 LATIN CAPITAL LETTER W WITH DIAERESIS +1E85 ; valid # 1.1 LATIN SMALL LETTER W WITH DIAERESIS +1E86 ; mapped ; 1E87 # 1.1 LATIN CAPITAL LETTER W WITH DOT ABOVE +1E87 ; valid # 1.1 LATIN SMALL LETTER W WITH DOT ABOVE +1E88 ; mapped ; 1E89 # 1.1 LATIN CAPITAL LETTER W WITH DOT BELOW +1E89 ; valid # 1.1 LATIN SMALL LETTER W WITH DOT BELOW +1E8A ; mapped ; 1E8B # 1.1 LATIN CAPITAL LETTER X WITH DOT ABOVE +1E8B ; valid # 1.1 LATIN SMALL LETTER X WITH DOT ABOVE +1E8C ; mapped ; 1E8D # 1.1 LATIN CAPITAL LETTER X WITH DIAERESIS +1E8D ; valid # 1.1 LATIN SMALL LETTER X WITH DIAERESIS +1E8E ; mapped ; 1E8F # 1.1 LATIN CAPITAL LETTER Y WITH DOT ABOVE +1E8F ; valid # 1.1 LATIN SMALL LETTER Y WITH DOT ABOVE +1E90 ; mapped ; 1E91 # 1.1 LATIN CAPITAL LETTER Z WITH CIRCUMFLEX +1E91 ; valid # 1.1 LATIN SMALL LETTER Z WITH CIRCUMFLEX +1E92 ; mapped ; 1E93 # 1.1 LATIN CAPITAL LETTER Z WITH DOT BELOW +1E93 ; valid # 1.1 LATIN SMALL LETTER Z WITH DOT BELOW +1E94 ; mapped ; 1E95 # 1.1 LATIN CAPITAL LETTER Z WITH LINE BELOW +1E95..1E99 ; valid # 1.1 LATIN SMALL LETTER Z WITH LINE BELOW..LATIN SMALL LETTER Y WITH RING ABOVE +1E9A ; mapped ; 0061 02BE # 1.1 LATIN SMALL LETTER A WITH RIGHT HALF RING +1E9B ; mapped ; 1E61 # 2.0 LATIN SMALL LETTER LONG S WITH DOT ABOVE +1E9C..1E9D ; valid # 5.1 LATIN SMALL LETTER LONG S WITH DIAGONAL STROKE..LATIN SMALL LETTER LONG S WITH HIGH STROKE +1E9E ; mapped ; 0073 0073 # 5.1 LATIN CAPITAL LETTER SHARP S +1E9F ; valid # 5.1 LATIN SMALL LETTER DELTA +1EA0 ; mapped ; 1EA1 # 1.1 LATIN CAPITAL LETTER A WITH DOT BELOW +1EA1 ; valid # 1.1 LATIN SMALL LETTER A WITH DOT BELOW +1EA2 ; mapped ; 1EA3 # 1.1 LATIN CAPITAL LETTER A WITH HOOK ABOVE +1EA3 ; valid # 1.1 LATIN SMALL LETTER A WITH HOOK ABOVE +1EA4 ; mapped ; 1EA5 # 1.1 LATIN CAPITAL LETTER A WITH CIRCUMFLEX AND ACUTE +1EA5 ; valid # 1.1 LATIN SMALL LETTER A WITH CIRCUMFLEX AND ACUTE +1EA6 ; mapped ; 1EA7 # 1.1 LATIN CAPITAL LETTER A WITH CIRCUMFLEX AND GRAVE +1EA7 ; valid # 1.1 LATIN 
SMALL LETTER A WITH CIRCUMFLEX AND GRAVE +1EA8 ; mapped ; 1EA9 # 1.1 LATIN CAPITAL LETTER A WITH CIRCUMFLEX AND HOOK ABOVE +1EA9 ; valid # 1.1 LATIN SMALL LETTER A WITH CIRCUMFLEX AND HOOK ABOVE +1EAA ; mapped ; 1EAB # 1.1 LATIN CAPITAL LETTER A WITH CIRCUMFLEX AND TILDE +1EAB ; valid # 1.1 LATIN SMALL LETTER A WITH CIRCUMFLEX AND TILDE +1EAC ; mapped ; 1EAD # 1.1 LATIN CAPITAL LETTER A WITH CIRCUMFLEX AND DOT BELOW +1EAD ; valid # 1.1 LATIN SMALL LETTER A WITH CIRCUMFLEX AND DOT BELOW +1EAE ; mapped ; 1EAF # 1.1 LATIN CAPITAL LETTER A WITH BREVE AND ACUTE +1EAF ; valid # 1.1 LATIN SMALL LETTER A WITH BREVE AND ACUTE +1EB0 ; mapped ; 1EB1 # 1.1 LATIN CAPITAL LETTER A WITH BREVE AND GRAVE +1EB1 ; valid # 1.1 LATIN SMALL LETTER A WITH BREVE AND GRAVE +1EB2 ; mapped ; 1EB3 # 1.1 LATIN CAPITAL LETTER A WITH BREVE AND HOOK ABOVE +1EB3 ; valid # 1.1 LATIN SMALL LETTER A WITH BREVE AND HOOK ABOVE +1EB4 ; mapped ; 1EB5 # 1.1 LATIN CAPITAL LETTER A WITH BREVE AND TILDE +1EB5 ; valid # 1.1 LATIN SMALL LETTER A WITH BREVE AND TILDE +1EB6 ; mapped ; 1EB7 # 1.1 LATIN CAPITAL LETTER A WITH BREVE AND DOT BELOW +1EB7 ; valid # 1.1 LATIN SMALL LETTER A WITH BREVE AND DOT BELOW +1EB8 ; mapped ; 1EB9 # 1.1 LATIN CAPITAL LETTER E WITH DOT BELOW +1EB9 ; valid # 1.1 LATIN SMALL LETTER E WITH DOT BELOW +1EBA ; mapped ; 1EBB # 1.1 LATIN CAPITAL LETTER E WITH HOOK ABOVE +1EBB ; valid # 1.1 LATIN SMALL LETTER E WITH HOOK ABOVE +1EBC ; mapped ; 1EBD # 1.1 LATIN CAPITAL LETTER E WITH TILDE +1EBD ; valid # 1.1 LATIN SMALL LETTER E WITH TILDE +1EBE ; mapped ; 1EBF # 1.1 LATIN CAPITAL LETTER E WITH CIRCUMFLEX AND ACUTE +1EBF ; valid # 1.1 LATIN SMALL LETTER E WITH CIRCUMFLEX AND ACUTE +1EC0 ; mapped ; 1EC1 # 1.1 LATIN CAPITAL LETTER E WITH CIRCUMFLEX AND GRAVE +1EC1 ; valid # 1.1 LATIN SMALL LETTER E WITH CIRCUMFLEX AND GRAVE +1EC2 ; mapped ; 1EC3 # 1.1 LATIN CAPITAL LETTER E WITH CIRCUMFLEX AND HOOK ABOVE +1EC3 ; valid # 1.1 LATIN SMALL LETTER E WITH CIRCUMFLEX AND HOOK ABOVE +1EC4 ; mapped ; 1EC5 # 1.1 LATIN CAPITAL LETTER E WITH CIRCUMFLEX AND TILDE +1EC5 ; valid # 1.1 LATIN SMALL LETTER E WITH CIRCUMFLEX AND TILDE +1EC6 ; mapped ; 1EC7 # 1.1 LATIN CAPITAL LETTER E WITH CIRCUMFLEX AND DOT BELOW +1EC7 ; valid # 1.1 LATIN SMALL LETTER E WITH CIRCUMFLEX AND DOT BELOW +1EC8 ; mapped ; 1EC9 # 1.1 LATIN CAPITAL LETTER I WITH HOOK ABOVE +1EC9 ; valid # 1.1 LATIN SMALL LETTER I WITH HOOK ABOVE +1ECA ; mapped ; 1ECB # 1.1 LATIN CAPITAL LETTER I WITH DOT BELOW +1ECB ; valid # 1.1 LATIN SMALL LETTER I WITH DOT BELOW +1ECC ; mapped ; 1ECD # 1.1 LATIN CAPITAL LETTER O WITH DOT BELOW +1ECD ; valid # 1.1 LATIN SMALL LETTER O WITH DOT BELOW +1ECE ; mapped ; 1ECF # 1.1 LATIN CAPITAL LETTER O WITH HOOK ABOVE +1ECF ; valid # 1.1 LATIN SMALL LETTER O WITH HOOK ABOVE +1ED0 ; mapped ; 1ED1 # 1.1 LATIN CAPITAL LETTER O WITH CIRCUMFLEX AND ACUTE +1ED1 ; valid # 1.1 LATIN SMALL LETTER O WITH CIRCUMFLEX AND ACUTE +1ED2 ; mapped ; 1ED3 # 1.1 LATIN CAPITAL LETTER O WITH CIRCUMFLEX AND GRAVE +1ED3 ; valid # 1.1 LATIN SMALL LETTER O WITH CIRCUMFLEX AND GRAVE +1ED4 ; mapped ; 1ED5 # 1.1 LATIN CAPITAL LETTER O WITH CIRCUMFLEX AND HOOK ABOVE +1ED5 ; valid # 1.1 LATIN SMALL LETTER O WITH CIRCUMFLEX AND HOOK ABOVE +1ED6 ; mapped ; 1ED7 # 1.1 LATIN CAPITAL LETTER O WITH CIRCUMFLEX AND TILDE +1ED7 ; valid # 1.1 LATIN SMALL LETTER O WITH CIRCUMFLEX AND TILDE +1ED8 ; mapped ; 1ED9 # 1.1 LATIN CAPITAL LETTER O WITH CIRCUMFLEX AND DOT BELOW +1ED9 ; valid # 1.1 LATIN SMALL LETTER O WITH CIRCUMFLEX AND DOT BELOW +1EDA ; mapped ; 1EDB # 1.1 LATIN CAPITAL 
LETTER O WITH HORN AND ACUTE +1EDB ; valid # 1.1 LATIN SMALL LETTER O WITH HORN AND ACUTE +1EDC ; mapped ; 1EDD # 1.1 LATIN CAPITAL LETTER O WITH HORN AND GRAVE +1EDD ; valid # 1.1 LATIN SMALL LETTER O WITH HORN AND GRAVE +1EDE ; mapped ; 1EDF # 1.1 LATIN CAPITAL LETTER O WITH HORN AND HOOK ABOVE +1EDF ; valid # 1.1 LATIN SMALL LETTER O WITH HORN AND HOOK ABOVE +1EE0 ; mapped ; 1EE1 # 1.1 LATIN CAPITAL LETTER O WITH HORN AND TILDE +1EE1 ; valid # 1.1 LATIN SMALL LETTER O WITH HORN AND TILDE +1EE2 ; mapped ; 1EE3 # 1.1 LATIN CAPITAL LETTER O WITH HORN AND DOT BELOW +1EE3 ; valid # 1.1 LATIN SMALL LETTER O WITH HORN AND DOT BELOW +1EE4 ; mapped ; 1EE5 # 1.1 LATIN CAPITAL LETTER U WITH DOT BELOW +1EE5 ; valid # 1.1 LATIN SMALL LETTER U WITH DOT BELOW +1EE6 ; mapped ; 1EE7 # 1.1 LATIN CAPITAL LETTER U WITH HOOK ABOVE +1EE7 ; valid # 1.1 LATIN SMALL LETTER U WITH HOOK ABOVE +1EE8 ; mapped ; 1EE9 # 1.1 LATIN CAPITAL LETTER U WITH HORN AND ACUTE +1EE9 ; valid # 1.1 LATIN SMALL LETTER U WITH HORN AND ACUTE +1EEA ; mapped ; 1EEB # 1.1 LATIN CAPITAL LETTER U WITH HORN AND GRAVE +1EEB ; valid # 1.1 LATIN SMALL LETTER U WITH HORN AND GRAVE +1EEC ; mapped ; 1EED # 1.1 LATIN CAPITAL LETTER U WITH HORN AND HOOK ABOVE +1EED ; valid # 1.1 LATIN SMALL LETTER U WITH HORN AND HOOK ABOVE +1EEE ; mapped ; 1EEF # 1.1 LATIN CAPITAL LETTER U WITH HORN AND TILDE +1EEF ; valid # 1.1 LATIN SMALL LETTER U WITH HORN AND TILDE +1EF0 ; mapped ; 1EF1 # 1.1 LATIN CAPITAL LETTER U WITH HORN AND DOT BELOW +1EF1 ; valid # 1.1 LATIN SMALL LETTER U WITH HORN AND DOT BELOW +1EF2 ; mapped ; 1EF3 # 1.1 LATIN CAPITAL LETTER Y WITH GRAVE +1EF3 ; valid # 1.1 LATIN SMALL LETTER Y WITH GRAVE +1EF4 ; mapped ; 1EF5 # 1.1 LATIN CAPITAL LETTER Y WITH DOT BELOW +1EF5 ; valid # 1.1 LATIN SMALL LETTER Y WITH DOT BELOW +1EF6 ; mapped ; 1EF7 # 1.1 LATIN CAPITAL LETTER Y WITH HOOK ABOVE +1EF7 ; valid # 1.1 LATIN SMALL LETTER Y WITH HOOK ABOVE +1EF8 ; mapped ; 1EF9 # 1.1 LATIN CAPITAL LETTER Y WITH TILDE +1EF9 ; valid # 1.1 LATIN SMALL LETTER Y WITH TILDE +1EFA ; mapped ; 1EFB # 5.1 LATIN CAPITAL LETTER MIDDLE-WELSH LL +1EFB ; valid # 5.1 LATIN SMALL LETTER MIDDLE-WELSH LL +1EFC ; mapped ; 1EFD # 5.1 LATIN CAPITAL LETTER MIDDLE-WELSH V +1EFD ; valid # 5.1 LATIN SMALL LETTER MIDDLE-WELSH V +1EFE ; mapped ; 1EFF # 5.1 LATIN CAPITAL LETTER Y WITH LOOP +1EFF ; valid # 5.1 LATIN SMALL LETTER Y WITH LOOP +1F00..1F07 ; valid # 1.1 GREEK SMALL LETTER ALPHA WITH PSILI..GREEK SMALL LETTER ALPHA WITH DASIA AND PERISPOMENI +1F08 ; mapped ; 1F00 # 1.1 GREEK CAPITAL LETTER ALPHA WITH PSILI +1F09 ; mapped ; 1F01 # 1.1 GREEK CAPITAL LETTER ALPHA WITH DASIA +1F0A ; mapped ; 1F02 # 1.1 GREEK CAPITAL LETTER ALPHA WITH PSILI AND VARIA +1F0B ; mapped ; 1F03 # 1.1 GREEK CAPITAL LETTER ALPHA WITH DASIA AND VARIA +1F0C ; mapped ; 1F04 # 1.1 GREEK CAPITAL LETTER ALPHA WITH PSILI AND OXIA +1F0D ; mapped ; 1F05 # 1.1 GREEK CAPITAL LETTER ALPHA WITH DASIA AND OXIA +1F0E ; mapped ; 1F06 # 1.1 GREEK CAPITAL LETTER ALPHA WITH PSILI AND PERISPOMENI +1F0F ; mapped ; 1F07 # 1.1 GREEK CAPITAL LETTER ALPHA WITH DASIA AND PERISPOMENI +1F10..1F15 ; valid # 1.1 GREEK SMALL LETTER EPSILON WITH PSILI..GREEK SMALL LETTER EPSILON WITH DASIA AND OXIA +1F16..1F17 ; disallowed # NA .. 
+1F18 ; mapped ; 1F10 # 1.1 GREEK CAPITAL LETTER EPSILON WITH PSILI +1F19 ; mapped ; 1F11 # 1.1 GREEK CAPITAL LETTER EPSILON WITH DASIA +1F1A ; mapped ; 1F12 # 1.1 GREEK CAPITAL LETTER EPSILON WITH PSILI AND VARIA +1F1B ; mapped ; 1F13 # 1.1 GREEK CAPITAL LETTER EPSILON WITH DASIA AND VARIA +1F1C ; mapped ; 1F14 # 1.1 GREEK CAPITAL LETTER EPSILON WITH PSILI AND OXIA +1F1D ; mapped ; 1F15 # 1.1 GREEK CAPITAL LETTER EPSILON WITH DASIA AND OXIA +1F1E..1F1F ; disallowed # NA .. +1F20..1F27 ; valid # 1.1 GREEK SMALL LETTER ETA WITH PSILI..GREEK SMALL LETTER ETA WITH DASIA AND PERISPOMENI +1F28 ; mapped ; 1F20 # 1.1 GREEK CAPITAL LETTER ETA WITH PSILI +1F29 ; mapped ; 1F21 # 1.1 GREEK CAPITAL LETTER ETA WITH DASIA +1F2A ; mapped ; 1F22 # 1.1 GREEK CAPITAL LETTER ETA WITH PSILI AND VARIA +1F2B ; mapped ; 1F23 # 1.1 GREEK CAPITAL LETTER ETA WITH DASIA AND VARIA +1F2C ; mapped ; 1F24 # 1.1 GREEK CAPITAL LETTER ETA WITH PSILI AND OXIA +1F2D ; mapped ; 1F25 # 1.1 GREEK CAPITAL LETTER ETA WITH DASIA AND OXIA +1F2E ; mapped ; 1F26 # 1.1 GREEK CAPITAL LETTER ETA WITH PSILI AND PERISPOMENI +1F2F ; mapped ; 1F27 # 1.1 GREEK CAPITAL LETTER ETA WITH DASIA AND PERISPOMENI +1F30..1F37 ; valid # 1.1 GREEK SMALL LETTER IOTA WITH PSILI..GREEK SMALL LETTER IOTA WITH DASIA AND PERISPOMENI +1F38 ; mapped ; 1F30 # 1.1 GREEK CAPITAL LETTER IOTA WITH PSILI +1F39 ; mapped ; 1F31 # 1.1 GREEK CAPITAL LETTER IOTA WITH DASIA +1F3A ; mapped ; 1F32 # 1.1 GREEK CAPITAL LETTER IOTA WITH PSILI AND VARIA +1F3B ; mapped ; 1F33 # 1.1 GREEK CAPITAL LETTER IOTA WITH DASIA AND VARIA +1F3C ; mapped ; 1F34 # 1.1 GREEK CAPITAL LETTER IOTA WITH PSILI AND OXIA +1F3D ; mapped ; 1F35 # 1.1 GREEK CAPITAL LETTER IOTA WITH DASIA AND OXIA +1F3E ; mapped ; 1F36 # 1.1 GREEK CAPITAL LETTER IOTA WITH PSILI AND PERISPOMENI +1F3F ; mapped ; 1F37 # 1.1 GREEK CAPITAL LETTER IOTA WITH DASIA AND PERISPOMENI +1F40..1F45 ; valid # 1.1 GREEK SMALL LETTER OMICRON WITH PSILI..GREEK SMALL LETTER OMICRON WITH DASIA AND OXIA +1F46..1F47 ; disallowed # NA .. +1F48 ; mapped ; 1F40 # 1.1 GREEK CAPITAL LETTER OMICRON WITH PSILI +1F49 ; mapped ; 1F41 # 1.1 GREEK CAPITAL LETTER OMICRON WITH DASIA +1F4A ; mapped ; 1F42 # 1.1 GREEK CAPITAL LETTER OMICRON WITH PSILI AND VARIA +1F4B ; mapped ; 1F43 # 1.1 GREEK CAPITAL LETTER OMICRON WITH DASIA AND VARIA +1F4C ; mapped ; 1F44 # 1.1 GREEK CAPITAL LETTER OMICRON WITH PSILI AND OXIA +1F4D ; mapped ; 1F45 # 1.1 GREEK CAPITAL LETTER OMICRON WITH DASIA AND OXIA +1F4E..1F4F ; disallowed # NA .. 
+1F50..1F57 ; valid # 1.1 GREEK SMALL LETTER UPSILON WITH PSILI..GREEK SMALL LETTER UPSILON WITH DASIA AND PERISPOMENI +1F58 ; disallowed # NA +1F59 ; mapped ; 1F51 # 1.1 GREEK CAPITAL LETTER UPSILON WITH DASIA +1F5A ; disallowed # NA +1F5B ; mapped ; 1F53 # 1.1 GREEK CAPITAL LETTER UPSILON WITH DASIA AND VARIA +1F5C ; disallowed # NA +1F5D ; mapped ; 1F55 # 1.1 GREEK CAPITAL LETTER UPSILON WITH DASIA AND OXIA +1F5E ; disallowed # NA +1F5F ; mapped ; 1F57 # 1.1 GREEK CAPITAL LETTER UPSILON WITH DASIA AND PERISPOMENI +1F60..1F67 ; valid # 1.1 GREEK SMALL LETTER OMEGA WITH PSILI..GREEK SMALL LETTER OMEGA WITH DASIA AND PERISPOMENI +1F68 ; mapped ; 1F60 # 1.1 GREEK CAPITAL LETTER OMEGA WITH PSILI +1F69 ; mapped ; 1F61 # 1.1 GREEK CAPITAL LETTER OMEGA WITH DASIA +1F6A ; mapped ; 1F62 # 1.1 GREEK CAPITAL LETTER OMEGA WITH PSILI AND VARIA +1F6B ; mapped ; 1F63 # 1.1 GREEK CAPITAL LETTER OMEGA WITH DASIA AND VARIA +1F6C ; mapped ; 1F64 # 1.1 GREEK CAPITAL LETTER OMEGA WITH PSILI AND OXIA +1F6D ; mapped ; 1F65 # 1.1 GREEK CAPITAL LETTER OMEGA WITH DASIA AND OXIA +1F6E ; mapped ; 1F66 # 1.1 GREEK CAPITAL LETTER OMEGA WITH PSILI AND PERISPOMENI +1F6F ; mapped ; 1F67 # 1.1 GREEK CAPITAL LETTER OMEGA WITH DASIA AND PERISPOMENI +1F70 ; valid # 1.1 GREEK SMALL LETTER ALPHA WITH VARIA +1F71 ; mapped ; 03AC # 1.1 GREEK SMALL LETTER ALPHA WITH OXIA +1F72 ; valid # 1.1 GREEK SMALL LETTER EPSILON WITH VARIA +1F73 ; mapped ; 03AD # 1.1 GREEK SMALL LETTER EPSILON WITH OXIA +1F74 ; valid # 1.1 GREEK SMALL LETTER ETA WITH VARIA +1F75 ; mapped ; 03AE # 1.1 GREEK SMALL LETTER ETA WITH OXIA +1F76 ; valid # 1.1 GREEK SMALL LETTER IOTA WITH VARIA +1F77 ; mapped ; 03AF # 1.1 GREEK SMALL LETTER IOTA WITH OXIA +1F78 ; valid # 1.1 GREEK SMALL LETTER OMICRON WITH VARIA +1F79 ; mapped ; 03CC # 1.1 GREEK SMALL LETTER OMICRON WITH OXIA +1F7A ; valid # 1.1 GREEK SMALL LETTER UPSILON WITH VARIA +1F7B ; mapped ; 03CD # 1.1 GREEK SMALL LETTER UPSILON WITH OXIA +1F7C ; valid # 1.1 GREEK SMALL LETTER OMEGA WITH VARIA +1F7D ; mapped ; 03CE # 1.1 GREEK SMALL LETTER OMEGA WITH OXIA +1F7E..1F7F ; disallowed # NA .. 
+1F80 ; mapped ; 1F00 03B9 # 1.1 GREEK SMALL LETTER ALPHA WITH PSILI AND YPOGEGRAMMENI +1F81 ; mapped ; 1F01 03B9 # 1.1 GREEK SMALL LETTER ALPHA WITH DASIA AND YPOGEGRAMMENI +1F82 ; mapped ; 1F02 03B9 # 1.1 GREEK SMALL LETTER ALPHA WITH PSILI AND VARIA AND YPOGEGRAMMENI +1F83 ; mapped ; 1F03 03B9 # 1.1 GREEK SMALL LETTER ALPHA WITH DASIA AND VARIA AND YPOGEGRAMMENI +1F84 ; mapped ; 1F04 03B9 # 1.1 GREEK SMALL LETTER ALPHA WITH PSILI AND OXIA AND YPOGEGRAMMENI +1F85 ; mapped ; 1F05 03B9 # 1.1 GREEK SMALL LETTER ALPHA WITH DASIA AND OXIA AND YPOGEGRAMMENI +1F86 ; mapped ; 1F06 03B9 # 1.1 GREEK SMALL LETTER ALPHA WITH PSILI AND PERISPOMENI AND YPOGEGRAMMENI +1F87 ; mapped ; 1F07 03B9 # 1.1 GREEK SMALL LETTER ALPHA WITH DASIA AND PERISPOMENI AND YPOGEGRAMMENI +1F88 ; mapped ; 1F00 03B9 # 1.1 GREEK CAPITAL LETTER ALPHA WITH PSILI AND PROSGEGRAMMENI +1F89 ; mapped ; 1F01 03B9 # 1.1 GREEK CAPITAL LETTER ALPHA WITH DASIA AND PROSGEGRAMMENI +1F8A ; mapped ; 1F02 03B9 # 1.1 GREEK CAPITAL LETTER ALPHA WITH PSILI AND VARIA AND PROSGEGRAMMENI +1F8B ; mapped ; 1F03 03B9 # 1.1 GREEK CAPITAL LETTER ALPHA WITH DASIA AND VARIA AND PROSGEGRAMMENI +1F8C ; mapped ; 1F04 03B9 # 1.1 GREEK CAPITAL LETTER ALPHA WITH PSILI AND OXIA AND PROSGEGRAMMENI +1F8D ; mapped ; 1F05 03B9 # 1.1 GREEK CAPITAL LETTER ALPHA WITH DASIA AND OXIA AND PROSGEGRAMMENI +1F8E ; mapped ; 1F06 03B9 # 1.1 GREEK CAPITAL LETTER ALPHA WITH PSILI AND PERISPOMENI AND PROSGEGRAMMENI +1F8F ; mapped ; 1F07 03B9 # 1.1 GREEK CAPITAL LETTER ALPHA WITH DASIA AND PERISPOMENI AND PROSGEGRAMMENI +1F90 ; mapped ; 1F20 03B9 # 1.1 GREEK SMALL LETTER ETA WITH PSILI AND YPOGEGRAMMENI +1F91 ; mapped ; 1F21 03B9 # 1.1 GREEK SMALL LETTER ETA WITH DASIA AND YPOGEGRAMMENI +1F92 ; mapped ; 1F22 03B9 # 1.1 GREEK SMALL LETTER ETA WITH PSILI AND VARIA AND YPOGEGRAMMENI +1F93 ; mapped ; 1F23 03B9 # 1.1 GREEK SMALL LETTER ETA WITH DASIA AND VARIA AND YPOGEGRAMMENI +1F94 ; mapped ; 1F24 03B9 # 1.1 GREEK SMALL LETTER ETA WITH PSILI AND OXIA AND YPOGEGRAMMENI +1F95 ; mapped ; 1F25 03B9 # 1.1 GREEK SMALL LETTER ETA WITH DASIA AND OXIA AND YPOGEGRAMMENI +1F96 ; mapped ; 1F26 03B9 # 1.1 GREEK SMALL LETTER ETA WITH PSILI AND PERISPOMENI AND YPOGEGRAMMENI +1F97 ; mapped ; 1F27 03B9 # 1.1 GREEK SMALL LETTER ETA WITH DASIA AND PERISPOMENI AND YPOGEGRAMMENI +1F98 ; mapped ; 1F20 03B9 # 1.1 GREEK CAPITAL LETTER ETA WITH PSILI AND PROSGEGRAMMENI +1F99 ; mapped ; 1F21 03B9 # 1.1 GREEK CAPITAL LETTER ETA WITH DASIA AND PROSGEGRAMMENI +1F9A ; mapped ; 1F22 03B9 # 1.1 GREEK CAPITAL LETTER ETA WITH PSILI AND VARIA AND PROSGEGRAMMENI +1F9B ; mapped ; 1F23 03B9 # 1.1 GREEK CAPITAL LETTER ETA WITH DASIA AND VARIA AND PROSGEGRAMMENI +1F9C ; mapped ; 1F24 03B9 # 1.1 GREEK CAPITAL LETTER ETA WITH PSILI AND OXIA AND PROSGEGRAMMENI +1F9D ; mapped ; 1F25 03B9 # 1.1 GREEK CAPITAL LETTER ETA WITH DASIA AND OXIA AND PROSGEGRAMMENI +1F9E ; mapped ; 1F26 03B9 # 1.1 GREEK CAPITAL LETTER ETA WITH PSILI AND PERISPOMENI AND PROSGEGRAMMENI +1F9F ; mapped ; 1F27 03B9 # 1.1 GREEK CAPITAL LETTER ETA WITH DASIA AND PERISPOMENI AND PROSGEGRAMMENI +1FA0 ; mapped ; 1F60 03B9 # 1.1 GREEK SMALL LETTER OMEGA WITH PSILI AND YPOGEGRAMMENI +1FA1 ; mapped ; 1F61 03B9 # 1.1 GREEK SMALL LETTER OMEGA WITH DASIA AND YPOGEGRAMMENI +1FA2 ; mapped ; 1F62 03B9 # 1.1 GREEK SMALL LETTER OMEGA WITH PSILI AND VARIA AND YPOGEGRAMMENI +1FA3 ; mapped ; 1F63 03B9 # 1.1 GREEK SMALL LETTER OMEGA WITH DASIA AND VARIA AND YPOGEGRAMMENI +1FA4 ; mapped ; 1F64 03B9 # 1.1 GREEK SMALL LETTER OMEGA WITH PSILI AND OXIA AND YPOGEGRAMMENI +1FA5 ; 
mapped ; 1F65 03B9 # 1.1 GREEK SMALL LETTER OMEGA WITH DASIA AND OXIA AND YPOGEGRAMMENI +1FA6 ; mapped ; 1F66 03B9 # 1.1 GREEK SMALL LETTER OMEGA WITH PSILI AND PERISPOMENI AND YPOGEGRAMMENI +1FA7 ; mapped ; 1F67 03B9 # 1.1 GREEK SMALL LETTER OMEGA WITH DASIA AND PERISPOMENI AND YPOGEGRAMMENI +1FA8 ; mapped ; 1F60 03B9 # 1.1 GREEK CAPITAL LETTER OMEGA WITH PSILI AND PROSGEGRAMMENI +1FA9 ; mapped ; 1F61 03B9 # 1.1 GREEK CAPITAL LETTER OMEGA WITH DASIA AND PROSGEGRAMMENI +1FAA ; mapped ; 1F62 03B9 # 1.1 GREEK CAPITAL LETTER OMEGA WITH PSILI AND VARIA AND PROSGEGRAMMENI +1FAB ; mapped ; 1F63 03B9 # 1.1 GREEK CAPITAL LETTER OMEGA WITH DASIA AND VARIA AND PROSGEGRAMMENI +1FAC ; mapped ; 1F64 03B9 # 1.1 GREEK CAPITAL LETTER OMEGA WITH PSILI AND OXIA AND PROSGEGRAMMENI +1FAD ; mapped ; 1F65 03B9 # 1.1 GREEK CAPITAL LETTER OMEGA WITH DASIA AND OXIA AND PROSGEGRAMMENI +1FAE ; mapped ; 1F66 03B9 # 1.1 GREEK CAPITAL LETTER OMEGA WITH PSILI AND PERISPOMENI AND PROSGEGRAMMENI +1FAF ; mapped ; 1F67 03B9 # 1.1 GREEK CAPITAL LETTER OMEGA WITH DASIA AND PERISPOMENI AND PROSGEGRAMMENI +1FB0..1FB1 ; valid # 1.1 GREEK SMALL LETTER ALPHA WITH VRACHY..GREEK SMALL LETTER ALPHA WITH MACRON +1FB2 ; mapped ; 1F70 03B9 # 1.1 GREEK SMALL LETTER ALPHA WITH VARIA AND YPOGEGRAMMENI +1FB3 ; mapped ; 03B1 03B9 # 1.1 GREEK SMALL LETTER ALPHA WITH YPOGEGRAMMENI +1FB4 ; mapped ; 03AC 03B9 # 1.1 GREEK SMALL LETTER ALPHA WITH OXIA AND YPOGEGRAMMENI +1FB5 ; disallowed # NA +1FB6 ; valid # 1.1 GREEK SMALL LETTER ALPHA WITH PERISPOMENI +1FB7 ; mapped ; 1FB6 03B9 # 1.1 GREEK SMALL LETTER ALPHA WITH PERISPOMENI AND YPOGEGRAMMENI +1FB8 ; mapped ; 1FB0 # 1.1 GREEK CAPITAL LETTER ALPHA WITH VRACHY +1FB9 ; mapped ; 1FB1 # 1.1 GREEK CAPITAL LETTER ALPHA WITH MACRON +1FBA ; mapped ; 1F70 # 1.1 GREEK CAPITAL LETTER ALPHA WITH VARIA +1FBB ; mapped ; 03AC # 1.1 GREEK CAPITAL LETTER ALPHA WITH OXIA +1FBC ; mapped ; 03B1 03B9 # 1.1 GREEK CAPITAL LETTER ALPHA WITH PROSGEGRAMMENI +1FBD ; disallowed_STD3_mapped ; 0020 0313 # 1.1 GREEK KORONIS +1FBE ; mapped ; 03B9 # 1.1 GREEK PROSGEGRAMMENI +1FBF ; disallowed_STD3_mapped ; 0020 0313 # 1.1 GREEK PSILI +1FC0 ; disallowed_STD3_mapped ; 0020 0342 # 1.1 GREEK PERISPOMENI +1FC1 ; disallowed_STD3_mapped ; 0020 0308 0342 #1.1 GREEK DIALYTIKA AND PERISPOMENI +1FC2 ; mapped ; 1F74 03B9 # 1.1 GREEK SMALL LETTER ETA WITH VARIA AND YPOGEGRAMMENI +1FC3 ; mapped ; 03B7 03B9 # 1.1 GREEK SMALL LETTER ETA WITH YPOGEGRAMMENI +1FC4 ; mapped ; 03AE 03B9 # 1.1 GREEK SMALL LETTER ETA WITH OXIA AND YPOGEGRAMMENI +1FC5 ; disallowed # NA +1FC6 ; valid # 1.1 GREEK SMALL LETTER ETA WITH PERISPOMENI +1FC7 ; mapped ; 1FC6 03B9 # 1.1 GREEK SMALL LETTER ETA WITH PERISPOMENI AND YPOGEGRAMMENI +1FC8 ; mapped ; 1F72 # 1.1 GREEK CAPITAL LETTER EPSILON WITH VARIA +1FC9 ; mapped ; 03AD # 1.1 GREEK CAPITAL LETTER EPSILON WITH OXIA +1FCA ; mapped ; 1F74 # 1.1 GREEK CAPITAL LETTER ETA WITH VARIA +1FCB ; mapped ; 03AE # 1.1 GREEK CAPITAL LETTER ETA WITH OXIA +1FCC ; mapped ; 03B7 03B9 # 1.1 GREEK CAPITAL LETTER ETA WITH PROSGEGRAMMENI +1FCD ; disallowed_STD3_mapped ; 0020 0313 0300 #1.1 GREEK PSILI AND VARIA +1FCE ; disallowed_STD3_mapped ; 0020 0313 0301 #1.1 GREEK PSILI AND OXIA +1FCF ; disallowed_STD3_mapped ; 0020 0313 0342 #1.1 GREEK PSILI AND PERISPOMENI +1FD0..1FD2 ; valid # 1.1 GREEK SMALL LETTER IOTA WITH VRACHY..GREEK SMALL LETTER IOTA WITH DIALYTIKA AND VARIA +1FD3 ; mapped ; 0390 # 1.1 GREEK SMALL LETTER IOTA WITH DIALYTIKA AND OXIA +1FD4..1FD5 ; disallowed # NA .. 
+1FD6..1FD7 ; valid # 1.1 GREEK SMALL LETTER IOTA WITH PERISPOMENI..GREEK SMALL LETTER IOTA WITH DIALYTIKA AND PERISPOMENI +1FD8 ; mapped ; 1FD0 # 1.1 GREEK CAPITAL LETTER IOTA WITH VRACHY +1FD9 ; mapped ; 1FD1 # 1.1 GREEK CAPITAL LETTER IOTA WITH MACRON +1FDA ; mapped ; 1F76 # 1.1 GREEK CAPITAL LETTER IOTA WITH VARIA +1FDB ; mapped ; 03AF # 1.1 GREEK CAPITAL LETTER IOTA WITH OXIA +1FDC ; disallowed # NA +1FDD ; disallowed_STD3_mapped ; 0020 0314 0300 #1.1 GREEK DASIA AND VARIA +1FDE ; disallowed_STD3_mapped ; 0020 0314 0301 #1.1 GREEK DASIA AND OXIA +1FDF ; disallowed_STD3_mapped ; 0020 0314 0342 #1.1 GREEK DASIA AND PERISPOMENI +1FE0..1FE2 ; valid # 1.1 GREEK SMALL LETTER UPSILON WITH VRACHY..GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND VARIA +1FE3 ; mapped ; 03B0 # 1.1 GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND OXIA +1FE4..1FE7 ; valid # 1.1 GREEK SMALL LETTER RHO WITH PSILI..GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND PERISPOMENI +1FE8 ; mapped ; 1FE0 # 1.1 GREEK CAPITAL LETTER UPSILON WITH VRACHY +1FE9 ; mapped ; 1FE1 # 1.1 GREEK CAPITAL LETTER UPSILON WITH MACRON +1FEA ; mapped ; 1F7A # 1.1 GREEK CAPITAL LETTER UPSILON WITH VARIA +1FEB ; mapped ; 03CD # 1.1 GREEK CAPITAL LETTER UPSILON WITH OXIA +1FEC ; mapped ; 1FE5 # 1.1 GREEK CAPITAL LETTER RHO WITH DASIA +1FED ; disallowed_STD3_mapped ; 0020 0308 0300 #1.1 GREEK DIALYTIKA AND VARIA +1FEE ; disallowed_STD3_mapped ; 0020 0308 0301 #1.1 GREEK DIALYTIKA AND OXIA +1FEF ; disallowed_STD3_mapped ; 0060 # 1.1 GREEK VARIA +1FF0..1FF1 ; disallowed # NA .. +1FF2 ; mapped ; 1F7C 03B9 # 1.1 GREEK SMALL LETTER OMEGA WITH VARIA AND YPOGEGRAMMENI +1FF3 ; mapped ; 03C9 03B9 # 1.1 GREEK SMALL LETTER OMEGA WITH YPOGEGRAMMENI +1FF4 ; mapped ; 03CE 03B9 # 1.1 GREEK SMALL LETTER OMEGA WITH OXIA AND YPOGEGRAMMENI +1FF5 ; disallowed # NA +1FF6 ; valid # 1.1 GREEK SMALL LETTER OMEGA WITH PERISPOMENI +1FF7 ; mapped ; 1FF6 03B9 # 1.1 GREEK SMALL LETTER OMEGA WITH PERISPOMENI AND YPOGEGRAMMENI +1FF8 ; mapped ; 1F78 # 1.1 GREEK CAPITAL LETTER OMICRON WITH VARIA +1FF9 ; mapped ; 03CC # 1.1 GREEK CAPITAL LETTER OMICRON WITH OXIA +1FFA ; mapped ; 1F7C # 1.1 GREEK CAPITAL LETTER OMEGA WITH VARIA +1FFB ; mapped ; 03CE # 1.1 GREEK CAPITAL LETTER OMEGA WITH OXIA +1FFC ; mapped ; 03C9 03B9 # 1.1 GREEK CAPITAL LETTER OMEGA WITH PROSGEGRAMMENI +1FFD ; disallowed_STD3_mapped ; 0020 0301 # 1.1 GREEK OXIA +1FFE ; disallowed_STD3_mapped ; 0020 0314 # 1.1 GREEK DASIA +1FFF ; disallowed # NA +2000..200A ; disallowed_STD3_mapped ; 0020 # 1.1 EN QUAD..HAIR SPACE +200B ; ignored # 1.1 ZERO WIDTH SPACE +200C..200D ; deviation ; # 1.1 ZERO WIDTH NON-JOINER..ZERO WIDTH JOINER +200E..200F ; disallowed # 1.1 LEFT-TO-RIGHT MARK..RIGHT-TO-LEFT MARK +2010 ; valid ; ; NV8 # 1.1 HYPHEN +2011 ; mapped ; 2010 # 1.1 NON-BREAKING HYPHEN +2012..2016 ; valid ; ; NV8 # 1.1 FIGURE DASH..DOUBLE VERTICAL LINE +2017 ; disallowed_STD3_mapped ; 0020 0333 # 1.1 DOUBLE LOW LINE +2018..2023 ; valid ; ; NV8 # 1.1 LEFT SINGLE QUOTATION MARK..TRIANGULAR BULLET +2024..2026 ; disallowed # 1.1 ONE DOT LEADER..HORIZONTAL ELLIPSIS +2027 ; valid ; ; NV8 # 1.1 HYPHENATION POINT +2028..202E ; disallowed # 1.1 LINE SEPARATOR..RIGHT-TO-LEFT OVERRIDE +202F ; disallowed_STD3_mapped ; 0020 # 3.0 NARROW NO-BREAK SPACE +2030..2032 ; valid ; ; NV8 # 1.1 PER MILLE SIGN..PRIME +2033 ; mapped ; 2032 2032 # 1.1 DOUBLE PRIME +2034 ; mapped ; 2032 2032 2032 #1.1 TRIPLE PRIME +2035 ; valid ; ; NV8 # 1.1 REVERSED PRIME +2036 ; mapped ; 2035 2035 # 1.1 REVERSED DOUBLE PRIME +2037 ; mapped ; 2035 2035 2035 #1.1 REVERSED 
TRIPLE PRIME +2038..203B ; valid ; ; NV8 # 1.1 CARET..REFERENCE MARK +203C ; disallowed_STD3_mapped ; 0021 0021 # 1.1 DOUBLE EXCLAMATION MARK +203D ; valid ; ; NV8 # 1.1 INTERROBANG +203E ; disallowed_STD3_mapped ; 0020 0305 # 1.1 OVERLINE +203F..2046 ; valid ; ; NV8 # 1.1 UNDERTIE..RIGHT SQUARE BRACKET WITH QUILL +2047 ; disallowed_STD3_mapped ; 003F 003F # 3.2 DOUBLE QUESTION MARK +2048 ; disallowed_STD3_mapped ; 003F 0021 # 3.0 QUESTION EXCLAMATION MARK +2049 ; disallowed_STD3_mapped ; 0021 003F # 3.0 EXCLAMATION QUESTION MARK +204A..204D ; valid ; ; NV8 # 3.0 TIRONIAN SIGN ET..BLACK RIGHTWARDS BULLET +204E..2052 ; valid ; ; NV8 # 3.2 LOW ASTERISK..COMMERCIAL MINUS SIGN +2053..2054 ; valid ; ; NV8 # 4.0 SWUNG DASH..INVERTED UNDERTIE +2055..2056 ; valid ; ; NV8 # 4.1 FLOWER PUNCTUATION MARK..THREE DOT PUNCTUATION +2057 ; mapped ; 2032 2032 2032 2032 #3.2 QUADRUPLE PRIME +2058..205E ; valid ; ; NV8 # 4.1 FOUR DOT PUNCTUATION..VERTICAL FOUR DOTS +205F ; disallowed_STD3_mapped ; 0020 # 3.2 MEDIUM MATHEMATICAL SPACE +2060 ; ignored # 3.2 WORD JOINER +2061..2063 ; disallowed # 3.2 FUNCTION APPLICATION..INVISIBLE SEPARATOR +2064 ; ignored # 5.1 INVISIBLE PLUS +2065 ; disallowed # NA +2066..2069 ; disallowed # 6.3 LEFT-TO-RIGHT ISOLATE..POP DIRECTIONAL ISOLATE +206A..206F ; disallowed # 1.1 INHIBIT SYMMETRIC SWAPPING..NOMINAL DIGIT SHAPES +2070 ; mapped ; 0030 # 1.1 SUPERSCRIPT ZERO +2071 ; mapped ; 0069 # 3.2 SUPERSCRIPT LATIN SMALL LETTER I +2072..2073 ; disallowed # NA .. +2074 ; mapped ; 0034 # 1.1 SUPERSCRIPT FOUR +2075 ; mapped ; 0035 # 1.1 SUPERSCRIPT FIVE +2076 ; mapped ; 0036 # 1.1 SUPERSCRIPT SIX +2077 ; mapped ; 0037 # 1.1 SUPERSCRIPT SEVEN +2078 ; mapped ; 0038 # 1.1 SUPERSCRIPT EIGHT +2079 ; mapped ; 0039 # 1.1 SUPERSCRIPT NINE +207A ; disallowed_STD3_mapped ; 002B # 1.1 SUPERSCRIPT PLUS SIGN +207B ; mapped ; 2212 # 1.1 SUPERSCRIPT MINUS +207C ; disallowed_STD3_mapped ; 003D # 1.1 SUPERSCRIPT EQUALS SIGN +207D ; disallowed_STD3_mapped ; 0028 # 1.1 SUPERSCRIPT LEFT PARENTHESIS +207E ; disallowed_STD3_mapped ; 0029 # 1.1 SUPERSCRIPT RIGHT PARENTHESIS +207F ; mapped ; 006E # 1.1 SUPERSCRIPT LATIN SMALL LETTER N +2080 ; mapped ; 0030 # 1.1 SUBSCRIPT ZERO +2081 ; mapped ; 0031 # 1.1 SUBSCRIPT ONE +2082 ; mapped ; 0032 # 1.1 SUBSCRIPT TWO +2083 ; mapped ; 0033 # 1.1 SUBSCRIPT THREE +2084 ; mapped ; 0034 # 1.1 SUBSCRIPT FOUR +2085 ; mapped ; 0035 # 1.1 SUBSCRIPT FIVE +2086 ; mapped ; 0036 # 1.1 SUBSCRIPT SIX +2087 ; mapped ; 0037 # 1.1 SUBSCRIPT SEVEN +2088 ; mapped ; 0038 # 1.1 SUBSCRIPT EIGHT +2089 ; mapped ; 0039 # 1.1 SUBSCRIPT NINE +208A ; disallowed_STD3_mapped ; 002B # 1.1 SUBSCRIPT PLUS SIGN +208B ; mapped ; 2212 # 1.1 SUBSCRIPT MINUS +208C ; disallowed_STD3_mapped ; 003D # 1.1 SUBSCRIPT EQUALS SIGN +208D ; disallowed_STD3_mapped ; 0028 # 1.1 SUBSCRIPT LEFT PARENTHESIS +208E ; disallowed_STD3_mapped ; 0029 # 1.1 SUBSCRIPT RIGHT PARENTHESIS +208F ; disallowed # NA +2090 ; mapped ; 0061 # 4.1 LATIN SUBSCRIPT SMALL LETTER A +2091 ; mapped ; 0065 # 4.1 LATIN SUBSCRIPT SMALL LETTER E +2092 ; mapped ; 006F # 4.1 LATIN SUBSCRIPT SMALL LETTER O +2093 ; mapped ; 0078 # 4.1 LATIN SUBSCRIPT SMALL LETTER X +2094 ; mapped ; 0259 # 4.1 LATIN SUBSCRIPT SMALL LETTER SCHWA +2095 ; mapped ; 0068 # 6.0 LATIN SUBSCRIPT SMALL LETTER H +2096 ; mapped ; 006B # 6.0 LATIN SUBSCRIPT SMALL LETTER K +2097 ; mapped ; 006C # 6.0 LATIN SUBSCRIPT SMALL LETTER L +2098 ; mapped ; 006D # 6.0 LATIN SUBSCRIPT SMALL LETTER M +2099 ; mapped ; 006E # 6.0 LATIN SUBSCRIPT SMALL LETTER N +209A ; mapped ; 0070 # 6.0 LATIN 
SUBSCRIPT SMALL LETTER P +209B ; mapped ; 0073 # 6.0 LATIN SUBSCRIPT SMALL LETTER S +209C ; mapped ; 0074 # 6.0 LATIN SUBSCRIPT SMALL LETTER T +209D..209F ; disallowed # NA .. +20A0..20A7 ; valid ; ; NV8 # 1.1 EURO-CURRENCY SIGN..PESETA SIGN +20A8 ; mapped ; 0072 0073 # 1.1 RUPEE SIGN +20A9..20AA ; valid ; ; NV8 # 1.1 WON SIGN..NEW SHEQEL SIGN +20AB ; valid ; ; NV8 # 2.0 DONG SIGN +20AC ; valid ; ; NV8 # 2.1 EURO SIGN +20AD..20AF ; valid ; ; NV8 # 3.0 KIP SIGN..DRACHMA SIGN +20B0..20B1 ; valid ; ; NV8 # 3.2 GERMAN PENNY SIGN..PESO SIGN +20B2..20B5 ; valid ; ; NV8 # 4.1 GUARANI SIGN..CEDI SIGN +20B6..20B8 ; valid ; ; NV8 # 5.2 LIVRE TOURNOIS SIGN..TENGE SIGN +20B9 ; valid ; ; NV8 # 6.0 INDIAN RUPEE SIGN +20BA ; valid ; ; NV8 # 6.2 TURKISH LIRA SIGN +20BB..20BD ; valid ; ; NV8 # 7.0 NORDIC MARK SIGN..RUBLE SIGN +20BE ; valid ; ; NV8 # 8.0 LARI SIGN +20BF ; valid ; ; NV8 # 10.0 BITCOIN SIGN +20C0..20CF ; disallowed # NA .. +20D0..20E1 ; valid ; ; NV8 # 1.1 COMBINING LEFT HARPOON ABOVE..COMBINING LEFT RIGHT ARROW ABOVE +20E2..20E3 ; valid ; ; NV8 # 3.0 COMBINING ENCLOSING SCREEN..COMBINING ENCLOSING KEYCAP +20E4..20EA ; valid ; ; NV8 # 3.2 COMBINING ENCLOSING UPWARD POINTING TRIANGLE..COMBINING LEFTWARDS ARROW OVERLAY +20EB ; valid ; ; NV8 # 4.1 COMBINING LONG DOUBLE SOLIDUS OVERLAY +20EC..20EF ; valid ; ; NV8 # 5.0 COMBINING RIGHTWARDS HARPOON WITH BARB DOWNWARDS..COMBINING RIGHT ARROW BELOW +20F0 ; valid ; ; NV8 # 5.1 COMBINING ASTERISK ABOVE +20F1..20FF ; disallowed # NA .. +2100 ; disallowed_STD3_mapped ; 0061 002F 0063 #1.1 ACCOUNT OF +2101 ; disallowed_STD3_mapped ; 0061 002F 0073 #1.1 ADDRESSED TO THE SUBJECT +2102 ; mapped ; 0063 # 1.1 DOUBLE-STRUCK CAPITAL C +2103 ; mapped ; 00B0 0063 # 1.1 DEGREE CELSIUS +2104 ; valid ; ; NV8 # 1.1 CENTRE LINE SYMBOL +2105 ; disallowed_STD3_mapped ; 0063 002F 006F #1.1 CARE OF +2106 ; disallowed_STD3_mapped ; 0063 002F 0075 #1.1 CADA UNA +2107 ; mapped ; 025B # 1.1 EULER CONSTANT +2108 ; valid ; ; NV8 # 1.1 SCRUPLE +2109 ; mapped ; 00B0 0066 # 1.1 DEGREE FAHRENHEIT +210A ; mapped ; 0067 # 1.1 SCRIPT SMALL G +210B..210E ; mapped ; 0068 # 1.1 SCRIPT CAPITAL H..PLANCK CONSTANT +210F ; mapped ; 0127 # 1.1 PLANCK CONSTANT OVER TWO PI +2110..2111 ; mapped ; 0069 # 1.1 SCRIPT CAPITAL I..BLACK-LETTER CAPITAL I +2112..2113 ; mapped ; 006C # 1.1 SCRIPT CAPITAL L..SCRIPT SMALL L +2114 ; valid ; ; NV8 # 1.1 L B BAR SYMBOL +2115 ; mapped ; 006E # 1.1 DOUBLE-STRUCK CAPITAL N +2116 ; mapped ; 006E 006F # 1.1 NUMERO SIGN +2117..2118 ; valid ; ; NV8 # 1.1 SOUND RECORDING COPYRIGHT..SCRIPT CAPITAL P +2119 ; mapped ; 0070 # 1.1 DOUBLE-STRUCK CAPITAL P +211A ; mapped ; 0071 # 1.1 DOUBLE-STRUCK CAPITAL Q +211B..211D ; mapped ; 0072 # 1.1 SCRIPT CAPITAL R..DOUBLE-STRUCK CAPITAL R +211E..211F ; valid ; ; NV8 # 1.1 PRESCRIPTION TAKE..RESPONSE +2120 ; mapped ; 0073 006D # 1.1 SERVICE MARK +2121 ; mapped ; 0074 0065 006C #1.1 TELEPHONE SIGN +2122 ; mapped ; 0074 006D # 1.1 TRADE MARK SIGN +2123 ; valid ; ; NV8 # 1.1 VERSICLE +2124 ; mapped ; 007A # 1.1 DOUBLE-STRUCK CAPITAL Z +2125 ; valid ; ; NV8 # 1.1 OUNCE SIGN +2126 ; mapped ; 03C9 # 1.1 OHM SIGN +2127 ; valid ; ; NV8 # 1.1 INVERTED OHM SIGN +2128 ; mapped ; 007A # 1.1 BLACK-LETTER CAPITAL Z +2129 ; valid ; ; NV8 # 1.1 TURNED GREEK SMALL LETTER IOTA +212A ; mapped ; 006B # 1.1 KELVIN SIGN +212B ; mapped ; 00E5 # 1.1 ANGSTROM SIGN +212C ; mapped ; 0062 # 1.1 SCRIPT CAPITAL B +212D ; mapped ; 0063 # 1.1 BLACK-LETTER CAPITAL C +212E ; valid ; ; NV8 # 1.1 ESTIMATED SYMBOL +212F..2130 ; mapped ; 0065 # 1.1 SCRIPT SMALL 
E..SCRIPT CAPITAL E +2131 ; mapped ; 0066 # 1.1 SCRIPT CAPITAL F +2132 ; disallowed # 1.1 TURNED CAPITAL F +2133 ; mapped ; 006D # 1.1 SCRIPT CAPITAL M +2134 ; mapped ; 006F # 1.1 SCRIPT SMALL O +2135 ; mapped ; 05D0 # 1.1 ALEF SYMBOL +2136 ; mapped ; 05D1 # 1.1 BET SYMBOL +2137 ; mapped ; 05D2 # 1.1 GIMEL SYMBOL +2138 ; mapped ; 05D3 # 1.1 DALET SYMBOL +2139 ; mapped ; 0069 # 3.0 INFORMATION SOURCE +213A ; valid ; ; NV8 # 3.0 ROTATED CAPITAL Q +213B ; mapped ; 0066 0061 0078 #4.0 FACSIMILE SIGN +213C ; mapped ; 03C0 # 4.1 DOUBLE-STRUCK SMALL PI +213D..213E ; mapped ; 03B3 # 3.2 DOUBLE-STRUCK SMALL GAMMA..DOUBLE-STRUCK CAPITAL GAMMA +213F ; mapped ; 03C0 # 3.2 DOUBLE-STRUCK CAPITAL PI +2140 ; mapped ; 2211 # 3.2 DOUBLE-STRUCK N-ARY SUMMATION +2141..2144 ; valid ; ; NV8 # 3.2 TURNED SANS-SERIF CAPITAL G..TURNED SANS-SERIF CAPITAL Y +2145..2146 ; mapped ; 0064 # 3.2 DOUBLE-STRUCK ITALIC CAPITAL D..DOUBLE-STRUCK ITALIC SMALL D +2147 ; mapped ; 0065 # 3.2 DOUBLE-STRUCK ITALIC SMALL E +2148 ; mapped ; 0069 # 3.2 DOUBLE-STRUCK ITALIC SMALL I +2149 ; mapped ; 006A # 3.2 DOUBLE-STRUCK ITALIC SMALL J +214A..214B ; valid ; ; NV8 # 3.2 PROPERTY LINE..TURNED AMPERSAND +214C ; valid ; ; NV8 # 4.1 PER SIGN +214D ; valid ; ; NV8 # 5.0 AKTIESELSKAB +214E ; valid # 5.0 TURNED SMALL F +214F ; valid ; ; NV8 # 5.1 SYMBOL FOR SAMARITAN SOURCE +2150 ; mapped ; 0031 2044 0037 #5.2 VULGAR FRACTION ONE SEVENTH +2151 ; mapped ; 0031 2044 0039 #5.2 VULGAR FRACTION ONE NINTH +2152 ; mapped ; 0031 2044 0031 0030 #5.2 VULGAR FRACTION ONE TENTH +2153 ; mapped ; 0031 2044 0033 #1.1 VULGAR FRACTION ONE THIRD +2154 ; mapped ; 0032 2044 0033 #1.1 VULGAR FRACTION TWO THIRDS +2155 ; mapped ; 0031 2044 0035 #1.1 VULGAR FRACTION ONE FIFTH +2156 ; mapped ; 0032 2044 0035 #1.1 VULGAR FRACTION TWO FIFTHS +2157 ; mapped ; 0033 2044 0035 #1.1 VULGAR FRACTION THREE FIFTHS +2158 ; mapped ; 0034 2044 0035 #1.1 VULGAR FRACTION FOUR FIFTHS +2159 ; mapped ; 0031 2044 0036 #1.1 VULGAR FRACTION ONE SIXTH +215A ; mapped ; 0035 2044 0036 #1.1 VULGAR FRACTION FIVE SIXTHS +215B ; mapped ; 0031 2044 0038 #1.1 VULGAR FRACTION ONE EIGHTH +215C ; mapped ; 0033 2044 0038 #1.1 VULGAR FRACTION THREE EIGHTHS +215D ; mapped ; 0035 2044 0038 #1.1 VULGAR FRACTION FIVE EIGHTHS +215E ; mapped ; 0037 2044 0038 #1.1 VULGAR FRACTION SEVEN EIGHTHS +215F ; mapped ; 0031 2044 # 1.1 FRACTION NUMERATOR ONE +2160 ; mapped ; 0069 # 1.1 ROMAN NUMERAL ONE +2161 ; mapped ; 0069 0069 # 1.1 ROMAN NUMERAL TWO +2162 ; mapped ; 0069 0069 0069 #1.1 ROMAN NUMERAL THREE +2163 ; mapped ; 0069 0076 # 1.1 ROMAN NUMERAL FOUR +2164 ; mapped ; 0076 # 1.1 ROMAN NUMERAL FIVE +2165 ; mapped ; 0076 0069 # 1.1 ROMAN NUMERAL SIX +2166 ; mapped ; 0076 0069 0069 #1.1 ROMAN NUMERAL SEVEN +2167 ; mapped ; 0076 0069 0069 0069 #1.1 ROMAN NUMERAL EIGHT +2168 ; mapped ; 0069 0078 # 1.1 ROMAN NUMERAL NINE +2169 ; mapped ; 0078 # 1.1 ROMAN NUMERAL TEN +216A ; mapped ; 0078 0069 # 1.1 ROMAN NUMERAL ELEVEN +216B ; mapped ; 0078 0069 0069 #1.1 ROMAN NUMERAL TWELVE +216C ; mapped ; 006C # 1.1 ROMAN NUMERAL FIFTY +216D ; mapped ; 0063 # 1.1 ROMAN NUMERAL ONE HUNDRED +216E ; mapped ; 0064 # 1.1 ROMAN NUMERAL FIVE HUNDRED +216F ; mapped ; 006D # 1.1 ROMAN NUMERAL ONE THOUSAND +2170 ; mapped ; 0069 # 1.1 SMALL ROMAN NUMERAL ONE +2171 ; mapped ; 0069 0069 # 1.1 SMALL ROMAN NUMERAL TWO +2172 ; mapped ; 0069 0069 0069 #1.1 SMALL ROMAN NUMERAL THREE +2173 ; mapped ; 0069 0076 # 1.1 SMALL ROMAN NUMERAL FOUR +2174 ; mapped ; 0076 # 1.1 SMALL ROMAN NUMERAL FIVE +2175 ; mapped ; 0076 0069 # 1.1 SMALL ROMAN NUMERAL 
SIX +2176 ; mapped ; 0076 0069 0069 #1.1 SMALL ROMAN NUMERAL SEVEN +2177 ; mapped ; 0076 0069 0069 0069 #1.1 SMALL ROMAN NUMERAL EIGHT +2178 ; mapped ; 0069 0078 # 1.1 SMALL ROMAN NUMERAL NINE +2179 ; mapped ; 0078 # 1.1 SMALL ROMAN NUMERAL TEN +217A ; mapped ; 0078 0069 # 1.1 SMALL ROMAN NUMERAL ELEVEN +217B ; mapped ; 0078 0069 0069 #1.1 SMALL ROMAN NUMERAL TWELVE +217C ; mapped ; 006C # 1.1 SMALL ROMAN NUMERAL FIFTY +217D ; mapped ; 0063 # 1.1 SMALL ROMAN NUMERAL ONE HUNDRED +217E ; mapped ; 0064 # 1.1 SMALL ROMAN NUMERAL FIVE HUNDRED +217F ; mapped ; 006D # 1.1 SMALL ROMAN NUMERAL ONE THOUSAND +2180..2182 ; valid ; ; NV8 # 1.1 ROMAN NUMERAL ONE THOUSAND C D..ROMAN NUMERAL TEN THOUSAND +2183 ; disallowed # 3.0 ROMAN NUMERAL REVERSED ONE HUNDRED +2184 ; valid # 5.0 LATIN SMALL LETTER REVERSED C +2185..2188 ; valid ; ; NV8 # 5.1 ROMAN NUMERAL SIX LATE FORM..ROMAN NUMERAL ONE HUNDRED THOUSAND +2189 ; mapped ; 0030 2044 0033 #5.2 VULGAR FRACTION ZERO THIRDS +218A..218B ; valid ; ; NV8 # 8.0 TURNED DIGIT TWO..TURNED DIGIT THREE +218C..218F ; disallowed # NA .. +2190..21EA ; valid ; ; NV8 # 1.1 LEFTWARDS ARROW..UPWARDS WHITE ARROW FROM BAR +21EB..21F3 ; valid ; ; NV8 # 3.0 UPWARDS WHITE ARROW ON PEDESTAL..UP DOWN WHITE ARROW +21F4..21FF ; valid ; ; NV8 # 3.2 RIGHT ARROW WITH SMALL CIRCLE..LEFT RIGHT OPEN-HEADED ARROW +2200..222B ; valid ; ; NV8 # 1.1 FOR ALL..INTEGRAL +222C ; mapped ; 222B 222B # 1.1 DOUBLE INTEGRAL +222D ; mapped ; 222B 222B 222B #1.1 TRIPLE INTEGRAL +222E ; valid ; ; NV8 # 1.1 CONTOUR INTEGRAL +222F ; mapped ; 222E 222E # 1.1 SURFACE INTEGRAL +2230 ; mapped ; 222E 222E 222E #1.1 VOLUME INTEGRAL +2231..225F ; valid ; ; NV8 # 1.1 CLOCKWISE INTEGRAL..QUESTIONED EQUAL TO +2260 ; disallowed_STD3_valid # 1.1 NOT EQUAL TO +2261..226D ; valid ; ; NV8 # 1.1 IDENTICAL TO..NOT EQUIVALENT TO +226E..226F ; disallowed_STD3_valid # 1.1 NOT LESS-THAN..NOT GREATER-THAN +2270..22F1 ; valid ; ; NV8 # 1.1 NEITHER LESS-THAN NOR EQUAL TO..DOWN RIGHT DIAGONAL ELLIPSIS +22F2..22FF ; valid ; ; NV8 # 3.2 ELEMENT OF WITH LONG HORIZONTAL STROKE..Z NOTATION BAG MEMBERSHIP +2300 ; valid ; ; NV8 # 1.1 DIAMETER SIGN +2301 ; valid ; ; NV8 # 3.0 ELECTRIC ARROW +2302..2328 ; valid ; ; NV8 # 1.1 HOUSE..KEYBOARD +2329 ; mapped ; 3008 # 1.1 LEFT-POINTING ANGLE BRACKET +232A ; mapped ; 3009 # 1.1 RIGHT-POINTING ANGLE BRACKET +232B..237A ; valid ; ; NV8 # 1.1 ERASE TO THE LEFT..APL FUNCTIONAL SYMBOL ALPHA +237B ; valid ; ; NV8 # 3.0 NOT CHECK MARK +237C ; valid ; ; NV8 # 3.2 RIGHT ANGLE WITH DOWNWARDS ZIGZAG ARROW +237D..239A ; valid ; ; NV8 # 3.0 SHOULDERED OPEN BOX..CLEAR SCREEN SYMBOL +239B..23CE ; valid ; ; NV8 # 3.2 LEFT PARENTHESIS UPPER HOOK..RETURN SYMBOL +23CF..23D0 ; valid ; ; NV8 # 4.0 EJECT SYMBOL..VERTICAL LINE EXTENSION +23D1..23DB ; valid ; ; NV8 # 4.1 METRICAL BREVE..FUSE +23DC..23E7 ; valid ; ; NV8 # 5.0 TOP PARENTHESIS..ELECTRICAL INTERSECTION +23E8 ; valid ; ; NV8 # 5.2 DECIMAL EXPONENT SYMBOL +23E9..23F3 ; valid ; ; NV8 # 6.0 BLACK RIGHT-POINTING DOUBLE TRIANGLE..HOURGLASS WITH FLOWING SAND +23F4..23FA ; valid ; ; NV8 # 7.0 BLACK MEDIUM LEFT-POINTING TRIANGLE..BLACK CIRCLE FOR RECORD +23FB..23FE ; valid ; ; NV8 # 9.0 POWER SYMBOL..POWER SLEEP SYMBOL +23FF ; valid ; ; NV8 # 10.0 OBSERVER EYE SYMBOL +2400..2424 ; valid ; ; NV8 # 1.1 SYMBOL FOR NULL..SYMBOL FOR NEWLINE +2425..2426 ; valid ; ; NV8 # 3.0 SYMBOL FOR DELETE FORM TWO..SYMBOL FOR SUBSTITUTE FORM TWO +2427..243F ; disallowed # NA .. +2440..244A ; valid ; ; NV8 # 1.1 OCR HOOK..OCR DOUBLE BACKSLASH +244B..245F ; disallowed # NA .. 
+2460 ; mapped ; 0031 # 1.1 CIRCLED DIGIT ONE +2461 ; mapped ; 0032 # 1.1 CIRCLED DIGIT TWO +2462 ; mapped ; 0033 # 1.1 CIRCLED DIGIT THREE +2463 ; mapped ; 0034 # 1.1 CIRCLED DIGIT FOUR +2464 ; mapped ; 0035 # 1.1 CIRCLED DIGIT FIVE +2465 ; mapped ; 0036 # 1.1 CIRCLED DIGIT SIX +2466 ; mapped ; 0037 # 1.1 CIRCLED DIGIT SEVEN +2467 ; mapped ; 0038 # 1.1 CIRCLED DIGIT EIGHT +2468 ; mapped ; 0039 # 1.1 CIRCLED DIGIT NINE +2469 ; mapped ; 0031 0030 # 1.1 CIRCLED NUMBER TEN +246A ; mapped ; 0031 0031 # 1.1 CIRCLED NUMBER ELEVEN +246B ; mapped ; 0031 0032 # 1.1 CIRCLED NUMBER TWELVE +246C ; mapped ; 0031 0033 # 1.1 CIRCLED NUMBER THIRTEEN +246D ; mapped ; 0031 0034 # 1.1 CIRCLED NUMBER FOURTEEN +246E ; mapped ; 0031 0035 # 1.1 CIRCLED NUMBER FIFTEEN +246F ; mapped ; 0031 0036 # 1.1 CIRCLED NUMBER SIXTEEN +2470 ; mapped ; 0031 0037 # 1.1 CIRCLED NUMBER SEVENTEEN +2471 ; mapped ; 0031 0038 # 1.1 CIRCLED NUMBER EIGHTEEN +2472 ; mapped ; 0031 0039 # 1.1 CIRCLED NUMBER NINETEEN +2473 ; mapped ; 0032 0030 # 1.1 CIRCLED NUMBER TWENTY +2474 ; disallowed_STD3_mapped ; 0028 0031 0029 #1.1 PARENTHESIZED DIGIT ONE +2475 ; disallowed_STD3_mapped ; 0028 0032 0029 #1.1 PARENTHESIZED DIGIT TWO +2476 ; disallowed_STD3_mapped ; 0028 0033 0029 #1.1 PARENTHESIZED DIGIT THREE +2477 ; disallowed_STD3_mapped ; 0028 0034 0029 #1.1 PARENTHESIZED DIGIT FOUR +2478 ; disallowed_STD3_mapped ; 0028 0035 0029 #1.1 PARENTHESIZED DIGIT FIVE +2479 ; disallowed_STD3_mapped ; 0028 0036 0029 #1.1 PARENTHESIZED DIGIT SIX +247A ; disallowed_STD3_mapped ; 0028 0037 0029 #1.1 PARENTHESIZED DIGIT SEVEN +247B ; disallowed_STD3_mapped ; 0028 0038 0029 #1.1 PARENTHESIZED DIGIT EIGHT +247C ; disallowed_STD3_mapped ; 0028 0039 0029 #1.1 PARENTHESIZED DIGIT NINE +247D ; disallowed_STD3_mapped ; 0028 0031 0030 0029 #1.1 PARENTHESIZED NUMBER TEN +247E ; disallowed_STD3_mapped ; 0028 0031 0031 0029 #1.1 PARENTHESIZED NUMBER ELEVEN +247F ; disallowed_STD3_mapped ; 0028 0031 0032 0029 #1.1 PARENTHESIZED NUMBER TWELVE +2480 ; disallowed_STD3_mapped ; 0028 0031 0033 0029 #1.1 PARENTHESIZED NUMBER THIRTEEN +2481 ; disallowed_STD3_mapped ; 0028 0031 0034 0029 #1.1 PARENTHESIZED NUMBER FOURTEEN +2482 ; disallowed_STD3_mapped ; 0028 0031 0035 0029 #1.1 PARENTHESIZED NUMBER FIFTEEN +2483 ; disallowed_STD3_mapped ; 0028 0031 0036 0029 #1.1 PARENTHESIZED NUMBER SIXTEEN +2484 ; disallowed_STD3_mapped ; 0028 0031 0037 0029 #1.1 PARENTHESIZED NUMBER SEVENTEEN +2485 ; disallowed_STD3_mapped ; 0028 0031 0038 0029 #1.1 PARENTHESIZED NUMBER EIGHTEEN +2486 ; disallowed_STD3_mapped ; 0028 0031 0039 0029 #1.1 PARENTHESIZED NUMBER NINETEEN +2487 ; disallowed_STD3_mapped ; 0028 0032 0030 0029 #1.1 PARENTHESIZED NUMBER TWENTY +2488..249B ; disallowed # 1.1 DIGIT ONE FULL STOP..NUMBER TWENTY FULL STOP +249C ; disallowed_STD3_mapped ; 0028 0061 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER A +249D ; disallowed_STD3_mapped ; 0028 0062 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER B +249E ; disallowed_STD3_mapped ; 0028 0063 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER C +249F ; disallowed_STD3_mapped ; 0028 0064 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER D +24A0 ; disallowed_STD3_mapped ; 0028 0065 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER E +24A1 ; disallowed_STD3_mapped ; 0028 0066 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER F +24A2 ; disallowed_STD3_mapped ; 0028 0067 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER G +24A3 ; disallowed_STD3_mapped ; 0028 0068 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER H +24A4 ; disallowed_STD3_mapped ; 0028 0069 0029 #1.1 PARENTHESIZED LATIN SMALL 
LETTER I +24A5 ; disallowed_STD3_mapped ; 0028 006A 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER J +24A6 ; disallowed_STD3_mapped ; 0028 006B 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER K +24A7 ; disallowed_STD3_mapped ; 0028 006C 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER L +24A8 ; disallowed_STD3_mapped ; 0028 006D 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER M +24A9 ; disallowed_STD3_mapped ; 0028 006E 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER N +24AA ; disallowed_STD3_mapped ; 0028 006F 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER O +24AB ; disallowed_STD3_mapped ; 0028 0070 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER P +24AC ; disallowed_STD3_mapped ; 0028 0071 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER Q +24AD ; disallowed_STD3_mapped ; 0028 0072 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER R +24AE ; disallowed_STD3_mapped ; 0028 0073 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER S +24AF ; disallowed_STD3_mapped ; 0028 0074 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER T +24B0 ; disallowed_STD3_mapped ; 0028 0075 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER U +24B1 ; disallowed_STD3_mapped ; 0028 0076 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER V +24B2 ; disallowed_STD3_mapped ; 0028 0077 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER W +24B3 ; disallowed_STD3_mapped ; 0028 0078 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER X +24B4 ; disallowed_STD3_mapped ; 0028 0079 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER Y +24B5 ; disallowed_STD3_mapped ; 0028 007A 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER Z +24B6 ; mapped ; 0061 # 1.1 CIRCLED LATIN CAPITAL LETTER A +24B7 ; mapped ; 0062 # 1.1 CIRCLED LATIN CAPITAL LETTER B +24B8 ; mapped ; 0063 # 1.1 CIRCLED LATIN CAPITAL LETTER C +24B9 ; mapped ; 0064 # 1.1 CIRCLED LATIN CAPITAL LETTER D +24BA ; mapped ; 0065 # 1.1 CIRCLED LATIN CAPITAL LETTER E +24BB ; mapped ; 0066 # 1.1 CIRCLED LATIN CAPITAL LETTER F +24BC ; mapped ; 0067 # 1.1 CIRCLED LATIN CAPITAL LETTER G +24BD ; mapped ; 0068 # 1.1 CIRCLED LATIN CAPITAL LETTER H +24BE ; mapped ; 0069 # 1.1 CIRCLED LATIN CAPITAL LETTER I +24BF ; mapped ; 006A # 1.1 CIRCLED LATIN CAPITAL LETTER J +24C0 ; mapped ; 006B # 1.1 CIRCLED LATIN CAPITAL LETTER K +24C1 ; mapped ; 006C # 1.1 CIRCLED LATIN CAPITAL LETTER L +24C2 ; mapped ; 006D # 1.1 CIRCLED LATIN CAPITAL LETTER M +24C3 ; mapped ; 006E # 1.1 CIRCLED LATIN CAPITAL LETTER N +24C4 ; mapped ; 006F # 1.1 CIRCLED LATIN CAPITAL LETTER O +24C5 ; mapped ; 0070 # 1.1 CIRCLED LATIN CAPITAL LETTER P +24C6 ; mapped ; 0071 # 1.1 CIRCLED LATIN CAPITAL LETTER Q +24C7 ; mapped ; 0072 # 1.1 CIRCLED LATIN CAPITAL LETTER R +24C8 ; mapped ; 0073 # 1.1 CIRCLED LATIN CAPITAL LETTER S +24C9 ; mapped ; 0074 # 1.1 CIRCLED LATIN CAPITAL LETTER T +24CA ; mapped ; 0075 # 1.1 CIRCLED LATIN CAPITAL LETTER U +24CB ; mapped ; 0076 # 1.1 CIRCLED LATIN CAPITAL LETTER V +24CC ; mapped ; 0077 # 1.1 CIRCLED LATIN CAPITAL LETTER W +24CD ; mapped ; 0078 # 1.1 CIRCLED LATIN CAPITAL LETTER X +24CE ; mapped ; 0079 # 1.1 CIRCLED LATIN CAPITAL LETTER Y +24CF ; mapped ; 007A # 1.1 CIRCLED LATIN CAPITAL LETTER Z +24D0 ; mapped ; 0061 # 1.1 CIRCLED LATIN SMALL LETTER A +24D1 ; mapped ; 0062 # 1.1 CIRCLED LATIN SMALL LETTER B +24D2 ; mapped ; 0063 # 1.1 CIRCLED LATIN SMALL LETTER C +24D3 ; mapped ; 0064 # 1.1 CIRCLED LATIN SMALL LETTER D +24D4 ; mapped ; 0065 # 1.1 CIRCLED LATIN SMALL LETTER E +24D5 ; mapped ; 0066 # 1.1 CIRCLED LATIN SMALL LETTER F +24D6 ; mapped ; 0067 # 1.1 CIRCLED LATIN SMALL LETTER G +24D7 ; mapped ; 0068 # 1.1 CIRCLED LATIN SMALL LETTER H +24D8 ; mapped ; 0069 # 1.1 CIRCLED LATIN SMALL LETTER I 
+24D9 ; mapped ; 006A # 1.1 CIRCLED LATIN SMALL LETTER J +24DA ; mapped ; 006B # 1.1 CIRCLED LATIN SMALL LETTER K +24DB ; mapped ; 006C # 1.1 CIRCLED LATIN SMALL LETTER L +24DC ; mapped ; 006D # 1.1 CIRCLED LATIN SMALL LETTER M +24DD ; mapped ; 006E # 1.1 CIRCLED LATIN SMALL LETTER N +24DE ; mapped ; 006F # 1.1 CIRCLED LATIN SMALL LETTER O +24DF ; mapped ; 0070 # 1.1 CIRCLED LATIN SMALL LETTER P +24E0 ; mapped ; 0071 # 1.1 CIRCLED LATIN SMALL LETTER Q +24E1 ; mapped ; 0072 # 1.1 CIRCLED LATIN SMALL LETTER R +24E2 ; mapped ; 0073 # 1.1 CIRCLED LATIN SMALL LETTER S +24E3 ; mapped ; 0074 # 1.1 CIRCLED LATIN SMALL LETTER T +24E4 ; mapped ; 0075 # 1.1 CIRCLED LATIN SMALL LETTER U +24E5 ; mapped ; 0076 # 1.1 CIRCLED LATIN SMALL LETTER V +24E6 ; mapped ; 0077 # 1.1 CIRCLED LATIN SMALL LETTER W +24E7 ; mapped ; 0078 # 1.1 CIRCLED LATIN SMALL LETTER X +24E8 ; mapped ; 0079 # 1.1 CIRCLED LATIN SMALL LETTER Y +24E9 ; mapped ; 007A # 1.1 CIRCLED LATIN SMALL LETTER Z +24EA ; mapped ; 0030 # 1.1 CIRCLED DIGIT ZERO +24EB..24FE ; valid ; ; NV8 # 3.2 NEGATIVE CIRCLED NUMBER ELEVEN..DOUBLE CIRCLED NUMBER TEN +24FF ; valid ; ; NV8 # 4.0 NEGATIVE CIRCLED DIGIT ZERO +2500..2595 ; valid ; ; NV8 # 1.1 BOX DRAWINGS LIGHT HORIZONTAL..RIGHT ONE EIGHTH BLOCK +2596..259F ; valid ; ; NV8 # 3.2 QUADRANT LOWER LEFT..QUADRANT UPPER RIGHT AND LOWER LEFT AND LOWER RIGHT +25A0..25EF ; valid ; ; NV8 # 1.1 BLACK SQUARE..LARGE CIRCLE +25F0..25F7 ; valid ; ; NV8 # 3.0 WHITE SQUARE WITH UPPER LEFT QUADRANT..WHITE CIRCLE WITH UPPER RIGHT QUADRANT +25F8..25FF ; valid ; ; NV8 # 3.2 UPPER LEFT TRIANGLE..LOWER RIGHT TRIANGLE +2600..2613 ; valid ; ; NV8 # 1.1 BLACK SUN WITH RAYS..SALTIRE +2614..2615 ; valid ; ; NV8 # 4.0 UMBRELLA WITH RAIN DROPS..HOT BEVERAGE +2616..2617 ; valid ; ; NV8 # 3.2 WHITE SHOGI PIECE..BLACK SHOGI PIECE +2618 ; valid ; ; NV8 # 4.1 SHAMROCK +2619 ; valid ; ; NV8 # 3.0 REVERSED ROTATED FLORAL HEART BULLET +261A..266F ; valid ; ; NV8 # 1.1 BLACK LEFT POINTING INDEX..MUSIC SHARP SIGN +2670..2671 ; valid ; ; NV8 # 3.0 WEST SYRIAC CROSS..EAST SYRIAC CROSS +2672..267D ; valid ; ; NV8 # 3.2 UNIVERSAL RECYCLING SYMBOL..PARTIALLY-RECYCLED PAPER SYMBOL +267E..267F ; valid ; ; NV8 # 4.1 PERMANENT PAPER SIGN..WHEELCHAIR SYMBOL +2680..2689 ; valid ; ; NV8 # 3.2 DIE FACE-1..BLACK CIRCLE WITH TWO WHITE DOTS +268A..2691 ; valid ; ; NV8 # 4.0 MONOGRAM FOR YANG..BLACK FLAG +2692..269C ; valid ; ; NV8 # 4.1 HAMMER AND PICK..FLEUR-DE-LIS +269D ; valid ; ; NV8 # 5.1 OUTLINED WHITE STAR +269E..269F ; valid ; ; NV8 # 5.2 THREE LINES CONVERGING RIGHT..THREE LINES CONVERGING LEFT +26A0..26A1 ; valid ; ; NV8 # 4.0 WARNING SIGN..HIGH VOLTAGE SIGN +26A2..26B1 ; valid ; ; NV8 # 4.1 DOUBLED FEMALE SIGN..FUNERAL URN +26B2 ; valid ; ; NV8 # 5.0 NEUTER +26B3..26BC ; valid ; ; NV8 # 5.1 CERES..SESQUIQUADRATE +26BD..26BF ; valid ; ; NV8 # 5.2 SOCCER BALL..SQUARED KEY +26C0..26C3 ; valid ; ; NV8 # 5.1 WHITE DRAUGHTS MAN..BLACK DRAUGHTS KING +26C4..26CD ; valid ; ; NV8 # 5.2 SNOWMAN WITHOUT SNOW..DISABLED CAR +26CE ; valid ; ; NV8 # 6.0 OPHIUCHUS +26CF..26E1 ; valid ; ; NV8 # 5.2 PICK..RESTRICTED LEFT ENTRY-2 +26E2 ; valid ; ; NV8 # 6.0 ASTRONOMICAL SYMBOL FOR URANUS +26E3 ; valid ; ; NV8 # 5.2 HEAVY CIRCLE WITH STROKE AND TWO DOTS ABOVE +26E4..26E7 ; valid ; ; NV8 # 6.0 PENTAGRAM..INVERTED PENTAGRAM +26E8..26FF ; valid ; ; NV8 # 5.2 BLACK CROSS ON SHIELD..WHITE FLAG WITH HORIZONTAL MIDDLE BLACK STRIPE +2700 ; valid ; ; NV8 # 7.0 BLACK SAFETY SCISSORS +2701..2704 ; valid ; ; NV8 # 1.1 UPPER BLADE SCISSORS..WHITE SCISSORS +2705 ; valid ; ; NV8 # 
6.0 WHITE HEAVY CHECK MARK +2706..2709 ; valid ; ; NV8 # 1.1 TELEPHONE LOCATION SIGN..ENVELOPE +270A..270B ; valid ; ; NV8 # 6.0 RAISED FIST..RAISED HAND +270C..2727 ; valid ; ; NV8 # 1.1 VICTORY HAND..WHITE FOUR POINTED STAR +2728 ; valid ; ; NV8 # 6.0 SPARKLES +2729..274B ; valid ; ; NV8 # 1.1 STRESS OUTLINED WHITE STAR..HEAVY EIGHT TEARDROP-SPOKED PROPELLER ASTERISK +274C ; valid ; ; NV8 # 6.0 CROSS MARK +274D ; valid ; ; NV8 # 1.1 SHADOWED WHITE CIRCLE +274E ; valid ; ; NV8 # 6.0 NEGATIVE SQUARED CROSS MARK +274F..2752 ; valid ; ; NV8 # 1.1 LOWER RIGHT DROP-SHADOWED WHITE SQUARE..UPPER RIGHT SHADOWED WHITE SQUARE +2753..2755 ; valid ; ; NV8 # 6.0 BLACK QUESTION MARK ORNAMENT..WHITE EXCLAMATION MARK ORNAMENT +2756 ; valid ; ; NV8 # 1.1 BLACK DIAMOND MINUS WHITE X +2757 ; valid ; ; NV8 # 5.2 HEAVY EXCLAMATION MARK SYMBOL +2758..275E ; valid ; ; NV8 # 1.1 LIGHT VERTICAL BAR..HEAVY DOUBLE COMMA QUOTATION MARK ORNAMENT +275F..2760 ; valid ; ; NV8 # 6.0 HEAVY LOW SINGLE COMMA QUOTATION MARK ORNAMENT..HEAVY LOW DOUBLE COMMA QUOTATION MARK ORNAMENT +2761..2767 ; valid ; ; NV8 # 1.1 CURVED STEM PARAGRAPH SIGN ORNAMENT..ROTATED FLORAL HEART BULLET +2768..2775 ; valid ; ; NV8 # 3.2 MEDIUM LEFT PARENTHESIS ORNAMENT..MEDIUM RIGHT CURLY BRACKET ORNAMENT +2776..2794 ; valid ; ; NV8 # 1.1 DINGBAT NEGATIVE CIRCLED DIGIT ONE..HEAVY WIDE-HEADED RIGHTWARDS ARROW +2795..2797 ; valid ; ; NV8 # 6.0 HEAVY PLUS SIGN..HEAVY DIVISION SIGN +2798..27AF ; valid ; ; NV8 # 1.1 HEAVY SOUTH EAST ARROW..NOTCHED LOWER RIGHT-SHADOWED WHITE RIGHTWARDS ARROW +27B0 ; valid ; ; NV8 # 6.0 CURLY LOOP +27B1..27BE ; valid ; ; NV8 # 1.1 NOTCHED UPPER RIGHT-SHADOWED WHITE RIGHTWARDS ARROW..OPEN-OUTLINED RIGHTWARDS ARROW +27BF ; valid ; ; NV8 # 6.0 DOUBLE CURLY LOOP +27C0..27C6 ; valid ; ; NV8 # 4.1 THREE DIMENSIONAL ANGLE..RIGHT S-SHAPED BAG DELIMITER +27C7..27CA ; valid ; ; NV8 # 5.0 OR WITH DOT INSIDE..VERTICAL BAR WITH HORIZONTAL STROKE +27CB ; valid ; ; NV8 # 6.1 MATHEMATICAL RISING DIAGONAL +27CC ; valid ; ; NV8 # 5.1 LONG DIVISION +27CD ; valid ; ; NV8 # 6.1 MATHEMATICAL FALLING DIAGONAL +27CE..27CF ; valid ; ; NV8 # 6.0 SQUARED LOGICAL AND..SQUARED LOGICAL OR +27D0..27EB ; valid ; ; NV8 # 3.2 WHITE DIAMOND WITH CENTRED DOT..MATHEMATICAL RIGHT DOUBLE ANGLE BRACKET +27EC..27EF ; valid ; ; NV8 # 5.1 MATHEMATICAL LEFT WHITE TORTOISE SHELL BRACKET..MATHEMATICAL RIGHT FLATTENED PARENTHESIS +27F0..27FF ; valid ; ; NV8 # 3.2 UPWARDS QUADRUPLE ARROW..LONG RIGHTWARDS SQUIGGLE ARROW +2800..28FF ; valid ; ; NV8 # 3.0 BRAILLE PATTERN BLANK..BRAILLE PATTERN DOTS-12345678 +2900..2A0B ; valid ; ; NV8 # 3.2 RIGHTWARDS TWO-HEADED ARROW WITH VERTICAL STROKE..SUMMATION WITH INTEGRAL +2A0C ; mapped ; 222B 222B 222B 222B #3.2 QUADRUPLE INTEGRAL OPERATOR +2A0D..2A73 ; valid ; ; NV8 # 3.2 FINITE PART INTEGRAL..EQUALS SIGN ABOVE TILDE OPERATOR +2A74 ; disallowed_STD3_mapped ; 003A 003A 003D #3.2 DOUBLE COLON EQUAL +2A75 ; disallowed_STD3_mapped ; 003D 003D # 3.2 TWO CONSECUTIVE EQUALS SIGNS +2A76 ; disallowed_STD3_mapped ; 003D 003D 003D #3.2 THREE CONSECUTIVE EQUALS SIGNS +2A77..2ADB ; valid ; ; NV8 # 3.2 EQUALS SIGN WITH TWO DOTS ABOVE AND TWO DOTS BELOW..TRANSVERSAL INTERSECTION +2ADC ; mapped ; 2ADD 0338 # 3.2 FORKING +2ADD..2AFF ; valid ; ; NV8 # 3.2 NONFORKING..N-ARY WHITE VERTICAL BAR +2B00..2B0D ; valid ; ; NV8 # 4.0 NORTH EAST WHITE ARROW..UP DOWN BLACK ARROW +2B0E..2B13 ; valid ; ; NV8 # 4.1 RIGHTWARDS ARROW WITH TIP DOWNWARDS..SQUARE WITH BOTTOM HALF BLACK +2B14..2B1A ; valid ; ; NV8 # 5.0 SQUARE WITH UPPER RIGHT DIAGONAL HALF 
BLACK..DOTTED SQUARE +2B1B..2B1F ; valid ; ; NV8 # 5.1 BLACK LARGE SQUARE..BLACK PENTAGON +2B20..2B23 ; valid ; ; NV8 # 5.0 WHITE PENTAGON..HORIZONTAL BLACK HEXAGON +2B24..2B4C ; valid ; ; NV8 # 5.1 BLACK LARGE CIRCLE..RIGHTWARDS ARROW ABOVE REVERSE TILDE OPERATOR +2B4D..2B4F ; valid ; ; NV8 # 7.0 DOWNWARDS TRIANGLE-HEADED ZIGZAG ARROW..SHORT BACKSLANTED SOUTH ARROW +2B50..2B54 ; valid ; ; NV8 # 5.1 WHITE MEDIUM STAR..WHITE RIGHT-POINTING PENTAGON +2B55..2B59 ; valid ; ; NV8 # 5.2 HEAVY LARGE CIRCLE..HEAVY CIRCLED SALTIRE +2B5A..2B73 ; valid ; ; NV8 # 7.0 SLANTED NORTH ARROW WITH HOOKED HEAD..DOWNWARDS TRIANGLE-HEADED ARROW TO BAR +2B74..2B75 ; disallowed # NA .. +2B76..2B95 ; valid ; ; NV8 # 7.0 NORTH WEST TRIANGLE-HEADED ARROW TO BAR..RIGHTWARDS BLACK ARROW +2B96..2B97 ; disallowed # NA .. +2B98..2BB9 ; valid ; ; NV8 # 7.0 THREE-D TOP-LIGHTED LEFTWARDS EQUILATERAL ARROWHEAD..UP ARROWHEAD IN A RECTANGLE BOX +2BBA..2BBC ; disallowed # NA .. +2BBD..2BC8 ; valid ; ; NV8 # 7.0 BALLOT BOX WITH LIGHT X..BLACK MEDIUM RIGHT-POINTING TRIANGLE CENTRED +2BC9 ; disallowed # NA +2BCA..2BD1 ; valid ; ; NV8 # 7.0 TOP HALF BLACK CIRCLE..UNCERTAINTY SIGN +2BD2 ; valid ; ; NV8 # 10.0 GROUP MARK +2BD3..2BEB ; disallowed # NA .. +2BEC..2BEF ; valid ; ; NV8 # 8.0 LEFTWARDS TWO-HEADED ARROW WITH TRIANGLE ARROWHEADS..DOWNWARDS TWO-HEADED ARROW WITH TRIANGLE ARROWHEADS +2BF0..2BFF ; disallowed # NA .. +2C00 ; mapped ; 2C30 # 4.1 GLAGOLITIC CAPITAL LETTER AZU +2C01 ; mapped ; 2C31 # 4.1 GLAGOLITIC CAPITAL LETTER BUKY +2C02 ; mapped ; 2C32 # 4.1 GLAGOLITIC CAPITAL LETTER VEDE +2C03 ; mapped ; 2C33 # 4.1 GLAGOLITIC CAPITAL LETTER GLAGOLI +2C04 ; mapped ; 2C34 # 4.1 GLAGOLITIC CAPITAL LETTER DOBRO +2C05 ; mapped ; 2C35 # 4.1 GLAGOLITIC CAPITAL LETTER YESTU +2C06 ; mapped ; 2C36 # 4.1 GLAGOLITIC CAPITAL LETTER ZHIVETE +2C07 ; mapped ; 2C37 # 4.1 GLAGOLITIC CAPITAL LETTER DZELO +2C08 ; mapped ; 2C38 # 4.1 GLAGOLITIC CAPITAL LETTER ZEMLJA +2C09 ; mapped ; 2C39 # 4.1 GLAGOLITIC CAPITAL LETTER IZHE +2C0A ; mapped ; 2C3A # 4.1 GLAGOLITIC CAPITAL LETTER INITIAL IZHE +2C0B ; mapped ; 2C3B # 4.1 GLAGOLITIC CAPITAL LETTER I +2C0C ; mapped ; 2C3C # 4.1 GLAGOLITIC CAPITAL LETTER DJERVI +2C0D ; mapped ; 2C3D # 4.1 GLAGOLITIC CAPITAL LETTER KAKO +2C0E ; mapped ; 2C3E # 4.1 GLAGOLITIC CAPITAL LETTER LJUDIJE +2C0F ; mapped ; 2C3F # 4.1 GLAGOLITIC CAPITAL LETTER MYSLITE +2C10 ; mapped ; 2C40 # 4.1 GLAGOLITIC CAPITAL LETTER NASHI +2C11 ; mapped ; 2C41 # 4.1 GLAGOLITIC CAPITAL LETTER ONU +2C12 ; mapped ; 2C42 # 4.1 GLAGOLITIC CAPITAL LETTER POKOJI +2C13 ; mapped ; 2C43 # 4.1 GLAGOLITIC CAPITAL LETTER RITSI +2C14 ; mapped ; 2C44 # 4.1 GLAGOLITIC CAPITAL LETTER SLOVO +2C15 ; mapped ; 2C45 # 4.1 GLAGOLITIC CAPITAL LETTER TVRIDO +2C16 ; mapped ; 2C46 # 4.1 GLAGOLITIC CAPITAL LETTER UKU +2C17 ; mapped ; 2C47 # 4.1 GLAGOLITIC CAPITAL LETTER FRITU +2C18 ; mapped ; 2C48 # 4.1 GLAGOLITIC CAPITAL LETTER HERU +2C19 ; mapped ; 2C49 # 4.1 GLAGOLITIC CAPITAL LETTER OTU +2C1A ; mapped ; 2C4A # 4.1 GLAGOLITIC CAPITAL LETTER PE +2C1B ; mapped ; 2C4B # 4.1 GLAGOLITIC CAPITAL LETTER SHTA +2C1C ; mapped ; 2C4C # 4.1 GLAGOLITIC CAPITAL LETTER TSI +2C1D ; mapped ; 2C4D # 4.1 GLAGOLITIC CAPITAL LETTER CHRIVI +2C1E ; mapped ; 2C4E # 4.1 GLAGOLITIC CAPITAL LETTER SHA +2C1F ; mapped ; 2C4F # 4.1 GLAGOLITIC CAPITAL LETTER YERU +2C20 ; mapped ; 2C50 # 4.1 GLAGOLITIC CAPITAL LETTER YERI +2C21 ; mapped ; 2C51 # 4.1 GLAGOLITIC CAPITAL LETTER YATI +2C22 ; mapped ; 2C52 # 4.1 GLAGOLITIC CAPITAL LETTER SPIDERY HA +2C23 ; mapped ; 2C53 # 4.1 GLAGOLITIC CAPITAL LETTER 
YU +2C24 ; mapped ; 2C54 # 4.1 GLAGOLITIC CAPITAL LETTER SMALL YUS +2C25 ; mapped ; 2C55 # 4.1 GLAGOLITIC CAPITAL LETTER SMALL YUS WITH TAIL +2C26 ; mapped ; 2C56 # 4.1 GLAGOLITIC CAPITAL LETTER YO +2C27 ; mapped ; 2C57 # 4.1 GLAGOLITIC CAPITAL LETTER IOTATED SMALL YUS +2C28 ; mapped ; 2C58 # 4.1 GLAGOLITIC CAPITAL LETTER BIG YUS +2C29 ; mapped ; 2C59 # 4.1 GLAGOLITIC CAPITAL LETTER IOTATED BIG YUS +2C2A ; mapped ; 2C5A # 4.1 GLAGOLITIC CAPITAL LETTER FITA +2C2B ; mapped ; 2C5B # 4.1 GLAGOLITIC CAPITAL LETTER IZHITSA +2C2C ; mapped ; 2C5C # 4.1 GLAGOLITIC CAPITAL LETTER SHTAPIC +2C2D ; mapped ; 2C5D # 4.1 GLAGOLITIC CAPITAL LETTER TROKUTASTI A +2C2E ; mapped ; 2C5E # 4.1 GLAGOLITIC CAPITAL LETTER LATINATE MYSLITE +2C2F ; disallowed # NA +2C30..2C5E ; valid # 4.1 GLAGOLITIC SMALL LETTER AZU..GLAGOLITIC SMALL LETTER LATINATE MYSLITE +2C5F ; disallowed # NA +2C60 ; mapped ; 2C61 # 5.0 LATIN CAPITAL LETTER L WITH DOUBLE BAR +2C61 ; valid # 5.0 LATIN SMALL LETTER L WITH DOUBLE BAR +2C62 ; mapped ; 026B # 5.0 LATIN CAPITAL LETTER L WITH MIDDLE TILDE +2C63 ; mapped ; 1D7D # 5.0 LATIN CAPITAL LETTER P WITH STROKE +2C64 ; mapped ; 027D # 5.0 LATIN CAPITAL LETTER R WITH TAIL +2C65..2C66 ; valid # 5.0 LATIN SMALL LETTER A WITH STROKE..LATIN SMALL LETTER T WITH DIAGONAL STROKE +2C67 ; mapped ; 2C68 # 5.0 LATIN CAPITAL LETTER H WITH DESCENDER +2C68 ; valid # 5.0 LATIN SMALL LETTER H WITH DESCENDER +2C69 ; mapped ; 2C6A # 5.0 LATIN CAPITAL LETTER K WITH DESCENDER +2C6A ; valid # 5.0 LATIN SMALL LETTER K WITH DESCENDER +2C6B ; mapped ; 2C6C # 5.0 LATIN CAPITAL LETTER Z WITH DESCENDER +2C6C ; valid # 5.0 LATIN SMALL LETTER Z WITH DESCENDER +2C6D ; mapped ; 0251 # 5.1 LATIN CAPITAL LETTER ALPHA +2C6E ; mapped ; 0271 # 5.1 LATIN CAPITAL LETTER M WITH HOOK +2C6F ; mapped ; 0250 # 5.1 LATIN CAPITAL LETTER TURNED A +2C70 ; mapped ; 0252 # 5.2 LATIN CAPITAL LETTER TURNED ALPHA +2C71 ; valid # 5.1 LATIN SMALL LETTER V WITH RIGHT HOOK +2C72 ; mapped ; 2C73 # 5.1 LATIN CAPITAL LETTER W WITH HOOK +2C73 ; valid # 5.1 LATIN SMALL LETTER W WITH HOOK +2C74 ; valid # 5.0 LATIN SMALL LETTER V WITH CURL +2C75 ; mapped ; 2C76 # 5.0 LATIN CAPITAL LETTER HALF H +2C76..2C77 ; valid # 5.0 LATIN SMALL LETTER HALF H..LATIN SMALL LETTER TAILLESS PHI +2C78..2C7B ; valid # 5.1 LATIN SMALL LETTER E WITH NOTCH..LATIN LETTER SMALL CAPITAL TURNED E +2C7C ; mapped ; 006A # 5.1 LATIN SUBSCRIPT SMALL LETTER J +2C7D ; mapped ; 0076 # 5.1 MODIFIER LETTER CAPITAL V +2C7E ; mapped ; 023F # 5.2 LATIN CAPITAL LETTER S WITH SWASH TAIL +2C7F ; mapped ; 0240 # 5.2 LATIN CAPITAL LETTER Z WITH SWASH TAIL +2C80 ; mapped ; 2C81 # 4.1 COPTIC CAPITAL LETTER ALFA +2C81 ; valid # 4.1 COPTIC SMALL LETTER ALFA +2C82 ; mapped ; 2C83 # 4.1 COPTIC CAPITAL LETTER VIDA +2C83 ; valid # 4.1 COPTIC SMALL LETTER VIDA +2C84 ; mapped ; 2C85 # 4.1 COPTIC CAPITAL LETTER GAMMA +2C85 ; valid # 4.1 COPTIC SMALL LETTER GAMMA +2C86 ; mapped ; 2C87 # 4.1 COPTIC CAPITAL LETTER DALDA +2C87 ; valid # 4.1 COPTIC SMALL LETTER DALDA +2C88 ; mapped ; 2C89 # 4.1 COPTIC CAPITAL LETTER EIE +2C89 ; valid # 4.1 COPTIC SMALL LETTER EIE +2C8A ; mapped ; 2C8B # 4.1 COPTIC CAPITAL LETTER SOU +2C8B ; valid # 4.1 COPTIC SMALL LETTER SOU +2C8C ; mapped ; 2C8D # 4.1 COPTIC CAPITAL LETTER ZATA +2C8D ; valid # 4.1 COPTIC SMALL LETTER ZATA +2C8E ; mapped ; 2C8F # 4.1 COPTIC CAPITAL LETTER HATE +2C8F ; valid # 4.1 COPTIC SMALL LETTER HATE +2C90 ; mapped ; 2C91 # 4.1 COPTIC CAPITAL LETTER THETHE +2C91 ; valid # 4.1 COPTIC SMALL LETTER THETHE +2C92 ; mapped ; 2C93 # 4.1 COPTIC CAPITAL LETTER IAUDA 
+2C93 ; valid # 4.1 COPTIC SMALL LETTER IAUDA +2C94 ; mapped ; 2C95 # 4.1 COPTIC CAPITAL LETTER KAPA +2C95 ; valid # 4.1 COPTIC SMALL LETTER KAPA +2C96 ; mapped ; 2C97 # 4.1 COPTIC CAPITAL LETTER LAULA +2C97 ; valid # 4.1 COPTIC SMALL LETTER LAULA +2C98 ; mapped ; 2C99 # 4.1 COPTIC CAPITAL LETTER MI +2C99 ; valid # 4.1 COPTIC SMALL LETTER MI +2C9A ; mapped ; 2C9B # 4.1 COPTIC CAPITAL LETTER NI +2C9B ; valid # 4.1 COPTIC SMALL LETTER NI +2C9C ; mapped ; 2C9D # 4.1 COPTIC CAPITAL LETTER KSI +2C9D ; valid # 4.1 COPTIC SMALL LETTER KSI +2C9E ; mapped ; 2C9F # 4.1 COPTIC CAPITAL LETTER O +2C9F ; valid # 4.1 COPTIC SMALL LETTER O +2CA0 ; mapped ; 2CA1 # 4.1 COPTIC CAPITAL LETTER PI +2CA1 ; valid # 4.1 COPTIC SMALL LETTER PI +2CA2 ; mapped ; 2CA3 # 4.1 COPTIC CAPITAL LETTER RO +2CA3 ; valid # 4.1 COPTIC SMALL LETTER RO +2CA4 ; mapped ; 2CA5 # 4.1 COPTIC CAPITAL LETTER SIMA +2CA5 ; valid # 4.1 COPTIC SMALL LETTER SIMA +2CA6 ; mapped ; 2CA7 # 4.1 COPTIC CAPITAL LETTER TAU +2CA7 ; valid # 4.1 COPTIC SMALL LETTER TAU +2CA8 ; mapped ; 2CA9 # 4.1 COPTIC CAPITAL LETTER UA +2CA9 ; valid # 4.1 COPTIC SMALL LETTER UA +2CAA ; mapped ; 2CAB # 4.1 COPTIC CAPITAL LETTER FI +2CAB ; valid # 4.1 COPTIC SMALL LETTER FI +2CAC ; mapped ; 2CAD # 4.1 COPTIC CAPITAL LETTER KHI +2CAD ; valid # 4.1 COPTIC SMALL LETTER KHI +2CAE ; mapped ; 2CAF # 4.1 COPTIC CAPITAL LETTER PSI +2CAF ; valid # 4.1 COPTIC SMALL LETTER PSI +2CB0 ; mapped ; 2CB1 # 4.1 COPTIC CAPITAL LETTER OOU +2CB1 ; valid # 4.1 COPTIC SMALL LETTER OOU +2CB2 ; mapped ; 2CB3 # 4.1 COPTIC CAPITAL LETTER DIALECT-P ALEF +2CB3 ; valid # 4.1 COPTIC SMALL LETTER DIALECT-P ALEF +2CB4 ; mapped ; 2CB5 # 4.1 COPTIC CAPITAL LETTER OLD COPTIC AIN +2CB5 ; valid # 4.1 COPTIC SMALL LETTER OLD COPTIC AIN +2CB6 ; mapped ; 2CB7 # 4.1 COPTIC CAPITAL LETTER CRYPTOGRAMMIC EIE +2CB7 ; valid # 4.1 COPTIC SMALL LETTER CRYPTOGRAMMIC EIE +2CB8 ; mapped ; 2CB9 # 4.1 COPTIC CAPITAL LETTER DIALECT-P KAPA +2CB9 ; valid # 4.1 COPTIC SMALL LETTER DIALECT-P KAPA +2CBA ; mapped ; 2CBB # 4.1 COPTIC CAPITAL LETTER DIALECT-P NI +2CBB ; valid # 4.1 COPTIC SMALL LETTER DIALECT-P NI +2CBC ; mapped ; 2CBD # 4.1 COPTIC CAPITAL LETTER CRYPTOGRAMMIC NI +2CBD ; valid # 4.1 COPTIC SMALL LETTER CRYPTOGRAMMIC NI +2CBE ; mapped ; 2CBF # 4.1 COPTIC CAPITAL LETTER OLD COPTIC OOU +2CBF ; valid # 4.1 COPTIC SMALL LETTER OLD COPTIC OOU +2CC0 ; mapped ; 2CC1 # 4.1 COPTIC CAPITAL LETTER SAMPI +2CC1 ; valid # 4.1 COPTIC SMALL LETTER SAMPI +2CC2 ; mapped ; 2CC3 # 4.1 COPTIC CAPITAL LETTER CROSSED SHEI +2CC3 ; valid # 4.1 COPTIC SMALL LETTER CROSSED SHEI +2CC4 ; mapped ; 2CC5 # 4.1 COPTIC CAPITAL LETTER OLD COPTIC SHEI +2CC5 ; valid # 4.1 COPTIC SMALL LETTER OLD COPTIC SHEI +2CC6 ; mapped ; 2CC7 # 4.1 COPTIC CAPITAL LETTER OLD COPTIC ESH +2CC7 ; valid # 4.1 COPTIC SMALL LETTER OLD COPTIC ESH +2CC8 ; mapped ; 2CC9 # 4.1 COPTIC CAPITAL LETTER AKHMIMIC KHEI +2CC9 ; valid # 4.1 COPTIC SMALL LETTER AKHMIMIC KHEI +2CCA ; mapped ; 2CCB # 4.1 COPTIC CAPITAL LETTER DIALECT-P HORI +2CCB ; valid # 4.1 COPTIC SMALL LETTER DIALECT-P HORI +2CCC ; mapped ; 2CCD # 4.1 COPTIC CAPITAL LETTER OLD COPTIC HORI +2CCD ; valid # 4.1 COPTIC SMALL LETTER OLD COPTIC HORI +2CCE ; mapped ; 2CCF # 4.1 COPTIC CAPITAL LETTER OLD COPTIC HA +2CCF ; valid # 4.1 COPTIC SMALL LETTER OLD COPTIC HA +2CD0 ; mapped ; 2CD1 # 4.1 COPTIC CAPITAL LETTER L-SHAPED HA +2CD1 ; valid # 4.1 COPTIC SMALL LETTER L-SHAPED HA +2CD2 ; mapped ; 2CD3 # 4.1 COPTIC CAPITAL LETTER OLD COPTIC HEI +2CD3 ; valid # 4.1 COPTIC SMALL LETTER OLD COPTIC HEI +2CD4 ; mapped ; 2CD5 # 4.1 
COPTIC CAPITAL LETTER OLD COPTIC HAT +2CD5 ; valid # 4.1 COPTIC SMALL LETTER OLD COPTIC HAT +2CD6 ; mapped ; 2CD7 # 4.1 COPTIC CAPITAL LETTER OLD COPTIC GANGIA +2CD7 ; valid # 4.1 COPTIC SMALL LETTER OLD COPTIC GANGIA +2CD8 ; mapped ; 2CD9 # 4.1 COPTIC CAPITAL LETTER OLD COPTIC DJA +2CD9 ; valid # 4.1 COPTIC SMALL LETTER OLD COPTIC DJA +2CDA ; mapped ; 2CDB # 4.1 COPTIC CAPITAL LETTER OLD COPTIC SHIMA +2CDB ; valid # 4.1 COPTIC SMALL LETTER OLD COPTIC SHIMA +2CDC ; mapped ; 2CDD # 4.1 COPTIC CAPITAL LETTER OLD NUBIAN SHIMA +2CDD ; valid # 4.1 COPTIC SMALL LETTER OLD NUBIAN SHIMA +2CDE ; mapped ; 2CDF # 4.1 COPTIC CAPITAL LETTER OLD NUBIAN NGI +2CDF ; valid # 4.1 COPTIC SMALL LETTER OLD NUBIAN NGI +2CE0 ; mapped ; 2CE1 # 4.1 COPTIC CAPITAL LETTER OLD NUBIAN NYI +2CE1 ; valid # 4.1 COPTIC SMALL LETTER OLD NUBIAN NYI +2CE2 ; mapped ; 2CE3 # 4.1 COPTIC CAPITAL LETTER OLD NUBIAN WAU +2CE3..2CE4 ; valid # 4.1 COPTIC SMALL LETTER OLD NUBIAN WAU..COPTIC SYMBOL KAI +2CE5..2CEA ; valid ; ; NV8 # 4.1 COPTIC SYMBOL MI RO..COPTIC SYMBOL SHIMA SIMA +2CEB ; mapped ; 2CEC # 5.2 COPTIC CAPITAL LETTER CRYPTOGRAMMIC SHEI +2CEC ; valid # 5.2 COPTIC SMALL LETTER CRYPTOGRAMMIC SHEI +2CED ; mapped ; 2CEE # 5.2 COPTIC CAPITAL LETTER CRYPTOGRAMMIC GANGIA +2CEE..2CF1 ; valid # 5.2 COPTIC SMALL LETTER CRYPTOGRAMMIC GANGIA..COPTIC COMBINING SPIRITUS LENIS +2CF2 ; mapped ; 2CF3 # 6.1 COPTIC CAPITAL LETTER BOHAIRIC KHEI +2CF3 ; valid # 6.1 COPTIC SMALL LETTER BOHAIRIC KHEI +2CF4..2CF8 ; disallowed # NA .. +2CF9..2CFF ; valid ; ; NV8 # 4.1 COPTIC OLD NUBIAN FULL STOP..COPTIC MORPHOLOGICAL DIVIDER +2D00..2D25 ; valid # 4.1 GEORGIAN SMALL LETTER AN..GEORGIAN SMALL LETTER HOE +2D26 ; disallowed # NA +2D27 ; valid # 6.1 GEORGIAN SMALL LETTER YN +2D28..2D2C ; disallowed # NA .. +2D2D ; valid # 6.1 GEORGIAN SMALL LETTER AEN +2D2E..2D2F ; disallowed # NA .. +2D30..2D65 ; valid # 4.1 TIFINAGH LETTER YA..TIFINAGH LETTER YAZZ +2D66..2D67 ; valid # 6.1 TIFINAGH LETTER YE..TIFINAGH LETTER YO +2D68..2D6E ; disallowed # NA .. +2D6F ; mapped ; 2D61 # 4.1 TIFINAGH MODIFIER LETTER LABIALIZATION MARK +2D70 ; valid ; ; NV8 # 6.0 TIFINAGH SEPARATOR MARK +2D71..2D7E ; disallowed # NA .. +2D7F ; valid # 6.0 TIFINAGH CONSONANT JOINER +2D80..2D96 ; valid # 4.1 ETHIOPIC SYLLABLE LOA..ETHIOPIC SYLLABLE GGWE +2D97..2D9F ; disallowed # NA .. 
+2DA0..2DA6 ; valid # 4.1 ETHIOPIC SYLLABLE SSA..ETHIOPIC SYLLABLE SSO +2DA7 ; disallowed # NA +2DA8..2DAE ; valid # 4.1 ETHIOPIC SYLLABLE CCA..ETHIOPIC SYLLABLE CCO +2DAF ; disallowed # NA +2DB0..2DB6 ; valid # 4.1 ETHIOPIC SYLLABLE ZZA..ETHIOPIC SYLLABLE ZZO +2DB7 ; disallowed # NA +2DB8..2DBE ; valid # 4.1 ETHIOPIC SYLLABLE CCHA..ETHIOPIC SYLLABLE CCHO +2DBF ; disallowed # NA +2DC0..2DC6 ; valid # 4.1 ETHIOPIC SYLLABLE QYA..ETHIOPIC SYLLABLE QYO +2DC7 ; disallowed # NA +2DC8..2DCE ; valid # 4.1 ETHIOPIC SYLLABLE KYA..ETHIOPIC SYLLABLE KYO +2DCF ; disallowed # NA +2DD0..2DD6 ; valid # 4.1 ETHIOPIC SYLLABLE XYA..ETHIOPIC SYLLABLE XYO +2DD7 ; disallowed # NA +2DD8..2DDE ; valid # 4.1 ETHIOPIC SYLLABLE GYA..ETHIOPIC SYLLABLE GYO +2DDF ; disallowed # NA +2DE0..2DFF ; valid # 5.1 COMBINING CYRILLIC LETTER BE..COMBINING CYRILLIC LETTER IOTIFIED BIG YUS +2E00..2E17 ; valid ; ; NV8 # 4.1 RIGHT ANGLE SUBSTITUTION MARKER..DOUBLE OBLIQUE HYPHEN +2E18..2E1B ; valid ; ; NV8 # 5.1 INVERTED INTERROBANG..TILDE WITH RING ABOVE +2E1C..2E1D ; valid ; ; NV8 # 4.1 LEFT LOW PARAPHRASE BRACKET..RIGHT LOW PARAPHRASE BRACKET +2E1E..2E2E ; valid ; ; NV8 # 5.1 TILDE WITH DOT ABOVE..REVERSED QUESTION MARK +2E2F ; valid # 5.1 VERTICAL TILDE +2E30 ; valid ; ; NV8 # 5.1 RING POINT +2E31 ; valid ; ; NV8 # 5.2 WORD SEPARATOR MIDDLE DOT +2E32..2E3B ; valid ; ; NV8 # 6.1 TURNED COMMA..THREE-EM DASH +2E3C..2E42 ; valid ; ; NV8 # 7.0 STENOGRAPHIC FULL STOP..DOUBLE LOW-REVERSED-9 QUOTATION MARK +2E43..2E44 ; valid ; ; NV8 # 9.0 DASH WITH LEFT UPTURN..DOUBLE SUSPENSION MARK +2E45..2E49 ; valid ; ; NV8 # 10.0 INVERTED LOW KAVYKA..DOUBLE STACKED COMMA +2E4A..2E7F ; disallowed # NA .. +2E80..2E99 ; valid ; ; NV8 # 3.0 CJK RADICAL REPEAT..CJK RADICAL RAP +2E9A ; disallowed # NA +2E9B..2E9E ; valid ; ; NV8 # 3.0 CJK RADICAL CHOKE..CJK RADICAL DEATH +2E9F ; mapped ; 6BCD # 3.0 CJK RADICAL MOTHER +2EA0..2EF2 ; valid ; ; NV8 # 3.0 CJK RADICAL CIVILIAN..CJK RADICAL J-SIMPLIFIED TURTLE +2EF3 ; mapped ; 9F9F # 3.0 CJK RADICAL C-SIMPLIFIED TURTLE +2EF4..2EFF ; disallowed # NA .. 
+2F00 ; mapped ; 4E00 # 3.0 KANGXI RADICAL ONE +2F01 ; mapped ; 4E28 # 3.0 KANGXI RADICAL LINE +2F02 ; mapped ; 4E36 # 3.0 KANGXI RADICAL DOT +2F03 ; mapped ; 4E3F # 3.0 KANGXI RADICAL SLASH +2F04 ; mapped ; 4E59 # 3.0 KANGXI RADICAL SECOND +2F05 ; mapped ; 4E85 # 3.0 KANGXI RADICAL HOOK +2F06 ; mapped ; 4E8C # 3.0 KANGXI RADICAL TWO +2F07 ; mapped ; 4EA0 # 3.0 KANGXI RADICAL LID +2F08 ; mapped ; 4EBA # 3.0 KANGXI RADICAL MAN +2F09 ; mapped ; 513F # 3.0 KANGXI RADICAL LEGS +2F0A ; mapped ; 5165 # 3.0 KANGXI RADICAL ENTER +2F0B ; mapped ; 516B # 3.0 KANGXI RADICAL EIGHT +2F0C ; mapped ; 5182 # 3.0 KANGXI RADICAL DOWN BOX +2F0D ; mapped ; 5196 # 3.0 KANGXI RADICAL COVER +2F0E ; mapped ; 51AB # 3.0 KANGXI RADICAL ICE +2F0F ; mapped ; 51E0 # 3.0 KANGXI RADICAL TABLE +2F10 ; mapped ; 51F5 # 3.0 KANGXI RADICAL OPEN BOX +2F11 ; mapped ; 5200 # 3.0 KANGXI RADICAL KNIFE +2F12 ; mapped ; 529B # 3.0 KANGXI RADICAL POWER +2F13 ; mapped ; 52F9 # 3.0 KANGXI RADICAL WRAP +2F14 ; mapped ; 5315 # 3.0 KANGXI RADICAL SPOON +2F15 ; mapped ; 531A # 3.0 KANGXI RADICAL RIGHT OPEN BOX +2F16 ; mapped ; 5338 # 3.0 KANGXI RADICAL HIDING ENCLOSURE +2F17 ; mapped ; 5341 # 3.0 KANGXI RADICAL TEN +2F18 ; mapped ; 535C # 3.0 KANGXI RADICAL DIVINATION +2F19 ; mapped ; 5369 # 3.0 KANGXI RADICAL SEAL +2F1A ; mapped ; 5382 # 3.0 KANGXI RADICAL CLIFF +2F1B ; mapped ; 53B6 # 3.0 KANGXI RADICAL PRIVATE +2F1C ; mapped ; 53C8 # 3.0 KANGXI RADICAL AGAIN +2F1D ; mapped ; 53E3 # 3.0 KANGXI RADICAL MOUTH +2F1E ; mapped ; 56D7 # 3.0 KANGXI RADICAL ENCLOSURE +2F1F ; mapped ; 571F # 3.0 KANGXI RADICAL EARTH +2F20 ; mapped ; 58EB # 3.0 KANGXI RADICAL SCHOLAR +2F21 ; mapped ; 5902 # 3.0 KANGXI RADICAL GO +2F22 ; mapped ; 590A # 3.0 KANGXI RADICAL GO SLOWLY +2F23 ; mapped ; 5915 # 3.0 KANGXI RADICAL EVENING +2F24 ; mapped ; 5927 # 3.0 KANGXI RADICAL BIG +2F25 ; mapped ; 5973 # 3.0 KANGXI RADICAL WOMAN +2F26 ; mapped ; 5B50 # 3.0 KANGXI RADICAL CHILD +2F27 ; mapped ; 5B80 # 3.0 KANGXI RADICAL ROOF +2F28 ; mapped ; 5BF8 # 3.0 KANGXI RADICAL INCH +2F29 ; mapped ; 5C0F # 3.0 KANGXI RADICAL SMALL +2F2A ; mapped ; 5C22 # 3.0 KANGXI RADICAL LAME +2F2B ; mapped ; 5C38 # 3.0 KANGXI RADICAL CORPSE +2F2C ; mapped ; 5C6E # 3.0 KANGXI RADICAL SPROUT +2F2D ; mapped ; 5C71 # 3.0 KANGXI RADICAL MOUNTAIN +2F2E ; mapped ; 5DDB # 3.0 KANGXI RADICAL RIVER +2F2F ; mapped ; 5DE5 # 3.0 KANGXI RADICAL WORK +2F30 ; mapped ; 5DF1 # 3.0 KANGXI RADICAL ONESELF +2F31 ; mapped ; 5DFE # 3.0 KANGXI RADICAL TURBAN +2F32 ; mapped ; 5E72 # 3.0 KANGXI RADICAL DRY +2F33 ; mapped ; 5E7A # 3.0 KANGXI RADICAL SHORT THREAD +2F34 ; mapped ; 5E7F # 3.0 KANGXI RADICAL DOTTED CLIFF +2F35 ; mapped ; 5EF4 # 3.0 KANGXI RADICAL LONG STRIDE +2F36 ; mapped ; 5EFE # 3.0 KANGXI RADICAL TWO HANDS +2F37 ; mapped ; 5F0B # 3.0 KANGXI RADICAL SHOOT +2F38 ; mapped ; 5F13 # 3.0 KANGXI RADICAL BOW +2F39 ; mapped ; 5F50 # 3.0 KANGXI RADICAL SNOUT +2F3A ; mapped ; 5F61 # 3.0 KANGXI RADICAL BRISTLE +2F3B ; mapped ; 5F73 # 3.0 KANGXI RADICAL STEP +2F3C ; mapped ; 5FC3 # 3.0 KANGXI RADICAL HEART +2F3D ; mapped ; 6208 # 3.0 KANGXI RADICAL HALBERD +2F3E ; mapped ; 6236 # 3.0 KANGXI RADICAL DOOR +2F3F ; mapped ; 624B # 3.0 KANGXI RADICAL HAND +2F40 ; mapped ; 652F # 3.0 KANGXI RADICAL BRANCH +2F41 ; mapped ; 6534 # 3.0 KANGXI RADICAL RAP +2F42 ; mapped ; 6587 # 3.0 KANGXI RADICAL SCRIPT +2F43 ; mapped ; 6597 # 3.0 KANGXI RADICAL DIPPER +2F44 ; mapped ; 65A4 # 3.0 KANGXI RADICAL AXE +2F45 ; mapped ; 65B9 # 3.0 KANGXI RADICAL SQUARE +2F46 ; mapped ; 65E0 # 3.0 KANGXI RADICAL NOT +2F47 ; mapped ; 65E5 # 3.0 
KANGXI RADICAL SUN +2F48 ; mapped ; 66F0 # 3.0 KANGXI RADICAL SAY +2F49 ; mapped ; 6708 # 3.0 KANGXI RADICAL MOON +2F4A ; mapped ; 6728 # 3.0 KANGXI RADICAL TREE +2F4B ; mapped ; 6B20 # 3.0 KANGXI RADICAL LACK +2F4C ; mapped ; 6B62 # 3.0 KANGXI RADICAL STOP +2F4D ; mapped ; 6B79 # 3.0 KANGXI RADICAL DEATH +2F4E ; mapped ; 6BB3 # 3.0 KANGXI RADICAL WEAPON +2F4F ; mapped ; 6BCB # 3.0 KANGXI RADICAL DO NOT +2F50 ; mapped ; 6BD4 # 3.0 KANGXI RADICAL COMPARE +2F51 ; mapped ; 6BDB # 3.0 KANGXI RADICAL FUR +2F52 ; mapped ; 6C0F # 3.0 KANGXI RADICAL CLAN +2F53 ; mapped ; 6C14 # 3.0 KANGXI RADICAL STEAM +2F54 ; mapped ; 6C34 # 3.0 KANGXI RADICAL WATER +2F55 ; mapped ; 706B # 3.0 KANGXI RADICAL FIRE +2F56 ; mapped ; 722A # 3.0 KANGXI RADICAL CLAW +2F57 ; mapped ; 7236 # 3.0 KANGXI RADICAL FATHER +2F58 ; mapped ; 723B # 3.0 KANGXI RADICAL DOUBLE X +2F59 ; mapped ; 723F # 3.0 KANGXI RADICAL HALF TREE TRUNK +2F5A ; mapped ; 7247 # 3.0 KANGXI RADICAL SLICE +2F5B ; mapped ; 7259 # 3.0 KANGXI RADICAL FANG +2F5C ; mapped ; 725B # 3.0 KANGXI RADICAL COW +2F5D ; mapped ; 72AC # 3.0 KANGXI RADICAL DOG +2F5E ; mapped ; 7384 # 3.0 KANGXI RADICAL PROFOUND +2F5F ; mapped ; 7389 # 3.0 KANGXI RADICAL JADE +2F60 ; mapped ; 74DC # 3.0 KANGXI RADICAL MELON +2F61 ; mapped ; 74E6 # 3.0 KANGXI RADICAL TILE +2F62 ; mapped ; 7518 # 3.0 KANGXI RADICAL SWEET +2F63 ; mapped ; 751F # 3.0 KANGXI RADICAL LIFE +2F64 ; mapped ; 7528 # 3.0 KANGXI RADICAL USE +2F65 ; mapped ; 7530 # 3.0 KANGXI RADICAL FIELD +2F66 ; mapped ; 758B # 3.0 KANGXI RADICAL BOLT OF CLOTH +2F67 ; mapped ; 7592 # 3.0 KANGXI RADICAL SICKNESS +2F68 ; mapped ; 7676 # 3.0 KANGXI RADICAL DOTTED TENT +2F69 ; mapped ; 767D # 3.0 KANGXI RADICAL WHITE +2F6A ; mapped ; 76AE # 3.0 KANGXI RADICAL SKIN +2F6B ; mapped ; 76BF # 3.0 KANGXI RADICAL DISH +2F6C ; mapped ; 76EE # 3.0 KANGXI RADICAL EYE +2F6D ; mapped ; 77DB # 3.0 KANGXI RADICAL SPEAR +2F6E ; mapped ; 77E2 # 3.0 KANGXI RADICAL ARROW +2F6F ; mapped ; 77F3 # 3.0 KANGXI RADICAL STONE +2F70 ; mapped ; 793A # 3.0 KANGXI RADICAL SPIRIT +2F71 ; mapped ; 79B8 # 3.0 KANGXI RADICAL TRACK +2F72 ; mapped ; 79BE # 3.0 KANGXI RADICAL GRAIN +2F73 ; mapped ; 7A74 # 3.0 KANGXI RADICAL CAVE +2F74 ; mapped ; 7ACB # 3.0 KANGXI RADICAL STAND +2F75 ; mapped ; 7AF9 # 3.0 KANGXI RADICAL BAMBOO +2F76 ; mapped ; 7C73 # 3.0 KANGXI RADICAL RICE +2F77 ; mapped ; 7CF8 # 3.0 KANGXI RADICAL SILK +2F78 ; mapped ; 7F36 # 3.0 KANGXI RADICAL JAR +2F79 ; mapped ; 7F51 # 3.0 KANGXI RADICAL NET +2F7A ; mapped ; 7F8A # 3.0 KANGXI RADICAL SHEEP +2F7B ; mapped ; 7FBD # 3.0 KANGXI RADICAL FEATHER +2F7C ; mapped ; 8001 # 3.0 KANGXI RADICAL OLD +2F7D ; mapped ; 800C # 3.0 KANGXI RADICAL AND +2F7E ; mapped ; 8012 # 3.0 KANGXI RADICAL PLOW +2F7F ; mapped ; 8033 # 3.0 KANGXI RADICAL EAR +2F80 ; mapped ; 807F # 3.0 KANGXI RADICAL BRUSH +2F81 ; mapped ; 8089 # 3.0 KANGXI RADICAL MEAT +2F82 ; mapped ; 81E3 # 3.0 KANGXI RADICAL MINISTER +2F83 ; mapped ; 81EA # 3.0 KANGXI RADICAL SELF +2F84 ; mapped ; 81F3 # 3.0 KANGXI RADICAL ARRIVE +2F85 ; mapped ; 81FC # 3.0 KANGXI RADICAL MORTAR +2F86 ; mapped ; 820C # 3.0 KANGXI RADICAL TONGUE +2F87 ; mapped ; 821B # 3.0 KANGXI RADICAL OPPOSE +2F88 ; mapped ; 821F # 3.0 KANGXI RADICAL BOAT +2F89 ; mapped ; 826E # 3.0 KANGXI RADICAL STOPPING +2F8A ; mapped ; 8272 # 3.0 KANGXI RADICAL COLOR +2F8B ; mapped ; 8278 # 3.0 KANGXI RADICAL GRASS +2F8C ; mapped ; 864D # 3.0 KANGXI RADICAL TIGER +2F8D ; mapped ; 866B # 3.0 KANGXI RADICAL INSECT +2F8E ; mapped ; 8840 # 3.0 KANGXI RADICAL BLOOD +2F8F ; mapped ; 884C # 3.0 KANGXI RADICAL 
WALK ENCLOSURE +2F90 ; mapped ; 8863 # 3.0 KANGXI RADICAL CLOTHES +2F91 ; mapped ; 897E # 3.0 KANGXI RADICAL WEST +2F92 ; mapped ; 898B # 3.0 KANGXI RADICAL SEE +2F93 ; mapped ; 89D2 # 3.0 KANGXI RADICAL HORN +2F94 ; mapped ; 8A00 # 3.0 KANGXI RADICAL SPEECH +2F95 ; mapped ; 8C37 # 3.0 KANGXI RADICAL VALLEY +2F96 ; mapped ; 8C46 # 3.0 KANGXI RADICAL BEAN +2F97 ; mapped ; 8C55 # 3.0 KANGXI RADICAL PIG +2F98 ; mapped ; 8C78 # 3.0 KANGXI RADICAL BADGER +2F99 ; mapped ; 8C9D # 3.0 KANGXI RADICAL SHELL +2F9A ; mapped ; 8D64 # 3.0 KANGXI RADICAL RED +2F9B ; mapped ; 8D70 # 3.0 KANGXI RADICAL RUN +2F9C ; mapped ; 8DB3 # 3.0 KANGXI RADICAL FOOT +2F9D ; mapped ; 8EAB # 3.0 KANGXI RADICAL BODY +2F9E ; mapped ; 8ECA # 3.0 KANGXI RADICAL CART +2F9F ; mapped ; 8F9B # 3.0 KANGXI RADICAL BITTER +2FA0 ; mapped ; 8FB0 # 3.0 KANGXI RADICAL MORNING +2FA1 ; mapped ; 8FB5 # 3.0 KANGXI RADICAL WALK +2FA2 ; mapped ; 9091 # 3.0 KANGXI RADICAL CITY +2FA3 ; mapped ; 9149 # 3.0 KANGXI RADICAL WINE +2FA4 ; mapped ; 91C6 # 3.0 KANGXI RADICAL DISTINGUISH +2FA5 ; mapped ; 91CC # 3.0 KANGXI RADICAL VILLAGE +2FA6 ; mapped ; 91D1 # 3.0 KANGXI RADICAL GOLD +2FA7 ; mapped ; 9577 # 3.0 KANGXI RADICAL LONG +2FA8 ; mapped ; 9580 # 3.0 KANGXI RADICAL GATE +2FA9 ; mapped ; 961C # 3.0 KANGXI RADICAL MOUND +2FAA ; mapped ; 96B6 # 3.0 KANGXI RADICAL SLAVE +2FAB ; mapped ; 96B9 # 3.0 KANGXI RADICAL SHORT TAILED BIRD +2FAC ; mapped ; 96E8 # 3.0 KANGXI RADICAL RAIN +2FAD ; mapped ; 9751 # 3.0 KANGXI RADICAL BLUE +2FAE ; mapped ; 975E # 3.0 KANGXI RADICAL WRONG +2FAF ; mapped ; 9762 # 3.0 KANGXI RADICAL FACE +2FB0 ; mapped ; 9769 # 3.0 KANGXI RADICAL LEATHER +2FB1 ; mapped ; 97CB # 3.0 KANGXI RADICAL TANNED LEATHER +2FB2 ; mapped ; 97ED # 3.0 KANGXI RADICAL LEEK +2FB3 ; mapped ; 97F3 # 3.0 KANGXI RADICAL SOUND +2FB4 ; mapped ; 9801 # 3.0 KANGXI RADICAL LEAF +2FB5 ; mapped ; 98A8 # 3.0 KANGXI RADICAL WIND +2FB6 ; mapped ; 98DB # 3.0 KANGXI RADICAL FLY +2FB7 ; mapped ; 98DF # 3.0 KANGXI RADICAL EAT +2FB8 ; mapped ; 9996 # 3.0 KANGXI RADICAL HEAD +2FB9 ; mapped ; 9999 # 3.0 KANGXI RADICAL FRAGRANT +2FBA ; mapped ; 99AC # 3.0 KANGXI RADICAL HORSE +2FBB ; mapped ; 9AA8 # 3.0 KANGXI RADICAL BONE +2FBC ; mapped ; 9AD8 # 3.0 KANGXI RADICAL TALL +2FBD ; mapped ; 9ADF # 3.0 KANGXI RADICAL HAIR +2FBE ; mapped ; 9B25 # 3.0 KANGXI RADICAL FIGHT +2FBF ; mapped ; 9B2F # 3.0 KANGXI RADICAL SACRIFICIAL WINE +2FC0 ; mapped ; 9B32 # 3.0 KANGXI RADICAL CAULDRON +2FC1 ; mapped ; 9B3C # 3.0 KANGXI RADICAL GHOST +2FC2 ; mapped ; 9B5A # 3.0 KANGXI RADICAL FISH +2FC3 ; mapped ; 9CE5 # 3.0 KANGXI RADICAL BIRD +2FC4 ; mapped ; 9E75 # 3.0 KANGXI RADICAL SALT +2FC5 ; mapped ; 9E7F # 3.0 KANGXI RADICAL DEER +2FC6 ; mapped ; 9EA5 # 3.0 KANGXI RADICAL WHEAT +2FC7 ; mapped ; 9EBB # 3.0 KANGXI RADICAL HEMP +2FC8 ; mapped ; 9EC3 # 3.0 KANGXI RADICAL YELLOW +2FC9 ; mapped ; 9ECD # 3.0 KANGXI RADICAL MILLET +2FCA ; mapped ; 9ED1 # 3.0 KANGXI RADICAL BLACK +2FCB ; mapped ; 9EF9 # 3.0 KANGXI RADICAL EMBROIDERY +2FCC ; mapped ; 9EFD # 3.0 KANGXI RADICAL FROG +2FCD ; mapped ; 9F0E # 3.0 KANGXI RADICAL TRIPOD +2FCE ; mapped ; 9F13 # 3.0 KANGXI RADICAL DRUM +2FCF ; mapped ; 9F20 # 3.0 KANGXI RADICAL RAT +2FD0 ; mapped ; 9F3B # 3.0 KANGXI RADICAL NOSE +2FD1 ; mapped ; 9F4A # 3.0 KANGXI RADICAL EVEN +2FD2 ; mapped ; 9F52 # 3.0 KANGXI RADICAL TOOTH +2FD3 ; mapped ; 9F8D # 3.0 KANGXI RADICAL DRAGON +2FD4 ; mapped ; 9F9C # 3.0 KANGXI RADICAL TURTLE +2FD5 ; mapped ; 9FA0 # 3.0 KANGXI RADICAL FLUTE +2FD6..2FEF ; disallowed # NA .. 
+2FF0..2FFB ; disallowed # 3.0 IDEOGRAPHIC DESCRIPTION CHARACTER LEFT TO RIGHT..IDEOGRAPHIC DESCRIPTION CHARACTER OVERLAID +2FFC..2FFF ; disallowed # NA .. +3000 ; disallowed_STD3_mapped ; 0020 # 1.1 IDEOGRAPHIC SPACE +3001 ; valid ; ; NV8 # 1.1 IDEOGRAPHIC COMMA +3002 ; mapped ; 002E # 1.1 IDEOGRAPHIC FULL STOP +3003..3004 ; valid ; ; NV8 # 1.1 DITTO MARK..JAPANESE INDUSTRIAL STANDARD SYMBOL +3005..3007 ; valid # 1.1 IDEOGRAPHIC ITERATION MARK..IDEOGRAPHIC NUMBER ZERO +3008..3029 ; valid ; ; NV8 # 1.1 LEFT ANGLE BRACKET..HANGZHOU NUMERAL NINE +302A..302D ; valid # 1.1 IDEOGRAPHIC LEVEL TONE MARK..IDEOGRAPHIC ENTERING TONE MARK +302E..3035 ; valid ; ; NV8 # 1.1 HANGUL SINGLE DOT TONE MARK..VERTICAL KANA REPEAT MARK LOWER HALF +3036 ; mapped ; 3012 # 1.1 CIRCLED POSTAL MARK +3037 ; valid ; ; NV8 # 1.1 IDEOGRAPHIC TELEGRAPH LINE FEED SEPARATOR SYMBOL +3038 ; mapped ; 5341 # 3.0 HANGZHOU NUMERAL TEN +3039 ; mapped ; 5344 # 3.0 HANGZHOU NUMERAL TWENTY +303A ; mapped ; 5345 # 3.0 HANGZHOU NUMERAL THIRTY +303B ; valid ; ; NV8 # 3.2 VERTICAL IDEOGRAPHIC ITERATION MARK +303C ; valid # 3.2 MASU MARK +303D ; valid ; ; NV8 # 3.2 PART ALTERNATION MARK +303E ; valid ; ; NV8 # 3.0 IDEOGRAPHIC VARIATION INDICATOR +303F ; valid ; ; NV8 # 1.1 IDEOGRAPHIC HALF FILL SPACE +3040 ; disallowed # NA +3041..3094 ; valid # 1.1 HIRAGANA LETTER SMALL A..HIRAGANA LETTER VU +3095..3096 ; valid # 3.2 HIRAGANA LETTER SMALL KA..HIRAGANA LETTER SMALL KE +3097..3098 ; disallowed # NA .. +3099..309A ; valid # 1.1 COMBINING KATAKANA-HIRAGANA VOICED SOUND MARK..COMBINING KATAKANA-HIRAGANA SEMI-VOICED SOUND MARK +309B ; disallowed_STD3_mapped ; 0020 3099 # 1.1 KATAKANA-HIRAGANA VOICED SOUND MARK +309C ; disallowed_STD3_mapped ; 0020 309A # 1.1 KATAKANA-HIRAGANA SEMI-VOICED SOUND MARK +309D..309E ; valid # 1.1 HIRAGANA ITERATION MARK..HIRAGANA VOICED ITERATION MARK +309F ; mapped ; 3088 308A # 3.2 HIRAGANA DIGRAPH YORI +30A0 ; valid ; ; NV8 # 3.2 KATAKANA-HIRAGANA DOUBLE HYPHEN +30A1..30FE ; valid # 1.1 KATAKANA LETTER SMALL A..KATAKANA VOICED ITERATION MARK +30FF ; mapped ; 30B3 30C8 # 3.2 KATAKANA DIGRAPH KOTO +3100..3104 ; disallowed # NA .. +3105..312C ; valid # 1.1 BOPOMOFO LETTER B..BOPOMOFO LETTER GN +312D ; valid # 5.1 BOPOMOFO LETTER IH +312E ; valid # 10.0 BOPOMOFO LETTER O WITH DOT ABOVE +312F..3130 ; disallowed # NA .. 
+3131 ; mapped ; 1100 # 1.1 HANGUL LETTER KIYEOK +3132 ; mapped ; 1101 # 1.1 HANGUL LETTER SSANGKIYEOK +3133 ; mapped ; 11AA # 1.1 HANGUL LETTER KIYEOK-SIOS +3134 ; mapped ; 1102 # 1.1 HANGUL LETTER NIEUN +3135 ; mapped ; 11AC # 1.1 HANGUL LETTER NIEUN-CIEUC +3136 ; mapped ; 11AD # 1.1 HANGUL LETTER NIEUN-HIEUH +3137 ; mapped ; 1103 # 1.1 HANGUL LETTER TIKEUT +3138 ; mapped ; 1104 # 1.1 HANGUL LETTER SSANGTIKEUT +3139 ; mapped ; 1105 # 1.1 HANGUL LETTER RIEUL +313A ; mapped ; 11B0 # 1.1 HANGUL LETTER RIEUL-KIYEOK +313B ; mapped ; 11B1 # 1.1 HANGUL LETTER RIEUL-MIEUM +313C ; mapped ; 11B2 # 1.1 HANGUL LETTER RIEUL-PIEUP +313D ; mapped ; 11B3 # 1.1 HANGUL LETTER RIEUL-SIOS +313E ; mapped ; 11B4 # 1.1 HANGUL LETTER RIEUL-THIEUTH +313F ; mapped ; 11B5 # 1.1 HANGUL LETTER RIEUL-PHIEUPH +3140 ; mapped ; 111A # 1.1 HANGUL LETTER RIEUL-HIEUH +3141 ; mapped ; 1106 # 1.1 HANGUL LETTER MIEUM +3142 ; mapped ; 1107 # 1.1 HANGUL LETTER PIEUP +3143 ; mapped ; 1108 # 1.1 HANGUL LETTER SSANGPIEUP +3144 ; mapped ; 1121 # 1.1 HANGUL LETTER PIEUP-SIOS +3145 ; mapped ; 1109 # 1.1 HANGUL LETTER SIOS +3146 ; mapped ; 110A # 1.1 HANGUL LETTER SSANGSIOS +3147 ; mapped ; 110B # 1.1 HANGUL LETTER IEUNG +3148 ; mapped ; 110C # 1.1 HANGUL LETTER CIEUC +3149 ; mapped ; 110D # 1.1 HANGUL LETTER SSANGCIEUC +314A ; mapped ; 110E # 1.1 HANGUL LETTER CHIEUCH +314B ; mapped ; 110F # 1.1 HANGUL LETTER KHIEUKH +314C ; mapped ; 1110 # 1.1 HANGUL LETTER THIEUTH +314D ; mapped ; 1111 # 1.1 HANGUL LETTER PHIEUPH +314E ; mapped ; 1112 # 1.1 HANGUL LETTER HIEUH +314F ; mapped ; 1161 # 1.1 HANGUL LETTER A +3150 ; mapped ; 1162 # 1.1 HANGUL LETTER AE +3151 ; mapped ; 1163 # 1.1 HANGUL LETTER YA +3152 ; mapped ; 1164 # 1.1 HANGUL LETTER YAE +3153 ; mapped ; 1165 # 1.1 HANGUL LETTER EO +3154 ; mapped ; 1166 # 1.1 HANGUL LETTER E +3155 ; mapped ; 1167 # 1.1 HANGUL LETTER YEO +3156 ; mapped ; 1168 # 1.1 HANGUL LETTER YE +3157 ; mapped ; 1169 # 1.1 HANGUL LETTER O +3158 ; mapped ; 116A # 1.1 HANGUL LETTER WA +3159 ; mapped ; 116B # 1.1 HANGUL LETTER WAE +315A ; mapped ; 116C # 1.1 HANGUL LETTER OE +315B ; mapped ; 116D # 1.1 HANGUL LETTER YO +315C ; mapped ; 116E # 1.1 HANGUL LETTER U +315D ; mapped ; 116F # 1.1 HANGUL LETTER WEO +315E ; mapped ; 1170 # 1.1 HANGUL LETTER WE +315F ; mapped ; 1171 # 1.1 HANGUL LETTER WI +3160 ; mapped ; 1172 # 1.1 HANGUL LETTER YU +3161 ; mapped ; 1173 # 1.1 HANGUL LETTER EU +3162 ; mapped ; 1174 # 1.1 HANGUL LETTER YI +3163 ; mapped ; 1175 # 1.1 HANGUL LETTER I +3164 ; disallowed # 1.1 HANGUL FILLER +3165 ; mapped ; 1114 # 1.1 HANGUL LETTER SSANGNIEUN +3166 ; mapped ; 1115 # 1.1 HANGUL LETTER NIEUN-TIKEUT +3167 ; mapped ; 11C7 # 1.1 HANGUL LETTER NIEUN-SIOS +3168 ; mapped ; 11C8 # 1.1 HANGUL LETTER NIEUN-PANSIOS +3169 ; mapped ; 11CC # 1.1 HANGUL LETTER RIEUL-KIYEOK-SIOS +316A ; mapped ; 11CE # 1.1 HANGUL LETTER RIEUL-TIKEUT +316B ; mapped ; 11D3 # 1.1 HANGUL LETTER RIEUL-PIEUP-SIOS +316C ; mapped ; 11D7 # 1.1 HANGUL LETTER RIEUL-PANSIOS +316D ; mapped ; 11D9 # 1.1 HANGUL LETTER RIEUL-YEORINHIEUH +316E ; mapped ; 111C # 1.1 HANGUL LETTER MIEUM-PIEUP +316F ; mapped ; 11DD # 1.1 HANGUL LETTER MIEUM-SIOS +3170 ; mapped ; 11DF # 1.1 HANGUL LETTER MIEUM-PANSIOS +3171 ; mapped ; 111D # 1.1 HANGUL LETTER KAPYEOUNMIEUM +3172 ; mapped ; 111E # 1.1 HANGUL LETTER PIEUP-KIYEOK +3173 ; mapped ; 1120 # 1.1 HANGUL LETTER PIEUP-TIKEUT +3174 ; mapped ; 1122 # 1.1 HANGUL LETTER PIEUP-SIOS-KIYEOK +3175 ; mapped ; 1123 # 1.1 HANGUL LETTER PIEUP-SIOS-TIKEUT +3176 ; mapped ; 1127 # 1.1 HANGUL LETTER PIEUP-CIEUC +3177 ; mapped ; 
1129 # 1.1 HANGUL LETTER PIEUP-THIEUTH +3178 ; mapped ; 112B # 1.1 HANGUL LETTER KAPYEOUNPIEUP +3179 ; mapped ; 112C # 1.1 HANGUL LETTER KAPYEOUNSSANGPIEUP +317A ; mapped ; 112D # 1.1 HANGUL LETTER SIOS-KIYEOK +317B ; mapped ; 112E # 1.1 HANGUL LETTER SIOS-NIEUN +317C ; mapped ; 112F # 1.1 HANGUL LETTER SIOS-TIKEUT +317D ; mapped ; 1132 # 1.1 HANGUL LETTER SIOS-PIEUP +317E ; mapped ; 1136 # 1.1 HANGUL LETTER SIOS-CIEUC +317F ; mapped ; 1140 # 1.1 HANGUL LETTER PANSIOS +3180 ; mapped ; 1147 # 1.1 HANGUL LETTER SSANGIEUNG +3181 ; mapped ; 114C # 1.1 HANGUL LETTER YESIEUNG +3182 ; mapped ; 11F1 # 1.1 HANGUL LETTER YESIEUNG-SIOS +3183 ; mapped ; 11F2 # 1.1 HANGUL LETTER YESIEUNG-PANSIOS +3184 ; mapped ; 1157 # 1.1 HANGUL LETTER KAPYEOUNPHIEUPH +3185 ; mapped ; 1158 # 1.1 HANGUL LETTER SSANGHIEUH +3186 ; mapped ; 1159 # 1.1 HANGUL LETTER YEORINHIEUH +3187 ; mapped ; 1184 # 1.1 HANGUL LETTER YO-YA +3188 ; mapped ; 1185 # 1.1 HANGUL LETTER YO-YAE +3189 ; mapped ; 1188 # 1.1 HANGUL LETTER YO-I +318A ; mapped ; 1191 # 1.1 HANGUL LETTER YU-YEO +318B ; mapped ; 1192 # 1.1 HANGUL LETTER YU-YE +318C ; mapped ; 1194 # 1.1 HANGUL LETTER YU-I +318D ; mapped ; 119E # 1.1 HANGUL LETTER ARAEA +318E ; mapped ; 11A1 # 1.1 HANGUL LETTER ARAEAE +318F ; disallowed # NA +3190..3191 ; valid ; ; NV8 # 1.1 IDEOGRAPHIC ANNOTATION LINKING MARK..IDEOGRAPHIC ANNOTATION REVERSE MARK +3192 ; mapped ; 4E00 # 1.1 IDEOGRAPHIC ANNOTATION ONE MARK +3193 ; mapped ; 4E8C # 1.1 IDEOGRAPHIC ANNOTATION TWO MARK +3194 ; mapped ; 4E09 # 1.1 IDEOGRAPHIC ANNOTATION THREE MARK +3195 ; mapped ; 56DB # 1.1 IDEOGRAPHIC ANNOTATION FOUR MARK +3196 ; mapped ; 4E0A # 1.1 IDEOGRAPHIC ANNOTATION TOP MARK +3197 ; mapped ; 4E2D # 1.1 IDEOGRAPHIC ANNOTATION MIDDLE MARK +3198 ; mapped ; 4E0B # 1.1 IDEOGRAPHIC ANNOTATION BOTTOM MARK +3199 ; mapped ; 7532 # 1.1 IDEOGRAPHIC ANNOTATION FIRST MARK +319A ; mapped ; 4E59 # 1.1 IDEOGRAPHIC ANNOTATION SECOND MARK +319B ; mapped ; 4E19 # 1.1 IDEOGRAPHIC ANNOTATION THIRD MARK +319C ; mapped ; 4E01 # 1.1 IDEOGRAPHIC ANNOTATION FOURTH MARK +319D ; mapped ; 5929 # 1.1 IDEOGRAPHIC ANNOTATION HEAVEN MARK +319E ; mapped ; 5730 # 1.1 IDEOGRAPHIC ANNOTATION EARTH MARK +319F ; mapped ; 4EBA # 1.1 IDEOGRAPHIC ANNOTATION MAN MARK +31A0..31B7 ; valid # 3.0 BOPOMOFO LETTER BU..BOPOMOFO FINAL LETTER H +31B8..31BA ; valid # 6.0 BOPOMOFO LETTER GH..BOPOMOFO LETTER ZY +31BB..31BF ; disallowed # NA .. +31C0..31CF ; valid ; ; NV8 # 4.1 CJK STROKE T..CJK STROKE N +31D0..31E3 ; valid ; ; NV8 # 5.1 CJK STROKE H..CJK STROKE Q +31E4..31EF ; disallowed # NA .. 
+31F0..31FF ; valid # 3.2 KATAKANA LETTER SMALL KU..KATAKANA LETTER SMALL RO +3200 ; disallowed_STD3_mapped ; 0028 1100 0029 #1.1 PARENTHESIZED HANGUL KIYEOK +3201 ; disallowed_STD3_mapped ; 0028 1102 0029 #1.1 PARENTHESIZED HANGUL NIEUN +3202 ; disallowed_STD3_mapped ; 0028 1103 0029 #1.1 PARENTHESIZED HANGUL TIKEUT +3203 ; disallowed_STD3_mapped ; 0028 1105 0029 #1.1 PARENTHESIZED HANGUL RIEUL +3204 ; disallowed_STD3_mapped ; 0028 1106 0029 #1.1 PARENTHESIZED HANGUL MIEUM +3205 ; disallowed_STD3_mapped ; 0028 1107 0029 #1.1 PARENTHESIZED HANGUL PIEUP +3206 ; disallowed_STD3_mapped ; 0028 1109 0029 #1.1 PARENTHESIZED HANGUL SIOS +3207 ; disallowed_STD3_mapped ; 0028 110B 0029 #1.1 PARENTHESIZED HANGUL IEUNG +3208 ; disallowed_STD3_mapped ; 0028 110C 0029 #1.1 PARENTHESIZED HANGUL CIEUC +3209 ; disallowed_STD3_mapped ; 0028 110E 0029 #1.1 PARENTHESIZED HANGUL CHIEUCH +320A ; disallowed_STD3_mapped ; 0028 110F 0029 #1.1 PARENTHESIZED HANGUL KHIEUKH +320B ; disallowed_STD3_mapped ; 0028 1110 0029 #1.1 PARENTHESIZED HANGUL THIEUTH +320C ; disallowed_STD3_mapped ; 0028 1111 0029 #1.1 PARENTHESIZED HANGUL PHIEUPH +320D ; disallowed_STD3_mapped ; 0028 1112 0029 #1.1 PARENTHESIZED HANGUL HIEUH +320E ; disallowed_STD3_mapped ; 0028 AC00 0029 #1.1 PARENTHESIZED HANGUL KIYEOK A +320F ; disallowed_STD3_mapped ; 0028 B098 0029 #1.1 PARENTHESIZED HANGUL NIEUN A +3210 ; disallowed_STD3_mapped ; 0028 B2E4 0029 #1.1 PARENTHESIZED HANGUL TIKEUT A +3211 ; disallowed_STD3_mapped ; 0028 B77C 0029 #1.1 PARENTHESIZED HANGUL RIEUL A +3212 ; disallowed_STD3_mapped ; 0028 B9C8 0029 #1.1 PARENTHESIZED HANGUL MIEUM A +3213 ; disallowed_STD3_mapped ; 0028 BC14 0029 #1.1 PARENTHESIZED HANGUL PIEUP A +3214 ; disallowed_STD3_mapped ; 0028 C0AC 0029 #1.1 PARENTHESIZED HANGUL SIOS A +3215 ; disallowed_STD3_mapped ; 0028 C544 0029 #1.1 PARENTHESIZED HANGUL IEUNG A +3216 ; disallowed_STD3_mapped ; 0028 C790 0029 #1.1 PARENTHESIZED HANGUL CIEUC A +3217 ; disallowed_STD3_mapped ; 0028 CC28 0029 #1.1 PARENTHESIZED HANGUL CHIEUCH A +3218 ; disallowed_STD3_mapped ; 0028 CE74 0029 #1.1 PARENTHESIZED HANGUL KHIEUKH A +3219 ; disallowed_STD3_mapped ; 0028 D0C0 0029 #1.1 PARENTHESIZED HANGUL THIEUTH A +321A ; disallowed_STD3_mapped ; 0028 D30C 0029 #1.1 PARENTHESIZED HANGUL PHIEUPH A +321B ; disallowed_STD3_mapped ; 0028 D558 0029 #1.1 PARENTHESIZED HANGUL HIEUH A +321C ; disallowed_STD3_mapped ; 0028 C8FC 0029 #1.1 PARENTHESIZED HANGUL CIEUC U +321D ; disallowed_STD3_mapped ; 0028 C624 C804 0029 #4.0 PARENTHESIZED KOREAN CHARACTER OJEON +321E ; disallowed_STD3_mapped ; 0028 C624 D6C4 0029 #4.0 PARENTHESIZED KOREAN CHARACTER O HU +321F ; disallowed # NA +3220 ; disallowed_STD3_mapped ; 0028 4E00 0029 #1.1 PARENTHESIZED IDEOGRAPH ONE +3221 ; disallowed_STD3_mapped ; 0028 4E8C 0029 #1.1 PARENTHESIZED IDEOGRAPH TWO +3222 ; disallowed_STD3_mapped ; 0028 4E09 0029 #1.1 PARENTHESIZED IDEOGRAPH THREE +3223 ; disallowed_STD3_mapped ; 0028 56DB 0029 #1.1 PARENTHESIZED IDEOGRAPH FOUR +3224 ; disallowed_STD3_mapped ; 0028 4E94 0029 #1.1 PARENTHESIZED IDEOGRAPH FIVE +3225 ; disallowed_STD3_mapped ; 0028 516D 0029 #1.1 PARENTHESIZED IDEOGRAPH SIX +3226 ; disallowed_STD3_mapped ; 0028 4E03 0029 #1.1 PARENTHESIZED IDEOGRAPH SEVEN +3227 ; disallowed_STD3_mapped ; 0028 516B 0029 #1.1 PARENTHESIZED IDEOGRAPH EIGHT +3228 ; disallowed_STD3_mapped ; 0028 4E5D 0029 #1.1 PARENTHESIZED IDEOGRAPH NINE +3229 ; disallowed_STD3_mapped ; 0028 5341 0029 #1.1 PARENTHESIZED IDEOGRAPH TEN +322A ; disallowed_STD3_mapped ; 0028 6708 0029 #1.1 PARENTHESIZED IDEOGRAPH 
MOON +322B ; disallowed_STD3_mapped ; 0028 706B 0029 #1.1 PARENTHESIZED IDEOGRAPH FIRE +322C ; disallowed_STD3_mapped ; 0028 6C34 0029 #1.1 PARENTHESIZED IDEOGRAPH WATER +322D ; disallowed_STD3_mapped ; 0028 6728 0029 #1.1 PARENTHESIZED IDEOGRAPH WOOD +322E ; disallowed_STD3_mapped ; 0028 91D1 0029 #1.1 PARENTHESIZED IDEOGRAPH METAL +322F ; disallowed_STD3_mapped ; 0028 571F 0029 #1.1 PARENTHESIZED IDEOGRAPH EARTH +3230 ; disallowed_STD3_mapped ; 0028 65E5 0029 #1.1 PARENTHESIZED IDEOGRAPH SUN +3231 ; disallowed_STD3_mapped ; 0028 682A 0029 #1.1 PARENTHESIZED IDEOGRAPH STOCK +3232 ; disallowed_STD3_mapped ; 0028 6709 0029 #1.1 PARENTHESIZED IDEOGRAPH HAVE +3233 ; disallowed_STD3_mapped ; 0028 793E 0029 #1.1 PARENTHESIZED IDEOGRAPH SOCIETY +3234 ; disallowed_STD3_mapped ; 0028 540D 0029 #1.1 PARENTHESIZED IDEOGRAPH NAME +3235 ; disallowed_STD3_mapped ; 0028 7279 0029 #1.1 PARENTHESIZED IDEOGRAPH SPECIAL +3236 ; disallowed_STD3_mapped ; 0028 8CA1 0029 #1.1 PARENTHESIZED IDEOGRAPH FINANCIAL +3237 ; disallowed_STD3_mapped ; 0028 795D 0029 #1.1 PARENTHESIZED IDEOGRAPH CONGRATULATION +3238 ; disallowed_STD3_mapped ; 0028 52B4 0029 #1.1 PARENTHESIZED IDEOGRAPH LABOR +3239 ; disallowed_STD3_mapped ; 0028 4EE3 0029 #1.1 PARENTHESIZED IDEOGRAPH REPRESENT +323A ; disallowed_STD3_mapped ; 0028 547C 0029 #1.1 PARENTHESIZED IDEOGRAPH CALL +323B ; disallowed_STD3_mapped ; 0028 5B66 0029 #1.1 PARENTHESIZED IDEOGRAPH STUDY +323C ; disallowed_STD3_mapped ; 0028 76E3 0029 #1.1 PARENTHESIZED IDEOGRAPH SUPERVISE +323D ; disallowed_STD3_mapped ; 0028 4F01 0029 #1.1 PARENTHESIZED IDEOGRAPH ENTERPRISE +323E ; disallowed_STD3_mapped ; 0028 8CC7 0029 #1.1 PARENTHESIZED IDEOGRAPH RESOURCE +323F ; disallowed_STD3_mapped ; 0028 5354 0029 #1.1 PARENTHESIZED IDEOGRAPH ALLIANCE +3240 ; disallowed_STD3_mapped ; 0028 796D 0029 #1.1 PARENTHESIZED IDEOGRAPH FESTIVAL +3241 ; disallowed_STD3_mapped ; 0028 4F11 0029 #1.1 PARENTHESIZED IDEOGRAPH REST +3242 ; disallowed_STD3_mapped ; 0028 81EA 0029 #1.1 PARENTHESIZED IDEOGRAPH SELF +3243 ; disallowed_STD3_mapped ; 0028 81F3 0029 #1.1 PARENTHESIZED IDEOGRAPH REACH +3244 ; mapped ; 554F # 5.2 CIRCLED IDEOGRAPH QUESTION +3245 ; mapped ; 5E7C # 5.2 CIRCLED IDEOGRAPH KINDERGARTEN +3246 ; mapped ; 6587 # 5.2 CIRCLED IDEOGRAPH SCHOOL +3247 ; mapped ; 7B8F # 5.2 CIRCLED IDEOGRAPH KOTO +3248..324F ; valid ; ; NV8 # 5.2 CIRCLED NUMBER TEN ON BLACK SQUARE..CIRCLED NUMBER EIGHTY ON BLACK SQUARE +3250 ; mapped ; 0070 0074 0065 #4.0 PARTNERSHIP SIGN +3251 ; mapped ; 0032 0031 # 3.2 CIRCLED NUMBER TWENTY ONE +3252 ; mapped ; 0032 0032 # 3.2 CIRCLED NUMBER TWENTY TWO +3253 ; mapped ; 0032 0033 # 3.2 CIRCLED NUMBER TWENTY THREE +3254 ; mapped ; 0032 0034 # 3.2 CIRCLED NUMBER TWENTY FOUR +3255 ; mapped ; 0032 0035 # 3.2 CIRCLED NUMBER TWENTY FIVE +3256 ; mapped ; 0032 0036 # 3.2 CIRCLED NUMBER TWENTY SIX +3257 ; mapped ; 0032 0037 # 3.2 CIRCLED NUMBER TWENTY SEVEN +3258 ; mapped ; 0032 0038 # 3.2 CIRCLED NUMBER TWENTY EIGHT +3259 ; mapped ; 0032 0039 # 3.2 CIRCLED NUMBER TWENTY NINE +325A ; mapped ; 0033 0030 # 3.2 CIRCLED NUMBER THIRTY +325B ; mapped ; 0033 0031 # 3.2 CIRCLED NUMBER THIRTY ONE +325C ; mapped ; 0033 0032 # 3.2 CIRCLED NUMBER THIRTY TWO +325D ; mapped ; 0033 0033 # 3.2 CIRCLED NUMBER THIRTY THREE +325E ; mapped ; 0033 0034 # 3.2 CIRCLED NUMBER THIRTY FOUR +325F ; mapped ; 0033 0035 # 3.2 CIRCLED NUMBER THIRTY FIVE +3260 ; mapped ; 1100 # 1.1 CIRCLED HANGUL KIYEOK +3261 ; mapped ; 1102 # 1.1 CIRCLED HANGUL NIEUN +3262 ; mapped ; 1103 # 1.1 CIRCLED HANGUL TIKEUT +3263 ; mapped ; 1105 
# 1.1 CIRCLED HANGUL RIEUL +3264 ; mapped ; 1106 # 1.1 CIRCLED HANGUL MIEUM +3265 ; mapped ; 1107 # 1.1 CIRCLED HANGUL PIEUP +3266 ; mapped ; 1109 # 1.1 CIRCLED HANGUL SIOS +3267 ; mapped ; 110B # 1.1 CIRCLED HANGUL IEUNG +3268 ; mapped ; 110C # 1.1 CIRCLED HANGUL CIEUC +3269 ; mapped ; 110E # 1.1 CIRCLED HANGUL CHIEUCH +326A ; mapped ; 110F # 1.1 CIRCLED HANGUL KHIEUKH +326B ; mapped ; 1110 # 1.1 CIRCLED HANGUL THIEUTH +326C ; mapped ; 1111 # 1.1 CIRCLED HANGUL PHIEUPH +326D ; mapped ; 1112 # 1.1 CIRCLED HANGUL HIEUH +326E ; mapped ; AC00 # 1.1 CIRCLED HANGUL KIYEOK A +326F ; mapped ; B098 # 1.1 CIRCLED HANGUL NIEUN A +3270 ; mapped ; B2E4 # 1.1 CIRCLED HANGUL TIKEUT A +3271 ; mapped ; B77C # 1.1 CIRCLED HANGUL RIEUL A +3272 ; mapped ; B9C8 # 1.1 CIRCLED HANGUL MIEUM A +3273 ; mapped ; BC14 # 1.1 CIRCLED HANGUL PIEUP A +3274 ; mapped ; C0AC # 1.1 CIRCLED HANGUL SIOS A +3275 ; mapped ; C544 # 1.1 CIRCLED HANGUL IEUNG A +3276 ; mapped ; C790 # 1.1 CIRCLED HANGUL CIEUC A +3277 ; mapped ; CC28 # 1.1 CIRCLED HANGUL CHIEUCH A +3278 ; mapped ; CE74 # 1.1 CIRCLED HANGUL KHIEUKH A +3279 ; mapped ; D0C0 # 1.1 CIRCLED HANGUL THIEUTH A +327A ; mapped ; D30C # 1.1 CIRCLED HANGUL PHIEUPH A +327B ; mapped ; D558 # 1.1 CIRCLED HANGUL HIEUH A +327C ; mapped ; CC38 ACE0 # 4.0 CIRCLED KOREAN CHARACTER CHAMKO +327D ; mapped ; C8FC C758 # 4.0 CIRCLED KOREAN CHARACTER JUEUI +327E ; mapped ; C6B0 # 4.1 CIRCLED HANGUL IEUNG U +327F ; valid ; ; NV8 # 1.1 KOREAN STANDARD SYMBOL +3280 ; mapped ; 4E00 # 1.1 CIRCLED IDEOGRAPH ONE +3281 ; mapped ; 4E8C # 1.1 CIRCLED IDEOGRAPH TWO +3282 ; mapped ; 4E09 # 1.1 CIRCLED IDEOGRAPH THREE +3283 ; mapped ; 56DB # 1.1 CIRCLED IDEOGRAPH FOUR +3284 ; mapped ; 4E94 # 1.1 CIRCLED IDEOGRAPH FIVE +3285 ; mapped ; 516D # 1.1 CIRCLED IDEOGRAPH SIX +3286 ; mapped ; 4E03 # 1.1 CIRCLED IDEOGRAPH SEVEN +3287 ; mapped ; 516B # 1.1 CIRCLED IDEOGRAPH EIGHT +3288 ; mapped ; 4E5D # 1.1 CIRCLED IDEOGRAPH NINE +3289 ; mapped ; 5341 # 1.1 CIRCLED IDEOGRAPH TEN +328A ; mapped ; 6708 # 1.1 CIRCLED IDEOGRAPH MOON +328B ; mapped ; 706B # 1.1 CIRCLED IDEOGRAPH FIRE +328C ; mapped ; 6C34 # 1.1 CIRCLED IDEOGRAPH WATER +328D ; mapped ; 6728 # 1.1 CIRCLED IDEOGRAPH WOOD +328E ; mapped ; 91D1 # 1.1 CIRCLED IDEOGRAPH METAL +328F ; mapped ; 571F # 1.1 CIRCLED IDEOGRAPH EARTH +3290 ; mapped ; 65E5 # 1.1 CIRCLED IDEOGRAPH SUN +3291 ; mapped ; 682A # 1.1 CIRCLED IDEOGRAPH STOCK +3292 ; mapped ; 6709 # 1.1 CIRCLED IDEOGRAPH HAVE +3293 ; mapped ; 793E # 1.1 CIRCLED IDEOGRAPH SOCIETY +3294 ; mapped ; 540D # 1.1 CIRCLED IDEOGRAPH NAME +3295 ; mapped ; 7279 # 1.1 CIRCLED IDEOGRAPH SPECIAL +3296 ; mapped ; 8CA1 # 1.1 CIRCLED IDEOGRAPH FINANCIAL +3297 ; mapped ; 795D # 1.1 CIRCLED IDEOGRAPH CONGRATULATION +3298 ; mapped ; 52B4 # 1.1 CIRCLED IDEOGRAPH LABOR +3299 ; mapped ; 79D8 # 1.1 CIRCLED IDEOGRAPH SECRET +329A ; mapped ; 7537 # 1.1 CIRCLED IDEOGRAPH MALE +329B ; mapped ; 5973 # 1.1 CIRCLED IDEOGRAPH FEMALE +329C ; mapped ; 9069 # 1.1 CIRCLED IDEOGRAPH SUITABLE +329D ; mapped ; 512A # 1.1 CIRCLED IDEOGRAPH EXCELLENT +329E ; mapped ; 5370 # 1.1 CIRCLED IDEOGRAPH PRINT +329F ; mapped ; 6CE8 # 1.1 CIRCLED IDEOGRAPH ATTENTION +32A0 ; mapped ; 9805 # 1.1 CIRCLED IDEOGRAPH ITEM +32A1 ; mapped ; 4F11 # 1.1 CIRCLED IDEOGRAPH REST +32A2 ; mapped ; 5199 # 1.1 CIRCLED IDEOGRAPH COPY +32A3 ; mapped ; 6B63 # 1.1 CIRCLED IDEOGRAPH CORRECT +32A4 ; mapped ; 4E0A # 1.1 CIRCLED IDEOGRAPH HIGH +32A5 ; mapped ; 4E2D # 1.1 CIRCLED IDEOGRAPH CENTRE +32A6 ; mapped ; 4E0B # 1.1 CIRCLED IDEOGRAPH LOW +32A7 ; mapped ; 5DE6 # 1.1 CIRCLED 
IDEOGRAPH LEFT +32A8 ; mapped ; 53F3 # 1.1 CIRCLED IDEOGRAPH RIGHT +32A9 ; mapped ; 533B # 1.1 CIRCLED IDEOGRAPH MEDICINE +32AA ; mapped ; 5B97 # 1.1 CIRCLED IDEOGRAPH RELIGION +32AB ; mapped ; 5B66 # 1.1 CIRCLED IDEOGRAPH STUDY +32AC ; mapped ; 76E3 # 1.1 CIRCLED IDEOGRAPH SUPERVISE +32AD ; mapped ; 4F01 # 1.1 CIRCLED IDEOGRAPH ENTERPRISE +32AE ; mapped ; 8CC7 # 1.1 CIRCLED IDEOGRAPH RESOURCE +32AF ; mapped ; 5354 # 1.1 CIRCLED IDEOGRAPH ALLIANCE +32B0 ; mapped ; 591C # 1.1 CIRCLED IDEOGRAPH NIGHT +32B1 ; mapped ; 0033 0036 # 3.2 CIRCLED NUMBER THIRTY SIX +32B2 ; mapped ; 0033 0037 # 3.2 CIRCLED NUMBER THIRTY SEVEN +32B3 ; mapped ; 0033 0038 # 3.2 CIRCLED NUMBER THIRTY EIGHT +32B4 ; mapped ; 0033 0039 # 3.2 CIRCLED NUMBER THIRTY NINE +32B5 ; mapped ; 0034 0030 # 3.2 CIRCLED NUMBER FORTY +32B6 ; mapped ; 0034 0031 # 3.2 CIRCLED NUMBER FORTY ONE +32B7 ; mapped ; 0034 0032 # 3.2 CIRCLED NUMBER FORTY TWO +32B8 ; mapped ; 0034 0033 # 3.2 CIRCLED NUMBER FORTY THREE +32B9 ; mapped ; 0034 0034 # 3.2 CIRCLED NUMBER FORTY FOUR +32BA ; mapped ; 0034 0035 # 3.2 CIRCLED NUMBER FORTY FIVE +32BB ; mapped ; 0034 0036 # 3.2 CIRCLED NUMBER FORTY SIX +32BC ; mapped ; 0034 0037 # 3.2 CIRCLED NUMBER FORTY SEVEN +32BD ; mapped ; 0034 0038 # 3.2 CIRCLED NUMBER FORTY EIGHT +32BE ; mapped ; 0034 0039 # 3.2 CIRCLED NUMBER FORTY NINE +32BF ; mapped ; 0035 0030 # 3.2 CIRCLED NUMBER FIFTY +32C0 ; mapped ; 0031 6708 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR JANUARY +32C1 ; mapped ; 0032 6708 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR FEBRUARY +32C2 ; mapped ; 0033 6708 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR MARCH +32C3 ; mapped ; 0034 6708 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR APRIL +32C4 ; mapped ; 0035 6708 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR MAY +32C5 ; mapped ; 0036 6708 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR JUNE +32C6 ; mapped ; 0037 6708 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR JULY +32C7 ; mapped ; 0038 6708 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR AUGUST +32C8 ; mapped ; 0039 6708 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR SEPTEMBER +32C9 ; mapped ; 0031 0030 6708 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR OCTOBER +32CA ; mapped ; 0031 0031 6708 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR NOVEMBER +32CB ; mapped ; 0031 0032 6708 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DECEMBER +32CC ; mapped ; 0068 0067 # 4.0 SQUARE HG +32CD ; mapped ; 0065 0072 0067 #4.0 SQUARE ERG +32CE ; mapped ; 0065 0076 # 4.0 SQUARE EV +32CF ; mapped ; 006C 0074 0064 #4.0 LIMITED LIABILITY SIGN +32D0 ; mapped ; 30A2 # 1.1 CIRCLED KATAKANA A +32D1 ; mapped ; 30A4 # 1.1 CIRCLED KATAKANA I +32D2 ; mapped ; 30A6 # 1.1 CIRCLED KATAKANA U +32D3 ; mapped ; 30A8 # 1.1 CIRCLED KATAKANA E +32D4 ; mapped ; 30AA # 1.1 CIRCLED KATAKANA O +32D5 ; mapped ; 30AB # 1.1 CIRCLED KATAKANA KA +32D6 ; mapped ; 30AD # 1.1 CIRCLED KATAKANA KI +32D7 ; mapped ; 30AF # 1.1 CIRCLED KATAKANA KU +32D8 ; mapped ; 30B1 # 1.1 CIRCLED KATAKANA KE +32D9 ; mapped ; 30B3 # 1.1 CIRCLED KATAKANA KO +32DA ; mapped ; 30B5 # 1.1 CIRCLED KATAKANA SA +32DB ; mapped ; 30B7 # 1.1 CIRCLED KATAKANA SI +32DC ; mapped ; 30B9 # 1.1 CIRCLED KATAKANA SU +32DD ; mapped ; 30BB # 1.1 CIRCLED KATAKANA SE +32DE ; mapped ; 30BD # 1.1 CIRCLED KATAKANA SO +32DF ; mapped ; 30BF # 1.1 CIRCLED KATAKANA TA +32E0 ; mapped ; 30C1 # 1.1 CIRCLED KATAKANA TI +32E1 ; mapped ; 30C4 # 1.1 CIRCLED KATAKANA TU +32E2 ; mapped ; 30C6 # 1.1 CIRCLED KATAKANA TE +32E3 ; mapped ; 30C8 # 1.1 CIRCLED KATAKANA TO +32E4 ; mapped ; 30CA # 1.1 CIRCLED KATAKANA NA +32E5 ; mapped ; 30CB # 1.1 CIRCLED KATAKANA NI +32E6 ; mapped ; 30CC # 1.1 CIRCLED 
KATAKANA NU +32E7 ; mapped ; 30CD # 1.1 CIRCLED KATAKANA NE +32E8 ; mapped ; 30CE # 1.1 CIRCLED KATAKANA NO +32E9 ; mapped ; 30CF # 1.1 CIRCLED KATAKANA HA +32EA ; mapped ; 30D2 # 1.1 CIRCLED KATAKANA HI +32EB ; mapped ; 30D5 # 1.1 CIRCLED KATAKANA HU +32EC ; mapped ; 30D8 # 1.1 CIRCLED KATAKANA HE +32ED ; mapped ; 30DB # 1.1 CIRCLED KATAKANA HO +32EE ; mapped ; 30DE # 1.1 CIRCLED KATAKANA MA +32EF ; mapped ; 30DF # 1.1 CIRCLED KATAKANA MI +32F0 ; mapped ; 30E0 # 1.1 CIRCLED KATAKANA MU +32F1 ; mapped ; 30E1 # 1.1 CIRCLED KATAKANA ME +32F2 ; mapped ; 30E2 # 1.1 CIRCLED KATAKANA MO +32F3 ; mapped ; 30E4 # 1.1 CIRCLED KATAKANA YA +32F4 ; mapped ; 30E6 # 1.1 CIRCLED KATAKANA YU +32F5 ; mapped ; 30E8 # 1.1 CIRCLED KATAKANA YO +32F6 ; mapped ; 30E9 # 1.1 CIRCLED KATAKANA RA +32F7 ; mapped ; 30EA # 1.1 CIRCLED KATAKANA RI +32F8 ; mapped ; 30EB # 1.1 CIRCLED KATAKANA RU +32F9 ; mapped ; 30EC # 1.1 CIRCLED KATAKANA RE +32FA ; mapped ; 30ED # 1.1 CIRCLED KATAKANA RO +32FB ; mapped ; 30EF # 1.1 CIRCLED KATAKANA WA +32FC ; mapped ; 30F0 # 1.1 CIRCLED KATAKANA WI +32FD ; mapped ; 30F1 # 1.1 CIRCLED KATAKANA WE +32FE ; mapped ; 30F2 # 1.1 CIRCLED KATAKANA WO +32FF ; disallowed # NA +3300 ; mapped ; 30A2 30D1 30FC 30C8 #1.1 SQUARE APAATO +3301 ; mapped ; 30A2 30EB 30D5 30A1 #1.1 SQUARE ARUHUA +3302 ; mapped ; 30A2 30F3 30DA 30A2 #1.1 SQUARE ANPEA +3303 ; mapped ; 30A2 30FC 30EB #1.1 SQUARE AARU +3304 ; mapped ; 30A4 30CB 30F3 30B0 #1.1 SQUARE ININGU +3305 ; mapped ; 30A4 30F3 30C1 #1.1 SQUARE INTI +3306 ; mapped ; 30A6 30A9 30F3 #1.1 SQUARE UON +3307 ; mapped ; 30A8 30B9 30AF 30FC 30C9 #1.1 SQUARE ESUKUUDO +3308 ; mapped ; 30A8 30FC 30AB 30FC #1.1 SQUARE EEKAA +3309 ; mapped ; 30AA 30F3 30B9 #1.1 SQUARE ONSU +330A ; mapped ; 30AA 30FC 30E0 #1.1 SQUARE OOMU +330B ; mapped ; 30AB 30A4 30EA #1.1 SQUARE KAIRI +330C ; mapped ; 30AB 30E9 30C3 30C8 #1.1 SQUARE KARATTO +330D ; mapped ; 30AB 30ED 30EA 30FC #1.1 SQUARE KARORII +330E ; mapped ; 30AC 30ED 30F3 #1.1 SQUARE GARON +330F ; mapped ; 30AC 30F3 30DE #1.1 SQUARE GANMA +3310 ; mapped ; 30AE 30AC # 1.1 SQUARE GIGA +3311 ; mapped ; 30AE 30CB 30FC #1.1 SQUARE GINII +3312 ; mapped ; 30AD 30E5 30EA 30FC #1.1 SQUARE KYURII +3313 ; mapped ; 30AE 30EB 30C0 30FC #1.1 SQUARE GIRUDAA +3314 ; mapped ; 30AD 30ED # 1.1 SQUARE KIRO +3315 ; mapped ; 30AD 30ED 30B0 30E9 30E0 #1.1 SQUARE KIROGURAMU +3316 ; mapped ; 30AD 30ED 30E1 30FC 30C8 30EB #1.1 SQUARE KIROMEETORU +3317 ; mapped ; 30AD 30ED 30EF 30C3 30C8 #1.1 SQUARE KIROWATTO +3318 ; mapped ; 30B0 30E9 30E0 #1.1 SQUARE GURAMU +3319 ; mapped ; 30B0 30E9 30E0 30C8 30F3 #1.1 SQUARE GURAMUTON +331A ; mapped ; 30AF 30EB 30BC 30A4 30ED #1.1 SQUARE KURUZEIRO +331B ; mapped ; 30AF 30ED 30FC 30CD #1.1 SQUARE KUROONE +331C ; mapped ; 30B1 30FC 30B9 #1.1 SQUARE KEESU +331D ; mapped ; 30B3 30EB 30CA #1.1 SQUARE KORUNA +331E ; mapped ; 30B3 30FC 30DD #1.1 SQUARE KOOPO +331F ; mapped ; 30B5 30A4 30AF 30EB #1.1 SQUARE SAIKURU +3320 ; mapped ; 30B5 30F3 30C1 30FC 30E0 #1.1 SQUARE SANTIIMU +3321 ; mapped ; 30B7 30EA 30F3 30B0 #1.1 SQUARE SIRINGU +3322 ; mapped ; 30BB 30F3 30C1 #1.1 SQUARE SENTI +3323 ; mapped ; 30BB 30F3 30C8 #1.1 SQUARE SENTO +3324 ; mapped ; 30C0 30FC 30B9 #1.1 SQUARE DAASU +3325 ; mapped ; 30C7 30B7 # 1.1 SQUARE DESI +3326 ; mapped ; 30C9 30EB # 1.1 SQUARE DORU +3327 ; mapped ; 30C8 30F3 # 1.1 SQUARE TON +3328 ; mapped ; 30CA 30CE # 1.1 SQUARE NANO +3329 ; mapped ; 30CE 30C3 30C8 #1.1 SQUARE NOTTO +332A ; mapped ; 30CF 30A4 30C4 #1.1 SQUARE HAITU +332B ; mapped ; 30D1 30FC 30BB 30F3 30C8 #1.1 SQUARE PAASENTO +332C ; 
mapped ; 30D1 30FC 30C4 #1.1 SQUARE PAATU +332D ; mapped ; 30D0 30FC 30EC 30EB #1.1 SQUARE BAARERU +332E ; mapped ; 30D4 30A2 30B9 30C8 30EB #1.1 SQUARE PIASUTORU +332F ; mapped ; 30D4 30AF 30EB #1.1 SQUARE PIKURU +3330 ; mapped ; 30D4 30B3 # 1.1 SQUARE PIKO +3331 ; mapped ; 30D3 30EB # 1.1 SQUARE BIRU +3332 ; mapped ; 30D5 30A1 30E9 30C3 30C9 #1.1 SQUARE HUARADDO +3333 ; mapped ; 30D5 30A3 30FC 30C8 #1.1 SQUARE HUIITO +3334 ; mapped ; 30D6 30C3 30B7 30A7 30EB #1.1 SQUARE BUSSYERU +3335 ; mapped ; 30D5 30E9 30F3 #1.1 SQUARE HURAN +3336 ; mapped ; 30D8 30AF 30BF 30FC 30EB #1.1 SQUARE HEKUTAARU +3337 ; mapped ; 30DA 30BD # 1.1 SQUARE PESO +3338 ; mapped ; 30DA 30CB 30D2 #1.1 SQUARE PENIHI +3339 ; mapped ; 30D8 30EB 30C4 #1.1 SQUARE HERUTU +333A ; mapped ; 30DA 30F3 30B9 #1.1 SQUARE PENSU +333B ; mapped ; 30DA 30FC 30B8 #1.1 SQUARE PEEZI +333C ; mapped ; 30D9 30FC 30BF #1.1 SQUARE BEETA +333D ; mapped ; 30DD 30A4 30F3 30C8 #1.1 SQUARE POINTO +333E ; mapped ; 30DC 30EB 30C8 #1.1 SQUARE BORUTO +333F ; mapped ; 30DB 30F3 # 1.1 SQUARE HON +3340 ; mapped ; 30DD 30F3 30C9 #1.1 SQUARE PONDO +3341 ; mapped ; 30DB 30FC 30EB #1.1 SQUARE HOORU +3342 ; mapped ; 30DB 30FC 30F3 #1.1 SQUARE HOON +3343 ; mapped ; 30DE 30A4 30AF 30ED #1.1 SQUARE MAIKURO +3344 ; mapped ; 30DE 30A4 30EB #1.1 SQUARE MAIRU +3345 ; mapped ; 30DE 30C3 30CF #1.1 SQUARE MAHHA +3346 ; mapped ; 30DE 30EB 30AF #1.1 SQUARE MARUKU +3347 ; mapped ; 30DE 30F3 30B7 30E7 30F3 #1.1 SQUARE MANSYON +3348 ; mapped ; 30DF 30AF 30ED 30F3 #1.1 SQUARE MIKURON +3349 ; mapped ; 30DF 30EA # 1.1 SQUARE MIRI +334A ; mapped ; 30DF 30EA 30D0 30FC 30EB #1.1 SQUARE MIRIBAARU +334B ; mapped ; 30E1 30AC # 1.1 SQUARE MEGA +334C ; mapped ; 30E1 30AC 30C8 30F3 #1.1 SQUARE MEGATON +334D ; mapped ; 30E1 30FC 30C8 30EB #1.1 SQUARE MEETORU +334E ; mapped ; 30E4 30FC 30C9 #1.1 SQUARE YAADO +334F ; mapped ; 30E4 30FC 30EB #1.1 SQUARE YAARU +3350 ; mapped ; 30E6 30A2 30F3 #1.1 SQUARE YUAN +3351 ; mapped ; 30EA 30C3 30C8 30EB #1.1 SQUARE RITTORU +3352 ; mapped ; 30EA 30E9 # 1.1 SQUARE RIRA +3353 ; mapped ; 30EB 30D4 30FC #1.1 SQUARE RUPII +3354 ; mapped ; 30EB 30FC 30D6 30EB #1.1 SQUARE RUUBURU +3355 ; mapped ; 30EC 30E0 # 1.1 SQUARE REMU +3356 ; mapped ; 30EC 30F3 30C8 30B2 30F3 #1.1 SQUARE RENTOGEN +3357 ; mapped ; 30EF 30C3 30C8 #1.1 SQUARE WATTO +3358 ; mapped ; 0030 70B9 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR ZERO +3359 ; mapped ; 0031 70B9 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR ONE +335A ; mapped ; 0032 70B9 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR TWO +335B ; mapped ; 0033 70B9 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR THREE +335C ; mapped ; 0034 70B9 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR FOUR +335D ; mapped ; 0035 70B9 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR FIVE +335E ; mapped ; 0036 70B9 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR SIX +335F ; mapped ; 0037 70B9 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR SEVEN +3360 ; mapped ; 0038 70B9 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR EIGHT +3361 ; mapped ; 0039 70B9 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR NINE +3362 ; mapped ; 0031 0030 70B9 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR TEN +3363 ; mapped ; 0031 0031 70B9 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR ELEVEN +3364 ; mapped ; 0031 0032 70B9 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR TWELVE +3365 ; mapped ; 0031 0033 70B9 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR THIRTEEN +3366 ; mapped ; 0031 0034 70B9 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR FOURTEEN +3367 ; mapped ; 0031 0035 70B9 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR 
FIFTEEN +3368 ; mapped ; 0031 0036 70B9 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR SIXTEEN +3369 ; mapped ; 0031 0037 70B9 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR SEVENTEEN +336A ; mapped ; 0031 0038 70B9 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR EIGHTEEN +336B ; mapped ; 0031 0039 70B9 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR NINETEEN +336C ; mapped ; 0032 0030 70B9 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR TWENTY +336D ; mapped ; 0032 0031 70B9 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR TWENTY-ONE +336E ; mapped ; 0032 0032 70B9 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR TWENTY-TWO +336F ; mapped ; 0032 0033 70B9 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR TWENTY-THREE +3370 ; mapped ; 0032 0034 70B9 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR TWENTY-FOUR +3371 ; mapped ; 0068 0070 0061 #1.1 SQUARE HPA +3372 ; mapped ; 0064 0061 # 1.1 SQUARE DA +3373 ; mapped ; 0061 0075 # 1.1 SQUARE AU +3374 ; mapped ; 0062 0061 0072 #1.1 SQUARE BAR +3375 ; mapped ; 006F 0076 # 1.1 SQUARE OV +3376 ; mapped ; 0070 0063 # 1.1 SQUARE PC +3377 ; mapped ; 0064 006D # 4.0 SQUARE DM +3378 ; mapped ; 0064 006D 0032 #4.0 SQUARE DM SQUARED +3379 ; mapped ; 0064 006D 0033 #4.0 SQUARE DM CUBED +337A ; mapped ; 0069 0075 # 4.0 SQUARE IU +337B ; mapped ; 5E73 6210 # 1.1 SQUARE ERA NAME HEISEI +337C ; mapped ; 662D 548C # 1.1 SQUARE ERA NAME SYOUWA +337D ; mapped ; 5927 6B63 # 1.1 SQUARE ERA NAME TAISYOU +337E ; mapped ; 660E 6CBB # 1.1 SQUARE ERA NAME MEIZI +337F ; mapped ; 682A 5F0F 4F1A 793E #1.1 SQUARE CORPORATION +3380 ; mapped ; 0070 0061 # 1.1 SQUARE PA AMPS +3381 ; mapped ; 006E 0061 # 1.1 SQUARE NA +3382 ; mapped ; 03BC 0061 # 1.1 SQUARE MU A +3383 ; mapped ; 006D 0061 # 1.1 SQUARE MA +3384 ; mapped ; 006B 0061 # 1.1 SQUARE KA +3385 ; mapped ; 006B 0062 # 1.1 SQUARE KB +3386 ; mapped ; 006D 0062 # 1.1 SQUARE MB +3387 ; mapped ; 0067 0062 # 1.1 SQUARE GB +3388 ; mapped ; 0063 0061 006C #1.1 SQUARE CAL +3389 ; mapped ; 006B 0063 0061 006C #1.1 SQUARE KCAL +338A ; mapped ; 0070 0066 # 1.1 SQUARE PF +338B ; mapped ; 006E 0066 # 1.1 SQUARE NF +338C ; mapped ; 03BC 0066 # 1.1 SQUARE MU F +338D ; mapped ; 03BC 0067 # 1.1 SQUARE MU G +338E ; mapped ; 006D 0067 # 1.1 SQUARE MG +338F ; mapped ; 006B 0067 # 1.1 SQUARE KG +3390 ; mapped ; 0068 007A # 1.1 SQUARE HZ +3391 ; mapped ; 006B 0068 007A #1.1 SQUARE KHZ +3392 ; mapped ; 006D 0068 007A #1.1 SQUARE MHZ +3393 ; mapped ; 0067 0068 007A #1.1 SQUARE GHZ +3394 ; mapped ; 0074 0068 007A #1.1 SQUARE THZ +3395 ; mapped ; 03BC 006C # 1.1 SQUARE MU L +3396 ; mapped ; 006D 006C # 1.1 SQUARE ML +3397 ; mapped ; 0064 006C # 1.1 SQUARE DL +3398 ; mapped ; 006B 006C # 1.1 SQUARE KL +3399 ; mapped ; 0066 006D # 1.1 SQUARE FM +339A ; mapped ; 006E 006D # 1.1 SQUARE NM +339B ; mapped ; 03BC 006D # 1.1 SQUARE MU M +339C ; mapped ; 006D 006D # 1.1 SQUARE MM +339D ; mapped ; 0063 006D # 1.1 SQUARE CM +339E ; mapped ; 006B 006D # 1.1 SQUARE KM +339F ; mapped ; 006D 006D 0032 #1.1 SQUARE MM SQUARED +33A0 ; mapped ; 0063 006D 0032 #1.1 SQUARE CM SQUARED +33A1 ; mapped ; 006D 0032 # 1.1 SQUARE M SQUARED +33A2 ; mapped ; 006B 006D 0032 #1.1 SQUARE KM SQUARED +33A3 ; mapped ; 006D 006D 0033 #1.1 SQUARE MM CUBED +33A4 ; mapped ; 0063 006D 0033 #1.1 SQUARE CM CUBED +33A5 ; mapped ; 006D 0033 # 1.1 SQUARE M CUBED +33A6 ; mapped ; 006B 006D 0033 #1.1 SQUARE KM CUBED +33A7 ; mapped ; 006D 2215 0073 #1.1 SQUARE M OVER S +33A8 ; mapped ; 006D 2215 0073 0032 #1.1 SQUARE M OVER S SQUARED +33A9 ; mapped ; 0070 0061 # 1.1 SQUARE PA +33AA ; mapped ; 006B 0070 0061 #1.1 SQUARE KPA +33AB ; mapped ; 
006D 0070 0061 #1.1 SQUARE MPA +33AC ; mapped ; 0067 0070 0061 #1.1 SQUARE GPA +33AD ; mapped ; 0072 0061 0064 #1.1 SQUARE RAD +33AE ; mapped ; 0072 0061 0064 2215 0073 #1.1 SQUARE RAD OVER S +33AF ; mapped ; 0072 0061 0064 2215 0073 0032 #1.1 SQUARE RAD OVER S SQUARED +33B0 ; mapped ; 0070 0073 # 1.1 SQUARE PS +33B1 ; mapped ; 006E 0073 # 1.1 SQUARE NS +33B2 ; mapped ; 03BC 0073 # 1.1 SQUARE MU S +33B3 ; mapped ; 006D 0073 # 1.1 SQUARE MS +33B4 ; mapped ; 0070 0076 # 1.1 SQUARE PV +33B5 ; mapped ; 006E 0076 # 1.1 SQUARE NV +33B6 ; mapped ; 03BC 0076 # 1.1 SQUARE MU V +33B7 ; mapped ; 006D 0076 # 1.1 SQUARE MV +33B8 ; mapped ; 006B 0076 # 1.1 SQUARE KV +33B9 ; mapped ; 006D 0076 # 1.1 SQUARE MV MEGA +33BA ; mapped ; 0070 0077 # 1.1 SQUARE PW +33BB ; mapped ; 006E 0077 # 1.1 SQUARE NW +33BC ; mapped ; 03BC 0077 # 1.1 SQUARE MU W +33BD ; mapped ; 006D 0077 # 1.1 SQUARE MW +33BE ; mapped ; 006B 0077 # 1.1 SQUARE KW +33BF ; mapped ; 006D 0077 # 1.1 SQUARE MW MEGA +33C0 ; mapped ; 006B 03C9 # 1.1 SQUARE K OHM +33C1 ; mapped ; 006D 03C9 # 1.1 SQUARE M OHM +33C2 ; disallowed # 1.1 SQUARE AM +33C3 ; mapped ; 0062 0071 # 1.1 SQUARE BQ +33C4 ; mapped ; 0063 0063 # 1.1 SQUARE CC +33C5 ; mapped ; 0063 0064 # 1.1 SQUARE CD +33C6 ; mapped ; 0063 2215 006B 0067 #1.1 SQUARE C OVER KG +33C7 ; disallowed # 1.1 SQUARE CO +33C8 ; mapped ; 0064 0062 # 1.1 SQUARE DB +33C9 ; mapped ; 0067 0079 # 1.1 SQUARE GY +33CA ; mapped ; 0068 0061 # 1.1 SQUARE HA +33CB ; mapped ; 0068 0070 # 1.1 SQUARE HP +33CC ; mapped ; 0069 006E # 1.1 SQUARE IN +33CD ; mapped ; 006B 006B # 1.1 SQUARE KK +33CE ; mapped ; 006B 006D # 1.1 SQUARE KM CAPITAL +33CF ; mapped ; 006B 0074 # 1.1 SQUARE KT +33D0 ; mapped ; 006C 006D # 1.1 SQUARE LM +33D1 ; mapped ; 006C 006E # 1.1 SQUARE LN +33D2 ; mapped ; 006C 006F 0067 #1.1 SQUARE LOG +33D3 ; mapped ; 006C 0078 # 1.1 SQUARE LX +33D4 ; mapped ; 006D 0062 # 1.1 SQUARE MB SMALL +33D5 ; mapped ; 006D 0069 006C #1.1 SQUARE MIL +33D6 ; mapped ; 006D 006F 006C #1.1 SQUARE MOL +33D7 ; mapped ; 0070 0068 # 1.1 SQUARE PH +33D8 ; disallowed # 1.1 SQUARE PM +33D9 ; mapped ; 0070 0070 006D #1.1 SQUARE PPM +33DA ; mapped ; 0070 0072 # 1.1 SQUARE PR +33DB ; mapped ; 0073 0072 # 1.1 SQUARE SR +33DC ; mapped ; 0073 0076 # 1.1 SQUARE SV +33DD ; mapped ; 0077 0062 # 1.1 SQUARE WB +33DE ; mapped ; 0076 2215 006D #4.0 SQUARE V OVER M +33DF ; mapped ; 0061 2215 006D #4.0 SQUARE A OVER M +33E0 ; mapped ; 0031 65E5 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY ONE +33E1 ; mapped ; 0032 65E5 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY TWO +33E2 ; mapped ; 0033 65E5 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY THREE +33E3 ; mapped ; 0034 65E5 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY FOUR +33E4 ; mapped ; 0035 65E5 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY FIVE +33E5 ; mapped ; 0036 65E5 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY SIX +33E6 ; mapped ; 0037 65E5 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY SEVEN +33E7 ; mapped ; 0038 65E5 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY EIGHT +33E8 ; mapped ; 0039 65E5 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY NINE +33E9 ; mapped ; 0031 0030 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY TEN +33EA ; mapped ; 0031 0031 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY ELEVEN +33EB ; mapped ; 0031 0032 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY TWELVE +33EC ; mapped ; 0031 0033 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY THIRTEEN +33ED ; mapped ; 0031 0034 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY FOURTEEN +33EE ; mapped ; 0031 0035 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR 
DAY FIFTEEN +33EF ; mapped ; 0031 0036 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY SIXTEEN +33F0 ; mapped ; 0031 0037 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY SEVENTEEN +33F1 ; mapped ; 0031 0038 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY EIGHTEEN +33F2 ; mapped ; 0031 0039 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY NINETEEN +33F3 ; mapped ; 0032 0030 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY TWENTY +33F4 ; mapped ; 0032 0031 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY TWENTY-ONE +33F5 ; mapped ; 0032 0032 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY TWENTY-TWO +33F6 ; mapped ; 0032 0033 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY TWENTY-THREE +33F7 ; mapped ; 0032 0034 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY TWENTY-FOUR +33F8 ; mapped ; 0032 0035 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY TWENTY-FIVE +33F9 ; mapped ; 0032 0036 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY TWENTY-SIX +33FA ; mapped ; 0032 0037 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY TWENTY-SEVEN +33FB ; mapped ; 0032 0038 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY TWENTY-EIGHT +33FC ; mapped ; 0032 0039 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY TWENTY-NINE +33FD ; mapped ; 0033 0030 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY THIRTY +33FE ; mapped ; 0033 0031 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY THIRTY-ONE +33FF ; mapped ; 0067 0061 006C #4.0 SQUARE GAL +3400..4DB5 ; valid # 3.0 CJK UNIFIED IDEOGRAPH-3400..CJK UNIFIED IDEOGRAPH-4DB5 +4DB6..4DBF ; disallowed # NA .. +4DC0..4DFF ; valid ; ; NV8 # 4.0 HEXAGRAM FOR THE CREATIVE HEAVEN..HEXAGRAM FOR BEFORE COMPLETION +4E00..9FA5 ; valid # 1.1 CJK UNIFIED IDEOGRAPH-4E00..CJK UNIFIED IDEOGRAPH-9FA5 +9FA6..9FBB ; valid # 4.1 CJK UNIFIED IDEOGRAPH-9FA6..CJK UNIFIED IDEOGRAPH-9FBB +9FBC..9FC3 ; valid # 5.1 CJK UNIFIED IDEOGRAPH-9FBC..CJK UNIFIED IDEOGRAPH-9FC3 +9FC4..9FCB ; valid # 5.2 CJK UNIFIED IDEOGRAPH-9FC4..CJK UNIFIED IDEOGRAPH-9FCB +9FCC ; valid # 6.1 CJK UNIFIED IDEOGRAPH-9FCC +9FCD..9FD5 ; valid # 8.0 CJK UNIFIED IDEOGRAPH-9FCD..CJK UNIFIED IDEOGRAPH-9FD5 +9FD6..9FEA ; valid # 10.0 CJK UNIFIED IDEOGRAPH-9FD6..CJK UNIFIED IDEOGRAPH-9FEA +9FEB..9FFF ; disallowed # NA .. +A000..A48C ; valid # 3.0 YI SYLLABLE IT..YI SYLLABLE YYR +A48D..A48F ; disallowed # NA .. +A490..A4A1 ; valid ; ; NV8 # 3.0 YI RADICAL QOT..YI RADICAL GA +A4A2..A4A3 ; valid ; ; NV8 # 3.2 YI RADICAL ZUP..YI RADICAL CYT +A4A4..A4B3 ; valid ; ; NV8 # 3.0 YI RADICAL DDUR..YI RADICAL JO +A4B4 ; valid ; ; NV8 # 3.2 YI RADICAL NZUP +A4B5..A4C0 ; valid ; ; NV8 # 3.0 YI RADICAL JJY..YI RADICAL SHAT +A4C1 ; valid ; ; NV8 # 3.2 YI RADICAL ZUR +A4C2..A4C4 ; valid ; ; NV8 # 3.0 YI RADICAL SHOP..YI RADICAL ZZIET +A4C5 ; valid ; ; NV8 # 3.2 YI RADICAL NBIE +A4C6 ; valid ; ; NV8 # 3.0 YI RADICAL KE +A4C7..A4CF ; disallowed # NA .. +A4D0..A4FD ; valid # 5.2 LISU LETTER BA..LISU LETTER TONE MYA JEU +A4FE..A4FF ; valid ; ; NV8 # 5.2 LISU PUNCTUATION COMMA..LISU PUNCTUATION FULL STOP +A500..A60C ; valid # 5.1 VAI SYLLABLE EE..VAI SYLLABLE LENGTHENER +A60D..A60F ; valid ; ; NV8 # 5.1 VAI COMMA..VAI QUESTION MARK +A610..A62B ; valid # 5.1 VAI SYLLABLE NDOLE FA..VAI SYLLABLE NDOLE DO +A62C..A63F ; disallowed # NA .. 
+A640 ; mapped ; A641 # 5.1 CYRILLIC CAPITAL LETTER ZEMLYA +A641 ; valid # 5.1 CYRILLIC SMALL LETTER ZEMLYA +A642 ; mapped ; A643 # 5.1 CYRILLIC CAPITAL LETTER DZELO +A643 ; valid # 5.1 CYRILLIC SMALL LETTER DZELO +A644 ; mapped ; A645 # 5.1 CYRILLIC CAPITAL LETTER REVERSED DZE +A645 ; valid # 5.1 CYRILLIC SMALL LETTER REVERSED DZE +A646 ; mapped ; A647 # 5.1 CYRILLIC CAPITAL LETTER IOTA +A647 ; valid # 5.1 CYRILLIC SMALL LETTER IOTA +A648 ; mapped ; A649 # 5.1 CYRILLIC CAPITAL LETTER DJERV +A649 ; valid # 5.1 CYRILLIC SMALL LETTER DJERV +A64A ; mapped ; A64B # 5.1 CYRILLIC CAPITAL LETTER MONOGRAPH UK +A64B ; valid # 5.1 CYRILLIC SMALL LETTER MONOGRAPH UK +A64C ; mapped ; A64D # 5.1 CYRILLIC CAPITAL LETTER BROAD OMEGA +A64D ; valid # 5.1 CYRILLIC SMALL LETTER BROAD OMEGA +A64E ; mapped ; A64F # 5.1 CYRILLIC CAPITAL LETTER NEUTRAL YER +A64F ; valid # 5.1 CYRILLIC SMALL LETTER NEUTRAL YER +A650 ; mapped ; A651 # 5.1 CYRILLIC CAPITAL LETTER YERU WITH BACK YER +A651 ; valid # 5.1 CYRILLIC SMALL LETTER YERU WITH BACK YER +A652 ; mapped ; A653 # 5.1 CYRILLIC CAPITAL LETTER IOTIFIED YAT +A653 ; valid # 5.1 CYRILLIC SMALL LETTER IOTIFIED YAT +A654 ; mapped ; A655 # 5.1 CYRILLIC CAPITAL LETTER REVERSED YU +A655 ; valid # 5.1 CYRILLIC SMALL LETTER REVERSED YU +A656 ; mapped ; A657 # 5.1 CYRILLIC CAPITAL LETTER IOTIFIED A +A657 ; valid # 5.1 CYRILLIC SMALL LETTER IOTIFIED A +A658 ; mapped ; A659 # 5.1 CYRILLIC CAPITAL LETTER CLOSED LITTLE YUS +A659 ; valid # 5.1 CYRILLIC SMALL LETTER CLOSED LITTLE YUS +A65A ; mapped ; A65B # 5.1 CYRILLIC CAPITAL LETTER BLENDED YUS +A65B ; valid # 5.1 CYRILLIC SMALL LETTER BLENDED YUS +A65C ; mapped ; A65D # 5.1 CYRILLIC CAPITAL LETTER IOTIFIED CLOSED LITTLE YUS +A65D ; valid # 5.1 CYRILLIC SMALL LETTER IOTIFIED CLOSED LITTLE YUS +A65E ; mapped ; A65F # 5.1 CYRILLIC CAPITAL LETTER YN +A65F ; valid # 5.1 CYRILLIC SMALL LETTER YN +A660 ; mapped ; A661 # 6.0 CYRILLIC CAPITAL LETTER REVERSED TSE +A661 ; valid # 6.0 CYRILLIC SMALL LETTER REVERSED TSE +A662 ; mapped ; A663 # 5.1 CYRILLIC CAPITAL LETTER SOFT DE +A663 ; valid # 5.1 CYRILLIC SMALL LETTER SOFT DE +A664 ; mapped ; A665 # 5.1 CYRILLIC CAPITAL LETTER SOFT EL +A665 ; valid # 5.1 CYRILLIC SMALL LETTER SOFT EL +A666 ; mapped ; A667 # 5.1 CYRILLIC CAPITAL LETTER SOFT EM +A667 ; valid # 5.1 CYRILLIC SMALL LETTER SOFT EM +A668 ; mapped ; A669 # 5.1 CYRILLIC CAPITAL LETTER MONOCULAR O +A669 ; valid # 5.1 CYRILLIC SMALL LETTER MONOCULAR O +A66A ; mapped ; A66B # 5.1 CYRILLIC CAPITAL LETTER BINOCULAR O +A66B ; valid # 5.1 CYRILLIC SMALL LETTER BINOCULAR O +A66C ; mapped ; A66D # 5.1 CYRILLIC CAPITAL LETTER DOUBLE MONOCULAR O +A66D..A66F ; valid # 5.1 CYRILLIC SMALL LETTER DOUBLE MONOCULAR O..COMBINING CYRILLIC VZMET +A670..A673 ; valid ; ; NV8 # 5.1 COMBINING CYRILLIC TEN MILLIONS SIGN..SLAVONIC ASTERISK +A674..A67B ; valid # 6.1 COMBINING CYRILLIC LETTER UKRAINIAN IE..COMBINING CYRILLIC LETTER OMEGA +A67C..A67D ; valid # 5.1 COMBINING CYRILLIC KAVYKA..COMBINING CYRILLIC PAYEROK +A67E ; valid ; ; NV8 # 5.1 CYRILLIC KAVYKA +A67F ; valid # 5.1 CYRILLIC PAYEROK +A680 ; mapped ; A681 # 5.1 CYRILLIC CAPITAL LETTER DWE +A681 ; valid # 5.1 CYRILLIC SMALL LETTER DWE +A682 ; mapped ; A683 # 5.1 CYRILLIC CAPITAL LETTER DZWE +A683 ; valid # 5.1 CYRILLIC SMALL LETTER DZWE +A684 ; mapped ; A685 # 5.1 CYRILLIC CAPITAL LETTER ZHWE +A685 ; valid # 5.1 CYRILLIC SMALL LETTER ZHWE +A686 ; mapped ; A687 # 5.1 CYRILLIC CAPITAL LETTER CCHE +A687 ; valid # 5.1 CYRILLIC SMALL LETTER CCHE +A688 ; mapped ; A689 # 5.1 CYRILLIC CAPITAL LETTER DZZE 
+A689 ; valid # 5.1 CYRILLIC SMALL LETTER DZZE +A68A ; mapped ; A68B # 5.1 CYRILLIC CAPITAL LETTER TE WITH MIDDLE HOOK +A68B ; valid # 5.1 CYRILLIC SMALL LETTER TE WITH MIDDLE HOOK +A68C ; mapped ; A68D # 5.1 CYRILLIC CAPITAL LETTER TWE +A68D ; valid # 5.1 CYRILLIC SMALL LETTER TWE +A68E ; mapped ; A68F # 5.1 CYRILLIC CAPITAL LETTER TSWE +A68F ; valid # 5.1 CYRILLIC SMALL LETTER TSWE +A690 ; mapped ; A691 # 5.1 CYRILLIC CAPITAL LETTER TSSE +A691 ; valid # 5.1 CYRILLIC SMALL LETTER TSSE +A692 ; mapped ; A693 # 5.1 CYRILLIC CAPITAL LETTER TCHE +A693 ; valid # 5.1 CYRILLIC SMALL LETTER TCHE +A694 ; mapped ; A695 # 5.1 CYRILLIC CAPITAL LETTER HWE +A695 ; valid # 5.1 CYRILLIC SMALL LETTER HWE +A696 ; mapped ; A697 # 5.1 CYRILLIC CAPITAL LETTER SHWE +A697 ; valid # 5.1 CYRILLIC SMALL LETTER SHWE +A698 ; mapped ; A699 # 7.0 CYRILLIC CAPITAL LETTER DOUBLE O +A699 ; valid # 7.0 CYRILLIC SMALL LETTER DOUBLE O +A69A ; mapped ; A69B # 7.0 CYRILLIC CAPITAL LETTER CROSSED O +A69B ; valid # 7.0 CYRILLIC SMALL LETTER CROSSED O +A69C ; mapped ; 044A # 7.0 MODIFIER LETTER CYRILLIC HARD SIGN +A69D ; mapped ; 044C # 7.0 MODIFIER LETTER CYRILLIC SOFT SIGN +A69E ; valid # 8.0 COMBINING CYRILLIC LETTER EF +A69F ; valid # 6.1 COMBINING CYRILLIC LETTER IOTIFIED E +A6A0..A6E5 ; valid # 5.2 BAMUM LETTER A..BAMUM LETTER KI +A6E6..A6EF ; valid ; ; NV8 # 5.2 BAMUM LETTER MO..BAMUM LETTER KOGHOM +A6F0..A6F1 ; valid # 5.2 BAMUM COMBINING MARK KOQNDON..BAMUM COMBINING MARK TUKWENTIS +A6F2..A6F7 ; valid ; ; NV8 # 5.2 BAMUM NJAEMLI..BAMUM QUESTION MARK +A6F8..A6FF ; disallowed # NA .. +A700..A716 ; valid ; ; NV8 # 4.1 MODIFIER LETTER CHINESE TONE YIN PING..MODIFIER LETTER EXTRA-LOW LEFT-STEM TONE BAR +A717..A71A ; valid # 5.0 MODIFIER LETTER DOT VERTICAL BAR..MODIFIER LETTER LOWER RIGHT CORNER ANGLE +A71B..A71F ; valid # 5.1 MODIFIER LETTER RAISED UP ARROW..MODIFIER LETTER LOW INVERTED EXCLAMATION MARK +A720..A721 ; valid ; ; NV8 # 5.0 MODIFIER LETTER STRESS AND HIGH TONE..MODIFIER LETTER STRESS AND LOW TONE +A722 ; mapped ; A723 # 5.1 LATIN CAPITAL LETTER EGYPTOLOGICAL ALEF +A723 ; valid # 5.1 LATIN SMALL LETTER EGYPTOLOGICAL ALEF +A724 ; mapped ; A725 # 5.1 LATIN CAPITAL LETTER EGYPTOLOGICAL AIN +A725 ; valid # 5.1 LATIN SMALL LETTER EGYPTOLOGICAL AIN +A726 ; mapped ; A727 # 5.1 LATIN CAPITAL LETTER HENG +A727 ; valid # 5.1 LATIN SMALL LETTER HENG +A728 ; mapped ; A729 # 5.1 LATIN CAPITAL LETTER TZ +A729 ; valid # 5.1 LATIN SMALL LETTER TZ +A72A ; mapped ; A72B # 5.1 LATIN CAPITAL LETTER TRESILLO +A72B ; valid # 5.1 LATIN SMALL LETTER TRESILLO +A72C ; mapped ; A72D # 5.1 LATIN CAPITAL LETTER CUATRILLO +A72D ; valid # 5.1 LATIN SMALL LETTER CUATRILLO +A72E ; mapped ; A72F # 5.1 LATIN CAPITAL LETTER CUATRILLO WITH COMMA +A72F..A731 ; valid # 5.1 LATIN SMALL LETTER CUATRILLO WITH COMMA..LATIN LETTER SMALL CAPITAL S +A732 ; mapped ; A733 # 5.1 LATIN CAPITAL LETTER AA +A733 ; valid # 5.1 LATIN SMALL LETTER AA +A734 ; mapped ; A735 # 5.1 LATIN CAPITAL LETTER AO +A735 ; valid # 5.1 LATIN SMALL LETTER AO +A736 ; mapped ; A737 # 5.1 LATIN CAPITAL LETTER AU +A737 ; valid # 5.1 LATIN SMALL LETTER AU +A738 ; mapped ; A739 # 5.1 LATIN CAPITAL LETTER AV +A739 ; valid # 5.1 LATIN SMALL LETTER AV +A73A ; mapped ; A73B # 5.1 LATIN CAPITAL LETTER AV WITH HORIZONTAL BAR +A73B ; valid # 5.1 LATIN SMALL LETTER AV WITH HORIZONTAL BAR +A73C ; mapped ; A73D # 5.1 LATIN CAPITAL LETTER AY +A73D ; valid # 5.1 LATIN SMALL LETTER AY +A73E ; mapped ; A73F # 5.1 LATIN CAPITAL LETTER REVERSED C WITH DOT +A73F ; valid # 5.1 LATIN SMALL LETTER REVERSED C 
WITH DOT +A740 ; mapped ; A741 # 5.1 LATIN CAPITAL LETTER K WITH STROKE +A741 ; valid # 5.1 LATIN SMALL LETTER K WITH STROKE +A742 ; mapped ; A743 # 5.1 LATIN CAPITAL LETTER K WITH DIAGONAL STROKE +A743 ; valid # 5.1 LATIN SMALL LETTER K WITH DIAGONAL STROKE +A744 ; mapped ; A745 # 5.1 LATIN CAPITAL LETTER K WITH STROKE AND DIAGONAL STROKE +A745 ; valid # 5.1 LATIN SMALL LETTER K WITH STROKE AND DIAGONAL STROKE +A746 ; mapped ; A747 # 5.1 LATIN CAPITAL LETTER BROKEN L +A747 ; valid # 5.1 LATIN SMALL LETTER BROKEN L +A748 ; mapped ; A749 # 5.1 LATIN CAPITAL LETTER L WITH HIGH STROKE +A749 ; valid # 5.1 LATIN SMALL LETTER L WITH HIGH STROKE +A74A ; mapped ; A74B # 5.1 LATIN CAPITAL LETTER O WITH LONG STROKE OVERLAY +A74B ; valid # 5.1 LATIN SMALL LETTER O WITH LONG STROKE OVERLAY +A74C ; mapped ; A74D # 5.1 LATIN CAPITAL LETTER O WITH LOOP +A74D ; valid # 5.1 LATIN SMALL LETTER O WITH LOOP +A74E ; mapped ; A74F # 5.1 LATIN CAPITAL LETTER OO +A74F ; valid # 5.1 LATIN SMALL LETTER OO +A750 ; mapped ; A751 # 5.1 LATIN CAPITAL LETTER P WITH STROKE THROUGH DESCENDER +A751 ; valid # 5.1 LATIN SMALL LETTER P WITH STROKE THROUGH DESCENDER +A752 ; mapped ; A753 # 5.1 LATIN CAPITAL LETTER P WITH FLOURISH +A753 ; valid # 5.1 LATIN SMALL LETTER P WITH FLOURISH +A754 ; mapped ; A755 # 5.1 LATIN CAPITAL LETTER P WITH SQUIRREL TAIL +A755 ; valid # 5.1 LATIN SMALL LETTER P WITH SQUIRREL TAIL +A756 ; mapped ; A757 # 5.1 LATIN CAPITAL LETTER Q WITH STROKE THROUGH DESCENDER +A757 ; valid # 5.1 LATIN SMALL LETTER Q WITH STROKE THROUGH DESCENDER +A758 ; mapped ; A759 # 5.1 LATIN CAPITAL LETTER Q WITH DIAGONAL STROKE +A759 ; valid # 5.1 LATIN SMALL LETTER Q WITH DIAGONAL STROKE +A75A ; mapped ; A75B # 5.1 LATIN CAPITAL LETTER R ROTUNDA +A75B ; valid # 5.1 LATIN SMALL LETTER R ROTUNDA +A75C ; mapped ; A75D # 5.1 LATIN CAPITAL LETTER RUM ROTUNDA +A75D ; valid # 5.1 LATIN SMALL LETTER RUM ROTUNDA +A75E ; mapped ; A75F # 5.1 LATIN CAPITAL LETTER V WITH DIAGONAL STROKE +A75F ; valid # 5.1 LATIN SMALL LETTER V WITH DIAGONAL STROKE +A760 ; mapped ; A761 # 5.1 LATIN CAPITAL LETTER VY +A761 ; valid # 5.1 LATIN SMALL LETTER VY +A762 ; mapped ; A763 # 5.1 LATIN CAPITAL LETTER VISIGOTHIC Z +A763 ; valid # 5.1 LATIN SMALL LETTER VISIGOTHIC Z +A764 ; mapped ; A765 # 5.1 LATIN CAPITAL LETTER THORN WITH STROKE +A765 ; valid # 5.1 LATIN SMALL LETTER THORN WITH STROKE +A766 ; mapped ; A767 # 5.1 LATIN CAPITAL LETTER THORN WITH STROKE THROUGH DESCENDER +A767 ; valid # 5.1 LATIN SMALL LETTER THORN WITH STROKE THROUGH DESCENDER +A768 ; mapped ; A769 # 5.1 LATIN CAPITAL LETTER VEND +A769 ; valid # 5.1 LATIN SMALL LETTER VEND +A76A ; mapped ; A76B # 5.1 LATIN CAPITAL LETTER ET +A76B ; valid # 5.1 LATIN SMALL LETTER ET +A76C ; mapped ; A76D # 5.1 LATIN CAPITAL LETTER IS +A76D ; valid # 5.1 LATIN SMALL LETTER IS +A76E ; mapped ; A76F # 5.1 LATIN CAPITAL LETTER CON +A76F ; valid # 5.1 LATIN SMALL LETTER CON +A770 ; mapped ; A76F # 5.1 MODIFIER LETTER US +A771..A778 ; valid # 5.1 LATIN SMALL LETTER DUM..LATIN SMALL LETTER UM +A779 ; mapped ; A77A # 5.1 LATIN CAPITAL LETTER INSULAR D +A77A ; valid # 5.1 LATIN SMALL LETTER INSULAR D +A77B ; mapped ; A77C # 5.1 LATIN CAPITAL LETTER INSULAR F +A77C ; valid # 5.1 LATIN SMALL LETTER INSULAR F +A77D ; mapped ; 1D79 # 5.1 LATIN CAPITAL LETTER INSULAR G +A77E ; mapped ; A77F # 5.1 LATIN CAPITAL LETTER TURNED INSULAR G +A77F ; valid # 5.1 LATIN SMALL LETTER TURNED INSULAR G +A780 ; mapped ; A781 # 5.1 LATIN CAPITAL LETTER TURNED L +A781 ; valid # 5.1 LATIN SMALL LETTER TURNED L +A782 ; mapped ; A783 
# 5.1 LATIN CAPITAL LETTER INSULAR R +A783 ; valid # 5.1 LATIN SMALL LETTER INSULAR R +A784 ; mapped ; A785 # 5.1 LATIN CAPITAL LETTER INSULAR S +A785 ; valid # 5.1 LATIN SMALL LETTER INSULAR S +A786 ; mapped ; A787 # 5.1 LATIN CAPITAL LETTER INSULAR T +A787..A788 ; valid # 5.1 LATIN SMALL LETTER INSULAR T..MODIFIER LETTER LOW CIRCUMFLEX ACCENT +A789..A78A ; valid ; ; NV8 # 5.1 MODIFIER LETTER COLON..MODIFIER LETTER SHORT EQUALS SIGN +A78B ; mapped ; A78C # 5.1 LATIN CAPITAL LETTER SALTILLO +A78C ; valid # 5.1 LATIN SMALL LETTER SALTILLO +A78D ; mapped ; 0265 # 6.0 LATIN CAPITAL LETTER TURNED H +A78E ; valid # 6.0 LATIN SMALL LETTER L WITH RETROFLEX HOOK AND BELT +A78F ; valid # 8.0 LATIN LETTER SINOLOGICAL DOT +A790 ; mapped ; A791 # 6.0 LATIN CAPITAL LETTER N WITH DESCENDER +A791 ; valid # 6.0 LATIN SMALL LETTER N WITH DESCENDER +A792 ; mapped ; A793 # 6.1 LATIN CAPITAL LETTER C WITH BAR +A793 ; valid # 6.1 LATIN SMALL LETTER C WITH BAR +A794..A795 ; valid # 7.0 LATIN SMALL LETTER C WITH PALATAL HOOK..LATIN SMALL LETTER H WITH PALATAL HOOK +A796 ; mapped ; A797 # 7.0 LATIN CAPITAL LETTER B WITH FLOURISH +A797 ; valid # 7.0 LATIN SMALL LETTER B WITH FLOURISH +A798 ; mapped ; A799 # 7.0 LATIN CAPITAL LETTER F WITH STROKE +A799 ; valid # 7.0 LATIN SMALL LETTER F WITH STROKE +A79A ; mapped ; A79B # 7.0 LATIN CAPITAL LETTER VOLAPUK AE +A79B ; valid # 7.0 LATIN SMALL LETTER VOLAPUK AE +A79C ; mapped ; A79D # 7.0 LATIN CAPITAL LETTER VOLAPUK OE +A79D ; valid # 7.0 LATIN SMALL LETTER VOLAPUK OE +A79E ; mapped ; A79F # 7.0 LATIN CAPITAL LETTER VOLAPUK UE +A79F ; valid # 7.0 LATIN SMALL LETTER VOLAPUK UE +A7A0 ; mapped ; A7A1 # 6.0 LATIN CAPITAL LETTER G WITH OBLIQUE STROKE +A7A1 ; valid # 6.0 LATIN SMALL LETTER G WITH OBLIQUE STROKE +A7A2 ; mapped ; A7A3 # 6.0 LATIN CAPITAL LETTER K WITH OBLIQUE STROKE +A7A3 ; valid # 6.0 LATIN SMALL LETTER K WITH OBLIQUE STROKE +A7A4 ; mapped ; A7A5 # 6.0 LATIN CAPITAL LETTER N WITH OBLIQUE STROKE +A7A5 ; valid # 6.0 LATIN SMALL LETTER N WITH OBLIQUE STROKE +A7A6 ; mapped ; A7A7 # 6.0 LATIN CAPITAL LETTER R WITH OBLIQUE STROKE +A7A7 ; valid # 6.0 LATIN SMALL LETTER R WITH OBLIQUE STROKE +A7A8 ; mapped ; A7A9 # 6.0 LATIN CAPITAL LETTER S WITH OBLIQUE STROKE +A7A9 ; valid # 6.0 LATIN SMALL LETTER S WITH OBLIQUE STROKE +A7AA ; mapped ; 0266 # 6.1 LATIN CAPITAL LETTER H WITH HOOK +A7AB ; mapped ; 025C # 7.0 LATIN CAPITAL LETTER REVERSED OPEN E +A7AC ; mapped ; 0261 # 7.0 LATIN CAPITAL LETTER SCRIPT G +A7AD ; mapped ; 026C # 7.0 LATIN CAPITAL LETTER L WITH BELT +A7AE ; mapped ; 026A # 9.0 LATIN CAPITAL LETTER SMALL CAPITAL I +A7AF ; disallowed # NA +A7B0 ; mapped ; 029E # 7.0 LATIN CAPITAL LETTER TURNED K +A7B1 ; mapped ; 0287 # 7.0 LATIN CAPITAL LETTER TURNED T +A7B2 ; mapped ; 029D # 8.0 LATIN CAPITAL LETTER J WITH CROSSED-TAIL +A7B3 ; mapped ; AB53 # 8.0 LATIN CAPITAL LETTER CHI +A7B4 ; mapped ; A7B5 # 8.0 LATIN CAPITAL LETTER BETA +A7B5 ; valid # 8.0 LATIN SMALL LETTER BETA +A7B6 ; mapped ; A7B7 # 8.0 LATIN CAPITAL LETTER OMEGA +A7B7 ; valid # 8.0 LATIN SMALL LETTER OMEGA +A7B8..A7F6 ; disallowed # NA .. 
+A7F7 ; valid # 7.0 LATIN EPIGRAPHIC LETTER SIDEWAYS I +A7F8 ; mapped ; 0127 # 6.1 MODIFIER LETTER CAPITAL H WITH STROKE +A7F9 ; mapped ; 0153 # 6.1 MODIFIER LETTER SMALL LIGATURE OE +A7FA ; valid # 6.0 LATIN LETTER SMALL CAPITAL TURNED M +A7FB..A7FF ; valid # 5.1 LATIN EPIGRAPHIC LETTER REVERSED F..LATIN EPIGRAPHIC LETTER ARCHAIC M +A800..A827 ; valid # 4.1 SYLOTI NAGRI LETTER A..SYLOTI NAGRI VOWEL SIGN OO +A828..A82B ; valid ; ; NV8 # 4.1 SYLOTI NAGRI POETRY MARK-1..SYLOTI NAGRI POETRY MARK-4 +A82C..A82F ; disallowed # NA .. +A830..A839 ; valid ; ; NV8 # 5.2 NORTH INDIC FRACTION ONE QUARTER..NORTH INDIC QUANTITY MARK +A83A..A83F ; disallowed # NA .. +A840..A873 ; valid # 5.0 PHAGS-PA LETTER KA..PHAGS-PA LETTER CANDRABINDU +A874..A877 ; valid ; ; NV8 # 5.0 PHAGS-PA SINGLE HEAD MARK..PHAGS-PA MARK DOUBLE SHAD +A878..A87F ; disallowed # NA .. +A880..A8C4 ; valid # 5.1 SAURASHTRA SIGN ANUSVARA..SAURASHTRA SIGN VIRAMA +A8C5 ; valid # 9.0 SAURASHTRA SIGN CANDRABINDU +A8C6..A8CD ; disallowed # NA .. +A8CE..A8CF ; valid ; ; NV8 # 5.1 SAURASHTRA DANDA..SAURASHTRA DOUBLE DANDA +A8D0..A8D9 ; valid # 5.1 SAURASHTRA DIGIT ZERO..SAURASHTRA DIGIT NINE +A8DA..A8DF ; disallowed # NA .. +A8E0..A8F7 ; valid # 5.2 COMBINING DEVANAGARI DIGIT ZERO..DEVANAGARI SIGN CANDRABINDU AVAGRAHA +A8F8..A8FA ; valid ; ; NV8 # 5.2 DEVANAGARI SIGN PUSHPIKA..DEVANAGARI CARET +A8FB ; valid # 5.2 DEVANAGARI HEADSTROKE +A8FC ; valid ; ; NV8 # 8.0 DEVANAGARI SIGN SIDDHAM +A8FD ; valid # 8.0 DEVANAGARI JAIN OM +A8FE..A8FF ; disallowed # NA .. +A900..A92D ; valid # 5.1 KAYAH LI DIGIT ZERO..KAYAH LI TONE CALYA PLOPHU +A92E..A92F ; valid ; ; NV8 # 5.1 KAYAH LI SIGN CWI..KAYAH LI SIGN SHYA +A930..A953 ; valid # 5.1 REJANG LETTER KA..REJANG VIRAMA +A954..A95E ; disallowed # NA .. +A95F ; valid ; ; NV8 # 5.1 REJANG SECTION MARK +A960..A97C ; valid ; ; NV8 # 5.2 HANGUL CHOSEONG TIKEUT-MIEUM..HANGUL CHOSEONG SSANGYEORINHIEUH +A97D..A97F ; disallowed # NA .. +A980..A9C0 ; valid # 5.2 JAVANESE SIGN PANYANGGA..JAVANESE PANGKON +A9C1..A9CD ; valid ; ; NV8 # 5.2 JAVANESE LEFT RERENGGAN..JAVANESE TURNED PADA PISELEH +A9CE ; disallowed # NA +A9CF..A9D9 ; valid # 5.2 JAVANESE PANGRANGKEP..JAVANESE DIGIT NINE +A9DA..A9DD ; disallowed # NA .. +A9DE..A9DF ; valid ; ; NV8 # 5.2 JAVANESE PADA TIRTA TUMETES..JAVANESE PADA ISEN-ISEN +A9E0..A9FE ; valid # 7.0 MYANMAR LETTER SHAN GHA..MYANMAR LETTER TAI LAING BHA +A9FF ; disallowed # NA +AA00..AA36 ; valid # 5.1 CHAM LETTER A..CHAM CONSONANT SIGN WA +AA37..AA3F ; disallowed # NA .. +AA40..AA4D ; valid # 5.1 CHAM LETTER FINAL K..CHAM CONSONANT SIGN FINAL H +AA4E..AA4F ; disallowed # NA .. +AA50..AA59 ; valid # 5.1 CHAM DIGIT ZERO..CHAM DIGIT NINE +AA5A..AA5B ; disallowed # NA .. +AA5C..AA5F ; valid ; ; NV8 # 5.1 CHAM PUNCTUATION SPIRAL..CHAM PUNCTUATION TRIPLE DANDA +AA60..AA76 ; valid # 5.2 MYANMAR LETTER KHAMTI GA..MYANMAR LOGOGRAM KHAMTI HM +AA77..AA79 ; valid ; ; NV8 # 5.2 MYANMAR SYMBOL AITON EXCLAMATION..MYANMAR SYMBOL AITON TWO +AA7A..AA7B ; valid # 5.2 MYANMAR LETTER AITON RA..MYANMAR SIGN PAO KAREN TONE +AA7C..AA7F ; valid # 7.0 MYANMAR SIGN TAI LAING TONE-2..MYANMAR LETTER SHWE PALAUNG SHA +AA80..AAC2 ; valid # 5.2 TAI VIET LETTER LOW KO..TAI VIET TONE MAI SONG +AAC3..AADA ; disallowed # NA .. 
+AADB..AADD ; valid # 5.2 TAI VIET SYMBOL KON..TAI VIET SYMBOL SAM +AADE..AADF ; valid ; ; NV8 # 5.2 TAI VIET SYMBOL HO HOI..TAI VIET SYMBOL KOI KOI +AAE0..AAEF ; valid # 6.1 MEETEI MAYEK LETTER E..MEETEI MAYEK VOWEL SIGN AAU +AAF0..AAF1 ; valid ; ; NV8 # 6.1 MEETEI MAYEK CHEIKHAN..MEETEI MAYEK AHANG KHUDAM +AAF2..AAF6 ; valid # 6.1 MEETEI MAYEK ANJI..MEETEI MAYEK VIRAMA +AAF7..AB00 ; disallowed # NA .. +AB01..AB06 ; valid # 6.0 ETHIOPIC SYLLABLE TTHU..ETHIOPIC SYLLABLE TTHO +AB07..AB08 ; disallowed # NA .. +AB09..AB0E ; valid # 6.0 ETHIOPIC SYLLABLE DDHU..ETHIOPIC SYLLABLE DDHO +AB0F..AB10 ; disallowed # NA .. +AB11..AB16 ; valid # 6.0 ETHIOPIC SYLLABLE DZU..ETHIOPIC SYLLABLE DZO +AB17..AB1F ; disallowed # NA .. +AB20..AB26 ; valid # 6.0 ETHIOPIC SYLLABLE CCHHA..ETHIOPIC SYLLABLE CCHHO +AB27 ; disallowed # NA +AB28..AB2E ; valid # 6.0 ETHIOPIC SYLLABLE BBA..ETHIOPIC SYLLABLE BBO +AB2F ; disallowed # NA +AB30..AB5A ; valid # 7.0 LATIN SMALL LETTER BARRED ALPHA..LATIN SMALL LETTER Y WITH SHORT RIGHT LEG +AB5B ; valid ; ; NV8 # 7.0 MODIFIER BREVE WITH INVERTED BREVE +AB5C ; mapped ; A727 # 7.0 MODIFIER LETTER SMALL HENG +AB5D ; mapped ; AB37 # 7.0 MODIFIER LETTER SMALL L WITH INVERTED LAZY S +AB5E ; mapped ; 026B # 7.0 MODIFIER LETTER SMALL L WITH MIDDLE TILDE +AB5F ; mapped ; AB52 # 7.0 MODIFIER LETTER SMALL U WITH LEFT HOOK +AB60..AB63 ; valid # 8.0 LATIN SMALL LETTER SAKHA YAT..LATIN SMALL LETTER UO +AB64..AB65 ; valid # 7.0 LATIN SMALL LETTER INVERTED ALPHA..GREEK LETTER SMALL CAPITAL OMEGA +AB66..AB6F ; disallowed # NA .. +AB70 ; mapped ; 13A0 # 8.0 CHEROKEE SMALL LETTER A +AB71 ; mapped ; 13A1 # 8.0 CHEROKEE SMALL LETTER E +AB72 ; mapped ; 13A2 # 8.0 CHEROKEE SMALL LETTER I +AB73 ; mapped ; 13A3 # 8.0 CHEROKEE SMALL LETTER O +AB74 ; mapped ; 13A4 # 8.0 CHEROKEE SMALL LETTER U +AB75 ; mapped ; 13A5 # 8.0 CHEROKEE SMALL LETTER V +AB76 ; mapped ; 13A6 # 8.0 CHEROKEE SMALL LETTER GA +AB77 ; mapped ; 13A7 # 8.0 CHEROKEE SMALL LETTER KA +AB78 ; mapped ; 13A8 # 8.0 CHEROKEE SMALL LETTER GE +AB79 ; mapped ; 13A9 # 8.0 CHEROKEE SMALL LETTER GI +AB7A ; mapped ; 13AA # 8.0 CHEROKEE SMALL LETTER GO +AB7B ; mapped ; 13AB # 8.0 CHEROKEE SMALL LETTER GU +AB7C ; mapped ; 13AC # 8.0 CHEROKEE SMALL LETTER GV +AB7D ; mapped ; 13AD # 8.0 CHEROKEE SMALL LETTER HA +AB7E ; mapped ; 13AE # 8.0 CHEROKEE SMALL LETTER HE +AB7F ; mapped ; 13AF # 8.0 CHEROKEE SMALL LETTER HI +AB80 ; mapped ; 13B0 # 8.0 CHEROKEE SMALL LETTER HO +AB81 ; mapped ; 13B1 # 8.0 CHEROKEE SMALL LETTER HU +AB82 ; mapped ; 13B2 # 8.0 CHEROKEE SMALL LETTER HV +AB83 ; mapped ; 13B3 # 8.0 CHEROKEE SMALL LETTER LA +AB84 ; mapped ; 13B4 # 8.0 CHEROKEE SMALL LETTER LE +AB85 ; mapped ; 13B5 # 8.0 CHEROKEE SMALL LETTER LI +AB86 ; mapped ; 13B6 # 8.0 CHEROKEE SMALL LETTER LO +AB87 ; mapped ; 13B7 # 8.0 CHEROKEE SMALL LETTER LU +AB88 ; mapped ; 13B8 # 8.0 CHEROKEE SMALL LETTER LV +AB89 ; mapped ; 13B9 # 8.0 CHEROKEE SMALL LETTER MA +AB8A ; mapped ; 13BA # 8.0 CHEROKEE SMALL LETTER ME +AB8B ; mapped ; 13BB # 8.0 CHEROKEE SMALL LETTER MI +AB8C ; mapped ; 13BC # 8.0 CHEROKEE SMALL LETTER MO +AB8D ; mapped ; 13BD # 8.0 CHEROKEE SMALL LETTER MU +AB8E ; mapped ; 13BE # 8.0 CHEROKEE SMALL LETTER NA +AB8F ; mapped ; 13BF # 8.0 CHEROKEE SMALL LETTER HNA +AB90 ; mapped ; 13C0 # 8.0 CHEROKEE SMALL LETTER NAH +AB91 ; mapped ; 13C1 # 8.0 CHEROKEE SMALL LETTER NE +AB92 ; mapped ; 13C2 # 8.0 CHEROKEE SMALL LETTER NI +AB93 ; mapped ; 13C3 # 8.0 CHEROKEE SMALL LETTER NO +AB94 ; mapped ; 13C4 # 8.0 CHEROKEE SMALL LETTER NU +AB95 ; mapped ; 13C5 # 8.0 CHEROKEE SMALL 
LETTER NV +AB96 ; mapped ; 13C6 # 8.0 CHEROKEE SMALL LETTER QUA +AB97 ; mapped ; 13C7 # 8.0 CHEROKEE SMALL LETTER QUE +AB98 ; mapped ; 13C8 # 8.0 CHEROKEE SMALL LETTER QUI +AB99 ; mapped ; 13C9 # 8.0 CHEROKEE SMALL LETTER QUO +AB9A ; mapped ; 13CA # 8.0 CHEROKEE SMALL LETTER QUU +AB9B ; mapped ; 13CB # 8.0 CHEROKEE SMALL LETTER QUV +AB9C ; mapped ; 13CC # 8.0 CHEROKEE SMALL LETTER SA +AB9D ; mapped ; 13CD # 8.0 CHEROKEE SMALL LETTER S +AB9E ; mapped ; 13CE # 8.0 CHEROKEE SMALL LETTER SE +AB9F ; mapped ; 13CF # 8.0 CHEROKEE SMALL LETTER SI +ABA0 ; mapped ; 13D0 # 8.0 CHEROKEE SMALL LETTER SO +ABA1 ; mapped ; 13D1 # 8.0 CHEROKEE SMALL LETTER SU +ABA2 ; mapped ; 13D2 # 8.0 CHEROKEE SMALL LETTER SV +ABA3 ; mapped ; 13D3 # 8.0 CHEROKEE SMALL LETTER DA +ABA4 ; mapped ; 13D4 # 8.0 CHEROKEE SMALL LETTER TA +ABA5 ; mapped ; 13D5 # 8.0 CHEROKEE SMALL LETTER DE +ABA6 ; mapped ; 13D6 # 8.0 CHEROKEE SMALL LETTER TE +ABA7 ; mapped ; 13D7 # 8.0 CHEROKEE SMALL LETTER DI +ABA8 ; mapped ; 13D8 # 8.0 CHEROKEE SMALL LETTER TI +ABA9 ; mapped ; 13D9 # 8.0 CHEROKEE SMALL LETTER DO +ABAA ; mapped ; 13DA # 8.0 CHEROKEE SMALL LETTER DU +ABAB ; mapped ; 13DB # 8.0 CHEROKEE SMALL LETTER DV +ABAC ; mapped ; 13DC # 8.0 CHEROKEE SMALL LETTER DLA +ABAD ; mapped ; 13DD # 8.0 CHEROKEE SMALL LETTER TLA +ABAE ; mapped ; 13DE # 8.0 CHEROKEE SMALL LETTER TLE +ABAF ; mapped ; 13DF # 8.0 CHEROKEE SMALL LETTER TLI +ABB0 ; mapped ; 13E0 # 8.0 CHEROKEE SMALL LETTER TLO +ABB1 ; mapped ; 13E1 # 8.0 CHEROKEE SMALL LETTER TLU +ABB2 ; mapped ; 13E2 # 8.0 CHEROKEE SMALL LETTER TLV +ABB3 ; mapped ; 13E3 # 8.0 CHEROKEE SMALL LETTER TSA +ABB4 ; mapped ; 13E4 # 8.0 CHEROKEE SMALL LETTER TSE +ABB5 ; mapped ; 13E5 # 8.0 CHEROKEE SMALL LETTER TSI +ABB6 ; mapped ; 13E6 # 8.0 CHEROKEE SMALL LETTER TSO +ABB7 ; mapped ; 13E7 # 8.0 CHEROKEE SMALL LETTER TSU +ABB8 ; mapped ; 13E8 # 8.0 CHEROKEE SMALL LETTER TSV +ABB9 ; mapped ; 13E9 # 8.0 CHEROKEE SMALL LETTER WA +ABBA ; mapped ; 13EA # 8.0 CHEROKEE SMALL LETTER WE +ABBB ; mapped ; 13EB # 8.0 CHEROKEE SMALL LETTER WI +ABBC ; mapped ; 13EC # 8.0 CHEROKEE SMALL LETTER WO +ABBD ; mapped ; 13ED # 8.0 CHEROKEE SMALL LETTER WU +ABBE ; mapped ; 13EE # 8.0 CHEROKEE SMALL LETTER WV +ABBF ; mapped ; 13EF # 8.0 CHEROKEE SMALL LETTER YA +ABC0..ABEA ; valid # 5.2 MEETEI MAYEK LETTER KOK..MEETEI MAYEK VOWEL SIGN NUNG +ABEB ; valid ; ; NV8 # 5.2 MEETEI MAYEK CHEIKHEI +ABEC..ABED ; valid # 5.2 MEETEI MAYEK LUM IYEK..MEETEI MAYEK APUN IYEK +ABEE..ABEF ; disallowed # NA .. +ABF0..ABF9 ; valid # 5.2 MEETEI MAYEK DIGIT ZERO..MEETEI MAYEK DIGIT NINE +ABFA..ABFF ; disallowed # NA .. +AC00..D7A3 ; valid # 2.0 HANGUL SYLLABLE GA..HANGUL SYLLABLE HIH +D7A4..D7AF ; disallowed # NA .. +D7B0..D7C6 ; valid ; ; NV8 # 5.2 HANGUL JUNGSEONG O-YEO..HANGUL JUNGSEONG ARAEA-E +D7C7..D7CA ; disallowed # NA .. +D7CB..D7FB ; valid ; ; NV8 # 5.2 HANGUL JONGSEONG NIEUN-RIEUL..HANGUL JONGSEONG PHIEUPH-THIEUTH +D7FC..D7FF ; disallowed # NA .. +D800..DFFF ; disallowed # 2.0 .. +E000..F8FF ; disallowed # 1.1 .. 
+F900 ; mapped ; 8C48 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F900 +F901 ; mapped ; 66F4 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F901 +F902 ; mapped ; 8ECA # 1.1 CJK COMPATIBILITY IDEOGRAPH-F902 +F903 ; mapped ; 8CC8 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F903 +F904 ; mapped ; 6ED1 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F904 +F905 ; mapped ; 4E32 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F905 +F906 ; mapped ; 53E5 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F906 +F907..F908 ; mapped ; 9F9C # 1.1 CJK COMPATIBILITY IDEOGRAPH-F907..CJK COMPATIBILITY IDEOGRAPH-F908 +F909 ; mapped ; 5951 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F909 +F90A ; mapped ; 91D1 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F90A +F90B ; mapped ; 5587 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F90B +F90C ; mapped ; 5948 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F90C +F90D ; mapped ; 61F6 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F90D +F90E ; mapped ; 7669 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F90E +F90F ; mapped ; 7F85 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F90F +F910 ; mapped ; 863F # 1.1 CJK COMPATIBILITY IDEOGRAPH-F910 +F911 ; mapped ; 87BA # 1.1 CJK COMPATIBILITY IDEOGRAPH-F911 +F912 ; mapped ; 88F8 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F912 +F913 ; mapped ; 908F # 1.1 CJK COMPATIBILITY IDEOGRAPH-F913 +F914 ; mapped ; 6A02 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F914 +F915 ; mapped ; 6D1B # 1.1 CJK COMPATIBILITY IDEOGRAPH-F915 +F916 ; mapped ; 70D9 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F916 +F917 ; mapped ; 73DE # 1.1 CJK COMPATIBILITY IDEOGRAPH-F917 +F918 ; mapped ; 843D # 1.1 CJK COMPATIBILITY IDEOGRAPH-F918 +F919 ; mapped ; 916A # 1.1 CJK COMPATIBILITY IDEOGRAPH-F919 +F91A ; mapped ; 99F1 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F91A +F91B ; mapped ; 4E82 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F91B +F91C ; mapped ; 5375 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F91C +F91D ; mapped ; 6B04 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F91D +F91E ; mapped ; 721B # 1.1 CJK COMPATIBILITY IDEOGRAPH-F91E +F91F ; mapped ; 862D # 1.1 CJK COMPATIBILITY IDEOGRAPH-F91F +F920 ; mapped ; 9E1E # 1.1 CJK COMPATIBILITY IDEOGRAPH-F920 +F921 ; mapped ; 5D50 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F921 +F922 ; mapped ; 6FEB # 1.1 CJK COMPATIBILITY IDEOGRAPH-F922 +F923 ; mapped ; 85CD # 1.1 CJK COMPATIBILITY IDEOGRAPH-F923 +F924 ; mapped ; 8964 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F924 +F925 ; mapped ; 62C9 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F925 +F926 ; mapped ; 81D8 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F926 +F927 ; mapped ; 881F # 1.1 CJK COMPATIBILITY IDEOGRAPH-F927 +F928 ; mapped ; 5ECA # 1.1 CJK COMPATIBILITY IDEOGRAPH-F928 +F929 ; mapped ; 6717 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F929 +F92A ; mapped ; 6D6A # 1.1 CJK COMPATIBILITY IDEOGRAPH-F92A +F92B ; mapped ; 72FC # 1.1 CJK COMPATIBILITY IDEOGRAPH-F92B +F92C ; mapped ; 90CE # 1.1 CJK COMPATIBILITY IDEOGRAPH-F92C +F92D ; mapped ; 4F86 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F92D +F92E ; mapped ; 51B7 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F92E +F92F ; mapped ; 52DE # 1.1 CJK COMPATIBILITY IDEOGRAPH-F92F +F930 ; mapped ; 64C4 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F930 +F931 ; mapped ; 6AD3 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F931 +F932 ; mapped ; 7210 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F932 +F933 ; mapped ; 76E7 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F933 +F934 ; mapped ; 8001 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F934 +F935 ; mapped ; 8606 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F935 +F936 ; mapped ; 865C # 1.1 CJK COMPATIBILITY IDEOGRAPH-F936 +F937 ; mapped ; 8DEF # 1.1 CJK COMPATIBILITY IDEOGRAPH-F937 +F938 ; mapped ; 9732 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F938 +F939 ; mapped ; 9B6F # 1.1 CJK COMPATIBILITY IDEOGRAPH-F939 +F93A ; mapped ; 9DFA # 1.1 CJK 
COMPATIBILITY IDEOGRAPH-F93A +F93B ; mapped ; 788C # 1.1 CJK COMPATIBILITY IDEOGRAPH-F93B +F93C ; mapped ; 797F # 1.1 CJK COMPATIBILITY IDEOGRAPH-F93C +F93D ; mapped ; 7DA0 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F93D +F93E ; mapped ; 83C9 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F93E +F93F ; mapped ; 9304 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F93F +F940 ; mapped ; 9E7F # 1.1 CJK COMPATIBILITY IDEOGRAPH-F940 +F941 ; mapped ; 8AD6 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F941 +F942 ; mapped ; 58DF # 1.1 CJK COMPATIBILITY IDEOGRAPH-F942 +F943 ; mapped ; 5F04 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F943 +F944 ; mapped ; 7C60 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F944 +F945 ; mapped ; 807E # 1.1 CJK COMPATIBILITY IDEOGRAPH-F945 +F946 ; mapped ; 7262 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F946 +F947 ; mapped ; 78CA # 1.1 CJK COMPATIBILITY IDEOGRAPH-F947 +F948 ; mapped ; 8CC2 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F948 +F949 ; mapped ; 96F7 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F949 +F94A ; mapped ; 58D8 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F94A +F94B ; mapped ; 5C62 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F94B +F94C ; mapped ; 6A13 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F94C +F94D ; mapped ; 6DDA # 1.1 CJK COMPATIBILITY IDEOGRAPH-F94D +F94E ; mapped ; 6F0F # 1.1 CJK COMPATIBILITY IDEOGRAPH-F94E +F94F ; mapped ; 7D2F # 1.1 CJK COMPATIBILITY IDEOGRAPH-F94F +F950 ; mapped ; 7E37 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F950 +F951 ; mapped ; 964B # 1.1 CJK COMPATIBILITY IDEOGRAPH-F951 +F952 ; mapped ; 52D2 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F952 +F953 ; mapped ; 808B # 1.1 CJK COMPATIBILITY IDEOGRAPH-F953 +F954 ; mapped ; 51DC # 1.1 CJK COMPATIBILITY IDEOGRAPH-F954 +F955 ; mapped ; 51CC # 1.1 CJK COMPATIBILITY IDEOGRAPH-F955 +F956 ; mapped ; 7A1C # 1.1 CJK COMPATIBILITY IDEOGRAPH-F956 +F957 ; mapped ; 7DBE # 1.1 CJK COMPATIBILITY IDEOGRAPH-F957 +F958 ; mapped ; 83F1 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F958 +F959 ; mapped ; 9675 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F959 +F95A ; mapped ; 8B80 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F95A +F95B ; mapped ; 62CF # 1.1 CJK COMPATIBILITY IDEOGRAPH-F95B +F95C ; mapped ; 6A02 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F95C +F95D ; mapped ; 8AFE # 1.1 CJK COMPATIBILITY IDEOGRAPH-F95D +F95E ; mapped ; 4E39 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F95E +F95F ; mapped ; 5BE7 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F95F +F960 ; mapped ; 6012 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F960 +F961 ; mapped ; 7387 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F961 +F962 ; mapped ; 7570 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F962 +F963 ; mapped ; 5317 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F963 +F964 ; mapped ; 78FB # 1.1 CJK COMPATIBILITY IDEOGRAPH-F964 +F965 ; mapped ; 4FBF # 1.1 CJK COMPATIBILITY IDEOGRAPH-F965 +F966 ; mapped ; 5FA9 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F966 +F967 ; mapped ; 4E0D # 1.1 CJK COMPATIBILITY IDEOGRAPH-F967 +F968 ; mapped ; 6CCC # 1.1 CJK COMPATIBILITY IDEOGRAPH-F968 +F969 ; mapped ; 6578 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F969 +F96A ; mapped ; 7D22 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F96A +F96B ; mapped ; 53C3 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F96B +F96C ; mapped ; 585E # 1.1 CJK COMPATIBILITY IDEOGRAPH-F96C +F96D ; mapped ; 7701 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F96D +F96E ; mapped ; 8449 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F96E +F96F ; mapped ; 8AAA # 1.1 CJK COMPATIBILITY IDEOGRAPH-F96F +F970 ; mapped ; 6BBA # 1.1 CJK COMPATIBILITY IDEOGRAPH-F970 +F971 ; mapped ; 8FB0 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F971 +F972 ; mapped ; 6C88 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F972 +F973 ; mapped ; 62FE # 1.1 CJK COMPATIBILITY IDEOGRAPH-F973 +F974 ; mapped ; 82E5 # 1.1 CJK COMPATIBILITY 
IDEOGRAPH-F974 +F975 ; mapped ; 63A0 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F975 +F976 ; mapped ; 7565 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F976 +F977 ; mapped ; 4EAE # 1.1 CJK COMPATIBILITY IDEOGRAPH-F977 +F978 ; mapped ; 5169 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F978 +F979 ; mapped ; 51C9 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F979 +F97A ; mapped ; 6881 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F97A +F97B ; mapped ; 7CE7 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F97B +F97C ; mapped ; 826F # 1.1 CJK COMPATIBILITY IDEOGRAPH-F97C +F97D ; mapped ; 8AD2 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F97D +F97E ; mapped ; 91CF # 1.1 CJK COMPATIBILITY IDEOGRAPH-F97E +F97F ; mapped ; 52F5 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F97F +F980 ; mapped ; 5442 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F980 +F981 ; mapped ; 5973 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F981 +F982 ; mapped ; 5EEC # 1.1 CJK COMPATIBILITY IDEOGRAPH-F982 +F983 ; mapped ; 65C5 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F983 +F984 ; mapped ; 6FFE # 1.1 CJK COMPATIBILITY IDEOGRAPH-F984 +F985 ; mapped ; 792A # 1.1 CJK COMPATIBILITY IDEOGRAPH-F985 +F986 ; mapped ; 95AD # 1.1 CJK COMPATIBILITY IDEOGRAPH-F986 +F987 ; mapped ; 9A6A # 1.1 CJK COMPATIBILITY IDEOGRAPH-F987 +F988 ; mapped ; 9E97 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F988 +F989 ; mapped ; 9ECE # 1.1 CJK COMPATIBILITY IDEOGRAPH-F989 +F98A ; mapped ; 529B # 1.1 CJK COMPATIBILITY IDEOGRAPH-F98A +F98B ; mapped ; 66C6 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F98B +F98C ; mapped ; 6B77 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F98C +F98D ; mapped ; 8F62 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F98D +F98E ; mapped ; 5E74 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F98E +F98F ; mapped ; 6190 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F98F +F990 ; mapped ; 6200 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F990 +F991 ; mapped ; 649A # 1.1 CJK COMPATIBILITY IDEOGRAPH-F991 +F992 ; mapped ; 6F23 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F992 +F993 ; mapped ; 7149 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F993 +F994 ; mapped ; 7489 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F994 +F995 ; mapped ; 79CA # 1.1 CJK COMPATIBILITY IDEOGRAPH-F995 +F996 ; mapped ; 7DF4 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F996 +F997 ; mapped ; 806F # 1.1 CJK COMPATIBILITY IDEOGRAPH-F997 +F998 ; mapped ; 8F26 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F998 +F999 ; mapped ; 84EE # 1.1 CJK COMPATIBILITY IDEOGRAPH-F999 +F99A ; mapped ; 9023 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F99A +F99B ; mapped ; 934A # 1.1 CJK COMPATIBILITY IDEOGRAPH-F99B +F99C ; mapped ; 5217 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F99C +F99D ; mapped ; 52A3 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F99D +F99E ; mapped ; 54BD # 1.1 CJK COMPATIBILITY IDEOGRAPH-F99E +F99F ; mapped ; 70C8 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F99F +F9A0 ; mapped ; 88C2 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9A0 +F9A1 ; mapped ; 8AAA # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9A1 +F9A2 ; mapped ; 5EC9 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9A2 +F9A3 ; mapped ; 5FF5 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9A3 +F9A4 ; mapped ; 637B # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9A4 +F9A5 ; mapped ; 6BAE # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9A5 +F9A6 ; mapped ; 7C3E # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9A6 +F9A7 ; mapped ; 7375 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9A7 +F9A8 ; mapped ; 4EE4 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9A8 +F9A9 ; mapped ; 56F9 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9A9 +F9AA ; mapped ; 5BE7 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9AA +F9AB ; mapped ; 5DBA # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9AB +F9AC ; mapped ; 601C # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9AC +F9AD ; mapped ; 73B2 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9AD +F9AE ; mapped ; 7469 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9AE 
+F9AF ; mapped ; 7F9A # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9AF +F9B0 ; mapped ; 8046 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9B0 +F9B1 ; mapped ; 9234 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9B1 +F9B2 ; mapped ; 96F6 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9B2 +F9B3 ; mapped ; 9748 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9B3 +F9B4 ; mapped ; 9818 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9B4 +F9B5 ; mapped ; 4F8B # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9B5 +F9B6 ; mapped ; 79AE # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9B6 +F9B7 ; mapped ; 91B4 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9B7 +F9B8 ; mapped ; 96B8 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9B8 +F9B9 ; mapped ; 60E1 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9B9 +F9BA ; mapped ; 4E86 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9BA +F9BB ; mapped ; 50DA # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9BB +F9BC ; mapped ; 5BEE # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9BC +F9BD ; mapped ; 5C3F # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9BD +F9BE ; mapped ; 6599 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9BE +F9BF ; mapped ; 6A02 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9BF +F9C0 ; mapped ; 71CE # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9C0 +F9C1 ; mapped ; 7642 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9C1 +F9C2 ; mapped ; 84FC # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9C2 +F9C3 ; mapped ; 907C # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9C3 +F9C4 ; mapped ; 9F8D # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9C4 +F9C5 ; mapped ; 6688 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9C5 +F9C6 ; mapped ; 962E # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9C6 +F9C7 ; mapped ; 5289 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9C7 +F9C8 ; mapped ; 677B # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9C8 +F9C9 ; mapped ; 67F3 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9C9 +F9CA ; mapped ; 6D41 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9CA +F9CB ; mapped ; 6E9C # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9CB +F9CC ; mapped ; 7409 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9CC +F9CD ; mapped ; 7559 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9CD +F9CE ; mapped ; 786B # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9CE +F9CF ; mapped ; 7D10 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9CF +F9D0 ; mapped ; 985E # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9D0 +F9D1 ; mapped ; 516D # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9D1 +F9D2 ; mapped ; 622E # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9D2 +F9D3 ; mapped ; 9678 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9D3 +F9D4 ; mapped ; 502B # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9D4 +F9D5 ; mapped ; 5D19 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9D5 +F9D6 ; mapped ; 6DEA # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9D6 +F9D7 ; mapped ; 8F2A # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9D7 +F9D8 ; mapped ; 5F8B # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9D8 +F9D9 ; mapped ; 6144 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9D9 +F9DA ; mapped ; 6817 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9DA +F9DB ; mapped ; 7387 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9DB +F9DC ; mapped ; 9686 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9DC +F9DD ; mapped ; 5229 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9DD +F9DE ; mapped ; 540F # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9DE +F9DF ; mapped ; 5C65 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9DF +F9E0 ; mapped ; 6613 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9E0 +F9E1 ; mapped ; 674E # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9E1 +F9E2 ; mapped ; 68A8 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9E2 +F9E3 ; mapped ; 6CE5 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9E3 +F9E4 ; mapped ; 7406 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9E4 +F9E5 ; mapped ; 75E2 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9E5 +F9E6 ; mapped ; 7F79 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9E6 +F9E7 ; mapped ; 88CF # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9E7 +F9E8 ; mapped ; 88E1 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9E8 +F9E9 ; mapped ; 
91CC # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9E9 +F9EA ; mapped ; 96E2 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9EA +F9EB ; mapped ; 533F # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9EB +F9EC ; mapped ; 6EBA # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9EC +F9ED ; mapped ; 541D # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9ED +F9EE ; mapped ; 71D0 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9EE +F9EF ; mapped ; 7498 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9EF +F9F0 ; mapped ; 85FA # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9F0 +F9F1 ; mapped ; 96A3 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9F1 +F9F2 ; mapped ; 9C57 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9F2 +F9F3 ; mapped ; 9E9F # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9F3 +F9F4 ; mapped ; 6797 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9F4 +F9F5 ; mapped ; 6DCB # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9F5 +F9F6 ; mapped ; 81E8 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9F6 +F9F7 ; mapped ; 7ACB # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9F7 +F9F8 ; mapped ; 7B20 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9F8 +F9F9 ; mapped ; 7C92 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9F9 +F9FA ; mapped ; 72C0 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9FA +F9FB ; mapped ; 7099 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9FB +F9FC ; mapped ; 8B58 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9FC +F9FD ; mapped ; 4EC0 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9FD +F9FE ; mapped ; 8336 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9FE +F9FF ; mapped ; 523A # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9FF +FA00 ; mapped ; 5207 # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA00 +FA01 ; mapped ; 5EA6 # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA01 +FA02 ; mapped ; 62D3 # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA02 +FA03 ; mapped ; 7CD6 # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA03 +FA04 ; mapped ; 5B85 # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA04 +FA05 ; mapped ; 6D1E # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA05 +FA06 ; mapped ; 66B4 # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA06 +FA07 ; mapped ; 8F3B # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA07 +FA08 ; mapped ; 884C # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA08 +FA09 ; mapped ; 964D # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA09 +FA0A ; mapped ; 898B # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA0A +FA0B ; mapped ; 5ED3 # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA0B +FA0C ; mapped ; 5140 # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA0C +FA0D ; mapped ; 55C0 # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA0D +FA0E..FA0F ; valid # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA0E..CJK COMPATIBILITY IDEOGRAPH-FA0F +FA10 ; mapped ; 585A # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA10 +FA11 ; valid # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA11 +FA12 ; mapped ; 6674 # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA12 +FA13..FA14 ; valid # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA13..CJK COMPATIBILITY IDEOGRAPH-FA14 +FA15 ; mapped ; 51DE # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA15 +FA16 ; mapped ; 732A # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA16 +FA17 ; mapped ; 76CA # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA17 +FA18 ; mapped ; 793C # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA18 +FA19 ; mapped ; 795E # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA19 +FA1A ; mapped ; 7965 # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA1A +FA1B ; mapped ; 798F # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA1B +FA1C ; mapped ; 9756 # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA1C +FA1D ; mapped ; 7CBE # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA1D +FA1E ; mapped ; 7FBD # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA1E +FA1F ; valid # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA1F +FA20 ; mapped ; 8612 # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA20 +FA21 ; valid # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA21 +FA22 ; mapped ; 8AF8 # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA22 +FA23..FA24 ; valid # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA23..CJK COMPATIBILITY IDEOGRAPH-FA24 +FA25 ; mapped ; 9038 
# 1.1 CJK COMPATIBILITY IDEOGRAPH-FA25 +FA26 ; mapped ; 90FD # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA26 +FA27..FA29 ; valid # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA27..CJK COMPATIBILITY IDEOGRAPH-FA29 +FA2A ; mapped ; 98EF # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA2A +FA2B ; mapped ; 98FC # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA2B +FA2C ; mapped ; 9928 # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA2C +FA2D ; mapped ; 9DB4 # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA2D +FA2E ; mapped ; 90DE # 6.1 CJK COMPATIBILITY IDEOGRAPH-FA2E +FA2F ; mapped ; 96B7 # 6.1 CJK COMPATIBILITY IDEOGRAPH-FA2F +FA30 ; mapped ; 4FAE # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA30 +FA31 ; mapped ; 50E7 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA31 +FA32 ; mapped ; 514D # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA32 +FA33 ; mapped ; 52C9 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA33 +FA34 ; mapped ; 52E4 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA34 +FA35 ; mapped ; 5351 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA35 +FA36 ; mapped ; 559D # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA36 +FA37 ; mapped ; 5606 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA37 +FA38 ; mapped ; 5668 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA38 +FA39 ; mapped ; 5840 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA39 +FA3A ; mapped ; 58A8 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA3A +FA3B ; mapped ; 5C64 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA3B +FA3C ; mapped ; 5C6E # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA3C +FA3D ; mapped ; 6094 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA3D +FA3E ; mapped ; 6168 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA3E +FA3F ; mapped ; 618E # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA3F +FA40 ; mapped ; 61F2 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA40 +FA41 ; mapped ; 654F # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA41 +FA42 ; mapped ; 65E2 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA42 +FA43 ; mapped ; 6691 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA43 +FA44 ; mapped ; 6885 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA44 +FA45 ; mapped ; 6D77 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA45 +FA46 ; mapped ; 6E1A # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA46 +FA47 ; mapped ; 6F22 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA47 +FA48 ; mapped ; 716E # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA48 +FA49 ; mapped ; 722B # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA49 +FA4A ; mapped ; 7422 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA4A +FA4B ; mapped ; 7891 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA4B +FA4C ; mapped ; 793E # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA4C +FA4D ; mapped ; 7949 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA4D +FA4E ; mapped ; 7948 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA4E +FA4F ; mapped ; 7950 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA4F +FA50 ; mapped ; 7956 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA50 +FA51 ; mapped ; 795D # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA51 +FA52 ; mapped ; 798D # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA52 +FA53 ; mapped ; 798E # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA53 +FA54 ; mapped ; 7A40 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA54 +FA55 ; mapped ; 7A81 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA55 +FA56 ; mapped ; 7BC0 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA56 +FA57 ; mapped ; 7DF4 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA57 +FA58 ; mapped ; 7E09 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA58 +FA59 ; mapped ; 7E41 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA59 +FA5A ; mapped ; 7F72 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA5A +FA5B ; mapped ; 8005 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA5B +FA5C ; mapped ; 81ED # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA5C +FA5D..FA5E ; mapped ; 8279 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA5D..CJK COMPATIBILITY IDEOGRAPH-FA5E +FA5F ; mapped ; 8457 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA5F +FA60 ; mapped ; 8910 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA60 +FA61 ; mapped ; 8996 # 3.2 
CJK COMPATIBILITY IDEOGRAPH-FA61 +FA62 ; mapped ; 8B01 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA62 +FA63 ; mapped ; 8B39 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA63 +FA64 ; mapped ; 8CD3 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA64 +FA65 ; mapped ; 8D08 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA65 +FA66 ; mapped ; 8FB6 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA66 +FA67 ; mapped ; 9038 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA67 +FA68 ; mapped ; 96E3 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA68 +FA69 ; mapped ; 97FF # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA69 +FA6A ; mapped ; 983B # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA6A +FA6B ; mapped ; 6075 # 5.2 CJK COMPATIBILITY IDEOGRAPH-FA6B +FA6C ; mapped ; 242EE # 5.2 CJK COMPATIBILITY IDEOGRAPH-FA6C +FA6D ; mapped ; 8218 # 5.2 CJK COMPATIBILITY IDEOGRAPH-FA6D +FA6E..FA6F ; disallowed # NA .. +FA70 ; mapped ; 4E26 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA70 +FA71 ; mapped ; 51B5 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA71 +FA72 ; mapped ; 5168 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA72 +FA73 ; mapped ; 4F80 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA73 +FA74 ; mapped ; 5145 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA74 +FA75 ; mapped ; 5180 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA75 +FA76 ; mapped ; 52C7 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA76 +FA77 ; mapped ; 52FA # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA77 +FA78 ; mapped ; 559D # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA78 +FA79 ; mapped ; 5555 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA79 +FA7A ; mapped ; 5599 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA7A +FA7B ; mapped ; 55E2 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA7B +FA7C ; mapped ; 585A # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA7C +FA7D ; mapped ; 58B3 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA7D +FA7E ; mapped ; 5944 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA7E +FA7F ; mapped ; 5954 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA7F +FA80 ; mapped ; 5A62 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA80 +FA81 ; mapped ; 5B28 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA81 +FA82 ; mapped ; 5ED2 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA82 +FA83 ; mapped ; 5ED9 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA83 +FA84 ; mapped ; 5F69 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA84 +FA85 ; mapped ; 5FAD # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA85 +FA86 ; mapped ; 60D8 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA86 +FA87 ; mapped ; 614E # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA87 +FA88 ; mapped ; 6108 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA88 +FA89 ; mapped ; 618E # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA89 +FA8A ; mapped ; 6160 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA8A +FA8B ; mapped ; 61F2 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA8B +FA8C ; mapped ; 6234 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA8C +FA8D ; mapped ; 63C4 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA8D +FA8E ; mapped ; 641C # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA8E +FA8F ; mapped ; 6452 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA8F +FA90 ; mapped ; 6556 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA90 +FA91 ; mapped ; 6674 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA91 +FA92 ; mapped ; 6717 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA92 +FA93 ; mapped ; 671B # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA93 +FA94 ; mapped ; 6756 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA94 +FA95 ; mapped ; 6B79 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA95 +FA96 ; mapped ; 6BBA # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA96 +FA97 ; mapped ; 6D41 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA97 +FA98 ; mapped ; 6EDB # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA98 +FA99 ; mapped ; 6ECB # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA99 +FA9A ; mapped ; 6F22 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA9A +FA9B ; mapped ; 701E # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA9B +FA9C ; mapped ; 716E # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA9C +FA9D ; 
mapped ; 77A7 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA9D +FA9E ; mapped ; 7235 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA9E +FA9F ; mapped ; 72AF # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA9F +FAA0 ; mapped ; 732A # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAA0 +FAA1 ; mapped ; 7471 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAA1 +FAA2 ; mapped ; 7506 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAA2 +FAA3 ; mapped ; 753B # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAA3 +FAA4 ; mapped ; 761D # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAA4 +FAA5 ; mapped ; 761F # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAA5 +FAA6 ; mapped ; 76CA # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAA6 +FAA7 ; mapped ; 76DB # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAA7 +FAA8 ; mapped ; 76F4 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAA8 +FAA9 ; mapped ; 774A # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAA9 +FAAA ; mapped ; 7740 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAAA +FAAB ; mapped ; 78CC # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAAB +FAAC ; mapped ; 7AB1 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAAC +FAAD ; mapped ; 7BC0 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAAD +FAAE ; mapped ; 7C7B # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAAE +FAAF ; mapped ; 7D5B # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAAF +FAB0 ; mapped ; 7DF4 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAB0 +FAB1 ; mapped ; 7F3E # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAB1 +FAB2 ; mapped ; 8005 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAB2 +FAB3 ; mapped ; 8352 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAB3 +FAB4 ; mapped ; 83EF # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAB4 +FAB5 ; mapped ; 8779 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAB5 +FAB6 ; mapped ; 8941 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAB6 +FAB7 ; mapped ; 8986 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAB7 +FAB8 ; mapped ; 8996 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAB8 +FAB9 ; mapped ; 8ABF # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAB9 +FABA ; mapped ; 8AF8 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FABA +FABB ; mapped ; 8ACB # 4.1 CJK COMPATIBILITY IDEOGRAPH-FABB +FABC ; mapped ; 8B01 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FABC +FABD ; mapped ; 8AFE # 4.1 CJK COMPATIBILITY IDEOGRAPH-FABD +FABE ; mapped ; 8AED # 4.1 CJK COMPATIBILITY IDEOGRAPH-FABE +FABF ; mapped ; 8B39 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FABF +FAC0 ; mapped ; 8B8A # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAC0 +FAC1 ; mapped ; 8D08 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAC1 +FAC2 ; mapped ; 8F38 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAC2 +FAC3 ; mapped ; 9072 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAC3 +FAC4 ; mapped ; 9199 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAC4 +FAC5 ; mapped ; 9276 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAC5 +FAC6 ; mapped ; 967C # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAC6 +FAC7 ; mapped ; 96E3 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAC7 +FAC8 ; mapped ; 9756 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAC8 +FAC9 ; mapped ; 97DB # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAC9 +FACA ; mapped ; 97FF # 4.1 CJK COMPATIBILITY IDEOGRAPH-FACA +FACB ; mapped ; 980B # 4.1 CJK COMPATIBILITY IDEOGRAPH-FACB +FACC ; mapped ; 983B # 4.1 CJK COMPATIBILITY IDEOGRAPH-FACC +FACD ; mapped ; 9B12 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FACD +FACE ; mapped ; 9F9C # 4.1 CJK COMPATIBILITY IDEOGRAPH-FACE +FACF ; mapped ; 2284A # 4.1 CJK COMPATIBILITY IDEOGRAPH-FACF +FAD0 ; mapped ; 22844 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAD0 +FAD1 ; mapped ; 233D5 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAD1 +FAD2 ; mapped ; 3B9D # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAD2 +FAD3 ; mapped ; 4018 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAD3 +FAD4 ; mapped ; 4039 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAD4 +FAD5 ; mapped ; 25249 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAD5 +FAD6 ; mapped ; 25CD0 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAD6 +FAD7 ; mapped ; 
27ED3 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAD7 +FAD8 ; mapped ; 9F43 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAD8 +FAD9 ; mapped ; 9F8E # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAD9 +FADA..FAFF ; disallowed # NA .. +FB00 ; mapped ; 0066 0066 # 1.1 LATIN SMALL LIGATURE FF +FB01 ; mapped ; 0066 0069 # 1.1 LATIN SMALL LIGATURE FI +FB02 ; mapped ; 0066 006C # 1.1 LATIN SMALL LIGATURE FL +FB03 ; mapped ; 0066 0066 0069 #1.1 LATIN SMALL LIGATURE FFI +FB04 ; mapped ; 0066 0066 006C #1.1 LATIN SMALL LIGATURE FFL +FB05..FB06 ; mapped ; 0073 0074 # 1.1 LATIN SMALL LIGATURE LONG S T..LATIN SMALL LIGATURE ST +FB07..FB12 ; disallowed # NA .. +FB13 ; mapped ; 0574 0576 # 1.1 ARMENIAN SMALL LIGATURE MEN NOW +FB14 ; mapped ; 0574 0565 # 1.1 ARMENIAN SMALL LIGATURE MEN ECH +FB15 ; mapped ; 0574 056B # 1.1 ARMENIAN SMALL LIGATURE MEN INI +FB16 ; mapped ; 057E 0576 # 1.1 ARMENIAN SMALL LIGATURE VEW NOW +FB17 ; mapped ; 0574 056D # 1.1 ARMENIAN SMALL LIGATURE MEN XEH +FB18..FB1C ; disallowed # NA .. +FB1D ; mapped ; 05D9 05B4 # 3.0 HEBREW LETTER YOD WITH HIRIQ +FB1E ; valid # 1.1 HEBREW POINT JUDEO-SPANISH VARIKA +FB1F ; mapped ; 05F2 05B7 # 1.1 HEBREW LIGATURE YIDDISH YOD YOD PATAH +FB20 ; mapped ; 05E2 # 1.1 HEBREW LETTER ALTERNATIVE AYIN +FB21 ; mapped ; 05D0 # 1.1 HEBREW LETTER WIDE ALEF +FB22 ; mapped ; 05D3 # 1.1 HEBREW LETTER WIDE DALET +FB23 ; mapped ; 05D4 # 1.1 HEBREW LETTER WIDE HE +FB24 ; mapped ; 05DB # 1.1 HEBREW LETTER WIDE KAF +FB25 ; mapped ; 05DC # 1.1 HEBREW LETTER WIDE LAMED +FB26 ; mapped ; 05DD # 1.1 HEBREW LETTER WIDE FINAL MEM +FB27 ; mapped ; 05E8 # 1.1 HEBREW LETTER WIDE RESH +FB28 ; mapped ; 05EA # 1.1 HEBREW LETTER WIDE TAV +FB29 ; disallowed_STD3_mapped ; 002B # 1.1 HEBREW LETTER ALTERNATIVE PLUS SIGN +FB2A ; mapped ; 05E9 05C1 # 1.1 HEBREW LETTER SHIN WITH SHIN DOT +FB2B ; mapped ; 05E9 05C2 # 1.1 HEBREW LETTER SHIN WITH SIN DOT +FB2C ; mapped ; 05E9 05BC 05C1 #1.1 HEBREW LETTER SHIN WITH DAGESH AND SHIN DOT +FB2D ; mapped ; 05E9 05BC 05C2 #1.1 HEBREW LETTER SHIN WITH DAGESH AND SIN DOT +FB2E ; mapped ; 05D0 05B7 # 1.1 HEBREW LETTER ALEF WITH PATAH +FB2F ; mapped ; 05D0 05B8 # 1.1 HEBREW LETTER ALEF WITH QAMATS +FB30 ; mapped ; 05D0 05BC # 1.1 HEBREW LETTER ALEF WITH MAPIQ +FB31 ; mapped ; 05D1 05BC # 1.1 HEBREW LETTER BET WITH DAGESH +FB32 ; mapped ; 05D2 05BC # 1.1 HEBREW LETTER GIMEL WITH DAGESH +FB33 ; mapped ; 05D3 05BC # 1.1 HEBREW LETTER DALET WITH DAGESH +FB34 ; mapped ; 05D4 05BC # 1.1 HEBREW LETTER HE WITH MAPIQ +FB35 ; mapped ; 05D5 05BC # 1.1 HEBREW LETTER VAV WITH DAGESH +FB36 ; mapped ; 05D6 05BC # 1.1 HEBREW LETTER ZAYIN WITH DAGESH +FB37 ; disallowed # NA +FB38 ; mapped ; 05D8 05BC # 1.1 HEBREW LETTER TET WITH DAGESH +FB39 ; mapped ; 05D9 05BC # 1.1 HEBREW LETTER YOD WITH DAGESH +FB3A ; mapped ; 05DA 05BC # 1.1 HEBREW LETTER FINAL KAF WITH DAGESH +FB3B ; mapped ; 05DB 05BC # 1.1 HEBREW LETTER KAF WITH DAGESH +FB3C ; mapped ; 05DC 05BC # 1.1 HEBREW LETTER LAMED WITH DAGESH +FB3D ; disallowed # NA +FB3E ; mapped ; 05DE 05BC # 1.1 HEBREW LETTER MEM WITH DAGESH +FB3F ; disallowed # NA +FB40 ; mapped ; 05E0 05BC # 1.1 HEBREW LETTER NUN WITH DAGESH +FB41 ; mapped ; 05E1 05BC # 1.1 HEBREW LETTER SAMEKH WITH DAGESH +FB42 ; disallowed # NA +FB43 ; mapped ; 05E3 05BC # 1.1 HEBREW LETTER FINAL PE WITH DAGESH +FB44 ; mapped ; 05E4 05BC # 1.1 HEBREW LETTER PE WITH DAGESH +FB45 ; disallowed # NA +FB46 ; mapped ; 05E6 05BC # 1.1 HEBREW LETTER TSADI WITH DAGESH +FB47 ; mapped ; 05E7 05BC # 1.1 HEBREW LETTER QOF WITH DAGESH +FB48 ; mapped ; 05E8 05BC # 1.1 HEBREW LETTER RESH WITH DAGESH +FB49 ; 
mapped ; 05E9 05BC # 1.1 HEBREW LETTER SHIN WITH DAGESH +FB4A ; mapped ; 05EA 05BC # 1.1 HEBREW LETTER TAV WITH DAGESH +FB4B ; mapped ; 05D5 05B9 # 1.1 HEBREW LETTER VAV WITH HOLAM +FB4C ; mapped ; 05D1 05BF # 1.1 HEBREW LETTER BET WITH RAFE +FB4D ; mapped ; 05DB 05BF # 1.1 HEBREW LETTER KAF WITH RAFE +FB4E ; mapped ; 05E4 05BF # 1.1 HEBREW LETTER PE WITH RAFE +FB4F ; mapped ; 05D0 05DC # 1.1 HEBREW LIGATURE ALEF LAMED +FB50..FB51 ; mapped ; 0671 # 1.1 ARABIC LETTER ALEF WASLA ISOLATED FORM..ARABIC LETTER ALEF WASLA FINAL FORM +FB52..FB55 ; mapped ; 067B # 1.1 ARABIC LETTER BEEH ISOLATED FORM..ARABIC LETTER BEEH MEDIAL FORM +FB56..FB59 ; mapped ; 067E # 1.1 ARABIC LETTER PEH ISOLATED FORM..ARABIC LETTER PEH MEDIAL FORM +FB5A..FB5D ; mapped ; 0680 # 1.1 ARABIC LETTER BEHEH ISOLATED FORM..ARABIC LETTER BEHEH MEDIAL FORM +FB5E..FB61 ; mapped ; 067A # 1.1 ARABIC LETTER TTEHEH ISOLATED FORM..ARABIC LETTER TTEHEH MEDIAL FORM +FB62..FB65 ; mapped ; 067F # 1.1 ARABIC LETTER TEHEH ISOLATED FORM..ARABIC LETTER TEHEH MEDIAL FORM +FB66..FB69 ; mapped ; 0679 # 1.1 ARABIC LETTER TTEH ISOLATED FORM..ARABIC LETTER TTEH MEDIAL FORM +FB6A..FB6D ; mapped ; 06A4 # 1.1 ARABIC LETTER VEH ISOLATED FORM..ARABIC LETTER VEH MEDIAL FORM +FB6E..FB71 ; mapped ; 06A6 # 1.1 ARABIC LETTER PEHEH ISOLATED FORM..ARABIC LETTER PEHEH MEDIAL FORM +FB72..FB75 ; mapped ; 0684 # 1.1 ARABIC LETTER DYEH ISOLATED FORM..ARABIC LETTER DYEH MEDIAL FORM +FB76..FB79 ; mapped ; 0683 # 1.1 ARABIC LETTER NYEH ISOLATED FORM..ARABIC LETTER NYEH MEDIAL FORM +FB7A..FB7D ; mapped ; 0686 # 1.1 ARABIC LETTER TCHEH ISOLATED FORM..ARABIC LETTER TCHEH MEDIAL FORM +FB7E..FB81 ; mapped ; 0687 # 1.1 ARABIC LETTER TCHEHEH ISOLATED FORM..ARABIC LETTER TCHEHEH MEDIAL FORM +FB82..FB83 ; mapped ; 068D # 1.1 ARABIC LETTER DDAHAL ISOLATED FORM..ARABIC LETTER DDAHAL FINAL FORM +FB84..FB85 ; mapped ; 068C # 1.1 ARABIC LETTER DAHAL ISOLATED FORM..ARABIC LETTER DAHAL FINAL FORM +FB86..FB87 ; mapped ; 068E # 1.1 ARABIC LETTER DUL ISOLATED FORM..ARABIC LETTER DUL FINAL FORM +FB88..FB89 ; mapped ; 0688 # 1.1 ARABIC LETTER DDAL ISOLATED FORM..ARABIC LETTER DDAL FINAL FORM +FB8A..FB8B ; mapped ; 0698 # 1.1 ARABIC LETTER JEH ISOLATED FORM..ARABIC LETTER JEH FINAL FORM +FB8C..FB8D ; mapped ; 0691 # 1.1 ARABIC LETTER RREH ISOLATED FORM..ARABIC LETTER RREH FINAL FORM +FB8E..FB91 ; mapped ; 06A9 # 1.1 ARABIC LETTER KEHEH ISOLATED FORM..ARABIC LETTER KEHEH MEDIAL FORM +FB92..FB95 ; mapped ; 06AF # 1.1 ARABIC LETTER GAF ISOLATED FORM..ARABIC LETTER GAF MEDIAL FORM +FB96..FB99 ; mapped ; 06B3 # 1.1 ARABIC LETTER GUEH ISOLATED FORM..ARABIC LETTER GUEH MEDIAL FORM +FB9A..FB9D ; mapped ; 06B1 # 1.1 ARABIC LETTER NGOEH ISOLATED FORM..ARABIC LETTER NGOEH MEDIAL FORM +FB9E..FB9F ; mapped ; 06BA # 1.1 ARABIC LETTER NOON GHUNNA ISOLATED FORM..ARABIC LETTER NOON GHUNNA FINAL FORM +FBA0..FBA3 ; mapped ; 06BB # 1.1 ARABIC LETTER RNOON ISOLATED FORM..ARABIC LETTER RNOON MEDIAL FORM +FBA4..FBA5 ; mapped ; 06C0 # 1.1 ARABIC LETTER HEH WITH YEH ABOVE ISOLATED FORM..ARABIC LETTER HEH WITH YEH ABOVE FINAL FORM +FBA6..FBA9 ; mapped ; 06C1 # 1.1 ARABIC LETTER HEH GOAL ISOLATED FORM..ARABIC LETTER HEH GOAL MEDIAL FORM +FBAA..FBAD ; mapped ; 06BE # 1.1 ARABIC LETTER HEH DOACHASHMEE ISOLATED FORM..ARABIC LETTER HEH DOACHASHMEE MEDIAL FORM +FBAE..FBAF ; mapped ; 06D2 # 1.1 ARABIC LETTER YEH BARREE ISOLATED FORM..ARABIC LETTER YEH BARREE FINAL FORM +FBB0..FBB1 ; mapped ; 06D3 # 1.1 ARABIC LETTER YEH BARREE WITH HAMZA ABOVE ISOLATED FORM..ARABIC LETTER YEH BARREE WITH HAMZA ABOVE FINAL FORM 
+FBB2..FBC1 ; valid ; ; NV8 # 6.0 ARABIC SYMBOL DOT ABOVE..ARABIC SYMBOL SMALL TAH BELOW +FBC2..FBD2 ; disallowed # NA .. +FBD3..FBD6 ; mapped ; 06AD # 1.1 ARABIC LETTER NG ISOLATED FORM..ARABIC LETTER NG MEDIAL FORM +FBD7..FBD8 ; mapped ; 06C7 # 1.1 ARABIC LETTER U ISOLATED FORM..ARABIC LETTER U FINAL FORM +FBD9..FBDA ; mapped ; 06C6 # 1.1 ARABIC LETTER OE ISOLATED FORM..ARABIC LETTER OE FINAL FORM +FBDB..FBDC ; mapped ; 06C8 # 1.1 ARABIC LETTER YU ISOLATED FORM..ARABIC LETTER YU FINAL FORM +FBDD ; mapped ; 06C7 0674 # 1.1 ARABIC LETTER U WITH HAMZA ABOVE ISOLATED FORM +FBDE..FBDF ; mapped ; 06CB # 1.1 ARABIC LETTER VE ISOLATED FORM..ARABIC LETTER VE FINAL FORM +FBE0..FBE1 ; mapped ; 06C5 # 1.1 ARABIC LETTER KIRGHIZ OE ISOLATED FORM..ARABIC LETTER KIRGHIZ OE FINAL FORM +FBE2..FBE3 ; mapped ; 06C9 # 1.1 ARABIC LETTER KIRGHIZ YU ISOLATED FORM..ARABIC LETTER KIRGHIZ YU FINAL FORM +FBE4..FBE7 ; mapped ; 06D0 # 1.1 ARABIC LETTER E ISOLATED FORM..ARABIC LETTER E MEDIAL FORM +FBE8..FBE9 ; mapped ; 0649 # 1.1 ARABIC LETTER UIGHUR KAZAKH KIRGHIZ ALEF MAKSURA INITIAL FORM..ARABIC LETTER UIGHUR KAZAKH KIRGHIZ ALEF MAKSURA MEDIAL FORM +FBEA..FBEB ; mapped ; 0626 0627 # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH ALEF ISOLATED FORM..ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH ALEF FINAL FORM +FBEC..FBED ; mapped ; 0626 06D5 # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH AE ISOLATED FORM..ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH AE FINAL FORM +FBEE..FBEF ; mapped ; 0626 0648 # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH WAW ISOLATED FORM..ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH WAW FINAL FORM +FBF0..FBF1 ; mapped ; 0626 06C7 # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH U ISOLATED FORM..ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH U FINAL FORM +FBF2..FBF3 ; mapped ; 0626 06C6 # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH OE ISOLATED FORM..ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH OE FINAL FORM +FBF4..FBF5 ; mapped ; 0626 06C8 # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH YU ISOLATED FORM..ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH YU FINAL FORM +FBF6..FBF8 ; mapped ; 0626 06D0 # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH E ISOLATED FORM..ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH E INITIAL FORM +FBF9..FBFB ; mapped ; 0626 0649 # 1.1 ARABIC LIGATURE UIGHUR KIRGHIZ YEH WITH HAMZA ABOVE WITH ALEF MAKSURA ISOLATED FORM..ARABIC LIGATURE UIGHUR KIRGHIZ YEH WITH HAMZA ABOVE WITH ALEF MAKSURA INITIAL FORM +FBFC..FBFF ; mapped ; 06CC # 1.1 ARABIC LETTER FARSI YEH ISOLATED FORM..ARABIC LETTER FARSI YEH MEDIAL FORM +FC00 ; mapped ; 0626 062C # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH JEEM ISOLATED FORM +FC01 ; mapped ; 0626 062D # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH HAH ISOLATED FORM +FC02 ; mapped ; 0626 0645 # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH MEEM ISOLATED FORM +FC03 ; mapped ; 0626 0649 # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH ALEF MAKSURA ISOLATED FORM +FC04 ; mapped ; 0626 064A # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH YEH ISOLATED FORM +FC05 ; mapped ; 0628 062C # 1.1 ARABIC LIGATURE BEH WITH JEEM ISOLATED FORM +FC06 ; mapped ; 0628 062D # 1.1 ARABIC LIGATURE BEH WITH HAH ISOLATED FORM +FC07 ; mapped ; 0628 062E # 1.1 ARABIC LIGATURE BEH WITH KHAH ISOLATED FORM +FC08 ; mapped ; 0628 0645 # 1.1 ARABIC LIGATURE BEH WITH MEEM ISOLATED FORM +FC09 ; mapped ; 0628 0649 # 1.1 ARABIC LIGATURE BEH WITH ALEF MAKSURA ISOLATED FORM +FC0A ; mapped ; 0628 064A # 1.1 ARABIC LIGATURE BEH WITH YEH ISOLATED FORM +FC0B ; mapped ; 062A 062C # 1.1 ARABIC LIGATURE TEH WITH JEEM 
ISOLATED FORM +FC0C ; mapped ; 062A 062D # 1.1 ARABIC LIGATURE TEH WITH HAH ISOLATED FORM +FC0D ; mapped ; 062A 062E # 1.1 ARABIC LIGATURE TEH WITH KHAH ISOLATED FORM +FC0E ; mapped ; 062A 0645 # 1.1 ARABIC LIGATURE TEH WITH MEEM ISOLATED FORM +FC0F ; mapped ; 062A 0649 # 1.1 ARABIC LIGATURE TEH WITH ALEF MAKSURA ISOLATED FORM +FC10 ; mapped ; 062A 064A # 1.1 ARABIC LIGATURE TEH WITH YEH ISOLATED FORM +FC11 ; mapped ; 062B 062C # 1.1 ARABIC LIGATURE THEH WITH JEEM ISOLATED FORM +FC12 ; mapped ; 062B 0645 # 1.1 ARABIC LIGATURE THEH WITH MEEM ISOLATED FORM +FC13 ; mapped ; 062B 0649 # 1.1 ARABIC LIGATURE THEH WITH ALEF MAKSURA ISOLATED FORM +FC14 ; mapped ; 062B 064A # 1.1 ARABIC LIGATURE THEH WITH YEH ISOLATED FORM +FC15 ; mapped ; 062C 062D # 1.1 ARABIC LIGATURE JEEM WITH HAH ISOLATED FORM +FC16 ; mapped ; 062C 0645 # 1.1 ARABIC LIGATURE JEEM WITH MEEM ISOLATED FORM +FC17 ; mapped ; 062D 062C # 1.1 ARABIC LIGATURE HAH WITH JEEM ISOLATED FORM +FC18 ; mapped ; 062D 0645 # 1.1 ARABIC LIGATURE HAH WITH MEEM ISOLATED FORM +FC19 ; mapped ; 062E 062C # 1.1 ARABIC LIGATURE KHAH WITH JEEM ISOLATED FORM +FC1A ; mapped ; 062E 062D # 1.1 ARABIC LIGATURE KHAH WITH HAH ISOLATED FORM +FC1B ; mapped ; 062E 0645 # 1.1 ARABIC LIGATURE KHAH WITH MEEM ISOLATED FORM +FC1C ; mapped ; 0633 062C # 1.1 ARABIC LIGATURE SEEN WITH JEEM ISOLATED FORM +FC1D ; mapped ; 0633 062D # 1.1 ARABIC LIGATURE SEEN WITH HAH ISOLATED FORM +FC1E ; mapped ; 0633 062E # 1.1 ARABIC LIGATURE SEEN WITH KHAH ISOLATED FORM +FC1F ; mapped ; 0633 0645 # 1.1 ARABIC LIGATURE SEEN WITH MEEM ISOLATED FORM +FC20 ; mapped ; 0635 062D # 1.1 ARABIC LIGATURE SAD WITH HAH ISOLATED FORM +FC21 ; mapped ; 0635 0645 # 1.1 ARABIC LIGATURE SAD WITH MEEM ISOLATED FORM +FC22 ; mapped ; 0636 062C # 1.1 ARABIC LIGATURE DAD WITH JEEM ISOLATED FORM +FC23 ; mapped ; 0636 062D # 1.1 ARABIC LIGATURE DAD WITH HAH ISOLATED FORM +FC24 ; mapped ; 0636 062E # 1.1 ARABIC LIGATURE DAD WITH KHAH ISOLATED FORM +FC25 ; mapped ; 0636 0645 # 1.1 ARABIC LIGATURE DAD WITH MEEM ISOLATED FORM +FC26 ; mapped ; 0637 062D # 1.1 ARABIC LIGATURE TAH WITH HAH ISOLATED FORM +FC27 ; mapped ; 0637 0645 # 1.1 ARABIC LIGATURE TAH WITH MEEM ISOLATED FORM +FC28 ; mapped ; 0638 0645 # 1.1 ARABIC LIGATURE ZAH WITH MEEM ISOLATED FORM +FC29 ; mapped ; 0639 062C # 1.1 ARABIC LIGATURE AIN WITH JEEM ISOLATED FORM +FC2A ; mapped ; 0639 0645 # 1.1 ARABIC LIGATURE AIN WITH MEEM ISOLATED FORM +FC2B ; mapped ; 063A 062C # 1.1 ARABIC LIGATURE GHAIN WITH JEEM ISOLATED FORM +FC2C ; mapped ; 063A 0645 # 1.1 ARABIC LIGATURE GHAIN WITH MEEM ISOLATED FORM +FC2D ; mapped ; 0641 062C # 1.1 ARABIC LIGATURE FEH WITH JEEM ISOLATED FORM +FC2E ; mapped ; 0641 062D # 1.1 ARABIC LIGATURE FEH WITH HAH ISOLATED FORM +FC2F ; mapped ; 0641 062E # 1.1 ARABIC LIGATURE FEH WITH KHAH ISOLATED FORM +FC30 ; mapped ; 0641 0645 # 1.1 ARABIC LIGATURE FEH WITH MEEM ISOLATED FORM +FC31 ; mapped ; 0641 0649 # 1.1 ARABIC LIGATURE FEH WITH ALEF MAKSURA ISOLATED FORM +FC32 ; mapped ; 0641 064A # 1.1 ARABIC LIGATURE FEH WITH YEH ISOLATED FORM +FC33 ; mapped ; 0642 062D # 1.1 ARABIC LIGATURE QAF WITH HAH ISOLATED FORM +FC34 ; mapped ; 0642 0645 # 1.1 ARABIC LIGATURE QAF WITH MEEM ISOLATED FORM +FC35 ; mapped ; 0642 0649 # 1.1 ARABIC LIGATURE QAF WITH ALEF MAKSURA ISOLATED FORM +FC36 ; mapped ; 0642 064A # 1.1 ARABIC LIGATURE QAF WITH YEH ISOLATED FORM +FC37 ; mapped ; 0643 0627 # 1.1 ARABIC LIGATURE KAF WITH ALEF ISOLATED FORM +FC38 ; mapped ; 0643 062C # 1.1 ARABIC LIGATURE KAF WITH JEEM ISOLATED FORM +FC39 ; mapped ; 0643 062D # 1.1 ARABIC 
LIGATURE KAF WITH HAH ISOLATED FORM +FC3A ; mapped ; 0643 062E # 1.1 ARABIC LIGATURE KAF WITH KHAH ISOLATED FORM +FC3B ; mapped ; 0643 0644 # 1.1 ARABIC LIGATURE KAF WITH LAM ISOLATED FORM +FC3C ; mapped ; 0643 0645 # 1.1 ARABIC LIGATURE KAF WITH MEEM ISOLATED FORM +FC3D ; mapped ; 0643 0649 # 1.1 ARABIC LIGATURE KAF WITH ALEF MAKSURA ISOLATED FORM +FC3E ; mapped ; 0643 064A # 1.1 ARABIC LIGATURE KAF WITH YEH ISOLATED FORM +FC3F ; mapped ; 0644 062C # 1.1 ARABIC LIGATURE LAM WITH JEEM ISOLATED FORM +FC40 ; mapped ; 0644 062D # 1.1 ARABIC LIGATURE LAM WITH HAH ISOLATED FORM +FC41 ; mapped ; 0644 062E # 1.1 ARABIC LIGATURE LAM WITH KHAH ISOLATED FORM +FC42 ; mapped ; 0644 0645 # 1.1 ARABIC LIGATURE LAM WITH MEEM ISOLATED FORM +FC43 ; mapped ; 0644 0649 # 1.1 ARABIC LIGATURE LAM WITH ALEF MAKSURA ISOLATED FORM +FC44 ; mapped ; 0644 064A # 1.1 ARABIC LIGATURE LAM WITH YEH ISOLATED FORM +FC45 ; mapped ; 0645 062C # 1.1 ARABIC LIGATURE MEEM WITH JEEM ISOLATED FORM +FC46 ; mapped ; 0645 062D # 1.1 ARABIC LIGATURE MEEM WITH HAH ISOLATED FORM +FC47 ; mapped ; 0645 062E # 1.1 ARABIC LIGATURE MEEM WITH KHAH ISOLATED FORM +FC48 ; mapped ; 0645 0645 # 1.1 ARABIC LIGATURE MEEM WITH MEEM ISOLATED FORM +FC49 ; mapped ; 0645 0649 # 1.1 ARABIC LIGATURE MEEM WITH ALEF MAKSURA ISOLATED FORM +FC4A ; mapped ; 0645 064A # 1.1 ARABIC LIGATURE MEEM WITH YEH ISOLATED FORM +FC4B ; mapped ; 0646 062C # 1.1 ARABIC LIGATURE NOON WITH JEEM ISOLATED FORM +FC4C ; mapped ; 0646 062D # 1.1 ARABIC LIGATURE NOON WITH HAH ISOLATED FORM +FC4D ; mapped ; 0646 062E # 1.1 ARABIC LIGATURE NOON WITH KHAH ISOLATED FORM +FC4E ; mapped ; 0646 0645 # 1.1 ARABIC LIGATURE NOON WITH MEEM ISOLATED FORM +FC4F ; mapped ; 0646 0649 # 1.1 ARABIC LIGATURE NOON WITH ALEF MAKSURA ISOLATED FORM +FC50 ; mapped ; 0646 064A # 1.1 ARABIC LIGATURE NOON WITH YEH ISOLATED FORM +FC51 ; mapped ; 0647 062C # 1.1 ARABIC LIGATURE HEH WITH JEEM ISOLATED FORM +FC52 ; mapped ; 0647 0645 # 1.1 ARABIC LIGATURE HEH WITH MEEM ISOLATED FORM +FC53 ; mapped ; 0647 0649 # 1.1 ARABIC LIGATURE HEH WITH ALEF MAKSURA ISOLATED FORM +FC54 ; mapped ; 0647 064A # 1.1 ARABIC LIGATURE HEH WITH YEH ISOLATED FORM +FC55 ; mapped ; 064A 062C # 1.1 ARABIC LIGATURE YEH WITH JEEM ISOLATED FORM +FC56 ; mapped ; 064A 062D # 1.1 ARABIC LIGATURE YEH WITH HAH ISOLATED FORM +FC57 ; mapped ; 064A 062E # 1.1 ARABIC LIGATURE YEH WITH KHAH ISOLATED FORM +FC58 ; mapped ; 064A 0645 # 1.1 ARABIC LIGATURE YEH WITH MEEM ISOLATED FORM +FC59 ; mapped ; 064A 0649 # 1.1 ARABIC LIGATURE YEH WITH ALEF MAKSURA ISOLATED FORM +FC5A ; mapped ; 064A 064A # 1.1 ARABIC LIGATURE YEH WITH YEH ISOLATED FORM +FC5B ; mapped ; 0630 0670 # 1.1 ARABIC LIGATURE THAL WITH SUPERSCRIPT ALEF ISOLATED FORM +FC5C ; mapped ; 0631 0670 # 1.1 ARABIC LIGATURE REH WITH SUPERSCRIPT ALEF ISOLATED FORM +FC5D ; mapped ; 0649 0670 # 1.1 ARABIC LIGATURE ALEF MAKSURA WITH SUPERSCRIPT ALEF ISOLATED FORM +FC5E ; disallowed_STD3_mapped ; 0020 064C 0651 #1.1 ARABIC LIGATURE SHADDA WITH DAMMATAN ISOLATED FORM +FC5F ; disallowed_STD3_mapped ; 0020 064D 0651 #1.1 ARABIC LIGATURE SHADDA WITH KASRATAN ISOLATED FORM +FC60 ; disallowed_STD3_mapped ; 0020 064E 0651 #1.1 ARABIC LIGATURE SHADDA WITH FATHA ISOLATED FORM +FC61 ; disallowed_STD3_mapped ; 0020 064F 0651 #1.1 ARABIC LIGATURE SHADDA WITH DAMMA ISOLATED FORM +FC62 ; disallowed_STD3_mapped ; 0020 0650 0651 #1.1 ARABIC LIGATURE SHADDA WITH KASRA ISOLATED FORM +FC63 ; disallowed_STD3_mapped ; 0020 0651 0670 #1.1 ARABIC LIGATURE SHADDA WITH SUPERSCRIPT ALEF ISOLATED FORM +FC64 ; mapped ; 0626 0631 # 
1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH REH FINAL FORM +FC65 ; mapped ; 0626 0632 # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH ZAIN FINAL FORM +FC66 ; mapped ; 0626 0645 # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH MEEM FINAL FORM +FC67 ; mapped ; 0626 0646 # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH NOON FINAL FORM +FC68 ; mapped ; 0626 0649 # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH ALEF MAKSURA FINAL FORM +FC69 ; mapped ; 0626 064A # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH YEH FINAL FORM +FC6A ; mapped ; 0628 0631 # 1.1 ARABIC LIGATURE BEH WITH REH FINAL FORM +FC6B ; mapped ; 0628 0632 # 1.1 ARABIC LIGATURE BEH WITH ZAIN FINAL FORM +FC6C ; mapped ; 0628 0645 # 1.1 ARABIC LIGATURE BEH WITH MEEM FINAL FORM +FC6D ; mapped ; 0628 0646 # 1.1 ARABIC LIGATURE BEH WITH NOON FINAL FORM +FC6E ; mapped ; 0628 0649 # 1.1 ARABIC LIGATURE BEH WITH ALEF MAKSURA FINAL FORM +FC6F ; mapped ; 0628 064A # 1.1 ARABIC LIGATURE BEH WITH YEH FINAL FORM +FC70 ; mapped ; 062A 0631 # 1.1 ARABIC LIGATURE TEH WITH REH FINAL FORM +FC71 ; mapped ; 062A 0632 # 1.1 ARABIC LIGATURE TEH WITH ZAIN FINAL FORM +FC72 ; mapped ; 062A 0645 # 1.1 ARABIC LIGATURE TEH WITH MEEM FINAL FORM +FC73 ; mapped ; 062A 0646 # 1.1 ARABIC LIGATURE TEH WITH NOON FINAL FORM +FC74 ; mapped ; 062A 0649 # 1.1 ARABIC LIGATURE TEH WITH ALEF MAKSURA FINAL FORM +FC75 ; mapped ; 062A 064A # 1.1 ARABIC LIGATURE TEH WITH YEH FINAL FORM +FC76 ; mapped ; 062B 0631 # 1.1 ARABIC LIGATURE THEH WITH REH FINAL FORM +FC77 ; mapped ; 062B 0632 # 1.1 ARABIC LIGATURE THEH WITH ZAIN FINAL FORM +FC78 ; mapped ; 062B 0645 # 1.1 ARABIC LIGATURE THEH WITH MEEM FINAL FORM +FC79 ; mapped ; 062B 0646 # 1.1 ARABIC LIGATURE THEH WITH NOON FINAL FORM +FC7A ; mapped ; 062B 0649 # 1.1 ARABIC LIGATURE THEH WITH ALEF MAKSURA FINAL FORM +FC7B ; mapped ; 062B 064A # 1.1 ARABIC LIGATURE THEH WITH YEH FINAL FORM +FC7C ; mapped ; 0641 0649 # 1.1 ARABIC LIGATURE FEH WITH ALEF MAKSURA FINAL FORM +FC7D ; mapped ; 0641 064A # 1.1 ARABIC LIGATURE FEH WITH YEH FINAL FORM +FC7E ; mapped ; 0642 0649 # 1.1 ARABIC LIGATURE QAF WITH ALEF MAKSURA FINAL FORM +FC7F ; mapped ; 0642 064A # 1.1 ARABIC LIGATURE QAF WITH YEH FINAL FORM +FC80 ; mapped ; 0643 0627 # 1.1 ARABIC LIGATURE KAF WITH ALEF FINAL FORM +FC81 ; mapped ; 0643 0644 # 1.1 ARABIC LIGATURE KAF WITH LAM FINAL FORM +FC82 ; mapped ; 0643 0645 # 1.1 ARABIC LIGATURE KAF WITH MEEM FINAL FORM +FC83 ; mapped ; 0643 0649 # 1.1 ARABIC LIGATURE KAF WITH ALEF MAKSURA FINAL FORM +FC84 ; mapped ; 0643 064A # 1.1 ARABIC LIGATURE KAF WITH YEH FINAL FORM +FC85 ; mapped ; 0644 0645 # 1.1 ARABIC LIGATURE LAM WITH MEEM FINAL FORM +FC86 ; mapped ; 0644 0649 # 1.1 ARABIC LIGATURE LAM WITH ALEF MAKSURA FINAL FORM +FC87 ; mapped ; 0644 064A # 1.1 ARABIC LIGATURE LAM WITH YEH FINAL FORM +FC88 ; mapped ; 0645 0627 # 1.1 ARABIC LIGATURE MEEM WITH ALEF FINAL FORM +FC89 ; mapped ; 0645 0645 # 1.1 ARABIC LIGATURE MEEM WITH MEEM FINAL FORM +FC8A ; mapped ; 0646 0631 # 1.1 ARABIC LIGATURE NOON WITH REH FINAL FORM +FC8B ; mapped ; 0646 0632 # 1.1 ARABIC LIGATURE NOON WITH ZAIN FINAL FORM +FC8C ; mapped ; 0646 0645 # 1.1 ARABIC LIGATURE NOON WITH MEEM FINAL FORM +FC8D ; mapped ; 0646 0646 # 1.1 ARABIC LIGATURE NOON WITH NOON FINAL FORM +FC8E ; mapped ; 0646 0649 # 1.1 ARABIC LIGATURE NOON WITH ALEF MAKSURA FINAL FORM +FC8F ; mapped ; 0646 064A # 1.1 ARABIC LIGATURE NOON WITH YEH FINAL FORM +FC90 ; mapped ; 0649 0670 # 1.1 ARABIC LIGATURE ALEF MAKSURA WITH SUPERSCRIPT ALEF FINAL FORM +FC91 ; mapped ; 064A 0631 # 1.1 ARABIC LIGATURE YEH WITH 
REH FINAL FORM +FC92 ; mapped ; 064A 0632 # 1.1 ARABIC LIGATURE YEH WITH ZAIN FINAL FORM +FC93 ; mapped ; 064A 0645 # 1.1 ARABIC LIGATURE YEH WITH MEEM FINAL FORM +FC94 ; mapped ; 064A 0646 # 1.1 ARABIC LIGATURE YEH WITH NOON FINAL FORM +FC95 ; mapped ; 064A 0649 # 1.1 ARABIC LIGATURE YEH WITH ALEF MAKSURA FINAL FORM +FC96 ; mapped ; 064A 064A # 1.1 ARABIC LIGATURE YEH WITH YEH FINAL FORM +FC97 ; mapped ; 0626 062C # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH JEEM INITIAL FORM +FC98 ; mapped ; 0626 062D # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH HAH INITIAL FORM +FC99 ; mapped ; 0626 062E # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH KHAH INITIAL FORM +FC9A ; mapped ; 0626 0645 # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH MEEM INITIAL FORM +FC9B ; mapped ; 0626 0647 # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH HEH INITIAL FORM +FC9C ; mapped ; 0628 062C # 1.1 ARABIC LIGATURE BEH WITH JEEM INITIAL FORM +FC9D ; mapped ; 0628 062D # 1.1 ARABIC LIGATURE BEH WITH HAH INITIAL FORM +FC9E ; mapped ; 0628 062E # 1.1 ARABIC LIGATURE BEH WITH KHAH INITIAL FORM +FC9F ; mapped ; 0628 0645 # 1.1 ARABIC LIGATURE BEH WITH MEEM INITIAL FORM +FCA0 ; mapped ; 0628 0647 # 1.1 ARABIC LIGATURE BEH WITH HEH INITIAL FORM +FCA1 ; mapped ; 062A 062C # 1.1 ARABIC LIGATURE TEH WITH JEEM INITIAL FORM +FCA2 ; mapped ; 062A 062D # 1.1 ARABIC LIGATURE TEH WITH HAH INITIAL FORM +FCA3 ; mapped ; 062A 062E # 1.1 ARABIC LIGATURE TEH WITH KHAH INITIAL FORM +FCA4 ; mapped ; 062A 0645 # 1.1 ARABIC LIGATURE TEH WITH MEEM INITIAL FORM +FCA5 ; mapped ; 062A 0647 # 1.1 ARABIC LIGATURE TEH WITH HEH INITIAL FORM +FCA6 ; mapped ; 062B 0645 # 1.1 ARABIC LIGATURE THEH WITH MEEM INITIAL FORM +FCA7 ; mapped ; 062C 062D # 1.1 ARABIC LIGATURE JEEM WITH HAH INITIAL FORM +FCA8 ; mapped ; 062C 0645 # 1.1 ARABIC LIGATURE JEEM WITH MEEM INITIAL FORM +FCA9 ; mapped ; 062D 062C # 1.1 ARABIC LIGATURE HAH WITH JEEM INITIAL FORM +FCAA ; mapped ; 062D 0645 # 1.1 ARABIC LIGATURE HAH WITH MEEM INITIAL FORM +FCAB ; mapped ; 062E 062C # 1.1 ARABIC LIGATURE KHAH WITH JEEM INITIAL FORM +FCAC ; mapped ; 062E 0645 # 1.1 ARABIC LIGATURE KHAH WITH MEEM INITIAL FORM +FCAD ; mapped ; 0633 062C # 1.1 ARABIC LIGATURE SEEN WITH JEEM INITIAL FORM +FCAE ; mapped ; 0633 062D # 1.1 ARABIC LIGATURE SEEN WITH HAH INITIAL FORM +FCAF ; mapped ; 0633 062E # 1.1 ARABIC LIGATURE SEEN WITH KHAH INITIAL FORM +FCB0 ; mapped ; 0633 0645 # 1.1 ARABIC LIGATURE SEEN WITH MEEM INITIAL FORM +FCB1 ; mapped ; 0635 062D # 1.1 ARABIC LIGATURE SAD WITH HAH INITIAL FORM +FCB2 ; mapped ; 0635 062E # 1.1 ARABIC LIGATURE SAD WITH KHAH INITIAL FORM +FCB3 ; mapped ; 0635 0645 # 1.1 ARABIC LIGATURE SAD WITH MEEM INITIAL FORM +FCB4 ; mapped ; 0636 062C # 1.1 ARABIC LIGATURE DAD WITH JEEM INITIAL FORM +FCB5 ; mapped ; 0636 062D # 1.1 ARABIC LIGATURE DAD WITH HAH INITIAL FORM +FCB6 ; mapped ; 0636 062E # 1.1 ARABIC LIGATURE DAD WITH KHAH INITIAL FORM +FCB7 ; mapped ; 0636 0645 # 1.1 ARABIC LIGATURE DAD WITH MEEM INITIAL FORM +FCB8 ; mapped ; 0637 062D # 1.1 ARABIC LIGATURE TAH WITH HAH INITIAL FORM +FCB9 ; mapped ; 0638 0645 # 1.1 ARABIC LIGATURE ZAH WITH MEEM INITIAL FORM +FCBA ; mapped ; 0639 062C # 1.1 ARABIC LIGATURE AIN WITH JEEM INITIAL FORM +FCBB ; mapped ; 0639 0645 # 1.1 ARABIC LIGATURE AIN WITH MEEM INITIAL FORM +FCBC ; mapped ; 063A 062C # 1.1 ARABIC LIGATURE GHAIN WITH JEEM INITIAL FORM +FCBD ; mapped ; 063A 0645 # 1.1 ARABIC LIGATURE GHAIN WITH MEEM INITIAL FORM +FCBE ; mapped ; 0641 062C # 1.1 ARABIC LIGATURE FEH WITH JEEM INITIAL FORM +FCBF ; mapped ; 0641 062D # 1.1 
ARABIC LIGATURE FEH WITH HAH INITIAL FORM +FCC0 ; mapped ; 0641 062E # 1.1 ARABIC LIGATURE FEH WITH KHAH INITIAL FORM +FCC1 ; mapped ; 0641 0645 # 1.1 ARABIC LIGATURE FEH WITH MEEM INITIAL FORM +FCC2 ; mapped ; 0642 062D # 1.1 ARABIC LIGATURE QAF WITH HAH INITIAL FORM +FCC3 ; mapped ; 0642 0645 # 1.1 ARABIC LIGATURE QAF WITH MEEM INITIAL FORM +FCC4 ; mapped ; 0643 062C # 1.1 ARABIC LIGATURE KAF WITH JEEM INITIAL FORM +FCC5 ; mapped ; 0643 062D # 1.1 ARABIC LIGATURE KAF WITH HAH INITIAL FORM +FCC6 ; mapped ; 0643 062E # 1.1 ARABIC LIGATURE KAF WITH KHAH INITIAL FORM +FCC7 ; mapped ; 0643 0644 # 1.1 ARABIC LIGATURE KAF WITH LAM INITIAL FORM +FCC8 ; mapped ; 0643 0645 # 1.1 ARABIC LIGATURE KAF WITH MEEM INITIAL FORM +FCC9 ; mapped ; 0644 062C # 1.1 ARABIC LIGATURE LAM WITH JEEM INITIAL FORM +FCCA ; mapped ; 0644 062D # 1.1 ARABIC LIGATURE LAM WITH HAH INITIAL FORM +FCCB ; mapped ; 0644 062E # 1.1 ARABIC LIGATURE LAM WITH KHAH INITIAL FORM +FCCC ; mapped ; 0644 0645 # 1.1 ARABIC LIGATURE LAM WITH MEEM INITIAL FORM +FCCD ; mapped ; 0644 0647 # 1.1 ARABIC LIGATURE LAM WITH HEH INITIAL FORM +FCCE ; mapped ; 0645 062C # 1.1 ARABIC LIGATURE MEEM WITH JEEM INITIAL FORM +FCCF ; mapped ; 0645 062D # 1.1 ARABIC LIGATURE MEEM WITH HAH INITIAL FORM +FCD0 ; mapped ; 0645 062E # 1.1 ARABIC LIGATURE MEEM WITH KHAH INITIAL FORM +FCD1 ; mapped ; 0645 0645 # 1.1 ARABIC LIGATURE MEEM WITH MEEM INITIAL FORM +FCD2 ; mapped ; 0646 062C # 1.1 ARABIC LIGATURE NOON WITH JEEM INITIAL FORM +FCD3 ; mapped ; 0646 062D # 1.1 ARABIC LIGATURE NOON WITH HAH INITIAL FORM +FCD4 ; mapped ; 0646 062E # 1.1 ARABIC LIGATURE NOON WITH KHAH INITIAL FORM +FCD5 ; mapped ; 0646 0645 # 1.1 ARABIC LIGATURE NOON WITH MEEM INITIAL FORM +FCD6 ; mapped ; 0646 0647 # 1.1 ARABIC LIGATURE NOON WITH HEH INITIAL FORM +FCD7 ; mapped ; 0647 062C # 1.1 ARABIC LIGATURE HEH WITH JEEM INITIAL FORM +FCD8 ; mapped ; 0647 0645 # 1.1 ARABIC LIGATURE HEH WITH MEEM INITIAL FORM +FCD9 ; mapped ; 0647 0670 # 1.1 ARABIC LIGATURE HEH WITH SUPERSCRIPT ALEF INITIAL FORM +FCDA ; mapped ; 064A 062C # 1.1 ARABIC LIGATURE YEH WITH JEEM INITIAL FORM +FCDB ; mapped ; 064A 062D # 1.1 ARABIC LIGATURE YEH WITH HAH INITIAL FORM +FCDC ; mapped ; 064A 062E # 1.1 ARABIC LIGATURE YEH WITH KHAH INITIAL FORM +FCDD ; mapped ; 064A 0645 # 1.1 ARABIC LIGATURE YEH WITH MEEM INITIAL FORM +FCDE ; mapped ; 064A 0647 # 1.1 ARABIC LIGATURE YEH WITH HEH INITIAL FORM +FCDF ; mapped ; 0626 0645 # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH MEEM MEDIAL FORM +FCE0 ; mapped ; 0626 0647 # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH HEH MEDIAL FORM +FCE1 ; mapped ; 0628 0645 # 1.1 ARABIC LIGATURE BEH WITH MEEM MEDIAL FORM +FCE2 ; mapped ; 0628 0647 # 1.1 ARABIC LIGATURE BEH WITH HEH MEDIAL FORM +FCE3 ; mapped ; 062A 0645 # 1.1 ARABIC LIGATURE TEH WITH MEEM MEDIAL FORM +FCE4 ; mapped ; 062A 0647 # 1.1 ARABIC LIGATURE TEH WITH HEH MEDIAL FORM +FCE5 ; mapped ; 062B 0645 # 1.1 ARABIC LIGATURE THEH WITH MEEM MEDIAL FORM +FCE6 ; mapped ; 062B 0647 # 1.1 ARABIC LIGATURE THEH WITH HEH MEDIAL FORM +FCE7 ; mapped ; 0633 0645 # 1.1 ARABIC LIGATURE SEEN WITH MEEM MEDIAL FORM +FCE8 ; mapped ; 0633 0647 # 1.1 ARABIC LIGATURE SEEN WITH HEH MEDIAL FORM +FCE9 ; mapped ; 0634 0645 # 1.1 ARABIC LIGATURE SHEEN WITH MEEM MEDIAL FORM +FCEA ; mapped ; 0634 0647 # 1.1 ARABIC LIGATURE SHEEN WITH HEH MEDIAL FORM +FCEB ; mapped ; 0643 0644 # 1.1 ARABIC LIGATURE KAF WITH LAM MEDIAL FORM +FCEC ; mapped ; 0643 0645 # 1.1 ARABIC LIGATURE KAF WITH MEEM MEDIAL FORM +FCED ; mapped ; 0644 0645 # 1.1 ARABIC LIGATURE LAM WITH 
MEEM MEDIAL FORM +FCEE ; mapped ; 0646 0645 # 1.1 ARABIC LIGATURE NOON WITH MEEM MEDIAL FORM +FCEF ; mapped ; 0646 0647 # 1.1 ARABIC LIGATURE NOON WITH HEH MEDIAL FORM +FCF0 ; mapped ; 064A 0645 # 1.1 ARABIC LIGATURE YEH WITH MEEM MEDIAL FORM +FCF1 ; mapped ; 064A 0647 # 1.1 ARABIC LIGATURE YEH WITH HEH MEDIAL FORM +FCF2 ; mapped ; 0640 064E 0651 #1.1 ARABIC LIGATURE SHADDA WITH FATHA MEDIAL FORM +FCF3 ; mapped ; 0640 064F 0651 #1.1 ARABIC LIGATURE SHADDA WITH DAMMA MEDIAL FORM +FCF4 ; mapped ; 0640 0650 0651 #1.1 ARABIC LIGATURE SHADDA WITH KASRA MEDIAL FORM +FCF5 ; mapped ; 0637 0649 # 1.1 ARABIC LIGATURE TAH WITH ALEF MAKSURA ISOLATED FORM +FCF6 ; mapped ; 0637 064A # 1.1 ARABIC LIGATURE TAH WITH YEH ISOLATED FORM +FCF7 ; mapped ; 0639 0649 # 1.1 ARABIC LIGATURE AIN WITH ALEF MAKSURA ISOLATED FORM +FCF8 ; mapped ; 0639 064A # 1.1 ARABIC LIGATURE AIN WITH YEH ISOLATED FORM +FCF9 ; mapped ; 063A 0649 # 1.1 ARABIC LIGATURE GHAIN WITH ALEF MAKSURA ISOLATED FORM +FCFA ; mapped ; 063A 064A # 1.1 ARABIC LIGATURE GHAIN WITH YEH ISOLATED FORM +FCFB ; mapped ; 0633 0649 # 1.1 ARABIC LIGATURE SEEN WITH ALEF MAKSURA ISOLATED FORM +FCFC ; mapped ; 0633 064A # 1.1 ARABIC LIGATURE SEEN WITH YEH ISOLATED FORM +FCFD ; mapped ; 0634 0649 # 1.1 ARABIC LIGATURE SHEEN WITH ALEF MAKSURA ISOLATED FORM +FCFE ; mapped ; 0634 064A # 1.1 ARABIC LIGATURE SHEEN WITH YEH ISOLATED FORM +FCFF ; mapped ; 062D 0649 # 1.1 ARABIC LIGATURE HAH WITH ALEF MAKSURA ISOLATED FORM +FD00 ; mapped ; 062D 064A # 1.1 ARABIC LIGATURE HAH WITH YEH ISOLATED FORM +FD01 ; mapped ; 062C 0649 # 1.1 ARABIC LIGATURE JEEM WITH ALEF MAKSURA ISOLATED FORM +FD02 ; mapped ; 062C 064A # 1.1 ARABIC LIGATURE JEEM WITH YEH ISOLATED FORM +FD03 ; mapped ; 062E 0649 # 1.1 ARABIC LIGATURE KHAH WITH ALEF MAKSURA ISOLATED FORM +FD04 ; mapped ; 062E 064A # 1.1 ARABIC LIGATURE KHAH WITH YEH ISOLATED FORM +FD05 ; mapped ; 0635 0649 # 1.1 ARABIC LIGATURE SAD WITH ALEF MAKSURA ISOLATED FORM +FD06 ; mapped ; 0635 064A # 1.1 ARABIC LIGATURE SAD WITH YEH ISOLATED FORM +FD07 ; mapped ; 0636 0649 # 1.1 ARABIC LIGATURE DAD WITH ALEF MAKSURA ISOLATED FORM +FD08 ; mapped ; 0636 064A # 1.1 ARABIC LIGATURE DAD WITH YEH ISOLATED FORM +FD09 ; mapped ; 0634 062C # 1.1 ARABIC LIGATURE SHEEN WITH JEEM ISOLATED FORM +FD0A ; mapped ; 0634 062D # 1.1 ARABIC LIGATURE SHEEN WITH HAH ISOLATED FORM +FD0B ; mapped ; 0634 062E # 1.1 ARABIC LIGATURE SHEEN WITH KHAH ISOLATED FORM +FD0C ; mapped ; 0634 0645 # 1.1 ARABIC LIGATURE SHEEN WITH MEEM ISOLATED FORM +FD0D ; mapped ; 0634 0631 # 1.1 ARABIC LIGATURE SHEEN WITH REH ISOLATED FORM +FD0E ; mapped ; 0633 0631 # 1.1 ARABIC LIGATURE SEEN WITH REH ISOLATED FORM +FD0F ; mapped ; 0635 0631 # 1.1 ARABIC LIGATURE SAD WITH REH ISOLATED FORM +FD10 ; mapped ; 0636 0631 # 1.1 ARABIC LIGATURE DAD WITH REH ISOLATED FORM +FD11 ; mapped ; 0637 0649 # 1.1 ARABIC LIGATURE TAH WITH ALEF MAKSURA FINAL FORM +FD12 ; mapped ; 0637 064A # 1.1 ARABIC LIGATURE TAH WITH YEH FINAL FORM +FD13 ; mapped ; 0639 0649 # 1.1 ARABIC LIGATURE AIN WITH ALEF MAKSURA FINAL FORM +FD14 ; mapped ; 0639 064A # 1.1 ARABIC LIGATURE AIN WITH YEH FINAL FORM +FD15 ; mapped ; 063A 0649 # 1.1 ARABIC LIGATURE GHAIN WITH ALEF MAKSURA FINAL FORM +FD16 ; mapped ; 063A 064A # 1.1 ARABIC LIGATURE GHAIN WITH YEH FINAL FORM +FD17 ; mapped ; 0633 0649 # 1.1 ARABIC LIGATURE SEEN WITH ALEF MAKSURA FINAL FORM +FD18 ; mapped ; 0633 064A # 1.1 ARABIC LIGATURE SEEN WITH YEH FINAL FORM +FD19 ; mapped ; 0634 0649 # 1.1 ARABIC LIGATURE SHEEN WITH ALEF MAKSURA FINAL FORM +FD1A ; mapped ; 0634 064A # 1.1 
ARABIC LIGATURE SHEEN WITH YEH FINAL FORM +FD1B ; mapped ; 062D 0649 # 1.1 ARABIC LIGATURE HAH WITH ALEF MAKSURA FINAL FORM +FD1C ; mapped ; 062D 064A # 1.1 ARABIC LIGATURE HAH WITH YEH FINAL FORM +FD1D ; mapped ; 062C 0649 # 1.1 ARABIC LIGATURE JEEM WITH ALEF MAKSURA FINAL FORM +FD1E ; mapped ; 062C 064A # 1.1 ARABIC LIGATURE JEEM WITH YEH FINAL FORM +FD1F ; mapped ; 062E 0649 # 1.1 ARABIC LIGATURE KHAH WITH ALEF MAKSURA FINAL FORM +FD20 ; mapped ; 062E 064A # 1.1 ARABIC LIGATURE KHAH WITH YEH FINAL FORM +FD21 ; mapped ; 0635 0649 # 1.1 ARABIC LIGATURE SAD WITH ALEF MAKSURA FINAL FORM +FD22 ; mapped ; 0635 064A # 1.1 ARABIC LIGATURE SAD WITH YEH FINAL FORM +FD23 ; mapped ; 0636 0649 # 1.1 ARABIC LIGATURE DAD WITH ALEF MAKSURA FINAL FORM +FD24 ; mapped ; 0636 064A # 1.1 ARABIC LIGATURE DAD WITH YEH FINAL FORM +FD25 ; mapped ; 0634 062C # 1.1 ARABIC LIGATURE SHEEN WITH JEEM FINAL FORM +FD26 ; mapped ; 0634 062D # 1.1 ARABIC LIGATURE SHEEN WITH HAH FINAL FORM +FD27 ; mapped ; 0634 062E # 1.1 ARABIC LIGATURE SHEEN WITH KHAH FINAL FORM +FD28 ; mapped ; 0634 0645 # 1.1 ARABIC LIGATURE SHEEN WITH MEEM FINAL FORM +FD29 ; mapped ; 0634 0631 # 1.1 ARABIC LIGATURE SHEEN WITH REH FINAL FORM +FD2A ; mapped ; 0633 0631 # 1.1 ARABIC LIGATURE SEEN WITH REH FINAL FORM +FD2B ; mapped ; 0635 0631 # 1.1 ARABIC LIGATURE SAD WITH REH FINAL FORM +FD2C ; mapped ; 0636 0631 # 1.1 ARABIC LIGATURE DAD WITH REH FINAL FORM +FD2D ; mapped ; 0634 062C # 1.1 ARABIC LIGATURE SHEEN WITH JEEM INITIAL FORM +FD2E ; mapped ; 0634 062D # 1.1 ARABIC LIGATURE SHEEN WITH HAH INITIAL FORM +FD2F ; mapped ; 0634 062E # 1.1 ARABIC LIGATURE SHEEN WITH KHAH INITIAL FORM +FD30 ; mapped ; 0634 0645 # 1.1 ARABIC LIGATURE SHEEN WITH MEEM INITIAL FORM +FD31 ; mapped ; 0633 0647 # 1.1 ARABIC LIGATURE SEEN WITH HEH INITIAL FORM +FD32 ; mapped ; 0634 0647 # 1.1 ARABIC LIGATURE SHEEN WITH HEH INITIAL FORM +FD33 ; mapped ; 0637 0645 # 1.1 ARABIC LIGATURE TAH WITH MEEM INITIAL FORM +FD34 ; mapped ; 0633 062C # 1.1 ARABIC LIGATURE SEEN WITH JEEM MEDIAL FORM +FD35 ; mapped ; 0633 062D # 1.1 ARABIC LIGATURE SEEN WITH HAH MEDIAL FORM +FD36 ; mapped ; 0633 062E # 1.1 ARABIC LIGATURE SEEN WITH KHAH MEDIAL FORM +FD37 ; mapped ; 0634 062C # 1.1 ARABIC LIGATURE SHEEN WITH JEEM MEDIAL FORM +FD38 ; mapped ; 0634 062D # 1.1 ARABIC LIGATURE SHEEN WITH HAH MEDIAL FORM +FD39 ; mapped ; 0634 062E # 1.1 ARABIC LIGATURE SHEEN WITH KHAH MEDIAL FORM +FD3A ; mapped ; 0637 0645 # 1.1 ARABIC LIGATURE TAH WITH MEEM MEDIAL FORM +FD3B ; mapped ; 0638 0645 # 1.1 ARABIC LIGATURE ZAH WITH MEEM MEDIAL FORM +FD3C..FD3D ; mapped ; 0627 064B # 1.1 ARABIC LIGATURE ALEF WITH FATHATAN FINAL FORM..ARABIC LIGATURE ALEF WITH FATHATAN ISOLATED FORM +FD3E..FD3F ; valid ; ; NV8 # 1.1 ORNATE LEFT PARENTHESIS..ORNATE RIGHT PARENTHESIS +FD40..FD4F ; disallowed # NA .. 
+FD50 ; mapped ; 062A 062C 0645 #1.1 ARABIC LIGATURE TEH WITH JEEM WITH MEEM INITIAL FORM +FD51..FD52 ; mapped ; 062A 062D 062C #1.1 ARABIC LIGATURE TEH WITH HAH WITH JEEM FINAL FORM..ARABIC LIGATURE TEH WITH HAH WITH JEEM INITIAL FORM +FD53 ; mapped ; 062A 062D 0645 #1.1 ARABIC LIGATURE TEH WITH HAH WITH MEEM INITIAL FORM +FD54 ; mapped ; 062A 062E 0645 #1.1 ARABIC LIGATURE TEH WITH KHAH WITH MEEM INITIAL FORM +FD55 ; mapped ; 062A 0645 062C #1.1 ARABIC LIGATURE TEH WITH MEEM WITH JEEM INITIAL FORM +FD56 ; mapped ; 062A 0645 062D #1.1 ARABIC LIGATURE TEH WITH MEEM WITH HAH INITIAL FORM +FD57 ; mapped ; 062A 0645 062E #1.1 ARABIC LIGATURE TEH WITH MEEM WITH KHAH INITIAL FORM +FD58..FD59 ; mapped ; 062C 0645 062D #1.1 ARABIC LIGATURE JEEM WITH MEEM WITH HAH FINAL FORM..ARABIC LIGATURE JEEM WITH MEEM WITH HAH INITIAL FORM +FD5A ; mapped ; 062D 0645 064A #1.1 ARABIC LIGATURE HAH WITH MEEM WITH YEH FINAL FORM +FD5B ; mapped ; 062D 0645 0649 #1.1 ARABIC LIGATURE HAH WITH MEEM WITH ALEF MAKSURA FINAL FORM +FD5C ; mapped ; 0633 062D 062C #1.1 ARABIC LIGATURE SEEN WITH HAH WITH JEEM INITIAL FORM +FD5D ; mapped ; 0633 062C 062D #1.1 ARABIC LIGATURE SEEN WITH JEEM WITH HAH INITIAL FORM +FD5E ; mapped ; 0633 062C 0649 #1.1 ARABIC LIGATURE SEEN WITH JEEM WITH ALEF MAKSURA FINAL FORM +FD5F..FD60 ; mapped ; 0633 0645 062D #1.1 ARABIC LIGATURE SEEN WITH MEEM WITH HAH FINAL FORM..ARABIC LIGATURE SEEN WITH MEEM WITH HAH INITIAL FORM +FD61 ; mapped ; 0633 0645 062C #1.1 ARABIC LIGATURE SEEN WITH MEEM WITH JEEM INITIAL FORM +FD62..FD63 ; mapped ; 0633 0645 0645 #1.1 ARABIC LIGATURE SEEN WITH MEEM WITH MEEM FINAL FORM..ARABIC LIGATURE SEEN WITH MEEM WITH MEEM INITIAL FORM +FD64..FD65 ; mapped ; 0635 062D 062D #1.1 ARABIC LIGATURE SAD WITH HAH WITH HAH FINAL FORM..ARABIC LIGATURE SAD WITH HAH WITH HAH INITIAL FORM +FD66 ; mapped ; 0635 0645 0645 #1.1 ARABIC LIGATURE SAD WITH MEEM WITH MEEM FINAL FORM +FD67..FD68 ; mapped ; 0634 062D 0645 #1.1 ARABIC LIGATURE SHEEN WITH HAH WITH MEEM FINAL FORM..ARABIC LIGATURE SHEEN WITH HAH WITH MEEM INITIAL FORM +FD69 ; mapped ; 0634 062C 064A #1.1 ARABIC LIGATURE SHEEN WITH JEEM WITH YEH FINAL FORM +FD6A..FD6B ; mapped ; 0634 0645 062E #1.1 ARABIC LIGATURE SHEEN WITH MEEM WITH KHAH FINAL FORM..ARABIC LIGATURE SHEEN WITH MEEM WITH KHAH INITIAL FORM +FD6C..FD6D ; mapped ; 0634 0645 0645 #1.1 ARABIC LIGATURE SHEEN WITH MEEM WITH MEEM FINAL FORM..ARABIC LIGATURE SHEEN WITH MEEM WITH MEEM INITIAL FORM +FD6E ; mapped ; 0636 062D 0649 #1.1 ARABIC LIGATURE DAD WITH HAH WITH ALEF MAKSURA FINAL FORM +FD6F..FD70 ; mapped ; 0636 062E 0645 #1.1 ARABIC LIGATURE DAD WITH KHAH WITH MEEM FINAL FORM..ARABIC LIGATURE DAD WITH KHAH WITH MEEM INITIAL FORM +FD71..FD72 ; mapped ; 0637 0645 062D #1.1 ARABIC LIGATURE TAH WITH MEEM WITH HAH FINAL FORM..ARABIC LIGATURE TAH WITH MEEM WITH HAH INITIAL FORM +FD73 ; mapped ; 0637 0645 0645 #1.1 ARABIC LIGATURE TAH WITH MEEM WITH MEEM INITIAL FORM +FD74 ; mapped ; 0637 0645 064A #1.1 ARABIC LIGATURE TAH WITH MEEM WITH YEH FINAL FORM +FD75 ; mapped ; 0639 062C 0645 #1.1 ARABIC LIGATURE AIN WITH JEEM WITH MEEM FINAL FORM +FD76..FD77 ; mapped ; 0639 0645 0645 #1.1 ARABIC LIGATURE AIN WITH MEEM WITH MEEM FINAL FORM..ARABIC LIGATURE AIN WITH MEEM WITH MEEM INITIAL FORM +FD78 ; mapped ; 0639 0645 0649 #1.1 ARABIC LIGATURE AIN WITH MEEM WITH ALEF MAKSURA FINAL FORM +FD79 ; mapped ; 063A 0645 0645 #1.1 ARABIC LIGATURE GHAIN WITH MEEM WITH MEEM FINAL FORM +FD7A ; mapped ; 063A 0645 064A #1.1 ARABIC LIGATURE GHAIN WITH MEEM WITH YEH FINAL FORM +FD7B ; mapped ; 063A 
0645 0649 #1.1 ARABIC LIGATURE GHAIN WITH MEEM WITH ALEF MAKSURA FINAL FORM +FD7C..FD7D ; mapped ; 0641 062E 0645 #1.1 ARABIC LIGATURE FEH WITH KHAH WITH MEEM FINAL FORM..ARABIC LIGATURE FEH WITH KHAH WITH MEEM INITIAL FORM +FD7E ; mapped ; 0642 0645 062D #1.1 ARABIC LIGATURE QAF WITH MEEM WITH HAH FINAL FORM +FD7F ; mapped ; 0642 0645 0645 #1.1 ARABIC LIGATURE QAF WITH MEEM WITH MEEM FINAL FORM +FD80 ; mapped ; 0644 062D 0645 #1.1 ARABIC LIGATURE LAM WITH HAH WITH MEEM FINAL FORM +FD81 ; mapped ; 0644 062D 064A #1.1 ARABIC LIGATURE LAM WITH HAH WITH YEH FINAL FORM +FD82 ; mapped ; 0644 062D 0649 #1.1 ARABIC LIGATURE LAM WITH HAH WITH ALEF MAKSURA FINAL FORM +FD83..FD84 ; mapped ; 0644 062C 062C #1.1 ARABIC LIGATURE LAM WITH JEEM WITH JEEM INITIAL FORM..ARABIC LIGATURE LAM WITH JEEM WITH JEEM FINAL FORM +FD85..FD86 ; mapped ; 0644 062E 0645 #1.1 ARABIC LIGATURE LAM WITH KHAH WITH MEEM FINAL FORM..ARABIC LIGATURE LAM WITH KHAH WITH MEEM INITIAL FORM +FD87..FD88 ; mapped ; 0644 0645 062D #1.1 ARABIC LIGATURE LAM WITH MEEM WITH HAH FINAL FORM..ARABIC LIGATURE LAM WITH MEEM WITH HAH INITIAL FORM +FD89 ; mapped ; 0645 062D 062C #1.1 ARABIC LIGATURE MEEM WITH HAH WITH JEEM INITIAL FORM +FD8A ; mapped ; 0645 062D 0645 #1.1 ARABIC LIGATURE MEEM WITH HAH WITH MEEM INITIAL FORM +FD8B ; mapped ; 0645 062D 064A #1.1 ARABIC LIGATURE MEEM WITH HAH WITH YEH FINAL FORM +FD8C ; mapped ; 0645 062C 062D #1.1 ARABIC LIGATURE MEEM WITH JEEM WITH HAH INITIAL FORM +FD8D ; mapped ; 0645 062C 0645 #1.1 ARABIC LIGATURE MEEM WITH JEEM WITH MEEM INITIAL FORM +FD8E ; mapped ; 0645 062E 062C #1.1 ARABIC LIGATURE MEEM WITH KHAH WITH JEEM INITIAL FORM +FD8F ; mapped ; 0645 062E 0645 #1.1 ARABIC LIGATURE MEEM WITH KHAH WITH MEEM INITIAL FORM +FD90..FD91 ; disallowed # NA .. +FD92 ; mapped ; 0645 062C 062E #1.1 ARABIC LIGATURE MEEM WITH JEEM WITH KHAH INITIAL FORM +FD93 ; mapped ; 0647 0645 062C #1.1 ARABIC LIGATURE HEH WITH MEEM WITH JEEM INITIAL FORM +FD94 ; mapped ; 0647 0645 0645 #1.1 ARABIC LIGATURE HEH WITH MEEM WITH MEEM INITIAL FORM +FD95 ; mapped ; 0646 062D 0645 #1.1 ARABIC LIGATURE NOON WITH HAH WITH MEEM INITIAL FORM +FD96 ; mapped ; 0646 062D 0649 #1.1 ARABIC LIGATURE NOON WITH HAH WITH ALEF MAKSURA FINAL FORM +FD97..FD98 ; mapped ; 0646 062C 0645 #1.1 ARABIC LIGATURE NOON WITH JEEM WITH MEEM FINAL FORM..ARABIC LIGATURE NOON WITH JEEM WITH MEEM INITIAL FORM +FD99 ; mapped ; 0646 062C 0649 #1.1 ARABIC LIGATURE NOON WITH JEEM WITH ALEF MAKSURA FINAL FORM +FD9A ; mapped ; 0646 0645 064A #1.1 ARABIC LIGATURE NOON WITH MEEM WITH YEH FINAL FORM +FD9B ; mapped ; 0646 0645 0649 #1.1 ARABIC LIGATURE NOON WITH MEEM WITH ALEF MAKSURA FINAL FORM +FD9C..FD9D ; mapped ; 064A 0645 0645 #1.1 ARABIC LIGATURE YEH WITH MEEM WITH MEEM FINAL FORM..ARABIC LIGATURE YEH WITH MEEM WITH MEEM INITIAL FORM +FD9E ; mapped ; 0628 062E 064A #1.1 ARABIC LIGATURE BEH WITH KHAH WITH YEH FINAL FORM +FD9F ; mapped ; 062A 062C 064A #1.1 ARABIC LIGATURE TEH WITH JEEM WITH YEH FINAL FORM +FDA0 ; mapped ; 062A 062C 0649 #1.1 ARABIC LIGATURE TEH WITH JEEM WITH ALEF MAKSURA FINAL FORM +FDA1 ; mapped ; 062A 062E 064A #1.1 ARABIC LIGATURE TEH WITH KHAH WITH YEH FINAL FORM +FDA2 ; mapped ; 062A 062E 0649 #1.1 ARABIC LIGATURE TEH WITH KHAH WITH ALEF MAKSURA FINAL FORM +FDA3 ; mapped ; 062A 0645 064A #1.1 ARABIC LIGATURE TEH WITH MEEM WITH YEH FINAL FORM +FDA4 ; mapped ; 062A 0645 0649 #1.1 ARABIC LIGATURE TEH WITH MEEM WITH ALEF MAKSURA FINAL FORM +FDA5 ; mapped ; 062C 0645 064A #1.1 ARABIC LIGATURE JEEM WITH MEEM WITH YEH FINAL FORM +FDA6 ; mapped ; 062C 
062D 0649 #1.1 ARABIC LIGATURE JEEM WITH HAH WITH ALEF MAKSURA FINAL FORM +FDA7 ; mapped ; 062C 0645 0649 #1.1 ARABIC LIGATURE JEEM WITH MEEM WITH ALEF MAKSURA FINAL FORM +FDA8 ; mapped ; 0633 062E 0649 #1.1 ARABIC LIGATURE SEEN WITH KHAH WITH ALEF MAKSURA FINAL FORM +FDA9 ; mapped ; 0635 062D 064A #1.1 ARABIC LIGATURE SAD WITH HAH WITH YEH FINAL FORM +FDAA ; mapped ; 0634 062D 064A #1.1 ARABIC LIGATURE SHEEN WITH HAH WITH YEH FINAL FORM +FDAB ; mapped ; 0636 062D 064A #1.1 ARABIC LIGATURE DAD WITH HAH WITH YEH FINAL FORM +FDAC ; mapped ; 0644 062C 064A #1.1 ARABIC LIGATURE LAM WITH JEEM WITH YEH FINAL FORM +FDAD ; mapped ; 0644 0645 064A #1.1 ARABIC LIGATURE LAM WITH MEEM WITH YEH FINAL FORM +FDAE ; mapped ; 064A 062D 064A #1.1 ARABIC LIGATURE YEH WITH HAH WITH YEH FINAL FORM +FDAF ; mapped ; 064A 062C 064A #1.1 ARABIC LIGATURE YEH WITH JEEM WITH YEH FINAL FORM +FDB0 ; mapped ; 064A 0645 064A #1.1 ARABIC LIGATURE YEH WITH MEEM WITH YEH FINAL FORM +FDB1 ; mapped ; 0645 0645 064A #1.1 ARABIC LIGATURE MEEM WITH MEEM WITH YEH FINAL FORM +FDB2 ; mapped ; 0642 0645 064A #1.1 ARABIC LIGATURE QAF WITH MEEM WITH YEH FINAL FORM +FDB3 ; mapped ; 0646 062D 064A #1.1 ARABIC LIGATURE NOON WITH HAH WITH YEH FINAL FORM +FDB4 ; mapped ; 0642 0645 062D #1.1 ARABIC LIGATURE QAF WITH MEEM WITH HAH INITIAL FORM +FDB5 ; mapped ; 0644 062D 0645 #1.1 ARABIC LIGATURE LAM WITH HAH WITH MEEM INITIAL FORM +FDB6 ; mapped ; 0639 0645 064A #1.1 ARABIC LIGATURE AIN WITH MEEM WITH YEH FINAL FORM +FDB7 ; mapped ; 0643 0645 064A #1.1 ARABIC LIGATURE KAF WITH MEEM WITH YEH FINAL FORM +FDB8 ; mapped ; 0646 062C 062D #1.1 ARABIC LIGATURE NOON WITH JEEM WITH HAH INITIAL FORM +FDB9 ; mapped ; 0645 062E 064A #1.1 ARABIC LIGATURE MEEM WITH KHAH WITH YEH FINAL FORM +FDBA ; mapped ; 0644 062C 0645 #1.1 ARABIC LIGATURE LAM WITH JEEM WITH MEEM INITIAL FORM +FDBB ; mapped ; 0643 0645 0645 #1.1 ARABIC LIGATURE KAF WITH MEEM WITH MEEM FINAL FORM +FDBC ; mapped ; 0644 062C 0645 #1.1 ARABIC LIGATURE LAM WITH JEEM WITH MEEM FINAL FORM +FDBD ; mapped ; 0646 062C 062D #1.1 ARABIC LIGATURE NOON WITH JEEM WITH HAH FINAL FORM +FDBE ; mapped ; 062C 062D 064A #1.1 ARABIC LIGATURE JEEM WITH HAH WITH YEH FINAL FORM +FDBF ; mapped ; 062D 062C 064A #1.1 ARABIC LIGATURE HAH WITH JEEM WITH YEH FINAL FORM +FDC0 ; mapped ; 0645 062C 064A #1.1 ARABIC LIGATURE MEEM WITH JEEM WITH YEH FINAL FORM +FDC1 ; mapped ; 0641 0645 064A #1.1 ARABIC LIGATURE FEH WITH MEEM WITH YEH FINAL FORM +FDC2 ; mapped ; 0628 062D 064A #1.1 ARABIC LIGATURE BEH WITH HAH WITH YEH FINAL FORM +FDC3 ; mapped ; 0643 0645 0645 #1.1 ARABIC LIGATURE KAF WITH MEEM WITH MEEM INITIAL FORM +FDC4 ; mapped ; 0639 062C 0645 #1.1 ARABIC LIGATURE AIN WITH JEEM WITH MEEM INITIAL FORM +FDC5 ; mapped ; 0635 0645 0645 #1.1 ARABIC LIGATURE SAD WITH MEEM WITH MEEM INITIAL FORM +FDC6 ; mapped ; 0633 062E 064A #1.1 ARABIC LIGATURE SEEN WITH KHAH WITH YEH FINAL FORM +FDC7 ; mapped ; 0646 062C 064A #1.1 ARABIC LIGATURE NOON WITH JEEM WITH YEH FINAL FORM +FDC8..FDCF ; disallowed # NA .. +FDD0..FDEF ; disallowed # 3.1 .. 
+FDF0 ; mapped ; 0635 0644 06D2 #1.1 ARABIC LIGATURE SALLA USED AS KORANIC STOP SIGN ISOLATED FORM +FDF1 ; mapped ; 0642 0644 06D2 #1.1 ARABIC LIGATURE QALA USED AS KORANIC STOP SIGN ISOLATED FORM +FDF2 ; mapped ; 0627 0644 0644 0647 #1.1 ARABIC LIGATURE ALLAH ISOLATED FORM +FDF3 ; mapped ; 0627 0643 0628 0631 #1.1 ARABIC LIGATURE AKBAR ISOLATED FORM +FDF4 ; mapped ; 0645 062D 0645 062F #1.1 ARABIC LIGATURE MOHAMMAD ISOLATED FORM +FDF5 ; mapped ; 0635 0644 0639 0645 #1.1 ARABIC LIGATURE SALAM ISOLATED FORM +FDF6 ; mapped ; 0631 0633 0648 0644 #1.1 ARABIC LIGATURE RASOUL ISOLATED FORM +FDF7 ; mapped ; 0639 0644 064A 0647 #1.1 ARABIC LIGATURE ALAYHE ISOLATED FORM +FDF8 ; mapped ; 0648 0633 0644 0645 #1.1 ARABIC LIGATURE WASALLAM ISOLATED FORM +FDF9 ; mapped ; 0635 0644 0649 #1.1 ARABIC LIGATURE SALLA ISOLATED FORM +FDFA ; disallowed_STD3_mapped ; 0635 0644 0649 0020 0627 0644 0644 0647 0020 0639 0644 064A 0647 0020 0648 0633 0644 0645 #1.1 ARABIC LIGATURE SALLALLAHOU ALAYHE WASALLAM +FDFB ; disallowed_STD3_mapped ; 062C 0644 0020 062C 0644 0627 0644 0647 #1.1 ARABIC LIGATURE JALLAJALALOUHOU +FDFC ; mapped ; 0631 06CC 0627 0644 #3.2 RIAL SIGN +FDFD ; valid ; ; NV8 # 4.0 ARABIC LIGATURE BISMILLAH AR-RAHMAN AR-RAHEEM +FDFE..FDFF ; disallowed # NA .. +FE00..FE0F ; ignored # 3.2 VARIATION SELECTOR-1..VARIATION SELECTOR-16 +FE10 ; disallowed_STD3_mapped ; 002C # 4.1 PRESENTATION FORM FOR VERTICAL COMMA +FE11 ; mapped ; 3001 # 4.1 PRESENTATION FORM FOR VERTICAL IDEOGRAPHIC COMMA +FE12 ; disallowed # 4.1 PRESENTATION FORM FOR VERTICAL IDEOGRAPHIC FULL STOP +FE13 ; disallowed_STD3_mapped ; 003A # 4.1 PRESENTATION FORM FOR VERTICAL COLON +FE14 ; disallowed_STD3_mapped ; 003B # 4.1 PRESENTATION FORM FOR VERTICAL SEMICOLON +FE15 ; disallowed_STD3_mapped ; 0021 # 4.1 PRESENTATION FORM FOR VERTICAL EXCLAMATION MARK +FE16 ; disallowed_STD3_mapped ; 003F # 4.1 PRESENTATION FORM FOR VERTICAL QUESTION MARK +FE17 ; mapped ; 3016 # 4.1 PRESENTATION FORM FOR VERTICAL LEFT WHITE LENTICULAR BRACKET +FE18 ; mapped ; 3017 # 4.1 PRESENTATION FORM FOR VERTICAL RIGHT WHITE LENTICULAR BRAKCET +FE19 ; disallowed # 4.1 PRESENTATION FORM FOR VERTICAL HORIZONTAL ELLIPSIS +FE1A..FE1F ; disallowed # NA .. 
+FE20..FE23 ; valid # 1.1 COMBINING LIGATURE LEFT HALF..COMBINING DOUBLE TILDE RIGHT HALF +FE24..FE26 ; valid # 5.1 COMBINING MACRON LEFT HALF..COMBINING CONJOINING MACRON +FE27..FE2D ; valid # 7.0 COMBINING LIGATURE LEFT HALF BELOW..COMBINING CONJOINING MACRON BELOW +FE2E..FE2F ; valid # 8.0 COMBINING CYRILLIC TITLO LEFT HALF..COMBINING CYRILLIC TITLO RIGHT HALF +FE30 ; disallowed # 1.1 PRESENTATION FORM FOR VERTICAL TWO DOT LEADER +FE31 ; mapped ; 2014 # 1.1 PRESENTATION FORM FOR VERTICAL EM DASH +FE32 ; mapped ; 2013 # 1.1 PRESENTATION FORM FOR VERTICAL EN DASH +FE33..FE34 ; disallowed_STD3_mapped ; 005F # 1.1 PRESENTATION FORM FOR VERTICAL LOW LINE..PRESENTATION FORM FOR VERTICAL WAVY LOW LINE +FE35 ; disallowed_STD3_mapped ; 0028 # 1.1 PRESENTATION FORM FOR VERTICAL LEFT PARENTHESIS +FE36 ; disallowed_STD3_mapped ; 0029 # 1.1 PRESENTATION FORM FOR VERTICAL RIGHT PARENTHESIS +FE37 ; disallowed_STD3_mapped ; 007B # 1.1 PRESENTATION FORM FOR VERTICAL LEFT CURLY BRACKET +FE38 ; disallowed_STD3_mapped ; 007D # 1.1 PRESENTATION FORM FOR VERTICAL RIGHT CURLY BRACKET +FE39 ; mapped ; 3014 # 1.1 PRESENTATION FORM FOR VERTICAL LEFT TORTOISE SHELL BRACKET +FE3A ; mapped ; 3015 # 1.1 PRESENTATION FORM FOR VERTICAL RIGHT TORTOISE SHELL BRACKET +FE3B ; mapped ; 3010 # 1.1 PRESENTATION FORM FOR VERTICAL LEFT BLACK LENTICULAR BRACKET +FE3C ; mapped ; 3011 # 1.1 PRESENTATION FORM FOR VERTICAL RIGHT BLACK LENTICULAR BRACKET +FE3D ; mapped ; 300A # 1.1 PRESENTATION FORM FOR VERTICAL LEFT DOUBLE ANGLE BRACKET +FE3E ; mapped ; 300B # 1.1 PRESENTATION FORM FOR VERTICAL RIGHT DOUBLE ANGLE BRACKET +FE3F ; mapped ; 3008 # 1.1 PRESENTATION FORM FOR VERTICAL LEFT ANGLE BRACKET +FE40 ; mapped ; 3009 # 1.1 PRESENTATION FORM FOR VERTICAL RIGHT ANGLE BRACKET +FE41 ; mapped ; 300C # 1.1 PRESENTATION FORM FOR VERTICAL LEFT CORNER BRACKET +FE42 ; mapped ; 300D # 1.1 PRESENTATION FORM FOR VERTICAL RIGHT CORNER BRACKET +FE43 ; mapped ; 300E # 1.1 PRESENTATION FORM FOR VERTICAL LEFT WHITE CORNER BRACKET +FE44 ; mapped ; 300F # 1.1 PRESENTATION FORM FOR VERTICAL RIGHT WHITE CORNER BRACKET +FE45..FE46 ; valid ; ; NV8 # 3.2 SESAME DOT..WHITE SESAME DOT +FE47 ; disallowed_STD3_mapped ; 005B # 4.0 PRESENTATION FORM FOR VERTICAL LEFT SQUARE BRACKET +FE48 ; disallowed_STD3_mapped ; 005D # 4.0 PRESENTATION FORM FOR VERTICAL RIGHT SQUARE BRACKET +FE49..FE4C ; disallowed_STD3_mapped ; 0020 0305 # 1.1 DASHED OVERLINE..DOUBLE WAVY OVERLINE +FE4D..FE4F ; disallowed_STD3_mapped ; 005F # 1.1 DASHED LOW LINE..WAVY LOW LINE +FE50 ; disallowed_STD3_mapped ; 002C # 1.1 SMALL COMMA +FE51 ; mapped ; 3001 # 1.1 SMALL IDEOGRAPHIC COMMA +FE52 ; disallowed # 1.1 SMALL FULL STOP +FE53 ; disallowed # NA +FE54 ; disallowed_STD3_mapped ; 003B # 1.1 SMALL SEMICOLON +FE55 ; disallowed_STD3_mapped ; 003A # 1.1 SMALL COLON +FE56 ; disallowed_STD3_mapped ; 003F # 1.1 SMALL QUESTION MARK +FE57 ; disallowed_STD3_mapped ; 0021 # 1.1 SMALL EXCLAMATION MARK +FE58 ; mapped ; 2014 # 1.1 SMALL EM DASH +FE59 ; disallowed_STD3_mapped ; 0028 # 1.1 SMALL LEFT PARENTHESIS +FE5A ; disallowed_STD3_mapped ; 0029 # 1.1 SMALL RIGHT PARENTHESIS +FE5B ; disallowed_STD3_mapped ; 007B # 1.1 SMALL LEFT CURLY BRACKET +FE5C ; disallowed_STD3_mapped ; 007D # 1.1 SMALL RIGHT CURLY BRACKET +FE5D ; mapped ; 3014 # 1.1 SMALL LEFT TORTOISE SHELL BRACKET +FE5E ; mapped ; 3015 # 1.1 SMALL RIGHT TORTOISE SHELL BRACKET +FE5F ; disallowed_STD3_mapped ; 0023 # 1.1 SMALL NUMBER SIGN +FE60 ; disallowed_STD3_mapped ; 0026 # 1.1 SMALL AMPERSAND +FE61 ; disallowed_STD3_mapped ; 002A # 1.1 SMALL 
ASTERISK +FE62 ; disallowed_STD3_mapped ; 002B # 1.1 SMALL PLUS SIGN +FE63 ; mapped ; 002D # 1.1 SMALL HYPHEN-MINUS +FE64 ; disallowed_STD3_mapped ; 003C # 1.1 SMALL LESS-THAN SIGN +FE65 ; disallowed_STD3_mapped ; 003E # 1.1 SMALL GREATER-THAN SIGN +FE66 ; disallowed_STD3_mapped ; 003D # 1.1 SMALL EQUALS SIGN +FE67 ; disallowed # NA +FE68 ; disallowed_STD3_mapped ; 005C # 1.1 SMALL REVERSE SOLIDUS +FE69 ; disallowed_STD3_mapped ; 0024 # 1.1 SMALL DOLLAR SIGN +FE6A ; disallowed_STD3_mapped ; 0025 # 1.1 SMALL PERCENT SIGN +FE6B ; disallowed_STD3_mapped ; 0040 # 1.1 SMALL COMMERCIAL AT +FE6C..FE6F ; disallowed # NA .. +FE70 ; disallowed_STD3_mapped ; 0020 064B # 1.1 ARABIC FATHATAN ISOLATED FORM +FE71 ; mapped ; 0640 064B # 1.1 ARABIC TATWEEL WITH FATHATAN ABOVE +FE72 ; disallowed_STD3_mapped ; 0020 064C # 1.1 ARABIC DAMMATAN ISOLATED FORM +FE73 ; valid # 3.2 ARABIC TAIL FRAGMENT +FE74 ; disallowed_STD3_mapped ; 0020 064D # 1.1 ARABIC KASRATAN ISOLATED FORM +FE75 ; disallowed # NA +FE76 ; disallowed_STD3_mapped ; 0020 064E # 1.1 ARABIC FATHA ISOLATED FORM +FE77 ; mapped ; 0640 064E # 1.1 ARABIC FATHA MEDIAL FORM +FE78 ; disallowed_STD3_mapped ; 0020 064F # 1.1 ARABIC DAMMA ISOLATED FORM +FE79 ; mapped ; 0640 064F # 1.1 ARABIC DAMMA MEDIAL FORM +FE7A ; disallowed_STD3_mapped ; 0020 0650 # 1.1 ARABIC KASRA ISOLATED FORM +FE7B ; mapped ; 0640 0650 # 1.1 ARABIC KASRA MEDIAL FORM +FE7C ; disallowed_STD3_mapped ; 0020 0651 # 1.1 ARABIC SHADDA ISOLATED FORM +FE7D ; mapped ; 0640 0651 # 1.1 ARABIC SHADDA MEDIAL FORM +FE7E ; disallowed_STD3_mapped ; 0020 0652 # 1.1 ARABIC SUKUN ISOLATED FORM +FE7F ; mapped ; 0640 0652 # 1.1 ARABIC SUKUN MEDIAL FORM +FE80 ; mapped ; 0621 # 1.1 ARABIC LETTER HAMZA ISOLATED FORM +FE81..FE82 ; mapped ; 0622 # 1.1 ARABIC LETTER ALEF WITH MADDA ABOVE ISOLATED FORM..ARABIC LETTER ALEF WITH MADDA ABOVE FINAL FORM +FE83..FE84 ; mapped ; 0623 # 1.1 ARABIC LETTER ALEF WITH HAMZA ABOVE ISOLATED FORM..ARABIC LETTER ALEF WITH HAMZA ABOVE FINAL FORM +FE85..FE86 ; mapped ; 0624 # 1.1 ARABIC LETTER WAW WITH HAMZA ABOVE ISOLATED FORM..ARABIC LETTER WAW WITH HAMZA ABOVE FINAL FORM +FE87..FE88 ; mapped ; 0625 # 1.1 ARABIC LETTER ALEF WITH HAMZA BELOW ISOLATED FORM..ARABIC LETTER ALEF WITH HAMZA BELOW FINAL FORM +FE89..FE8C ; mapped ; 0626 # 1.1 ARABIC LETTER YEH WITH HAMZA ABOVE ISOLATED FORM..ARABIC LETTER YEH WITH HAMZA ABOVE MEDIAL FORM +FE8D..FE8E ; mapped ; 0627 # 1.1 ARABIC LETTER ALEF ISOLATED FORM..ARABIC LETTER ALEF FINAL FORM +FE8F..FE92 ; mapped ; 0628 # 1.1 ARABIC LETTER BEH ISOLATED FORM..ARABIC LETTER BEH MEDIAL FORM +FE93..FE94 ; mapped ; 0629 # 1.1 ARABIC LETTER TEH MARBUTA ISOLATED FORM..ARABIC LETTER TEH MARBUTA FINAL FORM +FE95..FE98 ; mapped ; 062A # 1.1 ARABIC LETTER TEH ISOLATED FORM..ARABIC LETTER TEH MEDIAL FORM +FE99..FE9C ; mapped ; 062B # 1.1 ARABIC LETTER THEH ISOLATED FORM..ARABIC LETTER THEH MEDIAL FORM +FE9D..FEA0 ; mapped ; 062C # 1.1 ARABIC LETTER JEEM ISOLATED FORM..ARABIC LETTER JEEM MEDIAL FORM +FEA1..FEA4 ; mapped ; 062D # 1.1 ARABIC LETTER HAH ISOLATED FORM..ARABIC LETTER HAH MEDIAL FORM +FEA5..FEA8 ; mapped ; 062E # 1.1 ARABIC LETTER KHAH ISOLATED FORM..ARABIC LETTER KHAH MEDIAL FORM +FEA9..FEAA ; mapped ; 062F # 1.1 ARABIC LETTER DAL ISOLATED FORM..ARABIC LETTER DAL FINAL FORM +FEAB..FEAC ; mapped ; 0630 # 1.1 ARABIC LETTER THAL ISOLATED FORM..ARABIC LETTER THAL FINAL FORM +FEAD..FEAE ; mapped ; 0631 # 1.1 ARABIC LETTER REH ISOLATED FORM..ARABIC LETTER REH FINAL FORM +FEAF..FEB0 ; mapped ; 0632 # 1.1 ARABIC LETTER ZAIN ISOLATED FORM..ARABIC LETTER 
ZAIN FINAL FORM +FEB1..FEB4 ; mapped ; 0633 # 1.1 ARABIC LETTER SEEN ISOLATED FORM..ARABIC LETTER SEEN MEDIAL FORM +FEB5..FEB8 ; mapped ; 0634 # 1.1 ARABIC LETTER SHEEN ISOLATED FORM..ARABIC LETTER SHEEN MEDIAL FORM +FEB9..FEBC ; mapped ; 0635 # 1.1 ARABIC LETTER SAD ISOLATED FORM..ARABIC LETTER SAD MEDIAL FORM +FEBD..FEC0 ; mapped ; 0636 # 1.1 ARABIC LETTER DAD ISOLATED FORM..ARABIC LETTER DAD MEDIAL FORM +FEC1..FEC4 ; mapped ; 0637 # 1.1 ARABIC LETTER TAH ISOLATED FORM..ARABIC LETTER TAH MEDIAL FORM +FEC5..FEC8 ; mapped ; 0638 # 1.1 ARABIC LETTER ZAH ISOLATED FORM..ARABIC LETTER ZAH MEDIAL FORM +FEC9..FECC ; mapped ; 0639 # 1.1 ARABIC LETTER AIN ISOLATED FORM..ARABIC LETTER AIN MEDIAL FORM +FECD..FED0 ; mapped ; 063A # 1.1 ARABIC LETTER GHAIN ISOLATED FORM..ARABIC LETTER GHAIN MEDIAL FORM +FED1..FED4 ; mapped ; 0641 # 1.1 ARABIC LETTER FEH ISOLATED FORM..ARABIC LETTER FEH MEDIAL FORM +FED5..FED8 ; mapped ; 0642 # 1.1 ARABIC LETTER QAF ISOLATED FORM..ARABIC LETTER QAF MEDIAL FORM +FED9..FEDC ; mapped ; 0643 # 1.1 ARABIC LETTER KAF ISOLATED FORM..ARABIC LETTER KAF MEDIAL FORM +FEDD..FEE0 ; mapped ; 0644 # 1.1 ARABIC LETTER LAM ISOLATED FORM..ARABIC LETTER LAM MEDIAL FORM +FEE1..FEE4 ; mapped ; 0645 # 1.1 ARABIC LETTER MEEM ISOLATED FORM..ARABIC LETTER MEEM MEDIAL FORM +FEE5..FEE8 ; mapped ; 0646 # 1.1 ARABIC LETTER NOON ISOLATED FORM..ARABIC LETTER NOON MEDIAL FORM +FEE9..FEEC ; mapped ; 0647 # 1.1 ARABIC LETTER HEH ISOLATED FORM..ARABIC LETTER HEH MEDIAL FORM +FEED..FEEE ; mapped ; 0648 # 1.1 ARABIC LETTER WAW ISOLATED FORM..ARABIC LETTER WAW FINAL FORM +FEEF..FEF0 ; mapped ; 0649 # 1.1 ARABIC LETTER ALEF MAKSURA ISOLATED FORM..ARABIC LETTER ALEF MAKSURA FINAL FORM +FEF1..FEF4 ; mapped ; 064A # 1.1 ARABIC LETTER YEH ISOLATED FORM..ARABIC LETTER YEH MEDIAL FORM +FEF5..FEF6 ; mapped ; 0644 0622 # 1.1 ARABIC LIGATURE LAM WITH ALEF WITH MADDA ABOVE ISOLATED FORM..ARABIC LIGATURE LAM WITH ALEF WITH MADDA ABOVE FINAL FORM +FEF7..FEF8 ; mapped ; 0644 0623 # 1.1 ARABIC LIGATURE LAM WITH ALEF WITH HAMZA ABOVE ISOLATED FORM..ARABIC LIGATURE LAM WITH ALEF WITH HAMZA ABOVE FINAL FORM +FEF9..FEFA ; mapped ; 0644 0625 # 1.1 ARABIC LIGATURE LAM WITH ALEF WITH HAMZA BELOW ISOLATED FORM..ARABIC LIGATURE LAM WITH ALEF WITH HAMZA BELOW FINAL FORM +FEFB..FEFC ; mapped ; 0644 0627 # 1.1 ARABIC LIGATURE LAM WITH ALEF ISOLATED FORM..ARABIC LIGATURE LAM WITH ALEF FINAL FORM +FEFD..FEFE ; disallowed # NA .. 
+FEFF ; ignored # 1.1 ZERO WIDTH NO-BREAK SPACE +FF00 ; disallowed # NA +FF01 ; disallowed_STD3_mapped ; 0021 # 1.1 FULLWIDTH EXCLAMATION MARK +FF02 ; disallowed_STD3_mapped ; 0022 # 1.1 FULLWIDTH QUOTATION MARK +FF03 ; disallowed_STD3_mapped ; 0023 # 1.1 FULLWIDTH NUMBER SIGN +FF04 ; disallowed_STD3_mapped ; 0024 # 1.1 FULLWIDTH DOLLAR SIGN +FF05 ; disallowed_STD3_mapped ; 0025 # 1.1 FULLWIDTH PERCENT SIGN +FF06 ; disallowed_STD3_mapped ; 0026 # 1.1 FULLWIDTH AMPERSAND +FF07 ; disallowed_STD3_mapped ; 0027 # 1.1 FULLWIDTH APOSTROPHE +FF08 ; disallowed_STD3_mapped ; 0028 # 1.1 FULLWIDTH LEFT PARENTHESIS +FF09 ; disallowed_STD3_mapped ; 0029 # 1.1 FULLWIDTH RIGHT PARENTHESIS +FF0A ; disallowed_STD3_mapped ; 002A # 1.1 FULLWIDTH ASTERISK +FF0B ; disallowed_STD3_mapped ; 002B # 1.1 FULLWIDTH PLUS SIGN +FF0C ; disallowed_STD3_mapped ; 002C # 1.1 FULLWIDTH COMMA +FF0D ; mapped ; 002D # 1.1 FULLWIDTH HYPHEN-MINUS +FF0E ; mapped ; 002E # 1.1 FULLWIDTH FULL STOP +FF0F ; disallowed_STD3_mapped ; 002F # 1.1 FULLWIDTH SOLIDUS +FF10 ; mapped ; 0030 # 1.1 FULLWIDTH DIGIT ZERO +FF11 ; mapped ; 0031 # 1.1 FULLWIDTH DIGIT ONE +FF12 ; mapped ; 0032 # 1.1 FULLWIDTH DIGIT TWO +FF13 ; mapped ; 0033 # 1.1 FULLWIDTH DIGIT THREE +FF14 ; mapped ; 0034 # 1.1 FULLWIDTH DIGIT FOUR +FF15 ; mapped ; 0035 # 1.1 FULLWIDTH DIGIT FIVE +FF16 ; mapped ; 0036 # 1.1 FULLWIDTH DIGIT SIX +FF17 ; mapped ; 0037 # 1.1 FULLWIDTH DIGIT SEVEN +FF18 ; mapped ; 0038 # 1.1 FULLWIDTH DIGIT EIGHT +FF19 ; mapped ; 0039 # 1.1 FULLWIDTH DIGIT NINE +FF1A ; disallowed_STD3_mapped ; 003A # 1.1 FULLWIDTH COLON +FF1B ; disallowed_STD3_mapped ; 003B # 1.1 FULLWIDTH SEMICOLON +FF1C ; disallowed_STD3_mapped ; 003C # 1.1 FULLWIDTH LESS-THAN SIGN +FF1D ; disallowed_STD3_mapped ; 003D # 1.1 FULLWIDTH EQUALS SIGN +FF1E ; disallowed_STD3_mapped ; 003E # 1.1 FULLWIDTH GREATER-THAN SIGN +FF1F ; disallowed_STD3_mapped ; 003F # 1.1 FULLWIDTH QUESTION MARK +FF20 ; disallowed_STD3_mapped ; 0040 # 1.1 FULLWIDTH COMMERCIAL AT +FF21 ; mapped ; 0061 # 1.1 FULLWIDTH LATIN CAPITAL LETTER A +FF22 ; mapped ; 0062 # 1.1 FULLWIDTH LATIN CAPITAL LETTER B +FF23 ; mapped ; 0063 # 1.1 FULLWIDTH LATIN CAPITAL LETTER C +FF24 ; mapped ; 0064 # 1.1 FULLWIDTH LATIN CAPITAL LETTER D +FF25 ; mapped ; 0065 # 1.1 FULLWIDTH LATIN CAPITAL LETTER E +FF26 ; mapped ; 0066 # 1.1 FULLWIDTH LATIN CAPITAL LETTER F +FF27 ; mapped ; 0067 # 1.1 FULLWIDTH LATIN CAPITAL LETTER G +FF28 ; mapped ; 0068 # 1.1 FULLWIDTH LATIN CAPITAL LETTER H +FF29 ; mapped ; 0069 # 1.1 FULLWIDTH LATIN CAPITAL LETTER I +FF2A ; mapped ; 006A # 1.1 FULLWIDTH LATIN CAPITAL LETTER J +FF2B ; mapped ; 006B # 1.1 FULLWIDTH LATIN CAPITAL LETTER K +FF2C ; mapped ; 006C # 1.1 FULLWIDTH LATIN CAPITAL LETTER L +FF2D ; mapped ; 006D # 1.1 FULLWIDTH LATIN CAPITAL LETTER M +FF2E ; mapped ; 006E # 1.1 FULLWIDTH LATIN CAPITAL LETTER N +FF2F ; mapped ; 006F # 1.1 FULLWIDTH LATIN CAPITAL LETTER O +FF30 ; mapped ; 0070 # 1.1 FULLWIDTH LATIN CAPITAL LETTER P +FF31 ; mapped ; 0071 # 1.1 FULLWIDTH LATIN CAPITAL LETTER Q +FF32 ; mapped ; 0072 # 1.1 FULLWIDTH LATIN CAPITAL LETTER R +FF33 ; mapped ; 0073 # 1.1 FULLWIDTH LATIN CAPITAL LETTER S +FF34 ; mapped ; 0074 # 1.1 FULLWIDTH LATIN CAPITAL LETTER T +FF35 ; mapped ; 0075 # 1.1 FULLWIDTH LATIN CAPITAL LETTER U +FF36 ; mapped ; 0076 # 1.1 FULLWIDTH LATIN CAPITAL LETTER V +FF37 ; mapped ; 0077 # 1.1 FULLWIDTH LATIN CAPITAL LETTER W +FF38 ; mapped ; 0078 # 1.1 FULLWIDTH LATIN CAPITAL LETTER X +FF39 ; mapped ; 0079 # 1.1 FULLWIDTH LATIN CAPITAL LETTER Y +FF3A ; mapped ; 007A # 1.1 FULLWIDTH 
LATIN CAPITAL LETTER Z +FF3B ; disallowed_STD3_mapped ; 005B # 1.1 FULLWIDTH LEFT SQUARE BRACKET +FF3C ; disallowed_STD3_mapped ; 005C # 1.1 FULLWIDTH REVERSE SOLIDUS +FF3D ; disallowed_STD3_mapped ; 005D # 1.1 FULLWIDTH RIGHT SQUARE BRACKET +FF3E ; disallowed_STD3_mapped ; 005E # 1.1 FULLWIDTH CIRCUMFLEX ACCENT +FF3F ; disallowed_STD3_mapped ; 005F # 1.1 FULLWIDTH LOW LINE +FF40 ; disallowed_STD3_mapped ; 0060 # 1.1 FULLWIDTH GRAVE ACCENT +FF41 ; mapped ; 0061 # 1.1 FULLWIDTH LATIN SMALL LETTER A +FF42 ; mapped ; 0062 # 1.1 FULLWIDTH LATIN SMALL LETTER B +FF43 ; mapped ; 0063 # 1.1 FULLWIDTH LATIN SMALL LETTER C +FF44 ; mapped ; 0064 # 1.1 FULLWIDTH LATIN SMALL LETTER D +FF45 ; mapped ; 0065 # 1.1 FULLWIDTH LATIN SMALL LETTER E +FF46 ; mapped ; 0066 # 1.1 FULLWIDTH LATIN SMALL LETTER F +FF47 ; mapped ; 0067 # 1.1 FULLWIDTH LATIN SMALL LETTER G +FF48 ; mapped ; 0068 # 1.1 FULLWIDTH LATIN SMALL LETTER H +FF49 ; mapped ; 0069 # 1.1 FULLWIDTH LATIN SMALL LETTER I +FF4A ; mapped ; 006A # 1.1 FULLWIDTH LATIN SMALL LETTER J +FF4B ; mapped ; 006B # 1.1 FULLWIDTH LATIN SMALL LETTER K +FF4C ; mapped ; 006C # 1.1 FULLWIDTH LATIN SMALL LETTER L +FF4D ; mapped ; 006D # 1.1 FULLWIDTH LATIN SMALL LETTER M +FF4E ; mapped ; 006E # 1.1 FULLWIDTH LATIN SMALL LETTER N +FF4F ; mapped ; 006F # 1.1 FULLWIDTH LATIN SMALL LETTER O +FF50 ; mapped ; 0070 # 1.1 FULLWIDTH LATIN SMALL LETTER P +FF51 ; mapped ; 0071 # 1.1 FULLWIDTH LATIN SMALL LETTER Q +FF52 ; mapped ; 0072 # 1.1 FULLWIDTH LATIN SMALL LETTER R +FF53 ; mapped ; 0073 # 1.1 FULLWIDTH LATIN SMALL LETTER S +FF54 ; mapped ; 0074 # 1.1 FULLWIDTH LATIN SMALL LETTER T +FF55 ; mapped ; 0075 # 1.1 FULLWIDTH LATIN SMALL LETTER U +FF56 ; mapped ; 0076 # 1.1 FULLWIDTH LATIN SMALL LETTER V +FF57 ; mapped ; 0077 # 1.1 FULLWIDTH LATIN SMALL LETTER W +FF58 ; mapped ; 0078 # 1.1 FULLWIDTH LATIN SMALL LETTER X +FF59 ; mapped ; 0079 # 1.1 FULLWIDTH LATIN SMALL LETTER Y +FF5A ; mapped ; 007A # 1.1 FULLWIDTH LATIN SMALL LETTER Z +FF5B ; disallowed_STD3_mapped ; 007B # 1.1 FULLWIDTH LEFT CURLY BRACKET +FF5C ; disallowed_STD3_mapped ; 007C # 1.1 FULLWIDTH VERTICAL LINE +FF5D ; disallowed_STD3_mapped ; 007D # 1.1 FULLWIDTH RIGHT CURLY BRACKET +FF5E ; disallowed_STD3_mapped ; 007E # 1.1 FULLWIDTH TILDE +FF5F ; mapped ; 2985 # 3.2 FULLWIDTH LEFT WHITE PARENTHESIS +FF60 ; mapped ; 2986 # 3.2 FULLWIDTH RIGHT WHITE PARENTHESIS +FF61 ; mapped ; 002E # 1.1 HALFWIDTH IDEOGRAPHIC FULL STOP +FF62 ; mapped ; 300C # 1.1 HALFWIDTH LEFT CORNER BRACKET +FF63 ; mapped ; 300D # 1.1 HALFWIDTH RIGHT CORNER BRACKET +FF64 ; mapped ; 3001 # 1.1 HALFWIDTH IDEOGRAPHIC COMMA +FF65 ; mapped ; 30FB # 1.1 HALFWIDTH KATAKANA MIDDLE DOT +FF66 ; mapped ; 30F2 # 1.1 HALFWIDTH KATAKANA LETTER WO +FF67 ; mapped ; 30A1 # 1.1 HALFWIDTH KATAKANA LETTER SMALL A +FF68 ; mapped ; 30A3 # 1.1 HALFWIDTH KATAKANA LETTER SMALL I +FF69 ; mapped ; 30A5 # 1.1 HALFWIDTH KATAKANA LETTER SMALL U +FF6A ; mapped ; 30A7 # 1.1 HALFWIDTH KATAKANA LETTER SMALL E +FF6B ; mapped ; 30A9 # 1.1 HALFWIDTH KATAKANA LETTER SMALL O +FF6C ; mapped ; 30E3 # 1.1 HALFWIDTH KATAKANA LETTER SMALL YA +FF6D ; mapped ; 30E5 # 1.1 HALFWIDTH KATAKANA LETTER SMALL YU +FF6E ; mapped ; 30E7 # 1.1 HALFWIDTH KATAKANA LETTER SMALL YO +FF6F ; mapped ; 30C3 # 1.1 HALFWIDTH KATAKANA LETTER SMALL TU +FF70 ; mapped ; 30FC # 1.1 HALFWIDTH KATAKANA-HIRAGANA PROLONGED SOUND MARK +FF71 ; mapped ; 30A2 # 1.1 HALFWIDTH KATAKANA LETTER A +FF72 ; mapped ; 30A4 # 1.1 HALFWIDTH KATAKANA LETTER I +FF73 ; mapped ; 30A6 # 1.1 HALFWIDTH KATAKANA LETTER U +FF74 ; mapped ; 30A8 # 
1.1 HALFWIDTH KATAKANA LETTER E +FF75 ; mapped ; 30AA # 1.1 HALFWIDTH KATAKANA LETTER O +FF76 ; mapped ; 30AB # 1.1 HALFWIDTH KATAKANA LETTER KA +FF77 ; mapped ; 30AD # 1.1 HALFWIDTH KATAKANA LETTER KI +FF78 ; mapped ; 30AF # 1.1 HALFWIDTH KATAKANA LETTER KU +FF79 ; mapped ; 30B1 # 1.1 HALFWIDTH KATAKANA LETTER KE +FF7A ; mapped ; 30B3 # 1.1 HALFWIDTH KATAKANA LETTER KO +FF7B ; mapped ; 30B5 # 1.1 HALFWIDTH KATAKANA LETTER SA +FF7C ; mapped ; 30B7 # 1.1 HALFWIDTH KATAKANA LETTER SI +FF7D ; mapped ; 30B9 # 1.1 HALFWIDTH KATAKANA LETTER SU +FF7E ; mapped ; 30BB # 1.1 HALFWIDTH KATAKANA LETTER SE +FF7F ; mapped ; 30BD # 1.1 HALFWIDTH KATAKANA LETTER SO +FF80 ; mapped ; 30BF # 1.1 HALFWIDTH KATAKANA LETTER TA +FF81 ; mapped ; 30C1 # 1.1 HALFWIDTH KATAKANA LETTER TI +FF82 ; mapped ; 30C4 # 1.1 HALFWIDTH KATAKANA LETTER TU +FF83 ; mapped ; 30C6 # 1.1 HALFWIDTH KATAKANA LETTER TE +FF84 ; mapped ; 30C8 # 1.1 HALFWIDTH KATAKANA LETTER TO +FF85 ; mapped ; 30CA # 1.1 HALFWIDTH KATAKANA LETTER NA +FF86 ; mapped ; 30CB # 1.1 HALFWIDTH KATAKANA LETTER NI +FF87 ; mapped ; 30CC # 1.1 HALFWIDTH KATAKANA LETTER NU +FF88 ; mapped ; 30CD # 1.1 HALFWIDTH KATAKANA LETTER NE +FF89 ; mapped ; 30CE # 1.1 HALFWIDTH KATAKANA LETTER NO +FF8A ; mapped ; 30CF # 1.1 HALFWIDTH KATAKANA LETTER HA +FF8B ; mapped ; 30D2 # 1.1 HALFWIDTH KATAKANA LETTER HI +FF8C ; mapped ; 30D5 # 1.1 HALFWIDTH KATAKANA LETTER HU +FF8D ; mapped ; 30D8 # 1.1 HALFWIDTH KATAKANA LETTER HE +FF8E ; mapped ; 30DB # 1.1 HALFWIDTH KATAKANA LETTER HO +FF8F ; mapped ; 30DE # 1.1 HALFWIDTH KATAKANA LETTER MA +FF90 ; mapped ; 30DF # 1.1 HALFWIDTH KATAKANA LETTER MI +FF91 ; mapped ; 30E0 # 1.1 HALFWIDTH KATAKANA LETTER MU +FF92 ; mapped ; 30E1 # 1.1 HALFWIDTH KATAKANA LETTER ME +FF93 ; mapped ; 30E2 # 1.1 HALFWIDTH KATAKANA LETTER MO +FF94 ; mapped ; 30E4 # 1.1 HALFWIDTH KATAKANA LETTER YA +FF95 ; mapped ; 30E6 # 1.1 HALFWIDTH KATAKANA LETTER YU +FF96 ; mapped ; 30E8 # 1.1 HALFWIDTH KATAKANA LETTER YO +FF97 ; mapped ; 30E9 # 1.1 HALFWIDTH KATAKANA LETTER RA +FF98 ; mapped ; 30EA # 1.1 HALFWIDTH KATAKANA LETTER RI +FF99 ; mapped ; 30EB # 1.1 HALFWIDTH KATAKANA LETTER RU +FF9A ; mapped ; 30EC # 1.1 HALFWIDTH KATAKANA LETTER RE +FF9B ; mapped ; 30ED # 1.1 HALFWIDTH KATAKANA LETTER RO +FF9C ; mapped ; 30EF # 1.1 HALFWIDTH KATAKANA LETTER WA +FF9D ; mapped ; 30F3 # 1.1 HALFWIDTH KATAKANA LETTER N +FF9E ; mapped ; 3099 # 1.1 HALFWIDTH KATAKANA VOICED SOUND MARK +FF9F ; mapped ; 309A # 1.1 HALFWIDTH KATAKANA SEMI-VOICED SOUND MARK +FFA0 ; disallowed # 1.1 HALFWIDTH HANGUL FILLER +FFA1 ; mapped ; 1100 # 1.1 HALFWIDTH HANGUL LETTER KIYEOK +FFA2 ; mapped ; 1101 # 1.1 HALFWIDTH HANGUL LETTER SSANGKIYEOK +FFA3 ; mapped ; 11AA # 1.1 HALFWIDTH HANGUL LETTER KIYEOK-SIOS +FFA4 ; mapped ; 1102 # 1.1 HALFWIDTH HANGUL LETTER NIEUN +FFA5 ; mapped ; 11AC # 1.1 HALFWIDTH HANGUL LETTER NIEUN-CIEUC +FFA6 ; mapped ; 11AD # 1.1 HALFWIDTH HANGUL LETTER NIEUN-HIEUH +FFA7 ; mapped ; 1103 # 1.1 HALFWIDTH HANGUL LETTER TIKEUT +FFA8 ; mapped ; 1104 # 1.1 HALFWIDTH HANGUL LETTER SSANGTIKEUT +FFA9 ; mapped ; 1105 # 1.1 HALFWIDTH HANGUL LETTER RIEUL +FFAA ; mapped ; 11B0 # 1.1 HALFWIDTH HANGUL LETTER RIEUL-KIYEOK +FFAB ; mapped ; 11B1 # 1.1 HALFWIDTH HANGUL LETTER RIEUL-MIEUM +FFAC ; mapped ; 11B2 # 1.1 HALFWIDTH HANGUL LETTER RIEUL-PIEUP +FFAD ; mapped ; 11B3 # 1.1 HALFWIDTH HANGUL LETTER RIEUL-SIOS +FFAE ; mapped ; 11B4 # 1.1 HALFWIDTH HANGUL LETTER RIEUL-THIEUTH +FFAF ; mapped ; 11B5 # 1.1 HALFWIDTH HANGUL LETTER RIEUL-PHIEUPH +FFB0 ; mapped ; 111A # 1.1 HALFWIDTH HANGUL LETTER 
RIEUL-HIEUH +FFB1 ; mapped ; 1106 # 1.1 HALFWIDTH HANGUL LETTER MIEUM +FFB2 ; mapped ; 1107 # 1.1 HALFWIDTH HANGUL LETTER PIEUP +FFB3 ; mapped ; 1108 # 1.1 HALFWIDTH HANGUL LETTER SSANGPIEUP +FFB4 ; mapped ; 1121 # 1.1 HALFWIDTH HANGUL LETTER PIEUP-SIOS +FFB5 ; mapped ; 1109 # 1.1 HALFWIDTH HANGUL LETTER SIOS +FFB6 ; mapped ; 110A # 1.1 HALFWIDTH HANGUL LETTER SSANGSIOS +FFB7 ; mapped ; 110B # 1.1 HALFWIDTH HANGUL LETTER IEUNG +FFB8 ; mapped ; 110C # 1.1 HALFWIDTH HANGUL LETTER CIEUC +FFB9 ; mapped ; 110D # 1.1 HALFWIDTH HANGUL LETTER SSANGCIEUC +FFBA ; mapped ; 110E # 1.1 HALFWIDTH HANGUL LETTER CHIEUCH +FFBB ; mapped ; 110F # 1.1 HALFWIDTH HANGUL LETTER KHIEUKH +FFBC ; mapped ; 1110 # 1.1 HALFWIDTH HANGUL LETTER THIEUTH +FFBD ; mapped ; 1111 # 1.1 HALFWIDTH HANGUL LETTER PHIEUPH +FFBE ; mapped ; 1112 # 1.1 HALFWIDTH HANGUL LETTER HIEUH +FFBF..FFC1 ; disallowed # NA .. +FFC2 ; mapped ; 1161 # 1.1 HALFWIDTH HANGUL LETTER A +FFC3 ; mapped ; 1162 # 1.1 HALFWIDTH HANGUL LETTER AE +FFC4 ; mapped ; 1163 # 1.1 HALFWIDTH HANGUL LETTER YA +FFC5 ; mapped ; 1164 # 1.1 HALFWIDTH HANGUL LETTER YAE +FFC6 ; mapped ; 1165 # 1.1 HALFWIDTH HANGUL LETTER EO +FFC7 ; mapped ; 1166 # 1.1 HALFWIDTH HANGUL LETTER E +FFC8..FFC9 ; disallowed # NA .. +FFCA ; mapped ; 1167 # 1.1 HALFWIDTH HANGUL LETTER YEO +FFCB ; mapped ; 1168 # 1.1 HALFWIDTH HANGUL LETTER YE +FFCC ; mapped ; 1169 # 1.1 HALFWIDTH HANGUL LETTER O +FFCD ; mapped ; 116A # 1.1 HALFWIDTH HANGUL LETTER WA +FFCE ; mapped ; 116B # 1.1 HALFWIDTH HANGUL LETTER WAE +FFCF ; mapped ; 116C # 1.1 HALFWIDTH HANGUL LETTER OE +FFD0..FFD1 ; disallowed # NA .. +FFD2 ; mapped ; 116D # 1.1 HALFWIDTH HANGUL LETTER YO +FFD3 ; mapped ; 116E # 1.1 HALFWIDTH HANGUL LETTER U +FFD4 ; mapped ; 116F # 1.1 HALFWIDTH HANGUL LETTER WEO +FFD5 ; mapped ; 1170 # 1.1 HALFWIDTH HANGUL LETTER WE +FFD6 ; mapped ; 1171 # 1.1 HALFWIDTH HANGUL LETTER WI +FFD7 ; mapped ; 1172 # 1.1 HALFWIDTH HANGUL LETTER YU +FFD8..FFD9 ; disallowed # NA .. +FFDA ; mapped ; 1173 # 1.1 HALFWIDTH HANGUL LETTER EU +FFDB ; mapped ; 1174 # 1.1 HALFWIDTH HANGUL LETTER YI +FFDC ; mapped ; 1175 # 1.1 HALFWIDTH HANGUL LETTER I +FFDD..FFDF ; disallowed # NA .. +FFE0 ; mapped ; 00A2 # 1.1 FULLWIDTH CENT SIGN +FFE1 ; mapped ; 00A3 # 1.1 FULLWIDTH POUND SIGN +FFE2 ; mapped ; 00AC # 1.1 FULLWIDTH NOT SIGN +FFE3 ; disallowed_STD3_mapped ; 0020 0304 # 1.1 FULLWIDTH MACRON +FFE4 ; mapped ; 00A6 # 1.1 FULLWIDTH BROKEN BAR +FFE5 ; mapped ; 00A5 # 1.1 FULLWIDTH YEN SIGN +FFE6 ; mapped ; 20A9 # 1.1 FULLWIDTH WON SIGN +FFE7 ; disallowed # NA +FFE8 ; mapped ; 2502 # 1.1 HALFWIDTH FORMS LIGHT VERTICAL +FFE9 ; mapped ; 2190 # 1.1 HALFWIDTH LEFTWARDS ARROW +FFEA ; mapped ; 2191 # 1.1 HALFWIDTH UPWARDS ARROW +FFEB ; mapped ; 2192 # 1.1 HALFWIDTH RIGHTWARDS ARROW +FFEC ; mapped ; 2193 # 1.1 HALFWIDTH DOWNWARDS ARROW +FFED ; mapped ; 25A0 # 1.1 HALFWIDTH BLACK SQUARE +FFEE ; mapped ; 25CB # 1.1 HALFWIDTH WHITE CIRCLE +FFEF..FFF8 ; disallowed # NA .. +FFF9..FFFB ; disallowed # 3.0 INTERLINEAR ANNOTATION ANCHOR..INTERLINEAR ANNOTATION TERMINATOR +FFFC ; disallowed # 2.1 OBJECT REPLACEMENT CHARACTER +FFFD ; disallowed # 1.1 REPLACEMENT CHARACTER +FFFE..FFFF ; disallowed # 1.1 .. 
+10000..1000B ; valid # 4.0 LINEAR B SYLLABLE B008 A..LINEAR B SYLLABLE B046 JE +1000C ; disallowed # NA +1000D..10026 ; valid # 4.0 LINEAR B SYLLABLE B036 JO..LINEAR B SYLLABLE B032 QO +10027 ; disallowed # NA +10028..1003A ; valid # 4.0 LINEAR B SYLLABLE B060 RA..LINEAR B SYLLABLE B042 WO +1003B ; disallowed # NA +1003C..1003D ; valid # 4.0 LINEAR B SYLLABLE B017 ZA..LINEAR B SYLLABLE B074 ZE +1003E ; disallowed # NA +1003F..1004D ; valid # 4.0 LINEAR B SYLLABLE B020 ZO..LINEAR B SYLLABLE B091 TWO +1004E..1004F ; disallowed # NA .. +10050..1005D ; valid # 4.0 LINEAR B SYMBOL B018..LINEAR B SYMBOL B089 +1005E..1007F ; disallowed # NA .. +10080..100FA ; valid # 4.0 LINEAR B IDEOGRAM B100 MAN..LINEAR B IDEOGRAM VESSEL B305 +100FB..100FF ; disallowed # NA .. +10100..10102 ; valid ; ; NV8 # 4.0 AEGEAN WORD SEPARATOR LINE..AEGEAN CHECK MARK +10103..10106 ; disallowed # NA .. +10107..10133 ; valid ; ; NV8 # 4.0 AEGEAN NUMBER ONE..AEGEAN NUMBER NINETY THOUSAND +10134..10136 ; disallowed # NA .. +10137..1013F ; valid ; ; NV8 # 4.0 AEGEAN WEIGHT BASE UNIT..AEGEAN MEASURE THIRD SUBUNIT +10140..1018A ; valid ; ; NV8 # 4.1 GREEK ACROPHONIC ATTIC ONE QUARTER..GREEK ZERO SIGN +1018B..1018C ; valid ; ; NV8 # 7.0 GREEK ONE QUARTER SIGN..GREEK SINUSOID SIGN +1018D..1018E ; valid ; ; NV8 # 9.0 GREEK INDICTION SIGN..NOMISMA SIGN +1018F ; disallowed # NA +10190..1019B ; valid ; ; NV8 # 5.1 ROMAN SEXTANS SIGN..ROMAN CENTURIAL SIGN +1019C..1019F ; disallowed # NA .. +101A0 ; valid ; ; NV8 # 7.0 GREEK SYMBOL TAU RHO +101A1..101CF ; disallowed # NA .. +101D0..101FC ; valid ; ; NV8 # 5.1 PHAISTOS DISC SIGN PEDESTRIAN..PHAISTOS DISC SIGN WAVY BAND +101FD ; valid # 5.1 PHAISTOS DISC SIGN COMBINING OBLIQUE STROKE +101FE..1027F ; disallowed # NA .. +10280..1029C ; valid # 5.1 LYCIAN LETTER A..LYCIAN LETTER X +1029D..1029F ; disallowed # NA .. +102A0..102D0 ; valid # 5.1 CARIAN LETTER A..CARIAN LETTER UUU3 +102D1..102DF ; disallowed # NA .. +102E0 ; valid # 7.0 COPTIC EPACT THOUSANDS MARK +102E1..102FB ; valid ; ; NV8 # 7.0 COPTIC EPACT DIGIT ONE..COPTIC EPACT NUMBER NINE HUNDRED +102FC..102FF ; disallowed # NA .. +10300..1031E ; valid # 3.1 OLD ITALIC LETTER A..OLD ITALIC LETTER UU +1031F ; valid # 7.0 OLD ITALIC LETTER ESS +10320..10323 ; valid ; ; NV8 # 3.1 OLD ITALIC NUMERAL ONE..OLD ITALIC NUMERAL FIFTY +10324..1032C ; disallowed # NA .. +1032D..1032F ; valid # 10.0 OLD ITALIC LETTER YE..OLD ITALIC LETTER SOUTHERN TSE +10330..10340 ; valid # 3.1 GOTHIC LETTER AHSA..GOTHIC LETTER PAIRTHRA +10341 ; valid ; ; NV8 # 3.1 GOTHIC LETTER NINETY +10342..10349 ; valid # 3.1 GOTHIC LETTER RAIDA..GOTHIC LETTER OTHAL +1034A ; valid ; ; NV8 # 3.1 GOTHIC LETTER NINE HUNDRED +1034B..1034F ; disallowed # NA .. +10350..1037A ; valid # 7.0 OLD PERMIC LETTER AN..COMBINING OLD PERMIC LETTER SII +1037B..1037F ; disallowed # NA .. +10380..1039D ; valid # 4.0 UGARITIC LETTER ALPA..UGARITIC LETTER SSU +1039E ; disallowed # NA +1039F ; valid ; ; NV8 # 4.0 UGARITIC WORD DIVIDER +103A0..103C3 ; valid # 4.1 OLD PERSIAN SIGN A..OLD PERSIAN SIGN HA +103C4..103C7 ; disallowed # NA .. +103C8..103CF ; valid # 4.1 OLD PERSIAN SIGN AURAMAZDAA..OLD PERSIAN SIGN BUUMISH +103D0..103D5 ; valid ; ; NV8 # 4.1 OLD PERSIAN WORD DIVIDER..OLD PERSIAN NUMBER HUNDRED +103D6..103FF ; disallowed # NA .. 
+10400 ; mapped ; 10428 # 3.1 DESERET CAPITAL LETTER LONG I +10401 ; mapped ; 10429 # 3.1 DESERET CAPITAL LETTER LONG E +10402 ; mapped ; 1042A # 3.1 DESERET CAPITAL LETTER LONG A +10403 ; mapped ; 1042B # 3.1 DESERET CAPITAL LETTER LONG AH +10404 ; mapped ; 1042C # 3.1 DESERET CAPITAL LETTER LONG O +10405 ; mapped ; 1042D # 3.1 DESERET CAPITAL LETTER LONG OO +10406 ; mapped ; 1042E # 3.1 DESERET CAPITAL LETTER SHORT I +10407 ; mapped ; 1042F # 3.1 DESERET CAPITAL LETTER SHORT E +10408 ; mapped ; 10430 # 3.1 DESERET CAPITAL LETTER SHORT A +10409 ; mapped ; 10431 # 3.1 DESERET CAPITAL LETTER SHORT AH +1040A ; mapped ; 10432 # 3.1 DESERET CAPITAL LETTER SHORT O +1040B ; mapped ; 10433 # 3.1 DESERET CAPITAL LETTER SHORT OO +1040C ; mapped ; 10434 # 3.1 DESERET CAPITAL LETTER AY +1040D ; mapped ; 10435 # 3.1 DESERET CAPITAL LETTER OW +1040E ; mapped ; 10436 # 3.1 DESERET CAPITAL LETTER WU +1040F ; mapped ; 10437 # 3.1 DESERET CAPITAL LETTER YEE +10410 ; mapped ; 10438 # 3.1 DESERET CAPITAL LETTER H +10411 ; mapped ; 10439 # 3.1 DESERET CAPITAL LETTER PEE +10412 ; mapped ; 1043A # 3.1 DESERET CAPITAL LETTER BEE +10413 ; mapped ; 1043B # 3.1 DESERET CAPITAL LETTER TEE +10414 ; mapped ; 1043C # 3.1 DESERET CAPITAL LETTER DEE +10415 ; mapped ; 1043D # 3.1 DESERET CAPITAL LETTER CHEE +10416 ; mapped ; 1043E # 3.1 DESERET CAPITAL LETTER JEE +10417 ; mapped ; 1043F # 3.1 DESERET CAPITAL LETTER KAY +10418 ; mapped ; 10440 # 3.1 DESERET CAPITAL LETTER GAY +10419 ; mapped ; 10441 # 3.1 DESERET CAPITAL LETTER EF +1041A ; mapped ; 10442 # 3.1 DESERET CAPITAL LETTER VEE +1041B ; mapped ; 10443 # 3.1 DESERET CAPITAL LETTER ETH +1041C ; mapped ; 10444 # 3.1 DESERET CAPITAL LETTER THEE +1041D ; mapped ; 10445 # 3.1 DESERET CAPITAL LETTER ES +1041E ; mapped ; 10446 # 3.1 DESERET CAPITAL LETTER ZEE +1041F ; mapped ; 10447 # 3.1 DESERET CAPITAL LETTER ESH +10420 ; mapped ; 10448 # 3.1 DESERET CAPITAL LETTER ZHEE +10421 ; mapped ; 10449 # 3.1 DESERET CAPITAL LETTER ER +10422 ; mapped ; 1044A # 3.1 DESERET CAPITAL LETTER EL +10423 ; mapped ; 1044B # 3.1 DESERET CAPITAL LETTER EM +10424 ; mapped ; 1044C # 3.1 DESERET CAPITAL LETTER EN +10425 ; mapped ; 1044D # 3.1 DESERET CAPITAL LETTER ENG +10426 ; mapped ; 1044E # 4.0 DESERET CAPITAL LETTER OI +10427 ; mapped ; 1044F # 4.0 DESERET CAPITAL LETTER EW +10428..1044D ; valid # 3.1 DESERET SMALL LETTER LONG I..DESERET SMALL LETTER ENG +1044E..1049D ; valid # 4.0 DESERET SMALL LETTER OI..OSMANYA LETTER OO +1049E..1049F ; disallowed # NA .. +104A0..104A9 ; valid # 4.0 OSMANYA DIGIT ZERO..OSMANYA DIGIT NINE +104AA..104AF ; disallowed # NA .. 
+104B0 ; mapped ; 104D8 # 9.0 OSAGE CAPITAL LETTER A +104B1 ; mapped ; 104D9 # 9.0 OSAGE CAPITAL LETTER AI +104B2 ; mapped ; 104DA # 9.0 OSAGE CAPITAL LETTER AIN +104B3 ; mapped ; 104DB # 9.0 OSAGE CAPITAL LETTER AH +104B4 ; mapped ; 104DC # 9.0 OSAGE CAPITAL LETTER BRA +104B5 ; mapped ; 104DD # 9.0 OSAGE CAPITAL LETTER CHA +104B6 ; mapped ; 104DE # 9.0 OSAGE CAPITAL LETTER EHCHA +104B7 ; mapped ; 104DF # 9.0 OSAGE CAPITAL LETTER E +104B8 ; mapped ; 104E0 # 9.0 OSAGE CAPITAL LETTER EIN +104B9 ; mapped ; 104E1 # 9.0 OSAGE CAPITAL LETTER HA +104BA ; mapped ; 104E2 # 9.0 OSAGE CAPITAL LETTER HYA +104BB ; mapped ; 104E3 # 9.0 OSAGE CAPITAL LETTER I +104BC ; mapped ; 104E4 # 9.0 OSAGE CAPITAL LETTER KA +104BD ; mapped ; 104E5 # 9.0 OSAGE CAPITAL LETTER EHKA +104BE ; mapped ; 104E6 # 9.0 OSAGE CAPITAL LETTER KYA +104BF ; mapped ; 104E7 # 9.0 OSAGE CAPITAL LETTER LA +104C0 ; mapped ; 104E8 # 9.0 OSAGE CAPITAL LETTER MA +104C1 ; mapped ; 104E9 # 9.0 OSAGE CAPITAL LETTER NA +104C2 ; mapped ; 104EA # 9.0 OSAGE CAPITAL LETTER O +104C3 ; mapped ; 104EB # 9.0 OSAGE CAPITAL LETTER OIN +104C4 ; mapped ; 104EC # 9.0 OSAGE CAPITAL LETTER PA +104C5 ; mapped ; 104ED # 9.0 OSAGE CAPITAL LETTER EHPA +104C6 ; mapped ; 104EE # 9.0 OSAGE CAPITAL LETTER SA +104C7 ; mapped ; 104EF # 9.0 OSAGE CAPITAL LETTER SHA +104C8 ; mapped ; 104F0 # 9.0 OSAGE CAPITAL LETTER TA +104C9 ; mapped ; 104F1 # 9.0 OSAGE CAPITAL LETTER EHTA +104CA ; mapped ; 104F2 # 9.0 OSAGE CAPITAL LETTER TSA +104CB ; mapped ; 104F3 # 9.0 OSAGE CAPITAL LETTER EHTSA +104CC ; mapped ; 104F4 # 9.0 OSAGE CAPITAL LETTER TSHA +104CD ; mapped ; 104F5 # 9.0 OSAGE CAPITAL LETTER DHA +104CE ; mapped ; 104F6 # 9.0 OSAGE CAPITAL LETTER U +104CF ; mapped ; 104F7 # 9.0 OSAGE CAPITAL LETTER WA +104D0 ; mapped ; 104F8 # 9.0 OSAGE CAPITAL LETTER KHA +104D1 ; mapped ; 104F9 # 9.0 OSAGE CAPITAL LETTER GHA +104D2 ; mapped ; 104FA # 9.0 OSAGE CAPITAL LETTER ZA +104D3 ; mapped ; 104FB # 9.0 OSAGE CAPITAL LETTER ZHA +104D4..104D7 ; disallowed # NA .. +104D8..104FB ; valid # 9.0 OSAGE SMALL LETTER A..OSAGE SMALL LETTER ZHA +104FC..104FF ; disallowed # NA .. +10500..10527 ; valid # 7.0 ELBASAN LETTER A..ELBASAN LETTER KHE +10528..1052F ; disallowed # NA .. +10530..10563 ; valid # 7.0 CAUCASIAN ALBANIAN LETTER ALT..CAUCASIAN ALBANIAN LETTER KIW +10564..1056E ; disallowed # NA .. +1056F ; valid ; ; NV8 # 7.0 CAUCASIAN ALBANIAN CITATION MARK +10570..105FF ; disallowed # NA .. +10600..10736 ; valid # 7.0 LINEAR A SIGN AB001..LINEAR A SIGN A664 +10737..1073F ; disallowed # NA .. +10740..10755 ; valid # 7.0 LINEAR A SIGN A701 A..LINEAR A SIGN A732 JE +10756..1075F ; disallowed # NA .. +10760..10767 ; valid # 7.0 LINEAR A SIGN A800..LINEAR A SIGN A807 +10768..107FF ; disallowed # NA .. +10800..10805 ; valid # 4.0 CYPRIOT SYLLABLE A..CYPRIOT SYLLABLE JA +10806..10807 ; disallowed # NA .. +10808 ; valid # 4.0 CYPRIOT SYLLABLE JO +10809 ; disallowed # NA +1080A..10835 ; valid # 4.0 CYPRIOT SYLLABLE KA..CYPRIOT SYLLABLE WO +10836 ; disallowed # NA +10837..10838 ; valid # 4.0 CYPRIOT SYLLABLE XA..CYPRIOT SYLLABLE XE +10839..1083B ; disallowed # NA .. +1083C ; valid # 4.0 CYPRIOT SYLLABLE ZA +1083D..1083E ; disallowed # NA .. 
+1083F ; valid # 4.0 CYPRIOT SYLLABLE ZO +10840..10855 ; valid # 5.2 IMPERIAL ARAMAIC LETTER ALEPH..IMPERIAL ARAMAIC LETTER TAW +10856 ; disallowed # NA +10857..1085F ; valid ; ; NV8 # 5.2 IMPERIAL ARAMAIC SECTION SIGN..IMPERIAL ARAMAIC NUMBER TEN THOUSAND +10860..10876 ; valid # 7.0 PALMYRENE LETTER ALEPH..PALMYRENE LETTER TAW +10877..1087F ; valid ; ; NV8 # 7.0 PALMYRENE LEFT-POINTING FLEURON..PALMYRENE NUMBER TWENTY +10880..1089E ; valid # 7.0 NABATAEAN LETTER FINAL ALEPH..NABATAEAN LETTER TAW +1089F..108A6 ; disallowed # NA .. +108A7..108AF ; valid ; ; NV8 # 7.0 NABATAEAN NUMBER ONE..NABATAEAN NUMBER ONE HUNDRED +108B0..108DF ; disallowed # NA .. +108E0..108F2 ; valid # 8.0 HATRAN LETTER ALEPH..HATRAN LETTER QOPH +108F3 ; disallowed # NA +108F4..108F5 ; valid # 8.0 HATRAN LETTER SHIN..HATRAN LETTER TAW +108F6..108FA ; disallowed # NA .. +108FB..108FF ; valid ; ; NV8 # 8.0 HATRAN NUMBER ONE..HATRAN NUMBER ONE HUNDRED +10900..10915 ; valid # 5.0 PHOENICIAN LETTER ALF..PHOENICIAN LETTER TAU +10916..10919 ; valid ; ; NV8 # 5.0 PHOENICIAN NUMBER ONE..PHOENICIAN NUMBER ONE HUNDRED +1091A..1091B ; valid ; ; NV8 # 5.2 PHOENICIAN NUMBER TWO..PHOENICIAN NUMBER THREE +1091C..1091E ; disallowed # NA .. +1091F ; valid ; ; NV8 # 5.0 PHOENICIAN WORD SEPARATOR +10920..10939 ; valid # 5.1 LYDIAN LETTER A..LYDIAN LETTER C +1093A..1093E ; disallowed # NA .. +1093F ; valid ; ; NV8 # 5.1 LYDIAN TRIANGULAR MARK +10940..1097F ; disallowed # NA .. +10980..109B7 ; valid # 6.1 MEROITIC HIEROGLYPHIC LETTER A..MEROITIC CURSIVE LETTER DA +109B8..109BB ; disallowed # NA .. +109BC..109BD ; valid ; ; NV8 # 8.0 MEROITIC CURSIVE FRACTION ELEVEN TWELFTHS..MEROITIC CURSIVE FRACTION ONE HALF +109BE..109BF ; valid # 6.1 MEROITIC CURSIVE LOGOGRAM RMT..MEROITIC CURSIVE LOGOGRAM IMN +109C0..109CF ; valid ; ; NV8 # 8.0 MEROITIC CURSIVE NUMBER ONE..MEROITIC CURSIVE NUMBER SEVENTY +109D0..109D1 ; disallowed # NA .. +109D2..109FF ; valid ; ; NV8 # 8.0 MEROITIC CURSIVE NUMBER ONE HUNDRED..MEROITIC CURSIVE FRACTION TEN TWELFTHS +10A00..10A03 ; valid # 4.1 KHAROSHTHI LETTER A..KHAROSHTHI VOWEL SIGN VOCALIC R +10A04 ; disallowed # NA +10A05..10A06 ; valid # 4.1 KHAROSHTHI VOWEL SIGN E..KHAROSHTHI VOWEL SIGN O +10A07..10A0B ; disallowed # NA .. +10A0C..10A13 ; valid # 4.1 KHAROSHTHI VOWEL LENGTH MARK..KHAROSHTHI LETTER GHA +10A14 ; disallowed # NA +10A15..10A17 ; valid # 4.1 KHAROSHTHI LETTER CA..KHAROSHTHI LETTER JA +10A18 ; disallowed # NA +10A19..10A33 ; valid # 4.1 KHAROSHTHI LETTER NYA..KHAROSHTHI LETTER TTTHA +10A34..10A37 ; disallowed # NA .. +10A38..10A3A ; valid # 4.1 KHAROSHTHI SIGN BAR ABOVE..KHAROSHTHI SIGN DOT BELOW +10A3B..10A3E ; disallowed # NA .. +10A3F ; valid # 4.1 KHAROSHTHI VIRAMA +10A40..10A47 ; valid ; ; NV8 # 4.1 KHAROSHTHI DIGIT ONE..KHAROSHTHI NUMBER ONE THOUSAND +10A48..10A4F ; disallowed # NA .. +10A50..10A58 ; valid ; ; NV8 # 4.1 KHAROSHTHI PUNCTUATION DOT..KHAROSHTHI PUNCTUATION LINES +10A59..10A5F ; disallowed # NA .. +10A60..10A7C ; valid # 5.2 OLD SOUTH ARABIAN LETTER HE..OLD SOUTH ARABIAN LETTER THETH +10A7D..10A7F ; valid ; ; NV8 # 5.2 OLD SOUTH ARABIAN NUMBER ONE..OLD SOUTH ARABIAN NUMERIC INDICATOR +10A80..10A9C ; valid # 7.0 OLD NORTH ARABIAN LETTER HEH..OLD NORTH ARABIAN LETTER ZAH +10A9D..10A9F ; valid ; ; NV8 # 7.0 OLD NORTH ARABIAN NUMBER ONE..OLD NORTH ARABIAN NUMBER TWENTY +10AA0..10ABF ; disallowed # NA .. 
+10AC0..10AC7 ; valid # 7.0 MANICHAEAN LETTER ALEPH..MANICHAEAN LETTER WAW +10AC8 ; valid ; ; NV8 # 7.0 MANICHAEAN SIGN UD +10AC9..10AE6 ; valid # 7.0 MANICHAEAN LETTER ZAYIN..MANICHAEAN ABBREVIATION MARK BELOW +10AE7..10AEA ; disallowed # NA .. +10AEB..10AF6 ; valid ; ; NV8 # 7.0 MANICHAEAN NUMBER ONE..MANICHAEAN PUNCTUATION LINE FILLER +10AF7..10AFF ; disallowed # NA .. +10B00..10B35 ; valid # 5.2 AVESTAN LETTER A..AVESTAN LETTER HE +10B36..10B38 ; disallowed # NA .. +10B39..10B3F ; valid ; ; NV8 # 5.2 AVESTAN ABBREVIATION MARK..LARGE ONE RING OVER TWO RINGS PUNCTUATION +10B40..10B55 ; valid # 5.2 INSCRIPTIONAL PARTHIAN LETTER ALEPH..INSCRIPTIONAL PARTHIAN LETTER TAW +10B56..10B57 ; disallowed # NA .. +10B58..10B5F ; valid ; ; NV8 # 5.2 INSCRIPTIONAL PARTHIAN NUMBER ONE..INSCRIPTIONAL PARTHIAN NUMBER ONE THOUSAND +10B60..10B72 ; valid # 5.2 INSCRIPTIONAL PAHLAVI LETTER ALEPH..INSCRIPTIONAL PAHLAVI LETTER TAW +10B73..10B77 ; disallowed # NA .. +10B78..10B7F ; valid ; ; NV8 # 5.2 INSCRIPTIONAL PAHLAVI NUMBER ONE..INSCRIPTIONAL PAHLAVI NUMBER ONE THOUSAND +10B80..10B91 ; valid # 7.0 PSALTER PAHLAVI LETTER ALEPH..PSALTER PAHLAVI LETTER TAW +10B92..10B98 ; disallowed # NA .. +10B99..10B9C ; valid ; ; NV8 # 7.0 PSALTER PAHLAVI SECTION MARK..PSALTER PAHLAVI FOUR DOTS WITH DOT +10B9D..10BA8 ; disallowed # NA .. +10BA9..10BAF ; valid ; ; NV8 # 7.0 PSALTER PAHLAVI NUMBER ONE..PSALTER PAHLAVI NUMBER ONE HUNDRED +10BB0..10BFF ; disallowed # NA .. +10C00..10C48 ; valid # 5.2 OLD TURKIC LETTER ORKHON A..OLD TURKIC LETTER ORKHON BASH +10C49..10C7F ; disallowed # NA .. +10C80 ; mapped ; 10CC0 # 8.0 OLD HUNGARIAN CAPITAL LETTER A +10C81 ; mapped ; 10CC1 # 8.0 OLD HUNGARIAN CAPITAL LETTER AA +10C82 ; mapped ; 10CC2 # 8.0 OLD HUNGARIAN CAPITAL LETTER EB +10C83 ; mapped ; 10CC3 # 8.0 OLD HUNGARIAN CAPITAL LETTER AMB +10C84 ; mapped ; 10CC4 # 8.0 OLD HUNGARIAN CAPITAL LETTER EC +10C85 ; mapped ; 10CC5 # 8.0 OLD HUNGARIAN CAPITAL LETTER ENC +10C86 ; mapped ; 10CC6 # 8.0 OLD HUNGARIAN CAPITAL LETTER ECS +10C87 ; mapped ; 10CC7 # 8.0 OLD HUNGARIAN CAPITAL LETTER ED +10C88 ; mapped ; 10CC8 # 8.0 OLD HUNGARIAN CAPITAL LETTER AND +10C89 ; mapped ; 10CC9 # 8.0 OLD HUNGARIAN CAPITAL LETTER E +10C8A ; mapped ; 10CCA # 8.0 OLD HUNGARIAN CAPITAL LETTER CLOSE E +10C8B ; mapped ; 10CCB # 8.0 OLD HUNGARIAN CAPITAL LETTER EE +10C8C ; mapped ; 10CCC # 8.0 OLD HUNGARIAN CAPITAL LETTER EF +10C8D ; mapped ; 10CCD # 8.0 OLD HUNGARIAN CAPITAL LETTER EG +10C8E ; mapped ; 10CCE # 8.0 OLD HUNGARIAN CAPITAL LETTER EGY +10C8F ; mapped ; 10CCF # 8.0 OLD HUNGARIAN CAPITAL LETTER EH +10C90 ; mapped ; 10CD0 # 8.0 OLD HUNGARIAN CAPITAL LETTER I +10C91 ; mapped ; 10CD1 # 8.0 OLD HUNGARIAN CAPITAL LETTER II +10C92 ; mapped ; 10CD2 # 8.0 OLD HUNGARIAN CAPITAL LETTER EJ +10C93 ; mapped ; 10CD3 # 8.0 OLD HUNGARIAN CAPITAL LETTER EK +10C94 ; mapped ; 10CD4 # 8.0 OLD HUNGARIAN CAPITAL LETTER AK +10C95 ; mapped ; 10CD5 # 8.0 OLD HUNGARIAN CAPITAL LETTER UNK +10C96 ; mapped ; 10CD6 # 8.0 OLD HUNGARIAN CAPITAL LETTER EL +10C97 ; mapped ; 10CD7 # 8.0 OLD HUNGARIAN CAPITAL LETTER ELY +10C98 ; mapped ; 10CD8 # 8.0 OLD HUNGARIAN CAPITAL LETTER EM +10C99 ; mapped ; 10CD9 # 8.0 OLD HUNGARIAN CAPITAL LETTER EN +10C9A ; mapped ; 10CDA # 8.0 OLD HUNGARIAN CAPITAL LETTER ENY +10C9B ; mapped ; 10CDB # 8.0 OLD HUNGARIAN CAPITAL LETTER O +10C9C ; mapped ; 10CDC # 8.0 OLD HUNGARIAN CAPITAL LETTER OO +10C9D ; mapped ; 10CDD # 8.0 OLD HUNGARIAN CAPITAL LETTER NIKOLSBURG OE +10C9E ; mapped ; 10CDE # 8.0 OLD HUNGARIAN CAPITAL LETTER RUDIMENTA OE +10C9F ; mapped ; 
10CDF # 8.0 OLD HUNGARIAN CAPITAL LETTER OEE +10CA0 ; mapped ; 10CE0 # 8.0 OLD HUNGARIAN CAPITAL LETTER EP +10CA1 ; mapped ; 10CE1 # 8.0 OLD HUNGARIAN CAPITAL LETTER EMP +10CA2 ; mapped ; 10CE2 # 8.0 OLD HUNGARIAN CAPITAL LETTER ER +10CA3 ; mapped ; 10CE3 # 8.0 OLD HUNGARIAN CAPITAL LETTER SHORT ER +10CA4 ; mapped ; 10CE4 # 8.0 OLD HUNGARIAN CAPITAL LETTER ES +10CA5 ; mapped ; 10CE5 # 8.0 OLD HUNGARIAN CAPITAL LETTER ESZ +10CA6 ; mapped ; 10CE6 # 8.0 OLD HUNGARIAN CAPITAL LETTER ET +10CA7 ; mapped ; 10CE7 # 8.0 OLD HUNGARIAN CAPITAL LETTER ENT +10CA8 ; mapped ; 10CE8 # 8.0 OLD HUNGARIAN CAPITAL LETTER ETY +10CA9 ; mapped ; 10CE9 # 8.0 OLD HUNGARIAN CAPITAL LETTER ECH +10CAA ; mapped ; 10CEA # 8.0 OLD HUNGARIAN CAPITAL LETTER U +10CAB ; mapped ; 10CEB # 8.0 OLD HUNGARIAN CAPITAL LETTER UU +10CAC ; mapped ; 10CEC # 8.0 OLD HUNGARIAN CAPITAL LETTER NIKOLSBURG UE +10CAD ; mapped ; 10CED # 8.0 OLD HUNGARIAN CAPITAL LETTER RUDIMENTA UE +10CAE ; mapped ; 10CEE # 8.0 OLD HUNGARIAN CAPITAL LETTER EV +10CAF ; mapped ; 10CEF # 8.0 OLD HUNGARIAN CAPITAL LETTER EZ +10CB0 ; mapped ; 10CF0 # 8.0 OLD HUNGARIAN CAPITAL LETTER EZS +10CB1 ; mapped ; 10CF1 # 8.0 OLD HUNGARIAN CAPITAL LETTER ENT-SHAPED SIGN +10CB2 ; mapped ; 10CF2 # 8.0 OLD HUNGARIAN CAPITAL LETTER US +10CB3..10CBF ; disallowed # NA .. +10CC0..10CF2 ; valid # 8.0 OLD HUNGARIAN SMALL LETTER A..OLD HUNGARIAN SMALL LETTER US +10CF3..10CF9 ; disallowed # NA .. +10CFA..10CFF ; valid ; ; NV8 # 8.0 OLD HUNGARIAN NUMBER ONE..OLD HUNGARIAN NUMBER ONE THOUSAND +10D00..10E5F ; disallowed # NA .. +10E60..10E7E ; valid ; ; NV8 # 5.2 RUMI DIGIT ONE..RUMI FRACTION TWO THIRDS +10E7F..10FFF ; disallowed # NA .. +11000..11046 ; valid # 6.0 BRAHMI SIGN CANDRABINDU..BRAHMI VIRAMA +11047..1104D ; valid ; ; NV8 # 6.0 BRAHMI DANDA..BRAHMI PUNCTUATION LOTUS +1104E..11051 ; disallowed # NA .. +11052..11065 ; valid ; ; NV8 # 6.0 BRAHMI NUMBER ONE..BRAHMI NUMBER ONE THOUSAND +11066..1106F ; valid # 6.0 BRAHMI DIGIT ZERO..BRAHMI DIGIT NINE +11070..1107E ; disallowed # NA .. +1107F ; valid # 7.0 BRAHMI NUMBER JOINER +11080..110BA ; valid # 5.2 KAITHI SIGN CANDRABINDU..KAITHI SIGN NUKTA +110BB..110BC ; valid ; ; NV8 # 5.2 KAITHI ABBREVIATION SIGN..KAITHI ENUMERATION SIGN +110BD ; disallowed # 5.2 KAITHI NUMBER SIGN +110BE..110C1 ; valid ; ; NV8 # 5.2 KAITHI SECTION MARK..KAITHI DOUBLE DANDA +110C2..110CF ; disallowed # NA .. +110D0..110E8 ; valid # 6.1 SORA SOMPENG LETTER SAH..SORA SOMPENG LETTER MAE +110E9..110EF ; disallowed # NA .. +110F0..110F9 ; valid # 6.1 SORA SOMPENG DIGIT ZERO..SORA SOMPENG DIGIT NINE +110FA..110FF ; disallowed # NA .. +11100..11134 ; valid # 6.1 CHAKMA SIGN CANDRABINDU..CHAKMA MAAYYAA +11135 ; disallowed # NA +11136..1113F ; valid # 6.1 CHAKMA DIGIT ZERO..CHAKMA DIGIT NINE +11140..11143 ; valid ; ; NV8 # 6.1 CHAKMA SECTION MARK..CHAKMA QUESTION MARK +11144..1114F ; disallowed # NA .. +11150..11173 ; valid # 7.0 MAHAJANI LETTER A..MAHAJANI SIGN NUKTA +11174..11175 ; valid ; ; NV8 # 7.0 MAHAJANI ABBREVIATION SIGN..MAHAJANI SECTION MARK +11176 ; valid # 7.0 MAHAJANI LIGATURE SHRI +11177..1117F ; disallowed # NA .. +11180..111C4 ; valid # 6.1 SHARADA SIGN CANDRABINDU..SHARADA OM +111C5..111C8 ; valid ; ; NV8 # 6.1 SHARADA DANDA..SHARADA SEPARATOR +111C9 ; valid ; ; NV8 # 8.0 SHARADA SANDHI MARK +111CA..111CC ; valid # 8.0 SHARADA SIGN NUKTA..SHARADA EXTRA SHORT VOWEL MARK +111CD ; valid ; ; NV8 # 7.0 SHARADA SUTRA MARK +111CE..111CF ; disallowed # NA .. 
+111D0..111D9 ; valid # 6.1 SHARADA DIGIT ZERO..SHARADA DIGIT NINE +111DA ; valid # 7.0 SHARADA EKAM +111DB ; valid ; ; NV8 # 8.0 SHARADA SIGN SIDDHAM +111DC ; valid # 8.0 SHARADA HEADSTROKE +111DD..111DF ; valid ; ; NV8 # 8.0 SHARADA CONTINUATION SIGN..SHARADA SECTION MARK-2 +111E0 ; disallowed # NA +111E1..111F4 ; valid ; ; NV8 # 7.0 SINHALA ARCHAIC DIGIT ONE..SINHALA ARCHAIC NUMBER ONE THOUSAND +111F5..111FF ; disallowed # NA .. +11200..11211 ; valid # 7.0 KHOJKI LETTER A..KHOJKI LETTER JJA +11212 ; disallowed # NA +11213..11237 ; valid # 7.0 KHOJKI LETTER NYA..KHOJKI SIGN SHADDA +11238..1123D ; valid ; ; NV8 # 7.0 KHOJKI DANDA..KHOJKI ABBREVIATION SIGN +1123E ; valid # 9.0 KHOJKI SIGN SUKUN +1123F..1127F ; disallowed # NA .. +11280..11286 ; valid # 8.0 MULTANI LETTER A..MULTANI LETTER GA +11287 ; disallowed # NA +11288 ; valid # 8.0 MULTANI LETTER GHA +11289 ; disallowed # NA +1128A..1128D ; valid # 8.0 MULTANI LETTER CA..MULTANI LETTER JJA +1128E ; disallowed # NA +1128F..1129D ; valid # 8.0 MULTANI LETTER NYA..MULTANI LETTER BA +1129E ; disallowed # NA +1129F..112A8 ; valid # 8.0 MULTANI LETTER BHA..MULTANI LETTER RHA +112A9 ; valid ; ; NV8 # 8.0 MULTANI SECTION MARK +112AA..112AF ; disallowed # NA .. +112B0..112EA ; valid # 7.0 KHUDAWADI LETTER A..KHUDAWADI SIGN VIRAMA +112EB..112EF ; disallowed # NA .. +112F0..112F9 ; valid # 7.0 KHUDAWADI DIGIT ZERO..KHUDAWADI DIGIT NINE +112FA..112FF ; disallowed # NA .. +11300 ; valid # 8.0 GRANTHA SIGN COMBINING ANUSVARA ABOVE +11301..11303 ; valid # 7.0 GRANTHA SIGN CANDRABINDU..GRANTHA SIGN VISARGA +11304 ; disallowed # NA +11305..1130C ; valid # 7.0 GRANTHA LETTER A..GRANTHA LETTER VOCALIC L +1130D..1130E ; disallowed # NA .. +1130F..11310 ; valid # 7.0 GRANTHA LETTER EE..GRANTHA LETTER AI +11311..11312 ; disallowed # NA .. +11313..11328 ; valid # 7.0 GRANTHA LETTER OO..GRANTHA LETTER NA +11329 ; disallowed # NA +1132A..11330 ; valid # 7.0 GRANTHA LETTER PA..GRANTHA LETTER RA +11331 ; disallowed # NA +11332..11333 ; valid # 7.0 GRANTHA LETTER LA..GRANTHA LETTER LLA +11334 ; disallowed # NA +11335..11339 ; valid # 7.0 GRANTHA LETTER VA..GRANTHA LETTER HA +1133A..1133B ; disallowed # NA .. +1133C..11344 ; valid # 7.0 GRANTHA SIGN NUKTA..GRANTHA VOWEL SIGN VOCALIC RR +11345..11346 ; disallowed # NA .. +11347..11348 ; valid # 7.0 GRANTHA VOWEL SIGN EE..GRANTHA VOWEL SIGN AI +11349..1134A ; disallowed # NA .. +1134B..1134D ; valid # 7.0 GRANTHA VOWEL SIGN OO..GRANTHA SIGN VIRAMA +1134E..1134F ; disallowed # NA .. +11350 ; valid # 8.0 GRANTHA OM +11351..11356 ; disallowed # NA .. +11357 ; valid # 7.0 GRANTHA AU LENGTH MARK +11358..1135C ; disallowed # NA .. +1135D..11363 ; valid # 7.0 GRANTHA SIGN PLUTA..GRANTHA VOWEL SIGN VOCALIC LL +11364..11365 ; disallowed # NA .. +11366..1136C ; valid # 7.0 COMBINING GRANTHA DIGIT ZERO..COMBINING GRANTHA DIGIT SIX +1136D..1136F ; disallowed # NA .. +11370..11374 ; valid # 7.0 COMBINING GRANTHA LETTER A..COMBINING GRANTHA LETTER PA +11375..113FF ; disallowed # NA .. +11400..1144A ; valid # 9.0 NEWA LETTER A..NEWA SIDDHI +1144B..1144F ; valid ; ; NV8 # 9.0 NEWA DANDA..NEWA ABBREVIATION SIGN +11450..11459 ; valid # 9.0 NEWA DIGIT ZERO..NEWA DIGIT NINE +1145A ; disallowed # NA +1145B ; valid ; ; NV8 # 9.0 NEWA PLACEHOLDER MARK +1145C ; disallowed # NA +1145D ; valid ; ; NV8 # 9.0 NEWA INSERTION SIGN +1145E..1147F ; disallowed # NA .. +11480..114C5 ; valid # 7.0 TIRHUTA ANJI..TIRHUTA GVANG +114C6 ; valid ; ; NV8 # 7.0 TIRHUTA ABBREVIATION SIGN +114C7 ; valid # 7.0 TIRHUTA OM +114C8..114CF ; disallowed # NA .. 
+114D0..114D9 ; valid # 7.0 TIRHUTA DIGIT ZERO..TIRHUTA DIGIT NINE +114DA..1157F ; disallowed # NA .. +11580..115B5 ; valid # 7.0 SIDDHAM LETTER A..SIDDHAM VOWEL SIGN VOCALIC RR +115B6..115B7 ; disallowed # NA .. +115B8..115C0 ; valid # 7.0 SIDDHAM VOWEL SIGN E..SIDDHAM SIGN NUKTA +115C1..115C9 ; valid ; ; NV8 # 7.0 SIDDHAM SIGN SIDDHAM..SIDDHAM END OF TEXT MARK +115CA..115D7 ; valid ; ; NV8 # 8.0 SIDDHAM SECTION MARK WITH TRIDENT AND U-SHAPED ORNAMENTS..SIDDHAM SECTION MARK WITH CIRCLES AND FOUR ENCLOSURES +115D8..115DD ; valid # 8.0 SIDDHAM LETTER THREE-CIRCLE ALTERNATE I..SIDDHAM VOWEL SIGN ALTERNATE UU +115DE..115FF ; disallowed # NA .. +11600..11640 ; valid # 7.0 MODI LETTER A..MODI SIGN ARDHACANDRA +11641..11643 ; valid ; ; NV8 # 7.0 MODI DANDA..MODI ABBREVIATION SIGN +11644 ; valid # 7.0 MODI SIGN HUVA +11645..1164F ; disallowed # NA .. +11650..11659 ; valid # 7.0 MODI DIGIT ZERO..MODI DIGIT NINE +1165A..1165F ; disallowed # NA .. +11660..1166C ; valid ; ; NV8 # 9.0 MONGOLIAN BIRGA WITH ORNAMENT..MONGOLIAN TURNED SWIRL BIRGA WITH DOUBLE ORNAMENT +1166D..1167F ; disallowed # NA .. +11680..116B7 ; valid # 6.1 TAKRI LETTER A..TAKRI SIGN NUKTA +116B8..116BF ; disallowed # NA .. +116C0..116C9 ; valid # 6.1 TAKRI DIGIT ZERO..TAKRI DIGIT NINE +116CA..116FF ; disallowed # NA .. +11700..11719 ; valid # 8.0 AHOM LETTER KA..AHOM LETTER JHA +1171A..1171C ; disallowed # NA .. +1171D..1172B ; valid # 8.0 AHOM CONSONANT SIGN MEDIAL LA..AHOM SIGN KILLER +1172C..1172F ; disallowed # NA .. +11730..11739 ; valid # 8.0 AHOM DIGIT ZERO..AHOM DIGIT NINE +1173A..1173F ; valid ; ; NV8 # 8.0 AHOM NUMBER TEN..AHOM SYMBOL VI +11740..1189F ; disallowed # NA .. +118A0 ; mapped ; 118C0 # 7.0 WARANG CITI CAPITAL LETTER NGAA +118A1 ; mapped ; 118C1 # 7.0 WARANG CITI CAPITAL LETTER A +118A2 ; mapped ; 118C2 # 7.0 WARANG CITI CAPITAL LETTER WI +118A3 ; mapped ; 118C3 # 7.0 WARANG CITI CAPITAL LETTER YU +118A4 ; mapped ; 118C4 # 7.0 WARANG CITI CAPITAL LETTER YA +118A5 ; mapped ; 118C5 # 7.0 WARANG CITI CAPITAL LETTER YO +118A6 ; mapped ; 118C6 # 7.0 WARANG CITI CAPITAL LETTER II +118A7 ; mapped ; 118C7 # 7.0 WARANG CITI CAPITAL LETTER UU +118A8 ; mapped ; 118C8 # 7.0 WARANG CITI CAPITAL LETTER E +118A9 ; mapped ; 118C9 # 7.0 WARANG CITI CAPITAL LETTER O +118AA ; mapped ; 118CA # 7.0 WARANG CITI CAPITAL LETTER ANG +118AB ; mapped ; 118CB # 7.0 WARANG CITI CAPITAL LETTER GA +118AC ; mapped ; 118CC # 7.0 WARANG CITI CAPITAL LETTER KO +118AD ; mapped ; 118CD # 7.0 WARANG CITI CAPITAL LETTER ENY +118AE ; mapped ; 118CE # 7.0 WARANG CITI CAPITAL LETTER YUJ +118AF ; mapped ; 118CF # 7.0 WARANG CITI CAPITAL LETTER UC +118B0 ; mapped ; 118D0 # 7.0 WARANG CITI CAPITAL LETTER ENN +118B1 ; mapped ; 118D1 # 7.0 WARANG CITI CAPITAL LETTER ODD +118B2 ; mapped ; 118D2 # 7.0 WARANG CITI CAPITAL LETTER TTE +118B3 ; mapped ; 118D3 # 7.0 WARANG CITI CAPITAL LETTER NUNG +118B4 ; mapped ; 118D4 # 7.0 WARANG CITI CAPITAL LETTER DA +118B5 ; mapped ; 118D5 # 7.0 WARANG CITI CAPITAL LETTER AT +118B6 ; mapped ; 118D6 # 7.0 WARANG CITI CAPITAL LETTER AM +118B7 ; mapped ; 118D7 # 7.0 WARANG CITI CAPITAL LETTER BU +118B8 ; mapped ; 118D8 # 7.0 WARANG CITI CAPITAL LETTER PU +118B9 ; mapped ; 118D9 # 7.0 WARANG CITI CAPITAL LETTER HIYO +118BA ; mapped ; 118DA # 7.0 WARANG CITI CAPITAL LETTER HOLO +118BB ; mapped ; 118DB # 7.0 WARANG CITI CAPITAL LETTER HORR +118BC ; mapped ; 118DC # 7.0 WARANG CITI CAPITAL LETTER HAR +118BD ; mapped ; 118DD # 7.0 WARANG CITI CAPITAL LETTER SSUU +118BE ; mapped ; 118DE # 7.0 WARANG CITI CAPITAL LETTER SII +118BF ; 
mapped ; 118DF # 7.0 WARANG CITI CAPITAL LETTER VIYO +118C0..118E9 ; valid # 7.0 WARANG CITI SMALL LETTER NGAA..WARANG CITI DIGIT NINE +118EA..118F2 ; valid ; ; NV8 # 7.0 WARANG CITI NUMBER TEN..WARANG CITI NUMBER NINETY +118F3..118FE ; disallowed # NA .. +118FF ; valid # 7.0 WARANG CITI OM +11900..119FF ; disallowed # NA .. +11A00..11A3E ; valid # 10.0 ZANABAZAR SQUARE LETTER A..ZANABAZAR SQUARE CLUSTER-FINAL LETTER VA +11A3F..11A46 ; valid ; ; NV8 # 10.0 ZANABAZAR SQUARE INITIAL HEAD MARK..ZANABAZAR SQUARE CLOSING DOUBLE-LINED HEAD MARK +11A47 ; valid # 10.0 ZANABAZAR SQUARE SUBJOINER +11A48..11A4F ; disallowed # NA .. +11A50..11A83 ; valid # 10.0 SOYOMBO LETTER A..SOYOMBO LETTER KSSA +11A84..11A85 ; disallowed # NA .. +11A86..11A99 ; valid # 10.0 SOYOMBO CLUSTER-INITIAL LETTER RA..SOYOMBO SUBJOINER +11A9A..11A9C ; valid ; ; NV8 # 10.0 SOYOMBO MARK TSHEG..SOYOMBO MARK DOUBLE SHAD +11A9D ; disallowed # NA +11A9E..11AA2 ; valid ; ; NV8 # 10.0 SOYOMBO HEAD MARK WITH MOON AND SUN AND TRIPLE FLAME..SOYOMBO TERMINAL MARK-2 +11AA3..11ABF ; disallowed # NA .. +11AC0..11AF8 ; valid # 7.0 PAU CIN HAU LETTER PA..PAU CIN HAU GLOTTAL STOP FINAL +11AF9..11BFF ; disallowed # NA .. +11C00..11C08 ; valid # 9.0 BHAIKSUKI LETTER A..BHAIKSUKI LETTER VOCALIC L +11C09 ; disallowed # NA +11C0A..11C36 ; valid # 9.0 BHAIKSUKI LETTER E..BHAIKSUKI VOWEL SIGN VOCALIC L +11C37 ; disallowed # NA +11C38..11C40 ; valid # 9.0 BHAIKSUKI VOWEL SIGN E..BHAIKSUKI SIGN AVAGRAHA +11C41..11C45 ; valid ; ; NV8 # 9.0 BHAIKSUKI DANDA..BHAIKSUKI GAP FILLER-2 +11C46..11C4F ; disallowed # NA .. +11C50..11C59 ; valid # 9.0 BHAIKSUKI DIGIT ZERO..BHAIKSUKI DIGIT NINE +11C5A..11C6C ; valid ; ; NV8 # 9.0 BHAIKSUKI NUMBER ONE..BHAIKSUKI HUNDREDS UNIT MARK +11C6D..11C6F ; disallowed # NA .. +11C70..11C71 ; valid ; ; NV8 # 9.0 MARCHEN HEAD MARK..MARCHEN MARK SHAD +11C72..11C8F ; valid # 9.0 MARCHEN LETTER KA..MARCHEN LETTER A +11C90..11C91 ; disallowed # NA .. +11C92..11CA7 ; valid # 9.0 MARCHEN SUBJOINED LETTER KA..MARCHEN SUBJOINED LETTER ZA +11CA8 ; disallowed # NA +11CA9..11CB6 ; valid # 9.0 MARCHEN SUBJOINED LETTER YA..MARCHEN SIGN CANDRABINDU +11CB7..11CFF ; disallowed # NA .. +11D00..11D06 ; valid # 10.0 MASARAM GONDI LETTER A..MASARAM GONDI LETTER E +11D07 ; disallowed # NA +11D08..11D09 ; valid # 10.0 MASARAM GONDI LETTER AI..MASARAM GONDI LETTER O +11D0A ; disallowed # NA +11D0B..11D36 ; valid # 10.0 MASARAM GONDI LETTER AU..MASARAM GONDI VOWEL SIGN VOCALIC R +11D37..11D39 ; disallowed # NA .. +11D3A ; valid # 10.0 MASARAM GONDI VOWEL SIGN E +11D3B ; disallowed # NA +11D3C..11D3D ; valid # 10.0 MASARAM GONDI VOWEL SIGN AI..MASARAM GONDI VOWEL SIGN O +11D3E ; disallowed # NA +11D3F..11D47 ; valid # 10.0 MASARAM GONDI VOWEL SIGN AU..MASARAM GONDI RA-KARA +11D48..11D4F ; disallowed # NA .. +11D50..11D59 ; valid # 10.0 MASARAM GONDI DIGIT ZERO..MASARAM GONDI DIGIT NINE +11D5A..11FFF ; disallowed # NA .. +12000..1236E ; valid # 5.0 CUNEIFORM SIGN A..CUNEIFORM SIGN ZUM +1236F..12398 ; valid # 7.0 CUNEIFORM SIGN KAP ELAMITE..CUNEIFORM SIGN UM TIMES ME +12399 ; valid # 8.0 CUNEIFORM SIGN U U +1239A..123FF ; disallowed # NA .. 
+12400..12462 ; valid ; ; NV8 # 5.0 CUNEIFORM NUMERIC SIGN TWO ASH..CUNEIFORM NUMERIC SIGN OLD ASSYRIAN ONE QUARTER +12463..1246E ; valid ; ; NV8 # 7.0 CUNEIFORM NUMERIC SIGN ONE QUARTER GUR..CUNEIFORM NUMERIC SIGN NINE U VARIANT FORM +1246F ; disallowed # NA +12470..12473 ; valid ; ; NV8 # 5.0 CUNEIFORM PUNCTUATION SIGN OLD ASSYRIAN WORD DIVIDER..CUNEIFORM PUNCTUATION SIGN DIAGONAL TRICOLON +12474 ; valid ; ; NV8 # 7.0 CUNEIFORM PUNCTUATION SIGN DIAGONAL QUADCOLON +12475..1247F ; disallowed # NA .. +12480..12543 ; valid # 8.0 CUNEIFORM SIGN AB TIMES NUN TENU..CUNEIFORM SIGN ZU5 TIMES THREE DISH TENU +12544..12FFF ; disallowed # NA .. +13000..1342E ; valid # 5.2 EGYPTIAN HIEROGLYPH A001..EGYPTIAN HIEROGLYPH AA032 +1342F..143FF ; disallowed # NA .. +14400..14646 ; valid # 8.0 ANATOLIAN HIEROGLYPH A001..ANATOLIAN HIEROGLYPH A530 +14647..167FF ; disallowed # NA .. +16800..16A38 ; valid # 6.0 BAMUM LETTER PHASE-A NGKUE MFON..BAMUM LETTER PHASE-F VUEQ +16A39..16A3F ; disallowed # NA .. +16A40..16A5E ; valid # 7.0 MRO LETTER TA..MRO LETTER TEK +16A5F ; disallowed # NA +16A60..16A69 ; valid # 7.0 MRO DIGIT ZERO..MRO DIGIT NINE +16A6A..16A6D ; disallowed # NA .. +16A6E..16A6F ; valid ; ; NV8 # 7.0 MRO DANDA..MRO DOUBLE DANDA +16A70..16ACF ; disallowed # NA .. +16AD0..16AED ; valid # 7.0 BASSA VAH LETTER ENNI..BASSA VAH LETTER I +16AEE..16AEF ; disallowed # NA .. +16AF0..16AF4 ; valid # 7.0 BASSA VAH COMBINING HIGH TONE..BASSA VAH COMBINING HIGH-LOW TONE +16AF5 ; valid ; ; NV8 # 7.0 BASSA VAH FULL STOP +16AF6..16AFF ; disallowed # NA .. +16B00..16B36 ; valid # 7.0 PAHAWH HMONG VOWEL KEEB..PAHAWH HMONG MARK CIM TAUM +16B37..16B3F ; valid ; ; NV8 # 7.0 PAHAWH HMONG SIGN VOS THOM..PAHAWH HMONG SIGN XYEEM FAIB +16B40..16B43 ; valid # 7.0 PAHAWH HMONG SIGN VOS SEEV..PAHAWH HMONG SIGN IB YAM +16B44..16B45 ; valid ; ; NV8 # 7.0 PAHAWH HMONG SIGN XAUS..PAHAWH HMONG SIGN CIM TSOV ROG +16B46..16B4F ; disallowed # NA .. +16B50..16B59 ; valid # 7.0 PAHAWH HMONG DIGIT ZERO..PAHAWH HMONG DIGIT NINE +16B5A ; disallowed # NA +16B5B..16B61 ; valid ; ; NV8 # 7.0 PAHAWH HMONG NUMBER TENS..PAHAWH HMONG NUMBER TRILLIONS +16B62 ; disallowed # NA +16B63..16B77 ; valid # 7.0 PAHAWH HMONG SIGN VOS LUB..PAHAWH HMONG SIGN CIM NRES TOS +16B78..16B7C ; disallowed # NA .. +16B7D..16B8F ; valid # 7.0 PAHAWH HMONG CLAN SIGN TSHEEJ..PAHAWH HMONG CLAN SIGN VWJ +16B90..16EFF ; disallowed # NA .. +16F00..16F44 ; valid # 6.1 MIAO LETTER PA..MIAO LETTER HHA +16F45..16F4F ; disallowed # NA .. +16F50..16F7E ; valid # 6.1 MIAO LETTER NASALIZATION..MIAO VOWEL SIGN NG +16F7F..16F8E ; disallowed # NA .. +16F8F..16F9F ; valid # 6.1 MIAO TONE RIGHT..MIAO LETTER REFORMED TONE-8 +16FA0..16FDF ; disallowed # NA .. +16FE0 ; valid # 9.0 TANGUT ITERATION MARK +16FE1 ; valid # 10.0 NUSHU ITERATION MARK +16FE2..16FFF ; disallowed # NA .. +17000..187EC ; valid # 9.0 TANGUT IDEOGRAPH-17000..TANGUT IDEOGRAPH-187EC +187ED..187FF ; disallowed # NA .. +18800..18AF2 ; valid # 9.0 TANGUT COMPONENT-001..TANGUT COMPONENT-755 +18AF3..1AFFF ; disallowed # NA .. +1B000..1B001 ; valid # 6.0 KATAKANA LETTER ARCHAIC E..HIRAGANA LETTER ARCHAIC YE +1B002..1B11E ; valid # 10.0 HENTAIGANA LETTER A-1..HENTAIGANA LETTER N-MU-MO-2 +1B11F..1B16F ; disallowed # NA .. +1B170..1B2FB ; valid # 10.0 NUSHU CHARACTER-1B170..NUSHU CHARACTER-1B2FB +1B2FC..1BBFF ; disallowed # NA .. +1BC00..1BC6A ; valid # 7.0 DUPLOYAN LETTER H..DUPLOYAN LETTER VOCALIC M +1BC6B..1BC6F ; disallowed # NA .. 
+1BC70..1BC7C ; valid # 7.0 DUPLOYAN AFFIX LEFT HORIZONTAL SECANT..DUPLOYAN AFFIX ATTACHED TANGENT HOOK +1BC7D..1BC7F ; disallowed # NA .. +1BC80..1BC88 ; valid # 7.0 DUPLOYAN AFFIX HIGH ACUTE..DUPLOYAN AFFIX HIGH VERTICAL +1BC89..1BC8F ; disallowed # NA .. +1BC90..1BC99 ; valid # 7.0 DUPLOYAN AFFIX LOW ACUTE..DUPLOYAN AFFIX LOW ARROW +1BC9A..1BC9B ; disallowed # NA .. +1BC9C ; valid ; ; NV8 # 7.0 DUPLOYAN SIGN O WITH CROSS +1BC9D..1BC9E ; valid # 7.0 DUPLOYAN THICK LETTER SELECTOR..DUPLOYAN DOUBLE MARK +1BC9F ; valid ; ; NV8 # 7.0 DUPLOYAN PUNCTUATION CHINOOK FULL STOP +1BCA0..1BCA3 ; ignored # 7.0 SHORTHAND FORMAT LETTER OVERLAP..SHORTHAND FORMAT UP STEP +1BCA4..1CFFF ; disallowed # NA .. +1D000..1D0F5 ; valid ; ; NV8 # 3.1 BYZANTINE MUSICAL SYMBOL PSILI..BYZANTINE MUSICAL SYMBOL GORGON NEO KATO +1D0F6..1D0FF ; disallowed # NA .. +1D100..1D126 ; valid ; ; NV8 # 3.1 MUSICAL SYMBOL SINGLE BARLINE..MUSICAL SYMBOL DRUM CLEF-2 +1D127..1D128 ; disallowed # NA .. +1D129 ; valid ; ; NV8 # 5.1 MUSICAL SYMBOL MULTIPLE MEASURE REST +1D12A..1D15D ; valid ; ; NV8 # 3.1 MUSICAL SYMBOL DOUBLE SHARP..MUSICAL SYMBOL WHOLE NOTE +1D15E ; mapped ; 1D157 1D165 # 3.1 MUSICAL SYMBOL HALF NOTE +1D15F ; mapped ; 1D158 1D165 # 3.1 MUSICAL SYMBOL QUARTER NOTE +1D160 ; mapped ; 1D158 1D165 1D16E #3.1 MUSICAL SYMBOL EIGHTH NOTE +1D161 ; mapped ; 1D158 1D165 1D16F #3.1 MUSICAL SYMBOL SIXTEENTH NOTE +1D162 ; mapped ; 1D158 1D165 1D170 #3.1 MUSICAL SYMBOL THIRTY-SECOND NOTE +1D163 ; mapped ; 1D158 1D165 1D171 #3.1 MUSICAL SYMBOL SIXTY-FOURTH NOTE +1D164 ; mapped ; 1D158 1D165 1D172 #3.1 MUSICAL SYMBOL ONE HUNDRED TWENTY-EIGHTH NOTE +1D165..1D172 ; valid ; ; NV8 # 3.1 MUSICAL SYMBOL COMBINING STEM..MUSICAL SYMBOL COMBINING FLAG-5 +1D173..1D17A ; disallowed # 3.1 MUSICAL SYMBOL BEGIN BEAM..MUSICAL SYMBOL END PHRASE +1D17B..1D1BA ; valid ; ; NV8 # 3.1 MUSICAL SYMBOL COMBINING ACCENT..MUSICAL SYMBOL SEMIBREVIS BLACK +1D1BB ; mapped ; 1D1B9 1D165 # 3.1 MUSICAL SYMBOL MINIMA +1D1BC ; mapped ; 1D1BA 1D165 # 3.1 MUSICAL SYMBOL MINIMA BLACK +1D1BD ; mapped ; 1D1B9 1D165 1D16E #3.1 MUSICAL SYMBOL SEMIMINIMA WHITE +1D1BE ; mapped ; 1D1BA 1D165 1D16E #3.1 MUSICAL SYMBOL SEMIMINIMA BLACK +1D1BF ; mapped ; 1D1B9 1D165 1D16F #3.1 MUSICAL SYMBOL FUSA WHITE +1D1C0 ; mapped ; 1D1BA 1D165 1D16F #3.1 MUSICAL SYMBOL FUSA BLACK +1D1C1..1D1DD ; valid ; ; NV8 # 3.1 MUSICAL SYMBOL LONGA PERFECTA REST..MUSICAL SYMBOL PES SUBPUNCTIS +1D1DE..1D1E8 ; valid ; ; NV8 # 8.0 MUSICAL SYMBOL KIEVAN C CLEF..MUSICAL SYMBOL KIEVAN FLAT SIGN +1D1E9..1D1FF ; disallowed # NA .. +1D200..1D245 ; valid ; ; NV8 # 4.1 GREEK VOCAL NOTATION SYMBOL-1..GREEK MUSICAL LEIMMA +1D246..1D2FF ; disallowed # NA .. +1D300..1D356 ; valid ; ; NV8 # 4.0 MONOGRAM FOR EARTH..TETRAGRAM FOR FOSTERING +1D357..1D35F ; disallowed # NA .. +1D360..1D371 ; valid ; ; NV8 # 5.0 COUNTING ROD UNIT DIGIT ONE..COUNTING ROD TENS DIGIT NINE +1D372..1D3FF ; disallowed # NA .. 
+1D400 ; mapped ; 0061 # 3.1 MATHEMATICAL BOLD CAPITAL A +1D401 ; mapped ; 0062 # 3.1 MATHEMATICAL BOLD CAPITAL B +1D402 ; mapped ; 0063 # 3.1 MATHEMATICAL BOLD CAPITAL C +1D403 ; mapped ; 0064 # 3.1 MATHEMATICAL BOLD CAPITAL D +1D404 ; mapped ; 0065 # 3.1 MATHEMATICAL BOLD CAPITAL E +1D405 ; mapped ; 0066 # 3.1 MATHEMATICAL BOLD CAPITAL F +1D406 ; mapped ; 0067 # 3.1 MATHEMATICAL BOLD CAPITAL G +1D407 ; mapped ; 0068 # 3.1 MATHEMATICAL BOLD CAPITAL H +1D408 ; mapped ; 0069 # 3.1 MATHEMATICAL BOLD CAPITAL I +1D409 ; mapped ; 006A # 3.1 MATHEMATICAL BOLD CAPITAL J +1D40A ; mapped ; 006B # 3.1 MATHEMATICAL BOLD CAPITAL K +1D40B ; mapped ; 006C # 3.1 MATHEMATICAL BOLD CAPITAL L +1D40C ; mapped ; 006D # 3.1 MATHEMATICAL BOLD CAPITAL M +1D40D ; mapped ; 006E # 3.1 MATHEMATICAL BOLD CAPITAL N +1D40E ; mapped ; 006F # 3.1 MATHEMATICAL BOLD CAPITAL O +1D40F ; mapped ; 0070 # 3.1 MATHEMATICAL BOLD CAPITAL P +1D410 ; mapped ; 0071 # 3.1 MATHEMATICAL BOLD CAPITAL Q +1D411 ; mapped ; 0072 # 3.1 MATHEMATICAL BOLD CAPITAL R +1D412 ; mapped ; 0073 # 3.1 MATHEMATICAL BOLD CAPITAL S +1D413 ; mapped ; 0074 # 3.1 MATHEMATICAL BOLD CAPITAL T +1D414 ; mapped ; 0075 # 3.1 MATHEMATICAL BOLD CAPITAL U +1D415 ; mapped ; 0076 # 3.1 MATHEMATICAL BOLD CAPITAL V +1D416 ; mapped ; 0077 # 3.1 MATHEMATICAL BOLD CAPITAL W +1D417 ; mapped ; 0078 # 3.1 MATHEMATICAL BOLD CAPITAL X +1D418 ; mapped ; 0079 # 3.1 MATHEMATICAL BOLD CAPITAL Y +1D419 ; mapped ; 007A # 3.1 MATHEMATICAL BOLD CAPITAL Z +1D41A ; mapped ; 0061 # 3.1 MATHEMATICAL BOLD SMALL A +1D41B ; mapped ; 0062 # 3.1 MATHEMATICAL BOLD SMALL B +1D41C ; mapped ; 0063 # 3.1 MATHEMATICAL BOLD SMALL C +1D41D ; mapped ; 0064 # 3.1 MATHEMATICAL BOLD SMALL D +1D41E ; mapped ; 0065 # 3.1 MATHEMATICAL BOLD SMALL E +1D41F ; mapped ; 0066 # 3.1 MATHEMATICAL BOLD SMALL F +1D420 ; mapped ; 0067 # 3.1 MATHEMATICAL BOLD SMALL G +1D421 ; mapped ; 0068 # 3.1 MATHEMATICAL BOLD SMALL H +1D422 ; mapped ; 0069 # 3.1 MATHEMATICAL BOLD SMALL I +1D423 ; mapped ; 006A # 3.1 MATHEMATICAL BOLD SMALL J +1D424 ; mapped ; 006B # 3.1 MATHEMATICAL BOLD SMALL K +1D425 ; mapped ; 006C # 3.1 MATHEMATICAL BOLD SMALL L +1D426 ; mapped ; 006D # 3.1 MATHEMATICAL BOLD SMALL M +1D427 ; mapped ; 006E # 3.1 MATHEMATICAL BOLD SMALL N +1D428 ; mapped ; 006F # 3.1 MATHEMATICAL BOLD SMALL O +1D429 ; mapped ; 0070 # 3.1 MATHEMATICAL BOLD SMALL P +1D42A ; mapped ; 0071 # 3.1 MATHEMATICAL BOLD SMALL Q +1D42B ; mapped ; 0072 # 3.1 MATHEMATICAL BOLD SMALL R +1D42C ; mapped ; 0073 # 3.1 MATHEMATICAL BOLD SMALL S +1D42D ; mapped ; 0074 # 3.1 MATHEMATICAL BOLD SMALL T +1D42E ; mapped ; 0075 # 3.1 MATHEMATICAL BOLD SMALL U +1D42F ; mapped ; 0076 # 3.1 MATHEMATICAL BOLD SMALL V +1D430 ; mapped ; 0077 # 3.1 MATHEMATICAL BOLD SMALL W +1D431 ; mapped ; 0078 # 3.1 MATHEMATICAL BOLD SMALL X +1D432 ; mapped ; 0079 # 3.1 MATHEMATICAL BOLD SMALL Y +1D433 ; mapped ; 007A # 3.1 MATHEMATICAL BOLD SMALL Z +1D434 ; mapped ; 0061 # 3.1 MATHEMATICAL ITALIC CAPITAL A +1D435 ; mapped ; 0062 # 3.1 MATHEMATICAL ITALIC CAPITAL B +1D436 ; mapped ; 0063 # 3.1 MATHEMATICAL ITALIC CAPITAL C +1D437 ; mapped ; 0064 # 3.1 MATHEMATICAL ITALIC CAPITAL D +1D438 ; mapped ; 0065 # 3.1 MATHEMATICAL ITALIC CAPITAL E +1D439 ; mapped ; 0066 # 3.1 MATHEMATICAL ITALIC CAPITAL F +1D43A ; mapped ; 0067 # 3.1 MATHEMATICAL ITALIC CAPITAL G +1D43B ; mapped ; 0068 # 3.1 MATHEMATICAL ITALIC CAPITAL H +1D43C ; mapped ; 0069 # 3.1 MATHEMATICAL ITALIC CAPITAL I +1D43D ; mapped ; 006A # 3.1 MATHEMATICAL ITALIC CAPITAL J +1D43E ; mapped ; 006B # 3.1 MATHEMATICAL ITALIC 
CAPITAL K +1D43F ; mapped ; 006C # 3.1 MATHEMATICAL ITALIC CAPITAL L +1D440 ; mapped ; 006D # 3.1 MATHEMATICAL ITALIC CAPITAL M +1D441 ; mapped ; 006E # 3.1 MATHEMATICAL ITALIC CAPITAL N +1D442 ; mapped ; 006F # 3.1 MATHEMATICAL ITALIC CAPITAL O +1D443 ; mapped ; 0070 # 3.1 MATHEMATICAL ITALIC CAPITAL P +1D444 ; mapped ; 0071 # 3.1 MATHEMATICAL ITALIC CAPITAL Q +1D445 ; mapped ; 0072 # 3.1 MATHEMATICAL ITALIC CAPITAL R +1D446 ; mapped ; 0073 # 3.1 MATHEMATICAL ITALIC CAPITAL S +1D447 ; mapped ; 0074 # 3.1 MATHEMATICAL ITALIC CAPITAL T +1D448 ; mapped ; 0075 # 3.1 MATHEMATICAL ITALIC CAPITAL U +1D449 ; mapped ; 0076 # 3.1 MATHEMATICAL ITALIC CAPITAL V +1D44A ; mapped ; 0077 # 3.1 MATHEMATICAL ITALIC CAPITAL W +1D44B ; mapped ; 0078 # 3.1 MATHEMATICAL ITALIC CAPITAL X +1D44C ; mapped ; 0079 # 3.1 MATHEMATICAL ITALIC CAPITAL Y +1D44D ; mapped ; 007A # 3.1 MATHEMATICAL ITALIC CAPITAL Z +1D44E ; mapped ; 0061 # 3.1 MATHEMATICAL ITALIC SMALL A +1D44F ; mapped ; 0062 # 3.1 MATHEMATICAL ITALIC SMALL B +1D450 ; mapped ; 0063 # 3.1 MATHEMATICAL ITALIC SMALL C +1D451 ; mapped ; 0064 # 3.1 MATHEMATICAL ITALIC SMALL D +1D452 ; mapped ; 0065 # 3.1 MATHEMATICAL ITALIC SMALL E +1D453 ; mapped ; 0066 # 3.1 MATHEMATICAL ITALIC SMALL F +1D454 ; mapped ; 0067 # 3.1 MATHEMATICAL ITALIC SMALL G +1D455 ; disallowed # NA +1D456 ; mapped ; 0069 # 3.1 MATHEMATICAL ITALIC SMALL I +1D457 ; mapped ; 006A # 3.1 MATHEMATICAL ITALIC SMALL J +1D458 ; mapped ; 006B # 3.1 MATHEMATICAL ITALIC SMALL K +1D459 ; mapped ; 006C # 3.1 MATHEMATICAL ITALIC SMALL L +1D45A ; mapped ; 006D # 3.1 MATHEMATICAL ITALIC SMALL M +1D45B ; mapped ; 006E # 3.1 MATHEMATICAL ITALIC SMALL N +1D45C ; mapped ; 006F # 3.1 MATHEMATICAL ITALIC SMALL O +1D45D ; mapped ; 0070 # 3.1 MATHEMATICAL ITALIC SMALL P +1D45E ; mapped ; 0071 # 3.1 MATHEMATICAL ITALIC SMALL Q +1D45F ; mapped ; 0072 # 3.1 MATHEMATICAL ITALIC SMALL R +1D460 ; mapped ; 0073 # 3.1 MATHEMATICAL ITALIC SMALL S +1D461 ; mapped ; 0074 # 3.1 MATHEMATICAL ITALIC SMALL T +1D462 ; mapped ; 0075 # 3.1 MATHEMATICAL ITALIC SMALL U +1D463 ; mapped ; 0076 # 3.1 MATHEMATICAL ITALIC SMALL V +1D464 ; mapped ; 0077 # 3.1 MATHEMATICAL ITALIC SMALL W +1D465 ; mapped ; 0078 # 3.1 MATHEMATICAL ITALIC SMALL X +1D466 ; mapped ; 0079 # 3.1 MATHEMATICAL ITALIC SMALL Y +1D467 ; mapped ; 007A # 3.1 MATHEMATICAL ITALIC SMALL Z +1D468 ; mapped ; 0061 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL A +1D469 ; mapped ; 0062 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL B +1D46A ; mapped ; 0063 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL C +1D46B ; mapped ; 0064 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL D +1D46C ; mapped ; 0065 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL E +1D46D ; mapped ; 0066 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL F +1D46E ; mapped ; 0067 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL G +1D46F ; mapped ; 0068 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL H +1D470 ; mapped ; 0069 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL I +1D471 ; mapped ; 006A # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL J +1D472 ; mapped ; 006B # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL K +1D473 ; mapped ; 006C # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL L +1D474 ; mapped ; 006D # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL M +1D475 ; mapped ; 006E # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL N +1D476 ; mapped ; 006F # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL O +1D477 ; mapped ; 0070 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL P +1D478 ; mapped ; 0071 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL Q +1D479 ; mapped ; 0072 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL R +1D47A ; mapped ; 0073 # 3.1 MATHEMATICAL BOLD ITALIC 
CAPITAL S +1D47B ; mapped ; 0074 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL T +1D47C ; mapped ; 0075 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL U +1D47D ; mapped ; 0076 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL V +1D47E ; mapped ; 0077 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL W +1D47F ; mapped ; 0078 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL X +1D480 ; mapped ; 0079 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL Y +1D481 ; mapped ; 007A # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL Z +1D482 ; mapped ; 0061 # 3.1 MATHEMATICAL BOLD ITALIC SMALL A +1D483 ; mapped ; 0062 # 3.1 MATHEMATICAL BOLD ITALIC SMALL B +1D484 ; mapped ; 0063 # 3.1 MATHEMATICAL BOLD ITALIC SMALL C +1D485 ; mapped ; 0064 # 3.1 MATHEMATICAL BOLD ITALIC SMALL D +1D486 ; mapped ; 0065 # 3.1 MATHEMATICAL BOLD ITALIC SMALL E +1D487 ; mapped ; 0066 # 3.1 MATHEMATICAL BOLD ITALIC SMALL F +1D488 ; mapped ; 0067 # 3.1 MATHEMATICAL BOLD ITALIC SMALL G +1D489 ; mapped ; 0068 # 3.1 MATHEMATICAL BOLD ITALIC SMALL H +1D48A ; mapped ; 0069 # 3.1 MATHEMATICAL BOLD ITALIC SMALL I +1D48B ; mapped ; 006A # 3.1 MATHEMATICAL BOLD ITALIC SMALL J +1D48C ; mapped ; 006B # 3.1 MATHEMATICAL BOLD ITALIC SMALL K +1D48D ; mapped ; 006C # 3.1 MATHEMATICAL BOLD ITALIC SMALL L +1D48E ; mapped ; 006D # 3.1 MATHEMATICAL BOLD ITALIC SMALL M +1D48F ; mapped ; 006E # 3.1 MATHEMATICAL BOLD ITALIC SMALL N +1D490 ; mapped ; 006F # 3.1 MATHEMATICAL BOLD ITALIC SMALL O +1D491 ; mapped ; 0070 # 3.1 MATHEMATICAL BOLD ITALIC SMALL P +1D492 ; mapped ; 0071 # 3.1 MATHEMATICAL BOLD ITALIC SMALL Q +1D493 ; mapped ; 0072 # 3.1 MATHEMATICAL BOLD ITALIC SMALL R +1D494 ; mapped ; 0073 # 3.1 MATHEMATICAL BOLD ITALIC SMALL S +1D495 ; mapped ; 0074 # 3.1 MATHEMATICAL BOLD ITALIC SMALL T +1D496 ; mapped ; 0075 # 3.1 MATHEMATICAL BOLD ITALIC SMALL U +1D497 ; mapped ; 0076 # 3.1 MATHEMATICAL BOLD ITALIC SMALL V +1D498 ; mapped ; 0077 # 3.1 MATHEMATICAL BOLD ITALIC SMALL W +1D499 ; mapped ; 0078 # 3.1 MATHEMATICAL BOLD ITALIC SMALL X +1D49A ; mapped ; 0079 # 3.1 MATHEMATICAL BOLD ITALIC SMALL Y +1D49B ; mapped ; 007A # 3.1 MATHEMATICAL BOLD ITALIC SMALL Z +1D49C ; mapped ; 0061 # 3.1 MATHEMATICAL SCRIPT CAPITAL A +1D49D ; disallowed # NA +1D49E ; mapped ; 0063 # 3.1 MATHEMATICAL SCRIPT CAPITAL C +1D49F ; mapped ; 0064 # 3.1 MATHEMATICAL SCRIPT CAPITAL D +1D4A0..1D4A1 ; disallowed # NA .. +1D4A2 ; mapped ; 0067 # 3.1 MATHEMATICAL SCRIPT CAPITAL G +1D4A3..1D4A4 ; disallowed # NA .. +1D4A5 ; mapped ; 006A # 3.1 MATHEMATICAL SCRIPT CAPITAL J +1D4A6 ; mapped ; 006B # 3.1 MATHEMATICAL SCRIPT CAPITAL K +1D4A7..1D4A8 ; disallowed # NA .. 
+1D4A9 ; mapped ; 006E # 3.1 MATHEMATICAL SCRIPT CAPITAL N +1D4AA ; mapped ; 006F # 3.1 MATHEMATICAL SCRIPT CAPITAL O +1D4AB ; mapped ; 0070 # 3.1 MATHEMATICAL SCRIPT CAPITAL P +1D4AC ; mapped ; 0071 # 3.1 MATHEMATICAL SCRIPT CAPITAL Q +1D4AD ; disallowed # NA +1D4AE ; mapped ; 0073 # 3.1 MATHEMATICAL SCRIPT CAPITAL S +1D4AF ; mapped ; 0074 # 3.1 MATHEMATICAL SCRIPT CAPITAL T +1D4B0 ; mapped ; 0075 # 3.1 MATHEMATICAL SCRIPT CAPITAL U +1D4B1 ; mapped ; 0076 # 3.1 MATHEMATICAL SCRIPT CAPITAL V +1D4B2 ; mapped ; 0077 # 3.1 MATHEMATICAL SCRIPT CAPITAL W +1D4B3 ; mapped ; 0078 # 3.1 MATHEMATICAL SCRIPT CAPITAL X +1D4B4 ; mapped ; 0079 # 3.1 MATHEMATICAL SCRIPT CAPITAL Y +1D4B5 ; mapped ; 007A # 3.1 MATHEMATICAL SCRIPT CAPITAL Z +1D4B6 ; mapped ; 0061 # 3.1 MATHEMATICAL SCRIPT SMALL A +1D4B7 ; mapped ; 0062 # 3.1 MATHEMATICAL SCRIPT SMALL B +1D4B8 ; mapped ; 0063 # 3.1 MATHEMATICAL SCRIPT SMALL C +1D4B9 ; mapped ; 0064 # 3.1 MATHEMATICAL SCRIPT SMALL D +1D4BA ; disallowed # NA +1D4BB ; mapped ; 0066 # 3.1 MATHEMATICAL SCRIPT SMALL F +1D4BC ; disallowed # NA +1D4BD ; mapped ; 0068 # 3.1 MATHEMATICAL SCRIPT SMALL H +1D4BE ; mapped ; 0069 # 3.1 MATHEMATICAL SCRIPT SMALL I +1D4BF ; mapped ; 006A # 3.1 MATHEMATICAL SCRIPT SMALL J +1D4C0 ; mapped ; 006B # 3.1 MATHEMATICAL SCRIPT SMALL K +1D4C1 ; mapped ; 006C # 4.0 MATHEMATICAL SCRIPT SMALL L +1D4C2 ; mapped ; 006D # 3.1 MATHEMATICAL SCRIPT SMALL M +1D4C3 ; mapped ; 006E # 3.1 MATHEMATICAL SCRIPT SMALL N +1D4C4 ; disallowed # NA +1D4C5 ; mapped ; 0070 # 3.1 MATHEMATICAL SCRIPT SMALL P +1D4C6 ; mapped ; 0071 # 3.1 MATHEMATICAL SCRIPT SMALL Q +1D4C7 ; mapped ; 0072 # 3.1 MATHEMATICAL SCRIPT SMALL R +1D4C8 ; mapped ; 0073 # 3.1 MATHEMATICAL SCRIPT SMALL S +1D4C9 ; mapped ; 0074 # 3.1 MATHEMATICAL SCRIPT SMALL T +1D4CA ; mapped ; 0075 # 3.1 MATHEMATICAL SCRIPT SMALL U +1D4CB ; mapped ; 0076 # 3.1 MATHEMATICAL SCRIPT SMALL V +1D4CC ; mapped ; 0077 # 3.1 MATHEMATICAL SCRIPT SMALL W +1D4CD ; mapped ; 0078 # 3.1 MATHEMATICAL SCRIPT SMALL X +1D4CE ; mapped ; 0079 # 3.1 MATHEMATICAL SCRIPT SMALL Y +1D4CF ; mapped ; 007A # 3.1 MATHEMATICAL SCRIPT SMALL Z +1D4D0 ; mapped ; 0061 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL A +1D4D1 ; mapped ; 0062 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL B +1D4D2 ; mapped ; 0063 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL C +1D4D3 ; mapped ; 0064 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL D +1D4D4 ; mapped ; 0065 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL E +1D4D5 ; mapped ; 0066 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL F +1D4D6 ; mapped ; 0067 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL G +1D4D7 ; mapped ; 0068 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL H +1D4D8 ; mapped ; 0069 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL I +1D4D9 ; mapped ; 006A # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL J +1D4DA ; mapped ; 006B # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL K +1D4DB ; mapped ; 006C # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL L +1D4DC ; mapped ; 006D # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL M +1D4DD ; mapped ; 006E # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL N +1D4DE ; mapped ; 006F # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL O +1D4DF ; mapped ; 0070 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL P +1D4E0 ; mapped ; 0071 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL Q +1D4E1 ; mapped ; 0072 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL R +1D4E2 ; mapped ; 0073 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL S +1D4E3 ; mapped ; 0074 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL T +1D4E4 ; mapped ; 0075 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL U +1D4E5 ; mapped ; 0076 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL V +1D4E6 ; mapped ; 0077 # 
3.1 MATHEMATICAL BOLD SCRIPT CAPITAL W +1D4E7 ; mapped ; 0078 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL X +1D4E8 ; mapped ; 0079 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL Y +1D4E9 ; mapped ; 007A # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL Z +1D4EA ; mapped ; 0061 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL A +1D4EB ; mapped ; 0062 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL B +1D4EC ; mapped ; 0063 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL C +1D4ED ; mapped ; 0064 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL D +1D4EE ; mapped ; 0065 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL E +1D4EF ; mapped ; 0066 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL F +1D4F0 ; mapped ; 0067 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL G +1D4F1 ; mapped ; 0068 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL H +1D4F2 ; mapped ; 0069 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL I +1D4F3 ; mapped ; 006A # 3.1 MATHEMATICAL BOLD SCRIPT SMALL J +1D4F4 ; mapped ; 006B # 3.1 MATHEMATICAL BOLD SCRIPT SMALL K +1D4F5 ; mapped ; 006C # 3.1 MATHEMATICAL BOLD SCRIPT SMALL L +1D4F6 ; mapped ; 006D # 3.1 MATHEMATICAL BOLD SCRIPT SMALL M +1D4F7 ; mapped ; 006E # 3.1 MATHEMATICAL BOLD SCRIPT SMALL N +1D4F8 ; mapped ; 006F # 3.1 MATHEMATICAL BOLD SCRIPT SMALL O +1D4F9 ; mapped ; 0070 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL P +1D4FA ; mapped ; 0071 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL Q +1D4FB ; mapped ; 0072 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL R +1D4FC ; mapped ; 0073 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL S +1D4FD ; mapped ; 0074 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL T +1D4FE ; mapped ; 0075 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL U +1D4FF ; mapped ; 0076 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL V +1D500 ; mapped ; 0077 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL W +1D501 ; mapped ; 0078 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL X +1D502 ; mapped ; 0079 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL Y +1D503 ; mapped ; 007A # 3.1 MATHEMATICAL BOLD SCRIPT SMALL Z +1D504 ; mapped ; 0061 # 3.1 MATHEMATICAL FRAKTUR CAPITAL A +1D505 ; mapped ; 0062 # 3.1 MATHEMATICAL FRAKTUR CAPITAL B +1D506 ; disallowed # NA +1D507 ; mapped ; 0064 # 3.1 MATHEMATICAL FRAKTUR CAPITAL D +1D508 ; mapped ; 0065 # 3.1 MATHEMATICAL FRAKTUR CAPITAL E +1D509 ; mapped ; 0066 # 3.1 MATHEMATICAL FRAKTUR CAPITAL F +1D50A ; mapped ; 0067 # 3.1 MATHEMATICAL FRAKTUR CAPITAL G +1D50B..1D50C ; disallowed # NA .. 
+1D50D ; mapped ; 006A # 3.1 MATHEMATICAL FRAKTUR CAPITAL J +1D50E ; mapped ; 006B # 3.1 MATHEMATICAL FRAKTUR CAPITAL K +1D50F ; mapped ; 006C # 3.1 MATHEMATICAL FRAKTUR CAPITAL L +1D510 ; mapped ; 006D # 3.1 MATHEMATICAL FRAKTUR CAPITAL M +1D511 ; mapped ; 006E # 3.1 MATHEMATICAL FRAKTUR CAPITAL N +1D512 ; mapped ; 006F # 3.1 MATHEMATICAL FRAKTUR CAPITAL O +1D513 ; mapped ; 0070 # 3.1 MATHEMATICAL FRAKTUR CAPITAL P +1D514 ; mapped ; 0071 # 3.1 MATHEMATICAL FRAKTUR CAPITAL Q +1D515 ; disallowed # NA +1D516 ; mapped ; 0073 # 3.1 MATHEMATICAL FRAKTUR CAPITAL S +1D517 ; mapped ; 0074 # 3.1 MATHEMATICAL FRAKTUR CAPITAL T +1D518 ; mapped ; 0075 # 3.1 MATHEMATICAL FRAKTUR CAPITAL U +1D519 ; mapped ; 0076 # 3.1 MATHEMATICAL FRAKTUR CAPITAL V +1D51A ; mapped ; 0077 # 3.1 MATHEMATICAL FRAKTUR CAPITAL W +1D51B ; mapped ; 0078 # 3.1 MATHEMATICAL FRAKTUR CAPITAL X +1D51C ; mapped ; 0079 # 3.1 MATHEMATICAL FRAKTUR CAPITAL Y +1D51D ; disallowed # NA +1D51E ; mapped ; 0061 # 3.1 MATHEMATICAL FRAKTUR SMALL A +1D51F ; mapped ; 0062 # 3.1 MATHEMATICAL FRAKTUR SMALL B +1D520 ; mapped ; 0063 # 3.1 MATHEMATICAL FRAKTUR SMALL C +1D521 ; mapped ; 0064 # 3.1 MATHEMATICAL FRAKTUR SMALL D +1D522 ; mapped ; 0065 # 3.1 MATHEMATICAL FRAKTUR SMALL E +1D523 ; mapped ; 0066 # 3.1 MATHEMATICAL FRAKTUR SMALL F +1D524 ; mapped ; 0067 # 3.1 MATHEMATICAL FRAKTUR SMALL G +1D525 ; mapped ; 0068 # 3.1 MATHEMATICAL FRAKTUR SMALL H +1D526 ; mapped ; 0069 # 3.1 MATHEMATICAL FRAKTUR SMALL I +1D527 ; mapped ; 006A # 3.1 MATHEMATICAL FRAKTUR SMALL J +1D528 ; mapped ; 006B # 3.1 MATHEMATICAL FRAKTUR SMALL K +1D529 ; mapped ; 006C # 3.1 MATHEMATICAL FRAKTUR SMALL L +1D52A ; mapped ; 006D # 3.1 MATHEMATICAL FRAKTUR SMALL M +1D52B ; mapped ; 006E # 3.1 MATHEMATICAL FRAKTUR SMALL N +1D52C ; mapped ; 006F # 3.1 MATHEMATICAL FRAKTUR SMALL O +1D52D ; mapped ; 0070 # 3.1 MATHEMATICAL FRAKTUR SMALL P +1D52E ; mapped ; 0071 # 3.1 MATHEMATICAL FRAKTUR SMALL Q +1D52F ; mapped ; 0072 # 3.1 MATHEMATICAL FRAKTUR SMALL R +1D530 ; mapped ; 0073 # 3.1 MATHEMATICAL FRAKTUR SMALL S +1D531 ; mapped ; 0074 # 3.1 MATHEMATICAL FRAKTUR SMALL T +1D532 ; mapped ; 0075 # 3.1 MATHEMATICAL FRAKTUR SMALL U +1D533 ; mapped ; 0076 # 3.1 MATHEMATICAL FRAKTUR SMALL V +1D534 ; mapped ; 0077 # 3.1 MATHEMATICAL FRAKTUR SMALL W +1D535 ; mapped ; 0078 # 3.1 MATHEMATICAL FRAKTUR SMALL X +1D536 ; mapped ; 0079 # 3.1 MATHEMATICAL FRAKTUR SMALL Y +1D537 ; mapped ; 007A # 3.1 MATHEMATICAL FRAKTUR SMALL Z +1D538 ; mapped ; 0061 # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL A +1D539 ; mapped ; 0062 # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL B +1D53A ; disallowed # NA +1D53B ; mapped ; 0064 # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL D +1D53C ; mapped ; 0065 # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL E +1D53D ; mapped ; 0066 # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL F +1D53E ; mapped ; 0067 # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL G +1D53F ; disallowed # NA +1D540 ; mapped ; 0069 # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL I +1D541 ; mapped ; 006A # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL J +1D542 ; mapped ; 006B # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL K +1D543 ; mapped ; 006C # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL L +1D544 ; mapped ; 006D # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL M +1D545 ; disallowed # NA +1D546 ; mapped ; 006F # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL O +1D547..1D549 ; disallowed # NA .. 
+1D54A ; mapped ; 0073 # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL S +1D54B ; mapped ; 0074 # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL T +1D54C ; mapped ; 0075 # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL U +1D54D ; mapped ; 0076 # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL V +1D54E ; mapped ; 0077 # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL W +1D54F ; mapped ; 0078 # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL X +1D550 ; mapped ; 0079 # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL Y +1D551 ; disallowed # NA +1D552 ; mapped ; 0061 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL A +1D553 ; mapped ; 0062 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL B +1D554 ; mapped ; 0063 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL C +1D555 ; mapped ; 0064 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL D +1D556 ; mapped ; 0065 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL E +1D557 ; mapped ; 0066 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL F +1D558 ; mapped ; 0067 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL G +1D559 ; mapped ; 0068 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL H +1D55A ; mapped ; 0069 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL I +1D55B ; mapped ; 006A # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL J +1D55C ; mapped ; 006B # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL K +1D55D ; mapped ; 006C # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL L +1D55E ; mapped ; 006D # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL M +1D55F ; mapped ; 006E # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL N +1D560 ; mapped ; 006F # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL O +1D561 ; mapped ; 0070 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL P +1D562 ; mapped ; 0071 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL Q +1D563 ; mapped ; 0072 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL R +1D564 ; mapped ; 0073 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL S +1D565 ; mapped ; 0074 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL T +1D566 ; mapped ; 0075 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL U +1D567 ; mapped ; 0076 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL V +1D568 ; mapped ; 0077 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL W +1D569 ; mapped ; 0078 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL X +1D56A ; mapped ; 0079 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL Y +1D56B ; mapped ; 007A # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL Z +1D56C ; mapped ; 0061 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL A +1D56D ; mapped ; 0062 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL B +1D56E ; mapped ; 0063 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL C +1D56F ; mapped ; 0064 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL D +1D570 ; mapped ; 0065 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL E +1D571 ; mapped ; 0066 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL F +1D572 ; mapped ; 0067 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL G +1D573 ; mapped ; 0068 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL H +1D574 ; mapped ; 0069 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL I +1D575 ; mapped ; 006A # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL J +1D576 ; mapped ; 006B # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL K +1D577 ; mapped ; 006C # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL L +1D578 ; mapped ; 006D # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL M +1D579 ; mapped ; 006E # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL N +1D57A ; mapped ; 006F # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL O +1D57B ; mapped ; 0070 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL P +1D57C ; mapped ; 0071 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL Q +1D57D ; mapped ; 0072 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL R +1D57E ; mapped ; 0073 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL S +1D57F ; mapped ; 0074 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL T +1D580 ; mapped ; 0075 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL U +1D581 ; mapped ; 0076 # 3.1 
MATHEMATICAL BOLD FRAKTUR CAPITAL V +1D582 ; mapped ; 0077 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL W +1D583 ; mapped ; 0078 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL X +1D584 ; mapped ; 0079 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL Y +1D585 ; mapped ; 007A # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL Z +1D586 ; mapped ; 0061 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL A +1D587 ; mapped ; 0062 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL B +1D588 ; mapped ; 0063 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL C +1D589 ; mapped ; 0064 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL D +1D58A ; mapped ; 0065 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL E +1D58B ; mapped ; 0066 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL F +1D58C ; mapped ; 0067 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL G +1D58D ; mapped ; 0068 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL H +1D58E ; mapped ; 0069 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL I +1D58F ; mapped ; 006A # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL J +1D590 ; mapped ; 006B # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL K +1D591 ; mapped ; 006C # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL L +1D592 ; mapped ; 006D # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL M +1D593 ; mapped ; 006E # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL N +1D594 ; mapped ; 006F # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL O +1D595 ; mapped ; 0070 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL P +1D596 ; mapped ; 0071 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL Q +1D597 ; mapped ; 0072 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL R +1D598 ; mapped ; 0073 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL S +1D599 ; mapped ; 0074 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL T +1D59A ; mapped ; 0075 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL U +1D59B ; mapped ; 0076 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL V +1D59C ; mapped ; 0077 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL W +1D59D ; mapped ; 0078 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL X +1D59E ; mapped ; 0079 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL Y +1D59F ; mapped ; 007A # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL Z +1D5A0 ; mapped ; 0061 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL A +1D5A1 ; mapped ; 0062 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL B +1D5A2 ; mapped ; 0063 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL C +1D5A3 ; mapped ; 0064 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL D +1D5A4 ; mapped ; 0065 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL E +1D5A5 ; mapped ; 0066 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL F +1D5A6 ; mapped ; 0067 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL G +1D5A7 ; mapped ; 0068 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL H +1D5A8 ; mapped ; 0069 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL I +1D5A9 ; mapped ; 006A # 3.1 MATHEMATICAL SANS-SERIF CAPITAL J +1D5AA ; mapped ; 006B # 3.1 MATHEMATICAL SANS-SERIF CAPITAL K +1D5AB ; mapped ; 006C # 3.1 MATHEMATICAL SANS-SERIF CAPITAL L +1D5AC ; mapped ; 006D # 3.1 MATHEMATICAL SANS-SERIF CAPITAL M +1D5AD ; mapped ; 006E # 3.1 MATHEMATICAL SANS-SERIF CAPITAL N +1D5AE ; mapped ; 006F # 3.1 MATHEMATICAL SANS-SERIF CAPITAL O +1D5AF ; mapped ; 0070 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL P +1D5B0 ; mapped ; 0071 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL Q +1D5B1 ; mapped ; 0072 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL R +1D5B2 ; mapped ; 0073 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL S +1D5B3 ; mapped ; 0074 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL T +1D5B4 ; mapped ; 0075 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL U +1D5B5 ; mapped ; 0076 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL V +1D5B6 ; mapped ; 0077 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL W +1D5B7 ; mapped ; 0078 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL X +1D5B8 ; mapped ; 0079 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL Y +1D5B9 ; mapped ; 007A # 3.1 MATHEMATICAL 
SANS-SERIF CAPITAL Z +1D5BA ; mapped ; 0061 # 3.1 MATHEMATICAL SANS-SERIF SMALL A +1D5BB ; mapped ; 0062 # 3.1 MATHEMATICAL SANS-SERIF SMALL B +1D5BC ; mapped ; 0063 # 3.1 MATHEMATICAL SANS-SERIF SMALL C +1D5BD ; mapped ; 0064 # 3.1 MATHEMATICAL SANS-SERIF SMALL D +1D5BE ; mapped ; 0065 # 3.1 MATHEMATICAL SANS-SERIF SMALL E +1D5BF ; mapped ; 0066 # 3.1 MATHEMATICAL SANS-SERIF SMALL F +1D5C0 ; mapped ; 0067 # 3.1 MATHEMATICAL SANS-SERIF SMALL G +1D5C1 ; mapped ; 0068 # 3.1 MATHEMATICAL SANS-SERIF SMALL H +1D5C2 ; mapped ; 0069 # 3.1 MATHEMATICAL SANS-SERIF SMALL I +1D5C3 ; mapped ; 006A # 3.1 MATHEMATICAL SANS-SERIF SMALL J +1D5C4 ; mapped ; 006B # 3.1 MATHEMATICAL SANS-SERIF SMALL K +1D5C5 ; mapped ; 006C # 3.1 MATHEMATICAL SANS-SERIF SMALL L +1D5C6 ; mapped ; 006D # 3.1 MATHEMATICAL SANS-SERIF SMALL M +1D5C7 ; mapped ; 006E # 3.1 MATHEMATICAL SANS-SERIF SMALL N +1D5C8 ; mapped ; 006F # 3.1 MATHEMATICAL SANS-SERIF SMALL O +1D5C9 ; mapped ; 0070 # 3.1 MATHEMATICAL SANS-SERIF SMALL P +1D5CA ; mapped ; 0071 # 3.1 MATHEMATICAL SANS-SERIF SMALL Q +1D5CB ; mapped ; 0072 # 3.1 MATHEMATICAL SANS-SERIF SMALL R +1D5CC ; mapped ; 0073 # 3.1 MATHEMATICAL SANS-SERIF SMALL S +1D5CD ; mapped ; 0074 # 3.1 MATHEMATICAL SANS-SERIF SMALL T +1D5CE ; mapped ; 0075 # 3.1 MATHEMATICAL SANS-SERIF SMALL U +1D5CF ; mapped ; 0076 # 3.1 MATHEMATICAL SANS-SERIF SMALL V +1D5D0 ; mapped ; 0077 # 3.1 MATHEMATICAL SANS-SERIF SMALL W +1D5D1 ; mapped ; 0078 # 3.1 MATHEMATICAL SANS-SERIF SMALL X +1D5D2 ; mapped ; 0079 # 3.1 MATHEMATICAL SANS-SERIF SMALL Y +1D5D3 ; mapped ; 007A # 3.1 MATHEMATICAL SANS-SERIF SMALL Z +1D5D4 ; mapped ; 0061 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL A +1D5D5 ; mapped ; 0062 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL B +1D5D6 ; mapped ; 0063 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL C +1D5D7 ; mapped ; 0064 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL D +1D5D8 ; mapped ; 0065 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL E +1D5D9 ; mapped ; 0066 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL F +1D5DA ; mapped ; 0067 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL G +1D5DB ; mapped ; 0068 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL H +1D5DC ; mapped ; 0069 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL I +1D5DD ; mapped ; 006A # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL J +1D5DE ; mapped ; 006B # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL K +1D5DF ; mapped ; 006C # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL L +1D5E0 ; mapped ; 006D # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL M +1D5E1 ; mapped ; 006E # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL N +1D5E2 ; mapped ; 006F # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL O +1D5E3 ; mapped ; 0070 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL P +1D5E4 ; mapped ; 0071 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL Q +1D5E5 ; mapped ; 0072 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL R +1D5E6 ; mapped ; 0073 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL S +1D5E7 ; mapped ; 0074 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL T +1D5E8 ; mapped ; 0075 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL U +1D5E9 ; mapped ; 0076 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL V +1D5EA ; mapped ; 0077 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL W +1D5EB ; mapped ; 0078 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL X +1D5EC ; mapped ; 0079 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL Y +1D5ED ; mapped ; 007A # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL Z +1D5EE ; mapped ; 0061 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL A +1D5EF ; mapped ; 0062 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL B +1D5F0 ; mapped ; 0063 # 3.1 MATHEMATICAL 
SANS-SERIF BOLD SMALL C +1D5F1 ; mapped ; 0064 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL D +1D5F2 ; mapped ; 0065 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL E +1D5F3 ; mapped ; 0066 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL F +1D5F4 ; mapped ; 0067 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL G +1D5F5 ; mapped ; 0068 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL H +1D5F6 ; mapped ; 0069 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL I +1D5F7 ; mapped ; 006A # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL J +1D5F8 ; mapped ; 006B # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL K +1D5F9 ; mapped ; 006C # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL L +1D5FA ; mapped ; 006D # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL M +1D5FB ; mapped ; 006E # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL N +1D5FC ; mapped ; 006F # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL O +1D5FD ; mapped ; 0070 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL P +1D5FE ; mapped ; 0071 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL Q +1D5FF ; mapped ; 0072 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL R +1D600 ; mapped ; 0073 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL S +1D601 ; mapped ; 0074 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL T +1D602 ; mapped ; 0075 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL U +1D603 ; mapped ; 0076 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL V +1D604 ; mapped ; 0077 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL W +1D605 ; mapped ; 0078 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL X +1D606 ; mapped ; 0079 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL Y +1D607 ; mapped ; 007A # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL Z +1D608 ; mapped ; 0061 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL A +1D609 ; mapped ; 0062 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL B +1D60A ; mapped ; 0063 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL C +1D60B ; mapped ; 0064 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL D +1D60C ; mapped ; 0065 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL E +1D60D ; mapped ; 0066 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL F +1D60E ; mapped ; 0067 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL G +1D60F ; mapped ; 0068 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL H +1D610 ; mapped ; 0069 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL I +1D611 ; mapped ; 006A # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL J +1D612 ; mapped ; 006B # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL K +1D613 ; mapped ; 006C # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL L +1D614 ; mapped ; 006D # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL M +1D615 ; mapped ; 006E # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL N +1D616 ; mapped ; 006F # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL O +1D617 ; mapped ; 0070 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL P +1D618 ; mapped ; 0071 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL Q +1D619 ; mapped ; 0072 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL R +1D61A ; mapped ; 0073 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL S +1D61B ; mapped ; 0074 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL T +1D61C ; mapped ; 0075 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL U +1D61D ; mapped ; 0076 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL V +1D61E ; mapped ; 0077 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL W +1D61F ; mapped ; 0078 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL X +1D620 ; mapped ; 0079 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL Y +1D621 ; mapped ; 007A # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL Z +1D622 ; mapped ; 0061 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL A +1D623 ; mapped ; 0062 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL B +1D624 ; mapped ; 0063 # 3.1 MATHEMATICAL SANS-SERIF 
ITALIC SMALL C +1D625 ; mapped ; 0064 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL D +1D626 ; mapped ; 0065 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL E +1D627 ; mapped ; 0066 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL F +1D628 ; mapped ; 0067 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL G +1D629 ; mapped ; 0068 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL H +1D62A ; mapped ; 0069 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL I +1D62B ; mapped ; 006A # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL J +1D62C ; mapped ; 006B # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL K +1D62D ; mapped ; 006C # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL L +1D62E ; mapped ; 006D # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL M +1D62F ; mapped ; 006E # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL N +1D630 ; mapped ; 006F # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL O +1D631 ; mapped ; 0070 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL P +1D632 ; mapped ; 0071 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL Q +1D633 ; mapped ; 0072 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL R +1D634 ; mapped ; 0073 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL S +1D635 ; mapped ; 0074 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL T +1D636 ; mapped ; 0075 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL U +1D637 ; mapped ; 0076 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL V +1D638 ; mapped ; 0077 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL W +1D639 ; mapped ; 0078 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL X +1D63A ; mapped ; 0079 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL Y +1D63B ; mapped ; 007A # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL Z +1D63C ; mapped ; 0061 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL A +1D63D ; mapped ; 0062 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL B +1D63E ; mapped ; 0063 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL C +1D63F ; mapped ; 0064 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL D +1D640 ; mapped ; 0065 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL E +1D641 ; mapped ; 0066 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL F +1D642 ; mapped ; 0067 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL G +1D643 ; mapped ; 0068 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL H +1D644 ; mapped ; 0069 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL I +1D645 ; mapped ; 006A # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL J +1D646 ; mapped ; 006B # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL K +1D647 ; mapped ; 006C # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL L +1D648 ; mapped ; 006D # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL M +1D649 ; mapped ; 006E # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL N +1D64A ; mapped ; 006F # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL O +1D64B ; mapped ; 0070 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL P +1D64C ; mapped ; 0071 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL Q +1D64D ; mapped ; 0072 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL R +1D64E ; mapped ; 0073 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL S +1D64F ; mapped ; 0074 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL T +1D650 ; mapped ; 0075 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL U +1D651 ; mapped ; 0076 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL V +1D652 ; mapped ; 0077 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL W +1D653 ; mapped ; 0078 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL X +1D654 ; mapped ; 0079 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL Y +1D655 ; mapped ; 007A # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL Z +1D656 ; mapped ; 0061 # 
3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL A +1D657 ; mapped ; 0062 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL B +1D658 ; mapped ; 0063 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL C +1D659 ; mapped ; 0064 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL D +1D65A ; mapped ; 0065 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL E +1D65B ; mapped ; 0066 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL F +1D65C ; mapped ; 0067 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL G +1D65D ; mapped ; 0068 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL H +1D65E ; mapped ; 0069 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL I +1D65F ; mapped ; 006A # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL J +1D660 ; mapped ; 006B # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL K +1D661 ; mapped ; 006C # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL L +1D662 ; mapped ; 006D # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL M +1D663 ; mapped ; 006E # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL N +1D664 ; mapped ; 006F # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL O +1D665 ; mapped ; 0070 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL P +1D666 ; mapped ; 0071 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL Q +1D667 ; mapped ; 0072 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL R +1D668 ; mapped ; 0073 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL S +1D669 ; mapped ; 0074 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL T +1D66A ; mapped ; 0075 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL U +1D66B ; mapped ; 0076 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL V +1D66C ; mapped ; 0077 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL W +1D66D ; mapped ; 0078 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL X +1D66E ; mapped ; 0079 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL Y +1D66F ; mapped ; 007A # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL Z +1D670 ; mapped ; 0061 # 3.1 MATHEMATICAL MONOSPACE CAPITAL A +1D671 ; mapped ; 0062 # 3.1 MATHEMATICAL MONOSPACE CAPITAL B +1D672 ; mapped ; 0063 # 3.1 MATHEMATICAL MONOSPACE CAPITAL C +1D673 ; mapped ; 0064 # 3.1 MATHEMATICAL MONOSPACE CAPITAL D +1D674 ; mapped ; 0065 # 3.1 MATHEMATICAL MONOSPACE CAPITAL E +1D675 ; mapped ; 0066 # 3.1 MATHEMATICAL MONOSPACE CAPITAL F +1D676 ; mapped ; 0067 # 3.1 MATHEMATICAL MONOSPACE CAPITAL G +1D677 ; mapped ; 0068 # 3.1 MATHEMATICAL MONOSPACE CAPITAL H +1D678 ; mapped ; 0069 # 3.1 MATHEMATICAL MONOSPACE CAPITAL I +1D679 ; mapped ; 006A # 3.1 MATHEMATICAL MONOSPACE CAPITAL J +1D67A ; mapped ; 006B # 3.1 MATHEMATICAL MONOSPACE CAPITAL K +1D67B ; mapped ; 006C # 3.1 MATHEMATICAL MONOSPACE CAPITAL L +1D67C ; mapped ; 006D # 3.1 MATHEMATICAL MONOSPACE CAPITAL M +1D67D ; mapped ; 006E # 3.1 MATHEMATICAL MONOSPACE CAPITAL N +1D67E ; mapped ; 006F # 3.1 MATHEMATICAL MONOSPACE CAPITAL O +1D67F ; mapped ; 0070 # 3.1 MATHEMATICAL MONOSPACE CAPITAL P +1D680 ; mapped ; 0071 # 3.1 MATHEMATICAL MONOSPACE CAPITAL Q +1D681 ; mapped ; 0072 # 3.1 MATHEMATICAL MONOSPACE CAPITAL R +1D682 ; mapped ; 0073 # 3.1 MATHEMATICAL MONOSPACE CAPITAL S +1D683 ; mapped ; 0074 # 3.1 MATHEMATICAL MONOSPACE CAPITAL T +1D684 ; mapped ; 0075 # 3.1 MATHEMATICAL MONOSPACE CAPITAL U +1D685 ; mapped ; 0076 # 3.1 MATHEMATICAL MONOSPACE CAPITAL V +1D686 ; mapped ; 0077 # 3.1 MATHEMATICAL MONOSPACE CAPITAL W +1D687 ; mapped ; 0078 # 3.1 MATHEMATICAL MONOSPACE CAPITAL X +1D688 ; mapped ; 0079 # 3.1 MATHEMATICAL MONOSPACE CAPITAL Y +1D689 ; mapped ; 007A # 3.1 MATHEMATICAL MONOSPACE CAPITAL Z +1D68A ; mapped ; 0061 # 3.1 MATHEMATICAL MONOSPACE SMALL A +1D68B ; 
mapped ; 0062 # 3.1 MATHEMATICAL MONOSPACE SMALL B +1D68C ; mapped ; 0063 # 3.1 MATHEMATICAL MONOSPACE SMALL C +1D68D ; mapped ; 0064 # 3.1 MATHEMATICAL MONOSPACE SMALL D +1D68E ; mapped ; 0065 # 3.1 MATHEMATICAL MONOSPACE SMALL E +1D68F ; mapped ; 0066 # 3.1 MATHEMATICAL MONOSPACE SMALL F +1D690 ; mapped ; 0067 # 3.1 MATHEMATICAL MONOSPACE SMALL G +1D691 ; mapped ; 0068 # 3.1 MATHEMATICAL MONOSPACE SMALL H +1D692 ; mapped ; 0069 # 3.1 MATHEMATICAL MONOSPACE SMALL I +1D693 ; mapped ; 006A # 3.1 MATHEMATICAL MONOSPACE SMALL J +1D694 ; mapped ; 006B # 3.1 MATHEMATICAL MONOSPACE SMALL K +1D695 ; mapped ; 006C # 3.1 MATHEMATICAL MONOSPACE SMALL L +1D696 ; mapped ; 006D # 3.1 MATHEMATICAL MONOSPACE SMALL M +1D697 ; mapped ; 006E # 3.1 MATHEMATICAL MONOSPACE SMALL N +1D698 ; mapped ; 006F # 3.1 MATHEMATICAL MONOSPACE SMALL O +1D699 ; mapped ; 0070 # 3.1 MATHEMATICAL MONOSPACE SMALL P +1D69A ; mapped ; 0071 # 3.1 MATHEMATICAL MONOSPACE SMALL Q +1D69B ; mapped ; 0072 # 3.1 MATHEMATICAL MONOSPACE SMALL R +1D69C ; mapped ; 0073 # 3.1 MATHEMATICAL MONOSPACE SMALL S +1D69D ; mapped ; 0074 # 3.1 MATHEMATICAL MONOSPACE SMALL T +1D69E ; mapped ; 0075 # 3.1 MATHEMATICAL MONOSPACE SMALL U +1D69F ; mapped ; 0076 # 3.1 MATHEMATICAL MONOSPACE SMALL V +1D6A0 ; mapped ; 0077 # 3.1 MATHEMATICAL MONOSPACE SMALL W +1D6A1 ; mapped ; 0078 # 3.1 MATHEMATICAL MONOSPACE SMALL X +1D6A2 ; mapped ; 0079 # 3.1 MATHEMATICAL MONOSPACE SMALL Y +1D6A3 ; mapped ; 007A # 3.1 MATHEMATICAL MONOSPACE SMALL Z +1D6A4 ; mapped ; 0131 # 4.1 MATHEMATICAL ITALIC SMALL DOTLESS I +1D6A5 ; mapped ; 0237 # 4.1 MATHEMATICAL ITALIC SMALL DOTLESS J +1D6A6..1D6A7 ; disallowed # NA .. +1D6A8 ; mapped ; 03B1 # 3.1 MATHEMATICAL BOLD CAPITAL ALPHA +1D6A9 ; mapped ; 03B2 # 3.1 MATHEMATICAL BOLD CAPITAL BETA +1D6AA ; mapped ; 03B3 # 3.1 MATHEMATICAL BOLD CAPITAL GAMMA +1D6AB ; mapped ; 03B4 # 3.1 MATHEMATICAL BOLD CAPITAL DELTA +1D6AC ; mapped ; 03B5 # 3.1 MATHEMATICAL BOLD CAPITAL EPSILON +1D6AD ; mapped ; 03B6 # 3.1 MATHEMATICAL BOLD CAPITAL ZETA +1D6AE ; mapped ; 03B7 # 3.1 MATHEMATICAL BOLD CAPITAL ETA +1D6AF ; mapped ; 03B8 # 3.1 MATHEMATICAL BOLD CAPITAL THETA +1D6B0 ; mapped ; 03B9 # 3.1 MATHEMATICAL BOLD CAPITAL IOTA +1D6B1 ; mapped ; 03BA # 3.1 MATHEMATICAL BOLD CAPITAL KAPPA +1D6B2 ; mapped ; 03BB # 3.1 MATHEMATICAL BOLD CAPITAL LAMDA +1D6B3 ; mapped ; 03BC # 3.1 MATHEMATICAL BOLD CAPITAL MU +1D6B4 ; mapped ; 03BD # 3.1 MATHEMATICAL BOLD CAPITAL NU +1D6B5 ; mapped ; 03BE # 3.1 MATHEMATICAL BOLD CAPITAL XI +1D6B6 ; mapped ; 03BF # 3.1 MATHEMATICAL BOLD CAPITAL OMICRON +1D6B7 ; mapped ; 03C0 # 3.1 MATHEMATICAL BOLD CAPITAL PI +1D6B8 ; mapped ; 03C1 # 3.1 MATHEMATICAL BOLD CAPITAL RHO +1D6B9 ; mapped ; 03B8 # 3.1 MATHEMATICAL BOLD CAPITAL THETA SYMBOL +1D6BA ; mapped ; 03C3 # 3.1 MATHEMATICAL BOLD CAPITAL SIGMA +1D6BB ; mapped ; 03C4 # 3.1 MATHEMATICAL BOLD CAPITAL TAU +1D6BC ; mapped ; 03C5 # 3.1 MATHEMATICAL BOLD CAPITAL UPSILON +1D6BD ; mapped ; 03C6 # 3.1 MATHEMATICAL BOLD CAPITAL PHI +1D6BE ; mapped ; 03C7 # 3.1 MATHEMATICAL BOLD CAPITAL CHI +1D6BF ; mapped ; 03C8 # 3.1 MATHEMATICAL BOLD CAPITAL PSI +1D6C0 ; mapped ; 03C9 # 3.1 MATHEMATICAL BOLD CAPITAL OMEGA +1D6C1 ; mapped ; 2207 # 3.1 MATHEMATICAL BOLD NABLA +1D6C2 ; mapped ; 03B1 # 3.1 MATHEMATICAL BOLD SMALL ALPHA +1D6C3 ; mapped ; 03B2 # 3.1 MATHEMATICAL BOLD SMALL BETA +1D6C4 ; mapped ; 03B3 # 3.1 MATHEMATICAL BOLD SMALL GAMMA +1D6C5 ; mapped ; 03B4 # 3.1 MATHEMATICAL BOLD SMALL DELTA +1D6C6 ; mapped ; 03B5 # 3.1 MATHEMATICAL BOLD SMALL EPSILON +1D6C7 ; mapped ; 03B6 # 3.1 
MATHEMATICAL BOLD SMALL ZETA +1D6C8 ; mapped ; 03B7 # 3.1 MATHEMATICAL BOLD SMALL ETA +1D6C9 ; mapped ; 03B8 # 3.1 MATHEMATICAL BOLD SMALL THETA +1D6CA ; mapped ; 03B9 # 3.1 MATHEMATICAL BOLD SMALL IOTA +1D6CB ; mapped ; 03BA # 3.1 MATHEMATICAL BOLD SMALL KAPPA +1D6CC ; mapped ; 03BB # 3.1 MATHEMATICAL BOLD SMALL LAMDA +1D6CD ; mapped ; 03BC # 3.1 MATHEMATICAL BOLD SMALL MU +1D6CE ; mapped ; 03BD # 3.1 MATHEMATICAL BOLD SMALL NU +1D6CF ; mapped ; 03BE # 3.1 MATHEMATICAL BOLD SMALL XI +1D6D0 ; mapped ; 03BF # 3.1 MATHEMATICAL BOLD SMALL OMICRON +1D6D1 ; mapped ; 03C0 # 3.1 MATHEMATICAL BOLD SMALL PI +1D6D2 ; mapped ; 03C1 # 3.1 MATHEMATICAL BOLD SMALL RHO +1D6D3..1D6D4 ; mapped ; 03C3 # 3.1 MATHEMATICAL BOLD SMALL FINAL SIGMA..MATHEMATICAL BOLD SMALL SIGMA +1D6D5 ; mapped ; 03C4 # 3.1 MATHEMATICAL BOLD SMALL TAU +1D6D6 ; mapped ; 03C5 # 3.1 MATHEMATICAL BOLD SMALL UPSILON +1D6D7 ; mapped ; 03C6 # 3.1 MATHEMATICAL BOLD SMALL PHI +1D6D8 ; mapped ; 03C7 # 3.1 MATHEMATICAL BOLD SMALL CHI +1D6D9 ; mapped ; 03C8 # 3.1 MATHEMATICAL BOLD SMALL PSI +1D6DA ; mapped ; 03C9 # 3.1 MATHEMATICAL BOLD SMALL OMEGA +1D6DB ; mapped ; 2202 # 3.1 MATHEMATICAL BOLD PARTIAL DIFFERENTIAL +1D6DC ; mapped ; 03B5 # 3.1 MATHEMATICAL BOLD EPSILON SYMBOL +1D6DD ; mapped ; 03B8 # 3.1 MATHEMATICAL BOLD THETA SYMBOL +1D6DE ; mapped ; 03BA # 3.1 MATHEMATICAL BOLD KAPPA SYMBOL +1D6DF ; mapped ; 03C6 # 3.1 MATHEMATICAL BOLD PHI SYMBOL +1D6E0 ; mapped ; 03C1 # 3.1 MATHEMATICAL BOLD RHO SYMBOL +1D6E1 ; mapped ; 03C0 # 3.1 MATHEMATICAL BOLD PI SYMBOL +1D6E2 ; mapped ; 03B1 # 3.1 MATHEMATICAL ITALIC CAPITAL ALPHA +1D6E3 ; mapped ; 03B2 # 3.1 MATHEMATICAL ITALIC CAPITAL BETA +1D6E4 ; mapped ; 03B3 # 3.1 MATHEMATICAL ITALIC CAPITAL GAMMA +1D6E5 ; mapped ; 03B4 # 3.1 MATHEMATICAL ITALIC CAPITAL DELTA +1D6E6 ; mapped ; 03B5 # 3.1 MATHEMATICAL ITALIC CAPITAL EPSILON +1D6E7 ; mapped ; 03B6 # 3.1 MATHEMATICAL ITALIC CAPITAL ZETA +1D6E8 ; mapped ; 03B7 # 3.1 MATHEMATICAL ITALIC CAPITAL ETA +1D6E9 ; mapped ; 03B8 # 3.1 MATHEMATICAL ITALIC CAPITAL THETA +1D6EA ; mapped ; 03B9 # 3.1 MATHEMATICAL ITALIC CAPITAL IOTA +1D6EB ; mapped ; 03BA # 3.1 MATHEMATICAL ITALIC CAPITAL KAPPA +1D6EC ; mapped ; 03BB # 3.1 MATHEMATICAL ITALIC CAPITAL LAMDA +1D6ED ; mapped ; 03BC # 3.1 MATHEMATICAL ITALIC CAPITAL MU +1D6EE ; mapped ; 03BD # 3.1 MATHEMATICAL ITALIC CAPITAL NU +1D6EF ; mapped ; 03BE # 3.1 MATHEMATICAL ITALIC CAPITAL XI +1D6F0 ; mapped ; 03BF # 3.1 MATHEMATICAL ITALIC CAPITAL OMICRON +1D6F1 ; mapped ; 03C0 # 3.1 MATHEMATICAL ITALIC CAPITAL PI +1D6F2 ; mapped ; 03C1 # 3.1 MATHEMATICAL ITALIC CAPITAL RHO +1D6F3 ; mapped ; 03B8 # 3.1 MATHEMATICAL ITALIC CAPITAL THETA SYMBOL +1D6F4 ; mapped ; 03C3 # 3.1 MATHEMATICAL ITALIC CAPITAL SIGMA +1D6F5 ; mapped ; 03C4 # 3.1 MATHEMATICAL ITALIC CAPITAL TAU +1D6F6 ; mapped ; 03C5 # 3.1 MATHEMATICAL ITALIC CAPITAL UPSILON +1D6F7 ; mapped ; 03C6 # 3.1 MATHEMATICAL ITALIC CAPITAL PHI +1D6F8 ; mapped ; 03C7 # 3.1 MATHEMATICAL ITALIC CAPITAL CHI +1D6F9 ; mapped ; 03C8 # 3.1 MATHEMATICAL ITALIC CAPITAL PSI +1D6FA ; mapped ; 03C9 # 3.1 MATHEMATICAL ITALIC CAPITAL OMEGA +1D6FB ; mapped ; 2207 # 3.1 MATHEMATICAL ITALIC NABLA +1D6FC ; mapped ; 03B1 # 3.1 MATHEMATICAL ITALIC SMALL ALPHA +1D6FD ; mapped ; 03B2 # 3.1 MATHEMATICAL ITALIC SMALL BETA +1D6FE ; mapped ; 03B3 # 3.1 MATHEMATICAL ITALIC SMALL GAMMA +1D6FF ; mapped ; 03B4 # 3.1 MATHEMATICAL ITALIC SMALL DELTA +1D700 ; mapped ; 03B5 # 3.1 MATHEMATICAL ITALIC SMALL EPSILON +1D701 ; mapped ; 03B6 # 3.1 MATHEMATICAL ITALIC SMALL ZETA +1D702 ; mapped ; 03B7 # 3.1 
MATHEMATICAL ITALIC SMALL ETA +1D703 ; mapped ; 03B8 # 3.1 MATHEMATICAL ITALIC SMALL THETA +1D704 ; mapped ; 03B9 # 3.1 MATHEMATICAL ITALIC SMALL IOTA +1D705 ; mapped ; 03BA # 3.1 MATHEMATICAL ITALIC SMALL KAPPA +1D706 ; mapped ; 03BB # 3.1 MATHEMATICAL ITALIC SMALL LAMDA +1D707 ; mapped ; 03BC # 3.1 MATHEMATICAL ITALIC SMALL MU +1D708 ; mapped ; 03BD # 3.1 MATHEMATICAL ITALIC SMALL NU +1D709 ; mapped ; 03BE # 3.1 MATHEMATICAL ITALIC SMALL XI +1D70A ; mapped ; 03BF # 3.1 MATHEMATICAL ITALIC SMALL OMICRON +1D70B ; mapped ; 03C0 # 3.1 MATHEMATICAL ITALIC SMALL PI +1D70C ; mapped ; 03C1 # 3.1 MATHEMATICAL ITALIC SMALL RHO +1D70D..1D70E ; mapped ; 03C3 # 3.1 MATHEMATICAL ITALIC SMALL FINAL SIGMA..MATHEMATICAL ITALIC SMALL SIGMA +1D70F ; mapped ; 03C4 # 3.1 MATHEMATICAL ITALIC SMALL TAU +1D710 ; mapped ; 03C5 # 3.1 MATHEMATICAL ITALIC SMALL UPSILON +1D711 ; mapped ; 03C6 # 3.1 MATHEMATICAL ITALIC SMALL PHI +1D712 ; mapped ; 03C7 # 3.1 MATHEMATICAL ITALIC SMALL CHI +1D713 ; mapped ; 03C8 # 3.1 MATHEMATICAL ITALIC SMALL PSI +1D714 ; mapped ; 03C9 # 3.1 MATHEMATICAL ITALIC SMALL OMEGA +1D715 ; mapped ; 2202 # 3.1 MATHEMATICAL ITALIC PARTIAL DIFFERENTIAL +1D716 ; mapped ; 03B5 # 3.1 MATHEMATICAL ITALIC EPSILON SYMBOL +1D717 ; mapped ; 03B8 # 3.1 MATHEMATICAL ITALIC THETA SYMBOL +1D718 ; mapped ; 03BA # 3.1 MATHEMATICAL ITALIC KAPPA SYMBOL +1D719 ; mapped ; 03C6 # 3.1 MATHEMATICAL ITALIC PHI SYMBOL +1D71A ; mapped ; 03C1 # 3.1 MATHEMATICAL ITALIC RHO SYMBOL +1D71B ; mapped ; 03C0 # 3.1 MATHEMATICAL ITALIC PI SYMBOL +1D71C ; mapped ; 03B1 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL ALPHA +1D71D ; mapped ; 03B2 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL BETA +1D71E ; mapped ; 03B3 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL GAMMA +1D71F ; mapped ; 03B4 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL DELTA +1D720 ; mapped ; 03B5 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL EPSILON +1D721 ; mapped ; 03B6 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL ZETA +1D722 ; mapped ; 03B7 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL ETA +1D723 ; mapped ; 03B8 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL THETA +1D724 ; mapped ; 03B9 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL IOTA +1D725 ; mapped ; 03BA # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL KAPPA +1D726 ; mapped ; 03BB # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL LAMDA +1D727 ; mapped ; 03BC # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL MU +1D728 ; mapped ; 03BD # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL NU +1D729 ; mapped ; 03BE # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL XI +1D72A ; mapped ; 03BF # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL OMICRON +1D72B ; mapped ; 03C0 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL PI +1D72C ; mapped ; 03C1 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL RHO +1D72D ; mapped ; 03B8 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL THETA SYMBOL +1D72E ; mapped ; 03C3 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL SIGMA +1D72F ; mapped ; 03C4 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL TAU +1D730 ; mapped ; 03C5 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL UPSILON +1D731 ; mapped ; 03C6 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL PHI +1D732 ; mapped ; 03C7 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL CHI +1D733 ; mapped ; 03C8 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL PSI +1D734 ; mapped ; 03C9 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL OMEGA +1D735 ; mapped ; 2207 # 3.1 MATHEMATICAL BOLD ITALIC NABLA +1D736 ; mapped ; 03B1 # 3.1 MATHEMATICAL BOLD ITALIC SMALL ALPHA +1D737 ; mapped ; 03B2 # 3.1 MATHEMATICAL BOLD ITALIC SMALL BETA +1D738 ; mapped ; 03B3 # 3.1 MATHEMATICAL BOLD ITALIC SMALL GAMMA +1D739 ; mapped ; 03B4 # 3.1 MATHEMATICAL BOLD ITALIC SMALL DELTA +1D73A ; 
mapped ; 03B5 # 3.1 MATHEMATICAL BOLD ITALIC SMALL EPSILON +1D73B ; mapped ; 03B6 # 3.1 MATHEMATICAL BOLD ITALIC SMALL ZETA +1D73C ; mapped ; 03B7 # 3.1 MATHEMATICAL BOLD ITALIC SMALL ETA +1D73D ; mapped ; 03B8 # 3.1 MATHEMATICAL BOLD ITALIC SMALL THETA +1D73E ; mapped ; 03B9 # 3.1 MATHEMATICAL BOLD ITALIC SMALL IOTA +1D73F ; mapped ; 03BA # 3.1 MATHEMATICAL BOLD ITALIC SMALL KAPPA +1D740 ; mapped ; 03BB # 3.1 MATHEMATICAL BOLD ITALIC SMALL LAMDA +1D741 ; mapped ; 03BC # 3.1 MATHEMATICAL BOLD ITALIC SMALL MU +1D742 ; mapped ; 03BD # 3.1 MATHEMATICAL BOLD ITALIC SMALL NU +1D743 ; mapped ; 03BE # 3.1 MATHEMATICAL BOLD ITALIC SMALL XI +1D744 ; mapped ; 03BF # 3.1 MATHEMATICAL BOLD ITALIC SMALL OMICRON +1D745 ; mapped ; 03C0 # 3.1 MATHEMATICAL BOLD ITALIC SMALL PI +1D746 ; mapped ; 03C1 # 3.1 MATHEMATICAL BOLD ITALIC SMALL RHO +1D747..1D748 ; mapped ; 03C3 # 3.1 MATHEMATICAL BOLD ITALIC SMALL FINAL SIGMA..MATHEMATICAL BOLD ITALIC SMALL SIGMA +1D749 ; mapped ; 03C4 # 3.1 MATHEMATICAL BOLD ITALIC SMALL TAU +1D74A ; mapped ; 03C5 # 3.1 MATHEMATICAL BOLD ITALIC SMALL UPSILON +1D74B ; mapped ; 03C6 # 3.1 MATHEMATICAL BOLD ITALIC SMALL PHI +1D74C ; mapped ; 03C7 # 3.1 MATHEMATICAL BOLD ITALIC SMALL CHI +1D74D ; mapped ; 03C8 # 3.1 MATHEMATICAL BOLD ITALIC SMALL PSI +1D74E ; mapped ; 03C9 # 3.1 MATHEMATICAL BOLD ITALIC SMALL OMEGA +1D74F ; mapped ; 2202 # 3.1 MATHEMATICAL BOLD ITALIC PARTIAL DIFFERENTIAL +1D750 ; mapped ; 03B5 # 3.1 MATHEMATICAL BOLD ITALIC EPSILON SYMBOL +1D751 ; mapped ; 03B8 # 3.1 MATHEMATICAL BOLD ITALIC THETA SYMBOL +1D752 ; mapped ; 03BA # 3.1 MATHEMATICAL BOLD ITALIC KAPPA SYMBOL +1D753 ; mapped ; 03C6 # 3.1 MATHEMATICAL BOLD ITALIC PHI SYMBOL +1D754 ; mapped ; 03C1 # 3.1 MATHEMATICAL BOLD ITALIC RHO SYMBOL +1D755 ; mapped ; 03C0 # 3.1 MATHEMATICAL BOLD ITALIC PI SYMBOL +1D756 ; mapped ; 03B1 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL ALPHA +1D757 ; mapped ; 03B2 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL BETA +1D758 ; mapped ; 03B3 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL GAMMA +1D759 ; mapped ; 03B4 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL DELTA +1D75A ; mapped ; 03B5 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL EPSILON +1D75B ; mapped ; 03B6 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL ZETA +1D75C ; mapped ; 03B7 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL ETA +1D75D ; mapped ; 03B8 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL THETA +1D75E ; mapped ; 03B9 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL IOTA +1D75F ; mapped ; 03BA # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL KAPPA +1D760 ; mapped ; 03BB # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL LAMDA +1D761 ; mapped ; 03BC # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL MU +1D762 ; mapped ; 03BD # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL NU +1D763 ; mapped ; 03BE # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL XI +1D764 ; mapped ; 03BF # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL OMICRON +1D765 ; mapped ; 03C0 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL PI +1D766 ; mapped ; 03C1 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL RHO +1D767 ; mapped ; 03B8 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL THETA SYMBOL +1D768 ; mapped ; 03C3 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL SIGMA +1D769 ; mapped ; 03C4 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL TAU +1D76A ; mapped ; 03C5 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL UPSILON +1D76B ; mapped ; 03C6 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL PHI +1D76C ; mapped ; 03C7 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL CHI +1D76D ; mapped ; 03C8 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL PSI +1D76E ; mapped ; 03C9 # 
3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL OMEGA +1D76F ; mapped ; 2207 # 3.1 MATHEMATICAL SANS-SERIF BOLD NABLA +1D770 ; mapped ; 03B1 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL ALPHA +1D771 ; mapped ; 03B2 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL BETA +1D772 ; mapped ; 03B3 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL GAMMA +1D773 ; mapped ; 03B4 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL DELTA +1D774 ; mapped ; 03B5 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL EPSILON +1D775 ; mapped ; 03B6 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL ZETA +1D776 ; mapped ; 03B7 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL ETA +1D777 ; mapped ; 03B8 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL THETA +1D778 ; mapped ; 03B9 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL IOTA +1D779 ; mapped ; 03BA # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL KAPPA +1D77A ; mapped ; 03BB # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL LAMDA +1D77B ; mapped ; 03BC # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL MU +1D77C ; mapped ; 03BD # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL NU +1D77D ; mapped ; 03BE # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL XI +1D77E ; mapped ; 03BF # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL OMICRON +1D77F ; mapped ; 03C0 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL PI +1D780 ; mapped ; 03C1 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL RHO +1D781..1D782 ; mapped ; 03C3 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL FINAL SIGMA..MATHEMATICAL SANS-SERIF BOLD SMALL SIGMA +1D783 ; mapped ; 03C4 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL TAU +1D784 ; mapped ; 03C5 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL UPSILON +1D785 ; mapped ; 03C6 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL PHI +1D786 ; mapped ; 03C7 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL CHI +1D787 ; mapped ; 03C8 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL PSI +1D788 ; mapped ; 03C9 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL OMEGA +1D789 ; mapped ; 2202 # 3.1 MATHEMATICAL SANS-SERIF BOLD PARTIAL DIFFERENTIAL +1D78A ; mapped ; 03B5 # 3.1 MATHEMATICAL SANS-SERIF BOLD EPSILON SYMBOL +1D78B ; mapped ; 03B8 # 3.1 MATHEMATICAL SANS-SERIF BOLD THETA SYMBOL +1D78C ; mapped ; 03BA # 3.1 MATHEMATICAL SANS-SERIF BOLD KAPPA SYMBOL +1D78D ; mapped ; 03C6 # 3.1 MATHEMATICAL SANS-SERIF BOLD PHI SYMBOL +1D78E ; mapped ; 03C1 # 3.1 MATHEMATICAL SANS-SERIF BOLD RHO SYMBOL +1D78F ; mapped ; 03C0 # 3.1 MATHEMATICAL SANS-SERIF BOLD PI SYMBOL +1D790 ; mapped ; 03B1 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL ALPHA +1D791 ; mapped ; 03B2 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL BETA +1D792 ; mapped ; 03B3 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL GAMMA +1D793 ; mapped ; 03B4 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL DELTA +1D794 ; mapped ; 03B5 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL EPSILON +1D795 ; mapped ; 03B6 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL ZETA +1D796 ; mapped ; 03B7 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL ETA +1D797 ; mapped ; 03B8 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL THETA +1D798 ; mapped ; 03B9 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL IOTA +1D799 ; mapped ; 03BA # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL KAPPA +1D79A ; mapped ; 03BB # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL LAMDA +1D79B ; mapped ; 03BC # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL MU +1D79C ; mapped ; 03BD # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL NU +1D79D ; mapped ; 03BE # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL XI +1D79E ; mapped ; 03BF # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL OMICRON +1D79F ; mapped ; 03C0 # 3.1 MATHEMATICAL SANS-SERIF 
BOLD ITALIC CAPITAL PI +1D7A0 ; mapped ; 03C1 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL RHO +1D7A1 ; mapped ; 03B8 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL THETA SYMBOL +1D7A2 ; mapped ; 03C3 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL SIGMA +1D7A3 ; mapped ; 03C4 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL TAU +1D7A4 ; mapped ; 03C5 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL UPSILON +1D7A5 ; mapped ; 03C6 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL PHI +1D7A6 ; mapped ; 03C7 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL CHI +1D7A7 ; mapped ; 03C8 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL PSI +1D7A8 ; mapped ; 03C9 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL OMEGA +1D7A9 ; mapped ; 2207 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC NABLA +1D7AA ; mapped ; 03B1 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL ALPHA +1D7AB ; mapped ; 03B2 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL BETA +1D7AC ; mapped ; 03B3 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL GAMMA +1D7AD ; mapped ; 03B4 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL DELTA +1D7AE ; mapped ; 03B5 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL EPSILON +1D7AF ; mapped ; 03B6 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL ZETA +1D7B0 ; mapped ; 03B7 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL ETA +1D7B1 ; mapped ; 03B8 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL THETA +1D7B2 ; mapped ; 03B9 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL IOTA +1D7B3 ; mapped ; 03BA # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL KAPPA +1D7B4 ; mapped ; 03BB # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL LAMDA +1D7B5 ; mapped ; 03BC # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL MU +1D7B6 ; mapped ; 03BD # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL NU +1D7B7 ; mapped ; 03BE # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL XI +1D7B8 ; mapped ; 03BF # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL OMICRON +1D7B9 ; mapped ; 03C0 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL PI +1D7BA ; mapped ; 03C1 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL RHO +1D7BB..1D7BC ; mapped ; 03C3 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL FINAL SIGMA..MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL SIGMA +1D7BD ; mapped ; 03C4 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL TAU +1D7BE ; mapped ; 03C5 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL UPSILON +1D7BF ; mapped ; 03C6 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL PHI +1D7C0 ; mapped ; 03C7 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL CHI +1D7C1 ; mapped ; 03C8 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL PSI +1D7C2 ; mapped ; 03C9 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL OMEGA +1D7C3 ; mapped ; 2202 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC PARTIAL DIFFERENTIAL +1D7C4 ; mapped ; 03B5 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC EPSILON SYMBOL +1D7C5 ; mapped ; 03B8 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC THETA SYMBOL +1D7C6 ; mapped ; 03BA # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC KAPPA SYMBOL +1D7C7 ; mapped ; 03C6 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC PHI SYMBOL +1D7C8 ; mapped ; 03C1 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC RHO SYMBOL +1D7C9 ; mapped ; 03C0 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC PI SYMBOL +1D7CA..1D7CB ; mapped ; 03DD # 5.0 MATHEMATICAL BOLD CAPITAL DIGAMMA..MATHEMATICAL BOLD SMALL DIGAMMA +1D7CC..1D7CD ; disallowed # NA .. 
+1D7CE ; mapped ; 0030 # 3.1 MATHEMATICAL BOLD DIGIT ZERO +1D7CF ; mapped ; 0031 # 3.1 MATHEMATICAL BOLD DIGIT ONE +1D7D0 ; mapped ; 0032 # 3.1 MATHEMATICAL BOLD DIGIT TWO +1D7D1 ; mapped ; 0033 # 3.1 MATHEMATICAL BOLD DIGIT THREE +1D7D2 ; mapped ; 0034 # 3.1 MATHEMATICAL BOLD DIGIT FOUR +1D7D3 ; mapped ; 0035 # 3.1 MATHEMATICAL BOLD DIGIT FIVE +1D7D4 ; mapped ; 0036 # 3.1 MATHEMATICAL BOLD DIGIT SIX +1D7D5 ; mapped ; 0037 # 3.1 MATHEMATICAL BOLD DIGIT SEVEN +1D7D6 ; mapped ; 0038 # 3.1 MATHEMATICAL BOLD DIGIT EIGHT +1D7D7 ; mapped ; 0039 # 3.1 MATHEMATICAL BOLD DIGIT NINE +1D7D8 ; mapped ; 0030 # 3.1 MATHEMATICAL DOUBLE-STRUCK DIGIT ZERO +1D7D9 ; mapped ; 0031 # 3.1 MATHEMATICAL DOUBLE-STRUCK DIGIT ONE +1D7DA ; mapped ; 0032 # 3.1 MATHEMATICAL DOUBLE-STRUCK DIGIT TWO +1D7DB ; mapped ; 0033 # 3.1 MATHEMATICAL DOUBLE-STRUCK DIGIT THREE +1D7DC ; mapped ; 0034 # 3.1 MATHEMATICAL DOUBLE-STRUCK DIGIT FOUR +1D7DD ; mapped ; 0035 # 3.1 MATHEMATICAL DOUBLE-STRUCK DIGIT FIVE +1D7DE ; mapped ; 0036 # 3.1 MATHEMATICAL DOUBLE-STRUCK DIGIT SIX +1D7DF ; mapped ; 0037 # 3.1 MATHEMATICAL DOUBLE-STRUCK DIGIT SEVEN +1D7E0 ; mapped ; 0038 # 3.1 MATHEMATICAL DOUBLE-STRUCK DIGIT EIGHT +1D7E1 ; mapped ; 0039 # 3.1 MATHEMATICAL DOUBLE-STRUCK DIGIT NINE +1D7E2 ; mapped ; 0030 # 3.1 MATHEMATICAL SANS-SERIF DIGIT ZERO +1D7E3 ; mapped ; 0031 # 3.1 MATHEMATICAL SANS-SERIF DIGIT ONE +1D7E4 ; mapped ; 0032 # 3.1 MATHEMATICAL SANS-SERIF DIGIT TWO +1D7E5 ; mapped ; 0033 # 3.1 MATHEMATICAL SANS-SERIF DIGIT THREE +1D7E6 ; mapped ; 0034 # 3.1 MATHEMATICAL SANS-SERIF DIGIT FOUR +1D7E7 ; mapped ; 0035 # 3.1 MATHEMATICAL SANS-SERIF DIGIT FIVE +1D7E8 ; mapped ; 0036 # 3.1 MATHEMATICAL SANS-SERIF DIGIT SIX +1D7E9 ; mapped ; 0037 # 3.1 MATHEMATICAL SANS-SERIF DIGIT SEVEN +1D7EA ; mapped ; 0038 # 3.1 MATHEMATICAL SANS-SERIF DIGIT EIGHT +1D7EB ; mapped ; 0039 # 3.1 MATHEMATICAL SANS-SERIF DIGIT NINE +1D7EC ; mapped ; 0030 # 3.1 MATHEMATICAL SANS-SERIF BOLD DIGIT ZERO +1D7ED ; mapped ; 0031 # 3.1 MATHEMATICAL SANS-SERIF BOLD DIGIT ONE +1D7EE ; mapped ; 0032 # 3.1 MATHEMATICAL SANS-SERIF BOLD DIGIT TWO +1D7EF ; mapped ; 0033 # 3.1 MATHEMATICAL SANS-SERIF BOLD DIGIT THREE +1D7F0 ; mapped ; 0034 # 3.1 MATHEMATICAL SANS-SERIF BOLD DIGIT FOUR +1D7F1 ; mapped ; 0035 # 3.1 MATHEMATICAL SANS-SERIF BOLD DIGIT FIVE +1D7F2 ; mapped ; 0036 # 3.1 MATHEMATICAL SANS-SERIF BOLD DIGIT SIX +1D7F3 ; mapped ; 0037 # 3.1 MATHEMATICAL SANS-SERIF BOLD DIGIT SEVEN +1D7F4 ; mapped ; 0038 # 3.1 MATHEMATICAL SANS-SERIF BOLD DIGIT EIGHT +1D7F5 ; mapped ; 0039 # 3.1 MATHEMATICAL SANS-SERIF BOLD DIGIT NINE +1D7F6 ; mapped ; 0030 # 3.1 MATHEMATICAL MONOSPACE DIGIT ZERO +1D7F7 ; mapped ; 0031 # 3.1 MATHEMATICAL MONOSPACE DIGIT ONE +1D7F8 ; mapped ; 0032 # 3.1 MATHEMATICAL MONOSPACE DIGIT TWO +1D7F9 ; mapped ; 0033 # 3.1 MATHEMATICAL MONOSPACE DIGIT THREE +1D7FA ; mapped ; 0034 # 3.1 MATHEMATICAL MONOSPACE DIGIT FOUR +1D7FB ; mapped ; 0035 # 3.1 MATHEMATICAL MONOSPACE DIGIT FIVE +1D7FC ; mapped ; 0036 # 3.1 MATHEMATICAL MONOSPACE DIGIT SIX +1D7FD ; mapped ; 0037 # 3.1 MATHEMATICAL MONOSPACE DIGIT SEVEN +1D7FE ; mapped ; 0038 # 3.1 MATHEMATICAL MONOSPACE DIGIT EIGHT +1D7FF ; mapped ; 0039 # 3.1 MATHEMATICAL MONOSPACE DIGIT NINE +1D800..1D9FF ; valid ; ; NV8 # 8.0 SIGNWRITING HAND-FIST INDEX..SIGNWRITING HEAD +1DA00..1DA36 ; valid # 8.0 SIGNWRITING HEAD RIM..SIGNWRITING AIR SUCKING IN +1DA37..1DA3A ; valid ; ; NV8 # 8.0 SIGNWRITING AIR BLOW SMALL ROTATIONS..SIGNWRITING BREATH EXHALE +1DA3B..1DA6C ; valid # 8.0 SIGNWRITING MOUTH CLOSED NEUTRAL..SIGNWRITING EXCITEMENT 
+1DA6D..1DA74 ; valid ; ; NV8 # 8.0 SIGNWRITING SHOULDER HIP SPINE..SIGNWRITING TORSO-FLOORPLANE TWISTING +1DA75 ; valid # 8.0 SIGNWRITING UPPER BODY TILTING FROM HIP JOINTS +1DA76..1DA83 ; valid ; ; NV8 # 8.0 SIGNWRITING LIMB COMBINATION..SIGNWRITING LOCATION DEPTH +1DA84 ; valid # 8.0 SIGNWRITING LOCATION HEAD NECK +1DA85..1DA8B ; valid ; ; NV8 # 8.0 SIGNWRITING LOCATION TORSO..SIGNWRITING PARENTHESIS +1DA8C..1DA9A ; disallowed # NA .. +1DA9B..1DA9F ; valid # 8.0 SIGNWRITING FILL MODIFIER-2..SIGNWRITING FILL MODIFIER-6 +1DAA0 ; disallowed # NA +1DAA1..1DAAF ; valid # 8.0 SIGNWRITING ROTATION MODIFIER-2..SIGNWRITING ROTATION MODIFIER-16 +1DAB0..1DFFF ; disallowed # NA .. +1E000..1E006 ; valid # 9.0 COMBINING GLAGOLITIC LETTER AZU..COMBINING GLAGOLITIC LETTER ZHIVETE +1E007 ; disallowed # NA +1E008..1E018 ; valid # 9.0 COMBINING GLAGOLITIC LETTER ZEMLJA..COMBINING GLAGOLITIC LETTER HERU +1E019..1E01A ; disallowed # NA .. +1E01B..1E021 ; valid # 9.0 COMBINING GLAGOLITIC LETTER SHTA..COMBINING GLAGOLITIC LETTER YATI +1E022 ; disallowed # NA +1E023..1E024 ; valid # 9.0 COMBINING GLAGOLITIC LETTER YU..COMBINING GLAGOLITIC LETTER SMALL YUS +1E025 ; disallowed # NA +1E026..1E02A ; valid # 9.0 COMBINING GLAGOLITIC LETTER YO..COMBINING GLAGOLITIC LETTER FITA +1E02B..1E7FF ; disallowed # NA .. +1E800..1E8C4 ; valid # 7.0 MENDE KIKAKUI SYLLABLE M001 KI..MENDE KIKAKUI SYLLABLE M060 NYON +1E8C5..1E8C6 ; disallowed # NA .. +1E8C7..1E8CF ; valid ; ; NV8 # 7.0 MENDE KIKAKUI DIGIT ONE..MENDE KIKAKUI DIGIT NINE +1E8D0..1E8D6 ; valid # 7.0 MENDE KIKAKUI COMBINING NUMBER TEENS..MENDE KIKAKUI COMBINING NUMBER MILLIONS +1E8D7..1E8FF ; disallowed # NA .. +1E900 ; mapped ; 1E922 # 9.0 ADLAM CAPITAL LETTER ALIF +1E901 ; mapped ; 1E923 # 9.0 ADLAM CAPITAL LETTER DAALI +1E902 ; mapped ; 1E924 # 9.0 ADLAM CAPITAL LETTER LAAM +1E903 ; mapped ; 1E925 # 9.0 ADLAM CAPITAL LETTER MIIM +1E904 ; mapped ; 1E926 # 9.0 ADLAM CAPITAL LETTER BA +1E905 ; mapped ; 1E927 # 9.0 ADLAM CAPITAL LETTER SINNYIIYHE +1E906 ; mapped ; 1E928 # 9.0 ADLAM CAPITAL LETTER PE +1E907 ; mapped ; 1E929 # 9.0 ADLAM CAPITAL LETTER BHE +1E908 ; mapped ; 1E92A # 9.0 ADLAM CAPITAL LETTER RA +1E909 ; mapped ; 1E92B # 9.0 ADLAM CAPITAL LETTER E +1E90A ; mapped ; 1E92C # 9.0 ADLAM CAPITAL LETTER FA +1E90B ; mapped ; 1E92D # 9.0 ADLAM CAPITAL LETTER I +1E90C ; mapped ; 1E92E # 9.0 ADLAM CAPITAL LETTER O +1E90D ; mapped ; 1E92F # 9.0 ADLAM CAPITAL LETTER DHA +1E90E ; mapped ; 1E930 # 9.0 ADLAM CAPITAL LETTER YHE +1E90F ; mapped ; 1E931 # 9.0 ADLAM CAPITAL LETTER WAW +1E910 ; mapped ; 1E932 # 9.0 ADLAM CAPITAL LETTER NUN +1E911 ; mapped ; 1E933 # 9.0 ADLAM CAPITAL LETTER KAF +1E912 ; mapped ; 1E934 # 9.0 ADLAM CAPITAL LETTER YA +1E913 ; mapped ; 1E935 # 9.0 ADLAM CAPITAL LETTER U +1E914 ; mapped ; 1E936 # 9.0 ADLAM CAPITAL LETTER JIIM +1E915 ; mapped ; 1E937 # 9.0 ADLAM CAPITAL LETTER CHI +1E916 ; mapped ; 1E938 # 9.0 ADLAM CAPITAL LETTER HA +1E917 ; mapped ; 1E939 # 9.0 ADLAM CAPITAL LETTER QAAF +1E918 ; mapped ; 1E93A # 9.0 ADLAM CAPITAL LETTER GA +1E919 ; mapped ; 1E93B # 9.0 ADLAM CAPITAL LETTER NYA +1E91A ; mapped ; 1E93C # 9.0 ADLAM CAPITAL LETTER TU +1E91B ; mapped ; 1E93D # 9.0 ADLAM CAPITAL LETTER NHA +1E91C ; mapped ; 1E93E # 9.0 ADLAM CAPITAL LETTER VA +1E91D ; mapped ; 1E93F # 9.0 ADLAM CAPITAL LETTER KHA +1E91E ; mapped ; 1E940 # 9.0 ADLAM CAPITAL LETTER GBE +1E91F ; mapped ; 1E941 # 9.0 ADLAM CAPITAL LETTER ZAL +1E920 ; mapped ; 1E942 # 9.0 ADLAM CAPITAL LETTER KPO +1E921 ; mapped ; 1E943 # 9.0 ADLAM CAPITAL LETTER SHA +1E922..1E94A ; valid # 
9.0 ADLAM SMALL LETTER ALIF..ADLAM NUKTA +1E94B..1E94F ; disallowed # NA .. +1E950..1E959 ; valid # 9.0 ADLAM DIGIT ZERO..ADLAM DIGIT NINE +1E95A..1E95D ; disallowed # NA .. +1E95E..1E95F ; valid ; ; NV8 # 9.0 ADLAM INITIAL EXCLAMATION MARK..ADLAM INITIAL QUESTION MARK +1E960..1EDFF ; disallowed # NA .. +1EE00 ; mapped ; 0627 # 6.1 ARABIC MATHEMATICAL ALEF +1EE01 ; mapped ; 0628 # 6.1 ARABIC MATHEMATICAL BEH +1EE02 ; mapped ; 062C # 6.1 ARABIC MATHEMATICAL JEEM +1EE03 ; mapped ; 062F # 6.1 ARABIC MATHEMATICAL DAL +1EE04 ; disallowed # NA +1EE05 ; mapped ; 0648 # 6.1 ARABIC MATHEMATICAL WAW +1EE06 ; mapped ; 0632 # 6.1 ARABIC MATHEMATICAL ZAIN +1EE07 ; mapped ; 062D # 6.1 ARABIC MATHEMATICAL HAH +1EE08 ; mapped ; 0637 # 6.1 ARABIC MATHEMATICAL TAH +1EE09 ; mapped ; 064A # 6.1 ARABIC MATHEMATICAL YEH +1EE0A ; mapped ; 0643 # 6.1 ARABIC MATHEMATICAL KAF +1EE0B ; mapped ; 0644 # 6.1 ARABIC MATHEMATICAL LAM +1EE0C ; mapped ; 0645 # 6.1 ARABIC MATHEMATICAL MEEM +1EE0D ; mapped ; 0646 # 6.1 ARABIC MATHEMATICAL NOON +1EE0E ; mapped ; 0633 # 6.1 ARABIC MATHEMATICAL SEEN +1EE0F ; mapped ; 0639 # 6.1 ARABIC MATHEMATICAL AIN +1EE10 ; mapped ; 0641 # 6.1 ARABIC MATHEMATICAL FEH +1EE11 ; mapped ; 0635 # 6.1 ARABIC MATHEMATICAL SAD +1EE12 ; mapped ; 0642 # 6.1 ARABIC MATHEMATICAL QAF +1EE13 ; mapped ; 0631 # 6.1 ARABIC MATHEMATICAL REH +1EE14 ; mapped ; 0634 # 6.1 ARABIC MATHEMATICAL SHEEN +1EE15 ; mapped ; 062A # 6.1 ARABIC MATHEMATICAL TEH +1EE16 ; mapped ; 062B # 6.1 ARABIC MATHEMATICAL THEH +1EE17 ; mapped ; 062E # 6.1 ARABIC MATHEMATICAL KHAH +1EE18 ; mapped ; 0630 # 6.1 ARABIC MATHEMATICAL THAL +1EE19 ; mapped ; 0636 # 6.1 ARABIC MATHEMATICAL DAD +1EE1A ; mapped ; 0638 # 6.1 ARABIC MATHEMATICAL ZAH +1EE1B ; mapped ; 063A # 6.1 ARABIC MATHEMATICAL GHAIN +1EE1C ; mapped ; 066E # 6.1 ARABIC MATHEMATICAL DOTLESS BEH +1EE1D ; mapped ; 06BA # 6.1 ARABIC MATHEMATICAL DOTLESS NOON +1EE1E ; mapped ; 06A1 # 6.1 ARABIC MATHEMATICAL DOTLESS FEH +1EE1F ; mapped ; 066F # 6.1 ARABIC MATHEMATICAL DOTLESS QAF +1EE20 ; disallowed # NA +1EE21 ; mapped ; 0628 # 6.1 ARABIC MATHEMATICAL INITIAL BEH +1EE22 ; mapped ; 062C # 6.1 ARABIC MATHEMATICAL INITIAL JEEM +1EE23 ; disallowed # NA +1EE24 ; mapped ; 0647 # 6.1 ARABIC MATHEMATICAL INITIAL HEH +1EE25..1EE26 ; disallowed # NA .. +1EE27 ; mapped ; 062D # 6.1 ARABIC MATHEMATICAL INITIAL HAH +1EE28 ; disallowed # NA +1EE29 ; mapped ; 064A # 6.1 ARABIC MATHEMATICAL INITIAL YEH +1EE2A ; mapped ; 0643 # 6.1 ARABIC MATHEMATICAL INITIAL KAF +1EE2B ; mapped ; 0644 # 6.1 ARABIC MATHEMATICAL INITIAL LAM +1EE2C ; mapped ; 0645 # 6.1 ARABIC MATHEMATICAL INITIAL MEEM +1EE2D ; mapped ; 0646 # 6.1 ARABIC MATHEMATICAL INITIAL NOON +1EE2E ; mapped ; 0633 # 6.1 ARABIC MATHEMATICAL INITIAL SEEN +1EE2F ; mapped ; 0639 # 6.1 ARABIC MATHEMATICAL INITIAL AIN +1EE30 ; mapped ; 0641 # 6.1 ARABIC MATHEMATICAL INITIAL FEH +1EE31 ; mapped ; 0635 # 6.1 ARABIC MATHEMATICAL INITIAL SAD +1EE32 ; mapped ; 0642 # 6.1 ARABIC MATHEMATICAL INITIAL QAF +1EE33 ; disallowed # NA +1EE34 ; mapped ; 0634 # 6.1 ARABIC MATHEMATICAL INITIAL SHEEN +1EE35 ; mapped ; 062A # 6.1 ARABIC MATHEMATICAL INITIAL TEH +1EE36 ; mapped ; 062B # 6.1 ARABIC MATHEMATICAL INITIAL THEH +1EE37 ; mapped ; 062E # 6.1 ARABIC MATHEMATICAL INITIAL KHAH +1EE38 ; disallowed # NA +1EE39 ; mapped ; 0636 # 6.1 ARABIC MATHEMATICAL INITIAL DAD +1EE3A ; disallowed # NA +1EE3B ; mapped ; 063A # 6.1 ARABIC MATHEMATICAL INITIAL GHAIN +1EE3C..1EE41 ; disallowed # NA .. 
+1EE42 ; mapped ; 062C # 6.1 ARABIC MATHEMATICAL TAILED JEEM +1EE43..1EE46 ; disallowed # NA .. +1EE47 ; mapped ; 062D # 6.1 ARABIC MATHEMATICAL TAILED HAH +1EE48 ; disallowed # NA +1EE49 ; mapped ; 064A # 6.1 ARABIC MATHEMATICAL TAILED YEH +1EE4A ; disallowed # NA +1EE4B ; mapped ; 0644 # 6.1 ARABIC MATHEMATICAL TAILED LAM +1EE4C ; disallowed # NA +1EE4D ; mapped ; 0646 # 6.1 ARABIC MATHEMATICAL TAILED NOON +1EE4E ; mapped ; 0633 # 6.1 ARABIC MATHEMATICAL TAILED SEEN +1EE4F ; mapped ; 0639 # 6.1 ARABIC MATHEMATICAL TAILED AIN +1EE50 ; disallowed # NA +1EE51 ; mapped ; 0635 # 6.1 ARABIC MATHEMATICAL TAILED SAD +1EE52 ; mapped ; 0642 # 6.1 ARABIC MATHEMATICAL TAILED QAF +1EE53 ; disallowed # NA +1EE54 ; mapped ; 0634 # 6.1 ARABIC MATHEMATICAL TAILED SHEEN +1EE55..1EE56 ; disallowed # NA .. +1EE57 ; mapped ; 062E # 6.1 ARABIC MATHEMATICAL TAILED KHAH +1EE58 ; disallowed # NA +1EE59 ; mapped ; 0636 # 6.1 ARABIC MATHEMATICAL TAILED DAD +1EE5A ; disallowed # NA +1EE5B ; mapped ; 063A # 6.1 ARABIC MATHEMATICAL TAILED GHAIN +1EE5C ; disallowed # NA +1EE5D ; mapped ; 06BA # 6.1 ARABIC MATHEMATICAL TAILED DOTLESS NOON +1EE5E ; disallowed # NA +1EE5F ; mapped ; 066F # 6.1 ARABIC MATHEMATICAL TAILED DOTLESS QAF +1EE60 ; disallowed # NA +1EE61 ; mapped ; 0628 # 6.1 ARABIC MATHEMATICAL STRETCHED BEH +1EE62 ; mapped ; 062C # 6.1 ARABIC MATHEMATICAL STRETCHED JEEM +1EE63 ; disallowed # NA +1EE64 ; mapped ; 0647 # 6.1 ARABIC MATHEMATICAL STRETCHED HEH +1EE65..1EE66 ; disallowed # NA .. +1EE67 ; mapped ; 062D # 6.1 ARABIC MATHEMATICAL STRETCHED HAH +1EE68 ; mapped ; 0637 # 6.1 ARABIC MATHEMATICAL STRETCHED TAH +1EE69 ; mapped ; 064A # 6.1 ARABIC MATHEMATICAL STRETCHED YEH +1EE6A ; mapped ; 0643 # 6.1 ARABIC MATHEMATICAL STRETCHED KAF +1EE6B ; disallowed # NA +1EE6C ; mapped ; 0645 # 6.1 ARABIC MATHEMATICAL STRETCHED MEEM +1EE6D ; mapped ; 0646 # 6.1 ARABIC MATHEMATICAL STRETCHED NOON +1EE6E ; mapped ; 0633 # 6.1 ARABIC MATHEMATICAL STRETCHED SEEN +1EE6F ; mapped ; 0639 # 6.1 ARABIC MATHEMATICAL STRETCHED AIN +1EE70 ; mapped ; 0641 # 6.1 ARABIC MATHEMATICAL STRETCHED FEH +1EE71 ; mapped ; 0635 # 6.1 ARABIC MATHEMATICAL STRETCHED SAD +1EE72 ; mapped ; 0642 # 6.1 ARABIC MATHEMATICAL STRETCHED QAF +1EE73 ; disallowed # NA +1EE74 ; mapped ; 0634 # 6.1 ARABIC MATHEMATICAL STRETCHED SHEEN +1EE75 ; mapped ; 062A # 6.1 ARABIC MATHEMATICAL STRETCHED TEH +1EE76 ; mapped ; 062B # 6.1 ARABIC MATHEMATICAL STRETCHED THEH +1EE77 ; mapped ; 062E # 6.1 ARABIC MATHEMATICAL STRETCHED KHAH +1EE78 ; disallowed # NA +1EE79 ; mapped ; 0636 # 6.1 ARABIC MATHEMATICAL STRETCHED DAD +1EE7A ; mapped ; 0638 # 6.1 ARABIC MATHEMATICAL STRETCHED ZAH +1EE7B ; mapped ; 063A # 6.1 ARABIC MATHEMATICAL STRETCHED GHAIN +1EE7C ; mapped ; 066E # 6.1 ARABIC MATHEMATICAL STRETCHED DOTLESS BEH +1EE7D ; disallowed # NA +1EE7E ; mapped ; 06A1 # 6.1 ARABIC MATHEMATICAL STRETCHED DOTLESS FEH +1EE7F ; disallowed # NA +1EE80 ; mapped ; 0627 # 6.1 ARABIC MATHEMATICAL LOOPED ALEF +1EE81 ; mapped ; 0628 # 6.1 ARABIC MATHEMATICAL LOOPED BEH +1EE82 ; mapped ; 062C # 6.1 ARABIC MATHEMATICAL LOOPED JEEM +1EE83 ; mapped ; 062F # 6.1 ARABIC MATHEMATICAL LOOPED DAL +1EE84 ; mapped ; 0647 # 6.1 ARABIC MATHEMATICAL LOOPED HEH +1EE85 ; mapped ; 0648 # 6.1 ARABIC MATHEMATICAL LOOPED WAW +1EE86 ; mapped ; 0632 # 6.1 ARABIC MATHEMATICAL LOOPED ZAIN +1EE87 ; mapped ; 062D # 6.1 ARABIC MATHEMATICAL LOOPED HAH +1EE88 ; mapped ; 0637 # 6.1 ARABIC MATHEMATICAL LOOPED TAH +1EE89 ; mapped ; 064A # 6.1 ARABIC MATHEMATICAL LOOPED YEH +1EE8A ; disallowed # NA +1EE8B ; mapped ; 
0644 # 6.1 ARABIC MATHEMATICAL LOOPED LAM +1EE8C ; mapped ; 0645 # 6.1 ARABIC MATHEMATICAL LOOPED MEEM +1EE8D ; mapped ; 0646 # 6.1 ARABIC MATHEMATICAL LOOPED NOON +1EE8E ; mapped ; 0633 # 6.1 ARABIC MATHEMATICAL LOOPED SEEN +1EE8F ; mapped ; 0639 # 6.1 ARABIC MATHEMATICAL LOOPED AIN +1EE90 ; mapped ; 0641 # 6.1 ARABIC MATHEMATICAL LOOPED FEH +1EE91 ; mapped ; 0635 # 6.1 ARABIC MATHEMATICAL LOOPED SAD +1EE92 ; mapped ; 0642 # 6.1 ARABIC MATHEMATICAL LOOPED QAF +1EE93 ; mapped ; 0631 # 6.1 ARABIC MATHEMATICAL LOOPED REH +1EE94 ; mapped ; 0634 # 6.1 ARABIC MATHEMATICAL LOOPED SHEEN +1EE95 ; mapped ; 062A # 6.1 ARABIC MATHEMATICAL LOOPED TEH +1EE96 ; mapped ; 062B # 6.1 ARABIC MATHEMATICAL LOOPED THEH +1EE97 ; mapped ; 062E # 6.1 ARABIC MATHEMATICAL LOOPED KHAH +1EE98 ; mapped ; 0630 # 6.1 ARABIC MATHEMATICAL LOOPED THAL +1EE99 ; mapped ; 0636 # 6.1 ARABIC MATHEMATICAL LOOPED DAD +1EE9A ; mapped ; 0638 # 6.1 ARABIC MATHEMATICAL LOOPED ZAH +1EE9B ; mapped ; 063A # 6.1 ARABIC MATHEMATICAL LOOPED GHAIN +1EE9C..1EEA0 ; disallowed # NA .. +1EEA1 ; mapped ; 0628 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK BEH +1EEA2 ; mapped ; 062C # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK JEEM +1EEA3 ; mapped ; 062F # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK DAL +1EEA4 ; disallowed # NA +1EEA5 ; mapped ; 0648 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK WAW +1EEA6 ; mapped ; 0632 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK ZAIN +1EEA7 ; mapped ; 062D # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK HAH +1EEA8 ; mapped ; 0637 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK TAH +1EEA9 ; mapped ; 064A # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK YEH +1EEAA ; disallowed # NA +1EEAB ; mapped ; 0644 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK LAM +1EEAC ; mapped ; 0645 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK MEEM +1EEAD ; mapped ; 0646 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK NOON +1EEAE ; mapped ; 0633 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK SEEN +1EEAF ; mapped ; 0639 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK AIN +1EEB0 ; mapped ; 0641 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK FEH +1EEB1 ; mapped ; 0635 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK SAD +1EEB2 ; mapped ; 0642 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK QAF +1EEB3 ; mapped ; 0631 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK REH +1EEB4 ; mapped ; 0634 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK SHEEN +1EEB5 ; mapped ; 062A # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK TEH +1EEB6 ; mapped ; 062B # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK THEH +1EEB7 ; mapped ; 062E # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK KHAH +1EEB8 ; mapped ; 0630 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK THAL +1EEB9 ; mapped ; 0636 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK DAD +1EEBA ; mapped ; 0638 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK ZAH +1EEBB ; mapped ; 063A # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK GHAIN +1EEBC..1EEEF ; disallowed # NA .. +1EEF0..1EEF1 ; valid ; ; NV8 # 6.1 ARABIC MATHEMATICAL OPERATOR MEEM WITH HAH WITH TATWEEL..ARABIC MATHEMATICAL OPERATOR HAH WITH DAL +1EEF2..1EFFF ; disallowed # NA .. +1F000..1F02B ; valid ; ; NV8 # 5.1 MAHJONG TILE EAST WIND..MAHJONG TILE BACK +1F02C..1F02F ; disallowed # NA .. +1F030..1F093 ; valid ; ; NV8 # 5.1 DOMINO TILE HORIZONTAL BACK..DOMINO TILE VERTICAL-06-06 +1F094..1F09F ; disallowed # NA .. +1F0A0..1F0AE ; valid ; ; NV8 # 6.0 PLAYING CARD BACK..PLAYING CARD KING OF SPADES +1F0AF..1F0B0 ; disallowed # NA .. 
+1F0B1..1F0BE ; valid ; ; NV8 # 6.0 PLAYING CARD ACE OF HEARTS..PLAYING CARD KING OF HEARTS +1F0BF ; valid ; ; NV8 # 7.0 PLAYING CARD RED JOKER +1F0C0 ; disallowed # NA +1F0C1..1F0CF ; valid ; ; NV8 # 6.0 PLAYING CARD ACE OF DIAMONDS..PLAYING CARD BLACK JOKER +1F0D0 ; disallowed # NA +1F0D1..1F0DF ; valid ; ; NV8 # 6.0 PLAYING CARD ACE OF CLUBS..PLAYING CARD WHITE JOKER +1F0E0..1F0F5 ; valid ; ; NV8 # 7.0 PLAYING CARD FOOL..PLAYING CARD TRUMP-21 +1F0F6..1F0FF ; disallowed # NA .. +1F100 ; disallowed # 5.2 DIGIT ZERO FULL STOP +1F101 ; disallowed_STD3_mapped ; 0030 002C # 5.2 DIGIT ZERO COMMA +1F102 ; disallowed_STD3_mapped ; 0031 002C # 5.2 DIGIT ONE COMMA +1F103 ; disallowed_STD3_mapped ; 0032 002C # 5.2 DIGIT TWO COMMA +1F104 ; disallowed_STD3_mapped ; 0033 002C # 5.2 DIGIT THREE COMMA +1F105 ; disallowed_STD3_mapped ; 0034 002C # 5.2 DIGIT FOUR COMMA +1F106 ; disallowed_STD3_mapped ; 0035 002C # 5.2 DIGIT FIVE COMMA +1F107 ; disallowed_STD3_mapped ; 0036 002C # 5.2 DIGIT SIX COMMA +1F108 ; disallowed_STD3_mapped ; 0037 002C # 5.2 DIGIT SEVEN COMMA +1F109 ; disallowed_STD3_mapped ; 0038 002C # 5.2 DIGIT EIGHT COMMA +1F10A ; disallowed_STD3_mapped ; 0039 002C # 5.2 DIGIT NINE COMMA +1F10B..1F10C ; valid ; ; NV8 # 7.0 DINGBAT CIRCLED SANS-SERIF DIGIT ZERO..DINGBAT NEGATIVE CIRCLED SANS-SERIF DIGIT ZERO +1F10D..1F10F ; disallowed # NA .. +1F110 ; disallowed_STD3_mapped ; 0028 0061 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER A +1F111 ; disallowed_STD3_mapped ; 0028 0062 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER B +1F112 ; disallowed_STD3_mapped ; 0028 0063 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER C +1F113 ; disallowed_STD3_mapped ; 0028 0064 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER D +1F114 ; disallowed_STD3_mapped ; 0028 0065 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER E +1F115 ; disallowed_STD3_mapped ; 0028 0066 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER F +1F116 ; disallowed_STD3_mapped ; 0028 0067 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER G +1F117 ; disallowed_STD3_mapped ; 0028 0068 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER H +1F118 ; disallowed_STD3_mapped ; 0028 0069 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER I +1F119 ; disallowed_STD3_mapped ; 0028 006A 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER J +1F11A ; disallowed_STD3_mapped ; 0028 006B 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER K +1F11B ; disallowed_STD3_mapped ; 0028 006C 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER L +1F11C ; disallowed_STD3_mapped ; 0028 006D 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER M +1F11D ; disallowed_STD3_mapped ; 0028 006E 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER N +1F11E ; disallowed_STD3_mapped ; 0028 006F 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER O +1F11F ; disallowed_STD3_mapped ; 0028 0070 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER P +1F120 ; disallowed_STD3_mapped ; 0028 0071 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER Q +1F121 ; disallowed_STD3_mapped ; 0028 0072 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER R +1F122 ; disallowed_STD3_mapped ; 0028 0073 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER S +1F123 ; disallowed_STD3_mapped ; 0028 0074 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER T +1F124 ; disallowed_STD3_mapped ; 0028 0075 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER U +1F125 ; disallowed_STD3_mapped ; 0028 0076 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER V +1F126 ; disallowed_STD3_mapped ; 0028 0077 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER W +1F127 ; disallowed_STD3_mapped ; 0028 0078 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER X +1F128 ; 
disallowed_STD3_mapped ; 0028 0079 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER Y +1F129 ; disallowed_STD3_mapped ; 0028 007A 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER Z +1F12A ; mapped ; 3014 0073 3015 #5.2 TORTOISE SHELL BRACKETED LATIN CAPITAL LETTER S +1F12B ; mapped ; 0063 # 5.2 CIRCLED ITALIC LATIN CAPITAL LETTER C +1F12C ; mapped ; 0072 # 5.2 CIRCLED ITALIC LATIN CAPITAL LETTER R +1F12D ; mapped ; 0063 0064 # 5.2 CIRCLED CD +1F12E ; mapped ; 0077 007A # 5.2 CIRCLED WZ +1F12F ; disallowed # NA +1F130 ; mapped ; 0061 # 6.0 SQUARED LATIN CAPITAL LETTER A +1F131 ; mapped ; 0062 # 5.2 SQUARED LATIN CAPITAL LETTER B +1F132 ; mapped ; 0063 # 6.0 SQUARED LATIN CAPITAL LETTER C +1F133 ; mapped ; 0064 # 6.0 SQUARED LATIN CAPITAL LETTER D +1F134 ; mapped ; 0065 # 6.0 SQUARED LATIN CAPITAL LETTER E +1F135 ; mapped ; 0066 # 6.0 SQUARED LATIN CAPITAL LETTER F +1F136 ; mapped ; 0067 # 6.0 SQUARED LATIN CAPITAL LETTER G +1F137 ; mapped ; 0068 # 6.0 SQUARED LATIN CAPITAL LETTER H +1F138 ; mapped ; 0069 # 6.0 SQUARED LATIN CAPITAL LETTER I +1F139 ; mapped ; 006A # 6.0 SQUARED LATIN CAPITAL LETTER J +1F13A ; mapped ; 006B # 6.0 SQUARED LATIN CAPITAL LETTER K +1F13B ; mapped ; 006C # 6.0 SQUARED LATIN CAPITAL LETTER L +1F13C ; mapped ; 006D # 6.0 SQUARED LATIN CAPITAL LETTER M +1F13D ; mapped ; 006E # 5.2 SQUARED LATIN CAPITAL LETTER N +1F13E ; mapped ; 006F # 6.0 SQUARED LATIN CAPITAL LETTER O +1F13F ; mapped ; 0070 # 5.2 SQUARED LATIN CAPITAL LETTER P +1F140 ; mapped ; 0071 # 6.0 SQUARED LATIN CAPITAL LETTER Q +1F141 ; mapped ; 0072 # 6.0 SQUARED LATIN CAPITAL LETTER R +1F142 ; mapped ; 0073 # 5.2 SQUARED LATIN CAPITAL LETTER S +1F143 ; mapped ; 0074 # 6.0 SQUARED LATIN CAPITAL LETTER T +1F144 ; mapped ; 0075 # 6.0 SQUARED LATIN CAPITAL LETTER U +1F145 ; mapped ; 0076 # 6.0 SQUARED LATIN CAPITAL LETTER V +1F146 ; mapped ; 0077 # 5.2 SQUARED LATIN CAPITAL LETTER W +1F147 ; mapped ; 0078 # 6.0 SQUARED LATIN CAPITAL LETTER X +1F148 ; mapped ; 0079 # 6.0 SQUARED LATIN CAPITAL LETTER Y +1F149 ; mapped ; 007A # 6.0 SQUARED LATIN CAPITAL LETTER Z +1F14A ; mapped ; 0068 0076 # 5.2 SQUARED HV +1F14B ; mapped ; 006D 0076 # 5.2 SQUARED MV +1F14C ; mapped ; 0073 0064 # 5.2 SQUARED SD +1F14D ; mapped ; 0073 0073 # 5.2 SQUARED SS +1F14E ; mapped ; 0070 0070 0076 #5.2 SQUARED PPV +1F14F ; mapped ; 0077 0063 # 6.0 SQUARED WC +1F150..1F156 ; valid ; ; NV8 # 6.0 NEGATIVE CIRCLED LATIN CAPITAL LETTER A..NEGATIVE CIRCLED LATIN CAPITAL LETTER G +1F157 ; valid ; ; NV8 # 5.2 NEGATIVE CIRCLED LATIN CAPITAL LETTER H +1F158..1F15E ; valid ; ; NV8 # 6.0 NEGATIVE CIRCLED LATIN CAPITAL LETTER I..NEGATIVE CIRCLED LATIN CAPITAL LETTER O +1F15F ; valid ; ; NV8 # 5.2 NEGATIVE CIRCLED LATIN CAPITAL LETTER P +1F160..1F169 ; valid ; ; NV8 # 6.0 NEGATIVE CIRCLED LATIN CAPITAL LETTER Q..NEGATIVE CIRCLED LATIN CAPITAL LETTER Z +1F16A ; mapped ; 006D 0063 # 6.1 RAISED MC SIGN +1F16B ; mapped ; 006D 0064 # 6.1 RAISED MD SIGN +1F16C..1F16F ; disallowed # NA .. 
+1F170..1F178 ; valid ; ; NV8 # 6.0 NEGATIVE SQUARED LATIN CAPITAL LETTER A..NEGATIVE SQUARED LATIN CAPITAL LETTER I +1F179 ; valid ; ; NV8 # 5.2 NEGATIVE SQUARED LATIN CAPITAL LETTER J +1F17A ; valid ; ; NV8 # 6.0 NEGATIVE SQUARED LATIN CAPITAL LETTER K +1F17B..1F17C ; valid ; ; NV8 # 5.2 NEGATIVE SQUARED LATIN CAPITAL LETTER L..NEGATIVE SQUARED LATIN CAPITAL LETTER M +1F17D..1F17E ; valid ; ; NV8 # 6.0 NEGATIVE SQUARED LATIN CAPITAL LETTER N..NEGATIVE SQUARED LATIN CAPITAL LETTER O +1F17F ; valid ; ; NV8 # 5.2 NEGATIVE SQUARED LATIN CAPITAL LETTER P +1F180..1F189 ; valid ; ; NV8 # 6.0 NEGATIVE SQUARED LATIN CAPITAL LETTER Q..NEGATIVE SQUARED LATIN CAPITAL LETTER Z +1F18A..1F18D ; valid ; ; NV8 # 5.2 CROSSED NEGATIVE SQUARED LATIN CAPITAL LETTER P..NEGATIVE SQUARED SA +1F18E..1F18F ; valid ; ; NV8 # 6.0 NEGATIVE SQUARED AB..NEGATIVE SQUARED WC +1F190 ; mapped ; 0064 006A # 5.2 SQUARE DJ +1F191..1F19A ; valid ; ; NV8 # 6.0 SQUARED CL..SQUARED VS +1F19B..1F1AC ; valid ; ; NV8 # 9.0 SQUARED THREE D..SQUARED VOD +1F1AD..1F1E5 ; disallowed # NA .. +1F1E6..1F1FF ; valid ; ; NV8 # 6.0 REGIONAL INDICATOR SYMBOL LETTER A..REGIONAL INDICATOR SYMBOL LETTER Z +1F200 ; mapped ; 307B 304B # 5.2 SQUARE HIRAGANA HOKA +1F201 ; mapped ; 30B3 30B3 # 6.0 SQUARED KATAKANA KOKO +1F202 ; mapped ; 30B5 # 6.0 SQUARED KATAKANA SA +1F203..1F20F ; disallowed # NA .. +1F210 ; mapped ; 624B # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-624B +1F211 ; mapped ; 5B57 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-5B57 +1F212 ; mapped ; 53CC # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-53CC +1F213 ; mapped ; 30C7 # 5.2 SQUARED KATAKANA DE +1F214 ; mapped ; 4E8C # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-4E8C +1F215 ; mapped ; 591A # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-591A +1F216 ; mapped ; 89E3 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-89E3 +1F217 ; mapped ; 5929 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-5929 +1F218 ; mapped ; 4EA4 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-4EA4 +1F219 ; mapped ; 6620 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-6620 +1F21A ; mapped ; 7121 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-7121 +1F21B ; mapped ; 6599 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-6599 +1F21C ; mapped ; 524D # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-524D +1F21D ; mapped ; 5F8C # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-5F8C +1F21E ; mapped ; 518D # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-518D +1F21F ; mapped ; 65B0 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-65B0 +1F220 ; mapped ; 521D # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-521D +1F221 ; mapped ; 7D42 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-7D42 +1F222 ; mapped ; 751F # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-751F +1F223 ; mapped ; 8CA9 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-8CA9 +1F224 ; mapped ; 58F0 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-58F0 +1F225 ; mapped ; 5439 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-5439 +1F226 ; mapped ; 6F14 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-6F14 +1F227 ; mapped ; 6295 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-6295 +1F228 ; mapped ; 6355 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-6355 +1F229 ; mapped ; 4E00 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-4E00 +1F22A ; mapped ; 4E09 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-4E09 +1F22B ; mapped ; 904A # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-904A +1F22C ; mapped ; 5DE6 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-5DE6 +1F22D ; mapped ; 4E2D # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-4E2D +1F22E ; mapped ; 53F3 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-53F3 +1F22F ; mapped ; 6307 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-6307 +1F230 ; mapped ; 8D70 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-8D70 +1F231 ; mapped ; 6253 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-6253 +1F232 ; mapped ; 7981 # 6.0 
SQUARED CJK UNIFIED IDEOGRAPH-7981 +1F233 ; mapped ; 7A7A # 6.0 SQUARED CJK UNIFIED IDEOGRAPH-7A7A +1F234 ; mapped ; 5408 # 6.0 SQUARED CJK UNIFIED IDEOGRAPH-5408 +1F235 ; mapped ; 6E80 # 6.0 SQUARED CJK UNIFIED IDEOGRAPH-6E80 +1F236 ; mapped ; 6709 # 6.0 SQUARED CJK UNIFIED IDEOGRAPH-6709 +1F237 ; mapped ; 6708 # 6.0 SQUARED CJK UNIFIED IDEOGRAPH-6708 +1F238 ; mapped ; 7533 # 6.0 SQUARED CJK UNIFIED IDEOGRAPH-7533 +1F239 ; mapped ; 5272 # 6.0 SQUARED CJK UNIFIED IDEOGRAPH-5272 +1F23A ; mapped ; 55B6 # 6.0 SQUARED CJK UNIFIED IDEOGRAPH-55B6 +1F23B ; mapped ; 914D # 9.0 SQUARED CJK UNIFIED IDEOGRAPH-914D +1F23C..1F23F ; disallowed # NA .. +1F240 ; mapped ; 3014 672C 3015 #5.2 TORTOISE SHELL BRACKETED CJK UNIFIED IDEOGRAPH-672C +1F241 ; mapped ; 3014 4E09 3015 #5.2 TORTOISE SHELL BRACKETED CJK UNIFIED IDEOGRAPH-4E09 +1F242 ; mapped ; 3014 4E8C 3015 #5.2 TORTOISE SHELL BRACKETED CJK UNIFIED IDEOGRAPH-4E8C +1F243 ; mapped ; 3014 5B89 3015 #5.2 TORTOISE SHELL BRACKETED CJK UNIFIED IDEOGRAPH-5B89 +1F244 ; mapped ; 3014 70B9 3015 #5.2 TORTOISE SHELL BRACKETED CJK UNIFIED IDEOGRAPH-70B9 +1F245 ; mapped ; 3014 6253 3015 #5.2 TORTOISE SHELL BRACKETED CJK UNIFIED IDEOGRAPH-6253 +1F246 ; mapped ; 3014 76D7 3015 #5.2 TORTOISE SHELL BRACKETED CJK UNIFIED IDEOGRAPH-76D7 +1F247 ; mapped ; 3014 52DD 3015 #5.2 TORTOISE SHELL BRACKETED CJK UNIFIED IDEOGRAPH-52DD +1F248 ; mapped ; 3014 6557 3015 #5.2 TORTOISE SHELL BRACKETED CJK UNIFIED IDEOGRAPH-6557 +1F249..1F24F ; disallowed # NA .. +1F250 ; mapped ; 5F97 # 6.0 CIRCLED IDEOGRAPH ADVANTAGE +1F251 ; mapped ; 53EF # 6.0 CIRCLED IDEOGRAPH ACCEPT +1F252..1F25F ; disallowed # NA .. +1F260..1F265 ; valid ; ; NV8 # 10.0 ROUNDED SYMBOL FOR FU..ROUNDED SYMBOL FOR CAI +1F266..1F2FF ; disallowed # NA .. +1F300..1F320 ; valid ; ; NV8 # 6.0 CYCLONE..SHOOTING STAR +1F321..1F32C ; valid ; ; NV8 # 7.0 THERMOMETER..WIND BLOWING FACE +1F32D..1F32F ; valid ; ; NV8 # 8.0 HOT DOG..BURRITO +1F330..1F335 ; valid ; ; NV8 # 6.0 CHESTNUT..CACTUS +1F336 ; valid ; ; NV8 # 7.0 HOT PEPPER +1F337..1F37C ; valid ; ; NV8 # 6.0 TULIP..BABY BOTTLE +1F37D ; valid ; ; NV8 # 7.0 FORK AND KNIFE WITH PLATE +1F37E..1F37F ; valid ; ; NV8 # 8.0 BOTTLE WITH POPPING CORK..POPCORN +1F380..1F393 ; valid ; ; NV8 # 6.0 RIBBON..GRADUATION CAP +1F394..1F39F ; valid ; ; NV8 # 7.0 HEART WITH TIP ON THE LEFT..ADMISSION TICKETS +1F3A0..1F3C4 ; valid ; ; NV8 # 6.0 CAROUSEL HORSE..SURFER +1F3C5 ; valid ; ; NV8 # 7.0 SPORTS MEDAL +1F3C6..1F3CA ; valid ; ; NV8 # 6.0 TROPHY..SWIMMER +1F3CB..1F3CE ; valid ; ; NV8 # 7.0 WEIGHT LIFTER..RACING CAR +1F3CF..1F3D3 ; valid ; ; NV8 # 8.0 CRICKET BAT AND BALL..TABLE TENNIS PADDLE AND BALL +1F3D4..1F3DF ; valid ; ; NV8 # 7.0 SNOW CAPPED MOUNTAIN..STADIUM +1F3E0..1F3F0 ; valid ; ; NV8 # 6.0 HOUSE BUILDING..EUROPEAN CASTLE +1F3F1..1F3F7 ; valid ; ; NV8 # 7.0 WHITE PENNANT..LABEL +1F3F8..1F3FF ; valid ; ; NV8 # 8.0 BADMINTON RACQUET AND SHUTTLECOCK..EMOJI MODIFIER FITZPATRICK TYPE-6 +1F400..1F43E ; valid ; ; NV8 # 6.0 RAT..PAW PRINTS +1F43F ; valid ; ; NV8 # 7.0 CHIPMUNK +1F440 ; valid ; ; NV8 # 6.0 EYES +1F441 ; valid ; ; NV8 # 7.0 EYE +1F442..1F4F7 ; valid ; ; NV8 # 6.0 EAR..CAMERA +1F4F8 ; valid ; ; NV8 # 7.0 CAMERA WITH FLASH +1F4F9..1F4FC ; valid ; ; NV8 # 6.0 VIDEO CAMERA..VIDEOCASSETTE +1F4FD..1F4FE ; valid ; ; NV8 # 7.0 FILM PROJECTOR..PORTABLE STEREO +1F4FF ; valid ; ; NV8 # 8.0 PRAYER BEADS +1F500..1F53D ; valid ; ; NV8 # 6.0 TWISTED RIGHTWARDS ARROWS..DOWN-POINTING SMALL RED TRIANGLE +1F53E..1F53F ; valid ; ; NV8 # 7.0 LOWER RIGHT SHADOWED WHITE CIRCLE..UPPER RIGHT 
SHADOWED WHITE CIRCLE +1F540..1F543 ; valid ; ; NV8 # 6.1 CIRCLED CROSS POMMEE..NOTCHED LEFT SEMICIRCLE WITH THREE DOTS +1F544..1F54A ; valid ; ; NV8 # 7.0 NOTCHED RIGHT SEMICIRCLE WITH THREE DOTS..DOVE OF PEACE +1F54B..1F54F ; valid ; ; NV8 # 8.0 KAABA..BOWL OF HYGIEIA +1F550..1F567 ; valid ; ; NV8 # 6.0 CLOCK FACE ONE OCLOCK..CLOCK FACE TWELVE-THIRTY +1F568..1F579 ; valid ; ; NV8 # 7.0 RIGHT SPEAKER..JOYSTICK +1F57A ; valid ; ; NV8 # 9.0 MAN DANCING +1F57B..1F5A3 ; valid ; ; NV8 # 7.0 LEFT HAND TELEPHONE RECEIVER..BLACK DOWN POINTING BACKHAND INDEX +1F5A4 ; valid ; ; NV8 # 9.0 BLACK HEART +1F5A5..1F5FA ; valid ; ; NV8 # 7.0 DESKTOP COMPUTER..WORLD MAP +1F5FB..1F5FF ; valid ; ; NV8 # 6.0 MOUNT FUJI..MOYAI +1F600 ; valid ; ; NV8 # 6.1 GRINNING FACE +1F601..1F610 ; valid ; ; NV8 # 6.0 GRINNING FACE WITH SMILING EYES..NEUTRAL FACE +1F611 ; valid ; ; NV8 # 6.1 EXPRESSIONLESS FACE +1F612..1F614 ; valid ; ; NV8 # 6.0 UNAMUSED FACE..PENSIVE FACE +1F615 ; valid ; ; NV8 # 6.1 CONFUSED FACE +1F616 ; valid ; ; NV8 # 6.0 CONFOUNDED FACE +1F617 ; valid ; ; NV8 # 6.1 KISSING FACE +1F618 ; valid ; ; NV8 # 6.0 FACE THROWING A KISS +1F619 ; valid ; ; NV8 # 6.1 KISSING FACE WITH SMILING EYES +1F61A ; valid ; ; NV8 # 6.0 KISSING FACE WITH CLOSED EYES +1F61B ; valid ; ; NV8 # 6.1 FACE WITH STUCK-OUT TONGUE +1F61C..1F61E ; valid ; ; NV8 # 6.0 FACE WITH STUCK-OUT TONGUE AND WINKING EYE..DISAPPOINTED FACE +1F61F ; valid ; ; NV8 # 6.1 WORRIED FACE +1F620..1F625 ; valid ; ; NV8 # 6.0 ANGRY FACE..DISAPPOINTED BUT RELIEVED FACE +1F626..1F627 ; valid ; ; NV8 # 6.1 FROWNING FACE WITH OPEN MOUTH..ANGUISHED FACE +1F628..1F62B ; valid ; ; NV8 # 6.0 FEARFUL FACE..TIRED FACE +1F62C ; valid ; ; NV8 # 6.1 GRIMACING FACE +1F62D ; valid ; ; NV8 # 6.0 LOUDLY CRYING FACE +1F62E..1F62F ; valid ; ; NV8 # 6.1 FACE WITH OPEN MOUTH..HUSHED FACE +1F630..1F633 ; valid ; ; NV8 # 6.0 FACE WITH OPEN MOUTH AND COLD SWEAT..FLUSHED FACE +1F634 ; valid ; ; NV8 # 6.1 SLEEPING FACE +1F635..1F640 ; valid ; ; NV8 # 6.0 DIZZY FACE..WEARY CAT FACE +1F641..1F642 ; valid ; ; NV8 # 7.0 SLIGHTLY FROWNING FACE..SLIGHTLY SMILING FACE +1F643..1F644 ; valid ; ; NV8 # 8.0 UPSIDE-DOWN FACE..FACE WITH ROLLING EYES +1F645..1F64F ; valid ; ; NV8 # 6.0 FACE WITH NO GOOD GESTURE..PERSON WITH FOLDED HANDS +1F650..1F67F ; valid ; ; NV8 # 7.0 NORTH WEST POINTING LEAF..REVERSE CHECKER BOARD +1F680..1F6C5 ; valid ; ; NV8 # 6.0 ROCKET..LEFT LUGGAGE +1F6C6..1F6CF ; valid ; ; NV8 # 7.0 TRIANGLE WITH ROUNDED CORNERS..BED +1F6D0 ; valid ; ; NV8 # 8.0 PLACE OF WORSHIP +1F6D1..1F6D2 ; valid ; ; NV8 # 9.0 OCTAGONAL SIGN..SHOPPING TROLLEY +1F6D3..1F6D4 ; valid ; ; NV8 # 10.0 STUPA..PAGODA +1F6D5..1F6DF ; disallowed # NA .. +1F6E0..1F6EC ; valid ; ; NV8 # 7.0 HAMMER AND WRENCH..AIRPLANE ARRIVING +1F6ED..1F6EF ; disallowed # NA .. +1F6F0..1F6F3 ; valid ; ; NV8 # 7.0 SATELLITE..PASSENGER SHIP +1F6F4..1F6F6 ; valid ; ; NV8 # 9.0 SCOOTER..CANOE +1F6F7..1F6F8 ; valid ; ; NV8 # 10.0 SLED..FLYING SAUCER +1F6F9..1F6FF ; disallowed # NA .. +1F700..1F773 ; valid ; ; NV8 # 6.0 ALCHEMICAL SYMBOL FOR QUINTESSENCE..ALCHEMICAL SYMBOL FOR HALF OUNCE +1F774..1F77F ; disallowed # NA .. +1F780..1F7D4 ; valid ; ; NV8 # 7.0 BLACK LEFT-POINTING ISOSCELES RIGHT TRIANGLE..HEAVY TWELVE POINTED PINWHEEL STAR +1F7D5..1F7FF ; disallowed # NA .. +1F800..1F80B ; valid ; ; NV8 # 7.0 LEFTWARDS ARROW WITH SMALL TRIANGLE ARROWHEAD..DOWNWARDS ARROW WITH LARGE TRIANGLE ARROWHEAD +1F80C..1F80F ; disallowed # NA .. 
+1F810..1F847 ; valid ; ; NV8 # 7.0 LEFTWARDS ARROW WITH SMALL EQUILATERAL ARROWHEAD..DOWNWARDS HEAVY ARROW +1F848..1F84F ; disallowed # NA .. +1F850..1F859 ; valid ; ; NV8 # 7.0 LEFTWARDS SANS-SERIF ARROW..UP DOWN SANS-SERIF ARROW +1F85A..1F85F ; disallowed # NA .. +1F860..1F887 ; valid ; ; NV8 # 7.0 WIDE-HEADED LEFTWARDS LIGHT BARB ARROW..WIDE-HEADED SOUTH WEST VERY HEAVY BARB ARROW +1F888..1F88F ; disallowed # NA .. +1F890..1F8AD ; valid ; ; NV8 # 7.0 LEFTWARDS TRIANGLE ARROWHEAD..WHITE ARROW SHAFT WIDTH TWO THIRDS +1F8AE..1F8FF ; disallowed # NA .. +1F900..1F90B ; valid ; ; NV8 # 10.0 CIRCLED CROSS FORMEE WITH FOUR DOTS..DOWNWARD FACING NOTCHED HOOK WITH DOT +1F90C..1F90F ; disallowed # NA .. +1F910..1F918 ; valid ; ; NV8 # 8.0 ZIPPER-MOUTH FACE..SIGN OF THE HORNS +1F919..1F91E ; valid ; ; NV8 # 9.0 CALL ME HAND..HAND WITH INDEX AND MIDDLE FINGERS CROSSED +1F91F ; valid ; ; NV8 # 10.0 I LOVE YOU HAND SIGN +1F920..1F927 ; valid ; ; NV8 # 9.0 FACE WITH COWBOY HAT..SNEEZING FACE +1F928..1F92F ; valid ; ; NV8 # 10.0 FACE WITH ONE EYEBROW RAISED..SHOCKED FACE WITH EXPLODING HEAD +1F930 ; valid ; ; NV8 # 9.0 PREGNANT WOMAN +1F931..1F932 ; valid ; ; NV8 # 10.0 BREAST-FEEDING..PALMS UP TOGETHER +1F933..1F93E ; valid ; ; NV8 # 9.0 SELFIE..HANDBALL +1F93F ; disallowed # NA +1F940..1F94B ; valid ; ; NV8 # 9.0 WILTED FLOWER..MARTIAL ARTS UNIFORM +1F94C ; valid ; ; NV8 # 10.0 CURLING STONE +1F94D..1F94F ; disallowed # NA .. +1F950..1F95E ; valid ; ; NV8 # 9.0 CROISSANT..PANCAKES +1F95F..1F96B ; valid ; ; NV8 # 10.0 DUMPLING..CANNED FOOD +1F96C..1F97F ; disallowed # NA .. +1F980..1F984 ; valid ; ; NV8 # 8.0 CRAB..UNICORN FACE +1F985..1F991 ; valid ; ; NV8 # 9.0 EAGLE..SQUID +1F992..1F997 ; valid ; ; NV8 # 10.0 GIRAFFE FACE..CRICKET +1F998..1F9BF ; disallowed # NA .. +1F9C0 ; valid ; ; NV8 # 8.0 CHEESE WEDGE +1F9C1..1F9CF ; disallowed # NA .. +1F9D0..1F9E6 ; valid ; ; NV8 # 10.0 FACE WITH MONOCLE..SOCKS +1F9E7..1FFFD ; disallowed # NA .. +1FFFE..1FFFF ; disallowed # 2.0 .. +20000..2A6D6 ; valid # 3.1 CJK UNIFIED IDEOGRAPH-20000..CJK UNIFIED IDEOGRAPH-2A6D6 +2A6D7..2A6FF ; disallowed # NA .. +2A700..2B734 ; valid # 5.2 CJK UNIFIED IDEOGRAPH-2A700..CJK UNIFIED IDEOGRAPH-2B734 +2B735..2B73F ; disallowed # NA .. +2B740..2B81D ; valid # 6.0 CJK UNIFIED IDEOGRAPH-2B740..CJK UNIFIED IDEOGRAPH-2B81D +2B81E..2B81F ; disallowed # NA .. +2B820..2CEA1 ; valid # 8.0 CJK UNIFIED IDEOGRAPH-2B820..CJK UNIFIED IDEOGRAPH-2CEA1 +2CEA2..2CEAF ; disallowed # NA .. +2CEB0..2EBE0 ; valid # 10.0 CJK UNIFIED IDEOGRAPH-2CEB0..CJK UNIFIED IDEOGRAPH-2EBE0 +2EBE1..2F7FF ; disallowed # NA .. 
+2F800 ; mapped ; 4E3D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F800 +2F801 ; mapped ; 4E38 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F801 +2F802 ; mapped ; 4E41 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F802 +2F803 ; mapped ; 20122 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F803 +2F804 ; mapped ; 4F60 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F804 +2F805 ; mapped ; 4FAE # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F805 +2F806 ; mapped ; 4FBB # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F806 +2F807 ; mapped ; 5002 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F807 +2F808 ; mapped ; 507A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F808 +2F809 ; mapped ; 5099 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F809 +2F80A ; mapped ; 50E7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F80A +2F80B ; mapped ; 50CF # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F80B +2F80C ; mapped ; 349E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F80C +2F80D ; mapped ; 2063A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F80D +2F80E ; mapped ; 514D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F80E +2F80F ; mapped ; 5154 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F80F +2F810 ; mapped ; 5164 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F810 +2F811 ; mapped ; 5177 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F811 +2F812 ; mapped ; 2051C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F812 +2F813 ; mapped ; 34B9 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F813 +2F814 ; mapped ; 5167 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F814 +2F815 ; mapped ; 518D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F815 +2F816 ; mapped ; 2054B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F816 +2F817 ; mapped ; 5197 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F817 +2F818 ; mapped ; 51A4 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F818 +2F819 ; mapped ; 4ECC # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F819 +2F81A ; mapped ; 51AC # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F81A +2F81B ; mapped ; 51B5 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F81B +2F81C ; mapped ; 291DF # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F81C +2F81D ; mapped ; 51F5 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F81D +2F81E ; mapped ; 5203 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F81E +2F81F ; mapped ; 34DF # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F81F +2F820 ; mapped ; 523B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F820 +2F821 ; mapped ; 5246 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F821 +2F822 ; mapped ; 5272 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F822 +2F823 ; mapped ; 5277 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F823 +2F824 ; mapped ; 3515 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F824 +2F825 ; mapped ; 52C7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F825 +2F826 ; mapped ; 52C9 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F826 +2F827 ; mapped ; 52E4 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F827 +2F828 ; mapped ; 52FA # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F828 +2F829 ; mapped ; 5305 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F829 +2F82A ; mapped ; 5306 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F82A +2F82B ; mapped ; 5317 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F82B +2F82C ; mapped ; 5349 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F82C +2F82D ; mapped ; 5351 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F82D +2F82E ; mapped ; 535A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F82E +2F82F ; mapped ; 5373 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F82F +2F830 ; mapped ; 537D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F830 +2F831..2F833 ; mapped ; 537F # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F831..CJK COMPATIBILITY IDEOGRAPH-2F833 +2F834 ; mapped ; 20A2C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F834 +2F835 ; mapped ; 7070 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F835 +2F836 ; mapped ; 53CA # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F836 +2F837 ; mapped ; 53DF # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F837 +2F838 ; mapped ; 20B63 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F838 +2F839 ; mapped ; 53EB # 3.1 CJK 
COMPATIBILITY IDEOGRAPH-2F839 +2F83A ; mapped ; 53F1 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F83A +2F83B ; mapped ; 5406 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F83B +2F83C ; mapped ; 549E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F83C +2F83D ; mapped ; 5438 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F83D +2F83E ; mapped ; 5448 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F83E +2F83F ; mapped ; 5468 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F83F +2F840 ; mapped ; 54A2 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F840 +2F841 ; mapped ; 54F6 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F841 +2F842 ; mapped ; 5510 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F842 +2F843 ; mapped ; 5553 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F843 +2F844 ; mapped ; 5563 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F844 +2F845..2F846 ; mapped ; 5584 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F845..CJK COMPATIBILITY IDEOGRAPH-2F846 +2F847 ; mapped ; 5599 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F847 +2F848 ; mapped ; 55AB # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F848 +2F849 ; mapped ; 55B3 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F849 +2F84A ; mapped ; 55C2 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F84A +2F84B ; mapped ; 5716 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F84B +2F84C ; mapped ; 5606 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F84C +2F84D ; mapped ; 5717 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F84D +2F84E ; mapped ; 5651 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F84E +2F84F ; mapped ; 5674 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F84F +2F850 ; mapped ; 5207 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F850 +2F851 ; mapped ; 58EE # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F851 +2F852 ; mapped ; 57CE # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F852 +2F853 ; mapped ; 57F4 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F853 +2F854 ; mapped ; 580D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F854 +2F855 ; mapped ; 578B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F855 +2F856 ; mapped ; 5832 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F856 +2F857 ; mapped ; 5831 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F857 +2F858 ; mapped ; 58AC # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F858 +2F859 ; mapped ; 214E4 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F859 +2F85A ; mapped ; 58F2 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F85A +2F85B ; mapped ; 58F7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F85B +2F85C ; mapped ; 5906 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F85C +2F85D ; mapped ; 591A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F85D +2F85E ; mapped ; 5922 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F85E +2F85F ; mapped ; 5962 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F85F +2F860 ; mapped ; 216A8 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F860 +2F861 ; mapped ; 216EA # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F861 +2F862 ; mapped ; 59EC # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F862 +2F863 ; mapped ; 5A1B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F863 +2F864 ; mapped ; 5A27 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F864 +2F865 ; mapped ; 59D8 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F865 +2F866 ; mapped ; 5A66 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F866 +2F867 ; mapped ; 36EE # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F867 +2F868 ; disallowed # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F868 +2F869 ; mapped ; 5B08 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F869 +2F86A..2F86B ; mapped ; 5B3E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F86A..CJK COMPATIBILITY IDEOGRAPH-2F86B +2F86C ; mapped ; 219C8 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F86C +2F86D ; mapped ; 5BC3 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F86D +2F86E ; mapped ; 5BD8 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F86E +2F86F ; mapped ; 5BE7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F86F +2F870 ; mapped ; 5BF3 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F870 +2F871 ; mapped ; 21B18 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F871 +2F872 ; mapped ; 5BFF # 3.1 CJK 
COMPATIBILITY IDEOGRAPH-2F872 +2F873 ; mapped ; 5C06 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F873 +2F874 ; disallowed # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F874 +2F875 ; mapped ; 5C22 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F875 +2F876 ; mapped ; 3781 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F876 +2F877 ; mapped ; 5C60 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F877 +2F878 ; mapped ; 5C6E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F878 +2F879 ; mapped ; 5CC0 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F879 +2F87A ; mapped ; 5C8D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F87A +2F87B ; mapped ; 21DE4 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F87B +2F87C ; mapped ; 5D43 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F87C +2F87D ; mapped ; 21DE6 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F87D +2F87E ; mapped ; 5D6E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F87E +2F87F ; mapped ; 5D6B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F87F +2F880 ; mapped ; 5D7C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F880 +2F881 ; mapped ; 5DE1 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F881 +2F882 ; mapped ; 5DE2 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F882 +2F883 ; mapped ; 382F # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F883 +2F884 ; mapped ; 5DFD # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F884 +2F885 ; mapped ; 5E28 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F885 +2F886 ; mapped ; 5E3D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F886 +2F887 ; mapped ; 5E69 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F887 +2F888 ; mapped ; 3862 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F888 +2F889 ; mapped ; 22183 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F889 +2F88A ; mapped ; 387C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F88A +2F88B ; mapped ; 5EB0 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F88B +2F88C ; mapped ; 5EB3 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F88C +2F88D ; mapped ; 5EB6 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F88D +2F88E ; mapped ; 5ECA # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F88E +2F88F ; mapped ; 2A392 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F88F +2F890 ; mapped ; 5EFE # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F890 +2F891..2F892 ; mapped ; 22331 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F891..CJK COMPATIBILITY IDEOGRAPH-2F892 +2F893 ; mapped ; 8201 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F893 +2F894..2F895 ; mapped ; 5F22 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F894..CJK COMPATIBILITY IDEOGRAPH-2F895 +2F896 ; mapped ; 38C7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F896 +2F897 ; mapped ; 232B8 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F897 +2F898 ; mapped ; 261DA # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F898 +2F899 ; mapped ; 5F62 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F899 +2F89A ; mapped ; 5F6B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F89A +2F89B ; mapped ; 38E3 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F89B +2F89C ; mapped ; 5F9A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F89C +2F89D ; mapped ; 5FCD # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F89D +2F89E ; mapped ; 5FD7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F89E +2F89F ; mapped ; 5FF9 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F89F +2F8A0 ; mapped ; 6081 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8A0 +2F8A1 ; mapped ; 393A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8A1 +2F8A2 ; mapped ; 391C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8A2 +2F8A3 ; mapped ; 6094 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8A3 +2F8A4 ; mapped ; 226D4 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8A4 +2F8A5 ; mapped ; 60C7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8A5 +2F8A6 ; mapped ; 6148 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8A6 +2F8A7 ; mapped ; 614C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8A7 +2F8A8 ; mapped ; 614E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8A8 +2F8A9 ; mapped ; 614C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8A9 +2F8AA ; mapped ; 617A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8AA +2F8AB ; mapped ; 618E # 3.1 CJK 
COMPATIBILITY IDEOGRAPH-2F8AB +2F8AC ; mapped ; 61B2 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8AC +2F8AD ; mapped ; 61A4 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8AD +2F8AE ; mapped ; 61AF # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8AE +2F8AF ; mapped ; 61DE # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8AF +2F8B0 ; mapped ; 61F2 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8B0 +2F8B1 ; mapped ; 61F6 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8B1 +2F8B2 ; mapped ; 6210 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8B2 +2F8B3 ; mapped ; 621B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8B3 +2F8B4 ; mapped ; 625D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8B4 +2F8B5 ; mapped ; 62B1 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8B5 +2F8B6 ; mapped ; 62D4 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8B6 +2F8B7 ; mapped ; 6350 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8B7 +2F8B8 ; mapped ; 22B0C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8B8 +2F8B9 ; mapped ; 633D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8B9 +2F8BA ; mapped ; 62FC # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8BA +2F8BB ; mapped ; 6368 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8BB +2F8BC ; mapped ; 6383 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8BC +2F8BD ; mapped ; 63E4 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8BD +2F8BE ; mapped ; 22BF1 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8BE +2F8BF ; mapped ; 6422 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8BF +2F8C0 ; mapped ; 63C5 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8C0 +2F8C1 ; mapped ; 63A9 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8C1 +2F8C2 ; mapped ; 3A2E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8C2 +2F8C3 ; mapped ; 6469 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8C3 +2F8C4 ; mapped ; 647E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8C4 +2F8C5 ; mapped ; 649D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8C5 +2F8C6 ; mapped ; 6477 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8C6 +2F8C7 ; mapped ; 3A6C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8C7 +2F8C8 ; mapped ; 654F # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8C8 +2F8C9 ; mapped ; 656C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8C9 +2F8CA ; mapped ; 2300A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8CA +2F8CB ; mapped ; 65E3 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8CB +2F8CC ; mapped ; 66F8 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8CC +2F8CD ; mapped ; 6649 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8CD +2F8CE ; mapped ; 3B19 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8CE +2F8CF ; mapped ; 6691 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8CF +2F8D0 ; mapped ; 3B08 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8D0 +2F8D1 ; mapped ; 3AE4 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8D1 +2F8D2 ; mapped ; 5192 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8D2 +2F8D3 ; mapped ; 5195 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8D3 +2F8D4 ; mapped ; 6700 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8D4 +2F8D5 ; mapped ; 669C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8D5 +2F8D6 ; mapped ; 80AD # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8D6 +2F8D7 ; mapped ; 43D9 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8D7 +2F8D8 ; mapped ; 6717 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8D8 +2F8D9 ; mapped ; 671B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8D9 +2F8DA ; mapped ; 6721 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8DA +2F8DB ; mapped ; 675E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8DB +2F8DC ; mapped ; 6753 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8DC +2F8DD ; mapped ; 233C3 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8DD +2F8DE ; mapped ; 3B49 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8DE +2F8DF ; mapped ; 67FA # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8DF +2F8E0 ; mapped ; 6785 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8E0 +2F8E1 ; mapped ; 6852 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8E1 +2F8E2 ; mapped ; 6885 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8E2 +2F8E3 ; mapped ; 2346D # 3.1 CJK COMPATIBILITY 
IDEOGRAPH-2F8E3 +2F8E4 ; mapped ; 688E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8E4 +2F8E5 ; mapped ; 681F # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8E5 +2F8E6 ; mapped ; 6914 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8E6 +2F8E7 ; mapped ; 3B9D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8E7 +2F8E8 ; mapped ; 6942 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8E8 +2F8E9 ; mapped ; 69A3 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8E9 +2F8EA ; mapped ; 69EA # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8EA +2F8EB ; mapped ; 6AA8 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8EB +2F8EC ; mapped ; 236A3 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8EC +2F8ED ; mapped ; 6ADB # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8ED +2F8EE ; mapped ; 3C18 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8EE +2F8EF ; mapped ; 6B21 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8EF +2F8F0 ; mapped ; 238A7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8F0 +2F8F1 ; mapped ; 6B54 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8F1 +2F8F2 ; mapped ; 3C4E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8F2 +2F8F3 ; mapped ; 6B72 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8F3 +2F8F4 ; mapped ; 6B9F # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8F4 +2F8F5 ; mapped ; 6BBA # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8F5 +2F8F6 ; mapped ; 6BBB # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8F6 +2F8F7 ; mapped ; 23A8D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8F7 +2F8F8 ; mapped ; 21D0B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8F8 +2F8F9 ; mapped ; 23AFA # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8F9 +2F8FA ; mapped ; 6C4E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8FA +2F8FB ; mapped ; 23CBC # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8FB +2F8FC ; mapped ; 6CBF # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8FC +2F8FD ; mapped ; 6CCD # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8FD +2F8FE ; mapped ; 6C67 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8FE +2F8FF ; mapped ; 6D16 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8FF +2F900 ; mapped ; 6D3E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F900 +2F901 ; mapped ; 6D77 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F901 +2F902 ; mapped ; 6D41 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F902 +2F903 ; mapped ; 6D69 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F903 +2F904 ; mapped ; 6D78 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F904 +2F905 ; mapped ; 6D85 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F905 +2F906 ; mapped ; 23D1E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F906 +2F907 ; mapped ; 6D34 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F907 +2F908 ; mapped ; 6E2F # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F908 +2F909 ; mapped ; 6E6E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F909 +2F90A ; mapped ; 3D33 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F90A +2F90B ; mapped ; 6ECB # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F90B +2F90C ; mapped ; 6EC7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F90C +2F90D ; mapped ; 23ED1 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F90D +2F90E ; mapped ; 6DF9 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F90E +2F90F ; mapped ; 6F6E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F90F +2F910 ; mapped ; 23F5E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F910 +2F911 ; mapped ; 23F8E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F911 +2F912 ; mapped ; 6FC6 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F912 +2F913 ; mapped ; 7039 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F913 +2F914 ; mapped ; 701E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F914 +2F915 ; mapped ; 701B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F915 +2F916 ; mapped ; 3D96 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F916 +2F917 ; mapped ; 704A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F917 +2F918 ; mapped ; 707D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F918 +2F919 ; mapped ; 7077 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F919 +2F91A ; mapped ; 70AD # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F91A +2F91B ; mapped ; 20525 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F91B 
+2F91C ; mapped ; 7145 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F91C +2F91D ; mapped ; 24263 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F91D +2F91E ; mapped ; 719C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F91E +2F91F ; disallowed # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F91F +2F920 ; mapped ; 7228 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F920 +2F921 ; mapped ; 7235 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F921 +2F922 ; mapped ; 7250 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F922 +2F923 ; mapped ; 24608 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F923 +2F924 ; mapped ; 7280 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F924 +2F925 ; mapped ; 7295 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F925 +2F926 ; mapped ; 24735 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F926 +2F927 ; mapped ; 24814 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F927 +2F928 ; mapped ; 737A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F928 +2F929 ; mapped ; 738B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F929 +2F92A ; mapped ; 3EAC # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F92A +2F92B ; mapped ; 73A5 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F92B +2F92C..2F92D ; mapped ; 3EB8 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F92C..CJK COMPATIBILITY IDEOGRAPH-2F92D +2F92E ; mapped ; 7447 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F92E +2F92F ; mapped ; 745C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F92F +2F930 ; mapped ; 7471 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F930 +2F931 ; mapped ; 7485 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F931 +2F932 ; mapped ; 74CA # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F932 +2F933 ; mapped ; 3F1B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F933 +2F934 ; mapped ; 7524 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F934 +2F935 ; mapped ; 24C36 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F935 +2F936 ; mapped ; 753E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F936 +2F937 ; mapped ; 24C92 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F937 +2F938 ; mapped ; 7570 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F938 +2F939 ; mapped ; 2219F # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F939 +2F93A ; mapped ; 7610 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F93A +2F93B ; mapped ; 24FA1 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F93B +2F93C ; mapped ; 24FB8 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F93C +2F93D ; mapped ; 25044 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F93D +2F93E ; mapped ; 3FFC # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F93E +2F93F ; mapped ; 4008 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F93F +2F940 ; mapped ; 76F4 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F940 +2F941 ; mapped ; 250F3 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F941 +2F942 ; mapped ; 250F2 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F942 +2F943 ; mapped ; 25119 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F943 +2F944 ; mapped ; 25133 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F944 +2F945 ; mapped ; 771E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F945 +2F946..2F947 ; mapped ; 771F # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F946..CJK COMPATIBILITY IDEOGRAPH-2F947 +2F948 ; mapped ; 774A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F948 +2F949 ; mapped ; 4039 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F949 +2F94A ; mapped ; 778B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F94A +2F94B ; mapped ; 4046 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F94B +2F94C ; mapped ; 4096 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F94C +2F94D ; mapped ; 2541D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F94D +2F94E ; mapped ; 784E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F94E +2F94F ; mapped ; 788C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F94F +2F950 ; mapped ; 78CC # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F950 +2F951 ; mapped ; 40E3 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F951 +2F952 ; mapped ; 25626 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F952 +2F953 ; mapped ; 7956 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F953 +2F954 ; mapped ; 2569A # 3.1 CJK COMPATIBILITY 
IDEOGRAPH-2F954 +2F955 ; mapped ; 256C5 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F955 +2F956 ; mapped ; 798F # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F956 +2F957 ; mapped ; 79EB # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F957 +2F958 ; mapped ; 412F # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F958 +2F959 ; mapped ; 7A40 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F959 +2F95A ; mapped ; 7A4A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F95A +2F95B ; mapped ; 7A4F # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F95B +2F95C ; mapped ; 2597C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F95C +2F95D..2F95E ; mapped ; 25AA7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F95D..CJK COMPATIBILITY IDEOGRAPH-2F95E +2F95F ; disallowed # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F95F +2F960 ; mapped ; 4202 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F960 +2F961 ; mapped ; 25BAB # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F961 +2F962 ; mapped ; 7BC6 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F962 +2F963 ; mapped ; 7BC9 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F963 +2F964 ; mapped ; 4227 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F964 +2F965 ; mapped ; 25C80 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F965 +2F966 ; mapped ; 7CD2 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F966 +2F967 ; mapped ; 42A0 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F967 +2F968 ; mapped ; 7CE8 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F968 +2F969 ; mapped ; 7CE3 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F969 +2F96A ; mapped ; 7D00 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F96A +2F96B ; mapped ; 25F86 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F96B +2F96C ; mapped ; 7D63 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F96C +2F96D ; mapped ; 4301 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F96D +2F96E ; mapped ; 7DC7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F96E +2F96F ; mapped ; 7E02 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F96F +2F970 ; mapped ; 7E45 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F970 +2F971 ; mapped ; 4334 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F971 +2F972 ; mapped ; 26228 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F972 +2F973 ; mapped ; 26247 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F973 +2F974 ; mapped ; 4359 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F974 +2F975 ; mapped ; 262D9 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F975 +2F976 ; mapped ; 7F7A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F976 +2F977 ; mapped ; 2633E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F977 +2F978 ; mapped ; 7F95 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F978 +2F979 ; mapped ; 7FFA # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F979 +2F97A ; mapped ; 8005 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F97A +2F97B ; mapped ; 264DA # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F97B +2F97C ; mapped ; 26523 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F97C +2F97D ; mapped ; 8060 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F97D +2F97E ; mapped ; 265A8 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F97E +2F97F ; mapped ; 8070 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F97F +2F980 ; mapped ; 2335F # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F980 +2F981 ; mapped ; 43D5 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F981 +2F982 ; mapped ; 80B2 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F982 +2F983 ; mapped ; 8103 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F983 +2F984 ; mapped ; 440B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F984 +2F985 ; mapped ; 813E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F985 +2F986 ; mapped ; 5AB5 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F986 +2F987 ; mapped ; 267A7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F987 +2F988 ; mapped ; 267B5 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F988 +2F989 ; mapped ; 23393 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F989 +2F98A ; mapped ; 2339C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F98A +2F98B ; mapped ; 8201 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F98B +2F98C ; mapped ; 8204 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F98C +2F98D ; mapped 
; 8F9E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F98D +2F98E ; mapped ; 446B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F98E +2F98F ; mapped ; 8291 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F98F +2F990 ; mapped ; 828B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F990 +2F991 ; mapped ; 829D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F991 +2F992 ; mapped ; 52B3 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F992 +2F993 ; mapped ; 82B1 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F993 +2F994 ; mapped ; 82B3 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F994 +2F995 ; mapped ; 82BD # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F995 +2F996 ; mapped ; 82E6 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F996 +2F997 ; mapped ; 26B3C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F997 +2F998 ; mapped ; 82E5 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F998 +2F999 ; mapped ; 831D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F999 +2F99A ; mapped ; 8363 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F99A +2F99B ; mapped ; 83AD # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F99B +2F99C ; mapped ; 8323 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F99C +2F99D ; mapped ; 83BD # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F99D +2F99E ; mapped ; 83E7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F99E +2F99F ; mapped ; 8457 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F99F +2F9A0 ; mapped ; 8353 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9A0 +2F9A1 ; mapped ; 83CA # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9A1 +2F9A2 ; mapped ; 83CC # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9A2 +2F9A3 ; mapped ; 83DC # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9A3 +2F9A4 ; mapped ; 26C36 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9A4 +2F9A5 ; mapped ; 26D6B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9A5 +2F9A6 ; mapped ; 26CD5 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9A6 +2F9A7 ; mapped ; 452B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9A7 +2F9A8 ; mapped ; 84F1 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9A8 +2F9A9 ; mapped ; 84F3 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9A9 +2F9AA ; mapped ; 8516 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9AA +2F9AB ; mapped ; 273CA # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9AB +2F9AC ; mapped ; 8564 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9AC +2F9AD ; mapped ; 26F2C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9AD +2F9AE ; mapped ; 455D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9AE +2F9AF ; mapped ; 4561 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9AF +2F9B0 ; mapped ; 26FB1 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9B0 +2F9B1 ; mapped ; 270D2 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9B1 +2F9B2 ; mapped ; 456B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9B2 +2F9B3 ; mapped ; 8650 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9B3 +2F9B4 ; mapped ; 865C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9B4 +2F9B5 ; mapped ; 8667 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9B5 +2F9B6 ; mapped ; 8669 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9B6 +2F9B7 ; mapped ; 86A9 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9B7 +2F9B8 ; mapped ; 8688 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9B8 +2F9B9 ; mapped ; 870E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9B9 +2F9BA ; mapped ; 86E2 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9BA +2F9BB ; mapped ; 8779 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9BB +2F9BC ; mapped ; 8728 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9BC +2F9BD ; mapped ; 876B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9BD +2F9BE ; mapped ; 8786 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9BE +2F9BF ; disallowed # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9BF +2F9C0 ; mapped ; 87E1 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9C0 +2F9C1 ; mapped ; 8801 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9C1 +2F9C2 ; mapped ; 45F9 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9C2 +2F9C3 ; mapped ; 8860 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9C3 +2F9C4 ; mapped ; 8863 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9C4 +2F9C5 ; mapped ; 27667 # 3.1 CJK 
COMPATIBILITY IDEOGRAPH-2F9C5 +2F9C6 ; mapped ; 88D7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9C6 +2F9C7 ; mapped ; 88DE # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9C7 +2F9C8 ; mapped ; 4635 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9C8 +2F9C9 ; mapped ; 88FA # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9C9 +2F9CA ; mapped ; 34BB # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9CA +2F9CB ; mapped ; 278AE # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9CB +2F9CC ; mapped ; 27966 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9CC +2F9CD ; mapped ; 46BE # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9CD +2F9CE ; mapped ; 46C7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9CE +2F9CF ; mapped ; 8AA0 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9CF +2F9D0 ; mapped ; 8AED # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9D0 +2F9D1 ; mapped ; 8B8A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9D1 +2F9D2 ; mapped ; 8C55 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9D2 +2F9D3 ; mapped ; 27CA8 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9D3 +2F9D4 ; mapped ; 8CAB # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9D4 +2F9D5 ; mapped ; 8CC1 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9D5 +2F9D6 ; mapped ; 8D1B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9D6 +2F9D7 ; mapped ; 8D77 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9D7 +2F9D8 ; mapped ; 27F2F # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9D8 +2F9D9 ; mapped ; 20804 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9D9 +2F9DA ; mapped ; 8DCB # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9DA +2F9DB ; mapped ; 8DBC # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9DB +2F9DC ; mapped ; 8DF0 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9DC +2F9DD ; mapped ; 208DE # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9DD +2F9DE ; mapped ; 8ED4 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9DE +2F9DF ; mapped ; 8F38 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9DF +2F9E0 ; mapped ; 285D2 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9E0 +2F9E1 ; mapped ; 285ED # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9E1 +2F9E2 ; mapped ; 9094 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9E2 +2F9E3 ; mapped ; 90F1 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9E3 +2F9E4 ; mapped ; 9111 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9E4 +2F9E5 ; mapped ; 2872E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9E5 +2F9E6 ; mapped ; 911B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9E6 +2F9E7 ; mapped ; 9238 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9E7 +2F9E8 ; mapped ; 92D7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9E8 +2F9E9 ; mapped ; 92D8 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9E9 +2F9EA ; mapped ; 927C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9EA +2F9EB ; mapped ; 93F9 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9EB +2F9EC ; mapped ; 9415 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9EC +2F9ED ; mapped ; 28BFA # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9ED +2F9EE ; mapped ; 958B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9EE +2F9EF ; mapped ; 4995 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9EF +2F9F0 ; mapped ; 95B7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9F0 +2F9F1 ; mapped ; 28D77 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9F1 +2F9F2 ; mapped ; 49E6 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9F2 +2F9F3 ; mapped ; 96C3 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9F3 +2F9F4 ; mapped ; 5DB2 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9F4 +2F9F5 ; mapped ; 9723 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9F5 +2F9F6 ; mapped ; 29145 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9F6 +2F9F7 ; mapped ; 2921A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9F7 +2F9F8 ; mapped ; 4A6E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9F8 +2F9F9 ; mapped ; 4A76 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9F9 +2F9FA ; mapped ; 97E0 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9FA +2F9FB ; mapped ; 2940A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9FB +2F9FC ; mapped ; 4AB2 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9FC +2F9FD ; mapped ; 29496 # 3.1 CJK 
COMPATIBILITY IDEOGRAPH-2F9FD +2F9FE..2F9FF ; mapped ; 980B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9FE..CJK COMPATIBILITY IDEOGRAPH-2F9FF +2FA00 ; mapped ; 9829 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA00 +2FA01 ; mapped ; 295B6 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA01 +2FA02 ; mapped ; 98E2 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA02 +2FA03 ; mapped ; 4B33 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA03 +2FA04 ; mapped ; 9929 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA04 +2FA05 ; mapped ; 99A7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA05 +2FA06 ; mapped ; 99C2 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA06 +2FA07 ; mapped ; 99FE # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA07 +2FA08 ; mapped ; 4BCE # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA08 +2FA09 ; mapped ; 29B30 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA09 +2FA0A ; mapped ; 9B12 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA0A +2FA0B ; mapped ; 9C40 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA0B +2FA0C ; mapped ; 9CFD # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA0C +2FA0D ; mapped ; 4CCE # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA0D +2FA0E ; mapped ; 4CED # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA0E +2FA0F ; mapped ; 9D67 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA0F +2FA10 ; mapped ; 2A0CE # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA10 +2FA11 ; mapped ; 4CF8 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA11 +2FA12 ; mapped ; 2A105 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA12 +2FA13 ; mapped ; 2A20E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA13 +2FA14 ; mapped ; 2A291 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA14 +2FA15 ; mapped ; 9EBB # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA15 +2FA16 ; mapped ; 4D56 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA16 +2FA17 ; mapped ; 9EF9 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA17 +2FA18 ; mapped ; 9EFE # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA18 +2FA19 ; mapped ; 9F05 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA19 +2FA1A ; mapped ; 9F0F # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA1A +2FA1B ; mapped ; 9F16 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA1B +2FA1C ; mapped ; 9F3B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA1C +2FA1D ; mapped ; 2A600 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA1D +2FA1E..2FFFD ; disallowed # NA .. +2FFFE..2FFFF ; disallowed # 2.0 .. +30000..3FFFD ; disallowed # NA .. +3FFFE..3FFFF ; disallowed # 2.0 .. +40000..4FFFD ; disallowed # NA .. +4FFFE..4FFFF ; disallowed # 2.0 .. +50000..5FFFD ; disallowed # NA .. +5FFFE..5FFFF ; disallowed # 2.0 .. +60000..6FFFD ; disallowed # NA .. +6FFFE..6FFFF ; disallowed # 2.0 .. +70000..7FFFD ; disallowed # NA .. +7FFFE..7FFFF ; disallowed # 2.0 .. +80000..8FFFD ; disallowed # NA .. +8FFFE..8FFFF ; disallowed # 2.0 .. +90000..9FFFD ; disallowed # NA .. +9FFFE..9FFFF ; disallowed # 2.0 .. +A0000..AFFFD ; disallowed # NA .. +AFFFE..AFFFF ; disallowed # 2.0 .. +B0000..BFFFD ; disallowed # NA .. +BFFFE..BFFFF ; disallowed # 2.0 .. +C0000..CFFFD ; disallowed # NA .. +CFFFE..CFFFF ; disallowed # 2.0 .. +D0000..DFFFD ; disallowed # NA .. +DFFFE..DFFFF ; disallowed # 2.0 .. +E0000 ; disallowed # NA +E0001 ; disallowed # 3.1 LANGUAGE TAG +E0002..E001F ; disallowed # NA .. +E0020..E007F ; disallowed # 3.1 TAG SPACE..CANCEL TAG +E0080..E00FF ; disallowed # NA .. +E0100..E01EF ; ignored # 4.0 VARIATION SELECTOR-17..VARIATION SELECTOR-256 +E01F0..EFFFD ; disallowed # NA .. +EFFFE..EFFFF ; disallowed # 2.0 .. +F0000..FFFFD ; disallowed # 2.0 .. +FFFFE..FFFFF ; disallowed # 2.0 .. +100000..10FFFD; disallowed # 2.0 .. +10FFFE..10FFFF; disallowed # 2.0 .. 
+ +# Total code points: 1114112 + diff --git a/idna/src/lib.rs b/idna/src/lib.rs new file mode 100644 index 000000000..92b5df230 --- /dev/null +++ b/idna/src/lib.rs @@ -0,0 +1,73 @@ +// Copyright 2016 The rust-url developers. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! This Rust crate implements IDNA +//! [per the WHATWG URL Standard](https://url.spec.whatwg.org/#idna). +//! +//! It also exposes the underlying algorithms from [*Unicode IDNA Compatibility Processing* +//! (Unicode Technical Standard #46)](http://www.unicode.org/reports/tr46/) +//! and [Punycode (RFC 3492)](https://tools.ietf.org/html/rfc3492). +//! +//! Quoting from [UTS #46’s introduction](http://www.unicode.org/reports/tr46/#Introduction): +//! +//! > Initially, domain names were restricted to ASCII characters. +//! > A system was introduced in 2003 for internationalized domain names (IDN). +//! > This system is called Internationalizing Domain Names for Applications, +//! > or IDNA2003 for short. +//! > This mechanism supports IDNs by means of a client software transformation +//! > into a format known as Punycode. +//! > A revision of IDNA was approved in 2010 (IDNA2008). +//! > This revision has a number of incompatibilities with IDNA2003. +//! > +//! > The incompatibilities force implementers of client software, +//! > such as browsers and emailers, +//! > to face difficult choices during the transition period +//! > as registries shift from IDNA2003 to IDNA2008. +//! > This document specifies a mechanism +//! > that minimizes the impact of this transition for client software, +//! > allowing client software to access domains that are valid under either system. + +#[macro_use] extern crate matches; +extern crate unicode_bidi; +extern crate unicode_normalization; + +pub mod punycode; +pub mod uts46; + +/// The [domain to ASCII](https://url.spec.whatwg.org/#concept-domain-to-ascii) algorithm. +/// +/// Return the ASCII representation a domain name, +/// normalizing characters (upper-case to lower-case and other kinds of equivalence) +/// and using Punycode as necessary. +/// +/// This process may fail. +pub fn domain_to_ascii(domain: &str) -> Result { + uts46::to_ascii(domain, uts46::Flags { + use_std3_ascii_rules: false, + transitional_processing: false, + verify_dns_length: false, + }) +} + +/// The [domain to Unicode](https://url.spec.whatwg.org/#concept-domain-to-unicode) algorithm. +/// +/// Return the Unicode representation of a domain name, +/// normalizing characters (upper-case to lower-case and other kinds of equivalence) +/// and decoding Punycode as necessary. +/// +/// This may indicate [syntax violations](https://url.spec.whatwg.org/#syntax-violation) +/// but always returns a string for the mapped domain. +pub fn domain_to_unicode(domain: &str) -> (String, Result<(), uts46::Errors>) { + uts46::to_unicode(domain, uts46::Flags { + use_std3_ascii_rules: false, + + // Unused: + transitional_processing: false, + verify_dns_length: false, + }) +} diff --git a/idna/src/make_uts46_mapping_table.py b/idna/src/make_uts46_mapping_table.py new file mode 100644 index 000000000..16e4feb44 --- /dev/null +++ b/idna/src/make_uts46_mapping_table.py @@ -0,0 +1,192 @@ +# Copyright 2013-2014 The rust-url developers. +# +# Licensed under the Apache License, Version 2.0 or the MIT license +# , at your +# option. 
This file may not be copied, modified, or distributed +# except according to those terms. + +# Run as: python make_uts46_mapping_table.py IdnaMappingTable.txt > uts46_mapping_table.rs +# You can get the latest idna table from +# http://www.unicode.org/Public/idna/latest/IdnaMappingTable.txt + +from __future__ import print_function +import collections +import itertools + +print('''\ +// Copyright 2013-2014 The rust-url developers. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Generated by make_idna_table.py +''') + +txt = open("IdnaMappingTable.txt") + +def escape_char(c): + return "\\u{%x}" % ord(c[0]) + +def char(s): + return unichr(int(s, 16)) + +strtab = collections.OrderedDict() +strtab_offset = 0 + +def strtab_slice(s): + global strtab, strtab_offset + + if s in strtab: + return strtab[s] + else: + utf8_len = len(s.encode('utf8')) + c = (strtab_offset, utf8_len) + strtab[s] = c + strtab_offset += utf8_len + return c + +def rust_slice(s): + start = s[0] + length = s[1] + start_lo = start & 0xff + start_hi = start >> 8 + assert length <= 255 + assert start_hi <= 255 + return "(StringTableSlice { byte_start_lo: %d, byte_start_hi: %d, byte_len: %d })" % (start_lo, start_hi, length) + +ranges = [] + +for line in txt: + # remove comments + line, _, _ = line.partition('#') + # skip empty lines + if len(line.strip()) == 0: + continue + fields = line.split(';') + if fields[0].strip() == 'D800..DFFF': + continue # Surrogates don't occur in Rust strings. + first, _, last = fields[0].strip().partition('..') + if not last: + last = first + mapping = fields[1].strip().replace('_', ' ').title().replace(' ', '') + unicode_str = None + if len(fields) > 2: + if fields[2].strip(): + unicode_str = u''.join(char(c) for c in fields[2].strip().split(' ')) + elif mapping == "Deviation": + unicode_str = u'' + ranges.append((first, last, mapping, unicode_str)) + +def mergeable_key(r): + mapping = r[2] + + # These types have associated data, so we should not merge them. + if mapping in ('Mapped', 'Deviation', 'DisallowedStd3Mapped'): + return r + assert mapping in ('Valid', 'Ignored', 'Disallowed', 'DisallowedStd3Valid') + return mapping + +grouped_ranges = itertools.groupby(ranges, key=mergeable_key) + +optimized_ranges = [] + +for (k, g) in grouped_ranges: + group = list(g) + if len(group) == 1: + optimized_ranges.append(group[0]) + continue + # Assert that nothing in the group has an associated unicode string. + for g in group: + if g[3] is not None and len(g[3]) > 2: + assert not g[3][2].strip() + # Assert that consecutive members of the group don't leave gaps in + # the codepoint space. + a, b = itertools.tee(group) + next(b, None) + for (g1, g2) in itertools.izip(a, b): + last_char = int(g1[1], 16) + next_char = int(g2[0], 16) + if last_char + 1 == next_char: + continue + # There's a gap where surrogates would appear, but we don't have to + # worry about that gap, as surrogates never appear in Rust strings. + # Assert we're seeing the surrogate case here. 
+ assert last_char == 0xd7ff + assert next_char == 0xe000 + first = group[0][0] + last = group[-1][1] + mapping = group[0][2] + unicode_str = group[0][3] + optimized_ranges.append((first, last, mapping, unicode_str)) + +def is_single_char_range(r): + (first, last, _, _) = r + return first == last + +# We can reduce the size of the character range table and the index table to about 1/4 +# by merging runs of single character ranges and using character offsets from the start +# of that range to retrieve the correct `Mapping` value +def merge_single_char_ranges(ranges): + current = [] + for r in ranges: + if not current or is_single_char_range(current[-1]) and is_single_char_range(r): + current.append(r) + continue + if len(current) != 0: + ret = current + current = [r] + yield ret + continue + current.append(r) + ret = current + current = [] + yield ret + yield current + +optimized_ranges = list(merge_single_char_ranges(optimized_ranges)) + + +print("static TABLE: &'static [Range] = &[") + +for ranges in optimized_ranges: + first = ranges[0][0] + last = ranges[-1][1] + print(" Range { from: '%s', to: '%s', }," % (escape_char(char(first)), + escape_char(char(last)))) + +print("];\n") + +print("static INDEX_TABLE: &'static [u16] = &[") + +SINGLE_MARKER = 1 << 15 + +offset = 0 +for ranges in optimized_ranges: + assert offset < SINGLE_MARKER + + block_len = len(ranges) + single = SINGLE_MARKER if block_len == 1 else 0 + print(" %s," % (offset | single)) + offset += block_len + +print("];\n") + +print("static MAPPING_TABLE: &'static [Mapping] = &[") + +for ranges in optimized_ranges: + for (first, last, mapping, unicode_str) in ranges: + if unicode_str is not None: + mapping += rust_slice(strtab_slice(unicode_str)) + print(" %s," % mapping) + +print("];\n") + +def escape_str(s): + return [escape_char(c) for c in s] + +print("static STRING_TABLE: &'static str = \"%s\";" + % '\\\n '.join(itertools.chain(*[escape_str(s) for s in strtab.iterkeys()]))) diff --git a/idna/src/punycode.rs b/idna/src/punycode.rs new file mode 100644 index 000000000..acdde5897 --- /dev/null +++ b/idna/src/punycode.rs @@ -0,0 +1,213 @@ +// Copyright 2013 The rust-url developers. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! Punycode ([RFC 3492](http://tools.ietf.org/html/rfc3492)) implementation. +//! +//! Since Punycode fundamentally works on unicode code points, +//! `encode` and `decode` take and return slices and vectors of `char`. +//! `encode_str` and `decode_to_string` provide convenience wrappers +//! that convert from and to Rust’s UTF-8 based `str` and `String` types. + +use std::u32; +use std::char; +#[allow(unused_imports, deprecated)] +use std::ascii::AsciiExt; + +// Bootstring parameters for Punycode +static BASE: u32 = 36; +static T_MIN: u32 = 1; +static T_MAX: u32 = 26; +static SKEW: u32 = 38; +static DAMP: u32 = 700; +static INITIAL_BIAS: u32 = 72; +static INITIAL_N: u32 = 0x80; +static DELIMITER: char = '-'; + + +#[inline] +fn adapt(mut delta: u32, num_points: u32, first_time: bool) -> u32 { + delta /= if first_time { DAMP } else { 2 }; + delta += delta / num_points; + let mut k = 0; + while delta > ((BASE - T_MIN) * T_MAX) / 2 { + delta /= BASE - T_MIN; + k += BASE; + } + k + (((BASE - T_MIN + 1) * delta) / (delta + SKEW)) +} + + +/// Convert Punycode to an Unicode `String`. +/// +/// This is a convenience wrapper around `decode`. 
+#[inline] +pub fn decode_to_string(input: &str) -> Option { + decode(input).map(|chars| chars.into_iter().collect()) +} + + +/// Convert Punycode to Unicode. +/// +/// Return None on malformed input or overflow. +/// Overflow can only happen on inputs that take more than +/// 63 encoded bytes, the DNS limit on domain name labels. +pub fn decode(input: &str) -> Option> { + // Handle "basic" (ASCII) code points. + // They are encoded as-is before the last delimiter, if any. + let (mut output, input) = match input.rfind(DELIMITER) { + None => (Vec::new(), input), + Some(position) => ( + input[..position].chars().collect(), + if position > 0 { &input[position + 1..] } else { input } + ) + }; + let mut code_point = INITIAL_N; + let mut bias = INITIAL_BIAS; + let mut i = 0; + let mut iter = input.bytes(); + loop { + let previous_i = i; + let mut weight = 1; + let mut k = BASE; + let mut byte = match iter.next() { + None => break, + Some(byte) => byte, + }; + // Decode a generalized variable-length integer into delta, + // which gets added to i. + loop { + let digit = match byte { + byte @ b'0' ... b'9' => byte - b'0' + 26, + byte @ b'A' ... b'Z' => byte - b'A', + byte @ b'a' ... b'z' => byte - b'a', + _ => return None + } as u32; + if digit > (u32::MAX - i) / weight { + return None // Overflow + } + i += digit * weight; + let t = if k <= bias { T_MIN } + else if k >= bias + T_MAX { T_MAX } + else { k - bias }; + if digit < t { + break + } + if weight > u32::MAX / (BASE - t) { + return None // Overflow + } + weight *= BASE - t; + k += BASE; + byte = match iter.next() { + None => return None, // End of input before the end of this delta + Some(byte) => byte, + }; + } + let length = output.len() as u32; + bias = adapt(i - previous_i, length + 1, previous_i == 0); + if i / (length + 1) > u32::MAX - code_point { + return None // Overflow + } + // i was supposed to wrap around from length+1 to 0, + // incrementing code_point each time. + code_point += i / (length + 1); + i %= length + 1; + let c = match char::from_u32(code_point) { + Some(c) => c, + None => return None + }; + output.insert(i as usize, c); + i += 1; + } + Some(output) +} + + +/// Convert an Unicode `str` to Punycode. +/// +/// This is a convenience wrapper around `encode`. +#[inline] +pub fn encode_str(input: &str) -> Option { + encode(&input.chars().collect::>()) +} + + +/// Convert Unicode to Punycode. +/// +/// Return None on overflow, which can only happen on inputs that would take more than +/// 63 encoded bytes, the DNS limit on domain name labels. +pub fn encode(input: &[char]) -> Option { + // Handle "basic" (ASCII) code points. They are encoded as-is. + let output_bytes = input.iter().filter_map(|&c| + if c.is_ascii() { Some(c as u8) } else { None } + ).collect(); + let mut output = unsafe { String::from_utf8_unchecked(output_bytes) }; + let basic_length = output.len() as u32; + if basic_length > 0 { + output.push_str("-") + } + let mut code_point = INITIAL_N; + let mut delta = 0; + let mut bias = INITIAL_BIAS; + let mut processed = basic_length; + let input_length = input.len() as u32; + while processed < input_length { + // All code points < code_point have been handled already. + // Find the next larger one. 
+ let min_code_point = input.iter().map(|&c| c as u32) + .filter(|&c| c >= code_point).min().unwrap(); + if min_code_point - code_point > (u32::MAX - delta) / (processed + 1) { + return None // Overflow + } + // Increase delta to advance the decoder’s state to + delta += (min_code_point - code_point) * (processed + 1); + code_point = min_code_point; + for &c in input { + let c = c as u32; + if c < code_point { + delta += 1; + if delta == 0 { + return None // Overflow + } + } + if c == code_point { + // Represent delta as a generalized variable-length integer: + let mut q = delta; + let mut k = BASE; + loop { + let t = if k <= bias { T_MIN } + else if k >= bias + T_MAX { T_MAX } + else { k - bias }; + if q < t { + break + } + let value = t + ((q - t) % (BASE - t)); + output.push(value_to_digit(value)); + q = (q - t) / (BASE - t); + k += BASE; + } + output.push(value_to_digit(q)); + bias = adapt(delta, processed + 1, processed == basic_length); + delta = 0; + processed += 1; + } + } + delta += 1; + code_point += 1; + } + Some(output) +} + + +#[inline] +fn value_to_digit(value: u32) -> char { + match value { + 0 ... 25 => (value as u8 + 'a' as u8) as char, // a..z + 26 ... 35 => (value as u8 - 26 + '0' as u8) as char, // 0..9 + _ => panic!() + } +} diff --git a/idna/src/uts46.rs b/idna/src/uts46.rs new file mode 100644 index 000000000..ac348d1fa --- /dev/null +++ b/idna/src/uts46.rs @@ -0,0 +1,433 @@ +// Copyright 2013-2014 The rust-url developers. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! [*Unicode IDNA Compatibility Processing* +//! (Unicode Technical Standard #46)](http://www.unicode.org/reports/tr46/) + +use self::Mapping::*; +use punycode; +#[allow(unused_imports, deprecated)] +use std::ascii::AsciiExt; +use std::cmp::Ordering::{Equal, Less, Greater}; +use unicode_bidi::{BidiClass, bidi_class}; +use unicode_normalization::UnicodeNormalization; +use unicode_normalization::char::is_combining_mark; + +include!("uts46_mapping_table.rs"); + + +pub static PUNYCODE_PREFIX: &'static str = "xn--"; + + +#[derive(Debug)] +struct StringTableSlice { + // Store these as separate fields so the structure will have an + // alignment of 1 and thus pack better into the Mapping enum, below. 
+ byte_start_lo: u8, + byte_start_hi: u8, + byte_len: u8, +} + +fn decode_slice(slice: &StringTableSlice) -> &'static str { + let lo = slice.byte_start_lo as usize; + let hi = slice.byte_start_hi as usize; + let start = (hi << 8) | lo; + let len = slice.byte_len as usize; + &STRING_TABLE[start..(start + len)] +} + +#[repr(u8)] +#[derive(Debug)] +enum Mapping { + Valid, + Ignored, + Mapped(StringTableSlice), + Deviation(StringTableSlice), + Disallowed, + DisallowedStd3Valid, + DisallowedStd3Mapped(StringTableSlice), +} + +struct Range { + from: char, + to: char, +} + +fn find_char(codepoint: char) -> &'static Mapping { + let r = TABLE.binary_search_by(|ref range| { + if codepoint > range.to { + Less + } else if codepoint < range.from { + Greater + } else { + Equal + } + }); + r.ok().map(|i| { + const SINGLE_MARKER: u16 = 1 << 15; + + let x = INDEX_TABLE[i]; + let single = (x & SINGLE_MARKER) != 0; + let offset = !SINGLE_MARKER & x; + + if single { + &MAPPING_TABLE[offset as usize] + } else { + &MAPPING_TABLE[(offset + (codepoint as u16 - TABLE[i].from as u16)) as usize] + } + }).unwrap() +} + +fn map_char(codepoint: char, flags: Flags, output: &mut String, errors: &mut Vec) { + match *find_char(codepoint) { + Mapping::Valid => output.push(codepoint), + Mapping::Ignored => {}, + Mapping::Mapped(ref slice) => output.push_str(decode_slice(slice)), + Mapping::Deviation(ref slice) => { + if flags.transitional_processing { + output.push_str(decode_slice(slice)) + } else { + output.push(codepoint) + } + } + Mapping::Disallowed => { + errors.push(Error::DissallowedCharacter); + output.push(codepoint); + } + Mapping::DisallowedStd3Valid => { + if flags.use_std3_ascii_rules { + errors.push(Error::DissallowedByStd3AsciiRules); + } + output.push(codepoint) + } + Mapping::DisallowedStd3Mapped(ref slice) => { + if flags.use_std3_ascii_rules { + errors.push(Error::DissallowedMappedInStd3); + } + output.push_str(decode_slice(slice)) + } + } +} + +// http://tools.ietf.org/html/rfc5893#section-2 +fn passes_bidi(label: &str, is_bidi_domain: bool) -> bool { + // Rule 0: Bidi Rules apply to Bidi Domain Names: a name with at least one RTL label. A label + // is RTL if it contains at least one character of bidi class R, AL or AN. 
+ if !is_bidi_domain { + return true; + } + + let mut chars = label.chars(); + let first_char_class = match chars.next() { + Some(c) => bidi_class(c), + None => return true, // empty string + }; + + match first_char_class { + // LTR label + BidiClass::L => { + // Rule 5 + loop { + match chars.next() { + Some(c) => { + if !matches!(bidi_class(c), + BidiClass::L | BidiClass::EN | + BidiClass::ES | BidiClass::CS | + BidiClass::ET | BidiClass::ON | + BidiClass::BN | BidiClass::NSM + ) { + return false; + } + }, + None => { break; }, + } + } + + // Rule 6 + // must end in L or EN followed by 0 or more NSM + let mut rev_chars = label.chars().rev(); + let mut last_non_nsm = rev_chars.next(); + loop { + match last_non_nsm { + Some(c) if bidi_class(c) == BidiClass::NSM => { + last_non_nsm = rev_chars.next(); + continue; + } + _ => { break; }, + } + } + match last_non_nsm { + Some(c) if bidi_class(c) == BidiClass::L + || bidi_class(c) == BidiClass::EN => {}, + Some(_) => { return false; }, + _ => {} + } + + } + + // RTL label + BidiClass::R | BidiClass::AL => { + let mut found_en = false; + let mut found_an = false; + + // Rule 2 + loop { + match chars.next() { + Some(c) => { + let char_class = bidi_class(c); + + if char_class == BidiClass::EN { + found_en = true; + } + if char_class == BidiClass::AN { + found_an = true; + } + + if !matches!(char_class, BidiClass::R | BidiClass::AL | + BidiClass::AN | BidiClass::EN | + BidiClass::ES | BidiClass::CS | + BidiClass::ET | BidiClass::ON | + BidiClass::BN | BidiClass::NSM) { + return false; + } + }, + None => { break; }, + } + } + // Rule 3 + let mut rev_chars = label.chars().rev(); + let mut last = rev_chars.next(); + loop { // must end in L or EN followed by 0 or more NSM + match last { + Some(c) if bidi_class(c) == BidiClass::NSM => { + last = rev_chars.next(); + continue; + } + _ => { break; }, + } + } + match last { + Some(c) if matches!(bidi_class(c), BidiClass::R | BidiClass::AL | + BidiClass::EN | BidiClass::AN) => {}, + _ => { return false; } + } + + // Rule 4 + if found_an && found_en { + return false; + } + } + + // Rule 1: Should start with L or R/AL + _ => { + return false; + } + } + + return true; +} + +/// http://www.unicode.org/reports/tr46/#Validity_Criteria +fn validate_full(label: &str, is_bidi_domain: bool, flags: Flags, errors: &mut Vec) { + // V1: Must be in NFC form. + if label.nfc().ne(label.chars()) { + errors.push(Error::ValidityCriteria); + } else { + validate(label, is_bidi_domain, flags, errors); + } +} + +fn validate(label: &str, is_bidi_domain: bool, flags: Flags, errors: &mut Vec) { + let first_char = label.chars().next(); + if first_char == None { + // Empty string, pass + } + + // V2: No U+002D HYPHEN-MINUS in both third and fourth positions. + // + // NOTE: Spec says that the label must not contain a HYPHEN-MINUS character in both the + // third and fourth positions. But nobody follows this criteria. See the spec issue below: + // https://github.com/whatwg/url/issues/53 + // + // TODO: Add *CheckHyphens* flag. + + // V3: neither begin nor end with a U+002D HYPHEN-MINUS + else if label.starts_with("-") || label.ends_with("-") { + errors.push(Error::ValidityCriteria); + } + + // V4: not contain a U+002E FULL STOP + // + // Here, label can't contain '.' 
+
+    // V5: not begin with a GC=Mark
+    else if is_combining_mark(first_char.unwrap()) {
+        errors.push(Error::ValidityCriteria);
+    }
+
+    // V6: Check against Mapping Table
+    else if label.chars().any(|c| match *find_char(c) {
+        Mapping::Valid => false,
+        Mapping::Deviation(_) => flags.transitional_processing,
+        Mapping::DisallowedStd3Valid => flags.use_std3_ascii_rules,
+        _ => true,
+    }) {
+        errors.push(Error::ValidityCriteria);
+    }
+
+    // V7: ContextJ rules
+    //
+    // TODO: Implement rules and add *CheckJoiners* flag.
+
+    // V8: Bidi rules
+    //
+    // TODO: Add *CheckBidi* flag
+    else if !passes_bidi(label, is_bidi_domain)
+    {
+        errors.push(Error::ValidityCriteria);
+    }
+}
+
+/// http://www.unicode.org/reports/tr46/#Processing
+fn processing(domain: &str, flags: Flags, errors: &mut Vec<Error>) -> String {
+    let mut mapped = String::with_capacity(domain.len());
+    for c in domain.chars() {
+        map_char(c, flags, &mut mapped, errors)
+    }
+    let mut normalized = String::with_capacity(mapped.len());
+    normalized.extend(mapped.nfc());
+
+    // Find out if it's a Bidi Domain Name
+    //
+    // First, check for literal bidi chars
+    let mut is_bidi_domain = domain.chars().any(|c|
+        matches!(bidi_class(c), BidiClass::R | BidiClass::AL | BidiClass::AN)
+    );
+    if !is_bidi_domain {
+        // Then check for punycode-encoded bidi chars
+        for label in normalized.split('.') {
+            if label.starts_with(PUNYCODE_PREFIX) {
+                match punycode::decode_to_string(&label[PUNYCODE_PREFIX.len()..]) {
+                    Some(decoded_label) => {
+                        if decoded_label.chars().any(|c|
+                            matches!(bidi_class(c), BidiClass::R | BidiClass::AL | BidiClass::AN)
+                        ) {
+                            is_bidi_domain = true;
+                        }
+                    }
+                    None => {
+                        is_bidi_domain = true;
+                    }
+                }
+            }
+        }
+    }
+
+    let mut validated = String::new();
+    let mut first = true;
+    for label in normalized.split('.') {
+        if !first {
+            validated.push('.');
+        }
+        first = false;
+        if label.starts_with(PUNYCODE_PREFIX) {
+            match punycode::decode_to_string(&label[PUNYCODE_PREFIX.len()..]) {
+                Some(decoded_label) => {
+                    let flags = Flags { transitional_processing: false, ..flags };
+                    validate_full(&decoded_label, is_bidi_domain, flags, errors);
+                    validated.push_str(&decoded_label)
+                }
+                None => errors.push(Error::PunycodeError)
+            }
+        } else {
+            // `normalized` is already `NFC` so we can skip that check
+            validate(label, is_bidi_domain, flags, errors);
+            validated.push_str(label)
+        }
+    }
+    validated
+}
+
+#[derive(Copy, Clone)]
+pub struct Flags {
+    pub use_std3_ascii_rules: bool,
+    pub transitional_processing: bool,
+    pub verify_dns_length: bool,
+}
+
+#[derive(PartialEq, Eq, Clone, Copy, Debug)]
+enum Error {
+    PunycodeError,
+    ValidityCriteria,
+    DissallowedByStd3AsciiRules,
+    DissallowedMappedInStd3,
+    DissallowedCharacter,
+    TooLongForDns,
+    TooShortForDns,
+}
+
+/// Errors recorded during UTS #46 processing.
+///
+/// This is opaque for now, only indicating the presence of at least one error.
+/// More details may be exposed in the future.
+#[derive(Debug)]
+pub struct Errors(Vec<Error>);
+
+/// http://www.unicode.org/reports/tr46/#ToASCII
+pub fn to_ascii(domain: &str, flags: Flags) -> Result<String, Errors> {
+    let mut errors = Vec::new();
+    let mut result = String::new();
+    let mut first = true;
+    for label in processing(domain, flags, &mut errors).split('.') {
+        if !first {
+            result.push('.');
+        }
+        first = false;
+        if label.is_ascii() {
+            result.push_str(label);
+        } else {
+            match punycode::encode_str(label) {
+                Some(x) => {
+                    result.push_str(PUNYCODE_PREFIX);
+                    result.push_str(&x);
+                },
+                None => errors.push(Error::PunycodeError)
+            }
+        }
+    }
+
+    if flags.verify_dns_length {
+        let domain = if result.ends_with(".") { &result[..result.len()-1] } else { &*result };
+        if domain.len() < 1 || domain.split('.').any(|label| label.len() < 1) {
+            errors.push(Error::TooShortForDns)
+        }
+        if domain.len() > 253 || domain.split('.').any(|label| label.len() > 63) {
+            errors.push(Error::TooLongForDns)
+        }
+    }
+    if errors.is_empty() {
+        Ok(result)
+    } else {
+        Err(Errors(errors))
+    }
+}
+
+/// http://www.unicode.org/reports/tr46/#ToUnicode
+///
+/// Only `use_std3_ascii_rules` is used in `flags`.
+pub fn to_unicode(domain: &str, mut flags: Flags) -> (String, Result<(), Errors>) {
+    flags.transitional_processing = false;
+    let mut errors = Vec::new();
+    let domain = processing(domain, flags, &mut errors);
+    let errors = if errors.is_empty() {
+        Ok(())
+    } else {
+        Err(Errors(errors))
+    };
+    (domain, errors)
+}
diff --git a/idna/src/uts46_mapping_table.rs b/idna/src/uts46_mapping_table.rs
new file mode 100644
index 000000000..2733290d6
--- /dev/null
+++ b/idna/src/uts46_mapping_table.rs
@@ -0,0 +1,16005 @@
+// Copyright 2013-2014 The rust-url developers.
+
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+ +// Generated by make_idna_table.py + +static TABLE: &'static [Range] = &[ + Range { from: '\u{0}', to: '\u{2c}', }, + Range { from: '\u{2d}', to: '\u{2e}', }, + Range { from: '\u{2f}', to: '\u{2f}', }, + Range { from: '\u{30}', to: '\u{39}', }, + Range { from: '\u{3a}', to: '\u{40}', }, + Range { from: '\u{41}', to: '\u{5a}', }, + Range { from: '\u{5b}', to: '\u{60}', }, + Range { from: '\u{61}', to: '\u{7a}', }, + Range { from: '\u{7b}', to: '\u{7f}', }, + Range { from: '\u{80}', to: '\u{9f}', }, + Range { from: '\u{a0}', to: '\u{a0}', }, + Range { from: '\u{a1}', to: '\u{a7}', }, + Range { from: '\u{a8}', to: '\u{aa}', }, + Range { from: '\u{ab}', to: '\u{ac}', }, + Range { from: '\u{ad}', to: '\u{af}', }, + Range { from: '\u{b0}', to: '\u{b1}', }, + Range { from: '\u{b2}', to: '\u{b5}', }, + Range { from: '\u{b6}', to: '\u{b7}', }, + Range { from: '\u{b8}', to: '\u{df}', }, + Range { from: '\u{e0}', to: '\u{ff}', }, + Range { from: '\u{100}', to: '\u{131}', }, + Range { from: '\u{132}', to: '\u{133}', }, + Range { from: '\u{134}', to: '\u{136}', }, + Range { from: '\u{137}', to: '\u{138}', }, + Range { from: '\u{139}', to: '\u{13e}', }, + Range { from: '\u{13f}', to: '\u{140}', }, + Range { from: '\u{141}', to: '\u{18b}', }, + Range { from: '\u{18c}', to: '\u{18d}', }, + Range { from: '\u{18e}', to: '\u{198}', }, + Range { from: '\u{199}', to: '\u{19b}', }, + Range { from: '\u{19c}', to: '\u{1a9}', }, + Range { from: '\u{1aa}', to: '\u{1ab}', }, + Range { from: '\u{1ac}', to: '\u{1b8}', }, + Range { from: '\u{1b9}', to: '\u{1bb}', }, + Range { from: '\u{1bc}', to: '\u{1bc}', }, + Range { from: '\u{1bd}', to: '\u{1c3}', }, + Range { from: '\u{1c4}', to: '\u{1c6}', }, + Range { from: '\u{1c7}', to: '\u{1c9}', }, + Range { from: '\u{1ca}', to: '\u{1cc}', }, + Range { from: '\u{1cd}', to: '\u{1db}', }, + Range { from: '\u{1dc}', to: '\u{1dd}', }, + Range { from: '\u{1de}', to: '\u{1ee}', }, + Range { from: '\u{1ef}', to: '\u{1f0}', }, + Range { from: '\u{1f1}', to: '\u{1f3}', }, + Range { from: '\u{1f4}', to: '\u{232}', }, + Range { from: '\u{233}', to: '\u{239}', }, + Range { from: '\u{23a}', to: '\u{23e}', }, + Range { from: '\u{23f}', to: '\u{240}', }, + Range { from: '\u{241}', to: '\u{24e}', }, + Range { from: '\u{24f}', to: '\u{2af}', }, + Range { from: '\u{2b0}', to: '\u{2b8}', }, + Range { from: '\u{2b9}', to: '\u{2d7}', }, + Range { from: '\u{2d8}', to: '\u{2dd}', }, + Range { from: '\u{2de}', to: '\u{2df}', }, + Range { from: '\u{2e0}', to: '\u{2e4}', }, + Range { from: '\u{2e5}', to: '\u{33f}', }, + Range { from: '\u{340}', to: '\u{345}', }, + Range { from: '\u{346}', to: '\u{34e}', }, + Range { from: '\u{34f}', to: '\u{34f}', }, + Range { from: '\u{350}', to: '\u{36f}', }, + Range { from: '\u{370}', to: '\u{377}', }, + Range { from: '\u{378}', to: '\u{379}', }, + Range { from: '\u{37a}', to: '\u{37a}', }, + Range { from: '\u{37b}', to: '\u{37d}', }, + Range { from: '\u{37e}', to: '\u{37f}', }, + Range { from: '\u{380}', to: '\u{383}', }, + Range { from: '\u{384}', to: '\u{3ab}', }, + Range { from: '\u{3ac}', to: '\u{3c1}', }, + Range { from: '\u{3c2}', to: '\u{3c2}', }, + Range { from: '\u{3c3}', to: '\u{3ce}', }, + Range { from: '\u{3cf}', to: '\u{3fa}', }, + Range { from: '\u{3fb}', to: '\u{3fc}', }, + Range { from: '\u{3fd}', to: '\u{42f}', }, + Range { from: '\u{430}', to: '\u{45f}', }, + Range { from: '\u{460}', to: '\u{480}', }, + Range { from: '\u{481}', to: '\u{489}', }, + Range { from: '\u{48a}', to: '\u{4cd}', }, + Range { from: '\u{4ce}', to: '\u{4cf}', }, + Range 
{ from: '\u{4d0}', to: '\u{556}', }, + Range { from: '\u{557}', to: '\u{558}', }, + Range { from: '\u{559}', to: '\u{55f}', }, + Range { from: '\u{560}', to: '\u{560}', }, + Range { from: '\u{561}', to: '\u{586}', }, + Range { from: '\u{587}', to: '\u{588}', }, + Range { from: '\u{589}', to: '\u{58a}', }, + Range { from: '\u{58b}', to: '\u{58c}', }, + Range { from: '\u{58d}', to: '\u{58f}', }, + Range { from: '\u{590}', to: '\u{590}', }, + Range { from: '\u{591}', to: '\u{5c7}', }, + Range { from: '\u{5c8}', to: '\u{5cf}', }, + Range { from: '\u{5d0}', to: '\u{5ea}', }, + Range { from: '\u{5eb}', to: '\u{5ef}', }, + Range { from: '\u{5f0}', to: '\u{5f4}', }, + Range { from: '\u{5f5}', to: '\u{605}', }, + Range { from: '\u{606}', to: '\u{61b}', }, + Range { from: '\u{61c}', to: '\u{61d}', }, + Range { from: '\u{61e}', to: '\u{674}', }, + Range { from: '\u{675}', to: '\u{678}', }, + Range { from: '\u{679}', to: '\u{6dc}', }, + Range { from: '\u{6dd}', to: '\u{6dd}', }, + Range { from: '\u{6de}', to: '\u{70d}', }, + Range { from: '\u{70e}', to: '\u{70f}', }, + Range { from: '\u{710}', to: '\u{74a}', }, + Range { from: '\u{74b}', to: '\u{74c}', }, + Range { from: '\u{74d}', to: '\u{7b1}', }, + Range { from: '\u{7b2}', to: '\u{7bf}', }, + Range { from: '\u{7c0}', to: '\u{7fa}', }, + Range { from: '\u{7fb}', to: '\u{7ff}', }, + Range { from: '\u{800}', to: '\u{82d}', }, + Range { from: '\u{82e}', to: '\u{82f}', }, + Range { from: '\u{830}', to: '\u{83e}', }, + Range { from: '\u{83f}', to: '\u{83f}', }, + Range { from: '\u{840}', to: '\u{85b}', }, + Range { from: '\u{85c}', to: '\u{85d}', }, + Range { from: '\u{85e}', to: '\u{85f}', }, + Range { from: '\u{860}', to: '\u{86a}', }, + Range { from: '\u{86b}', to: '\u{89f}', }, + Range { from: '\u{8a0}', to: '\u{8b4}', }, + Range { from: '\u{8b5}', to: '\u{8b5}', }, + Range { from: '\u{8b6}', to: '\u{8bd}', }, + Range { from: '\u{8be}', to: '\u{8d3}', }, + Range { from: '\u{8d4}', to: '\u{8e1}', }, + Range { from: '\u{8e2}', to: '\u{8e2}', }, + Range { from: '\u{8e3}', to: '\u{957}', }, + Range { from: '\u{958}', to: '\u{95f}', }, + Range { from: '\u{960}', to: '\u{983}', }, + Range { from: '\u{984}', to: '\u{984}', }, + Range { from: '\u{985}', to: '\u{98c}', }, + Range { from: '\u{98d}', to: '\u{98e}', }, + Range { from: '\u{98f}', to: '\u{990}', }, + Range { from: '\u{991}', to: '\u{992}', }, + Range { from: '\u{993}', to: '\u{9a8}', }, + Range { from: '\u{9a9}', to: '\u{9a9}', }, + Range { from: '\u{9aa}', to: '\u{9b0}', }, + Range { from: '\u{9b1}', to: '\u{9b2}', }, + Range { from: '\u{9b3}', to: '\u{9b5}', }, + Range { from: '\u{9b6}', to: '\u{9b9}', }, + Range { from: '\u{9ba}', to: '\u{9bb}', }, + Range { from: '\u{9bc}', to: '\u{9c4}', }, + Range { from: '\u{9c5}', to: '\u{9c6}', }, + Range { from: '\u{9c7}', to: '\u{9c8}', }, + Range { from: '\u{9c9}', to: '\u{9ca}', }, + Range { from: '\u{9cb}', to: '\u{9ce}', }, + Range { from: '\u{9cf}', to: '\u{9d6}', }, + Range { from: '\u{9d7}', to: '\u{9d7}', }, + Range { from: '\u{9d8}', to: '\u{9db}', }, + Range { from: '\u{9dc}', to: '\u{9df}', }, + Range { from: '\u{9e0}', to: '\u{9e3}', }, + Range { from: '\u{9e4}', to: '\u{9e5}', }, + Range { from: '\u{9e6}', to: '\u{9fd}', }, + Range { from: '\u{9fe}', to: '\u{a00}', }, + Range { from: '\u{a01}', to: '\u{a03}', }, + Range { from: '\u{a04}', to: '\u{a04}', }, + Range { from: '\u{a05}', to: '\u{a0a}', }, + Range { from: '\u{a0b}', to: '\u{a0e}', }, + Range { from: '\u{a0f}', to: '\u{a10}', }, + Range { from: '\u{a11}', to: '\u{a12}', }, + Range 
{ from: '\u{a13}', to: '\u{a28}', }, + Range { from: '\u{a29}', to: '\u{a29}', }, + Range { from: '\u{a2a}', to: '\u{a30}', }, + Range { from: '\u{a31}', to: '\u{a37}', }, + Range { from: '\u{a38}', to: '\u{a39}', }, + Range { from: '\u{a3a}', to: '\u{a3b}', }, + Range { from: '\u{a3c}', to: '\u{a3d}', }, + Range { from: '\u{a3e}', to: '\u{a42}', }, + Range { from: '\u{a43}', to: '\u{a46}', }, + Range { from: '\u{a47}', to: '\u{a48}', }, + Range { from: '\u{a49}', to: '\u{a4a}', }, + Range { from: '\u{a4b}', to: '\u{a4d}', }, + Range { from: '\u{a4e}', to: '\u{a50}', }, + Range { from: '\u{a51}', to: '\u{a51}', }, + Range { from: '\u{a52}', to: '\u{a58}', }, + Range { from: '\u{a59}', to: '\u{a5e}', }, + Range { from: '\u{a5f}', to: '\u{a65}', }, + Range { from: '\u{a66}', to: '\u{a75}', }, + Range { from: '\u{a76}', to: '\u{a80}', }, + Range { from: '\u{a81}', to: '\u{a83}', }, + Range { from: '\u{a84}', to: '\u{a84}', }, + Range { from: '\u{a85}', to: '\u{a8d}', }, + Range { from: '\u{a8e}', to: '\u{a8e}', }, + Range { from: '\u{a8f}', to: '\u{a91}', }, + Range { from: '\u{a92}', to: '\u{a92}', }, + Range { from: '\u{a93}', to: '\u{aa8}', }, + Range { from: '\u{aa9}', to: '\u{aa9}', }, + Range { from: '\u{aaa}', to: '\u{ab0}', }, + Range { from: '\u{ab1}', to: '\u{ab1}', }, + Range { from: '\u{ab2}', to: '\u{ab3}', }, + Range { from: '\u{ab4}', to: '\u{ab4}', }, + Range { from: '\u{ab5}', to: '\u{ab9}', }, + Range { from: '\u{aba}', to: '\u{abb}', }, + Range { from: '\u{abc}', to: '\u{ac5}', }, + Range { from: '\u{ac6}', to: '\u{ac6}', }, + Range { from: '\u{ac7}', to: '\u{ac9}', }, + Range { from: '\u{aca}', to: '\u{aca}', }, + Range { from: '\u{acb}', to: '\u{acd}', }, + Range { from: '\u{ace}', to: '\u{acf}', }, + Range { from: '\u{ad0}', to: '\u{ad0}', }, + Range { from: '\u{ad1}', to: '\u{adf}', }, + Range { from: '\u{ae0}', to: '\u{ae3}', }, + Range { from: '\u{ae4}', to: '\u{ae5}', }, + Range { from: '\u{ae6}', to: '\u{af1}', }, + Range { from: '\u{af2}', to: '\u{af8}', }, + Range { from: '\u{af9}', to: '\u{aff}', }, + Range { from: '\u{b00}', to: '\u{b00}', }, + Range { from: '\u{b01}', to: '\u{b03}', }, + Range { from: '\u{b04}', to: '\u{b04}', }, + Range { from: '\u{b05}', to: '\u{b0c}', }, + Range { from: '\u{b0d}', to: '\u{b0e}', }, + Range { from: '\u{b0f}', to: '\u{b10}', }, + Range { from: '\u{b11}', to: '\u{b12}', }, + Range { from: '\u{b13}', to: '\u{b28}', }, + Range { from: '\u{b29}', to: '\u{b29}', }, + Range { from: '\u{b2a}', to: '\u{b30}', }, + Range { from: '\u{b31}', to: '\u{b31}', }, + Range { from: '\u{b32}', to: '\u{b33}', }, + Range { from: '\u{b34}', to: '\u{b34}', }, + Range { from: '\u{b35}', to: '\u{b39}', }, + Range { from: '\u{b3a}', to: '\u{b3b}', }, + Range { from: '\u{b3c}', to: '\u{b44}', }, + Range { from: '\u{b45}', to: '\u{b46}', }, + Range { from: '\u{b47}', to: '\u{b48}', }, + Range { from: '\u{b49}', to: '\u{b4a}', }, + Range { from: '\u{b4b}', to: '\u{b4d}', }, + Range { from: '\u{b4e}', to: '\u{b55}', }, + Range { from: '\u{b56}', to: '\u{b57}', }, + Range { from: '\u{b58}', to: '\u{b5b}', }, + Range { from: '\u{b5c}', to: '\u{b5e}', }, + Range { from: '\u{b5f}', to: '\u{b63}', }, + Range { from: '\u{b64}', to: '\u{b65}', }, + Range { from: '\u{b66}', to: '\u{b77}', }, + Range { from: '\u{b78}', to: '\u{b81}', }, + Range { from: '\u{b82}', to: '\u{b83}', }, + Range { from: '\u{b84}', to: '\u{b84}', }, + Range { from: '\u{b85}', to: '\u{b8a}', }, + Range { from: '\u{b8b}', to: '\u{b8d}', }, + Range { from: '\u{b8e}', to: '\u{b90}', }, + Range 
{ from: '\u{b91}', to: '\u{b91}', }, + Range { from: '\u{b92}', to: '\u{b95}', }, + Range { from: '\u{b96}', to: '\u{b98}', }, + Range { from: '\u{b99}', to: '\u{b9a}', }, + Range { from: '\u{b9b}', to: '\u{b9d}', }, + Range { from: '\u{b9e}', to: '\u{b9f}', }, + Range { from: '\u{ba0}', to: '\u{ba2}', }, + Range { from: '\u{ba3}', to: '\u{ba4}', }, + Range { from: '\u{ba5}', to: '\u{ba7}', }, + Range { from: '\u{ba8}', to: '\u{baa}', }, + Range { from: '\u{bab}', to: '\u{bad}', }, + Range { from: '\u{bae}', to: '\u{bb9}', }, + Range { from: '\u{bba}', to: '\u{bbd}', }, + Range { from: '\u{bbe}', to: '\u{bc2}', }, + Range { from: '\u{bc3}', to: '\u{bc5}', }, + Range { from: '\u{bc6}', to: '\u{bc8}', }, + Range { from: '\u{bc9}', to: '\u{bc9}', }, + Range { from: '\u{bca}', to: '\u{bcd}', }, + Range { from: '\u{bce}', to: '\u{bcf}', }, + Range { from: '\u{bd0}', to: '\u{bd0}', }, + Range { from: '\u{bd1}', to: '\u{bd6}', }, + Range { from: '\u{bd7}', to: '\u{bd7}', }, + Range { from: '\u{bd8}', to: '\u{be5}', }, + Range { from: '\u{be6}', to: '\u{bfa}', }, + Range { from: '\u{bfb}', to: '\u{bff}', }, + Range { from: '\u{c00}', to: '\u{c03}', }, + Range { from: '\u{c04}', to: '\u{c04}', }, + Range { from: '\u{c05}', to: '\u{c0c}', }, + Range { from: '\u{c0d}', to: '\u{c0d}', }, + Range { from: '\u{c0e}', to: '\u{c10}', }, + Range { from: '\u{c11}', to: '\u{c11}', }, + Range { from: '\u{c12}', to: '\u{c28}', }, + Range { from: '\u{c29}', to: '\u{c29}', }, + Range { from: '\u{c2a}', to: '\u{c39}', }, + Range { from: '\u{c3a}', to: '\u{c3c}', }, + Range { from: '\u{c3d}', to: '\u{c44}', }, + Range { from: '\u{c45}', to: '\u{c45}', }, + Range { from: '\u{c46}', to: '\u{c48}', }, + Range { from: '\u{c49}', to: '\u{c49}', }, + Range { from: '\u{c4a}', to: '\u{c4d}', }, + Range { from: '\u{c4e}', to: '\u{c54}', }, + Range { from: '\u{c55}', to: '\u{c56}', }, + Range { from: '\u{c57}', to: '\u{c57}', }, + Range { from: '\u{c58}', to: '\u{c5a}', }, + Range { from: '\u{c5b}', to: '\u{c5f}', }, + Range { from: '\u{c60}', to: '\u{c63}', }, + Range { from: '\u{c64}', to: '\u{c65}', }, + Range { from: '\u{c66}', to: '\u{c6f}', }, + Range { from: '\u{c70}', to: '\u{c77}', }, + Range { from: '\u{c78}', to: '\u{c83}', }, + Range { from: '\u{c84}', to: '\u{c84}', }, + Range { from: '\u{c85}', to: '\u{c8c}', }, + Range { from: '\u{c8d}', to: '\u{c8d}', }, + Range { from: '\u{c8e}', to: '\u{c90}', }, + Range { from: '\u{c91}', to: '\u{c91}', }, + Range { from: '\u{c92}', to: '\u{ca8}', }, + Range { from: '\u{ca9}', to: '\u{ca9}', }, + Range { from: '\u{caa}', to: '\u{cb3}', }, + Range { from: '\u{cb4}', to: '\u{cb4}', }, + Range { from: '\u{cb5}', to: '\u{cb9}', }, + Range { from: '\u{cba}', to: '\u{cbb}', }, + Range { from: '\u{cbc}', to: '\u{cc4}', }, + Range { from: '\u{cc5}', to: '\u{cc5}', }, + Range { from: '\u{cc6}', to: '\u{cc8}', }, + Range { from: '\u{cc9}', to: '\u{cc9}', }, + Range { from: '\u{cca}', to: '\u{ccd}', }, + Range { from: '\u{cce}', to: '\u{cd4}', }, + Range { from: '\u{cd5}', to: '\u{cd6}', }, + Range { from: '\u{cd7}', to: '\u{cdd}', }, + Range { from: '\u{cde}', to: '\u{cdf}', }, + Range { from: '\u{ce0}', to: '\u{ce3}', }, + Range { from: '\u{ce4}', to: '\u{ce5}', }, + Range { from: '\u{ce6}', to: '\u{cef}', }, + Range { from: '\u{cf0}', to: '\u{cf0}', }, + Range { from: '\u{cf1}', to: '\u{cf2}', }, + Range { from: '\u{cf3}', to: '\u{cff}', }, + Range { from: '\u{d00}', to: '\u{d03}', }, + Range { from: '\u{d04}', to: '\u{d04}', }, + Range { from: '\u{d05}', to: '\u{d0c}', }, + Range 
{ from: '\u{d0d}', to: '\u{d0d}', }, + Range { from: '\u{d0e}', to: '\u{d10}', }, + Range { from: '\u{d11}', to: '\u{d11}', }, + Range { from: '\u{d12}', to: '\u{d44}', }, + Range { from: '\u{d45}', to: '\u{d45}', }, + Range { from: '\u{d46}', to: '\u{d48}', }, + Range { from: '\u{d49}', to: '\u{d49}', }, + Range { from: '\u{d4a}', to: '\u{d4f}', }, + Range { from: '\u{d50}', to: '\u{d53}', }, + Range { from: '\u{d54}', to: '\u{d63}', }, + Range { from: '\u{d64}', to: '\u{d65}', }, + Range { from: '\u{d66}', to: '\u{d7f}', }, + Range { from: '\u{d80}', to: '\u{d81}', }, + Range { from: '\u{d82}', to: '\u{d83}', }, + Range { from: '\u{d84}', to: '\u{d84}', }, + Range { from: '\u{d85}', to: '\u{d96}', }, + Range { from: '\u{d97}', to: '\u{d99}', }, + Range { from: '\u{d9a}', to: '\u{db1}', }, + Range { from: '\u{db2}', to: '\u{db2}', }, + Range { from: '\u{db3}', to: '\u{dbb}', }, + Range { from: '\u{dbc}', to: '\u{dbd}', }, + Range { from: '\u{dbe}', to: '\u{dbf}', }, + Range { from: '\u{dc0}', to: '\u{dc6}', }, + Range { from: '\u{dc7}', to: '\u{dc9}', }, + Range { from: '\u{dca}', to: '\u{dca}', }, + Range { from: '\u{dcb}', to: '\u{dce}', }, + Range { from: '\u{dcf}', to: '\u{dd4}', }, + Range { from: '\u{dd5}', to: '\u{dd7}', }, + Range { from: '\u{dd8}', to: '\u{ddf}', }, + Range { from: '\u{de0}', to: '\u{de5}', }, + Range { from: '\u{de6}', to: '\u{def}', }, + Range { from: '\u{df0}', to: '\u{df1}', }, + Range { from: '\u{df2}', to: '\u{df4}', }, + Range { from: '\u{df5}', to: '\u{e00}', }, + Range { from: '\u{e01}', to: '\u{e32}', }, + Range { from: '\u{e33}', to: '\u{e33}', }, + Range { from: '\u{e34}', to: '\u{e3a}', }, + Range { from: '\u{e3b}', to: '\u{e3e}', }, + Range { from: '\u{e3f}', to: '\u{e5b}', }, + Range { from: '\u{e5c}', to: '\u{e80}', }, + Range { from: '\u{e81}', to: '\u{e82}', }, + Range { from: '\u{e83}', to: '\u{e84}', }, + Range { from: '\u{e85}', to: '\u{e86}', }, + Range { from: '\u{e87}', to: '\u{e88}', }, + Range { from: '\u{e89}', to: '\u{e8a}', }, + Range { from: '\u{e8b}', to: '\u{e8c}', }, + Range { from: '\u{e8d}', to: '\u{e8d}', }, + Range { from: '\u{e8e}', to: '\u{e93}', }, + Range { from: '\u{e94}', to: '\u{e97}', }, + Range { from: '\u{e98}', to: '\u{e98}', }, + Range { from: '\u{e99}', to: '\u{e9f}', }, + Range { from: '\u{ea0}', to: '\u{ea0}', }, + Range { from: '\u{ea1}', to: '\u{ea3}', }, + Range { from: '\u{ea4}', to: '\u{ea7}', }, + Range { from: '\u{ea8}', to: '\u{ea9}', }, + Range { from: '\u{eaa}', to: '\u{eab}', }, + Range { from: '\u{eac}', to: '\u{eac}', }, + Range { from: '\u{ead}', to: '\u{eb2}', }, + Range { from: '\u{eb3}', to: '\u{eb3}', }, + Range { from: '\u{eb4}', to: '\u{eb9}', }, + Range { from: '\u{eba}', to: '\u{eba}', }, + Range { from: '\u{ebb}', to: '\u{ebd}', }, + Range { from: '\u{ebe}', to: '\u{ebf}', }, + Range { from: '\u{ec0}', to: '\u{ec4}', }, + Range { from: '\u{ec5}', to: '\u{ec7}', }, + Range { from: '\u{ec8}', to: '\u{ecd}', }, + Range { from: '\u{ece}', to: '\u{ecf}', }, + Range { from: '\u{ed0}', to: '\u{ed9}', }, + Range { from: '\u{eda}', to: '\u{edb}', }, + Range { from: '\u{edc}', to: '\u{edd}', }, + Range { from: '\u{ede}', to: '\u{edf}', }, + Range { from: '\u{ee0}', to: '\u{eff}', }, + Range { from: '\u{f00}', to: '\u{f0b}', }, + Range { from: '\u{f0c}', to: '\u{f0c}', }, + Range { from: '\u{f0d}', to: '\u{f42}', }, + Range { from: '\u{f43}', to: '\u{f43}', }, + Range { from: '\u{f44}', to: '\u{f47}', }, + Range { from: '\u{f48}', to: '\u{f48}', }, + Range { from: '\u{f49}', to: '\u{f4c}', }, + Range 
{ from: '\u{f4d}', to: '\u{f4d}', }, + Range { from: '\u{f4e}', to: '\u{f51}', }, + Range { from: '\u{f52}', to: '\u{f52}', }, + Range { from: '\u{f53}', to: '\u{f56}', }, + Range { from: '\u{f57}', to: '\u{f57}', }, + Range { from: '\u{f58}', to: '\u{f5b}', }, + Range { from: '\u{f5c}', to: '\u{f5c}', }, + Range { from: '\u{f5d}', to: '\u{f68}', }, + Range { from: '\u{f69}', to: '\u{f69}', }, + Range { from: '\u{f6a}', to: '\u{f6c}', }, + Range { from: '\u{f6d}', to: '\u{f70}', }, + Range { from: '\u{f71}', to: '\u{f72}', }, + Range { from: '\u{f73}', to: '\u{f79}', }, + Range { from: '\u{f7a}', to: '\u{f80}', }, + Range { from: '\u{f81}', to: '\u{f81}', }, + Range { from: '\u{f82}', to: '\u{f92}', }, + Range { from: '\u{f93}', to: '\u{f93}', }, + Range { from: '\u{f94}', to: '\u{f97}', }, + Range { from: '\u{f98}', to: '\u{f98}', }, + Range { from: '\u{f99}', to: '\u{f9c}', }, + Range { from: '\u{f9d}', to: '\u{f9d}', }, + Range { from: '\u{f9e}', to: '\u{fa1}', }, + Range { from: '\u{fa2}', to: '\u{fa2}', }, + Range { from: '\u{fa3}', to: '\u{fa6}', }, + Range { from: '\u{fa7}', to: '\u{fa7}', }, + Range { from: '\u{fa8}', to: '\u{fab}', }, + Range { from: '\u{fac}', to: '\u{fac}', }, + Range { from: '\u{fad}', to: '\u{fb8}', }, + Range { from: '\u{fb9}', to: '\u{fb9}', }, + Range { from: '\u{fba}', to: '\u{fbc}', }, + Range { from: '\u{fbd}', to: '\u{fbd}', }, + Range { from: '\u{fbe}', to: '\u{fcc}', }, + Range { from: '\u{fcd}', to: '\u{fcd}', }, + Range { from: '\u{fce}', to: '\u{fda}', }, + Range { from: '\u{fdb}', to: '\u{fff}', }, + Range { from: '\u{1000}', to: '\u{109f}', }, + Range { from: '\u{10a0}', to: '\u{10c6}', }, + Range { from: '\u{10c7}', to: '\u{10c7}', }, + Range { from: '\u{10c8}', to: '\u{10cc}', }, + Range { from: '\u{10cd}', to: '\u{10cd}', }, + Range { from: '\u{10ce}', to: '\u{10cf}', }, + Range { from: '\u{10d0}', to: '\u{10fb}', }, + Range { from: '\u{10fc}', to: '\u{10fc}', }, + Range { from: '\u{10fd}', to: '\u{115e}', }, + Range { from: '\u{115f}', to: '\u{1160}', }, + Range { from: '\u{1161}', to: '\u{1248}', }, + Range { from: '\u{1249}', to: '\u{1249}', }, + Range { from: '\u{124a}', to: '\u{124d}', }, + Range { from: '\u{124e}', to: '\u{124f}', }, + Range { from: '\u{1250}', to: '\u{1256}', }, + Range { from: '\u{1257}', to: '\u{1259}', }, + Range { from: '\u{125a}', to: '\u{125d}', }, + Range { from: '\u{125e}', to: '\u{125f}', }, + Range { from: '\u{1260}', to: '\u{1288}', }, + Range { from: '\u{1289}', to: '\u{1289}', }, + Range { from: '\u{128a}', to: '\u{128d}', }, + Range { from: '\u{128e}', to: '\u{128f}', }, + Range { from: '\u{1290}', to: '\u{12b0}', }, + Range { from: '\u{12b1}', to: '\u{12b1}', }, + Range { from: '\u{12b2}', to: '\u{12b5}', }, + Range { from: '\u{12b6}', to: '\u{12b7}', }, + Range { from: '\u{12b8}', to: '\u{12be}', }, + Range { from: '\u{12bf}', to: '\u{12c1}', }, + Range { from: '\u{12c2}', to: '\u{12c5}', }, + Range { from: '\u{12c6}', to: '\u{12c7}', }, + Range { from: '\u{12c8}', to: '\u{12d6}', }, + Range { from: '\u{12d7}', to: '\u{12d7}', }, + Range { from: '\u{12d8}', to: '\u{1310}', }, + Range { from: '\u{1311}', to: '\u{1311}', }, + Range { from: '\u{1312}', to: '\u{1315}', }, + Range { from: '\u{1316}', to: '\u{1317}', }, + Range { from: '\u{1318}', to: '\u{135a}', }, + Range { from: '\u{135b}', to: '\u{135c}', }, + Range { from: '\u{135d}', to: '\u{137c}', }, + Range { from: '\u{137d}', to: '\u{137f}', }, + Range { from: '\u{1380}', to: '\u{1399}', }, + Range { from: '\u{139a}', to: '\u{139f}', }, + Range { 
from: '\u{13a0}', to: '\u{13f5}', }, + Range { from: '\u{13f6}', to: '\u{13f7}', }, + Range { from: '\u{13f8}', to: '\u{13fd}', }, + Range { from: '\u{13fe}', to: '\u{13ff}', }, + Range { from: '\u{1400}', to: '\u{167f}', }, + Range { from: '\u{1680}', to: '\u{1680}', }, + Range { from: '\u{1681}', to: '\u{169c}', }, + Range { from: '\u{169d}', to: '\u{169f}', }, + Range { from: '\u{16a0}', to: '\u{16f8}', }, + Range { from: '\u{16f9}', to: '\u{16ff}', }, + Range { from: '\u{1700}', to: '\u{170c}', }, + Range { from: '\u{170d}', to: '\u{170d}', }, + Range { from: '\u{170e}', to: '\u{1714}', }, + Range { from: '\u{1715}', to: '\u{171f}', }, + Range { from: '\u{1720}', to: '\u{1736}', }, + Range { from: '\u{1737}', to: '\u{173f}', }, + Range { from: '\u{1740}', to: '\u{1753}', }, + Range { from: '\u{1754}', to: '\u{175f}', }, + Range { from: '\u{1760}', to: '\u{176c}', }, + Range { from: '\u{176d}', to: '\u{176d}', }, + Range { from: '\u{176e}', to: '\u{1770}', }, + Range { from: '\u{1771}', to: '\u{1771}', }, + Range { from: '\u{1772}', to: '\u{1773}', }, + Range { from: '\u{1774}', to: '\u{177f}', }, + Range { from: '\u{1780}', to: '\u{17b3}', }, + Range { from: '\u{17b4}', to: '\u{17b5}', }, + Range { from: '\u{17b6}', to: '\u{17dd}', }, + Range { from: '\u{17de}', to: '\u{17df}', }, + Range { from: '\u{17e0}', to: '\u{17e9}', }, + Range { from: '\u{17ea}', to: '\u{17ef}', }, + Range { from: '\u{17f0}', to: '\u{17f9}', }, + Range { from: '\u{17fa}', to: '\u{17ff}', }, + Range { from: '\u{1800}', to: '\u{1805}', }, + Range { from: '\u{1806}', to: '\u{1806}', }, + Range { from: '\u{1807}', to: '\u{180a}', }, + Range { from: '\u{180b}', to: '\u{180d}', }, + Range { from: '\u{180e}', to: '\u{180f}', }, + Range { from: '\u{1810}', to: '\u{1819}', }, + Range { from: '\u{181a}', to: '\u{181f}', }, + Range { from: '\u{1820}', to: '\u{1877}', }, + Range { from: '\u{1878}', to: '\u{187f}', }, + Range { from: '\u{1880}', to: '\u{18aa}', }, + Range { from: '\u{18ab}', to: '\u{18af}', }, + Range { from: '\u{18b0}', to: '\u{18f5}', }, + Range { from: '\u{18f6}', to: '\u{18ff}', }, + Range { from: '\u{1900}', to: '\u{191e}', }, + Range { from: '\u{191f}', to: '\u{191f}', }, + Range { from: '\u{1920}', to: '\u{192b}', }, + Range { from: '\u{192c}', to: '\u{192f}', }, + Range { from: '\u{1930}', to: '\u{193b}', }, + Range { from: '\u{193c}', to: '\u{193f}', }, + Range { from: '\u{1940}', to: '\u{1940}', }, + Range { from: '\u{1941}', to: '\u{1943}', }, + Range { from: '\u{1944}', to: '\u{196d}', }, + Range { from: '\u{196e}', to: '\u{196f}', }, + Range { from: '\u{1970}', to: '\u{1974}', }, + Range { from: '\u{1975}', to: '\u{197f}', }, + Range { from: '\u{1980}', to: '\u{19ab}', }, + Range { from: '\u{19ac}', to: '\u{19af}', }, + Range { from: '\u{19b0}', to: '\u{19c9}', }, + Range { from: '\u{19ca}', to: '\u{19cf}', }, + Range { from: '\u{19d0}', to: '\u{19da}', }, + Range { from: '\u{19db}', to: '\u{19dd}', }, + Range { from: '\u{19de}', to: '\u{1a1b}', }, + Range { from: '\u{1a1c}', to: '\u{1a1d}', }, + Range { from: '\u{1a1e}', to: '\u{1a5e}', }, + Range { from: '\u{1a5f}', to: '\u{1a5f}', }, + Range { from: '\u{1a60}', to: '\u{1a7c}', }, + Range { from: '\u{1a7d}', to: '\u{1a7e}', }, + Range { from: '\u{1a7f}', to: '\u{1a89}', }, + Range { from: '\u{1a8a}', to: '\u{1a8f}', }, + Range { from: '\u{1a90}', to: '\u{1a99}', }, + Range { from: '\u{1a9a}', to: '\u{1a9f}', }, + Range { from: '\u{1aa0}', to: '\u{1aad}', }, + Range { from: '\u{1aae}', to: '\u{1aaf}', }, + Range { from: '\u{1ab0}', to: 
'\u{1abe}', }, + Range { from: '\u{1abf}', to: '\u{1aff}', }, + Range { from: '\u{1b00}', to: '\u{1b4b}', }, + Range { from: '\u{1b4c}', to: '\u{1b4f}', }, + Range { from: '\u{1b50}', to: '\u{1b7c}', }, + Range { from: '\u{1b7d}', to: '\u{1b7f}', }, + Range { from: '\u{1b80}', to: '\u{1bf3}', }, + Range { from: '\u{1bf4}', to: '\u{1bfb}', }, + Range { from: '\u{1bfc}', to: '\u{1c37}', }, + Range { from: '\u{1c38}', to: '\u{1c3a}', }, + Range { from: '\u{1c3b}', to: '\u{1c49}', }, + Range { from: '\u{1c4a}', to: '\u{1c4c}', }, + Range { from: '\u{1c4d}', to: '\u{1c7f}', }, + Range { from: '\u{1c80}', to: '\u{1c83}', }, + Range { from: '\u{1c84}', to: '\u{1c85}', }, + Range { from: '\u{1c86}', to: '\u{1c88}', }, + Range { from: '\u{1c89}', to: '\u{1cbf}', }, + Range { from: '\u{1cc0}', to: '\u{1cc7}', }, + Range { from: '\u{1cc8}', to: '\u{1ccf}', }, + Range { from: '\u{1cd0}', to: '\u{1cf9}', }, + Range { from: '\u{1cfa}', to: '\u{1cff}', }, + Range { from: '\u{1d00}', to: '\u{1d2b}', }, + Range { from: '\u{1d2c}', to: '\u{1d6a}', }, + Range { from: '\u{1d6b}', to: '\u{1d77}', }, + Range { from: '\u{1d78}', to: '\u{1d78}', }, + Range { from: '\u{1d79}', to: '\u{1d9a}', }, + Range { from: '\u{1d9b}', to: '\u{1dbf}', }, + Range { from: '\u{1dc0}', to: '\u{1df9}', }, + Range { from: '\u{1dfa}', to: '\u{1dfa}', }, + Range { from: '\u{1dfb}', to: '\u{1dff}', }, + Range { from: '\u{1e00}', to: '\u{1e94}', }, + Range { from: '\u{1e95}', to: '\u{1e99}', }, + Range { from: '\u{1e9a}', to: '\u{1e9b}', }, + Range { from: '\u{1e9c}', to: '\u{1e9d}', }, + Range { from: '\u{1e9e}', to: '\u{1efe}', }, + Range { from: '\u{1eff}', to: '\u{1f07}', }, + Range { from: '\u{1f08}', to: '\u{1f0f}', }, + Range { from: '\u{1f10}', to: '\u{1f15}', }, + Range { from: '\u{1f16}', to: '\u{1f17}', }, + Range { from: '\u{1f18}', to: '\u{1f1d}', }, + Range { from: '\u{1f1e}', to: '\u{1f1f}', }, + Range { from: '\u{1f20}', to: '\u{1f27}', }, + Range { from: '\u{1f28}', to: '\u{1f2f}', }, + Range { from: '\u{1f30}', to: '\u{1f37}', }, + Range { from: '\u{1f38}', to: '\u{1f3f}', }, + Range { from: '\u{1f40}', to: '\u{1f45}', }, + Range { from: '\u{1f46}', to: '\u{1f47}', }, + Range { from: '\u{1f48}', to: '\u{1f4d}', }, + Range { from: '\u{1f4e}', to: '\u{1f4f}', }, + Range { from: '\u{1f50}', to: '\u{1f57}', }, + Range { from: '\u{1f58}', to: '\u{1f5f}', }, + Range { from: '\u{1f60}', to: '\u{1f67}', }, + Range { from: '\u{1f68}', to: '\u{1f7d}', }, + Range { from: '\u{1f7e}', to: '\u{1f7f}', }, + Range { from: '\u{1f80}', to: '\u{1faf}', }, + Range { from: '\u{1fb0}', to: '\u{1fb1}', }, + Range { from: '\u{1fb2}', to: '\u{1fcf}', }, + Range { from: '\u{1fd0}', to: '\u{1fd2}', }, + Range { from: '\u{1fd3}', to: '\u{1fd3}', }, + Range { from: '\u{1fd4}', to: '\u{1fd5}', }, + Range { from: '\u{1fd6}', to: '\u{1fd7}', }, + Range { from: '\u{1fd8}', to: '\u{1fdf}', }, + Range { from: '\u{1fe0}', to: '\u{1fe2}', }, + Range { from: '\u{1fe3}', to: '\u{1fe3}', }, + Range { from: '\u{1fe4}', to: '\u{1fe7}', }, + Range { from: '\u{1fe8}', to: '\u{1fef}', }, + Range { from: '\u{1ff0}', to: '\u{1ff1}', }, + Range { from: '\u{1ff2}', to: '\u{1fff}', }, + Range { from: '\u{2000}', to: '\u{200a}', }, + Range { from: '\u{200b}', to: '\u{200b}', }, + Range { from: '\u{200c}', to: '\u{200d}', }, + Range { from: '\u{200e}', to: '\u{200f}', }, + Range { from: '\u{2010}', to: '\u{2011}', }, + Range { from: '\u{2012}', to: '\u{2016}', }, + Range { from: '\u{2017}', to: '\u{2017}', }, + Range { from: '\u{2018}', to: '\u{2023}', }, + Range { 
from: '\u{2024}', to: '\u{2026}', }, + Range { from: '\u{2027}', to: '\u{2027}', }, + Range { from: '\u{2028}', to: '\u{202e}', }, + Range { from: '\u{202f}', to: '\u{202f}', }, + Range { from: '\u{2030}', to: '\u{2032}', }, + Range { from: '\u{2033}', to: '\u{2037}', }, + Range { from: '\u{2038}', to: '\u{203b}', }, + Range { from: '\u{203c}', to: '\u{203e}', }, + Range { from: '\u{203f}', to: '\u{2046}', }, + Range { from: '\u{2047}', to: '\u{2049}', }, + Range { from: '\u{204a}', to: '\u{2056}', }, + Range { from: '\u{2057}', to: '\u{2057}', }, + Range { from: '\u{2058}', to: '\u{205e}', }, + Range { from: '\u{205f}', to: '\u{2060}', }, + Range { from: '\u{2061}', to: '\u{2063}', }, + Range { from: '\u{2064}', to: '\u{2064}', }, + Range { from: '\u{2065}', to: '\u{206f}', }, + Range { from: '\u{2070}', to: '\u{2071}', }, + Range { from: '\u{2072}', to: '\u{2073}', }, + Range { from: '\u{2074}', to: '\u{209c}', }, + Range { from: '\u{209d}', to: '\u{209f}', }, + Range { from: '\u{20a0}', to: '\u{20a7}', }, + Range { from: '\u{20a8}', to: '\u{20a8}', }, + Range { from: '\u{20a9}', to: '\u{20bf}', }, + Range { from: '\u{20c0}', to: '\u{20cf}', }, + Range { from: '\u{20d0}', to: '\u{20f0}', }, + Range { from: '\u{20f1}', to: '\u{20ff}', }, + Range { from: '\u{2100}', to: '\u{210a}', }, + Range { from: '\u{210b}', to: '\u{210e}', }, + Range { from: '\u{210f}', to: '\u{210f}', }, + Range { from: '\u{2110}', to: '\u{2111}', }, + Range { from: '\u{2112}', to: '\u{2113}', }, + Range { from: '\u{2114}', to: '\u{2116}', }, + Range { from: '\u{2117}', to: '\u{2118}', }, + Range { from: '\u{2119}', to: '\u{211a}', }, + Range { from: '\u{211b}', to: '\u{211d}', }, + Range { from: '\u{211e}', to: '\u{211f}', }, + Range { from: '\u{2120}', to: '\u{212e}', }, + Range { from: '\u{212f}', to: '\u{2130}', }, + Range { from: '\u{2131}', to: '\u{213c}', }, + Range { from: '\u{213d}', to: '\u{213e}', }, + Range { from: '\u{213f}', to: '\u{2140}', }, + Range { from: '\u{2141}', to: '\u{2144}', }, + Range { from: '\u{2145}', to: '\u{2146}', }, + Range { from: '\u{2147}', to: '\u{2149}', }, + Range { from: '\u{214a}', to: '\u{214f}', }, + Range { from: '\u{2150}', to: '\u{217f}', }, + Range { from: '\u{2180}', to: '\u{2182}', }, + Range { from: '\u{2183}', to: '\u{2183}', }, + Range { from: '\u{2184}', to: '\u{2188}', }, + Range { from: '\u{2189}', to: '\u{2189}', }, + Range { from: '\u{218a}', to: '\u{218b}', }, + Range { from: '\u{218c}', to: '\u{218f}', }, + Range { from: '\u{2190}', to: '\u{222b}', }, + Range { from: '\u{222c}', to: '\u{2230}', }, + Range { from: '\u{2231}', to: '\u{225f}', }, + Range { from: '\u{2260}', to: '\u{2260}', }, + Range { from: '\u{2261}', to: '\u{226d}', }, + Range { from: '\u{226e}', to: '\u{226f}', }, + Range { from: '\u{2270}', to: '\u{2328}', }, + Range { from: '\u{2329}', to: '\u{232a}', }, + Range { from: '\u{232b}', to: '\u{2426}', }, + Range { from: '\u{2427}', to: '\u{243f}', }, + Range { from: '\u{2440}', to: '\u{244a}', }, + Range { from: '\u{244b}', to: '\u{245f}', }, + Range { from: '\u{2460}', to: '\u{2487}', }, + Range { from: '\u{2488}', to: '\u{249b}', }, + Range { from: '\u{249c}', to: '\u{24ea}', }, + Range { from: '\u{24eb}', to: '\u{2a0b}', }, + Range { from: '\u{2a0c}', to: '\u{2a0c}', }, + Range { from: '\u{2a0d}', to: '\u{2a73}', }, + Range { from: '\u{2a74}', to: '\u{2a76}', }, + Range { from: '\u{2a77}', to: '\u{2adb}', }, + Range { from: '\u{2adc}', to: '\u{2adc}', }, + Range { from: '\u{2add}', to: '\u{2b73}', }, + Range { from: '\u{2b74}', to: 
'\u{2b75}', }, + Range { from: '\u{2b76}', to: '\u{2b95}', }, + Range { from: '\u{2b96}', to: '\u{2b97}', }, + Range { from: '\u{2b98}', to: '\u{2bb9}', }, + Range { from: '\u{2bba}', to: '\u{2bbc}', }, + Range { from: '\u{2bbd}', to: '\u{2bc8}', }, + Range { from: '\u{2bc9}', to: '\u{2bc9}', }, + Range { from: '\u{2bca}', to: '\u{2bd2}', }, + Range { from: '\u{2bd3}', to: '\u{2beb}', }, + Range { from: '\u{2bec}', to: '\u{2bef}', }, + Range { from: '\u{2bf0}', to: '\u{2bff}', }, + Range { from: '\u{2c00}', to: '\u{2c2f}', }, + Range { from: '\u{2c30}', to: '\u{2c5e}', }, + Range { from: '\u{2c5f}', to: '\u{2c64}', }, + Range { from: '\u{2c65}', to: '\u{2c66}', }, + Range { from: '\u{2c67}', to: '\u{2c72}', }, + Range { from: '\u{2c73}', to: '\u{2c74}', }, + Range { from: '\u{2c75}', to: '\u{2c75}', }, + Range { from: '\u{2c76}', to: '\u{2c7b}', }, + Range { from: '\u{2c7c}', to: '\u{2ce2}', }, + Range { from: '\u{2ce3}', to: '\u{2cea}', }, + Range { from: '\u{2ceb}', to: '\u{2ced}', }, + Range { from: '\u{2cee}', to: '\u{2cf1}', }, + Range { from: '\u{2cf2}', to: '\u{2cf3}', }, + Range { from: '\u{2cf4}', to: '\u{2cf8}', }, + Range { from: '\u{2cf9}', to: '\u{2d25}', }, + Range { from: '\u{2d26}', to: '\u{2d27}', }, + Range { from: '\u{2d28}', to: '\u{2d2c}', }, + Range { from: '\u{2d2d}', to: '\u{2d2d}', }, + Range { from: '\u{2d2e}', to: '\u{2d2f}', }, + Range { from: '\u{2d30}', to: '\u{2d67}', }, + Range { from: '\u{2d68}', to: '\u{2d6e}', }, + Range { from: '\u{2d6f}', to: '\u{2d70}', }, + Range { from: '\u{2d71}', to: '\u{2d7e}', }, + Range { from: '\u{2d7f}', to: '\u{2d96}', }, + Range { from: '\u{2d97}', to: '\u{2d9f}', }, + Range { from: '\u{2da0}', to: '\u{2da6}', }, + Range { from: '\u{2da7}', to: '\u{2da7}', }, + Range { from: '\u{2da8}', to: '\u{2dae}', }, + Range { from: '\u{2daf}', to: '\u{2daf}', }, + Range { from: '\u{2db0}', to: '\u{2db6}', }, + Range { from: '\u{2db7}', to: '\u{2db7}', }, + Range { from: '\u{2db8}', to: '\u{2dbe}', }, + Range { from: '\u{2dbf}', to: '\u{2dbf}', }, + Range { from: '\u{2dc0}', to: '\u{2dc6}', }, + Range { from: '\u{2dc7}', to: '\u{2dc7}', }, + Range { from: '\u{2dc8}', to: '\u{2dce}', }, + Range { from: '\u{2dcf}', to: '\u{2dcf}', }, + Range { from: '\u{2dd0}', to: '\u{2dd6}', }, + Range { from: '\u{2dd7}', to: '\u{2dd7}', }, + Range { from: '\u{2dd8}', to: '\u{2dde}', }, + Range { from: '\u{2ddf}', to: '\u{2ddf}', }, + Range { from: '\u{2de0}', to: '\u{2e49}', }, + Range { from: '\u{2e4a}', to: '\u{2e7f}', }, + Range { from: '\u{2e80}', to: '\u{2e99}', }, + Range { from: '\u{2e9a}', to: '\u{2e9a}', }, + Range { from: '\u{2e9b}', to: '\u{2e9e}', }, + Range { from: '\u{2e9f}', to: '\u{2e9f}', }, + Range { from: '\u{2ea0}', to: '\u{2ef2}', }, + Range { from: '\u{2ef3}', to: '\u{2ef3}', }, + Range { from: '\u{2ef4}', to: '\u{2eff}', }, + Range { from: '\u{2f00}', to: '\u{2fd5}', }, + Range { from: '\u{2fd6}', to: '\u{2fff}', }, + Range { from: '\u{3000}', to: '\u{3002}', }, + Range { from: '\u{3003}', to: '\u{3035}', }, + Range { from: '\u{3036}', to: '\u{303a}', }, + Range { from: '\u{303b}', to: '\u{303f}', }, + Range { from: '\u{3040}', to: '\u{3040}', }, + Range { from: '\u{3041}', to: '\u{3096}', }, + Range { from: '\u{3097}', to: '\u{3098}', }, + Range { from: '\u{3099}', to: '\u{309a}', }, + Range { from: '\u{309b}', to: '\u{309c}', }, + Range { from: '\u{309d}', to: '\u{309e}', }, + Range { from: '\u{309f}', to: '\u{309f}', }, + Range { from: '\u{30a0}', to: '\u{30fe}', }, + Range { from: '\u{30ff}', to: '\u{30ff}', }, + Range { 
from: '\u{3100}', to: '\u{3104}', }, + Range { from: '\u{3105}', to: '\u{312e}', }, + Range { from: '\u{312f}', to: '\u{3130}', }, + Range { from: '\u{3131}', to: '\u{318f}', }, + Range { from: '\u{3190}', to: '\u{3191}', }, + Range { from: '\u{3192}', to: '\u{319f}', }, + Range { from: '\u{31a0}', to: '\u{31ba}', }, + Range { from: '\u{31bb}', to: '\u{31bf}', }, + Range { from: '\u{31c0}', to: '\u{31e3}', }, + Range { from: '\u{31e4}', to: '\u{31ef}', }, + Range { from: '\u{31f0}', to: '\u{31ff}', }, + Range { from: '\u{3200}', to: '\u{3247}', }, + Range { from: '\u{3248}', to: '\u{324f}', }, + Range { from: '\u{3250}', to: '\u{33ff}', }, + Range { from: '\u{3400}', to: '\u{4db5}', }, + Range { from: '\u{4db6}', to: '\u{4dbf}', }, + Range { from: '\u{4dc0}', to: '\u{9fea}', }, + Range { from: '\u{9feb}', to: '\u{9fff}', }, + Range { from: '\u{a000}', to: '\u{a48c}', }, + Range { from: '\u{a48d}', to: '\u{a48f}', }, + Range { from: '\u{a490}', to: '\u{a4c6}', }, + Range { from: '\u{a4c7}', to: '\u{a4cf}', }, + Range { from: '\u{a4d0}', to: '\u{a62b}', }, + Range { from: '\u{a62c}', to: '\u{a63f}', }, + Range { from: '\u{a640}', to: '\u{a66c}', }, + Range { from: '\u{a66d}', to: '\u{a67f}', }, + Range { from: '\u{a680}', to: '\u{a69d}', }, + Range { from: '\u{a69e}', to: '\u{a6f7}', }, + Range { from: '\u{a6f8}', to: '\u{a6ff}', }, + Range { from: '\u{a700}', to: '\u{a721}', }, + Range { from: '\u{a722}', to: '\u{a72e}', }, + Range { from: '\u{a72f}', to: '\u{a731}', }, + Range { from: '\u{a732}', to: '\u{a770}', }, + Range { from: '\u{a771}', to: '\u{a778}', }, + Range { from: '\u{a779}', to: '\u{a786}', }, + Range { from: '\u{a787}', to: '\u{a78a}', }, + Range { from: '\u{a78b}', to: '\u{a78d}', }, + Range { from: '\u{a78e}', to: '\u{a78f}', }, + Range { from: '\u{a790}', to: '\u{a792}', }, + Range { from: '\u{a793}', to: '\u{a795}', }, + Range { from: '\u{a796}', to: '\u{a7b7}', }, + Range { from: '\u{a7b8}', to: '\u{a7f6}', }, + Range { from: '\u{a7f7}', to: '\u{a7f9}', }, + Range { from: '\u{a7fa}', to: '\u{a82b}', }, + Range { from: '\u{a82c}', to: '\u{a82f}', }, + Range { from: '\u{a830}', to: '\u{a839}', }, + Range { from: '\u{a83a}', to: '\u{a83f}', }, + Range { from: '\u{a840}', to: '\u{a877}', }, + Range { from: '\u{a878}', to: '\u{a87f}', }, + Range { from: '\u{a880}', to: '\u{a8c5}', }, + Range { from: '\u{a8c6}', to: '\u{a8cd}', }, + Range { from: '\u{a8ce}', to: '\u{a8d9}', }, + Range { from: '\u{a8da}', to: '\u{a8df}', }, + Range { from: '\u{a8e0}', to: '\u{a8fd}', }, + Range { from: '\u{a8fe}', to: '\u{a8ff}', }, + Range { from: '\u{a900}', to: '\u{a953}', }, + Range { from: '\u{a954}', to: '\u{a95e}', }, + Range { from: '\u{a95f}', to: '\u{a97c}', }, + Range { from: '\u{a97d}', to: '\u{a97f}', }, + Range { from: '\u{a980}', to: '\u{a9cd}', }, + Range { from: '\u{a9ce}', to: '\u{a9ce}', }, + Range { from: '\u{a9cf}', to: '\u{a9d9}', }, + Range { from: '\u{a9da}', to: '\u{a9dd}', }, + Range { from: '\u{a9de}', to: '\u{a9fe}', }, + Range { from: '\u{a9ff}', to: '\u{a9ff}', }, + Range { from: '\u{aa00}', to: '\u{aa36}', }, + Range { from: '\u{aa37}', to: '\u{aa3f}', }, + Range { from: '\u{aa40}', to: '\u{aa4d}', }, + Range { from: '\u{aa4e}', to: '\u{aa4f}', }, + Range { from: '\u{aa50}', to: '\u{aa59}', }, + Range { from: '\u{aa5a}', to: '\u{aa5b}', }, + Range { from: '\u{aa5c}', to: '\u{aac2}', }, + Range { from: '\u{aac3}', to: '\u{aada}', }, + Range { from: '\u{aadb}', to: '\u{aaf6}', }, + Range { from: '\u{aaf7}', to: '\u{ab00}', }, + Range { from: '\u{ab01}', to: 
'\u{ab06}', }, + Range { from: '\u{ab07}', to: '\u{ab08}', }, + Range { from: '\u{ab09}', to: '\u{ab0e}', }, + Range { from: '\u{ab0f}', to: '\u{ab10}', }, + Range { from: '\u{ab11}', to: '\u{ab16}', }, + Range { from: '\u{ab17}', to: '\u{ab1f}', }, + Range { from: '\u{ab20}', to: '\u{ab26}', }, + Range { from: '\u{ab27}', to: '\u{ab27}', }, + Range { from: '\u{ab28}', to: '\u{ab2e}', }, + Range { from: '\u{ab2f}', to: '\u{ab2f}', }, + Range { from: '\u{ab30}', to: '\u{ab5b}', }, + Range { from: '\u{ab5c}', to: '\u{ab5f}', }, + Range { from: '\u{ab60}', to: '\u{ab65}', }, + Range { from: '\u{ab66}', to: '\u{ab6f}', }, + Range { from: '\u{ab70}', to: '\u{abbf}', }, + Range { from: '\u{abc0}', to: '\u{abed}', }, + Range { from: '\u{abee}', to: '\u{abef}', }, + Range { from: '\u{abf0}', to: '\u{abf9}', }, + Range { from: '\u{abfa}', to: '\u{abff}', }, + Range { from: '\u{ac00}', to: '\u{d7a3}', }, + Range { from: '\u{d7a4}', to: '\u{d7af}', }, + Range { from: '\u{d7b0}', to: '\u{d7c6}', }, + Range { from: '\u{d7c7}', to: '\u{d7ca}', }, + Range { from: '\u{d7cb}', to: '\u{d7fb}', }, + Range { from: '\u{d7fc}', to: '\u{f8ff}', }, + Range { from: '\u{f900}', to: '\u{f906}', }, + Range { from: '\u{f907}', to: '\u{f908}', }, + Range { from: '\u{f909}', to: '\u{fa0d}', }, + Range { from: '\u{fa0e}', to: '\u{fa0f}', }, + Range { from: '\u{fa10}', to: '\u{fa12}', }, + Range { from: '\u{fa13}', to: '\u{fa14}', }, + Range { from: '\u{fa15}', to: '\u{fa22}', }, + Range { from: '\u{fa23}', to: '\u{fa24}', }, + Range { from: '\u{fa25}', to: '\u{fa26}', }, + Range { from: '\u{fa27}', to: '\u{fa29}', }, + Range { from: '\u{fa2a}', to: '\u{fa5c}', }, + Range { from: '\u{fa5d}', to: '\u{fa5e}', }, + Range { from: '\u{fa5f}', to: '\u{fa6d}', }, + Range { from: '\u{fa6e}', to: '\u{fa6f}', }, + Range { from: '\u{fa70}', to: '\u{fad9}', }, + Range { from: '\u{fada}', to: '\u{faff}', }, + Range { from: '\u{fb00}', to: '\u{fb04}', }, + Range { from: '\u{fb05}', to: '\u{fb06}', }, + Range { from: '\u{fb07}', to: '\u{fb12}', }, + Range { from: '\u{fb13}', to: '\u{fb17}', }, + Range { from: '\u{fb18}', to: '\u{fb1c}', }, + Range { from: '\u{fb1d}', to: '\u{fb4f}', }, + Range { from: '\u{fb50}', to: '\u{fb51}', }, + Range { from: '\u{fb52}', to: '\u{fb55}', }, + Range { from: '\u{fb56}', to: '\u{fb59}', }, + Range { from: '\u{fb5a}', to: '\u{fb5d}', }, + Range { from: '\u{fb5e}', to: '\u{fb61}', }, + Range { from: '\u{fb62}', to: '\u{fb65}', }, + Range { from: '\u{fb66}', to: '\u{fb69}', }, + Range { from: '\u{fb6a}', to: '\u{fb6d}', }, + Range { from: '\u{fb6e}', to: '\u{fb71}', }, + Range { from: '\u{fb72}', to: '\u{fb75}', }, + Range { from: '\u{fb76}', to: '\u{fb79}', }, + Range { from: '\u{fb7a}', to: '\u{fb7d}', }, + Range { from: '\u{fb7e}', to: '\u{fb81}', }, + Range { from: '\u{fb82}', to: '\u{fb83}', }, + Range { from: '\u{fb84}', to: '\u{fb85}', }, + Range { from: '\u{fb86}', to: '\u{fb87}', }, + Range { from: '\u{fb88}', to: '\u{fb89}', }, + Range { from: '\u{fb8a}', to: '\u{fb8b}', }, + Range { from: '\u{fb8c}', to: '\u{fb8d}', }, + Range { from: '\u{fb8e}', to: '\u{fb91}', }, + Range { from: '\u{fb92}', to: '\u{fb95}', }, + Range { from: '\u{fb96}', to: '\u{fb99}', }, + Range { from: '\u{fb9a}', to: '\u{fb9d}', }, + Range { from: '\u{fb9e}', to: '\u{fb9f}', }, + Range { from: '\u{fba0}', to: '\u{fba3}', }, + Range { from: '\u{fba4}', to: '\u{fba5}', }, + Range { from: '\u{fba6}', to: '\u{fba9}', }, + Range { from: '\u{fbaa}', to: '\u{fbad}', }, + Range { from: '\u{fbae}', to: '\u{fbaf}', }, + Range { 
from: '\u{fbb0}', to: '\u{fbb1}', }, + Range { from: '\u{fbb2}', to: '\u{fbc1}', }, + Range { from: '\u{fbc2}', to: '\u{fbd2}', }, + Range { from: '\u{fbd3}', to: '\u{fbd6}', }, + Range { from: '\u{fbd7}', to: '\u{fbd8}', }, + Range { from: '\u{fbd9}', to: '\u{fbda}', }, + Range { from: '\u{fbdb}', to: '\u{fbdc}', }, + Range { from: '\u{fbdd}', to: '\u{fbdd}', }, + Range { from: '\u{fbde}', to: '\u{fbdf}', }, + Range { from: '\u{fbe0}', to: '\u{fbe1}', }, + Range { from: '\u{fbe2}', to: '\u{fbe3}', }, + Range { from: '\u{fbe4}', to: '\u{fbe7}', }, + Range { from: '\u{fbe8}', to: '\u{fbe9}', }, + Range { from: '\u{fbea}', to: '\u{fbeb}', }, + Range { from: '\u{fbec}', to: '\u{fbed}', }, + Range { from: '\u{fbee}', to: '\u{fbef}', }, + Range { from: '\u{fbf0}', to: '\u{fbf1}', }, + Range { from: '\u{fbf2}', to: '\u{fbf3}', }, + Range { from: '\u{fbf4}', to: '\u{fbf5}', }, + Range { from: '\u{fbf6}', to: '\u{fbf8}', }, + Range { from: '\u{fbf9}', to: '\u{fbfb}', }, + Range { from: '\u{fbfc}', to: '\u{fbff}', }, + Range { from: '\u{fc00}', to: '\u{fd3b}', }, + Range { from: '\u{fd3c}', to: '\u{fd3d}', }, + Range { from: '\u{fd3e}', to: '\u{fd3f}', }, + Range { from: '\u{fd40}', to: '\u{fd4f}', }, + Range { from: '\u{fd50}', to: '\u{fd50}', }, + Range { from: '\u{fd51}', to: '\u{fd52}', }, + Range { from: '\u{fd53}', to: '\u{fd57}', }, + Range { from: '\u{fd58}', to: '\u{fd59}', }, + Range { from: '\u{fd5a}', to: '\u{fd5e}', }, + Range { from: '\u{fd5f}', to: '\u{fd60}', }, + Range { from: '\u{fd61}', to: '\u{fd61}', }, + Range { from: '\u{fd62}', to: '\u{fd63}', }, + Range { from: '\u{fd64}', to: '\u{fd65}', }, + Range { from: '\u{fd66}', to: '\u{fd66}', }, + Range { from: '\u{fd67}', to: '\u{fd68}', }, + Range { from: '\u{fd69}', to: '\u{fd69}', }, + Range { from: '\u{fd6a}', to: '\u{fd6b}', }, + Range { from: '\u{fd6c}', to: '\u{fd6d}', }, + Range { from: '\u{fd6e}', to: '\u{fd6e}', }, + Range { from: '\u{fd6f}', to: '\u{fd70}', }, + Range { from: '\u{fd71}', to: '\u{fd72}', }, + Range { from: '\u{fd73}', to: '\u{fd75}', }, + Range { from: '\u{fd76}', to: '\u{fd77}', }, + Range { from: '\u{fd78}', to: '\u{fd7b}', }, + Range { from: '\u{fd7c}', to: '\u{fd7d}', }, + Range { from: '\u{fd7e}', to: '\u{fd82}', }, + Range { from: '\u{fd83}', to: '\u{fd84}', }, + Range { from: '\u{fd85}', to: '\u{fd86}', }, + Range { from: '\u{fd87}', to: '\u{fd88}', }, + Range { from: '\u{fd89}', to: '\u{fd8f}', }, + Range { from: '\u{fd90}', to: '\u{fd91}', }, + Range { from: '\u{fd92}', to: '\u{fd96}', }, + Range { from: '\u{fd97}', to: '\u{fd98}', }, + Range { from: '\u{fd99}', to: '\u{fd9b}', }, + Range { from: '\u{fd9c}', to: '\u{fd9d}', }, + Range { from: '\u{fd9e}', to: '\u{fdc7}', }, + Range { from: '\u{fdc8}', to: '\u{fdef}', }, + Range { from: '\u{fdf0}', to: '\u{fdfd}', }, + Range { from: '\u{fdfe}', to: '\u{fdff}', }, + Range { from: '\u{fe00}', to: '\u{fe0f}', }, + Range { from: '\u{fe10}', to: '\u{fe18}', }, + Range { from: '\u{fe19}', to: '\u{fe1f}', }, + Range { from: '\u{fe20}', to: '\u{fe2f}', }, + Range { from: '\u{fe30}', to: '\u{fe32}', }, + Range { from: '\u{fe33}', to: '\u{fe34}', }, + Range { from: '\u{fe35}', to: '\u{fe44}', }, + Range { from: '\u{fe45}', to: '\u{fe46}', }, + Range { from: '\u{fe47}', to: '\u{fe48}', }, + Range { from: '\u{fe49}', to: '\u{fe4c}', }, + Range { from: '\u{fe4d}', to: '\u{fe4f}', }, + Range { from: '\u{fe50}', to: '\u{fe51}', }, + Range { from: '\u{fe52}', to: '\u{fe53}', }, + Range { from: '\u{fe54}', to: '\u{fe6b}', }, + Range { from: '\u{fe6c}', to: 
'\u{fe6f}', }, + Range { from: '\u{fe70}', to: '\u{fe80}', }, + Range { from: '\u{fe81}', to: '\u{fe82}', }, + Range { from: '\u{fe83}', to: '\u{fe84}', }, + Range { from: '\u{fe85}', to: '\u{fe86}', }, + Range { from: '\u{fe87}', to: '\u{fe88}', }, + Range { from: '\u{fe89}', to: '\u{fe8c}', }, + Range { from: '\u{fe8d}', to: '\u{fe8e}', }, + Range { from: '\u{fe8f}', to: '\u{fe92}', }, + Range { from: '\u{fe93}', to: '\u{fe94}', }, + Range { from: '\u{fe95}', to: '\u{fe98}', }, + Range { from: '\u{fe99}', to: '\u{fe9c}', }, + Range { from: '\u{fe9d}', to: '\u{fea0}', }, + Range { from: '\u{fea1}', to: '\u{fea4}', }, + Range { from: '\u{fea5}', to: '\u{fea8}', }, + Range { from: '\u{fea9}', to: '\u{feaa}', }, + Range { from: '\u{feab}', to: '\u{feac}', }, + Range { from: '\u{fead}', to: '\u{feae}', }, + Range { from: '\u{feaf}', to: '\u{feb0}', }, + Range { from: '\u{feb1}', to: '\u{feb4}', }, + Range { from: '\u{feb5}', to: '\u{feb8}', }, + Range { from: '\u{feb9}', to: '\u{febc}', }, + Range { from: '\u{febd}', to: '\u{fec0}', }, + Range { from: '\u{fec1}', to: '\u{fec4}', }, + Range { from: '\u{fec5}', to: '\u{fec8}', }, + Range { from: '\u{fec9}', to: '\u{fecc}', }, + Range { from: '\u{fecd}', to: '\u{fed0}', }, + Range { from: '\u{fed1}', to: '\u{fed4}', }, + Range { from: '\u{fed5}', to: '\u{fed8}', }, + Range { from: '\u{fed9}', to: '\u{fedc}', }, + Range { from: '\u{fedd}', to: '\u{fee0}', }, + Range { from: '\u{fee1}', to: '\u{fee4}', }, + Range { from: '\u{fee5}', to: '\u{fee8}', }, + Range { from: '\u{fee9}', to: '\u{feec}', }, + Range { from: '\u{feed}', to: '\u{feee}', }, + Range { from: '\u{feef}', to: '\u{fef0}', }, + Range { from: '\u{fef1}', to: '\u{fef4}', }, + Range { from: '\u{fef5}', to: '\u{fef6}', }, + Range { from: '\u{fef7}', to: '\u{fef8}', }, + Range { from: '\u{fef9}', to: '\u{fefa}', }, + Range { from: '\u{fefb}', to: '\u{fefc}', }, + Range { from: '\u{fefd}', to: '\u{fefe}', }, + Range { from: '\u{feff}', to: '\u{ffbe}', }, + Range { from: '\u{ffbf}', to: '\u{ffc1}', }, + Range { from: '\u{ffc2}', to: '\u{ffc7}', }, + Range { from: '\u{ffc8}', to: '\u{ffc9}', }, + Range { from: '\u{ffca}', to: '\u{ffcf}', }, + Range { from: '\u{ffd0}', to: '\u{ffd1}', }, + Range { from: '\u{ffd2}', to: '\u{ffd7}', }, + Range { from: '\u{ffd8}', to: '\u{ffd9}', }, + Range { from: '\u{ffda}', to: '\u{ffdc}', }, + Range { from: '\u{ffdd}', to: '\u{ffdf}', }, + Range { from: '\u{ffe0}', to: '\u{ffee}', }, + Range { from: '\u{ffef}', to: '\u{ffff}', }, + Range { from: '\u{10000}', to: '\u{1000b}', }, + Range { from: '\u{1000c}', to: '\u{1000c}', }, + Range { from: '\u{1000d}', to: '\u{10026}', }, + Range { from: '\u{10027}', to: '\u{10027}', }, + Range { from: '\u{10028}', to: '\u{1003a}', }, + Range { from: '\u{1003b}', to: '\u{1003b}', }, + Range { from: '\u{1003c}', to: '\u{1003d}', }, + Range { from: '\u{1003e}', to: '\u{1003e}', }, + Range { from: '\u{1003f}', to: '\u{1004d}', }, + Range { from: '\u{1004e}', to: '\u{1004f}', }, + Range { from: '\u{10050}', to: '\u{1005d}', }, + Range { from: '\u{1005e}', to: '\u{1007f}', }, + Range { from: '\u{10080}', to: '\u{100fa}', }, + Range { from: '\u{100fb}', to: '\u{100ff}', }, + Range { from: '\u{10100}', to: '\u{10102}', }, + Range { from: '\u{10103}', to: '\u{10106}', }, + Range { from: '\u{10107}', to: '\u{10133}', }, + Range { from: '\u{10134}', to: '\u{10136}', }, + Range { from: '\u{10137}', to: '\u{1018e}', }, + Range { from: '\u{1018f}', to: '\u{1018f}', }, + Range { from: '\u{10190}', to: '\u{1019b}', }, + Range { from: 
'\u{1019c}', to: '\u{1019f}', }, + Range { from: '\u{101a0}', to: '\u{101a0}', }, + Range { from: '\u{101a1}', to: '\u{101cf}', }, + Range { from: '\u{101d0}', to: '\u{101fd}', }, + Range { from: '\u{101fe}', to: '\u{1027f}', }, + Range { from: '\u{10280}', to: '\u{1029c}', }, + Range { from: '\u{1029d}', to: '\u{1029f}', }, + Range { from: '\u{102a0}', to: '\u{102d0}', }, + Range { from: '\u{102d1}', to: '\u{102df}', }, + Range { from: '\u{102e0}', to: '\u{102fb}', }, + Range { from: '\u{102fc}', to: '\u{102ff}', }, + Range { from: '\u{10300}', to: '\u{10323}', }, + Range { from: '\u{10324}', to: '\u{1032c}', }, + Range { from: '\u{1032d}', to: '\u{1034a}', }, + Range { from: '\u{1034b}', to: '\u{1034f}', }, + Range { from: '\u{10350}', to: '\u{1037a}', }, + Range { from: '\u{1037b}', to: '\u{1037f}', }, + Range { from: '\u{10380}', to: '\u{1039d}', }, + Range { from: '\u{1039e}', to: '\u{1039e}', }, + Range { from: '\u{1039f}', to: '\u{103c3}', }, + Range { from: '\u{103c4}', to: '\u{103c7}', }, + Range { from: '\u{103c8}', to: '\u{103d5}', }, + Range { from: '\u{103d6}', to: '\u{103ff}', }, + Range { from: '\u{10400}', to: '\u{10427}', }, + Range { from: '\u{10428}', to: '\u{1049d}', }, + Range { from: '\u{1049e}', to: '\u{1049f}', }, + Range { from: '\u{104a0}', to: '\u{104a9}', }, + Range { from: '\u{104aa}', to: '\u{104af}', }, + Range { from: '\u{104b0}', to: '\u{104d3}', }, + Range { from: '\u{104d4}', to: '\u{104d7}', }, + Range { from: '\u{104d8}', to: '\u{104fb}', }, + Range { from: '\u{104fc}', to: '\u{104ff}', }, + Range { from: '\u{10500}', to: '\u{10527}', }, + Range { from: '\u{10528}', to: '\u{1052f}', }, + Range { from: '\u{10530}', to: '\u{10563}', }, + Range { from: '\u{10564}', to: '\u{1056e}', }, + Range { from: '\u{1056f}', to: '\u{1056f}', }, + Range { from: '\u{10570}', to: '\u{105ff}', }, + Range { from: '\u{10600}', to: '\u{10736}', }, + Range { from: '\u{10737}', to: '\u{1073f}', }, + Range { from: '\u{10740}', to: '\u{10755}', }, + Range { from: '\u{10756}', to: '\u{1075f}', }, + Range { from: '\u{10760}', to: '\u{10767}', }, + Range { from: '\u{10768}', to: '\u{107ff}', }, + Range { from: '\u{10800}', to: '\u{10805}', }, + Range { from: '\u{10806}', to: '\u{10807}', }, + Range { from: '\u{10808}', to: '\u{10809}', }, + Range { from: '\u{1080a}', to: '\u{10835}', }, + Range { from: '\u{10836}', to: '\u{10836}', }, + Range { from: '\u{10837}', to: '\u{10838}', }, + Range { from: '\u{10839}', to: '\u{1083b}', }, + Range { from: '\u{1083c}', to: '\u{1083c}', }, + Range { from: '\u{1083d}', to: '\u{1083e}', }, + Range { from: '\u{1083f}', to: '\u{10855}', }, + Range { from: '\u{10856}', to: '\u{10856}', }, + Range { from: '\u{10857}', to: '\u{1089e}', }, + Range { from: '\u{1089f}', to: '\u{108a6}', }, + Range { from: '\u{108a7}', to: '\u{108af}', }, + Range { from: '\u{108b0}', to: '\u{108df}', }, + Range { from: '\u{108e0}', to: '\u{108f2}', }, + Range { from: '\u{108f3}', to: '\u{108f3}', }, + Range { from: '\u{108f4}', to: '\u{108f5}', }, + Range { from: '\u{108f6}', to: '\u{108fa}', }, + Range { from: '\u{108fb}', to: '\u{1091b}', }, + Range { from: '\u{1091c}', to: '\u{1091e}', }, + Range { from: '\u{1091f}', to: '\u{10939}', }, + Range { from: '\u{1093a}', to: '\u{1093e}', }, + Range { from: '\u{1093f}', to: '\u{1093f}', }, + Range { from: '\u{10940}', to: '\u{1097f}', }, + Range { from: '\u{10980}', to: '\u{109b7}', }, + Range { from: '\u{109b8}', to: '\u{109bb}', }, + Range { from: '\u{109bc}', to: '\u{109cf}', }, + Range { from: '\u{109d0}', to: 
'\u{109d1}', }, + Range { from: '\u{109d2}', to: '\u{10a03}', }, + Range { from: '\u{10a04}', to: '\u{10a04}', }, + Range { from: '\u{10a05}', to: '\u{10a06}', }, + Range { from: '\u{10a07}', to: '\u{10a0b}', }, + Range { from: '\u{10a0c}', to: '\u{10a13}', }, + Range { from: '\u{10a14}', to: '\u{10a14}', }, + Range { from: '\u{10a15}', to: '\u{10a17}', }, + Range { from: '\u{10a18}', to: '\u{10a18}', }, + Range { from: '\u{10a19}', to: '\u{10a33}', }, + Range { from: '\u{10a34}', to: '\u{10a37}', }, + Range { from: '\u{10a38}', to: '\u{10a3a}', }, + Range { from: '\u{10a3b}', to: '\u{10a3e}', }, + Range { from: '\u{10a3f}', to: '\u{10a47}', }, + Range { from: '\u{10a48}', to: '\u{10a4f}', }, + Range { from: '\u{10a50}', to: '\u{10a58}', }, + Range { from: '\u{10a59}', to: '\u{10a5f}', }, + Range { from: '\u{10a60}', to: '\u{10a9f}', }, + Range { from: '\u{10aa0}', to: '\u{10abf}', }, + Range { from: '\u{10ac0}', to: '\u{10ae6}', }, + Range { from: '\u{10ae7}', to: '\u{10aea}', }, + Range { from: '\u{10aeb}', to: '\u{10af6}', }, + Range { from: '\u{10af7}', to: '\u{10aff}', }, + Range { from: '\u{10b00}', to: '\u{10b35}', }, + Range { from: '\u{10b36}', to: '\u{10b38}', }, + Range { from: '\u{10b39}', to: '\u{10b55}', }, + Range { from: '\u{10b56}', to: '\u{10b57}', }, + Range { from: '\u{10b58}', to: '\u{10b72}', }, + Range { from: '\u{10b73}', to: '\u{10b77}', }, + Range { from: '\u{10b78}', to: '\u{10b91}', }, + Range { from: '\u{10b92}', to: '\u{10b98}', }, + Range { from: '\u{10b99}', to: '\u{10b9c}', }, + Range { from: '\u{10b9d}', to: '\u{10ba8}', }, + Range { from: '\u{10ba9}', to: '\u{10baf}', }, + Range { from: '\u{10bb0}', to: '\u{10bff}', }, + Range { from: '\u{10c00}', to: '\u{10c48}', }, + Range { from: '\u{10c49}', to: '\u{10c7f}', }, + Range { from: '\u{10c80}', to: '\u{10cb2}', }, + Range { from: '\u{10cb3}', to: '\u{10cbf}', }, + Range { from: '\u{10cc0}', to: '\u{10cf2}', }, + Range { from: '\u{10cf3}', to: '\u{10cf9}', }, + Range { from: '\u{10cfa}', to: '\u{10cff}', }, + Range { from: '\u{10d00}', to: '\u{10e5f}', }, + Range { from: '\u{10e60}', to: '\u{10e7e}', }, + Range { from: '\u{10e7f}', to: '\u{10fff}', }, + Range { from: '\u{11000}', to: '\u{1104d}', }, + Range { from: '\u{1104e}', to: '\u{11051}', }, + Range { from: '\u{11052}', to: '\u{1106f}', }, + Range { from: '\u{11070}', to: '\u{1107e}', }, + Range { from: '\u{1107f}', to: '\u{110bc}', }, + Range { from: '\u{110bd}', to: '\u{110bd}', }, + Range { from: '\u{110be}', to: '\u{110c1}', }, + Range { from: '\u{110c2}', to: '\u{110cf}', }, + Range { from: '\u{110d0}', to: '\u{110e8}', }, + Range { from: '\u{110e9}', to: '\u{110ef}', }, + Range { from: '\u{110f0}', to: '\u{110f9}', }, + Range { from: '\u{110fa}', to: '\u{110ff}', }, + Range { from: '\u{11100}', to: '\u{11134}', }, + Range { from: '\u{11135}', to: '\u{11135}', }, + Range { from: '\u{11136}', to: '\u{11143}', }, + Range { from: '\u{11144}', to: '\u{1114f}', }, + Range { from: '\u{11150}', to: '\u{11176}', }, + Range { from: '\u{11177}', to: '\u{1117f}', }, + Range { from: '\u{11180}', to: '\u{111cd}', }, + Range { from: '\u{111ce}', to: '\u{111cf}', }, + Range { from: '\u{111d0}', to: '\u{111df}', }, + Range { from: '\u{111e0}', to: '\u{111e0}', }, + Range { from: '\u{111e1}', to: '\u{111f4}', }, + Range { from: '\u{111f5}', to: '\u{111ff}', }, + Range { from: '\u{11200}', to: '\u{11211}', }, + Range { from: '\u{11212}', to: '\u{11212}', }, + Range { from: '\u{11213}', to: '\u{1123e}', }, + Range { from: '\u{1123f}', to: '\u{1127f}', }, + Range { 
from: '\u{11280}', to: '\u{11286}', }, + Range { from: '\u{11287}', to: '\u{11289}', }, + Range { from: '\u{1128a}', to: '\u{1128d}', }, + Range { from: '\u{1128e}', to: '\u{1128e}', }, + Range { from: '\u{1128f}', to: '\u{1129d}', }, + Range { from: '\u{1129e}', to: '\u{1129e}', }, + Range { from: '\u{1129f}', to: '\u{112a9}', }, + Range { from: '\u{112aa}', to: '\u{112af}', }, + Range { from: '\u{112b0}', to: '\u{112ea}', }, + Range { from: '\u{112eb}', to: '\u{112ef}', }, + Range { from: '\u{112f0}', to: '\u{112f9}', }, + Range { from: '\u{112fa}', to: '\u{112ff}', }, + Range { from: '\u{11300}', to: '\u{11303}', }, + Range { from: '\u{11304}', to: '\u{11304}', }, + Range { from: '\u{11305}', to: '\u{1130c}', }, + Range { from: '\u{1130d}', to: '\u{1130e}', }, + Range { from: '\u{1130f}', to: '\u{11310}', }, + Range { from: '\u{11311}', to: '\u{11312}', }, + Range { from: '\u{11313}', to: '\u{11328}', }, + Range { from: '\u{11329}', to: '\u{11329}', }, + Range { from: '\u{1132a}', to: '\u{11330}', }, + Range { from: '\u{11331}', to: '\u{11331}', }, + Range { from: '\u{11332}', to: '\u{11333}', }, + Range { from: '\u{11334}', to: '\u{11334}', }, + Range { from: '\u{11335}', to: '\u{11339}', }, + Range { from: '\u{1133a}', to: '\u{1133b}', }, + Range { from: '\u{1133c}', to: '\u{11344}', }, + Range { from: '\u{11345}', to: '\u{11346}', }, + Range { from: '\u{11347}', to: '\u{11348}', }, + Range { from: '\u{11349}', to: '\u{1134a}', }, + Range { from: '\u{1134b}', to: '\u{1134d}', }, + Range { from: '\u{1134e}', to: '\u{1134f}', }, + Range { from: '\u{11350}', to: '\u{11350}', }, + Range { from: '\u{11351}', to: '\u{11356}', }, + Range { from: '\u{11357}', to: '\u{11357}', }, + Range { from: '\u{11358}', to: '\u{1135c}', }, + Range { from: '\u{1135d}', to: '\u{11363}', }, + Range { from: '\u{11364}', to: '\u{11365}', }, + Range { from: '\u{11366}', to: '\u{1136c}', }, + Range { from: '\u{1136d}', to: '\u{1136f}', }, + Range { from: '\u{11370}', to: '\u{11374}', }, + Range { from: '\u{11375}', to: '\u{113ff}', }, + Range { from: '\u{11400}', to: '\u{11459}', }, + Range { from: '\u{1145a}', to: '\u{1145d}', }, + Range { from: '\u{1145e}', to: '\u{1147f}', }, + Range { from: '\u{11480}', to: '\u{114c7}', }, + Range { from: '\u{114c8}', to: '\u{114cf}', }, + Range { from: '\u{114d0}', to: '\u{114d9}', }, + Range { from: '\u{114da}', to: '\u{1157f}', }, + Range { from: '\u{11580}', to: '\u{115b5}', }, + Range { from: '\u{115b6}', to: '\u{115b7}', }, + Range { from: '\u{115b8}', to: '\u{115dd}', }, + Range { from: '\u{115de}', to: '\u{115ff}', }, + Range { from: '\u{11600}', to: '\u{11644}', }, + Range { from: '\u{11645}', to: '\u{1164f}', }, + Range { from: '\u{11650}', to: '\u{11659}', }, + Range { from: '\u{1165a}', to: '\u{1165f}', }, + Range { from: '\u{11660}', to: '\u{1166c}', }, + Range { from: '\u{1166d}', to: '\u{1167f}', }, + Range { from: '\u{11680}', to: '\u{116b7}', }, + Range { from: '\u{116b8}', to: '\u{116bf}', }, + Range { from: '\u{116c0}', to: '\u{116c9}', }, + Range { from: '\u{116ca}', to: '\u{116ff}', }, + Range { from: '\u{11700}', to: '\u{11719}', }, + Range { from: '\u{1171a}', to: '\u{1171c}', }, + Range { from: '\u{1171d}', to: '\u{1172b}', }, + Range { from: '\u{1172c}', to: '\u{1172f}', }, + Range { from: '\u{11730}', to: '\u{1173f}', }, + Range { from: '\u{11740}', to: '\u{1189f}', }, + Range { from: '\u{118a0}', to: '\u{118bf}', }, + Range { from: '\u{118c0}', to: '\u{118f2}', }, + Range { from: '\u{118f3}', to: '\u{118fe}', }, + Range { from: '\u{118ff}', to: 
'\u{118ff}', }, + Range { from: '\u{11900}', to: '\u{119ff}', }, + Range { from: '\u{11a00}', to: '\u{11a47}', }, + Range { from: '\u{11a48}', to: '\u{11a4f}', }, + Range { from: '\u{11a50}', to: '\u{11a83}', }, + Range { from: '\u{11a84}', to: '\u{11a85}', }, + Range { from: '\u{11a86}', to: '\u{11a9c}', }, + Range { from: '\u{11a9d}', to: '\u{11a9d}', }, + Range { from: '\u{11a9e}', to: '\u{11aa2}', }, + Range { from: '\u{11aa3}', to: '\u{11abf}', }, + Range { from: '\u{11ac0}', to: '\u{11af8}', }, + Range { from: '\u{11af9}', to: '\u{11bff}', }, + Range { from: '\u{11c00}', to: '\u{11c08}', }, + Range { from: '\u{11c09}', to: '\u{11c09}', }, + Range { from: '\u{11c0a}', to: '\u{11c36}', }, + Range { from: '\u{11c37}', to: '\u{11c37}', }, + Range { from: '\u{11c38}', to: '\u{11c45}', }, + Range { from: '\u{11c46}', to: '\u{11c4f}', }, + Range { from: '\u{11c50}', to: '\u{11c6c}', }, + Range { from: '\u{11c6d}', to: '\u{11c6f}', }, + Range { from: '\u{11c70}', to: '\u{11c8f}', }, + Range { from: '\u{11c90}', to: '\u{11c91}', }, + Range { from: '\u{11c92}', to: '\u{11ca7}', }, + Range { from: '\u{11ca8}', to: '\u{11ca8}', }, + Range { from: '\u{11ca9}', to: '\u{11cb6}', }, + Range { from: '\u{11cb7}', to: '\u{11cff}', }, + Range { from: '\u{11d00}', to: '\u{11d06}', }, + Range { from: '\u{11d07}', to: '\u{11d07}', }, + Range { from: '\u{11d08}', to: '\u{11d09}', }, + Range { from: '\u{11d0a}', to: '\u{11d0a}', }, + Range { from: '\u{11d0b}', to: '\u{11d36}', }, + Range { from: '\u{11d37}', to: '\u{11d39}', }, + Range { from: '\u{11d3a}', to: '\u{11d3b}', }, + Range { from: '\u{11d3c}', to: '\u{11d3d}', }, + Range { from: '\u{11d3e}', to: '\u{11d3e}', }, + Range { from: '\u{11d3f}', to: '\u{11d47}', }, + Range { from: '\u{11d48}', to: '\u{11d4f}', }, + Range { from: '\u{11d50}', to: '\u{11d59}', }, + Range { from: '\u{11d5a}', to: '\u{11fff}', }, + Range { from: '\u{12000}', to: '\u{12399}', }, + Range { from: '\u{1239a}', to: '\u{123ff}', }, + Range { from: '\u{12400}', to: '\u{1246e}', }, + Range { from: '\u{1246f}', to: '\u{1246f}', }, + Range { from: '\u{12470}', to: '\u{12474}', }, + Range { from: '\u{12475}', to: '\u{1247f}', }, + Range { from: '\u{12480}', to: '\u{12543}', }, + Range { from: '\u{12544}', to: '\u{12fff}', }, + Range { from: '\u{13000}', to: '\u{1342e}', }, + Range { from: '\u{1342f}', to: '\u{143ff}', }, + Range { from: '\u{14400}', to: '\u{14646}', }, + Range { from: '\u{14647}', to: '\u{167ff}', }, + Range { from: '\u{16800}', to: '\u{16a38}', }, + Range { from: '\u{16a39}', to: '\u{16a3f}', }, + Range { from: '\u{16a40}', to: '\u{16a5e}', }, + Range { from: '\u{16a5f}', to: '\u{16a5f}', }, + Range { from: '\u{16a60}', to: '\u{16a69}', }, + Range { from: '\u{16a6a}', to: '\u{16a6d}', }, + Range { from: '\u{16a6e}', to: '\u{16a6f}', }, + Range { from: '\u{16a70}', to: '\u{16acf}', }, + Range { from: '\u{16ad0}', to: '\u{16aed}', }, + Range { from: '\u{16aee}', to: '\u{16aef}', }, + Range { from: '\u{16af0}', to: '\u{16af5}', }, + Range { from: '\u{16af6}', to: '\u{16aff}', }, + Range { from: '\u{16b00}', to: '\u{16b45}', }, + Range { from: '\u{16b46}', to: '\u{16b4f}', }, + Range { from: '\u{16b50}', to: '\u{16b59}', }, + Range { from: '\u{16b5a}', to: '\u{16b5a}', }, + Range { from: '\u{16b5b}', to: '\u{16b61}', }, + Range { from: '\u{16b62}', to: '\u{16b62}', }, + Range { from: '\u{16b63}', to: '\u{16b77}', }, + Range { from: '\u{16b78}', to: '\u{16b7c}', }, + Range { from: '\u{16b7d}', to: '\u{16b8f}', }, + Range { from: '\u{16b90}', to: '\u{16eff}', }, + Range { 
from: '\u{16f00}', to: '\u{16f44}', }, + Range { from: '\u{16f45}', to: '\u{16f4f}', }, + Range { from: '\u{16f50}', to: '\u{16f7e}', }, + Range { from: '\u{16f7f}', to: '\u{16f8e}', }, + Range { from: '\u{16f8f}', to: '\u{16f9f}', }, + Range { from: '\u{16fa0}', to: '\u{16fdf}', }, + Range { from: '\u{16fe0}', to: '\u{16fe1}', }, + Range { from: '\u{16fe2}', to: '\u{16fff}', }, + Range { from: '\u{17000}', to: '\u{187ec}', }, + Range { from: '\u{187ed}', to: '\u{187ff}', }, + Range { from: '\u{18800}', to: '\u{18af2}', }, + Range { from: '\u{18af3}', to: '\u{1afff}', }, + Range { from: '\u{1b000}', to: '\u{1b11e}', }, + Range { from: '\u{1b11f}', to: '\u{1b16f}', }, + Range { from: '\u{1b170}', to: '\u{1b2fb}', }, + Range { from: '\u{1b2fc}', to: '\u{1bbff}', }, + Range { from: '\u{1bc00}', to: '\u{1bc6a}', }, + Range { from: '\u{1bc6b}', to: '\u{1bc6f}', }, + Range { from: '\u{1bc70}', to: '\u{1bc7c}', }, + Range { from: '\u{1bc7d}', to: '\u{1bc7f}', }, + Range { from: '\u{1bc80}', to: '\u{1bc88}', }, + Range { from: '\u{1bc89}', to: '\u{1bc8f}', }, + Range { from: '\u{1bc90}', to: '\u{1bc99}', }, + Range { from: '\u{1bc9a}', to: '\u{1bc9b}', }, + Range { from: '\u{1bc9c}', to: '\u{1bc9f}', }, + Range { from: '\u{1bca0}', to: '\u{1bca3}', }, + Range { from: '\u{1bca4}', to: '\u{1cfff}', }, + Range { from: '\u{1d000}', to: '\u{1d0f5}', }, + Range { from: '\u{1d0f6}', to: '\u{1d0ff}', }, + Range { from: '\u{1d100}', to: '\u{1d126}', }, + Range { from: '\u{1d127}', to: '\u{1d128}', }, + Range { from: '\u{1d129}', to: '\u{1d15d}', }, + Range { from: '\u{1d15e}', to: '\u{1d164}', }, + Range { from: '\u{1d165}', to: '\u{1d172}', }, + Range { from: '\u{1d173}', to: '\u{1d17a}', }, + Range { from: '\u{1d17b}', to: '\u{1d1ba}', }, + Range { from: '\u{1d1bb}', to: '\u{1d1c0}', }, + Range { from: '\u{1d1c1}', to: '\u{1d1e8}', }, + Range { from: '\u{1d1e9}', to: '\u{1d1ff}', }, + Range { from: '\u{1d200}', to: '\u{1d245}', }, + Range { from: '\u{1d246}', to: '\u{1d2ff}', }, + Range { from: '\u{1d300}', to: '\u{1d356}', }, + Range { from: '\u{1d357}', to: '\u{1d35f}', }, + Range { from: '\u{1d360}', to: '\u{1d371}', }, + Range { from: '\u{1d372}', to: '\u{1d3ff}', }, + Range { from: '\u{1d400}', to: '\u{1d49f}', }, + Range { from: '\u{1d4a0}', to: '\u{1d4a1}', }, + Range { from: '\u{1d4a2}', to: '\u{1d4a2}', }, + Range { from: '\u{1d4a3}', to: '\u{1d4a4}', }, + Range { from: '\u{1d4a5}', to: '\u{1d4a6}', }, + Range { from: '\u{1d4a7}', to: '\u{1d4a8}', }, + Range { from: '\u{1d4a9}', to: '\u{1d50a}', }, + Range { from: '\u{1d50b}', to: '\u{1d50c}', }, + Range { from: '\u{1d50d}', to: '\u{1d546}', }, + Range { from: '\u{1d547}', to: '\u{1d549}', }, + Range { from: '\u{1d54a}', to: '\u{1d6a5}', }, + Range { from: '\u{1d6a6}', to: '\u{1d6a7}', }, + Range { from: '\u{1d6a8}', to: '\u{1d6d2}', }, + Range { from: '\u{1d6d3}', to: '\u{1d6d4}', }, + Range { from: '\u{1d6d5}', to: '\u{1d70c}', }, + Range { from: '\u{1d70d}', to: '\u{1d70e}', }, + Range { from: '\u{1d70f}', to: '\u{1d746}', }, + Range { from: '\u{1d747}', to: '\u{1d748}', }, + Range { from: '\u{1d749}', to: '\u{1d780}', }, + Range { from: '\u{1d781}', to: '\u{1d782}', }, + Range { from: '\u{1d783}', to: '\u{1d7ba}', }, + Range { from: '\u{1d7bb}', to: '\u{1d7bc}', }, + Range { from: '\u{1d7bd}', to: '\u{1d7c9}', }, + Range { from: '\u{1d7ca}', to: '\u{1d7cb}', }, + Range { from: '\u{1d7cc}', to: '\u{1d7cd}', }, + Range { from: '\u{1d7ce}', to: '\u{1d7ff}', }, + Range { from: '\u{1d800}', to: '\u{1da8b}', }, + Range { from: '\u{1da8c}', to: 
'\u{1da9a}', }, + Range { from: '\u{1da9b}', to: '\u{1da9f}', }, + Range { from: '\u{1daa0}', to: '\u{1daa0}', }, + Range { from: '\u{1daa1}', to: '\u{1daaf}', }, + Range { from: '\u{1dab0}', to: '\u{1dfff}', }, + Range { from: '\u{1e000}', to: '\u{1e006}', }, + Range { from: '\u{1e007}', to: '\u{1e007}', }, + Range { from: '\u{1e008}', to: '\u{1e018}', }, + Range { from: '\u{1e019}', to: '\u{1e01a}', }, + Range { from: '\u{1e01b}', to: '\u{1e021}', }, + Range { from: '\u{1e022}', to: '\u{1e022}', }, + Range { from: '\u{1e023}', to: '\u{1e024}', }, + Range { from: '\u{1e025}', to: '\u{1e025}', }, + Range { from: '\u{1e026}', to: '\u{1e02a}', }, + Range { from: '\u{1e02b}', to: '\u{1e7ff}', }, + Range { from: '\u{1e800}', to: '\u{1e8c4}', }, + Range { from: '\u{1e8c5}', to: '\u{1e8c6}', }, + Range { from: '\u{1e8c7}', to: '\u{1e8d6}', }, + Range { from: '\u{1e8d7}', to: '\u{1e8ff}', }, + Range { from: '\u{1e900}', to: '\u{1e921}', }, + Range { from: '\u{1e922}', to: '\u{1e94a}', }, + Range { from: '\u{1e94b}', to: '\u{1e94f}', }, + Range { from: '\u{1e950}', to: '\u{1e959}', }, + Range { from: '\u{1e95a}', to: '\u{1e95d}', }, + Range { from: '\u{1e95e}', to: '\u{1e95f}', }, + Range { from: '\u{1e960}', to: '\u{1edff}', }, + Range { from: '\u{1ee00}', to: '\u{1ee24}', }, + Range { from: '\u{1ee25}', to: '\u{1ee26}', }, + Range { from: '\u{1ee27}', to: '\u{1ee3b}', }, + Range { from: '\u{1ee3c}', to: '\u{1ee41}', }, + Range { from: '\u{1ee42}', to: '\u{1ee42}', }, + Range { from: '\u{1ee43}', to: '\u{1ee46}', }, + Range { from: '\u{1ee47}', to: '\u{1ee54}', }, + Range { from: '\u{1ee55}', to: '\u{1ee56}', }, + Range { from: '\u{1ee57}', to: '\u{1ee64}', }, + Range { from: '\u{1ee65}', to: '\u{1ee66}', }, + Range { from: '\u{1ee67}', to: '\u{1ee9b}', }, + Range { from: '\u{1ee9c}', to: '\u{1eea0}', }, + Range { from: '\u{1eea1}', to: '\u{1eebb}', }, + Range { from: '\u{1eebc}', to: '\u{1eeef}', }, + Range { from: '\u{1eef0}', to: '\u{1eef1}', }, + Range { from: '\u{1eef2}', to: '\u{1efff}', }, + Range { from: '\u{1f000}', to: '\u{1f02b}', }, + Range { from: '\u{1f02c}', to: '\u{1f02f}', }, + Range { from: '\u{1f030}', to: '\u{1f093}', }, + Range { from: '\u{1f094}', to: '\u{1f09f}', }, + Range { from: '\u{1f0a0}', to: '\u{1f0ae}', }, + Range { from: '\u{1f0af}', to: '\u{1f0b0}', }, + Range { from: '\u{1f0b1}', to: '\u{1f0bf}', }, + Range { from: '\u{1f0c0}', to: '\u{1f0c0}', }, + Range { from: '\u{1f0c1}', to: '\u{1f0cf}', }, + Range { from: '\u{1f0d0}', to: '\u{1f0d0}', }, + Range { from: '\u{1f0d1}', to: '\u{1f0f5}', }, + Range { from: '\u{1f0f6}', to: '\u{1f100}', }, + Range { from: '\u{1f101}', to: '\u{1f10a}', }, + Range { from: '\u{1f10b}', to: '\u{1f10c}', }, + Range { from: '\u{1f10d}', to: '\u{1f10f}', }, + Range { from: '\u{1f110}', to: '\u{1f14f}', }, + Range { from: '\u{1f150}', to: '\u{1f169}', }, + Range { from: '\u{1f16a}', to: '\u{1f16b}', }, + Range { from: '\u{1f16c}', to: '\u{1f16f}', }, + Range { from: '\u{1f170}', to: '\u{1f18f}', }, + Range { from: '\u{1f190}', to: '\u{1f190}', }, + Range { from: '\u{1f191}', to: '\u{1f1ac}', }, + Range { from: '\u{1f1ad}', to: '\u{1f1e5}', }, + Range { from: '\u{1f1e6}', to: '\u{1f1ff}', }, + Range { from: '\u{1f200}', to: '\u{1f202}', }, + Range { from: '\u{1f203}', to: '\u{1f20f}', }, + Range { from: '\u{1f210}', to: '\u{1f23b}', }, + Range { from: '\u{1f23c}', to: '\u{1f23f}', }, + Range { from: '\u{1f240}', to: '\u{1f248}', }, + Range { from: '\u{1f249}', to: '\u{1f24f}', }, + Range { from: '\u{1f250}', to: '\u{1f251}', }, + Range { 
from: '\u{1f252}', to: '\u{1f25f}', }, + Range { from: '\u{1f260}', to: '\u{1f265}', }, + Range { from: '\u{1f266}', to: '\u{1f2ff}', }, + Range { from: '\u{1f300}', to: '\u{1f6d4}', }, + Range { from: '\u{1f6d5}', to: '\u{1f6df}', }, + Range { from: '\u{1f6e0}', to: '\u{1f6ec}', }, + Range { from: '\u{1f6ed}', to: '\u{1f6ef}', }, + Range { from: '\u{1f6f0}', to: '\u{1f6f8}', }, + Range { from: '\u{1f6f9}', to: '\u{1f6ff}', }, + Range { from: '\u{1f700}', to: '\u{1f773}', }, + Range { from: '\u{1f774}', to: '\u{1f77f}', }, + Range { from: '\u{1f780}', to: '\u{1f7d4}', }, + Range { from: '\u{1f7d5}', to: '\u{1f7ff}', }, + Range { from: '\u{1f800}', to: '\u{1f80b}', }, + Range { from: '\u{1f80c}', to: '\u{1f80f}', }, + Range { from: '\u{1f810}', to: '\u{1f847}', }, + Range { from: '\u{1f848}', to: '\u{1f84f}', }, + Range { from: '\u{1f850}', to: '\u{1f859}', }, + Range { from: '\u{1f85a}', to: '\u{1f85f}', }, + Range { from: '\u{1f860}', to: '\u{1f887}', }, + Range { from: '\u{1f888}', to: '\u{1f88f}', }, + Range { from: '\u{1f890}', to: '\u{1f8ad}', }, + Range { from: '\u{1f8ae}', to: '\u{1f8ff}', }, + Range { from: '\u{1f900}', to: '\u{1f90b}', }, + Range { from: '\u{1f90c}', to: '\u{1f90f}', }, + Range { from: '\u{1f910}', to: '\u{1f93e}', }, + Range { from: '\u{1f93f}', to: '\u{1f93f}', }, + Range { from: '\u{1f940}', to: '\u{1f94c}', }, + Range { from: '\u{1f94d}', to: '\u{1f94f}', }, + Range { from: '\u{1f950}', to: '\u{1f96b}', }, + Range { from: '\u{1f96c}', to: '\u{1f97f}', }, + Range { from: '\u{1f980}', to: '\u{1f997}', }, + Range { from: '\u{1f998}', to: '\u{1f9bf}', }, + Range { from: '\u{1f9c0}', to: '\u{1f9c0}', }, + Range { from: '\u{1f9c1}', to: '\u{1f9cf}', }, + Range { from: '\u{1f9d0}', to: '\u{1f9e6}', }, + Range { from: '\u{1f9e7}', to: '\u{1ffff}', }, + Range { from: '\u{20000}', to: '\u{2a6d6}', }, + Range { from: '\u{2a6d7}', to: '\u{2a6ff}', }, + Range { from: '\u{2a700}', to: '\u{2b734}', }, + Range { from: '\u{2b735}', to: '\u{2b73f}', }, + Range { from: '\u{2b740}', to: '\u{2b81d}', }, + Range { from: '\u{2b81e}', to: '\u{2b81f}', }, + Range { from: '\u{2b820}', to: '\u{2cea1}', }, + Range { from: '\u{2cea2}', to: '\u{2ceaf}', }, + Range { from: '\u{2ceb0}', to: '\u{2ebe0}', }, + Range { from: '\u{2ebe1}', to: '\u{2f7ff}', }, + Range { from: '\u{2f800}', to: '\u{2f830}', }, + Range { from: '\u{2f831}', to: '\u{2f833}', }, + Range { from: '\u{2f834}', to: '\u{2f844}', }, + Range { from: '\u{2f845}', to: '\u{2f846}', }, + Range { from: '\u{2f847}', to: '\u{2f869}', }, + Range { from: '\u{2f86a}', to: '\u{2f86b}', }, + Range { from: '\u{2f86c}', to: '\u{2f890}', }, + Range { from: '\u{2f891}', to: '\u{2f892}', }, + Range { from: '\u{2f893}', to: '\u{2f893}', }, + Range { from: '\u{2f894}', to: '\u{2f895}', }, + Range { from: '\u{2f896}', to: '\u{2f92b}', }, + Range { from: '\u{2f92c}', to: '\u{2f92d}', }, + Range { from: '\u{2f92e}', to: '\u{2f945}', }, + Range { from: '\u{2f946}', to: '\u{2f947}', }, + Range { from: '\u{2f948}', to: '\u{2f95c}', }, + Range { from: '\u{2f95d}', to: '\u{2f95e}', }, + Range { from: '\u{2f95f}', to: '\u{2f9fd}', }, + Range { from: '\u{2f9fe}', to: '\u{2f9ff}', }, + Range { from: '\u{2fa00}', to: '\u{2fa1d}', }, + Range { from: '\u{2fa1e}', to: '\u{e00ff}', }, + Range { from: '\u{e0100}', to: '\u{e01ef}', }, + Range { from: '\u{e01f0}', to: '\u{10ffff}', }, +]; + +static INDEX_TABLE: &'static [u16] = &[ + 32768, + 32769, + 32770, + 32771, + 32772, + 5, + 32799, + 32800, + 32801, + 32802, + 32803, + 32804, + 37, + 32808, + 41, + 32812, + 
45, + 32817, + 50, + 32858, + 91, + 32909, + 142, + 32913, + 146, + 32920, + 153, + 32996, + 229, + 33008, + 241, + 33023, + 256, + 33037, + 33038, + 33039, + 33040, + 33041, + 33042, + 275, + 33058, + 291, + 33076, + 33077, + 310, + 33141, + 374, + 33147, + 380, + 33162, + 395, + 33172, + 405, + 33179, + 412, + 33185, + 418, + 33192, + 33193, + 33194, + 427, + 33203, + 33204, + 33205, + 438, + 33208, + 441, + 33249, + 33250, + 33251, + 484, + 33296, + 529, + 33348, + 581, + 33382, + 615, + 33451, + 684, + 33587, + 33588, + 33589, + 33590, + 823, + 33593, + 33594, + 33595, + 33596, + 33597, + 33598, + 33599, + 33600, + 33601, + 33602, + 33603, + 33604, + 33605, + 838, + 33610, + 33611, + 33612, + 33613, + 33614, + 33615, + 33616, + 33617, + 33618, + 33619, + 33620, + 33621, + 33622, + 33623, + 33624, + 33625, + 858, + 33628, + 33629, + 33630, + 33631, + 33632, + 33633, + 33634, + 33635, + 33636, + 869, + 33645, + 33646, + 33647, + 33648, + 33649, + 33650, + 33651, + 33652, + 33653, + 886, + 33656, + 33657, + 33658, + 33659, + 33660, + 33661, + 33662, + 33663, + 33664, + 33665, + 33666, + 899, + 33671, + 33672, + 33673, + 33674, + 33675, + 33676, + 33677, + 33678, + 33679, + 33680, + 33681, + 33682, + 33683, + 916, + 33691, + 33692, + 925, + 33695, + 33696, + 33697, + 33698, + 33699, + 33700, + 33701, + 33702, + 935, + 33709, + 33710, + 33711, + 33712, + 33713, + 33714, + 33715, + 33716, + 33717, + 33718, + 33719, + 33720, + 33721, + 33722, + 33723, + 33724, + 33725, + 33726, + 33727, + 33728, + 33729, + 33730, + 33731, + 33732, + 33733, + 33734, + 33735, + 33736, + 33737, + 33738, + 33739, + 33740, + 33741, + 33742, + 33743, + 33744, + 33745, + 33746, + 33747, + 33748, + 33749, + 33750, + 33751, + 33752, + 33753, + 33754, + 33755, + 33756, + 33757, + 33758, + 33759, + 33760, + 33761, + 994, + 33765, + 33766, + 33767, + 33768, + 33769, + 33770, + 33771, + 33772, + 33773, + 33774, + 33775, + 33776, + 33777, + 1010, + 33781, + 33782, + 33783, + 33784, + 33785, + 33786, + 33787, + 33788, + 33789, + 33790, + 33791, + 33792, + 33793, + 33794, + 33795, + 33796, + 33797, + 33798, + 33799, + 33800, + 33801, + 33802, + 33803, + 33804, + 33805, + 33806, + 33807, + 33808, + 33809, + 33810, + 33811, + 33812, + 33813, + 33814, + 33815, + 33816, + 33817, + 33818, + 33819, + 33820, + 33821, + 33822, + 33823, + 33824, + 33825, + 33826, + 33827, + 33828, + 33829, + 33830, + 33831, + 33832, + 33833, + 33834, + 33835, + 33836, + 33837, + 33838, + 33839, + 33840, + 33841, + 33842, + 33843, + 33844, + 1077, + 33847, + 33848, + 33849, + 33850, + 33851, + 33852, + 33853, + 33854, + 33855, + 33856, + 33857, + 33858, + 33859, + 33860, + 33861, + 33862, + 33863, + 33864, + 33865, + 33866, + 33867, + 33868, + 33869, + 33870, + 33871, + 33872, + 33873, + 33874, + 33875, + 1108, + 33878, + 33879, + 33880, + 33881, + 33882, + 33883, + 1116, + 33887, + 33888, + 33889, + 33890, + 33891, + 33892, + 33893, + 33894, + 33895, + 33896, + 33897, + 33898, + 33899, + 1132, + 33902, + 33903, + 1136, + 33906, + 33907, + 33908, + 33909, + 33910, + 33911, + 33912, + 33913, + 1146, + 33918, + 33919, + 33920, + 33921, + 33922, + 33923, + 33924, + 33925, + 33926, + 33927, + 1160, + 33931, + 33932, + 33933, + 33934, + 1167, + 33937, + 33938, + 33939, + 33940, + 33941, + 33942, + 33943, + 33944, + 33945, + 33946, + 33947, + 33948, + 33949, + 33950, + 33951, + 33952, + 33953, + 33954, + 33955, + 33956, + 33957, + 1190, + 33965, + 33966, + 33967, + 33968, + 33969, + 33970, + 33971, + 33972, + 33973, + 33974, + 33975, + 33976, + 33977, + 
33978, + 33979, + 33980, + 33981, + 33982, + 33983, + 33984, + 33985, + 33986, + 33987, + 33988, + 33989, + 33990, + 33991, + 33992, + 33993, + 33994, + 33995, + 33996, + 33997, + 33998, + 33999, + 34000, + 34001, + 1234, + 34005, + 34006, + 34007, + 34008, + 34009, + 34010, + 34011, + 34012, + 34013, + 34014, + 34015, + 1248, + 34019, + 34020, + 34021, + 34022, + 34023, + 34024, + 34025, + 34026, + 34027, + 34028, + 34029, + 34030, + 34031, + 34032, + 34033, + 34034, + 1267, + 34041, + 34042, + 34043, + 34044, + 34045, + 34046, + 34047, + 34048, + 34049, + 34050, + 34051, + 34052, + 34053, + 34054, + 34055, + 34056, + 34057, + 34058, + 34059, + 34060, + 34061, + 34062, + 34063, + 34064, + 34065, + 34066, + 34067, + 34068, + 34069, + 34070, + 34071, + 34072, + 34073, + 34074, + 34075, + 34076, + 34077, + 34078, + 34079, + 34080, + 34081, + 34082, + 34083, + 34084, + 34085, + 34086, + 34087, + 34088, + 34089, + 34090, + 34091, + 34092, + 34093, + 34094, + 34095, + 34096, + 34097, + 34098, + 34099, + 34100, + 34101, + 34102, + 34103, + 34104, + 34105, + 34106, + 34107, + 34108, + 34109, + 34110, + 34111, + 34112, + 34113, + 34114, + 34115, + 34116, + 34117, + 34118, + 34119, + 34120, + 34121, + 34122, + 34123, + 34124, + 34125, + 1358, + 34130, + 1363, + 34134, + 34135, + 34136, + 34137, + 34138, + 34139, + 1372, + 34203, + 34204, + 34205, + 1438, + 34243, + 34244, + 34245, + 1478, + 34395, + 1628, + 34398, + 1631, + 34496, + 1729, + 34505, + 34506, + 1739, + 34513, + 34514, + 1747, + 34523, + 1756, + 34532, + 34533, + 1766, + 34540, + 34541, + 1774, + 34550, + 1783, + 34573, + 1806, + 34622, + 1855, + 34653, + 34654, + 34655, + 34656, + 1889, + 34665, + 34666, + 34667, + 1900, + 34676, + 1909, + 34691, + 34692, + 34693, + 34694, + 1927, + 34697, + 34698, + 34699, + 34700, + 34701, + 34702, + 34703, + 34704, + 1937, + 34710, + 1943, + 34714, + 1947, + 34718, + 34719, + 34720, + 1953, + 34723, + 34724, + 34725, + 1958, + 34728, + 1961, + 34770, + 34771, + 34772, + 34773, + 34774, + 34775, + 34776, + 2009, + 34788, + 34789, + 34790, + 34791, + 2024, + 34795, + 2028, + 34798, + 34799, + 2032, + 34815, + 2048, + 34828, + 2061, + 34831, + 34832, + 2065, + 34836, + 2069, + 34885, + 34886, + 34887, + 34888, + 34889, + 34890, + 34891, + 2124, + 34897, + 34898, + 34899, + 34900, + 34901, + 2134, + 34904, + 34905, + 34906, + 34907, + 2140, + 34948, + 2181, + 35028, + 35029, + 35030, + 2263, + 35034, + 35035, + 35036, + 35037, + 35038, + 35039, + 35040, + 35041, + 35042, + 35043, + 35044, + 35045, + 35046, + 35047, + 2280, + 35096, + 2329, + 35103, + 2336, + 35116, + 35117, + 35118, + 2351, + 35222, + 2455, + 35226, + 2459, + 35229, + 35230, + 2463, + 35233, + 35234, + 35235, + 35236, + 35237, + 2470, + 35240, + 35241, + 35242, + 35243, + 35244, + 35245, + 35246, + 35247, + 35248, + 35249, + 35250, + 35251, + 35252, + 35253, + 35254, + 35255, + 35256, + 35257, + 35258, + 35259, + 35260, + 35261, + 35262, + 35263, + 35264, + 35265, + 35266, + 35267, + 2500, + 35482, + 2715, + 35486, + 2719, + 35492, + 35493, + 35494, + 35495, + 35496, + 2729, + 35499, + 35500, + 35501, + 35502, + 35503, + 35504, + 35505, + 2738, + 35601, + 2834, + 35616, + 35617, + 35618, + 35619, + 35620, + 2853, + 35693, + 2926, + 36126, + 36127, + 36128, + 36129, + 36130, + 36131, + 36132, + 36133, + 36134, + 36135, + 3368, + 36181, + 3414, + 36212, + 36213, + 36214, + 3447, + 36228, + 3461, + 36292, + 3525, + 36307, + 3540, + 36311, + 3544, + 36315, + 3548, + 36350, + 3583, + 36354, + 36355, + 36356, + 36357, + 36358, + 36359, + 
36360, + 36361, + 36362, + 36363, + 36364, + 36365, + 36366, + 36367, + 36368, + 36369, + 36370, + 36371, + 36372, + 36373, + 36374, + 36375, + 36376, + 36377, + 36378, + 36379, + 36380, + 36381, + 36382, + 36383, + 36384, + 36385, + 36386, + 36387, + 36388, + 36389, + 36390, + 36391, + 36392, + 36393, + 36394, + 36395, + 36396, + 3629, + 36401, + 36402, + 3635, + 36483, + 36484, + 36485, + 36486, + 36487, + 36488, + 36489, + 36490, + 36491, + 36492, + 3725, + 36500, + 3733, + 36762, + 3995, + 36766, + 3999, + 36781, + 4014, + 36784, + 4017, + 36836, + 4069, + 36852, + 4085, + 36959, + 4192, + 36965, + 36966, + 4199, + 36972, + 4205, + 37024, + 37025, + 37026, + 37027, + 37028, + 37029, + 37030, + 37031, + 37032, + 37033, + 37034, + 37035, + 37036, + 37037, + 37038, + 37039, + 37040, + 37041, + 37042, + 37043, + 37044, + 37045, + 37046, + 37047, + 37048, + 37049, + 37050, + 37051, + 37052, + 37053, + 37054, + 37055, + 37056, + 37057, + 37058, + 37059, + 37060, + 37061, + 37062, + 37063, + 37064, + 37065, + 37066, + 37067, + 37068, + 37069, + 37070, + 37071, + 37072, + 37073, + 37074, + 4307, + 37391, + 37392, + 37393, + 37394, + 37395, + 4628, + 37401, + 4634, + 37407, + 37408, + 37409, + 37410, + 37411, + 37412, + 37413, + 37414, + 37415, + 37416, + 37417, + 37418, + 4651, + 37422, + 4655, + 37427, + 4660, + 37433, + 37434, + 37435, + 4668, + 37443, + 4676, + 37449, + 4682, + 37453, + 4686, + 37496, + 4729, + 37511, + 37512, + 4745, + 37522, + 37523, + 4756, + 37527, + 4760, + 37544, + 4777, + 37547, + 37548, + 4781, + 37551, + 4784, + 37576, + 4809, + 37594, + 37595, + 37596, + 37597, + 37598, + 37599, + 37600, + 37601, + 37602, + 37603, + 37604, + 37605, + 37606, + 37607, + 37608, + 37609, + 37610, + 37611, + 37612, + 37613, + 37614, + 37615, + 37616, + 37617, + 37618, + 37619, + 37620, + 37621, + 37622, + 37623, + 37624, + 37625, + 37626, + 37627, + 37628, + 37629, + 37630, + 37631, + 37632, + 37633, + 4866, + 37826, + 5059, + 37833, + 5066, + 37840, + 5073, + 37847, + 5080, + 37851, + 5084, + 37867, + 37868, + 37869, + 37870, + 37871, + 37872, + 37873, + 37874, + 37875, + 37876, + 37877, + 37878, + 37879, + 37880, + 37881, + 37882, + 37883, + 37884, + 37885, + 37886, + 37887, + 37888, + 37889, + 37890, + 37891, + 37892, + 37893, + 37894, + 37895, + 37896, + 37897, + 37898, + 37899, + 37900, + 37901, + 37902, + 37903, + 37904, + 37905, + 37906, + 37907, + 37908, + 37909, + 37910, + 37911, + 5144, + 37952, + 37953, + 37954, + 37955, + 5188, + 37992, + 37993, + 37994, + 37995, + 37996, + 37997, + 37998, + 37999, + 38000, + 38001, + 38002, + 38003, + 38004, + 38005, + 38006, + 38007, + 38008, + 5241, + 38011, + 38012, + 38013, + 38014, + 38015, + 38016, + 38017, + 38018, + 38019, + 38020, + 38021, + 38022, + 38023, + 38024, + 38025, + 38026, + 38027, + 38028, + 38029, + 38030, + 38031, + 38032, + 38033, + 38034, + 38035, + 38036, + 38037, + 38038, + 38039, + 38040, + 38041, + 38042, + 38043, + 38044, + 38045, + 38046, + 38047, + 38048, + 38049, + 38050, + 38051, + 38052, + 38053, + 38054, + 38055, + 38056, + 38057, + 38058, + 38059, + 38060, + 38061, + 38062, + 38063, + 38064, + 38065, + 38066, + 38067, + 38068, + 38069, + 38070, + 38071, + 38072, + 5305, + 38124, + 38125, + 38126, + 38127, + 38128, + 38129, + 38130, + 38131, + 38132, + 38133, + 38134, + 38135, + 38136, + 38137, + 38138, + 38139, + 38140, + 38141, + 38142, + 38143, + 38144, + 38145, + 38146, + 38147, + 38148, + 38149, + 38150, + 38151, + 38152, + 38153, + 38154, + 38155, + 38156, + 38157, + 38158, + 38159, + 5392, + 
38163, + 38164, + 38165, + 38166, + 38167, + 38168, + 38169, + 38170, + 38171, + 38172, + 38173, + 38174, + 38175, + 38176, + 38177, + 38178, + 38179, + 38180, + 38181, + 38182, + 38183, + 38184, + 38185, + 38186, + 38187, + 38188, + 38189, + 38190, + 38191, + 38192, + 38193, + 38194, + 38195, + 38196, + 38197, + 38198, + 38199, + 38200, + 38201, + 38202, + 38203, + 5436, + 38208, + 38209, + 38210, + 38211, + 38212, + 38213, + 38214, + 38215, + 38216, + 38217, + 38218, + 38219, + 38220, + 38221, + 38222, + 38223, + 38224, + 38225, + 38226, + 38227, + 38228, + 38229, + 38230, + 38231, + 38232, + 5465, + 38265, + 38266, + 38267, + 38268, + 38269, + 38270, + 38271, + 38272, + 38273, + 38274, + 38275, + 38276, + 38277, + 38278, + 38279, + 38280, + 38281, + 38282, + 38283, + 38284, + 38285, + 38286, + 38287, + 38288, + 38289, + 38290, + 38291, + 38292, + 38293, + 38294, + 38295, + 38296, + 38297, + 38298, + 5531, + 38301, + 38302, + 38303, + 38304, + 38305, + 38306, + 38307, + 38308, + 38309, + 38310, + 38311, + 38312, + 38313, + 38314, + 38315, + 38316, + 38317, + 38318, + 38319, + 38320, + 38321, + 38322, + 38323, + 38324, + 38325, + 38326, + 38327, + 38328, + 38329, + 38330, + 38331, + 38332, + 38333, + 38334, + 38335, + 38336, + 38337, + 38338, + 38339, + 38340, + 38341, + 38342, + 38343, + 38344, + 38345, + 38346, + 38347, + 38348, + 38349, + 38350, + 38351, + 38352, + 38353, + 38354, + 38355, + 38356, + 38357, + 38358, + 38359, + 38360, + 38361, + 38362, + 38363, + 38364, + 38365, + 38366, + 38367, + 38368, + 38369, + 38370, + 38371, + 38372, + 5605, + 38380, + 38381, + 38382, + 5615, + 38389, + 38390, + 38391, + 38392, + 38393, + 38394, + 38395, + 38396, + 5629, + 38557, + 38558, + 38559, + 5792, + 38562, + 5795, + 38661, + 5894, + 38720, + 5953, + 39069, + 6302, + 39113, + 6346, + 39170, + 6403, + 39227, + 6460, + 39284, + 6517, + 39341, + 6574, + 39355, + 39356, + 6589, + 39407, + 39408, + 39409, + 39410, + 39411, + 39412, + 39413, + 39414, + 39415, + 39416, + 39417, + 39418, + 39419, + 39420, + 39421, + 39422, + 39423, + 39424, + 39425, + 39426, + 6659, + 39461, + 39462, + 39463, + 39464, + 39465, + 39466, + 6699, + 39504, + 6737, + 39526, + 39527, + 39528, + 6761, + 39543, + 6776, + 39558, + 6791, + 39612, + 6845, + 39640, + 39641, + 39642, + 39643, + 39644, + 39645, + 39646, + 39647, + 39648, + 39649, + 39650, + 39651, + 39652, + 39653, + 39654, + 6887, + 39665, + 39666, + 6899, + 39731, + 6964, + 39734, + 39735, + 39736, + 39737, + 39738, + 39739, + 6972, + 39743, + 6976, + 39788, + 7021, + 39798, + 7031, + 39801, + 39802, + 39803, + 39804, + 39805, + 39806, + 39807, + 39808, + 39809, + 39810, + 39811, + 39812, + 39813, + 39814, + 39815, + 39816, + 39817, + 39818, + 39819, + 39820, + 39821, + 39822, + 39823, + 39824, + 39825, + 39826, + 39827, + 39828, + 39829, + 39830, + 39831, + 39832, + 39833, + 39834, + 39835, + 39836, + 39837, + 39838, + 39839, + 39840, + 39841, + 39842, + 39843, + 39844, + 39845, + 39846, + 39847, + 7080, + 39897, + 7130, + 39915, + 7148, + 39951, + 7184, + 39989, + 39990, + 39991, + 7224, + 40142, + 7375, + 40167, + 7400, + 40189, + 7422, + 40349, + 7582, + 40380, + 40381, + 40382, +]; + +static MAPPING_TABLE: &'static [Mapping] = &[ + DisallowedStd3Valid, + Valid, + DisallowedStd3Valid, + Valid, + DisallowedStd3Valid, + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }), + 
Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }), + DisallowedStd3Valid, + Valid, + DisallowedStd3Valid, + Disallowed, + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 0, byte_len: 1 }), + Valid, + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 27, byte_start_hi: 0, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }), + Valid, + Ignored, + Valid, + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 0, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 0, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 35, byte_start_hi: 0, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 0, byte_len: 2 }), + Valid, + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 0, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 0, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 49, byte_start_hi: 0, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 0, byte_len: 5 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 59, byte_start_hi: 0, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 0, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 63, byte_start_hi: 0, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 65, byte_start_hi: 0, byte_len: 2 }), + 
Mapped(StringTableSlice { byte_start_lo: 67, byte_start_hi: 0, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 69, byte_start_hi: 0, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 71, byte_start_hi: 0, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 0, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 75, byte_start_hi: 0, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 77, byte_start_hi: 0, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 79, byte_start_hi: 0, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 81, byte_start_hi: 0, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 0, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 0, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 87, byte_start_hi: 0, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 89, byte_start_hi: 0, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 91, byte_start_hi: 0, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 0, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 95, byte_start_hi: 0, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 97, byte_start_hi: 0, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 0, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 101, byte_start_hi: 0, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 103, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 105, byte_start_hi: 0, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 107, byte_start_hi: 0, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 109, byte_start_hi: 0, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 111, byte_start_hi: 0, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 113, byte_start_hi: 0, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 115, byte_start_hi: 0, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 117, byte_start_hi: 0, byte_len: 2 }), + Deviation(StringTableSlice { byte_start_lo: 119, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 121, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 123, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 125, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 127, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 129, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 133, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 135, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 137, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 139, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 141, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 145, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 147, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 149, byte_start_hi: 
0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 151, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 153, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 155, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 157, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 159, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 161, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 0, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 0, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 182, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 0, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 187, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 189, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 191, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 193, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 195, byte_start_hi: 0, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 214, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 226, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 0, byte_len: 2 }), + Valid, + 
Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 0, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 0, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 1, byte_len: 2 }), + 
Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 1, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 71, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 75, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 77, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 79, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 81, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 87, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 89, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 91, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 95, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 97, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 101, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 103, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 105, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 107, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 109, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 111, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 113, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 115, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 117, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 119, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 121, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 123, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 125, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 127, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 129, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 133, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 135, byte_start_hi: 1, byte_len: 2 }), + Valid, + 
Mapped(StringTableSlice { byte_start_lo: 137, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 139, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 141, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 145, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 147, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 149, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 151, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 153, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 155, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 157, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 159, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 161, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 173, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 175, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 1, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 182, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 1, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 187, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 189, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 191, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 193, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 195, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 197, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 199, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 201, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 203, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 205, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 207, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 209, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 211, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { 
byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }), + Valid, + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 1, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 1, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 219, byte_start_hi: 1, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 1, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 225, byte_start_hi: 1, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 1, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 231, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 233, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 235, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 237, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 239, byte_start_hi: 1, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 1, byte_len: 2 }), + Valid, + Ignored, + Valid, + Mapped(StringTableSlice { byte_start_lo: 245, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 247, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 249, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 251, byte_start_hi: 1, byte_len: 2 }), + Valid, + Disallowed, + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 253, byte_start_hi: 1, byte_len: 3 }), + Valid, + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 2, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 2, byte_len: 2 }), + Disallowed, + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 35, byte_start_hi: 0, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 2, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 2, byte_len: 2 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 2, byte_len: 2 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }), + 
Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 0, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 2, byte_len: 2 }), + Valid, + Deviation(StringTableSlice { byte_start_lo: 54, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 74, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 78, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 82, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 86, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 90, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 94, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 2, byte_len: 2 }), + Valid, + 
Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 98, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 102, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 110, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 114, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 122, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 126, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 130, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 134, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 142, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 146, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 150, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 154, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 162, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 2, byte_len: 2 }), + 
Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 182, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 194, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 214, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 226, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 252, 
byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 3, byte_len: 2 }), + Valid, + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 74, byte_start_hi: 3, byte_len: 2 }), 
+ Valid, + Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 78, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 82, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 86, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 90, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 94, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 98, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 102, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 110, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 114, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 122, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 126, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 130, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 134, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 142, byte_start_hi: 3, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 3, byte_len: 2 }), + Valid, + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 146, byte_start_hi: 3, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 3, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 150, byte_start_hi: 3, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 3, byte_len: 2 }), + Mapped(StringTableSlice { 
byte_start_lo: 154, byte_start_hi: 3, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 3, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 3, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 3, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 162, byte_start_hi: 3, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 3, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 3, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 3, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 3, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 3, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 3, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 3, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 3, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 3, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 182, byte_start_hi: 3, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 3, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 3, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 3, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 3, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 3, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 194, byte_start_hi: 3, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 3, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 3, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 3, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 3, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 3, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 3, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 3, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 3, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 3, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 214, byte_start_hi: 3, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 3, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 3, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 3, byte_len: 2 }), + Disallowed, + Valid, + Disallowed, + Valid, + Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 3, byte_len: 4 }), + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Mapped(StringTableSlice { byte_start_lo: 226, byte_start_hi: 3, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 3, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 3, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 3, byte_len: 4 }), + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + 
Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 3, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 3, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 3, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 4, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 4, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 4, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 4, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 4, byte_len: 6 }), + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 4, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 4, byte_len: 6 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 4, byte_len: 6 }), + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 4, byte_len: 6 }), + Disallowed, + Valid, + Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 4, byte_len: 6 }), + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 4, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 4, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 4, byte_len: 6 }), + Valid, + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 82, byte_start_hi: 4, byte_len: 6 }), + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 4, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 94, byte_start_hi: 4, byte_len: 6 }), + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + 
Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 4, byte_len: 6 }), + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 4, byte_len: 6 }), + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 4, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 4, byte_len: 6 }), + Valid, + Disallowed, + Valid, + Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 4, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 127, byte_start_hi: 4, byte_len: 6 }), + Valid, + Disallowed, + Valid, + Mapped(StringTableSlice { byte_start_lo: 133, byte_start_hi: 4, byte_len: 6 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 139, byte_start_hi: 4, byte_len: 6 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 145, byte_start_hi: 4, byte_len: 6 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 151, byte_start_hi: 4, byte_len: 6 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 157, byte_start_hi: 4, byte_len: 6 }), + Valid, + Disallowed, + Valid, + Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 4, byte_len: 6 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 4, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 175, byte_start_hi: 4, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 181, byte_start_hi: 4, byte_len: 9 }), + Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 4, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 4, byte_len: 9 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 205, byte_start_hi: 4, byte_len: 6 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 211, byte_start_hi: 4, byte_len: 6 }), + Valid, + Disallowed, + Valid, + Mapped(StringTableSlice { byte_start_lo: 217, byte_start_hi: 4, byte_len: 6 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 223, byte_start_hi: 4, byte_len: 6 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 229, byte_start_hi: 4, byte_len: 6 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 235, byte_start_hi: 4, byte_len: 6 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 241, byte_start_hi: 4, byte_len: 6 }), + Valid, + Disallowed, + Valid, 
+ Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 247, byte_start_hi: 4, byte_len: 3 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 4, byte_len: 3 }), + Disallowed, + Valid, + Mapped(StringTableSlice { byte_start_lo: 253, byte_start_hi: 4, byte_len: 3 }), + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 5, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 5, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 5, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 5, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 5, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 5, byte_len: 3 }), + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Ignored, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 5, byte_len: 3 }), + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 71, byte_start_hi: 0, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 
}), + Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 159, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 5, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 5, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 5, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 5, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 0, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 5, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 5, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 5, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 39, byte_start_hi: 5, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 62, 
byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 5, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 5, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 91, byte_start_hi: 0, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 5, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 5, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 5, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 5, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 5, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 5, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 57, byte_start_hi: 5, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 59, byte_start_hi: 5, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 5, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 5, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 5, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 5, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 5, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 5, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 74, byte_start_hi: 5, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 5, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 78, byte_start_hi: 5, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 191, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 5, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 193, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }), + 
Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 5, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 5, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }), + Valid, + Disallowed, + Valid, + Mapped(StringTableSlice { byte_start_lo: 87, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 90, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 102, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 105, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 111, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 114, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 117, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 123, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 126, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 129, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 135, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 141, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 147, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 150, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 153, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 159, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 162, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 183, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 189, byte_start_hi: 5, byte_len: 3 }), + Valid, + 
Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 195, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 201, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 207, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 219, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 225, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 231, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 237, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 249, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 255, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 29, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 35, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 41, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 47, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { 
byte_start_lo: 53, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 6, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 231, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 119, byte_start_hi: 0, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 59, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 65, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 71, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 74, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 77, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 86, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 89, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 95, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 98, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 101, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 107, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 110, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 113, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 119, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 122, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 125, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 134, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 137, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 146, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 149, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 155, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 161, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 164, 
byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 173, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 179, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 182, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 185, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 191, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 194, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 197, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 6, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 203, byte_start_hi: 6, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 6, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 209, byte_start_hi: 6, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 6, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 6, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 6, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 221, byte_start_hi: 6, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 6, byte_len: 3 }), + Valid, + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 227, byte_start_hi: 6, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 6, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 233, byte_start_hi: 6, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 6, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 239, byte_start_hi: 6, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 6, byte_len: 3 }), + Disallowed, + Valid, + Mapped(StringTableSlice { byte_start_lo: 245, byte_start_hi: 6, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 6, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 251, byte_start_hi: 6, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 6, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 7, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 7, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 7, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 7, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 7, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 7, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 7, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 7, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 7, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 7, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 31, byte_start_hi: 7, byte_len: 3 }), 
+ Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 7, byte_len: 3 }), + Valid, + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 37, byte_start_hi: 7, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 7, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 7, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 7, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 49, byte_start_hi: 7, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 7, byte_len: 3 }), + Disallowed, + Valid, + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 55, byte_start_hi: 7, byte_len: 3 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 7, byte_len: 3 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 7, byte_len: 3 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 7, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 67, byte_start_hi: 7, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 7, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 7, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 7, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 79, byte_start_hi: 7, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 82, byte_start_hi: 7, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 7, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 7, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 2, byte_len: 2 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 91, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 101, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 111, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 121, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 126, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 91, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 101, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 111, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 121, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { 
byte_start_lo: 126, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 141, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 146, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 151, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 161, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 141, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 146, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 151, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 161, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 181, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 191, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 201, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 181, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 191, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 201, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 7, byte_len: 5 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 211, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 7, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 7, byte_len: 4 }), + Disallowed, + Valid, + Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 229, byte_start_hi: 7, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 7, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 235, byte_start_hi: 7, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 7, byte_len: 4 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 7, byte_len: 3 }), + 
Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 1, byte_len: 2 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 7, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 241, byte_start_hi: 7, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 249, byte_start_hi: 7, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 7, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 8, byte_len: 4 }), + Disallowed, + Valid, + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 8, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 8, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 8, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 7, byte_len: 4 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 8, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 8, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 27, byte_start_hi: 8, byte_len: 5 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 8, byte_len: 2 }), + Disallowed, + Valid, + Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 8, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 37, byte_start_hi: 8, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 8, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 2, byte_len: 2 }), + Disallowed, + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 8, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 8, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 53, byte_start_hi: 8, byte_len: 5 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 8, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 8, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 63, byte_start_hi: 8, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 8, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 69, byte_start_hi: 8, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 8, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 2, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 77, byte_start_hi: 8, byte_len: 1 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 78, byte_start_hi: 8, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 8, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 87, byte_start_hi: 8, byte_len: 4 }), + Disallowed, + Valid, + Mapped(StringTableSlice { byte_start_lo: 91, byte_start_hi: 8, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 8, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 8, byte_len: 3 }), + 
Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 8, byte_len: 4 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 35, byte_start_hi: 0, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 102, byte_start_hi: 8, byte_len: 3 }), + Disallowed, + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 0, byte_len: 1 }), + Ignored, + Deviation(StringTableSlice { byte_start_lo: 105, byte_start_hi: 8, byte_len: 0 }), + Disallowed, + Valid, + Mapped(StringTableSlice { byte_start_lo: 105, byte_start_hi: 8, byte_len: 3 }), + Valid, + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 8, byte_len: 3 }), + Valid, + Disallowed, + Valid, + Disallowed, + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 0, byte_len: 1 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 111, byte_start_hi: 8, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 117, byte_start_hi: 8, byte_len: 9 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 126, byte_start_hi: 8, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 8, byte_len: 9 }), + Valid, + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 141, byte_start_hi: 8, byte_len: 2 }), + Valid, + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 8, byte_len: 3 }), + Valid, + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 146, byte_start_hi: 8, byte_len: 2 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 8, byte_len: 2 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 150, byte_start_hi: 8, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 8, byte_len: 12 }), + Valid, + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 0, byte_len: 1 }), + Ignored, + Disallowed, + Ignored, + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 8, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 8, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 175, byte_start_hi: 8, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 8, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 0, byte_len: 1 }), + 
Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 8, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 8, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 175, byte_start_hi: 8, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 8, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 8, byte_len: 1 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }), + Disallowed, + Valid, + Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 8, byte_len: 2 }), + Valid, + Disallowed, + Valid, + Disallowed, + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 8, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 183, byte_start_hi: 8, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 8, byte_len: 3 }), + Valid, + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 189, byte_start_hi: 8, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 8, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 1, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 195, byte_start_hi: 8, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 159, byte_start_hi: 0, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 8, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, 
byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 8, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 8, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 205, byte_start_hi: 8, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 2, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 69, byte_start_hi: 0, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 207, byte_start_hi: 8, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 209, byte_start_hi: 8, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 211, byte_start_hi: 8, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 8, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 8, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 8, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 221, byte_start_hi: 8, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 226, byte_start_hi: 8, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 231, byte_start_hi: 8, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 237, byte_start_hi: 8, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 8, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 247, byte_start_hi: 8, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 8, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 9, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 9, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 9, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 9, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 9, byte_len: 5 }), + Mapped(StringTableSlice { 
byte_start_lo: 26, byte_start_hi: 9, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 31, byte_start_hi: 9, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 9, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 41, byte_start_hi: 9, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 45, byte_start_hi: 9, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 47, byte_start_hi: 9, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 9, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 9, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 9, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 57, byte_start_hi: 9, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 9, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 63, byte_start_hi: 9, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 65, byte_start_hi: 9, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 45, byte_start_hi: 9, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 47, byte_start_hi: 9, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 9, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 9, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 9, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 57, byte_start_hi: 9, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 9, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 63, byte_start_hi: 9, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 65, byte_start_hi: 9, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }), + Valid, + Disallowed, + Valid, + Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 9, byte_len: 5 }), + Valid, + Disallowed, + Valid, + Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 9, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 79, byte_start_hi: 9, byte_len: 9 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 9, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 94, byte_start_hi: 9, byte_len: 9 }), + Valid, + DisallowedStd3Valid, + Valid, + DisallowedStd3Valid, + Valid, + Mapped(StringTableSlice { byte_start_lo: 103, byte_start_hi: 9, byte_len: 3 }), + Mapped(StringTableSlice { 
byte_start_lo: 106, byte_start_hi: 9, byte_len: 3 }), + Valid, + Disallowed, + Valid, + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 109, byte_start_hi: 9, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 111, byte_start_hi: 9, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 113, byte_start_hi: 9, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 115, byte_start_hi: 9, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 117, byte_start_hi: 9, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 119, byte_start_hi: 9, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 121, byte_start_hi: 9, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 123, byte_start_hi: 9, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 125, byte_start_hi: 9, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 127, byte_start_hi: 9, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 129, byte_start_hi: 9, byte_len: 2 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 9, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 134, byte_start_hi: 9, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 137, byte_start_hi: 9, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 9, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 9, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 146, byte_start_hi: 9, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 149, byte_start_hi: 9, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 9, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 155, byte_start_hi: 9, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 9, byte_len: 4 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 162, byte_start_hi: 9, byte_len: 4 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 9, byte_len: 4 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 9, byte_len: 4 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 9, byte_len: 4 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 9, byte_len: 4 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 182, byte_start_hi: 9, byte_len: 4 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 9, byte_len: 4 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 9, byte_len: 4 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 194, byte_start_hi: 9, byte_len: 
4 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 9, byte_len: 4 }), + Disallowed, + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 9, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 205, byte_start_hi: 9, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 9, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 211, byte_start_hi: 9, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 214, byte_start_hi: 9, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 217, byte_start_hi: 9, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 9, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 223, byte_start_hi: 9, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 226, byte_start_hi: 9, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 229, byte_start_hi: 9, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 9, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 235, byte_start_hi: 9, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 9, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 241, byte_start_hi: 9, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 9, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 247, byte_start_hi: 9, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 9, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 253, byte_start_hi: 9, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 10, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 10, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 10, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 10, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 10, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 10, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 10, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 10, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, 
byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 8, byte_len: 1 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 10, byte_len: 12 }), + Valid, + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 10, byte_len: 3 }), + 
DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 39, byte_start_hi: 10, byte_len: 2 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 41, byte_start_hi: 10, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 10, byte_len: 5 }), + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 49, byte_start_hi: 10, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 10, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 55, byte_start_hi: 10, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 10, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 10, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 10, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 67, byte_start_hi: 10, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 10, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 10, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 10, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 79, byte_start_hi: 10, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 82, byte_start_hi: 10, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 10, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 10, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 91, byte_start_hi: 10, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 94, byte_start_hi: 10, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 97, byte_start_hi: 10, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 10, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 103, byte_start_hi: 10, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 10, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 109, byte_start_hi: 10, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 10, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 115, byte_start_hi: 10, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 10, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 121, byte_start_hi: 10, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 10, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 127, byte_start_hi: 10, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 130, byte_start_hi: 10, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 133, byte_start_hi: 10, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 10, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 139, byte_start_hi: 10, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 142, byte_start_hi: 10, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 145, byte_start_hi: 10, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 10, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 151, byte_start_hi: 10, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 154, byte_start_hi: 10, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 157, byte_start_hi: 10, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 10, byte_len: 3 
}), + Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 10, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 10, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 10, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 10, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 175, byte_start_hi: 10, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 10, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 181, byte_start_hi: 10, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 10, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 187, byte_start_hi: 10, byte_len: 3 }), + Disallowed, + Valid, + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 10, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 193, byte_start_hi: 10, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 195, byte_start_hi: 10, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 10, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 10, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 203, byte_start_hi: 10, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 10, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 5, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 5, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 5, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 5, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 209, byte_start_hi: 10, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 10, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 10, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 217, byte_start_hi: 10, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 219, byte_start_hi: 10, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 10, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 225, byte_start_hi: 10, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 10, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 231, byte_start_hi: 10, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 10, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 237, byte_start_hi: 10, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 10, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 10, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 10, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 249, byte_start_hi: 10, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 10, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 255, byte_start_hi: 10, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 11, byte_len: 3 }), + Valid, + 
Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 11, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 11, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 11, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 11, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 11, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 11, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 11, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 11, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 29, byte_start_hi: 11, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 11, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 35, byte_start_hi: 11, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 11, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 41, byte_start_hi: 11, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 11, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 47, byte_start_hi: 11, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 11, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 53, byte_start_hi: 11, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 11, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 59, byte_start_hi: 11, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 11, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 65, byte_start_hi: 11, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 11, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 71, byte_start_hi: 11, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 74, byte_start_hi: 11, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 77, byte_start_hi: 11, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 11, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 11, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 86, byte_start_hi: 11, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 89, byte_start_hi: 11, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 11, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 95, byte_start_hi: 11, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 98, byte_start_hi: 11, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 101, byte_start_hi: 11, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 11, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 107, byte_start_hi: 11, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 110, byte_start_hi: 11, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 113, byte_start_hi: 11, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 11, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 119, byte_start_hi: 11, byte_len: 3 }), + Valid, + 
Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 122, byte_start_hi: 11, byte_len: 3 }), + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Mapped(StringTableSlice { byte_start_lo: 125, byte_start_hi: 11, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 11, byte_len: 3 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 134, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 137, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 146, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 149, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 155, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 161, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 173, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 179, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 182, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 185, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 191, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 194, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 197, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 203, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 209, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 221, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 227, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 233, byte_start_hi: 11, 
byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 239, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 245, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 251, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 31, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 37, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 49, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 55, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 67, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 79, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 82, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 91, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 94, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 97, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 103, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 12, byte_len: 3 }), + 
Mapped(StringTableSlice { byte_start_lo: 109, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 115, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 121, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 127, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 130, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 133, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 139, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 142, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 145, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 151, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 154, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 157, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 175, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 181, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 187, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 193, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 199, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 205, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 211, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 214, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 217, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 223, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 226, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 229, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 235, byte_start_hi: 12, 
byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 241, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 247, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 253, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 27, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 39, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 45, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 51, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 57, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 63, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 69, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 75, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 78, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 81, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 87, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 90, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 102, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 105, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 13, byte_len: 3 }), + 
Mapped(StringTableSlice { byte_start_lo: 111, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 114, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 117, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 123, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 126, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 129, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 135, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 141, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 147, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 150, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 153, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 159, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 162, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 183, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 189, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 195, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 201, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 207, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 219, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 225, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 231, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 237, byte_start_hi: 13, 
byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 249, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 255, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 14, byte_len: 3 }), + Disallowed, + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 0, byte_len: 1 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 14, byte_len: 1 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 14, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 14, byte_len: 3 }), + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 14, byte_len: 4 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 14, byte_len: 4 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 14, byte_len: 6 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 29, byte_start_hi: 14, byte_len: 6 }), + Disallowed, + Valid, + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 35, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 41, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 47, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 53, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 59, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 65, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 71, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 74, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 77, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 86, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 89, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 95, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 98, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 101, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 
14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 107, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 110, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 113, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 119, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 122, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 125, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 134, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 137, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 146, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 149, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 155, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 161, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 173, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 179, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 182, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 185, byte_start_hi: 14, byte_len: 3 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 191, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 194, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 197, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 203, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 209, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 221, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 227, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { 
byte_start_lo: 233, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 239, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 245, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 251, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 15, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 15, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 15, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 15, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 15, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 15, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 15, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 15, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 15, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 15, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 31, byte_start_hi: 15, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 15, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 37, byte_start_hi: 15, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 15, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 15, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 15, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 49, byte_start_hi: 15, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 15, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 55, byte_start_hi: 15, byte_len: 3 }), + Disallowed, + Valid, + Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 149, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 15, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 15, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 15, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 67, byte_start_hi: 15, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 15, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 15, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 15, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 79, byte_start_hi: 15, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 82, byte_start_hi: 15, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 15, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 155, byte_start_hi: 11, byte_len: 3 }), + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 15, byte_len: 5 }), + 
DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 15, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 98, byte_start_hi: 15, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 103, byte_start_hi: 15, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 15, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 113, byte_start_hi: 15, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 15, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 123, byte_start_hi: 15, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 15, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 133, byte_start_hi: 15, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 15, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 15, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 15, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 153, byte_start_hi: 15, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 15, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 15, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 15, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 173, byte_start_hi: 15, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 15, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 183, byte_start_hi: 15, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 15, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 193, byte_start_hi: 15, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 15, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 203, byte_start_hi: 15, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 15, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 15, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 15, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 223, byte_start_hi: 15, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 15, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 233, byte_start_hi: 15, byte_len: 8 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 241, byte_start_hi: 15, byte_len: 8 }), + Disallowed, + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 249, byte_start_hi: 15, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 15, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 16, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 16, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 16, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 16, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 
23, byte_start_hi: 16, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 16, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 16, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 16, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 16, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 16, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 53, byte_start_hi: 16, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 16, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 63, byte_start_hi: 16, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 16, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 16, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 78, byte_start_hi: 16, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 16, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 16, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 16, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 98, byte_start_hi: 16, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 103, byte_start_hi: 16, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 16, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 113, byte_start_hi: 16, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 16, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 123, byte_start_hi: 16, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 16, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 133, byte_start_hi: 16, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 16, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 16, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 16, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 153, byte_start_hi: 16, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 16, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 16, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 16, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 173, byte_start_hi: 16, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 16, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 179, byte_start_hi: 16, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 182, byte_start_hi: 16, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 185, byte_start_hi: 16, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 187, byte_start_hi: 16, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 189, byte_start_hi: 16, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 
191, byte_start_hi: 16, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 193, byte_start_hi: 16, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 195, byte_start_hi: 16, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 197, byte_start_hi: 16, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 199, byte_start_hi: 16, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 201, byte_start_hi: 16, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 203, byte_start_hi: 16, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 205, byte_start_hi: 16, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 207, byte_start_hi: 16, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 209, byte_start_hi: 16, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 211, byte_start_hi: 16, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 16, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 35, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 53, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 59, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 86, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 95, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 101, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 110, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 113, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 119, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 122, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 16, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 16, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 221, byte_start_hi: 16, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 16, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 227, byte_start_hi: 16, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 16, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 233, byte_start_hi: 16, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 16, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 239, byte_start_hi: 16, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 16, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 245, byte_start_hi: 16, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 16, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 251, byte_start_hi: 16, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 16, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 17, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 17, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 17, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { 
byte_start_lo: 131, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 149, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 15, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 15, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 94, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 130, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 127, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 97, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 117, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 31, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 37, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 49, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 55, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 67, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 79, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 15, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 67, byte_start_hi: 15, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 15, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 82, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 91, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 94, 
byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 97, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 103, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 109, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 17, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 114, byte_start_hi: 17, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 17, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 17, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 17, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 122, byte_start_hi: 17, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 17, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 126, byte_start_hi: 17, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 17, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 130, byte_start_hi: 17, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 17, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 134, byte_start_hi: 17, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 17, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 17, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 17, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 142, byte_start_hi: 17, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 146, byte_start_hi: 17, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 150, byte_start_hi: 17, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 154, byte_start_hi: 17, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 17, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 162, byte_start_hi: 17, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 17, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 17, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 17, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 17, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 183, byte_start_hi: 17, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 17, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 193, byte_start_hi: 17, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 195, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 17, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 203, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 209, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { 
byte_start_lo: 221, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 227, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 233, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 239, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 245, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 251, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 31, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 37, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 49, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 55, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 67, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 79, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 82, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 18, byte_len: 3 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 18, byte_len: 12 }), + Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 18, byte_len: 12 }), + Mapped(StringTableSlice { 
byte_start_lo: 112, byte_start_hi: 18, byte_len: 12 }), + Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 18, byte_len: 9 }), + Mapped(StringTableSlice { byte_start_lo: 133, byte_start_hi: 18, byte_len: 12 }), + Mapped(StringTableSlice { byte_start_lo: 145, byte_start_hi: 18, byte_len: 9 }), + Mapped(StringTableSlice { byte_start_lo: 154, byte_start_hi: 18, byte_len: 9 }), + Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 18, byte_len: 15 }), + Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 18, byte_len: 12 }), + Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 18, byte_len: 9 }), + Mapped(StringTableSlice { byte_start_lo: 199, byte_start_hi: 18, byte_len: 9 }), + Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 18, byte_len: 9 }), + Mapped(StringTableSlice { byte_start_lo: 217, byte_start_hi: 18, byte_len: 12 }), + Mapped(StringTableSlice { byte_start_lo: 229, byte_start_hi: 18, byte_len: 12 }), + Mapped(StringTableSlice { byte_start_lo: 241, byte_start_hi: 18, byte_len: 9 }), + Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 18, byte_len: 9 }), + Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 19, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 19, byte_len: 9 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 19, byte_len: 12 }), + Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 19, byte_len: 12 }), + Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 19, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 19, byte_len: 15 }), + Mapped(StringTableSlice { byte_start_lo: 63, byte_start_hi: 19, byte_len: 18 }), + Mapped(StringTableSlice { byte_start_lo: 81, byte_start_hi: 19, byte_len: 15 }), + Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 19, byte_len: 9 }), + Mapped(StringTableSlice { byte_start_lo: 105, byte_start_hi: 19, byte_len: 15 }), + Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 19, byte_len: 15 }), + Mapped(StringTableSlice { byte_start_lo: 135, byte_start_hi: 19, byte_len: 12 }), + Mapped(StringTableSlice { byte_start_lo: 147, byte_start_hi: 19, byte_len: 9 }), + Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 19, byte_len: 9 }), + Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 19, byte_len: 9 }), + Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 19, byte_len: 12 }), + Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 19, byte_len: 15 }), + Mapped(StringTableSlice { byte_start_lo: 201, byte_start_hi: 19, byte_len: 12 }), + Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 19, byte_len: 9 }), + Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 19, byte_len: 9 }), + Mapped(StringTableSlice { byte_start_lo: 231, byte_start_hi: 19, byte_len: 9 }), + Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 19, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 19, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 19, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 20, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 20, byte_len: 9 }), + Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 20, byte_len: 9 }), + Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 20, byte_len: 15 }), + Mapped(StringTableSlice { byte_start_lo: 41, byte_start_hi: 20, byte_len: 9 }), + 
Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 20, byte_len: 12 }), + Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 20, byte_len: 15 }), + Mapped(StringTableSlice { byte_start_lo: 77, byte_start_hi: 20, byte_len: 9 }), + Mapped(StringTableSlice { byte_start_lo: 86, byte_start_hi: 20, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 20, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 98, byte_start_hi: 20, byte_len: 15 }), + Mapped(StringTableSlice { byte_start_lo: 113, byte_start_hi: 20, byte_len: 12 }), + Mapped(StringTableSlice { byte_start_lo: 125, byte_start_hi: 20, byte_len: 15 }), + Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 20, byte_len: 9 }), + Mapped(StringTableSlice { byte_start_lo: 149, byte_start_hi: 20, byte_len: 15 }), + Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 20, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 20, byte_len: 9 }), + Mapped(StringTableSlice { byte_start_lo: 179, byte_start_hi: 20, byte_len: 9 }), + Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 20, byte_len: 9 }), + Mapped(StringTableSlice { byte_start_lo: 197, byte_start_hi: 20, byte_len: 9 }), + Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 20, byte_len: 9 }), + Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 20, byte_len: 12 }), + Mapped(StringTableSlice { byte_start_lo: 227, byte_start_hi: 20, byte_len: 9 }), + Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 20, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 20, byte_len: 9 }), + Mapped(StringTableSlice { byte_start_lo: 251, byte_start_hi: 20, byte_len: 9 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 21, byte_len: 9 }), + Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 21, byte_len: 12 }), + Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 21, byte_len: 9 }), + Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 21, byte_len: 9 }), + Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 21, byte_len: 9 }), + Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 21, byte_len: 15 }), + Mapped(StringTableSlice { byte_start_lo: 67, byte_start_hi: 21, byte_len: 12 }), + Mapped(StringTableSlice { byte_start_lo: 79, byte_start_hi: 21, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 21, byte_len: 15 }), + Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 21, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 21, byte_len: 12 }), + Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 21, byte_len: 12 }), + Mapped(StringTableSlice { byte_start_lo: 130, byte_start_hi: 21, byte_len: 9 }), + Mapped(StringTableSlice { byte_start_lo: 139, byte_start_hi: 21, byte_len: 9 }), + Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 21, byte_len: 9 }), + Mapped(StringTableSlice { byte_start_lo: 157, byte_start_hi: 21, byte_len: 12 }), + Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 21, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 175, byte_start_hi: 21, byte_len: 9 }), + Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 21, byte_len: 12 }), + Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 21, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 21, byte_len: 15 }), + Mapped(StringTableSlice { byte_start_lo: 217, byte_start_hi: 21, 
byte_len: 9 }), + Mapped(StringTableSlice { byte_start_lo: 226, byte_start_hi: 21, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 21, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 21, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 21, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 21, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 21, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 21, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 21, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 22, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 22, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 22, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 22, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 22, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 22, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 22, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 35, byte_start_hi: 22, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 22, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 45, byte_start_hi: 22, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 22, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 55, byte_start_hi: 22, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 22, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 65, byte_start_hi: 22, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 22, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 75, byte_start_hi: 22, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 22, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 22, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 22, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 90, byte_start_hi: 22, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 22, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 95, byte_start_hi: 22, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 97, byte_start_hi: 22, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 22, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 101, byte_start_hi: 22, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 22, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 107, byte_start_hi: 22, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 109, byte_start_hi: 22, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 115, byte_start_hi: 22, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 121, byte_start_hi: 22, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 127, byte_start_hi: 22, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 133, byte_start_hi: 22, byte_len: 12 }), + Mapped(StringTableSlice { byte_start_lo: 145, byte_start_hi: 22, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 147, byte_start_hi: 22, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 149, byte_start_hi: 22, byte_len: 
3 }), + Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 22, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 154, byte_start_hi: 22, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 22, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 22, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 22, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 162, byte_start_hi: 22, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 22, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 22, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 22, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 173, byte_start_hi: 22, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 22, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 179, byte_start_hi: 22, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 181, byte_start_hi: 22, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 183, byte_start_hi: 22, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 185, byte_start_hi: 22, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 22, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 191, byte_start_hi: 22, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 194, byte_start_hi: 22, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 197, byte_start_hi: 22, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 22, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 22, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 22, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 22, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 22, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 22, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 22, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 22, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 217, byte_start_hi: 22, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 219, byte_start_hi: 22, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 22, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 225, byte_start_hi: 22, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 227, byte_start_hi: 22, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 22, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 233, byte_start_hi: 22, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 22, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 22, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 241, byte_start_hi: 22, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 22, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 145, byte_start_hi: 22, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 22, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 255, byte_start_hi: 22, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 23, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 23, 
byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 23, byte_len: 7 }), + Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 23, byte_len: 8 }), + Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 23, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 23, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 27, byte_start_hi: 23, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 23, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 23, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 23, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 23, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 39, byte_start_hi: 23, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 41, byte_start_hi: 23, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 39, byte_start_hi: 23, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 23, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 45, byte_start_hi: 23, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 47, byte_start_hi: 23, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 23, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 23, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 23, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 23, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 57, byte_start_hi: 23, byte_len: 3 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 23, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 23, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 23, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 23, byte_len: 6 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 23, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 74, byte_start_hi: 23, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 23, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 78, byte_start_hi: 23, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 23, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 82, byte_start_hi: 23, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 217, byte_start_hi: 22, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 23, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 86, byte_start_hi: 23, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 23, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 90, byte_start_hi: 23, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 23, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 22, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 95, byte_start_hi: 23, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 98, byte_start_hi: 23, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 101, byte_start_hi: 23, byte_len: 2 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 103, byte_start_hi: 23, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 23, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 108, 
byte_start_hi: 23, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 110, byte_start_hi: 23, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 23, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 114, byte_start_hi: 23, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 119, byte_start_hi: 23, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 23, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 23, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 23, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 23, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 23, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 23, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 23, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 23, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 23, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 23, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 23, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 23, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 175, byte_start_hi: 23, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 23, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 185, byte_start_hi: 23, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 23, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 195, byte_start_hi: 23, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 23, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 205, byte_start_hi: 23, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 23, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 23, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 23, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 225, byte_start_hi: 23, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 23, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 235, byte_start_hi: 23, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 23, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 245, byte_start_hi: 23, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 23, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 255, byte_start_hi: 23, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 24, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 24, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 24, byte_len: 3 }), + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 29, 
byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 5, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 35, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 41, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 47, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 53, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 59, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 65, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 71, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 74, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 77, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 86, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 89, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 95, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 98, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 101, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 107, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 110, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 113, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 119, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 122, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 2, byte_len: 2 }), + Valid, + Disallowed, + Valid, + Mapped(StringTableSlice { byte_start_lo: 125, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 134, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { 
byte_start_lo: 137, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 146, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 149, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 155, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 161, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 173, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 179, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 182, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 185, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 191, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 194, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 197, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 203, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 209, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 221, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 227, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 233, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 239, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 245, byte_start_hi: 24, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 24, byte_len: 3 }), + Valid, + 
Mapped(StringTableSlice { byte_start_lo: 251, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 24, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 25, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 25, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 25, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 5, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 25, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 25, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 25, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 25, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 25, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 25, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 25, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 31, byte_start_hi: 25, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 25, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 37, byte_start_hi: 25, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 25, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 25, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 205, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 5, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 5, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 25, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 5, byte_len: 2 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 25, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 25, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 57, byte_start_hi: 5, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 55, byte_start_hi: 25, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 25, byte_len: 3 }), + Valid, + Disallowed, + Valid, + Mapped(StringTableSlice { byte_start_lo: 159, byte_start_hi: 0, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 0, byte_len: 2 }), + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 24, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 193, byte_start_hi: 10, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 
25, byte_len: 3 }), + Valid, + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 67, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 79, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 82, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 91, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 94, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 97, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 103, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 109, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 115, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 121, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 127, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 130, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 133, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 139, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 142, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 145, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 151, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 154, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 157, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 175, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 181, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 187, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { 
byte_start_lo: 193, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 199, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 205, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 211, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 214, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 217, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 223, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 226, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 229, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 235, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 241, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 247, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 253, byte_start_hi: 25, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 27, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 39, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 45, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 26, byte_len: 3 }), + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 51, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 57, byte_start_hi: 26, byte_len: 3 }), + 
Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 63, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 255, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 69, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 117, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 75, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 78, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 81, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 87, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 90, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 102, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 105, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 111, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 114, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 117, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 123, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 126, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 129, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 135, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 141, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 147, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 150, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 153, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 159, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 162, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 26, byte_len: 3 }), + 
Mapped(StringTableSlice { byte_start_lo: 183, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 189, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 247, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 195, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 201, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 207, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 219, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 225, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 231, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 237, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 249, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 255, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 29, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 35, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 41, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 47, byte_start_hi: 27, byte_len: 3 }), + 
Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 53, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 59, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 65, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 71, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 74, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 77, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 86, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 89, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 95, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 98, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 101, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 107, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 110, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 113, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 119, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 122, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 125, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 134, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 137, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 146, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 149, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 155, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 161, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 27, byte_len: 3 }), + 
Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 173, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 179, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 185, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 182, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 185, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 191, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 194, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 197, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 203, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 209, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 221, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 227, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 233, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 239, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 245, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 107, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 251, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 59, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 31, byte_start_hi: 28, byte_len: 3 }), + 
Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 37, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 49, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 55, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 67, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 79, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 82, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 91, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 94, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 97, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 103, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 109, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 115, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 121, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 127, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 130, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 133, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 139, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 142, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 145, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 65, byte_start_hi: 27, byte_len: 3 }), + 
Mapped(StringTableSlice { byte_start_lo: 151, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 154, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 157, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 175, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 181, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 187, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 114, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 193, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 199, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 205, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 211, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 214, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 217, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 223, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 226, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 223, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 229, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 235, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 241, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 247, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 253, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 29, byte_len: 3 
}), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 57, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 27, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 29, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 29, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 29, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 39, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 45, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 51, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 57, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 63, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 12, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 29, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 69, byte_start_hi: 29, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 75, byte_start_hi: 29, byte_len: 3 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 78, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 81, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 87, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 90, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 102, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 105, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 111, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 114, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 117, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 123, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 126, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 129, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { 
byte_start_lo: 132, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 135, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 141, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 147, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 150, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 153, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 159, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 162, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 183, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 189, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 195, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 201, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 207, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 219, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 225, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 231, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 237, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 29, byte_len: 3 }), + 
Mapped(StringTableSlice { byte_start_lo: 249, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 255, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 30, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 30, byte_len: 3 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 27, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 114, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 39, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 45, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 51, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 57, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 63, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 69, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 75, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 78, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 141, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 81, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 87, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 90, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 162, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 12, byte_len: 3 }), + 
Mapped(StringTableSlice { byte_start_lo: 110, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 103, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 102, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 105, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 162, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 111, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 114, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 117, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 123, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 126, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 129, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 45, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 135, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 141, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 147, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 150, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 201, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 153, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 159, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 162, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 69, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 231, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 53, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 183, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 30, 
byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 189, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 195, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 201, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 249, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 207, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 255, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 30, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 217, byte_start_hi: 30, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 221, byte_start_hi: 30, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 225, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 231, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 30, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 30, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 30, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 249, byte_start_hi: 30, byte_len: 3 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 30, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 30, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 31, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 31, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 31, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 31, byte_len: 2 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 31, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 31, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 31, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 31, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 31, byte_len: 4 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 31, byte_len: 4 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 31, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 31, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 207, byte_start_hi: 8, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 8, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 31, byte_len: 2 }), + 
Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 31, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 31, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 31, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 31, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 31, byte_len: 2 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 31, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 31, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 31, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 31, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 31, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 31, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 31, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 31, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 31, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 31, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 31, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 31, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 31, byte_len: 4 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 31, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 31, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 31, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 31, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 31, byte_len: 4 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 31, byte_len: 4 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 31, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 31, byte_len: 4 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 31, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 31, byte_len: 4 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 31, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 31, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 31, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 31, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 31, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 31, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 31, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 31, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 31, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 31, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 31, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 31, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 31, byte_len: 2 }), + 
Mapped(StringTableSlice { byte_start_lo: 194, byte_start_hi: 31, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 31, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 31, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 31, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 31, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 31, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 31, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 31, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 31, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 31, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 214, byte_start_hi: 31, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 31, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 31, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 31, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 31, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 31, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 226, byte_start_hi: 31, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 31, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 31, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 31, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 31, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 31, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 31, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 31, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 31, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 31, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 31, byte_len: 2 }), + Valid, + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 31, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 31, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 31, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 31, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 3, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 32, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 32, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 32, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 32, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 32, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 
32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 32, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 32, 
byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 33, byte_len: 4 
}), + Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 33, byte_len: 4 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 33, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 33, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 33, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 175, byte_start_hi: 33, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 33, byte_len: 5 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 185, byte_start_hi: 33, byte_len: 5 }), + Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 194, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 214, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 226, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 32, 
byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 32, byte_len: 4 }), + 
Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 33, byte_len: 4 }), + 
Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 33, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 34, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 34, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 74, byte_start_hi: 34, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 34, byte_len: 4 }), + 
Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 34, byte_len: 
4 }), + Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 34, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 32, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 34, byte_len: 4 }), + Valid, + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 34, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 34, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 34, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 34, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 34, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 34, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 34, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 34, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 34, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 34, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 34, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 35, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 35, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 35, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 35, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 35, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 35, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 35, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 35, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 35, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 35, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 35, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 68, 
byte_start_hi: 35, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 74, byte_start_hi: 35, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 35, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 86, byte_start_hi: 35, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 35, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 98, byte_start_hi: 35, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 35, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 110, byte_start_hi: 35, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 35, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 122, byte_start_hi: 35, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 35, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 134, byte_start_hi: 35, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 35, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 146, byte_start_hi: 35, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 35, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 35, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 35, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 35, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 35, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 182, byte_start_hi: 35, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 35, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 194, byte_start_hi: 35, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 35, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 35, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 35, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 35, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 35, byte_len: 6 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 35, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 35, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 35, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 35, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 35, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 36, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 36, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 36, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 36, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 36, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 36, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 36, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 36, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 36, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 36, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 36, byte_len: 6 }), + Mapped(StringTableSlice { 
byte_start_lo: 70, byte_start_hi: 36, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 36, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 82, byte_start_hi: 36, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 36, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 94, byte_start_hi: 36, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 36, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 36, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 36, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 36, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 36, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 130, byte_start_hi: 36, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 36, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 142, byte_start_hi: 36, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 36, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 154, byte_start_hi: 36, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 36, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 35, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 35, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 36, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 36, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 36, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 36, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 36, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 36, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 36, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 36, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 36, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 36, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 214, byte_start_hi: 36, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 36, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 226, byte_start_hi: 36, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 36, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 98, byte_start_hi: 35, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 35, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 36, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 36, byte_len: 6 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 36, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 36, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 37, byte_len: 8 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 37, byte_len: 8 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 37, byte_len: 8 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 37, byte_len: 8 }), + Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 37, byte_len: 8 }), + 
Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 37, byte_len: 8 }), + Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 37, byte_len: 8 }), + Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 37, byte_len: 6 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 37, byte_len: 33 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 95, byte_start_hi: 37, byte_len: 15 }), + Mapped(StringTableSlice { byte_start_lo: 110, byte_start_hi: 37, byte_len: 8 }), + Valid, + Disallowed, + Ignored, + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 37, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 119, byte_start_hi: 37, byte_len: 3 }), + Disallowed, + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 122, byte_start_hi: 37, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 2, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 123, byte_start_hi: 37, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 37, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 125, byte_start_hi: 37, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 37, byte_len: 3 }), + Disallowed, + Valid, + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 37, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 134, byte_start_hi: 37, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 137, byte_start_hi: 37, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 8, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 8, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 37, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 139, byte_start_hi: 37, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 37, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 37, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 146, byte_start_hi: 37, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 149, byte_start_hi: 37, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 37, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 155, byte_start_hi: 37, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 103, byte_start_hi: 9, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 9, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 37, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 161, byte_start_hi: 37, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 37, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 37, byte_len: 3 }), + Valid, + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 37, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 37, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 8, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 137, byte_start_hi: 37, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 37, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 119, byte_start_hi: 37, byte_len: 3 
}), + Disallowed, + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 2, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 122, byte_start_hi: 37, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 37, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 123, byte_start_hi: 37, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 37, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 8, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 8, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 37, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 139, byte_start_hi: 37, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 37, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 37, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 37, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 173, byte_start_hi: 37, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 37, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 175, byte_start_hi: 37, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 37, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 37, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 175, byte_start_hi: 8, byte_len: 1 }), + Disallowed, + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 37, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 179, byte_start_hi: 37, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 37, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 181, byte_start_hi: 37, byte_len: 1 }), + Disallowed, + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 182, byte_start_hi: 37, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 185, byte_start_hi: 37, byte_len: 4 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 189, byte_start_hi: 37, byte_len: 3 }), + Valid, + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 37, byte_len: 3 }), + Disallowed, + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 195, byte_start_hi: 37, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 37, byte_len: 4 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 37, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 205, byte_start_hi: 37, byte_len: 4 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 209, byte_start_hi: 37, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 37, byte_len: 4 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 37, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 219, byte_start_hi: 37, byte_len: 4 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 223, byte_start_hi: 37, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 226, byte_start_hi: 37, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 
230, byte_start_hi: 37, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 37, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 37, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 37, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 37, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 37, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 37, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 37, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 37, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 37, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 37, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 37, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 37, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 32, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 38, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 38, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 38, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 38, byte_len: 4 }), + Disallowed, + Ignored, + Disallowed, + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 123, byte_start_hi: 37, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 38, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 
37, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 179, byte_start_hi: 37, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 37, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 173, byte_start_hi: 37, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 38, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 8, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 8, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 37, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 8, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 37, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 175, byte_start_hi: 37, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 14, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 38, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 8, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 122, byte_start_hi: 37, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 2, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 37, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 175, byte_start_hi: 8, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 37, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 37, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 181, byte_start_hi: 37, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { 
byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 37, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 37, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 37, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 63, byte_start_hi: 38, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 137, byte_start_hi: 37, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 77, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }), + 
Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 37, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 38, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 139, byte_start_hi: 37, byte_len: 1 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 65, byte_start_hi: 38, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 38, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 69, byte_start_hi: 38, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 14, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 37, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 161, byte_start_hi: 37, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 119, byte_start_hi: 37, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 38, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 75, byte_start_hi: 38, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 78, byte_start_hi: 38, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 81, byte_start_hi: 38, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 38, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 87, byte_start_hi: 38, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 90, byte_start_hi: 38, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 38, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 38, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 38, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 102, byte_start_hi: 38, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 203, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 209, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 221, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 227, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 233, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 239, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 245, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { 
byte_start_lo: 251, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 31, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 37, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 49, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 55, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 67, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 18, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 105, byte_start_hi: 38, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 38, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 111, byte_start_hi: 38, byte_len: 3 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 35, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 41, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 47, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 53, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 59, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 65, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { 
byte_start_lo: 71, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 74, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 77, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 86, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 89, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 95, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 98, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 101, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 107, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 110, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 113, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 119, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 122, byte_start_hi: 14, byte_len: 3 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 125, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 134, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 137, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 14, byte_len: 3 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 146, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 149, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 155, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 14, byte_len: 3 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 161, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 173, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 14, byte_len: 3 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 179, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 182, byte_start_hi: 14, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 185, byte_start_hi: 14, byte_len: 3 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 114, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 38, byte_len: 2 }), + 
DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 0, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 122, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 38, byte_len: 3 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 127, byte_start_hi: 38, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 130, byte_start_hi: 38, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 133, byte_start_hi: 38, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 38, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 139, byte_start_hi: 38, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 142, byte_start_hi: 38, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 145, byte_start_hi: 38, byte_len: 3 }), + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 38, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 38, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 38, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 38, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 38, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 38, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 38, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 38, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 38, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 38, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 38, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 38, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 38, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 38, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 38, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 38, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 38, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 38, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 38, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 38, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 38, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 38, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 38, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 38, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 38, byte_len: 4 }), + 
Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 38, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 38, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 39, byte_len: 4 }), + Valid, + Disallowed, + Valid, + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 160, 
byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 39, byte_len: 4 }), + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 39, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 40, byte_len: 4 }), + 
Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 40, byte_len: 4 }), + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + 
Disallowed, + Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 40, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 41, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 41, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 41, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 41, byte_len: 4 }), + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + 
Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Ignored, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 41, byte_len: 8 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 41, byte_len: 8 }), + Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 41, byte_len: 12 }), + Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 41, byte_len: 12 }), + Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 41, byte_len: 12 }), + Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 41, byte_len: 12 }), + Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 41, byte_len: 12 }), + Valid, + Disallowed, + Valid, + Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 41, byte_len: 8 }), + Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 41, byte_len: 8 }), + Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 41, byte_len: 12 }), + Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 41, byte_len: 12 }), + Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 41, byte_len: 12 }), + Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 41, byte_len: 12 }), + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 
0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, 
byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 9, 
byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }), + Disallowed, + Mapped(StringTableSlice { 
byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }), + 
Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }), + 
Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { 
byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 15, 
byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 7, 
byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 25, 
byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 17, 
byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 9, 
byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 1, 
byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 19, 
byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 11, 
byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 41, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 41, byte_len: 2 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 0, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 41, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 2, byte_len: 2 }), + 
Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 0, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 41, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 0, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 2, 
byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 41, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 0, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 41, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 24, 
byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 0, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 41, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 0, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { 
byte_start_lo: 56, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 41, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 0, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 41, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }), + 
Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 0, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 41, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 0, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, 
byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 41, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 1, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 0, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 41, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 78, byte_start_hi: 2, byte_len: 2 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { 
byte_start_lo: 34, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 8, 
byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 8, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 8, byte_len: 1 }), + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 41, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 41, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 41, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 41, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 182, byte_start_hi: 41, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 41, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 41, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 194, byte_start_hi: 41, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 41, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 41, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 41, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 41, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 214, byte_start_hi: 41, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 41, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 41, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 226, byte_start_hi: 41, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 41, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 41, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 41, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 41, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 41, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 41, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 41, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 42, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 42, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 42, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 42, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 42, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 42, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 42, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 42, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 42, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 42, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 42, byte_len: 4 }), + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 37, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 37, byte_len: 2 }), + 
Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 37, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 38, byte_len: 2 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 37, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 37, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 37, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 42, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 31, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 42, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 42, byte_len: 2 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 37, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 37, byte_len: 2 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 38, byte_len: 2 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 37, byte_len: 2 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { 
byte_start_lo: 28, byte_start_hi: 38, byte_len: 2 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 37, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 37, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 38, byte_len: 2 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 38, byte_len: 2 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 38, byte_len: 2 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 37, byte_len: 2 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 37, byte_len: 2 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 38, byte_len: 2 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 38, byte_len: 2 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 38, byte_len: 2 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 38, byte_len: 2 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 38, byte_len: 2 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 38, byte_len: 2 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 38, byte_len: 2 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 38, byte_len: 2 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 31, byte_len: 2 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 42, byte_len: 2 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 37, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 37, byte_len: 2 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 38, byte_len: 2 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 37, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 38, byte_len: 2 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 38, byte_len: 2 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 37, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 37, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 38, byte_len: 2 }), + Disallowed, + 
Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 42, byte_len: 2 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 42, byte_len: 2 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 37, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 37, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 37, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 37, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 38, byte_len: 2 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 37, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 37, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 38, byte_len: 2 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 37, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 37, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 38, byte_len: 2 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 37, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 38, byte_len: 2 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice 
{ byte_start_lo: 36, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 37, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 37, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 38, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 38, byte_len: 2 }), + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 42, byte_len: 2 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 42, byte_len: 2 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 42, byte_len: 2 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 42, byte_len: 2 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 42, byte_len: 2 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 42, byte_len: 2 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 42, byte_len: 2 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 42, byte_len: 2 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 42, byte_len: 2 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 42, byte_len: 2 }), + Valid, + Disallowed, + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 9, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 205, byte_start_hi: 9, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 9, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 211, byte_start_hi: 9, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 214, byte_start_hi: 9, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 217, byte_start_hi: 9, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 9, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 223, byte_start_hi: 9, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 226, byte_start_hi: 9, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 229, byte_start_hi: 9, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 9, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 235, byte_start_hi: 9, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { 
byte_start_lo: 238, byte_start_hi: 9, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 241, byte_start_hi: 9, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 9, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 247, byte_start_hi: 9, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 9, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 253, byte_start_hi: 9, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 10, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 10, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 10, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 10, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 10, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 10, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 10, byte_len: 3 }), + DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 10, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 42, byte_len: 7 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 23, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 79, byte_start_hi: 42, byte_len: 2 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 
0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }), + Mapped(StringTableSlice { byte_start_lo: 81, byte_start_hi: 42, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 39, byte_start_hi: 23, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 42, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 119, byte_start_hi: 0, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 42, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 42, byte_len: 2 }), + Valid, + Mapped(StringTableSlice { byte_start_lo: 90, byte_start_hi: 42, byte_len: 2 }), + Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 42, byte_len: 2 }), + Disallowed, + Valid, + Mapped(StringTableSlice { byte_start_lo: 94, byte_start_hi: 42, byte_len: 2 }), + Valid, + Disallowed, + Valid, + Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 42, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 102, byte_start_hi: 42, byte_len: 6 }), + Mapped(StringTableSlice { byte_start_lo: 233, byte_start_hi: 17, byte_len: 3 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 42, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 111, byte_start_hi: 42, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 114, byte_start_hi: 42, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 149, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 117, byte_start_hi: 42, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 42, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 82, byte_start_hi: 15, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 123, byte_start_hi: 42, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 126, byte_start_hi: 42, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 129, byte_start_hi: 42, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 42, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 135, byte_start_hi: 42, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 42, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 141, byte_start_hi: 42, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 42, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 147, byte_start_hi: 42, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 150, byte_start_hi: 42, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 153, byte_start_hi: 42, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 42, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 159, byte_start_hi: 42, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 162, byte_start_hi: 42, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 42, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 15, 
byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 42, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 82, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 67, byte_start_hi: 15, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 42, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 42, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 42, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 42, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 183, byte_start_hi: 42, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 42, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 31, byte_start_hi: 17, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 94, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 189, byte_start_hi: 42, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 42, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 195, byte_start_hi: 42, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 42, byte_len: 3 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 201, byte_start_hi: 42, byte_len: 9 }), + Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 42, byte_len: 9 }), + Mapped(StringTableSlice { byte_start_lo: 219, byte_start_hi: 42, byte_len: 9 }), + Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 42, byte_len: 9 }), + Mapped(StringTableSlice { byte_start_lo: 237, byte_start_hi: 42, byte_len: 9 }), + Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 42, byte_len: 9 }), + Mapped(StringTableSlice { byte_start_lo: 255, byte_start_hi: 42, byte_len: 9 }), + Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 43, byte_len: 9 }), + Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 43, byte_len: 9 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 29, byte_start_hi: 43, byte_len: 3 }), + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Valid, + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 35, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 41, byte_start_hi: 43, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 45, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 51, byte_start_hi: 43, byte_len: 3 
}), + Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 57, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 63, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 43, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 102, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 79, byte_start_hi: 43, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 86, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 42, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 89, byte_start_hi: 43, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 102, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 105, byte_start_hi: 43, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 179, byte_start_hi: 11, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 109, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 115, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 42, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 121, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 27, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 105, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 127, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 130, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 71, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 133, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 111, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 139, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 142, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 145, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 43, byte_len: 4 }), + 
Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 155, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 161, byte_start_hi: 43, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 183, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 189, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 195, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 201, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 207, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 117, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 219, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 253, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 225, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 231, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 237, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 249, byte_start_hi: 43, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 253, byte_start_hi: 43, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 117, byte_start_hi: 42, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 44, byte_len: 4 
}), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 44, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 29, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 35, byte_start_hi: 44, byte_len: 3 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 41, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 44, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 51, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 59, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 57, byte_start_hi: 44, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 44, byte_len: 3 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 67, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 79, byte_start_hi: 44, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 86, byte_start_hi: 44, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 90, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 102, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 105, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 111, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 114, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 117, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 123, byte_start_hi: 44, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 127, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 130, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 133, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 159, byte_start_hi: 26, 
byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 139, byte_start_hi: 44, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 37, byte_start_hi: 12, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 44, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 147, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 150, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 153, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 44, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 44, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 173, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 179, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 182, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 185, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 191, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 194, byte_start_hi: 44, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 201, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 207, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 219, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 141, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 78, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 225, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 231, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 237, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 44, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 247, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 250, 
byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 253, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 45, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 31, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 37, byte_start_hi: 45, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 41, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 47, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 150, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 53, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 59, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 65, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 71, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 74, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 162, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 77, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 86, byte_start_hi: 45, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 90, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 153, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 102, byte_start_hi: 45, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 109, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 45, 
byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 225, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 115, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 121, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 127, byte_start_hi: 45, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 134, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 137, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 45, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 147, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 150, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 153, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 110, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 159, byte_start_hi: 45, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 45, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 45, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 45, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 181, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 187, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 103, byte_start_hi: 28, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 193, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 199, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 45, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 209, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 105, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 221, byte_start_hi: 45, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 225, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 231, byte_start_hi: 45, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 235, 
byte_start_hi: 45, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 239, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 245, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 251, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 45, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 46, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 46, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 46, byte_len: 3 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 114, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 27, byte_start_hi: 46, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 31, byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 37, byte_start_hi: 46, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 41, byte_start_hi: 46, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 45, byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 51, byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 57, byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 63, byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 69, byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 75, byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 78, byte_start_hi: 46, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 82, byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 46, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 89, byte_start_hi: 46, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 46, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 46, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 46, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 108, 
byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 111, byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 114, byte_start_hi: 46, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 46, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 122, byte_start_hi: 46, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 126, byte_start_hi: 46, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 130, byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 133, byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 141, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 231, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 139, byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 142, byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 145, byte_start_hi: 46, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 149, byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 147, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 155, byte_start_hi: 46, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 159, byte_start_hi: 46, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 46, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 57, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 195, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 173, byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 179, byte_start_hi: 46, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 183, byte_start_hi: 46, byte_len: 4 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 187, byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 46, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 194, byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 197, byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 203, byte_start_hi: 46, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 207, byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 219, byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 46, byte_len: 4 }), + 
Mapped(StringTableSlice { byte_start_lo: 226, byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 229, byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 235, byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 241, byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 46, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 46, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 46, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 255, byte_start_hi: 46, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 47, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 47, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 47, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 27, byte_start_hi: 47, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 31, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 47, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 41, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 47, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 53, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 47, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 47, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 47, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 47, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 147, byte_start_hi: 44, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 75, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 78, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 81, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 87, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 90, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 102, byte_start_hi: 47, byte_len: 3 }), + 
Mapped(StringTableSlice { byte_start_lo: 105, byte_start_hi: 47, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 119, byte_start_hi: 27, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 109, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 115, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 121, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 29, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 127, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 130, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 133, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 139, byte_start_hi: 47, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 47, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 147, byte_start_hi: 47, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 151, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 154, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 157, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 47, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 47, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 47, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 47, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 191, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 26, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 194, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 197, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 203, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 209, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 47, byte_len: 3 }), + Disallowed, + Mapped(StringTableSlice { byte_start_lo: 221, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 227, 
byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 51, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 233, byte_start_hi: 47, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 237, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 249, byte_start_hi: 47, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 47, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 48, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 183, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 48, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 29, byte_start_hi: 48, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 48, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 37, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 48, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 189, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 53, byte_start_hi: 48, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 57, byte_start_hi: 48, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 67, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 48, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 74, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 77, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 86, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 89, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 95, byte_start_hi: 48, 
byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 102, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 105, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 48, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 115, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 121, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 48, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 48, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 135, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 141, byte_start_hi: 48, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 145, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 48, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 207, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 155, byte_start_hi: 48, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 159, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 162, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 48, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 30, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 187, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 193, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 199, byte_start_hi: 48, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 203, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 48, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 48, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 214, byte_start_hi: 48, byte_len: 4 }), + Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 221, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 224, 
byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 227, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 48, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 13, byte_len: 3 }), + Mapped(StringTableSlice { byte_start_lo: 233, byte_start_hi: 48, byte_len: 4 }), + Disallowed, + Ignored, + Disallowed, +]; + +static STRING_TABLE: &'static str = "\u{61}\ + \u{62}\ + \u{63}\ + \u{64}\ + \u{65}\ + \u{66}\ + \u{67}\ + \u{68}\ + \u{69}\ + \u{6a}\ + \u{6b}\ + \u{6c}\ + \u{6d}\ + \u{6e}\ + \u{6f}\ + \u{70}\ + \u{71}\ + \u{72}\ + \u{73}\ + \u{74}\ + \u{75}\ + \u{76}\ + \u{77}\ + \u{78}\ + \u{79}\ + \u{7a}\ + \u{20}\ + \u{20}\ + \u{308}\ + \u{20}\ + \u{304}\ + \u{32}\ + \u{33}\ + \u{20}\ + \u{301}\ + \u{3bc}\ + \u{20}\ + \u{327}\ + \u{31}\ + \u{31}\ + \u{2044}\ + \u{34}\ + \u{31}\ + \u{2044}\ + \u{32}\ + \u{33}\ + \u{2044}\ + \u{34}\ + \u{e0}\ + \u{e1}\ + \u{e2}\ + \u{e3}\ + \u{e4}\ + \u{e5}\ + \u{e6}\ + \u{e7}\ + \u{e8}\ + \u{e9}\ + \u{ea}\ + \u{eb}\ + \u{ec}\ + \u{ed}\ + \u{ee}\ + \u{ef}\ + \u{f0}\ + \u{f1}\ + \u{f2}\ + \u{f3}\ + \u{f4}\ + \u{f5}\ + \u{f6}\ + \u{f8}\ + \u{f9}\ + \u{fa}\ + \u{fb}\ + \u{fc}\ + \u{fd}\ + \u{fe}\ + \u{73}\ + \u{73}\ + \u{101}\ + \u{103}\ + \u{105}\ + \u{107}\ + \u{109}\ + \u{10b}\ + \u{10d}\ + \u{10f}\ + \u{111}\ + \u{113}\ + \u{115}\ + \u{117}\ + \u{119}\ + \u{11b}\ + \u{11d}\ + \u{11f}\ + \u{121}\ + \u{123}\ + \u{125}\ + \u{127}\ + \u{129}\ + \u{12b}\ + \u{12d}\ + \u{12f}\ + \u{69}\ + \u{307}\ + \u{69}\ + \u{6a}\ + \u{135}\ + \u{137}\ + \u{13a}\ + \u{13c}\ + \u{13e}\ + \u{6c}\ + \u{b7}\ + \u{142}\ + \u{144}\ + \u{146}\ + \u{148}\ + \u{2bc}\ + \u{6e}\ + \u{14b}\ + \u{14d}\ + \u{14f}\ + \u{151}\ + \u{153}\ + \u{155}\ + \u{157}\ + \u{159}\ + \u{15b}\ + \u{15d}\ + \u{15f}\ + \u{161}\ + \u{163}\ + \u{165}\ + \u{167}\ + \u{169}\ + \u{16b}\ + \u{16d}\ + \u{16f}\ + \u{171}\ + \u{173}\ + \u{175}\ + \u{177}\ + \u{ff}\ + \u{17a}\ + \u{17c}\ + \u{17e}\ + \u{253}\ + \u{183}\ + \u{185}\ + \u{254}\ + \u{188}\ + \u{256}\ + \u{257}\ + \u{18c}\ + \u{1dd}\ + \u{259}\ + \u{25b}\ + \u{192}\ + \u{260}\ + \u{263}\ + \u{269}\ + \u{268}\ + \u{199}\ + \u{26f}\ + \u{272}\ + \u{275}\ + \u{1a1}\ + \u{1a3}\ + \u{1a5}\ + \u{280}\ + \u{1a8}\ + \u{283}\ + \u{1ad}\ + \u{288}\ + \u{1b0}\ + \u{28a}\ + \u{28b}\ + \u{1b4}\ + \u{1b6}\ + \u{292}\ + \u{1b9}\ + \u{1bd}\ + \u{64}\ + \u{17e}\ + \u{6c}\ + \u{6a}\ + \u{6e}\ + \u{6a}\ + \u{1ce}\ + \u{1d0}\ + \u{1d2}\ + \u{1d4}\ + \u{1d6}\ + \u{1d8}\ + \u{1da}\ + \u{1dc}\ + \u{1df}\ + \u{1e1}\ + \u{1e3}\ + \u{1e5}\ + \u{1e7}\ + \u{1e9}\ + \u{1eb}\ + \u{1ed}\ + \u{1ef}\ + \u{64}\ + \u{7a}\ + \u{1f5}\ + \u{195}\ + \u{1bf}\ + \u{1f9}\ + \u{1fb}\ + \u{1fd}\ + \u{1ff}\ + \u{201}\ + \u{203}\ + \u{205}\ + \u{207}\ + \u{209}\ + \u{20b}\ + \u{20d}\ + \u{20f}\ + \u{211}\ + \u{213}\ + \u{215}\ + \u{217}\ + \u{219}\ + \u{21b}\ + \u{21d}\ + \u{21f}\ + \u{19e}\ + \u{223}\ + \u{225}\ + \u{227}\ + \u{229}\ + \u{22b}\ + \u{22d}\ + \u{22f}\ + \u{231}\ + \u{233}\ + \u{2c65}\ + \u{23c}\ + \u{19a}\ + \u{2c66}\ + \u{242}\ + \u{180}\ + \u{289}\ + \u{28c}\ + \u{247}\ + \u{249}\ + \u{24b}\ + \u{24d}\ + \u{24f}\ + \u{266}\ + \u{279}\ + \u{27b}\ + \u{281}\ + \u{20}\ + \u{306}\ + \u{20}\ + \u{307}\ + \u{20}\ + \u{30a}\ + \u{20}\ + \u{328}\ + \u{20}\ + \u{303}\ + \u{20}\ + \u{30b}\ + \u{295}\ + \u{300}\ + \u{301}\ + \u{313}\ + \u{308}\ + \u{301}\ + \u{3b9}\ + \u{371}\ + \u{373}\ + \u{2b9}\ + \u{377}\ + \u{20}\ + \u{3b9}\ + \u{3b}\ + \u{3f3}\ + \u{20}\ + \u{308}\ + \u{301}\ + \u{3ac}\ + 
\u{b7}\ + \u{3ad}\ + \u{3ae}\ + \u{3af}\ + \u{3cc}\ + \u{3cd}\ + \u{3ce}\ + \u{3b1}\ + \u{3b2}\ + \u{3b3}\ + \u{3b4}\ + \u{3b5}\ + \u{3b6}\ + \u{3b7}\ + \u{3b8}\ + \u{3ba}\ + \u{3bb}\ + \u{3bd}\ + \u{3be}\ + \u{3bf}\ + \u{3c0}\ + \u{3c1}\ + \u{3c3}\ + \u{3c4}\ + \u{3c5}\ + \u{3c6}\ + \u{3c7}\ + \u{3c8}\ + \u{3c9}\ + \u{3ca}\ + \u{3cb}\ + \u{3d7}\ + \u{3d9}\ + \u{3db}\ + \u{3dd}\ + \u{3df}\ + \u{3e1}\ + \u{3e3}\ + \u{3e5}\ + \u{3e7}\ + \u{3e9}\ + \u{3eb}\ + \u{3ed}\ + \u{3ef}\ + \u{3f8}\ + \u{3fb}\ + \u{37b}\ + \u{37c}\ + \u{37d}\ + \u{450}\ + \u{451}\ + \u{452}\ + \u{453}\ + \u{454}\ + \u{455}\ + \u{456}\ + \u{457}\ + \u{458}\ + \u{459}\ + \u{45a}\ + \u{45b}\ + \u{45c}\ + \u{45d}\ + \u{45e}\ + \u{45f}\ + \u{430}\ + \u{431}\ + \u{432}\ + \u{433}\ + \u{434}\ + \u{435}\ + \u{436}\ + \u{437}\ + \u{438}\ + \u{439}\ + \u{43a}\ + \u{43b}\ + \u{43c}\ + \u{43d}\ + \u{43e}\ + \u{43f}\ + \u{440}\ + \u{441}\ + \u{442}\ + \u{443}\ + \u{444}\ + \u{445}\ + \u{446}\ + \u{447}\ + \u{448}\ + \u{449}\ + \u{44a}\ + \u{44b}\ + \u{44c}\ + \u{44d}\ + \u{44e}\ + \u{44f}\ + \u{461}\ + \u{463}\ + \u{465}\ + \u{467}\ + \u{469}\ + \u{46b}\ + \u{46d}\ + \u{46f}\ + \u{471}\ + \u{473}\ + \u{475}\ + \u{477}\ + \u{479}\ + \u{47b}\ + \u{47d}\ + \u{47f}\ + \u{481}\ + \u{48b}\ + \u{48d}\ + \u{48f}\ + \u{491}\ + \u{493}\ + \u{495}\ + \u{497}\ + \u{499}\ + \u{49b}\ + \u{49d}\ + \u{49f}\ + \u{4a1}\ + \u{4a3}\ + \u{4a5}\ + \u{4a7}\ + \u{4a9}\ + \u{4ab}\ + \u{4ad}\ + \u{4af}\ + \u{4b1}\ + \u{4b3}\ + \u{4b5}\ + \u{4b7}\ + \u{4b9}\ + \u{4bb}\ + \u{4bd}\ + \u{4bf}\ + \u{4c2}\ + \u{4c4}\ + \u{4c6}\ + \u{4c8}\ + \u{4ca}\ + \u{4cc}\ + \u{4ce}\ + \u{4d1}\ + \u{4d3}\ + \u{4d5}\ + \u{4d7}\ + \u{4d9}\ + \u{4db}\ + \u{4dd}\ + \u{4df}\ + \u{4e1}\ + \u{4e3}\ + \u{4e5}\ + \u{4e7}\ + \u{4e9}\ + \u{4eb}\ + \u{4ed}\ + \u{4ef}\ + \u{4f1}\ + \u{4f3}\ + \u{4f5}\ + \u{4f7}\ + \u{4f9}\ + \u{4fb}\ + \u{4fd}\ + \u{4ff}\ + \u{501}\ + \u{503}\ + \u{505}\ + \u{507}\ + \u{509}\ + \u{50b}\ + \u{50d}\ + \u{50f}\ + \u{511}\ + \u{513}\ + \u{515}\ + \u{517}\ + \u{519}\ + \u{51b}\ + \u{51d}\ + \u{51f}\ + \u{521}\ + \u{523}\ + \u{525}\ + \u{527}\ + \u{529}\ + \u{52b}\ + \u{52d}\ + \u{52f}\ + \u{561}\ + \u{562}\ + \u{563}\ + \u{564}\ + \u{565}\ + \u{566}\ + \u{567}\ + \u{568}\ + \u{569}\ + \u{56a}\ + \u{56b}\ + \u{56c}\ + \u{56d}\ + \u{56e}\ + \u{56f}\ + \u{570}\ + \u{571}\ + \u{572}\ + \u{573}\ + \u{574}\ + \u{575}\ + \u{576}\ + \u{577}\ + \u{578}\ + \u{579}\ + \u{57a}\ + \u{57b}\ + \u{57c}\ + \u{57d}\ + \u{57e}\ + \u{57f}\ + \u{580}\ + \u{581}\ + \u{582}\ + \u{583}\ + \u{584}\ + \u{585}\ + \u{586}\ + \u{565}\ + \u{582}\ + \u{627}\ + \u{674}\ + \u{648}\ + \u{674}\ + \u{6c7}\ + \u{674}\ + \u{64a}\ + \u{674}\ + \u{915}\ + \u{93c}\ + \u{916}\ + \u{93c}\ + \u{917}\ + \u{93c}\ + \u{91c}\ + \u{93c}\ + \u{921}\ + \u{93c}\ + \u{922}\ + \u{93c}\ + \u{92b}\ + \u{93c}\ + \u{92f}\ + \u{93c}\ + \u{9a1}\ + \u{9bc}\ + \u{9a2}\ + \u{9bc}\ + \u{9af}\ + \u{9bc}\ + \u{a32}\ + \u{a3c}\ + \u{a38}\ + \u{a3c}\ + \u{a16}\ + \u{a3c}\ + \u{a17}\ + \u{a3c}\ + \u{a1c}\ + \u{a3c}\ + \u{a2b}\ + \u{a3c}\ + \u{b21}\ + \u{b3c}\ + \u{b22}\ + \u{b3c}\ + \u{e4d}\ + \u{e32}\ + \u{ecd}\ + \u{eb2}\ + \u{eab}\ + \u{e99}\ + \u{eab}\ + \u{ea1}\ + \u{f0b}\ + \u{f42}\ + \u{fb7}\ + \u{f4c}\ + \u{fb7}\ + \u{f51}\ + \u{fb7}\ + \u{f56}\ + \u{fb7}\ + \u{f5b}\ + \u{fb7}\ + \u{f40}\ + \u{fb5}\ + \u{f71}\ + \u{f72}\ + \u{f71}\ + \u{f74}\ + \u{fb2}\ + \u{f80}\ + \u{fb2}\ + \u{f71}\ + \u{f80}\ + \u{fb3}\ + \u{f80}\ + \u{fb3}\ + \u{f71}\ + \u{f80}\ + \u{f71}\ + \u{f80}\ + \u{f92}\ + \u{fb7}\ + \u{f9c}\ + \u{fb7}\ + 
\u{fa1}\ + \u{fb7}\ + \u{fa6}\ + \u{fb7}\ + \u{fab}\ + \u{fb7}\ + \u{f90}\ + \u{fb5}\ + \u{2d27}\ + \u{2d2d}\ + \u{10dc}\ + \u{13f0}\ + \u{13f1}\ + \u{13f2}\ + \u{13f3}\ + \u{13f4}\ + \u{13f5}\ + \u{a64b}\ + \u{250}\ + \u{251}\ + \u{1d02}\ + \u{25c}\ + \u{1d16}\ + \u{1d17}\ + \u{1d1d}\ + \u{1d25}\ + \u{252}\ + \u{255}\ + \u{25f}\ + \u{261}\ + \u{265}\ + \u{26a}\ + \u{1d7b}\ + \u{29d}\ + \u{26d}\ + \u{1d85}\ + \u{29f}\ + \u{271}\ + \u{270}\ + \u{273}\ + \u{274}\ + \u{278}\ + \u{282}\ + \u{1ab}\ + \u{1d1c}\ + \u{290}\ + \u{291}\ + \u{1e01}\ + \u{1e03}\ + \u{1e05}\ + \u{1e07}\ + \u{1e09}\ + \u{1e0b}\ + \u{1e0d}\ + \u{1e0f}\ + \u{1e11}\ + \u{1e13}\ + \u{1e15}\ + \u{1e17}\ + \u{1e19}\ + \u{1e1b}\ + \u{1e1d}\ + \u{1e1f}\ + \u{1e21}\ + \u{1e23}\ + \u{1e25}\ + \u{1e27}\ + \u{1e29}\ + \u{1e2b}\ + \u{1e2d}\ + \u{1e2f}\ + \u{1e31}\ + \u{1e33}\ + \u{1e35}\ + \u{1e37}\ + \u{1e39}\ + \u{1e3b}\ + \u{1e3d}\ + \u{1e3f}\ + \u{1e41}\ + \u{1e43}\ + \u{1e45}\ + \u{1e47}\ + \u{1e49}\ + \u{1e4b}\ + \u{1e4d}\ + \u{1e4f}\ + \u{1e51}\ + \u{1e53}\ + \u{1e55}\ + \u{1e57}\ + \u{1e59}\ + \u{1e5b}\ + \u{1e5d}\ + \u{1e5f}\ + \u{1e61}\ + \u{1e63}\ + \u{1e65}\ + \u{1e67}\ + \u{1e69}\ + \u{1e6b}\ + \u{1e6d}\ + \u{1e6f}\ + \u{1e71}\ + \u{1e73}\ + \u{1e75}\ + \u{1e77}\ + \u{1e79}\ + \u{1e7b}\ + \u{1e7d}\ + \u{1e7f}\ + \u{1e81}\ + \u{1e83}\ + \u{1e85}\ + \u{1e87}\ + \u{1e89}\ + \u{1e8b}\ + \u{1e8d}\ + \u{1e8f}\ + \u{1e91}\ + \u{1e93}\ + \u{1e95}\ + \u{61}\ + \u{2be}\ + \u{1ea1}\ + \u{1ea3}\ + \u{1ea5}\ + \u{1ea7}\ + \u{1ea9}\ + \u{1eab}\ + \u{1ead}\ + \u{1eaf}\ + \u{1eb1}\ + \u{1eb3}\ + \u{1eb5}\ + \u{1eb7}\ + \u{1eb9}\ + \u{1ebb}\ + \u{1ebd}\ + \u{1ebf}\ + \u{1ec1}\ + \u{1ec3}\ + \u{1ec5}\ + \u{1ec7}\ + \u{1ec9}\ + \u{1ecb}\ + \u{1ecd}\ + \u{1ecf}\ + \u{1ed1}\ + \u{1ed3}\ + \u{1ed5}\ + \u{1ed7}\ + \u{1ed9}\ + \u{1edb}\ + \u{1edd}\ + \u{1edf}\ + \u{1ee1}\ + \u{1ee3}\ + \u{1ee5}\ + \u{1ee7}\ + \u{1ee9}\ + \u{1eeb}\ + \u{1eed}\ + \u{1eef}\ + \u{1ef1}\ + \u{1ef3}\ + \u{1ef5}\ + \u{1ef7}\ + \u{1ef9}\ + \u{1efb}\ + \u{1efd}\ + \u{1eff}\ + \u{1f00}\ + \u{1f01}\ + \u{1f02}\ + \u{1f03}\ + \u{1f04}\ + \u{1f05}\ + \u{1f06}\ + \u{1f07}\ + \u{1f10}\ + \u{1f11}\ + \u{1f12}\ + \u{1f13}\ + \u{1f14}\ + \u{1f15}\ + \u{1f20}\ + \u{1f21}\ + \u{1f22}\ + \u{1f23}\ + \u{1f24}\ + \u{1f25}\ + \u{1f26}\ + \u{1f27}\ + \u{1f30}\ + \u{1f31}\ + \u{1f32}\ + \u{1f33}\ + \u{1f34}\ + \u{1f35}\ + \u{1f36}\ + \u{1f37}\ + \u{1f40}\ + \u{1f41}\ + \u{1f42}\ + \u{1f43}\ + \u{1f44}\ + \u{1f45}\ + \u{1f51}\ + \u{1f53}\ + \u{1f55}\ + \u{1f57}\ + \u{1f60}\ + \u{1f61}\ + \u{1f62}\ + \u{1f63}\ + \u{1f64}\ + \u{1f65}\ + \u{1f66}\ + \u{1f67}\ + \u{1f00}\ + \u{3b9}\ + \u{1f01}\ + \u{3b9}\ + \u{1f02}\ + \u{3b9}\ + \u{1f03}\ + \u{3b9}\ + \u{1f04}\ + \u{3b9}\ + \u{1f05}\ + \u{3b9}\ + \u{1f06}\ + \u{3b9}\ + \u{1f07}\ + \u{3b9}\ + \u{1f20}\ + \u{3b9}\ + \u{1f21}\ + \u{3b9}\ + \u{1f22}\ + \u{3b9}\ + \u{1f23}\ + \u{3b9}\ + \u{1f24}\ + \u{3b9}\ + \u{1f25}\ + \u{3b9}\ + \u{1f26}\ + \u{3b9}\ + \u{1f27}\ + \u{3b9}\ + \u{1f60}\ + \u{3b9}\ + \u{1f61}\ + \u{3b9}\ + \u{1f62}\ + \u{3b9}\ + \u{1f63}\ + \u{3b9}\ + \u{1f64}\ + \u{3b9}\ + \u{1f65}\ + \u{3b9}\ + \u{1f66}\ + \u{3b9}\ + \u{1f67}\ + \u{3b9}\ + \u{1f70}\ + \u{3b9}\ + \u{3b1}\ + \u{3b9}\ + \u{3ac}\ + \u{3b9}\ + \u{1fb6}\ + \u{3b9}\ + \u{1fb0}\ + \u{1fb1}\ + \u{1f70}\ + \u{20}\ + \u{313}\ + \u{20}\ + \u{342}\ + \u{20}\ + \u{308}\ + \u{342}\ + \u{1f74}\ + \u{3b9}\ + \u{3b7}\ + \u{3b9}\ + \u{3ae}\ + \u{3b9}\ + \u{1fc6}\ + \u{3b9}\ + \u{1f72}\ + \u{1f74}\ + \u{20}\ + \u{313}\ + \u{300}\ + \u{20}\ + \u{313}\ + \u{301}\ + \u{20}\ + 
\u{313}\ + \u{342}\ + \u{390}\ + \u{1fd0}\ + \u{1fd1}\ + \u{1f76}\ + \u{20}\ + \u{314}\ + \u{300}\ + \u{20}\ + \u{314}\ + \u{301}\ + \u{20}\ + \u{314}\ + \u{342}\ + \u{3b0}\ + \u{1fe0}\ + \u{1fe1}\ + \u{1f7a}\ + \u{1fe5}\ + \u{20}\ + \u{308}\ + \u{300}\ + \u{60}\ + \u{1f7c}\ + \u{3b9}\ + \u{3c9}\ + \u{3b9}\ + \u{3ce}\ + \u{3b9}\ + \u{1ff6}\ + \u{3b9}\ + \u{1f78}\ + \u{1f7c}\ + \u{20}\ + \u{314}\ + \u{2010}\ + \u{20}\ + \u{333}\ + \u{2032}\ + \u{2032}\ + \u{2032}\ + \u{2032}\ + \u{2032}\ + \u{2035}\ + \u{2035}\ + \u{2035}\ + \u{2035}\ + \u{2035}\ + \u{21}\ + \u{21}\ + \u{20}\ + \u{305}\ + \u{3f}\ + \u{3f}\ + \u{3f}\ + \u{21}\ + \u{21}\ + \u{3f}\ + \u{2032}\ + \u{2032}\ + \u{2032}\ + \u{2032}\ + \u{30}\ + \u{34}\ + \u{35}\ + \u{36}\ + \u{37}\ + \u{38}\ + \u{39}\ + \u{2b}\ + \u{2212}\ + \u{3d}\ + \u{28}\ + \u{29}\ + \u{72}\ + \u{73}\ + \u{61}\ + \u{2f}\ + \u{63}\ + \u{61}\ + \u{2f}\ + \u{73}\ + \u{b0}\ + \u{63}\ + \u{63}\ + \u{2f}\ + \u{6f}\ + \u{63}\ + \u{2f}\ + \u{75}\ + \u{b0}\ + \u{66}\ + \u{6e}\ + \u{6f}\ + \u{73}\ + \u{6d}\ + \u{74}\ + \u{65}\ + \u{6c}\ + \u{74}\ + \u{6d}\ + \u{5d0}\ + \u{5d1}\ + \u{5d2}\ + \u{5d3}\ + \u{66}\ + \u{61}\ + \u{78}\ + \u{2211}\ + \u{31}\ + \u{2044}\ + \u{37}\ + \u{31}\ + \u{2044}\ + \u{39}\ + \u{31}\ + \u{2044}\ + \u{31}\ + \u{30}\ + \u{31}\ + \u{2044}\ + \u{33}\ + \u{32}\ + \u{2044}\ + \u{33}\ + \u{31}\ + \u{2044}\ + \u{35}\ + \u{32}\ + \u{2044}\ + \u{35}\ + \u{33}\ + \u{2044}\ + \u{35}\ + \u{34}\ + \u{2044}\ + \u{35}\ + \u{31}\ + \u{2044}\ + \u{36}\ + \u{35}\ + \u{2044}\ + \u{36}\ + \u{31}\ + \u{2044}\ + \u{38}\ + \u{33}\ + \u{2044}\ + \u{38}\ + \u{35}\ + \u{2044}\ + \u{38}\ + \u{37}\ + \u{2044}\ + \u{38}\ + \u{31}\ + \u{2044}\ + \u{69}\ + \u{69}\ + \u{69}\ + \u{69}\ + \u{69}\ + \u{69}\ + \u{76}\ + \u{76}\ + \u{69}\ + \u{76}\ + \u{69}\ + \u{69}\ + \u{76}\ + \u{69}\ + \u{69}\ + \u{69}\ + \u{69}\ + \u{78}\ + \u{78}\ + \u{69}\ + \u{78}\ + \u{69}\ + \u{69}\ + \u{30}\ + \u{2044}\ + \u{33}\ + \u{222b}\ + \u{222b}\ + \u{222b}\ + \u{222b}\ + \u{222b}\ + \u{222e}\ + \u{222e}\ + \u{222e}\ + \u{222e}\ + \u{222e}\ + \u{3008}\ + \u{3009}\ + \u{31}\ + \u{30}\ + \u{31}\ + \u{31}\ + \u{31}\ + \u{32}\ + \u{31}\ + \u{33}\ + \u{31}\ + \u{34}\ + \u{31}\ + \u{35}\ + \u{31}\ + \u{36}\ + \u{31}\ + \u{37}\ + \u{31}\ + \u{38}\ + \u{31}\ + \u{39}\ + \u{32}\ + \u{30}\ + \u{28}\ + \u{31}\ + \u{29}\ + \u{28}\ + \u{32}\ + \u{29}\ + \u{28}\ + \u{33}\ + \u{29}\ + \u{28}\ + \u{34}\ + \u{29}\ + \u{28}\ + \u{35}\ + \u{29}\ + \u{28}\ + \u{36}\ + \u{29}\ + \u{28}\ + \u{37}\ + \u{29}\ + \u{28}\ + \u{38}\ + \u{29}\ + \u{28}\ + \u{39}\ + \u{29}\ + \u{28}\ + \u{31}\ + \u{30}\ + \u{29}\ + \u{28}\ + \u{31}\ + \u{31}\ + \u{29}\ + \u{28}\ + \u{31}\ + \u{32}\ + \u{29}\ + \u{28}\ + \u{31}\ + \u{33}\ + \u{29}\ + \u{28}\ + \u{31}\ + \u{34}\ + \u{29}\ + \u{28}\ + \u{31}\ + \u{35}\ + \u{29}\ + \u{28}\ + \u{31}\ + \u{36}\ + \u{29}\ + \u{28}\ + \u{31}\ + \u{37}\ + \u{29}\ + \u{28}\ + \u{31}\ + \u{38}\ + \u{29}\ + \u{28}\ + \u{31}\ + \u{39}\ + \u{29}\ + \u{28}\ + \u{32}\ + \u{30}\ + \u{29}\ + \u{28}\ + \u{61}\ + \u{29}\ + \u{28}\ + \u{62}\ + \u{29}\ + \u{28}\ + \u{63}\ + \u{29}\ + \u{28}\ + \u{64}\ + \u{29}\ + \u{28}\ + \u{65}\ + \u{29}\ + \u{28}\ + \u{66}\ + \u{29}\ + \u{28}\ + \u{67}\ + \u{29}\ + \u{28}\ + \u{68}\ + \u{29}\ + \u{28}\ + \u{69}\ + \u{29}\ + \u{28}\ + \u{6a}\ + \u{29}\ + \u{28}\ + \u{6b}\ + \u{29}\ + \u{28}\ + \u{6c}\ + \u{29}\ + \u{28}\ + \u{6d}\ + \u{29}\ + \u{28}\ + \u{6e}\ + \u{29}\ + \u{28}\ + \u{6f}\ + \u{29}\ + \u{28}\ + \u{70}\ + \u{29}\ + \u{28}\ + \u{71}\ + \u{29}\ + \u{28}\ + 
\u{72}\ + \u{29}\ + \u{28}\ + \u{73}\ + \u{29}\ + \u{28}\ + \u{74}\ + \u{29}\ + \u{28}\ + \u{75}\ + \u{29}\ + \u{28}\ + \u{76}\ + \u{29}\ + \u{28}\ + \u{77}\ + \u{29}\ + \u{28}\ + \u{78}\ + \u{29}\ + \u{28}\ + \u{79}\ + \u{29}\ + \u{28}\ + \u{7a}\ + \u{29}\ + \u{222b}\ + \u{222b}\ + \u{222b}\ + \u{222b}\ + \u{3a}\ + \u{3a}\ + \u{3d}\ + \u{3d}\ + \u{3d}\ + \u{3d}\ + \u{3d}\ + \u{3d}\ + \u{2add}\ + \u{338}\ + \u{2c30}\ + \u{2c31}\ + \u{2c32}\ + \u{2c33}\ + \u{2c34}\ + \u{2c35}\ + \u{2c36}\ + \u{2c37}\ + \u{2c38}\ + \u{2c39}\ + \u{2c3a}\ + \u{2c3b}\ + \u{2c3c}\ + \u{2c3d}\ + \u{2c3e}\ + \u{2c3f}\ + \u{2c40}\ + \u{2c41}\ + \u{2c42}\ + \u{2c43}\ + \u{2c44}\ + \u{2c45}\ + \u{2c46}\ + \u{2c47}\ + \u{2c48}\ + \u{2c49}\ + \u{2c4a}\ + \u{2c4b}\ + \u{2c4c}\ + \u{2c4d}\ + \u{2c4e}\ + \u{2c4f}\ + \u{2c50}\ + \u{2c51}\ + \u{2c52}\ + \u{2c53}\ + \u{2c54}\ + \u{2c55}\ + \u{2c56}\ + \u{2c57}\ + \u{2c58}\ + \u{2c59}\ + \u{2c5a}\ + \u{2c5b}\ + \u{2c5c}\ + \u{2c5d}\ + \u{2c5e}\ + \u{2c61}\ + \u{26b}\ + \u{1d7d}\ + \u{27d}\ + \u{2c68}\ + \u{2c6a}\ + \u{2c6c}\ + \u{2c73}\ + \u{2c76}\ + \u{23f}\ + \u{240}\ + \u{2c81}\ + \u{2c83}\ + \u{2c85}\ + \u{2c87}\ + \u{2c89}\ + \u{2c8b}\ + \u{2c8d}\ + \u{2c8f}\ + \u{2c91}\ + \u{2c93}\ + \u{2c95}\ + \u{2c97}\ + \u{2c99}\ + \u{2c9b}\ + \u{2c9d}\ + \u{2c9f}\ + \u{2ca1}\ + \u{2ca3}\ + \u{2ca5}\ + \u{2ca7}\ + \u{2ca9}\ + \u{2cab}\ + \u{2cad}\ + \u{2caf}\ + \u{2cb1}\ + \u{2cb3}\ + \u{2cb5}\ + \u{2cb7}\ + \u{2cb9}\ + \u{2cbb}\ + \u{2cbd}\ + \u{2cbf}\ + \u{2cc1}\ + \u{2cc3}\ + \u{2cc5}\ + \u{2cc7}\ + \u{2cc9}\ + \u{2ccb}\ + \u{2ccd}\ + \u{2ccf}\ + \u{2cd1}\ + \u{2cd3}\ + \u{2cd5}\ + \u{2cd7}\ + \u{2cd9}\ + \u{2cdb}\ + \u{2cdd}\ + \u{2cdf}\ + \u{2ce1}\ + \u{2ce3}\ + \u{2cec}\ + \u{2cee}\ + \u{2cf3}\ + \u{2d61}\ + \u{6bcd}\ + \u{9f9f}\ + \u{4e00}\ + \u{4e28}\ + \u{4e36}\ + \u{4e3f}\ + \u{4e59}\ + \u{4e85}\ + \u{4e8c}\ + \u{4ea0}\ + \u{4eba}\ + \u{513f}\ + \u{5165}\ + \u{516b}\ + \u{5182}\ + \u{5196}\ + \u{51ab}\ + \u{51e0}\ + \u{51f5}\ + \u{5200}\ + \u{529b}\ + \u{52f9}\ + \u{5315}\ + \u{531a}\ + \u{5338}\ + \u{5341}\ + \u{535c}\ + \u{5369}\ + \u{5382}\ + \u{53b6}\ + \u{53c8}\ + \u{53e3}\ + \u{56d7}\ + \u{571f}\ + \u{58eb}\ + \u{5902}\ + \u{590a}\ + \u{5915}\ + \u{5927}\ + \u{5973}\ + \u{5b50}\ + \u{5b80}\ + \u{5bf8}\ + \u{5c0f}\ + \u{5c22}\ + \u{5c38}\ + \u{5c6e}\ + \u{5c71}\ + \u{5ddb}\ + \u{5de5}\ + \u{5df1}\ + \u{5dfe}\ + \u{5e72}\ + \u{5e7a}\ + \u{5e7f}\ + \u{5ef4}\ + \u{5efe}\ + \u{5f0b}\ + \u{5f13}\ + \u{5f50}\ + \u{5f61}\ + \u{5f73}\ + \u{5fc3}\ + \u{6208}\ + \u{6236}\ + \u{624b}\ + \u{652f}\ + \u{6534}\ + \u{6587}\ + \u{6597}\ + \u{65a4}\ + \u{65b9}\ + \u{65e0}\ + \u{65e5}\ + \u{66f0}\ + \u{6708}\ + \u{6728}\ + \u{6b20}\ + \u{6b62}\ + \u{6b79}\ + \u{6bb3}\ + \u{6bcb}\ + \u{6bd4}\ + \u{6bdb}\ + \u{6c0f}\ + \u{6c14}\ + \u{6c34}\ + \u{706b}\ + \u{722a}\ + \u{7236}\ + \u{723b}\ + \u{723f}\ + \u{7247}\ + \u{7259}\ + \u{725b}\ + \u{72ac}\ + \u{7384}\ + \u{7389}\ + \u{74dc}\ + \u{74e6}\ + \u{7518}\ + \u{751f}\ + \u{7528}\ + \u{7530}\ + \u{758b}\ + \u{7592}\ + \u{7676}\ + \u{767d}\ + \u{76ae}\ + \u{76bf}\ + \u{76ee}\ + \u{77db}\ + \u{77e2}\ + \u{77f3}\ + \u{793a}\ + \u{79b8}\ + \u{79be}\ + \u{7a74}\ + \u{7acb}\ + \u{7af9}\ + \u{7c73}\ + \u{7cf8}\ + \u{7f36}\ + \u{7f51}\ + \u{7f8a}\ + \u{7fbd}\ + \u{8001}\ + \u{800c}\ + \u{8012}\ + \u{8033}\ + \u{807f}\ + \u{8089}\ + \u{81e3}\ + \u{81ea}\ + \u{81f3}\ + \u{81fc}\ + \u{820c}\ + \u{821b}\ + \u{821f}\ + \u{826e}\ + \u{8272}\ + \u{8278}\ + \u{864d}\ + \u{866b}\ + \u{8840}\ + \u{884c}\ + \u{8863}\ + \u{897e}\ + \u{898b}\ + \u{89d2}\ + 
\u{8a00}\ + \u{8c37}\ + \u{8c46}\ + \u{8c55}\ + \u{8c78}\ + \u{8c9d}\ + \u{8d64}\ + \u{8d70}\ + \u{8db3}\ + \u{8eab}\ + \u{8eca}\ + \u{8f9b}\ + \u{8fb0}\ + \u{8fb5}\ + \u{9091}\ + \u{9149}\ + \u{91c6}\ + \u{91cc}\ + \u{91d1}\ + \u{9577}\ + \u{9580}\ + \u{961c}\ + \u{96b6}\ + \u{96b9}\ + \u{96e8}\ + \u{9751}\ + \u{975e}\ + \u{9762}\ + \u{9769}\ + \u{97cb}\ + \u{97ed}\ + \u{97f3}\ + \u{9801}\ + \u{98a8}\ + \u{98db}\ + \u{98df}\ + \u{9996}\ + \u{9999}\ + \u{99ac}\ + \u{9aa8}\ + \u{9ad8}\ + \u{9adf}\ + \u{9b25}\ + \u{9b2f}\ + \u{9b32}\ + \u{9b3c}\ + \u{9b5a}\ + \u{9ce5}\ + \u{9e75}\ + \u{9e7f}\ + \u{9ea5}\ + \u{9ebb}\ + \u{9ec3}\ + \u{9ecd}\ + \u{9ed1}\ + \u{9ef9}\ + \u{9efd}\ + \u{9f0e}\ + \u{9f13}\ + \u{9f20}\ + \u{9f3b}\ + \u{9f4a}\ + \u{9f52}\ + \u{9f8d}\ + \u{9f9c}\ + \u{9fa0}\ + \u{2e}\ + \u{3012}\ + \u{5344}\ + \u{5345}\ + \u{20}\ + \u{3099}\ + \u{20}\ + \u{309a}\ + \u{3088}\ + \u{308a}\ + \u{30b3}\ + \u{30c8}\ + \u{1100}\ + \u{1101}\ + \u{11aa}\ + \u{1102}\ + \u{11ac}\ + \u{11ad}\ + \u{1103}\ + \u{1104}\ + \u{1105}\ + \u{11b0}\ + \u{11b1}\ + \u{11b2}\ + \u{11b3}\ + \u{11b4}\ + \u{11b5}\ + \u{111a}\ + \u{1106}\ + \u{1107}\ + \u{1108}\ + \u{1121}\ + \u{1109}\ + \u{110a}\ + \u{110b}\ + \u{110c}\ + \u{110d}\ + \u{110e}\ + \u{110f}\ + \u{1110}\ + \u{1111}\ + \u{1112}\ + \u{1161}\ + \u{1162}\ + \u{1163}\ + \u{1164}\ + \u{1165}\ + \u{1166}\ + \u{1167}\ + \u{1168}\ + \u{1169}\ + \u{116a}\ + \u{116b}\ + \u{116c}\ + \u{116d}\ + \u{116e}\ + \u{116f}\ + \u{1170}\ + \u{1171}\ + \u{1172}\ + \u{1173}\ + \u{1174}\ + \u{1175}\ + \u{1114}\ + \u{1115}\ + \u{11c7}\ + \u{11c8}\ + \u{11cc}\ + \u{11ce}\ + \u{11d3}\ + \u{11d7}\ + \u{11d9}\ + \u{111c}\ + \u{11dd}\ + \u{11df}\ + \u{111d}\ + \u{111e}\ + \u{1120}\ + \u{1122}\ + \u{1123}\ + \u{1127}\ + \u{1129}\ + \u{112b}\ + \u{112c}\ + \u{112d}\ + \u{112e}\ + \u{112f}\ + \u{1132}\ + \u{1136}\ + \u{1140}\ + \u{1147}\ + \u{114c}\ + \u{11f1}\ + \u{11f2}\ + \u{1157}\ + \u{1158}\ + \u{1159}\ + \u{1184}\ + \u{1185}\ + \u{1188}\ + \u{1191}\ + \u{1192}\ + \u{1194}\ + \u{119e}\ + \u{11a1}\ + \u{4e09}\ + \u{56db}\ + \u{4e0a}\ + \u{4e2d}\ + \u{4e0b}\ + \u{7532}\ + \u{4e19}\ + \u{4e01}\ + \u{5929}\ + \u{5730}\ + \u{28}\ + \u{1100}\ + \u{29}\ + \u{28}\ + \u{1102}\ + \u{29}\ + \u{28}\ + \u{1103}\ + \u{29}\ + \u{28}\ + \u{1105}\ + \u{29}\ + \u{28}\ + \u{1106}\ + \u{29}\ + \u{28}\ + \u{1107}\ + \u{29}\ + \u{28}\ + \u{1109}\ + \u{29}\ + \u{28}\ + \u{110b}\ + \u{29}\ + \u{28}\ + \u{110c}\ + \u{29}\ + \u{28}\ + \u{110e}\ + \u{29}\ + \u{28}\ + \u{110f}\ + \u{29}\ + \u{28}\ + \u{1110}\ + \u{29}\ + \u{28}\ + \u{1111}\ + \u{29}\ + \u{28}\ + \u{1112}\ + \u{29}\ + \u{28}\ + \u{ac00}\ + \u{29}\ + \u{28}\ + \u{b098}\ + \u{29}\ + \u{28}\ + \u{b2e4}\ + \u{29}\ + \u{28}\ + \u{b77c}\ + \u{29}\ + \u{28}\ + \u{b9c8}\ + \u{29}\ + \u{28}\ + \u{bc14}\ + \u{29}\ + \u{28}\ + \u{c0ac}\ + \u{29}\ + \u{28}\ + \u{c544}\ + \u{29}\ + \u{28}\ + \u{c790}\ + \u{29}\ + \u{28}\ + \u{cc28}\ + \u{29}\ + \u{28}\ + \u{ce74}\ + \u{29}\ + \u{28}\ + \u{d0c0}\ + \u{29}\ + \u{28}\ + \u{d30c}\ + \u{29}\ + \u{28}\ + \u{d558}\ + \u{29}\ + \u{28}\ + \u{c8fc}\ + \u{29}\ + \u{28}\ + \u{c624}\ + \u{c804}\ + \u{29}\ + \u{28}\ + \u{c624}\ + \u{d6c4}\ + \u{29}\ + \u{28}\ + \u{4e00}\ + \u{29}\ + \u{28}\ + \u{4e8c}\ + \u{29}\ + \u{28}\ + \u{4e09}\ + \u{29}\ + \u{28}\ + \u{56db}\ + \u{29}\ + \u{28}\ + \u{4e94}\ + \u{29}\ + \u{28}\ + \u{516d}\ + \u{29}\ + \u{28}\ + \u{4e03}\ + \u{29}\ + \u{28}\ + \u{516b}\ + \u{29}\ + \u{28}\ + \u{4e5d}\ + \u{29}\ + \u{28}\ + \u{5341}\ + \u{29}\ + \u{28}\ + \u{6708}\ + \u{29}\ + \u{28}\ + \u{706b}\ 
+ \u{29}\ + \u{28}\ + \u{6c34}\ + \u{29}\ + \u{28}\ + \u{6728}\ + \u{29}\ + \u{28}\ + \u{91d1}\ + \u{29}\ + \u{28}\ + \u{571f}\ + \u{29}\ + \u{28}\ + \u{65e5}\ + \u{29}\ + \u{28}\ + \u{682a}\ + \u{29}\ + \u{28}\ + \u{6709}\ + \u{29}\ + \u{28}\ + \u{793e}\ + \u{29}\ + \u{28}\ + \u{540d}\ + \u{29}\ + \u{28}\ + \u{7279}\ + \u{29}\ + \u{28}\ + \u{8ca1}\ + \u{29}\ + \u{28}\ + \u{795d}\ + \u{29}\ + \u{28}\ + \u{52b4}\ + \u{29}\ + \u{28}\ + \u{4ee3}\ + \u{29}\ + \u{28}\ + \u{547c}\ + \u{29}\ + \u{28}\ + \u{5b66}\ + \u{29}\ + \u{28}\ + \u{76e3}\ + \u{29}\ + \u{28}\ + \u{4f01}\ + \u{29}\ + \u{28}\ + \u{8cc7}\ + \u{29}\ + \u{28}\ + \u{5354}\ + \u{29}\ + \u{28}\ + \u{796d}\ + \u{29}\ + \u{28}\ + \u{4f11}\ + \u{29}\ + \u{28}\ + \u{81ea}\ + \u{29}\ + \u{28}\ + \u{81f3}\ + \u{29}\ + \u{554f}\ + \u{5e7c}\ + \u{7b8f}\ + \u{70}\ + \u{74}\ + \u{65}\ + \u{32}\ + \u{31}\ + \u{32}\ + \u{32}\ + \u{32}\ + \u{33}\ + \u{32}\ + \u{34}\ + \u{32}\ + \u{35}\ + \u{32}\ + \u{36}\ + \u{32}\ + \u{37}\ + \u{32}\ + \u{38}\ + \u{32}\ + \u{39}\ + \u{33}\ + \u{30}\ + \u{33}\ + \u{31}\ + \u{33}\ + \u{32}\ + \u{33}\ + \u{33}\ + \u{33}\ + \u{34}\ + \u{33}\ + \u{35}\ + \u{ac00}\ + \u{b098}\ + \u{b2e4}\ + \u{b77c}\ + \u{b9c8}\ + \u{bc14}\ + \u{c0ac}\ + \u{c544}\ + \u{c790}\ + \u{cc28}\ + \u{ce74}\ + \u{d0c0}\ + \u{d30c}\ + \u{d558}\ + \u{cc38}\ + \u{ace0}\ + \u{c8fc}\ + \u{c758}\ + \u{c6b0}\ + \u{4e94}\ + \u{516d}\ + \u{4e03}\ + \u{4e5d}\ + \u{682a}\ + \u{6709}\ + \u{793e}\ + \u{540d}\ + \u{7279}\ + \u{8ca1}\ + \u{795d}\ + \u{52b4}\ + \u{79d8}\ + \u{7537}\ + \u{9069}\ + \u{512a}\ + \u{5370}\ + \u{6ce8}\ + \u{9805}\ + \u{4f11}\ + \u{5199}\ + \u{6b63}\ + \u{5de6}\ + \u{53f3}\ + \u{533b}\ + \u{5b97}\ + \u{5b66}\ + \u{76e3}\ + \u{4f01}\ + \u{8cc7}\ + \u{5354}\ + \u{591c}\ + \u{33}\ + \u{36}\ + \u{33}\ + \u{37}\ + \u{33}\ + \u{38}\ + \u{33}\ + \u{39}\ + \u{34}\ + \u{30}\ + \u{34}\ + \u{31}\ + \u{34}\ + \u{32}\ + \u{34}\ + \u{33}\ + \u{34}\ + \u{34}\ + \u{34}\ + \u{35}\ + \u{34}\ + \u{36}\ + \u{34}\ + \u{37}\ + \u{34}\ + \u{38}\ + \u{34}\ + \u{39}\ + \u{35}\ + \u{30}\ + \u{31}\ + \u{6708}\ + \u{32}\ + \u{6708}\ + \u{33}\ + \u{6708}\ + \u{34}\ + \u{6708}\ + \u{35}\ + \u{6708}\ + \u{36}\ + \u{6708}\ + \u{37}\ + \u{6708}\ + \u{38}\ + \u{6708}\ + \u{39}\ + \u{6708}\ + \u{31}\ + \u{30}\ + \u{6708}\ + \u{31}\ + \u{31}\ + \u{6708}\ + \u{31}\ + \u{32}\ + \u{6708}\ + \u{68}\ + \u{67}\ + \u{65}\ + \u{72}\ + \u{67}\ + \u{65}\ + \u{76}\ + \u{6c}\ + \u{74}\ + \u{64}\ + \u{30a2}\ + \u{30a4}\ + \u{30a6}\ + \u{30a8}\ + \u{30aa}\ + \u{30ab}\ + \u{30ad}\ + \u{30af}\ + \u{30b1}\ + \u{30b3}\ + \u{30b5}\ + \u{30b7}\ + \u{30b9}\ + \u{30bb}\ + \u{30bd}\ + \u{30bf}\ + \u{30c1}\ + \u{30c4}\ + \u{30c6}\ + \u{30c8}\ + \u{30ca}\ + \u{30cb}\ + \u{30cc}\ + \u{30cd}\ + \u{30ce}\ + \u{30cf}\ + \u{30d2}\ + \u{30d5}\ + \u{30d8}\ + \u{30db}\ + \u{30de}\ + \u{30df}\ + \u{30e0}\ + \u{30e1}\ + \u{30e2}\ + \u{30e4}\ + \u{30e6}\ + \u{30e8}\ + \u{30e9}\ + \u{30ea}\ + \u{30eb}\ + \u{30ec}\ + \u{30ed}\ + \u{30ef}\ + \u{30f0}\ + \u{30f1}\ + \u{30f2}\ + \u{30a2}\ + \u{30d1}\ + \u{30fc}\ + \u{30c8}\ + \u{30a2}\ + \u{30eb}\ + \u{30d5}\ + \u{30a1}\ + \u{30a2}\ + \u{30f3}\ + \u{30da}\ + \u{30a2}\ + \u{30a2}\ + \u{30fc}\ + \u{30eb}\ + \u{30a4}\ + \u{30cb}\ + \u{30f3}\ + \u{30b0}\ + \u{30a4}\ + \u{30f3}\ + \u{30c1}\ + \u{30a6}\ + \u{30a9}\ + \u{30f3}\ + \u{30a8}\ + \u{30b9}\ + \u{30af}\ + \u{30fc}\ + \u{30c9}\ + \u{30a8}\ + \u{30fc}\ + \u{30ab}\ + \u{30fc}\ + \u{30aa}\ + \u{30f3}\ + \u{30b9}\ + \u{30aa}\ + \u{30fc}\ + \u{30e0}\ + \u{30ab}\ + \u{30a4}\ + \u{30ea}\ + \u{30ab}\ + \u{30e9}\ 
+ \u{30c3}\ + \u{30c8}\ + \u{30ab}\ + \u{30ed}\ + \u{30ea}\ + \u{30fc}\ + \u{30ac}\ + \u{30ed}\ + \u{30f3}\ + \u{30ac}\ + \u{30f3}\ + \u{30de}\ + \u{30ae}\ + \u{30ac}\ + \u{30ae}\ + \u{30cb}\ + \u{30fc}\ + \u{30ad}\ + \u{30e5}\ + \u{30ea}\ + \u{30fc}\ + \u{30ae}\ + \u{30eb}\ + \u{30c0}\ + \u{30fc}\ + \u{30ad}\ + \u{30ed}\ + \u{30ad}\ + \u{30ed}\ + \u{30b0}\ + \u{30e9}\ + \u{30e0}\ + \u{30ad}\ + \u{30ed}\ + \u{30e1}\ + \u{30fc}\ + \u{30c8}\ + \u{30eb}\ + \u{30ad}\ + \u{30ed}\ + \u{30ef}\ + \u{30c3}\ + \u{30c8}\ + \u{30b0}\ + \u{30e9}\ + \u{30e0}\ + \u{30b0}\ + \u{30e9}\ + \u{30e0}\ + \u{30c8}\ + \u{30f3}\ + \u{30af}\ + \u{30eb}\ + \u{30bc}\ + \u{30a4}\ + \u{30ed}\ + \u{30af}\ + \u{30ed}\ + \u{30fc}\ + \u{30cd}\ + \u{30b1}\ + \u{30fc}\ + \u{30b9}\ + \u{30b3}\ + \u{30eb}\ + \u{30ca}\ + \u{30b3}\ + \u{30fc}\ + \u{30dd}\ + \u{30b5}\ + \u{30a4}\ + \u{30af}\ + \u{30eb}\ + \u{30b5}\ + \u{30f3}\ + \u{30c1}\ + \u{30fc}\ + \u{30e0}\ + \u{30b7}\ + \u{30ea}\ + \u{30f3}\ + \u{30b0}\ + \u{30bb}\ + \u{30f3}\ + \u{30c1}\ + \u{30bb}\ + \u{30f3}\ + \u{30c8}\ + \u{30c0}\ + \u{30fc}\ + \u{30b9}\ + \u{30c7}\ + \u{30b7}\ + \u{30c9}\ + \u{30eb}\ + \u{30c8}\ + \u{30f3}\ + \u{30ca}\ + \u{30ce}\ + \u{30ce}\ + \u{30c3}\ + \u{30c8}\ + \u{30cf}\ + \u{30a4}\ + \u{30c4}\ + \u{30d1}\ + \u{30fc}\ + \u{30bb}\ + \u{30f3}\ + \u{30c8}\ + \u{30d1}\ + \u{30fc}\ + \u{30c4}\ + \u{30d0}\ + \u{30fc}\ + \u{30ec}\ + \u{30eb}\ + \u{30d4}\ + \u{30a2}\ + \u{30b9}\ + \u{30c8}\ + \u{30eb}\ + \u{30d4}\ + \u{30af}\ + \u{30eb}\ + \u{30d4}\ + \u{30b3}\ + \u{30d3}\ + \u{30eb}\ + \u{30d5}\ + \u{30a1}\ + \u{30e9}\ + \u{30c3}\ + \u{30c9}\ + \u{30d5}\ + \u{30a3}\ + \u{30fc}\ + \u{30c8}\ + \u{30d6}\ + \u{30c3}\ + \u{30b7}\ + \u{30a7}\ + \u{30eb}\ + \u{30d5}\ + \u{30e9}\ + \u{30f3}\ + \u{30d8}\ + \u{30af}\ + \u{30bf}\ + \u{30fc}\ + \u{30eb}\ + \u{30da}\ + \u{30bd}\ + \u{30da}\ + \u{30cb}\ + \u{30d2}\ + \u{30d8}\ + \u{30eb}\ + \u{30c4}\ + \u{30da}\ + \u{30f3}\ + \u{30b9}\ + \u{30da}\ + \u{30fc}\ + \u{30b8}\ + \u{30d9}\ + \u{30fc}\ + \u{30bf}\ + \u{30dd}\ + \u{30a4}\ + \u{30f3}\ + \u{30c8}\ + \u{30dc}\ + \u{30eb}\ + \u{30c8}\ + \u{30db}\ + \u{30f3}\ + \u{30dd}\ + \u{30f3}\ + \u{30c9}\ + \u{30db}\ + \u{30fc}\ + \u{30eb}\ + \u{30db}\ + \u{30fc}\ + \u{30f3}\ + \u{30de}\ + \u{30a4}\ + \u{30af}\ + \u{30ed}\ + \u{30de}\ + \u{30a4}\ + \u{30eb}\ + \u{30de}\ + \u{30c3}\ + \u{30cf}\ + \u{30de}\ + \u{30eb}\ + \u{30af}\ + \u{30de}\ + \u{30f3}\ + \u{30b7}\ + \u{30e7}\ + \u{30f3}\ + \u{30df}\ + \u{30af}\ + \u{30ed}\ + \u{30f3}\ + \u{30df}\ + \u{30ea}\ + \u{30df}\ + \u{30ea}\ + \u{30d0}\ + \u{30fc}\ + \u{30eb}\ + \u{30e1}\ + \u{30ac}\ + \u{30e1}\ + \u{30ac}\ + \u{30c8}\ + \u{30f3}\ + \u{30e1}\ + \u{30fc}\ + \u{30c8}\ + \u{30eb}\ + \u{30e4}\ + \u{30fc}\ + \u{30c9}\ + \u{30e4}\ + \u{30fc}\ + \u{30eb}\ + \u{30e6}\ + \u{30a2}\ + \u{30f3}\ + \u{30ea}\ + \u{30c3}\ + \u{30c8}\ + \u{30eb}\ + \u{30ea}\ + \u{30e9}\ + \u{30eb}\ + \u{30d4}\ + \u{30fc}\ + \u{30eb}\ + \u{30fc}\ + \u{30d6}\ + \u{30eb}\ + \u{30ec}\ + \u{30e0}\ + \u{30ec}\ + \u{30f3}\ + \u{30c8}\ + \u{30b2}\ + \u{30f3}\ + \u{30ef}\ + \u{30c3}\ + \u{30c8}\ + \u{30}\ + \u{70b9}\ + \u{31}\ + \u{70b9}\ + \u{32}\ + \u{70b9}\ + \u{33}\ + \u{70b9}\ + \u{34}\ + \u{70b9}\ + \u{35}\ + \u{70b9}\ + \u{36}\ + \u{70b9}\ + \u{37}\ + \u{70b9}\ + \u{38}\ + \u{70b9}\ + \u{39}\ + \u{70b9}\ + \u{31}\ + \u{30}\ + \u{70b9}\ + \u{31}\ + \u{31}\ + \u{70b9}\ + \u{31}\ + \u{32}\ + \u{70b9}\ + \u{31}\ + \u{33}\ + \u{70b9}\ + \u{31}\ + \u{34}\ + \u{70b9}\ + \u{31}\ + \u{35}\ + \u{70b9}\ + \u{31}\ + \u{36}\ + \u{70b9}\ + \u{31}\ + \u{37}\ + 
\u{70b9}\ + \u{31}\ + \u{38}\ + \u{70b9}\ + \u{31}\ + \u{39}\ + \u{70b9}\ + \u{32}\ + \u{30}\ + \u{70b9}\ + \u{32}\ + \u{31}\ + \u{70b9}\ + \u{32}\ + \u{32}\ + \u{70b9}\ + \u{32}\ + \u{33}\ + \u{70b9}\ + \u{32}\ + \u{34}\ + \u{70b9}\ + \u{68}\ + \u{70}\ + \u{61}\ + \u{64}\ + \u{61}\ + \u{61}\ + \u{75}\ + \u{62}\ + \u{61}\ + \u{72}\ + \u{6f}\ + \u{76}\ + \u{70}\ + \u{63}\ + \u{64}\ + \u{6d}\ + \u{64}\ + \u{6d}\ + \u{32}\ + \u{64}\ + \u{6d}\ + \u{33}\ + \u{69}\ + \u{75}\ + \u{5e73}\ + \u{6210}\ + \u{662d}\ + \u{548c}\ + \u{5927}\ + \u{6b63}\ + \u{660e}\ + \u{6cbb}\ + \u{682a}\ + \u{5f0f}\ + \u{4f1a}\ + \u{793e}\ + \u{70}\ + \u{61}\ + \u{6e}\ + \u{61}\ + \u{3bc}\ + \u{61}\ + \u{6d}\ + \u{61}\ + \u{6b}\ + \u{61}\ + \u{6b}\ + \u{62}\ + \u{6d}\ + \u{62}\ + \u{67}\ + \u{62}\ + \u{63}\ + \u{61}\ + \u{6c}\ + \u{6b}\ + \u{63}\ + \u{61}\ + \u{6c}\ + \u{70}\ + \u{66}\ + \u{6e}\ + \u{66}\ + \u{3bc}\ + \u{66}\ + \u{3bc}\ + \u{67}\ + \u{6d}\ + \u{67}\ + \u{6b}\ + \u{67}\ + \u{68}\ + \u{7a}\ + \u{6b}\ + \u{68}\ + \u{7a}\ + \u{6d}\ + \u{68}\ + \u{7a}\ + \u{67}\ + \u{68}\ + \u{7a}\ + \u{74}\ + \u{68}\ + \u{7a}\ + \u{3bc}\ + \u{6c}\ + \u{6d}\ + \u{6c}\ + \u{64}\ + \u{6c}\ + \u{6b}\ + \u{6c}\ + \u{66}\ + \u{6d}\ + \u{6e}\ + \u{6d}\ + \u{3bc}\ + \u{6d}\ + \u{6d}\ + \u{6d}\ + \u{63}\ + \u{6d}\ + \u{6b}\ + \u{6d}\ + \u{6d}\ + \u{6d}\ + \u{32}\ + \u{63}\ + \u{6d}\ + \u{32}\ + \u{6d}\ + \u{32}\ + \u{6b}\ + \u{6d}\ + \u{32}\ + \u{6d}\ + \u{6d}\ + \u{33}\ + \u{63}\ + \u{6d}\ + \u{33}\ + \u{6d}\ + \u{33}\ + \u{6b}\ + \u{6d}\ + \u{33}\ + \u{6d}\ + \u{2215}\ + \u{73}\ + \u{6d}\ + \u{2215}\ + \u{73}\ + \u{32}\ + \u{6b}\ + \u{70}\ + \u{61}\ + \u{6d}\ + \u{70}\ + \u{61}\ + \u{67}\ + \u{70}\ + \u{61}\ + \u{72}\ + \u{61}\ + \u{64}\ + \u{72}\ + \u{61}\ + \u{64}\ + \u{2215}\ + \u{73}\ + \u{72}\ + \u{61}\ + \u{64}\ + \u{2215}\ + \u{73}\ + \u{32}\ + \u{70}\ + \u{73}\ + \u{6e}\ + \u{73}\ + \u{3bc}\ + \u{73}\ + \u{6d}\ + \u{73}\ + \u{70}\ + \u{76}\ + \u{6e}\ + \u{76}\ + \u{3bc}\ + \u{76}\ + \u{6d}\ + \u{76}\ + \u{6b}\ + \u{76}\ + \u{70}\ + \u{77}\ + \u{6e}\ + \u{77}\ + \u{3bc}\ + \u{77}\ + \u{6d}\ + \u{77}\ + \u{6b}\ + \u{77}\ + \u{6b}\ + \u{3c9}\ + \u{6d}\ + \u{3c9}\ + \u{62}\ + \u{71}\ + \u{63}\ + \u{63}\ + \u{63}\ + \u{64}\ + \u{63}\ + \u{2215}\ + \u{6b}\ + \u{67}\ + \u{64}\ + \u{62}\ + \u{67}\ + \u{79}\ + \u{68}\ + \u{61}\ + \u{68}\ + \u{70}\ + \u{69}\ + \u{6e}\ + \u{6b}\ + \u{6b}\ + \u{6b}\ + \u{74}\ + \u{6c}\ + \u{6d}\ + \u{6c}\ + \u{6e}\ + \u{6c}\ + \u{6f}\ + \u{67}\ + \u{6c}\ + \u{78}\ + \u{6d}\ + \u{69}\ + \u{6c}\ + \u{6d}\ + \u{6f}\ + \u{6c}\ + \u{70}\ + \u{68}\ + \u{70}\ + \u{70}\ + \u{6d}\ + \u{70}\ + \u{72}\ + \u{73}\ + \u{72}\ + \u{73}\ + \u{76}\ + \u{77}\ + \u{62}\ + \u{76}\ + \u{2215}\ + \u{6d}\ + \u{61}\ + \u{2215}\ + \u{6d}\ + \u{31}\ + \u{65e5}\ + \u{32}\ + \u{65e5}\ + \u{33}\ + \u{65e5}\ + \u{34}\ + \u{65e5}\ + \u{35}\ + \u{65e5}\ + \u{36}\ + \u{65e5}\ + \u{37}\ + \u{65e5}\ + \u{38}\ + \u{65e5}\ + \u{39}\ + \u{65e5}\ + \u{31}\ + \u{30}\ + \u{65e5}\ + \u{31}\ + \u{31}\ + \u{65e5}\ + \u{31}\ + \u{32}\ + \u{65e5}\ + \u{31}\ + \u{33}\ + \u{65e5}\ + \u{31}\ + \u{34}\ + \u{65e5}\ + \u{31}\ + \u{35}\ + \u{65e5}\ + \u{31}\ + \u{36}\ + \u{65e5}\ + \u{31}\ + \u{37}\ + \u{65e5}\ + \u{31}\ + \u{38}\ + \u{65e5}\ + \u{31}\ + \u{39}\ + \u{65e5}\ + \u{32}\ + \u{30}\ + \u{65e5}\ + \u{32}\ + \u{31}\ + \u{65e5}\ + \u{32}\ + \u{32}\ + \u{65e5}\ + \u{32}\ + \u{33}\ + \u{65e5}\ + \u{32}\ + \u{34}\ + \u{65e5}\ + \u{32}\ + \u{35}\ + \u{65e5}\ + \u{32}\ + \u{36}\ + \u{65e5}\ + \u{32}\ + \u{37}\ + \u{65e5}\ + \u{32}\ + \u{38}\ + 
\u{65e5}\ + \u{32}\ + \u{39}\ + \u{65e5}\ + \u{33}\ + \u{30}\ + \u{65e5}\ + \u{33}\ + \u{31}\ + \u{65e5}\ + \u{67}\ + \u{61}\ + \u{6c}\ + \u{a641}\ + \u{a643}\ + \u{a645}\ + \u{a647}\ + \u{a649}\ + \u{a64d}\ + \u{a64f}\ + \u{a651}\ + \u{a653}\ + \u{a655}\ + \u{a657}\ + \u{a659}\ + \u{a65b}\ + \u{a65d}\ + \u{a65f}\ + \u{a661}\ + \u{a663}\ + \u{a665}\ + \u{a667}\ + \u{a669}\ + \u{a66b}\ + \u{a66d}\ + \u{a681}\ + \u{a683}\ + \u{a685}\ + \u{a687}\ + \u{a689}\ + \u{a68b}\ + \u{a68d}\ + \u{a68f}\ + \u{a691}\ + \u{a693}\ + \u{a695}\ + \u{a697}\ + \u{a699}\ + \u{a69b}\ + \u{a723}\ + \u{a725}\ + \u{a727}\ + \u{a729}\ + \u{a72b}\ + \u{a72d}\ + \u{a72f}\ + \u{a733}\ + \u{a735}\ + \u{a737}\ + \u{a739}\ + \u{a73b}\ + \u{a73d}\ + \u{a73f}\ + \u{a741}\ + \u{a743}\ + \u{a745}\ + \u{a747}\ + \u{a749}\ + \u{a74b}\ + \u{a74d}\ + \u{a74f}\ + \u{a751}\ + \u{a753}\ + \u{a755}\ + \u{a757}\ + \u{a759}\ + \u{a75b}\ + \u{a75d}\ + \u{a75f}\ + \u{a761}\ + \u{a763}\ + \u{a765}\ + \u{a767}\ + \u{a769}\ + \u{a76b}\ + \u{a76d}\ + \u{a76f}\ + \u{a77a}\ + \u{a77c}\ + \u{1d79}\ + \u{a77f}\ + \u{a781}\ + \u{a783}\ + \u{a785}\ + \u{a787}\ + \u{a78c}\ + \u{a791}\ + \u{a793}\ + \u{a797}\ + \u{a799}\ + \u{a79b}\ + \u{a79d}\ + \u{a79f}\ + \u{a7a1}\ + \u{a7a3}\ + \u{a7a5}\ + \u{a7a7}\ + \u{a7a9}\ + \u{26c}\ + \u{29e}\ + \u{287}\ + \u{ab53}\ + \u{a7b5}\ + \u{a7b7}\ + \u{ab37}\ + \u{ab52}\ + \u{13a0}\ + \u{13a1}\ + \u{13a2}\ + \u{13a3}\ + \u{13a4}\ + \u{13a5}\ + \u{13a6}\ + \u{13a7}\ + \u{13a8}\ + \u{13a9}\ + \u{13aa}\ + \u{13ab}\ + \u{13ac}\ + \u{13ad}\ + \u{13ae}\ + \u{13af}\ + \u{13b0}\ + \u{13b1}\ + \u{13b2}\ + \u{13b3}\ + \u{13b4}\ + \u{13b5}\ + \u{13b6}\ + \u{13b7}\ + \u{13b8}\ + \u{13b9}\ + \u{13ba}\ + \u{13bb}\ + \u{13bc}\ + \u{13bd}\ + \u{13be}\ + \u{13bf}\ + \u{13c0}\ + \u{13c1}\ + \u{13c2}\ + \u{13c3}\ + \u{13c4}\ + \u{13c5}\ + \u{13c6}\ + \u{13c7}\ + \u{13c8}\ + \u{13c9}\ + \u{13ca}\ + \u{13cb}\ + \u{13cc}\ + \u{13cd}\ + \u{13ce}\ + \u{13cf}\ + \u{13d0}\ + \u{13d1}\ + \u{13d2}\ + \u{13d3}\ + \u{13d4}\ + \u{13d5}\ + \u{13d6}\ + \u{13d7}\ + \u{13d8}\ + \u{13d9}\ + \u{13da}\ + \u{13db}\ + \u{13dc}\ + \u{13dd}\ + \u{13de}\ + \u{13df}\ + \u{13e0}\ + \u{13e1}\ + \u{13e2}\ + \u{13e3}\ + \u{13e4}\ + \u{13e5}\ + \u{13e6}\ + \u{13e7}\ + \u{13e8}\ + \u{13e9}\ + \u{13ea}\ + \u{13eb}\ + \u{13ec}\ + \u{13ed}\ + \u{13ee}\ + \u{13ef}\ + \u{8c48}\ + \u{66f4}\ + \u{8cc8}\ + \u{6ed1}\ + \u{4e32}\ + \u{53e5}\ + \u{5951}\ + \u{5587}\ + \u{5948}\ + \u{61f6}\ + \u{7669}\ + \u{7f85}\ + \u{863f}\ + \u{87ba}\ + \u{88f8}\ + \u{908f}\ + \u{6a02}\ + \u{6d1b}\ + \u{70d9}\ + \u{73de}\ + \u{843d}\ + \u{916a}\ + \u{99f1}\ + \u{4e82}\ + \u{5375}\ + \u{6b04}\ + \u{721b}\ + \u{862d}\ + \u{9e1e}\ + \u{5d50}\ + \u{6feb}\ + \u{85cd}\ + \u{8964}\ + \u{62c9}\ + \u{81d8}\ + \u{881f}\ + \u{5eca}\ + \u{6717}\ + \u{6d6a}\ + \u{72fc}\ + \u{90ce}\ + \u{4f86}\ + \u{51b7}\ + \u{52de}\ + \u{64c4}\ + \u{6ad3}\ + \u{7210}\ + \u{76e7}\ + \u{8606}\ + \u{865c}\ + \u{8def}\ + \u{9732}\ + \u{9b6f}\ + \u{9dfa}\ + \u{788c}\ + \u{797f}\ + \u{7da0}\ + \u{83c9}\ + \u{9304}\ + \u{8ad6}\ + \u{58df}\ + \u{5f04}\ + \u{7c60}\ + \u{807e}\ + \u{7262}\ + \u{78ca}\ + \u{8cc2}\ + \u{96f7}\ + \u{58d8}\ + \u{5c62}\ + \u{6a13}\ + \u{6dda}\ + \u{6f0f}\ + \u{7d2f}\ + \u{7e37}\ + \u{964b}\ + \u{52d2}\ + \u{808b}\ + \u{51dc}\ + \u{51cc}\ + \u{7a1c}\ + \u{7dbe}\ + \u{83f1}\ + \u{9675}\ + \u{8b80}\ + \u{62cf}\ + \u{8afe}\ + \u{4e39}\ + \u{5be7}\ + \u{6012}\ + \u{7387}\ + \u{7570}\ + \u{5317}\ + \u{78fb}\ + \u{4fbf}\ + \u{5fa9}\ + \u{4e0d}\ + \u{6ccc}\ + \u{6578}\ + \u{7d22}\ + \u{53c3}\ + \u{585e}\ + 
\u{7701}\ + \u{8449}\ + \u{8aaa}\ + \u{6bba}\ + \u{6c88}\ + \u{62fe}\ + \u{82e5}\ + \u{63a0}\ + \u{7565}\ + \u{4eae}\ + \u{5169}\ + \u{51c9}\ + \u{6881}\ + \u{7ce7}\ + \u{826f}\ + \u{8ad2}\ + \u{91cf}\ + \u{52f5}\ + \u{5442}\ + \u{5eec}\ + \u{65c5}\ + \u{6ffe}\ + \u{792a}\ + \u{95ad}\ + \u{9a6a}\ + \u{9e97}\ + \u{9ece}\ + \u{66c6}\ + \u{6b77}\ + \u{8f62}\ + \u{5e74}\ + \u{6190}\ + \u{6200}\ + \u{649a}\ + \u{6f23}\ + \u{7149}\ + \u{7489}\ + \u{79ca}\ + \u{7df4}\ + \u{806f}\ + \u{8f26}\ + \u{84ee}\ + \u{9023}\ + \u{934a}\ + \u{5217}\ + \u{52a3}\ + \u{54bd}\ + \u{70c8}\ + \u{88c2}\ + \u{5ec9}\ + \u{5ff5}\ + \u{637b}\ + \u{6bae}\ + \u{7c3e}\ + \u{7375}\ + \u{4ee4}\ + \u{56f9}\ + \u{5dba}\ + \u{601c}\ + \u{73b2}\ + \u{7469}\ + \u{7f9a}\ + \u{8046}\ + \u{9234}\ + \u{96f6}\ + \u{9748}\ + \u{9818}\ + \u{4f8b}\ + \u{79ae}\ + \u{91b4}\ + \u{96b8}\ + \u{60e1}\ + \u{4e86}\ + \u{50da}\ + \u{5bee}\ + \u{5c3f}\ + \u{6599}\ + \u{71ce}\ + \u{7642}\ + \u{84fc}\ + \u{907c}\ + \u{6688}\ + \u{962e}\ + \u{5289}\ + \u{677b}\ + \u{67f3}\ + \u{6d41}\ + \u{6e9c}\ + \u{7409}\ + \u{7559}\ + \u{786b}\ + \u{7d10}\ + \u{985e}\ + \u{622e}\ + \u{9678}\ + \u{502b}\ + \u{5d19}\ + \u{6dea}\ + \u{8f2a}\ + \u{5f8b}\ + \u{6144}\ + \u{6817}\ + \u{9686}\ + \u{5229}\ + \u{540f}\ + \u{5c65}\ + \u{6613}\ + \u{674e}\ + \u{68a8}\ + \u{6ce5}\ + \u{7406}\ + \u{75e2}\ + \u{7f79}\ + \u{88cf}\ + \u{88e1}\ + \u{96e2}\ + \u{533f}\ + \u{6eba}\ + \u{541d}\ + \u{71d0}\ + \u{7498}\ + \u{85fa}\ + \u{96a3}\ + \u{9c57}\ + \u{9e9f}\ + \u{6797}\ + \u{6dcb}\ + \u{81e8}\ + \u{7b20}\ + \u{7c92}\ + \u{72c0}\ + \u{7099}\ + \u{8b58}\ + \u{4ec0}\ + \u{8336}\ + \u{523a}\ + \u{5207}\ + \u{5ea6}\ + \u{62d3}\ + \u{7cd6}\ + \u{5b85}\ + \u{6d1e}\ + \u{66b4}\ + \u{8f3b}\ + \u{964d}\ + \u{5ed3}\ + \u{5140}\ + \u{55c0}\ + \u{585a}\ + \u{6674}\ + \u{51de}\ + \u{732a}\ + \u{76ca}\ + \u{793c}\ + \u{795e}\ + \u{7965}\ + \u{798f}\ + \u{9756}\ + \u{7cbe}\ + \u{8612}\ + \u{8af8}\ + \u{9038}\ + \u{90fd}\ + \u{98ef}\ + \u{98fc}\ + \u{9928}\ + \u{9db4}\ + \u{90de}\ + \u{96b7}\ + \u{4fae}\ + \u{50e7}\ + \u{514d}\ + \u{52c9}\ + \u{52e4}\ + \u{5351}\ + \u{559d}\ + \u{5606}\ + \u{5668}\ + \u{5840}\ + \u{58a8}\ + \u{5c64}\ + \u{6094}\ + \u{6168}\ + \u{618e}\ + \u{61f2}\ + \u{654f}\ + \u{65e2}\ + \u{6691}\ + \u{6885}\ + \u{6d77}\ + \u{6e1a}\ + \u{6f22}\ + \u{716e}\ + \u{722b}\ + \u{7422}\ + \u{7891}\ + \u{7949}\ + \u{7948}\ + \u{7950}\ + \u{7956}\ + \u{798d}\ + \u{798e}\ + \u{7a40}\ + \u{7a81}\ + \u{7bc0}\ + \u{7e09}\ + \u{7e41}\ + \u{7f72}\ + \u{8005}\ + \u{81ed}\ + \u{8279}\ + \u{8457}\ + \u{8910}\ + \u{8996}\ + \u{8b01}\ + \u{8b39}\ + \u{8cd3}\ + \u{8d08}\ + \u{8fb6}\ + \u{96e3}\ + \u{97ff}\ + \u{983b}\ + \u{6075}\ + \u{242ee}\ + \u{8218}\ + \u{4e26}\ + \u{51b5}\ + \u{5168}\ + \u{4f80}\ + \u{5145}\ + \u{5180}\ + \u{52c7}\ + \u{52fa}\ + \u{5555}\ + \u{5599}\ + \u{55e2}\ + \u{58b3}\ + \u{5944}\ + \u{5954}\ + \u{5a62}\ + \u{5b28}\ + \u{5ed2}\ + \u{5ed9}\ + \u{5f69}\ + \u{5fad}\ + \u{60d8}\ + \u{614e}\ + \u{6108}\ + \u{6160}\ + \u{6234}\ + \u{63c4}\ + \u{641c}\ + \u{6452}\ + \u{6556}\ + \u{671b}\ + \u{6756}\ + \u{6edb}\ + \u{6ecb}\ + \u{701e}\ + \u{77a7}\ + \u{7235}\ + \u{72af}\ + \u{7471}\ + \u{7506}\ + \u{753b}\ + \u{761d}\ + \u{761f}\ + \u{76db}\ + \u{76f4}\ + \u{774a}\ + \u{7740}\ + \u{78cc}\ + \u{7ab1}\ + \u{7c7b}\ + \u{7d5b}\ + \u{7f3e}\ + \u{8352}\ + \u{83ef}\ + \u{8779}\ + \u{8941}\ + \u{8986}\ + \u{8abf}\ + \u{8acb}\ + \u{8aed}\ + \u{8b8a}\ + \u{8f38}\ + \u{9072}\ + \u{9199}\ + \u{9276}\ + \u{967c}\ + \u{97db}\ + \u{980b}\ + \u{9b12}\ + \u{2284a}\ + \u{22844}\ + \u{233d5}\ 
+ \u{3b9d}\ + \u{4018}\ + \u{4039}\ + \u{25249}\ + \u{25cd0}\ + \u{27ed3}\ + \u{9f43}\ + \u{9f8e}\ + \u{66}\ + \u{66}\ + \u{66}\ + \u{69}\ + \u{66}\ + \u{6c}\ + \u{66}\ + \u{66}\ + \u{69}\ + \u{66}\ + \u{66}\ + \u{6c}\ + \u{73}\ + \u{74}\ + \u{574}\ + \u{576}\ + \u{574}\ + \u{565}\ + \u{574}\ + \u{56b}\ + \u{57e}\ + \u{576}\ + \u{574}\ + \u{56d}\ + \u{5d9}\ + \u{5b4}\ + \u{5f2}\ + \u{5b7}\ + \u{5e2}\ + \u{5d4}\ + \u{5db}\ + \u{5dc}\ + \u{5dd}\ + \u{5e8}\ + \u{5ea}\ + \u{5e9}\ + \u{5c1}\ + \u{5e9}\ + \u{5c2}\ + \u{5e9}\ + \u{5bc}\ + \u{5c1}\ + \u{5e9}\ + \u{5bc}\ + \u{5c2}\ + \u{5d0}\ + \u{5b7}\ + \u{5d0}\ + \u{5b8}\ + \u{5d0}\ + \u{5bc}\ + \u{5d1}\ + \u{5bc}\ + \u{5d2}\ + \u{5bc}\ + \u{5d3}\ + \u{5bc}\ + \u{5d4}\ + \u{5bc}\ + \u{5d5}\ + \u{5bc}\ + \u{5d6}\ + \u{5bc}\ + \u{5d8}\ + \u{5bc}\ + \u{5d9}\ + \u{5bc}\ + \u{5da}\ + \u{5bc}\ + \u{5db}\ + \u{5bc}\ + \u{5dc}\ + \u{5bc}\ + \u{5de}\ + \u{5bc}\ + \u{5e0}\ + \u{5bc}\ + \u{5e1}\ + \u{5bc}\ + \u{5e3}\ + \u{5bc}\ + \u{5e4}\ + \u{5bc}\ + \u{5e6}\ + \u{5bc}\ + \u{5e7}\ + \u{5bc}\ + \u{5e8}\ + \u{5bc}\ + \u{5e9}\ + \u{5bc}\ + \u{5ea}\ + \u{5bc}\ + \u{5d5}\ + \u{5b9}\ + \u{5d1}\ + \u{5bf}\ + \u{5db}\ + \u{5bf}\ + \u{5e4}\ + \u{5bf}\ + \u{5d0}\ + \u{5dc}\ + \u{671}\ + \u{67b}\ + \u{67e}\ + \u{680}\ + \u{67a}\ + \u{67f}\ + \u{679}\ + \u{6a4}\ + \u{6a6}\ + \u{684}\ + \u{683}\ + \u{686}\ + \u{687}\ + \u{68d}\ + \u{68c}\ + \u{68e}\ + \u{688}\ + \u{698}\ + \u{691}\ + \u{6a9}\ + \u{6af}\ + \u{6b3}\ + \u{6b1}\ + \u{6ba}\ + \u{6bb}\ + \u{6c0}\ + \u{6c1}\ + \u{6be}\ + \u{6d2}\ + \u{6d3}\ + \u{6ad}\ + \u{6c7}\ + \u{6c6}\ + \u{6c8}\ + \u{6cb}\ + \u{6c5}\ + \u{6c9}\ + \u{6d0}\ + \u{649}\ + \u{626}\ + \u{627}\ + \u{626}\ + \u{6d5}\ + \u{626}\ + \u{648}\ + \u{626}\ + \u{6c7}\ + \u{626}\ + \u{6c6}\ + \u{626}\ + \u{6c8}\ + \u{626}\ + \u{6d0}\ + \u{626}\ + \u{649}\ + \u{6cc}\ + \u{626}\ + \u{62c}\ + \u{626}\ + \u{62d}\ + \u{626}\ + \u{645}\ + \u{626}\ + \u{64a}\ + \u{628}\ + \u{62c}\ + \u{628}\ + \u{62d}\ + \u{628}\ + \u{62e}\ + \u{628}\ + \u{645}\ + \u{628}\ + \u{649}\ + \u{628}\ + \u{64a}\ + \u{62a}\ + \u{62c}\ + \u{62a}\ + \u{62d}\ + \u{62a}\ + \u{62e}\ + \u{62a}\ + \u{645}\ + \u{62a}\ + \u{649}\ + \u{62a}\ + \u{64a}\ + \u{62b}\ + \u{62c}\ + \u{62b}\ + \u{645}\ + \u{62b}\ + \u{649}\ + \u{62b}\ + \u{64a}\ + \u{62c}\ + \u{62d}\ + \u{62c}\ + \u{645}\ + \u{62d}\ + \u{62c}\ + \u{62d}\ + \u{645}\ + \u{62e}\ + \u{62c}\ + \u{62e}\ + \u{62d}\ + \u{62e}\ + \u{645}\ + \u{633}\ + \u{62c}\ + \u{633}\ + \u{62d}\ + \u{633}\ + \u{62e}\ + \u{633}\ + \u{645}\ + \u{635}\ + \u{62d}\ + \u{635}\ + \u{645}\ + \u{636}\ + \u{62c}\ + \u{636}\ + \u{62d}\ + \u{636}\ + \u{62e}\ + \u{636}\ + \u{645}\ + \u{637}\ + \u{62d}\ + \u{637}\ + \u{645}\ + \u{638}\ + \u{645}\ + \u{639}\ + \u{62c}\ + \u{639}\ + \u{645}\ + \u{63a}\ + \u{62c}\ + \u{63a}\ + \u{645}\ + \u{641}\ + \u{62c}\ + \u{641}\ + \u{62d}\ + \u{641}\ + \u{62e}\ + \u{641}\ + \u{645}\ + \u{641}\ + \u{649}\ + \u{641}\ + \u{64a}\ + \u{642}\ + \u{62d}\ + \u{642}\ + \u{645}\ + \u{642}\ + \u{649}\ + \u{642}\ + \u{64a}\ + \u{643}\ + \u{627}\ + \u{643}\ + \u{62c}\ + \u{643}\ + \u{62d}\ + \u{643}\ + \u{62e}\ + \u{643}\ + \u{644}\ + \u{643}\ + \u{645}\ + \u{643}\ + \u{649}\ + \u{643}\ + \u{64a}\ + \u{644}\ + \u{62c}\ + \u{644}\ + \u{62d}\ + \u{644}\ + \u{62e}\ + \u{644}\ + \u{645}\ + \u{644}\ + \u{649}\ + \u{644}\ + \u{64a}\ + \u{645}\ + \u{62c}\ + \u{645}\ + \u{62d}\ + \u{645}\ + \u{62e}\ + \u{645}\ + \u{645}\ + \u{645}\ + \u{649}\ + \u{645}\ + \u{64a}\ + \u{646}\ + \u{62c}\ + \u{646}\ + \u{62d}\ + \u{646}\ + \u{62e}\ + \u{646}\ + \u{645}\ + 
\u{646}\ + \u{649}\ + \u{646}\ + \u{64a}\ + \u{647}\ + \u{62c}\ + \u{647}\ + \u{645}\ + \u{647}\ + \u{649}\ + \u{647}\ + \u{64a}\ + \u{64a}\ + \u{62c}\ + \u{64a}\ + \u{62d}\ + \u{64a}\ + \u{62e}\ + \u{64a}\ + \u{645}\ + \u{64a}\ + \u{649}\ + \u{64a}\ + \u{64a}\ + \u{630}\ + \u{670}\ + \u{631}\ + \u{670}\ + \u{649}\ + \u{670}\ + \u{20}\ + \u{64c}\ + \u{651}\ + \u{20}\ + \u{64d}\ + \u{651}\ + \u{20}\ + \u{64e}\ + \u{651}\ + \u{20}\ + \u{64f}\ + \u{651}\ + \u{20}\ + \u{650}\ + \u{651}\ + \u{20}\ + \u{651}\ + \u{670}\ + \u{626}\ + \u{631}\ + \u{626}\ + \u{632}\ + \u{626}\ + \u{646}\ + \u{628}\ + \u{631}\ + \u{628}\ + \u{632}\ + \u{628}\ + \u{646}\ + \u{62a}\ + \u{631}\ + \u{62a}\ + \u{632}\ + \u{62a}\ + \u{646}\ + \u{62b}\ + \u{631}\ + \u{62b}\ + \u{632}\ + \u{62b}\ + \u{646}\ + \u{645}\ + \u{627}\ + \u{646}\ + \u{631}\ + \u{646}\ + \u{632}\ + \u{646}\ + \u{646}\ + \u{64a}\ + \u{631}\ + \u{64a}\ + \u{632}\ + \u{64a}\ + \u{646}\ + \u{626}\ + \u{62e}\ + \u{626}\ + \u{647}\ + \u{628}\ + \u{647}\ + \u{62a}\ + \u{647}\ + \u{635}\ + \u{62e}\ + \u{644}\ + \u{647}\ + \u{646}\ + \u{647}\ + \u{647}\ + \u{670}\ + \u{64a}\ + \u{647}\ + \u{62b}\ + \u{647}\ + \u{633}\ + \u{647}\ + \u{634}\ + \u{645}\ + \u{634}\ + \u{647}\ + \u{640}\ + \u{64e}\ + \u{651}\ + \u{640}\ + \u{64f}\ + \u{651}\ + \u{640}\ + \u{650}\ + \u{651}\ + \u{637}\ + \u{649}\ + \u{637}\ + \u{64a}\ + \u{639}\ + \u{649}\ + \u{639}\ + \u{64a}\ + \u{63a}\ + \u{649}\ + \u{63a}\ + \u{64a}\ + \u{633}\ + \u{649}\ + \u{633}\ + \u{64a}\ + \u{634}\ + \u{649}\ + \u{634}\ + \u{64a}\ + \u{62d}\ + \u{649}\ + \u{62d}\ + \u{64a}\ + \u{62c}\ + \u{649}\ + \u{62c}\ + \u{64a}\ + \u{62e}\ + \u{649}\ + \u{62e}\ + \u{64a}\ + \u{635}\ + \u{649}\ + \u{635}\ + \u{64a}\ + \u{636}\ + \u{649}\ + \u{636}\ + \u{64a}\ + \u{634}\ + \u{62c}\ + \u{634}\ + \u{62d}\ + \u{634}\ + \u{62e}\ + \u{634}\ + \u{631}\ + \u{633}\ + \u{631}\ + \u{635}\ + \u{631}\ + \u{636}\ + \u{631}\ + \u{627}\ + \u{64b}\ + \u{62a}\ + \u{62c}\ + \u{645}\ + \u{62a}\ + \u{62d}\ + \u{62c}\ + \u{62a}\ + \u{62d}\ + \u{645}\ + \u{62a}\ + \u{62e}\ + \u{645}\ + \u{62a}\ + \u{645}\ + \u{62c}\ + \u{62a}\ + \u{645}\ + \u{62d}\ + \u{62a}\ + \u{645}\ + \u{62e}\ + \u{62c}\ + \u{645}\ + \u{62d}\ + \u{62d}\ + \u{645}\ + \u{64a}\ + \u{62d}\ + \u{645}\ + \u{649}\ + \u{633}\ + \u{62d}\ + \u{62c}\ + \u{633}\ + \u{62c}\ + \u{62d}\ + \u{633}\ + \u{62c}\ + \u{649}\ + \u{633}\ + \u{645}\ + \u{62d}\ + \u{633}\ + \u{645}\ + \u{62c}\ + \u{633}\ + \u{645}\ + \u{645}\ + \u{635}\ + \u{62d}\ + \u{62d}\ + \u{635}\ + \u{645}\ + \u{645}\ + \u{634}\ + \u{62d}\ + \u{645}\ + \u{634}\ + \u{62c}\ + \u{64a}\ + \u{634}\ + \u{645}\ + \u{62e}\ + \u{634}\ + \u{645}\ + \u{645}\ + \u{636}\ + \u{62d}\ + \u{649}\ + \u{636}\ + \u{62e}\ + \u{645}\ + \u{637}\ + \u{645}\ + \u{62d}\ + \u{637}\ + \u{645}\ + \u{645}\ + \u{637}\ + \u{645}\ + \u{64a}\ + \u{639}\ + \u{62c}\ + \u{645}\ + \u{639}\ + \u{645}\ + \u{645}\ + \u{639}\ + \u{645}\ + \u{649}\ + \u{63a}\ + \u{645}\ + \u{645}\ + \u{63a}\ + \u{645}\ + \u{64a}\ + \u{63a}\ + \u{645}\ + \u{649}\ + \u{641}\ + \u{62e}\ + \u{645}\ + \u{642}\ + \u{645}\ + \u{62d}\ + \u{642}\ + \u{645}\ + \u{645}\ + \u{644}\ + \u{62d}\ + \u{645}\ + \u{644}\ + \u{62d}\ + \u{64a}\ + \u{644}\ + \u{62d}\ + \u{649}\ + \u{644}\ + \u{62c}\ + \u{62c}\ + \u{644}\ + \u{62e}\ + \u{645}\ + \u{644}\ + \u{645}\ + \u{62d}\ + \u{645}\ + \u{62d}\ + \u{62c}\ + \u{645}\ + \u{62d}\ + \u{645}\ + \u{645}\ + \u{62d}\ + \u{64a}\ + \u{645}\ + \u{62c}\ + \u{62d}\ + \u{645}\ + \u{62c}\ + \u{645}\ + \u{645}\ + \u{62e}\ + \u{62c}\ + \u{645}\ + \u{62e}\ + 
\u{645}\ + \u{645}\ + \u{62c}\ + \u{62e}\ + \u{647}\ + \u{645}\ + \u{62c}\ + \u{647}\ + \u{645}\ + \u{645}\ + \u{646}\ + \u{62d}\ + \u{645}\ + \u{646}\ + \u{62d}\ + \u{649}\ + \u{646}\ + \u{62c}\ + \u{645}\ + \u{646}\ + \u{62c}\ + \u{649}\ + \u{646}\ + \u{645}\ + \u{64a}\ + \u{646}\ + \u{645}\ + \u{649}\ + \u{64a}\ + \u{645}\ + \u{645}\ + \u{628}\ + \u{62e}\ + \u{64a}\ + \u{62a}\ + \u{62c}\ + \u{64a}\ + \u{62a}\ + \u{62c}\ + \u{649}\ + \u{62a}\ + \u{62e}\ + \u{64a}\ + \u{62a}\ + \u{62e}\ + \u{649}\ + \u{62a}\ + \u{645}\ + \u{64a}\ + \u{62a}\ + \u{645}\ + \u{649}\ + \u{62c}\ + \u{645}\ + \u{64a}\ + \u{62c}\ + \u{62d}\ + \u{649}\ + \u{62c}\ + \u{645}\ + \u{649}\ + \u{633}\ + \u{62e}\ + \u{649}\ + \u{635}\ + \u{62d}\ + \u{64a}\ + \u{634}\ + \u{62d}\ + \u{64a}\ + \u{636}\ + \u{62d}\ + \u{64a}\ + \u{644}\ + \u{62c}\ + \u{64a}\ + \u{644}\ + \u{645}\ + \u{64a}\ + \u{64a}\ + \u{62d}\ + \u{64a}\ + \u{64a}\ + \u{62c}\ + \u{64a}\ + \u{64a}\ + \u{645}\ + \u{64a}\ + \u{645}\ + \u{645}\ + \u{64a}\ + \u{642}\ + \u{645}\ + \u{64a}\ + \u{646}\ + \u{62d}\ + \u{64a}\ + \u{639}\ + \u{645}\ + \u{64a}\ + \u{643}\ + \u{645}\ + \u{64a}\ + \u{646}\ + \u{62c}\ + \u{62d}\ + \u{645}\ + \u{62e}\ + \u{64a}\ + \u{644}\ + \u{62c}\ + \u{645}\ + \u{643}\ + \u{645}\ + \u{645}\ + \u{62c}\ + \u{62d}\ + \u{64a}\ + \u{62d}\ + \u{62c}\ + \u{64a}\ + \u{645}\ + \u{62c}\ + \u{64a}\ + \u{641}\ + \u{645}\ + \u{64a}\ + \u{628}\ + \u{62d}\ + \u{64a}\ + \u{633}\ + \u{62e}\ + \u{64a}\ + \u{646}\ + \u{62c}\ + \u{64a}\ + \u{635}\ + \u{644}\ + \u{6d2}\ + \u{642}\ + \u{644}\ + \u{6d2}\ + \u{627}\ + \u{644}\ + \u{644}\ + \u{647}\ + \u{627}\ + \u{643}\ + \u{628}\ + \u{631}\ + \u{645}\ + \u{62d}\ + \u{645}\ + \u{62f}\ + \u{635}\ + \u{644}\ + \u{639}\ + \u{645}\ + \u{631}\ + \u{633}\ + \u{648}\ + \u{644}\ + \u{639}\ + \u{644}\ + \u{64a}\ + \u{647}\ + \u{648}\ + \u{633}\ + \u{644}\ + \u{645}\ + \u{635}\ + \u{644}\ + \u{649}\ + \u{635}\ + \u{644}\ + \u{649}\ + \u{20}\ + \u{627}\ + \u{644}\ + \u{644}\ + \u{647}\ + \u{20}\ + \u{639}\ + \u{644}\ + \u{64a}\ + \u{647}\ + \u{20}\ + \u{648}\ + \u{633}\ + \u{644}\ + \u{645}\ + \u{62c}\ + \u{644}\ + \u{20}\ + \u{62c}\ + \u{644}\ + \u{627}\ + \u{644}\ + \u{647}\ + \u{631}\ + \u{6cc}\ + \u{627}\ + \u{644}\ + \u{2c}\ + \u{3001}\ + \u{3a}\ + \u{21}\ + \u{3f}\ + \u{3016}\ + \u{3017}\ + \u{2014}\ + \u{2013}\ + \u{5f}\ + \u{7b}\ + \u{7d}\ + \u{3014}\ + \u{3015}\ + \u{3010}\ + \u{3011}\ + \u{300a}\ + \u{300b}\ + \u{300c}\ + \u{300d}\ + \u{300e}\ + \u{300f}\ + \u{5b}\ + \u{5d}\ + \u{23}\ + \u{26}\ + \u{2a}\ + \u{2d}\ + \u{3c}\ + \u{3e}\ + \u{5c}\ + \u{24}\ + \u{25}\ + \u{40}\ + \u{20}\ + \u{64b}\ + \u{640}\ + \u{64b}\ + \u{20}\ + \u{64c}\ + \u{20}\ + \u{64d}\ + \u{20}\ + \u{64e}\ + \u{640}\ + \u{64e}\ + \u{20}\ + \u{64f}\ + \u{640}\ + \u{64f}\ + \u{20}\ + \u{650}\ + \u{640}\ + \u{650}\ + \u{20}\ + \u{651}\ + \u{640}\ + \u{651}\ + \u{20}\ + \u{652}\ + \u{640}\ + \u{652}\ + \u{621}\ + \u{622}\ + \u{623}\ + \u{624}\ + \u{625}\ + \u{626}\ + \u{627}\ + \u{628}\ + \u{629}\ + \u{62a}\ + \u{62b}\ + \u{62c}\ + \u{62d}\ + \u{62e}\ + \u{62f}\ + \u{630}\ + \u{631}\ + \u{632}\ + \u{633}\ + \u{634}\ + \u{635}\ + \u{636}\ + \u{637}\ + \u{638}\ + \u{639}\ + \u{63a}\ + \u{641}\ + \u{642}\ + \u{643}\ + \u{644}\ + \u{645}\ + \u{646}\ + \u{647}\ + \u{648}\ + \u{64a}\ + \u{644}\ + \u{622}\ + \u{644}\ + \u{623}\ + \u{644}\ + \u{625}\ + \u{644}\ + \u{627}\ + \u{22}\ + \u{27}\ + \u{2f}\ + \u{5e}\ + \u{7c}\ + \u{7e}\ + \u{2985}\ + \u{2986}\ + \u{30fb}\ + \u{30a1}\ + \u{30a3}\ + \u{30a5}\ + \u{30a7}\ + \u{30a9}\ + \u{30e3}\ + \u{30e5}\ + 
\u{30e7}\ + \u{30c3}\ + \u{30fc}\ + \u{30f3}\ + \u{3099}\ + \u{309a}\ + \u{a2}\ + \u{a3}\ + \u{ac}\ + \u{a6}\ + \u{a5}\ + \u{20a9}\ + \u{2502}\ + \u{2190}\ + \u{2191}\ + \u{2192}\ + \u{2193}\ + \u{25a0}\ + \u{25cb}\ + \u{10428}\ + \u{10429}\ + \u{1042a}\ + \u{1042b}\ + \u{1042c}\ + \u{1042d}\ + \u{1042e}\ + \u{1042f}\ + \u{10430}\ + \u{10431}\ + \u{10432}\ + \u{10433}\ + \u{10434}\ + \u{10435}\ + \u{10436}\ + \u{10437}\ + \u{10438}\ + \u{10439}\ + \u{1043a}\ + \u{1043b}\ + \u{1043c}\ + \u{1043d}\ + \u{1043e}\ + \u{1043f}\ + \u{10440}\ + \u{10441}\ + \u{10442}\ + \u{10443}\ + \u{10444}\ + \u{10445}\ + \u{10446}\ + \u{10447}\ + \u{10448}\ + \u{10449}\ + \u{1044a}\ + \u{1044b}\ + \u{1044c}\ + \u{1044d}\ + \u{1044e}\ + \u{1044f}\ + \u{104d8}\ + \u{104d9}\ + \u{104da}\ + \u{104db}\ + \u{104dc}\ + \u{104dd}\ + \u{104de}\ + \u{104df}\ + \u{104e0}\ + \u{104e1}\ + \u{104e2}\ + \u{104e3}\ + \u{104e4}\ + \u{104e5}\ + \u{104e6}\ + \u{104e7}\ + \u{104e8}\ + \u{104e9}\ + \u{104ea}\ + \u{104eb}\ + \u{104ec}\ + \u{104ed}\ + \u{104ee}\ + \u{104ef}\ + \u{104f0}\ + \u{104f1}\ + \u{104f2}\ + \u{104f3}\ + \u{104f4}\ + \u{104f5}\ + \u{104f6}\ + \u{104f7}\ + \u{104f8}\ + \u{104f9}\ + \u{104fa}\ + \u{104fb}\ + \u{10cc0}\ + \u{10cc1}\ + \u{10cc2}\ + \u{10cc3}\ + \u{10cc4}\ + \u{10cc5}\ + \u{10cc6}\ + \u{10cc7}\ + \u{10cc8}\ + \u{10cc9}\ + \u{10cca}\ + \u{10ccb}\ + \u{10ccc}\ + \u{10ccd}\ + \u{10cce}\ + \u{10ccf}\ + \u{10cd0}\ + \u{10cd1}\ + \u{10cd2}\ + \u{10cd3}\ + \u{10cd4}\ + \u{10cd5}\ + \u{10cd6}\ + \u{10cd7}\ + \u{10cd8}\ + \u{10cd9}\ + \u{10cda}\ + \u{10cdb}\ + \u{10cdc}\ + \u{10cdd}\ + \u{10cde}\ + \u{10cdf}\ + \u{10ce0}\ + \u{10ce1}\ + \u{10ce2}\ + \u{10ce3}\ + \u{10ce4}\ + \u{10ce5}\ + \u{10ce6}\ + \u{10ce7}\ + \u{10ce8}\ + \u{10ce9}\ + \u{10cea}\ + \u{10ceb}\ + \u{10cec}\ + \u{10ced}\ + \u{10cee}\ + \u{10cef}\ + \u{10cf0}\ + \u{10cf1}\ + \u{10cf2}\ + \u{118c0}\ + \u{118c1}\ + \u{118c2}\ + \u{118c3}\ + \u{118c4}\ + \u{118c5}\ + \u{118c6}\ + \u{118c7}\ + \u{118c8}\ + \u{118c9}\ + \u{118ca}\ + \u{118cb}\ + \u{118cc}\ + \u{118cd}\ + \u{118ce}\ + \u{118cf}\ + \u{118d0}\ + \u{118d1}\ + \u{118d2}\ + \u{118d3}\ + \u{118d4}\ + \u{118d5}\ + \u{118d6}\ + \u{118d7}\ + \u{118d8}\ + \u{118d9}\ + \u{118da}\ + \u{118db}\ + \u{118dc}\ + \u{118dd}\ + \u{118de}\ + \u{118df}\ + \u{1d157}\ + \u{1d165}\ + \u{1d158}\ + \u{1d165}\ + \u{1d158}\ + \u{1d165}\ + \u{1d16e}\ + \u{1d158}\ + \u{1d165}\ + \u{1d16f}\ + \u{1d158}\ + \u{1d165}\ + \u{1d170}\ + \u{1d158}\ + \u{1d165}\ + \u{1d171}\ + \u{1d158}\ + \u{1d165}\ + \u{1d172}\ + \u{1d1b9}\ + \u{1d165}\ + \u{1d1ba}\ + \u{1d165}\ + \u{1d1b9}\ + \u{1d165}\ + \u{1d16e}\ + \u{1d1ba}\ + \u{1d165}\ + \u{1d16e}\ + \u{1d1b9}\ + \u{1d165}\ + \u{1d16f}\ + \u{1d1ba}\ + \u{1d165}\ + \u{1d16f}\ + \u{131}\ + \u{237}\ + \u{2207}\ + \u{2202}\ + \u{1e922}\ + \u{1e923}\ + \u{1e924}\ + \u{1e925}\ + \u{1e926}\ + \u{1e927}\ + \u{1e928}\ + \u{1e929}\ + \u{1e92a}\ + \u{1e92b}\ + \u{1e92c}\ + \u{1e92d}\ + \u{1e92e}\ + \u{1e92f}\ + \u{1e930}\ + \u{1e931}\ + \u{1e932}\ + \u{1e933}\ + \u{1e934}\ + \u{1e935}\ + \u{1e936}\ + \u{1e937}\ + \u{1e938}\ + \u{1e939}\ + \u{1e93a}\ + \u{1e93b}\ + \u{1e93c}\ + \u{1e93d}\ + \u{1e93e}\ + \u{1e93f}\ + \u{1e940}\ + \u{1e941}\ + \u{1e942}\ + \u{1e943}\ + \u{66e}\ + \u{6a1}\ + \u{66f}\ + \u{30}\ + \u{2c}\ + \u{31}\ + \u{2c}\ + \u{32}\ + \u{2c}\ + \u{33}\ + \u{2c}\ + \u{34}\ + \u{2c}\ + \u{35}\ + \u{2c}\ + \u{36}\ + \u{2c}\ + \u{37}\ + \u{2c}\ + \u{38}\ + \u{2c}\ + \u{39}\ + \u{2c}\ + \u{3014}\ + \u{73}\ + \u{3015}\ + \u{77}\ + \u{7a}\ + \u{68}\ + \u{76}\ + \u{73}\ + \u{64}\ + 
\u{70}\ + \u{70}\ + \u{76}\ + \u{77}\ + \u{63}\ + \u{6d}\ + \u{63}\ + \u{6d}\ + \u{64}\ + \u{64}\ + \u{6a}\ + \u{307b}\ + \u{304b}\ + \u{30b3}\ + \u{30b3}\ + \u{5b57}\ + \u{53cc}\ + \u{30c7}\ + \u{591a}\ + \u{89e3}\ + \u{4ea4}\ + \u{6620}\ + \u{7121}\ + \u{524d}\ + \u{5f8c}\ + \u{518d}\ + \u{65b0}\ + \u{521d}\ + \u{7d42}\ + \u{8ca9}\ + \u{58f0}\ + \u{5439}\ + \u{6f14}\ + \u{6295}\ + \u{6355}\ + \u{904a}\ + \u{6307}\ + \u{6253}\ + \u{7981}\ + \u{7a7a}\ + \u{5408}\ + \u{6e80}\ + \u{7533}\ + \u{5272}\ + \u{55b6}\ + \u{914d}\ + \u{3014}\ + \u{672c}\ + \u{3015}\ + \u{3014}\ + \u{4e09}\ + \u{3015}\ + \u{3014}\ + \u{4e8c}\ + \u{3015}\ + \u{3014}\ + \u{5b89}\ + \u{3015}\ + \u{3014}\ + \u{70b9}\ + \u{3015}\ + \u{3014}\ + \u{6253}\ + \u{3015}\ + \u{3014}\ + \u{76d7}\ + \u{3015}\ + \u{3014}\ + \u{52dd}\ + \u{3015}\ + \u{3014}\ + \u{6557}\ + \u{3015}\ + \u{5f97}\ + \u{53ef}\ + \u{4e3d}\ + \u{4e38}\ + \u{4e41}\ + \u{20122}\ + \u{4f60}\ + \u{4fbb}\ + \u{5002}\ + \u{507a}\ + \u{5099}\ + \u{50cf}\ + \u{349e}\ + \u{2063a}\ + \u{5154}\ + \u{5164}\ + \u{5177}\ + \u{2051c}\ + \u{34b9}\ + \u{5167}\ + \u{2054b}\ + \u{5197}\ + \u{51a4}\ + \u{4ecc}\ + \u{51ac}\ + \u{291df}\ + \u{5203}\ + \u{34df}\ + \u{523b}\ + \u{5246}\ + \u{5277}\ + \u{3515}\ + \u{5305}\ + \u{5306}\ + \u{5349}\ + \u{535a}\ + \u{5373}\ + \u{537d}\ + \u{537f}\ + \u{20a2c}\ + \u{7070}\ + \u{53ca}\ + \u{53df}\ + \u{20b63}\ + \u{53eb}\ + \u{53f1}\ + \u{5406}\ + \u{549e}\ + \u{5438}\ + \u{5448}\ + \u{5468}\ + \u{54a2}\ + \u{54f6}\ + \u{5510}\ + \u{5553}\ + \u{5563}\ + \u{5584}\ + \u{55ab}\ + \u{55b3}\ + \u{55c2}\ + \u{5716}\ + \u{5717}\ + \u{5651}\ + \u{5674}\ + \u{58ee}\ + \u{57ce}\ + \u{57f4}\ + \u{580d}\ + \u{578b}\ + \u{5832}\ + \u{5831}\ + \u{58ac}\ + \u{214e4}\ + \u{58f2}\ + \u{58f7}\ + \u{5906}\ + \u{5922}\ + \u{5962}\ + \u{216a8}\ + \u{216ea}\ + \u{59ec}\ + \u{5a1b}\ + \u{5a27}\ + \u{59d8}\ + \u{5a66}\ + \u{36ee}\ + \u{5b08}\ + \u{5b3e}\ + \u{219c8}\ + \u{5bc3}\ + \u{5bd8}\ + \u{5bf3}\ + \u{21b18}\ + \u{5bff}\ + \u{5c06}\ + \u{3781}\ + \u{5c60}\ + \u{5cc0}\ + \u{5c8d}\ + \u{21de4}\ + \u{5d43}\ + \u{21de6}\ + \u{5d6e}\ + \u{5d6b}\ + \u{5d7c}\ + \u{5de1}\ + \u{5de2}\ + \u{382f}\ + \u{5dfd}\ + \u{5e28}\ + \u{5e3d}\ + \u{5e69}\ + \u{3862}\ + \u{22183}\ + \u{387c}\ + \u{5eb0}\ + \u{5eb3}\ + \u{5eb6}\ + \u{2a392}\ + \u{22331}\ + \u{8201}\ + \u{5f22}\ + \u{38c7}\ + \u{232b8}\ + \u{261da}\ + \u{5f62}\ + \u{5f6b}\ + \u{38e3}\ + \u{5f9a}\ + \u{5fcd}\ + \u{5fd7}\ + \u{5ff9}\ + \u{6081}\ + \u{393a}\ + \u{391c}\ + \u{226d4}\ + \u{60c7}\ + \u{6148}\ + \u{614c}\ + \u{617a}\ + \u{61b2}\ + \u{61a4}\ + \u{61af}\ + \u{61de}\ + \u{6210}\ + \u{621b}\ + \u{625d}\ + \u{62b1}\ + \u{62d4}\ + \u{6350}\ + \u{22b0c}\ + \u{633d}\ + \u{62fc}\ + \u{6368}\ + \u{6383}\ + \u{63e4}\ + \u{22bf1}\ + \u{6422}\ + \u{63c5}\ + \u{63a9}\ + \u{3a2e}\ + \u{6469}\ + \u{647e}\ + \u{649d}\ + \u{6477}\ + \u{3a6c}\ + \u{656c}\ + \u{2300a}\ + \u{65e3}\ + \u{66f8}\ + \u{6649}\ + \u{3b19}\ + \u{3b08}\ + \u{3ae4}\ + \u{5192}\ + \u{5195}\ + \u{6700}\ + \u{669c}\ + \u{80ad}\ + \u{43d9}\ + \u{6721}\ + \u{675e}\ + \u{6753}\ + \u{233c3}\ + \u{3b49}\ + \u{67fa}\ + \u{6785}\ + \u{6852}\ + \u{2346d}\ + \u{688e}\ + \u{681f}\ + \u{6914}\ + \u{6942}\ + \u{69a3}\ + \u{69ea}\ + \u{6aa8}\ + \u{236a3}\ + \u{6adb}\ + \u{3c18}\ + \u{6b21}\ + \u{238a7}\ + \u{6b54}\ + \u{3c4e}\ + \u{6b72}\ + \u{6b9f}\ + \u{6bbb}\ + \u{23a8d}\ + \u{21d0b}\ + \u{23afa}\ + \u{6c4e}\ + \u{23cbc}\ + \u{6cbf}\ + \u{6ccd}\ + \u{6c67}\ + \u{6d16}\ + \u{6d3e}\ + \u{6d69}\ + \u{6d78}\ + \u{6d85}\ + \u{23d1e}\ + \u{6d34}\ + \u{6e2f}\ + 
\u{6e6e}\ + \u{3d33}\ + \u{6ec7}\ + \u{23ed1}\ + \u{6df9}\ + \u{6f6e}\ + \u{23f5e}\ + \u{23f8e}\ + \u{6fc6}\ + \u{7039}\ + \u{701b}\ + \u{3d96}\ + \u{704a}\ + \u{707d}\ + \u{7077}\ + \u{70ad}\ + \u{20525}\ + \u{7145}\ + \u{24263}\ + \u{719c}\ + \u{7228}\ + \u{7250}\ + \u{24608}\ + \u{7280}\ + \u{7295}\ + \u{24735}\ + \u{24814}\ + \u{737a}\ + \u{738b}\ + \u{3eac}\ + \u{73a5}\ + \u{3eb8}\ + \u{7447}\ + \u{745c}\ + \u{7485}\ + \u{74ca}\ + \u{3f1b}\ + \u{7524}\ + \u{24c36}\ + \u{753e}\ + \u{24c92}\ + \u{2219f}\ + \u{7610}\ + \u{24fa1}\ + \u{24fb8}\ + \u{25044}\ + \u{3ffc}\ + \u{4008}\ + \u{250f3}\ + \u{250f2}\ + \u{25119}\ + \u{25133}\ + \u{771e}\ + \u{771f}\ + \u{778b}\ + \u{4046}\ + \u{4096}\ + \u{2541d}\ + \u{784e}\ + \u{40e3}\ + \u{25626}\ + \u{2569a}\ + \u{256c5}\ + \u{79eb}\ + \u{412f}\ + \u{7a4a}\ + \u{7a4f}\ + \u{2597c}\ + \u{25aa7}\ + \u{4202}\ + \u{25bab}\ + \u{7bc6}\ + \u{7bc9}\ + \u{4227}\ + \u{25c80}\ + \u{7cd2}\ + \u{42a0}\ + \u{7ce8}\ + \u{7ce3}\ + \u{7d00}\ + \u{25f86}\ + \u{7d63}\ + \u{4301}\ + \u{7dc7}\ + \u{7e02}\ + \u{7e45}\ + \u{4334}\ + \u{26228}\ + \u{26247}\ + \u{4359}\ + \u{262d9}\ + \u{7f7a}\ + \u{2633e}\ + \u{7f95}\ + \u{7ffa}\ + \u{264da}\ + \u{26523}\ + \u{8060}\ + \u{265a8}\ + \u{8070}\ + \u{2335f}\ + \u{43d5}\ + \u{80b2}\ + \u{8103}\ + \u{440b}\ + \u{813e}\ + \u{5ab5}\ + \u{267a7}\ + \u{267b5}\ + \u{23393}\ + \u{2339c}\ + \u{8204}\ + \u{8f9e}\ + \u{446b}\ + \u{8291}\ + \u{828b}\ + \u{829d}\ + \u{52b3}\ + \u{82b1}\ + \u{82b3}\ + \u{82bd}\ + \u{82e6}\ + \u{26b3c}\ + \u{831d}\ + \u{8363}\ + \u{83ad}\ + \u{8323}\ + \u{83bd}\ + \u{83e7}\ + \u{8353}\ + \u{83ca}\ + \u{83cc}\ + \u{83dc}\ + \u{26c36}\ + \u{26d6b}\ + \u{26cd5}\ + \u{452b}\ + \u{84f1}\ + \u{84f3}\ + \u{8516}\ + \u{273ca}\ + \u{8564}\ + \u{26f2c}\ + \u{455d}\ + \u{4561}\ + \u{26fb1}\ + \u{270d2}\ + \u{456b}\ + \u{8650}\ + \u{8667}\ + \u{8669}\ + \u{86a9}\ + \u{8688}\ + \u{870e}\ + \u{86e2}\ + \u{8728}\ + \u{876b}\ + \u{8786}\ + \u{87e1}\ + \u{8801}\ + \u{45f9}\ + \u{8860}\ + \u{27667}\ + \u{88d7}\ + \u{88de}\ + \u{4635}\ + \u{88fa}\ + \u{34bb}\ + \u{278ae}\ + \u{27966}\ + \u{46be}\ + \u{46c7}\ + \u{8aa0}\ + \u{27ca8}\ + \u{8cab}\ + \u{8cc1}\ + \u{8d1b}\ + \u{8d77}\ + \u{27f2f}\ + \u{20804}\ + \u{8dcb}\ + \u{8dbc}\ + \u{8df0}\ + \u{208de}\ + \u{8ed4}\ + \u{285d2}\ + \u{285ed}\ + \u{9094}\ + \u{90f1}\ + \u{9111}\ + \u{2872e}\ + \u{911b}\ + \u{9238}\ + \u{92d7}\ + \u{92d8}\ + \u{927c}\ + \u{93f9}\ + \u{9415}\ + \u{28bfa}\ + \u{958b}\ + \u{4995}\ + \u{95b7}\ + \u{28d77}\ + \u{49e6}\ + \u{96c3}\ + \u{5db2}\ + \u{9723}\ + \u{29145}\ + \u{2921a}\ + \u{4a6e}\ + \u{4a76}\ + \u{97e0}\ + \u{2940a}\ + \u{4ab2}\ + \u{29496}\ + \u{9829}\ + \u{295b6}\ + \u{98e2}\ + \u{4b33}\ + \u{9929}\ + \u{99a7}\ + \u{99c2}\ + \u{99fe}\ + \u{4bce}\ + \u{29b30}\ + \u{9c40}\ + \u{9cfd}\ + \u{4cce}\ + \u{4ced}\ + \u{9d67}\ + \u{2a0ce}\ + \u{4cf8}\ + \u{2a105}\ + \u{2a20e}\ + \u{2a291}\ + \u{4d56}\ + \u{9efe}\ + \u{9f05}\ + \u{9f0f}\ + \u{9f16}\ + \u{2a600}"; diff --git a/idna/tests/IdnaTest.txt b/idna/tests/IdnaTest.txt new file mode 100644 index 000000000..123a1f061 --- /dev/null +++ b/idna/tests/IdnaTest.txt @@ -0,0 +1,7848 @@ +# IdnaTest.txt +# Date: 2017-06-02, 14:19:52 GMT +# © 2017 Unicode®, Inc. +# Unicode and the Unicode Logo are registered trademarks of Unicode, Inc. in the U.S. and other countries. +# For terms of use, see http://www.unicode.org/terms_of_use.html +# +# Contains test cases for verifying UTS46 conformance. 
For more information, +# see http://www.unicode.org/reports/tr46/ +# +# FORMAT: +# +# This file is in UTF8, with certain characters escaped using the \uXXXX or \x{XXXX} +# convention where they could otherwise have a confusing display. +# These characters include: +# +# - General Categories C, Z, and M +# - Default ignorable characters +# - Bidi categories R, AL, AN +# +# Columns (c1, c2,...) are separated by semicolons. +# Leading and trailing spaces and tabs in each column are ignored. +# Comments are indicated with hash marks. +# +# Column 1: type - T for transitional, N for nontransitional, B for both +# Column 2: source - The source string to be tested +# Column 3: toUnicode - The result of applying toUnicode to the source, using nontransitional. +# A blank value means the same as the source value; a value in [...] is a set of error codes. +# Column 4: toASCII - The result of applying toASCII to the source, using the specified type: T, N, or B. +# A blank value means the same as the toUnicode value; a value in [...] is a set of error codes. +# Column 5: idna2008 - NV8 is only present if the status is valid but the character is excluded by IDNA2008 +# from all domain names for all versions of Unicode. +# XV8 is present when the character is excluded by IDNA2008 for the current version of Unicode. +# These are informative values only. +# +# If the value of toUnicode is the same as source, the column will be blank. +# The line comments currently show visible characters that have been escaped +# (after removing default-ignorables and controls, except for whitespace) +# +# The test is performed with the following flag settings: +# +# VerifyDnsLength: true +# CheckHyphens: true +# CheckBidi: true +# CheckJoiners: true +# UseSTD3ASCIIRules: true +# +# An error in toUnicode or toASCII is indicated by a value in square brackets, such as "[B5 B6]". +# In such a case, the contents is a list of error codes based on the step numbers in UTS46 and IDNA2008, +# with the following formats: +# +# Pn for Section 4 Processing step n +# Vn for 4.1 Validity Criteria step n +# An for 4.2 ToASCII step n +# Bn for Bidi (in IDNA2008) +# Cn for ContextJ (in IDNA2008) +# +# However, these particular error codes are only informative; +# the important feature is whether or not there is an error. +# +# CONFORMANCE: +# +# To test for conformance to UTS46, an implementation must first perform the toUnicode operation +# on the source string, then the toASCII operation (with the indicated type) on the source string. +# Implementations may be more strict than UTS46; thus they may have errors where the file indicates results. +# In particular, an implementation conformant to IDNA2008 would disallow the input for lines marked with NV8. +# +# Moreover, the error codes in the file are informative; implementations need only record that there is an error: +# they need not reproduce those codes. Thus to then verify conformance for the toASCII and toUnicode columns: +# +# - If the file indicates an error, the implementation must also have an error. +# - If the file does not indicate an error, then the implementation must either have an error, +# or must have a matching result. 
+# +# ==================================================================================================== +B; fass.de; ; +T; faß.de; ; fass.de +N; faß.de; ; xn--fa-hia.de +T; Faß.de; faß.de; fass.de +N; Faß.de; faß.de; xn--fa-hia.de +B; xn--fa-hia.de; faß.de; xn--fa-hia.de + +# BIDI TESTS + +B; à\u05D0; [B5 B6]; [B5 B6] # àא +B; a\u0300\u05D0; [B5 B6]; [B5 B6] # àא +B; A\u0300\u05D0; [B5 B6]; [B5 B6] # àא +B; À\u05D0; [B5 B6]; [B5 B6] # àא +B; xn--0ca24w; [B5 B6]; [B5 B6] # àא +B; 0à.\u05D0; [B1]; [B1] # 0à.א +B; 0a\u0300.\u05D0; [B1]; [B1] # 0à.א +B; 0A\u0300.\u05D0; [B1]; [B1] # 0à.א +B; 0À.\u05D0; [B1]; [B1] # 0à.א +B; xn--0-sfa.xn--4db; [B1]; [B1] # 0à.א +B; à.\u05D0\u0308; ; xn--0ca.xn--ssa73l # à.א̈ +B; a\u0300.\u05D0\u0308; à.\u05D0\u0308; xn--0ca.xn--ssa73l # à.א̈ +B; A\u0300.\u05D0\u0308; à.\u05D0\u0308; xn--0ca.xn--ssa73l # à.א̈ +B; À.\u05D0\u0308; à.\u05D0\u0308; xn--0ca.xn--ssa73l # à.א̈ +B; xn--0ca.xn--ssa73l; à.\u05D0\u0308; xn--0ca.xn--ssa73l # à.א̈ +B; à.\u05D00\u0660\u05D0; [B4]; [B4] # à.א0٠א +B; a\u0300.\u05D00\u0660\u05D0; [B4]; [B4] # à.א0٠א +B; A\u0300.\u05D00\u0660\u05D0; [B4]; [B4] # à.א0٠א +B; À.\u05D00\u0660\u05D0; [B4]; [B4] # à.א0٠א +B; xn--0ca.xn--0-zhcb98c; [B4]; [B4] # à.א0٠א +B; \u0308.\u05D0; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ̈.א +B; xn--ssa.xn--4db; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ̈.א +B; à.\u05D00\u0660; [B4]; [B4] # à.א0٠ +B; a\u0300.\u05D00\u0660; [B4]; [B4] # à.א0٠ +B; A\u0300.\u05D00\u0660; [B4]; [B4] # à.א0٠ +B; À.\u05D00\u0660; [B4]; [B4] # à.א0٠ +B; xn--0ca.xn--0-zhc74b; [B4]; [B4] # à.א0٠ +B; àˇ.\u05D0; [B6]; [B6] # àˇ.א +B; a\u0300ˇ.\u05D0; [B6]; [B6] # àˇ.א +B; A\u0300ˇ.\u05D0; [B6]; [B6] # àˇ.א +B; Àˇ.\u05D0; [B6]; [B6] # àˇ.א +B; xn--0ca88g.xn--4db; [B6]; [B6] # àˇ.א +B; à\u0308.\u05D0; ; xn--0ca81i.xn--4db # à̈.א +B; a\u0300\u0308.\u05D0; à\u0308.\u05D0; xn--0ca81i.xn--4db # à̈.א +B; A\u0300\u0308.\u05D0; à\u0308.\u05D0; xn--0ca81i.xn--4db # à̈.א +B; À\u0308.\u05D0; à\u0308.\u05D0; xn--0ca81i.xn--4db # à̈.א +B; xn--0ca81i.xn--4db; à\u0308.\u05D0; xn--0ca81i.xn--4db # à̈.א + +# CONTEXT TESTS + +T; a\u200Cb; [C1]; ab # ab +N; a\u200Cb; [C1]; [C1] # ab +T; A\u200CB; [C1]; ab # ab +N; A\u200CB; [C1]; [C1] # ab +T; A\u200Cb; [C1]; ab # ab +N; A\u200Cb; [C1]; [C1] # ab +B; ab; ; +B; xn--ab-j1t; [C1]; [C1] # ab +T; a\u094D\u200Cb; ; xn--ab-fsf # a्b +N; a\u094D\u200Cb; ; xn--ab-fsf604u # a्b +T; A\u094D\u200CB; a\u094D\u200Cb; xn--ab-fsf # a्b +N; A\u094D\u200CB; a\u094D\u200Cb; xn--ab-fsf604u # a्b +T; A\u094D\u200Cb; a\u094D\u200Cb; xn--ab-fsf # a्b +N; A\u094D\u200Cb; a\u094D\u200Cb; xn--ab-fsf604u # a्b +B; xn--ab-fsf; a\u094Db; xn--ab-fsf # a्b +B; a\u094Db; ; xn--ab-fsf # a्b +B; A\u094DB; a\u094Db; xn--ab-fsf # a्b +B; A\u094Db; a\u094Db; xn--ab-fsf # a्b +B; xn--ab-fsf604u; a\u094D\u200Cb; xn--ab-fsf604u # a्b +T; \u0308\u200C\u0308\u0628b; [B1 C1 V5]; [B1 V5] # ̈̈بb +N; \u0308\u200C\u0308\u0628b; [B1 C1 V5]; [B1 C1 V5] # ̈̈بb +T; \u0308\u200C\u0308\u0628B; [B1 C1 V5]; [B1 V5] # ̈̈بb +N; \u0308\u200C\u0308\u0628B; [B1 C1 V5]; [B1 C1 V5] # ̈̈بb +B; xn--b-bcba413a; [B1 V5]; [B1 V5] # ̈̈بb +B; xn--b-bcba413a2w8b; [B1 C1 V5]; [B1 C1 V5] # ̈̈بb +T; a\u0628\u0308\u200C\u0308; [B5 B6 C1]; [B5 B6] # aب̈̈ +N; a\u0628\u0308\u200C\u0308; [B5 B6 C1]; [B5 B6 C1] # aب̈̈ +T; A\u0628\u0308\u200C\u0308; [B5 B6 C1]; [B5 B6] # aب̈̈ +N; A\u0628\u0308\u200C\u0308; [B5 B6 C1]; [B5 B6 C1] # aب̈̈ +B; xn--a-ccba213a; [B5 B6]; [B5 B6] # aب̈̈ +B; xn--a-ccba213a5w8b; [B5 B6 C1]; [B5 B6 C1] # aب̈̈ +T; a\u0628\u0308\u200C\u0308\u0628b; [B5]; [B5] # aب̈̈بb +N; 
a\u0628\u0308\u200C\u0308\u0628b; [B5]; [B5] # aب̈̈بb +T; A\u0628\u0308\u200C\u0308\u0628B; [B5]; [B5] # aب̈̈بb +N; A\u0628\u0308\u200C\u0308\u0628B; [B5]; [B5] # aب̈̈بb +T; A\u0628\u0308\u200C\u0308\u0628b; [B5]; [B5] # aب̈̈بb +N; A\u0628\u0308\u200C\u0308\u0628b; [B5]; [B5] # aب̈̈بb +B; xn--ab-uuba211bca; [B5]; [B5] # aب̈̈بb +B; xn--ab-uuba211bca8057b; [B5]; [B5] # aب̈̈بb +T; a\u200Db; [C2]; ab # ab +N; a\u200Db; [C2]; [C2] # ab +T; A\u200DB; [C2]; ab # ab +N; A\u200DB; [C2]; [C2] # ab +T; A\u200Db; [C2]; ab # ab +N; A\u200Db; [C2]; [C2] # ab +B; xn--ab-m1t; [C2]; [C2] # ab +T; a\u094D\u200Db; ; xn--ab-fsf # a्b +N; a\u094D\u200Db; ; xn--ab-fsf014u # a्b +T; A\u094D\u200DB; a\u094D\u200Db; xn--ab-fsf # a्b +N; A\u094D\u200DB; a\u094D\u200Db; xn--ab-fsf014u # a्b +T; A\u094D\u200Db; a\u094D\u200Db; xn--ab-fsf # a्b +N; A\u094D\u200Db; a\u094D\u200Db; xn--ab-fsf014u # a्b +B; xn--ab-fsf014u; a\u094D\u200Db; xn--ab-fsf014u # a्b +T; \u0308\u200D\u0308\u0628b; [B1 C2 V5]; [B1 V5] # ̈̈بb +N; \u0308\u200D\u0308\u0628b; [B1 C2 V5]; [B1 C2 V5] # ̈̈بb +T; \u0308\u200D\u0308\u0628B; [B1 C2 V5]; [B1 V5] # ̈̈بb +N; \u0308\u200D\u0308\u0628B; [B1 C2 V5]; [B1 C2 V5] # ̈̈بb +B; xn--b-bcba413a7w8b; [B1 C2 V5]; [B1 C2 V5] # ̈̈بb +T; a\u0628\u0308\u200D\u0308; [B5 B6 C2]; [B5 B6] # aب̈̈ +N; a\u0628\u0308\u200D\u0308; [B5 B6 C2]; [B5 B6 C2] # aب̈̈ +T; A\u0628\u0308\u200D\u0308; [B5 B6 C2]; [B5 B6] # aب̈̈ +N; A\u0628\u0308\u200D\u0308; [B5 B6 C2]; [B5 B6 C2] # aب̈̈ +B; xn--a-ccba213abx8b; [B5 B6 C2]; [B5 B6 C2] # aب̈̈ +T; a\u0628\u0308\u200D\u0308\u0628b; [B5 C2]; [B5] # aب̈̈بb +N; a\u0628\u0308\u200D\u0308\u0628b; [B5 C2]; [B5 C2] # aب̈̈بb +T; A\u0628\u0308\u200D\u0308\u0628B; [B5 C2]; [B5] # aب̈̈بb +N; A\u0628\u0308\u200D\u0308\u0628B; [B5 C2]; [B5 C2] # aب̈̈بb +T; A\u0628\u0308\u200D\u0308\u0628b; [B5 C2]; [B5] # aب̈̈بb +N; A\u0628\u0308\u200D\u0308\u0628b; [B5 C2]; [B5 C2] # aب̈̈بb +B; xn--ab-uuba211bca5157b; [B5 C2]; [B5 C2] # aب̈̈بb + +# SELECTED TESTS + +B; ¡; ; xn--7a; NV8 +B; xn--7a; ¡; xn--7a; NV8 +B; ᧚; ; xn--pkf; XV8 +B; xn--pkf; ᧚; xn--pkf; XV8 +B; 。; [A4_2]; [A4_2] +B; .; [A4_2]; [A4_2] +B; ꭠ; ; xn--3y9a +B; xn--3y9a; ꭠ; xn--3y9a +B; 1234567890ä1234567890123456789012345678901234567890123456; ; [A4_2] +B; 1234567890a\u03081234567890123456789012345678901234567890123456; 1234567890ä1234567890123456789012345678901234567890123456; [A4_2] +B; 1234567890A\u03081234567890123456789012345678901234567890123456; 1234567890ä1234567890123456789012345678901234567890123456; [A4_2] +B; 1234567890Ä1234567890123456789012345678901234567890123456; 1234567890ä1234567890123456789012345678901234567890123456; [A4_2] +B; xn--12345678901234567890123456789012345678901234567890123456-fxe; 1234567890ä1234567890123456789012345678901234567890123456; [A4_2] +B; www.eXample.cOm; www.example.com; +B; Bücher.de; bücher.de; xn--bcher-kva.de +B; Bu\u0308cher.de; bücher.de; xn--bcher-kva.de +B; bu\u0308cher.de; bücher.de; xn--bcher-kva.de +B; bücher.de; ; xn--bcher-kva.de +B; BÜCHER.DE; bücher.de; xn--bcher-kva.de +B; BU\u0308CHER.DE; bücher.de; xn--bcher-kva.de +B; xn--bcher-kva.de; bücher.de; xn--bcher-kva.de +B; ÖBB; öbb; xn--bb-eka +B; O\u0308BB; öbb; xn--bb-eka +B; o\u0308bb; öbb; xn--bb-eka +B; öbb; ; xn--bb-eka +B; Öbb; öbb; xn--bb-eka +B; O\u0308bb; öbb; xn--bb-eka +B; xn--bb-eka; öbb; xn--bb-eka +T; βόλος.com; ; xn--nxasmq6b.com +N; βόλος.com; ; xn--nxasmm1c.com +T; βο\u0301λος.com; βόλος.com; xn--nxasmq6b.com +N; βο\u0301λος.com; βόλος.com; xn--nxasmm1c.com +B; ΒΟ\u0301ΛΟΣ.COM; βόλοσ.com; xn--nxasmq6b.com +B; ΒΌΛΟΣ.COM; 
βόλοσ.com; xn--nxasmq6b.com +B; βόλοσ.com; ; xn--nxasmq6b.com +B; βο\u0301λοσ.com; βόλοσ.com; xn--nxasmq6b.com +B; Βο\u0301λοσ.com; βόλοσ.com; xn--nxasmq6b.com +B; Βόλοσ.com; βόλοσ.com; xn--nxasmq6b.com +B; xn--nxasmq6b.com; βόλοσ.com; xn--nxasmq6b.com +T; Βο\u0301λος.com; βόλος.com; xn--nxasmq6b.com +N; Βο\u0301λος.com; βόλος.com; xn--nxasmm1c.com +T; Βόλος.com; βόλος.com; xn--nxasmq6b.com +N; Βόλος.com; βόλος.com; xn--nxasmm1c.com +B; xn--nxasmm1c.com; βόλος.com; xn--nxasmm1c.com +B; xn--nxasmm1c; βόλος; xn--nxasmm1c +T; βόλος; ; xn--nxasmq6b +N; βόλος; ; xn--nxasmm1c +T; βο\u0301λος; βόλος; xn--nxasmq6b +N; βο\u0301λος; βόλος; xn--nxasmm1c +B; ΒΟ\u0301ΛΟΣ; βόλοσ; xn--nxasmq6b +B; ΒΌΛΟΣ; βόλοσ; xn--nxasmq6b +B; βόλοσ; ; xn--nxasmq6b +B; βο\u0301λοσ; βόλοσ; xn--nxasmq6b +B; Βο\u0301λοσ; βόλοσ; xn--nxasmq6b +B; Βόλοσ; βόλοσ; xn--nxasmq6b +B; xn--nxasmq6b; βόλοσ; xn--nxasmq6b +T; Βόλος; βόλος; xn--nxasmq6b +N; Βόλος; βόλος; xn--nxasmm1c +T; Βο\u0301λος; βόλος; xn--nxasmq6b +N; Βο\u0301λος; βόλος; xn--nxasmm1c +T; www.ශ\u0DCA\u200Dර\u0DD3.com; ; www.xn--10cl1a0b.com # www.ශ්රී.com +N; www.ශ\u0DCA\u200Dර\u0DD3.com; ; www.xn--10cl1a0b660p.com # www.ශ්රී.com +T; WWW.ශ\u0DCA\u200Dර\u0DD3.COM; www.ශ\u0DCA\u200Dර\u0DD3.com; www.xn--10cl1a0b.com # www.ශ්රී.com +N; WWW.ශ\u0DCA\u200Dර\u0DD3.COM; www.ශ\u0DCA\u200Dර\u0DD3.com; www.xn--10cl1a0b660p.com # www.ශ්රී.com +T; Www.ශ\u0DCA\u200Dර\u0DD3.com; www.ශ\u0DCA\u200Dර\u0DD3.com; www.xn--10cl1a0b.com # www.ශ්රී.com +N; Www.ශ\u0DCA\u200Dර\u0DD3.com; www.ශ\u0DCA\u200Dර\u0DD3.com; www.xn--10cl1a0b660p.com # www.ශ්රී.com +B; www.xn--10cl1a0b.com; www.ශ\u0DCAර\u0DD3.com; www.xn--10cl1a0b.com # www.ශ්රී.com +B; www.ශ\u0DCAර\u0DD3.com; ; www.xn--10cl1a0b.com # www.ශ්රී.com +B; WWW.ශ\u0DCAර\u0DD3.COM; www.ශ\u0DCAර\u0DD3.com; www.xn--10cl1a0b.com # www.ශ්රී.com +B; Www.ශ\u0DCAර\u0DD3.com; www.ශ\u0DCAර\u0DD3.com; www.xn--10cl1a0b.com # www.ශ්රී.com +B; www.xn--10cl1a0b660p.com; www.ශ\u0DCA\u200Dර\u0DD3.com; www.xn--10cl1a0b660p.com # www.ශ්රී.com +T; \u0646\u0627\u0645\u0647\u200C\u0627\u06CC; ; xn--mgba3gch31f # نامهای +N; \u0646\u0627\u0645\u0647\u200C\u0627\u06CC; ; xn--mgba3gch31f060k # نامهای +B; xn--mgba3gch31f; \u0646\u0627\u0645\u0647\u0627\u06CC; xn--mgba3gch31f # نامهای +B; \u0646\u0627\u0645\u0647\u0627\u06CC; ; xn--mgba3gch31f # نامهای +B; xn--mgba3gch31f060k; \u0646\u0627\u0645\u0647\u200C\u0627\u06CC; xn--mgba3gch31f060k # نامهای +B; xn--mgba3gch31f060k.com; \u0646\u0627\u0645\u0647\u200C\u0627\u06CC.com; xn--mgba3gch31f060k.com # نامهای.com +T; \u0646\u0627\u0645\u0647\u200C\u0627\u06CC.com; ; xn--mgba3gch31f.com # نامهای.com +N; \u0646\u0627\u0645\u0647\u200C\u0627\u06CC.com; ; xn--mgba3gch31f060k.com # نامهای.com +T; \u0646\u0627\u0645\u0647\u200C\u0627\u06CC.COM; \u0646\u0627\u0645\u0647\u200C\u0627\u06CC.com; xn--mgba3gch31f.com # نامهای.com +N; \u0646\u0627\u0645\u0647\u200C\u0627\u06CC.COM; \u0646\u0627\u0645\u0647\u200C\u0627\u06CC.com; xn--mgba3gch31f060k.com # نامهای.com +T; \u0646\u0627\u0645\u0647\u200C\u0627\u06CC.Com; \u0646\u0627\u0645\u0647\u200C\u0627\u06CC.com; xn--mgba3gch31f.com # نامهای.com +N; \u0646\u0627\u0645\u0647\u200C\u0627\u06CC.Com; \u0646\u0627\u0645\u0647\u200C\u0627\u06CC.com; xn--mgba3gch31f060k.com # نامهای.com +B; xn--mgba3gch31f.com; \u0646\u0627\u0645\u0647\u0627\u06CC.com; xn--mgba3gch31f.com # نامهای.com +B; \u0646\u0627\u0645\u0647\u0627\u06CC.com; ; xn--mgba3gch31f.com # نامهای.com +B; \u0646\u0627\u0645\u0647\u0627\u06CC.COM; \u0646\u0627\u0645\u0647\u0627\u06CC.com; xn--mgba3gch31f.com # نامهای.com +B; 
\u0646\u0627\u0645\u0647\u0627\u06CC.Com; \u0646\u0627\u0645\u0647\u0627\u06CC.com; xn--mgba3gch31f.com # نامهای.com +B; a.b.c。d。; a.b.c.d.; +B; a.b.c。d。; a.b.c.d.; +B; A.B.C。D。; a.b.c.d.; +B; A.b.c。D。; a.b.c.d.; +B; a.b.c.d.; ; +B; A.B.C。D。; a.b.c.d.; +B; A.b.c。D。; a.b.c.d.; +B; U\u0308.xn--tda; ü.ü; xn--tda.xn--tda +B; Ü.xn--tda; ü.ü; xn--tda.xn--tda +B; ü.xn--tda; ü.ü; xn--tda.xn--tda +B; u\u0308.xn--tda; ü.ü; xn--tda.xn--tda +B; U\u0308.XN--TDA; ü.ü; xn--tda.xn--tda +B; Ü.XN--TDA; ü.ü; xn--tda.xn--tda +B; Ü.xn--Tda; ü.ü; xn--tda.xn--tda +B; U\u0308.xn--Tda; ü.ü; xn--tda.xn--tda +B; xn--tda.xn--tda; ü.ü; xn--tda.xn--tda +B; ü.ü; ; xn--tda.xn--tda +B; u\u0308.u\u0308; ü.ü; xn--tda.xn--tda +B; U\u0308.U\u0308; ü.ü; xn--tda.xn--tda +B; Ü.Ü; ü.ü; xn--tda.xn--tda +B; Ü.ü; ü.ü; xn--tda.xn--tda +B; U\u0308.u\u0308; ü.ü; xn--tda.xn--tda +B; xn--u-ccb; [V1]; [V1] # ü +B; a⒈com; [P1 V6]; [P1 V6] +B; a1.com; ; +B; A⒈COM; [P1 V6]; [P1 V6] +B; A⒈Com; [P1 V6]; [P1 V6] +B; xn--acom-0w1b; [V6]; [V6] +B; xn--a-ecp.ru; [V6]; [V6] +B; xn--0.pt; [A3]; [A3] +B; xn--a.pt; [V6]; [V6] # .pt +B; xn--a-Ä.pt; [A3]; [A3] +B; xn--a-A\u0308.pt; [A3]; [A3] +B; xn--a-a\u0308.pt; [A3]; [A3] +B; xn--a-ä.pt; [A3]; [A3] +B; XN--A-Ä.PT; [A3]; [A3] +B; XN--A-A\u0308.PT; [A3]; [A3] +B; Xn--A-A\u0308.pt; [A3]; [A3] +B; Xn--A-Ä.pt; [A3]; [A3] +B; xn--xn--a--gua.pt; [V2]; [V2] +B; 日本語。JP; 日本語.jp; xn--wgv71a119e.jp +B; 日本語。JP; 日本語.jp; xn--wgv71a119e.jp +B; 日本語。jp; 日本語.jp; xn--wgv71a119e.jp +B; 日本語。Jp; 日本語.jp; xn--wgv71a119e.jp +B; xn--wgv71a119e.jp; 日本語.jp; xn--wgv71a119e.jp +B; 日本語.jp; ; xn--wgv71a119e.jp +B; 日本語.JP; 日本語.jp; xn--wgv71a119e.jp +B; 日本語.Jp; 日本語.jp; xn--wgv71a119e.jp +B; 日本語。jp; 日本語.jp; xn--wgv71a119e.jp +B; 日本語。Jp; 日本語.jp; xn--wgv71a119e.jp +B; ☕; ; xn--53h; NV8 +B; xn--53h; ☕; xn--53h; NV8 +T; 1.aß\u200C\u200Db\u200C\u200Dcßßßßdςσßßßßßßßßeßßßßßßßßßßxßßßßßßßßßßyßßßßßßßß\u0302ßz; [C1 C2]; [A4_2] # 1.aßbcßßßßdςσßßßßßßßßeßßßßßßßßßßxßßßßßßßßßßyßßßßßßßß̂ßz +N; 1.aß\u200C\u200Db\u200C\u200Dcßßßßdςσßßßßßßßßeßßßßßßßßßßxßßßßßßßßßßyßßßßßßßß\u0302ßz; [C1 C2]; [C1 C2 A4_2] # 1.aßbcßßßßdςσßßßßßßßßeßßßßßßßßßßxßßßßßßßßßßyßßßßßßßß̂ßz +T; 1.ASS\u200C\u200DB\u200C\u200DCSSSSSSSSDΣΣSSSSSSSSSSSSSSSSESSSSSSSSSSSSSSSSSSSSXSSSSSSSSSSSSSSSSSSSSYSSSSSSSSSSSSSSSS\u0302SSZ; [C1 C2]; [A4_2] # 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz +N; 1.ASS\u200C\u200DB\u200C\u200DCSSSSSSSSDΣΣSSSSSSSSSSSSSSSSESSSSSSSSSSSSSSSSSSSSXSSSSSSSSSSSSSSSSSSSSYSSSSSSSSSSSSSSSS\u0302SSZ; [C1 C2]; [C1 C2 A4_2] # 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz +T; 1.ASS\u200C\u200DB\u200C\u200DCSSSSSSSSDΣΣSSSSSSSSSSSSSSSSESSSSSSSSSSSSSSSSSSSSXSSSSSSSSSSSSSSSSSSSSYSSSSSSSSSSSSSSSŜSSZ; [C1 C2]; [A4_2] # 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz +N; 1.ASS\u200C\u200DB\u200C\u200DCSSSSSSSSDΣΣSSSSSSSSSSSSSSSSESSSSSSSSSSSSSSSSSSSSXSSSSSSSSSSSSSSSSSSSSYSSSSSSSSSSSSSSSŜSSZ; [C1 C2]; [C1 C2 A4_2] # 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz +T; 1.ass\u200C\u200Db\u200C\u200Dcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz; [C1 C2]; [A4_2] # 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz +N; 1.ass\u200C\u200Db\u200C\u200Dcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz; [C1 C2]; [C1 C2 A4_2] # 
1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz +T; 1.ass\u200C\u200Db\u200C\u200Dcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssyssssssssssssssss\u0302ssz; [C1 C2]; [A4_2] # 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz +N; 1.ass\u200C\u200Db\u200C\u200Dcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssyssssssssssssssss\u0302ssz; [C1 C2]; [C1 C2 A4_2] # 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz +T; 1.Ass\u200C\u200Db\u200C\u200Dcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssyssssssssssssssss\u0302ssz; [C1 C2]; [A4_2] # 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz +N; 1.Ass\u200C\u200Db\u200C\u200Dcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssyssssssssssssssss\u0302ssz; [C1 C2]; [C1 C2 A4_2] # 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz +T; 1.Ass\u200C\u200Db\u200C\u200Dcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz; [C1 C2]; [A4_2] # 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz +N; 1.Ass\u200C\u200Db\u200C\u200Dcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz; [C1 C2]; [C1 C2 A4_2] # 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz +B; 1.xn--assbcssssssssdssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssssz-pxq1419aa; 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz; [A4_2] +B; 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz; ; [A4_2] +B; 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssyssssssssssssssss\u0302ssz; 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz; [A4_2] +B; 1.ASSBCSSSSSSSSDΣΣSSSSSSSSSSSSSSSSESSSSSSSSSSSSSSSSSSSSXSSSSSSSSSSSSSSSSSSSSYSSSSSSSSSSSSSSSS\u0302SSZ; 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz; [A4_2] +B; 1.ASSBCSSSSSSSSDΣΣSSSSSSSSSSSSSSSSESSSSSSSSSSSSSSSSSSSSXSSSSSSSSSSSSSSSSSSSSYSSSSSSSSSSSSSSSŜSSZ; 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz; [A4_2] +B; 1.Assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz; 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz; [A4_2] +B; 1.Assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssyssssssssssssssss\u0302ssz; 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz; [A4_2] +B; 1.xn--assbcssssssssdssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssssz-pxq1419aa69989dba9gc; [C1 C2]; [C1 C2 A4_2] # 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz +T; 1.Aß\u200C\u200Db\u200C\u200Dcßßßßdςσßßßßßßßßeßßßßßßßßßßxßßßßßßßßßßyßßßßßßßß\u0302ßz; [C1 C2]; [A4_2] # 1.aßbcßßßßdςσßßßßßßßßeßßßßßßßßßßxßßßßßßßßßßyßßßßßßßß̂ßz +N; 1.Aß\u200C\u200Db\u200C\u200Dcßßßßdςσßßßßßßßßeßßßßßßßßßßxßßßßßßßßßßyßßßßßßßß\u0302ßz; [C1 C2]; [C1 C2 A4_2] # 1.aßbcßßßßdςσßßßßßßßßeßßßßßßßßßßxßßßßßßßßßßyßßßßßßßß̂ßz +B; 
1.xn--abcdexyz-qyacaaabaaaaaaabaaaaaaaaabaaaaaaaaabaaaaaaaa010ze2isb1140zba8cc; [C1 C2]; [C1 C2 A4_2] # 1.aßbcßßßßdςσßßßßßßßßeßßßßßßßßßßxßßßßßßßßßßyßßßßßßßß̂ßz +T; \u200Cx\u200Dn\u200C-\u200D-bß; [C1 C2]; xn--bss # xn--bß +N; \u200Cx\u200Dn\u200C-\u200D-bß; [C1 C2]; [C1 C2] # xn--bß +T; \u200CX\u200DN\u200C-\u200D-BSS; [C1 C2]; xn--bss # xn--bss +N; \u200CX\u200DN\u200C-\u200D-BSS; [C1 C2]; [C1 C2] # xn--bss +T; \u200Cx\u200Dn\u200C-\u200D-bss; [C1 C2]; xn--bss # xn--bss +N; \u200Cx\u200Dn\u200C-\u200D-bss; [C1 C2]; [C1 C2] # xn--bss +T; \u200CX\u200Dn\u200C-\u200D-Bss; [C1 C2]; xn--bss # xn--bss +N; \u200CX\u200Dn\u200C-\u200D-Bss; [C1 C2]; [C1 C2] # xn--bss +B; xn--bss; 夙; xn--bss +B; 夙; ; xn--bss +B; xn--xn--bss-7z6ccid; [C1 C2]; [C1 C2] # xn--bss +T; \u200CX\u200Dn\u200C-\u200D-Bß; [C1 C2]; xn--bss # xn--bß +N; \u200CX\u200Dn\u200C-\u200D-Bß; [C1 C2]; [C1 C2] # xn--bß +B; xn--xn--b-pqa5796ccahd; [C1 C2]; [C1 C2] # xn--bß +B; ˣ\u034Fℕ\u200B﹣\u00AD-\u180Cℬ\uFE00ſ\u2064𝔰󠇯ffl; 夡夞夜夙; xn--bssffl +B; x\u034FN\u200B-\u00AD-\u180CB\uFE00s\u2064s󠇯ffl; 夡夞夜夙; xn--bssffl +B; x\u034Fn\u200B-\u00AD-\u180Cb\uFE00s\u2064s󠇯ffl; 夡夞夜夙; xn--bssffl +B; X\u034FN\u200B-\u00AD-\u180CB\uFE00S\u2064S󠇯FFL; 夡夞夜夙; xn--bssffl +B; X\u034Fn\u200B-\u00AD-\u180CB\uFE00s\u2064s󠇯ffl; 夡夞夜夙; xn--bssffl +B; xn--bssffl; 夡夞夜夙; xn--bssffl +B; 夡夞夜夙; ; xn--bssffl +B; ˣ\u034Fℕ\u200B﹣\u00AD-\u180Cℬ\uFE00S\u2064𝔰󠇯FFL; 夡夞夜夙; xn--bssffl +B; x\u034FN\u200B-\u00AD-\u180CB\uFE00S\u2064s󠇯FFL; 夡夞夜夙; xn--bssffl +B; ˣ\u034Fℕ\u200B﹣\u00AD-\u180Cℬ\uFE00s\u2064𝔰󠇯ffl; 夡夞夜夙; xn--bssffl +B; 123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; ; +B; 123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901.; ; +B; 123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890123.12345678901234567890123456789012345678901234567890123456789012; ; [A4_1] +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901234.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890; ; [A4_2] +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901234.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890.; ; [A4_2] +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901234.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; ; [A4_1 A4_2] +B; ä1234567890123456789012345678901234567890123456789012345; ; xn--1234567890123456789012345678901234567890123456789012345-9te +B; a\u03081234567890123456789012345678901234567890123456789012345; ä1234567890123456789012345678901234567890123456789012345; xn--1234567890123456789012345678901234567890123456789012345-9te +B; A\u03081234567890123456789012345678901234567890123456789012345; 
ä1234567890123456789012345678901234567890123456789012345; xn--1234567890123456789012345678901234567890123456789012345-9te +B; Ä1234567890123456789012345678901234567890123456789012345; ä1234567890123456789012345678901234567890123456789012345; xn--1234567890123456789012345678901234567890123456789012345-9te +B; xn--1234567890123456789012345678901234567890123456789012345-9te; ä1234567890123456789012345678901234567890123456789012345; xn--1234567890123456789012345678901234567890123456789012345-9te +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; ; 123456789012345678901234567890123456789012345678901234567890123.xn--1234567890123456789012345678901234567890123456789012345-kue.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901 +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890a\u0308123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; 123456789012345678901234567890123456789012345678901234567890123.xn--1234567890123456789012345678901234567890123456789012345-kue.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901 +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890A\u0308123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; 123456789012345678901234567890123456789012345678901234567890123.xn--1234567890123456789012345678901234567890123456789012345-kue.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901 +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890Ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; 123456789012345678901234567890123456789012345678901234567890123.xn--1234567890123456789012345678901234567890123456789012345-kue.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901 +B; 123456789012345678901234567890123456789012345678901234567890123.xn--1234567890123456789012345678901234567890123456789012345-kue.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; 
123456789012345678901234567890123456789012345678901234567890123.1234567890ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; 123456789012345678901234567890123456789012345678901234567890123.xn--1234567890123456789012345678901234567890123456789012345-kue.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901 +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901.; ; 123456789012345678901234567890123456789012345678901234567890123.xn--1234567890123456789012345678901234567890123456789012345-kue.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901. +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890a\u0308123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901.; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901.; 123456789012345678901234567890123456789012345678901234567890123.xn--1234567890123456789012345678901234567890123456789012345-kue.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901. +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890A\u0308123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901.; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901.; 123456789012345678901234567890123456789012345678901234567890123.xn--1234567890123456789012345678901234567890123456789012345-kue.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901. +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890Ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901.; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901.; 123456789012345678901234567890123456789012345678901234567890123.xn--1234567890123456789012345678901234567890123456789012345-kue.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901. 
+B; 123456789012345678901234567890123456789012345678901234567890123.xn--1234567890123456789012345678901234567890123456789012345-kue.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901.; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901.; 123456789012345678901234567890123456789012345678901234567890123.xn--1234567890123456789012345678901234567890123456789012345-kue.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901. +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.12345678901234567890123456789012345678901234567890123456789012; ; [A4_1] +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890a\u0308123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.12345678901234567890123456789012345678901234567890123456789012; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.12345678901234567890123456789012345678901234567890123456789012; [A4_1] +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890A\u0308123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.12345678901234567890123456789012345678901234567890123456789012; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.12345678901234567890123456789012345678901234567890123456789012; [A4_1] +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890Ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.12345678901234567890123456789012345678901234567890123456789012; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.12345678901234567890123456789012345678901234567890123456789012; [A4_1] +B; 123456789012345678901234567890123456789012345678901234567890123.xn--1234567890123456789012345678901234567890123456789012345-kue.123456789012345678901234567890123456789012345678901234567890123.12345678901234567890123456789012345678901234567890123456789012; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.12345678901234567890123456789012345678901234567890123456789012; [A4_1] +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890; ; [A4_2] +B; 
123456789012345678901234567890123456789012345678901234567890123.1234567890a\u03081234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890; [A4_2] +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890A\u03081234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890; [A4_2] +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890Ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890; [A4_2] +B; 123456789012345678901234567890123456789012345678901234567890123.xn--12345678901234567890123456789012345678901234567890123456-fxe.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890; [A4_2] +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890.; ; [A4_2] +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890a\u03081234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890.; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890.; [A4_2] +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890A\u03081234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890.; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890.; [A4_2] +B; 
123456789012345678901234567890123456789012345678901234567890123.1234567890Ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890.; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890.; [A4_2] +B; 123456789012345678901234567890123456789012345678901234567890123.xn--12345678901234567890123456789012345678901234567890123456-fxe.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890.; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890.; [A4_2] +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; ; [A4_1 A4_2] +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890a\u03081234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; [A4_1 A4_2] +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890A\u03081234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; [A4_1 A4_2] +B; 123456789012345678901234567890123456789012345678901234567890123.1234567890Ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; [A4_1 A4_2] +B; 123456789012345678901234567890123456789012345678901234567890123.xn--12345678901234567890123456789012345678901234567890123456-fxe.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; [A4_1 A4_2] +B; a.b..-q--a-.e; [V2 V3 A4_2]; [V2 V3 A4_2] +B; a.b..-q--ä-.e; [V2 V3 A4_2]; [V2 V3 A4_2] +B; a.b..-q--a\u0308-.e; [V2 V3 A4_2]; [V2 V3 A4_2] +B; A.B..-Q--A\u0308-.E; [V2 V3 A4_2]; [V2 V3 
A4_2] +B; A.B..-Q--Ä-.E; [V2 V3 A4_2]; [V2 V3 A4_2] +B; A.b..-Q--Ä-.E; [V2 V3 A4_2]; [V2 V3 A4_2] +B; A.b..-Q--A\u0308-.E; [V2 V3 A4_2]; [V2 V3 A4_2] +B; a.b..xn---q----jra.e; [V2 V3 A4_2]; [V2 V3 A4_2] +B; a..c; [A4_2]; [A4_2] +B; a.-b.; [V3]; [V3] +B; a.b-.c; [V3]; [V3] +B; a.-.c; [V3]; [V3] +B; a.bc--de.f; [V2]; [V2] +B; ä.\u00AD.c; [A4_2]; [A4_2] +B; a\u0308.\u00AD.c; [A4_2]; [A4_2] +B; A\u0308.\u00AD.C; [A4_2]; [A4_2] +B; Ä.\u00AD.C; [A4_2]; [A4_2] +B; xn--4ca..c; [A4_2]; [A4_2] +B; ä.-b.; [V3]; [V3] +B; a\u0308.-b.; [V3]; [V3] +B; A\u0308.-B.; [V3]; [V3] +B; Ä.-B.; [V3]; [V3] +B; xn--4ca.-b.; [V3]; [V3] +B; ä.b-.c; [V3]; [V3] +B; a\u0308.b-.c; [V3]; [V3] +B; A\u0308.B-.C; [V3]; [V3] +B; Ä.B-.C; [V3]; [V3] +B; Ä.b-.C; [V3]; [V3] +B; A\u0308.b-.C; [V3]; [V3] +B; xn--4ca.b-.c; [V3]; [V3] +B; ä.-.c; [V3]; [V3] +B; a\u0308.-.c; [V3]; [V3] +B; A\u0308.-.C; [V3]; [V3] +B; Ä.-.C; [V3]; [V3] +B; xn--4ca.-.c; [V3]; [V3] +B; ä.bc--de.f; [V2]; [V2] +B; a\u0308.bc--de.f; [V2]; [V2] +B; A\u0308.BC--DE.F; [V2]; [V2] +B; Ä.BC--DE.F; [V2]; [V2] +B; Ä.bc--De.f; [V2]; [V2] +B; A\u0308.bc--De.f; [V2]; [V2] +B; xn--4ca.bc--de.f; [V2]; [V2] +B; a.b.\u0308c.d; [V5]; [V5] # a.b.̈c.d +B; A.B.\u0308C.D; [V5]; [V5] # a.b.̈c.d +B; A.b.\u0308c.d; [V5]; [V5] # a.b.̈c.d +B; a.b.xn--c-bcb.d; [V5]; [V5] # a.b.̈c.d +B; A0; a0; +B; 0A; 0a; +B; 0A.\u05D0; [B1]; [B1] # 0a.א +B; 0a.\u05D0; [B1]; [B1] # 0a.א +B; 0a.xn--4db; [B1]; [B1] # 0a.א +B; c.xn--0-eha.xn--4db; [B1]; [B1] # c.0ü.א +B; b-.\u05D0; [B6 V3]; [B6 V3] # b-.א +B; B-.\u05D0; [B6 V3]; [B6 V3] # b-.א +B; b-.xn--4db; [B6 V3]; [B6 V3] # b-.א +B; d.xn----dha.xn--4db; [B6 V3]; [B6 V3] # d.ü-.א +B; a\u05D0; [B5 B6]; [B5 B6] # aא +B; A\u05D0; [B5 B6]; [B5 B6] # aא +B; xn--a-0hc; [B5 B6]; [B5 B6] # aא +B; \u05D0\u05C7; ; xn--vdbr # אׇ +B; xn--vdbr; \u05D0\u05C7; xn--vdbr # אׇ +B; \u05D09\u05C7; ; xn--9-ihcz # א9ׇ +B; xn--9-ihcz; \u05D09\u05C7; xn--9-ihcz # א9ׇ +B; \u05D0a\u05C7; [B2 B3]; [B2 B3] # אaׇ +B; \u05D0A\u05C7; [B2 B3]; [B2 B3] # אaׇ +B; xn--a-ihcz; [B2 B3]; [B2 B3] # אaׇ +B; \u05D0\u05EA; ; xn--4db6c # את +B; xn--4db6c; \u05D0\u05EA; xn--4db6c # את +B; \u05D0\u05F3\u05EA; ; xn--4db6c0a # א׳ת +B; xn--4db6c0a; \u05D0\u05F3\u05EA; xn--4db6c0a # א׳ת +B; a\u05D0Tz; [B5]; [B5] # aאtz +B; a\u05D0tz; [B5]; [B5] # aאtz +B; A\u05D0TZ; [B5]; [B5] # aאtz +B; A\u05D0tz; [B5]; [B5] # aאtz +B; xn--atz-qpe; [B5]; [B5] # aאtz +B; \u05D0T\u05EA; [B2]; [B2] # אtת +B; \u05D0t\u05EA; [B2]; [B2] # אtת +B; xn--t-zhc3f; [B2]; [B2] # אtת +B; \u05D07\u05EA; ; xn--7-zhc3f # א7ת +B; xn--7-zhc3f; \u05D07\u05EA; xn--7-zhc3f # א7ת +B; \u05D0\u0667\u05EA; ; xn--4db6c6t # א٧ת +B; xn--4db6c6t; \u05D0\u0667\u05EA; xn--4db6c6t # א٧ת +B; a7\u0667z; [B5]; [B5] # a7٧z +B; A7\u0667Z; [B5]; [B5] # a7٧z +B; A7\u0667z; [B5]; [B5] # a7٧z +B; xn--a7z-06e; [B5]; [B5] # a7٧z +B; \u05D07\u0667\u05EA; [B4]; [B4] # א7٧ת +B; xn--7-zhc3fty; [B4]; [B4] # א7٧ת +T; ஹ\u0BCD\u200D; ; xn--dmc4b # ஹ் +N; ஹ\u0BCD\u200D; ; xn--dmc4b194h # ஹ் +B; xn--dmc4b; ஹ\u0BCD; xn--dmc4b # ஹ் +B; ஹ\u0BCD; ; xn--dmc4b # ஹ் +B; xn--dmc4b194h; ஹ\u0BCD\u200D; xn--dmc4b194h # ஹ் +T; ஹ\u200D; [C2]; xn--dmc # ஹ +N; ஹ\u200D; [C2]; [C2] # ஹ +B; xn--dmc; ஹ; xn--dmc +B; ஹ; ; xn--dmc +B; xn--dmc225h; [C2]; [C2] # ஹ +T; \u200D; [C2]; [A4_2] # +N; \u200D; [C2]; [C2] # +B; ; [A4_2]; [A4_2] +B; xn--1ug; [C2]; [C2] # +T; ஹ\u0BCD\u200C; ; xn--dmc4b # ஹ் +N; ஹ\u0BCD\u200C; ; xn--dmc4by94h # ஹ் +B; xn--dmc4by94h; ஹ\u0BCD\u200C; xn--dmc4by94h # ஹ் +T; ஹ\u200C; [C1]; xn--dmc # ஹ +N; ஹ\u200C; [C1]; [C1] # ஹ +B; xn--dmc025h; [C1]; [C1] # ஹ +T; \u200C; 
[C1]; [A4_2] # +N; \u200C; [C1]; [C1] # +B; xn--0ug; [C1]; [C1] # +T; \u0644\u0670\u200C\u06ED\u06EF; ; xn--ghb2gxqia # لٰۭۯ +N; \u0644\u0670\u200C\u06ED\u06EF; ; xn--ghb2gxqia7523a # لٰۭۯ +B; xn--ghb2gxqia; \u0644\u0670\u06ED\u06EF; xn--ghb2gxqia # لٰۭۯ +B; \u0644\u0670\u06ED\u06EF; ; xn--ghb2gxqia # لٰۭۯ +B; xn--ghb2gxqia7523a; \u0644\u0670\u200C\u06ED\u06EF; xn--ghb2gxqia7523a # لٰۭۯ +T; \u0644\u0670\u200C\u06EF; ; xn--ghb2g3q # لٰۯ +N; \u0644\u0670\u200C\u06EF; ; xn--ghb2g3qq34f # لٰۯ +B; xn--ghb2g3q; \u0644\u0670\u06EF; xn--ghb2g3q # لٰۯ +B; \u0644\u0670\u06EF; ; xn--ghb2g3q # لٰۯ +B; xn--ghb2g3qq34f; \u0644\u0670\u200C\u06EF; xn--ghb2g3qq34f # لٰۯ +T; \u0644\u200C\u06ED\u06EF; ; xn--ghb25aga # لۭۯ +N; \u0644\u200C\u06ED\u06EF; ; xn--ghb25aga828w # لۭۯ +B; xn--ghb25aga; \u0644\u06ED\u06EF; xn--ghb25aga # لۭۯ +B; \u0644\u06ED\u06EF; ; xn--ghb25aga # لۭۯ +B; xn--ghb25aga828w; \u0644\u200C\u06ED\u06EF; xn--ghb25aga828w # لۭۯ +T; \u0644\u200C\u06EF; ; xn--ghb65a # لۯ +N; \u0644\u200C\u06EF; ; xn--ghb65a953d # لۯ +B; xn--ghb65a; \u0644\u06EF; xn--ghb65a # لۯ +B; \u0644\u06EF; ; xn--ghb65a # لۯ +B; xn--ghb65a953d; \u0644\u200C\u06EF; xn--ghb65a953d # لۯ +T; \u0644\u0670\u200C\u06ED; [B3 C1]; xn--ghb2gxq # لٰۭ +N; \u0644\u0670\u200C\u06ED; [B3 C1]; [B3 C1] # لٰۭ +B; xn--ghb2gxq; \u0644\u0670\u06ED; xn--ghb2gxq # لٰۭ +B; \u0644\u0670\u06ED; ; xn--ghb2gxq # لٰۭ +B; xn--ghb2gxqy34f; [B3 C1]; [B3 C1] # لٰۭ +T; \u06EF\u200C\u06EF; [C1]; xn--cmba # ۯۯ +N; \u06EF\u200C\u06EF; [C1]; [C1] # ۯۯ +B; xn--cmba; \u06EF\u06EF; xn--cmba # ۯۯ +B; \u06EF\u06EF; ; xn--cmba # ۯۯ +B; xn--cmba004q; [C1]; [C1] # ۯۯ +T; \u0644\u200C; [B3 C1]; xn--ghb # ل +N; \u0644\u200C; [B3 C1]; [B3 C1] # ل +B; xn--ghb; \u0644; xn--ghb # ل +B; \u0644; ; xn--ghb # ل +B; xn--ghb413k; [B3 C1]; [B3 C1] # ل +B; a。。b; [A4_2]; [A4_2] +B; A。。B; [A4_2]; [A4_2] +B; a..b; [A4_2]; [A4_2] +T; \u200D。。\u06B9\u200C; [B1 B3 C1 C2 A4_2]; [A4_2] # ..ڹ +N; \u200D。。\u06B9\u200C; [B1 B3 C1 C2 A4_2]; [B1 B3 C1 C2 A4_2] # ..ڹ +B; ..xn--skb; [A4_2]; [A4_2] # ..ڹ +B; xn--1ug..xn--skb080k; [B1 B3 C1 C2 A4_2]; [B1 B3 C1 C2 A4_2] # ..ڹ +B; \u05D00\u0660; [B4]; [B4] # א0٠ +B; xn--0-zhc74b; [B4]; [B4] # א0٠ +B; $; [P1 V6]; [P1 V6] + +# RANDOMIZED TESTS + +B; c.0ü.\u05D0; [B1]; [B1] # c.0ü.א +B; c.0u\u0308.\u05D0; [B1]; [B1] # c.0ü.א +B; C.0U\u0308.\u05D0; [B1]; [B1] # c.0ü.א +B; C.0Ü.\u05D0; [B1]; [B1] # c.0ü.א +B; ⒕∝\u065F򓤦.-󠄯; [P1 V3 V6]; [P1 V3 V6] # ⒕∝ٟ.- +B; 14.∝\u065F򓤦.-󠄯; [P1 V3 V6]; [P1 V3 V6] # 14.∝ٟ.- +B; 14.xn--7hb713l3v90n.-; [V3 V6]; [V3 V6] # 14.∝ٟ.- +B; xn--7hb713lfwbi1311b.-; [V3 V6]; [V3 V6] # ⒕∝ٟ.- +B; ꡣ.\u07CF; ; xn--8c9a.xn--qsb # ꡣ.ߏ +B; xn--8c9a.xn--qsb; ꡣ.\u07CF; xn--8c9a.xn--qsb # ꡣ.ߏ +B; ≯\u0603。-; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ≯.- +B; >\u0338\u0603。-; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ≯.- +B; ≯\u0603。-; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ≯.- +B; >\u0338\u0603。-; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ≯.- +B; xn--lfb566l.-; [B1 V3 V6]; [B1 V3 V6] # ≯.- +T; ⾛𐹧⾕.\u115F󠗰ςႭ; [B5 P1 V6]; [B5 P1 V6] # 走𐹧谷.ςႭ +N; ⾛𐹧⾕.\u115F󠗰ςႭ; [B5 P1 V6]; [B5 P1 V6] # 走𐹧谷.ςႭ +T; 走𐹧谷.\u115F󠗰ςႭ; [B5 P1 V6]; [B5 P1 V6] # 走𐹧谷.ςႭ +N; 走𐹧谷.\u115F󠗰ςႭ; [B5 P1 V6]; [B5 P1 V6] # 走𐹧谷.ςႭ +T; 走𐹧谷.\u115F󠗰ςⴍ; [B5 P1 V6]; [B5 P1 V6] # 走𐹧谷.ςⴍ +N; 走𐹧谷.\u115F󠗰ςⴍ; [B5 P1 V6]; [B5 P1 V6] # 走𐹧谷.ςⴍ +B; 走𐹧谷.\u115F󠗰ΣႭ; [B5 P1 V6]; [B5 P1 V6] # 走𐹧谷.σႭ +B; 走𐹧谷.\u115F󠗰σⴍ; [B5 P1 V6]; [B5 P1 V6] # 走𐹧谷.σⴍ +B; 走𐹧谷.\u115F󠗰Σⴍ; [B5 P1 V6]; [B5 P1 V6] # 走𐹧谷.σⴍ +B; xn--6g3a1x434z.xn--4xa180eotvh7453a; [B5 V6]; [B5 V6] # 走𐹧谷.σⴍ +B; xn--6g3a1x434z.xn--4xa627dhpae6345i; [B5 V6]; [B5 V6] # 走𐹧谷.σႭ +B; 
xn--6g3a1x434z.xn--3xa380eotvh7453a; [B5 V6]; [B5 V6] # 走𐹧谷.ςⴍ +B; xn--6g3a1x434z.xn--3xa827dhpae6345i; [B5 V6]; [B5 V6] # 走𐹧谷.ςႭ +T; ⾛𐹧⾕.\u115F󠗰ςⴍ; [B5 P1 V6]; [B5 P1 V6] # 走𐹧谷.ςⴍ +N; ⾛𐹧⾕.\u115F󠗰ςⴍ; [B5 P1 V6]; [B5 P1 V6] # 走𐹧谷.ςⴍ +B; ⾛𐹧⾕.\u115F󠗰ΣႭ; [B5 P1 V6]; [B5 P1 V6] # 走𐹧谷.σႭ +B; ⾛𐹧⾕.\u115F󠗰σⴍ; [B5 P1 V6]; [B5 P1 V6] # 走𐹧谷.σⴍ +B; ⾛𐹧⾕.\u115F󠗰Σⴍ; [B5 P1 V6]; [B5 P1 V6] # 走𐹧谷.σⴍ +T; \u200D≠ᢙ≯.솣-ᡴႠ; [C2 P1 V6]; [P1 V6] # ≠ᢙ≯.솣-ᡴႠ +N; \u200D≠ᢙ≯.솣-ᡴႠ; [C2 P1 V6]; [C2 P1 V6] # ≠ᢙ≯.솣-ᡴႠ +T; \u200D=\u0338ᢙ>\u0338.솣-ᡴႠ; [C2 P1 V6]; [P1 V6] # ≠ᢙ≯.솣-ᡴႠ +N; \u200D=\u0338ᢙ>\u0338.솣-ᡴႠ; [C2 P1 V6]; [C2 P1 V6] # ≠ᢙ≯.솣-ᡴႠ +T; \u200D=\u0338ᢙ>\u0338.솣-ᡴⴀ; [C2 P1 V6]; [P1 V6] # ≠ᢙ≯.솣-ᡴⴀ +N; \u200D=\u0338ᢙ>\u0338.솣-ᡴⴀ; [C2 P1 V6]; [C2 P1 V6] # ≠ᢙ≯.솣-ᡴⴀ +T; \u200D≠ᢙ≯.솣-ᡴⴀ; [C2 P1 V6]; [P1 V6] # ≠ᢙ≯.솣-ᡴⴀ +N; \u200D≠ᢙ≯.솣-ᡴⴀ; [C2 P1 V6]; [C2 P1 V6] # ≠ᢙ≯.솣-ᡴⴀ +B; xn--jbf911clb.xn----p9j493ivi4l; [V6]; [V6] +B; xn--jbf929a90b0b.xn----p9j493ivi4l; [C2 V6]; [C2 V6] # ≠ᢙ≯.솣-ᡴⴀ +B; xn--jbf911clb.xn----6zg521d196p; [V6]; [V6] +B; xn--jbf929a90b0b.xn----6zg521d196p; [C2 V6]; [C2 V6] # ≠ᢙ≯.솣-ᡴႠ +B; 񯞜.𐿇\u0FA2\u077D\u0600; [P1 V6]; [P1 V6] # .ྡྷݽ +B; 񯞜.𐿇\u0FA1\u0FB7\u077D\u0600; [P1 V6]; [P1 V6] # .ྡྷݽ +B; 񯞜.𐿇\u0FA1\u0FB7\u077D\u0600; [P1 V6]; [P1 V6] # .ྡྷݽ +B; xn--gw68a.xn--ifb57ev2psc6027m; [V6]; [V6] # .ྡྷݽ +B; 𣳔\u0303.𑓂; [V5]; [V5] # 𣳔̃.𑓂 +B; xn--nsa95820a.xn--wz1d; [V5]; [V5] # 𣳔̃.𑓂 +B; 𞤀𞥅񘐱。󠄌Ⴣꡥ; [B2 B3 P1 V6]; [B2 B3 P1 V6] +B; 𞤢𞥅񘐱。󠄌ⴣꡥ; [B2 B3 P1 V6]; [B2 B3 P1 V6] +B; xn--9d6hgcy3556a.xn--rlju750b; [B2 B3 V6]; [B2 B3 V6] +B; xn--9d6hgcy3556a.xn--7nd0578e; [B2 B3 V6]; [B2 B3 V6] +B; 𞤀𞥅񘐱。󠄌ⴣꡥ; [B2 B3 P1 V6]; [B2 B3 P1 V6] +T; \u08E2𑁿ς𖬱。󠅡렧; [B1 P1 V6]; [B1 P1 V6] # 𑁿ς𖬱.렧 +N; \u08E2𑁿ς𖬱。󠅡렧; [B1 P1 V6]; [B1 P1 V6] # 𑁿ς𖬱.렧 +T; \u08E2𑁿ς𖬱。󠅡렧; [B1 P1 V6]; [B1 P1 V6] # 𑁿ς𖬱.렧 +N; \u08E2𑁿ς𖬱。󠅡렧; [B1 P1 V6]; [B1 P1 V6] # 𑁿ς𖬱.렧 +B; \u08E2𑁿Σ𖬱。󠅡렧; [B1 P1 V6]; [B1 P1 V6] # 𑁿σ𖬱.렧 +B; \u08E2𑁿Σ𖬱。󠅡렧; [B1 P1 V6]; [B1 P1 V6] # 𑁿σ𖬱.렧 +B; \u08E2𑁿σ𖬱。󠅡렧; [B1 P1 V6]; [B1 P1 V6] # 𑁿σ𖬱.렧 +B; \u08E2𑁿σ𖬱。󠅡렧; [B1 P1 V6]; [B1 P1 V6] # 𑁿σ𖬱.렧 +B; xn--4xa53xp48ys2xc.xn--kn2b; [B1 V6]; [B1 V6] # 𑁿σ𖬱.렧 +B; xn--3xa73xp48ys2xc.xn--kn2b; [B1 V6]; [B1 V6] # 𑁿ς𖬱.렧 +T; -\u200D。𞤍\u200C\u200D⒈; [B1 C1 C2 P1 V3 V6]; [B1 P1 V3 V6] # -.𞤯⒈ +N; -\u200D。𞤍\u200C\u200D⒈; [B1 C1 C2 P1 V3 V6]; [B1 C1 C2 P1 V3 V6] # -.𞤯⒈ +T; -\u200D。𞤍\u200C\u200D1.; [B1 C1 C2 V3]; [B1 V3] # -.𞤯1. +N; -\u200D。𞤍\u200C\u200D1.; [B1 C1 C2 V3]; [B1 C1 C2 V3] # -.𞤯1. +T; -\u200D。𞤯\u200C\u200D1.; [B1 C1 C2 V3]; [B1 V3] # -.𞤯1. +N; -\u200D。𞤯\u200C\u200D1.; [B1 C1 C2 V3]; [B1 C1 C2 V3] # -.𞤯1. +B; -.xn--1-0i8r.; [B1 V3]; [B1 V3] +B; xn----ugn.xn--1-rgnd61297b.; [B1 C1 C2 V3]; [B1 C1 C2 V3] # -.𞤯1. +T; -\u200D。𞤯\u200C\u200D⒈; [B1 C1 C2 P1 V3 V6]; [B1 P1 V3 V6] # -.𞤯⒈ +N; -\u200D。𞤯\u200C\u200D⒈; [B1 C1 C2 P1 V3 V6]; [B1 C1 C2 P1 V3 V6] # -.𞤯⒈ +B; -.xn--tsh3666n; [B1 V3 V6]; [B1 V3 V6] +B; xn----ugn.xn--0ugc555aiv51d; [B1 C1 C2 V3 V6]; [B1 C1 C2 V3 V6] # -.𞤯⒈ +T; \u200C򅎭.Ⴒ𑇀; [C1 P1 V6]; [P1 V6] # .Ⴒ𑇀 +N; \u200C򅎭.Ⴒ𑇀; [C1 P1 V6]; [C1 P1 V6] # .Ⴒ𑇀 +T; \u200C򅎭.ⴒ𑇀; [C1 P1 V6]; [P1 V6] # .ⴒ𑇀 +N; \u200C򅎭.ⴒ𑇀; [C1 P1 V6]; [C1 P1 V6] # .ⴒ𑇀 +B; xn--bn95b.xn--9kj2034e; [V6]; [V6] +B; xn--0ug15083f.xn--9kj2034e; [C1 V6]; [C1 V6] # .ⴒ𑇀 +B; xn--bn95b.xn--qnd6272k; [V6]; [V6] +B; xn--0ug15083f.xn--qnd6272k; [C1 V6]; [C1 V6] # .Ⴒ𑇀 +T; 繱𑖿\u200D.8︒; [P1 V6]; [P1 V6] # 繱𑖿.8︒ +N; 繱𑖿\u200D.8︒; [P1 V6]; [P1 V6] # 繱𑖿.8︒ +T; 繱𑖿\u200D.8。; 繱𑖿\u200D.8.; xn--gl0as212a.8. # 繱𑖿.8. +N; 繱𑖿\u200D.8。; 繱𑖿\u200D.8.; xn--1ug6928ac48e.8. # 繱𑖿.8. +B; xn--gl0as212a.8.; 繱𑖿.8.; xn--gl0as212a.8. +B; 繱𑖿.8.; ; xn--gl0as212a.8. 
+B; xn--1ug6928ac48e.8.; 繱𑖿\u200D.8.; xn--1ug6928ac48e.8. # 繱𑖿.8. +T; 繱𑖿\u200D.8.; ; xn--gl0as212a.8. # 繱𑖿.8. +N; 繱𑖿\u200D.8.; ; xn--1ug6928ac48e.8. # 繱𑖿.8. +B; xn--gl0as212a.xn--8-o89h; [V6]; [V6] +B; xn--1ug6928ac48e.xn--8-o89h; [V6]; [V6] # 繱𑖿.8︒ +B; 󠆾.𞀈; [V5 A4_2]; [V5 A4_2] +B; 󠆾.𞀈; [V5 A4_2]; [V5 A4_2] +B; .xn--ph4h; [V5 A4_2]; [V5 A4_2] +T; ß\u06EB。\u200D; [C2]; xn--ss-59d. # ß۫. +N; ß\u06EB。\u200D; [C2]; [C2] # ß۫. +T; SS\u06EB。\u200D; [C2]; xn--ss-59d. # ss۫. +N; SS\u06EB。\u200D; [C2]; [C2] # ss۫. +T; ss\u06EB。\u200D; [C2]; xn--ss-59d. # ss۫. +N; ss\u06EB。\u200D; [C2]; [C2] # ss۫. +T; Ss\u06EB。\u200D; [C2]; xn--ss-59d. # ss۫. +N; Ss\u06EB。\u200D; [C2]; [C2] # ss۫. +B; xn--ss-59d.; ss\u06EB.; xn--ss-59d. # ss۫. +B; ss\u06EB.; ; xn--ss-59d. # ss۫. +B; SS\u06EB.; ss\u06EB.; xn--ss-59d. # ss۫. +B; Ss\u06EB.; ss\u06EB.; xn--ss-59d. # ss۫. +B; xn--ss-59d.xn--1ug; [C2]; [C2] # ss۫. +B; xn--zca012a.xn--1ug; [C2]; [C2] # ß۫. +T; 󠐵\u200C⒈.󠎇; [C1 P1 V6]; [P1 V6] # ⒈. +N; 󠐵\u200C⒈.󠎇; [C1 P1 V6]; [C1 P1 V6] # ⒈. +T; 󠐵\u200C1..󠎇; [C1 P1 V6 A4_2]; [P1 V6 A4_2] # 1.. +N; 󠐵\u200C1..󠎇; [C1 P1 V6 A4_2]; [C1 P1 V6 A4_2] # 1.. +B; xn--1-bs31m..xn--tv36e; [V6 A4_2]; [V6 A4_2] +B; xn--1-rgn37671n..xn--tv36e; [C1 V6 A4_2]; [C1 V6 A4_2] # 1.. +B; xn--tshz2001k.xn--tv36e; [V6]; [V6] +B; xn--0ug88o47900b.xn--tv36e; [C1 V6]; [C1 V6] # ⒈. +T; 󟈣\u065F\uAAB2ß。󌓧; [P1 V6]; [P1 V6] # ٟꪲß. +N; 󟈣\u065F\uAAB2ß。󌓧; [P1 V6]; [P1 V6] # ٟꪲß. +B; 󟈣\u065F\uAAB2SS。󌓧; [P1 V6]; [P1 V6] # ٟꪲss. +B; 󟈣\u065F\uAAB2ss。󌓧; [P1 V6]; [P1 V6] # ٟꪲss. +B; 󟈣\u065F\uAAB2Ss。󌓧; [P1 V6]; [P1 V6] # ٟꪲss. +B; xn--ss-3xd2839nncy1m.xn--bb79d; [V6]; [V6] # ٟꪲss. +B; xn--zca92z0t7n5w96j.xn--bb79d; [V6]; [V6] # ٟꪲß. +T; \u0774\u200C𞤿。𽘐䉜\u200D񿤼; [C1 C2 P1 V6]; [P1 V6] # ݴ𞤿.䉜 +N; \u0774\u200C𞤿。𽘐䉜\u200D񿤼; [C1 C2 P1 V6]; [C1 C2 P1 V6] # ݴ𞤿.䉜 +T; \u0774\u200C𞤝。𽘐䉜\u200D񿤼; [C1 C2 P1 V6]; [P1 V6] # ݴ𞤿.䉜 +N; \u0774\u200C𞤝。𽘐䉜\u200D񿤼; [C1 C2 P1 V6]; [C1 C2 P1 V6] # ݴ𞤿.䉜 +B; xn--4pb2977v.xn--z0nt555ukbnv; [V6]; [V6] # ݴ𞤿.䉜 +B; xn--4pb607jjt73a.xn--1ug236ke314donv1a; [C1 C2 V6]; [C1 C2 V6] # ݴ𞤿.䉜 +T; 򔭜ςᡱ⒈.≮𑄳\u200D𐮍; [B1 P1 V6]; [B1 P1 V6] # ςᡱ⒈.≮𑄳𐮍 +N; 򔭜ςᡱ⒈.≮𑄳\u200D𐮍; [B1 P1 V6]; [B1 P1 V6] # ςᡱ⒈.≮𑄳𐮍 +T; 򔭜ςᡱ⒈.<\u0338𑄳\u200D𐮍; [B1 P1 V6]; [B1 P1 V6] # ςᡱ⒈.≮𑄳𐮍 +N; 򔭜ςᡱ⒈.<\u0338𑄳\u200D𐮍; [B1 P1 V6]; [B1 P1 V6] # ςᡱ⒈.≮𑄳𐮍 +T; 򔭜ςᡱ1..≮𑄳\u200D𐮍; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # ςᡱ1..≮𑄳𐮍 +N; 򔭜ςᡱ1..≮𑄳\u200D𐮍; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # ςᡱ1..≮𑄳𐮍 +T; 򔭜ςᡱ1..<\u0338𑄳\u200D𐮍; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # ςᡱ1..≮𑄳𐮍 +N; 򔭜ςᡱ1..<\u0338𑄳\u200D𐮍; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # ςᡱ1..≮𑄳𐮍 +T; 򔭜Σᡱ1..<\u0338𑄳\u200D𐮍; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # σᡱ1..≮𑄳𐮍 +N; 򔭜Σᡱ1..<\u0338𑄳\u200D𐮍; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # σᡱ1..≮𑄳𐮍 +T; 򔭜Σᡱ1..≮𑄳\u200D𐮍; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # σᡱ1..≮𑄳𐮍 +N; 򔭜Σᡱ1..≮𑄳\u200D𐮍; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # σᡱ1..≮𑄳𐮍 +T; 򔭜σᡱ1..≮𑄳\u200D𐮍; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # σᡱ1..≮𑄳𐮍 +N; 򔭜σᡱ1..≮𑄳\u200D𐮍; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # σᡱ1..≮𑄳𐮍 +T; 򔭜σᡱ1..<\u0338𑄳\u200D𐮍; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # σᡱ1..≮𑄳𐮍 +N; 򔭜σᡱ1..<\u0338𑄳\u200D𐮍; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # σᡱ1..≮𑄳𐮍 +B; xn--1-zmb699meq63t..xn--gdh5392g6sd; [B1 V6 A4_2]; [B1 V6 A4_2] +B; xn--1-zmb699meq63t..xn--1ug85gn777ahze; [B1 V6 A4_2]; [B1 V6 A4_2] # σᡱ1..≮𑄳𐮍 +B; xn--1-xmb999meq63t..xn--1ug85gn777ahze; [B1 V6 A4_2]; [B1 V6 A4_2] # ςᡱ1..≮𑄳𐮍 +T; 򔭜Σᡱ⒈.<\u0338𑄳\u200D𐮍; [B1 P1 V6]; [B1 P1 V6] # σᡱ⒈.≮𑄳𐮍 +N; 򔭜Σᡱ⒈.<\u0338𑄳\u200D𐮍; [B1 P1 V6]; [B1 P1 V6] # σᡱ⒈.≮𑄳𐮍 +T; 򔭜Σᡱ⒈.≮𑄳\u200D𐮍; [B1 P1 V6]; [B1 P1 V6] # σᡱ⒈.≮𑄳𐮍 +N; 򔭜Σᡱ⒈.≮𑄳\u200D𐮍; [B1 P1 V6]; [B1 P1 V6] # 
σᡱ⒈.≮𑄳𐮍 +T; 򔭜σᡱ⒈.≮𑄳\u200D𐮍; [B1 P1 V6]; [B1 P1 V6] # σᡱ⒈.≮𑄳𐮍 +N; 򔭜σᡱ⒈.≮𑄳\u200D𐮍; [B1 P1 V6]; [B1 P1 V6] # σᡱ⒈.≮𑄳𐮍 +T; 򔭜σᡱ⒈.<\u0338𑄳\u200D𐮍; [B1 P1 V6]; [B1 P1 V6] # σᡱ⒈.≮𑄳𐮍 +N; 򔭜σᡱ⒈.<\u0338𑄳\u200D𐮍; [B1 P1 V6]; [B1 P1 V6] # σᡱ⒈.≮𑄳𐮍 +B; xn--4xa207hkzinr77u.xn--gdh5392g6sd; [B1 V6]; [B1 V6] +B; xn--4xa207hkzinr77u.xn--1ug85gn777ahze; [B1 V6]; [B1 V6] # σᡱ⒈.≮𑄳𐮍 +B; xn--3xa407hkzinr77u.xn--1ug85gn777ahze; [B1 V6]; [B1 V6] # ςᡱ⒈.≮𑄳𐮍 +B; \u3164\u094DႠ\u17D0.\u180B; [P1 V6]; [P1 V6] # ्Ⴀ័. +B; \u1160\u094DႠ\u17D0.\u180B; [P1 V6]; [P1 V6] # ्Ⴀ័. +B; \u1160\u094Dⴀ\u17D0.\u180B; [P1 V6]; [P1 V6] # ्ⴀ័. +B; xn--n3b742bkqf4ty.; [V6]; [V6] # ्ⴀ័. +B; xn--n3b468aoqa89r.; [V6]; [V6] # ्Ⴀ័. +B; \u3164\u094Dⴀ\u17D0.\u180B; [P1 V6]; [P1 V6] # ्ⴀ័. +B; xn--n3b445e53po6d.; [V6]; [V6] # ्ⴀ័. +B; xn--n3b468azngju2a.; [V6]; [V6] # ्Ⴀ័. +T; ❣\u200D.\u09CD𑰽\u0612\uA929; [C2 V5]; [V5] # ❣.্𑰽ؒꤩ +N; ❣\u200D.\u09CD𑰽\u0612\uA929; [C2 V5]; [C2 V5] # ❣.্𑰽ؒꤩ +T; ❣\u200D.\u09CD𑰽\u0612\uA929; [C2 V5]; [V5] # ❣.্𑰽ؒꤩ +N; ❣\u200D.\u09CD𑰽\u0612\uA929; [C2 V5]; [C2 V5] # ❣.্𑰽ؒꤩ +B; xn--pei.xn--0fb32q3w7q2g4d; [V5]; [V5] # ❣.্𑰽ؒꤩ +B; xn--1ugy10a.xn--0fb32q3w7q2g4d; [C2 V5]; [C2 V5] # ❣.্𑰽ؒꤩ +B; ≮𐳺𐹄.≯񪮸ꡅ; [B1 P1 V6]; [B1 P1 V6] +B; <\u0338𐳺𐹄.>\u0338񪮸ꡅ; [B1 P1 V6]; [B1 P1 V6] +B; xn--gdh7943gk2a.xn--hdh1383c5e36c; [B1 V6]; [B1 V6] +B; \u0CCC𐧅𐳏󠲺。\u0CCDᠦ; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ೌ𐧅𐳏.್ᠦ +B; \u0CCC𐧅𐳏󠲺。\u0CCDᠦ; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ೌ𐧅𐳏.್ᠦ +B; \u0CCC𐧅𐲏󠲺。\u0CCDᠦ; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ೌ𐧅𐳏.್ᠦ +B; xn--7tc6360ky5bn2732c.xn--8tc429c; [B1 V5 V6]; [B1 V5 V6] # ೌ𐧅𐳏.್ᠦ +B; \u0CCC𐧅𐲏󠲺。\u0CCDᠦ; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ೌ𐧅𐳏.್ᠦ +B; \u0349。𧡫; [V5]; [V5] # ͉.𧡫 +B; xn--nua.xn--bc6k; [V5]; [V5] # ͉.𧡫 +B; 𑰿󠅦.\u1160; [P1 V5 V6]; [P1 V5 V6] # 𑰿. +B; 𑰿󠅦.\u1160; [P1 V5 V6]; [P1 V5 V6] # 𑰿. +B; xn--ok3d.xn--psd; [V5 V6]; [V5 V6] # 𑰿. +T; -𞤆\u200D。󸼄𞳒; [B1 B5 B6 C2 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # -𞤨. +N; -𞤆\u200D。󸼄𞳒; [B1 B5 B6 C2 P1 V3 V6]; [B1 B5 B6 C2 P1 V3 V6] # -𞤨. +T; -𞤨\u200D。󸼄𞳒; [B1 B5 B6 C2 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # -𞤨. +N; -𞤨\u200D。󸼄𞳒; [B1 B5 B6 C2 P1 V3 V6]; [B1 B5 B6 C2 P1 V3 V6] # -𞤨. +B; xn----ni8r.xn--846h96596c; [B1 B5 B6 V3 V6]; [B1 B5 B6 V3 V6] +B; xn----ugnx367r.xn--846h96596c; [B1 B5 B6 C2 V3 V6]; [B1 B5 B6 C2 V3 V6] # -𞤨. 
+B; ꡏ󠇶≯𳾽。\u1DFD⾇滸𐹰; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ꡏ≯.᷽舛滸𐹰 +B; ꡏ󠇶>\u0338𳾽。\u1DFD⾇滸𐹰; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ꡏ≯.᷽舛滸𐹰 +B; ꡏ󠇶≯𳾽。\u1DFD舛滸𐹰; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ꡏ≯.᷽舛滸𐹰 +B; ꡏ󠇶>\u0338𳾽。\u1DFD舛滸𐹰; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ꡏ≯.᷽舛滸𐹰 +B; xn--hdh7483cu6twwki8e.xn--yfg0765a58l0n6k; [B1 V5 V6]; [B1 V5 V6] # ꡏ≯.᷽舛滸𐹰 +B; 蔏。𑰺; [V5]; [V5] +B; 蔏。𑰺; [V5]; [V5] +B; xn--uy1a.xn--jk3d; [V5]; [V5] +B; 𝟿𐮋。󠄊; [B1]; [B1] +B; 9𐮋。󠄊; [B1]; [B1] +B; xn--9-rv5i.; [B1]; [B1] +B; 󟇇-䟖F。\u07CB⒈\u0662; [B4 P1 V6]; [B4 P1 V6] # -䟖f.ߋ⒈٢ +B; 󟇇-䟖F。\u07CB1.\u0662; [B1 P1 V6]; [B1 P1 V6] # -䟖f.ߋ1.٢ +B; 󟇇-䟖f。\u07CB1.\u0662; [B1 P1 V6]; [B1 P1 V6] # -䟖f.ߋ1.٢ +B; xn---f-mz8b08788k.xn--1-ybd.xn--bib; [B1 V6]; [B1 V6] # -䟖f.ߋ1.٢ +B; 󟇇-䟖f。\u07CB⒈\u0662; [B4 P1 V6]; [B4 P1 V6] # -䟖f.ߋ⒈٢ +B; xn---f-mz8b08788k.xn--bib53ev44d; [B4 V6]; [B4 V6] # -䟖f.ߋ⒈٢ +T; \u200C。𐹺; [B1 C1]; [B1 A4_2] # .𐹺 +N; \u200C。𐹺; [B1 C1]; [B1 C1] # .𐹺 +T; \u200C。𐹺; [B1 C1]; [B1 A4_2] # .𐹺 +N; \u200C。𐹺; [B1 C1]; [B1 C1] # .𐹺 +B; .xn--yo0d; [B1 A4_2]; [B1 A4_2] +B; xn--0ug.xn--yo0d; [B1 C1]; [B1 C1] # .𐹺 +T; 𐡆.≯\u200C-𞥀; [B1 C1 P1 V6]; [B1 P1 V6] # 𐡆.≯-𞥀 +N; 𐡆.≯\u200C-𞥀; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐡆.≯-𞥀 +T; 𐡆.>\u0338\u200C-𞥀; [B1 C1 P1 V6]; [B1 P1 V6] # 𐡆.≯-𞥀 +N; 𐡆.>\u0338\u200C-𞥀; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐡆.≯-𞥀 +T; 𐡆.>\u0338\u200C-𞤞; [B1 C1 P1 V6]; [B1 P1 V6] # 𐡆.≯-𞥀 +N; 𐡆.>\u0338\u200C-𞤞; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐡆.≯-𞥀 +T; 𐡆.≯\u200C-𞤞; [B1 C1 P1 V6]; [B1 P1 V6] # 𐡆.≯-𞥀 +N; 𐡆.≯\u200C-𞤞; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐡆.≯-𞥀 +B; xn--le9c.xn----ogo9956r; [B1 V6]; [B1 V6] +B; xn--le9c.xn----rgn40iy359e; [B1 C1 V6]; [B1 C1 V6] # 𐡆.≯-𞥀 +B; 󠁀-。≠\uFCD7; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -.≠هج +B; 󠁀-。=\u0338\uFCD7; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -.≠هج +B; 󠁀-。≠\u0647\u062C; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -.≠هج +B; 󠁀-。=\u0338\u0647\u062C; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -.≠هج +B; xn----f411m.xn--rgb7c611j; [B1 V3 V6]; [B1 V3 V6] # -.≠هج +T; 񻬹𑈵。\u200D𞨶; [B1 C2 P1 V6]; [P1 V6] # 𑈵. +N; 񻬹𑈵。\u200D𞨶; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 𑈵. +B; xn--8g1d12120a.xn--5l6h; [V6]; [V6] +B; xn--8g1d12120a.xn--1ug6651p; [B1 C2 V6]; [B1 C2 V6] # 𑈵. +B; 𑋧\uA9C02。㧉򒖄; [P1 V5 V6]; [P1 V5 V6] # 𑋧꧀2.㧉 +B; 𑋧\uA9C02。㧉򒖄; [P1 V5 V6]; [P1 V5 V6] # 𑋧꧀2.㧉 +B; xn--2-5z4eu89y.xn--97l02706d; [V5 V6]; [V5 V6] # 𑋧꧀2.㧉 +T; \u200C𽬄𐹴𞩥。≯6; [B1 C1 P1 V6]; [B1 B5 B6 P1 V6] # 𐹴.≯6 +N; \u200C𽬄𐹴𞩥。≯6; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹴.≯6 +T; \u200C𽬄𐹴𞩥。>\u03386; [B1 C1 P1 V6]; [B1 B5 B6 P1 V6] # 𐹴.≯6 +N; \u200C𽬄𐹴𞩥。>\u03386; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹴.≯6 +B; xn--so0du768aim9m.xn--6-ogo; [B1 B5 B6 V6]; [B1 B5 B6 V6] +B; xn--0ug7105gf5wfxepq.xn--6-ogo; [B1 C1 V6]; [B1 C1 V6] # 𐹴.≯6 +T; 𑁿.𐹦𻞵-\u200D; [B1 B3 B6 C2 P1 V5 V6]; [B1 B3 B6 P1 V3 V5 V6] # 𑁿.𐹦- +N; 𑁿.𐹦𻞵-\u200D; [B1 B3 B6 C2 P1 V5 V6]; [B1 B3 B6 C2 P1 V5 V6] # 𑁿.𐹦- +T; 𑁿.𐹦𻞵-\u200D; [B1 B3 B6 C2 P1 V5 V6]; [B1 B3 B6 P1 V3 V5 V6] # 𑁿.𐹦- +N; 𑁿.𐹦𻞵-\u200D; [B1 B3 B6 C2 P1 V5 V6]; [B1 B3 B6 C2 P1 V5 V6] # 𑁿.𐹦- +B; xn--q30d.xn----i26i1299n; [B1 B3 B6 V3 V5 V6]; [B1 B3 B6 V3 V5 V6] +B; xn--q30d.xn----ugn1088hfsxv; [B1 B3 B6 C2 V5 V6]; [B1 B3 B6 C2 V5 V6] # 𑁿.𐹦- +T; ⤸ς𺱀。\uFFA0; [P1 V6]; [P1 V6] # ⤸ς. +N; ⤸ς𺱀。\uFFA0; [P1 V6]; [P1 V6] # ⤸ς. +T; ⤸ς𺱀。\u1160; [P1 V6]; [P1 V6] # ⤸ς. +N; ⤸ς𺱀。\u1160; [P1 V6]; [P1 V6] # ⤸ς. +B; ⤸Σ𺱀。\u1160; [P1 V6]; [P1 V6] # ⤸σ. +B; ⤸σ𺱀。\u1160; [P1 V6]; [P1 V6] # ⤸σ. +B; xn--4xa192qmp03d.xn--psd; [V6]; [V6] # ⤸σ. +B; xn--3xa392qmp03d.xn--psd; [V6]; [V6] # ⤸ς. +B; ⤸Σ𺱀。\uFFA0; [P1 V6]; [P1 V6] # ⤸σ. +B; ⤸σ𺱀。\uFFA0; [P1 V6]; [P1 V6] # ⤸σ. +B; xn--4xa192qmp03d.xn--cl7c; [V6]; [V6] # ⤸σ. 
+B; xn--3xa392qmp03d.xn--cl7c; [V6]; [V6] # ⤸ς. +B; \u0765\u1035𐫔\u06D5.𐦬𑋪Ⴃ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ݥဵ𐫔ە.𐦬𑋪Ⴃ +B; \u0765\u1035𐫔\u06D5.𐦬𑋪ⴃ; [B2 B3]; [B2 B3] # ݥဵ𐫔ە.𐦬𑋪ⴃ +B; xn--llb10as9tqp5y.xn--ukj7371e21f; [B2 B3]; [B2 B3] # ݥဵ𐫔ە.𐦬𑋪ⴃ +B; xn--llb10as9tqp5y.xn--bnd9168j21f; [B2 B3 V6]; [B2 B3 V6] # ݥဵ𐫔ە.𐦬𑋪Ⴃ +B; \u0661\u1B44-킼.\u1BAA\u0616\u066C≯; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ١᭄-킼.᮪ؖ٬≯ +B; \u0661\u1B44-킼.\u1BAA\u0616\u066C>\u0338; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ١᭄-킼.᮪ؖ٬≯ +B; xn----9pc551nk39n.xn--4fb6o571degg; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # ١᭄-킼.᮪ؖ٬≯ +B; -。\u06C2\u0604򅖡𑓂; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # -.ۂ𑓂 +B; -。\u06C1\u0654\u0604򅖡𑓂; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # -.ۂ𑓂 +B; -.xn--mfb39a7208dzgs3d; [B1 B2 B3 V3 V6]; [B1 B2 B3 V3 V6] # -.ۂ𑓂 +T; \u200D󯑖󠁐.\u05BD𙮰ꡝ𐋡; [C2 P1 V5 V6]; [P1 V5 V6] # .ֽꡝ𐋡 +N; \u200D󯑖󠁐.\u05BD𙮰ꡝ𐋡; [C2 P1 V5 V6]; [C2 P1 V5 V6] # .ֽꡝ𐋡 +T; \u200D󯑖󠁐.\u05BD𙮰ꡝ𐋡; [C2 P1 V5 V6]; [P1 V5 V6] # .ֽꡝ𐋡 +N; \u200D󯑖󠁐.\u05BD𙮰ꡝ𐋡; [C2 P1 V5 V6]; [C2 P1 V5 V6] # .ֽꡝ𐋡 +B; xn--b726ey18m.xn--ldb8734fg0qcyzzg; [V5 V6]; [V5 V6] # .ֽꡝ𐋡 +B; xn--1ug66101lt8me.xn--ldb8734fg0qcyzzg; [C2 V5 V6]; [C2 V5 V6] # .ֽꡝ𐋡 +T; ︒􃈵ς񀠇。𐮈; [B1 P1 V6]; [B1 P1 V6] +N; ︒􃈵ς񀠇。𐮈; [B1 P1 V6]; [B1 P1 V6] +T; 。􃈵ς񀠇。𐮈; [P1 V6 A4_2]; [P1 V6 A4_2] +N; 。􃈵ς񀠇。𐮈; [P1 V6 A4_2]; [P1 V6 A4_2] +B; 。􃈵Σ񀠇。𐮈; [P1 V6 A4_2]; [P1 V6 A4_2] +B; 。􃈵σ񀠇。𐮈; [P1 V6 A4_2]; [P1 V6 A4_2] +B; .xn--4xa68573c7n64d.xn--f29c; [V6 A4_2]; [V6 A4_2] +B; .xn--3xa88573c7n64d.xn--f29c; [V6 A4_2]; [V6 A4_2] +B; ︒􃈵Σ񀠇。𐮈; [B1 P1 V6]; [B1 P1 V6] +B; ︒􃈵σ񀠇。𐮈; [B1 P1 V6]; [B1 P1 V6] +B; xn--4xa1729jwz5t7gl5f.xn--f29c; [B1 V6]; [B1 V6] +B; xn--3xa3729jwz5t7gl5f.xn--f29c; [B1 V6]; [B1 V6] +B; \u07D9.\u06EE󆾃≯󠅲; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ߙ.ۮ≯ +B; \u07D9.\u06EE󆾃>\u0338󠅲; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ߙ.ۮ≯ +B; \u07D9.\u06EE󆾃≯󠅲; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ߙ.ۮ≯ +B; \u07D9.\u06EE󆾃>\u0338󠅲; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ߙ.ۮ≯ +B; xn--0sb.xn--bmb691l0524t; [B2 B3 V6]; [B2 B3 V6] # ߙ.ۮ≯ +B; \u1A73󚙸.𐭍; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ᩳ.𐭍 +B; xn--2of22352n.xn--q09c; [B1 V5 V6]; [B1 V5 V6] # ᩳ.𐭍 +B; ⒉󠊓≠。Ⴟ⬣Ⴈ; [P1 V6]; [P1 V6] +B; ⒉󠊓=\u0338。Ⴟ⬣Ⴈ; [P1 V6]; [P1 V6] +B; 2.󠊓≠。Ⴟ⬣Ⴈ; [P1 V6]; [P1 V6] +B; 2.󠊓=\u0338。Ⴟ⬣Ⴈ; [P1 V6]; [P1 V6] +B; 2.󠊓=\u0338。ⴟ⬣ⴈ; [P1 V6]; [P1 V6] +B; 2.󠊓≠。ⴟ⬣ⴈ; [P1 V6]; [P1 V6] +B; 2.xn--1chz4101l.xn--45iz7d6b; [V6]; [V6] +B; 2.xn--1chz4101l.xn--gnd9b297j; [V6]; [V6] +B; ⒉󠊓=\u0338。ⴟ⬣ⴈ; [P1 V6]; [P1 V6] +B; ⒉󠊓≠。ⴟ⬣ⴈ; [P1 V6]; [P1 V6] +B; xn--1ch07f91401d.xn--45iz7d6b; [V6]; [V6] +B; xn--1ch07f91401d.xn--gnd9b297j; [V6]; [V6] +B; -󠉱\u0FB8Ⴥ。-𐹽\u0774𞣑; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ྸჅ.-𐹽ݴ𞣑 +B; -󠉱\u0FB8ⴥ。-𐹽\u0774𞣑; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ྸⴥ.-𐹽ݴ𞣑 +B; xn----xmg317tgv352a.xn----05c4213ryr0g; [B1 V3 V6]; [B1 V3 V6] # -ྸⴥ.-𐹽ݴ𞣑 +B; xn----xmg12fm2555h.xn----05c4213ryr0g; [B1 V3 V6]; [B1 V3 V6] # -ྸჅ.-𐹽ݴ𞣑 +B; \u0659。𑄴︒\u0627\u07DD; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # ٙ.𑄴︒اߝ +B; \u0659。𑄴。\u0627\u07DD; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ٙ.𑄴.اߝ +B; xn--1hb.xn--w80d.xn--mgb09f; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ٙ.𑄴.اߝ +B; xn--1hb.xn--mgb09fp820c08pa; [B1 B3 B6 V5 V6]; [B1 B3 B6 V5 V6] # ٙ.𑄴︒اߝ +T; Ⴙ\u0638.󠆓\u200D; [B1 B5 B6 C2 P1 V6]; [B5 B6 P1 V6] # Ⴙظ. +N; Ⴙ\u0638.󠆓\u200D; [B1 B5 B6 C2 P1 V6]; [B1 B5 B6 C2 P1 V6] # Ⴙظ. +T; ⴙ\u0638.󠆓\u200D; [B1 B5 B6 C2]; [B5 B6] # ⴙظ. +N; ⴙ\u0638.󠆓\u200D; [B1 B5 B6 C2]; [B1 B5 B6 C2] # ⴙظ. +B; xn--3gb910r.; [B5 B6]; [B5 B6] # ⴙظ. +B; xn--3gb910r.xn--1ug; [B1 B5 B6 C2]; [B1 B5 B6 C2] # ⴙظ. +B; xn--3gb194c.; [B5 B6 V6]; [B5 B6 V6] # Ⴙظ. 
+B; xn--3gb194c.xn--1ug; [B1 B5 B6 C2 V6]; [B1 B5 B6 C2 V6] # Ⴙظ. +B; 󠆸。₆0𐺧\u0756; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # .60ݖ +B; 󠆸。60𐺧\u0756; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # .60ݖ +B; .xn--60-cke9470y; [B1 V6 A4_2]; [B1 V6 A4_2] # .60ݖ +B; 6\u084F。-𑈴; [B1 V3]; [B1 V3] # 6ࡏ.-𑈴 +B; 6\u084F。-𑈴; [B1 V3]; [B1 V3] # 6ࡏ.-𑈴 +B; xn--6-jjd.xn----6n8i; [B1 V3]; [B1 V3] # 6ࡏ.-𑈴 +T; \u200D񋌿𐹰。\u0ACDς𞰎\u08D6; [B1 C2 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 𐹰.્ςࣖ +N; \u200D񋌿𐹰。\u0ACDς𞰎\u08D6; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # 𐹰.્ςࣖ +T; \u200D񋌿𐹰。\u0ACDς𞰎\u08D6; [B1 C2 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 𐹰.્ςࣖ +N; \u200D񋌿𐹰。\u0ACDς𞰎\u08D6; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # 𐹰.્ςࣖ +T; \u200D񋌿𐹰。\u0ACDΣ𞰎\u08D6; [B1 C2 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 𐹰.્σࣖ +N; \u200D񋌿𐹰。\u0ACDΣ𞰎\u08D6; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # 𐹰.્σࣖ +T; \u200D񋌿𐹰。\u0ACDσ𞰎\u08D6; [B1 C2 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 𐹰.્σࣖ +N; \u200D񋌿𐹰。\u0ACDσ𞰎\u08D6; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # 𐹰.્σࣖ +B; xn--oo0d1330n.xn--4xa21xcwbfz15g; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # 𐹰.્σࣖ +B; xn--1ugx105gq26y.xn--4xa21xcwbfz15g; [B1 C2 V5 V6]; [B1 C2 V5 V6] # 𐹰.્σࣖ +B; xn--1ugx105gq26y.xn--3xa41xcwbfz15g; [B1 C2 V5 V6]; [B1 C2 V5 V6] # 𐹰.્ςࣖ +T; \u200D񋌿𐹰。\u0ACDΣ𞰎\u08D6; [B1 C2 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 𐹰.્σࣖ +N; \u200D񋌿𐹰。\u0ACDΣ𞰎\u08D6; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # 𐹰.્σࣖ +T; \u200D񋌿𐹰。\u0ACDσ𞰎\u08D6; [B1 C2 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 𐹰.્σࣖ +N; \u200D񋌿𐹰。\u0ACDσ𞰎\u08D6; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # 𐹰.્σࣖ +B; ⒈񟄜Ⴓ⒪.\u0DCA򘘶\u088B𐹢; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ⒈Ⴓ⒪.්𐹢 +B; 1.񟄜Ⴓ(o).\u0DCA򘘶\u088B𐹢; [B1 B6 P1 V5 V6]; [B1 B6 P1 V5 V6] # 1.Ⴓ(o).්𐹢 +B; 1.񟄜ⴓ(o).\u0DCA򘘶\u088B𐹢; [B1 B6 P1 V5 V6]; [B1 B6 P1 V5 V6] # 1.ⴓ(o).්𐹢 +B; 1.񟄜Ⴓ(O).\u0DCA򘘶\u088B𐹢; [B1 B6 P1 V5 V6]; [B1 B6 P1 V5 V6] # 1.Ⴓ(o).්𐹢 +B; 1.xn--(o)-7sn88849j.xn--3xb99xpx1yoes3e; [B1 B6 P1 V5 V6]; [B1 B6 P1 V5 V6] # 1.Ⴓ(o).්𐹢 +B; 1.xn--(o)-ej1bu5389e.xn--3xb99xpx1yoes3e; [B1 B6 P1 V5 V6]; [B1 B6 P1 V5 V6] # 1.ⴓ(o).්𐹢 +B; ⒈񟄜ⴓ⒪.\u0DCA򘘶\u088B𐹢; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ⒈ⴓ⒪.්𐹢 +B; xn--tsh0ds63atl31n.xn--3xb99xpx1yoes3e; [B1 V5 V6]; [B1 V5 V6] # ⒈ⴓ⒪.්𐹢 +B; xn--rnd762h7cx3027d.xn--3xb99xpx1yoes3e; [B1 V5 V6]; [B1 V5 V6] # ⒈Ⴓ⒪.්𐹢 +B; 𞤷.𐮐𞢁𐹠\u0624; ; xn--ve6h.xn--jgb1694kz0b2176a; NV8 # 𞤷.𐮐𞢁𐹠ؤ +B; 𞤷.𐮐𞢁𐹠\u0648\u0654; 𞤷.𐮐𞢁𐹠\u0624; xn--ve6h.xn--jgb1694kz0b2176a; NV8 # 𞤷.𐮐𞢁𐹠ؤ +B; 𞤕.𐮐𞢁𐹠\u0648\u0654; 𞤷.𐮐𞢁𐹠\u0624; xn--ve6h.xn--jgb1694kz0b2176a; NV8 # 𞤷.𐮐𞢁𐹠ؤ +B; 𞤕.𐮐𞢁𐹠\u0624; 𞤷.𐮐𞢁𐹠\u0624; xn--ve6h.xn--jgb1694kz0b2176a; NV8 # 𞤷.𐮐𞢁𐹠ؤ +B; xn--ve6h.xn--jgb1694kz0b2176a; 𞤷.𐮐𞢁𐹠\u0624; xn--ve6h.xn--jgb1694kz0b2176a; NV8 # 𞤷.𐮐𞢁𐹠ؤ +B; 𐲈-。𑄳񢌻; [B1 B3 P1 V3 V5 V6]; [B1 B3 P1 V3 V5 V6] +B; 𐲈-。𑄳񢌻; [B1 B3 P1 V3 V5 V6]; [B1 B3 P1 V3 V5 V6] +B; 𐳈-。𑄳񢌻; [B1 B3 P1 V3 V5 V6]; [B1 B3 P1 V3 V5 V6] +B; xn----ue6i.xn--v80d6662t; [B1 B3 V3 V5 V6]; [B1 B3 V3 V5 V6] +B; 𐳈-。𑄳񢌻; [B1 B3 P1 V3 V5 V6]; [B1 B3 P1 V3 V5 V6] +B; -󠉖ꡧ.󠊂񇆃🄉; [P1 V3 V6]; [P1 V3 V6] +B; -󠉖ꡧ.󠊂񇆃8,; [P1 V3 V6]; [P1 V3 V6] +B; xn----hg4ei0361g.xn--8,-k362evu488a; [P1 V3 V6]; [P1 V3 V6] +B; xn----hg4ei0361g.xn--207ht163h7m94c; [V3 V6]; [V3 V6] +B; 󠾛󠈴臯𧔤.\u0768𝟝; [B1 P1 V6]; [B1 P1 V6] # 臯𧔤.ݨ5 +B; 󠾛󠈴臯𧔤.\u07685; [B1 P1 V6]; [B1 P1 V6] # 臯𧔤.ݨ5 +B; xn--zb1at733hm579ddhla.xn--5-b5c; [B1 V6]; [B1 V6] # 臯𧔤.ݨ5 +B; ≮𐹣.𝨿; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] +B; <\u0338𐹣.𝨿; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] +B; ≮𐹣.𝨿; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] +B; <\u0338𐹣.𝨿; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] +B; xn--gdh1504g.xn--e92h; [B1 B3 B6 V5 V6]; [B1 B3 B6 V5 V6] +B; 𐹯ᯛ\u0A4D。脥; [B1]; [B1] # 𐹯ᯛ੍.脥 +B; 𐹯ᯛ\u0A4D。脥; [B1]; [B1] # 𐹯ᯛ੍.脥 +B; 
xn--ybc101g3m1p.xn--740a; [B1]; [B1] # 𐹯ᯛ੍.脥 +B; \u1B44\u115F𞷿򃀍.-; [B1 B5 P1 V3 V5 V6]; [B1 B5 P1 V3 V5 V6] # ᭄.- +B; xn--osd971cpx70btgt8b.-; [B1 B5 V3 V5 V6]; [B1 B5 V3 V5 V6] # ᭄.- +T; \u200C。\u0354; [C1 V5]; [V5 A4_2] # .͔ +N; \u200C。\u0354; [C1 V5]; [C1 V5] # .͔ +T; \u200C。\u0354; [C1 V5]; [V5 A4_2] # .͔ +N; \u200C。\u0354; [C1 V5]; [C1 V5] # .͔ +B; .xn--yua; [V5 A4_2]; [V5 A4_2] # .͔ +B; xn--0ug.xn--yua; [C1 V5]; [C1 V5] # .͔ +B; 𞤥󠅮.ᡄႮ; [P1 V6]; [P1 V6] +B; 𞤥󠅮.ᡄႮ; [P1 V6]; [P1 V6] +B; 𞤥󠅮.ᡄⴎ; 𞤥.ᡄⴎ; xn--de6h.xn--37e857h +B; 𞤃󠅮.ᡄႮ; [P1 V6]; [P1 V6] +B; 𞤃󠅮.ᡄⴎ; 𞤥.ᡄⴎ; xn--de6h.xn--37e857h +B; xn--de6h.xn--37e857h; 𞤥.ᡄⴎ; xn--de6h.xn--37e857h +B; 𞤥.ᡄⴎ; ; xn--de6h.xn--37e857h +B; 𞤃.ᡄႮ; [P1 V6]; [P1 V6] +B; 𞤃.ᡄⴎ; 𞤥.ᡄⴎ; xn--de6h.xn--37e857h +B; xn--de6h.xn--mnd799a; [V6]; [V6] +B; 𞤥󠅮.ᡄⴎ; 𞤥.ᡄⴎ; xn--de6h.xn--37e857h +B; 𞤃󠅮.ᡄႮ; [P1 V6]; [P1 V6] +B; 𞤃󠅮.ᡄⴎ; 𞤥.ᡄⴎ; xn--de6h.xn--37e857h +B; 𞤥.ᡄႮ; [P1 V6]; [P1 V6] +B; 𞤧𝨨Ξ.𪺏㛨❸; [B2 B3 B6]; [B2 B3 B6] +B; 𞤧𝨨Ξ.𪺏㛨❸; [B2 B3 B6]; [B2 B3 B6] +B; 𞤧𝨨ξ.𪺏㛨❸; [B2 B3 B6]; [B2 B3 B6] +B; 𞤅𝨨Ξ.𪺏㛨❸; [B2 B3 B6]; [B2 B3 B6] +B; 𞤅𝨨ξ.𪺏㛨❸; [B2 B3 B6]; [B2 B3 B6] +B; xn--zxa5691vboja.xn--bfi293ci119b; [B2 B3 B6]; [B2 B3 B6] +B; 𞤧𝨨ξ.𪺏㛨❸; [B2 B3 B6]; [B2 B3 B6] +B; 𞤅𝨨Ξ.𪺏㛨❸; [B2 B3 B6]; [B2 B3 B6] +B; 𞤅𝨨ξ.𪺏㛨❸; [B2 B3 B6]; [B2 B3 B6] +T; ᠆몆\u200C-。Ⴛ𐦅︒; [B1 B5 B6 C1 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # ᠆몆-.Ⴛ𐦅︒ +N; ᠆몆\u200C-。Ⴛ𐦅︒; [B1 B5 B6 C1 P1 V3 V6]; [B1 B5 B6 C1 P1 V3 V6] # ᠆몆-.Ⴛ𐦅︒ +T; ᠆몆\u200C-。Ⴛ𐦅︒; [B1 B5 B6 C1 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # ᠆몆-.Ⴛ𐦅︒ +N; ᠆몆\u200C-。Ⴛ𐦅︒; [B1 B5 B6 C1 P1 V3 V6]; [B1 B5 B6 C1 P1 V3 V6] # ᠆몆-.Ⴛ𐦅︒ +T; ᠆몆\u200C-。Ⴛ𐦅。; [B1 B5 B6 C1 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # ᠆몆-.Ⴛ𐦅. +N; ᠆몆\u200C-。Ⴛ𐦅。; [B1 B5 B6 C1 P1 V3 V6]; [B1 B5 B6 C1 P1 V3 V6] # ᠆몆-.Ⴛ𐦅. +T; ᠆몆\u200C-。Ⴛ𐦅。; [B1 B5 B6 C1 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # ᠆몆-.Ⴛ𐦅. +N; ᠆몆\u200C-。Ⴛ𐦅。; [B1 B5 B6 C1 P1 V3 V6]; [B1 B5 B6 C1 P1 V3 V6] # ᠆몆-.Ⴛ𐦅. +T; ᠆몆\u200C-。ⴛ𐦅。; [B1 B5 B6 C1 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # ᠆몆-.ⴛ𐦅. +N; ᠆몆\u200C-。ⴛ𐦅。; [B1 B5 B6 C1 P1 V3 V6]; [B1 B5 B6 C1 P1 V3 V6] # ᠆몆-.ⴛ𐦅. +T; ᠆몆\u200C-。ⴛ𐦅。; [B1 B5 B6 C1 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # ᠆몆-.ⴛ𐦅. +N; ᠆몆\u200C-。ⴛ𐦅。; [B1 B5 B6 C1 P1 V3 V6]; [B1 B5 B6 C1 P1 V3 V6] # ᠆몆-.ⴛ𐦅. +B; xn----e3j6620g.xn--jlju661e.; [B1 B5 B6 V3 V6]; [B1 B5 B6 V3 V6] +B; xn----e3j425bsk1o.xn--jlju661e.; [B1 B5 B6 C1 V3 V6]; [B1 B5 B6 C1 V3 V6] # ᠆몆-.ⴛ𐦅. +B; xn----e3j6620g.xn--znd4948j.; [B1 B5 B6 V3 V6]; [B1 B5 B6 V3 V6] +B; xn----e3j425bsk1o.xn--znd4948j.; [B1 B5 B6 C1 V3 V6]; [B1 B5 B6 C1 V3 V6] # ᠆몆-.Ⴛ𐦅. 
+T; ᠆몆\u200C-。ⴛ𐦅︒; [B1 B5 B6 C1 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # ᠆몆-.ⴛ𐦅︒ +N; ᠆몆\u200C-。ⴛ𐦅︒; [B1 B5 B6 C1 P1 V3 V6]; [B1 B5 B6 C1 P1 V3 V6] # ᠆몆-.ⴛ𐦅︒ +T; ᠆몆\u200C-。ⴛ𐦅︒; [B1 B5 B6 C1 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # ᠆몆-.ⴛ𐦅︒ +N; ᠆몆\u200C-。ⴛ𐦅︒; [B1 B5 B6 C1 P1 V3 V6]; [B1 B5 B6 C1 P1 V3 V6] # ᠆몆-.ⴛ𐦅︒ +B; xn----e3j6620g.xn--jlj4997dhgh; [B1 B5 B6 V3 V6]; [B1 B5 B6 V3 V6] +B; xn----e3j425bsk1o.xn--jlj4997dhgh; [B1 B5 B6 C1 V3 V6]; [B1 B5 B6 C1 V3 V6] # ᠆몆-.ⴛ𐦅︒ +B; xn----e3j6620g.xn--znd2362jhgh; [B1 B5 B6 V3 V6]; [B1 B5 B6 V3 V6] +B; xn----e3j425bsk1o.xn--znd2362jhgh; [B1 B5 B6 C1 V3 V6]; [B1 B5 B6 C1 V3 V6] # ᠆몆-.Ⴛ𐦅︒ +T; 󠾳.︒⥱\u200C𐹬; [B1 C1 P1 V6]; [B1 P1 V6] # .︒⥱𐹬 +N; 󠾳.︒⥱\u200C𐹬; [B1 C1 P1 V6]; [B1 C1 P1 V6] # .︒⥱𐹬 +T; 󠾳.。⥱\u200C𐹬; [B1 C1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # ..⥱𐹬 +N; 󠾳.。⥱\u200C𐹬; [B1 C1 P1 V6 A4_2]; [B1 C1 P1 V6 A4_2] # ..⥱𐹬 +B; xn--uf66e..xn--qti2829e; [B1 V6 A4_2]; [B1 V6 A4_2] +B; xn--uf66e..xn--0ugz28as66q; [B1 C1 V6 A4_2]; [B1 C1 V6 A4_2] # ..⥱𐹬 +B; xn--uf66e.xn--qtiz073e3ik; [B1 V6]; [B1 V6] +B; xn--uf66e.xn--0ugz28axl3pqxna; [B1 C1 V6]; [B1 C1 V6] # .︒⥱𐹬 +B; 𐯖.𐹠Ⴑ񚇜𐫊; [B1 P1 V6]; [B1 P1 V6] +B; 𐯖.𐹠ⴑ񚇜𐫊; [B1 P1 V6]; [B1 P1 V6] +B; xn--n49c.xn--8kj8702ewicl862o; [B1 V6]; [B1 V6] +B; xn--n49c.xn--pnd4619jwicl862o; [B1 V6]; [B1 V6] +B; \u0FA4񱤯.𝟭Ⴛ; [P1 V5 V6]; [P1 V5 V6] # ྤ.1Ⴛ +B; \u0FA4񱤯.1Ⴛ; [P1 V5 V6]; [P1 V5 V6] # ྤ.1Ⴛ +B; \u0FA4񱤯.1ⴛ; [P1 V5 V6]; [P1 V5 V6] # ྤ.1ⴛ +B; xn--0fd40533g.xn--1-tws; [V5 V6]; [V5 V6] # ྤ.1ⴛ +B; xn--0fd40533g.xn--1-q1g; [V5 V6]; [V5 V6] # ྤ.1Ⴛ +B; \u0FA4񱤯.𝟭ⴛ; [P1 V5 V6]; [P1 V5 V6] # ྤ.1ⴛ +B; -\u0826齀。릿𐸋; [B1 B5 B6 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # -ࠦ齀.릿 +B; -\u0826齀。릿𐸋; [B1 B5 B6 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # -ࠦ齀.릿 +B; xn----6gd0617i.xn--7y2bm55m; [B1 B5 B6 V3 V6]; [B1 B5 B6 V3 V6] # -ࠦ齀.릿 +T; 󠔊\u071C鹝꾗。񾵐\u200D\u200D⏃; [B1 B6 C2 P1 V6]; [B1 B6 P1 V6] # ܜ鹝꾗.⏃ +N; 󠔊\u071C鹝꾗。񾵐\u200D\u200D⏃; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # ܜ鹝꾗.⏃ +T; 󠔊\u071C鹝꾗。񾵐\u200D\u200D⏃; [B1 B6 C2 P1 V6]; [B1 B6 P1 V6] # ܜ鹝꾗.⏃ +N; 󠔊\u071C鹝꾗。񾵐\u200D\u200D⏃; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # ܜ鹝꾗.⏃ +B; xn--mnb6558e91kyq533a.xn--6mh27269e; [B1 B6 V6]; [B1 B6 V6] # ܜ鹝꾗.⏃ +B; xn--mnb6558e91kyq533a.xn--1uga46zs309y; [B1 B6 C2 V6]; [B1 B6 C2 V6] # ܜ鹝꾗.⏃ +B; ≮.-\u0708--; [B1 P1 V2 V3 V6]; [B1 P1 V2 V3 V6] # ≮.-܈-- +B; <\u0338.-\u0708--; [B1 P1 V2 V3 V6]; [B1 P1 V2 V3 V6] # ≮.-܈-- +B; ≮.-\u0708--; [B1 P1 V2 V3 V6]; [B1 P1 V2 V3 V6] # ≮.-܈-- +B; <\u0338.-\u0708--; [B1 P1 V2 V3 V6]; [B1 P1 V2 V3 V6] # ≮.-܈-- +B; xn--gdh.xn------eqf; [B1 V2 V3 V6]; [B1 V2 V3 V6] # ≮.-܈-- +T; 𐹸󠋳。\u200Dς𝟩; [B1 C2 P1 V6]; [B1 P1 V6] # 𐹸.ς7 +N; 𐹸󠋳。\u200Dς𝟩; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 𐹸.ς7 +T; 𐹸󠋳。\u200Dς7; [B1 C2 P1 V6]; [B1 P1 V6] # 𐹸.ς7 +N; 𐹸󠋳。\u200Dς7; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 𐹸.ς7 +T; 𐹸󠋳。\u200DΣ7; [B1 C2 P1 V6]; [B1 P1 V6] # 𐹸.σ7 +N; 𐹸󠋳。\u200DΣ7; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 𐹸.σ7 +T; 𐹸󠋳。\u200Dσ7; [B1 C2 P1 V6]; [B1 P1 V6] # 𐹸.σ7 +N; 𐹸󠋳。\u200Dσ7; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 𐹸.σ7 +B; xn--wo0di5177c.xn--7-zmb; [B1 V6]; [B1 V6] +B; xn--wo0di5177c.xn--7-zmb938s; [B1 C2 V6]; [B1 C2 V6] # 𐹸.σ7 +B; xn--wo0di5177c.xn--7-xmb248s; [B1 C2 V6]; [B1 C2 V6] # 𐹸.ς7 +T; 𐹸󠋳。\u200DΣ𝟩; [B1 C2 P1 V6]; [B1 P1 V6] # 𐹸.σ7 +N; 𐹸󠋳。\u200DΣ𝟩; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 𐹸.σ7 +T; 𐹸󠋳。\u200Dσ𝟩; [B1 C2 P1 V6]; [B1 P1 V6] # 𐹸.σ7 +N; 𐹸󠋳。\u200Dσ𝟩; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 𐹸.σ7 +T; ς򅜌8.𞭤; [P1 V6]; [P1 V6] +N; ς򅜌8.𞭤; [P1 V6]; [P1 V6] +T; ς򅜌8.𞭤; [P1 V6]; [P1 V6] +N; ς򅜌8.𞭤; [P1 V6]; [P1 V6] +B; Σ򅜌8.𞭤; [P1 V6]; [P1 V6] +B; σ򅜌8.𞭤; [P1 V6]; [P1 V6] +B; xn--8-zmb14974n.xn--su6h; [V6]; [V6] +B; xn--8-xmb44974n.xn--su6h; [V6]; 
[V6] +B; Σ򅜌8.𞭤; [P1 V6]; [P1 V6] +B; σ򅜌8.𞭤; [P1 V6]; [P1 V6] +T; \u200Cᡑ🄀\u0684.-𐫄𑲤; [B1 C1 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # ᡑ🄀ڄ.-𐫄𑲤 +N; \u200Cᡑ🄀\u0684.-𐫄𑲤; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # ᡑ🄀ڄ.-𐫄𑲤 +T; \u200Cᡑ0.\u0684.-𐫄𑲤; [B1 C1 V3]; [B1 V3] # ᡑ0.ڄ.-𐫄𑲤 +N; \u200Cᡑ0.\u0684.-𐫄𑲤; [B1 C1 V3]; [B1 C1 V3] # ᡑ0.ڄ.-𐫄𑲤 +B; xn--0-o7j.xn--9ib.xn----ek5i065b; [B1 V3]; [B1 V3] # ᡑ0.ڄ.-𐫄𑲤 +B; xn--0-o7j263b.xn--9ib.xn----ek5i065b; [B1 C1 V3]; [B1 C1 V3] # ᡑ0.ڄ.-𐫄𑲤 +B; xn--9ib722gbw95a.xn----ek5i065b; [B1 B5 B6 V3 V6]; [B1 B5 B6 V3 V6] # ᡑ🄀ڄ.-𐫄𑲤 +B; xn--9ib722gvtfi563c.xn----ek5i065b; [B1 C1 V3 V6]; [B1 C1 V3 V6] # ᡑ🄀ڄ.-𐫄𑲤 +B; 𖠍。𐪿넯򞵲; [B2 B3 P1 V6]; [B2 B3 P1 V6] +B; 𖠍。𐪿넯򞵲; [B2 B3 P1 V6]; [B2 B3 P1 V6] +B; xn--4e9e.xn--l60bj21opd57g; [B2 B3 V6]; [B2 B3 V6] +B; ᠇Ⴘ。\u0603Ⴈ𝆊; [B1 P1 V6]; [B1 P1 V6] # ᠇Ⴘ.Ⴈ𝆊 +B; ᠇ⴘ。\u0603ⴈ𝆊; [B1 P1 V6]; [B1 P1 V6] # ᠇ⴘ.ⴈ𝆊 +B; xn--d6e009h.xn--lfb290rfu3z; [B1 V6]; [B1 V6] # ᠇ⴘ.ⴈ𝆊 +B; xn--wnd558a.xn--lfb465c1v87a; [B1 V6]; [B1 V6] # ᠇Ⴘ.Ⴈ𝆊 +B; ⒚󠋑𞤰。牣\u0667Ⴜᣥ; [B1 B5 P1 V6]; [B1 B5 P1 V6] # ⒚𞤰.牣٧Ⴜᣥ +B; 19.󠋑𞤰。牣\u0667Ⴜᣥ; [B1 B5 P1 V6]; [B1 B5 P1 V6] # 19.𞤰.牣٧Ⴜᣥ +B; 19.󠋑𞤰。牣\u0667ⴜᣥ; [B1 B5 P1 V6]; [B1 B5 P1 V6] # 19.𞤰.牣٧ⴜᣥ +B; 19.󠋑𞤎。牣\u0667Ⴜᣥ; [B1 B5 P1 V6]; [B1 B5 P1 V6] # 19.𞤰.牣٧Ⴜᣥ +B; 19.xn--oe6h75760c.xn--gib404ccxgh00h; [B1 B5 V6]; [B1 B5 V6] # 19.𞤰.牣٧Ⴜᣥ +B; 19.xn--oe6h75760c.xn--gib285gtxo2l9d; [B1 B5 V6]; [B1 B5 V6] # 19.𞤰.牣٧ⴜᣥ +B; ⒚󠋑𞤰。牣\u0667ⴜᣥ; [B1 B5 P1 V6]; [B1 B5 P1 V6] # ⒚𞤰.牣٧ⴜᣥ +B; ⒚󠋑𞤎。牣\u0667Ⴜᣥ; [B1 B5 P1 V6]; [B1 B5 P1 V6] # ⒚𞤰.牣٧Ⴜᣥ +B; xn--cthy466n29j3e.xn--gib404ccxgh00h; [B1 B5 V6]; [B1 B5 V6] # ⒚𞤰.牣٧Ⴜᣥ +B; xn--cthy466n29j3e.xn--gib285gtxo2l9d; [B1 B5 V6]; [B1 B5 V6] # ⒚𞤰.牣٧ⴜᣥ +B; -𐋱𐰽⒈.Ⴓ; [B1 P1 V3 V6]; [B1 P1 V3 V6] +B; -𐋱𐰽1..Ⴓ; [B1 P1 V3 V6 A4_2]; [B1 P1 V3 V6 A4_2] +B; -𐋱𐰽1..ⴓ; [B1 V3 A4_2]; [B1 V3 A4_2] +B; xn---1-895nq11a..xn--blj; [B1 V3 A4_2]; [B1 V3 A4_2] +B; xn---1-895nq11a..xn--rnd; [B1 V3 V6 A4_2]; [B1 V3 V6 A4_2] +B; -𐋱𐰽⒈.ⴓ; [B1 P1 V3 V6]; [B1 P1 V3 V6] +B; xn----ecp0206g90h.xn--blj; [B1 V3 V6]; [B1 V3 V6] +B; xn----ecp0206g90h.xn--rnd; [B1 V3 V6]; [B1 V3 V6] +T; \u200C긃.榶-; [C1 V3]; [V3] # 긃.榶- +N; \u200C긃.榶-; [C1 V3]; [C1 V3] # 긃.榶- +T; \u200C긃.榶-; [C1 V3]; [V3] # 긃.榶- +N; \u200C긃.榶-; [C1 V3]; [C1 V3] # 긃.榶- +B; xn--ej0b.xn----d87b; [V3]; [V3] +B; xn--0ug3307c.xn----d87b; [C1 V3]; [C1 V3] # 긃.榶- +T; 뉓泓𜵽.\u09CD\u200D; [P1 V5 V6]; [P1 V5 V6] # 뉓泓.্ +N; 뉓泓𜵽.\u09CD\u200D; [P1 V5 V6]; [P1 V5 V6] # 뉓泓.্ +T; 뉓泓𜵽.\u09CD\u200D; [P1 V5 V6]; [P1 V5 V6] # 뉓泓.্ +N; 뉓泓𜵽.\u09CD\u200D; [P1 V5 V6]; [P1 V5 V6] # 뉓泓.্ +B; xn--lwwp69lqs7m.xn--b7b; [V5 V6]; [V5 V6] # 뉓泓.্ +B; xn--lwwp69lqs7m.xn--b7b605i; [V5 V6]; [V5 V6] # 뉓泓.্ +T; \u200D𐹴ß。\u0EB4\u2B75񪅌; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # 𐹴ß.ິ +N; \u200D𐹴ß。\u0EB4\u2B75񪅌; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # 𐹴ß.ິ +T; \u200D𐹴ß。\u0EB4\u2B75񪅌; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # 𐹴ß.ິ +N; \u200D𐹴ß。\u0EB4\u2B75񪅌; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # 𐹴ß.ິ +T; \u200D𐹴SS。\u0EB4\u2B75񪅌; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # 𐹴ss.ິ +N; \u200D𐹴SS。\u0EB4\u2B75񪅌; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # 𐹴ss.ິ +T; \u200D𐹴ss。\u0EB4\u2B75񪅌; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # 𐹴ss.ິ +N; \u200D𐹴ss。\u0EB4\u2B75񪅌; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # 𐹴ss.ິ +T; \u200D𐹴Ss。\u0EB4\u2B75񪅌; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # 𐹴ss.ິ +N; \u200D𐹴Ss。\u0EB4\u2B75񪅌; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # 𐹴ss.ິ +B; xn--ss-ti3o.xn--57c638l8774i; [B1 V5 V6]; [B1 V5 V6] # 𐹴ss.ິ +B; xn--ss-l1t5169j.xn--57c638l8774i; [B1 C2 V5 V6]; [B1 C2 V5 V6] # 𐹴ss.ິ +B; xn--zca770nip7n.xn--57c638l8774i; [B1 C2 V5 V6]; [B1 C2 V5 V6] # 𐹴ß.ິ +T; \u200D𐹴SS。\u0EB4\u2B75񪅌; [B1 C2 P1 V5 V6]; 
[B1 P1 V5 V6] # 𐹴ss.ິ +N; \u200D𐹴SS。\u0EB4\u2B75񪅌; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # 𐹴ss.ິ +T; \u200D𐹴ss。\u0EB4\u2B75񪅌; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # 𐹴ss.ິ +N; \u200D𐹴ss。\u0EB4\u2B75񪅌; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # 𐹴ss.ິ +T; \u200D𐹴Ss。\u0EB4\u2B75񪅌; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # 𐹴ss.ິ +N; \u200D𐹴Ss。\u0EB4\u2B75񪅌; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # 𐹴ss.ິ +B; \u1B44.\u1BAA-≮≠; [P1 V5 V6]; [P1 V5 V6] # ᭄.᮪-≮≠ +B; \u1B44.\u1BAA-<\u0338=\u0338; [P1 V5 V6]; [P1 V5 V6] # ᭄.᮪-≮≠ +B; \u1B44.\u1BAA-≮≠; [P1 V5 V6]; [P1 V5 V6] # ᭄.᮪-≮≠ +B; \u1B44.\u1BAA-<\u0338=\u0338; [P1 V5 V6]; [P1 V5 V6] # ᭄.᮪-≮≠ +B; xn--1uf.xn----nmlz65aub; [V5 V6]; [V5 V6] # ᭄.᮪-≮≠ +B; \u1BF3Ⴑ\u115F.𑄴Ⅎ; [P1 V5 V6]; [P1 V5 V6] # ᯳Ⴑ.𑄴Ⅎ +B; \u1BF3Ⴑ\u115F.𑄴Ⅎ; [P1 V5 V6]; [P1 V5 V6] # ᯳Ⴑ.𑄴Ⅎ +B; \u1BF3ⴑ\u115F.𑄴ⅎ; [P1 V5 V6]; [P1 V5 V6] # ᯳ⴑ.𑄴ⅎ +B; \u1BF3Ⴑ\u115F.𑄴ⅎ; [P1 V5 V6]; [P1 V5 V6] # ᯳Ⴑ.𑄴ⅎ +B; xn--pnd26a55x.xn--73g3065g; [V5 V6]; [V5 V6] # ᯳Ⴑ.𑄴ⅎ +B; xn--osd925cvyn.xn--73g3065g; [V5 V6]; [V5 V6] # ᯳ⴑ.𑄴ⅎ +B; xn--pnd26a55x.xn--f3g7465g; [V5 V6]; [V5 V6] # ᯳Ⴑ.𑄴Ⅎ +B; \u1BF3ⴑ\u115F.𑄴ⅎ; [P1 V5 V6]; [P1 V5 V6] # ᯳ⴑ.𑄴ⅎ +B; \u1BF3Ⴑ\u115F.𑄴ⅎ; [P1 V5 V6]; [P1 V5 V6] # ᯳Ⴑ.𑄴ⅎ +B; 𜉆。Ⴃ𐴣𐹹똯; [B5 P1 V6]; [B5 P1 V6] +B; 𜉆。Ⴃ𐴣𐹹똯; [B5 P1 V6]; [B5 P1 V6] +B; 𜉆。ⴃ𐴣𐹹똯; [B5 P1 V6]; [B5 P1 V6] +B; 𜉆。ⴃ𐴣𐹹똯; [B5 P1 V6]; [B5 P1 V6] +B; xn--187g.xn--ukjy205b8rscdeb; [B5 V6]; [B5 V6] +B; xn--187g.xn--bnd4785f8r8bdeb; [B5 V6]; [B5 V6] +B; 𐫀。⳻󠙾󠄷\u3164; [B1 P1 V6]; [B1 P1 V6] # 𐫀.⳻ +B; 𐫀。⳻󠙾󠄷\u1160; [B1 P1 V6]; [B1 P1 V6] # 𐫀.⳻ +B; xn--pw9c.xn--psd742lxt32w; [B1 V6]; [B1 V6] # 𐫀.⳻ +B; xn--pw9c.xn--mkj83l4v899a; [B1 V6]; [B1 V6] # 𐫀.⳻ +B; \u079A⾇.\u071E-𐋰; [B2 B3]; [B2 B3] # ޚ舛.ܞ-𐋰 +B; \u079A舛.\u071E-𐋰; [B2 B3]; [B2 B3] # ޚ舛.ܞ-𐋰 +B; xn--7qb6383d.xn----20c3154q; [B2 B3]; [B2 B3] # ޚ舛.ܞ-𐋰 +B; Ⴉ猕󹛫≮.︒; [P1 V6]; [P1 V6] +B; Ⴉ猕󹛫<\u0338.︒; [P1 V6]; [P1 V6] +B; Ⴉ猕󹛫≮.。; [P1 V6 A4_2]; [P1 V6 A4_2] +B; Ⴉ猕󹛫<\u0338.。; [P1 V6 A4_2]; [P1 V6 A4_2] +B; ⴉ猕󹛫<\u0338.。; [P1 V6 A4_2]; [P1 V6 A4_2] +B; ⴉ猕󹛫≮.。; [P1 V6 A4_2]; [P1 V6 A4_2] +B; xn--gdh892bbz0d5438s..; [V6 A4_2]; [V6 A4_2] +B; xn--hnd212gz32d54x5r..; [V6 A4_2]; [V6 A4_2] +B; ⴉ猕󹛫<\u0338.︒; [P1 V6]; [P1 V6] +B; ⴉ猕󹛫≮.︒; [P1 V6]; [P1 V6] +B; xn--gdh892bbz0d5438s.xn--y86c; [V6]; [V6] +B; xn--hnd212gz32d54x5r.xn--y86c; [V6]; [V6] +B; 🏮。\u062B鳳\u07E2󠅉; [B1 B2]; [B1 B2] # 🏮.ث鳳ߢ +B; 🏮。\u062B鳳\u07E2󠅉; [B1 B2]; [B1 B2] # 🏮.ث鳳ߢ +B; xn--8m8h.xn--qgb29f6z90a; [B1 B2]; [B1 B2] # 🏮.ث鳳ߢ +T; \u200D𐹶。ß; [B1 C2]; [B1] # 𐹶.ß +N; \u200D𐹶。ß; [B1 C2]; [B1 C2] # 𐹶.ß +T; \u200D𐹶。SS; [B1 C2]; [B1] # 𐹶.ss +N; \u200D𐹶。SS; [B1 C2]; [B1 C2] # 𐹶.ss +T; \u200D𐹶。ss; [B1 C2]; [B1] # 𐹶.ss +N; \u200D𐹶。ss; [B1 C2]; [B1 C2] # 𐹶.ss +T; \u200D𐹶。Ss; [B1 C2]; [B1] # 𐹶.ss +N; \u200D𐹶。Ss; [B1 C2]; [B1 C2] # 𐹶.ss +B; xn--uo0d.ss; [B1]; [B1] +B; xn--1ug9105g.ss; [B1 C2]; [B1 C2] # 𐹶.ss +B; xn--1ug9105g.xn--zca; [B1 C2]; [B1 C2] # 𐹶.ß +T; Å둄-.\u200C; [C1 V3]; [V3] # å둄-. +N; Å둄-.\u200C; [C1 V3]; [C1 V3] # å둄-. +T; A\u030A둄-.\u200C; [C1 V3]; [V3] # å둄-. +N; A\u030A둄-.\u200C; [C1 V3]; [C1 V3] # å둄-. +T; Å둄-.\u200C; [C1 V3]; [V3] # å둄-. +N; Å둄-.\u200C; [C1 V3]; [C1 V3] # å둄-. +T; A\u030A둄-.\u200C; [C1 V3]; [V3] # å둄-. +N; A\u030A둄-.\u200C; [C1 V3]; [C1 V3] # å둄-. +T; a\u030A둄-.\u200C; [C1 V3]; [V3] # å둄-. +N; a\u030A둄-.\u200C; [C1 V3]; [C1 V3] # å둄-. +T; å둄-.\u200C; [C1 V3]; [V3] # å둄-. +N; å둄-.\u200C; [C1 V3]; [C1 V3] # å둄-. +B; xn----1fa1788k.; [V3]; [V3] +B; xn----1fa1788k.xn--0ug; [C1 V3]; [C1 V3] # å둄-. +T; a\u030A둄-.\u200C; [C1 V3]; [V3] # å둄-. +N; a\u030A둄-.\u200C; [C1 V3]; [C1 V3] # å둄-. +T; å둄-.\u200C; [C1 V3]; [V3] # å둄-. 
+N; å둄-.\u200C; [C1 V3]; [C1 V3] # å둄-. +B; \u3099򬎑\u1DD7𞤀.򱲢-\u0953; [B1 B6 P1 V5 V6]; [B1 B6 P1 V5 V6] # ゙ᷗ𞤢.-॓ +B; \u3099򬎑\u1DD7𞤢.򱲢-\u0953; [B1 B6 P1 V5 V6]; [B1 B6 P1 V5 V6] # ゙ᷗ𞤢.-॓ +B; xn--veg121fwg63altj9d.xn----eyd92688s; [B1 B6 V5 V6]; [B1 B6 V5 V6] # ゙ᷗ𞤢.-॓ +T; ς.ß񴱄\u06DD\u2D7F; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ς.ß⵿ +N; ς.ß񴱄\u06DD\u2D7F; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ς.ß⵿ +B; Σ.SS񴱄\u06DD\u2D7F; [B5 B6 P1 V6]; [B5 B6 P1 V6] # σ.ss⵿ +B; σ.ss񴱄\u06DD\u2D7F; [B5 B6 P1 V6]; [B5 B6 P1 V6] # σ.ss⵿ +B; Σ.ss񴱄\u06DD\u2D7F; [B5 B6 P1 V6]; [B5 B6 P1 V6] # σ.ss⵿ +B; xn--4xa.xn--ss-y8d4760biv60n; [B5 B6 V6]; [B5 B6 V6] # σ.ss⵿ +T; Σ.ß񴱄\u06DD\u2D7F; [B5 B6 P1 V6]; [B5 B6 P1 V6] # σ.ß⵿ +N; Σ.ß񴱄\u06DD\u2D7F; [B5 B6 P1 V6]; [B5 B6 P1 V6] # σ.ß⵿ +T; σ.ß񴱄\u06DD\u2D7F; [B5 B6 P1 V6]; [B5 B6 P1 V6] # σ.ß⵿ +N; σ.ß񴱄\u06DD\u2D7F; [B5 B6 P1 V6]; [B5 B6 P1 V6] # σ.ß⵿ +B; xn--4xa.xn--zca281az71b8x73m; [B5 B6 V6]; [B5 B6 V6] # σ.ß⵿ +B; xn--3xa.xn--zca281az71b8x73m; [B5 B6 V6]; [B5 B6 V6] # ς.ß⵿ +B; ꡀ𞀟。\u066B\u0599; [B1]; [B1] # ꡀ𞀟.٫֙ +B; ꡀ𞀟。\u066B\u0599; [B1]; [B1] # ꡀ𞀟.٫֙ +B; xn--8b9a1720d.xn--kcb33b; [B1]; [B1] # ꡀ𞀟.٫֙ +T; 򈛉\u200C\u08A9。⧅񘘡-𐭡; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 P1 V6] # ࢩ.⧅-𐭡 +N; 򈛉\u200C\u08A9。⧅񘘡-𐭡; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # ࢩ.⧅-𐭡 +T; 򈛉\u200C\u08A9。⧅񘘡-𐭡; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 P1 V6] # ࢩ.⧅-𐭡 +N; 򈛉\u200C\u08A9。⧅񘘡-𐭡; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # ࢩ.⧅-𐭡 +B; xn--yyb56242i.xn----zir1232guu71b; [B1 B5 B6 V6]; [B1 B5 B6 V6] # ࢩ.⧅-𐭡 +B; xn--yyb780jll63m.xn----zir1232guu71b; [B1 B5 B6 C1 V6]; [B1 B5 B6 C1 V6] # ࢩ.⧅-𐭡 +T; 룱\u200D𰍨\u200C。𝨖︒; [C1 C2 P1 V5 V6]; [P1 V5 V6] # 룱.𝨖︒ +N; 룱\u200D𰍨\u200C。𝨖︒; [C1 C2 P1 V5 V6]; [C1 C2 P1 V5 V6] # 룱.𝨖︒ +T; 룱\u200D𰍨\u200C。𝨖︒; [C1 C2 P1 V5 V6]; [P1 V5 V6] # 룱.𝨖︒ +N; 룱\u200D𰍨\u200C。𝨖︒; [C1 C2 P1 V5 V6]; [C1 C2 P1 V5 V6] # 룱.𝨖︒ +T; 룱\u200D𰍨\u200C。𝨖。; [C1 C2 P1 V5 V6]; [P1 V5 V6] # 룱.𝨖. +N; 룱\u200D𰍨\u200C。𝨖。; [C1 C2 P1 V5 V6]; [C1 C2 P1 V5 V6] # 룱.𝨖. +T; 룱\u200D𰍨\u200C。𝨖。; [C1 C2 P1 V5 V6]; [P1 V5 V6] # 룱.𝨖. +N; 룱\u200D𰍨\u200C。𝨖。; [C1 C2 P1 V5 V6]; [C1 C2 P1 V5 V6] # 룱.𝨖. +B; xn--ct2b0738h.xn--772h.; [V5 V6]; [V5 V6] +B; xn--0ugb3358ili2v.xn--772h.; [C1 C2 V5 V6]; [C1 C2 V5 V6] # 룱.𝨖. 
+B; xn--ct2b0738h.xn--y86cl899a; [V5 V6]; [V5 V6] +B; xn--0ugb3358ili2v.xn--y86cl899a; [C1 C2 V5 V6]; [C1 C2 V5 V6] # 룱.𝨖︒ +T; 🄄.\u1CDC⒈ß; [P1 V5 V6]; [P1 V5 V6] # 🄄.᳜⒈ß +N; 🄄.\u1CDC⒈ß; [P1 V5 V6]; [P1 V5 V6] # 🄄.᳜⒈ß +T; 3,.\u1CDC1.ß; [P1 V5 V6]; [P1 V5 V6] # 3,.᳜1.ß +N; 3,.\u1CDC1.ß; [P1 V5 V6]; [P1 V5 V6] # 3,.᳜1.ß +B; 3,.\u1CDC1.SS; [P1 V5 V6]; [P1 V5 V6] # 3,.᳜1.ss +B; 3,.\u1CDC1.ss; [P1 V5 V6]; [P1 V5 V6] # 3,.᳜1.ss +B; 3,.\u1CDC1.Ss; [P1 V5 V6]; [P1 V5 V6] # 3,.᳜1.ss +B; 3,.xn--1-43l.ss; [P1 V5 V6]; [P1 V5 V6] # 3,.᳜1.ss +B; 3,.xn--1-43l.xn--zca; [P1 V5 V6]; [P1 V5 V6] # 3,.᳜1.ß +B; 🄄.\u1CDC⒈SS; [P1 V5 V6]; [P1 V5 V6] # 🄄.᳜⒈ss +B; 🄄.\u1CDC⒈ss; [P1 V5 V6]; [P1 V5 V6] # 🄄.᳜⒈ss +B; 🄄.\u1CDC⒈Ss; [P1 V5 V6]; [P1 V5 V6] # 🄄.᳜⒈ss +B; xn--x07h.xn--ss-k1r094b; [V5 V6]; [V5 V6] # 🄄.᳜⒈ss +B; xn--x07h.xn--zca344lmif; [V5 V6]; [V5 V6] # 🄄.᳜⒈ß +B; 񇌍\u2D7F。𞼓򡄨𑐺; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ⵿.𑐺 +B; 񇌍\u2D7F。𞼓򡄨𑐺; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ⵿.𑐺 +B; xn--eoj16016a.xn--0v1d3848a3lr0d; [B2 B3 V6]; [B2 B3 V6] # ⵿.𑐺 +T; \u1DFD\u103A\u094D.≠\u200D㇛; [C2 P1 V5 V6]; [P1 V5 V6] # ်्᷽.≠㇛ +N; \u1DFD\u103A\u094D.≠\u200D㇛; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ်्᷽.≠㇛ +T; \u103A\u094D\u1DFD.≠\u200D㇛; [C2 P1 V5 V6]; [P1 V5 V6] # ်्᷽.≠㇛ +N; \u103A\u094D\u1DFD.≠\u200D㇛; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ်्᷽.≠㇛ +T; \u103A\u094D\u1DFD.=\u0338\u200D㇛; [C2 P1 V5 V6]; [P1 V5 V6] # ်्᷽.≠㇛ +N; \u103A\u094D\u1DFD.=\u0338\u200D㇛; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ်्᷽.≠㇛ +T; \u103A\u094D\u1DFD.≠\u200D㇛; [C2 P1 V5 V6]; [P1 V5 V6] # ်्᷽.≠㇛ +N; \u103A\u094D\u1DFD.≠\u200D㇛; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ်्᷽.≠㇛ +T; \u103A\u094D\u1DFD.=\u0338\u200D㇛; [C2 P1 V5 V6]; [P1 V5 V6] # ်्᷽.≠㇛ +N; \u103A\u094D\u1DFD.=\u0338\u200D㇛; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ်्᷽.≠㇛ +B; xn--n3b956a9zm.xn--1ch912d; [V5 V6]; [V5 V6] # ်्᷽.≠㇛ +B; xn--n3b956a9zm.xn--1ug63gz5w; [C2 V5 V6]; [C2 V5 V6] # ်्᷽.≠㇛ +T; Ⴁ𐋨娤.\u200D\u033C\u0662𑖿; [B1 C2 P1 V6]; [B1 P1 V5 V6] # Ⴁ𐋨娤.̼٢𑖿 +N; Ⴁ𐋨娤.\u200D\u033C\u0662𑖿; [B1 C2 P1 V6]; [B1 C2 P1 V6] # Ⴁ𐋨娤.̼٢𑖿 +T; ⴁ𐋨娤.\u200D\u033C\u0662𑖿; [B1 C2]; [B1 V5] # ⴁ𐋨娤.̼٢𑖿 +N; ⴁ𐋨娤.\u200D\u033C\u0662𑖿; [B1 C2]; [B1 C2] # ⴁ𐋨娤.̼٢𑖿 +B; xn--skjw75lg29h.xn--9ta62nrv36a; [B1 V5]; [B1 V5] # ⴁ𐋨娤.̼٢𑖿 +B; xn--skjw75lg29h.xn--9ta62ngt6aou8t; [B1 C2]; [B1 C2] # ⴁ𐋨娤.̼٢𑖿 +B; xn--8md2578ag21g.xn--9ta62nrv36a; [B1 V5 V6]; [B1 V5 V6] # Ⴁ𐋨娤.̼٢𑖿 +B; xn--8md2578ag21g.xn--9ta62ngt6aou8t; [B1 C2 V6]; [B1 C2 V6] # Ⴁ𐋨娤.̼٢𑖿 +T; 🄀Ⴄ\u0669\u0820。⒈\u0FB6ß; [B1 P1 V6]; [B1 P1 V6] # 🄀Ⴄ٩ࠠ.⒈ྶß +N; 🄀Ⴄ\u0669\u0820。⒈\u0FB6ß; [B1 P1 V6]; [B1 P1 V6] # 🄀Ⴄ٩ࠠ.⒈ྶß +T; 0.Ⴄ\u0669\u0820。1.\u0FB6ß; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 0.Ⴄ٩ࠠ.1.ྶß +N; 0.Ⴄ\u0669\u0820。1.\u0FB6ß; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 0.Ⴄ٩ࠠ.1.ྶß +T; 0.ⴄ\u0669\u0820。1.\u0FB6ß; [B1 B5 B6 V5]; [B1 B5 B6 V5] # 0.ⴄ٩ࠠ.1.ྶß +N; 0.ⴄ\u0669\u0820。1.\u0FB6ß; [B1 B5 B6 V5]; [B1 B5 B6 V5] # 0.ⴄ٩ࠠ.1.ྶß +B; 0.Ⴄ\u0669\u0820。1.\u0FB6SS; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 0.Ⴄ٩ࠠ.1.ྶss +B; 0.ⴄ\u0669\u0820。1.\u0FB6ss; [B1 B5 B6 V5]; [B1 B5 B6 V5] # 0.ⴄ٩ࠠ.1.ྶss +B; 0.Ⴄ\u0669\u0820。1.\u0FB6Ss; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 0.Ⴄ٩ࠠ.1.ྶss +B; 0.xn--iib29f26o.1.xn--ss-1sj; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # 0.Ⴄ٩ࠠ.1.ྶss +B; 0.xn--iib29fp25e.1.xn--ss-1sj; [B1 B5 B6 V5]; [B1 B5 B6 V5] # 0.ⴄ٩ࠠ.1.ྶss +B; 0.xn--iib29fp25e.1.xn--zca117e; [B1 B5 B6 V5]; [B1 B5 B6 V5] # 0.ⴄ٩ࠠ.1.ྶß +B; 0.xn--iib29f26o.1.xn--zca117e; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # 0.Ⴄ٩ࠠ.1.ྶß +T; 🄀ⴄ\u0669\u0820。⒈\u0FB6ß; [B1 P1 V6]; [B1 P1 V6] # 🄀ⴄ٩ࠠ.⒈ྶß +N; 🄀ⴄ\u0669\u0820。⒈\u0FB6ß; [B1 P1 V6]; [B1 P1 V6] # 🄀ⴄ٩ࠠ.⒈ྶß +B; 🄀Ⴄ\u0669\u0820。⒈\u0FB6SS; 
[B1 P1 V6]; [B1 P1 V6] # 🄀Ⴄ٩ࠠ.⒈ྶss +B; 🄀ⴄ\u0669\u0820。⒈\u0FB6ss; [B1 P1 V6]; [B1 P1 V6] # 🄀ⴄ٩ࠠ.⒈ྶss +B; 🄀Ⴄ\u0669\u0820。⒈\u0FB6Ss; [B1 P1 V6]; [B1 P1 V6] # 🄀Ⴄ٩ࠠ.⒈ྶss +B; xn--iib29f26o6n43c.xn--ss-1sj588o; [B1 V6]; [B1 V6] # 🄀Ⴄ٩ࠠ.⒈ྶss +B; xn--iib29fp25e0219a.xn--ss-1sj588o; [B1 V6]; [B1 V6] # 🄀ⴄ٩ࠠ.⒈ྶss +B; xn--iib29fp25e0219a.xn--zca117e3vp; [B1 V6]; [B1 V6] # 🄀ⴄ٩ࠠ.⒈ྶß +B; xn--iib29f26o6n43c.xn--zca117e3vp; [B1 V6]; [B1 V6] # 🄀Ⴄ٩ࠠ.⒈ྶß +T; ≠.\u200C-\u066B; [B1 C1 P1 V6]; [B1 P1 V3 V6] # ≠.-٫ +N; ≠.\u200C-\u066B; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ≠.-٫ +T; =\u0338.\u200C-\u066B; [B1 C1 P1 V6]; [B1 P1 V3 V6] # ≠.-٫ +N; =\u0338.\u200C-\u066B; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ≠.-٫ +B; xn--1ch.xn----vqc; [B1 V3 V6]; [B1 V3 V6] # ≠.-٫ +B; xn--1ch.xn----vqc597q; [B1 C1 V6]; [B1 C1 V6] # ≠.-٫ +B; \u0660۱。󠳶𞠁\u0665; [B1 P1 V6]; [B1 P1 V6] # ٠۱.𞠁٥ +B; \u0660۱。󠳶𞠁\u0665; [B1 P1 V6]; [B1 P1 V6] # ٠۱.𞠁٥ +B; xn--8hb40a.xn--eib7967vner3e; [B1 V6]; [B1 V6] # ٠۱.𞠁٥ +T; \u200C\u0663⒖。󱅉𽷛\u1BF3; [B1 C1 P1 V6]; [B1 P1 V6] # ٣⒖.᯳ +N; \u200C\u0663⒖。󱅉𽷛\u1BF3; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ٣⒖.᯳ +T; \u200C\u066315.。󱅉𽷛\u1BF3; [B1 C1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # ٣15..᯳ +N; \u200C\u066315.。󱅉𽷛\u1BF3; [B1 C1 P1 V6 A4_2]; [B1 C1 P1 V6 A4_2] # ٣15..᯳ +B; xn--15-gyd..xn--1zf13512buy41d; [B1 V6 A4_2]; [B1 V6 A4_2] # ٣15..᯳ +B; xn--15-gyd983x..xn--1zf13512buy41d; [B1 C1 V6 A4_2]; [B1 C1 V6 A4_2] # ٣15..᯳ +B; xn--cib675m.xn--1zf13512buy41d; [B1 V6]; [B1 V6] # ٣⒖.᯳ +B; xn--cib152kwgd.xn--1zf13512buy41d; [B1 C1 V6]; [B1 C1 V6] # ٣⒖.᯳ +B; \u1BF3.-逋񳦭󙙮; [P1 V3 V5 V6]; [P1 V3 V5 V6] # ᯳.-逋 +B; xn--1zf.xn----483d46987byr50b; [V3 V5 V6]; [V3 V5 V6] # ᯳.-逋 +T; \u0756。\u3164\u200Dς; [C2 P1 V6]; [P1 V6] # ݖ.ς +N; \u0756。\u3164\u200Dς; [C2 P1 V6]; [C2 P1 V6] # ݖ.ς +T; \u0756。\u1160\u200Dς; [C2 P1 V6]; [P1 V6] # ݖ.ς +N; \u0756。\u1160\u200Dς; [C2 P1 V6]; [C2 P1 V6] # ݖ.ς +T; \u0756。\u1160\u200DΣ; [C2 P1 V6]; [P1 V6] # ݖ.σ +N; \u0756。\u1160\u200DΣ; [C2 P1 V6]; [C2 P1 V6] # ݖ.σ +T; \u0756。\u1160\u200Dσ; [C2 P1 V6]; [P1 V6] # ݖ.σ +N; \u0756。\u1160\u200Dσ; [C2 P1 V6]; [C2 P1 V6] # ݖ.σ +B; xn--9ob.xn--4xa380e; [V6]; [V6] # ݖ.σ +B; xn--9ob.xn--4xa380ebol; [C2 V6]; [C2 V6] # ݖ.σ +B; xn--9ob.xn--3xa580ebol; [C2 V6]; [C2 V6] # ݖ.ς +T; \u0756。\u3164\u200DΣ; [C2 P1 V6]; [P1 V6] # ݖ.σ +N; \u0756。\u3164\u200DΣ; [C2 P1 V6]; [C2 P1 V6] # ݖ.σ +T; \u0756。\u3164\u200Dσ; [C2 P1 V6]; [P1 V6] # ݖ.σ +N; \u0756。\u3164\u200Dσ; [C2 P1 V6]; [C2 P1 V6] # ݖ.σ +B; xn--9ob.xn--4xa574u; [V6]; [V6] # ݖ.σ +B; xn--9ob.xn--4xa795lq2l; [C2 V6]; [C2 V6] # ݖ.σ +B; xn--9ob.xn--3xa995lq2l; [C2 V6]; [C2 V6] # ݖ.ς +T; ᡆႣ。󞢧\u0315\u200D\u200D; [C2 P1 V6]; [P1 V6] # ᡆႣ.̕ +N; ᡆႣ。󞢧\u0315\u200D\u200D; [C2 P1 V6]; [C2 P1 V6] # ᡆႣ.̕ +T; ᡆႣ。󞢧\u0315\u200D\u200D; [C2 P1 V6]; [P1 V6] # ᡆႣ.̕ +N; ᡆႣ。󞢧\u0315\u200D\u200D; [C2 P1 V6]; [C2 P1 V6] # ᡆႣ.̕ +T; ᡆⴃ。󞢧\u0315\u200D\u200D; [C2 P1 V6]; [P1 V6] # ᡆⴃ.̕ +N; ᡆⴃ。󞢧\u0315\u200D\u200D; [C2 P1 V6]; [C2 P1 V6] # ᡆⴃ.̕ +B; xn--57e237h.xn--5sa98523p; [V6]; [V6] # ᡆⴃ.̕ +B; xn--57e237h.xn--5sa649la993427a; [C2 V6]; [C2 V6] # ᡆⴃ.̕ +B; xn--bnd320b.xn--5sa98523p; [V6]; [V6] # ᡆႣ.̕ +B; xn--bnd320b.xn--5sa649la993427a; [C2 V6]; [C2 V6] # ᡆႣ.̕ +T; ᡆⴃ。󞢧\u0315\u200D\u200D; [C2 P1 V6]; [P1 V6] # ᡆⴃ.̕ +N; ᡆⴃ。󞢧\u0315\u200D\u200D; [C2 P1 V6]; [C2 P1 V6] # ᡆⴃ.̕ +T; 㭄\u200D\u084F𑚵.ς𐮮\u200C\u200D; [B5 B6 C1 C2]; [B5 B6] # 㭄ࡏ𑚵.ς𐮮 +N; 㭄\u200D\u084F𑚵.ς𐮮\u200C\u200D; [B5 B6 C1 C2]; [B5 B6 C1 C2] # 㭄ࡏ𑚵.ς𐮮 +T; 㭄\u200D\u084F𑚵.ς𐮮\u200C\u200D; [B5 B6 C1 C2]; [B5 B6] # 㭄ࡏ𑚵.ς𐮮 +N; 㭄\u200D\u084F𑚵.ς𐮮\u200C\u200D; [B5 B6 C1 C2]; [B5 B6 C1 C2] # 㭄ࡏ𑚵.ς𐮮 +T; 㭄\u200D\u084F𑚵.Σ𐮮\u200C\u200D; [B5 B6 
C1 C2]; [B5 B6] # 㭄ࡏ𑚵.σ𐮮 +N; 㭄\u200D\u084F𑚵.Σ𐮮\u200C\u200D; [B5 B6 C1 C2]; [B5 B6 C1 C2] # 㭄ࡏ𑚵.σ𐮮 +T; 㭄\u200D\u084F𑚵.σ𐮮\u200C\u200D; [B5 B6 C1 C2]; [B5 B6] # 㭄ࡏ𑚵.σ𐮮 +N; 㭄\u200D\u084F𑚵.σ𐮮\u200C\u200D; [B5 B6 C1 C2]; [B5 B6 C1 C2] # 㭄ࡏ𑚵.σ𐮮 +B; xn--ewb302xhu1l.xn--4xa0426k; [B5 B6]; [B5 B6] # 㭄ࡏ𑚵.σ𐮮 +B; xn--ewb962jfitku4r.xn--4xa695lda6932v; [B5 B6 C1 C2]; [B5 B6 C1 C2] # 㭄ࡏ𑚵.σ𐮮 +B; xn--ewb962jfitku4r.xn--3xa895lda6932v; [B5 B6 C1 C2]; [B5 B6 C1 C2] # 㭄ࡏ𑚵.ς𐮮 +T; 㭄\u200D\u084F𑚵.Σ𐮮\u200C\u200D; [B5 B6 C1 C2]; [B5 B6] # 㭄ࡏ𑚵.σ𐮮 +N; 㭄\u200D\u084F𑚵.Σ𐮮\u200C\u200D; [B5 B6 C1 C2]; [B5 B6 C1 C2] # 㭄ࡏ𑚵.σ𐮮 +T; 㭄\u200D\u084F𑚵.σ𐮮\u200C\u200D; [B5 B6 C1 C2]; [B5 B6] # 㭄ࡏ𑚵.σ𐮮 +N; 㭄\u200D\u084F𑚵.σ𐮮\u200C\u200D; [B5 B6 C1 C2]; [B5 B6 C1 C2] # 㭄ࡏ𑚵.σ𐮮 +B; \u17B5。𞯸ꡀ🄋; [B1 B2 B3 B6 P1 V5 V6]; [B1 B2 B3 B6 P1 V5 V6] # .ꡀ🄋 +B; xn--03e.xn--8b9ar252dngd; [B1 B2 B3 B6 V5 V6]; [B1 B2 B3 B6 V5 V6] # .ꡀ🄋 +B; 󐪺暑.⾑\u0668; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 暑.襾٨ +B; 󐪺暑.襾\u0668; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 暑.襾٨ +B; xn--tlvq3513e.xn--hib9228d; [B5 B6 V6]; [B5 B6 V6] # 暑.襾٨ +B; 󠄚≯ꡢ。\u0891\u1DFF; [B1 P1 V6]; [B1 P1 V6] # ≯ꡢ.᷿ +B; 󠄚>\u0338ꡢ。\u0891\u1DFF; [B1 P1 V6]; [B1 P1 V6] # ≯ꡢ.᷿ +B; xn--hdh7783c.xn--9xb680i; [B1 V6]; [B1 V6] # ≯ꡢ.᷿ +B; \uFDC3𮁱\u0B4D𐨿.󐧤Ⴗ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # كمم𮁱୍𐨿.Ⴗ +B; \u0643\u0645\u0645𮁱\u0B4D𐨿.󐧤Ⴗ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # كمم𮁱୍𐨿.Ⴗ +B; \u0643\u0645\u0645𮁱\u0B4D𐨿.󐧤ⴗ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # كمم𮁱୍𐨿.ⴗ +B; xn--fhbea662czx68a2tju.xn--fljz2846h; [B2 B3 V6]; [B2 B3 V6] # كمم𮁱୍𐨿.ⴗ +B; xn--fhbea662czx68a2tju.xn--vnd55511o; [B2 B3 V6]; [B2 B3 V6] # كمم𮁱୍𐨿.Ⴗ +B; \uFDC3𮁱\u0B4D𐨿.󐧤ⴗ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # كمم𮁱୍𐨿.ⴗ +B; 𞀨。\u1B44򡛨𞎇; [P1 V5 V6]; [P1 V5 V6] # 𞀨.᭄ +B; 𞀨。\u1B44򡛨𞎇; [P1 V5 V6]; [P1 V5 V6] # 𞀨.᭄ +B; xn--mi4h.xn--1uf6843smg20c; [V5 V6]; [V5 V6] # 𞀨.᭄ +T; 󠣼\u200C.𐺰\u200Cᡟ; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 P1 V6] # .ᡟ +N; 󠣼\u200C.𐺰\u200Cᡟ; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # .ᡟ +T; 󠣼\u200C.𐺰\u200Cᡟ; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 P1 V6] # .ᡟ +N; 󠣼\u200C.𐺰\u200Cᡟ; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # .ᡟ +B; xn--q046e.xn--v8e7227j; [B1 B2 B3 V6]; [B1 B2 B3 V6] +B; xn--0ug18531l.xn--v8e340bp21t; [B1 B2 B3 C1 V6]; [B1 B2 B3 C1 V6] # .ᡟ +T; ᢛ󨅟ß.ጧ; [P1 V6]; [P1 V6] +N; ᢛ󨅟ß.ጧ; [P1 V6]; [P1 V6] +B; ᢛ󨅟SS.ጧ; [P1 V6]; [P1 V6] +B; ᢛ󨅟ss.ጧ; [P1 V6]; [P1 V6] +B; ᢛ󨅟Ss.ጧ; [P1 V6]; [P1 V6] +B; xn--ss-7dp66033t.xn--p5d; [V6]; [V6] +B; xn--zca562jc642x.xn--p5d; [V6]; [V6] +T; ⮒\u200C.񒚗\u200C; [C1 P1 V6]; [P1 V6] # ⮒. +N; ⮒\u200C.񒚗\u200C; [C1 P1 V6]; [C1 P1 V6] # ⮒. +B; xn--b9i.xn--5p9y; [V6]; [V6] +B; xn--0ugx66b.xn--0ugz2871c; [C1 V6]; [C1 V6] # ⮒. 
+B; 𞤂񹞁𐹯。Ⴜ; [B2 P1 V6]; [B2 P1 V6] +B; 𞤤񹞁𐹯。ⴜ; [B2 P1 V6]; [B2 P1 V6] +B; xn--no0dr648a51o3b.xn--klj; [B2 V6]; [B2 V6] +B; xn--no0dr648a51o3b.xn--0nd; [B2 V6]; [B2 V6] +B; 𞤂񹞁𐹯。ⴜ; [B2 P1 V6]; [B2 P1 V6] +T; 𐹵⮣\u200C𑄰。񷴿\uFCB7; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 P1 V6] # 𐹵⮣𑄰.ضم +N; 𐹵⮣\u200C𑄰。񷴿\uFCB7; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # 𐹵⮣𑄰.ضم +T; 𐹵⮣\u200C𑄰。񷴿\u0636\u0645; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 P1 V6] # 𐹵⮣𑄰.ضم +N; 𐹵⮣\u200C𑄰。񷴿\u0636\u0645; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # 𐹵⮣𑄰.ضم +B; xn--s9i5458e7yb.xn--1gb4a66004i; [B1 B5 B6 V6]; [B1 B5 B6 V6] # 𐹵⮣𑄰.ضم +B; xn--0ug586bcj8p7jc.xn--1gb4a66004i; [B1 B5 B6 C1 V6]; [B1 B5 B6 C1 V6] # 𐹵⮣𑄰.ضم +T; Ⴒ。デß𞤵\u0C4D; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴒ.デß𞤵్ +N; Ⴒ。デß𞤵\u0C4D; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴒ.デß𞤵్ +T; Ⴒ。テ\u3099ß𞤵\u0C4D; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴒ.デß𞤵్ +N; Ⴒ。テ\u3099ß𞤵\u0C4D; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴒ.デß𞤵్ +T; ⴒ。テ\u3099ß𞤵\u0C4D; [B5 B6]; [B5 B6] # ⴒ.デß𞤵్ +N; ⴒ。テ\u3099ß𞤵\u0C4D; [B5 B6]; [B5 B6] # ⴒ.デß𞤵్ +T; ⴒ。デß𞤵\u0C4D; [B5 B6]; [B5 B6] # ⴒ.デß𞤵్ +N; ⴒ。デß𞤵\u0C4D; [B5 B6]; [B5 B6] # ⴒ.デß𞤵్ +B; Ⴒ。デSS𞤓\u0C4D; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴒ.デss𞤵్ +B; Ⴒ。テ\u3099SS𞤓\u0C4D; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴒ.デss𞤵్ +B; ⴒ。テ\u3099ss𞤵\u0C4D; [B5 B6]; [B5 B6] # ⴒ.デss𞤵్ +B; ⴒ。デss𞤵\u0C4D; [B5 B6]; [B5 B6] # ⴒ.デss𞤵్ +B; Ⴒ。デSs𞤵\u0C4D; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴒ.デss𞤵్ +B; Ⴒ。テ\u3099Ss𞤵\u0C4D; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴒ.デss𞤵్ +B; xn--qnd.xn--ss-9nh3648ahh20b; [B5 B6 V6]; [B5 B6 V6] # Ⴒ.デss𞤵్ +B; xn--9kj.xn--ss-9nh3648ahh20b; [B5 B6]; [B5 B6] # ⴒ.デss𞤵్ +B; xn--9kj.xn--zca669cmr3a0f28a; [B5 B6]; [B5 B6] # ⴒ.デß𞤵్ +B; xn--qnd.xn--zca669cmr3a0f28a; [B5 B6 V6]; [B5 B6 V6] # Ⴒ.デß𞤵్ +B; Ⴒ。デSS𞤵\u0C4D; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴒ.デss𞤵్ +B; Ⴒ。テ\u3099SS𞤵\u0C4D; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴒ.デss𞤵్ +B; 𑁿\u0D4D.7-\u07D2; [B1 B3 B6 V5]; [B1 B3 B6 V5] # 𑁿്.7-ߒ +B; 𑁿\u0D4D.7-\u07D2; [B1 B3 B6 V5]; [B1 B3 B6 V5] # 𑁿്.7-ߒ +B; xn--wxc1283k.xn--7--yue; [B1 B3 B6 V5]; [B1 B3 B6 V5] # 𑁿്.7-ߒ +B; ≯𑜫󠭇.\u1734񒞤𑍬ᢧ; [P1 V5 V6]; [P1 V5 V6] # ≯𑜫.᜴𑍬ᢧ +B; >\u0338𑜫󠭇.\u1734񒞤𑍬ᢧ; [P1 V5 V6]; [P1 V5 V6] # ≯𑜫.᜴𑍬ᢧ +B; xn--hdhx157g68o0g.xn--c0e65eu616c34o7a; [V5 V6]; [V5 V6] # ≯𑜫.᜴𑍬ᢧ +B; \u1DDB򎐙Ⴗ쏔。\u0781; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ᷛႷ쏔.ށ +B; \u1DDB򎐙Ⴗ쏔。\u0781; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ᷛႷ쏔.ށ +B; \u1DDB򎐙ⴗ쏔。\u0781; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ᷛⴗ쏔.ށ +B; \u1DDB򎐙ⴗ쏔。\u0781; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ᷛⴗ쏔.ށ +B; xn--zegy26dw47iy6w2f.xn--iqb; [B1 V5 V6]; [B1 V5 V6] # ᷛⴗ쏔.ށ +B; xn--vnd148d733ky6n9e.xn--iqb; [B1 V5 V6]; [B1 V5 V6] # ᷛႷ쏔.ށ +T; ß。𐋳Ⴌ\u0FB8; [P1 V6]; [P1 V6] # ß.𐋳Ⴌྸ +N; ß。𐋳Ⴌ\u0FB8; [P1 V6]; [P1 V6] # ß.𐋳Ⴌྸ +T; ß。𐋳Ⴌ\u0FB8; [P1 V6]; [P1 V6] # ß.𐋳Ⴌྸ +N; ß。𐋳Ⴌ\u0FB8; [P1 V6]; [P1 V6] # ß.𐋳Ⴌྸ +T; ß。𐋳ⴌ\u0FB8; ß.𐋳ⴌ\u0FB8; ss.xn--lgd921mvv0m; NV8 # ß.𐋳ⴌྸ +N; ß。𐋳ⴌ\u0FB8; ß.𐋳ⴌ\u0FB8; xn--zca.xn--lgd921mvv0m; NV8 # ß.𐋳ⴌྸ +B; SS。𐋳Ⴌ\u0FB8; [P1 V6]; [P1 V6] # ss.𐋳Ⴌྸ +B; ss。𐋳ⴌ\u0FB8; ss.𐋳ⴌ\u0FB8; ss.xn--lgd921mvv0m; NV8 # ss.𐋳ⴌྸ +B; Ss。𐋳Ⴌ\u0FB8; [P1 V6]; [P1 V6] # ss.𐋳Ⴌྸ +B; ss.xn--lgd10cu829c; [V6]; [V6] # ss.𐋳Ⴌྸ +B; ss.xn--lgd921mvv0m; ss.𐋳ⴌ\u0FB8; ss.xn--lgd921mvv0m; NV8 # ss.𐋳ⴌྸ +B; ss.𐋳ⴌ\u0FB8; ; ss.xn--lgd921mvv0m; NV8 # ss.𐋳ⴌྸ +B; SS.𐋳Ⴌ\u0FB8; [P1 V6]; [P1 V6] # ss.𐋳Ⴌྸ +B; Ss.𐋳Ⴌ\u0FB8; [P1 V6]; [P1 V6] # ss.𐋳Ⴌྸ +B; xn--zca.xn--lgd921mvv0m; ß.𐋳ⴌ\u0FB8; xn--zca.xn--lgd921mvv0m; NV8 # ß.𐋳ⴌྸ +T; ß.𐋳ⴌ\u0FB8; ; ss.xn--lgd921mvv0m; NV8 # ß.𐋳ⴌྸ +N; ß.𐋳ⴌ\u0FB8; ; xn--zca.xn--lgd921mvv0m; NV8 # ß.𐋳ⴌྸ +B; xn--zca.xn--lgd10cu829c; [V6]; [V6] # ß.𐋳Ⴌྸ +T; ß。𐋳ⴌ\u0FB8; ß.𐋳ⴌ\u0FB8; ss.xn--lgd921mvv0m; NV8 # ß.𐋳ⴌྸ +N; ß。𐋳ⴌ\u0FB8; ß.𐋳ⴌ\u0FB8; xn--zca.xn--lgd921mvv0m; NV8 # 
ß.𐋳ⴌྸ +B; SS。𐋳Ⴌ\u0FB8; [P1 V6]; [P1 V6] # ss.𐋳Ⴌྸ +B; ss。𐋳ⴌ\u0FB8; ss.𐋳ⴌ\u0FB8; ss.xn--lgd921mvv0m; NV8 # ss.𐋳ⴌྸ +B; Ss。𐋳Ⴌ\u0FB8; [P1 V6]; [P1 V6] # ss.𐋳Ⴌྸ +T; -\u069E𐶡.\u200C⾝\u09CD; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # -ڞ.身্ +N; -\u069E𐶡.\u200C⾝\u09CD; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # -ڞ.身্ +T; -\u069E𐶡.\u200C身\u09CD; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # -ڞ.身্ +N; -\u069E𐶡.\u200C身\u09CD; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # -ڞ.身্ +B; xn----stc7013r.xn--b7b1419d; [B1 V3 V6]; [B1 V3 V6] # -ڞ.身্ +B; xn----stc7013r.xn--b7b305imj2f; [B1 C1 V3 V6]; [B1 C1 V3 V6] # -ڞ.身্ +T; 😮\u0764𑈵𞀖.💅\u200D; [B1 C2]; [B1] # 😮ݤ𑈵𞀖.💅 +N; 😮\u0764𑈵𞀖.💅\u200D; [B1 C2]; [B1 C2] # 😮ݤ𑈵𞀖.💅 +T; 😮\u0764𑈵𞀖.💅\u200D; [B1 C2]; [B1] # 😮ݤ𑈵𞀖.💅 +N; 😮\u0764𑈵𞀖.💅\u200D; [B1 C2]; [B1 C2] # 😮ݤ𑈵𞀖.💅 +B; xn--opb4277kuc7elqsa.xn--kr8h; [B1]; [B1] # 😮ݤ𑈵𞀖.💅 +B; xn--opb4277kuc7elqsa.xn--1ug5265p; [B1 C2]; [B1 C2] # 😮ݤ𑈵𞀖.💅 +T; \u08F2\u200D꙳\u0712.ᢏ\u200C󠍄; [B1 B6 C1 C2 P1 V5 V6]; [B1 B6 P1 V5 V6] # ࣲ꙳ܒ.ᢏ +N; \u08F2\u200D꙳\u0712.ᢏ\u200C󠍄; [B1 B6 C1 C2 P1 V5 V6]; [B1 B6 C1 C2 P1 V5 V6] # ࣲ꙳ܒ.ᢏ +B; xn--cnb37gdy00a.xn--89e02253p; [B1 B6 V5 V6]; [B1 B6 V5 V6] # ࣲ꙳ܒ.ᢏ +B; xn--cnb37g904be26j.xn--89e849ax9363a; [B1 B6 C1 C2 V5 V6]; [B1 B6 C1 C2 V5 V6] # ࣲ꙳ܒ.ᢏ +B; Ⴑ.\u06BF𞯓ᠲ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # Ⴑ.ڿᠲ +B; Ⴑ.\u06BF𞯓ᠲ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # Ⴑ.ڿᠲ +B; ⴑ.\u06BF𞯓ᠲ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ⴑ.ڿᠲ +B; xn--8kj.xn--ykb840gd555a; [B2 B3 V6]; [B2 B3 V6] # ⴑ.ڿᠲ +B; xn--pnd.xn--ykb840gd555a; [B2 B3 V6]; [B2 B3 V6] # Ⴑ.ڿᠲ +B; ⴑ.\u06BF𞯓ᠲ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ⴑ.ڿᠲ +B; \u1A5A𛦝\u0C4D。𚝬𝟵; [P1 V5 V6]; [P1 V5 V6] # ᩚ్.9 +B; \u1A5A𛦝\u0C4D。𚝬9; [P1 V5 V6]; [P1 V5 V6] # ᩚ్.9 +B; xn--lqc703ebm93a.xn--9-000p; [V5 V6]; [V5 V6] # ᩚ్.9 +T; \u200C\u06A0𿺆𝟗。Ⴣ꒘\uFCD0񐘖; [B1 B5 C1 P1 V6]; [B2 B5 P1 V6] # ڠ9.Ⴣ꒘مخ +N; \u200C\u06A0𿺆𝟗。Ⴣ꒘\uFCD0񐘖; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # ڠ9.Ⴣ꒘مخ +T; \u200C\u06A0𿺆9。Ⴣ꒘\u0645\u062E񐘖; [B1 B5 C1 P1 V6]; [B2 B5 P1 V6] # ڠ9.Ⴣ꒘مخ +N; \u200C\u06A0𿺆9。Ⴣ꒘\u0645\u062E񐘖; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # ڠ9.Ⴣ꒘مخ +T; \u200C\u06A0𿺆9。ⴣ꒘\u0645\u062E񐘖; [B1 B5 C1 P1 V6]; [B2 B5 P1 V6] # ڠ9.ⴣ꒘مخ +N; \u200C\u06A0𿺆9。ⴣ꒘\u0645\u062E񐘖; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # ڠ9.ⴣ꒘مخ +B; xn--9-vtc42319e.xn--tgb9bz87p833hw316c; [B2 B5 V6]; [B2 B5 V6] # ڠ9.ⴣ꒘مخ +B; xn--9-vtc736qts91g.xn--tgb9bz87p833hw316c; [B1 B5 C1 V6]; [B1 B5 C1 V6] # ڠ9.ⴣ꒘مخ +B; xn--9-vtc42319e.xn--tgb9bz61cfn8mw3t2c; [B2 B5 V6]; [B2 B5 V6] # ڠ9.Ⴣ꒘مخ +B; xn--9-vtc736qts91g.xn--tgb9bz61cfn8mw3t2c; [B1 B5 C1 V6]; [B1 B5 C1 V6] # ڠ9.Ⴣ꒘مخ +T; \u200C\u06A0𿺆𝟗。ⴣ꒘\uFCD0񐘖; [B1 B5 C1 P1 V6]; [B2 B5 P1 V6] # ڠ9.ⴣ꒘مخ +N; \u200C\u06A0𿺆𝟗。ⴣ꒘\uFCD0񐘖; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # ڠ9.ⴣ꒘مخ +B; ᡖ。\u031F񗛨\u0B82-; [P1 V3 V5 V6]; [P1 V3 V5 V6] # ᡖ.̟ஂ- +B; ᡖ。\u031F񗛨\u0B82-; [P1 V3 V5 V6]; [P1 V3 V5 V6] # ᡖ.̟ஂ- +B; xn--m8e.xn----mdb555dkk71m; [V3 V5 V6]; [V3 V5 V6] # ᡖ.̟ஂ- +B; 𞠠浘。絧𞀀; [B2 B3]; [B2 B3] +B; xn--e0wp491f.xn--ud0a3573e; [B2 B3]; [B2 B3] +B; \u0596Ⴋ.𝟳≯︒\uFE0A; [P1 V5 V6]; [P1 V5 V6] # ֖Ⴋ.7≯︒ +B; \u0596Ⴋ.𝟳>\u0338︒\uFE0A; [P1 V5 V6]; [P1 V5 V6] # ֖Ⴋ.7≯︒ +B; \u0596Ⴋ.7≯。\uFE0A; [P1 V5 V6]; [P1 V5 V6] # ֖Ⴋ.7≯. +B; \u0596Ⴋ.7>\u0338。\uFE0A; [P1 V5 V6]; [P1 V5 V6] # ֖Ⴋ.7≯. +B; \u0596ⴋ.7>\u0338。\uFE0A; [P1 V5 V6]; [P1 V5 V6] # ֖ⴋ.7≯. +B; \u0596ⴋ.7≯。\uFE0A; [P1 V5 V6]; [P1 V5 V6] # ֖ⴋ.7≯. +B; xn--hcb613r.xn--7-pgo.; [V5 V6]; [V5 V6] # ֖ⴋ.7≯. +B; xn--hcb887c.xn--7-pgo.; [V5 V6]; [V5 V6] # ֖Ⴋ.7≯. 
+B; \u0596ⴋ.𝟳>\u0338︒\uFE0A; [P1 V5 V6]; [P1 V5 V6] # ֖ⴋ.7≯︒ +B; \u0596ⴋ.𝟳≯︒\uFE0A; [P1 V5 V6]; [P1 V5 V6] # ֖ⴋ.7≯︒ +B; xn--hcb613r.xn--7-pgoy530h; [V5 V6]; [V5 V6] # ֖ⴋ.7≯︒ +B; xn--hcb887c.xn--7-pgoy530h; [V5 V6]; [V5 V6] # ֖Ⴋ.7≯︒ +T; \u200DF𑓂。󠺨︒\u077E𐹢; [B1 C2 P1 V6]; [B1 P1 V6] # f𑓂.︒ݾ𐹢 +N; \u200DF𑓂。󠺨︒\u077E𐹢; [B1 C2 P1 V6]; [B1 C2 P1 V6] # f𑓂.︒ݾ𐹢 +T; \u200DF𑓂。󠺨。\u077E𐹢; [B1 C2 P1 V6]; [B1 P1 V6] # f𑓂..ݾ𐹢 +N; \u200DF𑓂。󠺨。\u077E𐹢; [B1 C2 P1 V6]; [B1 C2 P1 V6] # f𑓂..ݾ𐹢 +T; \u200Df𑓂。󠺨。\u077E𐹢; [B1 C2 P1 V6]; [B1 P1 V6] # f𑓂..ݾ𐹢 +N; \u200Df𑓂。󠺨。\u077E𐹢; [B1 C2 P1 V6]; [B1 C2 P1 V6] # f𑓂..ݾ𐹢 +B; xn--f-kq9i.xn--7656e.xn--fqb4175k; [B1 V6]; [B1 V6] # f𑓂..ݾ𐹢 +B; xn--f-tgn9761i.xn--7656e.xn--fqb4175k; [B1 C2 V6]; [B1 C2 V6] # f𑓂..ݾ𐹢 +T; \u200Df𑓂。󠺨︒\u077E𐹢; [B1 C2 P1 V6]; [B1 P1 V6] # f𑓂.︒ݾ𐹢 +N; \u200Df𑓂。󠺨︒\u077E𐹢; [B1 C2 P1 V6]; [B1 C2 P1 V6] # f𑓂.︒ݾ𐹢 +B; xn--f-kq9i.xn--fqb1637j8hky9452a; [B1 V6]; [B1 V6] # f𑓂.︒ݾ𐹢 +B; xn--f-tgn9761i.xn--fqb1637j8hky9452a; [B1 C2 V6]; [B1 C2 V6] # f𑓂.︒ݾ𐹢 +B; \u0845🄇𐼗︒。𐹻𑜫; [B1 B3 P1 V6]; [B1 B3 P1 V6] # ࡅ🄇︒.𐹻𑜫 +B; \u08456,𐼗。。𐹻𑜫; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # ࡅ6,..𐹻𑜫 +B; xn--6,-r4e4420y..xn--zo0di2m; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # ࡅ6,..𐹻𑜫 +B; xn--3vb4696jpxkjh7s.xn--zo0di2m; [B1 B3 V6]; [B1 B3 V6] # ࡅ🄇︒.𐹻𑜫 +B; 𐹈.\u1DC0𑈱𐦭; [B1 P1 V5 V6]; [B1 P1 V5 V6] # .᷀𑈱𐦭 +B; xn--jn0d.xn--7dg0871h3lf; [B1 V5 V6]; [B1 V5 V6] # .᷀𑈱𐦭 +B; Ⴂ䠺。𞤃񅏎󙮦\u0693; [B2 P1 V6]; [B2 P1 V6] # Ⴂ䠺.𞤥ړ +B; ⴂ䠺。𞤥񅏎󙮦\u0693; [B2 P1 V6]; [B2 P1 V6] # ⴂ䠺.𞤥ړ +B; xn--tkj638f.xn--pjb9818vg4xno967d; [B2 V6]; [B2 V6] # ⴂ䠺.𞤥ړ +B; xn--9md875z.xn--pjb9818vg4xno967d; [B2 V6]; [B2 V6] # Ⴂ䠺.𞤥ړ +B; ⴂ䠺。𞤃񅏎󙮦\u0693; [B2 P1 V6]; [B2 P1 V6] # ⴂ䠺.𞤥ړ +B; 🄇伐︒.𜙚\uA8C4; [P1 V6]; [P1 V6] # 🄇伐︒.꣄ +B; 6,伐。.𜙚\uA8C4; [P1 V6 A4_2]; [P1 V6 A4_2] # 6,伐..꣄ +B; xn--6,-7i3c..xn--0f9ao925c; [P1 V6 A4_2]; [P1 V6 A4_2] # 6,伐..꣄ +B; xn--woqs083bel0g.xn--0f9ao925c; [V6]; [V6] # 🄇伐︒.꣄ +T; \u200D𐹠\uABED\uFFFB。\u200D𐫓Ⴚ𑂹; [B1 C2 P1 V6]; [B1 B2 B3 P1 V6] # 𐹠꯭.𐫓Ⴚ𑂹 +N; \u200D𐹠\uABED\uFFFB。\u200D𐫓Ⴚ𑂹; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 𐹠꯭.𐫓Ⴚ𑂹 +T; \u200D𐹠\uABED\uFFFB。\u200D𐫓ⴚ𑂹; [B1 C2 P1 V6]; [B1 B2 B3 P1 V6] # 𐹠꯭.𐫓ⴚ𑂹 +N; \u200D𐹠\uABED\uFFFB。\u200D𐫓ⴚ𑂹; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 𐹠꯭.𐫓ⴚ𑂹 +B; xn--429az70n29i.xn--ilj7702eqyd; [B1 B2 B3 V6]; [B1 B2 B3 V6] # 𐹠꯭.𐫓ⴚ𑂹 +B; xn--1ugz126coy7bdbm.xn--1ug062chv7ov6e; [B1 C2 V6]; [B1 C2 V6] # 𐹠꯭.𐫓ⴚ𑂹 +B; xn--429az70n29i.xn--ynd3619jqyd; [B1 B2 B3 V6]; [B1 B2 B3 V6] # 𐹠꯭.𐫓Ⴚ𑂹 +B; xn--1ugz126coy7bdbm.xn--ynd959evs1pv6e; [B1 C2 V6]; [B1 C2 V6] # 𐹠꯭.𐫓Ⴚ𑂹 +B; 󠆠.񷐴󌟈; [P1 V6 A4_2]; [P1 V6 A4_2] +B; 󠆠.񷐴󌟈; [P1 V6 A4_2]; [P1 V6 A4_2] +B; .xn--rx21bhv12i; [V6 A4_2]; [V6 A4_2] +T; 𐫃\u200CႦ.≠𞷙; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 P1 V6] # 𐫃Ⴆ.≠ +N; 𐫃\u200CႦ.≠𞷙; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # 𐫃Ⴆ.≠ +T; 𐫃\u200CႦ.=\u0338𞷙; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 P1 V6] # 𐫃Ⴆ.≠ +N; 𐫃\u200CႦ.=\u0338𞷙; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # 𐫃Ⴆ.≠ +T; 𐫃\u200Cⴆ.=\u0338𞷙; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 P1 V6] # 𐫃ⴆ.≠ +N; 𐫃\u200Cⴆ.=\u0338𞷙; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # 𐫃ⴆ.≠ +T; 𐫃\u200Cⴆ.≠𞷙; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 P1 V6] # 𐫃ⴆ.≠ +N; 𐫃\u200Cⴆ.≠𞷙; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # 𐫃ⴆ.≠ +B; xn--xkjz802e.xn--1ch2802p; [B1 B2 B3 V6]; [B1 B2 B3 V6] +B; xn--0ug132csv7o.xn--1ch2802p; [B1 B2 B3 C1 V6]; [B1 B2 B3 C1 V6] # 𐫃ⴆ.≠ +B; xn--end1719j.xn--1ch2802p; [B1 B2 B3 V6]; [B1 B2 B3 V6] +B; xn--end799ekr1p.xn--1ch2802p; [B1 B2 B3 C1 V6]; [B1 B2 B3 C1 V6] # 𐫃Ⴆ.≠ +B; 󠁲𙩢𝟥ꘌ.\u0841; [B1 P1 V6]; [B1 P1 V6] # 3ꘌ.ࡁ +B; 󠁲𙩢3ꘌ.\u0841; [B1 P1 V6]; [B1 P1 V6] # 3ꘌ.ࡁ +B; xn--3-0g3es485d8i15h.xn--zvb; [B1 V6]; [B1 V6] # 3ꘌ.ࡁ +B; -.\u1886󡲣-; 
[P1 V3 V5 V6]; [P1 V3 V5 V6] # -.ᢆ- +B; -.xn----pbkx6497q; [V3 V5 V6]; [V3 V5 V6] # -.ᢆ- +T; 󲚗\u200C。\u200C𞰆ς; [B1 B6 C1 P1 V6]; [B2 B3 P1 V6] # .ς +N; 󲚗\u200C。\u200C𞰆ς; [B1 B6 C1 P1 V6]; [B1 B6 C1 P1 V6] # .ς +T; 󲚗\u200C。\u200C𞰆ς; [B1 B6 C1 P1 V6]; [B2 B3 P1 V6] # .ς +N; 󲚗\u200C。\u200C𞰆ς; [B1 B6 C1 P1 V6]; [B1 B6 C1 P1 V6] # .ς +T; 󲚗\u200C。\u200C𞰆Σ; [B1 B6 C1 P1 V6]; [B2 B3 P1 V6] # .σ +N; 󲚗\u200C。\u200C𞰆Σ; [B1 B6 C1 P1 V6]; [B1 B6 C1 P1 V6] # .σ +T; 󲚗\u200C。\u200C𞰆σ; [B1 B6 C1 P1 V6]; [B2 B3 P1 V6] # .σ +N; 󲚗\u200C。\u200C𞰆σ; [B1 B6 C1 P1 V6]; [B1 B6 C1 P1 V6] # .σ +B; xn--qp42f.xn--4xa3011w; [B2 B3 V6]; [B2 B3 V6] +B; xn--0ug76062m.xn--4xa595lhn92a; [B1 B6 C1 V6]; [B1 B6 C1 V6] # .σ +B; xn--0ug76062m.xn--3xa795lhn92a; [B1 B6 C1 V6]; [B1 B6 C1 V6] # .ς +T; 󲚗\u200C。\u200C𞰆Σ; [B1 B6 C1 P1 V6]; [B2 B3 P1 V6] # .σ +N; 󲚗\u200C。\u200C𞰆Σ; [B1 B6 C1 P1 V6]; [B1 B6 C1 P1 V6] # .σ +T; 󲚗\u200C。\u200C𞰆σ; [B1 B6 C1 P1 V6]; [B2 B3 P1 V6] # .σ +N; 󲚗\u200C。\u200C𞰆σ; [B1 B6 C1 P1 V6]; [B1 B6 C1 P1 V6] # .σ +T; 堕𑓂\u1B02。𐮇𞤽\u200C-; [B3 C1 V3]; [B3 V3] # 堕𑓂ᬂ.𐮇𞤽- +N; 堕𑓂\u1B02。𐮇𞤽\u200C-; [B3 C1 V3]; [B3 C1 V3] # 堕𑓂ᬂ.𐮇𞤽- +T; 堕𑓂\u1B02。𐮇𞤛\u200C-; [B3 C1 V3]; [B3 V3] # 堕𑓂ᬂ.𐮇𞤽- +N; 堕𑓂\u1B02。𐮇𞤛\u200C-; [B3 C1 V3]; [B3 C1 V3] # 堕𑓂ᬂ.𐮇𞤽- +B; xn--5sf345zdk8h.xn----iv5iw606c; [B3 V3]; [B3 V3] # 堕𑓂ᬂ.𐮇𞤽- +B; xn--5sf345zdk8h.xn----rgnt157hwl9g; [B3 C1 V3]; [B3 C1 V3] # 堕𑓂ᬂ.𐮇𞤽- +T; 𐹶𑁆ᡕ𞤢。ᡥς\u062Aς; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥςتς +N; 𐹶𑁆ᡕ𞤢。ᡥς\u062Aς; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥςتς +T; 𐹶𑁆ᡕ𞤢。ᡥς\u062Aς; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥςتς +N; 𐹶𑁆ᡕ𞤢。ᡥς\u062Aς; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥςتς +B; 𐹶𑁆ᡕ𞤀。ᡥΣ\u062AΣ; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتσ +B; 𐹶𑁆ᡕ𞤢。ᡥσ\u062Aσ; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتσ +B; 𐹶𑁆ᡕ𞤀。ᡥΣ\u062Aσ; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتσ +B; xn--l8e1317j1ebz456b.xn--4xaa85plx4a; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتσ +T; 𐹶𑁆ᡕ𞤀。ᡥΣ\u062Aς; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتς +N; 𐹶𑁆ᡕ𞤀。ᡥΣ\u062Aς; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتς +T; 𐹶𑁆ᡕ𞤢。ᡥσ\u062Aς; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتς +N; 𐹶𑁆ᡕ𞤢。ᡥσ\u062Aς; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتς +B; xn--l8e1317j1ebz456b.xn--3xab95plx4a; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتς +B; xn--l8e1317j1ebz456b.xn--3xaa16plx4a; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥςتς +B; 𐹶𑁆ᡕ𞤀。ᡥΣ\u062AΣ; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتσ +B; 𐹶𑁆ᡕ𞤢。ᡥσ\u062Aσ; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتσ +B; 𐹶𑁆ᡕ𞤀。ᡥΣ\u062Aσ; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتσ +T; 𐹶𑁆ᡕ𞤀。ᡥΣ\u062Aς; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتς +N; 𐹶𑁆ᡕ𞤀。ᡥΣ\u062Aς; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتς +T; 𐹶𑁆ᡕ𞤢。ᡥσ\u062Aς; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتς +N; 𐹶𑁆ᡕ𞤢。ᡥσ\u062Aς; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتς +B; 𐹶𑁆ᡕ𞤢。ᡥΣ\u062AΣ; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتσ +B; 𐹶𑁆ᡕ𞤢。ᡥΣ\u062Aσ; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتσ +T; 𐹶𑁆ᡕ𞤢。ᡥΣ\u062Aς; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتς +N; 𐹶𑁆ᡕ𞤢。ᡥΣ\u062Aς; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتς +B; 𐹶𑁆ᡕ𞤢。ᡥΣ\u062AΣ; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتσ +B; 𐹶𑁆ᡕ𞤢。ᡥΣ\u062Aσ; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتσ +T; 𐹶𑁆ᡕ𞤢。ᡥΣ\u062Aς; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتς +N; 𐹶𑁆ᡕ𞤢。ᡥΣ\u062Aς; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتς +T; 󏒰.-𝟻ß; [P1 V3 V6]; [P1 V3 V6] +N; 󏒰.-𝟻ß; [P1 V3 V6]; [P1 V3 V6] +T; 󏒰.-5ß; [P1 V3 V6]; [P1 V3 V6] +N; 󏒰.-5ß; [P1 V3 V6]; [P1 V3 V6] +B; 󏒰.-5SS; [P1 V3 V6]; [P1 V3 V6] +B; 󏒰.-5ss; [P1 V3 V6]; [P1 V3 V6] +B; 󏒰.-5Ss; [P1 V3 V6]; [P1 V3 V6] +B; xn--t960e.-5ss; [V3 V6]; [V3 V6] +B; xn--t960e.xn---5-hia; [V3 V6]; [V3 V6] +B; 󏒰.-𝟻SS; [P1 V3 V6]; [P1 V3 V6] +B; 󏒰.-𝟻ss; [P1 V3 V6]; [P1 V3 V6] +B; 󏒰.-𝟻Ss; [P1 V3 V6]; [P1 V3 V6] +T; \u200D𐨿.🤒Ⴥ򑮶; [C2 P1 V6]; [P1 V5 V6] # 𐨿.🤒Ⴥ +N; \u200D𐨿.🤒Ⴥ򑮶; [C2 P1 V6]; [C2 P1 V6] # 𐨿.🤒Ⴥ +T; \u200D𐨿.🤒ⴥ򑮶; [C2 P1 V6]; [P1 V5 V6] # 𐨿.🤒ⴥ +N; \u200D𐨿.🤒ⴥ򑮶; [C2 P1 V6]; [C2 P1 V6] # 𐨿.🤒ⴥ +B; xn--0s9c.xn--tljz038l0gz4b; [V5 V6]; [V5 V6] +B; xn--1ug9533g.xn--tljz038l0gz4b; [C2 
V6]; [C2 V6] # 𐨿.🤒ⴥ +B; xn--0s9c.xn--9nd3211w0gz4b; [V5 V6]; [V5 V6] +B; xn--1ug9533g.xn--9nd3211w0gz4b; [C2 V6]; [C2 V6] # 𐨿.🤒Ⴥ +T; 𵋅。ß𬵩\u200D; [C2 P1 V6]; [P1 V6] # .ß𬵩 +N; 𵋅。ß𬵩\u200D; [C2 P1 V6]; [C2 P1 V6] # .ß𬵩 +T; 𵋅。SS𬵩\u200D; [C2 P1 V6]; [P1 V6] # .ss𬵩 +N; 𵋅。SS𬵩\u200D; [C2 P1 V6]; [C2 P1 V6] # .ss𬵩 +T; 𵋅。ss𬵩\u200D; [C2 P1 V6]; [P1 V6] # .ss𬵩 +N; 𵋅。ss𬵩\u200D; [C2 P1 V6]; [C2 P1 V6] # .ss𬵩 +T; 𵋅。Ss𬵩\u200D; [C2 P1 V6]; [P1 V6] # .ss𬵩 +N; 𵋅。Ss𬵩\u200D; [C2 P1 V6]; [C2 P1 V6] # .ss𬵩 +B; xn--ey1p.xn--ss-eq36b; [V6]; [V6] +B; xn--ey1p.xn--ss-n1tx0508a; [C2 V6]; [C2 V6] # .ss𬵩 +B; xn--ey1p.xn--zca870nz438b; [C2 V6]; [C2 V6] # .ß𬵩 +T; \u200C𭉝。\u07F1\u0301𞹻; [B1 C1 V5]; [B1 V5] # 𭉝.߱́غ +N; \u200C𭉝。\u07F1\u0301𞹻; [B1 C1 V5]; [B1 C1 V5] # 𭉝.߱́غ +T; \u200C𭉝。\u07F1\u0301\u063A; [B1 C1 V5]; [B1 V5] # 𭉝.߱́غ +N; \u200C𭉝。\u07F1\u0301\u063A; [B1 C1 V5]; [B1 C1 V5] # 𭉝.߱́غ +B; xn--634m.xn--lsa46nuub; [B1 V5]; [B1 V5] # 𭉝.߱́غ +B; xn--0ugy003y.xn--lsa46nuub; [B1 C1 V5]; [B1 C1 V5] # 𭉝.߱́غ +T; 𞼌\u200C𑈶。𐹡; [B1 B3 C1 P1 V6]; [B1 P1 V6] # 𑈶.𐹡 +N; 𞼌\u200C𑈶。𐹡; [B1 B3 C1 P1 V6]; [B1 B3 C1 P1 V6] # 𑈶.𐹡 +B; xn--9g1d1288a.xn--8n0d; [B1 V6]; [B1 V6] +B; xn--0ug7946gzpxf.xn--8n0d; [B1 B3 C1 V6]; [B1 B3 C1 V6] # 𑈶.𐹡 +T; 󠅯򇽭\u200C🜭。𑖿\u1ABBς≠; [C1 P1 V5 V6]; [P1 V5 V6] # 🜭.𑖿᪻ς≠ +N; 󠅯򇽭\u200C🜭。𑖿\u1ABBς≠; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 🜭.𑖿᪻ς≠ +T; 󠅯򇽭\u200C🜭。𑖿\u1ABBς=\u0338; [C1 P1 V5 V6]; [P1 V5 V6] # 🜭.𑖿᪻ς≠ +N; 󠅯򇽭\u200C🜭。𑖿\u1ABBς=\u0338; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 🜭.𑖿᪻ς≠ +T; 󠅯򇽭\u200C🜭。𑖿\u1ABBς≠; [C1 P1 V5 V6]; [P1 V5 V6] # 🜭.𑖿᪻ς≠ +N; 󠅯򇽭\u200C🜭。𑖿\u1ABBς≠; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 🜭.𑖿᪻ς≠ +T; 󠅯򇽭\u200C🜭。𑖿\u1ABBς=\u0338; [C1 P1 V5 V6]; [P1 V5 V6] # 🜭.𑖿᪻ς≠ +N; 󠅯򇽭\u200C🜭。𑖿\u1ABBς=\u0338; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 🜭.𑖿᪻ς≠ +T; 󠅯򇽭\u200C🜭。𑖿\u1ABBΣ=\u0338; [C1 P1 V5 V6]; [P1 V5 V6] # 🜭.𑖿᪻σ≠ +N; 󠅯򇽭\u200C🜭。𑖿\u1ABBΣ=\u0338; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 🜭.𑖿᪻σ≠ +T; 󠅯򇽭\u200C🜭。𑖿\u1ABBΣ≠; [C1 P1 V5 V6]; [P1 V5 V6] # 🜭.𑖿᪻σ≠ +N; 󠅯򇽭\u200C🜭。𑖿\u1ABBΣ≠; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 🜭.𑖿᪻σ≠ +T; 󠅯򇽭\u200C🜭。𑖿\u1ABBσ≠; [C1 P1 V5 V6]; [P1 V5 V6] # 🜭.𑖿᪻σ≠ +N; 󠅯򇽭\u200C🜭。𑖿\u1ABBσ≠; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 🜭.𑖿᪻σ≠ +T; 󠅯򇽭\u200C🜭。𑖿\u1ABBσ=\u0338; [C1 P1 V5 V6]; [P1 V5 V6] # 🜭.𑖿᪻σ≠ +N; 󠅯򇽭\u200C🜭。𑖿\u1ABBσ=\u0338; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 🜭.𑖿᪻σ≠ +B; xn--zb9h5968x.xn--4xa378i1mfjw7y; [V5 V6]; [V5 V6] # 🜭.𑖿᪻σ≠ +B; xn--0ug3766p5nm1b.xn--4xa378i1mfjw7y; [C1 V5 V6]; [C1 V5 V6] # 🜭.𑖿᪻σ≠ +B; xn--0ug3766p5nm1b.xn--3xa578i1mfjw7y; [C1 V5 V6]; [C1 V5 V6] # 🜭.𑖿᪻ς≠ +T; 󠅯򇽭\u200C🜭。𑖿\u1ABBΣ=\u0338; [C1 P1 V5 V6]; [P1 V5 V6] # 🜭.𑖿᪻σ≠ +N; 󠅯򇽭\u200C🜭。𑖿\u1ABBΣ=\u0338; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 🜭.𑖿᪻σ≠ +T; 󠅯򇽭\u200C🜭。𑖿\u1ABBΣ≠; [C1 P1 V5 V6]; [P1 V5 V6] # 🜭.𑖿᪻σ≠ +N; 󠅯򇽭\u200C🜭。𑖿\u1ABBΣ≠; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 🜭.𑖿᪻σ≠ +T; 󠅯򇽭\u200C🜭。𑖿\u1ABBσ≠; [C1 P1 V5 V6]; [P1 V5 V6] # 🜭.𑖿᪻σ≠ +N; 󠅯򇽭\u200C🜭。𑖿\u1ABBσ≠; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 🜭.𑖿᪻σ≠ +T; 󠅯򇽭\u200C🜭。𑖿\u1ABBσ=\u0338; [C1 P1 V5 V6]; [P1 V5 V6] # 🜭.𑖿᪻σ≠ +N; 󠅯򇽭\u200C🜭。𑖿\u1ABBσ=\u0338; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 🜭.𑖿᪻σ≠ +T; ⒋。⒈\u200D򳴢; [C2 P1 V6]; [P1 V6] # ⒋.⒈ +N; ⒋。⒈\u200D򳴢; [C2 P1 V6]; [C2 P1 V6] # ⒋.⒈ +T; 4.。1.\u200D򳴢; [C2 P1 V6 A4_2]; [P1 V6 A4_2] # 4..1. +N; 4.。1.\u200D򳴢; [C2 P1 V6 A4_2]; [C2 P1 V6 A4_2] # 4..1. +B; 4..1.xn--sf51d; [V6 A4_2]; [V6 A4_2] +B; 4..1.xn--1ug64613i; [C2 V6 A4_2]; [C2 V6 A4_2] # 4..1. 
+B; xn--wsh.xn--tsh07994h; [V6]; [V6] +B; xn--wsh.xn--1ug58o74922a; [C2 V6]; [C2 V6] # ⒋.⒈ +T; \u0644ß。𐇽\u1A60򾅢𞤾; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لß.᩠𐇽𞤾 +N; \u0644ß。𐇽\u1A60򾅢𞤾; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لß.᩠𐇽𞤾 +T; \u0644ß。\u1A60𐇽򾅢𞤾; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لß.᩠𐇽𞤾 +N; \u0644ß。\u1A60𐇽򾅢𞤾; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لß.᩠𐇽𞤾 +T; \u0644ß。\u1A60𐇽򾅢𞤾; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لß.᩠𐇽𞤾 +N; \u0644ß。\u1A60𐇽򾅢𞤾; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لß.᩠𐇽𞤾 +B; \u0644SS。\u1A60𐇽򾅢𞤜; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لss.᩠𐇽𞤾 +B; \u0644ss。\u1A60𐇽򾅢𞤾; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لss.᩠𐇽𞤾 +B; \u0644Ss。\u1A60𐇽򾅢𞤜; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لss.᩠𐇽𞤾 +B; xn--ss-svd.xn--jof2298hn83fln78f; [B1 B2 B3 V5 V6]; [B1 B2 B3 V5 V6] # لss.᩠𐇽𞤾 +B; xn--zca57y.xn--jof2298hn83fln78f; [B1 B2 B3 V5 V6]; [B1 B2 B3 V5 V6] # لß.᩠𐇽𞤾 +B; \u0644SS。\u1A60𐇽򾅢𞤜; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لss.᩠𐇽𞤾 +B; \u0644ss。\u1A60𐇽򾅢𞤾; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لss.᩠𐇽𞤾 +B; \u0644Ss。\u1A60𐇽򾅢𞤜; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لss.᩠𐇽𞤾 +B; \u0644SS。𐇽\u1A60򾅢𞤜; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لss.᩠𐇽𞤾 +B; \u0644ss。𐇽\u1A60򾅢𞤾; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لss.᩠𐇽𞤾 +B; \u0644Ss。𐇽\u1A60򾅢𞤜; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لss.᩠𐇽𞤾 +B; \u0644SS。\u1A60𐇽򾅢𞤾; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لss.᩠𐇽𞤾 +B; \u0644Ss。\u1A60𐇽򾅢𞤾; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لss.᩠𐇽𞤾 +B; \u0644SS。\u1A60𐇽򾅢𞤾; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لss.᩠𐇽𞤾 +B; \u0644Ss。\u1A60𐇽򾅢𞤾; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لss.᩠𐇽𞤾 +B; \u0644SS。𐇽\u1A60򾅢𞤾; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لss.᩠𐇽𞤾 +B; \u0644Ss。𐇽\u1A60򾅢𞤾; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لss.᩠𐇽𞤾 +B; 𐹽𑄳񼜲.\u1DDF\u17B8\uA806𑜫; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # 𐹽𑄳.ᷟី꠆𑜫 +B; xn--1o0di0c0652w.xn--33e362arr1l153d; [B1 B3 B6 V5 V6]; [B1 B3 B6 V5 V6] # 𐹽𑄳.ᷟី꠆𑜫 +T; Ⴓ𑜫\u200D򗭓.\u06A7𑰶; [P1 V6]; [P1 V6] # Ⴓ𑜫.ڧ𑰶 +N; Ⴓ𑜫\u200D򗭓.\u06A7𑰶; [P1 V6]; [P1 V6] # Ⴓ𑜫.ڧ𑰶 +T; Ⴓ𑜫\u200D򗭓.\u06A7𑰶; [P1 V6]; [P1 V6] # Ⴓ𑜫.ڧ𑰶 +N; Ⴓ𑜫\u200D򗭓.\u06A7𑰶; [P1 V6]; [P1 V6] # Ⴓ𑜫.ڧ𑰶 +T; ⴓ𑜫\u200D򗭓.\u06A7𑰶; [P1 V6]; [P1 V6] # ⴓ𑜫.ڧ𑰶 +N; ⴓ𑜫\u200D򗭓.\u06A7𑰶; [P1 V6]; [P1 V6] # ⴓ𑜫.ڧ𑰶 +B; xn--blj6306ey091d.xn--9jb4223l; [V6]; [V6] # ⴓ𑜫.ڧ𑰶 +B; xn--1ugy52cym7p7xu5e.xn--9jb4223l; [V6]; [V6] # ⴓ𑜫.ڧ𑰶 +B; xn--rnd8945ky009c.xn--9jb4223l; [V6]; [V6] # Ⴓ𑜫.ڧ𑰶 +B; xn--rnd479ep20q7x12e.xn--9jb4223l; [V6]; [V6] # Ⴓ𑜫.ڧ𑰶 +T; ⴓ𑜫\u200D򗭓.\u06A7𑰶; [P1 V6]; [P1 V6] # ⴓ𑜫.ڧ𑰶 +N; ⴓ𑜫\u200D򗭓.\u06A7𑰶; [P1 V6]; [P1 V6] # ⴓ𑜫.ڧ𑰶 +B; 𐨿.🄆—; [P1 V5 V6]; [P1 V5 V6] +B; 𐨿.5,—; [P1 V5 V6]; [P1 V5 V6] +B; xn--0s9c.xn--5,-81t; [P1 V5 V6]; [P1 V5 V6] +B; xn--0s9c.xn--8ug8324p; [V5 V6]; [V5 V6] +B; 򔊱񁦮۸。󠾭-; [P1 V3 V6]; [P1 V3 V6] +B; xn--lmb18944c0g2z.xn----2k81m; [V3 V6]; [V3 V6] +B; 𼗸\u07CD𐹮。\u06DDᡎᠴ; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # ߍ𐹮.ᡎᠴ +B; xn--osb0855kcc2r.xn--tlb299fhc; [B1 B5 B6 V6]; [B1 B5 B6 V6] # ߍ𐹮.ᡎᠴ +T; \u200DᠮႾ🄂.🚗\u0841𮹌\u200C; [B1 C1 C2 P1 V6]; [B1 P1 V6] # ᠮႾ🄂.🚗ࡁ +N; \u200DᠮႾ🄂.🚗\u0841𮹌\u200C; [B1 C1 C2 P1 V6]; [B1 C1 C2 P1 V6] # ᠮႾ🄂.🚗ࡁ +T; \u200DᠮႾ1,.🚗\u0841𮹌\u200C; [B1 C1 C2 P1 V6]; [B1 B6 P1 V6] # ᠮႾ1,.🚗ࡁ +N; \u200DᠮႾ1,.🚗\u0841𮹌\u200C; [B1 C1 C2 P1 V6]; [B1 C1 C2 P1 V6] # ᠮႾ1,.🚗ࡁ +T; \u200Dᠮⴞ1,.🚗\u0841𮹌\u200C; [B1 C1 C2 P1 V6]; [B1 B6 P1 V6] # ᠮⴞ1,.🚗ࡁ +N; \u200Dᠮⴞ1,.🚗\u0841𮹌\u200C; [B1 C1 C2 P1 V6]; [B1 C1 C2 P1 V6] # ᠮⴞ1,.🚗ࡁ +B; xn--1,-v3o625k.xn--zvb3124wpkpf; [B1 B6 P1 V6]; [B1 B6 P1 V6] # ᠮⴞ1,.🚗ࡁ +B; xn--1,-v3o161c53q.xn--zvb692j9664aic1g; [B1 C1 C2 P1 
V6]; [B1 C1 C2 P1 V6] # ᠮⴞ1,.🚗ࡁ +B; xn--1,-ogkx89c.xn--zvb3124wpkpf; [B1 B6 P1 V6]; [B1 B6 P1 V6] # ᠮႾ1,.🚗ࡁ +B; xn--1,-ogkx89c39j.xn--zvb692j9664aic1g; [B1 C1 C2 P1 V6]; [B1 C1 C2 P1 V6] # ᠮႾ1,.🚗ࡁ +T; \u200Dᠮⴞ🄂.🚗\u0841𮹌\u200C; [B1 C1 C2 P1 V6]; [B1 P1 V6] # ᠮⴞ🄂.🚗ࡁ +N; \u200Dᠮⴞ🄂.🚗\u0841𮹌\u200C; [B1 C1 C2 P1 V6]; [B1 C1 C2 P1 V6] # ᠮⴞ🄂.🚗ࡁ +B; xn--h7e438h1p44a.xn--zvb3124wpkpf; [B1 V6]; [B1 V6] # ᠮⴞ🄂.🚗ࡁ +B; xn--h7e341b0wlbv45b.xn--zvb692j9664aic1g; [B1 C1 C2 V6]; [B1 C1 C2 V6] # ᠮⴞ🄂.🚗ࡁ +B; xn--2nd129ai554b.xn--zvb3124wpkpf; [B1 V6]; [B1 V6] # ᠮႾ🄂.🚗ࡁ +B; xn--2nd129ay2gnw71c.xn--zvb692j9664aic1g; [B1 C1 C2 V6]; [B1 C1 C2 V6] # ᠮႾ🄂.🚗ࡁ +B; \u0601\u0697.𑚶񼡷⾆; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ڗ.𑚶舌 +B; \u0601\u0697.𑚶񼡷舌; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ڗ.𑚶舌 +B; xn--jfb41a.xn--tc1ap851axo39c; [B1 V5 V6]; [B1 V5 V6] # ڗ.𑚶舌 +B; 🞅󠳡󜍙.񲖷; [P1 V6]; [P1 V6] +B; xn--ie9hi1349bqdlb.xn--oj69a; [V6]; [V6] +T; \u20E7񯡎-򫣝.4Ⴄ\u200C; [C1 P1 V5 V6]; [P1 V5 V6] # ⃧-.4Ⴄ +N; \u20E7񯡎-򫣝.4Ⴄ\u200C; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ⃧-.4Ⴄ +T; \u20E7񯡎-򫣝.4ⴄ\u200C; [C1 P1 V5 V6]; [P1 V5 V6] # ⃧-.4ⴄ +N; \u20E7񯡎-򫣝.4ⴄ\u200C; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ⃧-.4ⴄ +B; xn----9snu5320fi76w.xn--4-ivs; [V5 V6]; [V5 V6] # ⃧-.4ⴄ +B; xn----9snu5320fi76w.xn--4-sgn589c; [C1 V5 V6]; [C1 V5 V6] # ⃧-.4ⴄ +B; xn----9snu5320fi76w.xn--4-f0g; [V5 V6]; [V5 V6] # ⃧-.4Ⴄ +B; xn----9snu5320fi76w.xn--4-f0g649i; [C1 V5 V6]; [C1 V5 V6] # ⃧-.4Ⴄ +T; ᚭ。𝌠ß𖫱; ᚭ.𝌠ß𖫱; xn--hwe.xn--ss-ci1ub261a; NV8 +N; ᚭ。𝌠ß𖫱; ᚭ.𝌠ß𖫱; xn--hwe.xn--zca4946pblnc; NV8 +T; ᚭ。𝌠ß𖫱; ᚭ.𝌠ß𖫱; xn--hwe.xn--ss-ci1ub261a; NV8 +N; ᚭ。𝌠ß𖫱; ᚭ.𝌠ß𖫱; xn--hwe.xn--zca4946pblnc; NV8 +B; ᚭ。𝌠SS𖫱; ᚭ.𝌠ss𖫱; xn--hwe.xn--ss-ci1ub261a; NV8 +B; ᚭ。𝌠ss𖫱; ᚭ.𝌠ss𖫱; xn--hwe.xn--ss-ci1ub261a; NV8 +B; ᚭ。𝌠Ss𖫱; ᚭ.𝌠ss𖫱; xn--hwe.xn--ss-ci1ub261a; NV8 +B; xn--hwe.xn--ss-ci1ub261a; ᚭ.𝌠ss𖫱; xn--hwe.xn--ss-ci1ub261a; NV8 +B; ᚭ.𝌠ss𖫱; ; xn--hwe.xn--ss-ci1ub261a; NV8 +B; ᚭ.𝌠SS𖫱; ᚭ.𝌠ss𖫱; xn--hwe.xn--ss-ci1ub261a; NV8 +B; ᚭ.𝌠Ss𖫱; ᚭ.𝌠ss𖫱; xn--hwe.xn--ss-ci1ub261a; NV8 +B; xn--hwe.xn--zca4946pblnc; ᚭ.𝌠ß𖫱; xn--hwe.xn--zca4946pblnc; NV8 +T; ᚭ.𝌠ß𖫱; ; xn--hwe.xn--ss-ci1ub261a; NV8 +N; ᚭ.𝌠ß𖫱; ; xn--hwe.xn--zca4946pblnc; NV8 +B; ᚭ。𝌠SS𖫱; ᚭ.𝌠ss𖫱; xn--hwe.xn--ss-ci1ub261a; NV8 +B; ᚭ。𝌠ss𖫱; ᚭ.𝌠ss𖫱; xn--hwe.xn--ss-ci1ub261a; NV8 +B; ᚭ。𝌠Ss𖫱; ᚭ.𝌠ss𖫱; xn--hwe.xn--ss-ci1ub261a; NV8 +B; ₁。𞤫ꡪ; [B1 B2 B3]; [B1 B2 B3] +B; 1。𞤫ꡪ; [B1 B2 B3]; [B1 B2 B3] +B; 1。𞤉ꡪ; [B1 B2 B3]; [B1 B2 B3] +B; 1.xn--gd9al691d; [B1 B2 B3]; [B1 B2 B3] +B; ₁。𞤉ꡪ; [B1 B2 B3]; [B1 B2 B3] +T; 𯻼\u200C.𞶞򻙤񥘇; [B2 B3 B6 C1 P1 V6]; [B2 B3 P1 V6] # . +N; 𯻼\u200C.𞶞򻙤񥘇; [B2 B3 B6 C1 P1 V6]; [B2 B3 B6 C1 P1 V6] # . +B; xn--kg4n.xn--2b7hs861pl540a; [B2 B3 V6]; [B2 B3 V6] +B; xn--0ug27500a.xn--2b7hs861pl540a; [B2 B3 B6 C1 V6]; [B2 B3 B6 C1 V6] # . 
+B; 𑑄≯。𑜤; [P1 V5 V6]; [P1 V5 V6] +B; 𑑄>\u0338。𑜤; [P1 V5 V6]; [P1 V5 V6] +B; 𑑄≯。𑜤; [P1 V5 V6]; [P1 V5 V6] +B; 𑑄>\u0338。𑜤; [P1 V5 V6]; [P1 V5 V6] +B; xn--hdh5636g.xn--ci2d; [V5 V6]; [V5 V6] +T; Ⴋ≮𱲆。\u200D\u07A7𐋣; [C2 P1 V6]; [P1 V5 V6] # Ⴋ≮.ާ𐋣 +N; Ⴋ≮𱲆。\u200D\u07A7𐋣; [C2 P1 V6]; [C2 P1 V6] # Ⴋ≮.ާ𐋣 +T; Ⴋ<\u0338𱲆。\u200D\u07A7𐋣; [C2 P1 V6]; [P1 V5 V6] # Ⴋ≮.ާ𐋣 +N; Ⴋ<\u0338𱲆。\u200D\u07A7𐋣; [C2 P1 V6]; [C2 P1 V6] # Ⴋ≮.ާ𐋣 +T; ⴋ<\u0338𱲆。\u200D\u07A7𐋣; [C2 P1 V6]; [P1 V5 V6] # ⴋ≮.ާ𐋣 +N; ⴋ<\u0338𱲆。\u200D\u07A7𐋣; [C2 P1 V6]; [C2 P1 V6] # ⴋ≮.ާ𐋣 +T; ⴋ≮𱲆。\u200D\u07A7𐋣; [C2 P1 V6]; [P1 V5 V6] # ⴋ≮.ާ𐋣 +N; ⴋ≮𱲆。\u200D\u07A7𐋣; [C2 P1 V6]; [C2 P1 V6] # ⴋ≮.ާ𐋣 +B; xn--gdhz03bxt42d.xn--lrb6479j; [V5 V6]; [V5 V6] # ⴋ≮.ާ𐋣 +B; xn--gdhz03bxt42d.xn--lrb506jqr4n; [C2 V6]; [C2 V6] # ⴋ≮.ާ𐋣 +B; xn--jnd802gsm17c.xn--lrb6479j; [V5 V6]; [V5 V6] # Ⴋ≮.ާ𐋣 +B; xn--jnd802gsm17c.xn--lrb506jqr4n; [C2 V6]; [C2 V6] # Ⴋ≮.ާ𐋣 +B; \u17D2.򆽒≯; [P1 V5 V6]; [P1 V5 V6] # ្.≯ +B; \u17D2.򆽒>\u0338; [P1 V5 V6]; [P1 V5 V6] # ្.≯ +B; xn--u4e.xn--hdhx0084f; [V5 V6]; [V5 V6] # ្.≯ +B; 񏁇\u1734.𐨺É⬓𑄴; [P1 V5 V6]; [P1 V5 V6] # ᜴.𐨺é⬓𑄴 +B; 񏁇\u1734.𐨺E\u0301⬓𑄴; [P1 V5 V6]; [P1 V5 V6] # ᜴.𐨺é⬓𑄴 +B; 񏁇\u1734.𐨺É⬓𑄴; [P1 V5 V6]; [P1 V5 V6] # ᜴.𐨺é⬓𑄴 +B; 񏁇\u1734.𐨺E\u0301⬓𑄴; [P1 V5 V6]; [P1 V5 V6] # ᜴.𐨺é⬓𑄴 +B; 񏁇\u1734.𐨺e\u0301⬓𑄴; [P1 V5 V6]; [P1 V5 V6] # ᜴.𐨺é⬓𑄴 +B; 񏁇\u1734.𐨺é⬓𑄴; [P1 V5 V6]; [P1 V5 V6] # ᜴.𐨺é⬓𑄴 +B; xn--c0e34564d.xn--9ca207st53lg3f; [V5 V6]; [V5 V6] # ᜴.𐨺é⬓𑄴 +B; 񏁇\u1734.𐨺e\u0301⬓𑄴; [P1 V5 V6]; [P1 V5 V6] # ᜴.𐨺é⬓𑄴 +B; 񏁇\u1734.𐨺é⬓𑄴; [P1 V5 V6]; [P1 V5 V6] # ᜴.𐨺é⬓𑄴 +T; ᢇ\u200D\uA8C4。︒𞤺; [B1 B6 C2 P1 V6]; [B1 P1 V6] # ᢇ꣄.︒𞤺 +N; ᢇ\u200D\uA8C4。︒𞤺; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # ᢇ꣄.︒𞤺 +T; ᢇ\u200D\uA8C4。。𞤺; [B6 C2 A4_2]; [A4_2] # ᢇ꣄..𞤺 +N; ᢇ\u200D\uA8C4。。𞤺; [B6 C2 A4_2]; [B6 C2 A4_2] # ᢇ꣄..𞤺 +T; ᢇ\u200D\uA8C4。。𞤘; [B6 C2 A4_2]; [A4_2] # ᢇ꣄..𞤺 +N; ᢇ\u200D\uA8C4。。𞤘; [B6 C2 A4_2]; [B6 C2 A4_2] # ᢇ꣄..𞤺 +B; xn--09e4694e..xn--ye6h; [A4_2]; [A4_2] # ᢇ꣄..𞤺 +B; xn--09e669a6x8j..xn--ye6h; [B6 C2 A4_2]; [B6 C2 A4_2] # ᢇ꣄..𞤺 +T; ᢇ\u200D\uA8C4。︒𞤘; [B1 B6 C2 P1 V6]; [B1 P1 V6] # ᢇ꣄.︒𞤺 +N; ᢇ\u200D\uA8C4。︒𞤘; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # ᢇ꣄.︒𞤺 +B; xn--09e4694e.xn--y86cv562b; [B1 V6]; [B1 V6] # ᢇ꣄.︒𞤺 +B; xn--09e669a6x8j.xn--y86cv562b; [B1 B6 C2 V6]; [B1 B6 C2 V6] # ᢇ꣄.︒𞤺 +T; 𞩬򖙱\u1714\u200C。\u0631\u07AA≮; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ᜔.رު≮ +N; 𞩬򖙱\u1714\u200C。\u0631\u07AA≮; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ᜔.رު≮ +T; 𞩬򖙱\u1714\u200C。\u0631\u07AA<\u0338; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ᜔.رު≮ +N; 𞩬򖙱\u1714\u200C。\u0631\u07AA<\u0338; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ᜔.رު≮ +T; 𞩬򖙱\u1714\u200C。\u0631\u07AA≮; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ᜔.رު≮ +N; 𞩬򖙱\u1714\u200C。\u0631\u07AA≮; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ᜔.رު≮ +T; 𞩬򖙱\u1714\u200C。\u0631\u07AA<\u0338; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ᜔.رު≮ +N; 𞩬򖙱\u1714\u200C。\u0631\u07AA<\u0338; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ᜔.رު≮ +B; xn--fze3930v7hz6b.xn--wgb86el10d; [B2 B3 V6]; [B2 B3 V6] # ᜔.رު≮ +B; xn--fze607b9651bjwl7c.xn--wgb86el10d; [B2 B3 V6]; [B2 B3 V6] # ᜔.رު≮ +B; Ⴣ.\u0653ᢤ; [P1 V5 V6]; [P1 V5 V6] # Ⴣ.ٓᢤ +B; Ⴣ.\u0653ᢤ; [P1 V5 V6]; [P1 V5 V6] # Ⴣ.ٓᢤ +B; ⴣ.\u0653ᢤ; [V5]; [V5] # ⴣ.ٓᢤ +B; xn--rlj.xn--vhb294g; [V5]; [V5] # ⴣ.ٓᢤ +B; xn--7nd.xn--vhb294g; [V5 V6]; [V5 V6] # Ⴣ.ٓᢤ +B; ⴣ.\u0653ᢤ; [V5]; [V5] # ⴣ.ٓᢤ +B; 󠄈\u0813.싉򄆻Ⴤ򂡐; [P1 V6]; [P1 V6] # ࠓ.싉Ⴤ +B; 󠄈\u0813.싉򄆻Ⴤ򂡐; [P1 V6]; [P1 V6] # ࠓ.싉Ⴤ +B; 󠄈\u0813.싉򄆻Ⴤ򂡐; [P1 V6]; [P1 V6] # ࠓ.싉Ⴤ +B; 󠄈\u0813.싉򄆻Ⴤ򂡐; [P1 V6]; [P1 V6] # ࠓ.싉Ⴤ +B; 󠄈\u0813.싉򄆻ⴤ򂡐; [P1 V6]; [P1 V6] # ࠓ.싉ⴤ +B; 󠄈\u0813.싉򄆻ⴤ򂡐; [P1 V6]; [P1 V6] # ࠓ.싉ⴤ +B; xn--oub.xn--sljz109bpe25dviva; [V6]; [V6] # ࠓ.싉ⴤ +B; xn--oub.xn--8nd9522gpe69cviva; 
[V6]; [V6] # ࠓ.싉Ⴤ +B; 󠄈\u0813.싉򄆻ⴤ򂡐; [P1 V6]; [P1 V6] # ࠓ.싉ⴤ +B; 󠄈\u0813.싉򄆻ⴤ򂡐; [P1 V6]; [P1 V6] # ࠓ.싉ⴤ +B; \uAA2C𑲫≮.⤂; [P1 V5 V6]; [P1 V5 V6] # ꨬ𑲫≮.⤂ +B; \uAA2C𑲫<\u0338.⤂; [P1 V5 V6]; [P1 V5 V6] # ꨬ𑲫≮.⤂ +B; \uAA2C𑲫≮.⤂; [P1 V5 V6]; [P1 V5 V6] # ꨬ𑲫≮.⤂ +B; \uAA2C𑲫<\u0338.⤂; [P1 V5 V6]; [P1 V5 V6] # ꨬ𑲫≮.⤂ +B; xn--gdh1854cn19c.xn--kqi; [V5 V6]; [V5 V6] # ꨬ𑲫≮.⤂ +B; \u0604𐩔≮Ⴢ.Ⴃ; [B1 P1 V6]; [B1 P1 V6] # 𐩔≮Ⴢ.Ⴃ +B; \u0604𐩔<\u0338Ⴢ.Ⴃ; [B1 P1 V6]; [B1 P1 V6] # 𐩔≮Ⴢ.Ⴃ +B; \u0604𐩔≮Ⴢ.Ⴃ; [B1 P1 V6]; [B1 P1 V6] # 𐩔≮Ⴢ.Ⴃ +B; \u0604𐩔<\u0338Ⴢ.Ⴃ; [B1 P1 V6]; [B1 P1 V6] # 𐩔≮Ⴢ.Ⴃ +B; \u0604𐩔<\u0338ⴢ.ⴃ; [B1 P1 V6]; [B1 P1 V6] # 𐩔≮ⴢ.ⴃ +B; \u0604𐩔≮ⴢ.ⴃ; [B1 P1 V6]; [B1 P1 V6] # 𐩔≮ⴢ.ⴃ +B; \u0604𐩔≮Ⴢ.ⴃ; [B1 P1 V6]; [B1 P1 V6] # 𐩔≮Ⴢ.ⴃ +B; \u0604𐩔<\u0338Ⴢ.ⴃ; [B1 P1 V6]; [B1 P1 V6] # 𐩔≮Ⴢ.ⴃ +B; xn--mfb416c0jox02t.xn--ukj; [B1 V6]; [B1 V6] # 𐩔≮Ⴢ.ⴃ +B; xn--mfb266l4khr54u.xn--ukj; [B1 V6]; [B1 V6] # 𐩔≮ⴢ.ⴃ +B; xn--mfb416c0jox02t.xn--bnd; [B1 V6]; [B1 V6] # 𐩔≮Ⴢ.Ⴃ +B; \u0604𐩔<\u0338ⴢ.ⴃ; [B1 P1 V6]; [B1 P1 V6] # 𐩔≮ⴢ.ⴃ +B; \u0604𐩔≮ⴢ.ⴃ; [B1 P1 V6]; [B1 P1 V6] # 𐩔≮ⴢ.ⴃ +B; \u0604𐩔≮Ⴢ.ⴃ; [B1 P1 V6]; [B1 P1 V6] # 𐩔≮Ⴢ.ⴃ +B; \u0604𐩔<\u0338Ⴢ.ⴃ; [B1 P1 V6]; [B1 P1 V6] # 𐩔≮Ⴢ.ⴃ +B; 𑁅。-; [V3 V5]; [V3 V5] +B; xn--210d.-; [V3 V5]; [V3 V5] +B; \u0DCA򕸽󠧱。饈≠\u0664; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ්.饈≠٤ +B; \u0DCA򕸽󠧱。饈=\u0338\u0664; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ්.饈≠٤ +B; \u0DCA򕸽󠧱。饈≠\u0664; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ්.饈≠٤ +B; \u0DCA򕸽󠧱。饈=\u0338\u0664; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ්.饈≠٤ +B; xn--h1c25913jfwov.xn--dib144ler5f; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # ්.饈≠٤ +B; 𞥃ᠠ⁷。≯邅⬻4; [B1 B2 P1 V6]; [B1 B2 P1 V6] +B; 𞥃ᠠ⁷。>\u0338邅⬻4; [B1 B2 P1 V6]; [B1 B2 P1 V6] +B; 𞥃ᠠ7。≯邅⬻4; [B1 B2 P1 V6]; [B1 B2 P1 V6] +B; 𞥃ᠠ7。>\u0338邅⬻4; [B1 B2 P1 V6]; [B1 B2 P1 V6] +B; 𞤡ᠠ7。>\u0338邅⬻4; [B1 B2 P1 V6]; [B1 B2 P1 V6] +B; 𞤡ᠠ7。≯邅⬻4; [B1 B2 P1 V6]; [B1 B2 P1 V6] +B; xn--7-v4j2826w.xn--4-ogoy01bou3i; [B1 B2 V6]; [B1 B2 V6] +B; 𞤡ᠠ⁷。>\u0338邅⬻4; [B1 B2 P1 V6]; [B1 B2 P1 V6] +B; 𞤡ᠠ⁷。≯邅⬻4; [B1 B2 P1 V6]; [B1 B2 P1 V6] +B; 򠿯ᡳ-𑐻.𐹴𐋫\u0605󑎳; [B1 B6 P1 V6]; [B1 B6 P1 V6] # ᡳ-𑐻.𐹴𐋫 +B; xn----m9j3429kxmy7e.xn--nfb7950kdihrp812a; [B1 B6 V6]; [B1 B6 V6] # ᡳ-𑐻.𐹴𐋫 +B; 򠶆\u0845\u0A51.넨-󶧈; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ࡅੑ.넨- +B; 򠶆\u0845\u0A51.넨-󶧈; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ࡅੑ.넨- +B; xn--3vb26hb6834b.xn----i37ez0957g; [B5 B6 V6]; [B5 B6 V6] # ࡅੑ.넨- +T; ꡦᡑ\u200D⒈。𐋣-; [C2 P1 V3 V6]; [P1 V3 V6] # ꡦᡑ⒈.𐋣- +N; ꡦᡑ\u200D⒈。𐋣-; [C2 P1 V3 V6]; [C2 P1 V3 V6] # ꡦᡑ⒈.𐋣- +T; ꡦᡑ\u200D1.。𐋣-; [C2 V3 A4_2]; [V3 A4_2] # ꡦᡑ1..𐋣- +N; ꡦᡑ\u200D1.。𐋣-; [C2 V3 A4_2]; [C2 V3 A4_2] # ꡦᡑ1..𐋣- +B; xn--1-o7j0610f..xn----381i; [V3 A4_2]; [V3 A4_2] +B; xn--1-o7j663bdl7m..xn----381i; [C2 V3 A4_2]; [C2 V3 A4_2] # ꡦᡑ1..𐋣- +B; xn--h8e863drj7h.xn----381i; [V3 V6]; [V3 V6] +B; xn--h8e470bl0d838o.xn----381i; [C2 V3 V6]; [C2 V3 V6] # ꡦᡑ⒈.𐋣- +B; Ⴌ。􍼠\uFB69; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴌ.ٹ +B; Ⴌ。􍼠\u0679; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴌ.ٹ +B; ⴌ。􍼠\u0679; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ⴌ.ٹ +B; xn--3kj.xn--yib19191t; [B5 B6 V6]; [B5 B6 V6] # ⴌ.ٹ +B; xn--knd.xn--yib19191t; [B5 B6 V6]; [B5 B6 V6] # Ⴌ.ٹ +B; ⴌ。􍼠\uFB69; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ⴌ.ٹ +B; 𐮁𐭱.\u0F84\u135E-\u1CFA; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𐮁𐭱.྄፞- +B; xn--r19c5a.xn----xjg270ag3m; [B1 V5 V6]; [B1 V5 V6] # 𐮁𐭱.྄፞- +T; ⒈䰹\u200D-。웈; [C2 P1 V3 V6]; [P1 V3 V6] # ⒈䰹-.웈 +N; ⒈䰹\u200D-。웈; [C2 P1 V3 V6]; [C2 P1 V3 V6] # ⒈䰹-.웈 +T; ⒈䰹\u200D-。웈; [C2 P1 V3 V6]; [P1 V3 V6] # ⒈䰹-.웈 +N; ⒈䰹\u200D-。웈; [C2 P1 V3 V6]; [C2 P1 V3 V6] # ⒈䰹-.웈 +T; 1.䰹\u200D-。웈; [C2 V3]; [V3] # 1.䰹-.웈 +N; 1.䰹\u200D-。웈; [C2 V3]; [C2 V3] # 1.䰹-.웈 +T; 1.䰹\u200D-。웈; 
[C2 V3]; [V3] # 1.䰹-.웈 +N; 1.䰹\u200D-。웈; [C2 V3]; [C2 V3] # 1.䰹-.웈 +B; 1.xn----zw5a.xn--kp5b; [V3]; [V3] +B; 1.xn----tgnz80r.xn--kp5b; [C2 V3]; [C2 V3] # 1.䰹-.웈 +B; xn----dcp160o.xn--kp5b; [V3 V6]; [V3 V6] +B; xn----tgnx5rjr6c.xn--kp5b; [C2 V3 V6]; [C2 V3 V6] # ⒈䰹-.웈 +T; て。\u200C󠳽\u07F3; [C1 P1 V6]; [P1 V6] # て.߳ +N; て。\u200C󠳽\u07F3; [C1 P1 V6]; [C1 P1 V6] # て.߳ +B; xn--m9j.xn--rtb10784p; [V6]; [V6] # て.߳ +B; xn--m9j.xn--rtb154j9l73w; [C1 V6]; [C1 V6] # て.߳ +T; ς。\uA9C0\u06E7; [V5]; [V5] # ς.꧀ۧ +N; ς。\uA9C0\u06E7; [V5]; [V5] # ς.꧀ۧ +T; ς。\uA9C0\u06E7; [V5]; [V5] # ς.꧀ۧ +N; ς。\uA9C0\u06E7; [V5]; [V5] # ς.꧀ۧ +B; Σ。\uA9C0\u06E7; [V5]; [V5] # σ.꧀ۧ +B; σ。\uA9C0\u06E7; [V5]; [V5] # σ.꧀ۧ +B; xn--4xa.xn--3lb1944f; [V5]; [V5] # σ.꧀ۧ +B; xn--3xa.xn--3lb1944f; [V5]; [V5] # ς.꧀ۧ +B; Σ。\uA9C0\u06E7; [V5]; [V5] # σ.꧀ۧ +B; σ。\uA9C0\u06E7; [V5]; [V5] # σ.꧀ۧ +B; \u0BCD󥫅򌉑.ႢႵ; [P1 V5 V6]; [P1 V5 V6] # ்.ႢႵ +B; \u0BCD󥫅򌉑.ⴂⴕ; [P1 V5 V6]; [P1 V5 V6] # ்.ⴂⴕ +B; \u0BCD󥫅򌉑.Ⴂⴕ; [P1 V5 V6]; [P1 V5 V6] # ்.Ⴂⴕ +B; xn--xmc83135idcxza.xn--9md086l; [V5 V6]; [V5 V6] # ்.Ⴂⴕ +B; xn--xmc83135idcxza.xn--tkjwb; [V5 V6]; [V5 V6] # ்.ⴂⴕ +B; xn--xmc83135idcxza.xn--9md2b; [V5 V6]; [V5 V6] # ்.ႢႵ +T; \u1C32🄈⾛\u05A6.\u200D򯥤\u07FD; [B1 C2 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ᰲ🄈走֦. +N; \u1C32🄈⾛\u05A6.\u200D򯥤\u07FD; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # ᰲ🄈走֦. +T; \u1C327,走\u05A6.\u200D򯥤\u07FD; [B1 C2 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ᰲ7,走֦. +N; \u1C327,走\u05A6.\u200D򯥤\u07FD; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # ᰲ7,走֦. +B; xn--7,-bid991urn3k.xn--1tb13454l; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ᰲ7,走֦. +B; xn--7,-bid991urn3k.xn--1tb334j1197q; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # ᰲ7,走֦. +B; xn--xcb756i493fwi5o.xn--1tb13454l; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # ᰲ🄈走֦. +B; xn--xcb756i493fwi5o.xn--1tb334j1197q; [B1 C2 V5 V6]; [B1 C2 V5 V6] # ᰲ🄈走֦. 
+B; ᢗ。Ӏ񝄻; [P1 V6]; [P1 V6] +B; ᢗ。Ӏ񝄻; [P1 V6]; [P1 V6] +B; ᢗ。ӏ񝄻; [P1 V6]; [P1 V6] +B; xn--hbf.xn--s5a83117e; [V6]; [V6] +B; xn--hbf.xn--d5a86117e; [V6]; [V6] +B; ᢗ。ӏ񝄻; [P1 V6]; [P1 V6] +B; \u0668-。񠏇🝆ᄾ; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ٨-.🝆ᄾ +B; xn----oqc.xn--qrd1699v327w; [B1 V3 V6]; [B1 V3 V6] # ٨-.🝆ᄾ +B; -𐋷𖾑。󠆬; [V3]; [V3] +B; xn----991iq40y.; [V3]; [V3] +T; \u200C𐹳🐴멈.\uABED񐡼; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # 𐹳🐴멈.꯭ +N; \u200C𐹳🐴멈.\uABED񐡼; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 𐹳🐴멈.꯭ +T; \u200C𐹳🐴멈.\uABED񐡼; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # 𐹳🐴멈.꯭ +N; \u200C𐹳🐴멈.\uABED񐡼; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 𐹳🐴멈.꯭ +B; xn--422b325mqb6i.xn--429a8682s; [B1 V5 V6]; [B1 V5 V6] # 𐹳🐴멈.꯭ +B; xn--0ug6681d406b7bwk.xn--429a8682s; [B1 C1 V5 V6]; [B1 C1 V5 V6] # 𐹳🐴멈.꯭ +B; ≮.\u0769\u0603; [B1 P1 V6]; [B1 P1 V6] # ≮.ݩ +B; <\u0338.\u0769\u0603; [B1 P1 V6]; [B1 P1 V6] # ≮.ݩ +B; xn--gdh.xn--lfb92e; [B1 V6]; [B1 V6] # ≮.ݩ +T; 𐶭⾆。\u200C𑚶򟱃𞰘; [B1 B2 B3 C1 P1 V6]; [B2 B3 B5 B6 P1 V5 V6] # 舌.𑚶 +N; 𐶭⾆。\u200C𑚶򟱃𞰘; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # 舌.𑚶 +T; 𐶭舌。\u200C𑚶򟱃𞰘; [B1 B2 B3 C1 P1 V6]; [B2 B3 B5 B6 P1 V5 V6] # 舌.𑚶 +N; 𐶭舌。\u200C𑚶򟱃𞰘; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # 舌.𑚶 +B; xn--tc1ao37z.xn--6e2dw557azds2d; [B2 B3 B5 B6 V5 V6]; [B2 B3 B5 B6 V5 V6] +B; xn--tc1ao37z.xn--0ugx728gi1nfwqz2e; [B1 B2 B3 C1 V6]; [B1 B2 B3 C1 V6] # 舌.𑚶 +T; \u200CჀ-.𝟷ς𞴺ς; [B1 C1 P1 V3 V6]; [B1 B6 P1 V3 V6] # Ⴠ-.1ςς +N; \u200CჀ-.𝟷ς𞴺ς; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # Ⴠ-.1ςς +T; \u200CჀ-.1ς𞴺ς; [B1 C1 P1 V3 V6]; [B1 B6 P1 V3 V6] # Ⴠ-.1ςς +N; \u200CჀ-.1ς𞴺ς; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # Ⴠ-.1ςς +T; \u200Cⴠ-.1ς𞴺ς; [B1 C1 P1 V3 V6]; [B1 B6 P1 V3 V6] # ⴠ-.1ςς +N; \u200Cⴠ-.1ς𞴺ς; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # ⴠ-.1ςς +T; \u200CჀ-.1Σ𞴺Σ; [B1 C1 P1 V3 V6]; [B1 B6 P1 V3 V6] # Ⴠ-.1σσ +N; \u200CჀ-.1Σ𞴺Σ; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # Ⴠ-.1σσ +T; \u200Cⴠ-.1σ𞴺σ; [B1 C1 P1 V3 V6]; [B1 B6 P1 V3 V6] # ⴠ-.1σσ +N; \u200Cⴠ-.1σ𞴺σ; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # ⴠ-.1σσ +B; xn----2ws.xn--1-0mba52321c; [B1 B6 V3 V6]; [B1 B6 V3 V6] +B; xn----rgn530d.xn--1-0mba52321c; [B1 C1 V3 V6]; [B1 C1 V3 V6] # ⴠ-.1σσ +B; xn----z1g.xn--1-0mba52321c; [B1 B6 V3 V6]; [B1 B6 V3 V6] +B; xn----z1g168i.xn--1-0mba52321c; [B1 C1 V3 V6]; [B1 C1 V3 V6] # Ⴠ-.1σσ +B; xn----rgn530d.xn--1-ymba92321c; [B1 C1 V3 V6]; [B1 C1 V3 V6] # ⴠ-.1ςς +B; xn----z1g168i.xn--1-ymba92321c; [B1 C1 V3 V6]; [B1 C1 V3 V6] # Ⴠ-.1ςς +T; \u200Cⴠ-.𝟷ς𞴺ς; [B1 C1 P1 V3 V6]; [B1 B6 P1 V3 V6] # ⴠ-.1ςς +N; \u200Cⴠ-.𝟷ς𞴺ς; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # ⴠ-.1ςς +T; \u200CჀ-.𝟷Σ𞴺Σ; [B1 C1 P1 V3 V6]; [B1 B6 P1 V3 V6] # Ⴠ-.1σσ +N; \u200CჀ-.𝟷Σ𞴺Σ; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # Ⴠ-.1σσ +T; \u200Cⴠ-.𝟷σ𞴺σ; [B1 C1 P1 V3 V6]; [B1 B6 P1 V3 V6] # ⴠ-.1σσ +N; \u200Cⴠ-.𝟷σ𞴺σ; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # ⴠ-.1σσ +B; 𑲘󠄒𓑡。𝟪Ⴜ; [P1 V5 V6]; [P1 V5 V6] +B; 𑲘󠄒𓑡。8Ⴜ; [P1 V5 V6]; [P1 V5 V6] +B; 𑲘󠄒𓑡。8ⴜ; [P1 V5 V6]; [P1 V5 V6] +B; xn--7m3d291b.xn--8-vws; [V5 V6]; [V5 V6] +B; xn--7m3d291b.xn--8-s1g; [V5 V6]; [V5 V6] +B; 𑲘󠄒𓑡。𝟪ⴜ; [P1 V5 V6]; [P1 V5 V6] +B; 䪏\u06AB\u07E0\u0941。뭕ᢝ\u17B9; [B5 B6]; [B5 B6] # 䪏ګߠु.뭕ᢝឹ +B; 䪏\u06AB\u07E0\u0941。뭕ᢝ\u17B9; [B5 B6]; [B5 B6] # 䪏ګߠु.뭕ᢝឹ +B; 䪏\u06AB\u07E0\u0941。뭕ᢝ\u17B9; [B5 B6]; [B5 B6] # 䪏ګߠु.뭕ᢝឹ +B; 䪏\u06AB\u07E0\u0941。뭕ᢝ\u17B9; [B5 B6]; [B5 B6] # 䪏ګߠु.뭕ᢝឹ +B; xn--ekb23dj4at01n.xn--43e96bh910b; [B5 B6]; [B5 B6] # 䪏ګߠु.뭕ᢝឹ +B; \u1BAB。🂉󠁰; [P1 V5 V6]; [P1 V5 V6] # ᮫.🂉 +B; \u1BAB。🂉󠁰; [P1 V5 V6]; [P1 V5 V6] # ᮫.🂉 +B; xn--zxf.xn--fx7ho0250c; [V5 V6]; [V5 V6] # ᮫.🂉 +T; 󩎃\u0AC4。ς\u200D𐹮𑈵; [B5 C2 P1 V6]; [B5 P1 V6] # ૄ.ς𐹮𑈵 +N; 󩎃\u0AC4。ς\u200D𐹮𑈵; [B5 C2 P1 V6]; [B5 C2 P1 V6] # 
ૄ.ς𐹮𑈵 +T; 󩎃\u0AC4。Σ\u200D𐹮𑈵; [B5 C2 P1 V6]; [B5 P1 V6] # ૄ.σ𐹮𑈵 +N; 󩎃\u0AC4。Σ\u200D𐹮𑈵; [B5 C2 P1 V6]; [B5 C2 P1 V6] # ૄ.σ𐹮𑈵 +T; 󩎃\u0AC4。σ\u200D𐹮𑈵; [B5 C2 P1 V6]; [B5 P1 V6] # ૄ.σ𐹮𑈵 +N; 󩎃\u0AC4。σ\u200D𐹮𑈵; [B5 C2 P1 V6]; [B5 C2 P1 V6] # ૄ.σ𐹮𑈵 +B; xn--dfc53161q.xn--4xa8467k5mc; [B5 V6]; [B5 V6] # ૄ.σ𐹮𑈵 +B; xn--dfc53161q.xn--4xa895lzo7nsfd; [B5 C2 V6]; [B5 C2 V6] # ૄ.σ𐹮𑈵 +B; xn--dfc53161q.xn--3xa006lzo7nsfd; [B5 C2 V6]; [B5 C2 V6] # ૄ.ς𐹮𑈵 +B; 𐫀ᡂ𑜫.𑘿; [B1 B2 B3 B6 V5]; [B1 B2 B3 B6 V5] +B; 𐫀ᡂ𑜫.𑘿; [B1 B2 B3 B6 V5]; [B1 B2 B3 B6 V5] +B; xn--17e9625js1h.xn--sb2d; [B1 B2 B3 B6 V5]; [B1 B2 B3 B6 V5] +T; 󬚶󸋖򖩰-。\u200C; [C1 P1 V3 V6]; [P1 V3 V6] # -. +N; 󬚶󸋖򖩰-。\u200C; [C1 P1 V3 V6]; [C1 P1 V3 V6] # -. +B; xn----7i12hu122k9ire.; [V3 V6]; [V3 V6] +B; xn----7i12hu122k9ire.xn--0ug; [C1 V3 V6]; [C1 V3 V6] # -. +B; 𐹣.\u07C2; [B1]; [B1] # 𐹣.߂ +B; 𐹣.\u07C2; [B1]; [B1] # 𐹣.߂ +B; xn--bo0d.xn--dsb; [B1]; [B1] # 𐹣.߂ +B; -\u07E1。Ↄ; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ߡ.Ↄ +B; -\u07E1。Ↄ; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ߡ.Ↄ +B; -\u07E1。ↄ; [B1 V3]; [B1 V3] # -ߡ.ↄ +B; xn----8cd.xn--r5g; [B1 V3]; [B1 V3] # -ߡ.ↄ +B; xn----8cd.xn--q5g; [B1 V3 V6]; [B1 V3 V6] # -ߡ.Ↄ +B; -\u07E1。ↄ; [B1 V3]; [B1 V3] # -ߡ.ↄ +T; \u200D-︒󠄄。ß哑\u200C𐵿; [B1 B5 B6 C1 C2 P1 V6]; [B1 B5 B6 P1 V3 V6] # -︒.ß哑 +N; \u200D-︒󠄄。ß哑\u200C𐵿; [B1 B5 B6 C1 C2 P1 V6]; [B1 B5 B6 C1 C2 P1 V6] # -︒.ß哑 +T; \u200D-。󠄄。ß哑\u200C𐵿; [B1 B5 B6 C1 C2 P1 V3 V6 A4_2]; [B1 B5 B6 P1 V3 V6 A4_2] # -..ß哑 +N; \u200D-。󠄄。ß哑\u200C𐵿; [B1 B5 B6 C1 C2 P1 V3 V6 A4_2]; [B1 B5 B6 C1 C2 P1 V3 V6 A4_2] # -..ß哑 +T; \u200D-。󠄄。SS哑\u200C𐵿; [B1 B5 B6 C1 C2 P1 V3 V6 A4_2]; [B1 B5 B6 P1 V3 V6 A4_2] # -..ss哑 +N; \u200D-。󠄄。SS哑\u200C𐵿; [B1 B5 B6 C1 C2 P1 V3 V6 A4_2]; [B1 B5 B6 C1 C2 P1 V3 V6 A4_2] # -..ss哑 +T; \u200D-。󠄄。ss哑\u200C𐵿; [B1 B5 B6 C1 C2 P1 V3 V6 A4_2]; [B1 B5 B6 P1 V3 V6 A4_2] # -..ss哑 +N; \u200D-。󠄄。ss哑\u200C𐵿; [B1 B5 B6 C1 C2 P1 V3 V6 A4_2]; [B1 B5 B6 C1 C2 P1 V3 V6 A4_2] # -..ss哑 +T; \u200D-。󠄄。Ss哑\u200C𐵿; [B1 B5 B6 C1 C2 P1 V3 V6 A4_2]; [B1 B5 B6 P1 V3 V6 A4_2] # -..ss哑 +N; \u200D-。󠄄。Ss哑\u200C𐵿; [B1 B5 B6 C1 C2 P1 V3 V6 A4_2]; [B1 B5 B6 C1 C2 P1 V3 V6 A4_2] # -..ss哑 +B; -..xn--ss-h46c5711e; [B1 B5 B6 V3 V6 A4_2]; [B1 B5 B6 V3 V6 A4_2] +B; xn----tgn..xn--ss-k1ts75zb8ym; [B1 B5 B6 C1 C2 V3 V6 A4_2]; [B1 B5 B6 C1 C2 V3 V6 A4_2] # -..ss哑 +B; xn----tgn..xn--zca670n5f0binyk; [B1 B5 B6 C1 C2 V3 V6 A4_2]; [B1 B5 B6 C1 C2 V3 V6 A4_2] # -..ß哑 +T; \u200D-︒󠄄。SS哑\u200C𐵿; [B1 B5 B6 C1 C2 P1 V6]; [B1 B5 B6 P1 V3 V6] # -︒.ss哑 +N; \u200D-︒󠄄。SS哑\u200C𐵿; [B1 B5 B6 C1 C2 P1 V6]; [B1 B5 B6 C1 C2 P1 V6] # -︒.ss哑 +T; \u200D-︒󠄄。ss哑\u200C𐵿; [B1 B5 B6 C1 C2 P1 V6]; [B1 B5 B6 P1 V3 V6] # -︒.ss哑 +N; \u200D-︒󠄄。ss哑\u200C𐵿; [B1 B5 B6 C1 C2 P1 V6]; [B1 B5 B6 C1 C2 P1 V6] # -︒.ss哑 +T; \u200D-︒󠄄。Ss哑\u200C𐵿; [B1 B5 B6 C1 C2 P1 V6]; [B1 B5 B6 P1 V3 V6] # -︒.ss哑 +N; \u200D-︒󠄄。Ss哑\u200C𐵿; [B1 B5 B6 C1 C2 P1 V6]; [B1 B5 B6 C1 C2 P1 V6] # -︒.ss哑 +B; xn----o89h.xn--ss-h46c5711e; [B1 B5 B6 V3 V6]; [B1 B5 B6 V3 V6] +B; xn----tgnt341h.xn--ss-k1ts75zb8ym; [B1 B5 B6 C1 C2 V6]; [B1 B5 B6 C1 C2 V6] # -︒.ss哑 +B; xn----tgnt341h.xn--zca670n5f0binyk; [B1 B5 B6 C1 C2 V6]; [B1 B5 B6 C1 C2 V6] # -︒.ß哑 +B; ︒.\uFE2F𑑂; [P1 V5 V6]; [P1 V5 V6] # ︒.𑑂︯ +B; ︒.𑑂\uFE2F; [P1 V5 V6]; [P1 V5 V6] # ︒.𑑂︯ +B; 。.𑑂\uFE2F; [V5 A4_2]; [V5 A4_2] # ..𑑂︯ +B; ..xn--s96cu30b; [V5 A4_2]; [V5 A4_2] # ..𑑂︯ +B; xn--y86c.xn--s96cu30b; [V5 V6]; [V5 V6] # ︒.𑑂︯ +T; \uA92C。\u200D; [C2 V5]; [V5] # ꤬. +N; \uA92C。\u200D; [C2 V5]; [C2 V5] # ꤬. +B; xn--zi9a.; [V5]; [V5] # ꤬. +B; xn--zi9a.xn--1ug; [C2 V5]; [C2 V5] # ꤬. 
+T; \u200D󠸡。\uFCD7; [B1 C2 P1 V6]; [B1 P1 V6] # .هج +N; \u200D󠸡。\uFCD7; [B1 C2 P1 V6]; [B1 C2 P1 V6] # .هج +T; \u200D󠸡。\u0647\u062C; [B1 C2 P1 V6]; [B1 P1 V6] # .هج +N; \u200D󠸡。\u0647\u062C; [B1 C2 P1 V6]; [B1 C2 P1 V6] # .هج +B; xn--d356e.xn--rgb7c; [B1 V6]; [B1 V6] # .هج +B; xn--1ug80651l.xn--rgb7c; [B1 C2 V6]; [B1 C2 V6] # .هج +T; -Ⴄ𝟢\u0663.𑍴ς; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # -Ⴄ0٣.𑍴ς +N; -Ⴄ𝟢\u0663.𑍴ς; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # -Ⴄ0٣.𑍴ς +T; -Ⴄ0\u0663.𑍴ς; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # -Ⴄ0٣.𑍴ς +N; -Ⴄ0\u0663.𑍴ς; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # -Ⴄ0٣.𑍴ς +T; -ⴄ0\u0663.𑍴ς; [B1 V3 V5]; [B1 V3 V5] # -ⴄ0٣.𑍴ς +N; -ⴄ0\u0663.𑍴ς; [B1 V3 V5]; [B1 V3 V5] # -ⴄ0٣.𑍴ς +B; -Ⴄ0\u0663.𑍴Σ; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # -Ⴄ0٣.𑍴σ +B; -ⴄ0\u0663.𑍴σ; [B1 V3 V5]; [B1 V3 V5] # -ⴄ0٣.𑍴σ +B; xn---0-iyd8660b.xn--4xa9120l; [B1 V3 V5]; [B1 V3 V5] # -ⴄ0٣.𑍴σ +B; xn---0-iyd216h.xn--4xa9120l; [B1 V3 V5 V6]; [B1 V3 V5 V6] # -Ⴄ0٣.𑍴σ +B; xn---0-iyd8660b.xn--3xa1220l; [B1 V3 V5]; [B1 V3 V5] # -ⴄ0٣.𑍴ς +B; xn---0-iyd216h.xn--3xa1220l; [B1 V3 V5 V6]; [B1 V3 V5 V6] # -Ⴄ0٣.𑍴ς +T; -ⴄ𝟢\u0663.𑍴ς; [B1 V3 V5]; [B1 V3 V5] # -ⴄ0٣.𑍴ς +N; -ⴄ𝟢\u0663.𑍴ς; [B1 V3 V5]; [B1 V3 V5] # -ⴄ0٣.𑍴ς +B; -Ⴄ𝟢\u0663.𑍴Σ; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # -Ⴄ0٣.𑍴σ +B; -ⴄ𝟢\u0663.𑍴σ; [B1 V3 V5]; [B1 V3 V5] # -ⴄ0٣.𑍴σ +B; 󦈄。-; [P1 V3 V6]; [P1 V3 V6] +B; xn--xm38e.-; [V3 V6]; [V3 V6] +T; ⋠𐋮.򶈮\u0F18ß≯; [P1 V6]; [P1 V6] # ⋠𐋮.༘ß≯ +N; ⋠𐋮.򶈮\u0F18ß≯; [P1 V6]; [P1 V6] # ⋠𐋮.༘ß≯ +T; ≼\u0338𐋮.򶈮\u0F18ß>\u0338; [P1 V6]; [P1 V6] # ⋠𐋮.༘ß≯ +N; ≼\u0338𐋮.򶈮\u0F18ß>\u0338; [P1 V6]; [P1 V6] # ⋠𐋮.༘ß≯ +T; ⋠𐋮.򶈮\u0F18ß≯; [P1 V6]; [P1 V6] # ⋠𐋮.༘ß≯ +N; ⋠𐋮.򶈮\u0F18ß≯; [P1 V6]; [P1 V6] # ⋠𐋮.༘ß≯ +T; ≼\u0338𐋮.򶈮\u0F18ß>\u0338; [P1 V6]; [P1 V6] # ⋠𐋮.༘ß≯ +N; ≼\u0338𐋮.򶈮\u0F18ß>\u0338; [P1 V6]; [P1 V6] # ⋠𐋮.༘ß≯ +B; ≼\u0338𐋮.򶈮\u0F18SS>\u0338; [P1 V6]; [P1 V6] # ⋠𐋮.༘ss≯ +B; ⋠𐋮.򶈮\u0F18SS≯; [P1 V6]; [P1 V6] # ⋠𐋮.༘ss≯ +B; ⋠𐋮.򶈮\u0F18ss≯; [P1 V6]; [P1 V6] # ⋠𐋮.༘ss≯ +B; ≼\u0338𐋮.򶈮\u0F18ss>\u0338; [P1 V6]; [P1 V6] # ⋠𐋮.༘ss≯ +B; ≼\u0338𐋮.򶈮\u0F18Ss>\u0338; [P1 V6]; [P1 V6] # ⋠𐋮.༘ss≯ +B; ⋠𐋮.򶈮\u0F18Ss≯; [P1 V6]; [P1 V6] # ⋠𐋮.༘ss≯ +B; xn--pgh4639f.xn--ss-ifj426nle504a; [V6]; [V6] # ⋠𐋮.༘ss≯ +B; xn--pgh4639f.xn--zca593eo6oc013y; [V6]; [V6] # ⋠𐋮.༘ß≯ +B; ≼\u0338𐋮.򶈮\u0F18SS>\u0338; [P1 V6]; [P1 V6] # ⋠𐋮.༘ss≯ +B; ⋠𐋮.򶈮\u0F18SS≯; [P1 V6]; [P1 V6] # ⋠𐋮.༘ss≯ +B; ⋠𐋮.򶈮\u0F18ss≯; [P1 V6]; [P1 V6] # ⋠𐋮.༘ss≯ +B; ≼\u0338𐋮.򶈮\u0F18ss>\u0338; [P1 V6]; [P1 V6] # ⋠𐋮.༘ss≯ +B; ≼\u0338𐋮.򶈮\u0F18Ss>\u0338; [P1 V6]; [P1 V6] # ⋠𐋮.༘ss≯ +B; ⋠𐋮.򶈮\u0F18Ss≯; [P1 V6]; [P1 V6] # ⋠𐋮.༘ss≯ +B; 1𐋸\u0664。󠢮\uFBA4񷝊; [B1 P1 V6]; [B1 P1 V6] # 1𐋸٤.ۀ +B; 1𐋸\u0664。󠢮\u06C0񷝊; [B1 P1 V6]; [B1 P1 V6] # 1𐋸٤.ۀ +B; 1𐋸\u0664。󠢮\u06D5\u0654񷝊; [B1 P1 V6]; [B1 P1 V6] # 1𐋸٤.ۀ +B; xn--1-hqc3905q.xn--zkb83268gqee4a; [B1 V6]; [B1 V6] # 1𐋸٤.ۀ +T; 儭-。𐹴Ⴢ񥳠\u200C; [B1 B6 C1 P1 V3 V6]; [B1 B6 P1 V3 V6] # 儭-.𐹴Ⴢ +N; 儭-。𐹴Ⴢ񥳠\u200C; [B1 B6 C1 P1 V3 V6]; [B1 B6 C1 P1 V3 V6] # 儭-.𐹴Ⴢ +T; 儭-。𐹴Ⴢ񥳠\u200C; [B1 B6 C1 P1 V3 V6]; [B1 B6 P1 V3 V6] # 儭-.𐹴Ⴢ +N; 儭-。𐹴Ⴢ񥳠\u200C; [B1 B6 C1 P1 V3 V6]; [B1 B6 C1 P1 V3 V6] # 儭-.𐹴Ⴢ +T; 儭-。𐹴ⴢ񥳠\u200C; [B1 B6 C1 P1 V3 V6]; [B1 B6 P1 V3 V6] # 儭-.𐹴ⴢ +N; 儭-。𐹴ⴢ񥳠\u200C; [B1 B6 C1 P1 V3 V6]; [B1 B6 C1 P1 V3 V6] # 儭-.𐹴ⴢ +B; xn----gz7a.xn--qlj9223eywx0b; [B1 B6 V3 V6]; [B1 B6 V3 V6] +B; xn----gz7a.xn--0ug472cfq0pus98b; [B1 B6 C1 V3 V6]; [B1 B6 C1 V3 V6] # 儭-.𐹴ⴢ +B; xn----gz7a.xn--6nd5001kyw98a; [B1 B6 V3 V6]; [B1 B6 V3 V6] +B; xn----gz7a.xn--6nd249ejl4pusr7b; [B1 B6 C1 V3 V6]; [B1 B6 C1 V3 V6] # 儭-.𐹴Ⴢ +T; 儭-。𐹴ⴢ񥳠\u200C; [B1 B6 C1 P1 V3 V6]; [B1 B6 P1 V3 V6] # 儭-.𐹴ⴢ +N; 儭-。𐹴ⴢ񥳠\u200C; [B1 B6 C1 P1 V3 V6]; [B1 B6 C1 P1 V3 V6] # 儭-.𐹴ⴢ +B; 
𝟺𐋷\u06B9.𞤭򿍡; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] # 4𐋷ڹ.𞤭 +B; 4𐋷\u06B9.𞤭򿍡; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] # 4𐋷ڹ.𞤭 +B; 4𐋷\u06B9.𞤋򿍡; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] # 4𐋷ڹ.𞤭 +B; xn--4-cvc5384q.xn--le6hi7322b; [B1 B2 B3 V6]; [B1 B2 B3 V6] # 4𐋷ڹ.𞤭 +B; 𝟺𐋷\u06B9.𞤋򿍡; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] # 4𐋷ڹ.𞤭 +B; ≯-ꡋ𑲣.⒈𐹭; [B1 P1 V6]; [B1 P1 V6] +B; >\u0338-ꡋ𑲣.⒈𐹭; [B1 P1 V6]; [B1 P1 V6] +B; ≯-ꡋ𑲣.1.𐹭; [B1 P1 V6]; [B1 P1 V6] +B; >\u0338-ꡋ𑲣.1.𐹭; [B1 P1 V6]; [B1 P1 V6] +B; xn----ogox061d5i8d.1.xn--lo0d; [B1 V6]; [B1 V6] +B; xn----ogox061d5i8d.xn--tsh0666f; [B1 V6]; [B1 V6] +B; \u0330.󰜱蚀; [P1 V5 V6]; [P1 V5 V6] # ̰.蚀 +B; \u0330.󰜱蚀; [P1 V5 V6]; [P1 V5 V6] # ̰.蚀 +B; xn--xta.xn--e91aw9417e; [V5 V6]; [V5 V6] # ̰.蚀 +T; \uFB39Ⴘ.𞡼𑇀ß\u20D7; [B2 B3 P1 V6]; [B2 B3 P1 V6] # יּႸ.𞡼𑇀ß⃗ +N; \uFB39Ⴘ.𞡼𑇀ß\u20D7; [B2 B3 P1 V6]; [B2 B3 P1 V6] # יּႸ.𞡼𑇀ß⃗ +T; \u05D9\u05BCႸ.𞡼𑇀ß\u20D7; [B2 B3 P1 V6]; [B2 B3 P1 V6] # יּႸ.𞡼𑇀ß⃗ +N; \u05D9\u05BCႸ.𞡼𑇀ß\u20D7; [B2 B3 P1 V6]; [B2 B3 P1 V6] # יּႸ.𞡼𑇀ß⃗ +T; \u05D9\u05BCⴘ.𞡼𑇀ß\u20D7; [B2 B3]; [B2 B3] # יּⴘ.𞡼𑇀ß⃗ +N; \u05D9\u05BCⴘ.𞡼𑇀ß\u20D7; [B2 B3]; [B2 B3] # יּⴘ.𞡼𑇀ß⃗ +B; \u05D9\u05BCႸ.𞡼𑇀SS\u20D7; [B2 B3 P1 V6]; [B2 B3 P1 V6] # יּႸ.𞡼𑇀ss⃗ +B; \u05D9\u05BCⴘ.𞡼𑇀ss\u20D7; [B2 B3]; [B2 B3] # יּⴘ.𞡼𑇀ss⃗ +B; \u05D9\u05BCႸ.𞡼𑇀ss\u20D7; [B2 B3 P1 V6]; [B2 B3 P1 V6] # יּႸ.𞡼𑇀ss⃗ +B; xn--kdb1d867b.xn--ss-yju5690ken9h; [B2 B3 V6]; [B2 B3 V6] # יּႸ.𞡼𑇀ss⃗ +B; xn--kdb1d278n.xn--ss-yju5690ken9h; [B2 B3]; [B2 B3] # יּⴘ.𞡼𑇀ss⃗ +B; xn--kdb1d278n.xn--zca284nhg9nrrxg; [B2 B3]; [B2 B3] # יּⴘ.𞡼𑇀ß⃗ +B; xn--kdb1d867b.xn--zca284nhg9nrrxg; [B2 B3 V6]; [B2 B3 V6] # יּႸ.𞡼𑇀ß⃗ +T; \uFB39ⴘ.𞡼𑇀ß\u20D7; [B2 B3]; [B2 B3] # יּⴘ.𞡼𑇀ß⃗ +N; \uFB39ⴘ.𞡼𑇀ß\u20D7; [B2 B3]; [B2 B3] # יּⴘ.𞡼𑇀ß⃗ +B; \uFB39Ⴘ.𞡼𑇀SS\u20D7; [B2 B3 P1 V6]; [B2 B3 P1 V6] # יּႸ.𞡼𑇀ss⃗ +B; \uFB39ⴘ.𞡼𑇀ss\u20D7; [B2 B3]; [B2 B3] # יּⴘ.𞡼𑇀ss⃗ +B; \uFB39Ⴘ.𞡼𑇀ss\u20D7; [B2 B3 P1 V6]; [B2 B3 P1 V6] # יּႸ.𞡼𑇀ss⃗ +B; \u1BA3𐹰򁱓。凬; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ᮣ𐹰.凬 +B; \u1BA3𐹰򁱓。凬; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ᮣ𐹰.凬 +B; xn--rxfz314ilg20c.xn--t9q; [B1 V5 V6]; [B1 V5 V6] # ᮣ𐹰.凬 +T; 🢟🄈\u200Dꡎ。\u0F84; [C2 P1 V5 V6]; [P1 V5 V6] # 🢟🄈ꡎ.྄ +N; 🢟🄈\u200Dꡎ。\u0F84; [C2 P1 V5 V6]; [C2 P1 V5 V6] # 🢟🄈ꡎ.྄ +T; 🢟7,\u200Dꡎ。\u0F84; [C2 P1 V5 V6]; [P1 V5 V6] # 🢟7,ꡎ.྄ +N; 🢟7,\u200Dꡎ。\u0F84; [C2 P1 V5 V6]; [C2 P1 V5 V6] # 🢟7,ꡎ.྄ +B; xn--7,-gh9hg322i.xn--3ed; [P1 V5 V6]; [P1 V5 V6] # 🢟7,ꡎ.྄ +B; xn--7,-n1t0654eqo3o.xn--3ed; [C2 P1 V5 V6]; [C2 P1 V5 V6] # 🢟7,ꡎ.྄ +B; xn--nc9aq743ds0e.xn--3ed; [V5 V6]; [V5 V6] # 🢟🄈ꡎ.྄ +B; xn--1ug4874cfd0kbmg.xn--3ed; [C2 V5 V6]; [C2 V5 V6] # 🢟🄈ꡎ.྄ +B; ꡔ。\u1039ᢇ; [V5]; [V5] # ꡔ.္ᢇ +B; xn--tc9a.xn--9jd663b; [V5]; [V5] # ꡔ.္ᢇ +B; \u20EB≮.𝨖; [P1 V5 V6]; [P1 V5 V6] # ⃫≮.𝨖 +B; \u20EB<\u0338.𝨖; [P1 V5 V6]; [P1 V5 V6] # ⃫≮.𝨖 +B; xn--e1g71d.xn--772h; [V5 V6]; [V5 V6] # ⃫≮.𝨖 +B; Ⴢ≯褦.ᠪ\u07EAႾ\u0767; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴢ≯褦.ᠪߪႾݧ +B; Ⴢ>\u0338褦.ᠪ\u07EAႾ\u0767; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴢ≯褦.ᠪߪႾݧ +B; Ⴢ≯褦.ᠪ\u07EAႾ\u0767; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴢ≯褦.ᠪߪႾݧ +B; Ⴢ>\u0338褦.ᠪ\u07EAႾ\u0767; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴢ≯褦.ᠪߪႾݧ +B; ⴢ>\u0338褦.ᠪ\u07EAⴞ\u0767; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ⴢ≯褦.ᠪߪⴞݧ +B; ⴢ≯褦.ᠪ\u07EAⴞ\u0767; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ⴢ≯褦.ᠪߪⴞݧ +B; xn--hdh433bev8e.xn--rpb5x392bcyt; [B5 B6 V6]; [B5 B6 V6] # ⴢ≯褦.ᠪߪⴞݧ +B; xn--6nd461g478e.xn--rpb5x49td2h; [B5 B6 V6]; [B5 B6 V6] # Ⴢ≯褦.ᠪߪႾݧ +B; ⴢ>\u0338褦.ᠪ\u07EAⴞ\u0767; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ⴢ≯褦.ᠪߪⴞݧ +B; ⴢ≯褦.ᠪ\u07EAⴞ\u0767; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ⴢ≯褦.ᠪߪⴞݧ +T; 򊉆󠆒\u200C\uA953。𞤙\u067Bꡘ; [B2 B3 C1 P1 V6]; [B2 B3 P1 V6] # ꥓.𞤻ٻꡘ +N; 򊉆󠆒\u200C\uA953。𞤙\u067Bꡘ; [B2 B3 C1 P1 V6]; [B2 B3 C1 P1 V6] # ꥓.𞤻ٻꡘ +T; 
򊉆󠆒\u200C\uA953。𞤻\u067Bꡘ; [B2 B3 C1 P1 V6]; [B2 B3 P1 V6] # ꥓.𞤻ٻꡘ +N; 򊉆󠆒\u200C\uA953。𞤻\u067Bꡘ; [B2 B3 C1 P1 V6]; [B2 B3 C1 P1 V6] # ꥓.𞤻ٻꡘ +B; xn--3j9al6189a.xn--0ib8893fegvj; [B2 B3 V6]; [B2 B3 V6] # ꥓.𞤻ٻꡘ +B; xn--0ug8815chtz0e.xn--0ib8893fegvj; [B2 B3 C1 V6]; [B2 B3 C1 V6] # ꥓.𞤻ٻꡘ +T; \u200C.≯; [C1 P1 V6]; [P1 V6 A4_2] # .≯ +N; \u200C.≯; [C1 P1 V6]; [C1 P1 V6] # .≯ +T; \u200C.>\u0338; [C1 P1 V6]; [P1 V6 A4_2] # .≯ +N; \u200C.>\u0338; [C1 P1 V6]; [C1 P1 V6] # .≯ +B; .xn--hdh; [V6 A4_2]; [V6 A4_2] +B; xn--0ug.xn--hdh; [C1 V6]; [C1 V6] # .≯ +B; 𰅧񣩠-.\uABED-悜; [P1 V3 V5 V6]; [P1 V3 V5 V6] # -.꯭-悜 +B; 𰅧񣩠-.\uABED-悜; [P1 V3 V5 V6]; [P1 V3 V5 V6] # -.꯭-悜 +B; xn----7m53aj640l.xn----8f4br83t; [V3 V5 V6]; [V3 V5 V6] # -.꯭-悜 +T; ᡉ𶓧⬞ᢜ.-\u200D𞣑\u202E; [C2 P1 V3 V6]; [P1 V3 V6] # ᡉ⬞ᢜ.-𞣑 +N; ᡉ𶓧⬞ᢜ.-\u200D𞣑\u202E; [C2 P1 V3 V6]; [C2 P1 V3 V6] # ᡉ⬞ᢜ.-𞣑 +B; xn--87e0ol04cdl39e.xn----qinu247r; [V3 V6]; [V3 V6] # ᡉ⬞ᢜ.-𞣑 +B; xn--87e0ol04cdl39e.xn----ugn5e3763s; [C2 V3 V6]; [C2 V3 V6] # ᡉ⬞ᢜ.-𞣑 +T; ⒐\u200C衃Ⴝ.\u0682Ⴔ; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 P1 V6] # ⒐衃Ⴝ.ڂႴ +N; ⒐\u200C衃Ⴝ.\u0682Ⴔ; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # ⒐衃Ⴝ.ڂႴ +T; 9.\u200C衃Ⴝ.\u0682Ⴔ; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 P1 V6] # 9.衃Ⴝ.ڂႴ +N; 9.\u200C衃Ⴝ.\u0682Ⴔ; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # 9.衃Ⴝ.ڂႴ +T; 9.\u200C衃ⴝ.\u0682ⴔ; [B1 B2 B3 C1]; [B1 B2 B3] # 9.衃ⴝ.ڂⴔ +N; 9.\u200C衃ⴝ.\u0682ⴔ; [B1 B2 B3 C1]; [B1 B2 B3 C1] # 9.衃ⴝ.ڂⴔ +T; 9.\u200C衃Ⴝ.\u0682ⴔ; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 P1 V6] # 9.衃Ⴝ.ڂⴔ +N; 9.\u200C衃Ⴝ.\u0682ⴔ; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # 9.衃Ⴝ.ڂⴔ +B; 9.xn--1nd9032d.xn--7ib268q; [B1 B2 B3 V6]; [B1 B2 B3 V6] # 9.衃Ⴝ.ڂⴔ +B; 9.xn--1nd159e1y2f.xn--7ib268q; [B1 B2 B3 C1 V6]; [B1 B2 B3 C1 V6] # 9.衃Ⴝ.ڂⴔ +B; 9.xn--llj1920a.xn--7ib268q; [B1 B2 B3]; [B1 B2 B3] # 9.衃ⴝ.ڂⴔ +B; 9.xn--0ug862cbm5e.xn--7ib268q; [B1 B2 B3 C1]; [B1 B2 B3 C1] # 9.衃ⴝ.ڂⴔ +B; 9.xn--1nd9032d.xn--7ib433c; [B1 B2 B3 V6]; [B1 B2 B3 V6] # 9.衃Ⴝ.ڂႴ +B; 9.xn--1nd159e1y2f.xn--7ib433c; [B1 B2 B3 C1 V6]; [B1 B2 B3 C1 V6] # 9.衃Ⴝ.ڂႴ +T; ⒐\u200C衃ⴝ.\u0682ⴔ; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 P1 V6] # ⒐衃ⴝ.ڂⴔ +N; ⒐\u200C衃ⴝ.\u0682ⴔ; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # ⒐衃ⴝ.ڂⴔ +T; ⒐\u200C衃Ⴝ.\u0682ⴔ; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 P1 V6] # ⒐衃Ⴝ.ڂⴔ +N; ⒐\u200C衃Ⴝ.\u0682ⴔ; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # ⒐衃Ⴝ.ڂⴔ +B; xn--1nd362hy16e.xn--7ib268q; [B1 B2 B3 V6]; [B1 B2 B3 V6] # ⒐衃Ⴝ.ڂⴔ +B; xn--1nd159ecmd785k.xn--7ib268q; [B1 B2 B3 C1 V6]; [B1 B2 B3 C1 V6] # ⒐衃Ⴝ.ڂⴔ +B; xn--1shy52abz3f.xn--7ib268q; [B1 B2 B3 V6]; [B1 B2 B3 V6] # ⒐衃ⴝ.ڂⴔ +B; xn--0ugx0px1izu2h.xn--7ib268q; [B1 B2 B3 C1 V6]; [B1 B2 B3 C1 V6] # ⒐衃ⴝ.ڂⴔ +B; xn--1nd362hy16e.xn--7ib433c; [B1 B2 B3 V6]; [B1 B2 B3 V6] # ⒐衃Ⴝ.ڂႴ +B; xn--1nd159ecmd785k.xn--7ib433c; [B1 B2 B3 C1 V6]; [B1 B2 B3 C1 V6] # ⒐衃Ⴝ.ڂႴ +T; \u07E1\u200C。--⸬; [B1 B3 C1 V3]; [B1 V3] # ߡ.--⸬ +N; \u07E1\u200C。--⸬; [B1 B3 C1 V3]; [B1 B3 C1 V3] # ߡ.--⸬ +B; xn--8sb.xn-----iw2a; [B1 V3]; [B1 V3] # ߡ.--⸬ +B; xn--8sb884j.xn-----iw2a; [B1 B3 C1 V3]; [B1 B3 C1 V3] # ߡ.--⸬ +B; 𞥓.\u0718; 𞥓.\u0718; xn--of6h.xn--inb # 𞥓.ܘ +B; 𞥓.\u0718; ; xn--of6h.xn--inb # 𞥓.ܘ +B; xn--of6h.xn--inb; 𞥓.\u0718; xn--of6h.xn--inb # 𞥓.ܘ +B; 󠄽-.-\u0DCA; [V3]; [V3] # -.-් +B; 󠄽-.-\u0DCA; [V3]; [V3] # -.-් +B; -.xn----ptf; [V3]; [V3] # -.-් +B; 󠇝\u075B-.\u1927; [B1 B3 B6 V3 V5]; [B1 B3 B6 V3 V5] # ݛ-.ᤧ +B; xn----k4c.xn--lff; [B1 B3 B6 V3 V5]; [B1 B3 B6 V3 V5] # ݛ-.ᤧ +B; 𞤴󠆹⦉𐹺.\uA806⒌󘤸; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𞤴⦉𐹺.꠆⒌ +B; 𞤴󠆹⦉𐹺.\uA8065.󘤸; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𞤴⦉𐹺.꠆5. +B; 𞤒󠆹⦉𐹺.\uA8065.󘤸; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𞤴⦉𐹺.꠆5. 
+B; xn--fuix729epewf.xn--5-w93e.xn--7b83e; [B1 V5 V6]; [B1 V5 V6] # 𞤴⦉𐹺.꠆5. +B; 𞤒󠆹⦉𐹺.\uA806⒌󘤸; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𞤴⦉𐹺.꠆⒌ +B; xn--fuix729epewf.xn--xsh5029b6e77i; [B1 V5 V6]; [B1 V5 V6] # 𞤴⦉𐹺.꠆⒌ +T; 󠄸₀。𑖿\u200C𐦂\u200D; [B1 C2 V5]; [B1 V5] # 0.𑖿𐦂 +N; 󠄸₀。𑖿\u200C𐦂\u200D; [B1 C2 V5]; [B1 C2 V5] # 0.𑖿𐦂 +T; 󠄸0。𑖿\u200C𐦂\u200D; [B1 C2 V5]; [B1 V5] # 0.𑖿𐦂 +N; 󠄸0。𑖿\u200C𐦂\u200D; [B1 C2 V5]; [B1 C2 V5] # 0.𑖿𐦂 +B; 0.xn--mn9cz2s; [B1 V5]; [B1 V5] +B; 0.xn--0ugc8040p9hk; [B1 C2 V5]; [B1 C2 V5] # 0.𑖿𐦂 +B; Ⴚ𐋸󠄄。𝟝ퟶ\u103A; [P1 V6]; [P1 V6] # Ⴚ𐋸.5ퟶ် +B; Ⴚ𐋸󠄄。5ퟶ\u103A; [P1 V6]; [P1 V6] # Ⴚ𐋸.5ퟶ် +B; ⴚ𐋸󠄄。5ퟶ\u103A; ⴚ𐋸.5ퟶ\u103A; xn--ilj2659d.xn--5-dug9054m; NV8 # ⴚ𐋸.5ퟶ် +B; xn--ilj2659d.xn--5-dug9054m; ⴚ𐋸.5ퟶ\u103A; xn--ilj2659d.xn--5-dug9054m; NV8 # ⴚ𐋸.5ퟶ် +B; ⴚ𐋸.5ퟶ\u103A; ; xn--ilj2659d.xn--5-dug9054m; NV8 # ⴚ𐋸.5ퟶ် +B; Ⴚ𐋸.5ퟶ\u103A; [P1 V6]; [P1 V6] # Ⴚ𐋸.5ퟶ် +B; xn--ynd2415j.xn--5-dug9054m; [V6]; [V6] # Ⴚ𐋸.5ퟶ် +B; ⴚ𐋸󠄄。𝟝ퟶ\u103A; ⴚ𐋸.5ퟶ\u103A; xn--ilj2659d.xn--5-dug9054m; NV8 # ⴚ𐋸.5ퟶ် +T; \u200D-ᠹ﹪.\u1DE1\u1922; [C2 P1 V5 V6]; [P1 V3 V5 V6] # -ᠹ﹪.ᷡᤢ +N; \u200D-ᠹ﹪.\u1DE1\u1922; [C2 P1 V5 V6]; [C2 P1 V5 V6] # -ᠹ﹪.ᷡᤢ +T; \u200D-ᠹ%.\u1DE1\u1922; [C2 P1 V5 V6]; [P1 V3 V5 V6] # -ᠹ%.ᷡᤢ +N; \u200D-ᠹ%.\u1DE1\u1922; [C2 P1 V5 V6]; [C2 P1 V5 V6] # -ᠹ%.ᷡᤢ +B; xn---%-u4o.xn--gff52t; [P1 V3 V5 V6]; [P1 V3 V5 V6] # -ᠹ%.ᷡᤢ +B; xn---%-u4oy48b.xn--gff52t; [C2 P1 V5 V6]; [C2 P1 V5 V6] # -ᠹ%.ᷡᤢ +B; xn----c6jx047j.xn--gff52t; [V3 V5 V6]; [V3 V5 V6] # -ᠹ﹪.ᷡᤢ +B; xn----c6j614b1z4v.xn--gff52t; [C2 V5 V6]; [C2 V5 V6] # -ᠹ﹪.ᷡᤢ +B; ≠.ᠿ; [P1 V6]; [P1 V6] +B; =\u0338.ᠿ; [P1 V6]; [P1 V6] +B; xn--1ch.xn--y7e; [V6]; [V6] +B; \u0723\u05A3。㌪; \u0723\u05A3.ハイツ; xn--ucb18e.xn--eck4c5a # ܣ֣.ハイツ +B; \u0723\u05A3。ハイツ; \u0723\u05A3.ハイツ; xn--ucb18e.xn--eck4c5a # ܣ֣.ハイツ +B; xn--ucb18e.xn--eck4c5a; \u0723\u05A3.ハイツ; xn--ucb18e.xn--eck4c5a # ܣ֣.ハイツ +B; \u0723\u05A3.ハイツ; ; xn--ucb18e.xn--eck4c5a # ܣ֣.ハイツ +B; 𞷥󠆀≮.\u2D7F-; [B1 B3 P1 V3 V5 V6]; [B1 B3 P1 V3 V5 V6] # ≮.⵿- +B; 𞷥󠆀<\u0338.\u2D7F-; [B1 B3 P1 V3 V5 V6]; [B1 B3 P1 V3 V5 V6] # ≮.⵿- +B; xn--gdhx802p.xn----i2s; [B1 B3 V3 V5 V6]; [B1 B3 V3 V5 V6] # ≮.⵿- +B; ₆榎򦖎\u0D4D。𞤅\u06ED\uFC5A󠮨; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 6榎്.𞤧ۭيي +B; 6榎򦖎\u0D4D。𞤅\u06ED\u064A\u064A󠮨; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 6榎്.𞤧ۭيي +B; 6榎򦖎\u0D4D。𞤧\u06ED\u064A\u064A󠮨; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 6榎്.𞤧ۭيي +B; xn--6-kmf4691ejv41j.xn--mhba10ch545mn8v8h; [B1 B3 V6]; [B1 B3 V6] # 6榎്.𞤧ۭيي +B; ₆榎򦖎\u0D4D。𞤧\u06ED\uFC5A󠮨; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 6榎്.𞤧ۭيي +B; 𣩫.򌑲; [P1 V6]; [P1 V6] +B; 𣩫.򌑲; [P1 V6]; [P1 V6] +B; xn--td3j.xn--4628b; [V6]; [V6] +T; \u200D︒。\u06B9\u200C; [B1 B3 C1 C2 P1 V6]; [B1 P1 V6] # ︒.ڹ +N; \u200D︒。\u06B9\u200C; [B1 B3 C1 C2 P1 V6]; [B1 B3 C1 C2 P1 V6] # ︒.ڹ +B; xn--y86c.xn--skb; [B1 V6]; [B1 V6] # ︒.ڹ +B; xn--1ug2658f.xn--skb080k; [B1 B3 C1 C2 V6]; [B1 B3 C1 C2 V6] # ︒.ڹ +B; xn--skb; \u06B9; xn--skb # ڹ +B; \u06B9; ; xn--skb # ڹ +T; 𐹦\u200C𐹶。\u206D; [B1 C1 P1 V6]; [B1 P1 V6] # 𐹦𐹶. +N; 𐹦\u200C𐹶。\u206D; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹦𐹶. +B; xn--eo0d6a.xn--sxg; [B1 V6]; [B1 V6] # 𐹦𐹶. +B; xn--0ug4994goba.xn--sxg; [B1 C1 V6]; [B1 C1 V6] # 𐹦𐹶. +B; \u0C4D𝨾\u05A9𝟭。-𑜨; [V3 V5]; [V3 V5] # ్𝨾֩1.-𑜨 +B; \u0C4D𝨾\u05A91。-𑜨; [V3 V5]; [V3 V5] # ్𝨾֩1.-𑜨 +B; xn--1-rfc312cdp45c.xn----nq0j; [V3 V5]; [V3 V5] # ్𝨾֩1.-𑜨 +B; 򣿈。뙏; [P1 V6]; [P1 V6] +B; 򣿈。뙏; [P1 V6]; [P1 V6] +B; xn--ph26c.xn--281b; [V6]; [V6] +B; 񕨚󠄌󑽀ᡀ.\u08B6; [P1 V6]; [P1 V6] # ᡀ.ࢶ +B; xn--z7e98100evc01b.xn--czb; [V6]; [V6] # ᡀ.ࢶ +T; \u200D。񅁛; [C2 P1 V6]; [P1 V6 A4_2] # . +N; \u200D。񅁛; [C2 P1 V6]; [C2 P1 V6] # . +T; \u200D。񅁛; [C2 P1 V6]; [P1 V6 A4_2] # . 
+N; \u200D。񅁛; [C2 P1 V6]; [C2 P1 V6] # . +B; .xn--6x4u; [V6 A4_2]; [V6 A4_2] +B; xn--1ug.xn--6x4u; [C2 V6]; [C2 V6] # . +B; \u084B皥.-; [B1 B2 B3 V3]; [B1 B2 B3 V3] # ࡋ皥.- +B; \u084B皥.-; [B1 B2 B3 V3]; [B1 B2 B3 V3] # ࡋ皥.- +B; xn--9vb4167c.-; [B1 B2 B3 V3]; [B1 B2 B3 V3] # ࡋ皥.- +B; 𐣸\u0315𐮇.⒈ꡦ; [B1 P1 V6]; [B1 P1 V6] # ̕𐮇.⒈ꡦ +B; 𐣸\u0315𐮇.1.ꡦ; [B1 P1 V6]; [B1 P1 V6] # ̕𐮇.1.ꡦ +B; xn--5sa9915kgvb.1.xn--cd9a; [B1 V6]; [B1 V6] # ̕𐮇.1.ꡦ +B; xn--5sa9915kgvb.xn--tshw539b; [B1 V6]; [B1 V6] # ̕𐮇.⒈ꡦ +T; Ⴛ\u200C\u05A2\u200D。\uFFA0ā𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # Ⴛ֢.ā𐹦 +N; Ⴛ\u200C\u05A2\u200D。\uFFA0ā𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 C1 C2 P1 V6] # Ⴛ֢.ā𐹦 +T; Ⴛ\u200C\u05A2\u200D。\uFFA0a\u0304𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # Ⴛ֢.ā𐹦 +N; Ⴛ\u200C\u05A2\u200D。\uFFA0a\u0304𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 C1 C2 P1 V6] # Ⴛ֢.ā𐹦 +T; Ⴛ\u200C\u05A2\u200D。\u1160ā𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # Ⴛ֢.ā𐹦 +N; Ⴛ\u200C\u05A2\u200D。\u1160ā𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 C1 C2 P1 V6] # Ⴛ֢.ā𐹦 +T; Ⴛ\u200C\u05A2\u200D。\u1160a\u0304𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # Ⴛ֢.ā𐹦 +N; Ⴛ\u200C\u05A2\u200D。\u1160a\u0304𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 C1 C2 P1 V6] # Ⴛ֢.ā𐹦 +T; ⴛ\u200C\u05A2\u200D。\u1160a\u0304𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # ⴛ֢.ā𐹦 +N; ⴛ\u200C\u05A2\u200D。\u1160a\u0304𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 C1 C2 P1 V6] # ⴛ֢.ā𐹦 +T; ⴛ\u200C\u05A2\u200D。\u1160ā𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # ⴛ֢.ā𐹦 +N; ⴛ\u200C\u05A2\u200D。\u1160ā𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 C1 C2 P1 V6] # ⴛ֢.ā𐹦 +T; Ⴛ\u200C\u05A2\u200D。\u1160Ā𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # Ⴛ֢.ā𐹦 +N; Ⴛ\u200C\u05A2\u200D。\u1160Ā𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 C1 C2 P1 V6] # Ⴛ֢.ā𐹦 +T; Ⴛ\u200C\u05A2\u200D。\u1160A\u0304𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # Ⴛ֢.ā𐹦 +N; Ⴛ\u200C\u05A2\u200D。\u1160A\u0304𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 C1 C2 P1 V6] # Ⴛ֢.ā𐹦 +B; xn--tcb597c.xn--yda594fdn5q; [B5 B6 V6]; [B5 B6 V6] # Ⴛ֢.ā𐹦 +B; xn--tcb597cdmmfa.xn--yda594fdn5q; [B5 B6 C1 C2 V6]; [B5 B6 C1 C2 V6] # Ⴛ֢.ā𐹦 +B; xn--tcb323r.xn--yda594fdn5q; [B5 B6 V6]; [B5 B6 V6] # ⴛ֢.ā𐹦 +B; xn--tcb736kea974k.xn--yda594fdn5q; [B5 B6 C1 C2 V6]; [B5 B6 C1 C2 V6] # ⴛ֢.ā𐹦 +T; ⴛ\u200C\u05A2\u200D。\uFFA0a\u0304𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # ⴛ֢.ā𐹦 +N; ⴛ\u200C\u05A2\u200D。\uFFA0a\u0304𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 C1 C2 P1 V6] # ⴛ֢.ā𐹦 +T; ⴛ\u200C\u05A2\u200D。\uFFA0ā𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # ⴛ֢.ā𐹦 +N; ⴛ\u200C\u05A2\u200D。\uFFA0ā𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 C1 C2 P1 V6] # ⴛ֢.ā𐹦 +T; Ⴛ\u200C\u05A2\u200D。\uFFA0Ā𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # Ⴛ֢.ā𐹦 +N; Ⴛ\u200C\u05A2\u200D。\uFFA0Ā𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 C1 C2 P1 V6] # Ⴛ֢.ā𐹦 +T; Ⴛ\u200C\u05A2\u200D。\uFFA0A\u0304𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # Ⴛ֢.ā𐹦 +N; Ⴛ\u200C\u05A2\u200D。\uFFA0A\u0304𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 C1 C2 P1 V6] # Ⴛ֢.ā𐹦 +B; xn--tcb597c.xn--yda9741khjj; [B5 B6 V6]; [B5 B6 V6] # Ⴛ֢.ā𐹦 +B; xn--tcb597cdmmfa.xn--yda9741khjj; [B5 B6 C1 C2 V6]; [B5 B6 C1 C2 V6] # Ⴛ֢.ā𐹦 +B; xn--tcb323r.xn--yda9741khjj; [B5 B6 V6]; [B5 B6 V6] # ⴛ֢.ā𐹦 +B; xn--tcb736kea974k.xn--yda9741khjj; [B5 B6 C1 C2 V6]; [B5 B6 C1 C2 V6] # ⴛ֢.ā𐹦 +T; \uFFF9\u200C。曳⾑𐋰≯; [C1 P1 V6]; [P1 V6] # .曳襾𐋰≯ +N; \uFFF9\u200C。曳⾑𐋰≯; [C1 P1 V6]; [C1 P1 V6] # .曳襾𐋰≯ +T; \uFFF9\u200C。曳⾑𐋰>\u0338; [C1 P1 V6]; [P1 V6] # .曳襾𐋰≯ +N; \uFFF9\u200C。曳⾑𐋰>\u0338; [C1 P1 V6]; [C1 P1 V6] # .曳襾𐋰≯ +T; \uFFF9\u200C。曳襾𐋰≯; [C1 P1 V6]; [P1 V6] # .曳襾𐋰≯ +N; \uFFF9\u200C。曳襾𐋰≯; [C1 P1 V6]; [C1 P1 V6] # .曳襾𐋰≯ +T; \uFFF9\u200C。曳襾𐋰>\u0338; [C1 P1 V6]; [P1 V6] # .曳襾𐋰≯ +N; \uFFF9\u200C。曳襾𐋰>\u0338; [C1 P1 V6]; [C1 P1 V6] # .曳襾𐋰≯ +B; xn--vn7c.xn--hdh501y8wvfs5h; [V6]; [V6] 
# .曳襾𐋰≯ +B; xn--0ug2139f.xn--hdh501y8wvfs5h; [C1 V6]; [C1 V6] # .曳襾𐋰≯ +T; ≯⒈。ß; [P1 V6]; [P1 V6] +N; ≯⒈。ß; [P1 V6]; [P1 V6] +T; >\u0338⒈。ß; [P1 V6]; [P1 V6] +N; >\u0338⒈。ß; [P1 V6]; [P1 V6] +T; ≯1.。ß; [P1 V6 A4_2]; [P1 V6 A4_2] +N; ≯1.。ß; [P1 V6 A4_2]; [P1 V6 A4_2] +T; >\u03381.。ß; [P1 V6 A4_2]; [P1 V6 A4_2] +N; >\u03381.。ß; [P1 V6 A4_2]; [P1 V6 A4_2] +B; >\u03381.。SS; [P1 V6 A4_2]; [P1 V6 A4_2] +B; ≯1.。SS; [P1 V6 A4_2]; [P1 V6 A4_2] +B; ≯1.。ss; [P1 V6 A4_2]; [P1 V6 A4_2] +B; >\u03381.。ss; [P1 V6 A4_2]; [P1 V6 A4_2] +B; >\u03381.。Ss; [P1 V6 A4_2]; [P1 V6 A4_2] +B; ≯1.。Ss; [P1 V6 A4_2]; [P1 V6 A4_2] +B; xn--1-ogo..ss; [V6 A4_2]; [V6 A4_2] +B; xn--1-ogo..xn--zca; [V6 A4_2]; [V6 A4_2] +B; >\u0338⒈。SS; [P1 V6]; [P1 V6] +B; ≯⒈。SS; [P1 V6]; [P1 V6] +B; ≯⒈。ss; [P1 V6]; [P1 V6] +B; >\u0338⒈。ss; [P1 V6]; [P1 V6] +B; >\u0338⒈。Ss; [P1 V6]; [P1 V6] +B; ≯⒈。Ss; [P1 V6]; [P1 V6] +B; xn--hdh84f.ss; [V6]; [V6] +B; xn--hdh84f.xn--zca; [V6]; [V6] +T; \u0667\u200D\uFB96。\u07DA-₆Ⴙ; [B1 B2 B3 C2 P1 V6]; [B1 B2 B3 P1 V6] # ٧ڳ.ߚ-6Ⴙ +N; \u0667\u200D\uFB96。\u07DA-₆Ⴙ; [B1 B2 B3 C2 P1 V6]; [B1 B2 B3 C2 P1 V6] # ٧ڳ.ߚ-6Ⴙ +T; \u0667\u200D\u06B3。\u07DA-6Ⴙ; [B1 B2 B3 C2 P1 V6]; [B1 B2 B3 P1 V6] # ٧ڳ.ߚ-6Ⴙ +N; \u0667\u200D\u06B3。\u07DA-6Ⴙ; [B1 B2 B3 C2 P1 V6]; [B1 B2 B3 C2 P1 V6] # ٧ڳ.ߚ-6Ⴙ +T; \u0667\u200D\u06B3。\u07DA-6ⴙ; [B1 B2 B3 C2]; [B1 B2 B3] # ٧ڳ.ߚ-6ⴙ +N; \u0667\u200D\u06B3。\u07DA-6ⴙ; [B1 B2 B3 C2]; [B1 B2 B3 C2] # ٧ڳ.ߚ-6ⴙ +B; xn--gib6m.xn---6-lve6529a; [B1 B2 B3]; [B1 B2 B3] # ٧ڳ.ߚ-6ⴙ +B; xn--gib6m343e.xn---6-lve6529a; [B1 B2 B3 C2]; [B1 B2 B3 C2] # ٧ڳ.ߚ-6ⴙ +B; xn--gib6m.xn---6-lve002g; [B1 B2 B3 V6]; [B1 B2 B3 V6] # ٧ڳ.ߚ-6Ⴙ +B; xn--gib6m343e.xn---6-lve002g; [B1 B2 B3 C2 V6]; [B1 B2 B3 C2 V6] # ٧ڳ.ߚ-6Ⴙ +T; \u0667\u200D\uFB96。\u07DA-₆ⴙ; [B1 B2 B3 C2]; [B1 B2 B3] # ٧ڳ.ߚ-6ⴙ +N; \u0667\u200D\uFB96。\u07DA-₆ⴙ; [B1 B2 B3 C2]; [B1 B2 B3 C2] # ٧ڳ.ߚ-6ⴙ +T; \u200C。≠; [C1 P1 V6]; [P1 V6 A4_2] # .≠ +N; \u200C。≠; [C1 P1 V6]; [C1 P1 V6] # .≠ +T; \u200C。=\u0338; [C1 P1 V6]; [P1 V6 A4_2] # .≠ +N; \u200C。=\u0338; [C1 P1 V6]; [C1 P1 V6] # .≠ +T; \u200C。≠; [C1 P1 V6]; [P1 V6 A4_2] # .≠ +N; \u200C。≠; [C1 P1 V6]; [C1 P1 V6] # .≠ +T; \u200C。=\u0338; [C1 P1 V6]; [P1 V6 A4_2] # .≠ +N; \u200C。=\u0338; [C1 P1 V6]; [C1 P1 V6] # .≠ +B; .xn--1ch; [V6 A4_2]; [V6 A4_2] +B; xn--0ug.xn--1ch; [C1 V6]; [C1 V6] # .≠ +T; 𑖿𝨔.ᡟ𑖿\u1B42\u200C; [C1 V5]; [V5] # 𑖿𝨔.ᡟ𑖿ᭂ +N; 𑖿𝨔.ᡟ𑖿\u1B42\u200C; [C1 V5]; [C1 V5] # 𑖿𝨔.ᡟ𑖿ᭂ +B; xn--461dw464a.xn--v8e29loy65a; [V5]; [V5] # 𑖿𝨔.ᡟ𑖿ᭂ +B; xn--461dw464a.xn--v8e29ldzfo952a; [C1 V5]; [C1 V5] # 𑖿𝨔.ᡟ𑖿ᭂ +T; 򔣳\u200D򑝱.𖬴Ↄ≠-; [C2 P1 V3 V5 V6]; [P1 V3 V5 V6] # .𖬴Ↄ≠- +N; 򔣳\u200D򑝱.𖬴Ↄ≠-; [C2 P1 V3 V5 V6]; [C2 P1 V3 V5 V6] # .𖬴Ↄ≠- +T; 򔣳\u200D򑝱.𖬴Ↄ=\u0338-; [C2 P1 V3 V5 V6]; [P1 V3 V5 V6] # .𖬴Ↄ≠- +N; 򔣳\u200D򑝱.𖬴Ↄ=\u0338-; [C2 P1 V3 V5 V6]; [C2 P1 V3 V5 V6] # .𖬴Ↄ≠- +T; 򔣳\u200D򑝱.𖬴ↄ=\u0338-; [C2 P1 V3 V5 V6]; [P1 V3 V5 V6] # .𖬴ↄ≠- +N; 򔣳\u200D򑝱.𖬴ↄ=\u0338-; [C2 P1 V3 V5 V6]; [C2 P1 V3 V5 V6] # .𖬴ↄ≠- +T; 򔣳\u200D򑝱.𖬴ↄ≠-; [C2 P1 V3 V5 V6]; [P1 V3 V5 V6] # .𖬴ↄ≠- +N; 򔣳\u200D򑝱.𖬴ↄ≠-; [C2 P1 V3 V5 V6]; [C2 P1 V3 V5 V6] # .𖬴ↄ≠- +B; xn--6j00chy9a.xn----81n51bt713h; [V3 V5 V6]; [V3 V5 V6] +B; xn--1ug15151gkb5a.xn----81n51bt713h; [C2 V3 V5 V6]; [C2 V3 V5 V6] # .𖬴ↄ≠- +B; xn--6j00chy9a.xn----61n81bt713h; [V3 V5 V6]; [V3 V5 V6] +B; xn--1ug15151gkb5a.xn----61n81bt713h; [C2 V3 V5 V6]; [C2 V3 V5 V6] # .𖬴Ↄ≠- +T; \u07E2ς\u200D𝟳。蔑򛖢; [B2 C2 P1 V6]; [B2 P1 V6] # ߢς7.蔑 +N; \u07E2ς\u200D𝟳。蔑򛖢; [B2 C2 P1 V6]; [B2 C2 P1 V6] # ߢς7.蔑 +T; \u07E2ς\u200D7。蔑򛖢; [B2 C2 P1 V6]; [B2 P1 V6] # ߢς7.蔑 +N; \u07E2ς\u200D7。蔑򛖢; [B2 C2 P1 V6]; [B2 C2 P1 V6] # ߢς7.蔑 +T; \u07E2Σ\u200D7。蔑򛖢; [B2 
C2 P1 V6]; [B2 P1 V6] # ߢσ7.蔑 +N; \u07E2Σ\u200D7。蔑򛖢; [B2 C2 P1 V6]; [B2 C2 P1 V6] # ߢσ7.蔑 +T; \u07E2σ\u200D7。蔑򛖢; [B2 C2 P1 V6]; [B2 P1 V6] # ߢσ7.蔑 +N; \u07E2σ\u200D7。蔑򛖢; [B2 C2 P1 V6]; [B2 C2 P1 V6] # ߢσ7.蔑 +B; xn--7-zmb872a.xn--wy1ao4929b; [B2 V6]; [B2 V6] # ߢσ7.蔑 +B; xn--7-zmb872aez5a.xn--wy1ao4929b; [B2 C2 V6]; [B2 C2 V6] # ߢσ7.蔑 +B; xn--7-xmb182aez5a.xn--wy1ao4929b; [B2 C2 V6]; [B2 C2 V6] # ߢς7.蔑 +T; \u07E2Σ\u200D𝟳。蔑򛖢; [B2 C2 P1 V6]; [B2 P1 V6] # ߢσ7.蔑 +N; \u07E2Σ\u200D𝟳。蔑򛖢; [B2 C2 P1 V6]; [B2 C2 P1 V6] # ߢσ7.蔑 +T; \u07E2σ\u200D𝟳。蔑򛖢; [B2 C2 P1 V6]; [B2 P1 V6] # ߢσ7.蔑 +N; \u07E2σ\u200D𝟳。蔑򛖢; [B2 C2 P1 V6]; [B2 C2 P1 V6] # ߢσ7.蔑 +B; 𐹰.\u0600; [B1 P1 V6]; [B1 P1 V6] # 𐹰. +B; xn--oo0d.xn--ifb; [B1 V6]; [B1 V6] # 𐹰. +B; -\u08A8.𱠖; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ࢨ. +B; xn----mod.xn--5o9n; [B1 V3 V6]; [B1 V3 V6] # -ࢨ. +B; ≯𞱸󠇀。誆⒈; [B1 P1 V6]; [B1 P1 V6] +B; >\u0338𞱸󠇀。誆⒈; [B1 P1 V6]; [B1 P1 V6] +B; ≯𞱸󠇀。誆1.; [B1 P1 V6]; [B1 P1 V6] +B; >\u0338𞱸󠇀。誆1.; [B1 P1 V6]; [B1 P1 V6] +B; xn--hdh7151p.xn--1-dy1d.; [B1 V6]; [B1 V6] +B; xn--hdh7151p.xn--tsh1248a; [B1 V6]; [B1 V6] +B; \u0616𞥙䐊\u0650.︒\u0645↺\u069C; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ؖ𞥙䐊ِ.︒م↺ڜ +B; \u0616𞥙䐊\u0650.。\u0645↺\u069C; [B1 V5 A4_2]; [B1 V5 A4_2] # ؖ𞥙䐊ِ..م↺ڜ +B; xn--4fb0j490qjg4x..xn--hhb8o948e; [B1 V5 A4_2]; [B1 V5 A4_2] # ؖ𞥙䐊ِ..م↺ڜ +B; xn--4fb0j490qjg4x.xn--hhb8o948euo5r; [B1 V5 V6]; [B1 V5 V6] # ؖ𞥙䐊ِ.︒م↺ڜ +T; 퀬-\uDF7E񶳒.\u200C\u0AC5󩸤۴; [C1 P1 V6]; [P1 V5 V6 A3] # 퀬-.ૅ۴ +N; 퀬-\uDF7E񶳒.\u200C\u0AC5󩸤۴; [C1 P1 V6]; [C1 P1 V6 A3] # 퀬-.ૅ۴ +T; 퀬-\uDF7E񶳒.\u200C\u0AC5󩸤۴; [C1 P1 V6]; [P1 V5 V6 A3] # 퀬-.ૅ۴ +N; 퀬-\uDF7E񶳒.\u200C\u0AC5󩸤۴; [C1 P1 V6]; [C1 P1 V6 A3] # 퀬-.ૅ۴ +B; 퀬-\uDF7E񶳒.xn--hmb76q74166b; [P1 V5 V6]; [P1 V5 V6 A3] # 퀬-.ૅ۴ +B; 퀬-\uDF7E񶳒.xn--hmb76q74166b; [P1 V5 V6]; [P1 V5 V6 A3] # 퀬-.ૅ۴ +B; 퀬-\uDF7E񶳒.XN--HMB76Q74166B; [P1 V5 V6]; [P1 V5 V6 A3] # 퀬-.ૅ۴ +B; 퀬-\uDF7E񶳒.XN--HMB76Q74166B; [P1 V5 V6]; [P1 V5 V6 A3] # 퀬-.ૅ۴ +B; 퀬-\uDF7E񶳒.Xn--Hmb76q74166b; [P1 V5 V6]; [P1 V5 V6 A3] # 퀬-.ૅ۴ +B; 퀬-\uDF7E񶳒.Xn--Hmb76q74166b; [P1 V5 V6]; [P1 V5 V6 A3] # 퀬-.ૅ۴ +B; 퀬-\uDF7E񶳒.xn--hmb76q48y18505a; [C1 P1 V6]; [C1 P1 V6 A3] # 퀬-.ૅ۴ +B; 퀬-\uDF7E񶳒.xn--hmb76q48y18505a; [C1 P1 V6]; [C1 P1 V6 A3] # 퀬-.ૅ۴ +B; 퀬-\uDF7E񶳒.XN--HMB76Q48Y18505A; [C1 P1 V6]; [C1 P1 V6 A3] # 퀬-.ૅ۴ +B; 퀬-\uDF7E񶳒.XN--HMB76Q48Y18505A; [C1 P1 V6]; [C1 P1 V6 A3] # 퀬-.ૅ۴ +B; 퀬-\uDF7E񶳒.Xn--Hmb76q48y18505a; [C1 P1 V6]; [C1 P1 V6 A3] # 퀬-.ૅ۴ +B; 퀬-\uDF7E񶳒.Xn--Hmb76q48y18505a; [C1 P1 V6]; [C1 P1 V6 A3] # 퀬-.ૅ۴ +B; Ⴌ.𐹾︒𑁿𞾄; [B1 P1 V6]; [B1 P1 V6] +B; Ⴌ.𐹾。𑁿𞾄; [B1 P1 V5 V6]; [B1 P1 V5 V6] +B; ⴌ.𐹾。𑁿𞾄; [B1 P1 V5 V6]; [B1 P1 V5 V6] +B; xn--3kj.xn--2o0d.xn--q30dg029a; [B1 V5 V6]; [B1 V5 V6] +B; xn--knd.xn--2o0d.xn--q30dg029a; [B1 V5 V6]; [B1 V5 V6] +B; ⴌ.𐹾︒𑁿𞾄; [B1 P1 V6]; [B1 P1 V6] +B; xn--3kj.xn--y86c030a9ob6374b; [B1 V6]; [B1 V6] +B; xn--knd.xn--y86c030a9ob6374b; [B1 V6]; [B1 V6] +B; 񧞿╏。𞩕󠁾; [B3 B6 P1 V6]; [B3 B6 P1 V6] +B; xn--iyh90030d.xn--1m6hs0260c; [B3 B6 V6]; [B3 B6 V6] +T; \u200D┮󠇐.\u0C00\u0C4D\u1734\u200D; [C2 V5]; [V5] # ┮.ఀ్᜴ +N; \u200D┮󠇐.\u0C00\u0C4D\u1734\u200D; [C2 V5]; [C2 V5] # ┮.ఀ్᜴ +T; \u200D┮󠇐.\u0C00\u0C4D\u1734\u200D; [C2 V5]; [V5] # ┮.ఀ్᜴ +N; \u200D┮󠇐.\u0C00\u0C4D\u1734\u200D; [C2 V5]; [C2 V5] # ┮.ఀ్᜴ +B; xn--kxh.xn--eoc8m432a; [V5]; [V5] # ┮.ఀ్᜴ +B; xn--1ug04r.xn--eoc8m432a40i; [C2 V5]; [C2 V5] # ┮.ఀ్᜴ +B; 򹚪。🄂; [P1 V6]; [P1 V6] +B; 򹚪。1,; [P1 V6]; [P1 V6] +B; xn--n433d.1,; [P1 V6]; [P1 V6] +B; xn--n433d.xn--v07h; [V6]; [V6] +B; 𑍨刍.🛦; [V5]; [V5] +B; xn--rbry728b.xn--y88h; [V5]; [V5] +B; 󠌏3。\u1BF1𝟒; [P1 V5 V6]; [P1 V5 V6] # 3.ᯱ4 +B; 󠌏3。\u1BF14; [P1 V5 V6]; [P1 V5 V6] # 3.ᯱ4 +B; 
xn--3-ib31m.xn--4-pql; [V5 V6]; [V5 V6] # 3.ᯱ4 +T; \u06876Ⴔ辘.\uFD22\u0687\u200C; [B2 B3 C1 P1 V6]; [B2 B3 P1 V6] # ڇ6Ⴔ辘.صيڇ +N; \u06876Ⴔ辘.\uFD22\u0687\u200C; [B2 B3 C1 P1 V6]; [B2 B3 C1 P1 V6] # ڇ6Ⴔ辘.صيڇ +T; \u06876Ⴔ辘.\u0635\u064A\u0687\u200C; [B2 B3 C1 P1 V6]; [B2 B3 P1 V6] # ڇ6Ⴔ辘.صيڇ +N; \u06876Ⴔ辘.\u0635\u064A\u0687\u200C; [B2 B3 C1 P1 V6]; [B2 B3 C1 P1 V6] # ڇ6Ⴔ辘.صيڇ +T; \u06876ⴔ辘.\u0635\u064A\u0687\u200C; [B2 B3 C1]; [B2 B3] # ڇ6ⴔ辘.صيڇ +N; \u06876ⴔ辘.\u0635\u064A\u0687\u200C; [B2 B3 C1]; [B2 B3 C1] # ڇ6ⴔ辘.صيڇ +B; xn--6-gsc2270akm6f.xn--0gb6bxk; [B2 B3]; [B2 B3] # ڇ6ⴔ辘.صيڇ +B; xn--6-gsc2270akm6f.xn--0gb6bxkx18g; [B2 B3 C1]; [B2 B3 C1] # ڇ6ⴔ辘.صيڇ +B; xn--6-gsc039eqq6k.xn--0gb6bxk; [B2 B3 V6]; [B2 B3 V6] # ڇ6Ⴔ辘.صيڇ +B; xn--6-gsc039eqq6k.xn--0gb6bxkx18g; [B2 B3 C1 V6]; [B2 B3 C1 V6] # ڇ6Ⴔ辘.صيڇ +T; \u06876ⴔ辘.\uFD22\u0687\u200C; [B2 B3 C1]; [B2 B3] # ڇ6ⴔ辘.صيڇ +N; \u06876ⴔ辘.\uFD22\u0687\u200C; [B2 B3 C1]; [B2 B3 C1] # ڇ6ⴔ辘.صيڇ +B; 󠄍.𐮭𞰬򻫞۹; [B2 P1 V6 A4_2]; [B2 P1 V6 A4_2] +B; .xn--mmb3954kd0uf1zx7f; [B2 V6 A4_2]; [B2 V6 A4_2] +B; \uA87D≯.򻲀򒳄; [P1 V6]; [P1 V6] # ≯. +B; \uA87D>\u0338.򻲀򒳄; [P1 V6]; [P1 V6] # ≯. +B; \uA87D≯.򻲀򒳄; [P1 V6]; [P1 V6] # ≯. +B; \uA87D>\u0338.򻲀򒳄; [P1 V6]; [P1 V6] # ≯. +B; xn--hdh8193c.xn--5z40cp629b; [V6]; [V6] # ≯. +T; ςო\u067B.ς\u0714; [B5 B6]; [B5 B6] # ςოٻ.ςܔ +N; ςო\u067B.ς\u0714; [B5 B6]; [B5 B6] # ςოٻ.ςܔ +B; Σო\u067B.Σ\u0714; [B5 B6]; [B5 B6] # σოٻ.σܔ +B; σო\u067B.σ\u0714; [B5 B6]; [B5 B6] # σოٻ.σܔ +B; Σო\u067B.σ\u0714; [B5 B6]; [B5 B6] # σოٻ.σܔ +B; xn--4xa60l26n.xn--4xa21o; [B5 B6]; [B5 B6] # σოٻ.σܔ +T; Σო\u067B.ς\u0714; [B5 B6]; [B5 B6] # σოٻ.ςܔ +N; Σო\u067B.ς\u0714; [B5 B6]; [B5 B6] # σოٻ.ςܔ +T; σო\u067B.ς\u0714; [B5 B6]; [B5 B6] # σოٻ.ςܔ +N; σო\u067B.ς\u0714; [B5 B6]; [B5 B6] # σოٻ.ςܔ +B; xn--4xa60l26n.xn--3xa41o; [B5 B6]; [B5 B6] # σოٻ.ςܔ +B; xn--3xa80l26n.xn--3xa41o; [B5 B6]; [B5 B6] # ςოٻ.ςܔ +B; 򄖚\u0748𠄯\u075F。󠛩; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # ݈𠄯ݟ. +B; 򄖚\u0748𠄯\u075F。󠛩; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # ݈𠄯ݟ. +B; xn--vob0c4369twfv8b.xn--kl46e; [B1 B5 B6 V6]; [B1 B5 B6 V6] # ݈𠄯ݟ. 
+T; 󠳛.\u200D䤫≠Ⴞ; [C2 P1 V6]; [P1 V6] # .䤫≠Ⴞ +N; 󠳛.\u200D䤫≠Ⴞ; [C2 P1 V6]; [C2 P1 V6] # .䤫≠Ⴞ +T; 󠳛.\u200D䤫=\u0338Ⴞ; [C2 P1 V6]; [P1 V6] # .䤫≠Ⴞ +N; 󠳛.\u200D䤫=\u0338Ⴞ; [C2 P1 V6]; [C2 P1 V6] # .䤫≠Ⴞ +T; 󠳛.\u200D䤫≠Ⴞ; [C2 P1 V6]; [P1 V6] # .䤫≠Ⴞ +N; 󠳛.\u200D䤫≠Ⴞ; [C2 P1 V6]; [C2 P1 V6] # .䤫≠Ⴞ +T; 󠳛.\u200D䤫=\u0338Ⴞ; [C2 P1 V6]; [P1 V6] # .䤫≠Ⴞ +N; 󠳛.\u200D䤫=\u0338Ⴞ; [C2 P1 V6]; [C2 P1 V6] # .䤫≠Ⴞ +T; 󠳛.\u200D䤫=\u0338ⴞ; [C2 P1 V6]; [P1 V6] # .䤫≠ⴞ +N; 󠳛.\u200D䤫=\u0338ⴞ; [C2 P1 V6]; [C2 P1 V6] # .䤫≠ⴞ +T; 󠳛.\u200D䤫≠ⴞ; [C2 P1 V6]; [P1 V6] # .䤫≠ⴞ +N; 󠳛.\u200D䤫≠ⴞ; [C2 P1 V6]; [C2 P1 V6] # .䤫≠ⴞ +B; xn--1t56e.xn--1ch153bqvw; [V6]; [V6] +B; xn--1t56e.xn--1ug73gzzpwi3a; [C2 V6]; [C2 V6] # .䤫≠ⴞ +B; xn--1t56e.xn--2nd141ghl2a; [V6]; [V6] +B; xn--1t56e.xn--2nd159e9vb743e; [C2 V6]; [C2 V6] # .䤫≠Ⴞ +T; 󠳛.\u200D䤫=\u0338ⴞ; [C2 P1 V6]; [P1 V6] # .䤫≠ⴞ +N; 󠳛.\u200D䤫=\u0338ⴞ; [C2 P1 V6]; [C2 P1 V6] # .䤫≠ⴞ +T; 󠳛.\u200D䤫≠ⴞ; [C2 P1 V6]; [P1 V6] # .䤫≠ⴞ +N; 󠳛.\u200D䤫≠ⴞ; [C2 P1 V6]; [C2 P1 V6] # .䤫≠ⴞ +B; 𐽘𑈵.𐹣🕥; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] +B; 𐽘𑈵.𐹣🕥; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] +B; xn--bv0d02c.xn--bo0dq650b; [B1 B2 B3 V6]; [B1 B2 B3 V6] +B; ⒊⒈𑁄。9; [P1 V6]; [P1 V6] +B; 3.1.𑁄。9; [V5]; [V5] +B; 3.1.xn--110d.9; [V5]; [V5] +B; xn--tshd3512p.9; [V6]; [V6] +T; -\u200C\u2DF1≮.𐹱򭏴4₉; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # -ⷱ≮.𐹱49 +N; -\u200C\u2DF1≮.𐹱򭏴4₉; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # -ⷱ≮.𐹱49 +T; -\u200C\u2DF1<\u0338.𐹱򭏴4₉; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # -ⷱ≮.𐹱49 +N; -\u200C\u2DF1<\u0338.𐹱򭏴4₉; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # -ⷱ≮.𐹱49 +T; -\u200C\u2DF1≮.𐹱򭏴49; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # -ⷱ≮.𐹱49 +N; -\u200C\u2DF1≮.𐹱򭏴49; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # -ⷱ≮.𐹱49 +T; -\u200C\u2DF1<\u0338.𐹱򭏴49; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # -ⷱ≮.𐹱49 +N; -\u200C\u2DF1<\u0338.𐹱򭏴49; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # -ⷱ≮.𐹱49 +B; xn----ngo823c.xn--49-ki3om2611f; [B1 V3 V6]; [B1 V3 V6] # -ⷱ≮.𐹱49 +B; xn----sgn20i14s.xn--49-ki3om2611f; [B1 C1 V3 V6]; [B1 C1 V3 V6] # -ⷱ≮.𐹱49 +B; -≯딾。\u0847; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -≯딾.ࡇ +B; ->\u0338딾。\u0847; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -≯딾.ࡇ +B; -≯딾。\u0847; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -≯딾.ࡇ +B; ->\u0338딾。\u0847; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -≯딾.ࡇ +B; xn----pgow547d.xn--5vb; [B1 V3 V6]; [B1 V3 V6] # -≯딾.ࡇ +T; 𑙢⒈𐹠-。󠗐\u200C; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # 𑙢⒈𐹠-. +N; 𑙢⒈𐹠-。󠗐\u200C; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # 𑙢⒈𐹠-. +T; 𑙢1.𐹠-。󠗐\u200C; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # 𑙢1.𐹠-. +N; 𑙢1.𐹠-。󠗐\u200C; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # 𑙢1.𐹠-. +B; xn--1-bf0j.xn----516i.xn--jd46e; [B1 V3 V6]; [B1 V3 V6] +B; xn--1-bf0j.xn----516i.xn--0ug23321l; [B1 C1 V3 V6]; [B1 C1 V3 V6] # 𑙢1.𐹠-. +B; xn----dcpy090hiyg.xn--jd46e; [B1 V3 V6]; [B1 V3 V6] +B; xn----dcpy090hiyg.xn--0ug23321l; [B1 C1 V3 V6]; [B1 C1 V3 V6] # 𑙢⒈𐹠-. 
+B; \u034A.𐨎; [V5]; [V5] # ͊.𐨎 +B; \u034A.𐨎; [V5]; [V5] # ͊.𐨎 +B; xn--oua.xn--mr9c; [V5]; [V5] # ͊.𐨎 +B; 훉≮。\u0E34; [P1 V5 V6]; [P1 V5 V6] # 훉≮.ิ +B; 훉<\u0338。\u0E34; [P1 V5 V6]; [P1 V5 V6] # 훉≮.ิ +B; 훉≮。\u0E34; [P1 V5 V6]; [P1 V5 V6] # 훉≮.ิ +B; 훉<\u0338。\u0E34; [P1 V5 V6]; [P1 V5 V6] # 훉≮.ิ +B; xn--gdh2512e.xn--i4c; [V5 V6]; [V5 V6] # 훉≮.ิ +B; \u2DF7򞣉🃘.𴈇𝟸\u0659𞤯; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ⷷ🃘.2ٙ𞤯 +B; \u2DF7򞣉🃘.𴈇2\u0659𞤯; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ⷷ🃘.2ٙ𞤯 +B; \u2DF7򞣉🃘.𴈇2\u0659𞤍; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ⷷ🃘.2ٙ𞤯 +B; xn--trj8045le6s9b.xn--2-upc23918acjsj; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # ⷷ🃘.2ٙ𞤯 +B; \u2DF7򞣉🃘.𴈇𝟸\u0659𞤍; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ⷷ🃘.2ٙ𞤯 +T; 󗇩ßᢞ\u200C。\u0660𞷻\uFCD4-; [B1 B6 C1 P1 V3 V6]; [B1 P1 V3 V6] # ßᢞ.٠نخ- +N; 󗇩ßᢞ\u200C。\u0660𞷻\uFCD4-; [B1 B6 C1 P1 V3 V6]; [B1 B6 C1 P1 V3 V6] # ßᢞ.٠نخ- +T; 󗇩ßᢞ\u200C。\u0660𞷻\u0646\u062E-; [B1 B6 C1 P1 V3 V6]; [B1 P1 V3 V6] # ßᢞ.٠نخ- +N; 󗇩ßᢞ\u200C。\u0660𞷻\u0646\u062E-; [B1 B6 C1 P1 V3 V6]; [B1 B6 C1 P1 V3 V6] # ßᢞ.٠نخ- +T; 󗇩SSᢞ\u200C。\u0660𞷻\u0646\u062E-; [B1 B6 C1 P1 V3 V6]; [B1 P1 V3 V6] # ssᢞ.٠نخ- +N; 󗇩SSᢞ\u200C。\u0660𞷻\u0646\u062E-; [B1 B6 C1 P1 V3 V6]; [B1 B6 C1 P1 V3 V6] # ssᢞ.٠نخ- +T; 󗇩ssᢞ\u200C。\u0660𞷻\u0646\u062E-; [B1 B6 C1 P1 V3 V6]; [B1 P1 V3 V6] # ssᢞ.٠نخ- +N; 󗇩ssᢞ\u200C。\u0660𞷻\u0646\u062E-; [B1 B6 C1 P1 V3 V6]; [B1 B6 C1 P1 V3 V6] # ssᢞ.٠نخ- +T; 󗇩Ssᢞ\u200C。\u0660𞷻\u0646\u062E-; [B1 B6 C1 P1 V3 V6]; [B1 P1 V3 V6] # ssᢞ.٠نخ- +N; 󗇩Ssᢞ\u200C。\u0660𞷻\u0646\u062E-; [B1 B6 C1 P1 V3 V6]; [B1 B6 C1 P1 V3 V6] # ssᢞ.٠نخ- +B; xn--ss-jepz4596r.xn----dnc5e1er384z; [B1 V3 V6]; [B1 V3 V6] # ssᢞ.٠نخ- +B; xn--ss-jep006bqt765b.xn----dnc5e1er384z; [B1 B6 C1 V3 V6]; [B1 B6 C1 V3 V6] # ssᢞ.٠نخ- +B; xn--zca272jbif10059a.xn----dnc5e1er384z; [B1 B6 C1 V3 V6]; [B1 B6 C1 V3 V6] # ßᢞ.٠نخ- +T; 󗇩SSᢞ\u200C。\u0660𞷻\uFCD4-; [B1 B6 C1 P1 V3 V6]; [B1 P1 V3 V6] # ssᢞ.٠نخ- +N; 󗇩SSᢞ\u200C。\u0660𞷻\uFCD4-; [B1 B6 C1 P1 V3 V6]; [B1 B6 C1 P1 V3 V6] # ssᢞ.٠نخ- +T; 󗇩ssᢞ\u200C。\u0660𞷻\uFCD4-; [B1 B6 C1 P1 V3 V6]; [B1 P1 V3 V6] # ssᢞ.٠نخ- +N; 󗇩ssᢞ\u200C。\u0660𞷻\uFCD4-; [B1 B6 C1 P1 V3 V6]; [B1 B6 C1 P1 V3 V6] # ssᢞ.٠نخ- +T; 󗇩Ssᢞ\u200C。\u0660𞷻\uFCD4-; [B1 B6 C1 P1 V3 V6]; [B1 P1 V3 V6] # ssᢞ.٠نخ- +N; 󗇩Ssᢞ\u200C。\u0660𞷻\uFCD4-; [B1 B6 C1 P1 V3 V6]; [B1 B6 C1 P1 V3 V6] # ssᢞ.٠نخ- +B; ꡆ。Ↄ\u0FB5놮-; [P1 V3 V6]; [P1 V3 V6] # ꡆ.Ↄྵ놮- +B; ꡆ。Ↄ\u0FB5놮-; [P1 V3 V6]; [P1 V3 V6] # ꡆ.Ↄྵ놮- +B; ꡆ。ↄ\u0FB5놮-; [V3]; [V3] # ꡆ.ↄྵ놮- +B; ꡆ。ↄ\u0FB5놮-; [V3]; [V3] # ꡆ.ↄྵ놮- +B; xn--fc9a.xn----qmg097k469k; [V3]; [V3] # ꡆ.ↄྵ놮- +B; xn--fc9a.xn----qmg787k869k; [V3 V6]; [V3 V6] # ꡆ.Ↄྵ놮- +T; \uFDAD\u200D.񥰌\u06A9; [B3 B5 B6 C2 P1 V6]; [B5 B6 P1 V6] # لمي.ک +N; \uFDAD\u200D.񥰌\u06A9; [B3 B5 B6 C2 P1 V6]; [B3 B5 B6 C2 P1 V6] # لمي.ک +T; \u0644\u0645\u064A\u200D.񥰌\u06A9; [B3 B5 B6 C2 P1 V6]; [B5 B6 P1 V6] # لمي.ک +N; \u0644\u0645\u064A\u200D.񥰌\u06A9; [B3 B5 B6 C2 P1 V6]; [B3 B5 B6 C2 P1 V6] # لمي.ک +B; xn--ghbcp.xn--ckb36214f; [B5 B6 V6]; [B5 B6 V6] # لمي.ک +B; xn--ghbcp494x.xn--ckb36214f; [B3 B5 B6 C2 V6]; [B3 B5 B6 C2 V6] # لمي.ک +B; Ⴜ\u1C2F𐳒≯。\u06E0\u1732\u0FBA; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # Ⴜᰯ𐳒≯.۠ᜲྺ +B; Ⴜ\u1C2F𐳒>\u0338。\u06E0\u1732\u0FBA; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # Ⴜᰯ𐳒≯.۠ᜲྺ +B; ⴜ\u1C2F𐳒>\u0338。\u06E0\u1732\u0FBA; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ⴜᰯ𐳒≯.۠ᜲྺ +B; ⴜ\u1C2F𐳒≯。\u06E0\u1732\u0FBA; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ⴜᰯ𐳒≯.۠ᜲྺ +B; Ⴜ\u1C2F𐲒≯。\u06E0\u1732\u0FBA; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # Ⴜᰯ𐳒≯.۠ᜲྺ +B; Ⴜ\u1C2F𐲒>\u0338。\u06E0\u1732\u0FBA; [B1 B3 B5 
B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # Ⴜᰯ𐳒≯.۠ᜲྺ +B; xn--0nd679cf3eq67y.xn--wlb646b4ng; [B1 B3 B5 B6 V5 V6]; [B1 B3 B5 B6 V5 V6] # Ⴜᰯ𐳒≯.۠ᜲྺ +B; xn--r1f68xh1jgv7u.xn--wlb646b4ng; [B1 B3 B5 B6 V5 V6]; [B1 B3 B5 B6 V5 V6] # ⴜᰯ𐳒≯.۠ᜲྺ +B; 𐋵。\uFCEC; [B1]; [B1] # 𐋵.كم +B; 𐋵。\u0643\u0645; [B1]; [B1] # 𐋵.كم +B; xn--p97c.xn--fhbe; [B1]; [B1] # 𐋵.كم +B; 𐋵.\u0643\u0645; [B1]; [B1] # 𐋵.كم +B; ≮𝅶.񱲁\uAAEC⹈󰥭; [P1 V6]; [P1 V6] # ≮.ꫬ⹈ +B; <\u0338𝅶.񱲁\uAAEC⹈󰥭; [P1 V6]; [P1 V6] # ≮.ꫬ⹈ +B; ≮𝅶.񱲁\uAAEC⹈󰥭; [P1 V6]; [P1 V6] # ≮.ꫬ⹈ +B; <\u0338𝅶.񱲁\uAAEC⹈󰥭; [P1 V6]; [P1 V6] # ≮.ꫬ⹈ +B; xn--gdh0880o.xn--4tjx101bsg00ds9pyc; [V6]; [V6] # ≮.ꫬ⹈ +B; \u2DF0\u0358ᢕ.\u0361𐹷󠴍; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ⷰ͘ᢕ.͡𐹷 +B; \u2DF0\u0358ᢕ.\u0361𐹷󠴍; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ⷰ͘ᢕ.͡𐹷 +B; xn--2ua889htsp.xn--cva2687k2tv0g; [B1 V5 V6]; [B1 V5 V6] # ⷰ͘ᢕ.͡𐹷 +T; \uFD79ᡐ\u200C\u06AD.𑋪\u05C7; [B1 B2 B3 B6 V5]; [B1 B2 B3 B6 V5] # غممᡐڭ.𑋪ׇ +N; \uFD79ᡐ\u200C\u06AD.𑋪\u05C7; [B1 B2 B3 B6 V5]; [B1 B2 B3 B6 V5] # غممᡐڭ.𑋪ׇ +T; \u063A\u0645\u0645ᡐ\u200C\u06AD.𑋪\u05C7; [B1 B2 B3 B6 V5]; [B1 B2 B3 B6 V5] # غممᡐڭ.𑋪ׇ +N; \u063A\u0645\u0645ᡐ\u200C\u06AD.𑋪\u05C7; [B1 B2 B3 B6 V5]; [B1 B2 B3 B6 V5] # غممᡐڭ.𑋪ׇ +B; xn--5gbwa03bg24e.xn--vdb1198k; [B1 B2 B3 B6 V5]; [B1 B2 B3 B6 V5] # غممᡐڭ.𑋪ׇ +B; xn--5gbwa03bg24eptk.xn--vdb1198k; [B1 B2 B3 B6 V5]; [B1 B2 B3 B6 V5] # غممᡐڭ.𑋪ׇ +T; 𑑂。\u200D󥞀🞕򥁔; [C2 P1 V5 V6]; [P1 V5 V6] # 𑑂.🞕 +N; 𑑂。\u200D󥞀🞕򥁔; [C2 P1 V5 V6]; [C2 P1 V5 V6] # 𑑂.🞕 +T; 𑑂。\u200D󥞀🞕򥁔; [C2 P1 V5 V6]; [P1 V5 V6] # 𑑂.🞕 +N; 𑑂。\u200D󥞀🞕򥁔; [C2 P1 V5 V6]; [C2 P1 V5 V6] # 𑑂.🞕 +B; xn--8v1d.xn--ye9h41035a2qqs; [V5 V6]; [V5 V6] +B; xn--8v1d.xn--1ug1386plvx1cd8vya; [C2 V5 V6]; [C2 V5 V6] # 𑑂.🞕 +B; -\u05E9。⒚; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ש.⒚ +B; -\u05E9。19.; [B1 V3]; [B1 V3] # -ש.19. +B; xn----gjc.19.; [B1 V3]; [B1 V3] # -ש.19. 
+B; xn----gjc.xn--cth; [B1 V3 V6]; [B1 V3 V6] # -ש.⒚ +T; 􊾻\u0845\u200C。ᢎ\u200D; [B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # ࡅ.ᢎ +N; 􊾻\u0845\u200C。ᢎ\u200D; [B5 B6 C1 C2 P1 V6]; [B5 B6 C1 C2 P1 V6] # ࡅ.ᢎ +T; 􊾻\u0845\u200C。ᢎ\u200D; [B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # ࡅ.ᢎ +N; 􊾻\u0845\u200C。ᢎ\u200D; [B5 B6 C1 C2 P1 V6]; [B5 B6 C1 C2 P1 V6] # ࡅ.ᢎ +B; xn--3vb50049s.xn--79e; [B5 B6 V6]; [B5 B6 V6] # ࡅ.ᢎ +B; xn--3vb882jz4411a.xn--79e259a; [B5 B6 C1 C2 V6]; [B5 B6 C1 C2 V6] # ࡅ.ᢎ +T; ß\u09C1\u1DED。\u06208₅; ß\u09C1\u1DED.\u062085; xn--ss-e2f077r.xn--85-psd # ßুᷭ.ؠ85 +N; ß\u09C1\u1DED。\u06208₅; ß\u09C1\u1DED.\u062085; xn--zca266bwrr.xn--85-psd # ßুᷭ.ؠ85 +T; ß\u09C1\u1DED。\u062085; ß\u09C1\u1DED.\u062085; xn--ss-e2f077r.xn--85-psd # ßুᷭ.ؠ85 +N; ß\u09C1\u1DED。\u062085; ß\u09C1\u1DED.\u062085; xn--zca266bwrr.xn--85-psd # ßুᷭ.ؠ85 +B; SS\u09C1\u1DED。\u062085; ss\u09C1\u1DED.\u062085; xn--ss-e2f077r.xn--85-psd # ssুᷭ.ؠ85 +B; ss\u09C1\u1DED。\u062085; ss\u09C1\u1DED.\u062085; xn--ss-e2f077r.xn--85-psd # ssুᷭ.ؠ85 +B; Ss\u09C1\u1DED。\u062085; ss\u09C1\u1DED.\u062085; xn--ss-e2f077r.xn--85-psd # ssুᷭ.ؠ85 +B; xn--ss-e2f077r.xn--85-psd; ss\u09C1\u1DED.\u062085; xn--ss-e2f077r.xn--85-psd # ssুᷭ.ؠ85 +B; ss\u09C1\u1DED.\u062085; ; xn--ss-e2f077r.xn--85-psd # ssুᷭ.ؠ85 +B; SS\u09C1\u1DED.\u062085; ss\u09C1\u1DED.\u062085; xn--ss-e2f077r.xn--85-psd # ssুᷭ.ؠ85 +B; Ss\u09C1\u1DED.\u062085; ss\u09C1\u1DED.\u062085; xn--ss-e2f077r.xn--85-psd # ssুᷭ.ؠ85 +B; xn--zca266bwrr.xn--85-psd; ß\u09C1\u1DED.\u062085; xn--zca266bwrr.xn--85-psd # ßুᷭ.ؠ85 +T; ß\u09C1\u1DED.\u062085; ; xn--ss-e2f077r.xn--85-psd # ßুᷭ.ؠ85 +N; ß\u09C1\u1DED.\u062085; ; xn--zca266bwrr.xn--85-psd # ßুᷭ.ؠ85 +B; SS\u09C1\u1DED。\u06208₅; ss\u09C1\u1DED.\u062085; xn--ss-e2f077r.xn--85-psd # ssুᷭ.ؠ85 +B; ss\u09C1\u1DED。\u06208₅; ss\u09C1\u1DED.\u062085; xn--ss-e2f077r.xn--85-psd # ssুᷭ.ؠ85 +B; Ss\u09C1\u1DED。\u06208₅; ss\u09C1\u1DED.\u062085; xn--ss-e2f077r.xn--85-psd # ssুᷭ.ؠ85 +T; \u0ACD\u0484魅𝟣.₃𐹥ß; [B1 V5]; [B1 V5] # ્҄魅1.3𐹥ß +N; \u0ACD\u0484魅𝟣.₃𐹥ß; [B1 V5]; [B1 V5] # ્҄魅1.3𐹥ß +T; \u0ACD\u0484魅1.3𐹥ß; [B1 V5]; [B1 V5] # ્҄魅1.3𐹥ß +N; \u0ACD\u0484魅1.3𐹥ß; [B1 V5]; [B1 V5] # ્҄魅1.3𐹥ß +B; \u0ACD\u0484魅1.3𐹥SS; [B1 V5]; [B1 V5] # ્҄魅1.3𐹥ss +B; \u0ACD\u0484魅1.3𐹥ss; [B1 V5]; [B1 V5] # ્҄魅1.3𐹥ss +B; \u0ACD\u0484魅1.3𐹥Ss; [B1 V5]; [B1 V5] # ્҄魅1.3𐹥ss +B; xn--1-0xb049b102o.xn--3ss-nv9t; [B1 V5]; [B1 V5] # ્҄魅1.3𐹥ss +B; xn--1-0xb049b102o.xn--3-qfa7018r; [B1 V5]; [B1 V5] # ્҄魅1.3𐹥ß +B; \u0ACD\u0484魅𝟣.₃𐹥SS; [B1 V5]; [B1 V5] # ્҄魅1.3𐹥ss +B; \u0ACD\u0484魅𝟣.₃𐹥ss; [B1 V5]; [B1 V5] # ્҄魅1.3𐹥ss +B; \u0ACD\u0484魅𝟣.₃𐹥Ss; [B1 V5]; [B1 V5] # ્҄魅1.3𐹥ss +B; \u072B。𑓂⒈𑜫󠿻; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ܫ.𑓂⒈𑜫 +B; \u072B。𑓂1.𑜫󠿻; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ܫ.𑓂1.𑜫 +B; xn--1nb.xn--1-jq9i.xn--ji2dg9877c; [B1 V5 V6]; [B1 V5 V6] # ܫ.𑓂1.𑜫 +B; xn--1nb.xn--tsh7798f6rbrt828c; [B1 V5 V6]; [B1 V5 V6] # ܫ.𑓂⒈𑜫 +B; \uFE0Dછ。嵨; છ.嵨; xn--6dc.xn--tot +B; xn--6dc.xn--tot; છ.嵨; xn--6dc.xn--tot +B; છ.嵨; ; xn--6dc.xn--tot +B; Ⴔ≠Ⴀ.𐹥𐹰; [B1 P1 V6]; [B1 P1 V6] +B; Ⴔ=\u0338Ⴀ.𐹥𐹰; [B1 P1 V6]; [B1 P1 V6] +B; ⴔ=\u0338ⴀ.𐹥𐹰; [B1 P1 V6]; [B1 P1 V6] +B; ⴔ≠ⴀ.𐹥𐹰; [B1 P1 V6]; [B1 P1 V6] +B; xn--1ch603bxb.xn--do0dwa; [B1 V6]; [B1 V6] +B; xn--7md3b171g.xn--do0dwa; [B1 V6]; [B1 V6] +T; -\u200C⒙𐫥。𝨵; [C1 P1 V3 V5 V6]; [P1 V3 V5 V6] # -⒙𐫥.𝨵 +N; -\u200C⒙𐫥。𝨵; [C1 P1 V3 V5 V6]; [C1 P1 V3 V5 V6] # -⒙𐫥.𝨵 +T; -\u200C18.𐫥。𝨵; [C1 V3 V5]; [V3 V5] # -18.𐫥.𝨵 +N; -\u200C18.𐫥。𝨵; [C1 V3 V5]; [C1 V3 V5] # -18.𐫥.𝨵 +B; -18.xn--rx9c.xn--382h; [V3 V5]; [V3 V5] +B; xn---18-9m0a.xn--rx9c.xn--382h; [C1 V3 V5]; [C1 V3 V5] # -18.𐫥.𝨵 +B; xn----ddps939g.xn--382h; 
[V3 V5 V6]; [V3 V5 V6] +B; xn----sgn18r3191a.xn--382h; [C1 V3 V5 V6]; [C1 V3 V5 V6] # -⒙𐫥.𝨵 +B; ︒.ʌᠣ-𐹽; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] +B; 。.ʌᠣ-𐹽; [B5 B6 A4_2]; [B5 B6 A4_2] +B; 。.Ʌᠣ-𐹽; [B5 B6 A4_2]; [B5 B6 A4_2] +B; ..xn----73a596nuh9t; [B5 B6 A4_2]; [B5 B6 A4_2] +B; ︒.Ʌᠣ-𐹽; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] +B; xn--y86c.xn----73a596nuh9t; [B1 B5 B6 V6]; [B1 B5 B6 V6] +B; \uFE05︒。𦀾\u1CE0; [P1 V6]; [P1 V6] # ︒.𦀾᳠ +B; \uFE05。。𦀾\u1CE0; [A4_2]; [A4_2] # ..𦀾᳠ +B; ..xn--t6f5138v; [A4_2]; [A4_2] # ..𦀾᳠ +B; xn--y86c.xn--t6f5138v; [V6]; [V6] # ︒.𦀾᳠ +B; xn--t6f5138v; 𦀾\u1CE0; xn--t6f5138v # 𦀾᳠ +B; 𦀾\u1CE0; ; xn--t6f5138v # 𦀾᳠ +T; 𞮑ß􏞞。ᡁ; [B2 B3 P1 V6]; [B2 B3 P1 V6] +N; 𞮑ß􏞞。ᡁ; [B2 B3 P1 V6]; [B2 B3 P1 V6] +B; 𞮑SS􏞞。ᡁ; [B2 B3 P1 V6]; [B2 B3 P1 V6] +B; 𞮑ss􏞞。ᡁ; [B2 B3 P1 V6]; [B2 B3 P1 V6] +B; 𞮑Ss􏞞。ᡁ; [B2 B3 P1 V6]; [B2 B3 P1 V6] +B; xn--ss-o412ac6305g.xn--07e; [B2 B3 V6]; [B2 B3 V6] +B; xn--zca9432wb989f.xn--07e; [B2 B3 V6]; [B2 B3 V6] +T; \uA953\u200D\u062C\u066C。𱆎󻡟\u200C󠅆; [B5 B6 C1 P1 V5 V6]; [B5 B6 P1 V5 V6] # ꥓ج٬. +N; \uA953\u200D\u062C\u066C。𱆎󻡟\u200C󠅆; [B5 B6 C1 P1 V5 V6]; [B5 B6 C1 P1 V5 V6] # ꥓ج٬. +B; xn--rgb2k6711c.xn--ec8nj3948b; [B5 B6 V5 V6]; [B5 B6 V5 V6] # ꥓ج٬. +B; xn--rgb2k500fhq9j.xn--0ug78870a5sp9d; [B5 B6 C1 V5 V6]; [B5 B6 C1 V5 V6] # ꥓ج٬. +T; 󠕏.-ß\u200C≠; [C1 P1 V3 V6]; [P1 V3 V6] # .-ß≠ +N; 󠕏.-ß\u200C≠; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .-ß≠ +T; 󠕏.-ß\u200C=\u0338; [C1 P1 V3 V6]; [P1 V3 V6] # .-ß≠ +N; 󠕏.-ß\u200C=\u0338; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .-ß≠ +T; 󠕏.-ß\u200C≠; [C1 P1 V3 V6]; [P1 V3 V6] # .-ß≠ +N; 󠕏.-ß\u200C≠; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .-ß≠ +T; 󠕏.-ß\u200C=\u0338; [C1 P1 V3 V6]; [P1 V3 V6] # .-ß≠ +N; 󠕏.-ß\u200C=\u0338; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .-ß≠ +T; 󠕏.-SS\u200C=\u0338; [C1 P1 V3 V6]; [P1 V3 V6] # .-ss≠ +N; 󠕏.-SS\u200C=\u0338; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .-ss≠ +T; 󠕏.-SS\u200C≠; [C1 P1 V3 V6]; [P1 V3 V6] # .-ss≠ +N; 󠕏.-SS\u200C≠; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .-ss≠ +T; 󠕏.-ss\u200C≠; [C1 P1 V3 V6]; [P1 V3 V6] # .-ss≠ +N; 󠕏.-ss\u200C≠; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .-ss≠ +T; 󠕏.-ss\u200C=\u0338; [C1 P1 V3 V6]; [P1 V3 V6] # .-ss≠ +N; 󠕏.-ss\u200C=\u0338; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .-ss≠ +T; 󠕏.-Ss\u200C=\u0338; [C1 P1 V3 V6]; [P1 V3 V6] # .-ss≠ +N; 󠕏.-Ss\u200C=\u0338; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .-ss≠ +T; 󠕏.-Ss\u200C≠; [C1 P1 V3 V6]; [P1 V3 V6] # .-ss≠ +N; 󠕏.-Ss\u200C≠; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .-ss≠ +B; xn--u836e.xn---ss-gl2a; [V3 V6]; [V3 V6] +B; xn--u836e.xn---ss-cn0at5l; [C1 V3 V6]; [C1 V3 V6] # .-ss≠ +B; xn--u836e.xn----qfa750ve7b; [C1 V3 V6]; [C1 V3 V6] # .-ß≠ +T; 󠕏.-SS\u200C=\u0338; [C1 P1 V3 V6]; [P1 V3 V6] # .-ss≠ +N; 󠕏.-SS\u200C=\u0338; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .-ss≠ +T; 󠕏.-SS\u200C≠; [C1 P1 V3 V6]; [P1 V3 V6] # .-ss≠ +N; 󠕏.-SS\u200C≠; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .-ss≠ +T; 󠕏.-ss\u200C≠; [C1 P1 V3 V6]; [P1 V3 V6] # .-ss≠ +N; 󠕏.-ss\u200C≠; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .-ss≠ +T; 󠕏.-ss\u200C=\u0338; [C1 P1 V3 V6]; [P1 V3 V6] # .-ss≠ +N; 󠕏.-ss\u200C=\u0338; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .-ss≠ +T; 󠕏.-Ss\u200C=\u0338; [C1 P1 V3 V6]; [P1 V3 V6] # .-ss≠ +N; 󠕏.-Ss\u200C=\u0338; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .-ss≠ +T; 󠕏.-Ss\u200C≠; [C1 P1 V3 V6]; [P1 V3 V6] # .-ss≠ +N; 󠕏.-Ss\u200C≠; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .-ss≠ +T; ᡙ\u200C。≯𐋲≠; [C1 P1 V6]; [P1 V6] # ᡙ.≯𐋲≠ +N; ᡙ\u200C。≯𐋲≠; [C1 P1 V6]; [C1 P1 V6] # ᡙ.≯𐋲≠ +T; ᡙ\u200C。>\u0338𐋲=\u0338; [C1 P1 V6]; [P1 V6] # ᡙ.≯𐋲≠ +N; ᡙ\u200C。>\u0338𐋲=\u0338; [C1 P1 V6]; [C1 P1 V6] # ᡙ.≯𐋲≠ +T; ᡙ\u200C。≯𐋲≠; [C1 P1 V6]; [P1 V6] # ᡙ.≯𐋲≠ +N; ᡙ\u200C。≯𐋲≠; [C1 P1 V6]; [C1 P1 V6] # ᡙ.≯𐋲≠ +T; 
ᡙ\u200C。>\u0338𐋲=\u0338; [C1 P1 V6]; [P1 V6] # ᡙ.≯𐋲≠ +N; ᡙ\u200C。>\u0338𐋲=\u0338; [C1 P1 V6]; [C1 P1 V6] # ᡙ.≯𐋲≠ +B; xn--p8e.xn--1ch3a7084l; [V6]; [V6] +B; xn--p8e650b.xn--1ch3a7084l; [C1 V6]; [C1 V6] # ᡙ.≯𐋲≠ +B; 𐹧𞲄󠁭񆼩。\u034E🄀; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𐹧.͎🄀 +B; 𐹧𞲄󠁭񆼩。\u034E0.; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𐹧.͎0. +B; xn--fo0dw409aq58qrn69d.xn--0-bgb.; [B1 V5 V6]; [B1 V5 V6] # 𐹧.͎0. +B; xn--fo0dw409aq58qrn69d.xn--sua6883w; [B1 V5 V6]; [B1 V5 V6] # 𐹧.͎🄀 +T; Ⴄ.\u200D\u0721󻣋ς; [B1 C2 P1 V6]; [B2 B3 P1 V6] # Ⴄ.ܡς +N; Ⴄ.\u200D\u0721󻣋ς; [B1 C2 P1 V6]; [B1 C2 P1 V6] # Ⴄ.ܡς +T; Ⴄ.\u200D\u0721󻣋ς; [B1 C2 P1 V6]; [B2 B3 P1 V6] # Ⴄ.ܡς +N; Ⴄ.\u200D\u0721󻣋ς; [B1 C2 P1 V6]; [B1 C2 P1 V6] # Ⴄ.ܡς +T; ⴄ.\u200D\u0721󻣋ς; [B1 C2 P1 V6]; [B2 B3 P1 V6] # ⴄ.ܡς +N; ⴄ.\u200D\u0721󻣋ς; [B1 C2 P1 V6]; [B1 C2 P1 V6] # ⴄ.ܡς +T; Ⴄ.\u200D\u0721󻣋Σ; [B1 C2 P1 V6]; [B2 B3 P1 V6] # Ⴄ.ܡσ +N; Ⴄ.\u200D\u0721󻣋Σ; [B1 C2 P1 V6]; [B1 C2 P1 V6] # Ⴄ.ܡσ +T; ⴄ.\u200D\u0721󻣋σ; [B1 C2 P1 V6]; [B2 B3 P1 V6] # ⴄ.ܡσ +N; ⴄ.\u200D\u0721󻣋σ; [B1 C2 P1 V6]; [B1 C2 P1 V6] # ⴄ.ܡσ +B; xn--vkj.xn--4xa73ob5892c; [B2 B3 V6]; [B2 B3 V6] # ⴄ.ܡσ +B; xn--vkj.xn--4xa73o3t5ajq467a; [B1 C2 V6]; [B1 C2 V6] # ⴄ.ܡσ +B; xn--cnd.xn--4xa73ob5892c; [B2 B3 V6]; [B2 B3 V6] # Ⴄ.ܡσ +B; xn--cnd.xn--4xa73o3t5ajq467a; [B1 C2 V6]; [B1 C2 V6] # Ⴄ.ܡσ +B; xn--vkj.xn--3xa93o3t5ajq467a; [B1 C2 V6]; [B1 C2 V6] # ⴄ.ܡς +B; xn--cnd.xn--3xa93o3t5ajq467a; [B1 C2 V6]; [B1 C2 V6] # Ⴄ.ܡς +T; ⴄ.\u200D\u0721󻣋ς; [B1 C2 P1 V6]; [B2 B3 P1 V6] # ⴄ.ܡς +N; ⴄ.\u200D\u0721󻣋ς; [B1 C2 P1 V6]; [B1 C2 P1 V6] # ⴄ.ܡς +T; Ⴄ.\u200D\u0721󻣋Σ; [B1 C2 P1 V6]; [B2 B3 P1 V6] # Ⴄ.ܡσ +N; Ⴄ.\u200D\u0721󻣋Σ; [B1 C2 P1 V6]; [B1 C2 P1 V6] # Ⴄ.ܡσ +T; ⴄ.\u200D\u0721󻣋σ; [B1 C2 P1 V6]; [B2 B3 P1 V6] # ⴄ.ܡσ +N; ⴄ.\u200D\u0721󻣋σ; [B1 C2 P1 V6]; [B1 C2 P1 V6] # ⴄ.ܡσ +B; 򮵛\u0613.Ⴕ; [P1 V6]; [P1 V6] # ؓ.Ⴕ +B; 򮵛\u0613.ⴕ; [P1 V6]; [P1 V6] # ؓ.ⴕ +B; xn--1fb94204l.xn--dlj; [V6]; [V6] # ؓ.ⴕ +B; xn--1fb94204l.xn--tnd; [V6]; [V6] # ؓ.Ⴕ +T; ≯\u1DF3𞤥。\u200C\uA8C4󠪉\u200D; [B1 C1 C2 P1 V6]; [B1 P1 V5 V6] # ≯ᷳ𞤥.꣄ +N; ≯\u1DF3𞤥。\u200C\uA8C4󠪉\u200D; [B1 C1 C2 P1 V6]; [B1 C1 C2 P1 V6] # ≯ᷳ𞤥.꣄ +T; >\u0338\u1DF3𞤥。\u200C\uA8C4󠪉\u200D; [B1 C1 C2 P1 V6]; [B1 P1 V5 V6] # ≯ᷳ𞤥.꣄ +N; >\u0338\u1DF3𞤥。\u200C\uA8C4󠪉\u200D; [B1 C1 C2 P1 V6]; [B1 C1 C2 P1 V6] # ≯ᷳ𞤥.꣄ +T; >\u0338\u1DF3𞤃。\u200C\uA8C4󠪉\u200D; [B1 C1 C2 P1 V6]; [B1 P1 V5 V6] # ≯ᷳ𞤥.꣄ +N; >\u0338\u1DF3𞤃。\u200C\uA8C4󠪉\u200D; [B1 C1 C2 P1 V6]; [B1 C1 C2 P1 V6] # ≯ᷳ𞤥.꣄ +T; ≯\u1DF3𞤃。\u200C\uA8C4󠪉\u200D; [B1 C1 C2 P1 V6]; [B1 P1 V5 V6] # ≯ᷳ𞤥.꣄ +N; ≯\u1DF3𞤃。\u200C\uA8C4󠪉\u200D; [B1 C1 C2 P1 V6]; [B1 C1 C2 P1 V6] # ≯ᷳ𞤥.꣄ +B; xn--ofg13qyr21c.xn--0f9au6706d; [B1 V5 V6]; [B1 V5 V6] # ≯ᷳ𞤥.꣄ +B; xn--ofg13qyr21c.xn--0ugc0116hix29k; [B1 C1 C2 V6]; [B1 C1 C2 V6] # ≯ᷳ𞤥.꣄ +T; \u200C󠄷。򒑁; [C1 P1 V6]; [P1 V6 A4_2] # . +N; \u200C󠄷。򒑁; [C1 P1 V6]; [C1 P1 V6] # . +T; \u200C󠄷。򒑁; [C1 P1 V6]; [P1 V6 A4_2] # . +N; \u200C󠄷。򒑁; [C1 P1 V6]; [C1 P1 V6] # . +B; .xn--w720c; [V6 A4_2]; [V6 A4_2] +B; xn--0ug.xn--w720c; [C1 V6]; [C1 V6] # . 
+T; ⒈\u0DD6焅.󗡙\u200Dꡟ; [C2 P1 V6]; [P1 V6] # ⒈ූ焅.ꡟ +N; ⒈\u0DD6焅.󗡙\u200Dꡟ; [C2 P1 V6]; [C2 P1 V6] # ⒈ූ焅.ꡟ +T; 1.\u0DD6焅.󗡙\u200Dꡟ; [C2 P1 V5 V6]; [P1 V5 V6] # 1.ූ焅.ꡟ +N; 1.\u0DD6焅.󗡙\u200Dꡟ; [C2 P1 V5 V6]; [C2 P1 V5 V6] # 1.ූ焅.ꡟ +B; 1.xn--t1c6981c.xn--4c9a21133d; [V5 V6]; [V5 V6] # 1.ූ焅.ꡟ +B; 1.xn--t1c6981c.xn--1ugz184c9lw7i; [C2 V5 V6]; [C2 V5 V6] # 1.ූ焅.ꡟ +B; xn--t1c337io97c.xn--4c9a21133d; [V6]; [V6] # ⒈ූ焅.ꡟ +B; xn--t1c337io97c.xn--1ugz184c9lw7i; [C2 V6]; [C2 V6] # ⒈ූ焅.ꡟ +T; \u1DCDς≮.ς𝪦𞤕0; [B1 B5 P1 V5 V6]; [B1 B5 P1 V5 V6] # ᷍ς≮.ς𝪦𞤷0 +N; \u1DCDς≮.ς𝪦𞤕0; [B1 B5 P1 V5 V6]; [B1 B5 P1 V5 V6] # ᷍ς≮.ς𝪦𞤷0 +T; \u1DCDς<\u0338.ς𝪦𞤕0; [B1 B5 P1 V5 V6]; [B1 B5 P1 V5 V6] # ᷍ς≮.ς𝪦𞤷0 +N; \u1DCDς<\u0338.ς𝪦𞤕0; [B1 B5 P1 V5 V6]; [B1 B5 P1 V5 V6] # ᷍ς≮.ς𝪦𞤷0 +T; \u1DCDς<\u0338.ς𝪦𞤷0; [B1 B5 P1 V5 V6]; [B1 B5 P1 V5 V6] # ᷍ς≮.ς𝪦𞤷0 +N; \u1DCDς<\u0338.ς𝪦𞤷0; [B1 B5 P1 V5 V6]; [B1 B5 P1 V5 V6] # ᷍ς≮.ς𝪦𞤷0 +T; \u1DCDς≮.ς𝪦𞤷0; [B1 B5 P1 V5 V6]; [B1 B5 P1 V5 V6] # ᷍ς≮.ς𝪦𞤷0 +N; \u1DCDς≮.ς𝪦𞤷0; [B1 B5 P1 V5 V6]; [B1 B5 P1 V5 V6] # ᷍ς≮.ς𝪦𞤷0 +B; \u1DCDΣ≮.Σ𝪦𞤕0; [B1 B5 P1 V5 V6]; [B1 B5 P1 V5 V6] # ᷍σ≮.σ𝪦𞤷0 +B; \u1DCDΣ<\u0338.Σ𝪦𞤕0; [B1 B5 P1 V5 V6]; [B1 B5 P1 V5 V6] # ᷍σ≮.σ𝪦𞤷0 +B; \u1DCDσ<\u0338.σ𝪦𞤷0; [B1 B5 P1 V5 V6]; [B1 B5 P1 V5 V6] # ᷍σ≮.σ𝪦𞤷0 +B; \u1DCDσ≮.σ𝪦𞤷0; [B1 B5 P1 V5 V6]; [B1 B5 P1 V5 V6] # ᷍σ≮.σ𝪦𞤷0 +B; \u1DCDΣ≮.Σ𝪦𞤷0; [B1 B5 P1 V5 V6]; [B1 B5 P1 V5 V6] # ᷍σ≮.σ𝪦𞤷0 +B; \u1DCDΣ<\u0338.Σ𝪦𞤷0; [B1 B5 P1 V5 V6]; [B1 B5 P1 V5 V6] # ᷍σ≮.σ𝪦𞤷0 +B; xn--4xa544kvid.xn--0-zmb55727aggma; [B1 B5 V5 V6]; [B1 B5 V5 V6] # ᷍σ≮.σ𝪦𞤷0 +B; xn--3xa744kvid.xn--0-xmb85727aggma; [B1 B5 V5 V6]; [B1 B5 V5 V6] # ᷍ς≮.ς𝪦𞤷0 +B; \u1DCDσ≮.σ𝪦𞤕0; [B1 B5 P1 V5 V6]; [B1 B5 P1 V5 V6] # ᷍σ≮.σ𝪦𞤷0 +B; \u1DCDσ<\u0338.σ𝪦𞤕0; [B1 B5 P1 V5 V6]; [B1 B5 P1 V5 V6] # ᷍σ≮.σ𝪦𞤷0 +T; 򢦾ß\u05B9𐫙.\u05AD\u08A1; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ßֹ𐫙.֭ࢡ +N; 򢦾ß\u05B9𐫙.\u05AD\u08A1; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ßֹ𐫙.֭ࢡ +B; 򢦾SS\u05B9𐫙.\u05AD\u08A1; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ssֹ𐫙.֭ࢡ +B; 򢦾ss\u05B9𐫙.\u05AD\u08A1; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ssֹ𐫙.֭ࢡ +B; 򢦾Ss\u05B9𐫙.\u05AD\u08A1; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ssֹ𐫙.֭ࢡ +B; xn--ss-xjd6058xlz50g.xn--4cb62m; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # ssֹ𐫙.֭ࢡ +B; xn--zca89v339zj118e.xn--4cb62m; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # ßֹ𐫙.֭ࢡ +B; -𞣄。⒈; [B1 P1 V3 V6]; [B1 P1 V3 V6] +B; -𞣄。1.; [B1 V3]; [B1 V3] +B; xn----xc8r.1.; [B1 V3]; [B1 V3] +B; xn----xc8r.xn--tsh; [B1 V3 V6]; [B1 V3 V6] +B; 񈠢𐫖𝟡。\u063E𑘿; [B5 P1 V6]; [B5 P1 V6] # 𐫖9.ؾ𑘿 +B; 񈠢𐫖9。\u063E𑘿; [B5 P1 V6]; [B5 P1 V6] # 𐫖9.ؾ𑘿 +B; xn--9-el5iv442t.xn--9gb0830l; [B5 V6]; [B5 V6] # 𐫖9.ؾ𑘿 +T; \u0668\uFC8C\u0668\u1A5D.\u200D; [B1 C2]; [B1] # ٨نم٨ᩝ. +N; \u0668\uFC8C\u0668\u1A5D.\u200D; [B1 C2]; [B1 C2] # ٨نم٨ᩝ. +T; \u0668\u0646\u0645\u0668\u1A5D.\u200D; [B1 C2]; [B1] # ٨نم٨ᩝ. +N; \u0668\u0646\u0645\u0668\u1A5D.\u200D; [B1 C2]; [B1 C2] # ٨نم٨ᩝ. +B; xn--hhbb5hc956w.; [B1]; [B1] # ٨نم٨ᩝ. +B; xn--hhbb5hc956w.xn--1ug; [B1 C2]; [B1 C2] # ٨نم٨ᩝ. +B; 𝟘.Ⴇ󀳑\uFD50񫃱; [B1 B5 P1 V6]; [B1 B5 P1 V6] # 0.Ⴇتجم +B; 0.Ⴇ󀳑\u062A\u062C\u0645񫃱; [B1 B5 P1 V6]; [B1 B5 P1 V6] # 0.Ⴇتجم +B; 0.ⴇ󀳑\u062A\u062C\u0645񫃱; [B1 B5 P1 V6]; [B1 B5 P1 V6] # 0.ⴇتجم +B; 0.xn--pgbe9ez79qd207lvff8b; [B1 B5 V6]; [B1 B5 V6] # 0.ⴇتجم +B; 0.xn--pgbe9e344c2725svff8b; [B1 B5 V6]; [B1 B5 V6] # 0.Ⴇتجم +B; 𝟘.ⴇ󀳑\uFD50񫃱; [B1 B5 P1 V6]; [B1 B5 P1 V6] # 0.ⴇتجم +B; 𑇀▍.⁞ᠰ; [V5]; [V5] +B; xn--9zh3057f.xn--j7e103b; [V5]; [V5] +T; \u200D-\u067A.򏯩; [B1 C2 P1 V6]; [B1 P1 V3 V6] # -ٺ. +N; \u200D-\u067A.򏯩; [B1 C2 P1 V6]; [B1 C2 P1 V6] # -ٺ. +B; xn----qrc.xn--ts49b; [B1 V3 V6]; [B1 V3 V6] # -ٺ. 
+B; xn----qrc357q.xn--ts49b; [B1 C2 V6]; [B1 C2 V6] # -ٺ. +T; ᠢ𐮂𐫘寐。\u200C≯✳; [B1 B5 C1 P1 V6]; [B1 B5 P1 V6] # ᠢ𐮂𐫘寐.≯✳ +N; ᠢ𐮂𐫘寐。\u200C≯✳; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # ᠢ𐮂𐫘寐.≯✳ +T; ᠢ𐮂𐫘寐。\u200C>\u0338✳; [B1 B5 C1 P1 V6]; [B1 B5 P1 V6] # ᠢ𐮂𐫘寐.≯✳ +N; ᠢ𐮂𐫘寐。\u200C>\u0338✳; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # ᠢ𐮂𐫘寐.≯✳ +T; ᠢ𐮂𐫘寐。\u200C≯✳; [B1 B5 C1 P1 V6]; [B1 B5 P1 V6] # ᠢ𐮂𐫘寐.≯✳ +N; ᠢ𐮂𐫘寐。\u200C≯✳; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # ᠢ𐮂𐫘寐.≯✳ +T; ᠢ𐮂𐫘寐。\u200C>\u0338✳; [B1 B5 C1 P1 V6]; [B1 B5 P1 V6] # ᠢ𐮂𐫘寐.≯✳ +N; ᠢ𐮂𐫘寐。\u200C>\u0338✳; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # ᠢ𐮂𐫘寐.≯✳ +B; xn--46e6675axzzhota.xn--hdh99p; [B1 B5 V6]; [B1 B5 V6] +B; xn--46e6675axzzhota.xn--0ug06gu8f; [B1 B5 C1 V6]; [B1 B5 C1 V6] # ᠢ𐮂𐫘寐.≯✳ +T; \u200D。󸲜ႺႴ𞨇; [B1 B5 B6 C2 P1 V6]; [B5 B6 P1 V6 A4_2] # .ႺႴ +N; \u200D。󸲜ႺႴ𞨇; [B1 B5 B6 C2 P1 V6]; [B1 B5 B6 C2 P1 V6] # .ႺႴ +T; \u200D。󸲜ႺႴ𞨇; [B1 B5 B6 C2 P1 V6]; [B5 B6 P1 V6 A4_2] # .ႺႴ +N; \u200D。󸲜ႺႴ𞨇; [B1 B5 B6 C2 P1 V6]; [B1 B5 B6 C2 P1 V6] # .ႺႴ +T; \u200D。󸲜ⴚⴔ𞨇; [B1 B5 B6 C2 P1 V6]; [B5 B6 P1 V6 A4_2] # .ⴚⴔ +N; \u200D。󸲜ⴚⴔ𞨇; [B1 B5 B6 C2 P1 V6]; [B1 B5 B6 C2 P1 V6] # .ⴚⴔ +T; \u200D。󸲜Ⴚⴔ𞨇; [B1 B5 B6 C2 P1 V6]; [B5 B6 P1 V6 A4_2] # .Ⴚⴔ +N; \u200D。󸲜Ⴚⴔ𞨇; [B1 B5 B6 C2 P1 V6]; [B1 B5 B6 C2 P1 V6] # .Ⴚⴔ +B; .xn--ynd036lq981an3r4h; [B5 B6 V6 A4_2]; [B5 B6 V6 A4_2] +B; xn--1ug.xn--ynd036lq981an3r4h; [B1 B5 B6 C2 V6]; [B1 B5 B6 C2 V6] # .Ⴚⴔ +B; .xn--cljl81825an3r4h; [B5 B6 V6 A4_2]; [B5 B6 V6 A4_2] +B; xn--1ug.xn--cljl81825an3r4h; [B1 B5 B6 C2 V6]; [B1 B5 B6 C2 V6] # .ⴚⴔ +B; .xn--sndl01647an3h1h; [B5 B6 V6 A4_2]; [B5 B6 V6 A4_2] +B; xn--1ug.xn--sndl01647an3h1h; [B1 B5 B6 C2 V6]; [B1 B5 B6 C2 V6] # .ႺႴ +T; \u200D。󸲜ⴚⴔ𞨇; [B1 B5 B6 C2 P1 V6]; [B5 B6 P1 V6 A4_2] # .ⴚⴔ +N; \u200D。󸲜ⴚⴔ𞨇; [B1 B5 B6 C2 P1 V6]; [B1 B5 B6 C2 P1 V6] # .ⴚⴔ +T; \u200D。󸲜Ⴚⴔ𞨇; [B1 B5 B6 C2 P1 V6]; [B5 B6 P1 V6 A4_2] # .Ⴚⴔ +N; \u200D。󸲜Ⴚⴔ𞨇; [B1 B5 B6 C2 P1 V6]; [B1 B5 B6 C2 P1 V6] # .Ⴚⴔ +T; -3.\u200Dヌᢕ; [C2 V3]; [V3] # -3.ヌᢕ +N; -3.\u200Dヌᢕ; [C2 V3]; [C2 V3] # -3.ヌᢕ +B; -3.xn--fbf115j; [V3]; [V3] +B; -3.xn--fbf739aq5o; [C2 V3]; [C2 V3] # -3.ヌᢕ +T; 🂃\u0666ß\u200D。󠠂򭰍𞩒-; [B1 C2 P1 V3 V6]; [B1 P1 V3 V6] # 🂃٦ß.- +N; 🂃\u0666ß\u200D。󠠂򭰍𞩒-; [B1 C2 P1 V3 V6]; [B1 C2 P1 V3 V6] # 🂃٦ß.- +T; 🂃\u0666SS\u200D。󠠂򭰍𞩒-; [B1 C2 P1 V3 V6]; [B1 P1 V3 V6] # 🂃٦ss.- +N; 🂃\u0666SS\u200D。󠠂򭰍𞩒-; [B1 C2 P1 V3 V6]; [B1 C2 P1 V3 V6] # 🂃٦ss.- +T; 🂃\u0666ss\u200D。󠠂򭰍𞩒-; [B1 C2 P1 V3 V6]; [B1 P1 V3 V6] # 🂃٦ss.- +N; 🂃\u0666ss\u200D。󠠂򭰍𞩒-; [B1 C2 P1 V3 V6]; [B1 C2 P1 V3 V6] # 🂃٦ss.- +T; 🂃\u0666Ss\u200D。󠠂򭰍𞩒-; [B1 C2 P1 V3 V6]; [B1 P1 V3 V6] # 🂃٦ss.- +N; 🂃\u0666Ss\u200D。󠠂򭰍𞩒-; [B1 C2 P1 V3 V6]; [B1 C2 P1 V3 V6] # 🂃٦ss.- +B; xn--ss-pyd98921c.xn----nz8rh7531csznt; [B1 V3 V6]; [B1 V3 V6] # 🂃٦ss.- +B; xn--ss-pyd483x5k99b.xn----nz8rh7531csznt; [B1 C2 V3 V6]; [B1 C2 V3 V6] # 🂃٦ss.- +B; xn--zca34z68yzu83b.xn----nz8rh7531csznt; [B1 C2 V3 V6]; [B1 C2 V3 V6] # 🂃٦ß.- +T; ꇟ-𐾺\u069F。򰀺\u200C; [B5 B6 C1 P1 V6]; [B5 B6 P1 V6] # ꇟ-ڟ. +N; ꇟ-𐾺\u069F。򰀺\u200C; [B5 B6 C1 P1 V6]; [B5 B6 C1 P1 V6] # ꇟ-ڟ. +B; xn----utc4430jd3zd.xn--bp20d; [B5 B6 V6]; [B5 B6 V6] # ꇟ-ڟ. +B; xn----utc4430jd3zd.xn--0ugx6670i; [B5 B6 C1 V6]; [B5 B6 C1 V6] # ꇟ-ڟ. 
+B; \u0665.\u0484𐨗𝩋𴤃; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ٥.҄𐨗𝩋 +B; xn--eib.xn--n3a0405kus8eft5l; [B1 V5 V6]; [B1 V5 V6] # ٥.҄𐨗𝩋 +B; -.񱼓\u0649𐨿; [B1 B5 B6 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # -.ى𐨿 +B; -.xn--lhb4124khbq4b; [B1 B5 B6 V3 V6]; [B1 B5 B6 V3 V6] # -.ى𐨿 +T; 󾬨ς.𞶙녫ß; [B2 B3 P1 V6]; [B2 B3 P1 V6] +N; 󾬨ς.𞶙녫ß; [B2 B3 P1 V6]; [B2 B3 P1 V6] +T; 󾬨ς.𞶙녫ß; [B2 B3 P1 V6]; [B2 B3 P1 V6] +N; 󾬨ς.𞶙녫ß; [B2 B3 P1 V6]; [B2 B3 P1 V6] +B; 󾬨Σ.𞶙녫SS; [B2 B3 P1 V6]; [B2 B3 P1 V6] +B; 󾬨Σ.𞶙녫SS; [B2 B3 P1 V6]; [B2 B3 P1 V6] +B; 󾬨σ.𞶙녫ss; [B2 B3 P1 V6]; [B2 B3 P1 V6] +B; 󾬨σ.𞶙녫ss; [B2 B3 P1 V6]; [B2 B3 P1 V6] +B; 󾬨Σ.𞶙녫Ss; [B2 B3 P1 V6]; [B2 B3 P1 V6] +B; 󾬨Σ.𞶙녫Ss; [B2 B3 P1 V6]; [B2 B3 P1 V6] +B; xn--4xa76659r.xn--ss-d64i8755h; [B2 B3 V6]; [B2 B3 V6] +B; xn--3xa96659r.xn--zca5051g4h4i; [B2 B3 V6]; [B2 B3 V6] +T; Ⅎ\u17D2\u200D。≠\u200D\u200C; [C1 C2 P1 V6]; [P1 V6] # Ⅎ្.≠ +N; Ⅎ\u17D2\u200D。≠\u200D\u200C; [C1 C2 P1 V6]; [C1 C2 P1 V6] # Ⅎ្.≠ +T; Ⅎ\u17D2\u200D。=\u0338\u200D\u200C; [C1 C2 P1 V6]; [P1 V6] # Ⅎ្.≠ +N; Ⅎ\u17D2\u200D。=\u0338\u200D\u200C; [C1 C2 P1 V6]; [C1 C2 P1 V6] # Ⅎ្.≠ +T; Ⅎ\u17D2\u200D。≠\u200D\u200C; [C1 C2 P1 V6]; [P1 V6] # Ⅎ្.≠ +N; Ⅎ\u17D2\u200D。≠\u200D\u200C; [C1 C2 P1 V6]; [C1 C2 P1 V6] # Ⅎ្.≠ +T; Ⅎ\u17D2\u200D。=\u0338\u200D\u200C; [C1 C2 P1 V6]; [P1 V6] # Ⅎ្.≠ +N; Ⅎ\u17D2\u200D。=\u0338\u200D\u200C; [C1 C2 P1 V6]; [C1 C2 P1 V6] # Ⅎ្.≠ +T; ⅎ\u17D2\u200D。=\u0338\u200D\u200C; [C1 C2 P1 V6]; [P1 V6] # ⅎ្.≠ +N; ⅎ\u17D2\u200D。=\u0338\u200D\u200C; [C1 C2 P1 V6]; [C1 C2 P1 V6] # ⅎ្.≠ +T; ⅎ\u17D2\u200D。≠\u200D\u200C; [C1 C2 P1 V6]; [P1 V6] # ⅎ្.≠ +N; ⅎ\u17D2\u200D。≠\u200D\u200C; [C1 C2 P1 V6]; [C1 C2 P1 V6] # ⅎ្.≠ +B; xn--u4e969b.xn--1ch; [V6]; [V6] # ⅎ្.≠ +B; xn--u4e823bq1a.xn--0ugb89o; [C1 C2 V6]; [C1 C2 V6] # ⅎ្.≠ +B; xn--u4e319b.xn--1ch; [V6]; [V6] # Ⅎ្.≠ +B; xn--u4e823bcza.xn--0ugb89o; [C1 C2 V6]; [C1 C2 V6] # Ⅎ្.≠ +T; ⅎ\u17D2\u200D。=\u0338\u200D\u200C; [C1 C2 P1 V6]; [P1 V6] # ⅎ្.≠ +N; ⅎ\u17D2\u200D。=\u0338\u200D\u200C; [C1 C2 P1 V6]; [C1 C2 P1 V6] # ⅎ្.≠ +T; ⅎ\u17D2\u200D。≠\u200D\u200C; [C1 C2 P1 V6]; [P1 V6] # ⅎ្.≠ +N; ⅎ\u17D2\u200D。≠\u200D\u200C; [C1 C2 P1 V6]; [C1 C2 P1 V6] # ⅎ្.≠ +T; 𐋺\uAAF6\uA953󧦉.\u200C\u1714\u068F; [B1 C1 P1 V6]; [B1 P1 V5 V6] # 𐋺꫶꥓.᜔ڏ +N; 𐋺\uAAF6\uA953󧦉.\u200C\u1714\u068F; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐋺꫶꥓.᜔ڏ +T; 𐋺\uAAF6\uA953󧦉.\u200C\u1714\u068F; [B1 C1 P1 V6]; [B1 P1 V5 V6] # 𐋺꫶꥓.᜔ڏ +N; 𐋺\uAAF6\uA953󧦉.\u200C\u1714\u068F; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐋺꫶꥓.᜔ڏ +B; xn--3j9a14ak27osbz2o.xn--ljb175f; [B1 V5 V6]; [B1 V5 V6] # 𐋺꫶꥓.᜔ڏ +B; xn--3j9a14ak27osbz2o.xn--ljb175f1wg; [B1 C1 V6]; [B1 C1 V6] # 𐋺꫶꥓.᜔ڏ +B; 񺔯\u0FA8.≯; [P1 V6]; [P1 V6] # ྨ.≯ +B; 񺔯\u0FA8.>\u0338; [P1 V6]; [P1 V6] # ྨ.≯ +B; 񺔯\u0FA8.≯; [P1 V6]; [P1 V6] # ྨ.≯ +B; 񺔯\u0FA8.>\u0338; [P1 V6]; [P1 V6] # ྨ.≯ +B; xn--4fd57150h.xn--hdh; [V6]; [V6] # ྨ.≯ +T; \u200D𞡄Ⴓ.𐇽; [B1 B3 B6 C2 P1 V5 V6]; [B1 B2 B3 B6 P1 V5 V6] # 𞡄Ⴓ.𐇽 +N; \u200D𞡄Ⴓ.𐇽; [B1 B3 B6 C2 P1 V5 V6]; [B1 B3 B6 C2 P1 V5 V6] # 𞡄Ⴓ.𐇽 +T; \u200D𞡄Ⴓ.𐇽; [B1 B3 B6 C2 P1 V5 V6]; [B1 B2 B3 B6 P1 V5 V6] # 𞡄Ⴓ.𐇽 +N; \u200D𞡄Ⴓ.𐇽; [B1 B3 B6 C2 P1 V5 V6]; [B1 B3 B6 C2 P1 V5 V6] # 𞡄Ⴓ.𐇽 +T; \u200D𞡄ⴓ.𐇽; [B1 B3 B6 C2 V5]; [B1 B2 B3 B6 V5] # 𞡄ⴓ.𐇽 +N; \u200D𞡄ⴓ.𐇽; [B1 B3 B6 C2 V5]; [B1 B3 B6 C2 V5] # 𞡄ⴓ.𐇽 +B; xn--blj7492l.xn--m27c; [B1 B2 B3 B6 V5]; [B1 B2 B3 B6 V5] +B; xn--1ugz52c4i16a.xn--m27c; [B1 B3 B6 C2 V5]; [B1 B3 B6 C2 V5] # 𞡄ⴓ.𐇽 +B; xn--rnd5552v.xn--m27c; [B1 B2 B3 B6 V5 V6]; [B1 B2 B3 B6 V5 V6] +B; xn--rnd379ex885a.xn--m27c; [B1 B3 B6 C2 V5 V6]; [B1 B3 B6 C2 V5 V6] # 𞡄Ⴓ.𐇽 +T; \u200D𞡄ⴓ.𐇽; [B1 B3 B6 C2 V5]; [B1 B2 B3 B6 V5] # 𞡄ⴓ.𐇽 +N; \u200D𞡄ⴓ.𐇽; [B1 B3 B6 C2 V5]; [B1 B3 B6 C2 V5] # 
𞡄ⴓ.𐇽 +T; 𐪒ß\uA8EA.ᡤ; [B2 B3]; [B2 B3] # 𐪒ß꣪.ᡤ +N; 𐪒ß\uA8EA.ᡤ; [B2 B3]; [B2 B3] # 𐪒ß꣪.ᡤ +T; 𐪒ß\uA8EA.ᡤ; [B2 B3]; [B2 B3] # 𐪒ß꣪.ᡤ +N; 𐪒ß\uA8EA.ᡤ; [B2 B3]; [B2 B3] # 𐪒ß꣪.ᡤ +B; 𐪒SS\uA8EA.ᡤ; [B2 B3]; [B2 B3] # 𐪒ss꣪.ᡤ +B; 𐪒ss\uA8EA.ᡤ; [B2 B3]; [B2 B3] # 𐪒ss꣪.ᡤ +B; 𐪒Ss\uA8EA.ᡤ; [B2 B3]; [B2 B3] # 𐪒ss꣪.ᡤ +B; xn--ss-tu9hw933a.xn--08e; [B2 B3]; [B2 B3] # 𐪒ss꣪.ᡤ +B; xn--zca2517f2hvc.xn--08e; [B2 B3]; [B2 B3] # 𐪒ß꣪.ᡤ +B; 𐪒SS\uA8EA.ᡤ; [B2 B3]; [B2 B3] # 𐪒ss꣪.ᡤ +B; 𐪒ss\uA8EA.ᡤ; [B2 B3]; [B2 B3] # 𐪒ss꣪.ᡤ +B; 𐪒Ss\uA8EA.ᡤ; [B2 B3]; [B2 B3] # 𐪒ss꣪.ᡤ +T; 𐨿󠆌鸮𑚶.ς; [V5]; [V5] +N; 𐨿󠆌鸮𑚶.ς; [V5]; [V5] +B; 𐨿󠆌鸮𑚶.Σ; [V5]; [V5] +B; 𐨿󠆌鸮𑚶.σ; [V5]; [V5] +B; xn--l76a726rt2h.xn--4xa; [V5]; [V5] +B; xn--l76a726rt2h.xn--3xa; [V5]; [V5] +B; ⒗𞤬。-𑚶; [B1 P1 V3 V6]; [B1 P1 V3 V6] +B; 16.𞤬。-𑚶; [B1 V3]; [B1 V3] +B; 16.𞤊。-𑚶; [B1 V3]; [B1 V3] +B; 16.xn--ke6h.xn----4j0j; [B1 V3]; [B1 V3] +B; ⒗𞤊。-𑚶; [B1 P1 V3 V6]; [B1 P1 V3 V6] +B; xn--8shw466n.xn----4j0j; [B1 V3 V6]; [B1 V3 V6] +B; \u08B3𞤿⾫。𐹣\u068F⒈; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] # ࢳ𞤿隹.𐹣ڏ⒈ +B; \u08B3𞤿隹。𐹣\u068F1.; [B1 B2 B3]; [B1 B2 B3] # ࢳ𞤿隹.𐹣ڏ1. +B; \u08B3𞤝隹。𐹣\u068F1.; [B1 B2 B3]; [B1 B2 B3] # ࢳ𞤿隹.𐹣ڏ1. +B; xn--8yb0383efiwk.xn--1-wsc3373r.; [B1 B2 B3]; [B1 B2 B3] # ࢳ𞤿隹.𐹣ڏ1. +B; \u08B3𞤝⾫。𐹣\u068F⒈; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] # ࢳ𞤿隹.𐹣ڏ⒈ +B; xn--8yb0383efiwk.xn--ljb064mol4n; [B1 B2 B3 V6]; [B1 B2 B3 V6] # ࢳ𞤿隹.𐹣ڏ⒈ +B; \u2433𚎛𝟧\u0661.ᡢ8\u0F72\u0600; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 5١.ᡢ8ི +B; \u2433𚎛5\u0661.ᡢ8\u0F72\u0600; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 5١.ᡢ8ི +B; xn--5-bqc410un435a.xn--8-rkc763epjj; [B5 B6 V6]; [B5 B6 V6] # 5١.ᡢ8ི +B; 𐹠.🄀⒒-󨰈; [B1 P1 V6]; [B1 P1 V6] +B; 𐹠.0.11.-󨰈; [B1 P1 V3 V6]; [B1 P1 V3 V6] +B; xn--7n0d.0.11.xn----8j07m; [B1 V3 V6]; [B1 V3 V6] +B; xn--7n0d.xn----xcp9757q1s13g; [B1 V6]; [B1 V6] +T; ς-。\u200C𝟭-; [C1 V3]; [V3] # ς-.1- +N; ς-。\u200C𝟭-; [C1 V3]; [C1 V3] # ς-.1- +T; ς-。\u200C1-; [C1 V3]; [V3] # ς-.1- +N; ς-。\u200C1-; [C1 V3]; [C1 V3] # ς-.1- +T; Σ-。\u200C1-; [C1 V3]; [V3] # σ-.1- +N; Σ-。\u200C1-; [C1 V3]; [C1 V3] # σ-.1- +T; σ-。\u200C1-; [C1 V3]; [V3] # σ-.1- +N; σ-。\u200C1-; [C1 V3]; [C1 V3] # σ-.1- +B; xn----zmb.1-; [V3]; [V3] +B; xn----zmb.xn--1--i1t; [C1 V3]; [C1 V3] # σ-.1- +B; xn----xmb.xn--1--i1t; [C1 V3]; [C1 V3] # ς-.1- +T; Σ-。\u200C𝟭-; [C1 V3]; [V3] # σ-.1- +N; Σ-。\u200C𝟭-; [C1 V3]; [C1 V3] # σ-.1- +T; σ-。\u200C𝟭-; [C1 V3]; [V3] # σ-.1- +N; σ-。\u200C𝟭-; [C1 V3]; [C1 V3] # σ-.1- +B; \u1734-\u0CE2.󠄩Ⴄ; [P1 V5 V6]; [P1 V5 V6] # ᜴-ೢ.Ⴄ +B; \u1734-\u0CE2.󠄩Ⴄ; [P1 V5 V6]; [P1 V5 V6] # ᜴-ೢ.Ⴄ +B; \u1734-\u0CE2.󠄩ⴄ; [V5]; [V5] # ᜴-ೢ.ⴄ +B; xn----ggf830f.xn--vkj; [V5]; [V5] # ᜴-ೢ.ⴄ +B; xn----ggf830f.xn--cnd; [V5 V6]; [V5 V6] # ᜴-ೢ.Ⴄ +B; \u1734-\u0CE2.󠄩ⴄ; [V5]; [V5] # ᜴-ೢ.ⴄ +B; 򭈗♋\u06BB𐦥。\u0954⒈; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ♋ڻ𐦥.॔⒈ +B; 򭈗♋\u06BB𐦥。\u09541.; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ♋ڻ𐦥.॔1. +B; xn--ukb372n129m3rs7f.xn--1-fyd.; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # ♋ڻ𐦥.॔1. +B; xn--ukb372n129m3rs7f.xn--u3b240l; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # ♋ڻ𐦥.॔⒈ +T; \u05A4.\u06C1\u1AB3\u200C; [B1 B3 B6 C1 V5]; [B1 B3 B6 V5] # ֤.ہ᪳ +N; \u05A4.\u06C1\u1AB3\u200C; [B1 B3 B6 C1 V5]; [B1 B3 B6 C1 V5] # ֤.ہ᪳ +T; \u05A4.\u06C1\u1AB3\u200C; [B1 B3 B6 C1 V5]; [B1 B3 B6 V5] # ֤.ہ᪳ +N; \u05A4.\u06C1\u1AB3\u200C; [B1 B3 B6 C1 V5]; [B1 B3 B6 C1 V5] # ֤.ہ᪳ +B; xn--vcb.xn--0kb623h; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ֤.ہ᪳ +B; xn--vcb.xn--0kb623hm1d; [B1 B3 B6 C1 V5]; [B1 B3 B6 C1 V5] # ֤.ہ᪳ +B; 񢭏\u0846≮\u0ACD.𞦊; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ࡆ≮્. +B; 񢭏\u0846<\u0338\u0ACD.𞦊; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ࡆ≮્. 
+B; 񢭏\u0846≮\u0ACD.𞦊; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ࡆ≮્. +B; 񢭏\u0846<\u0338\u0ACD.𞦊; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ࡆ≮્. +B; xn--4vb80kq29ayo62l.xn--8g6h; [B5 B6 V6]; [B5 B6 V6] # ࡆ≮્. +T; \u200D。𞀘⒈ꡍ擉; [C2 P1 V5 V6]; [P1 V5 V6 A4_2] # .𞀘⒈ꡍ擉 +N; \u200D。𞀘⒈ꡍ擉; [C2 P1 V5 V6]; [C2 P1 V5 V6] # .𞀘⒈ꡍ擉 +T; \u200D。𞀘1.ꡍ擉; [C2 V5]; [V5 A4_2] # .𞀘1.ꡍ擉 +N; \u200D。𞀘1.ꡍ擉; [C2 V5]; [C2 V5] # .𞀘1.ꡍ擉 +B; .xn--1-1p4r.xn--s7uv61m; [V5 A4_2]; [V5 A4_2] +B; xn--1ug.xn--1-1p4r.xn--s7uv61m; [C2 V5]; [C2 V5] # .𞀘1.ꡍ擉 +B; .xn--tsh026uql4bew9p; [V5 V6 A4_2]; [V5 V6 A4_2] +B; xn--1ug.xn--tsh026uql4bew9p; [C2 V5 V6]; [C2 V5 V6] # .𞀘⒈ꡍ擉 +B; ₈\u07CB.\uFB64≠; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 8ߋ.ٿ≠ +B; ₈\u07CB.\uFB64=\u0338; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 8ߋ.ٿ≠ +B; 8\u07CB.\u067F≠; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 8ߋ.ٿ≠ +B; 8\u07CB.\u067F=\u0338; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 8ߋ.ٿ≠ +B; xn--8-zbd.xn--4ib883l; [B1 B3 V6]; [B1 B3 V6] # 8ߋ.ٿ≠ +B; ᢡ\u07DE򹐣.⒒\u0642𑍦; [B1 B5 P1 V6]; [B1 B5 P1 V6] # ᢡߞ.⒒ق𑍦 +B; ᢡ\u07DE򹐣.11.\u0642𑍦; [B1 B5 P1 V6]; [B1 B5 P1 V6] # ᢡߞ.11.ق𑍦 +B; xn--5sb596fi873t.11.xn--ehb4198k; [B1 B5 V6]; [B1 B5 V6] # ᢡߞ.11.ق𑍦 +B; xn--5sb596fi873t.xn--ehb336mvy7n; [B1 B5 V6]; [B1 B5 V6] # ᢡߞ.⒒ق𑍦 +B; \u0E48-𐹺𝟜.\u0363\u06E1⒏; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ่-𐹺4.ͣۡ⒏ +B; \u0E48-𐹺4.\u0363\u06E18.; [B1 V5]; [B1 V5] # ่-𐹺4.ͣۡ8. +B; xn---4-owiz479s.xn--8-ihb69x.; [B1 V5]; [B1 V5] # ่-𐹺4.ͣۡ8. +B; xn---4-owiz479s.xn--eva20pjv9a; [B1 V5 V6]; [B1 V5 V6] # ่-𐹺4.ͣۡ⒏ +B; ⫐。Ⴠ-󃐢; [P1 V6]; [P1 V6] +B; ⫐。Ⴠ-󃐢; [P1 V6]; [P1 V6] +B; ⫐。ⴠ-󃐢; [P1 V6]; [P1 V6] +B; xn--r3i.xn----2wst7439i; [V6]; [V6] +B; xn--r3i.xn----z1g58579u; [V6]; [V6] +B; ⫐。ⴠ-󃐢; [P1 V6]; [P1 V6] +B; 𑑂◊.⦟∠; [V5]; [V5] +B; 𑑂◊.⦟∠; [V5]; [V5] +B; xn--01h3338f.xn--79g270a; [V5]; [V5] +B; 𿌰-\u0662。󋸛ꡂ; [B5 B6 P1 V6]; [B5 B6 P1 V6] # -٢.ꡂ +B; xn----dqc20828e.xn--bc9an2879c; [B5 B6 V6]; [B5 B6 V6] # -٢.ꡂ +B; \u0678。󠏬\u0741𞪭𐹪; [B1 P1 V6]; [B1 P1 V6] # يٴ.݁𐹪 +B; \u064A\u0674。󠏬\u0741𞪭𐹪; [B1 P1 V6]; [B1 P1 V6] # يٴ.݁𐹪 +B; xn--mhb8f.xn--oob2585kfdsfsbo7h; [B1 V6]; [B1 V6] # يٴ.݁𐹪 +T; 𐫆ꌄ。\u200Dᣬ; [B1 B2 B3 C2]; [B2 B3] # 𐫆ꌄ.ᣬ +N; 𐫆ꌄ。\u200Dᣬ; [B1 B2 B3 C2]; [B1 B2 B3 C2] # 𐫆ꌄ.ᣬ +T; 𐫆ꌄ。\u200Dᣬ; [B1 B2 B3 C2]; [B2 B3] # 𐫆ꌄ.ᣬ +N; 𐫆ꌄ。\u200Dᣬ; [B1 B2 B3 C2]; [B1 B2 B3 C2] # 𐫆ꌄ.ᣬ +B; xn--y77ao18q.xn--wdf; [B2 B3]; [B2 B3] +B; xn--y77ao18q.xn--wdf367a; [B1 B2 B3 C2]; [B1 B2 B3 C2] # 𐫆ꌄ.ᣬ +B; ₀\u0662。󅪞≯-; [B1 B6 P1 V3 V6]; [B1 B6 P1 V3 V6] # 0٢.≯- +B; ₀\u0662。󅪞>\u0338-; [B1 B6 P1 V3 V6]; [B1 B6 P1 V3 V6] # 0٢.≯- +B; 0\u0662。󅪞≯-; [B1 B6 P1 V3 V6]; [B1 B6 P1 V3 V6] # 0٢.≯- +B; 0\u0662。󅪞>\u0338-; [B1 B6 P1 V3 V6]; [B1 B6 P1 V3 V6] # 0٢.≯- +B; xn--0-dqc.xn----ogov3342l; [B1 B6 V3 V6]; [B1 B6 V3 V6] # 0٢.≯- +B; \u031C𐹫-𞯃.𐋤\u0845; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ̜𐹫-.𐋤ࡅ +B; xn----gdb7046r692g.xn--3vb1349j; [B1 V5 V6]; [B1 V5 V6] # ̜𐹫-.𐋤ࡅ +B; ≠。𝩑𐹩Ⴡ\u0594; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≠.𝩑𐹩Ⴡ֔ +B; =\u0338。𝩑𐹩Ⴡ\u0594; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≠.𝩑𐹩Ⴡ֔ +B; ≠。𝩑𐹩Ⴡ\u0594; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≠.𝩑𐹩Ⴡ֔ +B; =\u0338。𝩑𐹩Ⴡ\u0594; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≠.𝩑𐹩Ⴡ֔ +B; =\u0338。𝩑𐹩ⴡ\u0594; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≠.𝩑𐹩ⴡ֔ +B; ≠。𝩑𐹩ⴡ\u0594; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≠.𝩑𐹩ⴡ֔ +B; xn--1ch.xn--fcb363rk03mypug; [B1 V5 V6]; [B1 V5 V6] # ≠.𝩑𐹩ⴡ֔ +B; xn--1ch.xn--fcb538c649rypog; [B1 V5 V6]; [B1 V5 V6] # ≠.𝩑𐹩Ⴡ֔ +B; =\u0338。𝩑𐹩ⴡ\u0594; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≠.𝩑𐹩ⴡ֔ +B; ≠。𝩑𐹩ⴡ\u0594; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≠.𝩑𐹩ⴡ֔ +B; 𖫳≠.Ⴀ𐮀; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] +B; 𖫳=\u0338.Ⴀ𐮀; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] +B; 𖫳=\u0338.ⴀ𐮀; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] +B; 𖫳≠.ⴀ𐮀; [B1 B5 B6 
P1 V5 V6]; [B1 B5 B6 P1 V5 V6] +B; xn--1ch9250k.xn--rkj6232e; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] +B; xn--1ch9250k.xn--7md2659j; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] +B; 󠅾\u0736\u0726.ᢚ閪\u08E2𝩟; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ܶܦ.ᢚ閪𝩟 +B; 󠅾\u0736\u0726.ᢚ閪\u08E2𝩟; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ܶܦ.ᢚ閪𝩟 +B; xn--wnb5a.xn--l0b161fis8gbp5m; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # ܶܦ.ᢚ閪𝩟 +T; \u200D󠇜\u06CB\uA8E9。\u20DD\u0FB0-ᛟ; [B1 C2 V5]; [B1 V5] # ۋ꣩.⃝ྰ-ᛟ +N; \u200D󠇜\u06CB\uA8E9。\u20DD\u0FB0-ᛟ; [B1 C2 V5]; [B1 C2 V5] # ۋ꣩.⃝ྰ-ᛟ +T; \u200D󠇜\u06CB\uA8E9。\u20DD\u0FB0-ᛟ; [B1 C2 V5]; [B1 V5] # ۋ꣩.⃝ྰ-ᛟ +N; \u200D󠇜\u06CB\uA8E9。\u20DD\u0FB0-ᛟ; [B1 C2 V5]; [B1 C2 V5] # ۋ꣩.⃝ྰ-ᛟ +B; xn--blb8114f.xn----gmg236cj6k; [B1 V5]; [B1 V5] # ۋ꣩.⃝ྰ-ᛟ +B; xn--blb540ke10h.xn----gmg236cj6k; [B1 C2 V5]; [B1 C2 V5] # ۋ꣩.⃝ྰ-ᛟ +B; 헁󘖙\u0E3A󚍚。\u06BA𝟜; [P1 V6]; [P1 V6] # 헁ฺ.ں4 +B; 헁󘖙\u0E3A󚍚。\u06BA𝟜; [P1 V6]; [P1 V6] # 헁ฺ.ں4 +B; 헁󘖙\u0E3A󚍚。\u06BA4; [P1 V6]; [P1 V6] # 헁ฺ.ں4 +B; 헁󘖙\u0E3A󚍚。\u06BA4; [P1 V6]; [P1 V6] # 헁ฺ.ں4 +B; xn--o4c1723h8g85gt4ya.xn--4-dvc; [V6]; [V6] # 헁ฺ.ں4 +T; 𐹭。󃱂\u200CႾ; [B1 C1 P1 V6]; [B1 P1 V6] # 𐹭.Ⴞ +N; 𐹭。󃱂\u200CႾ; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹭.Ⴞ +T; 𐹭。󃱂\u200CႾ; [B1 C1 P1 V6]; [B1 P1 V6] # 𐹭.Ⴞ +N; 𐹭。󃱂\u200CႾ; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹭.Ⴞ +T; 𐹭。󃱂\u200Cⴞ; [B1 C1 P1 V6]; [B1 P1 V6] # 𐹭.ⴞ +N; 𐹭。󃱂\u200Cⴞ; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹭.ⴞ +B; xn--lo0d.xn--mljx1099g; [B1 V6]; [B1 V6] +B; xn--lo0d.xn--0ugx72cwi33v; [B1 C1 V6]; [B1 C1 V6] # 𐹭.ⴞ +B; xn--lo0d.xn--2nd75260n; [B1 V6]; [B1 V6] +B; xn--lo0d.xn--2nd949eqw95u; [B1 C1 V6]; [B1 C1 V6] # 𐹭.Ⴞ +T; 𐹭。󃱂\u200Cⴞ; [B1 C1 P1 V6]; [B1 P1 V6] # 𐹭.ⴞ +N; 𐹭。󃱂\u200Cⴞ; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹭.ⴞ +B; \uA953.\u033D𑂽馋; [P1 V5 V6]; [P1 V5 V6] # ꥓.̽馋 +B; xn--3j9a.xn--bua0708eqzrd; [V5 V6]; [V5 V6] # ꥓.̽馋 +T; 󈫝򪛸\u200D。䜖; [C2 P1 V6]; [P1 V6] # .䜖 +N; 󈫝򪛸\u200D。䜖; [C2 P1 V6]; [C2 P1 V6] # .䜖 +T; 󈫝򪛸\u200D。䜖; [C2 P1 V6]; [P1 V6] # .䜖 +N; 󈫝򪛸\u200D。䜖; [C2 P1 V6]; [C2 P1 V6] # .䜖 +B; xn--g138cxw05a.xn--k0o; [V6]; [V6] +B; xn--1ug30527h9mxi.xn--k0o; [C2 V6]; [C2 V6] # .䜖 +T; ᡯ⚉姶🄉.۷\u200D🎪\u200D; [C2 P1 V6]; [P1 V6] # ᡯ⚉姶🄉.۷🎪 +N; ᡯ⚉姶🄉.۷\u200D🎪\u200D; [C2 P1 V6]; [C2 P1 V6] # ᡯ⚉姶🄉.۷🎪 +T; ᡯ⚉姶8,.۷\u200D🎪\u200D; [C2 P1 V6]; [P1 V6] # ᡯ⚉姶8,.۷🎪 +N; ᡯ⚉姶8,.۷\u200D🎪\u200D; [C2 P1 V6]; [C2 P1 V6] # ᡯ⚉姶8,.۷🎪 +B; xn--8,-g9oy26fzu4d.xn--kmb6733w; [P1 V6]; [P1 V6] +B; xn--8,-g9oy26fzu4d.xn--kmb859ja94998b; [C2 P1 V6]; [C2 P1 V6] # ᡯ⚉姶8,.۷🎪 +B; xn--c9e433epi4b3j20a.xn--kmb6733w; [V6]; [V6] +B; xn--c9e433epi4b3j20a.xn--kmb859ja94998b; [C2 V6]; [C2 V6] # ᡯ⚉姶🄉.۷🎪 +B; 𞽀.𐹸🚖\u0E3A; [B1 P1 V6]; [B1 P1 V6] # .𐹸🚖ฺ +B; xn--0n7h.xn--o4c9032klszf; [B1 V6]; [B1 V6] # .𐹸🚖ฺ +B; Ⴔᠵ。𐹧\u0747۹; [B1 P1 V6]; [B1 P1 V6] # Ⴔᠵ.𐹧݇۹ +B; Ⴔᠵ。𐹧\u0747۹; [B1 P1 V6]; [B1 P1 V6] # Ⴔᠵ.𐹧݇۹ +B; ⴔᠵ。𐹧\u0747۹; [B1]; [B1] # ⴔᠵ.𐹧݇۹ +B; xn--o7e997h.xn--mmb9ml895e; [B1]; [B1] # ⴔᠵ.𐹧݇۹ +B; xn--snd659a.xn--mmb9ml895e; [B1 V6]; [B1 V6] # Ⴔᠵ.𐹧݇۹ +B; ⴔᠵ。𐹧\u0747۹; [B1]; [B1] # ⴔᠵ.𐹧݇۹ +T; \u135Fᡈ\u200C.︒-𖾐-; [C1 P1 V3 V5 V6]; [P1 V3 V5 V6] # ፟ᡈ.︒-𖾐- +N; \u135Fᡈ\u200C.︒-𖾐-; [C1 P1 V3 V5 V6]; [C1 P1 V3 V5 V6] # ፟ᡈ.︒-𖾐- +T; \u135Fᡈ\u200C.。-𖾐-; [C1 V3 V5 A4_2]; [V3 V5 A4_2] # ፟ᡈ..-𖾐- +N; \u135Fᡈ\u200C.。-𖾐-; [C1 V3 V5 A4_2]; [C1 V3 V5 A4_2] # ፟ᡈ..-𖾐- +B; xn--b7d82w..xn-----pe4u; [V3 V5 A4_2]; [V3 V5 A4_2] # ፟ᡈ..-𖾐- +B; xn--b7d82wo4h..xn-----pe4u; [C1 V3 V5 A4_2]; [C1 V3 V5 A4_2] # ፟ᡈ..-𖾐- +B; xn--b7d82w.xn-----c82nz547a; [V3 V5 V6]; [V3 V5 V6] # ፟ᡈ.︒-𖾐- +B; xn--b7d82wo4h.xn-----c82nz547a; [C1 V3 V5 V6]; [C1 V3 V5 V6] # ፟ᡈ.︒-𖾐- +T; ⒈\u0601⒖\u200C.\u1DF0\u07DB; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # ⒈⒖.ᷰߛ +N; 
⒈\u0601⒖\u200C.\u1DF0\u07DB; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # ⒈⒖.ᷰߛ +T; 1.\u060115.\u200C.\u1DF0\u07DB; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6 A4_2] # 1.15..ᷰߛ +N; 1.\u060115.\u200C.\u1DF0\u07DB; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 1.15..ᷰߛ +B; 1.xn--15-1pd..xn--2sb914i; [B1 V5 V6 A4_2]; [B1 V5 V6 A4_2] # 1.15..ᷰߛ +B; 1.xn--15-1pd.xn--0ug.xn--2sb914i; [B1 C1 V5 V6]; [B1 C1 V5 V6] # 1.15..ᷰߛ +B; xn--jfb347mib.xn--2sb914i; [B1 V5 V6]; [B1 V5 V6] # ⒈⒖.ᷰߛ +B; xn--jfb844kmfdwb.xn--2sb914i; [B1 C1 V5 V6]; [B1 C1 V5 V6] # ⒈⒖.ᷰߛ +B; 𝩜。-\u0B4DႫ; [P1 V3 V5 V6]; [P1 V3 V5 V6] # 𝩜.-୍Ⴋ +B; 𝩜。-\u0B4Dⴋ; [V3 V5]; [V3 V5] # 𝩜.-୍ⴋ +B; xn--792h.xn----bse820x; [V3 V5]; [V3 V5] # 𝩜.-୍ⴋ +B; xn--792h.xn----bse632b; [V3 V5 V6]; [V3 V5 V6] # 𝩜.-୍Ⴋ +T; ßჀ.\u0620刯Ⴝ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ßჀ.ؠ刯Ⴝ +N; ßჀ.\u0620刯Ⴝ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ßჀ.ؠ刯Ⴝ +T; ßⴠ.\u0620刯ⴝ; [B2 B3]; [B2 B3] # ßⴠ.ؠ刯ⴝ +N; ßⴠ.\u0620刯ⴝ; [B2 B3]; [B2 B3] # ßⴠ.ؠ刯ⴝ +B; SSჀ.\u0620刯Ⴝ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ssჀ.ؠ刯Ⴝ +B; ssⴠ.\u0620刯ⴝ; [B2 B3]; [B2 B3] # ssⴠ.ؠ刯ⴝ +B; Ssⴠ.\u0620刯Ⴝ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ssⴠ.ؠ刯Ⴝ +B; xn--ss-j81a.xn--fgb845cb66c; [B2 B3 V6]; [B2 B3 V6] # ssⴠ.ؠ刯Ⴝ +B; xn--ss-j81a.xn--fgb670rovy; [B2 B3]; [B2 B3] # ssⴠ.ؠ刯ⴝ +B; xn--ss-wgk.xn--fgb845cb66c; [B2 B3 V6]; [B2 B3 V6] # ssჀ.ؠ刯Ⴝ +B; xn--zca277t.xn--fgb670rovy; [B2 B3]; [B2 B3] # ßⴠ.ؠ刯ⴝ +B; xn--zca442f.xn--fgb845cb66c; [B2 B3 V6]; [B2 B3 V6] # ßჀ.ؠ刯Ⴝ +B; \u1BAAႣℲ。ᠳ툻\u0673; [B5 B6 P1 V5 V6]; [B5 B6 P1 V5 V6] # ᮪ႣℲ.ᠳ툻ٳ +B; \u1BAAႣℲ。ᠳ툻\u0673; [B5 B6 P1 V5 V6]; [B5 B6 P1 V5 V6] # ᮪ႣℲ.ᠳ툻ٳ +B; \u1BAAႣℲ。ᠳ툻\u0673; [B5 B6 P1 V5 V6]; [B5 B6 P1 V5 V6] # ᮪ႣℲ.ᠳ툻ٳ +B; \u1BAAႣℲ。ᠳ툻\u0673; [B5 B6 P1 V5 V6]; [B5 B6 P1 V5 V6] # ᮪ႣℲ.ᠳ툻ٳ +B; \u1BAAⴃⅎ。ᠳ툻\u0673; [B5 B6 V5]; [B5 B6 V5] # ᮪ⴃⅎ.ᠳ툻ٳ +B; \u1BAAⴃⅎ。ᠳ툻\u0673; [B5 B6 V5]; [B5 B6 V5] # ᮪ⴃⅎ.ᠳ툻ٳ +B; \u1BAAႣⅎ。ᠳ툻\u0673; [B5 B6 P1 V5 V6]; [B5 B6 P1 V5 V6] # ᮪Ⴃⅎ.ᠳ툻ٳ +B; \u1BAAႣⅎ。ᠳ툻\u0673; [B5 B6 P1 V5 V6]; [B5 B6 P1 V5 V6] # ᮪Ⴃⅎ.ᠳ툻ٳ +B; xn--bnd957c2pe.xn--sib102gc69k; [B5 B6 V5 V6]; [B5 B6 V5 V6] # ᮪Ⴃⅎ.ᠳ툻ٳ +B; xn--yxf24x4ol.xn--sib102gc69k; [B5 B6 V5]; [B5 B6 V5] # ᮪ⴃⅎ.ᠳ툻ٳ +B; xn--bnd957cone.xn--sib102gc69k; [B5 B6 V5 V6]; [B5 B6 V5 V6] # ᮪ႣℲ.ᠳ툻ٳ +B; \u1BAAⴃⅎ。ᠳ툻\u0673; [B5 B6 V5]; [B5 B6 V5] # ᮪ⴃⅎ.ᠳ툻ٳ +B; \u1BAAⴃⅎ。ᠳ툻\u0673; [B5 B6 V5]; [B5 B6 V5] # ᮪ⴃⅎ.ᠳ툻ٳ +B; \u1BAAႣⅎ。ᠳ툻\u0673; [B5 B6 P1 V5 V6]; [B5 B6 P1 V5 V6] # ᮪Ⴃⅎ.ᠳ툻ٳ +B; \u1BAAႣⅎ。ᠳ툻\u0673; [B5 B6 P1 V5 V6]; [B5 B6 P1 V5 V6] # ᮪Ⴃⅎ.ᠳ툻ٳ +B; \u06EC.\u08A2𐹫\u067C; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ۬.ࢢ𐹫ټ +B; xn--8lb.xn--1ib31ily45b; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ۬.ࢢ𐹫ټ +B; \u06B6\u06DF。₇\uA806; [B1]; [B1] # ڶ۟.7꠆ +B; \u06B6\u06DF。7\uA806; [B1]; [B1] # ڶ۟.7꠆ +B; xn--pkb6f.xn--7-x93e; [B1]; [B1] # ڶ۟.7꠆ +B; \u06B6\u06DF.7\uA806; [B1]; [B1] # ڶ۟.7꠆ +T; Ⴣ𐹻.\u200C𝪣≮󠩉; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 P1 V5 V6] # Ⴣ𐹻.𝪣≮ +N; Ⴣ𐹻.\u200C𝪣≮󠩉; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # Ⴣ𐹻.𝪣≮ +T; Ⴣ𐹻.\u200C𝪣<\u0338󠩉; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 P1 V5 V6] # Ⴣ𐹻.𝪣≮ +N; Ⴣ𐹻.\u200C𝪣<\u0338󠩉; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # Ⴣ𐹻.𝪣≮ +T; ⴣ𐹻.\u200C𝪣<\u0338󠩉; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 P1 V5 V6] # ⴣ𐹻.𝪣≮ +N; ⴣ𐹻.\u200C𝪣<\u0338󠩉; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # ⴣ𐹻.𝪣≮ +T; ⴣ𐹻.\u200C𝪣≮󠩉; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 P1 V5 V6] # ⴣ𐹻.𝪣≮ +N; ⴣ𐹻.\u200C𝪣≮󠩉; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # ⴣ𐹻.𝪣≮ +B; xn--rlj6323e.xn--gdh4944ob3x3e; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] +B; xn--rlj6323e.xn--0ugy6gn120eb103g; [B1 B5 B6 C1 V6]; [B1 B5 B6 C1 V6] # ⴣ𐹻.𝪣≮ +B; xn--7nd8101k.xn--gdh4944ob3x3e; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] +B; xn--7nd8101k.xn--0ugy6gn120eb103g; [B1 B5 B6 C1 V6]; [B1 B5 B6 C1 V6] # 
Ⴣ𐹻.𝪣≮ +T; 𝟵隁⯮.\u180D\u200C; [C1]; xn--9-mfs8024b. # 9隁⯮. +N; 𝟵隁⯮.\u180D\u200C; [C1]; [C1] # 9隁⯮. +T; 9隁⯮.\u180D\u200C; [C1]; xn--9-mfs8024b. # 9隁⯮. +N; 9隁⯮.\u180D\u200C; [C1]; [C1] # 9隁⯮. +B; xn--9-mfs8024b.; 9隁⯮.; xn--9-mfs8024b.; NV8 +B; 9隁⯮.; ; xn--9-mfs8024b.; NV8 +B; xn--9-mfs8024b.xn--0ug; [C1]; [C1] # 9隁⯮. +B; ⒏𐹧。Ⴣ\u0F84彦; [B1 P1 V6]; [B1 P1 V6] # ⒏𐹧.Ⴣ྄彦 +B; 8.𐹧。Ⴣ\u0F84彦; [B1 P1 V6]; [B1 P1 V6] # 8.𐹧.Ⴣ྄彦 +B; 8.𐹧。ⴣ\u0F84彦; [B1]; [B1] # 8.𐹧.ⴣ྄彦 +B; 8.xn--fo0d.xn--3ed972m6o8a; [B1]; [B1] # 8.𐹧.ⴣ྄彦 +B; 8.xn--fo0d.xn--3ed15dt93o; [B1 V6]; [B1 V6] # 8.𐹧.Ⴣ྄彦 +B; ⒏𐹧。ⴣ\u0F84彦; [B1 P1 V6]; [B1 P1 V6] # ⒏𐹧.ⴣ྄彦 +B; xn--0sh2466f.xn--3ed972m6o8a; [B1 V6]; [B1 V6] # ⒏𐹧.ⴣ྄彦 +B; xn--0sh2466f.xn--3ed15dt93o; [B1 V6]; [B1 V6] # ⒏𐹧.Ⴣ྄彦 +B; -问񬰔⒛。\u0604-񜗉橬; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -问⒛.-橬 +B; -问񬰔20.。\u0604-񜗉橬; [B1 P1 V3 V6 A4_2]; [B1 P1 V3 V6 A4_2] # -问20..-橬 +B; xn---20-658jx1776d..xn----ykc7228efm46d; [B1 V3 V6 A4_2]; [B1 V3 V6 A4_2] # -问20..-橬 +B; xn----hdpu849bhis3e.xn----ykc7228efm46d; [B1 V3 V6]; [B1 V3 V6] # -问⒛.-橬 +T; \u1BACႬ\u200C\u0325。𝟸; [C1 P1 V5 V6]; [P1 V5 V6] # ᮬႬ̥.2 +N; \u1BACႬ\u200C\u0325。𝟸; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ᮬႬ̥.2 +T; \u1BACႬ\u200C\u0325。2; [C1 P1 V5 V6]; [P1 V5 V6] # ᮬႬ̥.2 +N; \u1BACႬ\u200C\u0325。2; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ᮬႬ̥.2 +T; \u1BACⴌ\u200C\u0325。2; [C1 V5]; [V5] # ᮬⴌ̥.2 +N; \u1BACⴌ\u200C\u0325。2; [C1 V5]; [C1 V5] # ᮬⴌ̥.2 +B; xn--mta176jjjm.2; [V5]; [V5] # ᮬⴌ̥.2 +B; xn--mta176j97cl2q.2; [C1 V5]; [C1 V5] # ᮬⴌ̥.2 +B; xn--mta930emri.2; [V5 V6]; [V5 V6] # ᮬႬ̥.2 +B; xn--mta930emribme.2; [C1 V5 V6]; [C1 V5 V6] # ᮬႬ̥.2 +T; \u1BACⴌ\u200C\u0325。𝟸; [C1 V5]; [V5] # ᮬⴌ̥.2 +N; \u1BACⴌ\u200C\u0325。𝟸; [C1 V5]; [C1 V5] # ᮬⴌ̥.2 +B; \uDC5F。\uA806\u0669󠒩; [B1 P1 V5 V6]; [B1 P1 V5 V6 A3] # .꠆٩ +B; \uDC5F.xn--iib9583fusy0i; [B1 P1 V5 V6]; [B1 P1 V5 V6 A3] # .꠆٩ +B; \uDC5F.XN--IIB9583FUSY0I; [B1 P1 V5 V6]; [B1 P1 V5 V6 A3] # .꠆٩ +B; \uDC5F.Xn--Iib9583fusy0i; [B1 P1 V5 V6]; [B1 P1 V5 V6 A3] # .꠆٩ +B; 󠄁\u035F⾶。₇︒눇≮; [P1 V5 V6]; [P1 V5 V6] # ͟飛.7︒눇≮ +B; 󠄁\u035F⾶。₇︒눇<\u0338; [P1 V5 V6]; [P1 V5 V6] # ͟飛.7︒눇≮ +B; 󠄁\u035F飛。7。눇≮; [P1 V5 V6]; [P1 V5 V6] # ͟飛.7.눇≮ +B; 󠄁\u035F飛。7。눇<\u0338; [P1 V5 V6]; [P1 V5 V6] # ͟飛.7.눇≮ +B; xn--9ua0567e.7.xn--gdh6767c; [V5 V6]; [V5 V6] # ͟飛.7.눇≮ +B; xn--9ua0567e.xn--7-ngou006d1ttc; [V5 V6]; [V5 V6] # ͟飛.7︒눇≮ +T; \u200C\uFE09𐹴\u200D.\u200C⿃; [B1 C1 C2]; [B1] # 𐹴.鳥 +N; \u200C\uFE09𐹴\u200D.\u200C⿃; [B1 C1 C2]; [B1 C1 C2] # 𐹴.鳥 +T; \u200C\uFE09𐹴\u200D.\u200C鳥; [B1 C1 C2]; [B1] # 𐹴.鳥 +N; \u200C\uFE09𐹴\u200D.\u200C鳥; [B1 C1 C2]; [B1 C1 C2] # 𐹴.鳥 +B; xn--so0d.xn--6x6a; [B1]; [B1] +B; xn--0ugc6024p.xn--0ug1920c; [B1 C1 C2]; [B1 C1 C2] # 𐹴.鳥 +T; 🍮.\u200D󠗒𐦁𝨝; [B1 C2 P1 V6]; [B1 P1 V6] # 🍮.𐦁𝨝 +N; 🍮.\u200D󠗒𐦁𝨝; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 🍮.𐦁𝨝 +T; 🍮.\u200D󠗒𐦁𝨝; [B1 C2 P1 V6]; [B1 P1 V6] # 🍮.𐦁𝨝 +N; 🍮.\u200D󠗒𐦁𝨝; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 🍮.𐦁𝨝 +B; xn--lj8h.xn--ln9ci476aqmr2g; [B1 V6]; [B1 V6] +B; xn--lj8h.xn--1ug6603gr1pfwq37h; [B1 C2 V6]; [B1 C2 V6] # 🍮.𐦁𝨝 +T; \u067D\u0943.𞤓\u200D; [B3 C2]; xn--2ib43l.xn--te6h # ٽृ.𞤵 +N; \u067D\u0943.𞤓\u200D; [B3 C2]; [B3 C2] # ٽृ.𞤵 +T; \u067D\u0943.𞤵\u200D; [B3 C2]; xn--2ib43l.xn--te6h # ٽृ.𞤵 +N; \u067D\u0943.𞤵\u200D; [B3 C2]; [B3 C2] # ٽृ.𞤵 +B; xn--2ib43l.xn--te6h; \u067D\u0943.𞤵; xn--2ib43l.xn--te6h # ٽृ.𞤵 +B; \u067D\u0943.𞤵; ; xn--2ib43l.xn--te6h # ٽृ.𞤵 +B; \u067D\u0943.𞤓; \u067D\u0943.𞤵; xn--2ib43l.xn--te6h # ٽृ.𞤵 +B; xn--2ib43l.xn--1ugy711p; [B3 C2]; [B3 C2] # ٽृ.𞤵 +B; \u0664\u0A4D-.󥜽\u1039񦦐; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ٤੍-.္ +B; \u0664\u0A4D-.󥜽\u1039񦦐; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ٤੍-.္ +B; 
xn----gqc711a.xn--9jd88234f3qm0b; [B1 V3 V6]; [B1 V3 V6] # ٤੍-.္ +T; 4\u103A-𐹸。\uAA29\u200C𐹴≮; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # 4်-𐹸.ꨩ𐹴≮ +N; 4\u103A-𐹸。\uAA29\u200C𐹴≮; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 4်-𐹸.ꨩ𐹴≮ +T; 4\u103A-𐹸。\uAA29\u200C𐹴<\u0338; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # 4်-𐹸.ꨩ𐹴≮ +N; 4\u103A-𐹸。\uAA29\u200C𐹴<\u0338; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 4်-𐹸.ꨩ𐹴≮ +T; 4\u103A-𐹸。\uAA29\u200C𐹴≮; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # 4်-𐹸.ꨩ𐹴≮ +N; 4\u103A-𐹸。\uAA29\u200C𐹴≮; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 4်-𐹸.ꨩ𐹴≮ +T; 4\u103A-𐹸。\uAA29\u200C𐹴<\u0338; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # 4်-𐹸.ꨩ𐹴≮ +N; 4\u103A-𐹸。\uAA29\u200C𐹴<\u0338; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 4်-𐹸.ꨩ𐹴≮ +B; xn--4--e4j7831r.xn--gdh8754cz40c; [B1 V5 V6]; [B1 V5 V6] # 4်-𐹸.ꨩ𐹴≮ +B; xn--4--e4j7831r.xn--0ugy6gjy5sl3ud; [B1 C1 V5 V6]; [B1 C1 V5 V6] # 4်-𐹸.ꨩ𐹴≮ +T; \u200C。\uFFA0\u0F84\u0F96; [C1 P1 V6]; [P1 V6 A4_2] # .྄ྖ +N; \u200C。\uFFA0\u0F84\u0F96; [C1 P1 V6]; [C1 P1 V6] # .྄ྖ +T; \u200C。\u1160\u0F84\u0F96; [C1 P1 V6]; [P1 V6 A4_2] # .྄ྖ +N; \u200C。\u1160\u0F84\u0F96; [C1 P1 V6]; [C1 P1 V6] # .྄ྖ +B; .xn--3ed0b20h; [V6 A4_2]; [V6 A4_2] # .྄ྖ +B; xn--0ug.xn--3ed0b20h; [C1 V6]; [C1 V6] # .྄ྖ +B; .xn--3ed0by082k; [V6 A4_2]; [V6 A4_2] # .྄ྖ +B; xn--0ug.xn--3ed0by082k; [C1 V6]; [C1 V6] # .྄ྖ +T; ≯򍘅.\u200D𐅼򲇛; [C2 P1 V6]; [P1 V6] # ≯.𐅼 +N; ≯򍘅.\u200D𐅼򲇛; [C2 P1 V6]; [C2 P1 V6] # ≯.𐅼 +T; >\u0338򍘅.\u200D𐅼򲇛; [C2 P1 V6]; [P1 V6] # ≯.𐅼 +N; >\u0338򍘅.\u200D𐅼򲇛; [C2 P1 V6]; [C2 P1 V6] # ≯.𐅼 +T; ≯򍘅.\u200D𐅼򲇛; [C2 P1 V6]; [P1 V6] # ≯.𐅼 +N; ≯򍘅.\u200D𐅼򲇛; [C2 P1 V6]; [C2 P1 V6] # ≯.𐅼 +T; >\u0338򍘅.\u200D𐅼򲇛; [C2 P1 V6]; [P1 V6] # ≯.𐅼 +N; >\u0338򍘅.\u200D𐅼򲇛; [C2 P1 V6]; [C2 P1 V6] # ≯.𐅼 +B; xn--hdh84488f.xn--xy7cw2886b; [V6]; [V6] +B; xn--hdh84488f.xn--1ug8099fbjp4e; [C2 V6]; [C2 V6] # ≯.𐅼 +T; \u0641ß𐰯。𝟕𐫫; [B1 B2]; [B1 B2] # فß𐰯.7𐫫 +N; \u0641ß𐰯。𝟕𐫫; [B1 B2]; [B1 B2] # فß𐰯.7𐫫 +T; \u0641ß𐰯。7𐫫; [B1 B2]; [B1 B2] # فß𐰯.7𐫫 +N; \u0641ß𐰯。7𐫫; [B1 B2]; [B1 B2] # فß𐰯.7𐫫 +B; \u0641SS𐰯。7𐫫; [B1 B2]; [B1 B2] # فss𐰯.7𐫫 +B; \u0641ss𐰯。7𐫫; [B1 B2]; [B1 B2] # فss𐰯.7𐫫 +B; \u0641Ss𐰯。7𐫫; [B1 B2]; [B1 B2] # فss𐰯.7𐫫 +B; xn--ss-jvd2339x.xn--7-mm5i; [B1 B2]; [B1 B2] # فss𐰯.7𐫫 +B; xn--zca96ys96y.xn--7-mm5i; [B1 B2]; [B1 B2] # فß𐰯.7𐫫 +B; \u0641SS𐰯。𝟕𐫫; [B1 B2]; [B1 B2] # فss𐰯.7𐫫 +B; \u0641ss𐰯。𝟕𐫫; [B1 B2]; [B1 B2] # فss𐰯.7𐫫 +B; \u0641Ss𐰯。𝟕𐫫; [B1 B2]; [B1 B2] # فss𐰯.7𐫫 +T; ß\u07AC\u07A7\u08B1。𐭁􅮙𐹲; [B2 B5 B6 P1 V6]; [B2 B5 B6 P1 V6] # ßެާࢱ.𐭁𐹲 +N; ß\u07AC\u07A7\u08B1。𐭁􅮙𐹲; [B2 B5 B6 P1 V6]; [B2 B5 B6 P1 V6] # ßެާࢱ.𐭁𐹲 +B; SS\u07AC\u07A7\u08B1。𐭁􅮙𐹲; [B2 B5 B6 P1 V6]; [B2 B5 B6 P1 V6] # ssެާࢱ.𐭁𐹲 +B; ss\u07AC\u07A7\u08B1。𐭁􅮙𐹲; [B2 B5 B6 P1 V6]; [B2 B5 B6 P1 V6] # ssެާࢱ.𐭁𐹲 +B; Ss\u07AC\u07A7\u08B1。𐭁􅮙𐹲; [B2 B5 B6 P1 V6]; [B2 B5 B6 P1 V6] # ssެާࢱ.𐭁𐹲 +B; xn--ss-9qet02k.xn--e09co8cr9861c; [B2 B5 B6 V6]; [B2 B5 B6 V6] # ssެާࢱ.𐭁𐹲 +B; xn--zca685aoa95h.xn--e09co8cr9861c; [B2 B5 B6 V6]; [B2 B5 B6 V6] # ßެާࢱ.𐭁𐹲 +B; -。󠉗⒌𞯛; [B1 P1 V3 V6]; [B1 P1 V3 V6] +B; -。󠉗5.𞯛; [B1 P1 V3 V6]; [B1 P1 V3 V6] +B; -.xn--5-zz21m.xn--6x6h; [B1 V3 V6]; [B1 V3 V6] +B; -.xn--xsh6367n1bi3e; [B1 V3 V6]; [B1 V3 V6] +T; 𼎏ς.-≮\uFCAB; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ς.-≮خج +N; 𼎏ς.-≮\uFCAB; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ς.-≮خج +T; 𼎏ς.-<\u0338\uFCAB; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ς.-≮خج +N; 𼎏ς.-<\u0338\uFCAB; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ς.-≮خج +T; 𼎏ς.-≮\u062E\u062C; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ς.-≮خج +N; 𼎏ς.-≮\u062E\u062C; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ς.-≮خج +T; 𼎏ς.-<\u0338\u062E\u062C; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ς.-≮خج +N; 𼎏ς.-<\u0338\u062E\u062C; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ς.-≮خج +B; 𼎏Σ.-<\u0338\u062E\u062C; [B1 P1 
V3 V6]; [B1 P1 V3 V6] # σ.-≮خج +B; 𼎏Σ.-≮\u062E\u062C; [B1 P1 V3 V6]; [B1 P1 V3 V6] # σ.-≮خج +B; 𼎏σ.-≮\u062E\u062C; [B1 P1 V3 V6]; [B1 P1 V3 V6] # σ.-≮خج +B; 𼎏σ.-<\u0338\u062E\u062C; [B1 P1 V3 V6]; [B1 P1 V3 V6] # σ.-≮خج +B; xn--4xa92520c.xn----9mcf1400a; [B1 V3 V6]; [B1 V3 V6] # σ.-≮خج +B; xn--3xa13520c.xn----9mcf1400a; [B1 V3 V6]; [B1 V3 V6] # ς.-≮خج +B; 𼎏Σ.-<\u0338\uFCAB; [B1 P1 V3 V6]; [B1 P1 V3 V6] # σ.-≮خج +B; 𼎏Σ.-≮\uFCAB; [B1 P1 V3 V6]; [B1 P1 V3 V6] # σ.-≮خج +B; 𼎏σ.-≮\uFCAB; [B1 P1 V3 V6]; [B1 P1 V3 V6] # σ.-≮خج +B; 𼎏σ.-<\u0338\uFCAB; [B1 P1 V3 V6]; [B1 P1 V3 V6] # σ.-≮خج +B; ꡗ\u08B8\u0719.񔤔󠛙\u0C4D\uFC3E; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ꡗࢸܙ.్كي +B; ꡗ\u08B8\u0719.񔤔󠛙\u0C4D\u0643\u064A; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ꡗࢸܙ.్كي +B; xn--jnb34fs003a.xn--fhbo927bk128mpi24d; [B5 B6 V6]; [B5 B6 V6] # ꡗࢸܙ.్كي +B; 𐠰\u08B7𞤌𐫭。𐋦\u17CD𝩃; [B1]; [B1] # 𐠰ࢷ𞤮𐫭.𐋦៍𝩃 +B; 𐠰\u08B7𞤮𐫭。𐋦\u17CD𝩃; [B1]; [B1] # 𐠰ࢷ𞤮𐫭.𐋦៍𝩃 +B; xn--dzb5191kezbrw47a.xn--p4e3841jz9tf; [B1]; [B1] # 𐠰ࢷ𞤮𐫭.𐋦៍𝩃 +B; 𐠰\u08B7𞤮𐫭.𐋦\u17CD𝩃; [B1]; [B1] # 𐠰ࢷ𞤮𐫭.𐋦៍𝩃 +B; 𐠰\u08B7𞤌𐫭.𐋦\u17CD𝩃; [B1]; [B1] # 𐠰ࢷ𞤮𐫭.𐋦៍𝩃 +T; ₂㘷--。\u06D3\u200C𐫆𑖿; [B1 C1 V2 V3]; [B1 V2 V3] # 2㘷--.ۓ𐫆𑖿 +N; ₂㘷--。\u06D3\u200C𐫆𑖿; [B1 C1 V2 V3]; [B1 C1 V2 V3] # 2㘷--.ۓ𐫆𑖿 +T; ₂㘷--。\u06D2\u0654\u200C𐫆𑖿; [B1 C1 V2 V3]; [B1 V2 V3] # 2㘷--.ۓ𐫆𑖿 +N; ₂㘷--。\u06D2\u0654\u200C𐫆𑖿; [B1 C1 V2 V3]; [B1 C1 V2 V3] # 2㘷--.ۓ𐫆𑖿 +T; 2㘷--。\u06D3\u200C𐫆𑖿; [B1 C1 V2 V3]; [B1 V2 V3] # 2㘷--.ۓ𐫆𑖿 +N; 2㘷--。\u06D3\u200C𐫆𑖿; [B1 C1 V2 V3]; [B1 C1 V2 V3] # 2㘷--.ۓ𐫆𑖿 +T; 2㘷--。\u06D2\u0654\u200C𐫆𑖿; [B1 C1 V2 V3]; [B1 V2 V3] # 2㘷--.ۓ𐫆𑖿 +N; 2㘷--。\u06D2\u0654\u200C𐫆𑖿; [B1 C1 V2 V3]; [B1 C1 V2 V3] # 2㘷--.ۓ𐫆𑖿 +B; xn--2---u58b.xn--jlb8024k14g; [B1 V2 V3]; [B1 V2 V3] # 2㘷--.ۓ𐫆𑖿 +B; xn--2---u58b.xn--jlb820ku99nbgj; [B1 C1 V2 V3]; [B1 C1 V2 V3] # 2㘷--.ۓ𐫆𑖿 +B; -𘊻.ᡮ\u062D-; [B1 B5 B6 V3]; [B1 B5 B6 V3] # -𘊻.ᡮح- +B; -𘊻.ᡮ\u062D-; [B1 B5 B6 V3]; [B1 B5 B6 V3] # -𘊻.ᡮح- +B; xn----bp5n.xn----bnc231l; [B1 B5 B6 V3]; [B1 B5 B6 V3] # -𘊻.ᡮح- +T; \u200C-ß。ᢣ𐹭\u063F; [B1 B5 B6 C1]; [B1 B5 B6 V3] # -ß.ᢣ𐹭ؿ +N; \u200C-ß。ᢣ𐹭\u063F; [B1 B5 B6 C1]; [B1 B5 B6 C1] # -ß.ᢣ𐹭ؿ +T; \u200C-ß。ᢣ𐹭\u063F; [B1 B5 B6 C1]; [B1 B5 B6 V3] # -ß.ᢣ𐹭ؿ +N; \u200C-ß。ᢣ𐹭\u063F; [B1 B5 B6 C1]; [B1 B5 B6 C1] # -ß.ᢣ𐹭ؿ +T; \u200C-SS。ᢣ𐹭\u063F; [B1 B5 B6 C1]; [B1 B5 B6 V3] # -ss.ᢣ𐹭ؿ +N; \u200C-SS。ᢣ𐹭\u063F; [B1 B5 B6 C1]; [B1 B5 B6 C1] # -ss.ᢣ𐹭ؿ +T; \u200C-ss。ᢣ𐹭\u063F; [B1 B5 B6 C1]; [B1 B5 B6 V3] # -ss.ᢣ𐹭ؿ +N; \u200C-ss。ᢣ𐹭\u063F; [B1 B5 B6 C1]; [B1 B5 B6 C1] # -ss.ᢣ𐹭ؿ +T; \u200C-Ss。ᢣ𐹭\u063F; [B1 B5 B6 C1]; [B1 B5 B6 V3] # -ss.ᢣ𐹭ؿ +N; \u200C-Ss。ᢣ𐹭\u063F; [B1 B5 B6 C1]; [B1 B5 B6 C1] # -ss.ᢣ𐹭ؿ +B; -ss.xn--bhb925glx3p; [B1 B5 B6 V3]; [B1 B5 B6 V3] # -ss.ᢣ𐹭ؿ +B; xn---ss-8m0a.xn--bhb925glx3p; [B1 B5 B6 C1]; [B1 B5 B6 C1] # -ss.ᢣ𐹭ؿ +B; xn----qfa550v.xn--bhb925glx3p; [B1 B5 B6 C1]; [B1 B5 B6 C1] # -ß.ᢣ𐹭ؿ +T; \u200C-SS。ᢣ𐹭\u063F; [B1 B5 B6 C1]; [B1 B5 B6 V3] # -ss.ᢣ𐹭ؿ +N; \u200C-SS。ᢣ𐹭\u063F; [B1 B5 B6 C1]; [B1 B5 B6 C1] # -ss.ᢣ𐹭ؿ +T; \u200C-ss。ᢣ𐹭\u063F; [B1 B5 B6 C1]; [B1 B5 B6 V3] # -ss.ᢣ𐹭ؿ +N; \u200C-ss。ᢣ𐹭\u063F; [B1 B5 B6 C1]; [B1 B5 B6 C1] # -ss.ᢣ𐹭ؿ +T; \u200C-Ss。ᢣ𐹭\u063F; [B1 B5 B6 C1]; [B1 B5 B6 V3] # -ss.ᢣ𐹭ؿ +N; \u200C-Ss。ᢣ𐹭\u063F; [B1 B5 B6 C1]; [B1 B5 B6 C1] # -ss.ᢣ𐹭ؿ +B; ꧐Ӏ\u1BAA\u08F6.눵; [P1 V6]; [P1 V6] # ꧐Ӏ᮪ࣶ.눵 +B; ꧐Ӏ\u1BAA\u08F6.눵; [P1 V6]; [P1 V6] # ꧐Ӏ᮪ࣶ.눵 +B; ꧐Ӏ\u1BAA\u08F6.눵; [P1 V6]; [P1 V6] # ꧐Ӏ᮪ࣶ.눵 +B; ꧐Ӏ\u1BAA\u08F6.눵; [P1 V6]; [P1 V6] # ꧐Ӏ᮪ࣶ.눵 +B; ꧐ӏ\u1BAA\u08F6.눵; ꧐ӏ\u1BAA\u08F6.눵; xn--s5a04sn4u297k.xn--2e1b # ꧐ӏ᮪ࣶ.눵 +B; ꧐ӏ\u1BAA\u08F6.눵; ; xn--s5a04sn4u297k.xn--2e1b # ꧐ӏ᮪ࣶ.눵 +B; xn--s5a04sn4u297k.xn--2e1b; ꧐ӏ\u1BAA\u08F6.눵; xn--s5a04sn4u297k.xn--2e1b # ꧐ӏ᮪ࣶ.눵 +B; 
xn--d5a07sn4u297k.xn--2e1b; [V6]; [V6] # ꧐Ӏ᮪ࣶ.눵 +B; ꧐ӏ\u1BAA\u08F6.눵; ꧐ӏ\u1BAA\u08F6.눵; xn--s5a04sn4u297k.xn--2e1b # ꧐ӏ᮪ࣶ.눵 +B; ꧐ӏ\u1BAA\u08F6.눵; ꧐ӏ\u1BAA\u08F6.눵; xn--s5a04sn4u297k.xn--2e1b # ꧐ӏ᮪ࣶ.눵 +B; \uA8EA。𖄿𑆾󠇗; [P1 V5 V6]; [P1 V5 V6] # ꣪.𑆾 +B; \uA8EA。𖄿𑆾󠇗; [P1 V5 V6]; [P1 V5 V6] # ꣪.𑆾 +B; xn--3g9a.xn--ud1dz07k; [V5 V6]; [V5 V6] # ꣪.𑆾 +B; 󇓓𑚳。񐷿≯⾇; [P1 V6]; [P1 V6] +B; 󇓓𑚳。񐷿>\u0338⾇; [P1 V6]; [P1 V6] +B; 󇓓𑚳。񐷿≯舛; [P1 V6]; [P1 V6] +B; 󇓓𑚳。񐷿>\u0338舛; [P1 V6]; [P1 V6] +B; xn--3e2d79770c.xn--hdh0088abyy1c; [V6]; [V6] +T; 𐫇\u0661\u200C.\u200D\u200C; [B1 B3 C1 C2]; xn--9hb7344k. # 𐫇١. +N; 𐫇\u0661\u200C.\u200D\u200C; [B1 B3 C1 C2]; [B1 B3 C1 C2] # 𐫇١. +T; 𐫇\u0661\u200C.\u200D\u200C; [B1 B3 C1 C2]; xn--9hb7344k. # 𐫇١. +N; 𐫇\u0661\u200C.\u200D\u200C; [B1 B3 C1 C2]; [B1 B3 C1 C2] # 𐫇١. +B; xn--9hb7344k.; 𐫇\u0661.; xn--9hb7344k. # 𐫇١. +B; 𐫇\u0661.; ; xn--9hb7344k. # 𐫇١. +B; xn--9hb652kv99n.xn--0ugb; [B1 B3 C1 C2]; [B1 B3 C1 C2] # 𐫇١. +T; 񡅈砪≯ᢑ。≯𝩚򓴔\u200C; [C1 P1 V6]; [P1 V6] # 砪≯ᢑ.≯𝩚 +N; 񡅈砪≯ᢑ。≯𝩚򓴔\u200C; [C1 P1 V6]; [C1 P1 V6] # 砪≯ᢑ.≯𝩚 +T; 񡅈砪>\u0338ᢑ。>\u0338𝩚򓴔\u200C; [C1 P1 V6]; [P1 V6] # 砪≯ᢑ.≯𝩚 +N; 񡅈砪>\u0338ᢑ。>\u0338𝩚򓴔\u200C; [C1 P1 V6]; [C1 P1 V6] # 砪≯ᢑ.≯𝩚 +T; 񡅈砪≯ᢑ。≯𝩚򓴔\u200C; [C1 P1 V6]; [P1 V6] # 砪≯ᢑ.≯𝩚 +N; 񡅈砪≯ᢑ。≯𝩚򓴔\u200C; [C1 P1 V6]; [C1 P1 V6] # 砪≯ᢑ.≯𝩚 +T; 񡅈砪>\u0338ᢑ。>\u0338𝩚򓴔\u200C; [C1 P1 V6]; [P1 V6] # 砪≯ᢑ.≯𝩚 +N; 񡅈砪>\u0338ᢑ。>\u0338𝩚򓴔\u200C; [C1 P1 V6]; [C1 P1 V6] # 砪≯ᢑ.≯𝩚 +B; xn--bbf561cf95e57y3e.xn--hdh0834o7mj6b; [V6]; [V6] +B; xn--bbf561cf95e57y3e.xn--0ugz6gc910ejro8c; [C1 V6]; [C1 V6] # 砪≯ᢑ.≯𝩚 +B; Ⴥ.𑄳㊸; [P1 V5 V6]; [P1 V5 V6] +B; Ⴥ.𑄳43; [P1 V5 V6]; [P1 V5 V6] +B; ⴥ.𑄳43; [V5]; [V5] +B; xn--tlj.xn--43-274o; [V5]; [V5] +B; xn--9nd.xn--43-274o; [V5 V6]; [V5 V6] +B; ⴥ.𑄳㊸; [V5]; [V5] +B; 𝟎\u0663。Ⴒᡇ\u08F2𐹠; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # 0٣.Ⴒᡇࣲ𐹠 +B; 0\u0663。Ⴒᡇ\u08F2𐹠; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # 0٣.Ⴒᡇࣲ𐹠 +B; 0\u0663。ⴒᡇ\u08F2𐹠; [B1 B5 B6]; [B1 B5 B6] # 0٣.ⴒᡇࣲ𐹠 +B; xn--0-fqc.xn--10b369eivp359r; [B1 B5 B6]; [B1 B5 B6] # 0٣.ⴒᡇࣲ𐹠 +B; xn--0-fqc.xn--10b180bnwgfy0z; [B1 B5 B6 V6]; [B1 B5 B6 V6] # 0٣.Ⴒᡇࣲ𐹠 +B; 𝟎\u0663。ⴒᡇ\u08F2𐹠; [B1 B5 B6]; [B1 B5 B6] # 0٣.ⴒᡇࣲ𐹠 +B; 񗪨󠄉\uFFA0\u0FB7.񸞰\uA953; [P1 V6]; [P1 V6] # ྷ.꥓ +B; 񗪨󠄉\u1160\u0FB7.񸞰\uA953; [P1 V6]; [P1 V6] # ྷ.꥓ +B; xn--kgd36f9z57y.xn--3j9au7544a; [V6]; [V6] # ྷ.꥓ +B; xn--kgd7493jee34a.xn--3j9au7544a; [V6]; [V6] # ྷ.꥓ +T; \u0618.۳\u200C\uA953; [C1 V5]; [V5] # ؘ.۳꥓ +N; \u0618.۳\u200C\uA953; [C1 V5]; [C1 V5] # ؘ.۳꥓ +B; xn--6fb.xn--gmb0524f; [V5]; [V5] # ؘ.۳꥓ +B; xn--6fb.xn--gmb469jjf1h; [C1 V5]; [C1 V5] # ؘ.۳꥓ +B; ᡌ.︒ᢑ; [P1 V6]; [P1 V6] +B; ᡌ.。ᢑ; [A4_2]; [A4_2] +B; xn--c8e..xn--bbf; [A4_2]; [A4_2] +B; xn--c8e.xn--bbf9168i; [V6]; [V6] +B; 𑋪\u1073。𞽧; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # 𑋪ၳ. +B; 𑋪\u1073。𞽧; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # 𑋪ၳ. +B; xn--xld7443k.xn--4o7h; [B1 B3 B6 V5 V6]; [B1 B3 B6 V5 V6] # 𑋪ၳ. 
+B; 𞷏。ᠢ򓘆; [P1 V6]; [P1 V6] +B; xn--hd7h.xn--46e66060j; [V6]; [V6] +T; 𑄳㴼.\u200C𐹡\u20EB񫺦; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # 𑄳㴼.𐹡⃫ +N; 𑄳㴼.\u200C𐹡\u20EB񫺦; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 𑄳㴼.𐹡⃫ +T; 𑄳㴼.\u200C𐹡\u20EB񫺦; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # 𑄳㴼.𐹡⃫ +N; 𑄳㴼.\u200C𐹡\u20EB񫺦; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 𑄳㴼.𐹡⃫ +B; xn--iym9428c.xn--e1g3464g08p3b; [B1 V5 V6]; [B1 V5 V6] # 𑄳㴼.𐹡⃫ +B; xn--iym9428c.xn--0ug46a7218cllv0c; [B1 C1 V5 V6]; [B1 C1 V5 V6] # 𑄳㴼.𐹡⃫ +B; 񠻟𐹳𑈯。\u031D; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # 𐹳𑈯.̝ +B; 񠻟𐹳𑈯。\u031D; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # 𐹳𑈯.̝ +B; xn--ro0dw7dey96m.xn--eta; [B1 B3 B5 B6 V5 V6]; [B1 B3 B5 B6 V5 V6] # 𐹳𑈯.̝ +B; ᢊ뾜󠱴𑚶。\u089D𐹥; [P1 V6]; [P1 V6] # ᢊ뾜𑚶.𐹥 +B; ᢊ뾜󠱴𑚶。\u089D𐹥; [P1 V6]; [P1 V6] # ᢊ뾜𑚶.𐹥 +B; xn--39e4566fjv8bwmt6n.xn--myb6415k; [V6]; [V6] # ᢊ뾜𑚶.𐹥 +T; 𐹥≠。𐋲󠧠\u200C; [B1 C1 P1 V6]; [B1 P1 V6] # 𐹥≠.𐋲 +N; 𐹥≠。𐋲󠧠\u200C; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹥≠.𐋲 +T; 𐹥=\u0338。𐋲󠧠\u200C; [B1 C1 P1 V6]; [B1 P1 V6] # 𐹥≠.𐋲 +N; 𐹥=\u0338。𐋲󠧠\u200C; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹥≠.𐋲 +T; 𐹥≠。𐋲󠧠\u200C; [B1 C1 P1 V6]; [B1 P1 V6] # 𐹥≠.𐋲 +N; 𐹥≠。𐋲󠧠\u200C; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹥≠.𐋲 +T; 𐹥=\u0338。𐋲󠧠\u200C; [B1 C1 P1 V6]; [B1 P1 V6] # 𐹥≠.𐋲 +N; 𐹥=\u0338。𐋲󠧠\u200C; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹥≠.𐋲 +B; xn--1ch6704g.xn--m97cw2999c; [B1 V6]; [B1 V6] +B; xn--1ch6704g.xn--0ug3840g51u4g; [B1 C1 V6]; [B1 C1 V6] # 𐹥≠.𐋲 +T; \u115F񙯠\u094D.\u200D\uA953𐪤; [B1 C2 P1 V6]; [B5 B6 P1 V5 V6] # ्.꥓ +N; \u115F񙯠\u094D.\u200D\uA953𐪤; [B1 C2 P1 V6]; [B1 C2 P1 V6] # ्.꥓ +T; \u115F񙯠\u094D.\u200D\uA953𐪤; [B1 C2 P1 V6]; [B5 B6 P1 V5 V6] # ्.꥓ +N; \u115F񙯠\u094D.\u200D\uA953𐪤; [B1 C2 P1 V6]; [B1 C2 P1 V6] # ्.꥓ +B; xn--n3b542bb085j.xn--3j9al95p; [B5 B6 V5 V6]; [B5 B6 V5 V6] # ्.꥓ +B; xn--n3b542bb085j.xn--1ug6815co9wc; [B1 C2 V6]; [B1 C2 V6] # ्.꥓ +B; 򌋔󠆎󠆗𑲕。≮; [P1 V6]; [P1 V6] +B; 򌋔󠆎󠆗𑲕。<\u0338; [P1 V6]; [P1 V6] +B; xn--4m3dv4354a.xn--gdh; [V6]; [V6] +B; 󠆦.\u08E3暀≠; [P1 V5 V6 A4_2]; [P1 V5 V6 A4_2] # .ࣣ暀≠ +B; 󠆦.\u08E3暀=\u0338; [P1 V5 V6 A4_2]; [P1 V5 V6 A4_2] # .ࣣ暀≠ +B; .xn--m0b461k3g2c; [V5 V6 A4_2]; [V5 V6 A4_2] # .ࣣ暀≠ +B; 𐡤\uABED。\uFD30򜖅\u1DF0; [B2 B3 P1 V6]; [B2 B3 P1 V6] # 𐡤꯭.شمᷰ +B; 𐡤\uABED。\u0634\u0645򜖅\u1DF0; [B2 B3 P1 V6]; [B2 B3 P1 V6] # 𐡤꯭.شمᷰ +B; xn--429ak76o.xn--zgb8a701kox37t; [B2 B3 V6]; [B2 B3 V6] # 𐡤꯭.شمᷰ +T; 𝉃\u200D⒈。Ⴌ𞱓; [B1 B5 B6 C2 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 𝉃⒈.Ⴌ +N; 𝉃\u200D⒈。Ⴌ𞱓; [B1 B5 B6 C2 P1 V5 V6]; [B1 B5 B6 C2 P1 V5 V6] # 𝉃⒈.Ⴌ +T; 𝉃\u200D1.。Ⴌ𞱓; [B1 B5 B6 C2 P1 V5 V6 A4_2]; [B1 B5 B6 P1 V5 V6 A4_2] # 𝉃1..Ⴌ +N; 𝉃\u200D1.。Ⴌ𞱓; [B1 B5 B6 C2 P1 V5 V6 A4_2]; [B1 B5 B6 C2 P1 V5 V6 A4_2] # 𝉃1..Ⴌ +T; 𝉃\u200D1.。ⴌ𞱓; [B1 B5 B6 C2 P1 V5 V6 A4_2]; [B1 B5 B6 P1 V5 V6 A4_2] # 𝉃1..ⴌ +N; 𝉃\u200D1.。ⴌ𞱓; [B1 B5 B6 C2 P1 V5 V6 A4_2]; [B1 B5 B6 C2 P1 V5 V6 A4_2] # 𝉃1..ⴌ +B; xn--1-px8q..xn--3kj4524l; [B1 B5 B6 V5 V6 A4_2]; [B1 B5 B6 V5 V6 A4_2] +B; xn--1-tgn9827q..xn--3kj4524l; [B1 B5 B6 C2 V5 V6 A4_2]; [B1 B5 B6 C2 V5 V6 A4_2] # 𝉃1..ⴌ +B; xn--1-px8q..xn--knd8464v; [B1 B5 B6 V5 V6 A4_2]; [B1 B5 B6 V5 V6 A4_2] +B; xn--1-tgn9827q..xn--knd8464v; [B1 B5 B6 C2 V5 V6 A4_2]; [B1 B5 B6 C2 V5 V6 A4_2] # 𝉃1..Ⴌ +T; 𝉃\u200D⒈。ⴌ𞱓; [B1 B5 B6 C2 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 𝉃⒈.ⴌ +N; 𝉃\u200D⒈。ⴌ𞱓; [B1 B5 B6 C2 P1 V5 V6]; [B1 B5 B6 C2 P1 V5 V6] # 𝉃⒈.ⴌ +B; xn--tshz828m.xn--3kj4524l; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] +B; xn--1ug68oq348b.xn--3kj4524l; [B1 B5 B6 C2 V5 V6]; [B1 B5 B6 C2 V5 V6] # 𝉃⒈.ⴌ +B; xn--tshz828m.xn--knd8464v; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] +B; xn--1ug68oq348b.xn--knd8464v; [B1 B5 B6 C2 V5 V6]; [B1 B5 B6 C2 V5 V6] # 𝉃⒈.Ⴌ +T; 󠣙\u0A4D𱫘𞤸.ς񵯞􈰔; [B1 P1 V6]; [B1 P1 
V6] # ੍𞤸.ς +N; 󠣙\u0A4D𱫘𞤸.ς񵯞􈰔; [B1 P1 V6]; [B1 P1 V6] # ੍𞤸.ς +B; 󠣙\u0A4D𱫘𞤖.Σ񵯞􈰔; [B1 P1 V6]; [B1 P1 V6] # ੍𞤸.σ +B; 󠣙\u0A4D𱫘𞤸.σ񵯞􈰔; [B1 P1 V6]; [B1 P1 V6] # ੍𞤸.σ +B; 󠣙\u0A4D𱫘𞤖.σ񵯞􈰔; [B1 P1 V6]; [B1 P1 V6] # ੍𞤸.σ +B; xn--ybc0236vjvxgt5q0g.xn--4xa82737giye6b; [B1 V6]; [B1 V6] # ੍𞤸.σ +T; 󠣙\u0A4D𱫘𞤖.ς񵯞􈰔; [B1 P1 V6]; [B1 P1 V6] # ੍𞤸.ς +N; 󠣙\u0A4D𱫘𞤖.ς񵯞􈰔; [B1 P1 V6]; [B1 P1 V6] # ੍𞤸.ς +B; xn--ybc0236vjvxgt5q0g.xn--3xa03737giye6b; [B1 V6]; [B1 V6] # ੍𞤸.ς +B; 󠣙\u0A4D𱫘𞤸.Σ񵯞􈰔; [B1 P1 V6]; [B1 P1 V6] # ੍𞤸.σ +T; \u07D3。\u200C𐫀򞭱; [B1 C1 P1 V6]; [B2 B3 P1 V6] # ߓ.𐫀 +N; \u07D3。\u200C𐫀򞭱; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ߓ.𐫀 +B; xn--usb.xn--pw9ci1099a; [B2 B3 V6]; [B2 B3 V6] # ߓ.𐫀 +B; xn--usb.xn--0ug9553gm3v5d; [B1 C1 V6]; [B1 C1 V6] # ߓ.𐫀 +B; \u1C2E𞀝.\u05A6ꡟ𞤕󠆖; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ᰮ𞀝.֦ꡟ𞤷 +B; \u1C2E𞀝.\u05A6ꡟ𞤷󠆖; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ᰮ𞀝.֦ꡟ𞤷 +B; xn--q1f4493q.xn--xcb8244fifvj; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ᰮ𞀝.֦ꡟ𞤷 +T; 䂹󾖅𐋦.\u200D; [C2 P1 V6]; [P1 V6] # 䂹𐋦. +N; 䂹󾖅𐋦.\u200D; [C2 P1 V6]; [C2 P1 V6] # 䂹𐋦. +T; 䂹󾖅𐋦.\u200D; [C2 P1 V6]; [P1 V6] # 䂹𐋦. +N; 䂹󾖅𐋦.\u200D; [C2 P1 V6]; [C2 P1 V6] # 䂹𐋦. +B; xn--0on3543c5981i.; [V6]; [V6] +B; xn--0on3543c5981i.xn--1ug; [C2 V6]; [C2 V6] # 䂹𐋦. +T; \uA9C0\u200C𐹲\u200C。\u0767🄉; [B5 B6 C1 P1 V5 V6]; [B5 B6 P1 V5 V6] # ꧀𐹲.ݧ🄉 +N; \uA9C0\u200C𐹲\u200C。\u0767🄉; [B5 B6 C1 P1 V5 V6]; [B5 B6 C1 P1 V5 V6] # ꧀𐹲.ݧ🄉 +T; \uA9C0\u200C𐹲\u200C。\u07678,; [B3 B5 B6 C1 P1 V5 V6]; [B3 B5 B6 P1 V5 V6] # ꧀𐹲.ݧ8, +N; \uA9C0\u200C𐹲\u200C。\u07678,; [B3 B5 B6 C1 P1 V5 V6]; [B3 B5 B6 C1 P1 V5 V6] # ꧀𐹲.ݧ8, +B; xn--7m9an32q.xn--8,-qle; [B3 B5 B6 P1 V5 V6]; [B3 B5 B6 P1 V5 V6] # ꧀𐹲.ݧ8, +B; xn--0uga8686hdgvd.xn--8,-qle; [B3 B5 B6 C1 P1 V5 V6]; [B3 B5 B6 C1 P1 V5 V6] # ꧀𐹲.ݧ8, +B; xn--7m9an32q.xn--rpb6081w; [B5 B6 V5 V6]; [B5 B6 V5 V6] # ꧀𐹲.ݧ🄉 +B; xn--0uga8686hdgvd.xn--rpb6081w; [B5 B6 C1 V5 V6]; [B5 B6 C1 V5 V6] # ꧀𐹲.ݧ🄉 +B; ︒。Ⴃ≯; [P1 V6]; [P1 V6] +B; ︒。Ⴃ>\u0338; [P1 V6]; [P1 V6] +B; 。。Ⴃ≯; [P1 V6 A4_2]; [P1 V6 A4_2] +B; 。。Ⴃ>\u0338; [P1 V6 A4_2]; [P1 V6 A4_2] +B; 。。ⴃ>\u0338; [P1 V6 A4_2]; [P1 V6 A4_2] +B; 。。ⴃ≯; [P1 V6 A4_2]; [P1 V6 A4_2] +B; ..xn--hdh782b; [V6 A4_2]; [V6 A4_2] +B; ..xn--bnd622g; [V6 A4_2]; [V6 A4_2] +B; ︒。ⴃ>\u0338; [P1 V6]; [P1 V6] +B; ︒。ⴃ≯; [P1 V6]; [P1 V6] +B; xn--y86c.xn--hdh782b; [V6]; [V6] +B; xn--y86c.xn--bnd622g; [V6]; [V6] +T; 𐹮。󠢼\u200D; [B1 C2 P1 V6]; [B1 P1 V6] # 𐹮. +N; 𐹮。󠢼\u200D; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 𐹮. +T; 𐹮。󠢼\u200D; [B1 C2 P1 V6]; [B1 P1 V6] # 𐹮. +N; 𐹮。󠢼\u200D; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 𐹮. +B; xn--mo0d.xn--wy46e; [B1 V6]; [B1 V6] +B; xn--mo0d.xn--1ug18431l; [B1 C2 V6]; [B1 C2 V6] # 𐹮. 
+T; Ⴞ𐹨。︒\u077D\u200DႯ; [B1 B5 B6 C2 P1 V6]; [B1 B5 B6 P1 V6] # Ⴞ𐹨.︒ݽႯ +N; Ⴞ𐹨。︒\u077D\u200DႯ; [B1 B5 B6 C2 P1 V6]; [B1 B5 B6 C2 P1 V6] # Ⴞ𐹨.︒ݽႯ +T; Ⴞ𐹨。。\u077D\u200DႯ; [B2 B3 B5 B6 C2 P1 V6 A4_2]; [B2 B3 B5 B6 P1 V6 A4_2] # Ⴞ𐹨..ݽႯ +N; Ⴞ𐹨。。\u077D\u200DႯ; [B2 B3 B5 B6 C2 P1 V6 A4_2]; [B2 B3 B5 B6 C2 P1 V6 A4_2] # Ⴞ𐹨..ݽႯ +T; ⴞ𐹨。。\u077D\u200Dⴏ; [B2 B3 B5 B6 C2 A4_2]; [B2 B3 B5 B6 A4_2] # ⴞ𐹨..ݽⴏ +N; ⴞ𐹨。。\u077D\u200Dⴏ; [B2 B3 B5 B6 C2 A4_2]; [B2 B3 B5 B6 C2 A4_2] # ⴞ𐹨..ݽⴏ +B; xn--mlju223e..xn--eqb053q; [B2 B3 B5 B6 A4_2]; [B2 B3 B5 B6 A4_2] # ⴞ𐹨..ݽⴏ +B; xn--mlju223e..xn--eqb096jpgj; [B2 B3 B5 B6 C2 A4_2]; [B2 B3 B5 B6 C2 A4_2] # ⴞ𐹨..ݽⴏ +B; xn--2nd0990k..xn--eqb228b; [B2 B3 B5 B6 V6 A4_2]; [B2 B3 B5 B6 V6 A4_2] # Ⴞ𐹨..ݽႯ +B; xn--2nd0990k..xn--eqb228bgzm; [B2 B3 B5 B6 C2 V6 A4_2]; [B2 B3 B5 B6 C2 V6 A4_2] # Ⴞ𐹨..ݽႯ +T; ⴞ𐹨。︒\u077D\u200Dⴏ; [B1 B5 B6 C2 P1 V6]; [B1 B5 B6 P1 V6] # ⴞ𐹨.︒ݽⴏ +N; ⴞ𐹨。︒\u077D\u200Dⴏ; [B1 B5 B6 C2 P1 V6]; [B1 B5 B6 C2 P1 V6] # ⴞ𐹨.︒ݽⴏ +B; xn--mlju223e.xn--eqb053qjk7l; [B1 B5 B6 V6]; [B1 B5 B6 V6] # ⴞ𐹨.︒ݽⴏ +B; xn--mlju223e.xn--eqb096jpgj9y7r; [B1 B5 B6 C2 V6]; [B1 B5 B6 C2 V6] # ⴞ𐹨.︒ݽⴏ +B; xn--2nd0990k.xn--eqb228b583r; [B1 B5 B6 V6]; [B1 B5 B6 V6] # Ⴞ𐹨.︒ݽႯ +B; xn--2nd0990k.xn--eqb228bgzmvp0t; [B1 B5 B6 C2 V6]; [B1 B5 B6 C2 V6] # Ⴞ𐹨.︒ݽႯ +T; \u200CႦ𝟹。-\u20D2-\u07D1; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # Ⴆ3.-⃒-ߑ +N; \u200CႦ𝟹。-\u20D2-\u07D1; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # Ⴆ3.-⃒-ߑ +T; \u200CႦ3。-\u20D2-\u07D1; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # Ⴆ3.-⃒-ߑ +N; \u200CႦ3。-\u20D2-\u07D1; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # Ⴆ3.-⃒-ߑ +T; \u200Cⴆ3。-\u20D2-\u07D1; [B1 C1 V3]; [B1 V3] # ⴆ3.-⃒-ߑ +N; \u200Cⴆ3。-\u20D2-\u07D1; [B1 C1 V3]; [B1 C1 V3] # ⴆ3.-⃒-ߑ +B; xn--3-lvs.xn-----vue617w; [B1 V3]; [B1 V3] # ⴆ3.-⃒-ߑ +B; xn--3-rgnv99c.xn-----vue617w; [B1 C1 V3]; [B1 C1 V3] # ⴆ3.-⃒-ߑ +B; xn--3-i0g.xn-----vue617w; [B1 V3 V6]; [B1 V3 V6] # Ⴆ3.-⃒-ߑ +B; xn--3-i0g939i.xn-----vue617w; [B1 C1 V3 V6]; [B1 C1 V3 V6] # Ⴆ3.-⃒-ߑ +T; \u200Cⴆ𝟹。-\u20D2-\u07D1; [B1 C1 V3]; [B1 V3] # ⴆ3.-⃒-ߑ +N; \u200Cⴆ𝟹。-\u20D2-\u07D1; [B1 C1 V3]; [B1 C1 V3] # ⴆ3.-⃒-ߑ +B; 箃Ⴡ-󠁝。≠-🤖; [P1 V6]; [P1 V6] +B; 箃Ⴡ-󠁝。=\u0338-🤖; [P1 V6]; [P1 V6] +B; 箃Ⴡ-󠁝。≠-🤖; [P1 V6]; [P1 V6] +B; 箃Ⴡ-󠁝。=\u0338-🤖; [P1 V6]; [P1 V6] +B; 箃ⴡ-󠁝。=\u0338-🤖; [P1 V6]; [P1 V6] +B; 箃ⴡ-󠁝。≠-🤖; [P1 V6]; [P1 V6] +B; xn----4wsr321ay823p.xn----tfot873s; [V6]; [V6] +B; xn----11g3013fy8x5m.xn----tfot873s; [V6]; [V6] +B; 箃ⴡ-󠁝。=\u0338-🤖; [P1 V6]; [P1 V6] +B; 箃ⴡ-󠁝。≠-🤖; [P1 V6]; [P1 V6] +B; \u07E5.\u06B5; ; xn--dtb.xn--okb # ߥ.ڵ +B; xn--dtb.xn--okb; \u07E5.\u06B5; xn--dtb.xn--okb # ߥ.ڵ +T; \u200C\u200D.𞤿; [B1 C1 C2]; [A4_2] # .𞤿 +N; \u200C\u200D.𞤿; [B1 C1 C2]; [B1 C1 C2] # .𞤿 +T; \u200C\u200D.𞤝; [B1 C1 C2]; [A4_2] # .𞤿 +N; \u200C\u200D.𞤝; [B1 C1 C2]; [B1 C1 C2] # .𞤿 +B; .xn--3e6h; [A4_2]; [A4_2] +B; xn--0ugc.xn--3e6h; [B1 C1 C2]; [B1 C1 C2] # .𞤿 +B; xn--3e6h; 𞤿; xn--3e6h +B; 𞤿; ; xn--3e6h +B; 𞤝; 𞤿; xn--3e6h +T; 🜑𐹧\u0639.ς𑍍蜹; [B1]; [B1] # 🜑𐹧ع.ς𑍍蜹 +N; 🜑𐹧\u0639.ς𑍍蜹; [B1]; [B1] # 🜑𐹧ع.ς𑍍蜹 +B; 🜑𐹧\u0639.Σ𑍍蜹; [B1]; [B1] # 🜑𐹧ع.σ𑍍蜹 +B; 🜑𐹧\u0639.σ𑍍蜹; [B1]; [B1] # 🜑𐹧ع.σ𑍍蜹 +B; xn--4gb3736kk4zf.xn--4xa2248dy27d; [B1]; [B1] # 🜑𐹧ع.σ𑍍蜹 +B; xn--4gb3736kk4zf.xn--3xa4248dy27d; [B1]; [B1] # 🜑𐹧ع.ς𑍍蜹 +B; 򫠐ス􆟤\u0669.󚃟; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ス٩. +B; 򫠐ス􆟤\u0669.󚃟; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ス٩. +B; xn--iib777sp230oo708a.xn--7824e; [B5 B6 V6]; [B5 B6 V6] # ス٩. 
+B; 𝪣򕡝.\u059A\uD850\u06C2; [B1 P1 V5 V6]; [B1 P1 V5 V6 A3] # 𝪣.֚ۂ +B; 𝪣򕡝.\u059A\uD850\u06C1\u0654; [B1 P1 V5 V6]; [B1 P1 V5 V6 A3] # 𝪣.֚ۂ +B; 𝪣򕡝.\u059A\uD850\u06C2; [B1 P1 V5 V6]; [B1 P1 V5 V6 A3] # 𝪣.֚ۂ +B; 𝪣򕡝.\u059A\uD850\u06C1\u0654; [B1 P1 V5 V6]; [B1 P1 V5 V6 A3] # 𝪣.֚ۂ +B; xn--8c3hu7971a.\u059A\uD850\u06C2; [B1 P1 V5 V6]; [B1 P1 V5 V6 A3] # 𝪣.֚ۂ +B; xn--8c3hu7971a.\u059A\uD850\u06C1\u0654; [B1 P1 V5 V6]; [B1 P1 V5 V6 A3] # 𝪣.֚ۂ +B; XN--8C3HU7971A.\u059A\uD850\u06C1\u0654; [B1 P1 V5 V6]; [B1 P1 V5 V6 A3] # 𝪣.֚ۂ +B; XN--8C3HU7971A.\u059A\uD850\u06C2; [B1 P1 V5 V6]; [B1 P1 V5 V6 A3] # 𝪣.֚ۂ +B; Xn--8C3hu7971a.\u059A\uD850\u06C2; [B1 P1 V5 V6]; [B1 P1 V5 V6 A3] # 𝪣.֚ۂ +B; Xn--8C3hu7971a.\u059A\uD850\u06C1\u0654; [B1 P1 V5 V6]; [B1 P1 V5 V6 A3] # 𝪣.֚ۂ +T; \u0660򪓵\u200C。\u0757; [B1 C1 P1 V6]; [B1 P1 V6] # ٠.ݗ +N; \u0660򪓵\u200C。\u0757; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ٠.ݗ +B; xn--8hb82030l.xn--bpb; [B1 V6]; [B1 V6] # ٠.ݗ +B; xn--8hb852ke991q.xn--bpb; [B1 C1 V6]; [B1 C1 V6] # ٠.ݗ +T; \u103A\u200D\u200C。-\u200C; [C1 V3 V5]; [V3 V5] # ်.- +N; \u103A\u200D\u200C。-\u200C; [C1 V3 V5]; [C1 V3 V5] # ်.- +B; xn--bkd.-; [V3 V5]; [V3 V5] # ်.- +B; xn--bkd412fca.xn----sgn; [C1 V3 V5]; [C1 V3 V5] # ်.- +B; ︒。\u1B44ᡉ; [P1 V5 V6]; [P1 V5 V6] # ︒.᭄ᡉ +B; 。。\u1B44ᡉ; [V5 A4_2]; [V5 A4_2] # ..᭄ᡉ +B; ..xn--87e93m; [V5 A4_2]; [V5 A4_2] # ..᭄ᡉ +B; xn--y86c.xn--87e93m; [V5 V6]; [V5 V6] # ︒.᭄ᡉ +T; \u0758ß。ጫᢊ\u0768𝟐; [B2 B3 B5]; [B2 B3 B5] # ݘß.ጫᢊݨ2 +N; \u0758ß。ጫᢊ\u0768𝟐; [B2 B3 B5]; [B2 B3 B5] # ݘß.ጫᢊݨ2 +T; \u0758ß。ጫᢊ\u07682; [B2 B3 B5]; [B2 B3 B5] # ݘß.ጫᢊݨ2 +N; \u0758ß。ጫᢊ\u07682; [B2 B3 B5]; [B2 B3 B5] # ݘß.ጫᢊݨ2 +B; \u0758SS。ጫᢊ\u07682; [B2 B3 B5]; [B2 B3 B5] # ݘss.ጫᢊݨ2 +B; \u0758ss。ጫᢊ\u07682; [B2 B3 B5]; [B2 B3 B5] # ݘss.ጫᢊݨ2 +B; \u0758Ss。ጫᢊ\u07682; [B2 B3 B5]; [B2 B3 B5] # ݘss.ጫᢊݨ2 +B; xn--ss-gke.xn--2-b5c641gfmf; [B2 B3 B5]; [B2 B3 B5] # ݘss.ጫᢊݨ2 +B; xn--zca724a.xn--2-b5c641gfmf; [B2 B3 B5]; [B2 B3 B5] # ݘß.ጫᢊݨ2 +B; \u0758SS。ጫᢊ\u0768𝟐; [B2 B3 B5]; [B2 B3 B5] # ݘss.ጫᢊݨ2 +B; \u0758ss。ጫᢊ\u0768𝟐; [B2 B3 B5]; [B2 B3 B5] # ݘss.ጫᢊݨ2 +B; \u0758Ss。ጫᢊ\u0768𝟐; [B2 B3 B5]; [B2 B3 B5] # ݘss.ጫᢊݨ2 +B; \u07C3𞶇ᚲ.\u0902\u0353𝟚\u09CD; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # ߃ᚲ.ं͓2্ +B; \u07C3𞶇ᚲ.\u0902\u03532\u09CD; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # ߃ᚲ.ं͓2্ +B; xn--esb067enh07a.xn--2-lgb874bjxa; [B1 B2 B3 V5 V6]; [B1 B2 B3 V5 V6] # ߃ᚲ.ं͓2্ +T; -\u1BAB︒\u200D.񒶈񥹓; [C2 P1 V3 V6]; [P1 V3 V6] # -᮫︒. +N; -\u1BAB︒\u200D.񒶈񥹓; [C2 P1 V3 V6]; [C2 P1 V3 V6] # -᮫︒. +T; -\u1BAB。\u200D.񒶈񥹓; [C2 P1 V3 V6]; [P1 V3 V6 A4_2] # -᮫.. +N; -\u1BAB。\u200D.񒶈񥹓; [C2 P1 V3 V6]; [C2 P1 V3 V6] # -᮫.. +B; xn----qml..xn--x50zy803a; [V3 V6 A4_2]; [V3 V6 A4_2] # -᮫.. +B; xn----qml.xn--1ug.xn--x50zy803a; [C2 V3 V6]; [C2 V3 V6] # -᮫.. +B; xn----qml1407i.xn--x50zy803a; [V3 V6]; [V3 V6] # -᮫︒. +B; xn----qmlv7tw180a.xn--x50zy803a; [C2 V3 V6]; [C2 V3 V6] # -᮫︒. 
+B; 󠦮.≯𞀆; [P1 V6]; [P1 V6] +B; 󠦮.>\u0338𞀆; [P1 V6]; [P1 V6] +B; xn--t546e.xn--hdh5166o; [V6]; [V6] +B; -𑄳󠊗𐹩。𞮱; [B1 P1 V3 V6]; [B1 P1 V3 V6] +B; xn----p26i72em2894c.xn--zw6h; [B1 V3 V6]; [B1 V3 V6] +B; \u06B9.ᡳ\u115F; [P1 V6]; [P1 V6] # ڹ.ᡳ +B; \u06B9.ᡳ\u115F; [P1 V6]; [P1 V6] # ڹ.ᡳ +B; xn--skb.xn--osd737a; [V6]; [V6] # ڹ.ᡳ +B; 㨛𘱎.︒𝟕\u0D01; [P1 V6]; [P1 V6] # 㨛.︒7ഁ +B; 㨛𘱎.。7\u0D01; [P1 V6 A4_2]; [P1 V6 A4_2] # 㨛..7ഁ +B; xn--mbm8237g..xn--7-7hf; [V6 A4_2]; [V6 A4_2] # 㨛..7ഁ +B; xn--mbm8237g.xn--7-7hf1526p; [V6]; [V6] # 㨛.︒7ഁ +B; \u06DD𻱧-。𞷁\u2064𞤣≮; [B1 B3 P1 V3 V6]; [B1 B3 P1 V3 V6] # -.𞤣≮ +B; \u06DD𻱧-。𞷁\u2064𞤣<\u0338; [B1 B3 P1 V3 V6]; [B1 B3 P1 V3 V6] # -.𞤣≮ +B; \u06DD𻱧-。𞷁\u2064𞤣≮; [B1 B3 P1 V3 V6]; [B1 B3 P1 V3 V6] # -.𞤣≮ +B; \u06DD𻱧-。𞷁\u2064𞤣<\u0338; [B1 B3 P1 V3 V6]; [B1 B3 P1 V3 V6] # -.𞤣≮ +B; \u06DD𻱧-。𞷁\u2064𞤁<\u0338; [B1 B3 P1 V3 V6]; [B1 B3 P1 V3 V6] # -.𞤣≮ +B; \u06DD𻱧-。𞷁\u2064𞤁≮; [B1 B3 P1 V3 V6]; [B1 B3 P1 V3 V6] # -.𞤣≮ +B; xn----dxc06304e.xn--gdh5020pk5c; [B1 B3 V3 V6]; [B1 B3 V3 V6] # -.𞤣≮ +B; \u06DD𻱧-。𞷁\u2064𞤁<\u0338; [B1 B3 P1 V3 V6]; [B1 B3 P1 V3 V6] # -.𞤣≮ +B; \u06DD𻱧-。𞷁\u2064𞤁≮; [B1 B3 P1 V3 V6]; [B1 B3 P1 V3 V6] # -.𞤣≮ +T; ß\u200C\uAAF6ᢥ.⊶ჁႶ; [C1 P1 V6]; [P1 V6] # ß꫶ᢥ.⊶ჁႶ +N; ß\u200C\uAAF6ᢥ.⊶ჁႶ; [C1 P1 V6]; [C1 P1 V6] # ß꫶ᢥ.⊶ჁႶ +T; ß\u200C\uAAF6ᢥ.⊶ჁႶ; [C1 P1 V6]; [P1 V6] # ß꫶ᢥ.⊶ჁႶ +N; ß\u200C\uAAF6ᢥ.⊶ჁႶ; [C1 P1 V6]; [C1 P1 V6] # ß꫶ᢥ.⊶ჁႶ +T; ß\u200C\uAAF6ᢥ.⊶ⴡⴖ; [C1]; xn--ss-4epx629f.xn--ifh802b6a # ß꫶ᢥ.⊶ⴡⴖ +N; ß\u200C\uAAF6ᢥ.⊶ⴡⴖ; [C1]; [C1] # ß꫶ᢥ.⊶ⴡⴖ +T; SS\u200C\uAAF6ᢥ.⊶ჁႶ; [C1 P1 V6]; [P1 V6] # ss꫶ᢥ.⊶ჁႶ +N; SS\u200C\uAAF6ᢥ.⊶ჁႶ; [C1 P1 V6]; [C1 P1 V6] # ss꫶ᢥ.⊶ჁႶ +T; ss\u200C\uAAF6ᢥ.⊶ⴡⴖ; [C1]; xn--ss-4epx629f.xn--ifh802b6a # ss꫶ᢥ.⊶ⴡⴖ +N; ss\u200C\uAAF6ᢥ.⊶ⴡⴖ; [C1]; [C1] # ss꫶ᢥ.⊶ⴡⴖ +T; Ss\u200C\uAAF6ᢥ.⊶Ⴡⴖ; [C1 P1 V6]; [P1 V6] # ss꫶ᢥ.⊶Ⴡⴖ +N; Ss\u200C\uAAF6ᢥ.⊶Ⴡⴖ; [C1 P1 V6]; [C1 P1 V6] # ss꫶ᢥ.⊶Ⴡⴖ +B; xn--ss-4epx629f.xn--5nd703gyrh; [V6]; [V6] # ss꫶ᢥ.⊶Ⴡⴖ +B; xn--ss-4ep585bkm5p.xn--5nd703gyrh; [C1 V6]; [C1 V6] # ss꫶ᢥ.⊶Ⴡⴖ +B; xn--ss-4epx629f.xn--ifh802b6a; ss\uAAF6ᢥ.⊶ⴡⴖ; xn--ss-4epx629f.xn--ifh802b6a; NV8 # ss꫶ᢥ.⊶ⴡⴖ +B; ss\uAAF6ᢥ.⊶ⴡⴖ; ; xn--ss-4epx629f.xn--ifh802b6a; NV8 # ss꫶ᢥ.⊶ⴡⴖ +B; SS\uAAF6ᢥ.⊶ჁႶ; [P1 V6]; [P1 V6] # ss꫶ᢥ.⊶ჁႶ +B; Ss\uAAF6ᢥ.⊶Ⴡⴖ; [P1 V6]; [P1 V6] # ss꫶ᢥ.⊶Ⴡⴖ +B; xn--ss-4epx629f.xn--undv409k; [V6]; [V6] # ss꫶ᢥ.⊶ჁႶ +B; xn--ss-4ep585bkm5p.xn--ifh802b6a; [C1]; [C1] # ss꫶ᢥ.⊶ⴡⴖ +B; xn--ss-4ep585bkm5p.xn--undv409k; [C1 V6]; [C1 V6] # ss꫶ᢥ.⊶ჁႶ +B; xn--zca682johfi89m.xn--ifh802b6a; [C1]; [C1] # ß꫶ᢥ.⊶ⴡⴖ +B; xn--zca682johfi89m.xn--undv409k; [C1 V6]; [C1 V6] # ß꫶ᢥ.⊶ჁႶ +T; ß\u200C\uAAF6ᢥ.⊶ⴡⴖ; [C1]; xn--ss-4epx629f.xn--ifh802b6a # ß꫶ᢥ.⊶ⴡⴖ +N; ß\u200C\uAAF6ᢥ.⊶ⴡⴖ; [C1]; [C1] # ß꫶ᢥ.⊶ⴡⴖ +T; SS\u200C\uAAF6ᢥ.⊶ჁႶ; [C1 P1 V6]; [P1 V6] # ss꫶ᢥ.⊶ჁႶ +N; SS\u200C\uAAF6ᢥ.⊶ჁႶ; [C1 P1 V6]; [C1 P1 V6] # ss꫶ᢥ.⊶ჁႶ +T; ss\u200C\uAAF6ᢥ.⊶ⴡⴖ; [C1]; xn--ss-4epx629f.xn--ifh802b6a # ss꫶ᢥ.⊶ⴡⴖ +N; ss\u200C\uAAF6ᢥ.⊶ⴡⴖ; [C1]; [C1] # ss꫶ᢥ.⊶ⴡⴖ +T; Ss\u200C\uAAF6ᢥ.⊶Ⴡⴖ; [C1 P1 V6]; [P1 V6] # ss꫶ᢥ.⊶Ⴡⴖ +N; Ss\u200C\uAAF6ᢥ.⊶Ⴡⴖ; [C1 P1 V6]; [C1 P1 V6] # ss꫶ᢥ.⊶Ⴡⴖ +T; \u200D。ς󠁉; [C2 P1 V6]; [P1 V6 A4_2] # .ς +N; \u200D。ς󠁉; [C2 P1 V6]; [C2 P1 V6] # .ς +T; \u200D。Σ󠁉; [C2 P1 V6]; [P1 V6 A4_2] # .σ +N; \u200D。Σ󠁉; [C2 P1 V6]; [C2 P1 V6] # .σ +T; \u200D。σ󠁉; [C2 P1 V6]; [P1 V6 A4_2] # .σ +N; \u200D。σ󠁉; [C2 P1 V6]; [C2 P1 V6] # .σ +B; .xn--4xa24344p; [V6 A4_2]; [V6 A4_2] +B; xn--1ug.xn--4xa24344p; [C2 V6]; [C2 V6] # .σ +B; xn--1ug.xn--3xa44344p; [C2 V6]; [C2 V6] # .ς +T; 𞵑ß.\u0751\u200D𞤛-; [B2 B3 C2 P1 V3 V6]; [B2 B3 P1 V3 V6] # ß.ݑ𞤽- +N; 𞵑ß.\u0751\u200D𞤛-; [B2 B3 C2 P1 V3 V6]; [B2 B3 C2 P1 V3 V6] # ß.ݑ𞤽- +T; 
𞵑ß.\u0751\u200D𞤽-; [B2 B3 C2 P1 V3 V6]; [B2 B3 P1 V3 V6] # ß.ݑ𞤽- +N; 𞵑ß.\u0751\u200D𞤽-; [B2 B3 C2 P1 V3 V6]; [B2 B3 C2 P1 V3 V6] # ß.ݑ𞤽- +T; 𞵑SS.\u0751\u200D𞤛-; [B2 B3 C2 P1 V3 V6]; [B2 B3 P1 V3 V6] # ss.ݑ𞤽- +N; 𞵑SS.\u0751\u200D𞤛-; [B2 B3 C2 P1 V3 V6]; [B2 B3 C2 P1 V3 V6] # ss.ݑ𞤽- +T; 𞵑ss.\u0751\u200D𞤽-; [B2 B3 C2 P1 V3 V6]; [B2 B3 P1 V3 V6] # ss.ݑ𞤽- +N; 𞵑ss.\u0751\u200D𞤽-; [B2 B3 C2 P1 V3 V6]; [B2 B3 C2 P1 V3 V6] # ss.ݑ𞤽- +T; 𞵑Ss.\u0751\u200D𞤽-; [B2 B3 C2 P1 V3 V6]; [B2 B3 P1 V3 V6] # ss.ݑ𞤽- +N; 𞵑Ss.\u0751\u200D𞤽-; [B2 B3 C2 P1 V3 V6]; [B2 B3 C2 P1 V3 V6] # ss.ݑ𞤽- +B; xn--ss-2722a.xn----z3c03218a; [B2 B3 V3 V6]; [B2 B3 V3 V6] # ss.ݑ𞤽- +B; xn--ss-2722a.xn----z3c011q9513b; [B2 B3 C2 V3 V6]; [B2 B3 C2 V3 V6] # ss.ݑ𞤽- +B; xn--zca5423w.xn----z3c011q9513b; [B2 B3 C2 V3 V6]; [B2 B3 C2 V3 V6] # ß.ݑ𞤽- +T; 𞵑ss.\u0751\u200D𞤛-; [B2 B3 C2 P1 V3 V6]; [B2 B3 P1 V3 V6] # ss.ݑ𞤽- +N; 𞵑ss.\u0751\u200D𞤛-; [B2 B3 C2 P1 V3 V6]; [B2 B3 C2 P1 V3 V6] # ss.ݑ𞤽- +T; 𞵑Ss.\u0751\u200D𞤛-; [B2 B3 C2 P1 V3 V6]; [B2 B3 P1 V3 V6] # ss.ݑ𞤽- +N; 𞵑Ss.\u0751\u200D𞤛-; [B2 B3 C2 P1 V3 V6]; [B2 B3 C2 P1 V3 V6] # ss.ݑ𞤽- +T; 𑘽\u200D𞤧.𐹧󡦪-; [B1 C2 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # 𑘽𞤧.𐹧- +N; 𑘽\u200D𞤧.𐹧󡦪-; [B1 C2 P1 V3 V5 V6]; [B1 C2 P1 V3 V5 V6] # 𑘽𞤧.𐹧- +T; 𑘽\u200D𞤧.𐹧󡦪-; [B1 C2 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # 𑘽𞤧.𐹧- +N; 𑘽\u200D𞤧.𐹧󡦪-; [B1 C2 P1 V3 V5 V6]; [B1 C2 P1 V3 V5 V6] # 𑘽𞤧.𐹧- +T; 𑘽\u200D𞤅.𐹧󡦪-; [B1 C2 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # 𑘽𞤧.𐹧- +N; 𑘽\u200D𞤅.𐹧󡦪-; [B1 C2 P1 V3 V5 V6]; [B1 C2 P1 V3 V5 V6] # 𑘽𞤧.𐹧- +B; xn--qb2ds317a.xn----k26iq1483f; [B1 V3 V5 V6]; [B1 V3 V5 V6] +B; xn--1ugz808gdimf.xn----k26iq1483f; [B1 C2 V3 V5 V6]; [B1 C2 V3 V5 V6] # 𑘽𞤧.𐹧- +T; 𑘽\u200D𞤅.𐹧󡦪-; [B1 C2 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # 𑘽𞤧.𐹧- +N; 𑘽\u200D𞤅.𐹧󡦪-; [B1 C2 P1 V3 V5 V6]; [B1 C2 P1 V3 V5 V6] # 𑘽𞤧.𐹧- +B; ⒒򨘙򳳠𑓀.-󞡊; [P1 V3 V6]; [P1 V3 V6] +B; 11.򨘙򳳠𑓀.-󞡊; [P1 V3 V6]; [P1 V3 V6] +B; 11.xn--uz1d59632bxujd.xn----x310m; [V3 V6]; [V3 V6] +B; xn--3shy698frsu9dt1me.xn----x310m; [V3 V6]; [V3 V6] +T; -。\u200D; [C2 V3]; [V3] # -. +N; -。\u200D; [C2 V3]; [C2 V3] # -. +T; -。\u200D; [C2 V3]; [V3] # -. +N; -。\u200D; [C2 V3]; [C2 V3] # -. +B; -.; [V3]; [V3] +B; -.xn--1ug; [C2 V3]; [C2 V3] # -. 
+T; ≮ᡬ.ς¹-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.ς1- +N; ≮ᡬ.ς¹-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.ς1- +T; <\u0338ᡬ.ς¹-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.ς1- +N; <\u0338ᡬ.ς¹-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.ς1- +T; ≮ᡬ.ς1-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.ς1- +N; ≮ᡬ.ς1-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.ς1- +T; <\u0338ᡬ.ς1-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.ς1- +N; <\u0338ᡬ.ς1-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.ς1- +B; <\u0338ᡬ.Σ1-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.σ1- +B; ≮ᡬ.Σ1-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.σ1- +B; ≮ᡬ.σ1-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.σ1- +B; <\u0338ᡬ.σ1-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.σ1- +B; xn--88e732c.σ1-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.σ1- +B; XN--88E732C.Σ1-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.σ1- +T; xn--88e732c.ς1-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.ς1- +N; xn--88e732c.ς1-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.ς1- +T; Xn--88E732c.ς1-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.ς1- +N; Xn--88E732c.ς1-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.ς1- +B; Xn--88E732c.σ1-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.σ1- +B; <\u0338ᡬ.Σ¹-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.σ1- +B; ≮ᡬ.Σ¹-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.σ1- +B; ≮ᡬ.σ¹-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.σ1- +B; <\u0338ᡬ.σ¹-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.σ1- +B; ቬ򔠼񁗶。𐨬𝟠; [P1 V6]; [P1 V6] +B; ቬ򔠼񁗶。𐨬8; [P1 V6]; [P1 V6] +B; xn--d0d41273c887z.xn--8-ob5i; [V6]; [V6] +B; 𐱲。蔫\u0766; [B5 B6 P1 V6]; [B5 B6 P1 V6] # .蔫ݦ +B; xn--389c.xn--qpb7055d; [B5 B6 V6]; [B5 B6 V6] # .蔫ݦ +B; 򒲧₃。ꡚ𛇑󠄳\u0647; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 3.ꡚ𛇑ه +B; 򒲧3。ꡚ𛇑󠄳\u0647; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 3.ꡚ𛇑ه +B; xn--3-ep59g.xn--jhb5904fcp0h; [B5 B6 V6]; [B5 B6 V6] # 3.ꡚ𛇑ه +T; 蓸\u0642≠.ß; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 蓸ق≠.ß +N; 蓸\u0642≠.ß; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 蓸ق≠.ß +T; 蓸\u0642=\u0338.ß; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 蓸ق≠.ß +N; 蓸\u0642=\u0338.ß; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 蓸ق≠.ß +B; 蓸\u0642=\u0338.SS; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 蓸ق≠.ss +B; 蓸\u0642≠.SS; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 蓸ق≠.ss +B; 蓸\u0642≠.ss; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 蓸ق≠.ss +B; 蓸\u0642=\u0338.ss; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 蓸ق≠.ss +B; 蓸\u0642=\u0338.Ss; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 蓸ق≠.ss +B; 蓸\u0642≠.Ss; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 蓸ق≠.ss +B; xn--ehb015lnt1e.ss; [B5 B6 V6]; [B5 B6 V6] # 蓸ق≠.ss +B; xn--ehb015lnt1e.xn--zca; [B5 B6 V6]; [B5 B6 V6] # 蓸ق≠.ß +T; \u084E\u067A\u0DD3⒊.𐹹𞱩󠃪\u200C; [B1 C1 P1 V6]; [B1 P1 V6] # ࡎٺී⒊.𐹹 +N; \u084E\u067A\u0DD3⒊.𐹹𞱩󠃪\u200C; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ࡎٺී⒊.𐹹 +T; \u084E\u067A\u0DD33..𐹹𞱩󠃪\u200C; [B1 C1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # ࡎٺී3..𐹹 +N; \u084E\u067A\u0DD33..𐹹𞱩󠃪\u200C; [B1 C1 P1 V6 A4_2]; [B1 C1 P1 V6 A4_2] # ࡎٺී3..𐹹 +B; xn--3-prc71ls9j..xn--xo0dw109an237f; [B1 V6 A4_2]; [B1 V6 A4_2] # ࡎٺී3..𐹹 +B; xn--3-prc71ls9j..xn--0ug3205g7eyf3c96h; [B1 C1 V6 A4_2]; [B1 C1 V6 A4_2] # ࡎٺී3..𐹹 +B; xn--zib94gfziuq1a.xn--xo0dw109an237f; [B1 V6]; [B1 V6] # ࡎٺී⒊.𐹹 +B; xn--zib94gfziuq1a.xn--0ug3205g7eyf3c96h; [B1 C1 V6]; [B1 C1 V6] # ࡎٺී⒊.𐹹 +T; ς\u200D-.Ⴣ𦟙; [C2 P1 V3 V6]; [P1 V3 V6] # ς-.Ⴣ𦟙 +N; ς\u200D-.Ⴣ𦟙; [C2 P1 V3 V6]; [C2 P1 V3 V6] # ς-.Ⴣ𦟙 +T; ς\u200D-.ⴣ𦟙; [C2 V3]; [V3] # ς-.ⴣ𦟙 +N; ς\u200D-.ⴣ𦟙; [C2 V3]; [C2 V3] # ς-.ⴣ𦟙 +T; Σ\u200D-.Ⴣ𦟙; [C2 P1 V3 V6]; [P1 V3 V6] # σ-.Ⴣ𦟙 +N; Σ\u200D-.Ⴣ𦟙; [C2 P1 V3 V6]; [C2 P1 V3 V6] # σ-.Ⴣ𦟙 +T; σ\u200D-.ⴣ𦟙; [C2 V3]; [V3] # σ-.ⴣ𦟙 +N; σ\u200D-.ⴣ𦟙; [C2 V3]; [C2 V3] # σ-.ⴣ𦟙 +B; xn----zmb.xn--rlj2573p; [V3]; [V3] +B; xn----zmb048s.xn--rlj2573p; [C2 V3]; [C2 V3] # σ-.ⴣ𦟙 +B; xn----zmb.xn--7nd64871a; [V3 V6]; [V3 V6] +B; xn----zmb048s.xn--7nd64871a; [C2 V3 V6]; [C2 V3 V6] # σ-.Ⴣ𦟙 +B; xn----xmb348s.xn--rlj2573p; [C2 V3]; [C2 V3] # ς-.ⴣ𦟙 +B; xn----xmb348s.xn--7nd64871a; 
[C2 V3 V6]; [C2 V3 V6] # ς-.Ⴣ𦟙 +B; ≠。🞳𝟲; [P1 V6]; [P1 V6] +B; =\u0338。🞳𝟲; [P1 V6]; [P1 V6] +B; ≠。🞳6; [P1 V6]; [P1 V6] +B; =\u0338。🞳6; [P1 V6]; [P1 V6] +B; xn--1ch.xn--6-dl4s; [V6]; [V6] +B; 󅬽.蠔; [P1 V6]; [P1 V6] +B; xn--g747d.xn--xl2a; [V6]; [V6] +T; \u08E6\u200D.뼽; [C2 V5]; [V5] # ࣦ.뼽 +N; \u08E6\u200D.뼽; [C2 V5]; [C2 V5] # ࣦ.뼽 +T; \u08E6\u200D.뼽; [C2 V5]; [V5] # ࣦ.뼽 +N; \u08E6\u200D.뼽; [C2 V5]; [C2 V5] # ࣦ.뼽 +T; \u08E6\u200D.뼽; [C2 V5]; [V5] # ࣦ.뼽 +N; \u08E6\u200D.뼽; [C2 V5]; [C2 V5] # ࣦ.뼽 +T; \u08E6\u200D.뼽; [C2 V5]; [V5] # ࣦ.뼽 +N; \u08E6\u200D.뼽; [C2 V5]; [C2 V5] # ࣦ.뼽 +B; xn--p0b.xn--e43b; [V5]; [V5] # ࣦ.뼽 +B; xn--p0b869i.xn--e43b; [C2 V5]; [C2 V5] # ࣦ.뼽 +B; ₇\u0BCD􃂷\u06D2。👖\u0675-𞪑; [B1 P1 V6]; [B1 P1 V6] # 7்ے.👖اٴ- +B; 7\u0BCD􃂷\u06D2。👖\u0627\u0674-𞪑; [B1 P1 V6]; [B1 P1 V6] # 7்ے.👖اٴ- +B; xn--7-rwc839aj3073c.xn----ymc5uv818oghka; [B1 V6]; [B1 V6] # 7்ے.👖اٴ- +B; -。\u077B; [B1 V3]; [B1 V3] # -.ݻ +B; -。\u077B; [B1 V3]; [B1 V3] # -.ݻ +B; -.xn--cqb; [B1 V3]; [B1 V3] # -.ݻ +B; 𑇌𵛓。-⒈ꡏ\u072B; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # 𑇌.-⒈ꡏܫ +B; 𑇌𵛓。-1.ꡏ\u072B; [B1 B5 B6 P1 V3 V5 V6]; [B1 B5 B6 P1 V3 V5 V6] # 𑇌.-1.ꡏܫ +B; xn--8d1dg030h.-1.xn--1nb7163f; [B1 B5 B6 V3 V5 V6]; [B1 B5 B6 V3 V5 V6] # 𑇌.-1.ꡏܫ +B; xn--8d1dg030h.xn----u1c466tp10j; [B1 V3 V5 V6]; [B1 V3 V5 V6] # 𑇌.-⒈ꡏܫ +B; 璛\u1734\u06AF.-; [B1 B5 B6 V3]; [B1 B5 B6 V3] # 璛᜴گ.- +B; xn--ikb175frt4e.-; [B1 B5 B6 V3]; [B1 B5 B6 V3] # 璛᜴گ.- +B; 󠆰\u08A1\u0A4D샕.𐹲휁; [B1 B2 B3]; [B1 B2 B3] # ࢡ੍샕.𐹲휁 +B; 󠆰\u08A1\u0A4D샕.𐹲휁; [B1 B2 B3]; [B1 B2 B3] # ࢡ੍샕.𐹲휁 +B; 󠆰\u08A1\u0A4D샕.𐹲휁; [B1 B2 B3]; [B1 B2 B3] # ࢡ੍샕.𐹲휁 +B; 󠆰\u08A1\u0A4D샕.𐹲휁; [B1 B2 B3]; [B1 B2 B3] # ࢡ੍샕.𐹲휁 +B; xn--qyb07fj857a.xn--728bv72h; [B1 B2 B3]; [B1 B2 B3] # ࢡ੍샕.𐹲휁 +B; 񍨽.񋸕; [P1 V6]; [P1 V6] +B; 񍨽.񋸕; [P1 V6]; [P1 V6] +B; xn--pr3x.xn--rv7w; [V6]; [V6] +B; \u067D𞥕。𑑂𞤶Ⴍ-; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ٽ𞥕.𑑂𞤶Ⴍ- +B; \u067D𞥕。𑑂𞤶Ⴍ-; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ٽ𞥕.𑑂𞤶Ⴍ- +B; \u067D𞥕。𑑂𞤶ⴍ-; [B1 V3 V5]; [B1 V3 V5] # ٽ𞥕.𑑂𞤶ⴍ- +B; \u067D𞥕。𑑂𞤔Ⴍ-; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ٽ𞥕.𑑂𞤶Ⴍ- +B; \u067D𞥕。𑑂𞤔ⴍ-; [B1 V3 V5]; [B1 V3 V5] # ٽ𞥕.𑑂𞤶ⴍ- +B; xn--2ib0338v.xn----zvs0199fo91g; [B1 V3 V5]; [B1 V3 V5] # ٽ𞥕.𑑂𞤶ⴍ- +B; xn--2ib0338v.xn----w0g2740ro9vg; [B1 V3 V5 V6]; [B1 V3 V5 V6] # ٽ𞥕.𑑂𞤶Ⴍ- +B; \u067D𞥕。𑑂𞤶ⴍ-; [B1 V3 V5]; [B1 V3 V5] # ٽ𞥕.𑑂𞤶ⴍ- +B; \u067D𞥕。𑑂𞤔Ⴍ-; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ٽ𞥕.𑑂𞤶Ⴍ- +B; \u067D𞥕。𑑂𞤔ⴍ-; [B1 V3 V5]; [B1 V3 V5] # ٽ𞥕.𑑂𞤶ⴍ- +B; 𐯀𐸉𞧏。񢚧₄Ⴋ񂹫; [P1 V6]; [P1 V6] +B; 𐯀𐸉𞧏。񢚧4Ⴋ񂹫; [P1 V6]; [P1 V6] +B; 𐯀𐸉𞧏。񢚧4ⴋ񂹫; [P1 V6]; [P1 V6] +B; xn--039c42bq865a.xn--4-wvs27840bnrzm; [V6]; [V6] +B; xn--039c42bq865a.xn--4-t0g49302fnrzm; [V6]; [V6] +B; 𐯀𐸉𞧏。񢚧₄ⴋ񂹫; [P1 V6]; [P1 V6] +B; 4\u06BD︒󠑥.≠; [B1 P1 V6]; [B1 P1 V6] # 4ڽ︒.≠ +B; 4\u06BD︒󠑥.=\u0338; [B1 P1 V6]; [B1 P1 V6] # 4ڽ︒.≠ +B; 4\u06BD。󠑥.≠; [B1 P1 V6]; [B1 P1 V6] # 4ڽ..≠ +B; 4\u06BD。󠑥.=\u0338; [B1 P1 V6]; [B1 P1 V6] # 4ڽ..≠ +B; xn--4-kvc.xn--5136e.xn--1ch; [B1 V6]; [B1 V6] # 4ڽ..≠ +B; xn--4-kvc5601q2h50i.xn--1ch; [B1 V6]; [B1 V6] # 4ڽ︒.≠ +B; 𝟓。\u06D7; [V5]; [V5] # 5.ۗ +B; 5。\u06D7; [V5]; [V5] # 5.ۗ +B; 5.xn--nlb; [V5]; [V5] # 5.ۗ +T; \u200C򺸩.⾕; [C1 P1 V6]; [P1 V6] # .谷 +N; \u200C򺸩.⾕; [C1 P1 V6]; [C1 P1 V6] # .谷 +T; \u200C򺸩.谷; [C1 P1 V6]; [P1 V6] # .谷 +N; \u200C򺸩.谷; [C1 P1 V6]; [C1 P1 V6] # .谷 +B; xn--i183d.xn--6g3a; [V6]; [V6] +B; xn--0ug26167i.xn--6g3a; [C1 V6]; [C1 V6] # .谷 +T; ︒󎰇\u200D.-\u073C\u200C; [C1 C2 P1 V3 V6]; [P1 V3 V6] # ︒.-ܼ +N; ︒󎰇\u200D.-\u073C\u200C; [C1 C2 P1 V3 V6]; [C1 C2 P1 V3 V6] # ︒.-ܼ +T; 。󎰇\u200D.-\u073C\u200C; [C1 C2 P1 V3 V6 A4_2]; [P1 V3 V6 A4_2] # ..-ܼ +N; 。󎰇\u200D.-\u073C\u200C; [C1 C2 P1 V3 V6 A4_2]; [C1 C2 P1 V3 V6 A4_2] 
# ..-ܼ +B; .xn--hh50e.xn----t2c; [V3 V6 A4_2]; [V3 V6 A4_2] # ..-ܼ +B; .xn--1ug05310k.xn----t2c071q; [C1 C2 V3 V6 A4_2]; [C1 C2 V3 V6 A4_2] # ..-ܼ +B; xn--y86c71305c.xn----t2c; [V3 V6]; [V3 V6] # ︒.-ܼ +B; xn--1ug1658ftw26f.xn----t2c071q; [C1 C2 V3 V6]; [C1 C2 V3 V6] # ︒.-ܼ +B; ≯𞤟。ᡨ; [B1 P1 V6]; [B1 P1 V6] +B; >\u0338𞤟。ᡨ; [B1 P1 V6]; [B1 P1 V6] +B; >\u0338𞥁。ᡨ; [B1 P1 V6]; [B1 P1 V6] +B; ≯𞥁。ᡨ; [B1 P1 V6]; [B1 P1 V6] +B; xn--hdhz520p.xn--48e; [B1 V6]; [B1 V6] +B; \u0F74𫫰𝨄。\u0713𐹦; [B1 V5]; [B1 V5] # ུ𫫰𝨄.ܓ𐹦 +B; xn--ned8985uo92e.xn--dnb6395k; [B1 V5]; [B1 V5] # ུ𫫰𝨄.ܓ𐹦 +B; \u033C\u07DB⁷𝟹。𝟬; [B1 V5]; [B1 V5] # ̼ߛ73.0 +B; \u033C\u07DB73。0; [B1 V5]; [B1 V5] # ̼ߛ73.0 +B; xn--73-9yb648b.0; [B1 V5]; [B1 V5] # ̼ߛ73.0 +T; \u200D.𝟗; [C2]; [A4_2] # .9 +N; \u200D.𝟗; [C2]; [C2] # .9 +T; \u200D.9; [C2]; [A4_2] # .9 +N; \u200D.9; [C2]; [C2] # .9 +B; .9; [A4_2]; [A4_2] +B; xn--1ug.9; [C2]; [C2] # .9 +B; 9; ; +B; \u0779ᡭ𪕈。\u06B6\u08D9; [B2 B3]; [B2 B3] # ݹᡭ𪕈.ڶࣙ +B; xn--9pb497fs270c.xn--pkb80i; [B2 B3]; [B2 B3] # ݹᡭ𪕈.ڶࣙ +B; \u07265\u07E2겙。\u1CF4𐷚; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # ܦ5ߢ겙.᳴ +B; \u07265\u07E2겙。\u1CF4𐷚; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # ܦ5ߢ겙.᳴ +B; \u07265\u07E2겙。\u1CF4𐷚; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # ܦ5ߢ겙.᳴ +B; \u07265\u07E2겙。\u1CF4𐷚; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # ܦ5ߢ겙.᳴ +B; xn--5-j1c97c2483c.xn--e7f2093h; [B1 B2 B3 V5 V6]; [B1 B2 B3 V5 V6] # ܦ5ߢ겙.᳴ +T; Ⴍ𿣍ꡨ\u05AE。Ⴞ\u200C\u200C; [C1 P1 V6]; [P1 V6] # Ⴍꡨ֮.Ⴞ +N; Ⴍ𿣍ꡨ\u05AE。Ⴞ\u200C\u200C; [C1 P1 V6]; [C1 P1 V6] # Ⴍꡨ֮.Ⴞ +T; ⴍ𿣍ꡨ\u05AE。ⴞ\u200C\u200C; [C1 P1 V6]; [P1 V6] # ⴍꡨ֮.ⴞ +N; ⴍ𿣍ꡨ\u05AE。ⴞ\u200C\u200C; [C1 P1 V6]; [C1 P1 V6] # ⴍꡨ֮.ⴞ +B; xn--5cb172r175fug38a.xn--mlj; [V6]; [V6] # ⴍꡨ֮.ⴞ +B; xn--5cb172r175fug38a.xn--0uga051h; [C1 V6]; [C1 V6] # ⴍꡨ֮.ⴞ +B; xn--5cb347co96jug15a.xn--2nd; [V6]; [V6] # Ⴍꡨ֮.Ⴞ +B; xn--5cb347co96jug15a.xn--2nd059ea; [C1 V6]; [C1 V6] # Ⴍꡨ֮.Ⴞ +B; 𐋰。󑓱; [P1 V6]; [P1 V6] +B; xn--k97c.xn--q031e; [V6]; [V6] +B; 󡎦\u17B4\u0B4D.𐹾; [B1 P1 V6]; [B1 P1 V6] # ୍.𐹾 +B; xn--9ic364dho91z.xn--2o0d; [B1 V6]; [B1 V6] # ୍.𐹾 +B; \u08DFႫ𶿸귤.򠅼𝟢휪\u0AE3; [P1 V5 V6]; [P1 V5 V6] # ࣟႫ귤.0휪ૣ +B; \u08DFႫ𶿸귤.򠅼𝟢휪\u0AE3; [P1 V5 V6]; [P1 V5 V6] # ࣟႫ귤.0휪ૣ +B; \u08DFႫ𶿸귤.򠅼0휪\u0AE3; [P1 V5 V6]; [P1 V5 V6] # ࣟႫ귤.0휪ૣ +B; \u08DFႫ𶿸귤.򠅼0휪\u0AE3; [P1 V5 V6]; [P1 V5 V6] # ࣟႫ귤.0휪ૣ +B; \u08DFⴋ𶿸귤.򠅼0휪\u0AE3; [P1 V5 V6]; [P1 V5 V6] # ࣟⴋ귤.0휪ૣ +B; \u08DFⴋ𶿸귤.򠅼0휪\u0AE3; [P1 V5 V6]; [P1 V5 V6] # ࣟⴋ귤.0휪ૣ +B; xn--i0b436pkl2g2h42a.xn--0-8le8997mulr5f; [V5 V6]; [V5 V6] # ࣟⴋ귤.0휪ૣ +B; xn--i0b601b6r7l2hs0a.xn--0-8le8997mulr5f; [V5 V6]; [V5 V6] # ࣟႫ귤.0휪ૣ +B; \u08DFⴋ𶿸귤.򠅼𝟢휪\u0AE3; [P1 V5 V6]; [P1 V5 V6] # ࣟⴋ귤.0휪ૣ +B; \u08DFⴋ𶿸귤.򠅼𝟢휪\u0AE3; [P1 V5 V6]; [P1 V5 V6] # ࣟⴋ귤.0휪ૣ +B; \u0784.𞡝\u0601; [P1 V6]; [P1 V6] # ބ.𞡝 +B; \u0784.𞡝\u0601; [P1 V6]; [P1 V6] # ބ.𞡝 +B; xn--lqb.xn--jfb1808v; [V6]; [V6] # ބ.𞡝 +T; \u0ACD₃.8\uA8C4\u200D🃤; [V5]; [V5] # ્3.8꣄🃤 +N; \u0ACD₃.8\uA8C4\u200D🃤; [V5]; [V5] # ્3.8꣄🃤 +T; \u0ACD3.8\uA8C4\u200D🃤; [V5]; [V5] # ્3.8꣄🃤 +N; \u0ACD3.8\uA8C4\u200D🃤; [V5]; [V5] # ્3.8꣄🃤 +B; xn--3-yke.xn--8-sl4et308f; [V5]; [V5] # ્3.8꣄🃤 +B; xn--3-yke.xn--8-ugnv982dbkwm; [V5]; [V5] # ્3.8꣄🃤 +B; ℻⩷𝆆。𞤠󠆁\u180C; [B6]; [B6] +B; FAX⩷𝆆。𞤠󠆁\u180C; [B6]; [B6] +B; fax⩷𝆆。𞥂󠆁\u180C; [B6]; [B6] +B; Fax⩷𝆆。𞤠󠆁\u180C; [B6]; [B6] +B; xn--fax-4c9a1676t.xn--6e6h; [B6]; [B6] +B; ℻⩷𝆆。𞥂󠆁\u180C; [B6]; [B6] +B; FAX⩷𝆆。𞥂󠆁\u180C; [B6]; [B6] +B; fax⩷𝆆。𞤠󠆁\u180C; [B6]; [B6] +B; fax⩷𝆆.𞥂; [B6]; [B6] +B; FAX⩷𝆆.𞤠; [B6]; [B6] +B; Fax⩷𝆆.𞤠; [B6]; [B6] +B; FAX⩷𝆆.𞥂; [B6]; [B6] +B; Fax⩷𝆆.𞥂; [B6]; [B6] +B; ꡕ≠\u105E󮿱。𐵧󠄫\uFFA0; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ꡕ≠ၞ. 
+B; ꡕ=\u0338\u105E󮿱。𐵧󠄫\uFFA0; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ꡕ≠ၞ. +B; ꡕ≠\u105E󮿱。𐵧󠄫\u1160; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ꡕ≠ၞ. +B; ꡕ=\u0338\u105E󮿱。𐵧󠄫\u1160; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ꡕ≠ၞ. +B; xn--cld333gn31h0158l.xn--psd1510k; [B2 B3 V6]; [B2 B3 V6] # ꡕ≠ၞ. +B; xn--cld333gn31h0158l.xn--cl7c96v; [B2 B3 V6]; [B2 B3 V6] # ꡕ≠ၞ. +T; 鱊。\u200C; [C1]; xn--rt6a. # 鱊. +N; 鱊。\u200C; [C1]; [C1] # 鱊. +B; xn--rt6a.; 鱊.; xn--rt6a. +B; 鱊.; ; xn--rt6a. +B; xn--rt6a.xn--0ug; [C1]; [C1] # 鱊. +B; 8𐹣.𑍨; [B1 B3 B6 V5]; [B1 B3 B6 V5] +B; 8𐹣.𑍨; [B1 B3 B6 V5]; [B1 B3 B6 V5] +B; xn--8-d26i.xn--0p1d; [B1 B3 B6 V5]; [B1 B3 B6 V5] +B; ⏹𐧀.𐫯; [B1]; [B1] +B; ⏹𐧀.𐫯; [B1]; [B1] +B; xn--qoh9161g.xn--1x9c; [B1]; [B1] +T; 𞤺\u07CC4.\u200D; [B1 C2]; xn--4-0bd15808a. # 𞤺ߌ4. +N; 𞤺\u07CC4.\u200D; [B1 C2]; [B1 C2] # 𞤺ߌ4. +T; 𞤺\u07CC4.\u200D; [B1 C2]; xn--4-0bd15808a. # 𞤺ߌ4. +N; 𞤺\u07CC4.\u200D; [B1 C2]; [B1 C2] # 𞤺ߌ4. +T; 𞤘\u07CC4.\u200D; [B1 C2]; xn--4-0bd15808a. # 𞤺ߌ4. +N; 𞤘\u07CC4.\u200D; [B1 C2]; [B1 C2] # 𞤺ߌ4. +B; xn--4-0bd15808a.; 𞤺\u07CC4.; xn--4-0bd15808a. # 𞤺ߌ4. +B; 𞤺\u07CC4.; ; xn--4-0bd15808a. # 𞤺ߌ4. +B; 𞤘\u07CC4.; 𞤺\u07CC4.; xn--4-0bd15808a. # 𞤺ߌ4. +B; xn--4-0bd15808a.xn--1ug; [B1 C2]; [B1 C2] # 𞤺ߌ4. +T; 𞤘\u07CC4.\u200D; [B1 C2]; xn--4-0bd15808a. # 𞤺ߌ4. +N; 𞤘\u07CC4.\u200D; [B1 C2]; [B1 C2] # 𞤺ߌ4. +B; ⒗\u0981\u20EF-.\u08E2•; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ⒗ঁ⃯-.• +B; 16.\u0981\u20EF-.\u08E2•; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # 16.ঁ⃯-.• +B; 16.xn----z0d801p.xn--l0b810j; [B1 V3 V5 V6]; [B1 V3 V5 V6] # 16.ঁ⃯-.• +B; xn----z0d801p6kd.xn--l0b810j; [B1 V3 V6]; [B1 V3 V6] # ⒗ঁ⃯-.• +B; -。䏛; [V3]; [V3] +B; -。䏛; [V3]; [V3] +B; -.xn--xco; [V3]; [V3] +T; \u200C񒃠.\u200D; [C1 C2 P1 V6]; [P1 V6] # . +N; \u200C񒃠.\u200D; [C1 C2 P1 V6]; [C1 C2 P1 V6] # . +T; \u200C񒃠.\u200D; [C1 C2 P1 V6]; [P1 V6] # . +N; \u200C񒃠.\u200D; [C1 C2 P1 V6]; [C1 C2 P1 V6] # . +B; xn--dj8y.; [V6]; [V6] +B; xn--0ugz7551c.xn--1ug; [C1 C2 V6]; [C1 C2 V6] # . 
+T; ⒈⓰󥣇。𐹠\u200D򗷦Ⴕ; [B1 C2 P1 V6]; [B1 P1 V6] # ⒈⓰.𐹠Ⴕ +N; ⒈⓰󥣇。𐹠\u200D򗷦Ⴕ; [B1 C2 P1 V6]; [B1 C2 P1 V6] # ⒈⓰.𐹠Ⴕ +T; 1.⓰󥣇。𐹠\u200D򗷦Ⴕ; [B1 C2 P1 V6]; [B1 P1 V6] # 1.⓰.𐹠Ⴕ +N; 1.⓰󥣇。𐹠\u200D򗷦Ⴕ; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 1.⓰.𐹠Ⴕ +T; 1.⓰󥣇。𐹠\u200D򗷦ⴕ; [B1 C2 P1 V6]; [B1 P1 V6] # 1.⓰.𐹠ⴕ +N; 1.⓰󥣇。𐹠\u200D򗷦ⴕ; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 1.⓰.𐹠ⴕ +B; 1.xn--svh00804k.xn--dljv223ee5t2d; [B1 V6]; [B1 V6] +B; 1.xn--svh00804k.xn--1ug352csp0psg45e; [B1 C2 V6]; [B1 C2 V6] # 1.⓰.𐹠ⴕ +B; 1.xn--svh00804k.xn--tnd1990ke579c; [B1 V6]; [B1 V6] +B; 1.xn--svh00804k.xn--tnd969erj4psgl3e; [B1 C2 V6]; [B1 C2 V6] # 1.⓰.𐹠Ⴕ +T; ⒈⓰󥣇。𐹠\u200D򗷦ⴕ; [B1 C2 P1 V6]; [B1 P1 V6] # ⒈⓰.𐹠ⴕ +N; ⒈⓰󥣇。𐹠\u200D򗷦ⴕ; [B1 C2 P1 V6]; [B1 C2 P1 V6] # ⒈⓰.𐹠ⴕ +B; xn--tsh0nz9380h.xn--dljv223ee5t2d; [B1 V6]; [B1 V6] +B; xn--tsh0nz9380h.xn--1ug352csp0psg45e; [B1 C2 V6]; [B1 C2 V6] # ⒈⓰.𐹠ⴕ +B; xn--tsh0nz9380h.xn--tnd1990ke579c; [B1 V6]; [B1 V6] +B; xn--tsh0nz9380h.xn--tnd969erj4psgl3e; [B1 C2 V6]; [B1 C2 V6] # ⒈⓰.𐹠Ⴕ +T; 𞠊ᠮ-ß。\u1CD0効\u0601𷣭; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # 𞠊ᠮ-ß.᳐効 +N; 𞠊ᠮ-ß。\u1CD0効\u0601𷣭; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # 𞠊ᠮ-ß.᳐効 +T; 𞠊ᠮ-ß。\u1CD0効\u0601𷣭; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # 𞠊ᠮ-ß.᳐効 +N; 𞠊ᠮ-ß。\u1CD0効\u0601𷣭; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # 𞠊ᠮ-ß.᳐効 +B; 𞠊ᠮ-SS。\u1CD0効\u0601𷣭; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # 𞠊ᠮ-ss.᳐効 +B; 𞠊ᠮ-ss。\u1CD0効\u0601𷣭; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # 𞠊ᠮ-ss.᳐効 +B; 𞠊ᠮ-Ss。\u1CD0効\u0601𷣭; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # 𞠊ᠮ-ss.᳐効 +B; xn---ss-21t18904a.xn--jfb197i791bi6x4c; [B1 B2 B3 V5 V6]; [B1 B2 B3 V5 V6] # 𞠊ᠮ-ss.᳐効 +B; xn----qfa310pg973b.xn--jfb197i791bi6x4c; [B1 B2 B3 V5 V6]; [B1 B2 B3 V5 V6] # 𞠊ᠮ-ß.᳐効 +B; 𞠊ᠮ-SS。\u1CD0効\u0601𷣭; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # 𞠊ᠮ-ss.᳐効 +B; 𞠊ᠮ-ss。\u1CD0効\u0601𷣭; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # 𞠊ᠮ-ss.᳐効 +B; 𞠊ᠮ-Ss。\u1CD0効\u0601𷣭; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # 𞠊ᠮ-ss.᳐効 +B; 𑇀.󠨱; [P1 V5 V6]; [P1 V5 V6] +B; xn--wd1d.xn--k946e; [V5 V6]; [V5 V6] +B; ␒3\uFB88。𝟘𐨿𐹆; [B1 P1 V6]; [B1 P1 V6] # ␒3ڈ.0𐨿 +B; ␒3\u0688。0𐨿𐹆; [B1 P1 V6]; [B1 P1 V6] # ␒3ڈ.0𐨿 +B; xn--3-jsc897t.xn--0-sc5iy3h; [B1 V6]; [B1 V6] # ␒3ڈ.0𐨿 +B; \u076B6\u0A81\u08A6。\u1DE3; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ݫ6ઁࢦ.ᷣ +B; \u076B6\u0A81\u08A6。\u1DE3; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ݫ6ઁࢦ.ᷣ +B; xn--6-h5c06gj6c.xn--7eg; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ݫ6ઁࢦ.ᷣ +T; \u0605-𽤞Ⴂ。򅤶\u200D; [B1 B6 C2 P1 V6]; [B1 P1 V6] # -Ⴂ. +N; \u0605-𽤞Ⴂ。򅤶\u200D; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # -Ⴂ. +T; \u0605-𽤞ⴂ。򅤶\u200D; [B1 B6 C2 P1 V6]; [B1 P1 V6] # -ⴂ. +N; \u0605-𽤞ⴂ。򅤶\u200D; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # -ⴂ. +B; xn----0kc8501a5399e.xn--ss06b; [B1 V6]; [B1 V6] # -ⴂ. +B; xn----0kc8501a5399e.xn--1ugy3204f; [B1 B6 C2 V6]; [B1 B6 C2 V6] # -ⴂ. +B; xn----0kc662fc152h.xn--ss06b; [B1 V6]; [B1 V6] # -Ⴂ. +B; xn----0kc662fc152h.xn--1ugy3204f; [B1 B6 C2 V6]; [B1 B6 C2 V6] # -Ⴂ. 
+T; ⾆.ꡈ5≯ß; [P1 V6]; [P1 V6] +N; ⾆.ꡈ5≯ß; [P1 V6]; [P1 V6] +T; ⾆.ꡈ5>\u0338ß; [P1 V6]; [P1 V6] +N; ⾆.ꡈ5>\u0338ß; [P1 V6]; [P1 V6] +T; 舌.ꡈ5≯ß; [P1 V6]; [P1 V6] +N; 舌.ꡈ5≯ß; [P1 V6]; [P1 V6] +T; 舌.ꡈ5>\u0338ß; [P1 V6]; [P1 V6] +N; 舌.ꡈ5>\u0338ß; [P1 V6]; [P1 V6] +B; 舌.ꡈ5>\u0338SS; [P1 V6]; [P1 V6] +B; 舌.ꡈ5≯SS; [P1 V6]; [P1 V6] +B; 舌.ꡈ5≯ss; [P1 V6]; [P1 V6] +B; 舌.ꡈ5>\u0338ss; [P1 V6]; [P1 V6] +B; 舌.ꡈ5>\u0338Ss; [P1 V6]; [P1 V6] +B; 舌.ꡈ5≯Ss; [P1 V6]; [P1 V6] +B; xn--tc1a.xn--5ss-3m2a5009e; [V6]; [V6] +B; xn--tc1a.xn--5-qfa988w745i; [V6]; [V6] +B; ⾆.ꡈ5>\u0338SS; [P1 V6]; [P1 V6] +B; ⾆.ꡈ5≯SS; [P1 V6]; [P1 V6] +B; ⾆.ꡈ5≯ss; [P1 V6]; [P1 V6] +B; ⾆.ꡈ5>\u0338ss; [P1 V6]; [P1 V6] +B; ⾆.ꡈ5>\u0338Ss; [P1 V6]; [P1 V6] +B; ⾆.ꡈ5≯Ss; [P1 V6]; [P1 V6] +T; \u0ACD8\u200D.򾂈\u075C; [B1 B5 B6 C2 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ્8.ݜ +N; \u0ACD8\u200D.򾂈\u075C; [B1 B5 B6 C2 P1 V5 V6]; [B1 B5 B6 C2 P1 V5 V6] # ્8.ݜ +T; \u0ACD8\u200D.򾂈\u075C; [B1 B5 B6 C2 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ્8.ݜ +N; \u0ACD8\u200D.򾂈\u075C; [B1 B5 B6 C2 P1 V5 V6]; [B1 B5 B6 C2 P1 V5 V6] # ્8.ݜ +B; xn--8-yke.xn--gpb79046m; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # ્8.ݜ +B; xn--8-yke534n.xn--gpb79046m; [B1 B5 B6 C2 V5 V6]; [B1 B5 B6 C2 V5 V6] # ્8.ݜ +B; 򸷆\u0A70≮򹓙.񞎧⁷󠯙\u06B6; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ੰ≮.7ڶ +B; 򸷆\u0A70<\u0338򹓙.񞎧⁷󠯙\u06B6; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ੰ≮.7ڶ +B; 򸷆\u0A70≮򹓙.񞎧7󠯙\u06B6; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ੰ≮.7ڶ +B; 򸷆\u0A70<\u0338򹓙.񞎧7󠯙\u06B6; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ੰ≮.7ڶ +B; xn--ycc893jqh38rb6fa.xn--7-5uc53836ixt41c; [B5 B6 V6]; [B5 B6 V6] # ੰ≮.7ڶ +T; 𞤪.ς; ; xn--ie6h.xn--4xa +N; 𞤪.ς; ; xn--ie6h.xn--3xa +B; 𞤈.Σ; 𞤪.σ; xn--ie6h.xn--4xa +B; 𞤪.σ; ; xn--ie6h.xn--4xa +B; 𞤈.σ; 𞤪.σ; xn--ie6h.xn--4xa +B; xn--ie6h.xn--4xa; 𞤪.σ; xn--ie6h.xn--4xa +T; 𞤈.ς; 𞤪.ς; xn--ie6h.xn--4xa +N; 𞤈.ς; 𞤪.ς; xn--ie6h.xn--3xa +B; xn--ie6h.xn--3xa; 𞤪.ς; xn--ie6h.xn--3xa +B; 𞤪.Σ; 𞤪.σ; xn--ie6h.xn--4xa +T; \u200CႺ。ς; [C1 P1 V6]; [P1 V6] # Ⴚ.ς +N; \u200CႺ。ς; [C1 P1 V6]; [C1 P1 V6] # Ⴚ.ς +T; \u200CႺ。ς; [C1 P1 V6]; [P1 V6] # Ⴚ.ς +N; \u200CႺ。ς; [C1 P1 V6]; [C1 P1 V6] # Ⴚ.ς +T; \u200Cⴚ。ς; [C1]; xn--ilj.xn--4xa # ⴚ.ς +N; \u200Cⴚ。ς; [C1]; [C1] # ⴚ.ς +T; \u200CႺ。Σ; [C1 P1 V6]; [P1 V6] # Ⴚ.σ +N; \u200CႺ。Σ; [C1 P1 V6]; [C1 P1 V6] # Ⴚ.σ +T; \u200Cⴚ。σ; [C1]; xn--ilj.xn--4xa # ⴚ.σ +N; \u200Cⴚ。σ; [C1]; [C1] # ⴚ.σ +B; xn--ilj.xn--4xa; ⴚ.σ; xn--ilj.xn--4xa +B; ⴚ.σ; ; xn--ilj.xn--4xa +B; Ⴚ.Σ; [P1 V6]; [P1 V6] +T; ⴚ.ς; ; xn--ilj.xn--4xa +N; ⴚ.ς; ; xn--ilj.xn--3xa +T; Ⴚ.ς; [P1 V6]; [P1 V6] +N; Ⴚ.ς; [P1 V6]; [P1 V6] +B; xn--ynd.xn--4xa; [V6]; [V6] +B; xn--ynd.xn--3xa; [V6]; [V6] +B; xn--ilj.xn--3xa; ⴚ.ς; xn--ilj.xn--3xa +B; Ⴚ.σ; [P1 V6]; [P1 V6] +B; xn--0ug262c.xn--4xa; [C1]; [C1] # ⴚ.σ +B; xn--ynd759e.xn--4xa; [C1 V6]; [C1 V6] # Ⴚ.σ +B; xn--0ug262c.xn--3xa; [C1]; [C1] # ⴚ.ς +B; xn--ynd759e.xn--3xa; [C1 V6]; [C1 V6] # Ⴚ.ς +T; \u200Cⴚ。ς; [C1]; xn--ilj.xn--4xa # ⴚ.ς +N; \u200Cⴚ。ς; [C1]; [C1] # ⴚ.ς +T; \u200CႺ。Σ; [C1 P1 V6]; [P1 V6] # Ⴚ.σ +N; \u200CႺ。Σ; [C1 P1 V6]; [C1 P1 V6] # Ⴚ.σ +T; \u200Cⴚ。σ; [C1]; xn--ilj.xn--4xa # ⴚ.σ +N; \u200Cⴚ。σ; [C1]; [C1] # ⴚ.σ +B; 𞤃.𐹦; [B1]; [B1] +B; 𞤃.𐹦; [B1]; [B1] +B; 𞤥.𐹦; [B1]; [B1] +B; xn--de6h.xn--eo0d; [B1]; [B1] +B; 𞤥.𐹦; [B1]; [B1] +T; \u200D⾕。\u200C\u0310\uA953ꡎ; [C1 C2]; [V5] # 谷.꥓̐ꡎ +N; \u200D⾕。\u200C\u0310\uA953ꡎ; [C1 C2]; [C1 C2] # 谷.꥓̐ꡎ +T; \u200D⾕。\u200C\uA953\u0310ꡎ; [C1 C2]; [V5] # 谷.꥓̐ꡎ +N; \u200D⾕。\u200C\uA953\u0310ꡎ; [C1 C2]; [C1 C2] # 谷.꥓̐ꡎ +T; \u200D谷。\u200C\uA953\u0310ꡎ; [C1 C2]; [V5] # 谷.꥓̐ꡎ +N; \u200D谷。\u200C\uA953\u0310ꡎ; [C1 C2]; [C1 C2] # 谷.꥓̐ꡎ +B; xn--6g3a.xn--0sa8175flwa; [V5]; [V5] # 谷.꥓̐ꡎ +B; 
xn--1ug0273b.xn--0sa359l6n7g13a; [C1 C2]; [C1 C2] # 谷.꥓̐ꡎ +T; \u06AA-뉔.𞤐\u200C; [B2 B3 C1]; [B2 B3] # ڪ-뉔.𞤲 +N; \u06AA-뉔.𞤐\u200C; [B2 B3 C1]; [B2 B3 C1] # ڪ-뉔.𞤲 +T; \u06AA-뉔.𞤐\u200C; [B2 B3 C1]; [B2 B3] # ڪ-뉔.𞤲 +N; \u06AA-뉔.𞤐\u200C; [B2 B3 C1]; [B2 B3 C1] # ڪ-뉔.𞤲 +T; \u06AA-뉔.𞤐\u200C; [B2 B3 C1]; [B2 B3] # ڪ-뉔.𞤲 +N; \u06AA-뉔.𞤐\u200C; [B2 B3 C1]; [B2 B3 C1] # ڪ-뉔.𞤲 +T; \u06AA-뉔.𞤐\u200C; [B2 B3 C1]; [B2 B3] # ڪ-뉔.𞤲 +N; \u06AA-뉔.𞤐\u200C; [B2 B3 C1]; [B2 B3 C1] # ڪ-뉔.𞤲 +T; \u06AA-뉔.𞤲\u200C; [B2 B3 C1]; [B2 B3] # ڪ-뉔.𞤲 +N; \u06AA-뉔.𞤲\u200C; [B2 B3 C1]; [B2 B3 C1] # ڪ-뉔.𞤲 +T; \u06AA-뉔.𞤲\u200C; [B2 B3 C1]; [B2 B3] # ڪ-뉔.𞤲 +N; \u06AA-뉔.𞤲\u200C; [B2 B3 C1]; [B2 B3 C1] # ڪ-뉔.𞤲 +B; xn----guc3592k.xn--qe6h; [B2 B3]; [B2 B3] # ڪ-뉔.𞤲 +B; xn----guc3592k.xn--0ug7611p; [B2 B3 C1]; [B2 B3 C1] # ڪ-뉔.𞤲 +T; \u06AA-뉔.𞤲\u200C; [B2 B3 C1]; [B2 B3] # ڪ-뉔.𞤲 +N; \u06AA-뉔.𞤲\u200C; [B2 B3 C1]; [B2 B3 C1] # ڪ-뉔.𞤲 +T; \u06AA-뉔.𞤲\u200C; [B2 B3 C1]; [B2 B3] # ڪ-뉔.𞤲 +N; \u06AA-뉔.𞤲\u200C; [B2 B3 C1]; [B2 B3 C1] # ڪ-뉔.𞤲 +T; 񔲵5ᦛς.\uA8C4\u077B\u1CD2\u0738; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 5ᦛς.꣄ݻܸ᳒ +N; 񔲵5ᦛς.\uA8C4\u077B\u1CD2\u0738; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 5ᦛς.꣄ݻܸ᳒ +T; 񔲵5ᦛς.\uA8C4\u077B\u0738\u1CD2; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 5ᦛς.꣄ݻܸ᳒ +N; 񔲵5ᦛς.\uA8C4\u077B\u0738\u1CD2; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 5ᦛς.꣄ݻܸ᳒ +T; 񔲵5ᦛς.\uA8C4\u077B\u0738\u1CD2; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 5ᦛς.꣄ݻܸ᳒ +N; 񔲵5ᦛς.\uA8C4\u077B\u0738\u1CD2; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 5ᦛς.꣄ݻܸ᳒ +B; 񔲵5ᦛΣ.\uA8C4\u077B\u0738\u1CD2; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 5ᦛσ.꣄ݻܸ᳒ +B; 񔲵5ᦛσ.\uA8C4\u077B\u0738\u1CD2; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 5ᦛσ.꣄ݻܸ᳒ +B; xn--5-0mb988ng603j.xn--fob7kk44dl41k; [B1 V5 V6]; [B1 V5 V6] # 5ᦛσ.꣄ݻܸ᳒ +B; xn--5-ymb298ng603j.xn--fob7kk44dl41k; [B1 V5 V6]; [B1 V5 V6] # 5ᦛς.꣄ݻܸ᳒ +B; 񔲵5ᦛΣ.\uA8C4\u077B\u0738\u1CD2; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 5ᦛσ.꣄ݻܸ᳒ +B; 񔲵5ᦛσ.\uA8C4\u077B\u0738\u1CD2; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 5ᦛσ.꣄ݻܸ᳒ +B; 񔲵5ᦛΣ.\uA8C4\u077B\u1CD2\u0738; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 5ᦛσ.꣄ݻܸ᳒ +B; 񔲵5ᦛσ.\uA8C4\u077B\u1CD2\u0738; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 5ᦛσ.꣄ݻܸ᳒ +B; 淽。ᠾ; 淽.ᠾ; xn--34w.xn--x7e +B; xn--34w.xn--x7e; 淽.ᠾ; xn--34w.xn--x7e +B; 淽.ᠾ; ; xn--34w.xn--x7e +B; 𐹴𑘷。-; [B1 V3]; [B1 V3] +B; xn--so0do6k.-; [B1 V3]; [B1 V3] +B; 򬨩Ⴓ❓。𑄨; [P1 V5 V6]; [P1 V5 V6] +B; 򬨩Ⴓ❓。𑄨; [P1 V5 V6]; [P1 V5 V6] +B; 򬨩ⴓ❓。𑄨; [P1 V5 V6]; [P1 V5 V6] +B; xn--8di78qvw32y.xn--k80d; [V5 V6]; [V5 V6] +B; xn--rnd896i0j14q.xn--k80d; [V5 V6]; [V5 V6] +B; 򬨩ⴓ❓。𑄨; [P1 V5 V6]; [P1 V5 V6] +T; \u200C𐹡𞤌Ⴇ。ßႣ; [B1 C1 P1 V6]; [B1 P1 V6] # 𐹡𞤮Ⴇ.ßႣ +N; \u200C𐹡𞤌Ⴇ。ßႣ; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹡𞤮Ⴇ.ßႣ +T; \u200C𐹡𞤌Ⴇ。ßႣ; [B1 C1 P1 V6]; [B1 P1 V6] # 𐹡𞤮Ⴇ.ßႣ +N; \u200C𐹡𞤌Ⴇ。ßႣ; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹡𞤮Ⴇ.ßႣ +T; \u200C𐹡𞤮ⴇ。ßⴃ; [B1 C1]; [B1] # 𐹡𞤮ⴇ.ßⴃ +N; \u200C𐹡𞤮ⴇ。ßⴃ; [B1 C1]; [B1 C1] # 𐹡𞤮ⴇ.ßⴃ +T; \u200C𐹡𞤌Ⴇ。SSႣ; [B1 C1 P1 V6]; [B1 P1 V6] # 𐹡𞤮Ⴇ.ssႣ +N; \u200C𐹡𞤌Ⴇ。SSႣ; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹡𞤮Ⴇ.ssႣ +T; \u200C𐹡𞤮ⴇ。ssⴃ; [B1 C1]; [B1] # 𐹡𞤮ⴇ.ssⴃ +N; \u200C𐹡𞤮ⴇ。ssⴃ; [B1 C1]; [B1 C1] # 𐹡𞤮ⴇ.ssⴃ +T; \u200C𐹡𞤌ⴇ。Ssⴃ; [B1 C1]; [B1] # 𐹡𞤮ⴇ.ssⴃ +N; \u200C𐹡𞤌ⴇ。Ssⴃ; [B1 C1]; [B1 C1] # 𐹡𞤮ⴇ.ssⴃ +B; xn--ykj9323eegwf.xn--ss-151a; [B1]; [B1] +B; xn--0ug332c3q0pr56g.xn--ss-151a; [B1 C1]; [B1 C1] # 𐹡𞤮ⴇ.ssⴃ +B; xn--fnd1201kegrf.xn--ss-fek; [B1 V6]; [B1 V6] +B; xn--fnd599eyj4pr50g.xn--ss-fek; [B1 C1 V6]; [B1 C1 V6] # 𐹡𞤮Ⴇ.ssႣ +B; xn--0ug332c3q0pr56g.xn--zca417t; [B1 C1]; [B1 C1] # 𐹡𞤮ⴇ.ßⴃ +B; xn--fnd599eyj4pr50g.xn--zca681f; [B1 C1 V6]; [B1 C1 V6] # 𐹡𞤮Ⴇ.ßႣ +T; \u200C𐹡𞤮ⴇ。ßⴃ; [B1 C1]; [B1] # 𐹡𞤮ⴇ.ßⴃ +N; \u200C𐹡𞤮ⴇ。ßⴃ; [B1 C1]; [B1 C1] # 𐹡𞤮ⴇ.ßⴃ +T; \u200C𐹡𞤌Ⴇ。SSႣ; [B1 C1 P1 V6]; [B1 P1 V6] # 𐹡𞤮Ⴇ.ssႣ +N; 
\u200C𐹡𞤌Ⴇ。SSႣ; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹡𞤮Ⴇ.ssႣ +T; \u200C𐹡𞤮ⴇ。ssⴃ; [B1 C1]; [B1] # 𐹡𞤮ⴇ.ssⴃ +N; \u200C𐹡𞤮ⴇ。ssⴃ; [B1 C1]; [B1 C1] # 𐹡𞤮ⴇ.ssⴃ +T; \u200C𐹡𞤌ⴇ。Ssⴃ; [B1 C1]; [B1] # 𐹡𞤮ⴇ.ssⴃ +N; \u200C𐹡𞤌ⴇ。Ssⴃ; [B1 C1]; [B1 C1] # 𐹡𞤮ⴇ.ssⴃ +T; \u200C𐹡𞤌ⴇ。ßⴃ; [B1 C1]; [B1] # 𐹡𞤮ⴇ.ßⴃ +N; \u200C𐹡𞤌ⴇ。ßⴃ; [B1 C1]; [B1 C1] # 𐹡𞤮ⴇ.ßⴃ +T; \u200C𐹡𞤌ⴇ。ssⴃ; [B1 C1]; [B1] # 𐹡𞤮ⴇ.ssⴃ +N; \u200C𐹡𞤌ⴇ。ssⴃ; [B1 C1]; [B1 C1] # 𐹡𞤮ⴇ.ssⴃ +T; \u200C𐹡𞤌Ⴇ。Ssⴃ; [B1 C1 P1 V6]; [B1 P1 V6] # 𐹡𞤮Ⴇ.ssⴃ +N; \u200C𐹡𞤌Ⴇ。Ssⴃ; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹡𞤮Ⴇ.ssⴃ +B; xn--fnd1201kegrf.xn--ss-151a; [B1 V6]; [B1 V6] +B; xn--fnd599eyj4pr50g.xn--ss-151a; [B1 C1 V6]; [B1 C1 V6] # 𐹡𞤮Ⴇ.ssⴃ +T; \u200C𐹡𞤌ⴇ。ßⴃ; [B1 C1]; [B1] # 𐹡𞤮ⴇ.ßⴃ +N; \u200C𐹡𞤌ⴇ。ßⴃ; [B1 C1]; [B1 C1] # 𐹡𞤮ⴇ.ßⴃ +T; \u200C𐹡𞤌ⴇ。ssⴃ; [B1 C1]; [B1] # 𐹡𞤮ⴇ.ssⴃ +N; \u200C𐹡𞤌ⴇ。ssⴃ; [B1 C1]; [B1 C1] # 𐹡𞤮ⴇ.ssⴃ +T; \u200C𐹡𞤌Ⴇ。Ssⴃ; [B1 C1 P1 V6]; [B1 P1 V6] # 𐹡𞤮Ⴇ.ssⴃ +N; \u200C𐹡𞤌Ⴇ。Ssⴃ; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹡𞤮Ⴇ.ssⴃ +B; \u17FF。𞬳; [P1 V6]; [P1 V6] # . +B; \u17FF。𞬳; [P1 V6]; [P1 V6] # . +B; xn--45e.xn--et6h; [V6]; [V6] # . +T; \u0652\u200D。\u0CCD𑚳; [C2 V5]; [V5] # ْ.್𑚳 +N; \u0652\u200D。\u0CCD𑚳; [C2 V5]; [C2 V5] # ْ.್𑚳 +T; \u0652\u200D。\u0CCD𑚳; [C2 V5]; [V5] # ْ.್𑚳 +N; \u0652\u200D。\u0CCD𑚳; [C2 V5]; [C2 V5] # ْ.್𑚳 +B; xn--uhb.xn--8tc4527k; [V5]; [V5] # ْ.್𑚳 +B; xn--uhb882k.xn--8tc4527k; [C2 V5]; [C2 V5] # ْ.್𑚳 +B; -≠ᠻ.\u076D𞥃≮󟷺; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # -≠ᠻ.ݭ𞥃≮ +B; -=\u0338ᠻ.\u076D𞥃<\u0338󟷺; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # -≠ᠻ.ݭ𞥃≮ +B; -≠ᠻ.\u076D𞥃≮󟷺; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # -≠ᠻ.ݭ𞥃≮ +B; -=\u0338ᠻ.\u076D𞥃<\u0338󟷺; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # -≠ᠻ.ݭ𞥃≮ +B; -=\u0338ᠻ.\u076D𞤡<\u0338󟷺; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # -≠ᠻ.ݭ𞥃≮ +B; -≠ᠻ.\u076D𞤡≮󟷺; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # -≠ᠻ.ݭ𞥃≮ +B; xn----g6j886c.xn--xpb049kk353abj99f; [B1 B2 B3 V3 V6]; [B1 B2 B3 V3 V6] # -≠ᠻ.ݭ𞥃≮ +B; -=\u0338ᠻ.\u076D𞤡<\u0338󟷺; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # -≠ᠻ.ݭ𞥃≮ +B; -≠ᠻ.\u076D𞤡≮󟷺; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # -≠ᠻ.ݭ𞥃≮ +B; 󠰆≯\u07B5𐻪.򊥕≮𑁆\u084C; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # ≯.≮𑁆ࡌ +B; 󠰆>\u0338\u07B5𐻪.򊥕<\u0338𑁆\u084C; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # ≯.≮𑁆ࡌ +B; 󠰆≯\u07B5𐻪.򊥕≮𑁆\u084C; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # ≯.≮𑁆ࡌ +B; 󠰆>\u0338\u07B5𐻪.򊥕<\u0338𑁆\u084C; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # ≯.≮𑁆ࡌ +B; xn--zrb797kdm1oes34i.xn--bwb394k8k2o25n6d; [B1 B5 B6 V6]; [B1 B5 B6 V6] # ≯.≮𑁆ࡌ +B; ≠󦋂.\u0600\u0BCD-\u06B9; [B1 P1 V6]; [B1 P1 V6] # ≠.்-ڹ +B; =\u0338󦋂.\u0600\u0BCD-\u06B9; [B1 P1 V6]; [B1 P1 V6] # ≠.்-ڹ +B; xn--1ch22084l.xn----qkc07co6n; [B1 V6]; [B1 V6] # ≠.்-ڹ +B; \u17DD󠁣≠。𐹼𐋤; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ៝≠.𐹼𐋤 +B; \u17DD󠁣=\u0338。𐹼𐋤; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ៝≠.𐹼𐋤 +B; \u17DD󠁣≠。𐹼𐋤; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ៝≠.𐹼𐋤 +B; \u17DD󠁣=\u0338。𐹼𐋤; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ៝≠.𐹼𐋤 +B; xn--54e694cn389z.xn--787ct8r; [B1 V5 V6]; [B1 V5 V6] # ៝≠.𐹼𐋤 +T; ß𰀻񆬗。𝩨🕮ß; [P1 V5 V6]; [P1 V5 V6] +N; ß𰀻񆬗。𝩨🕮ß; [P1 V5 V6]; [P1 V5 V6] +T; ß𰀻񆬗。𝩨🕮ß; [P1 V5 V6]; [P1 V5 V6] +N; ß𰀻񆬗。𝩨🕮ß; [P1 V5 V6]; [P1 V5 V6] +B; SS𰀻񆬗。𝩨🕮SS; [P1 V5 V6]; [P1 V5 V6] +B; ss𰀻񆬗。𝩨🕮ss; [P1 V5 V6]; [P1 V5 V6] +B; Ss𰀻񆬗。𝩨🕮Ss; [P1 V5 V6]; [P1 V5 V6] +B; xn--ss-jl59biy67d.xn--ss-4d11aw87d; [V5 V6]; [V5 V6] +B; xn--zca20040bgrkh.xn--zca3653v86qa; [V5 V6]; [V5 V6] +B; SS𰀻񆬗。𝩨🕮SS; [P1 V5 V6]; [P1 V5 V6] +B; ss𰀻񆬗。𝩨🕮ss; [P1 V5 V6]; [P1 V5 V6] +B; Ss𰀻񆬗。𝩨🕮Ss; [P1 V5 V6]; [P1 V5 V6] +T; \u200D。\u200C; [C1 C2]; [A4_2] # . +N; \u200D。\u200C; [C1 C2]; [C1 C2] # . +B; xn--1ug.xn--0ug; [C1 C2]; [C1 C2] # . 
+T; \u0483𐭞\u200D.\u17B9𞯌򟩚; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # ҃𐭞.ឹ +N; \u0483𐭞\u200D.\u17B9𞯌򟩚; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # ҃𐭞.ឹ +B; xn--m3a6965k.xn--43e8670vmd79b; [B1 V5 V6]; [B1 V5 V6] # ҃𐭞.ឹ +B; xn--m3a412lrr0o.xn--43e8670vmd79b; [B1 C2 V5 V6]; [B1 C2 V5 V6] # ҃𐭞.ឹ +T; \u200C𐠨\u200C临。ꡢ򄷞ⶏ𐹣; [B1 B5 B6 C1 P1 V6]; [B2 B3 B5 B6 P1 V6] # 𐠨临.ꡢⶏ𐹣 +N; \u200C𐠨\u200C临。ꡢ򄷞ⶏ𐹣; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # 𐠨临.ꡢⶏ𐹣 +B; xn--miq9646b.xn--uojv340bk71c99u9f; [B2 B3 B5 B6 V6]; [B2 B3 B5 B6 V6] +B; xn--0uga2656aop9k.xn--uojv340bk71c99u9f; [B1 B5 B6 C1 V6]; [B1 B5 B6 C1 V6] # 𐠨临.ꡢⶏ𐹣 +B; 󠑘.󠄮; [P1 V6]; [P1 V6] +B; 󠑘.󠄮; [P1 V6]; [P1 V6] +B; xn--s136e.; [V6]; [V6] +B; 𐫄\u0D4D.\uAAF6; [B1 B3 B6 V5]; [B1 B3 B6 V5] # 𐫄്.꫶ +B; 𐫄\u0D4D.\uAAF6; [B1 B3 B6 V5]; [B1 B3 B6 V5] # 𐫄്.꫶ +B; xn--wxc7880k.xn--2v9a; [B1 B3 B6 V5]; [B1 B3 B6 V5] # 𐫄്.꫶ +B; \uA9B7󝵙멹。⒛󠨇; [P1 V5 V6]; [P1 V5 V6] # ꦷ멹.⒛ +B; \uA9B7󝵙멹。⒛󠨇; [P1 V5 V6]; [P1 V5 V6] # ꦷ멹.⒛ +B; \uA9B7󝵙멹。20.󠨇; [P1 V5 V6]; [P1 V5 V6] # ꦷ멹.20. +B; \uA9B7󝵙멹。20.󠨇; [P1 V5 V6]; [P1 V5 V6] # ꦷ멹.20. +B; xn--ym9av13acp85w.20.xn--d846e; [V5 V6]; [V5 V6] # ꦷ멹.20. +B; xn--ym9av13acp85w.xn--dth22121k; [V5 V6]; [V5 V6] # ꦷ멹.⒛ +B; Ⴅ󲬹릖󠶚.\u0777𐹳⒊; [B4 B6 P1 V6]; [B4 B6 P1 V6] # Ⴅ릖.ݷ𐹳⒊ +B; Ⴅ󲬹릖󠶚.\u0777𐹳⒊; [B4 B6 P1 V6]; [B4 B6 P1 V6] # Ⴅ릖.ݷ𐹳⒊ +B; Ⴅ󲬹릖󠶚.\u0777𐹳3.; [B4 B6 P1 V6]; [B4 B6 P1 V6] # Ⴅ릖.ݷ𐹳3. +B; Ⴅ󲬹릖󠶚.\u0777𐹳3.; [B4 B6 P1 V6]; [B4 B6 P1 V6] # Ⴅ릖.ݷ𐹳3. +B; ⴅ󲬹릖󠶚.\u0777𐹳3.; [B4 B6 P1 V6]; [B4 B6 P1 V6] # ⴅ릖.ݷ𐹳3. +B; ⴅ󲬹릖󠶚.\u0777𐹳3.; [B4 B6 P1 V6]; [B4 B6 P1 V6] # ⴅ릖.ݷ𐹳3. +B; xn--wkj8016bne45io02g.xn--3-55c6803r.; [B4 B6 V6]; [B4 B6 V6] # ⴅ릖.ݷ𐹳3. +B; xn--dnd2167fnet0io02g.xn--3-55c6803r.; [B4 B6 V6]; [B4 B6 V6] # Ⴅ릖.ݷ𐹳3. +B; ⴅ󲬹릖󠶚.\u0777𐹳⒊; [B4 B6 P1 V6]; [B4 B6 P1 V6] # ⴅ릖.ݷ𐹳⒊ +B; ⴅ󲬹릖󠶚.\u0777𐹳⒊; [B4 B6 P1 V6]; [B4 B6 P1 V6] # ⴅ릖.ݷ𐹳⒊ +B; xn--wkj8016bne45io02g.xn--7pb000mwm4n; [B4 B6 V6]; [B4 B6 V6] # ⴅ릖.ݷ𐹳⒊ +B; xn--dnd2167fnet0io02g.xn--7pb000mwm4n; [B4 B6 V6]; [B4 B6 V6] # Ⴅ릖.ݷ𐹳⒊ +T; \u200C。︒; [C1 P1 V6]; [P1 V6 A4_2] # .︒ +N; \u200C。︒; [C1 P1 V6]; [C1 P1 V6] # .︒ +T; \u200C。。; [C1 A4_2]; [A4_2] # .. +N; \u200C。。; [C1 A4_2]; [C1 A4_2] # .. +B; ..; [A4_2]; [A4_2] +B; xn--0ug..; [C1 A4_2]; [C1 A4_2] # .. 
+B; .xn--y86c; [V6 A4_2]; [V6 A4_2] +B; xn--0ug.xn--y86c; [C1 V6]; [C1 V6] # .︒ +B; ≯\u076D.₄; [B1 P1 V6]; [B1 P1 V6] # ≯ݭ.4 +B; >\u0338\u076D.₄; [B1 P1 V6]; [B1 P1 V6] # ≯ݭ.4 +B; ≯\u076D.4; [B1 P1 V6]; [B1 P1 V6] # ≯ݭ.4 +B; >\u0338\u076D.4; [B1 P1 V6]; [B1 P1 V6] # ≯ݭ.4 +B; xn--xpb149k.4; [B1 V6]; [B1 V6] # ≯ݭ.4 +T; ᡲ-𝟹.ß-\u200C-; [C1 V3]; [V2 V3] # ᡲ-3.ß-- +N; ᡲ-𝟹.ß-\u200C-; [C1 V3]; [C1 V3] # ᡲ-3.ß-- +T; ᡲ-3.ß-\u200C-; [C1 V3]; [V2 V3] # ᡲ-3.ß-- +N; ᡲ-3.ß-\u200C-; [C1 V3]; [C1 V3] # ᡲ-3.ß-- +T; ᡲ-3.SS-\u200C-; [C1 V3]; [V2 V3] # ᡲ-3.ss-- +N; ᡲ-3.SS-\u200C-; [C1 V3]; [C1 V3] # ᡲ-3.ss-- +T; ᡲ-3.ss-\u200C-; [C1 V3]; [V2 V3] # ᡲ-3.ss-- +N; ᡲ-3.ss-\u200C-; [C1 V3]; [C1 V3] # ᡲ-3.ss-- +T; ᡲ-3.Ss-\u200C-; [C1 V3]; [V2 V3] # ᡲ-3.ss-- +N; ᡲ-3.Ss-\u200C-; [C1 V3]; [C1 V3] # ᡲ-3.ss-- +B; xn---3-p9o.ss--; [V2 V3]; [V2 V3] +B; xn---3-p9o.xn--ss---276a; [C1 V3]; [C1 V3] # ᡲ-3.ss-- +B; xn---3-p9o.xn-----fia9303a; [C1 V3]; [C1 V3] # ᡲ-3.ß-- +T; ᡲ-𝟹.SS-\u200C-; [C1 V3]; [V2 V3] # ᡲ-3.ss-- +N; ᡲ-𝟹.SS-\u200C-; [C1 V3]; [C1 V3] # ᡲ-3.ss-- +T; ᡲ-𝟹.ss-\u200C-; [C1 V3]; [V2 V3] # ᡲ-3.ss-- +N; ᡲ-𝟹.ss-\u200C-; [C1 V3]; [C1 V3] # ᡲ-3.ss-- +T; ᡲ-𝟹.Ss-\u200C-; [C1 V3]; [V2 V3] # ᡲ-3.ss-- +N; ᡲ-𝟹.Ss-\u200C-; [C1 V3]; [C1 V3] # ᡲ-3.ss-- +B; \uFD08𝟦\u0647󎊯。Ӏ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ضي4ه.Ӏ +B; \u0636\u064A4\u0647󎊯。Ӏ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ضي4ه.Ӏ +B; \u0636\u064A4\u0647󎊯。ӏ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ضي4ه.ӏ +B; xn--4-tnc6ck183523b.xn--s5a; [B2 B3 V6]; [B2 B3 V6] # ضي4ه.ӏ +B; xn--4-tnc6ck183523b.xn--d5a; [B2 B3 V6]; [B2 B3 V6] # ضي4ه.Ӏ +B; \uFD08𝟦\u0647󎊯。ӏ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ضي4ه.ӏ +B; -.\u0602\u0622𑆾🐹; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -.آ𑆾🐹 +B; -.\u0602\u0627\u0653𑆾🐹; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -.آ𑆾🐹 +B; -.xn--kfb8dy983hgl7g; [B1 V3 V6]; [B1 V3 V6] # -.آ𑆾🐹 +B; 󙶜ᢘ。\u1A7F⺢; [P1 V5 V6]; [P1 V5 V6] # ᢘ.᩿⺢ +B; xn--ibf35138o.xn--fpfz94g; [V5 V6]; [V5 V6] # ᢘ.᩿⺢ +B; ≠ႷᠤႫ。\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠ႷᠤႫ.͌س觴 +B; =\u0338ႷᠤႫ。\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠ႷᠤႫ.͌س觴 +B; ≠ႷᠤႫ。\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠ႷᠤႫ.͌س觴 +B; =\u0338ႷᠤႫ。\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠ႷᠤႫ.͌س觴 +B; =\u0338ⴗᠤⴋ。\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠ⴗᠤⴋ.͌س觴 +B; ≠ⴗᠤⴋ。\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠ⴗᠤⴋ.͌س觴 +B; ≠Ⴗᠤⴋ。\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠Ⴗᠤⴋ.͌س觴 +B; =\u0338Ⴗᠤⴋ。\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠Ⴗᠤⴋ.͌س觴 +B; xn--vnd619as6ig6k.\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠Ⴗᠤⴋ.͌س觴 +B; XN--VND619AS6IG6K.\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠Ⴗᠤⴋ.͌س觴 +B; Xn--Vnd619as6ig6k.\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠Ⴗᠤⴋ.͌س觴 +B; xn--66e353ce0ilb.\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠ⴗᠤⴋ.͌س觴 +B; XN--66E353CE0ILB.\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠ⴗᠤⴋ.͌س觴 +B; Xn--66E353ce0ilb.\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠ⴗᠤⴋ.͌س觴 +B; xn--jndx718cnnl.\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠ႷᠤႫ.͌س觴 +B; XN--JNDX718CNNL.\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠ႷᠤႫ.͌س觴 +B; Xn--Jndx718cnnl.\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠ႷᠤႫ.͌س觴 +B; =\u0338ⴗᠤⴋ。\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠ⴗᠤⴋ.͌س觴 +B; ≠ⴗᠤⴋ。\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠ⴗᠤⴋ.͌س觴 +B; ≠Ⴗᠤⴋ。\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠Ⴗᠤⴋ.͌س觴 +B; =\u0338Ⴗᠤⴋ。\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 
V6 A3] # ≠Ⴗᠤⴋ.͌س觴 +B; \u0667.𐥨; [B1 P1 V6]; [B1 P1 V6] # ٧. +B; xn--gib.xn--vm9c; [B1 V6]; [B1 V6] # ٧. +T; \uA9C0𝟯。\u200D񼑥𐹪\u1BF3; [B1 C2 P1 V5 V6]; [B5 P1 V5 V6] # ꧀3.𐹪᯳ +N; \uA9C0𝟯。\u200D񼑥𐹪\u1BF3; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # ꧀3.𐹪᯳ +T; \uA9C03。\u200D񼑥𐹪\u1BF3; [B1 C2 P1 V5 V6]; [B5 P1 V5 V6] # ꧀3.𐹪᯳ +N; \uA9C03。\u200D񼑥𐹪\u1BF3; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # ꧀3.𐹪᯳ +B; xn--3-5z4e.xn--1zfz754hncv8b; [B5 V5 V6]; [B5 V5 V6] # ꧀3.𐹪᯳ +B; xn--3-5z4e.xn--1zf96ony8ygd68c; [B1 C2 V5 V6]; [B1 C2 V5 V6] # ꧀3.𐹪᯳ +B; 򣕄4񠖽.≯\u0664𑀾󠸌; [B1 P1 V6]; [B1 P1 V6] # 4.≯٤𑀾 +B; 򣕄4񠖽.>\u0338\u0664𑀾󠸌; [B1 P1 V6]; [B1 P1 V6] # 4.≯٤𑀾 +B; xn--4-fg85dl688i.xn--dib174li86ntdy0i; [B1 V6]; [B1 V6] # 4.≯٤𑀾 +B; 򗆧𝟯。⒈\u1A76𝟚򠘌; [P1 V6]; [P1 V6] # 3.⒈᩶2 +B; 򗆧3。1.\u1A762򠘌; [P1 V5 V6]; [P1 V5 V6] # 3.1.᩶2 +B; xn--3-rj42h.1.xn--2-13k96240l; [V5 V6]; [V5 V6] # 3.1.᩶2 +B; xn--3-rj42h.xn--2-13k746cq465x; [V6]; [V6] # 3.⒈᩶2 +T; \u200D₅⒈。≯𝟴\u200D; [C2 P1 V6]; [P1 V6] # 5⒈.≯8 +N; \u200D₅⒈。≯𝟴\u200D; [C2 P1 V6]; [C2 P1 V6] # 5⒈.≯8 +T; \u200D₅⒈。>\u0338𝟴\u200D; [C2 P1 V6]; [P1 V6] # 5⒈.≯8 +N; \u200D₅⒈。>\u0338𝟴\u200D; [C2 P1 V6]; [C2 P1 V6] # 5⒈.≯8 +T; \u200D51.。≯8\u200D; [C2 P1 V6 A4_2]; [P1 V6 A4_2] # 51..≯8 +N; \u200D51.。≯8\u200D; [C2 P1 V6 A4_2]; [C2 P1 V6 A4_2] # 51..≯8 +T; \u200D51.。>\u03388\u200D; [C2 P1 V6 A4_2]; [P1 V6 A4_2] # 51..≯8 +N; \u200D51.。>\u03388\u200D; [C2 P1 V6 A4_2]; [C2 P1 V6 A4_2] # 51..≯8 +B; 51..xn--8-ogo; [V6 A4_2]; [V6 A4_2] +B; xn--51-l1t..xn--8-ugn00i; [C2 V6 A4_2]; [C2 V6 A4_2] # 51..≯8 +B; xn--5-ecp.xn--8-ogo; [V6]; [V6] +B; xn--5-tgnz5r.xn--8-ugn00i; [C2 V6]; [C2 V6] # 5⒈.≯8 +T; ꡰ\u0697\u1086.򪘙\u072F≠\u200C; [B5 B6 C1 P1 V6]; [B5 B6 P1 V6] # ꡰڗႆ.ܯ≠ +N; ꡰ\u0697\u1086.򪘙\u072F≠\u200C; [B5 B6 C1 P1 V6]; [B5 B6 C1 P1 V6] # ꡰڗႆ.ܯ≠ +T; ꡰ\u0697\u1086.򪘙\u072F=\u0338\u200C; [B5 B6 C1 P1 V6]; [B5 B6 P1 V6] # ꡰڗႆ.ܯ≠ +N; ꡰ\u0697\u1086.򪘙\u072F=\u0338\u200C; [B5 B6 C1 P1 V6]; [B5 B6 C1 P1 V6] # ꡰڗႆ.ܯ≠ +T; ꡰ\u0697\u1086.򪘙\u072F≠\u200C; [B5 B6 C1 P1 V6]; [B5 B6 P1 V6] # ꡰڗႆ.ܯ≠ +N; ꡰ\u0697\u1086.򪘙\u072F≠\u200C; [B5 B6 C1 P1 V6]; [B5 B6 C1 P1 V6] # ꡰڗႆ.ܯ≠ +T; ꡰ\u0697\u1086.򪘙\u072F=\u0338\u200C; [B5 B6 C1 P1 V6]; [B5 B6 P1 V6] # ꡰڗႆ.ܯ≠ +N; ꡰ\u0697\u1086.򪘙\u072F=\u0338\u200C; [B5 B6 C1 P1 V6]; [B5 B6 C1 P1 V6] # ꡰڗႆ.ܯ≠ +B; xn--tjb002cn51k.xn--5nb630lbj91q; [B5 B6 V6]; [B5 B6 V6] # ꡰڗႆ.ܯ≠ +B; xn--tjb002cn51k.xn--5nb448jcubcz547b; [B5 B6 C1 V6]; [B5 B6 C1 V6] # ꡰڗႆ.ܯ≠ +B; 𑄱。򪌿𐹵; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] +B; 𑄱。򪌿𐹵; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] +B; xn--t80d.xn--to0d14792b; [B1 B3 B5 B6 V5 V6]; [B1 B3 B5 B6 V5 V6] +B; 𝟥\u0600。\u073D; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # 3.ܽ +B; 3\u0600。\u073D; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # 3.ܽ +B; xn--3-rkc.xn--kob; [B1 B3 B6 V5 V6]; [B1 B3 B6 V5 V6] # 3.ܽ +B; \u0637𐹣\u0666.\u076D긷; [B2 B3]; [B2 B3] # ط𐹣٦.ݭ긷 +B; \u0637𐹣\u0666.\u076D긷; [B2 B3]; [B2 B3] # ط𐹣٦.ݭ긷 +B; xn--2gb8gu829f.xn--xpb0156f; [B2 B3]; [B2 B3] # ط𐹣٦.ݭ긷 +B; ︒Ↄ\u2DE7򾀃.Ⴗ𐣞; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # ︒Ↄⷧ.Ⴗ +B; 。Ↄ\u2DE7򾀃.Ⴗ𐣞; [B5 B6 P1 V6 A4_2]; [B5 B6 P1 V6 A4_2] # .Ↄⷧ.Ⴗ +B; 。ↄ\u2DE7򾀃.ⴗ𐣞; [B5 B6 P1 V6 A4_2]; [B5 B6 P1 V6 A4_2] # .ↄⷧ.ⴗ +B; .xn--r5gy00cll06u.xn--flj4541e; [B5 B6 V6 A4_2]; [B5 B6 V6 A4_2] # .ↄⷧ.ⴗ +B; .xn--q5g000cll06u.xn--vnd8618j; [B5 B6 V6 A4_2]; [B5 B6 V6 A4_2] # .Ↄⷧ.Ⴗ +B; ︒ↄ\u2DE7򾀃.ⴗ𐣞; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # ︒ↄⷧ.ⴗ +B; xn--r5gy00c056n0226g.xn--flj4541e; [B1 B5 B6 V6]; [B1 B5 B6 V6] # ︒ↄⷧ.ⴗ +B; xn--q5g000c056n0226g.xn--vnd8618j; [B1 B5 B6 V6]; [B1 B5 B6 V6] # ︒Ↄⷧ.Ⴗ +B; \u0600.\u05B1; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # .ֱ +B; 
xn--ifb.xn--8cb; [B1 B3 B6 V5 V6]; [B1 B3 B6 V5 V6] # .ֱ +T; ς≯。𐹽; [B1 B6 P1 V6]; [B1 B6 P1 V6] +N; ς≯。𐹽; [B1 B6 P1 V6]; [B1 B6 P1 V6] +T; ς>\u0338。𐹽; [B1 B6 P1 V6]; [B1 B6 P1 V6] +N; ς>\u0338。𐹽; [B1 B6 P1 V6]; [B1 B6 P1 V6] +T; ς≯。𐹽; [B1 B6 P1 V6]; [B1 B6 P1 V6] +N; ς≯。𐹽; [B1 B6 P1 V6]; [B1 B6 P1 V6] +T; ς>\u0338。𐹽; [B1 B6 P1 V6]; [B1 B6 P1 V6] +N; ς>\u0338。𐹽; [B1 B6 P1 V6]; [B1 B6 P1 V6] +B; Σ>\u0338。𐹽; [B1 B6 P1 V6]; [B1 B6 P1 V6] +B; Σ≯。𐹽; [B1 B6 P1 V6]; [B1 B6 P1 V6] +B; σ≯。𐹽; [B1 B6 P1 V6]; [B1 B6 P1 V6] +B; σ>\u0338。𐹽; [B1 B6 P1 V6]; [B1 B6 P1 V6] +B; xn--4xa818m.xn--1o0d; [B1 B6 V6]; [B1 B6 V6] +B; xn--3xa028m.xn--1o0d; [B1 B6 V6]; [B1 B6 V6] +B; Σ>\u0338。𐹽; [B1 B6 P1 V6]; [B1 B6 P1 V6] +B; Σ≯。𐹽; [B1 B6 P1 V6]; [B1 B6 P1 V6] +B; σ≯。𐹽; [B1 B6 P1 V6]; [B1 B6 P1 V6] +B; σ>\u0338。𐹽; [B1 B6 P1 V6]; [B1 B6 P1 V6] +T; \u17D2\u200D\u075F。𐹶; [B1 V5]; [B1 V5] # ្ݟ.𐹶 +N; \u17D2\u200D\u075F。𐹶; [B1 V5]; [B1 V5] # ្ݟ.𐹶 +B; xn--jpb535f.xn--uo0d; [B1 V5]; [B1 V5] # ្ݟ.𐹶 +B; xn--jpb535fv9f.xn--uo0d; [B1 V5]; [B1 V5] # ្ݟ.𐹶 +B; 𾷂\u0A42Ⴊ񂂟.≮; [P1 V6]; [P1 V6] # ੂႪ.≮ +B; 𾷂\u0A42Ⴊ񂂟.<\u0338; [P1 V6]; [P1 V6] # ੂႪ.≮ +B; 𾷂\u0A42ⴊ񂂟.<\u0338; [P1 V6]; [P1 V6] # ੂⴊ.≮ +B; 𾷂\u0A42ⴊ񂂟.≮; [P1 V6]; [P1 V6] # ੂⴊ.≮ +B; xn--nbc229o4y27dgskb.xn--gdh; [V6]; [V6] # ੂⴊ.≮ +B; xn--nbc493aro75ggskb.xn--gdh; [V6]; [V6] # ੂႪ.≮ +B; ꡠ.۲; ꡠ.۲; xn--5c9a.xn--fmb +B; ꡠ.۲; ; xn--5c9a.xn--fmb +B; xn--5c9a.xn--fmb; ꡠ.۲; xn--5c9a.xn--fmb +B; 𐹣񄷄。ꡬ🄄; [B1 P1 V6]; [B1 P1 V6] +B; 𐹣񄷄。ꡬ3,; [B1 B6 P1 V6]; [B1 B6 P1 V6] +B; xn--bo0d0203l.xn--3,-yj9h; [B1 B6 P1 V6]; [B1 B6 P1 V6] +B; xn--bo0d0203l.xn--id9a4443d; [B1 V6]; [B1 V6] +T; -\u0C4D𞾀𑲓。\u200D\u0D4D; [B1 C2 P1 V3 V6]; [B1 B3 B6 P1 V3 V5 V6] # -్𑲓.് +N; -\u0C4D𞾀𑲓。\u200D\u0D4D; [B1 C2 P1 V3 V6]; [B1 C2 P1 V3 V6] # -్𑲓.് +T; -\u0C4D𞾀𑲓。\u200D\u0D4D; [B1 C2 P1 V3 V6]; [B1 B3 B6 P1 V3 V5 V6] # -్𑲓.് +N; -\u0C4D𞾀𑲓。\u200D\u0D4D; [B1 C2 P1 V3 V6]; [B1 C2 P1 V3 V6] # -్𑲓.് +B; xn----x6e0220sclug.xn--wxc; [B1 B3 B6 V3 V5 V6]; [B1 B3 B6 V3 V5 V6] # -్𑲓.് +B; xn----x6e0220sclug.xn--wxc317g; [B1 C2 V3 V6]; [B1 C2 V3 V6] # -్𑲓.് +T; \uA67D\u200C霣🄆。\u200C𑁂\u1B01; [C1 P1 V5 V6]; [P1 V5 V6] # ꙽霣🄆.𑁂ᬁ +N; \uA67D\u200C霣🄆。\u200C𑁂\u1B01; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ꙽霣🄆.𑁂ᬁ +T; \uA67D\u200C霣🄆。\u200C𑁂\u1B01; [C1 P1 V5 V6]; [P1 V5 V6] # ꙽霣🄆.𑁂ᬁ +N; \uA67D\u200C霣🄆。\u200C𑁂\u1B01; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ꙽霣🄆.𑁂ᬁ +T; \uA67D\u200C霣5,。\u200C𑁂\u1B01; [C1 P1 V5 V6]; [P1 V5 V6] # ꙽霣5,.𑁂ᬁ +N; \uA67D\u200C霣5,。\u200C𑁂\u1B01; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ꙽霣5,.𑁂ᬁ +B; xn--5,-op8g373c.xn--4sf0725i; [P1 V5 V6]; [P1 V5 V6] # ꙽霣5,.𑁂ᬁ +B; xn--5,-i1tz135dnbqa.xn--4sf36u6u4w; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ꙽霣5,.𑁂ᬁ +B; xn--2q5a751a653w.xn--4sf0725i; [V5 V6]; [V5 V6] # ꙽霣🄆.𑁂ᬁ +B; xn--0ug4208b2vjuk63a.xn--4sf36u6u4w; [C1 V5 V6]; [C1 V5 V6] # ꙽霣🄆.𑁂ᬁ +B; 兎。ᠼ󠴜𑚶𑰿; [P1 V6]; [P1 V6] +B; 兎。ᠼ󠴜𑚶𑰿; [P1 V6]; [P1 V6] +B; xn--b5q.xn--v7e6041kqqd4m251b; [V6]; [V6] +T; 𝟙。\u200D𝟸\u200D⁷; [C2]; 1.27 # 1.27 +N; 𝟙。\u200D𝟸\u200D⁷; [C2]; [C2] # 1.27 +T; 1。\u200D2\u200D7; [C2]; 1.27 # 1.27 +N; 1。\u200D2\u200D7; [C2]; [C2] # 1.27 +B; 1.27; ; +B; 1.xn--27-l1tb; [C2]; [C2] # 1.27 +B; ᡨ-。󠻋𝟷; [P1 V3 V6]; [P1 V3 V6] +B; ᡨ-。󠻋1; [P1 V3 V6]; [P1 V3 V6] +B; xn----z8j.xn--1-5671m; [V3 V6]; [V3 V6] +B; 𑰻񵀐𐫚.\u0668⁹; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𑰻𐫚.٨9 +B; 𑰻񵀐𐫚.\u06689; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𑰻𐫚.٨9 +B; xn--gx9cr01aul57i.xn--9-oqc; [B1 V5 V6]; [B1 V5 V6] # 𑰻𐫚.٨9 +T; Ⴜ򈷭\u0F80⾇。Ⴏ♀\u200C\u200C; [C1 P1 V6]; [P1 V6] # Ⴜྀ舛.Ⴏ♀ +N; Ⴜ򈷭\u0F80⾇。Ⴏ♀\u200C\u200C; [C1 P1 V6]; [C1 P1 V6] # Ⴜྀ舛.Ⴏ♀ +T; Ⴜ򈷭\u0F80舛。Ⴏ♀\u200C\u200C; [C1 P1 V6]; [P1 V6] # Ⴜྀ舛.Ⴏ♀ +N; 
Ⴜ򈷭\u0F80舛。Ⴏ♀\u200C\u200C; [C1 P1 V6]; [C1 P1 V6] # Ⴜྀ舛.Ⴏ♀ +T; ⴜ򈷭\u0F80舛。ⴏ♀\u200C\u200C; [C1 P1 V6]; [P1 V6] # ⴜྀ舛.ⴏ♀ +N; ⴜ򈷭\u0F80舛。ⴏ♀\u200C\u200C; [C1 P1 V6]; [C1 P1 V6] # ⴜྀ舛.ⴏ♀ +B; xn--zed372mdj2do3v4h.xn--e5h11w; [V6]; [V6] # ⴜྀ舛.ⴏ♀ +B; xn--zed372mdj2do3v4h.xn--0uga678bgyh; [C1 V6]; [C1 V6] # ⴜྀ舛.ⴏ♀ +B; xn--zed54dz10wo343g.xn--nnd651i; [V6]; [V6] # Ⴜྀ舛.Ⴏ♀ +B; xn--zed54dz10wo343g.xn--nnd089ea464d; [C1 V6]; [C1 V6] # Ⴜྀ舛.Ⴏ♀ +T; ⴜ򈷭\u0F80⾇。ⴏ♀\u200C\u200C; [C1 P1 V6]; [P1 V6] # ⴜྀ舛.ⴏ♀ +N; ⴜ򈷭\u0F80⾇。ⴏ♀\u200C\u200C; [C1 P1 V6]; [C1 P1 V6] # ⴜྀ舛.ⴏ♀ +T; 𑁆𝟰.\u200D; [C2 V5]; [V5] # 𑁆4. +N; 𑁆𝟰.\u200D; [C2 V5]; [C2 V5] # 𑁆4. +T; 𑁆4.\u200D; [C2 V5]; [V5] # 𑁆4. +N; 𑁆4.\u200D; [C2 V5]; [C2 V5] # 𑁆4. +B; xn--4-xu7i.; [V5]; [V5] +B; xn--4-xu7i.xn--1ug; [C2 V5]; [C2 V5] # 𑁆4. +T; 񮴘Ⴞ癀。𑘿\u200D\u200C붼; [C1 P1 V5 V6]; [P1 V5 V6] # Ⴞ癀.𑘿붼 +N; 񮴘Ⴞ癀。𑘿\u200D\u200C붼; [C1 P1 V5 V6]; [C1 P1 V5 V6] # Ⴞ癀.𑘿붼 +T; 񮴘Ⴞ癀。𑘿\u200D\u200C붼; [C1 P1 V5 V6]; [P1 V5 V6] # Ⴞ癀.𑘿붼 +N; 񮴘Ⴞ癀。𑘿\u200D\u200C붼; [C1 P1 V5 V6]; [C1 P1 V5 V6] # Ⴞ癀.𑘿붼 +T; 񮴘Ⴞ癀。𑘿\u200D\u200C붼; [C1 P1 V5 V6]; [P1 V5 V6] # Ⴞ癀.𑘿붼 +N; 񮴘Ⴞ癀。𑘿\u200D\u200C붼; [C1 P1 V5 V6]; [C1 P1 V5 V6] # Ⴞ癀.𑘿붼 +T; 񮴘Ⴞ癀。𑘿\u200D\u200C붼; [C1 P1 V5 V6]; [P1 V5 V6] # Ⴞ癀.𑘿붼 +N; 񮴘Ⴞ癀。𑘿\u200D\u200C붼; [C1 P1 V5 V6]; [C1 P1 V5 V6] # Ⴞ癀.𑘿붼 +T; 񮴘ⴞ癀。𑘿\u200D\u200C붼; [C1 P1 V5 V6]; [P1 V5 V6] # ⴞ癀.𑘿붼 +N; 񮴘ⴞ癀。𑘿\u200D\u200C붼; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ⴞ癀.𑘿붼 +T; 񮴘ⴞ癀。𑘿\u200D\u200C붼; [C1 P1 V5 V6]; [P1 V5 V6] # ⴞ癀.𑘿붼 +N; 񮴘ⴞ癀。𑘿\u200D\u200C붼; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ⴞ癀.𑘿붼 +B; xn--mlju35u7qx2f.xn--et3bn23n; [V5 V6]; [V5 V6] +B; xn--mlju35u7qx2f.xn--0ugb6122js83c; [C1 V5 V6]; [C1 V5 V6] # ⴞ癀.𑘿붼 +B; xn--2nd6803c7q37d.xn--et3bn23n; [V5 V6]; [V5 V6] +B; xn--2nd6803c7q37d.xn--0ugb6122js83c; [C1 V5 V6]; [C1 V5 V6] # Ⴞ癀.𑘿붼 +T; 񮴘ⴞ癀。𑘿\u200D\u200C붼; [C1 P1 V5 V6]; [P1 V5 V6] # ⴞ癀.𑘿붼 +N; 񮴘ⴞ癀。𑘿\u200D\u200C붼; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ⴞ癀.𑘿붼 +T; 񮴘ⴞ癀。𑘿\u200D\u200C붼; [C1 P1 V5 V6]; [P1 V5 V6] # ⴞ癀.𑘿붼 +N; 񮴘ⴞ癀。𑘿\u200D\u200C붼; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ⴞ癀.𑘿붼 +B; 󚀅-\u0BCD。\u06B9; [B6 P1 V6]; [B6 P1 V6] # -்.ڹ +B; xn----mze84808x.xn--skb; [B6 V6]; [B6 V6] # -்.ڹ +B; ᡃ𝟧≯ᠣ.氁񨏱ꁫ; [P1 V6]; [P1 V6] +B; ᡃ𝟧>\u0338ᠣ.氁񨏱ꁫ; [P1 V6]; [P1 V6] +B; ᡃ5≯ᠣ.氁񨏱ꁫ; [P1 V6]; [P1 V6] +B; ᡃ5>\u0338ᠣ.氁񨏱ꁫ; [P1 V6]; [P1 V6] +B; xn--5-24jyf768b.xn--lqw213ime95g; [V6]; [V6] +B; 𐹬𝩇.\u0F76; [B1 B3 B6 V5]; [B1 B3 B6 V5] # 𐹬𝩇.ྲྀ +B; 𐹬𝩇.\u0FB2\u0F80; [B1 B3 B6 V5]; [B1 B3 B6 V5] # 𐹬𝩇.ྲྀ +B; 𐹬𝩇.\u0FB2\u0F80; [B1 B3 B6 V5]; [B1 B3 B6 V5] # 𐹬𝩇.ྲྀ +B; xn--ko0d8295a.xn--zed3h; [B1 B3 B6 V5]; [B1 B3 B6 V5] # 𐹬𝩇.ྲྀ +B; -𑈶⒏.⒎𰛢󠎭; [P1 V3 V6]; [P1 V3 V6] +B; -𑈶8..7.𰛢󠎭; [P1 V3 V6 A4_2]; [P1 V3 V6 A4_2] +B; xn---8-bv5o..7.xn--c35nf1622b; [V3 V6 A4_2]; [V3 V6 A4_2] +B; xn----scp6252h.xn--zshy411yzpx2d; [V3 V6]; [V3 V6] +T; \u200CႡ畝\u200D.≮; [C1 C2 P1 V6]; [P1 V6] # Ⴁ畝.≮ +N; \u200CႡ畝\u200D.≮; [C1 C2 P1 V6]; [C1 C2 P1 V6] # Ⴁ畝.≮ +T; \u200CႡ畝\u200D.<\u0338; [C1 C2 P1 V6]; [P1 V6] # Ⴁ畝.≮ +N; \u200CႡ畝\u200D.<\u0338; [C1 C2 P1 V6]; [C1 C2 P1 V6] # Ⴁ畝.≮ +T; \u200CႡ畝\u200D.≮; [C1 C2 P1 V6]; [P1 V6] # Ⴁ畝.≮ +N; \u200CႡ畝\u200D.≮; [C1 C2 P1 V6]; [C1 C2 P1 V6] # Ⴁ畝.≮ +T; \u200CႡ畝\u200D.<\u0338; [C1 C2 P1 V6]; [P1 V6] # Ⴁ畝.≮ +N; \u200CႡ畝\u200D.<\u0338; [C1 C2 P1 V6]; [C1 C2 P1 V6] # Ⴁ畝.≮ +T; \u200Cⴁ畝\u200D.<\u0338; [C1 C2 P1 V6]; [P1 V6] # ⴁ畝.≮ +N; \u200Cⴁ畝\u200D.<\u0338; [C1 C2 P1 V6]; [C1 C2 P1 V6] # ⴁ畝.≮ +T; \u200Cⴁ畝\u200D.≮; [C1 C2 P1 V6]; [P1 V6] # ⴁ畝.≮ +N; \u200Cⴁ畝\u200D.≮; [C1 C2 P1 V6]; [C1 C2 P1 V6] # ⴁ畝.≮ +B; xn--skjy82u.xn--gdh; [V6]; [V6] +B; xn--0ugc160hb36e.xn--gdh; [C1 C2 V6]; [C1 C2 V6] # ⴁ畝.≮ +B; xn--8md0962c.xn--gdh; [V6]; [V6] +B; 
xn--8md700fea3748f.xn--gdh; [C1 C2 V6]; [C1 C2 V6] # Ⴁ畝.≮ +T; \u200Cⴁ畝\u200D.<\u0338; [C1 C2 P1 V6]; [P1 V6] # ⴁ畝.≮ +N; \u200Cⴁ畝\u200D.<\u0338; [C1 C2 P1 V6]; [C1 C2 P1 V6] # ⴁ畝.≮ +T; \u200Cⴁ畝\u200D.≮; [C1 C2 P1 V6]; [P1 V6] # ⴁ畝.≮ +N; \u200Cⴁ畝\u200D.≮; [C1 C2 P1 V6]; [C1 C2 P1 V6] # ⴁ畝.≮ +T; 歷。𐹻≯󳛽\u200D; [B1 C2 P1 V6]; [B1 P1 V6] # 歷.𐹻≯ +N; 歷。𐹻≯󳛽\u200D; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 歷.𐹻≯ +T; 歷。𐹻>\u0338󳛽\u200D; [B1 C2 P1 V6]; [B1 P1 V6] # 歷.𐹻≯ +N; 歷。𐹻>\u0338󳛽\u200D; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 歷.𐹻≯ +T; 歷。𐹻≯󳛽\u200D; [B1 C2 P1 V6]; [B1 P1 V6] # 歷.𐹻≯ +N; 歷。𐹻≯󳛽\u200D; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 歷.𐹻≯ +T; 歷。𐹻>\u0338󳛽\u200D; [B1 C2 P1 V6]; [B1 P1 V6] # 歷.𐹻≯ +N; 歷。𐹻>\u0338󳛽\u200D; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 歷.𐹻≯ +B; xn--nmw.xn--hdh7804gdms2h; [B1 V6]; [B1 V6] +B; xn--nmw.xn--1ugx6gs128a1134j; [B1 C2 V6]; [B1 C2 V6] # 歷.𐹻≯ +T; \u0ECB\u200D.鎁󠰑; [C2 P1 V5 V6]; [P1 V5 V6] # ໋.鎁 +N; \u0ECB\u200D.鎁󠰑; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ໋.鎁 +T; \u0ECB\u200D.鎁󠰑; [C2 P1 V5 V6]; [P1 V5 V6] # ໋.鎁 +N; \u0ECB\u200D.鎁󠰑; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ໋.鎁 +B; xn--t8c.xn--iz4a43209d; [V5 V6]; [V5 V6] # ໋.鎁 +B; xn--t8c059f.xn--iz4a43209d; [C2 V5 V6]; [C2 V5 V6] # ໋.鎁 +T; \u200D\u200C𞤀。𱘅𐶃; [B1 B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # 𞤢. +N; \u200D\u200C𞤀。𱘅𐶃; [B1 B5 B6 C1 C2 P1 V6]; [B1 B5 B6 C1 C2 P1 V6] # 𞤢. +T; \u200D\u200C𞤀。𱘅𐶃; [B1 B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # 𞤢. +N; \u200D\u200C𞤀。𱘅𐶃; [B1 B5 B6 C1 C2 P1 V6]; [B1 B5 B6 C1 C2 P1 V6] # 𞤢. +T; \u200D\u200C𞤢。𱘅𐶃; [B1 B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # 𞤢. +N; \u200D\u200C𞤢。𱘅𐶃; [B1 B5 B6 C1 C2 P1 V6]; [B1 B5 B6 C1 C2 P1 V6] # 𞤢. +B; xn--9d6h.xn--wh0dj799f; [B5 B6 V6]; [B5 B6 V6] +B; xn--0ugb45126a.xn--wh0dj799f; [B1 B5 B6 C1 C2 V6]; [B1 B5 B6 C1 C2 V6] # 𞤢. +T; \u200D\u200C𞤢。𱘅𐶃; [B1 B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # 𞤢. +N; \u200D\u200C𞤢。𱘅𐶃; [B1 B5 B6 C1 C2 P1 V6]; [B1 B5 B6 C1 C2 P1 V6] # 𞤢. 
+T; \u0628≠𝟫-.ς⒍𐹦≠; [B3 B5 B6 P1 V3 V6]; [B3 B5 B6 P1 V3 V6] # ب≠9-.ς⒍𐹦≠ +N; \u0628≠𝟫-.ς⒍𐹦≠; [B3 B5 B6 P1 V3 V6]; [B3 B5 B6 P1 V3 V6] # ب≠9-.ς⒍𐹦≠ +T; \u0628=\u0338𝟫-.ς⒍𐹦=\u0338; [B3 B5 B6 P1 V3 V6]; [B3 B5 B6 P1 V3 V6] # ب≠9-.ς⒍𐹦≠ +N; \u0628=\u0338𝟫-.ς⒍𐹦=\u0338; [B3 B5 B6 P1 V3 V6]; [B3 B5 B6 P1 V3 V6] # ب≠9-.ς⒍𐹦≠ +T; \u0628≠9-.ς6.𐹦≠; [B1 B3 P1 V3 V6]; [B1 B3 P1 V3 V6] # ب≠9-.ς6.𐹦≠ +N; \u0628≠9-.ς6.𐹦≠; [B1 B3 P1 V3 V6]; [B1 B3 P1 V3 V6] # ب≠9-.ς6.𐹦≠ +T; \u0628=\u03389-.ς6.𐹦=\u0338; [B1 B3 P1 V3 V6]; [B1 B3 P1 V3 V6] # ب≠9-.ς6.𐹦≠ +N; \u0628=\u03389-.ς6.𐹦=\u0338; [B1 B3 P1 V3 V6]; [B1 B3 P1 V3 V6] # ب≠9-.ς6.𐹦≠ +B; \u0628=\u03389-.Σ6.𐹦=\u0338; [B1 B3 P1 V3 V6]; [B1 B3 P1 V3 V6] # ب≠9-.σ6.𐹦≠ +B; \u0628≠9-.Σ6.𐹦≠; [B1 B3 P1 V3 V6]; [B1 B3 P1 V3 V6] # ب≠9-.σ6.𐹦≠ +B; \u0628≠9-.σ6.𐹦≠; [B1 B3 P1 V3 V6]; [B1 B3 P1 V3 V6] # ب≠9-.σ6.𐹦≠ +B; \u0628=\u03389-.σ6.𐹦=\u0338; [B1 B3 P1 V3 V6]; [B1 B3 P1 V3 V6] # ب≠9-.σ6.𐹦≠ +B; xn--9--etd0100a.xn--6-zmb.xn--1ch8704g; [B1 B3 V3 V6]; [B1 B3 V3 V6] # ب≠9-.σ6.𐹦≠ +B; xn--9--etd0100a.xn--6-xmb.xn--1ch8704g; [B1 B3 V3 V6]; [B1 B3 V3 V6] # ب≠9-.ς6.𐹦≠ +B; \u0628=\u0338𝟫-.Σ⒍𐹦=\u0338; [B3 B5 B6 P1 V3 V6]; [B3 B5 B6 P1 V3 V6] # ب≠9-.σ⒍𐹦≠ +B; \u0628≠𝟫-.Σ⒍𐹦≠; [B3 B5 B6 P1 V3 V6]; [B3 B5 B6 P1 V3 V6] # ب≠9-.σ⒍𐹦≠ +B; \u0628≠𝟫-.σ⒍𐹦≠; [B3 B5 B6 P1 V3 V6]; [B3 B5 B6 P1 V3 V6] # ب≠9-.σ⒍𐹦≠ +B; \u0628=\u0338𝟫-.σ⒍𐹦=\u0338; [B3 B5 B6 P1 V3 V6]; [B3 B5 B6 P1 V3 V6] # ب≠9-.σ⒍𐹦≠ +B; xn--9--etd0100a.xn--4xa887mzpbzz04b; [B3 B5 B6 V3 V6]; [B3 B5 B6 V3 V6] # ب≠9-.σ⒍𐹦≠ +B; xn--9--etd0100a.xn--3xa097mzpbzz04b; [B3 B5 B6 V3 V6]; [B3 B5 B6 V3 V6] # ب≠9-.ς⒍𐹦≠ +B; 򉛴.-ᡢ\u0592𝨠; [P1 V3 V6]; [P1 V3 V6] # .-ᡢ֒𝨠 +B; xn--ep37b.xn----hec165lho83b; [V3 V6]; [V3 V6] # .-ᡢ֒𝨠 +T; \u06CB⒈ß󠄽。񷋍-; [B2 B3 B6 P1 V3 V6]; [B2 B3 B6 P1 V3 V6] # ۋ⒈ß.- +N; \u06CB⒈ß󠄽。񷋍-; [B2 B3 B6 P1 V3 V6]; [B2 B3 B6 P1 V3 V6] # ۋ⒈ß.- +T; \u06CB1.ß󠄽。񷋍-; [B6 P1 V3 V6]; [B6 P1 V3 V6] # ۋ1.ß.- +N; \u06CB1.ß󠄽。񷋍-; [B6 P1 V3 V6]; [B6 P1 V3 V6] # ۋ1.ß.- +B; \u06CB1.SS󠄽。񷋍-; [B6 P1 V3 V6]; [B6 P1 V3 V6] # ۋ1.ss.- +B; \u06CB1.ss󠄽。񷋍-; [B6 P1 V3 V6]; [B6 P1 V3 V6] # ۋ1.ss.- +B; \u06CB1.Ss󠄽。񷋍-; [B6 P1 V3 V6]; [B6 P1 V3 V6] # ۋ1.ss.- +B; xn--1-cwc.ss.xn----q001f; [B6 V3 V6]; [B6 V3 V6] # ۋ1.ss.- +B; xn--1-cwc.xn--zca.xn----q001f; [B6 V3 V6]; [B6 V3 V6] # ۋ1.ß.- +B; \u06CB⒈SS󠄽。񷋍-; [B2 B3 B6 P1 V3 V6]; [B2 B3 B6 P1 V3 V6] # ۋ⒈ss.- +B; \u06CB⒈ss󠄽。񷋍-; [B2 B3 B6 P1 V3 V6]; [B2 B3 B6 P1 V3 V6] # ۋ⒈ss.- +B; \u06CB⒈Ss󠄽。񷋍-; [B2 B3 B6 P1 V3 V6]; [B2 B3 B6 P1 V3 V6] # ۋ⒈ss.- +B; xn--ss-d7d6651a.xn----q001f; [B2 B3 B6 V3 V6]; [B2 B3 B6 V3 V6] # ۋ⒈ss.- +B; xn--zca541ato3a.xn----q001f; [B2 B3 B6 V3 V6]; [B2 B3 B6 V3 V6] # ۋ⒈ß.- +T; 𿀫.\u1BAAςႦ\u200D; [C2 P1 V5 V6]; [P1 V5 V6] # .᮪ςႦ +N; 𿀫.\u1BAAςႦ\u200D; [C2 P1 V5 V6]; [C2 P1 V5 V6] # .᮪ςႦ +T; 𿀫.\u1BAAςႦ\u200D; [C2 P1 V5 V6]; [P1 V5 V6] # .᮪ςႦ +N; 𿀫.\u1BAAςႦ\u200D; [C2 P1 V5 V6]; [C2 P1 V5 V6] # .᮪ςႦ +T; 𿀫.\u1BAAςⴆ\u200D; [C2 P1 V5 V6]; [P1 V5 V6] # .᮪ςⴆ +N; 𿀫.\u1BAAςⴆ\u200D; [C2 P1 V5 V6]; [C2 P1 V5 V6] # .᮪ςⴆ +T; 𿀫.\u1BAAΣႦ\u200D; [C2 P1 V5 V6]; [P1 V5 V6] # .᮪σႦ +N; 𿀫.\u1BAAΣႦ\u200D; [C2 P1 V5 V6]; [C2 P1 V5 V6] # .᮪σႦ +T; 𿀫.\u1BAAσⴆ\u200D; [C2 P1 V5 V6]; [P1 V5 V6] # .᮪σⴆ +N; 𿀫.\u1BAAσⴆ\u200D; [C2 P1 V5 V6]; [C2 P1 V5 V6] # .᮪σⴆ +T; 𿀫.\u1BAAΣⴆ\u200D; [C2 P1 V5 V6]; [P1 V5 V6] # .᮪σⴆ +N; 𿀫.\u1BAAΣⴆ\u200D; [C2 P1 V5 V6]; [C2 P1 V5 V6] # .᮪σⴆ +B; xn--nu4s.xn--4xa153j7im; [V5 V6]; [V5 V6] # .᮪σⴆ +B; xn--nu4s.xn--4xa153jk8cs1q; [C2 V5 V6]; [C2 V5 V6] # .᮪σⴆ +B; xn--nu4s.xn--4xa217dxri; [V5 V6]; [V5 V6] # .᮪σႦ +B; xn--nu4s.xn--4xa217dxriome; [C2 V5 V6]; [C2 V5 V6] # .᮪σႦ +B; xn--nu4s.xn--3xa353jk8cs1q; [C2 V5 V6]; 
[C2 V5 V6] # .᮪ςⴆ +B; xn--nu4s.xn--3xa417dxriome; [C2 V5 V6]; [C2 V5 V6] # .᮪ςႦ +T; 𿀫.\u1BAAςⴆ\u200D; [C2 P1 V5 V6]; [P1 V5 V6] # .᮪ςⴆ +N; 𿀫.\u1BAAςⴆ\u200D; [C2 P1 V5 V6]; [C2 P1 V5 V6] # .᮪ςⴆ +T; 𿀫.\u1BAAΣႦ\u200D; [C2 P1 V5 V6]; [P1 V5 V6] # .᮪σႦ +N; 𿀫.\u1BAAΣႦ\u200D; [C2 P1 V5 V6]; [C2 P1 V5 V6] # .᮪σႦ +T; 𿀫.\u1BAAσⴆ\u200D; [C2 P1 V5 V6]; [P1 V5 V6] # .᮪σⴆ +N; 𿀫.\u1BAAσⴆ\u200D; [C2 P1 V5 V6]; [C2 P1 V5 V6] # .᮪σⴆ +T; 𿀫.\u1BAAΣⴆ\u200D; [C2 P1 V5 V6]; [P1 V5 V6] # .᮪σⴆ +N; 𿀫.\u1BAAΣⴆ\u200D; [C2 P1 V5 V6]; [C2 P1 V5 V6] # .᮪σⴆ +B; ⾆\u08E2.𝈴; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # 舌.𝈴 +B; 舌\u08E2.𝈴; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # 舌.𝈴 +B; xn--l0b9413d.xn--kl1h; [B1 B5 B6 V6]; [B1 B5 B6 V6] # 舌.𝈴 +B; ⫞𐹶𖫴。⭠⒈; [B1 P1 V6]; [B1 P1 V6] +B; ⫞𐹶𖫴。⭠1.; [B1]; [B1] +B; xn--53ix188et88b.xn--1-h6r.; [B1]; [B1] +B; xn--53ix188et88b.xn--tsh52w; [B1 V6]; [B1 V6] +T; ⒈\u200C\uAAEC︒.\u0ACD; [C1 P1 V5 V6]; [P1 V5 V6] # ⒈ꫬ︒.્ +N; ⒈\u200C\uAAEC︒.\u0ACD; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ⒈ꫬ︒.્ +T; 1.\u200C\uAAEC。.\u0ACD; [C1 V5 A4_2]; [V5 A4_2] # 1.ꫬ..્ +N; 1.\u200C\uAAEC。.\u0ACD; [C1 V5 A4_2]; [C1 V5 A4_2] # 1.ꫬ..્ +B; 1.xn--sv9a..xn--mfc; [V5 A4_2]; [V5 A4_2] # 1.ꫬ..્ +B; 1.xn--0ug7185c..xn--mfc; [C1 V5 A4_2]; [C1 V5 A4_2] # 1.ꫬ..્ +B; xn--tsh0720cse8b.xn--mfc; [V5 V6]; [V5 V6] # ⒈ꫬ︒.્ +B; xn--0ug78o720myr1c.xn--mfc; [C1 V5 V6]; [C1 V5 V6] # ⒈ꫬ︒.્ +B; \u0C46。䰀\u0668𞭅󠅼; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ె.䰀٨ +B; xn--eqc.xn--hib5476aim6t; [B1 B3 B5 B6 V5 V6]; [B1 B3 B5 B6 V5 V6] # ె.䰀٨ +T; ß\u200D.\u1BF2񄾼; [C2 P1 V5 V6]; [P1 V5 V6] # ß.᯲ +N; ß\u200D.\u1BF2񄾼; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ß.᯲ +T; SS\u200D.\u1BF2񄾼; [C2 P1 V5 V6]; [P1 V5 V6] # ss.᯲ +N; SS\u200D.\u1BF2񄾼; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ss.᯲ +T; ss\u200D.\u1BF2񄾼; [C2 P1 V5 V6]; [P1 V5 V6] # ss.᯲ +N; ss\u200D.\u1BF2񄾼; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ss.᯲ +T; Ss\u200D.\u1BF2񄾼; [C2 P1 V5 V6]; [P1 V5 V6] # ss.᯲ +N; Ss\u200D.\u1BF2񄾼; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ss.᯲ +B; ss.xn--0zf22107b; [V5 V6]; [V5 V6] # ss.᯲ +B; xn--ss-n1t.xn--0zf22107b; [C2 V5 V6]; [C2 V5 V6] # ss.᯲ +B; xn--zca870n.xn--0zf22107b; [C2 V5 V6]; [C2 V5 V6] # ß.᯲ +T; 𑓂\u200C≮.≮; [P1 V5 V6]; [P1 V5 V6] # 𑓂≮.≮ +N; 𑓂\u200C≮.≮; [P1 V5 V6]; [P1 V5 V6] # 𑓂≮.≮ +T; 𑓂\u200C<\u0338.<\u0338; [P1 V5 V6]; [P1 V5 V6] # 𑓂≮.≮ +N; 𑓂\u200C<\u0338.<\u0338; [P1 V5 V6]; [P1 V5 V6] # 𑓂≮.≮ +B; xn--gdhz656g.xn--gdh; [V5 V6]; [V5 V6] +B; xn--0ugy6glz29a.xn--gdh; [V5 V6]; [V5 V6] # 𑓂≮.≮ +B; 🕼.\uFFA0; [P1 V6]; [P1 V6] # 🕼. +B; 🕼.\u1160; [P1 V6]; [P1 V6] # 🕼. +B; xn--my8h.xn--psd; [V6]; [V6] # 🕼. +B; xn--my8h.xn--cl7c; [V6]; [V6] # 🕼. +B; ᡔ\uFD82。񷘎; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ᡔلحى. +B; ᡔ\u0644\u062D\u0649。񷘎; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ᡔلحى. +B; xn--sgb9bq785p.xn--bc31b; [B5 B6 V6]; [B5 B6 V6] # ᡔلحى. 
+B; 爕򳙑.𝟰気; [P1 V6]; [P1 V6] +B; 爕򳙑.4気; [P1 V6]; [P1 V6] +B; xn--1zxq3199c.xn--4-678b; [V6]; [V6] +B; ⒋𑍍Ⴝ-.𞬪\u0DCA\u05B5; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ⒋𑍍Ⴝ-.්ֵ +B; 4.𑍍Ⴝ-.𞬪\u0DCA\u05B5; [B1 B6 P1 V3 V5 V6]; [B1 B6 P1 V3 V5 V6] # 4.𑍍Ⴝ-.්ֵ +B; 4.𑍍ⴝ-.𞬪\u0DCA\u05B5; [B1 B6 P1 V3 V5 V6]; [B1 B6 P1 V3 V5 V6] # 4.𑍍ⴝ-.්ֵ +B; 4.xn----wwsx259f.xn--ddb152b7y23b; [B1 B6 V3 V5 V6]; [B1 B6 V3 V5 V6] # 4.𑍍ⴝ-.්ֵ +B; 4.xn----t1g9869q.xn--ddb152b7y23b; [B1 B6 V3 V5 V6]; [B1 B6 V3 V5 V6] # 4.𑍍Ⴝ-.්ֵ +B; ⒋𑍍ⴝ-.𞬪\u0DCA\u05B5; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ⒋𑍍ⴝ-.්ֵ +B; xn----jcp487avl3w.xn--ddb152b7y23b; [B1 V3 V6]; [B1 V3 V6] # ⒋𑍍ⴝ-.්ֵ +B; xn----t1g323mnk9t.xn--ddb152b7y23b; [B1 V3 V6]; [B1 V3 V6] # ⒋𑍍Ⴝ-.්ֵ +B; 󞝃。򑆃񉢗--; [P1 V2 V3 V6]; [P1 V2 V3 V6] +B; xn--2y75e.xn-----1l15eer88n; [V2 V3 V6]; [V2 V3 V6] +T; \u200D\u07DF。\u200C\uABED; [B1 C1 C2]; [B1 B3 B6 V5] # ߟ.꯭ +N; \u200D\u07DF。\u200C\uABED; [B1 C1 C2]; [B1 C1 C2] # ߟ.꯭ +T; \u200D\u07DF。\u200C\uABED; [B1 C1 C2]; [B1 B3 B6 V5] # ߟ.꯭ +N; \u200D\u07DF。\u200C\uABED; [B1 C1 C2]; [B1 C1 C2] # ߟ.꯭ +B; xn--6sb.xn--429a; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ߟ.꯭ +B; xn--6sb394j.xn--0ug1126c; [B1 C1 C2]; [B1 C1 C2] # ߟ.꯭ +B; 𞮽\u07FF\u084E。ᢍ򝹁𐫘; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ࡎ.ᢍ𐫘 +B; 𞮽\u07FF\u084E。ᢍ򝹁𐫘; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ࡎ.ᢍ𐫘 +B; xn--3tb2nz468k.xn--69e8615j5rn5d; [B5 B6 V6]; [B5 B6 V6] # ࡎ.ᢍ𐫘 +B; \u06ED𞺌𑄚\u1714.ꡞ\u08B7; [B1 B5 B6 V5]; [B1 B5 B6 V5] # ۭم𑄚᜔.ꡞࢷ +B; \u06ED\u0645𑄚\u1714.ꡞ\u08B7; [B1 B5 B6 V5]; [B1 B5 B6 V5] # ۭم𑄚᜔.ꡞࢷ +B; xn--hhb94ag41b739u.xn--dzb5582f; [B1 B5 B6 V5]; [B1 B5 B6 V5] # ۭم𑄚᜔.ꡞࢷ +T; 񻂵킃𑘶\u07DC。ς\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.ςؼς +N; 񻂵킃𑘶\u07DC。ς\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.ςؼς +T; 񻂵킃𑘶\u07DC。ς\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.ςؼς +N; 񻂵킃𑘶\u07DC。ς\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.ςؼς +T; 񻂵킃𑘶\u07DC。ς\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.ςؼς +N; 񻂵킃𑘶\u07DC。ς\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.ςؼς +T; 񻂵킃𑘶\u07DC。ς\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.ςؼς +N; 񻂵킃𑘶\u07DC。ς\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.ςؼς +B; 񻂵킃𑘶\u07DC。Σ\u063CΣ; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼσ +B; 񻂵킃𑘶\u07DC。Σ\u063CΣ; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼσ +B; 񻂵킃𑘶\u07DC。σ\u063Cσ; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼσ +B; 񻂵킃𑘶\u07DC。σ\u063Cσ; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼσ +B; 񻂵킃𑘶\u07DC。Σ\u063Cσ; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼσ +B; 񻂵킃𑘶\u07DC。Σ\u063Cσ; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼσ +B; xn--3sb7483hoyvbbe76g.xn--4xaa21q; [B5 B6 V6]; [B5 B6 V6] # 킃𑘶ߜ.σؼσ +T; 񻂵킃𑘶\u07DC。Σ\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼς +N; 񻂵킃𑘶\u07DC。Σ\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼς +T; 񻂵킃𑘶\u07DC。Σ\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼς +N; 񻂵킃𑘶\u07DC。Σ\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼς +T; 񻂵킃𑘶\u07DC。σ\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼς +N; 񻂵킃𑘶\u07DC。σ\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼς +T; 񻂵킃𑘶\u07DC。σ\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼς +N; 񻂵킃𑘶\u07DC。σ\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼς +B; xn--3sb7483hoyvbbe76g.xn--3xab31q; [B5 B6 V6]; [B5 B6 V6] # 킃𑘶ߜ.σؼς +B; xn--3sb7483hoyvbbe76g.xn--3xaa51q; [B5 B6 V6]; [B5 B6 V6] # 킃𑘶ߜ.ςؼς +B; 񻂵킃𑘶\u07DC。Σ\u063CΣ; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼσ +B; 񻂵킃𑘶\u07DC。Σ\u063CΣ; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼσ +B; 񻂵킃𑘶\u07DC。σ\u063Cσ; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼσ +B; 񻂵킃𑘶\u07DC。σ\u063Cσ; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼσ +B; 񻂵킃𑘶\u07DC。Σ\u063Cσ; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼσ +B; 񻂵킃𑘶\u07DC。Σ\u063Cσ; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼσ +T; 
񻂵킃𑘶\u07DC。Σ\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼς +N; 񻂵킃𑘶\u07DC。Σ\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼς +T; 񻂵킃𑘶\u07DC。Σ\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼς +N; 񻂵킃𑘶\u07DC。Σ\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼς +T; 񻂵킃𑘶\u07DC。σ\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼς +N; 񻂵킃𑘶\u07DC。σ\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼς +T; 񻂵킃𑘶\u07DC。σ\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼς +N; 񻂵킃𑘶\u07DC。σ\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼς +B; 蔰。󠁹\u08DD-𑈵; [P1 V6]; [P1 V6] # 蔰.ࣝ-𑈵 +B; xn--sz1a.xn----mrd9984r3dl0i; [V6]; [V6] # 蔰.ࣝ-𑈵 +T; ςჅ。\u075A; [P1 V6]; [P1 V6] # ςჅ.ݚ +N; ςჅ。\u075A; [P1 V6]; [P1 V6] # ςჅ.ݚ +T; ςⴥ。\u075A; ςⴥ.\u075A; xn--4xa203s.xn--epb # ςⴥ.ݚ +N; ςⴥ。\u075A; ςⴥ.\u075A; xn--3xa403s.xn--epb # ςⴥ.ݚ +B; ΣჅ。\u075A; [P1 V6]; [P1 V6] # σჅ.ݚ +B; σⴥ。\u075A; σⴥ.\u075A; xn--4xa203s.xn--epb # σⴥ.ݚ +B; Σⴥ。\u075A; σⴥ.\u075A; xn--4xa203s.xn--epb # σⴥ.ݚ +B; xn--4xa203s.xn--epb; σⴥ.\u075A; xn--4xa203s.xn--epb # σⴥ.ݚ +B; σⴥ.\u075A; ; xn--4xa203s.xn--epb # σⴥ.ݚ +B; ΣჅ.\u075A; [P1 V6]; [P1 V6] # σჅ.ݚ +B; Σⴥ.\u075A; σⴥ.\u075A; xn--4xa203s.xn--epb # σⴥ.ݚ +B; xn--4xa477d.xn--epb; [V6]; [V6] # σჅ.ݚ +B; xn--3xa403s.xn--epb; ςⴥ.\u075A; xn--3xa403s.xn--epb # ςⴥ.ݚ +T; ςⴥ.\u075A; ; xn--4xa203s.xn--epb # ςⴥ.ݚ +N; ςⴥ.\u075A; ; xn--3xa403s.xn--epb # ςⴥ.ݚ +B; xn--3xa677d.xn--epb; [V6]; [V6] # ςჅ.ݚ +B; \u0C4DႩ𞰓.\u1B72; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # ్Ⴉ.᭲ +B; \u0C4DႩ𞰓.\u1B72; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # ్Ⴉ.᭲ +B; \u0C4Dⴉ𞰓.\u1B72; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # ్ⴉ.᭲ +B; xn--lqc478nlr02a.xn--dwf; [B1 B3 B6 V5 V6]; [B1 B3 B6 V5 V6] # ్ⴉ.᭲ +B; xn--lqc64t7t26c.xn--dwf; [B1 B3 B6 V5 V6]; [B1 B3 B6 V5 V6] # ్Ⴉ.᭲ +B; \u0C4Dⴉ𞰓.\u1B72; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # ్ⴉ.᭲ +B; ⮷≮񎈴󠄟。𐠄; [B1 P1 V6]; [B1 P1 V6] +B; ⮷<\u0338񎈴󠄟。𐠄; [B1 P1 V6]; [B1 P1 V6] +B; xn--gdh877a3513h.xn--pc9c; [B1 V6]; [B1 V6] +T; \u06BC。\u200Dẏ\u200Cᡤ; [B1 C1 C2]; xn--vkb.xn--08e172a # ڼ.ẏᡤ +N; \u06BC。\u200Dẏ\u200Cᡤ; [B1 C1 C2]; [B1 C1 C2] # ڼ.ẏᡤ +T; \u06BC。\u200Dy\u0307\u200Cᡤ; [B1 C1 C2]; xn--vkb.xn--08e172a # ڼ.ẏᡤ +N; \u06BC。\u200Dy\u0307\u200Cᡤ; [B1 C1 C2]; [B1 C1 C2] # ڼ.ẏᡤ +T; \u06BC。\u200Dẏ\u200Cᡤ; [B1 C1 C2]; xn--vkb.xn--08e172a # ڼ.ẏᡤ +N; \u06BC。\u200Dẏ\u200Cᡤ; [B1 C1 C2]; [B1 C1 C2] # ڼ.ẏᡤ +T; \u06BC。\u200Dy\u0307\u200Cᡤ; [B1 C1 C2]; xn--vkb.xn--08e172a # ڼ.ẏᡤ +N; \u06BC。\u200Dy\u0307\u200Cᡤ; [B1 C1 C2]; [B1 C1 C2] # ڼ.ẏᡤ +T; \u06BC。\u200DY\u0307\u200Cᡤ; [B1 C1 C2]; xn--vkb.xn--08e172a # ڼ.ẏᡤ +N; \u06BC。\u200DY\u0307\u200Cᡤ; [B1 C1 C2]; [B1 C1 C2] # ڼ.ẏᡤ +T; \u06BC。\u200DẎ\u200Cᡤ; [B1 C1 C2]; xn--vkb.xn--08e172a # ڼ.ẏᡤ +N; \u06BC。\u200DẎ\u200Cᡤ; [B1 C1 C2]; [B1 C1 C2] # ڼ.ẏᡤ +B; xn--vkb.xn--08e172a; \u06BC.ẏᡤ; xn--vkb.xn--08e172a # ڼ.ẏᡤ +B; \u06BC.ẏᡤ; ; xn--vkb.xn--08e172a # ڼ.ẏᡤ +B; \u06BC.y\u0307ᡤ; \u06BC.ẏᡤ; xn--vkb.xn--08e172a # ڼ.ẏᡤ +B; \u06BC.Y\u0307ᡤ; \u06BC.ẏᡤ; xn--vkb.xn--08e172a # ڼ.ẏᡤ +B; \u06BC.Ẏᡤ; \u06BC.ẏᡤ; xn--vkb.xn--08e172a # ڼ.ẏᡤ +B; xn--vkb.xn--08e172ax6aca; [B1 C1 C2]; [B1 C1 C2] # ڼ.ẏᡤ +T; \u06BC。\u200DY\u0307\u200Cᡤ; [B1 C1 C2]; xn--vkb.xn--08e172a # ڼ.ẏᡤ +N; \u06BC。\u200DY\u0307\u200Cᡤ; [B1 C1 C2]; [B1 C1 C2] # ڼ.ẏᡤ +T; \u06BC。\u200DẎ\u200Cᡤ; [B1 C1 C2]; xn--vkb.xn--08e172a # ڼ.ẏᡤ +N; \u06BC。\u200DẎ\u200Cᡤ; [B1 C1 C2]; [B1 C1 C2] # ڼ.ẏᡤ +B; 𐹹𑲛。񑂐\u0DCA; [B1 P1 V6]; [B1 P1 V6] # 𐹹𑲛.් +B; xn--xo0dg5v.xn--h1c39876d; [B1 V6]; [B1 V6] # 𐹹𑲛.් +B; -≠𑈵。嵕\uFEF1۴\uA953; [B1 B5 P1 V3 V6]; [B1 B5 P1 V3 V6] # -≠𑈵.嵕ي۴꥓ +B; -=\u0338𑈵。嵕\uFEF1۴\uA953; [B1 B5 P1 V3 V6]; [B1 B5 P1 V3 V6] # -≠𑈵.嵕ي۴꥓ +B; -≠𑈵。嵕\u064A۴\uA953; [B1 B5 P1 
V3 V6]; [B1 B5 P1 V3 V6] # -≠𑈵.嵕ي۴꥓ +B; -=\u0338𑈵。嵕\u064A۴\uA953; [B1 B5 P1 V3 V6]; [B1 B5 P1 V3 V6] # -≠𑈵.嵕ي۴꥓ +B; xn----ufo4749h.xn--mhb45a235sns3c; [B1 B5 V3 V6]; [B1 B5 V3 V6] # -≠𑈵.嵕ي۴꥓ +T; \u200C񍸰𐹶\u076E.\u06C1\u200D≯\u200D; [B1 B3 C1 C2 P1 V6]; [B3 B5 B6 P1 V6] # 𐹶ݮ.ہ≯ +N; \u200C񍸰𐹶\u076E.\u06C1\u200D≯\u200D; [B1 B3 C1 C2 P1 V6]; [B1 B3 C1 C2 P1 V6] # 𐹶ݮ.ہ≯ +T; \u200C񍸰𐹶\u076E.\u06C1\u200D>\u0338\u200D; [B1 B3 C1 C2 P1 V6]; [B3 B5 B6 P1 V6] # 𐹶ݮ.ہ≯ +N; \u200C񍸰𐹶\u076E.\u06C1\u200D>\u0338\u200D; [B1 B3 C1 C2 P1 V6]; [B1 B3 C1 C2 P1 V6] # 𐹶ݮ.ہ≯ +T; \u200C񍸰𐹶\u076E.\u06C1\u200D≯\u200D; [B1 B3 C1 C2 P1 V6]; [B3 B5 B6 P1 V6] # 𐹶ݮ.ہ≯ +N; \u200C񍸰𐹶\u076E.\u06C1\u200D≯\u200D; [B1 B3 C1 C2 P1 V6]; [B1 B3 C1 C2 P1 V6] # 𐹶ݮ.ہ≯ +T; \u200C񍸰𐹶\u076E.\u06C1\u200D>\u0338\u200D; [B1 B3 C1 C2 P1 V6]; [B3 B5 B6 P1 V6] # 𐹶ݮ.ہ≯ +N; \u200C񍸰𐹶\u076E.\u06C1\u200D>\u0338\u200D; [B1 B3 C1 C2 P1 V6]; [B1 B3 C1 C2 P1 V6] # 𐹶ݮ.ہ≯ +B; xn--ypb5875khz9y.xn--0kb682l; [B3 B5 B6 V6]; [B3 B5 B6 V6] # 𐹶ݮ.ہ≯ +B; xn--ypb717jrx2o7v94a.xn--0kb660ka35v; [B1 B3 C1 C2 V6]; [B1 B3 C1 C2 V6] # 𐹶ݮ.ہ≯ +B; ≮.\u17B5\u0855𐫔; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≮.ࡕ𐫔 +B; <\u0338.\u17B5\u0855𐫔; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≮.ࡕ𐫔 +B; ≮.\u17B5\u0855𐫔; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≮.ࡕ𐫔 +B; <\u0338.\u17B5\u0855𐫔; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≮.ࡕ𐫔 +B; xn--gdh.xn--kwb589e217p; [B1 V5 V6]; [B1 V5 V6] # ≮.ࡕ𐫔 +T; 𐩗\u200D。ႩႵ; [B3 C2 P1 V6]; [P1 V6] # 𐩗.ႩႵ +N; 𐩗\u200D。ႩႵ; [B3 C2 P1 V6]; [B3 C2 P1 V6] # 𐩗.ႩႵ +T; 𐩗\u200D。ႩႵ; [B3 C2 P1 V6]; [P1 V6] # 𐩗.ႩႵ +N; 𐩗\u200D。ႩႵ; [B3 C2 P1 V6]; [B3 C2 P1 V6] # 𐩗.ႩႵ +T; 𐩗\u200D。ⴉⴕ; [B3 C2]; xn--pt9c.xn--0kjya # 𐩗.ⴉⴕ +N; 𐩗\u200D。ⴉⴕ; [B3 C2]; [B3 C2] # 𐩗.ⴉⴕ +T; 𐩗\u200D。Ⴉⴕ; [B3 C2 P1 V6]; [P1 V6] # 𐩗.Ⴉⴕ +N; 𐩗\u200D。Ⴉⴕ; [B3 C2 P1 V6]; [B3 C2 P1 V6] # 𐩗.Ⴉⴕ +B; xn--pt9c.xn--hnd666l; [V6]; [V6] +B; xn--1ug4933g.xn--hnd666l; [B3 C2 V6]; [B3 C2 V6] # 𐩗.Ⴉⴕ +B; xn--pt9c.xn--0kjya; 𐩗.ⴉⴕ; xn--pt9c.xn--0kjya; NV8 +B; 𐩗.ⴉⴕ; ; xn--pt9c.xn--0kjya; NV8 +B; 𐩗.ႩႵ; [P1 V6]; [P1 V6] +B; 𐩗.Ⴉⴕ; [P1 V6]; [P1 V6] +B; xn--pt9c.xn--hndy; [V6]; [V6] +B; xn--1ug4933g.xn--0kjya; [B3 C2]; [B3 C2] # 𐩗.ⴉⴕ +B; xn--1ug4933g.xn--hndy; [B3 C2 V6]; [B3 C2 V6] # 𐩗.ႩႵ +T; 𐩗\u200D。ⴉⴕ; [B3 C2]; xn--pt9c.xn--0kjya # 𐩗.ⴉⴕ +N; 𐩗\u200D。ⴉⴕ; [B3 C2]; [B3 C2] # 𐩗.ⴉⴕ +T; 𐩗\u200D。Ⴉⴕ; [B3 C2 P1 V6]; [P1 V6] # 𐩗.Ⴉⴕ +N; 𐩗\u200D。Ⴉⴕ; [B3 C2 P1 V6]; [B3 C2 P1 V6] # 𐩗.Ⴉⴕ +T; \u200C\u200Cㄤ.\u032E󕨑\u09C2; [C1 P1 V5 V6]; [P1 V5 V6] # ㄤ.̮ূ +N; \u200C\u200Cㄤ.\u032E󕨑\u09C2; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ㄤ.̮ূ +T; \u200C\u200Cㄤ.\u032E󕨑\u09C2; [C1 P1 V5 V6]; [P1 V5 V6] # ㄤ.̮ূ +N; \u200C\u200Cㄤ.\u032E󕨑\u09C2; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ㄤ.̮ূ +B; xn--1fk.xn--vta284a9o563a; [V5 V6]; [V5 V6] # ㄤ.̮ূ +B; xn--0uga242k.xn--vta284a9o563a; [C1 V5 V6]; [C1 V5 V6] # ㄤ.̮ূ +T; 𐋻。-\u200C𐫄Ⴗ; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # 𐋻.-𐫄Ⴗ +N; 𐋻。-\u200C𐫄Ⴗ; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # 𐋻.-𐫄Ⴗ +T; 𐋻。-\u200C𐫄Ⴗ; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # 𐋻.-𐫄Ⴗ +N; 𐋻。-\u200C𐫄Ⴗ; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # 𐋻.-𐫄Ⴗ +T; 𐋻。-\u200C𐫄ⴗ; [B1 C1 V3]; [B1 V3] # 𐋻.-𐫄ⴗ +N; 𐋻。-\u200C𐫄ⴗ; [B1 C1 V3]; [B1 C1 V3] # 𐋻.-𐫄ⴗ +B; xn--v97c.xn----lws0526f; [B1 V3]; [B1 V3] +B; xn--v97c.xn----sgnv20du99s; [B1 C1 V3]; [B1 C1 V3] # 𐋻.-𐫄ⴗ +B; xn--v97c.xn----i1g2513q; [B1 V3 V6]; [B1 V3 V6] +B; xn--v97c.xn----i1g888ih12u; [B1 C1 V3 V6]; [B1 C1 V3 V6] # 𐋻.-𐫄Ⴗ +T; 𐋻。-\u200C𐫄ⴗ; [B1 C1 V3]; [B1 V3] # 𐋻.-𐫄ⴗ +N; 𐋻。-\u200C𐫄ⴗ; [B1 C1 V3]; [B1 C1 V3] # 𐋻.-𐫄ⴗ +T; 🙑𐷺.≠\u200C; [B1 C1 P1 V6]; [B1 P1 V6] # 🙑.≠ +N; 🙑𐷺.≠\u200C; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 🙑.≠ +T; 🙑𐷺.=\u0338\u200C; [B1 C1 P1 V6]; [B1 P1 V6] # 🙑.≠ +N; 🙑𐷺.=\u0338\u200C; [B1 C1 P1 
V6]; [B1 C1 P1 V6] # 🙑.≠ +T; 🙑𐷺.≠\u200C; [B1 C1 P1 V6]; [B1 P1 V6] # 🙑.≠ +N; 🙑𐷺.≠\u200C; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 🙑.≠ +T; 🙑𐷺.=\u0338\u200C; [B1 C1 P1 V6]; [B1 P1 V6] # 🙑.≠ +N; 🙑𐷺.=\u0338\u200C; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 🙑.≠ +B; xn--bl0dh970b.xn--1ch; [B1 V6]; [B1 V6] +B; xn--bl0dh970b.xn--0ug83g; [B1 C1 V6]; [B1 C1 V6] # 🙑.≠ +B; \u064C\u1CD2。𞮞\u2D7F⧎; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # ٌ᳒.⵿⧎ +B; \u064C\u1CD2。𞮞\u2D7F⧎; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # ٌ᳒.⵿⧎ +B; xn--ohb646i.xn--ewi38jf765c; [B1 B3 B6 V5 V6]; [B1 B3 B6 V5 V6] # ٌ᳒.⵿⧎ +B; Ⴔ𝨨₃󠁦.𝟳𑂹\u0B82; [P1 V6]; [P1 V6] # Ⴔ𝨨3.7𑂹ஂ +B; Ⴔ𝨨3󠁦.7𑂹\u0B82; [P1 V6]; [P1 V6] # Ⴔ𝨨3.7𑂹ஂ +B; ⴔ𝨨3󠁦.7𑂹\u0B82; [P1 V6]; [P1 V6] # ⴔ𝨨3.7𑂹ஂ +B; xn--3-ews6985n35s3g.xn--7-cve6271r; [V6]; [V6] # ⴔ𝨨3.7𑂹ஂ +B; xn--3-b1g83426a35t0g.xn--7-cve6271r; [V6]; [V6] # Ⴔ𝨨3.7𑂹ஂ +B; ⴔ𝨨₃󠁦.𝟳𑂹\u0B82; [P1 V6]; [P1 V6] # ⴔ𝨨3.7𑂹ஂ +T; 䏈\u200C。\u200C⒈񱢕; [C1 P1 V6]; [P1 V6] # 䏈.⒈ +N; 䏈\u200C。\u200C⒈񱢕; [C1 P1 V6]; [C1 P1 V6] # 䏈.⒈ +T; 䏈\u200C。\u200C1.񱢕; [C1 P1 V6]; [P1 V6] # 䏈.1. +N; 䏈\u200C。\u200C1.񱢕; [C1 P1 V6]; [C1 P1 V6] # 䏈.1. +B; xn--eco.1.xn--ms39a; [V6]; [V6] +B; xn--0ug491l.xn--1-rgn.xn--ms39a; [C1 V6]; [C1 V6] # 䏈.1. +B; xn--eco.xn--tsh21126d; [V6]; [V6] +B; xn--0ug491l.xn--0ug88oot66q; [C1 V6]; [C1 V6] # 䏈.⒈ +T; 1\uAAF6ß𑲥。\u1DD8; [V5]; [V5] # 1꫶ß𑲥.ᷘ +N; 1\uAAF6ß𑲥。\u1DD8; [V5]; [V5] # 1꫶ß𑲥.ᷘ +T; 1\uAAF6ß𑲥。\u1DD8; [V5]; [V5] # 1꫶ß𑲥.ᷘ +N; 1\uAAF6ß𑲥。\u1DD8; [V5]; [V5] # 1꫶ß𑲥.ᷘ +B; 1\uAAF6SS𑲥。\u1DD8; [V5]; [V5] # 1꫶ss𑲥.ᷘ +B; 1\uAAF6ss𑲥。\u1DD8; [V5]; [V5] # 1꫶ss𑲥.ᷘ +B; 1\uAAF6Ss𑲥。\u1DD8; [V5]; [V5] # 1꫶ss𑲥.ᷘ +B; xn--1ss-ir6ln166b.xn--weg; [V5]; [V5] # 1꫶ss𑲥.ᷘ +B; xn--1-qfa2471kdb0d.xn--weg; [V5]; [V5] # 1꫶ß𑲥.ᷘ +B; 1\uAAF6SS𑲥。\u1DD8; [V5]; [V5] # 1꫶ss𑲥.ᷘ +B; 1\uAAF6ss𑲥。\u1DD8; [V5]; [V5] # 1꫶ss𑲥.ᷘ +B; 1\uAAF6Ss𑲥。\u1DD8; [V5]; [V5] # 1꫶ss𑲥.ᷘ +T; \u200D񫶩𞪯\u0CCD。\u077C⒈; [B1 C2 P1 V6]; [B5 B6 P1 V6] # ್.ݼ⒈ +N; \u200D񫶩𞪯\u0CCD。\u077C⒈; [B1 C2 P1 V6]; [B1 C2 P1 V6] # ್.ݼ⒈ +T; \u200D񫶩𞪯\u0CCD。\u077C1.; [B1 C2 P1 V6]; [B5 B6 P1 V6] # ್.ݼ1. +N; \u200D񫶩𞪯\u0CCD。\u077C1.; [B1 C2 P1 V6]; [B1 C2 P1 V6] # ್.ݼ1. +B; xn--8tc9875v5is1a.xn--1-g6c.; [B5 B6 V6]; [B5 B6 V6] # ್.ݼ1. +B; xn--8tc969gzn94a4lm8a.xn--1-g6c.; [B1 C2 V6]; [B1 C2 V6] # ್.ݼ1. 
+B; xn--8tc9875v5is1a.xn--dqb689l; [B5 B6 V6]; [B5 B6 V6] # ್.ݼ⒈ +B; xn--8tc969gzn94a4lm8a.xn--dqb689l; [B1 C2 V6]; [B1 C2 V6] # ್.ݼ⒈ +B; \u1AB6.𞤳򓢖򻉒\u07D7; [B1 B2 B3 B6 P1 V5 V6]; [B1 B2 B3 B6 P1 V5 V6] # ᪶.𞤳ߗ +B; \u1AB6.𞤳򓢖򻉒\u07D7; [B1 B2 B3 B6 P1 V5 V6]; [B1 B2 B3 B6 P1 V5 V6] # ᪶.𞤳ߗ +B; \u1AB6.𞤑򓢖򻉒\u07D7; [B1 B2 B3 B6 P1 V5 V6]; [B1 B2 B3 B6 P1 V5 V6] # ᪶.𞤳ߗ +B; xn--zqf.xn--ysb9657vuiz5bj0ep; [B1 B2 B3 B6 V5 V6]; [B1 B2 B3 B6 V5 V6] # ᪶.𞤳ߗ +B; \u1AB6.𞤑򓢖򻉒\u07D7; [B1 B2 B3 B6 P1 V5 V6]; [B1 B2 B3 B6 P1 V5 V6] # ᪶.𞤳ߗ +B; \u0842𞩚⒈.󠬌8򏳏\u0770; [B1 P1 V6]; [B1 P1 V6] # ࡂ⒈.8ݰ +B; \u0842𞩚1..󠬌8򏳏\u0770; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # ࡂ1..8ݰ +B; xn--1-rid26318a..xn--8-s5c22427ox454a; [B1 V6 A4_2]; [B1 V6 A4_2] # ࡂ1..8ݰ +B; xn--0vb095ldg52a.xn--8-s5c22427ox454a; [B1 V6]; [B1 V6] # ࡂ⒈.8ݰ +B; \u0361𐫫\u0369ᡷ。-󠰛鞰; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ͡𐫫ͩᡷ.-鞰 +B; xn--cvaq482npv5t.xn----yg7dt1332g; [B1 V3 V5 V6]; [B1 V3 V5 V6] # ͡𐫫ͩᡷ.-鞰 +T; -.\u0ACD剘ß𐫃; [B1 V3 V5]; [B1 V3 V5] # -.્剘ß𐫃 +N; -.\u0ACD剘ß𐫃; [B1 V3 V5]; [B1 V3 V5] # -.્剘ß𐫃 +B; -.\u0ACD剘SS𐫃; [B1 V3 V5]; [B1 V3 V5] # -.્剘ss𐫃 +B; -.\u0ACD剘ss𐫃; [B1 V3 V5]; [B1 V3 V5] # -.્剘ss𐫃 +B; -.\u0ACD剘Ss𐫃; [B1 V3 V5]; [B1 V3 V5] # -.્剘ss𐫃 +B; -.xn--ss-bqg4734erywk; [B1 V3 V5]; [B1 V3 V5] # -.્剘ss𐫃 +B; -.xn--zca791c493duf8i; [B1 V3 V5]; [B1 V3 V5] # -.્剘ß𐫃 +B; \u08FB𞵸。-; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ࣻ.- +B; \u08FB𞵸。-; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ࣻ.- +B; xn--b1b2719v.-; [B1 V3 V5 V6]; [B1 V3 V5 V6] # ࣻ.- +B; ⒈󠈻𐹲。≠\u0603𐹽; [B1 P1 V6]; [B1 P1 V6] # ⒈𐹲.≠𐹽 +B; ⒈󠈻𐹲。=\u0338\u0603𐹽; [B1 P1 V6]; [B1 P1 V6] # ⒈𐹲.≠𐹽 +B; 1.󠈻𐹲。≠\u0603𐹽; [B1 P1 V6]; [B1 P1 V6] # 1.𐹲.≠𐹽 +B; 1.󠈻𐹲。=\u0338\u0603𐹽; [B1 P1 V6]; [B1 P1 V6] # 1.𐹲.≠𐹽 +B; 1.xn--qo0dl3077c.xn--lfb536lb35n; [B1 V6]; [B1 V6] # 1.𐹲.≠𐹽 +B; xn--tshw766f1153g.xn--lfb536lb35n; [B1 V6]; [B1 V6] # ⒈𐹲.≠𐹽 +T; 𐹢󠈚Ⴎ\u200C.㖾𐹡; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 P1 V6] # 𐹢Ⴎ.㖾𐹡 +N; 𐹢󠈚Ⴎ\u200C.㖾𐹡; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # 𐹢Ⴎ.㖾𐹡 +T; 𐹢󠈚ⴎ\u200C.㖾𐹡; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 P1 V6] # 𐹢ⴎ.㖾𐹡 +N; 𐹢󠈚ⴎ\u200C.㖾𐹡; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # 𐹢ⴎ.㖾𐹡 +B; xn--5kjx323em053g.xn--pelu572d; [B1 B5 B6 V6]; [B1 B5 B6 V6] +B; xn--0ug342clq0pqxv4i.xn--pelu572d; [B1 B5 B6 C1 V6]; [B1 B5 B6 C1 V6] # 𐹢ⴎ.㖾𐹡 +B; xn--mnd9001km0o0g.xn--pelu572d; [B1 B5 B6 V6]; [B1 B5 B6 V6] +B; xn--mnd289ezj4pqxp0i.xn--pelu572d; [B1 B5 B6 C1 V6]; [B1 B5 B6 C1 V6] # 𐹢Ⴎ.㖾𐹡 +B; 򩼗.\u07C7ᡖႳႧ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # .߇ᡖႳႧ +B; 򩼗.\u07C7ᡖႳႧ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # .߇ᡖႳႧ +B; 򩼗.\u07C7ᡖⴓⴇ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # .߇ᡖⴓⴇ +B; 򩼗.\u07C7ᡖႳⴇ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # .߇ᡖႳⴇ +B; xn--te28c.xn--isb286btrgo7w; [B2 B3 V6]; [B2 B3 V6] # .߇ᡖႳⴇ +B; xn--te28c.xn--isb295fbtpmb; [B2 B3 V6]; [B2 B3 V6] # .߇ᡖⴓⴇ +B; xn--te28c.xn--isb856b9a631d; [B2 B3 V6]; [B2 B3 V6] # .߇ᡖႳႧ +B; 򩼗.\u07C7ᡖⴓⴇ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # .߇ᡖⴓⴇ +B; 򩼗.\u07C7ᡖႳⴇ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # .߇ᡖႳⴇ +T; \u200D􅍉.\u06B3\u0775; [B1 C2 P1 V6]; [P1 V6] # .ڳݵ +N; \u200D􅍉.\u06B3\u0775; [B1 C2 P1 V6]; [B1 C2 P1 V6] # .ڳݵ +B; xn--3j78f.xn--mkb20b; [V6]; [V6] # .ڳݵ +B; xn--1ug39444n.xn--mkb20b; [B1 C2 V6]; [B1 C2 V6] # .ڳݵ +B; 𲤱⒛⾳.ꡦ⒈; [P1 V6]; [P1 V6] +B; 𲤱20.音.ꡦ1.; [P1 V6]; [P1 V6] +B; xn--20-9802c.xn--0w5a.xn--1-eg4e.; [V6]; [V6] +B; xn--dth6033bzbvx.xn--tsh9439b; [V6]; [V6] +B; \u07DC8񳦓-。򞲙𑁿𐩥\u09CD; [B2 B3 B5 B6 P1 V3 V6]; [B2 B3 B5 B6 P1 V3 V6] # ߜ8-.𑁿𐩥্ +B; \u07DC8񳦓-。򞲙𑁿𐩥\u09CD; [B2 B3 B5 B6 P1 V3 V6]; [B2 B3 B5 B6 P1 V3 V6] # ߜ8-.𑁿𐩥্ +B; xn--8--rve13079p.xn--b7b9842k42df776x; [B2 B3 B5 B6 V3 V6]; [B2 B3 B5 B6 V3 V6] # ߜ8-.𑁿𐩥্ +T; Ⴕ。۰≮ß\u0745; [P1 V6]; [P1 V6] # 
Ⴕ.۰≮ß݅ +N; Ⴕ。۰≮ß\u0745; [P1 V6]; [P1 V6] # Ⴕ.۰≮ß݅ +T; Ⴕ。۰<\u0338ß\u0745; [P1 V6]; [P1 V6] # Ⴕ.۰≮ß݅ +N; Ⴕ。۰<\u0338ß\u0745; [P1 V6]; [P1 V6] # Ⴕ.۰≮ß݅ +T; ⴕ。۰<\u0338ß\u0745; [P1 V6]; [P1 V6] # ⴕ.۰≮ß݅ +N; ⴕ。۰<\u0338ß\u0745; [P1 V6]; [P1 V6] # ⴕ.۰≮ß݅ +T; ⴕ。۰≮ß\u0745; [P1 V6]; [P1 V6] # ⴕ.۰≮ß݅ +N; ⴕ。۰≮ß\u0745; [P1 V6]; [P1 V6] # ⴕ.۰≮ß݅ +B; Ⴕ。۰≮SS\u0745; [P1 V6]; [P1 V6] # Ⴕ.۰≮ss݅ +B; Ⴕ。۰<\u0338SS\u0745; [P1 V6]; [P1 V6] # Ⴕ.۰≮ss݅ +B; ⴕ。۰<\u0338ss\u0745; [P1 V6]; [P1 V6] # ⴕ.۰≮ss݅ +B; ⴕ。۰≮ss\u0745; [P1 V6]; [P1 V6] # ⴕ.۰≮ss݅ +B; Ⴕ。۰≮Ss\u0745; [P1 V6]; [P1 V6] # Ⴕ.۰≮ss݅ +B; Ⴕ。۰<\u0338Ss\u0745; [P1 V6]; [P1 V6] # Ⴕ.۰≮ss݅ +B; xn--tnd.xn--ss-jbe65aw27i; [V6]; [V6] # Ⴕ.۰≮ss݅ +B; xn--dlj.xn--ss-jbe65aw27i; [V6]; [V6] # ⴕ.۰≮ss݅ +B; xn--dlj.xn--zca912alh227g; [V6]; [V6] # ⴕ.۰≮ß݅ +B; xn--tnd.xn--zca912alh227g; [V6]; [V6] # Ⴕ.۰≮ß݅ +B; \u07E9-.𝨗꒱\u1B72; [B1 B3 V3 V5]; [B1 B3 V3 V5] # ߩ-.𝨗꒱᭲ +B; xn----odd.xn--dwf8994dc8wj; [B1 B3 V3 V5]; [B1 B3 V3 V5] # ߩ-.𝨗꒱᭲ +T; 𞼸\u200C.≯䕵⫧; [B1 B3 C1 P1 V6]; [B1 P1 V6] # .≯䕵⫧ +N; 𞼸\u200C.≯䕵⫧; [B1 B3 C1 P1 V6]; [B1 B3 C1 P1 V6] # .≯䕵⫧ +T; 𞼸\u200C.>\u0338䕵⫧; [B1 B3 C1 P1 V6]; [B1 P1 V6] # .≯䕵⫧ +N; 𞼸\u200C.>\u0338䕵⫧; [B1 B3 C1 P1 V6]; [B1 B3 C1 P1 V6] # .≯䕵⫧ +B; xn--sn7h.xn--hdh754ax6w; [B1 V6]; [B1 V6] +B; xn--0ugx453p.xn--hdh754ax6w; [B1 B3 C1 V6]; [B1 B3 C1 V6] # .≯䕵⫧ +T; 𐨅ß\uFC57.\u06AC۳︒; [B1 B3 P1 V5 V6]; [B1 B3 P1 V5 V6] # 𐨅ßيخ.ڬ۳︒ +N; 𐨅ß\uFC57.\u06AC۳︒; [B1 B3 P1 V5 V6]; [B1 B3 P1 V5 V6] # 𐨅ßيخ.ڬ۳︒ +T; 𐨅ß\u064A\u062E.\u06AC۳。; [B1 V5]; [B1 V5] # 𐨅ßيخ.ڬ۳. +N; 𐨅ß\u064A\u062E.\u06AC۳。; [B1 V5]; [B1 V5] # 𐨅ßيخ.ڬ۳. +B; 𐨅SS\u064A\u062E.\u06AC۳。; [B1 V5]; [B1 V5] # 𐨅ssيخ.ڬ۳. +B; 𐨅ss\u064A\u062E.\u06AC۳。; [B1 V5]; [B1 V5] # 𐨅ssيخ.ڬ۳. +B; 𐨅Ss\u064A\u062E.\u06AC۳。; [B1 V5]; [B1 V5] # 𐨅ssيخ.ڬ۳. +B; xn--ss-ytd5i7765l.xn--fkb6l.; [B1 V5]; [B1 V5] # 𐨅ssيخ.ڬ۳. +B; xn--zca23yncs877j.xn--fkb6l.; [B1 V5]; [B1 V5] # 𐨅ßيخ.ڬ۳. 
+B; 𐨅SS\uFC57.\u06AC۳︒; [B1 B3 P1 V5 V6]; [B1 B3 P1 V5 V6] # 𐨅ssيخ.ڬ۳︒ +B; 𐨅ss\uFC57.\u06AC۳︒; [B1 B3 P1 V5 V6]; [B1 B3 P1 V5 V6] # 𐨅ssيخ.ڬ۳︒ +B; 𐨅Ss\uFC57.\u06AC۳︒; [B1 B3 P1 V5 V6]; [B1 B3 P1 V5 V6] # 𐨅ssيخ.ڬ۳︒ +B; xn--ss-ytd5i7765l.xn--fkb6lp314e; [B1 B3 V5 V6]; [B1 B3 V5 V6] # 𐨅ssيخ.ڬ۳︒ +B; xn--zca23yncs877j.xn--fkb6lp314e; [B1 B3 V5 V6]; [B1 B3 V5 V6] # 𐨅ßيخ.ڬ۳︒ +B; -≮🡒\u1CED.񏿾Ⴁ\u0714; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -≮🡒᳭.Ⴁܔ +B; -<\u0338🡒\u1CED.񏿾Ⴁ\u0714; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -≮🡒᳭.Ⴁܔ +B; -<\u0338🡒\u1CED.񏿾ⴁ\u0714; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -≮🡒᳭.ⴁܔ +B; -≮🡒\u1CED.񏿾ⴁ\u0714; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -≮🡒᳭.ⴁܔ +B; xn----44l04zxt68c.xn--enb135qf106f; [B1 V3 V6]; [B1 V3 V6] # -≮🡒᳭.ⴁܔ +B; xn----44l04zxt68c.xn--enb300c1597h; [B1 V3 V6]; [B1 V3 V6] # -≮🡒᳭.Ⴁܔ +T; 𞤨。ꡏ\u200D\u200C; [B6 C1 C2]; xn--ge6h.xn--oc9a # 𞤨.ꡏ +N; 𞤨。ꡏ\u200D\u200C; [B6 C1 C2]; [B6 C1 C2] # 𞤨.ꡏ +T; 𞤨。ꡏ\u200D\u200C; [B6 C1 C2]; xn--ge6h.xn--oc9a # 𞤨.ꡏ +N; 𞤨。ꡏ\u200D\u200C; [B6 C1 C2]; [B6 C1 C2] # 𞤨.ꡏ +T; 𞤆。ꡏ\u200D\u200C; [B6 C1 C2]; xn--ge6h.xn--oc9a # 𞤨.ꡏ +N; 𞤆。ꡏ\u200D\u200C; [B6 C1 C2]; [B6 C1 C2] # 𞤨.ꡏ +B; xn--ge6h.xn--oc9a; 𞤨.ꡏ; xn--ge6h.xn--oc9a +B; 𞤨.ꡏ; ; xn--ge6h.xn--oc9a +B; 𞤆.ꡏ; 𞤨.ꡏ; xn--ge6h.xn--oc9a +B; xn--ge6h.xn--0ugb9575h; [B6 C1 C2]; [B6 C1 C2] # 𞤨.ꡏ +T; 𞤆。ꡏ\u200D\u200C; [B6 C1 C2]; xn--ge6h.xn--oc9a # 𞤨.ꡏ +N; 𞤆。ꡏ\u200D\u200C; [B6 C1 C2]; [B6 C1 C2] # 𞤨.ꡏ +B; 󠅹𑂶.ᢌ𑂹\u0669; [B1 B3 B5 B6 V5]; [B1 B3 B5 B6 V5] # 𑂶.ᢌ𑂹٩ +B; 󠅹𑂶.ᢌ𑂹\u0669; [B1 B3 B5 B6 V5]; [B1 B3 B5 B6 V5] # 𑂶.ᢌ𑂹٩ +B; xn--b50d.xn--iib993gyp5p; [B1 B3 B5 B6 V5]; [B1 B3 B5 B6 V5] # 𑂶.ᢌ𑂹٩ +B; Ⅎ󠅺񝵒。≯⾑; [P1 V6]; [P1 V6] +B; Ⅎ󠅺񝵒。>\u0338⾑; [P1 V6]; [P1 V6] +B; Ⅎ󠅺񝵒。≯襾; [P1 V6]; [P1 V6] +B; Ⅎ󠅺񝵒。>\u0338襾; [P1 V6]; [P1 V6] +B; ⅎ󠅺񝵒。>\u0338襾; [P1 V6]; [P1 V6] +B; ⅎ󠅺񝵒。≯襾; [P1 V6]; [P1 V6] +B; xn--73g39298c.xn--hdhz171b; [V6]; [V6] +B; xn--f3g73398c.xn--hdhz171b; [V6]; [V6] +B; ⅎ󠅺񝵒。>\u0338⾑; [P1 V6]; [P1 V6] +B; ⅎ󠅺񝵒。≯⾑; [P1 V6]; [P1 V6] +T; ς\u200D\u0DD4\u0660。-; [B1 B5 B6 C2 V3]; [B1 B5 B6 V3] # ςු٠.- +N; ς\u200D\u0DD4\u0660。-; [B1 B5 B6 C2 V3]; [B1 B5 B6 C2 V3] # ςු٠.- +T; ς\u200D\u0DD4\u0660。-; [B1 B5 B6 C2 V3]; [B1 B5 B6 V3] # ςු٠.- +N; ς\u200D\u0DD4\u0660。-; [B1 B5 B6 C2 V3]; [B1 B5 B6 C2 V3] # ςු٠.- +T; Σ\u200D\u0DD4\u0660。-; [B1 B5 B6 C2 V3]; [B1 B5 B6 V3] # σු٠.- +N; Σ\u200D\u0DD4\u0660。-; [B1 B5 B6 C2 V3]; [B1 B5 B6 C2 V3] # σු٠.- +T; σ\u200D\u0DD4\u0660。-; [B1 B5 B6 C2 V3]; [B1 B5 B6 V3] # σු٠.- +N; σ\u200D\u0DD4\u0660。-; [B1 B5 B6 C2 V3]; [B1 B5 B6 C2 V3] # σු٠.- +B; xn--4xa25ks2j.-; [B1 B5 B6 V3]; [B1 B5 B6 V3] # σු٠.- +B; xn--4xa25ks2jenu.-; [B1 B5 B6 C2 V3]; [B1 B5 B6 C2 V3] # σු٠.- +B; xn--3xa45ks2jenu.-; [B1 B5 B6 C2 V3]; [B1 B5 B6 C2 V3] # ςු٠.- +T; Σ\u200D\u0DD4\u0660。-; [B1 B5 B6 C2 V3]; [B1 B5 B6 V3] # σු٠.- +N; Σ\u200D\u0DD4\u0660。-; [B1 B5 B6 C2 V3]; [B1 B5 B6 C2 V3] # σු٠.- +T; σ\u200D\u0DD4\u0660。-; [B1 B5 B6 C2 V3]; [B1 B5 B6 V3] # σු٠.- +N; σ\u200D\u0DD4\u0660。-; [B1 B5 B6 C2 V3]; [B1 B5 B6 C2 V3] # σු٠.- +T; \u200C.ßႩ-; [C1 P1 V3 V6]; [P1 V3 V6 A4_2] # .ßႩ- +N; \u200C.ßႩ-; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .ßႩ- +T; \u200C.ßⴉ-; [C1 V3]; [V3 A4_2] # .ßⴉ- +N; \u200C.ßⴉ-; [C1 V3]; [C1 V3] # .ßⴉ- +T; \u200C.SSႩ-; [C1 P1 V3 V6]; [P1 V3 V6 A4_2] # .ssႩ- +N; \u200C.SSႩ-; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .ssႩ- +T; \u200C.ssⴉ-; [C1 V3]; [V3 A4_2] # .ssⴉ- +N; \u200C.ssⴉ-; [C1 V3]; [C1 V3] # .ssⴉ- +T; \u200C.Ssⴉ-; [C1 V3]; [V3 A4_2] # .ssⴉ- +N; \u200C.Ssⴉ-; [C1 V3]; [C1 V3] # .ssⴉ- +B; .xn--ss--bi1b; [V3 A4_2]; [V3 A4_2] +B; xn--0ug.xn--ss--bi1b; [C1 V3]; [C1 V3] # .ssⴉ- +B; .xn--ss--4rn; [V3 V6 A4_2]; [V3 V6 A4_2] +B; xn--0ug.xn--ss--4rn; 
[C1 V3 V6]; [C1 V3 V6] # .ssႩ- +B; xn--0ug.xn----pfa2305a; [C1 V3]; [C1 V3] # .ßⴉ- +B; xn--0ug.xn----pfa042j; [C1 V3 V6]; [C1 V3 V6] # .ßႩ- +B; 󍭲𐫍㓱。⾑; [B5 P1 V6]; [B5 P1 V6] +B; 󍭲𐫍㓱。襾; [B5 P1 V6]; [B5 P1 V6] +B; xn--u7kt691dlj09f.xn--9v2a; [B5 V6]; [B5 V6] +T; \u06A0𐮋𐹰≮。≯󠦗\u200D; [B1 B3 C2 P1 V6]; [B1 B3 P1 V6] # ڠ𐮋𐹰≮.≯ +N; \u06A0𐮋𐹰≮。≯󠦗\u200D; [B1 B3 C2 P1 V6]; [B1 B3 C2 P1 V6] # ڠ𐮋𐹰≮.≯ +T; \u06A0𐮋𐹰<\u0338。>\u0338󠦗\u200D; [B1 B3 C2 P1 V6]; [B1 B3 P1 V6] # ڠ𐮋𐹰≮.≯ +N; \u06A0𐮋𐹰<\u0338。>\u0338󠦗\u200D; [B1 B3 C2 P1 V6]; [B1 B3 C2 P1 V6] # ڠ𐮋𐹰≮.≯ +B; xn--2jb053lf13nyoc.xn--hdh08821l; [B1 B3 V6]; [B1 B3 V6] # ڠ𐮋𐹰≮.≯ +B; xn--2jb053lf13nyoc.xn--1ugx6gc8096c; [B1 B3 C2 V6]; [B1 B3 C2 V6] # ڠ𐮋𐹰≮.≯ +B; 𝟞。񃰶\u0777\u08B0⩋; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # 6.ݷࢰ⩋ +B; 6。񃰶\u0777\u08B0⩋; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # 6.ݷࢰ⩋ +B; 6.xn--7pb04do15eq748f; [B1 B5 B6 V6]; [B1 B5 B6 V6] # 6.ݷࢰ⩋ +B; -\uFCFD。𑇀𑍴; [B1 V3 V5]; [B1 V3 V5] # -شى.𑇀𑍴 +B; -\uFCFD。𑇀𑍴; [B1 V3 V5]; [B1 V3 V5] # -شى.𑇀𑍴 +B; -\u0634\u0649。𑇀𑍴; [B1 V3 V5]; [B1 V3 V5] # -شى.𑇀𑍴 +B; xn----qnc7d.xn--wd1d62a; [B1 V3 V5]; [B1 V3 V5] # -شى.𑇀𑍴 +T; \u200C󠊶𝟏.\u0D43򪥐𐹬󊓶; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # 1.ൃ𐹬 +N; \u200C󠊶𝟏.\u0D43򪥐𐹬󊓶; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 1.ൃ𐹬 +T; \u200C󠊶1.\u0D43򪥐𐹬󊓶; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # 1.ൃ𐹬 +N; \u200C󠊶1.\u0D43򪥐𐹬󊓶; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 1.ൃ𐹬 +B; xn--1-f521m.xn--mxc0872kcu37dnmem; [B1 V5 V6]; [B1 V5 V6] # 1.ൃ𐹬 +B; xn--1-rgnu0071n.xn--mxc0872kcu37dnmem; [B1 C1 V5 V6]; [B1 C1 V5 V6] # 1.ൃ𐹬 +T; 齙--𝟰.ß; 齙--4.ß; xn----4-p16k.ss +N; 齙--𝟰.ß; 齙--4.ß; xn----4-p16k.xn--zca +T; 齙--4.ß; ; xn----4-p16k.ss +N; 齙--4.ß; ; xn----4-p16k.xn--zca +B; 齙--4.SS; 齙--4.ss; xn----4-p16k.ss +B; 齙--4.ss; ; xn----4-p16k.ss +B; 齙--4.Ss; 齙--4.ss; xn----4-p16k.ss +B; xn----4-p16k.ss; 齙--4.ss; xn----4-p16k.ss +B; xn----4-p16k.xn--zca; 齙--4.ß; xn----4-p16k.xn--zca +B; 齙--𝟰.SS; 齙--4.ss; xn----4-p16k.ss +B; 齙--𝟰.ss; 齙--4.ss; xn----4-p16k.ss +B; 齙--𝟰.Ss; 齙--4.ss; xn----4-p16k.ss +T; \u1BF2.𐹢𞀖\u200C; [B1 C1 V5]; [B1 V5] # ᯲.𐹢𞀖 +N; \u1BF2.𐹢𞀖\u200C; [B1 C1 V5]; [B1 C1 V5] # ᯲.𐹢𞀖 +B; xn--0zf.xn--9n0d2296a; [B1 V5]; [B1 V5] # ᯲.𐹢𞀖 +B; xn--0zf.xn--0ug9894grqqf; [B1 C1 V5]; [B1 C1 V5] # ᯲.𐹢𞀖 +T; 󃲙󠋘。\uDEDE-\u200D; [C2 P1 V6]; [P1 V3 V6 A3] # .- +N; 󃲙󠋘。\uDEDE-\u200D; [C2 P1 V6]; [C2 P1 V6 A3] # .- +T; 󃲙󠋘。\uDEDE-\u200D; [C2 P1 V6]; [P1 V3 V6 A3] # .- +N; 󃲙󠋘。\uDEDE-\u200D; [C2 P1 V6]; [C2 P1 V6 A3] # .- +B; xn--ct86d8w51a.\uDEDE-; [P1 V3 V6]; [P1 V3 V6 A3] # .- +B; XN--CT86D8W51A.\uDEDE-; [P1 V3 V6]; [P1 V3 V6 A3] # .- +B; Xn--Ct86d8w51a.\uDEDE-; [P1 V3 V6]; [P1 V3 V6 A3] # .- +T; xn--ct86d8w51a.\uDEDE-\u200D; [C2 P1 V6]; [P1 V3 V6 A3] # .- +N; xn--ct86d8w51a.\uDEDE-\u200D; [C2 P1 V6]; [C2 P1 V6 A3] # .- +T; XN--CT86D8W51A.\uDEDE-\u200D; [C2 P1 V6]; [P1 V3 V6 A3] # .- +N; XN--CT86D8W51A.\uDEDE-\u200D; [C2 P1 V6]; [C2 P1 V6 A3] # .- +T; Xn--Ct86d8w51a.\uDEDE-\u200D; [C2 P1 V6]; [P1 V3 V6 A3] # .- +N; Xn--Ct86d8w51a.\uDEDE-\u200D; [C2 P1 V6]; [C2 P1 V6 A3] # .- +B; \u1A60.𞵷-𝪩悎; [B1 B2 B3 B6 P1 V5 V6]; [B1 B2 B3 B6 P1 V5 V6] # ᩠.-𝪩悎 +B; \u1A60.𞵷-𝪩悎; [B1 B2 B3 B6 P1 V5 V6]; [B1 B2 B3 B6 P1 V5 V6] # ᩠.-𝪩悎 +B; xn--jof.xn----gf4bq282iezpa; [B1 B2 B3 B6 V5 V6]; [B1 B2 B3 B6 V5 V6] # ᩠.-𝪩悎 +B; 𛜯󠊛.𞤳񏥾; [B2 B3 B6 P1 V6]; [B2 B3 B6 P1 V6] +B; 𛜯󠊛.𞤳񏥾; [B2 B3 B6 P1 V6]; [B2 B3 B6 P1 V6] +B; 𛜯󠊛.𞤑񏥾; [B2 B3 B6 P1 V6]; [B2 B3 B6 P1 V6] +B; xn--xx5gy2741c.xn--re6hw266j; [B2 B3 B6 V6]; [B2 B3 B6 V6] +B; 𛜯󠊛.𞤑񏥾; [B2 B3 B6 P1 V6]; [B2 B3 B6 P1 V6] +B; \u071C𐫒\u062E.𐋲; [B1]; [B1] # ܜ𐫒خ.𐋲 +B; xn--tgb98b8643d.xn--m97c; [B1]; [B1] # ܜ𐫒خ.𐋲 +B; 𐼑𞤓\u0637\u08E2.\uDF56; [P1 
V6]; [P1 V6 A3] # 𞤵ط. +B; 𐼑𞤵\u0637\u08E2.\uDF56; [P1 V6]; [P1 V6 A3] # 𞤵ط. +B; xn--2gb08k9w69agm0g.\uDF56; [P1 V6]; [P1 V6 A3] # 𞤵ط. +B; XN--2GB08K9W69AGM0G.\uDF56; [P1 V6]; [P1 V6 A3] # 𞤵ط. +B; Xn--2Gb08k9w69agm0g.\uDF56; [P1 V6]; [P1 V6 A3] # 𞤵ط. +B; Ↄ。\u0A4D\u1CD4𞷣; [B1 P1 V5 V6]; [B1 P1 V5 V6] # Ↄ.᳔੍ +B; Ↄ。\u1CD4\u0A4D𞷣; [B1 P1 V5 V6]; [B1 P1 V5 V6] # Ↄ.᳔੍ +B; ↄ。\u1CD4\u0A4D𞷣; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ↄ.᳔੍ +B; xn--r5g.xn--ybc995g0835a; [B1 V5 V6]; [B1 V5 V6] # ↄ.᳔੍ +B; xn--q5g.xn--ybc995g0835a; [B1 V5 V6]; [B1 V5 V6] # Ↄ.᳔੍ +B; ↄ。\u0A4D\u1CD4𞷣; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ↄ.᳔੍ +B; 󠪢-。򛂏≮𑜫; [P1 V3 V6]; [P1 V3 V6] +B; 󠪢-。򛂏<\u0338𑜫; [P1 V3 V6]; [P1 V3 V6] +B; xn----bh61m.xn--gdhz157g0em1d; [V3 V6]; [V3 V6] +T; \u200C󠉹\u200D。򌿧≮Ⴉ; [C1 C2 P1 V6]; [P1 V6] # .≮Ⴉ +N; \u200C󠉹\u200D。򌿧≮Ⴉ; [C1 C2 P1 V6]; [C1 C2 P1 V6] # .≮Ⴉ +T; \u200C󠉹\u200D。򌿧<\u0338Ⴉ; [C1 C2 P1 V6]; [P1 V6] # .≮Ⴉ +N; \u200C󠉹\u200D。򌿧<\u0338Ⴉ; [C1 C2 P1 V6]; [C1 C2 P1 V6] # .≮Ⴉ +T; \u200C󠉹\u200D。򌿧<\u0338ⴉ; [C1 C2 P1 V6]; [P1 V6] # .≮ⴉ +N; \u200C󠉹\u200D。򌿧<\u0338ⴉ; [C1 C2 P1 V6]; [C1 C2 P1 V6] # .≮ⴉ +T; \u200C󠉹\u200D。򌿧≮ⴉ; [C1 C2 P1 V6]; [P1 V6] # .≮ⴉ +N; \u200C󠉹\u200D。򌿧≮ⴉ; [C1 C2 P1 V6]; [C1 C2 P1 V6] # .≮ⴉ +B; xn--3n36e.xn--gdh992byu01p; [V6]; [V6] +B; xn--0ugc90904y.xn--gdh992byu01p; [C1 C2 V6]; [C1 C2 V6] # .≮ⴉ +B; xn--3n36e.xn--hnd112gpz83n; [V6]; [V6] +B; xn--0ugc90904y.xn--hnd112gpz83n; [C1 C2 V6]; [C1 C2 V6] # .≮Ⴉ +B; 𐹯-𑄴\u08BC。︒䖐⾆; [B1 P1 V6]; [B1 P1 V6] # 𐹯-𑄴ࢼ.︒䖐舌 +B; 𐹯-𑄴\u08BC。。䖐舌; [B1 A4_2]; [B1 A4_2] # 𐹯-𑄴ࢼ..䖐舌 +B; xn----rpd7902rclc..xn--fpo216m; [B1 A4_2]; [B1 A4_2] # 𐹯-𑄴ࢼ..䖐舌 +B; xn----rpd7902rclc.xn--fpo216mn07e; [B1 V6]; [B1 V6] # 𐹯-𑄴ࢼ.︒䖐舌 +B; 𝪞Ⴐ。쪡; [P1 V5 V6]; [P1 V5 V6] +B; 𝪞Ⴐ。쪡; [P1 V5 V6]; [P1 V5 V6] +B; 𝪞Ⴐ。쪡; [P1 V5 V6]; [P1 V5 V6] +B; 𝪞Ⴐ。쪡; [P1 V5 V6]; [P1 V5 V6] +B; 𝪞ⴐ。쪡; [V5]; [V5] +B; 𝪞ⴐ。쪡; [V5]; [V5] +B; xn--7kj1858k.xn--pi6b; [V5]; [V5] +B; xn--ond3755u.xn--pi6b; [V5 V6]; [V5 V6] +B; 𝪞ⴐ。쪡; [V5]; [V5] +B; 𝪞ⴐ。쪡; [V5]; [V5] +B; \u0E3A쩁𐹬.􋉳; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ฺ쩁𐹬. +B; \u0E3A쩁𐹬.􋉳; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ฺ쩁𐹬. +B; xn--o4c4837g2zvb.xn--5f70g; [B1 V5 V6]; [B1 V5 V6] # ฺ쩁𐹬. 
+T; ᡅ0\u200C。⎢󤨄; [C1 P1 V6]; [P1 V6] # ᡅ0.⎢ +N; ᡅ0\u200C。⎢󤨄; [C1 P1 V6]; [C1 P1 V6] # ᡅ0.⎢ +T; ᡅ0\u200C。⎢󤨄; [C1 P1 V6]; [P1 V6] # ᡅ0.⎢ +N; ᡅ0\u200C。⎢󤨄; [C1 P1 V6]; [C1 P1 V6] # ᡅ0.⎢ +B; xn--0-z6j.xn--8lh28773l; [V6]; [V6] +B; xn--0-z6jy93b.xn--8lh28773l; [C1 V6]; [C1 V6] # ᡅ0.⎢ +T; 𲮚9ꍩ\u17D3.\u200Dß; [C2 P1 V6]; [P1 V6] # 9ꍩ៓.ß +N; 𲮚9ꍩ\u17D3.\u200Dß; [C2 P1 V6]; [C2 P1 V6] # 9ꍩ៓.ß +T; 𲮚9ꍩ\u17D3.\u200Dß; [C2 P1 V6]; [P1 V6] # 9ꍩ៓.ß +N; 𲮚9ꍩ\u17D3.\u200Dß; [C2 P1 V6]; [C2 P1 V6] # 9ꍩ៓.ß +T; 𲮚9ꍩ\u17D3.\u200DSS; [C2 P1 V6]; [P1 V6] # 9ꍩ៓.ss +N; 𲮚9ꍩ\u17D3.\u200DSS; [C2 P1 V6]; [C2 P1 V6] # 9ꍩ៓.ss +T; 𲮚9ꍩ\u17D3.\u200Dss; [C2 P1 V6]; [P1 V6] # 9ꍩ៓.ss +N; 𲮚9ꍩ\u17D3.\u200Dss; [C2 P1 V6]; [C2 P1 V6] # 9ꍩ៓.ss +T; 𲮚9ꍩ\u17D3.\u200DSs; [C2 P1 V6]; [P1 V6] # 9ꍩ៓.ss +N; 𲮚9ꍩ\u17D3.\u200DSs; [C2 P1 V6]; [C2 P1 V6] # 9ꍩ៓.ss +B; xn--9-i0j5967eg3qz.ss; [V6]; [V6] # 9ꍩ៓.ss +B; xn--9-i0j5967eg3qz.xn--ss-l1t; [C2 V6]; [C2 V6] # 9ꍩ៓.ss +B; xn--9-i0j5967eg3qz.xn--zca770n; [C2 V6]; [C2 V6] # 9ꍩ៓.ß +T; 𲮚9ꍩ\u17D3.\u200DSS; [C2 P1 V6]; [P1 V6] # 9ꍩ៓.ss +N; 𲮚9ꍩ\u17D3.\u200DSS; [C2 P1 V6]; [C2 P1 V6] # 9ꍩ៓.ss +T; 𲮚9ꍩ\u17D3.\u200Dss; [C2 P1 V6]; [P1 V6] # 9ꍩ៓.ss +N; 𲮚9ꍩ\u17D3.\u200Dss; [C2 P1 V6]; [C2 P1 V6] # 9ꍩ៓.ss +T; 𲮚9ꍩ\u17D3.\u200DSs; [C2 P1 V6]; [P1 V6] # 9ꍩ៓.ss +N; 𲮚9ꍩ\u17D3.\u200DSs; [C2 P1 V6]; [C2 P1 V6] # 9ꍩ៓.ss +B; ꗷ𑆀.\u075D𐩒; ; xn--ju8a625r.xn--hpb0073k; NV8 # ꗷ𑆀.ݝ𐩒 +B; xn--ju8a625r.xn--hpb0073k; ꗷ𑆀.\u075D𐩒; xn--ju8a625r.xn--hpb0073k; NV8 # ꗷ𑆀.ݝ𐩒 +B; ⒐≯-。︒򩑣-񞛠; [P1 V3 V6]; [P1 V3 V6] +B; ⒐>\u0338-。︒򩑣-񞛠; [P1 V3 V6]; [P1 V3 V6] +B; 9.≯-。。򩑣-񞛠; [P1 V3 V6 A4_2]; [P1 V3 V6 A4_2] +B; 9.>\u0338-。。򩑣-񞛠; [P1 V3 V6 A4_2]; [P1 V3 V6 A4_2] +B; 9.xn----ogo..xn----xj54d1s69k; [V3 V6 A4_2]; [V3 V6 A4_2] +B; xn----ogot9g.xn----n89hl0522az9u2a; [V3 V6]; [V3 V6] +B; 򈪚\u0CE3Ⴡ󠢏.\u061D; [B6 P1 V6]; [B6 P1 V6] # ೣჁ. +B; 򈪚\u0CE3Ⴡ󠢏.\u061D; [B6 P1 V6]; [B6 P1 V6] # ೣჁ. +B; 򈪚\u0CE3ⴡ󠢏.\u061D; [B6 P1 V6]; [B6 P1 V6] # ೣⴡ. +B; xn--vuc226n8n28lmju7a.xn--cgb; [B6 V6]; [B6 V6] # ೣⴡ. +B; xn--vuc49qvu85xmju7a.xn--cgb; [B6 V6]; [B6 V6] # ೣჁ. +B; 򈪚\u0CE3ⴡ󠢏.\u061D; [B6 P1 V6]; [B6 P1 V6] # ೣⴡ. 
+B; \u1DEB。𐋩\u0638-𐫮; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ᷫ.𐋩ظ-𐫮 +B; xn--gfg.xn----xnc0815qyyg; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ᷫ.𐋩ظ-𐫮 +B; 싇。⾇𐳋Ⴝ; [B5 P1 V6]; [B5 P1 V6] +B; 싇。⾇𐳋Ⴝ; [B5 P1 V6]; [B5 P1 V6] +B; 싇。舛𐳋Ⴝ; [B5 P1 V6]; [B5 P1 V6] +B; 싇。舛𐳋Ⴝ; [B5 P1 V6]; [B5 P1 V6] +B; 싇。舛𐳋ⴝ; [B5]; [B5] +B; 싇。舛𐳋ⴝ; [B5]; [B5] +B; 싇。舛𐲋Ⴝ; [B5 P1 V6]; [B5 P1 V6] +B; 싇。舛𐲋Ⴝ; [B5 P1 V6]; [B5 P1 V6] +B; 싇。舛𐲋ⴝ; [B5]; [B5] +B; 싇。舛𐲋ⴝ; [B5]; [B5] +B; xn--9u4b.xn--llj123yh74e; [B5]; [B5] +B; xn--9u4b.xn--1nd7519ch79d; [B5 V6]; [B5 V6] +B; 싇。⾇𐳋ⴝ; [B5]; [B5] +B; 싇。⾇𐳋ⴝ; [B5]; [B5] +B; 싇。⾇𐲋Ⴝ; [B5 P1 V6]; [B5 P1 V6] +B; 싇。⾇𐲋Ⴝ; [B5 P1 V6]; [B5 P1 V6] +B; 싇。⾇𐲋ⴝ; [B5]; [B5] +B; 싇。⾇𐲋ⴝ; [B5]; [B5] +T; 𐹠ς。\u200C\u06BFჀ; [B1 C1 P1 V6]; [B1 B2 B3 P1 V6] # 𐹠ς.ڿჀ +N; 𐹠ς。\u200C\u06BFჀ; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹠ς.ڿჀ +T; 𐹠ς。\u200C\u06BFⴠ; [B1 C1]; [B1 B2 B3] # 𐹠ς.ڿⴠ +N; 𐹠ς。\u200C\u06BFⴠ; [B1 C1]; [B1 C1] # 𐹠ς.ڿⴠ +T; 𐹠Σ。\u200C\u06BFჀ; [B1 C1 P1 V6]; [B1 B2 B3 P1 V6] # 𐹠σ.ڿჀ +N; 𐹠Σ。\u200C\u06BFჀ; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹠σ.ڿჀ +T; 𐹠σ。\u200C\u06BFⴠ; [B1 C1]; [B1 B2 B3] # 𐹠σ.ڿⴠ +N; 𐹠σ。\u200C\u06BFⴠ; [B1 C1]; [B1 C1] # 𐹠σ.ڿⴠ +B; xn--4xa9167k.xn--ykb467q; [B1 B2 B3]; [B1 B2 B3] # 𐹠σ.ڿⴠ +B; xn--4xa9167k.xn--ykb760k9hj; [B1 C1]; [B1 C1] # 𐹠σ.ڿⴠ +B; xn--4xa9167k.xn--ykb632c; [B1 B2 B3 V6]; [B1 B2 B3 V6] # 𐹠σ.ڿჀ +B; xn--4xa9167k.xn--ykb632cvxm; [B1 C1 V6]; [B1 C1 V6] # 𐹠σ.ڿჀ +B; xn--3xa1267k.xn--ykb760k9hj; [B1 C1]; [B1 C1] # 𐹠ς.ڿⴠ +B; xn--3xa1267k.xn--ykb632cvxm; [B1 C1 V6]; [B1 C1 V6] # 𐹠ς.ڿჀ +T; 򇒐\u200C\u0604.\u069A-ß; [B2 B3 B5 B6 C1 P1 V6]; [B2 B3 B5 B6 P1 V6] # .ښ-ß +N; 򇒐\u200C\u0604.\u069A-ß; [B2 B3 B5 B6 C1 P1 V6]; [B2 B3 B5 B6 C1 P1 V6] # .ښ-ß +T; 򇒐\u200C\u0604.\u069A-SS; [B2 B3 B5 B6 C1 P1 V6]; [B2 B3 B5 B6 P1 V6] # .ښ-ss +N; 򇒐\u200C\u0604.\u069A-SS; [B2 B3 B5 B6 C1 P1 V6]; [B2 B3 B5 B6 C1 P1 V6] # .ښ-ss +T; 򇒐\u200C\u0604.\u069A-ss; [B2 B3 B5 B6 C1 P1 V6]; [B2 B3 B5 B6 P1 V6] # .ښ-ss +N; 򇒐\u200C\u0604.\u069A-ss; [B2 B3 B5 B6 C1 P1 V6]; [B2 B3 B5 B6 C1 P1 V6] # .ښ-ss +T; 򇒐\u200C\u0604.\u069A-Ss; [B2 B3 B5 B6 C1 P1 V6]; [B2 B3 B5 B6 P1 V6] # .ښ-ss +N; 򇒐\u200C\u0604.\u069A-Ss; [B2 B3 B5 B6 C1 P1 V6]; [B2 B3 B5 B6 C1 P1 V6] # .ښ-ss +B; xn--mfb98261i.xn---ss-sdf; [B2 B3 B5 B6 V6]; [B2 B3 B5 B6 V6] # .ښ-ss +B; xn--mfb144kqo32m.xn---ss-sdf; [B2 B3 B5 B6 C1 V6]; [B2 B3 B5 B6 C1 V6] # .ښ-ss +B; xn--mfb144kqo32m.xn----qfa315b; [B2 B3 B5 B6 C1 V6]; [B2 B3 B5 B6 C1 V6] # .ښ-ß +T; \u200C\u200D\u17B5\u067A.-\uFBB0󅄞𐸚; [B1 C1 C2 P1 V3 V6]; [B1 P1 V3 V5 V6] # ٺ.-ۓ +N; \u200C\u200D\u17B5\u067A.-\uFBB0󅄞𐸚; [B1 C1 C2 P1 V3 V6]; [B1 C1 C2 P1 V3 V6] # ٺ.-ۓ +T; \u200C\u200D\u17B5\u067A.-\u06D3󅄞𐸚; [B1 C1 C2 P1 V3 V6]; [B1 P1 V3 V5 V6] # ٺ.-ۓ +N; \u200C\u200D\u17B5\u067A.-\u06D3󅄞𐸚; [B1 C1 C2 P1 V3 V6]; [B1 C1 C2 P1 V3 V6] # ٺ.-ۓ +T; \u200C\u200D\u17B5\u067A.-\u06D2\u0654󅄞𐸚; [B1 C1 C2 P1 V3 V6]; [B1 P1 V3 V5 V6] # ٺ.-ۓ +N; \u200C\u200D\u17B5\u067A.-\u06D2\u0654󅄞𐸚; [B1 C1 C2 P1 V3 V6]; [B1 C1 C2 P1 V3 V6] # ٺ.-ۓ +B; xn--zib539f.xn----twc1133r17r6g; [B1 V3 V5 V6]; [B1 V3 V5 V6] # ٺ.-ۓ +B; xn--zib539f8igea.xn----twc1133r17r6g; [B1 C1 C2 V3 V6]; [B1 C1 C2 V3 V6] # ٺ.-ۓ +B; 򡶱。𐮬≠; [B3 P1 V6]; [B3 P1 V6] +B; 򡶱。𐮬=\u0338; [B3 P1 V6]; [B3 P1 V6] +B; 򡶱。𐮬≠; [B3 P1 V6]; [B3 P1 V6] +B; 򡶱。𐮬=\u0338; [B3 P1 V6]; [B3 P1 V6] +B; xn--dd55c.xn--1ch3003g; [B3 V6]; [B3 V6] +B; \u0FB2𞶅。𐹮𐹷덝۵; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ྲ.𐹮𐹷덝۵ +B; \u0FB2𞶅。𐹮𐹷덝۵; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ྲ.𐹮𐹷덝۵ +B; \u0FB2𞶅。𐹮𐹷덝۵; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ྲ.𐹮𐹷덝۵ +B; \u0FB2𞶅。𐹮𐹷덝۵; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ྲ.𐹮𐹷덝۵ +B; xn--fgd0675v.xn--imb5839fidpcbba; [B1 V5 V6]; [B1 V5 V6] # ྲ.𐹮𐹷덝۵ +T; 
Ⴏ󠅋-.\u200DႩ; [C2 P1 V3 V6]; [P1 V3 V6] # Ⴏ-.Ⴉ +N; Ⴏ󠅋-.\u200DႩ; [C2 P1 V3 V6]; [C2 P1 V3 V6] # Ⴏ-.Ⴉ +T; Ⴏ󠅋-.\u200DႩ; [C2 P1 V3 V6]; [P1 V3 V6] # Ⴏ-.Ⴉ +N; Ⴏ󠅋-.\u200DႩ; [C2 P1 V3 V6]; [C2 P1 V3 V6] # Ⴏ-.Ⴉ +T; ⴏ󠅋-.\u200Dⴉ; [C2 V3]; [V3] # ⴏ-.ⴉ +N; ⴏ󠅋-.\u200Dⴉ; [C2 V3]; [C2 V3] # ⴏ-.ⴉ +B; xn----3vs.xn--0kj; [V3]; [V3] +B; xn----3vs.xn--1ug532c; [C2 V3]; [C2 V3] # ⴏ-.ⴉ +B; xn----00g.xn--hnd; [V3 V6]; [V3 V6] +B; xn----00g.xn--hnd399e; [C2 V3 V6]; [C2 V3 V6] # Ⴏ-.Ⴉ +T; ⴏ󠅋-.\u200Dⴉ; [C2 V3]; [V3] # ⴏ-.ⴉ +N; ⴏ󠅋-.\u200Dⴉ; [C2 V3]; [C2 V3] # ⴏ-.ⴉ +B; ⇧𐨏󠾈󯶅。\u0600󠈵󠆉; [B1 P1 V6]; [B1 P1 V6] # ⇧𐨏. +B; xn--l8g5552g64t4g46xf.xn--ifb08144p; [B1 V6]; [B1 V6] # ⇧𐨏. +B; ≠𐮂.↑🄇⒈; [B1 P1 V6]; [B1 P1 V6] +B; =\u0338𐮂.↑🄇⒈; [B1 P1 V6]; [B1 P1 V6] +B; ≠𐮂.↑6,1.; [B1 P1 V6]; [B1 P1 V6] +B; =\u0338𐮂.↑6,1.; [B1 P1 V6]; [B1 P1 V6] +B; xn--1chy492g.xn--6,1-pw1a.; [B1 P1 V6]; [B1 P1 V6] +B; xn--1chy492g.xn--45gx9iuy44d; [B1 V6]; [B1 V6] +T; 𝩏󠲉ß.ᢤ򄦌\u200C𐹫; [B1 B5 B6 C1 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 𝩏ß.ᢤ𐹫 +N; 𝩏󠲉ß.ᢤ򄦌\u200C𐹫; [B1 B5 B6 C1 P1 V5 V6]; [B1 B5 B6 C1 P1 V5 V6] # 𝩏ß.ᢤ𐹫 +T; 𝩏󠲉SS.ᢤ򄦌\u200C𐹫; [B1 B5 B6 C1 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 𝩏ss.ᢤ𐹫 +N; 𝩏󠲉SS.ᢤ򄦌\u200C𐹫; [B1 B5 B6 C1 P1 V5 V6]; [B1 B5 B6 C1 P1 V5 V6] # 𝩏ss.ᢤ𐹫 +T; 𝩏󠲉ss.ᢤ򄦌\u200C𐹫; [B1 B5 B6 C1 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 𝩏ss.ᢤ𐹫 +N; 𝩏󠲉ss.ᢤ򄦌\u200C𐹫; [B1 B5 B6 C1 P1 V5 V6]; [B1 B5 B6 C1 P1 V5 V6] # 𝩏ss.ᢤ𐹫 +T; 𝩏󠲉Ss.ᢤ򄦌\u200C𐹫; [B1 B5 B6 C1 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 𝩏ss.ᢤ𐹫 +N; 𝩏󠲉Ss.ᢤ򄦌\u200C𐹫; [B1 B5 B6 C1 P1 V5 V6]; [B1 B5 B6 C1 P1 V5 V6] # 𝩏ss.ᢤ𐹫 +B; xn--ss-zb11ap1427e.xn--ubf2596jbt61c; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] +B; xn--ss-zb11ap1427e.xn--ubf609atw1tynn3d; [B1 B5 B6 C1 V5 V6]; [B1 B5 B6 C1 V5 V6] # 𝩏ss.ᢤ𐹫 +B; xn--zca3153vupz3e.xn--ubf609atw1tynn3d; [B1 B5 B6 C1 V5 V6]; [B1 B5 B6 C1 V5 V6] # 𝩏ß.ᢤ𐹫 +T; ß𐵳񗘁Ⴇ。\uA67A; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ßႧ.ꙺ +N; ß𐵳񗘁Ⴇ。\uA67A; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ßႧ.ꙺ +T; ß𐵳񗘁Ⴇ。\uA67A; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ßႧ.ꙺ +N; ß𐵳񗘁Ⴇ。\uA67A; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ßႧ.ꙺ +T; ß𐵳񗘁ⴇ。\uA67A; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ßⴇ.ꙺ +N; ß𐵳񗘁ⴇ。\uA67A; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ßⴇ.ꙺ +B; SS𐵳񗘁Ⴇ。\uA67A; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ssႧ.ꙺ +B; ss𐵳񗘁ⴇ。\uA67A; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ssⴇ.ꙺ +B; Ss𐵳񗘁Ⴇ。\uA67A; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ssႧ.ꙺ +B; xn--ss-rek7420r4hs7b.xn--9x8a; [B1 B3 B5 B6 V5 V6]; [B1 B3 B5 B6 V5 V6] # ssႧ.ꙺ +B; xn--ss-e61ar955h4hs7b.xn--9x8a; [B1 B3 B5 B6 V5 V6]; [B1 B3 B5 B6 V5 V6] # ssⴇ.ꙺ +B; xn--zca227tpy4lkns1b.xn--9x8a; [B1 B3 B5 B6 V5 V6]; [B1 B3 B5 B6 V5 V6] # ßⴇ.ꙺ +B; xn--zca491fci5qkn79a.xn--9x8a; [B1 B3 B5 B6 V5 V6]; [B1 B3 B5 B6 V5 V6] # ßႧ.ꙺ +T; ß𐵳񗘁ⴇ。\uA67A; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ßⴇ.ꙺ +N; ß𐵳񗘁ⴇ。\uA67A; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ßⴇ.ꙺ +B; SS𐵳񗘁Ⴇ。\uA67A; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ssႧ.ꙺ +B; ss𐵳񗘁ⴇ。\uA67A; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ssⴇ.ꙺ +B; Ss𐵳񗘁Ⴇ。\uA67A; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ssႧ.ꙺ +B; \u1714。󠆣-𑋪; [V3 V5]; [V3 V5] # ᜔.-𑋪 +B; xn--fze.xn----ly8i; [V3 V5]; [V3 V5] # ᜔.-𑋪 +T; \uABE8-.򨏜\u05BDß; [P1 V3 V5 V6]; [P1 V3 V5 V6] # ꯨ-.ֽß +N; \uABE8-.򨏜\u05BDß; [P1 V3 V5 V6]; [P1 V3 V5 V6] # ꯨ-.ֽß +T; \uABE8-.򨏜\u05BDß; [P1 V3 V5 V6]; [P1 V3 V5 V6] # ꯨ-.ֽß +N; \uABE8-.򨏜\u05BDß; [P1 V3 V5 V6]; [P1 V3 V5 V6] # ꯨ-.ֽß +B; \uABE8-.򨏜\u05BDSS; [P1 V3 V5 V6]; [P1 V3 V5 V6] # ꯨ-.ֽss +B; \uABE8-.򨏜\u05BDss; [P1 V3 V5 V6]; [P1 V3 V5 V6] # 
ꯨ-.ֽss +B; \uABE8-.򨏜\u05BDSs; [P1 V3 V5 V6]; [P1 V3 V5 V6] # ꯨ-.ֽss +B; xn----pw5e.xn--ss-7jd10716y; [V3 V5 V6]; [V3 V5 V6] # ꯨ-.ֽss +B; xn----pw5e.xn--zca50wfv060a; [V3 V5 V6]; [V3 V5 V6] # ꯨ-.ֽß +B; \uABE8-.򨏜\u05BDSS; [P1 V3 V5 V6]; [P1 V3 V5 V6] # ꯨ-.ֽss +B; \uABE8-.򨏜\u05BDss; [P1 V3 V5 V6]; [P1 V3 V5 V6] # ꯨ-.ֽss +B; \uABE8-.򨏜\u05BDSs; [P1 V3 V5 V6]; [P1 V3 V5 V6] # ꯨ-.ֽss +B; ᡓ-≮。\u066B󠅱ᡄ; [B1 B6 P1 V6]; [B1 B6 P1 V6] # ᡓ-≮.٫ᡄ +B; ᡓ-<\u0338。\u066B󠅱ᡄ; [B1 B6 P1 V6]; [B1 B6 P1 V6] # ᡓ-≮.٫ᡄ +B; xn----s7j866c.xn--kib252g; [B1 B6 V6]; [B1 B6 V6] # ᡓ-≮.٫ᡄ +B; 𝟥♮𑜫\u08ED.\u17D2𑜫8󠆏; [V5]; [V5] # 3♮𑜫࣭.្𑜫8 +B; 3♮𑜫\u08ED.\u17D2𑜫8󠆏; [V5]; [V5] # 3♮𑜫࣭.្𑜫8 +B; xn--3-ksd277tlo7s.xn--8-f0jx021l; [V5]; [V5] # 3♮𑜫࣭.្𑜫8 +T; -。򕌀\u200D❡; [C2 P1 V3 V6]; [P1 V3 V6] # -.❡ +N; -。򕌀\u200D❡; [C2 P1 V3 V6]; [C2 P1 V3 V6] # -.❡ +T; -。򕌀\u200D❡; [C2 P1 V3 V6]; [P1 V3 V6] # -.❡ +N; -。򕌀\u200D❡; [C2 P1 V3 V6]; [C2 P1 V3 V6] # -.❡ +B; -.xn--nei54421f; [V3 V6]; [V3 V6] +B; -.xn--1ug800aq795s; [C2 V3 V6]; [C2 V3 V6] # -.❡ +B; 𝟓☱𝟐򥰵。𝪮񐡳; [P1 V5 V6]; [P1 V5 V6] +B; 5☱2򥰵。𝪮񐡳; [P1 V5 V6]; [P1 V5 V6] +B; xn--52-dwx47758j.xn--kd3hk431k; [V5 V6]; [V5 V6] +B; -.-├򖦣; [P1 V3 V6]; [P1 V3 V6] +B; -.xn----ukp70432h; [V3 V6]; [V3 V6] +T; \u05A5\u076D。\u200D󠀘; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # ֥ݭ. +N; \u05A5\u076D。\u200D󠀘; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # ֥ݭ. +T; \u05A5\u076D。\u200D󠀘; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # ֥ݭ. +N; \u05A5\u076D。\u200D󠀘; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # ֥ݭ. +B; xn--wcb62g.xn--p526e; [B1 V5 V6]; [B1 V5 V6] # ֥ݭ. +B; xn--wcb62g.xn--1ugy8001l; [B1 C2 V5 V6]; [B1 C2 V5 V6] # ֥ݭ. +T; 쥥󔏉Ⴎ.\u200C⒈⒈𐫒; [B1 C1 P1 V6]; [B1 P1 V6] # 쥥Ⴎ.⒈⒈𐫒 +N; 쥥󔏉Ⴎ.\u200C⒈⒈𐫒; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 쥥Ⴎ.⒈⒈𐫒 +T; 쥥󔏉Ⴎ.\u200C⒈⒈𐫒; [B1 C1 P1 V6]; [B1 P1 V6] # 쥥Ⴎ.⒈⒈𐫒 +N; 쥥󔏉Ⴎ.\u200C⒈⒈𐫒; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 쥥Ⴎ.⒈⒈𐫒 +T; 쥥󔏉Ⴎ.\u200C1.1.𐫒; [B1 C1 P1 V6]; [B1 P1 V6] # 쥥Ⴎ.1.1.𐫒 +N; 쥥󔏉Ⴎ.\u200C1.1.𐫒; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 쥥Ⴎ.1.1.𐫒 +T; 쥥󔏉Ⴎ.\u200C1.1.𐫒; [B1 C1 P1 V6]; [B1 P1 V6] # 쥥Ⴎ.1.1.𐫒 +N; 쥥󔏉Ⴎ.\u200C1.1.𐫒; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 쥥Ⴎ.1.1.𐫒 +T; 쥥󔏉ⴎ.\u200C1.1.𐫒; [B1 C1 P1 V6]; [B1 P1 V6] # 쥥ⴎ.1.1.𐫒 +N; 쥥󔏉ⴎ.\u200C1.1.𐫒; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 쥥ⴎ.1.1.𐫒 +T; 쥥󔏉ⴎ.\u200C1.1.𐫒; [B1 C1 P1 V6]; [B1 P1 V6] # 쥥ⴎ.1.1.𐫒 +N; 쥥󔏉ⴎ.\u200C1.1.𐫒; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 쥥ⴎ.1.1.𐫒 +B; xn--5kj3511ccyw3h.1.1.xn--7w9c; [B1 V6]; [B1 V6] +B; xn--5kj3511ccyw3h.xn--1-rgn.1.xn--7w9c; [B1 C1 V6]; [B1 C1 V6] # 쥥ⴎ.1.1.𐫒 +B; xn--mnd7865gcy28g.1.1.xn--7w9c; [B1 V6]; [B1 V6] +B; xn--mnd7865gcy28g.xn--1-rgn.1.xn--7w9c; [B1 C1 V6]; [B1 C1 V6] # 쥥Ⴎ.1.1.𐫒 +T; 쥥󔏉ⴎ.\u200C⒈⒈𐫒; [B1 C1 P1 V6]; [B1 P1 V6] # 쥥ⴎ.⒈⒈𐫒 +N; 쥥󔏉ⴎ.\u200C⒈⒈𐫒; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 쥥ⴎ.⒈⒈𐫒 +T; 쥥󔏉ⴎ.\u200C⒈⒈𐫒; [B1 C1 P1 V6]; [B1 P1 V6] # 쥥ⴎ.⒈⒈𐫒 +N; 쥥󔏉ⴎ.\u200C⒈⒈𐫒; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 쥥ⴎ.⒈⒈𐫒 +B; xn--5kj3511ccyw3h.xn--tsha6797o; [B1 V6]; [B1 V6] +B; xn--5kj3511ccyw3h.xn--0ug88oa0396u; [B1 C1 V6]; [B1 C1 V6] # 쥥ⴎ.⒈⒈𐫒 +B; xn--mnd7865gcy28g.xn--tsha6797o; [B1 V6]; [B1 V6] +B; xn--mnd7865gcy28g.xn--0ug88oa0396u; [B1 C1 V6]; [B1 C1 V6] # 쥥Ⴎ.⒈⒈𐫒 +B; \u0827𝟶\u06A0-。𑄳; [B1 B3 B6 V3 V5]; [B1 B3 B6 V3 V5] # ࠧ0ڠ-.𑄳 +B; \u08270\u06A0-。𑄳; [B1 B3 B6 V3 V5]; [B1 B3 B6 V3 V5] # ࠧ0ڠ-.𑄳 +B; xn--0--p3d67m.xn--v80d; [B1 B3 B6 V3 V5]; [B1 B3 B6 V3 V5] # ࠧ0ڠ-.𑄳 +T; ς.\uFDC1🞛⒈; [P1 V6]; [P1 V6] # ς.فمي🞛⒈ +N; ς.\uFDC1🞛⒈; [P1 V6]; [P1 V6] # ς.فمي🞛⒈ +T; ς.\u0641\u0645\u064A🞛1.; ; xn--4xa.xn--1-gocmu97674d.; NV8 # ς.فمي🞛1. +N; ς.\u0641\u0645\u064A🞛1.; ; xn--3xa.xn--1-gocmu97674d.; NV8 # ς.فمي🞛1. +B; Σ.\u0641\u0645\u064A🞛1.; σ.\u0641\u0645\u064A🞛1.; xn--4xa.xn--1-gocmu97674d.; NV8 # σ.فمي🞛1. 
+B; σ.\u0641\u0645\u064A🞛1.; ; xn--4xa.xn--1-gocmu97674d.; NV8 # σ.فمي🞛1. +B; xn--4xa.xn--1-gocmu97674d.; σ.\u0641\u0645\u064A🞛1.; xn--4xa.xn--1-gocmu97674d.; NV8 # σ.فمي🞛1. +B; xn--3xa.xn--1-gocmu97674d.; ς.\u0641\u0645\u064A🞛1.; xn--3xa.xn--1-gocmu97674d.; NV8 # ς.فمي🞛1. +B; Σ.\uFDC1🞛⒈; [P1 V6]; [P1 V6] # σ.فمي🞛⒈ +B; σ.\uFDC1🞛⒈; [P1 V6]; [P1 V6] # σ.فمي🞛⒈ +B; xn--4xa.xn--dhbip2802atb20c; [V6]; [V6] # σ.فمي🞛⒈ +B; xn--3xa.xn--dhbip2802atb20c; [V6]; [V6] # ς.فمي🞛⒈ +B; 🗩-。𐹻󐞆񥉮; [B1 P1 V3 V6]; [B1 P1 V3 V6] +B; 🗩-。𐹻󐞆񥉮; [B1 P1 V3 V6]; [B1 P1 V3 V6] +B; xn----6t3s.xn--zo0d4811u6ru6a; [B1 V3 V6]; [B1 V3 V6] +T; 𐡜-🔪。𝟻\u200C𐿀; [B1 B3 C1 P1 V6]; [B1 B3 P1 V6] # 𐡜-🔪.5 +N; 𐡜-🔪。𝟻\u200C𐿀; [B1 B3 C1 P1 V6]; [B1 B3 C1 P1 V6] # 𐡜-🔪.5 +T; 𐡜-🔪。5\u200C𐿀; [B1 B3 C1 P1 V6]; [B1 B3 P1 V6] # 𐡜-🔪.5 +N; 𐡜-🔪。5\u200C𐿀; [B1 B3 C1 P1 V6]; [B1 B3 C1 P1 V6] # 𐡜-🔪.5 +B; xn----5j4iv089c.xn--5-bn7i; [B1 B3 V6]; [B1 B3 V6] +B; xn----5j4iv089c.xn--5-sgn7149h; [B1 B3 C1 V6]; [B1 B3 C1 V6] # 𐡜-🔪.5 +T; 𐹣늿\u200Dß.\u07CF0\u05BC; [B1 C2]; [B1] # 𐹣늿ß.ߏ0ּ +N; 𐹣늿\u200Dß.\u07CF0\u05BC; [B1 C2]; [B1 C2] # 𐹣늿ß.ߏ0ּ +T; 𐹣늿\u200Dß.\u07CF0\u05BC; [B1 C2]; [B1] # 𐹣늿ß.ߏ0ּ +N; 𐹣늿\u200Dß.\u07CF0\u05BC; [B1 C2]; [B1 C2] # 𐹣늿ß.ߏ0ּ +T; 𐹣늿\u200Dß.\u07CF0\u05BC; [B1 C2]; [B1] # 𐹣늿ß.ߏ0ּ +N; 𐹣늿\u200Dß.\u07CF0\u05BC; [B1 C2]; [B1 C2] # 𐹣늿ß.ߏ0ּ +T; 𐹣늿\u200Dß.\u07CF0\u05BC; [B1 C2]; [B1] # 𐹣늿ß.ߏ0ּ +N; 𐹣늿\u200Dß.\u07CF0\u05BC; [B1 C2]; [B1 C2] # 𐹣늿ß.ߏ0ּ +T; 𐹣늿\u200DSS.\u07CF0\u05BC; [B1 C2]; [B1] # 𐹣늿ss.ߏ0ּ +N; 𐹣늿\u200DSS.\u07CF0\u05BC; [B1 C2]; [B1 C2] # 𐹣늿ss.ߏ0ּ +T; 𐹣늿\u200DSS.\u07CF0\u05BC; [B1 C2]; [B1] # 𐹣늿ss.ߏ0ּ +N; 𐹣늿\u200DSS.\u07CF0\u05BC; [B1 C2]; [B1 C2] # 𐹣늿ss.ߏ0ּ +T; 𐹣늿\u200Dss.\u07CF0\u05BC; [B1 C2]; [B1] # 𐹣늿ss.ߏ0ּ +N; 𐹣늿\u200Dss.\u07CF0\u05BC; [B1 C2]; [B1 C2] # 𐹣늿ss.ߏ0ּ +T; 𐹣늿\u200Dss.\u07CF0\u05BC; [B1 C2]; [B1] # 𐹣늿ss.ߏ0ּ +N; 𐹣늿\u200Dss.\u07CF0\u05BC; [B1 C2]; [B1 C2] # 𐹣늿ss.ߏ0ּ +T; 𐹣늿\u200DSs.\u07CF0\u05BC; [B1 C2]; [B1] # 𐹣늿ss.ߏ0ּ +N; 𐹣늿\u200DSs.\u07CF0\u05BC; [B1 C2]; [B1 C2] # 𐹣늿ss.ߏ0ּ +T; 𐹣늿\u200DSs.\u07CF0\u05BC; [B1 C2]; [B1] # 𐹣늿ss.ߏ0ּ +N; 𐹣늿\u200DSs.\u07CF0\u05BC; [B1 C2]; [B1 C2] # 𐹣늿ss.ߏ0ּ +B; xn--ss-i05i7041a.xn--0-vgc50n; [B1]; [B1] # 𐹣늿ss.ߏ0ּ +B; xn--ss-l1tu910fo0xd.xn--0-vgc50n; [B1 C2]; [B1 C2] # 𐹣늿ss.ߏ0ּ +B; xn--zca770n5s4hev6c.xn--0-vgc50n; [B1 C2]; [B1 C2] # 𐹣늿ß.ߏ0ּ +T; 𐹣늿\u200DSS.\u07CF0\u05BC; [B1 C2]; [B1] # 𐹣늿ss.ߏ0ּ +N; 𐹣늿\u200DSS.\u07CF0\u05BC; [B1 C2]; [B1 C2] # 𐹣늿ss.ߏ0ּ +T; 𐹣늿\u200DSS.\u07CF0\u05BC; [B1 C2]; [B1] # 𐹣늿ss.ߏ0ּ +N; 𐹣늿\u200DSS.\u07CF0\u05BC; [B1 C2]; [B1 C2] # 𐹣늿ss.ߏ0ּ +T; 𐹣늿\u200Dss.\u07CF0\u05BC; [B1 C2]; [B1] # 𐹣늿ss.ߏ0ּ +N; 𐹣늿\u200Dss.\u07CF0\u05BC; [B1 C2]; [B1 C2] # 𐹣늿ss.ߏ0ּ +T; 𐹣늿\u200Dss.\u07CF0\u05BC; [B1 C2]; [B1] # 𐹣늿ss.ߏ0ּ +N; 𐹣늿\u200Dss.\u07CF0\u05BC; [B1 C2]; [B1 C2] # 𐹣늿ss.ߏ0ּ +T; 𐹣늿\u200DSs.\u07CF0\u05BC; [B1 C2]; [B1] # 𐹣늿ss.ߏ0ּ +N; 𐹣늿\u200DSs.\u07CF0\u05BC; [B1 C2]; [B1 C2] # 𐹣늿ss.ߏ0ּ +T; 𐹣늿\u200DSs.\u07CF0\u05BC; [B1 C2]; [B1] # 𐹣늿ss.ߏ0ּ +N; 𐹣늿\u200DSs.\u07CF0\u05BC; [B1 C2]; [B1 C2] # 𐹣늿ss.ߏ0ּ +B; 9󠇥.󪴴ᢓ; [P1 V6]; [P1 V6] +B; 9󠇥.󪴴ᢓ; [P1 V6]; [P1 V6] +B; 9.xn--dbf91222q; [V6]; [V6] +T; \u200C\uFFA0.𐫭🠗ß⽟; [B1 B2 B3 C1 P1 V6]; [B2 B3 P1 V6] # .𐫭🠗ß玉 +N; \u200C\uFFA0.𐫭🠗ß⽟; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # .𐫭🠗ß玉 +T; \u200C\u1160.𐫭🠗ß玉; [B1 B2 B3 C1 P1 V6]; [B2 B3 P1 V6] # .𐫭🠗ß玉 +N; \u200C\u1160.𐫭🠗ß玉; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # .𐫭🠗ß玉 +T; \u200C\u1160.𐫭🠗SS玉; [B1 B2 B3 C1 P1 V6]; [B2 B3 P1 V6] # .𐫭🠗ss玉 +N; \u200C\u1160.𐫭🠗SS玉; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # .𐫭🠗ss玉 +T; \u200C\u1160.𐫭🠗ss玉; [B1 B2 B3 C1 P1 V6]; [B2 B3 P1 V6] # .𐫭🠗ss玉 +N; \u200C\u1160.𐫭🠗ss玉; 
[B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # .𐫭🠗ss玉 +T; \u200C\u1160.𐫭🠗Ss玉; [B1 B2 B3 C1 P1 V6]; [B2 B3 P1 V6] # .𐫭🠗ss玉 +N; \u200C\u1160.𐫭🠗Ss玉; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # .𐫭🠗ss玉 +B; xn--psd.xn--ss-je6eq954cp25j; [B2 B3 V6]; [B2 B3 V6] # .𐫭🠗ss玉 +B; xn--psd526e.xn--ss-je6eq954cp25j; [B1 B2 B3 C1 V6]; [B1 B2 B3 C1 V6] # .𐫭🠗ss玉 +B; xn--psd526e.xn--zca2289c550e0iwi; [B1 B2 B3 C1 V6]; [B1 B2 B3 C1 V6] # .𐫭🠗ß玉 +T; \u200C\uFFA0.𐫭🠗SS⽟; [B1 B2 B3 C1 P1 V6]; [B2 B3 P1 V6] # .𐫭🠗ss玉 +N; \u200C\uFFA0.𐫭🠗SS⽟; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # .𐫭🠗ss玉 +T; \u200C\uFFA0.𐫭🠗ss⽟; [B1 B2 B3 C1 P1 V6]; [B2 B3 P1 V6] # .𐫭🠗ss玉 +N; \u200C\uFFA0.𐫭🠗ss⽟; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # .𐫭🠗ss玉 +T; \u200C\uFFA0.𐫭🠗Ss⽟; [B1 B2 B3 C1 P1 V6]; [B2 B3 P1 V6] # .𐫭🠗ss玉 +N; \u200C\uFFA0.𐫭🠗Ss⽟; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # .𐫭🠗ss玉 +B; xn--cl7c.xn--ss-je6eq954cp25j; [B2 B3 V6]; [B2 B3 V6] # .𐫭🠗ss玉 +B; xn--0ug7719f.xn--ss-je6eq954cp25j; [B1 B2 B3 C1 V6]; [B1 B2 B3 C1 V6] # .𐫭🠗ss玉 +B; xn--0ug7719f.xn--zca2289c550e0iwi; [B1 B2 B3 C1 V6]; [B1 B2 B3 C1 V6] # .𐫭🠗ß玉 +T; ︒Ⴖ\u0366.\u200C; [C1 P1 V6]; [P1 V6] # ︒Ⴖͦ. +N; ︒Ⴖ\u0366.\u200C; [C1 P1 V6]; [C1 P1 V6] # ︒Ⴖͦ. +T; 。Ⴖ\u0366.\u200C; [C1 P1 V6 A4_2]; [P1 V6 A4_2] # .Ⴖͦ. +N; 。Ⴖ\u0366.\u200C; [C1 P1 V6 A4_2]; [C1 P1 V6 A4_2] # .Ⴖͦ. +T; 。ⴖ\u0366.\u200C; [C1 A4_2]; [A4_2] # .ⴖͦ. +N; 。ⴖ\u0366.\u200C; [C1 A4_2]; [C1 A4_2] # .ⴖͦ. +B; .xn--hva754s.; [A4_2]; [A4_2] # .ⴖͦ. +B; .xn--hva754s.xn--0ug; [C1 A4_2]; [C1 A4_2] # .ⴖͦ. +B; .xn--hva929d.; [V6 A4_2]; [V6 A4_2] # .Ⴖͦ. +B; .xn--hva929d.xn--0ug; [C1 V6 A4_2]; [C1 V6 A4_2] # .Ⴖͦ. +T; ︒ⴖ\u0366.\u200C; [C1 P1 V6]; [P1 V6] # ︒ⴖͦ. +N; ︒ⴖ\u0366.\u200C; [C1 P1 V6]; [C1 P1 V6] # ︒ⴖͦ. +B; xn--hva754sy94k.; [V6]; [V6] # ︒ⴖͦ. +B; xn--hva754sy94k.xn--0ug; [C1 V6]; [C1 V6] # ︒ⴖͦ. +B; xn--hva929dl29p.; [V6]; [V6] # ︒Ⴖͦ. +B; xn--hva929dl29p.xn--0ug; [C1 V6]; [C1 V6] # ︒Ⴖͦ. +B; xn--hva754s.; ⴖ\u0366.; xn--hva754s. # ⴖͦ. +B; ⴖ\u0366.; ; xn--hva754s. # ⴖͦ. +B; Ⴖ\u0366.; [P1 V6]; [P1 V6] # Ⴖͦ. +B; xn--hva929d.; [V6]; [V6] # Ⴖͦ. +T; \u08BB.\u200CႣ𞀒; [B1 C1 P1 V6]; [P1 V6] # ࢻ.Ⴃ𞀒 +N; \u08BB.\u200CႣ𞀒; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ࢻ.Ⴃ𞀒 +T; \u08BB.\u200CႣ𞀒; [B1 C1 P1 V6]; [P1 V6] # ࢻ.Ⴃ𞀒 +N; \u08BB.\u200CႣ𞀒; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ࢻ.Ⴃ𞀒 +T; \u08BB.\u200Cⴃ𞀒; [B1 C1]; xn--hzb.xn--ukj4430l # ࢻ.ⴃ𞀒 +N; \u08BB.\u200Cⴃ𞀒; [B1 C1]; [B1 C1] # ࢻ.ⴃ𞀒 +B; xn--hzb.xn--ukj4430l; \u08BB.ⴃ𞀒; xn--hzb.xn--ukj4430l # ࢻ.ⴃ𞀒 +B; \u08BB.ⴃ𞀒; ; xn--hzb.xn--ukj4430l # ࢻ.ⴃ𞀒 +B; \u08BB.Ⴃ𞀒; [P1 V6]; [P1 V6] # ࢻ.Ⴃ𞀒 +B; xn--hzb.xn--bnd2938u; [V6]; [V6] # ࢻ.Ⴃ𞀒 +B; xn--hzb.xn--0ug822cp045a; [B1 C1]; [B1 C1] # ࢻ.ⴃ𞀒 +B; xn--hzb.xn--bnd300f7225a; [B1 C1 V6]; [B1 C1 V6] # ࢻ.Ⴃ𞀒 +T; \u08BB.\u200Cⴃ𞀒; [B1 C1]; xn--hzb.xn--ukj4430l # ࢻ.ⴃ𞀒 +N; \u08BB.\u200Cⴃ𞀒; [B1 C1]; [B1 C1] # ࢻ.ⴃ𞀒 +T; \u200D\u200C。2䫷󠧷; [C1 C2 P1 V6]; [P1 V6 A4_2] # .2䫷 +N; \u200D\u200C。2䫷󠧷; [C1 C2 P1 V6]; [C1 C2 P1 V6] # .2䫷 +T; \u200D\u200C。2䫷󠧷; [C1 C2 P1 V6]; [P1 V6 A4_2] # .2䫷 +N; \u200D\u200C。2䫷󠧷; [C1 C2 P1 V6]; [C1 C2 P1 V6] # .2䫷 +B; .xn--2-me5ay1273i; [V6 A4_2]; [V6 A4_2] +B; xn--0ugb.xn--2-me5ay1273i; [C1 C2 V6]; [C1 C2 V6] # .2䫷 +B; -𞀤󜠐。򈬖; [P1 V3 V6]; [P1 V3 V6] +B; xn----rq4re4997d.xn--l707b; [V3 V6]; [V3 V6] +T; 󳛂︒\u200C㟀.\u0624⒈; [C1 P1 V6]; [P1 V6] # ︒㟀.ؤ⒈ +N; 󳛂︒\u200C㟀.\u0624⒈; [C1 P1 V6]; [C1 P1 V6] # ︒㟀.ؤ⒈ +T; 󳛂︒\u200C㟀.\u0648\u0654⒈; [C1 P1 V6]; [P1 V6] # ︒㟀.ؤ⒈ +N; 󳛂︒\u200C㟀.\u0648\u0654⒈; [C1 P1 V6]; [C1 P1 V6] # ︒㟀.ؤ⒈ +T; 󳛂。\u200C㟀.\u06241.; [B1 C1 P1 V6]; [P1 V6] # .㟀.ؤ1. +N; 󳛂。\u200C㟀.\u06241.; [B1 C1 P1 V6]; [B1 C1 P1 V6] # .㟀.ؤ1. 
+T; 󳛂。\u200C㟀.\u0648\u06541.; [B1 C1 P1 V6]; [P1 V6] # .㟀.ؤ1. +N; 󳛂。\u200C㟀.\u0648\u06541.; [B1 C1 P1 V6]; [B1 C1 P1 V6] # .㟀.ؤ1. +B; xn--z272f.xn--etl.xn--1-smc.; [V6]; [V6] # .㟀.ؤ1. +B; xn--z272f.xn--0ug754g.xn--1-smc.; [B1 C1 V6]; [B1 C1 V6] # .㟀.ؤ1. +B; xn--etlt457ccrq7h.xn--jgb476m; [V6]; [V6] # ︒㟀.ؤ⒈ +B; xn--0ug754gxl4ldlt0k.xn--jgb476m; [C1 V6]; [C1 V6] # ︒㟀.ؤ⒈ +T; 𑲜\u07CA𝅼。-\u200D; [B1 C2 V3 V5]; [B1 V3 V5] # 𑲜ߊ𝅼.- +N; 𑲜\u07CA𝅼。-\u200D; [B1 C2 V3 V5]; [B1 C2 V3 V5] # 𑲜ߊ𝅼.- +B; xn--lsb5482l7nre.-; [B1 V3 V5]; [B1 V3 V5] # 𑲜ߊ𝅼.- +B; xn--lsb5482l7nre.xn----ugn; [B1 C2 V3 V5]; [B1 C2 V3 V5] # 𑲜ߊ𝅼.- +T; \u200C.Ⴉ≠𐫶; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6 A4_2] # .Ⴉ≠𐫶 +N; \u200C.Ⴉ≠𐫶; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .Ⴉ≠𐫶 +T; \u200C.Ⴉ=\u0338𐫶; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6 A4_2] # .Ⴉ≠𐫶 +N; \u200C.Ⴉ=\u0338𐫶; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .Ⴉ≠𐫶 +T; \u200C.Ⴉ≠𐫶; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6 A4_2] # .Ⴉ≠𐫶 +N; \u200C.Ⴉ≠𐫶; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .Ⴉ≠𐫶 +T; \u200C.Ⴉ=\u0338𐫶; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6 A4_2] # .Ⴉ≠𐫶 +N; \u200C.Ⴉ=\u0338𐫶; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .Ⴉ≠𐫶 +T; \u200C.ⴉ=\u0338𐫶; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6 A4_2] # .ⴉ≠𐫶 +N; \u200C.ⴉ=\u0338𐫶; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .ⴉ≠𐫶 +T; \u200C.ⴉ≠𐫶; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6 A4_2] # .ⴉ≠𐫶 +N; \u200C.ⴉ≠𐫶; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .ⴉ≠𐫶 +B; .xn--1chx23bzj4p; [B5 B6 V6 A4_2]; [B5 B6 V6 A4_2] +B; xn--0ug.xn--1chx23bzj4p; [B1 B5 B6 C1 V6]; [B1 B5 B6 C1 V6] # .ⴉ≠𐫶 +B; .xn--hnd481gv73o; [B5 B6 V6 A4_2]; [B5 B6 V6 A4_2] +B; xn--0ug.xn--hnd481gv73o; [B1 B5 B6 C1 V6]; [B1 B5 B6 C1 V6] # .Ⴉ≠𐫶 +T; \u200C.ⴉ=\u0338𐫶; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6 A4_2] # .ⴉ≠𐫶 +N; \u200C.ⴉ=\u0338𐫶; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .ⴉ≠𐫶 +T; \u200C.ⴉ≠𐫶; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6 A4_2] # .ⴉ≠𐫶 +N; \u200C.ⴉ≠𐫶; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .ⴉ≠𐫶 +T; \u0750。≯ς; [B1 P1 V6]; [B1 P1 V6] # ݐ.≯ς +N; \u0750。≯ς; [B1 P1 V6]; [B1 P1 V6] # ݐ.≯ς +T; \u0750。>\u0338ς; [B1 P1 V6]; [B1 P1 V6] # ݐ.≯ς +N; \u0750。>\u0338ς; [B1 P1 V6]; [B1 P1 V6] # ݐ.≯ς +B; \u0750。>\u0338Σ; [B1 P1 V6]; [B1 P1 V6] # ݐ.≯σ +B; \u0750。≯Σ; [B1 P1 V6]; [B1 P1 V6] # ݐ.≯σ +B; \u0750。≯σ; [B1 P1 V6]; [B1 P1 V6] # ݐ.≯σ +B; \u0750。>\u0338σ; [B1 P1 V6]; [B1 P1 V6] # ݐ.≯σ +B; xn--3ob.xn--4xa718m; [B1 V6]; [B1 V6] # ݐ.≯σ +B; xn--3ob.xn--3xa918m; [B1 V6]; [B1 V6] # ݐ.≯ς +B; \u07FC𐸆.𓖏︒񊨩Ⴐ; [P1 V6]; [P1 V6] # .︒Ⴐ +B; \u07FC𐸆.𓖏。񊨩Ⴐ; [P1 V6]; [P1 V6] # ..Ⴐ +B; \u07FC𐸆.𓖏。񊨩ⴐ; [P1 V6]; [P1 V6] # ..ⴐ +B; xn--0tb8725k.xn--tu8d.xn--7kj73887a; [V6]; [V6] # ..ⴐ +B; xn--0tb8725k.xn--tu8d.xn--ond97931d; [V6]; [V6] # ..Ⴐ +B; \u07FC𐸆.𓖏︒񊨩ⴐ; [P1 V6]; [P1 V6] # .︒ⴐ +B; xn--0tb8725k.xn--7kj9008dt18a7py9c; [V6]; [V6] # .︒ⴐ +B; xn--0tb8725k.xn--ond3562jt18a7py9c; [V6]; [V6] # .︒Ⴐ +B; Ⴥ⚭󠖫⋃。𑌼; [P1 V5 V6]; [P1 V5 V6] +B; Ⴥ⚭󠖫⋃。𑌼; [P1 V5 V6]; [P1 V5 V6] +B; ⴥ⚭󠖫⋃。𑌼; [P1 V5 V6]; [P1 V5 V6] +B; xn--vfh16m67gx1162b.xn--ro1d; [V5 V6]; [V5 V6] +B; xn--9nd623g4zc5z060c.xn--ro1d; [V5 V6]; [V5 V6] +B; ⴥ⚭󠖫⋃。𑌼; [P1 V5 V6]; [P1 V5 V6] +B; 🄈。󠷳\u0844; [B1 P1 V6]; [B1 P1 V6] # 🄈.ࡄ +B; 7,。󠷳\u0844; [B1 P1 V6]; [B1 P1 V6] # 7,.ࡄ +B; 7,.xn--2vb13094p; [B1 P1 V6]; [B1 P1 V6] # 7,.ࡄ +B; xn--107h.xn--2vb13094p; [B1 V6]; [B1 V6] # 🄈.ࡄ +T; ≮\u0846。섖쮖ß; [B1 P1 V6]; [B1 P1 V6] # ≮ࡆ.섖쮖ß +N; ≮\u0846。섖쮖ß; [B1 P1 V6]; [B1 P1 V6] # ≮ࡆ.섖쮖ß +T; <\u0338\u0846。섖쮖ß; [B1 P1 V6]; [B1 P1 V6] # ≮ࡆ.섖쮖ß +N; <\u0338\u0846。섖쮖ß; [B1 P1 V6]; [B1 P1 V6] # ≮ࡆ.섖쮖ß +B; <\u0338\u0846。섖쮖SS; [B1 P1 V6]; [B1 P1 V6] # ≮ࡆ.섖쮖ss +B; ≮\u0846。섖쮖SS; [B1 P1 V6]; [B1 P1 V6] # ≮ࡆ.섖쮖ss 
+B; ≮\u0846。섖쮖ss; [B1 P1 V6]; [B1 P1 V6] # ≮ࡆ.섖쮖ss +B; <\u0338\u0846。섖쮖ss; [B1 P1 V6]; [B1 P1 V6] # ≮ࡆ.섖쮖ss +B; <\u0338\u0846。섖쮖Ss; [B1 P1 V6]; [B1 P1 V6] # ≮ࡆ.섖쮖ss +B; ≮\u0846。섖쮖Ss; [B1 P1 V6]; [B1 P1 V6] # ≮ࡆ.섖쮖ss +B; xn--4vb505k.xn--ss-5z4j006a; [B1 V6]; [B1 V6] # ≮ࡆ.섖쮖ss +B; xn--4vb505k.xn--zca7259goug; [B1 V6]; [B1 V6] # ≮ࡆ.섖쮖ß +B; 󠆓⛏-。ꡒ; [V3]; [V3] +B; xn----o9p.xn--rc9a; [V3]; [V3] +T; \u07BB𐹳\u0626𑁆。\u08A7\u06B0\u200Cᢒ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # 𐹳ئ𑁆.ࢧڰᢒ +N; \u07BB𐹳\u0626𑁆。\u08A7\u06B0\u200Cᢒ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # 𐹳ئ𑁆.ࢧڰᢒ +T; \u07BB𐹳\u064A𑁆\u0654。\u08A7\u06B0\u200Cᢒ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # 𐹳ئ𑁆.ࢧڰᢒ +N; \u07BB𐹳\u064A𑁆\u0654。\u08A7\u06B0\u200Cᢒ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # 𐹳ئ𑁆.ࢧڰᢒ +B; xn--lgb32f2753cosb.xn--jkb91hlz1a; [B2 B3 V6]; [B2 B3 V6] # 𐹳ئ𑁆.ࢧڰᢒ +B; xn--lgb32f2753cosb.xn--jkb91hlz1azih; [B2 B3 V6]; [B2 B3 V6] # 𐹳ئ𑁆.ࢧڰᢒ +B; \u0816.𐨕𚚕; [B1 B2 B3 B6 P1 V5 V6]; [B1 B2 B3 B6 P1 V5 V6] # ࠖ.𐨕 +B; xn--rub.xn--tr9c248x; [B1 B2 B3 B6 V5 V6]; [B1 B2 B3 B6 V5 V6] # ࠖ.𐨕 +B; --。𽊆\u0767𐽋𞠬; [B1 B5 B6 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # --.ݧ𞠬 +B; --.xn--rpb6226k77pfh58p; [B1 B5 B6 V3 V6]; [B1 B5 B6 V3 V6] # --.ݧ𞠬 +B; 򛭦𐋥𹸐.≯\u08B0\u08A6󔛣; [B1 P1 V6]; [B1 P1 V6] # 𐋥.≯ࢰࢦ +B; 򛭦𐋥𹸐.>\u0338\u08B0\u08A6󔛣; [B1 P1 V6]; [B1 P1 V6] # 𐋥.≯ࢰࢦ +B; xn--887c2298i5mv6a.xn--vybt688qm8981a; [B1 V6]; [B1 V6] # 𐋥.≯ࢰࢦ +B; 䔛󠇒򤸞𐹧.-䤷; [B1 B5 B6 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] +B; 䔛󠇒򤸞𐹧.-䤷; [B1 B5 B6 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] +B; xn--2loy662coo60e.xn----0n4a; [B1 B5 B6 V3 V6]; [B1 B5 B6 V3 V6] +T; 𐹩.\u200D-; [B1 C2 V3]; [B1 V3] # 𐹩.- +N; 𐹩.\u200D-; [B1 C2 V3]; [B1 C2 V3] # 𐹩.- +T; 𐹩.\u200D-; [B1 C2 V3]; [B1 V3] # 𐹩.- +N; 𐹩.\u200D-; [B1 C2 V3]; [B1 C2 V3] # 𐹩.- +B; xn--ho0d.-; [B1 V3]; [B1 V3] +B; xn--ho0d.xn----tgn; [B1 C2 V3]; [B1 C2 V3] # 𐹩.- +B; 񂈦帷。≯萺\u1DC8-; [P1 V3 V6]; [P1 V3 V6] # 帷.≯萺᷈- +B; 񂈦帷。>\u0338萺\u1DC8-; [P1 V3 V6]; [P1 V3 V6] # 帷.≯萺᷈- +B; 񂈦帷。≯萺\u1DC8-; [P1 V3 V6]; [P1 V3 V6] # 帷.≯萺᷈- +B; 񂈦帷。>\u0338萺\u1DC8-; [P1 V3 V6]; [P1 V3 V6] # 帷.≯萺᷈- +B; xn--qutw175s.xn----mimu6tf67j; [V3 V6]; [V3 V6] # 帷.≯萺᷈- +T; \u200D攌\uABED。ᢖ-Ⴘ; [C2 P1 V6]; [P1 V6] # 攌꯭.ᢖ-Ⴘ +N; \u200D攌\uABED。ᢖ-Ⴘ; [C2 P1 V6]; [C2 P1 V6] # 攌꯭.ᢖ-Ⴘ +T; \u200D攌\uABED。ᢖ-ⴘ; [C2]; xn--p9ut19m.xn----mck373i # 攌꯭.ᢖ-ⴘ +N; \u200D攌\uABED。ᢖ-ⴘ; [C2]; [C2] # 攌꯭.ᢖ-ⴘ +B; xn--p9ut19m.xn----mck373i; 攌\uABED.ᢖ-ⴘ; xn--p9ut19m.xn----mck373i # 攌꯭.ᢖ-ⴘ +B; 攌\uABED.ᢖ-ⴘ; ; xn--p9ut19m.xn----mck373i # 攌꯭.ᢖ-ⴘ +B; 攌\uABED.ᢖ-Ⴘ; [P1 V6]; [P1 V6] # 攌꯭.ᢖ-Ⴘ +B; xn--p9ut19m.xn----k1g451d; [V6]; [V6] # 攌꯭.ᢖ-Ⴘ +B; xn--1ug592ykp6b.xn----mck373i; [C2]; [C2] # 攌꯭.ᢖ-ⴘ +B; xn--1ug592ykp6b.xn----k1g451d; [C2 V6]; [C2 V6] # 攌꯭.ᢖ-Ⴘ +T; \u200Cꖨ.⒗3툒۳; [C1 P1 V6]; [P1 V6] # ꖨ.⒗3툒۳ +N; \u200Cꖨ.⒗3툒۳; [C1 P1 V6]; [C1 P1 V6] # ꖨ.⒗3툒۳ +T; \u200Cꖨ.⒗3툒۳; [C1 P1 V6]; [P1 V6] # ꖨ.⒗3툒۳ +N; \u200Cꖨ.⒗3툒۳; [C1 P1 V6]; [C1 P1 V6] # ꖨ.⒗3툒۳ +T; \u200Cꖨ.16.3툒۳; [C1]; xn--9r8a.16.xn--3-nyc0117m # ꖨ.16.3툒۳ +N; \u200Cꖨ.16.3툒۳; [C1]; [C1] # ꖨ.16.3툒۳ +T; \u200Cꖨ.16.3툒۳; [C1]; xn--9r8a.16.xn--3-nyc0117m # ꖨ.16.3툒۳ +N; \u200Cꖨ.16.3툒۳; [C1]; [C1] # ꖨ.16.3툒۳ +B; xn--9r8a.16.xn--3-nyc0117m; ꖨ.16.3툒۳; xn--9r8a.16.xn--3-nyc0117m +B; ꖨ.16.3툒۳; ; xn--9r8a.16.xn--3-nyc0117m +B; ꖨ.16.3툒۳; ꖨ.16.3툒۳; xn--9r8a.16.xn--3-nyc0117m +B; xn--0ug2473c.16.xn--3-nyc0117m; [C1]; [C1] # ꖨ.16.3툒۳ +B; xn--9r8a.xn--3-nyc678tu07m; [V6]; [V6] +B; xn--0ug2473c.xn--3-nyc678tu07m; [C1 V6]; [C1 V6] # ꖨ.⒗3툒۳ +B; ⒈걾6.𐱁\u06D0; [B1 P1 V6]; [B1 P1 V6] # ⒈걾6.𐱁ې +B; ⒈걾6.𐱁\u06D0; [B1 P1 V6]; [B1 P1 V6] # ⒈걾6.𐱁ې +B; 1.걾6.𐱁\u06D0; [B1]; [B1] # 1.걾6.𐱁ې +B; 1.걾6.𐱁\u06D0; [B1]; [B1] # 1.걾6.𐱁ې +B; 1.xn--6-945e.xn--glb1794k; [B1]; [B1] # 1.걾6.𐱁ې 
+B; xn--6-dcps419c.xn--glb1794k; [B1 V6]; [B1 V6] # ⒈걾6.𐱁ې +B; 𐲞𝟶≮≮.󠀧\u0639; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 𐳞0≮≮.ع +B; 𐲞𝟶<\u0338<\u0338.󠀧\u0639; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 𐳞0≮≮.ع +B; 𐲞0≮≮.󠀧\u0639; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 𐳞0≮≮.ع +B; 𐲞0<\u0338<\u0338.󠀧\u0639; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 𐳞0≮≮.ع +B; 𐳞0<\u0338<\u0338.󠀧\u0639; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 𐳞0≮≮.ع +B; 𐳞0≮≮.󠀧\u0639; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 𐳞0≮≮.ع +B; xn--0-ngoa5711v.xn--4gb31034p; [B1 B3 V6]; [B1 B3 V6] # 𐳞0≮≮.ع +B; 𐳞𝟶<\u0338<\u0338.󠀧\u0639; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 𐳞0≮≮.ع +B; 𐳞𝟶≮≮.󠀧\u0639; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 𐳞0≮≮.ع +B; \u0AE3.𐹺\u115F; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # ૣ.𐹺 +B; xn--8fc.xn--osd3070k; [B1 B3 B6 V5 V6]; [B1 B3 B6 V5 V6] # ૣ.𐹺 +T; 𝟏𝨙⸖.\u200D; [C2]; xn--1-5bt6845n. # 1𝨙⸖. +N; 𝟏𝨙⸖.\u200D; [C2]; [C2] # 1𝨙⸖. +T; 1𝨙⸖.\u200D; [C2]; xn--1-5bt6845n. # 1𝨙⸖. +N; 1𝨙⸖.\u200D; [C2]; [C2] # 1𝨙⸖. +B; xn--1-5bt6845n.; 1𝨙⸖.; xn--1-5bt6845n.; NV8 +B; 1𝨙⸖.; ; xn--1-5bt6845n.; NV8 +B; xn--1-5bt6845n.xn--1ug; [C2]; [C2] # 1𝨙⸖. +T; 𞤐≠\u0726\u1A60。-\u200C\u07D5; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # 𞤲≠ܦ᩠.-ߕ +N; 𞤐≠\u0726\u1A60。-\u200C\u07D5; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # 𞤲≠ܦ᩠.-ߕ +T; 𞤐=\u0338\u0726\u1A60。-\u200C\u07D5; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # 𞤲≠ܦ᩠.-ߕ +N; 𞤐=\u0338\u0726\u1A60。-\u200C\u07D5; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # 𞤲≠ܦ᩠.-ߕ +T; 𞤐≠\u0726\u1A60。-\u200C\u07D5; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # 𞤲≠ܦ᩠.-ߕ +N; 𞤐≠\u0726\u1A60。-\u200C\u07D5; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # 𞤲≠ܦ᩠.-ߕ +T; 𞤐=\u0338\u0726\u1A60。-\u200C\u07D5; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # 𞤲≠ܦ᩠.-ߕ +N; 𞤐=\u0338\u0726\u1A60。-\u200C\u07D5; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # 𞤲≠ܦ᩠.-ߕ +T; 𞤲=\u0338\u0726\u1A60。-\u200C\u07D5; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # 𞤲≠ܦ᩠.-ߕ +N; 𞤲=\u0338\u0726\u1A60。-\u200C\u07D5; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # 𞤲≠ܦ᩠.-ߕ +T; 𞤲≠\u0726\u1A60。-\u200C\u07D5; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # 𞤲≠ܦ᩠.-ߕ +N; 𞤲≠\u0726\u1A60。-\u200C\u07D5; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # 𞤲≠ܦ᩠.-ߕ +B; xn--wnb859grzfzw60c.xn----kcd; [B1 V3 V6]; [B1 V3 V6] # 𞤲≠ܦ᩠.-ߕ +B; xn--wnb859grzfzw60c.xn----kcd017p; [B1 C1 V3 V6]; [B1 C1 V3 V6] # 𞤲≠ܦ᩠.-ߕ +T; 𞤲=\u0338\u0726\u1A60。-\u200C\u07D5; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # 𞤲≠ܦ᩠.-ߕ +N; 𞤲=\u0338\u0726\u1A60。-\u200C\u07D5; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # 𞤲≠ܦ᩠.-ߕ +T; 𞤲≠\u0726\u1A60。-\u200C\u07D5; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # 𞤲≠ܦ᩠.-ߕ +N; 𞤲≠\u0726\u1A60。-\u200C\u07D5; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # 𞤲≠ܦ᩠.-ߕ +B; 𐹰\u0368-ꡧ。\u0675; [B1]; [B1] # 𐹰ͨ-ꡧ.اٴ +B; 𐹰\u0368-ꡧ。\u0627\u0674; [B1]; [B1] # 𐹰ͨ-ꡧ.اٴ +B; xn----shb2387jgkqd.xn--mgb8m; [B1]; [B1] # 𐹰ͨ-ꡧ.اٴ +B; F󠅟。򏗅♚; [P1 V6]; [P1 V6] +B; F󠅟。򏗅♚; [P1 V6]; [P1 V6] +B; f󠅟。򏗅♚; [P1 V6]; [P1 V6] +B; f.xn--45hz6953f; [V6]; [V6] +B; f󠅟。򏗅♚; [P1 V6]; [P1 V6] +B; \u0B4D𑄴\u1DE9。𝟮Ⴘ𞀨񃥇; [P1 V5 V6]; [P1 V5 V6] # ୍𑄴ᷩ.2Ⴘ𞀨 +B; \u0B4D𑄴\u1DE9。2Ⴘ𞀨񃥇; [P1 V5 V6]; [P1 V5 V6] # ୍𑄴ᷩ.2Ⴘ𞀨 +B; \u0B4D𑄴\u1DE9。2ⴘ𞀨񃥇; [P1 V5 V6]; [P1 V5 V6] # ୍𑄴ᷩ.2ⴘ𞀨 +B; xn--9ic246gs21p.xn--2-nws2918ndrjr; [V5 V6]; [V5 V6] # ୍𑄴ᷩ.2ⴘ𞀨 +B; xn--9ic246gs21p.xn--2-k1g43076adrwq; [V5 V6]; [V5 V6] # ୍𑄴ᷩ.2Ⴘ𞀨 +B; \u0B4D𑄴\u1DE9。𝟮ⴘ𞀨񃥇; [P1 V5 V6]; [P1 V5 V6] # ୍𑄴ᷩ.2ⴘ𞀨 +T; 򓠭\u200C\u200C⒈。勉𑁅; [C1 P1 V6]; [P1 V6] # ⒈.勉𑁅 +N; 򓠭\u200C\u200C⒈。勉𑁅; [C1 P1 V6]; [C1 P1 V6] # ⒈.勉𑁅 +T; 򓠭\u200C\u200C1.。勉𑁅; [C1 P1 V6 A4_2]; [P1 V6 A4_2] # 1..勉𑁅 +N; 򓠭\u200C\u200C1.。勉𑁅; [C1 P1 V6 A4_2]; [C1 P1 V6 A4_2] # 1..勉𑁅 +B; xn--1-yi00h..xn--4grs325b; [V6 A4_2]; [V6 A4_2] +B; xn--1-rgna61159u..xn--4grs325b; [C1 V6 A4_2]; [C1 V6 A4_2] # 1..勉𑁅 +B; xn--tsh11906f.xn--4grs325b; [V6]; [V6] +B; 
xn--0uga855aez302a.xn--4grs325b; [C1 V6]; [C1 V6] # ⒈.勉𑁅 +B; ᡃ.玿񫈜󕞐; [P1 V6]; [P1 V6] +B; xn--27e.xn--7cy81125a0yq4a; [V6]; [V6] +T; \u200C\u200C。⒈≯𝟵; [C1 P1 V6]; [P1 V6 A4_2] # .⒈≯9 +N; \u200C\u200C。⒈≯𝟵; [C1 P1 V6]; [C1 P1 V6] # .⒈≯9 +T; \u200C\u200C。⒈>\u0338𝟵; [C1 P1 V6]; [P1 V6 A4_2] # .⒈≯9 +N; \u200C\u200C。⒈>\u0338𝟵; [C1 P1 V6]; [C1 P1 V6] # .⒈≯9 +T; \u200C\u200C。1.≯9; [C1 P1 V6]; [P1 V6 A4_2] # .1.≯9 +N; \u200C\u200C。1.≯9; [C1 P1 V6]; [C1 P1 V6] # .1.≯9 +T; \u200C\u200C。1.>\u03389; [C1 P1 V6]; [P1 V6 A4_2] # .1.≯9 +N; \u200C\u200C。1.>\u03389; [C1 P1 V6]; [C1 P1 V6] # .1.≯9 +B; .1.xn--9-ogo; [V6 A4_2]; [V6 A4_2] +B; xn--0uga.1.xn--9-ogo; [C1 V6]; [C1 V6] # .1.≯9 +B; .xn--9-ogo37g; [V6 A4_2]; [V6 A4_2] +B; xn--0uga.xn--9-ogo37g; [C1 V6]; [C1 V6] # .⒈≯9 +B; \u115F\u1DE0򐀁.𺻆≯𐮁; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ᷠ.≯𐮁 +B; \u115F\u1DE0򐀁.𺻆>\u0338𐮁; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ᷠ.≯𐮁 +B; xn--osd615d5659o.xn--hdh5192gkm6r; [B5 B6 V6]; [B5 B6 V6] # ᷠ.≯𐮁 +T; 󠄫𝩤\u200D\u063E.𝩩-\u081E󑼩; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # 𝩤ؾ.𝩩-ࠞ +N; 󠄫𝩤\u200D\u063E.𝩩-\u081E󑼩; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # 𝩤ؾ.𝩩-ࠞ +B; xn--9gb5080v.xn----qgd52296avol4f; [B1 V5 V6]; [B1 V5 V6] # 𝩤ؾ.𝩩-ࠞ +B; xn--9gb723kg862a.xn----qgd52296avol4f; [B1 C2 V5 V6]; [B1 C2 V5 V6] # 𝩤ؾ.𝩩-ࠞ +B; \u20DA.𑘿-; [V3 V5]; [V3 V5] # ⃚.𑘿- +B; \u20DA.𑘿-; [V3 V5]; [V3 V5] # ⃚.𑘿- +B; xn--w0g.xn----bd0j; [V3 V5]; [V3 V5] # ⃚.𑘿- +T; 䮸ß.󠵟󠭎紙\u08A8; [B1 P1 V6]; [B1 P1 V6] # 䮸ß.紙ࢨ +N; 䮸ß.󠵟󠭎紙\u08A8; [B1 P1 V6]; [B1 P1 V6] # 䮸ß.紙ࢨ +B; 䮸SS.󠵟󠭎紙\u08A8; [B1 P1 V6]; [B1 P1 V6] # 䮸ss.紙ࢨ +B; 䮸ss.󠵟󠭎紙\u08A8; [B1 P1 V6]; [B1 P1 V6] # 䮸ss.紙ࢨ +B; 䮸Ss.󠵟󠭎紙\u08A8; [B1 P1 V6]; [B1 P1 V6] # 䮸ss.紙ࢨ +B; xn--ss-sf1c.xn--xyb1370div70kpzba; [B1 V6]; [B1 V6] # 䮸ss.紙ࢨ +B; xn--zca5349a.xn--xyb1370div70kpzba; [B1 V6]; [B1 V6] # 䮸ß.紙ࢨ +B; -Ⴞ.-𝩨⅔𐦕; [B1 P1 V3 V6]; [B1 P1 V3 V6] +B; -Ⴞ.-𝩨2⁄3𐦕; [B1 P1 V3 V6]; [B1 P1 V3 V6] +B; -ⴞ.-𝩨2⁄3𐦕; [B1 V3]; [B1 V3] +B; xn----zws.xn---23-pt0a0433lk3jj; [B1 V3]; [B1 V3] +B; xn----w1g.xn---23-pt0a0433lk3jj; [B1 V3 V6]; [B1 V3 V6] +B; -ⴞ.-𝩨⅔𐦕; [B1 V3]; [B1 V3] +B; 󧈯𐹯\u0AC2。򖢨𐮁񇼖ᡂ; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 𐹯ૂ.𐮁ᡂ +B; 󧈯𐹯\u0AC2。򖢨𐮁񇼖ᡂ; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 𐹯ૂ.𐮁ᡂ +B; xn--bfc7604kv8m3g.xn--17e5565jl7zw4h16a; [B5 B6 V6]; [B5 B6 V6] # 𐹯ૂ.𐮁ᡂ +T; \u1082-\u200D\uA8EA.ꡊ\u200D񼸳; [C2 P1 V5 V6]; [P1 V5 V6] # ႂ-꣪.ꡊ +N; \u1082-\u200D\uA8EA.ꡊ\u200D񼸳; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ႂ-꣪.ꡊ +T; \u1082-\u200D\uA8EA.ꡊ\u200D񼸳; [C2 P1 V5 V6]; [P1 V5 V6] # ႂ-꣪.ꡊ +N; \u1082-\u200D\uA8EA.ꡊ\u200D񼸳; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ႂ-꣪.ꡊ +B; xn----gyg3618i.xn--jc9ao4185a; [V5 V6]; [V5 V6] # ႂ-꣪.ꡊ +B; xn----gyg250jio7k.xn--1ug8774cri56d; [C2 V5 V6]; [C2 V5 V6] # ႂ-꣪.ꡊ +B; ۱。≠\u0668; [B1 P1 V6]; [B1 P1 V6] # ۱.≠٨ +B; ۱。=\u0338\u0668; [B1 P1 V6]; [B1 P1 V6] # ۱.≠٨ +B; xn--emb.xn--hib334l; [B1 V6]; [B1 V6] # ۱.≠٨ +B; 𑈵廊.𐠍; [V5]; [V5] +B; xn--xytw701b.xn--yc9c; [V5]; [V5] +T; \u200D\u0356-.-Ⴐ\u0661; [B1 C2 P1 V3 V6]; [B1 P1 V3 V5 V6] # ͖-.-Ⴐ١ +N; \u200D\u0356-.-Ⴐ\u0661; [B1 C2 P1 V3 V6]; [B1 C2 P1 V3 V6] # ͖-.-Ⴐ١ +T; \u200D\u0356-.-Ⴐ\u0661; [B1 C2 P1 V3 V6]; [B1 P1 V3 V5 V6] # ͖-.-Ⴐ١ +N; \u200D\u0356-.-Ⴐ\u0661; [B1 C2 P1 V3 V6]; [B1 C2 P1 V3 V6] # ͖-.-Ⴐ١ +T; \u200D\u0356-.-ⴐ\u0661; [B1 C2 V3]; [B1 V3 V5] # ͖-.-ⴐ١ +N; \u200D\u0356-.-ⴐ\u0661; [B1 C2 V3]; [B1 C2 V3] # ͖-.-ⴐ١ +B; xn----rgb.xn----bqc2280a; [B1 V3 V5]; [B1 V3 V5] # ͖-.-ⴐ١ +B; xn----rgb661t.xn----bqc2280a; [B1 C2 V3]; [B1 C2 V3] # ͖-.-ⴐ١ +B; xn----rgb.xn----bqc030f; [B1 V3 V5 V6]; [B1 V3 V5 V6] # ͖-.-Ⴐ١ +B; xn----rgb661t.xn----bqc030f; [B1 C2 V3 V6]; [B1 C2 V3 V6] # ͖-.-Ⴐ١ +T; \u200D\u0356-.-ⴐ\u0661; [B1 C2 V3]; [B1 V3 V5] # 
͖-.-ⴐ١ +N; \u200D\u0356-.-ⴐ\u0661; [B1 C2 V3]; [B1 C2 V3] # ͖-.-ⴐ١ +B; \u063A\u0661挏󾯐.-; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # غ١挏.- +B; xn--5gb2f4205aqi47p.-; [B1 B2 B3 V3 V6]; [B1 B2 B3 V3 V6] # غ١挏.- +B; \u06EF。𐹧𞤽; [B1]; [B1] # ۯ.𐹧𞤽 +B; \u06EF。𐹧𞤽; [B1]; [B1] # ۯ.𐹧𞤽 +B; \u06EF。𐹧𞤛; [B1]; [B1] # ۯ.𐹧𞤽 +B; xn--cmb.xn--fo0dy848a; [B1]; [B1] # ۯ.𐹧𞤽 +B; \u06EF。𐹧𞤛; [B1]; [B1] # ۯ.𐹧𞤽 +B; Ⴞ𶛀𛗻.ᢗ릫; [P1 V6]; [P1 V6] +B; Ⴞ𶛀𛗻.ᢗ릫; [P1 V6]; [P1 V6] +B; Ⴞ𶛀𛗻.ᢗ릫; [P1 V6]; [P1 V6] +B; Ⴞ𶛀𛗻.ᢗ릫; [P1 V6]; [P1 V6] +B; ⴞ𶛀𛗻.ᢗ릫; [P1 V6]; [P1 V6] +B; ⴞ𶛀𛗻.ᢗ릫; [P1 V6]; [P1 V6] +B; xn--mlj0486jgl2j.xn--hbf6853f; [V6]; [V6] +B; xn--2nd8876sgl2j.xn--hbf6853f; [V6]; [V6] +B; ⴞ𶛀𛗻.ᢗ릫; [P1 V6]; [P1 V6] +B; ⴞ𶛀𛗻.ᢗ릫; [P1 V6]; [P1 V6] +T; 󠎃󗭞\u06B7𐹷。≯\u200C\u1DFE; [B1 C1 P1 V6]; [B1 P1 V6] # ڷ𐹷.≯᷾ +N; 󠎃󗭞\u06B7𐹷。≯\u200C\u1DFE; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ڷ𐹷.≯᷾ +T; 󠎃󗭞\u06B7𐹷。>\u0338\u200C\u1DFE; [B1 C1 P1 V6]; [B1 P1 V6] # ڷ𐹷.≯᷾ +N; 󠎃󗭞\u06B7𐹷。>\u0338\u200C\u1DFE; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ڷ𐹷.≯᷾ +T; 󠎃󗭞\u06B7𐹷。≯\u200C\u1DFE; [B1 C1 P1 V6]; [B1 P1 V6] # ڷ𐹷.≯᷾ +N; 󠎃󗭞\u06B7𐹷。≯\u200C\u1DFE; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ڷ𐹷.≯᷾ +T; 󠎃󗭞\u06B7𐹷。>\u0338\u200C\u1DFE; [B1 C1 P1 V6]; [B1 P1 V6] # ڷ𐹷.≯᷾ +N; 󠎃󗭞\u06B7𐹷。>\u0338\u200C\u1DFE; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ڷ𐹷.≯᷾ +B; xn--qkb4516kbi06fg2id.xn--zfg31q; [B1 V6]; [B1 V6] # ڷ𐹷.≯᷾ +B; xn--qkb4516kbi06fg2id.xn--zfg59fm0c; [B1 C1 V6]; [B1 C1 V6] # ڷ𐹷.≯᷾ +T; ᛎ󠅍󠐕\u200D。𐹾𐹪𐻝-; [B1 B6 C2 P1 V3 V6]; [B1 B6 P1 V3 V6] # ᛎ.𐹾𐹪- +N; ᛎ󠅍󠐕\u200D。𐹾𐹪𐻝-; [B1 B6 C2 P1 V3 V6]; [B1 B6 C2 P1 V3 V6] # ᛎ.𐹾𐹪- +T; ᛎ󠅍󠐕\u200D。𐹾𐹪𐻝-; [B1 B6 C2 P1 V3 V6]; [B1 B6 P1 V3 V6] # ᛎ.𐹾𐹪- +N; ᛎ󠅍󠐕\u200D。𐹾𐹪𐻝-; [B1 B6 C2 P1 V3 V6]; [B1 B6 C2 P1 V3 V6] # ᛎ.𐹾𐹪- +B; xn--fxe63563p.xn----q26i2bvu; [B1 B6 V3 V6]; [B1 B6 V3 V6] +B; xn--fxe848bq3411a.xn----q26i2bvu; [B1 B6 C2 V3 V6]; [B1 B6 C2 V3 V6] # ᛎ.𐹾𐹪- +B; 𐹶.𐫂; [B1]; [B1] +B; xn--uo0d.xn--rw9c; [B1]; [B1] +T; ß\u200D\u103A。⒈; [C2 P1 V6]; [P1 V6] # ß်.⒈ +N; ß\u200D\u103A。⒈; [C2 P1 V6]; [C2 P1 V6] # ß်.⒈ +T; ß\u200D\u103A。1.; [C2]; xn--ss-f4j.1. # ß်.1. +N; ß\u200D\u103A。1.; [C2]; [C2] # ß်.1. +T; SS\u200D\u103A。1.; [C2]; xn--ss-f4j.1. # ss်.1. +N; SS\u200D\u103A。1.; [C2]; [C2] # ss်.1. +T; ss\u200D\u103A。1.; [C2]; xn--ss-f4j.1. # ss်.1. +N; ss\u200D\u103A。1.; [C2]; [C2] # ss်.1. +T; Ss\u200D\u103A。1.; [C2]; xn--ss-f4j.1. # ss်.1. +N; Ss\u200D\u103A。1.; [C2]; [C2] # ss်.1. +B; xn--ss-f4j.1.; ss\u103A.1.; xn--ss-f4j.1. # ss်.1. +B; ss\u103A.1.; ; xn--ss-f4j.1. # ss်.1. +B; SS\u103A.1.; ss\u103A.1.; xn--ss-f4j.1. # ss်.1. +B; Ss\u103A.1.; ss\u103A.1.; xn--ss-f4j.1. # ss်.1. +B; xn--ss-f4j585j.1.; [C2]; [C2] # ss်.1. +B; xn--zca679eh2l.1.; [C2]; [C2] # ß်.1. +T; SS\u200D\u103A。⒈; [C2 P1 V6]; [P1 V6] # ss်.⒈ +N; SS\u200D\u103A。⒈; [C2 P1 V6]; [C2 P1 V6] # ss်.⒈ +T; ss\u200D\u103A。⒈; [C2 P1 V6]; [P1 V6] # ss်.⒈ +N; ss\u200D\u103A。⒈; [C2 P1 V6]; [C2 P1 V6] # ss်.⒈ +T; Ss\u200D\u103A。⒈; [C2 P1 V6]; [P1 V6] # ss်.⒈ +N; Ss\u200D\u103A。⒈; [C2 P1 V6]; [C2 P1 V6] # ss်.⒈ +B; xn--ss-f4j.xn--tsh; [V6]; [V6] # ss်.⒈ +B; xn--ss-f4j585j.xn--tsh; [C2 V6]; [C2 V6] # ss်.⒈ +B; xn--zca679eh2l.xn--tsh; [C2 V6]; [C2 V6] # ß်.⒈ +T; \u0B4D\u200C𙶵𞻘。\u200D; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # ୍. +N; \u0B4D\u200C𙶵𞻘。\u200D; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # ୍. +B; xn--9ic6417rn4xb.; [B1 V5 V6]; [B1 V5 V6] # ୍. +B; xn--9ic637hz82z32jc.xn--1ug; [B1 C2 V5 V6]; [B1 C2 V5 V6] # ୍. 
+B; 𐮅。\u06BC🁕; [B3]; [B3] # 𐮅.ڼ🁕 +B; 𐮅。\u06BC🁕; [B3]; [B3] # 𐮅.ڼ🁕 +B; xn--c29c.xn--vkb8871w; [B3]; [B3] # 𐮅.ڼ🁕 +T; \u0620\u17D2。𐫔󠀧\u200C𑈵; [B2 B3 C1 P1 V6]; [B2 B3 P1 V6] # ؠ្.𐫔𑈵 +N; \u0620\u17D2。𐫔󠀧\u200C𑈵; [B2 B3 C1 P1 V6]; [B2 B3 C1 P1 V6] # ؠ្.𐫔𑈵 +B; xn--fgb471g.xn--9w9c29jw3931a; [B2 B3 V6]; [B2 B3 V6] # ؠ្.𐫔𑈵 +B; xn--fgb471g.xn--0ug9853g7verp838a; [B2 B3 C1 V6]; [B2 B3 C1 V6] # ؠ្.𐫔𑈵 +B; 񋉕.𞣕𞤊; [B1 P1 V5 V6]; [B1 P1 V5 V6] +B; 񋉕.𞣕𞤬; [B1 P1 V5 V6]; [B1 P1 V5 V6] +B; xn--tf5w.xn--2b6hof; [B1 V5 V6]; [B1 V5 V6] +T; \u06CC𐨿.ß\u0F84𑍬; \u06CC𐨿.ß\u0F84𑍬; xn--clb2593k.xn--ss-toj6092t # ی𐨿.ß྄𑍬 +N; \u06CC𐨿.ß\u0F84𑍬; \u06CC𐨿.ß\u0F84𑍬; xn--clb2593k.xn--zca216edt0r # ی𐨿.ß྄𑍬 +T; \u06CC𐨿.ß\u0F84𑍬; ; xn--clb2593k.xn--ss-toj6092t # ی𐨿.ß྄𑍬 +N; \u06CC𐨿.ß\u0F84𑍬; ; xn--clb2593k.xn--zca216edt0r # ی𐨿.ß྄𑍬 +B; \u06CC𐨿.SS\u0F84𑍬; \u06CC𐨿.ss\u0F84𑍬; xn--clb2593k.xn--ss-toj6092t # ی𐨿.ss྄𑍬 +B; \u06CC𐨿.ss\u0F84𑍬; ; xn--clb2593k.xn--ss-toj6092t # ی𐨿.ss྄𑍬 +B; \u06CC𐨿.Ss\u0F84𑍬; \u06CC𐨿.ss\u0F84𑍬; xn--clb2593k.xn--ss-toj6092t # ی𐨿.ss྄𑍬 +B; xn--clb2593k.xn--ss-toj6092t; \u06CC𐨿.ss\u0F84𑍬; xn--clb2593k.xn--ss-toj6092t # ی𐨿.ss྄𑍬 +B; xn--clb2593k.xn--zca216edt0r; \u06CC𐨿.ß\u0F84𑍬; xn--clb2593k.xn--zca216edt0r # ی𐨿.ß྄𑍬 +B; \u06CC𐨿.SS\u0F84𑍬; \u06CC𐨿.ss\u0F84𑍬; xn--clb2593k.xn--ss-toj6092t # ی𐨿.ss྄𑍬 +B; \u06CC𐨿.ss\u0F84𑍬; \u06CC𐨿.ss\u0F84𑍬; xn--clb2593k.xn--ss-toj6092t # ی𐨿.ss྄𑍬 +B; \u06CC𐨿.Ss\u0F84𑍬; \u06CC𐨿.ss\u0F84𑍬; xn--clb2593k.xn--ss-toj6092t # ی𐨿.ss྄𑍬 +T; 𝟠≮\u200C。󠅱\u17B4; [C1 P1 V5 V6]; [P1 V5 V6] # 8≮. +N; 𝟠≮\u200C。󠅱\u17B4; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 8≮. +T; 𝟠<\u0338\u200C。󠅱\u17B4; [C1 P1 V5 V6]; [P1 V5 V6] # 8≮. +N; 𝟠<\u0338\u200C。󠅱\u17B4; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 8≮. +T; 8≮\u200C。󠅱\u17B4; [C1 P1 V5 V6]; [P1 V5 V6] # 8≮. +N; 8≮\u200C。󠅱\u17B4; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 8≮. +T; 8<\u0338\u200C。󠅱\u17B4; [C1 P1 V5 V6]; [P1 V5 V6] # 8≮. +N; 8<\u0338\u200C。󠅱\u17B4; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 8≮. +B; xn--8-ngo.xn--z3e; [V5 V6]; [V5 V6] # 8≮. +B; xn--8-sgn10i.xn--z3e; [C1 V5 V6]; [C1 V5 V6] # 8≮. 
+B; ᢕ≯︒񄂯.Ⴀ; [P1 V6]; [P1 V6] +B; ᢕ>\u0338︒񄂯.Ⴀ; [P1 V6]; [P1 V6] +B; ᢕ≯。񄂯.Ⴀ; [P1 V6]; [P1 V6] +B; ᢕ>\u0338。񄂯.Ⴀ; [P1 V6]; [P1 V6] +B; ᢕ>\u0338。񄂯.ⴀ; [P1 V6]; [P1 V6] +B; ᢕ≯。񄂯.ⴀ; [P1 V6]; [P1 V6] +B; xn--fbf851c.xn--ko1u.xn--rkj; [V6]; [V6] +B; xn--fbf851c.xn--ko1u.xn--7md; [V6]; [V6] +B; ᢕ>\u0338︒񄂯.ⴀ; [P1 V6]; [P1 V6] +B; ᢕ≯︒񄂯.ⴀ; [P1 V6]; [P1 V6] +B; xn--fbf851cq98poxw1a.xn--rkj; [V6]; [V6] +B; xn--fbf851cq98poxw1a.xn--7md; [V6]; [V6] +B; \u0F9F.-\u082A; [V3 V5]; [V3 V5] # ྟ.-ࠪ +B; \u0F9F.-\u082A; [V3 V5]; [V3 V5] # ྟ.-ࠪ +B; xn--vfd.xn----fhd; [V3 V5]; [V3 V5] # ྟ.-ࠪ +B; ᵬ󠆠.핒⒒⒈􈄦; [P1 V6]; [P1 V6] +B; ᵬ󠆠.핒⒒⒈􈄦; [P1 V6]; [P1 V6] +B; ᵬ󠆠.핒11.1.􈄦; [P1 V6]; [P1 V6] +B; ᵬ󠆠.핒11.1.􈄦; [P1 V6]; [P1 V6] +B; xn--tbg.xn--11-5o7k.1.xn--k469f; [V6]; [V6] +B; xn--tbg.xn--tsht7586kyts9l; [V6]; [V6] +T; ς𑓂𐋢.\u0668; [B1]; [B1] # ς𑓂𐋢.٨ +N; ς𑓂𐋢.\u0668; [B1]; [B1] # ς𑓂𐋢.٨ +T; ς𑓂𐋢.\u0668; [B1]; [B1] # ς𑓂𐋢.٨ +N; ς𑓂𐋢.\u0668; [B1]; [B1] # ς𑓂𐋢.٨ +B; Σ𑓂𐋢.\u0668; [B1]; [B1] # σ𑓂𐋢.٨ +B; σ𑓂𐋢.\u0668; [B1]; [B1] # σ𑓂𐋢.٨ +B; xn--4xa6371khhl.xn--hib; [B1]; [B1] # σ𑓂𐋢.٨ +B; xn--3xa8371khhl.xn--hib; [B1]; [B1] # ς𑓂𐋢.٨ +B; Σ𑓂𐋢.\u0668; [B1]; [B1] # σ𑓂𐋢.٨ +B; σ𑓂𐋢.\u0668; [B1]; [B1] # σ𑓂𐋢.٨ +T; \uA953\u200C𐋻\u200D.\u2DF8𞿄𐹲; [B1 B6 C2 P1 V5 V6]; [B1 P1 V5 V6] # ꥓𐋻.ⷸ𐹲 +N; \uA953\u200C𐋻\u200D.\u2DF8𞿄𐹲; [B1 B6 C2 P1 V5 V6]; [B1 B6 C2 P1 V5 V6] # ꥓𐋻.ⷸ𐹲 +B; xn--3j9a531o.xn--urju692efj0f; [B1 V5 V6]; [B1 V5 V6] # ꥓𐋻.ⷸ𐹲 +B; xn--0ugc8356he76c.xn--urju692efj0f; [B1 B6 C2 V5 V6]; [B1 B6 C2 V5 V6] # ꥓𐋻.ⷸ𐹲 +B; ⊼。񪧖\u0695; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # ⊼.ڕ +B; xn--ofh.xn--rjb13118f; [B1 B5 B6 V6]; [B1 B5 B6 V6] # ⊼.ڕ +B; 𐯬񖋔。󜳥; [B2 B3 P1 V6]; [B2 B3 P1 V6] +B; xn--949co370q.xn--7g25e; [B2 B3 V6]; [B2 B3 V6] +T; \u0601𑍧\u07DD。ς򬍘🀞\u17B5; [B1 B6 P1 V6]; [B1 B6 P1 V6] # 𑍧ߝ.ς🀞 +N; \u0601𑍧\u07DD。ς򬍘🀞\u17B5; [B1 B6 P1 V6]; [B1 B6 P1 V6] # 𑍧ߝ.ς🀞 +B; \u0601𑍧\u07DD。Σ򬍘🀞\u17B5; [B1 B6 P1 V6]; [B1 B6 P1 V6] # 𑍧ߝ.σ🀞 +B; \u0601𑍧\u07DD。σ򬍘🀞\u17B5; [B1 B6 P1 V6]; [B1 B6 P1 V6] # 𑍧ߝ.σ🀞 +B; xn--jfb66gt010c.xn--4xa623h9p95ars26d; [B1 B6 V6]; [B1 B6 V6] # 𑍧ߝ.σ🀞 +B; xn--jfb66gt010c.xn--3xa823h9p95ars26d; [B1 B6 V6]; [B1 B6 V6] # 𑍧ߝ.ς🀞 +B; -𐳲\u0646󠺐。\uABED𝟥; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # -𐳲ن.꯭3 +B; -𐳲\u0646󠺐。\uABED3; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # -𐳲ن.꯭3 +B; -𐲲\u0646󠺐。\uABED3; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # -𐳲ن.꯭3 +B; xn----roc5482rek10i.xn--3-zw5e; [B1 V3 V5 V6]; [B1 V3 V5 V6] # -𐳲ن.꯭3 +B; -𐲲\u0646󠺐。\uABED𝟥; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # -𐳲ن.꯭3 +T; \u200C󠴦。񲨕≮𐦜; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 P1 V6] # .≮𐦜 +N; \u200C󠴦。񲨕≮𐦜; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .≮𐦜 +T; \u200C󠴦。񲨕<\u0338𐦜; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 P1 V6] # .≮𐦜 +N; \u200C󠴦。񲨕<\u0338𐦜; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .≮𐦜 +T; \u200C󠴦。񲨕≮𐦜; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 P1 V6] # .≮𐦜 +N; \u200C󠴦。񲨕≮𐦜; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .≮𐦜 +T; \u200C󠴦。񲨕<\u0338𐦜; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 P1 V6] # .≮𐦜 +N; \u200C󠴦。񲨕<\u0338𐦜; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .≮𐦜 +B; xn--6v56e.xn--gdhz712gzlr6b; [B1 B5 B6 V6]; [B1 B5 B6 V6] +B; xn--0ug22251l.xn--gdhz712gzlr6b; [B1 B5 B6 C1 V6]; [B1 B5 B6 C1 V6] # .≮𐦜 +B; ⒈✌򟬟.𝟡񠱣; [P1 V6]; [P1 V6] +B; 1.✌򟬟.9񠱣; [P1 V6]; [P1 V6] +B; 1.xn--7bi44996f.xn--9-o706d; [V6]; [V6] +B; xn--tsh24g49550b.xn--9-o706d; [V6]; [V6] +B; 𑆾𞤬𐮆.\u0666\u1DD4; [B1 V5]; [B1 V5] # 𑆾𞤬𐮆.٦ᷔ +B; 𑆾𞤊𐮆.\u0666\u1DD4; [B1 V5]; [B1 V5] # 𑆾𞤬𐮆.٦ᷔ +B; xn--d29c79hf98r.xn--fib011j; [B1 V5]; [B1 V5] # 𑆾𞤬𐮆.٦ᷔ +T; ς.\uA9C0\uA8C4; [V5]; [V5] # ς.꧀꣄ +N; ς.\uA9C0\uA8C4; [V5]; [V5] # ς.꧀꣄ +T; ς.\uA9C0\uA8C4; [V5]; [V5] # ς.꧀꣄ +N; ς.\uA9C0\uA8C4; [V5]; [V5] # ς.꧀꣄ 
+B; Σ.\uA9C0\uA8C4; [V5]; [V5] # σ.꧀꣄ +B; σ.\uA9C0\uA8C4; [V5]; [V5] # σ.꧀꣄ +B; xn--4xa.xn--0f9ars; [V5]; [V5] # σ.꧀꣄ +B; xn--3xa.xn--0f9ars; [V5]; [V5] # ς.꧀꣄ +B; Σ.\uA9C0\uA8C4; [V5]; [V5] # σ.꧀꣄ +B; σ.\uA9C0\uA8C4; [V5]; [V5] # σ.꧀꣄ +T; 𑰶\u200C≯𐳐.\u085B; [B1 B3 B6 C1 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # 𑰶≯𐳐.࡛ +N; 𑰶\u200C≯𐳐.\u085B; [B1 B3 B6 C1 P1 V5 V6]; [B1 B3 B6 C1 P1 V5 V6] # 𑰶≯𐳐.࡛ +T; 𑰶\u200C>\u0338𐳐.\u085B; [B1 B3 B6 C1 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # 𑰶≯𐳐.࡛ +N; 𑰶\u200C>\u0338𐳐.\u085B; [B1 B3 B6 C1 P1 V5 V6]; [B1 B3 B6 C1 P1 V5 V6] # 𑰶≯𐳐.࡛ +T; 𑰶\u200C≯𐳐.\u085B; [B1 B3 B6 C1 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # 𑰶≯𐳐.࡛ +N; 𑰶\u200C≯𐳐.\u085B; [B1 B3 B6 C1 P1 V5 V6]; [B1 B3 B6 C1 P1 V5 V6] # 𑰶≯𐳐.࡛ +T; 𑰶\u200C>\u0338𐳐.\u085B; [B1 B3 B6 C1 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # 𑰶≯𐳐.࡛ +N; 𑰶\u200C>\u0338𐳐.\u085B; [B1 B3 B6 C1 P1 V5 V6]; [B1 B3 B6 C1 P1 V5 V6] # 𑰶≯𐳐.࡛ +T; 𑰶\u200C>\u0338𐲐.\u085B; [B1 B3 B6 C1 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # 𑰶≯𐳐.࡛ +N; 𑰶\u200C>\u0338𐲐.\u085B; [B1 B3 B6 C1 P1 V5 V6]; [B1 B3 B6 C1 P1 V5 V6] # 𑰶≯𐳐.࡛ +T; 𑰶\u200C≯𐲐.\u085B; [B1 B3 B6 C1 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # 𑰶≯𐳐.࡛ +N; 𑰶\u200C≯𐲐.\u085B; [B1 B3 B6 C1 P1 V5 V6]; [B1 B3 B6 C1 P1 V5 V6] # 𑰶≯𐳐.࡛ +B; xn--hdhz343g3wj.xn--qwb; [B1 B3 B6 V5 V6]; [B1 B3 B6 V5 V6] # 𑰶≯𐳐.࡛ +B; xn--0ug06g7697ap4ma.xn--qwb; [B1 B3 B6 C1 V5 V6]; [B1 B3 B6 C1 V5 V6] # 𑰶≯𐳐.࡛ +T; 𑰶\u200C>\u0338𐲐.\u085B; [B1 B3 B6 C1 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # 𑰶≯𐳐.࡛ +N; 𑰶\u200C>\u0338𐲐.\u085B; [B1 B3 B6 C1 P1 V5 V6]; [B1 B3 B6 C1 P1 V5 V6] # 𑰶≯𐳐.࡛ +T; 𑰶\u200C≯𐲐.\u085B; [B1 B3 B6 C1 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # 𑰶≯𐳐.࡛ +N; 𑰶\u200C≯𐲐.\u085B; [B1 B3 B6 C1 P1 V5 V6]; [B1 B3 B6 C1 P1 V5 V6] # 𑰶≯𐳐.࡛ +B; 羚。≯; [P1 V6]; [P1 V6] +B; 羚。>\u0338; [P1 V6]; [P1 V6] +B; 羚。≯; [P1 V6]; [P1 V6] +B; 羚。>\u0338; [P1 V6]; [P1 V6] +B; xn--xt0a.xn--hdh; [V6]; [V6] +B; 𑓂\u1759.\u08A8; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𑓂.ࢨ +B; 𑓂\u1759.\u08A8; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𑓂.ࢨ +B; xn--e1e9580k.xn--xyb; [B1 V5 V6]; [B1 V5 V6] # 𑓂.ࢨ +T; 󨣿󠇀\u200D。\u0663ҠჀ𝟑; [B1 B6 C2 P1 V6]; [B1 P1 V6] # .٣ҡჀ3 +N; 󨣿󠇀\u200D。\u0663ҠჀ𝟑; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # .٣ҡჀ3 +T; 󨣿󠇀\u200D。\u0663ҠჀ3; [B1 B6 C2 P1 V6]; [B1 P1 V6] # .٣ҡჀ3 +N; 󨣿󠇀\u200D。\u0663ҠჀ3; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # .٣ҡჀ3 +T; 󨣿󠇀\u200D。\u0663ҡⴠ3; [B1 B6 C2 P1 V6]; [B1 P1 V6] # .٣ҡⴠ3 +N; 󨣿󠇀\u200D。\u0663ҡⴠ3; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # .٣ҡⴠ3 +T; 󨣿󠇀\u200D。\u0663Ҡⴠ3; [B1 B6 C2 P1 V6]; [B1 P1 V6] # .٣ҡⴠ3 +N; 󨣿󠇀\u200D。\u0663Ҡⴠ3; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # .٣ҡⴠ3 +B; xn--1r19e.xn--3-ozb36ko13f; [B1 V6]; [B1 V6] # .٣ҡⴠ3 +B; xn--1ug89936l.xn--3-ozb36ko13f; [B1 B6 C2 V6]; [B1 B6 C2 V6] # .٣ҡⴠ3 +B; xn--1r19e.xn--3-ozb36kixu; [B1 V6]; [B1 V6] # .٣ҡჀ3 +B; xn--1ug89936l.xn--3-ozb36kixu; [B1 B6 C2 V6]; [B1 B6 C2 V6] # .٣ҡჀ3 +T; 󨣿󠇀\u200D。\u0663ҡⴠ𝟑; [B1 B6 C2 P1 V6]; [B1 P1 V6] # .٣ҡⴠ3 +N; 󨣿󠇀\u200D。\u0663ҡⴠ𝟑; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # .٣ҡⴠ3 +T; 󨣿󠇀\u200D。\u0663Ҡⴠ𝟑; [B1 B6 C2 P1 V6]; [B1 P1 V6] # .٣ҡⴠ3 +N; 󨣿󠇀\u200D。\u0663Ҡⴠ𝟑; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # .٣ҡⴠ3 +B; ᡷ。𐹢\u08E0; [B1]; [B1] # ᡷ.𐹢࣠ +B; xn--k9e.xn--j0b5005k; [B1]; [B1] # ᡷ.𐹢࣠ +T; 򕮇\u1BF3。\u0666񗜼\u17D2ß; [B1 P1 V6]; [B1 P1 V6] # ᯳.٦្ß +N; 򕮇\u1BF3。\u0666񗜼\u17D2ß; [B1 P1 V6]; [B1 P1 V6] # ᯳.٦្ß +T; 򕮇\u1BF3。\u0666񗜼\u17D2ß; [B1 P1 V6]; [B1 P1 V6] # ᯳.٦្ß +N; 򕮇\u1BF3。\u0666񗜼\u17D2ß; [B1 P1 V6]; [B1 P1 V6] # ᯳.٦្ß +B; 򕮇\u1BF3。\u0666񗜼\u17D2SS; [B1 P1 V6]; [B1 P1 V6] # ᯳.٦្ss +B; 򕮇\u1BF3。\u0666񗜼\u17D2ss; [B1 P1 V6]; [B1 P1 V6] # ᯳.٦្ss +B; 򕮇\u1BF3。\u0666񗜼\u17D2Ss; [B1 P1 V6]; [B1 P1 V6] # ᯳.٦្ss +B; xn--1zf58212h.xn--ss-pyd459o3258m; [B1 V6]; [B1 V6] # ᯳.٦្ss +B; 
xn--1zf58212h.xn--zca34zk4qx711k; [B1 V6]; [B1 V6] # ᯳.٦្ß +B; 򕮇\u1BF3。\u0666񗜼\u17D2SS; [B1 P1 V6]; [B1 P1 V6] # ᯳.٦្ss +B; 򕮇\u1BF3。\u0666񗜼\u17D2ss; [B1 P1 V6]; [B1 P1 V6] # ᯳.٦្ss +B; 򕮇\u1BF3。\u0666񗜼\u17D2Ss; [B1 P1 V6]; [B1 P1 V6] # ᯳.٦្ss +B; \u0664򤽎𑲛.󠔢︒≠; [B1 P1 V6]; [B1 P1 V6] # ٤𑲛.︒≠ +B; \u0664򤽎𑲛.󠔢︒=\u0338; [B1 P1 V6]; [B1 P1 V6] # ٤𑲛.︒≠ +B; \u0664򤽎𑲛.󠔢。≠; [B1 P1 V6]; [B1 P1 V6] # ٤𑲛..≠ +B; \u0664򤽎𑲛.󠔢。=\u0338; [B1 P1 V6]; [B1 P1 V6] # ٤𑲛..≠ +B; xn--dib0653l2i02d.xn--k736e.xn--1ch; [B1 V6]; [B1 V6] # ٤𑲛..≠ +B; xn--dib0653l2i02d.xn--1ch7467f14u4g; [B1 V6]; [B1 V6] # ٤𑲛.︒≠ +B; ➆񷧕ỗ⒈.򑬒񡘮\u085B𝟫; [P1 V6]; [P1 V6] # ➆ỗ⒈.࡛9 +B; ➆񷧕o\u0302\u0303⒈.򑬒񡘮\u085B𝟫; [P1 V6]; [P1 V6] # ➆ỗ⒈.࡛9 +B; ➆񷧕ỗ1..򑬒񡘮\u085B9; [P1 V6 A4_2]; [P1 V6 A4_2] # ➆ỗ1..࡛9 +B; ➆񷧕o\u0302\u03031..򑬒񡘮\u085B9; [P1 V6 A4_2]; [P1 V6 A4_2] # ➆ỗ1..࡛9 +B; ➆񷧕O\u0302\u03031..򑬒񡘮\u085B9; [P1 V6 A4_2]; [P1 V6 A4_2] # ➆ỗ1..࡛9 +B; ➆񷧕Ỗ1..򑬒񡘮\u085B9; [P1 V6 A4_2]; [P1 V6 A4_2] # ➆ỗ1..࡛9 +B; xn--1-3xm292b6044r..xn--9-6jd87310jtcqs; [V6 A4_2]; [V6 A4_2] # ➆ỗ1..࡛9 +B; ➆񷧕O\u0302\u0303⒈.򑬒񡘮\u085B𝟫; [P1 V6]; [P1 V6] # ➆ỗ⒈.࡛9 +B; ➆񷧕Ỗ⒈.򑬒񡘮\u085B𝟫; [P1 V6]; [P1 V6] # ➆ỗ⒈.࡛9 +B; xn--6lg26tvvc6v99z.xn--9-6jd87310jtcqs; [V6]; [V6] # ➆ỗ⒈.࡛9 +T; \u200D。𞤘; [B1 C2]; [A4_2] # .𞤺 +N; \u200D。𞤘; [B1 C2]; [B1 C2] # .𞤺 +T; \u200D。𞤘; [B1 C2]; [A4_2] # .𞤺 +N; \u200D。𞤘; [B1 C2]; [B1 C2] # .𞤺 +T; \u200D。𞤺; [B1 C2]; [A4_2] # .𞤺 +N; \u200D。𞤺; [B1 C2]; [B1 C2] # .𞤺 +B; .xn--ye6h; [A4_2]; [A4_2] +B; xn--1ug.xn--ye6h; [B1 C2]; [B1 C2] # .𞤺 +T; \u200D。𞤺; [B1 C2]; [A4_2] # .𞤺 +N; \u200D。𞤺; [B1 C2]; [B1 C2] # .𞤺 +B; xn--ye6h; 𞤺; xn--ye6h +B; 𞤺; ; xn--ye6h +B; 𞤘; 𞤺; xn--ye6h +B; \u0829\u0724.ᢣ; [B1 V5]; [B1 V5] # ࠩܤ.ᢣ +B; xn--unb53c.xn--tbf; [B1 V5]; [B1 V5] # ࠩܤ.ᢣ +T; \u073C\u200C-。𓐾ß; [C1 P1 V3 V5 V6]; [P1 V3 V5 V6] # ܼ-.ß +N; \u073C\u200C-。𓐾ß; [C1 P1 V3 V5 V6]; [C1 P1 V3 V5 V6] # ܼ-.ß +T; \u073C\u200C-。𓐾SS; [C1 P1 V3 V5 V6]; [P1 V3 V5 V6] # ܼ-.ss +N; \u073C\u200C-。𓐾SS; [C1 P1 V3 V5 V6]; [C1 P1 V3 V5 V6] # ܼ-.ss +T; \u073C\u200C-。𓐾ss; [C1 P1 V3 V5 V6]; [P1 V3 V5 V6] # ܼ-.ss +N; \u073C\u200C-。𓐾ss; [C1 P1 V3 V5 V6]; [C1 P1 V3 V5 V6] # ܼ-.ss +T; \u073C\u200C-。𓐾Ss; [C1 P1 V3 V5 V6]; [P1 V3 V5 V6] # ܼ-.ss +N; \u073C\u200C-。𓐾Ss; [C1 P1 V3 V5 V6]; [C1 P1 V3 V5 V6] # ܼ-.ss +B; xn----s2c.xn--ss-066q; [V3 V5 V6]; [V3 V5 V6] # ܼ-.ss +B; xn----s2c071q.xn--ss-066q; [C1 V3 V5 V6]; [C1 V3 V5 V6] # ܼ-.ss +B; xn----s2c071q.xn--zca7848m; [C1 V3 V5 V6]; [C1 V3 V5 V6] # ܼ-.ß +T; \u200Cς🃡⒗.\u0CC6仧\u0756; [B1 B5 B6 C1 P1 V5 V6]; [B5 B6 P1 V5 V6] # ς🃡⒗.ೆ仧ݖ +N; \u200Cς🃡⒗.\u0CC6仧\u0756; [B1 B5 B6 C1 P1 V5 V6]; [B1 B5 B6 C1 P1 V5 V6] # ς🃡⒗.ೆ仧ݖ +T; \u200Cς🃡16..\u0CC6仧\u0756; [B1 B5 B6 C1 V5 A4_2]; [B5 B6 V5 A4_2] # ς🃡16..ೆ仧ݖ +N; \u200Cς🃡16..\u0CC6仧\u0756; [B1 B5 B6 C1 V5 A4_2]; [B1 B5 B6 C1 V5 A4_2] # ς🃡16..ೆ仧ݖ +T; \u200CΣ🃡16..\u0CC6仧\u0756; [B1 B5 B6 C1 V5 A4_2]; [B5 B6 V5 A4_2] # σ🃡16..ೆ仧ݖ +N; \u200CΣ🃡16..\u0CC6仧\u0756; [B1 B5 B6 C1 V5 A4_2]; [B1 B5 B6 C1 V5 A4_2] # σ🃡16..ೆ仧ݖ +T; \u200Cσ🃡16..\u0CC6仧\u0756; [B1 B5 B6 C1 V5 A4_2]; [B5 B6 V5 A4_2] # σ🃡16..ೆ仧ݖ +N; \u200Cσ🃡16..\u0CC6仧\u0756; [B1 B5 B6 C1 V5 A4_2]; [B1 B5 B6 C1 V5 A4_2] # σ🃡16..ೆ仧ݖ +B; xn--16-ubc66061c..xn--9ob79ycx2e; [B5 B6 V5 A4_2]; [B5 B6 V5 A4_2] # σ🃡16..ೆ仧ݖ +B; xn--16-ubc7700avy99b..xn--9ob79ycx2e; [B1 B5 B6 C1 V5 A4_2]; [B1 B5 B6 C1 V5 A4_2] # σ🃡16..ೆ仧ݖ +B; xn--16-rbc1800avy99b..xn--9ob79ycx2e; [B1 B5 B6 C1 V5 A4_2]; [B1 B5 B6 C1 V5 A4_2] # ς🃡16..ೆ仧ݖ +T; \u200CΣ🃡⒗.\u0CC6仧\u0756; [B1 B5 B6 C1 P1 V5 V6]; [B5 B6 P1 V5 V6] # σ🃡⒗.ೆ仧ݖ +N; \u200CΣ🃡⒗.\u0CC6仧\u0756; [B1 B5 B6 C1 P1 V5 V6]; [B1 B5 B6 C1 P1 V5 V6] # σ🃡⒗.ೆ仧ݖ +T; 
\u200Cσ🃡⒗.\u0CC6仧\u0756; [B1 B5 B6 C1 P1 V5 V6]; [B5 B6 P1 V5 V6] # σ🃡⒗.ೆ仧ݖ +N; \u200Cσ🃡⒗.\u0CC6仧\u0756; [B1 B5 B6 C1 P1 V5 V6]; [B1 B5 B6 C1 P1 V5 V6] # σ🃡⒗.ೆ仧ݖ +B; xn--4xa229nbu92a.xn--9ob79ycx2e; [B5 B6 V5 V6]; [B5 B6 V5 V6] # σ🃡⒗.ೆ仧ݖ +B; xn--4xa595lz9czy52d.xn--9ob79ycx2e; [B1 B5 B6 C1 V5 V6]; [B1 B5 B6 C1 V5 V6] # σ🃡⒗.ೆ仧ݖ +B; xn--3xa795lz9czy52d.xn--9ob79ycx2e; [B1 B5 B6 C1 V5 V6]; [B1 B5 B6 C1 V5 V6] # ς🃡⒗.ೆ仧ݖ +B; -.𞸚; [B1 V3]; [B1 V3] # -.ظ +B; -.\u0638; [B1 V3]; [B1 V3] # -.ظ +B; -.xn--3gb; [B1 V3]; [B1 V3] # -.ظ +B; 򏛓\u0683.\u0F7E\u0634; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ڃ.ཾش +B; xn--8ib92728i.xn--zgb968b; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # ڃ.ཾش +B; \u0FE6\u0843񽶬.𐮏; [B5 P1 V6]; [B5 P1 V6] # ࡃ.𐮏 +B; xn--1vb320b5m04p.xn--m29c; [B5 V6]; [B5 V6] # ࡃ.𐮏 +T; 2񎨠\u07CBß。ᠽ; [B1 P1 V6]; [B1 P1 V6] # 2ߋß.ᠽ +N; 2񎨠\u07CBß。ᠽ; [B1 P1 V6]; [B1 P1 V6] # 2ߋß.ᠽ +B; 2񎨠\u07CBSS。ᠽ; [B1 P1 V6]; [B1 P1 V6] # 2ߋss.ᠽ +B; 2񎨠\u07CBss。ᠽ; [B1 P1 V6]; [B1 P1 V6] # 2ߋss.ᠽ +B; 2񎨠\u07CBSs。ᠽ; [B1 P1 V6]; [B1 P1 V6] # 2ߋss.ᠽ +B; xn--2ss-odg83511n.xn--w7e; [B1 V6]; [B1 V6] # 2ߋss.ᠽ +B; xn--2-qfa924cez02l.xn--w7e; [B1 V6]; [B1 V6] # 2ߋß.ᠽ +T; 㸳\u07CA≮.\u06CEß-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V3 V6] # 㸳ߊ≮.ێß- +N; 㸳\u07CA≮.\u06CEß-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # 㸳ߊ≮.ێß- +T; 㸳\u07CA<\u0338.\u06CEß-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V3 V6] # 㸳ߊ≮.ێß- +N; 㸳\u07CA<\u0338.\u06CEß-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # 㸳ߊ≮.ێß- +T; 㸳\u07CA≮.\u06CEß-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V3 V6] # 㸳ߊ≮.ێß- +N; 㸳\u07CA≮.\u06CEß-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # 㸳ߊ≮.ێß- +T; 㸳\u07CA<\u0338.\u06CEß-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V3 V6] # 㸳ߊ≮.ێß- +N; 㸳\u07CA<\u0338.\u06CEß-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # 㸳ߊ≮.ێß- +T; 㸳\u07CA<\u0338.\u06CESS-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V3 V6] # 㸳ߊ≮.ێss- +N; 㸳\u07CA<\u0338.\u06CESS-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # 㸳ߊ≮.ێss- +T; 㸳\u07CA≮.\u06CESS-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V3 V6] # 㸳ߊ≮.ێss- +N; 㸳\u07CA≮.\u06CESS-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # 㸳ߊ≮.ێss- +T; 㸳\u07CA≮.\u06CEss-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V3 V6] # 㸳ߊ≮.ێss- +N; 㸳\u07CA≮.\u06CEss-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # 㸳ߊ≮.ێss- +T; 㸳\u07CA<\u0338.\u06CEss-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V3 V6] # 㸳ߊ≮.ێss- +N; 㸳\u07CA<\u0338.\u06CEss-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # 㸳ߊ≮.ێss- +T; 㸳\u07CA<\u0338.\u06CESs-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V3 V6] # 㸳ߊ≮.ێss- +N; 㸳\u07CA<\u0338.\u06CESs-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # 㸳ߊ≮.ێss- +T; 㸳\u07CA≮.\u06CESs-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V3 V6] # 㸳ߊ≮.ێss- +N; 㸳\u07CA≮.\u06CESs-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # 㸳ߊ≮.ێss- +B; xn--lsb457kkut.xn--ss--qjf; [B2 B3 B5 B6 V3 V6]; [B2 B3 B5 B6 V3 V6] # 㸳ߊ≮.ێss- +B; xn--lsb457kkut.xn--ss--qjf2343a; [B2 B3 B5 B6 C2 V6]; [B2 B3 B5 B6 C2 V6] # 㸳ߊ≮.ێss- +B; xn--lsb457kkut.xn----pfa076bys4a; [B2 B3 B5 B6 C2 V6]; [B2 B3 B5 B6 C2 V6] # 㸳ߊ≮.ێß- +T; 㸳\u07CA<\u0338.\u06CESS-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V3 V6] # 㸳ߊ≮.ێss- +N; 㸳\u07CA<\u0338.\u06CESS-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # 㸳ߊ≮.ێss- +T; 㸳\u07CA≮.\u06CESS-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V3 V6] # 㸳ߊ≮.ێss- +N; 㸳\u07CA≮.\u06CESS-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 
V6] # 㸳ߊ≮.ێss- +T; 㸳\u07CA≮.\u06CEss-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V3 V6] # 㸳ߊ≮.ێss- +N; 㸳\u07CA≮.\u06CEss-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # 㸳ߊ≮.ێss- +T; 㸳\u07CA<\u0338.\u06CEss-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V3 V6] # 㸳ߊ≮.ێss- +N; 㸳\u07CA<\u0338.\u06CEss-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # 㸳ߊ≮.ێss- +T; 㸳\u07CA<\u0338.\u06CESs-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V3 V6] # 㸳ߊ≮.ێss- +N; 㸳\u07CA<\u0338.\u06CESs-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # 㸳ߊ≮.ێss- +T; 㸳\u07CA≮.\u06CESs-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V3 V6] # 㸳ߊ≮.ێss- +N; 㸳\u07CA≮.\u06CESs-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # 㸳ߊ≮.ێss- +B; -򷝬\u135E𑜧.\u1DEB-︒; [P1 V3 V5 V6]; [P1 V3 V5 V6] # -፞𑜧.ᷫ-︒ +B; -򷝬\u135E𑜧.\u1DEB-。; [P1 V3 V5 V6]; [P1 V3 V5 V6] # -፞𑜧.ᷫ-. +B; xn----b5h1837n2ok9f.xn----mkm.; [V3 V5 V6]; [V3 V5 V6] # -፞𑜧.ᷫ-. +B; xn----b5h1837n2ok9f.xn----mkmw278h; [V3 V5 V6]; [V3 V5 V6] # -፞𑜧.ᷫ-︒ +B; ︒.򚠡\u1A59; [P1 V6]; [P1 V6] # ︒.ᩙ +B; 。.򚠡\u1A59; [P1 V6 A4_2]; [P1 V6 A4_2] # ..ᩙ +B; ..xn--cof61594i; [V6 A4_2]; [V6 A4_2] # ..ᩙ +B; xn--y86c.xn--cof61594i; [V6]; [V6] # ︒.ᩙ +T; \u0323\u2DE1。\u200C⓾\u200C\u06B9; [B1 B3 B6 C1 V5]; [B1 B3 B6 V5] # ̣ⷡ.⓾ڹ +N; \u0323\u2DE1。\u200C⓾\u200C\u06B9; [B1 B3 B6 C1 V5]; [B1 B3 B6 C1 V5] # ̣ⷡ.⓾ڹ +B; xn--kta899s.xn--skb116m; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ̣ⷡ.⓾ڹ +B; xn--kta899s.xn--skb970ka771c; [B1 B3 B6 C1 V5]; [B1 B3 B6 C1 V5] # ̣ⷡ.⓾ڹ +B; 𞠶ᠴ\u06DD。\u1074𞤵󠅦; [B1 B2 P1 V5 V6]; [B1 B2 P1 V5 V6] # 𞠶ᠴ.ၴ𞤵 +B; 𞠶ᠴ\u06DD。\u1074𞤵󠅦; [B1 B2 P1 V5 V6]; [B1 B2 P1 V5 V6] # 𞠶ᠴ.ၴ𞤵 +B; 𞠶ᠴ\u06DD。\u1074𞤓󠅦; [B1 B2 P1 V5 V6]; [B1 B2 P1 V5 V6] # 𞠶ᠴ.ၴ𞤵 +B; xn--tlb199fwl35a.xn--yld4613v; [B1 B2 V5 V6]; [B1 B2 V5 V6] # 𞠶ᠴ.ၴ𞤵 +B; 𞠶ᠴ\u06DD。\u1074𞤓󠅦; [B1 B2 P1 V5 V6]; [B1 B2 P1 V5 V6] # 𞠶ᠴ.ၴ𞤵 +B; 𑰺.-򑟏; [P1 V3 V5 V6]; [P1 V3 V5 V6] +B; xn--jk3d.xn----iz68g; [V3 V5 V6]; [V3 V5 V6] +B; 󠻩.赏; [P1 V6]; [P1 V6] +B; 󠻩.赏; [P1 V6]; [P1 V6] +B; xn--2856e.xn--6o3a; [V6]; [V6] +B; \u06B0ᠡ。Ⴁ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ڰᠡ.Ⴁ +B; \u06B0ᠡ。Ⴁ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ڰᠡ.Ⴁ +B; \u06B0ᠡ。ⴁ; [B2 B3]; [B2 B3] # ڰᠡ.ⴁ +B; xn--jkb440g.xn--skj; [B2 B3]; [B2 B3] # ڰᠡ.ⴁ +B; xn--jkb440g.xn--8md; [B2 B3 V6]; [B2 B3 V6] # ڰᠡ.Ⴁ +B; \u06B0ᠡ。ⴁ; [B2 B3]; [B2 B3] # ڰᠡ.ⴁ +T; \u20DEႪ\u06BBς。-; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ⃞Ⴊڻς.- +N; \u20DEႪ\u06BBς。-; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ⃞Ⴊڻς.- +T; \u20DEႪ\u06BBς。-; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ⃞Ⴊڻς.- +N; \u20DEႪ\u06BBς。-; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ⃞Ⴊڻς.- +T; \u20DEⴊ\u06BBς。-; [B1 V3 V5]; [B1 V3 V5] # ⃞ⴊڻς.- +N; \u20DEⴊ\u06BBς。-; [B1 V3 V5]; [B1 V3 V5] # ⃞ⴊڻς.- +B; \u20DEႪ\u06BBΣ。-; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ⃞Ⴊڻσ.- +B; \u20DEⴊ\u06BBσ。-; [B1 V3 V5]; [B1 V3 V5] # ⃞ⴊڻσ.- +B; \u20DEႪ\u06BBσ。-; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ⃞Ⴊڻσ.- +B; xn--4xa33m7zmb0q.-; [B1 V3 V5 V6]; [B1 V3 V5 V6] # ⃞Ⴊڻσ.- +B; xn--4xa33mr38aeel.-; [B1 V3 V5]; [B1 V3 V5] # ⃞ⴊڻσ.- +B; xn--3xa53mr38aeel.-; [B1 V3 V5]; [B1 V3 V5] # ⃞ⴊڻς.- +B; xn--3xa53m7zmb0q.-; [B1 V3 V5 V6]; [B1 V3 V5 V6] # ⃞Ⴊڻς.- +T; \u20DEⴊ\u06BBς。-; [B1 V3 V5]; [B1 V3 V5] # ⃞ⴊڻς.- +N; \u20DEⴊ\u06BBς。-; [B1 V3 V5]; [B1 V3 V5] # ⃞ⴊڻς.- +B; \u20DEႪ\u06BBΣ。-; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ⃞Ⴊڻσ.- +B; \u20DEⴊ\u06BBσ。-; [B1 V3 V5]; [B1 V3 V5] # ⃞ⴊڻσ.- +B; \u20DEႪ\u06BBσ。-; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ⃞Ⴊڻσ.- +T; Ⴍ.񍇦\u200C; [C1 P1 V6]; [P1 V6] # Ⴍ. +N; Ⴍ.񍇦\u200C; [C1 P1 V6]; [C1 P1 V6] # Ⴍ. +T; Ⴍ.񍇦\u200C; [C1 P1 V6]; [P1 V6] # Ⴍ. +N; Ⴍ.񍇦\u200C; [C1 P1 V6]; [C1 P1 V6] # Ⴍ. 
+T; ⴍ.񍇦\u200C; [C1 P1 V6]; [P1 V6] # ⴍ. +N; ⴍ.񍇦\u200C; [C1 P1 V6]; [C1 P1 V6] # ⴍ. +B; xn--4kj.xn--p01x; [V6]; [V6] +B; xn--4kj.xn--0ug56448b; [C1 V6]; [C1 V6] # ⴍ. +B; xn--lnd.xn--p01x; [V6]; [V6] +B; xn--lnd.xn--0ug56448b; [C1 V6]; [C1 V6] # Ⴍ. +T; ⴍ.񍇦\u200C; [C1 P1 V6]; [P1 V6] # ⴍ. +N; ⴍ.񍇦\u200C; [C1 P1 V6]; [C1 P1 V6] # ⴍ. +B; 򉟂󠵣.𐫫\u1A60󴺖\u1B44; [B2 B3 B6 P1 V6]; [B2 B3 B6 P1 V6] # .𐫫᩠᭄ +B; xn--9u37blu98h.xn--jof13bt568cork1j; [B2 B3 B6 V6]; [B2 B3 B6 V6] # .𐫫᩠᭄ +B; ≯❊ᠯ。𐹱⺨; [B1 P1 V6]; [B1 P1 V6] +B; >\u0338❊ᠯ。𐹱⺨; [B1 P1 V6]; [B1 P1 V6] +B; ≯❊ᠯ。𐹱⺨; [B1 P1 V6]; [B1 P1 V6] +B; >\u0338❊ᠯ。𐹱⺨; [B1 P1 V6]; [B1 P1 V6] +B; xn--i7e163ct2d.xn--vwj7372e; [B1 V6]; [B1 V6] +B; 􁕜𐹧𞭁𐹩。Ⴈ𐫮Ⴏ; [B5 B6 P1 V6]; [B5 B6 P1 V6] +B; 􁕜𐹧𞭁𐹩。ⴈ𐫮ⴏ; [B5 B6 P1 V6]; [B5 B6 P1 V6] +B; xn--fo0de1270ope54j.xn--zkjo0151o; [B5 B6 V6]; [B5 B6 V6] +B; xn--fo0de1270ope54j.xn--gndo2033q; [B5 B6 V6]; [B5 B6 V6] +B; 𞠂。\uA926; [B1 B3 B6 V5]; [B1 B3 B6 V5] # 𞠂.ꤦ +B; xn--145h.xn--ti9a; [B1 B3 B6 V5]; [B1 B3 B6 V5] # 𞠂.ꤦ +B; 𝟔𐹫.\u0733\u10379ꡇ; [B1 V5]; [B1 V5] # 6𐹫.့ܳ9ꡇ +B; 𝟔𐹫.\u1037\u07339ꡇ; [B1 V5]; [B1 V5] # 6𐹫.့ܳ9ꡇ +B; 6𐹫.\u1037\u07339ꡇ; [B1 V5]; [B1 V5] # 6𐹫.့ܳ9ꡇ +B; xn--6-t26i.xn--9-91c730e8u8n; [B1 V5]; [B1 V5] # 6𐹫.့ܳ9ꡇ +B; \u0724\u0603𞲶.\u06D8; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # ܤ.ۘ +B; \u0724\u0603𞲶.\u06D8; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # ܤ.ۘ +B; xn--lfb19ct414i.xn--olb; [B1 B3 B6 V5 V6]; [B1 B3 B6 V5 V6] # ܤ.ۘ +T; ✆񱔩ꡋ.\u0632\u200D𞣴; [B1 C2 P1 V6]; [B1 P1 V6] # ✆ꡋ.ز +N; ✆񱔩ꡋ.\u0632\u200D𞣴; [B1 C2 P1 V6]; [B1 C2 P1 V6] # ✆ꡋ.ز +T; ✆񱔩ꡋ.\u0632\u200D𞣴; [B1 C2 P1 V6]; [B1 P1 V6] # ✆ꡋ.ز +N; ✆񱔩ꡋ.\u0632\u200D𞣴; [B1 C2 P1 V6]; [B1 C2 P1 V6] # ✆ꡋ.ز +B; xn--1biv525bcix0d.xn--xgb6828v; [B1 V6]; [B1 V6] # ✆ꡋ.ز +B; xn--1biv525bcix0d.xn--xgb253k0m73a; [B1 C2 V6]; [B1 C2 V6] # ✆ꡋ.ز +B; \u0845񃾰𞸍-.≠򃁟𑋪; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # ࡅن-.≠𑋪 +B; \u0845񃾰𞸍-.=\u0338򃁟𑋪; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # ࡅن-.≠𑋪 +B; \u0845񃾰\u0646-.≠򃁟𑋪; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # ࡅن-.≠𑋪 +B; \u0845񃾰\u0646-.=\u0338򃁟𑋪; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # ࡅن-.≠𑋪 +B; xn----qoc64my971s.xn--1ch7585g76o3c; [B1 B2 B3 V3 V6]; [B1 B2 B3 V3 V6] # ࡅن-.≠𑋪 +B; 𝟛.笠; 3.笠; 3.xn--6vz +B; 𝟛.笠; 3.笠; 3.xn--6vz +B; 3.笠; ; 3.xn--6vz +B; 3.xn--6vz; 3.笠; 3.xn--6vz +T; -\u200D.Ⴞ𐋷; [C2 P1 V3 V6]; [P1 V3 V6] # -.Ⴞ𐋷 +N; -\u200D.Ⴞ𐋷; [C2 P1 V3 V6]; [C2 P1 V3 V6] # -.Ⴞ𐋷 +T; -\u200D.ⴞ𐋷; [C2 V3]; [V3] # -.ⴞ𐋷 +N; -\u200D.ⴞ𐋷; [C2 V3]; [C2 V3] # -.ⴞ𐋷 +B; -.xn--mlj8559d; [V3]; [V3] +B; xn----ugn.xn--mlj8559d; [C2 V3]; [C2 V3] # -.ⴞ𐋷 +B; -.xn--2nd2315j; [V3 V6]; [V3 V6] +B; xn----ugn.xn--2nd2315j; [C2 V3 V6]; [C2 V3 V6] # -.Ⴞ𐋷 +T; \u200Dςß\u0731.\u0BCD; [C2 V5]; [V5] # ςßܱ.் +N; \u200Dςß\u0731.\u0BCD; [C2 V5]; [C2 V5] # ςßܱ.் +T; \u200Dςß\u0731.\u0BCD; [C2 V5]; [V5] # ςßܱ.் +N; \u200Dςß\u0731.\u0BCD; [C2 V5]; [C2 V5] # ςßܱ.் +T; \u200DΣSS\u0731.\u0BCD; [C2 V5]; [V5] # σssܱ.் +N; \u200DΣSS\u0731.\u0BCD; [C2 V5]; [C2 V5] # σssܱ.் +T; \u200Dσss\u0731.\u0BCD; [C2 V5]; [V5] # σssܱ.் +N; \u200Dσss\u0731.\u0BCD; [C2 V5]; [C2 V5] # σssܱ.் +T; \u200DΣss\u0731.\u0BCD; [C2 V5]; [V5] # σssܱ.் +N; \u200DΣss\u0731.\u0BCD; [C2 V5]; [C2 V5] # σssܱ.் +B; xn--ss-ubc826a.xn--xmc; [V5]; [V5] # σssܱ.் +B; xn--ss-ubc826ab34b.xn--xmc; [C2 V5]; [C2 V5] # σssܱ.் +T; \u200DΣß\u0731.\u0BCD; [C2 V5]; [V5] # σßܱ.் +N; \u200DΣß\u0731.\u0BCD; [C2 V5]; [C2 V5] # σßܱ.் +T; \u200Dσß\u0731.\u0BCD; [C2 V5]; [V5] # σßܱ.் +N; \u200Dσß\u0731.\u0BCD; [C2 V5]; [C2 V5] # σßܱ.் +B; xn--zca39lk1di19a.xn--xmc; [C2 V5]; [C2 V5] # σßܱ.் +B; xn--zca19ln1di19a.xn--xmc; [C2 V5]; [C2 V5] # ςßܱ.் 
+T; \u200DΣSS\u0731.\u0BCD; [C2 V5]; [V5] # σssܱ.் +N; \u200DΣSS\u0731.\u0BCD; [C2 V5]; [C2 V5] # σssܱ.் +T; \u200Dσss\u0731.\u0BCD; [C2 V5]; [V5] # σssܱ.் +N; \u200Dσss\u0731.\u0BCD; [C2 V5]; [C2 V5] # σssܱ.் +T; \u200DΣss\u0731.\u0BCD; [C2 V5]; [V5] # σssܱ.் +N; \u200DΣss\u0731.\u0BCD; [C2 V5]; [C2 V5] # σssܱ.் +T; \u200DΣß\u0731.\u0BCD; [C2 V5]; [V5] # σßܱ.் +N; \u200DΣß\u0731.\u0BCD; [C2 V5]; [C2 V5] # σßܱ.் +T; \u200Dσß\u0731.\u0BCD; [C2 V5]; [V5] # σßܱ.் +N; \u200Dσß\u0731.\u0BCD; [C2 V5]; [C2 V5] # σßܱ.் +T; ≠.\u200D; [C2 P1 V6]; [P1 V6] # ≠. +N; ≠.\u200D; [C2 P1 V6]; [C2 P1 V6] # ≠. +T; =\u0338.\u200D; [C2 P1 V6]; [P1 V6] # ≠. +N; =\u0338.\u200D; [C2 P1 V6]; [C2 P1 V6] # ≠. +T; ≠.\u200D; [C2 P1 V6]; [P1 V6] # ≠. +N; ≠.\u200D; [C2 P1 V6]; [C2 P1 V6] # ≠. +T; =\u0338.\u200D; [C2 P1 V6]; [P1 V6] # ≠. +N; =\u0338.\u200D; [C2 P1 V6]; [C2 P1 V6] # ≠. +B; xn--1ch.; [V6]; [V6] +B; xn--1ch.xn--1ug; [C2 V6]; [C2 V6] # ≠. +B; \uFC01。\u0C81ᠼ▗򒁋; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ئح.ಁᠼ▗ +B; \u0626\u062D。\u0C81ᠼ▗򒁋; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ئح.ಁᠼ▗ +B; \u064A\u0654\u062D。\u0C81ᠼ▗򒁋; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ئح.ಁᠼ▗ +B; xn--lgbo.xn--2rc021dcxkrx55t; [B1 V5 V6]; [B1 V5 V6] # ئح.ಁᠼ▗ +T; 󧋵\u09CDς.ς𐨿; [P1 V6]; [P1 V6] # ্ς.ς𐨿 +N; 󧋵\u09CDς.ς𐨿; [P1 V6]; [P1 V6] # ্ς.ς𐨿 +T; 󧋵\u09CDς.ς𐨿; [P1 V6]; [P1 V6] # ্ς.ς𐨿 +N; 󧋵\u09CDς.ς𐨿; [P1 V6]; [P1 V6] # ্ς.ς𐨿 +B; 󧋵\u09CDΣ.Σ𐨿; [P1 V6]; [P1 V6] # ্σ.σ𐨿 +T; 󧋵\u09CDσ.ς𐨿; [P1 V6]; [P1 V6] # ্σ.ς𐨿 +N; 󧋵\u09CDσ.ς𐨿; [P1 V6]; [P1 V6] # ্σ.ς𐨿 +B; 󧋵\u09CDσ.σ𐨿; [P1 V6]; [P1 V6] # ্σ.σ𐨿 +B; 󧋵\u09CDΣ.σ𐨿; [P1 V6]; [P1 V6] # ্σ.σ𐨿 +B; xn--4xa502av8297a.xn--4xa6055k; [V6]; [V6] # ্σ.σ𐨿 +T; 󧋵\u09CDΣ.ς𐨿; [P1 V6]; [P1 V6] # ্σ.ς𐨿 +N; 󧋵\u09CDΣ.ς𐨿; [P1 V6]; [P1 V6] # ্σ.ς𐨿 +B; xn--4xa502av8297a.xn--3xa8055k; [V6]; [V6] # ্σ.ς𐨿 +B; xn--3xa702av8297a.xn--3xa8055k; [V6]; [V6] # ্ς.ς𐨿 +B; 󧋵\u09CDΣ.Σ𐨿; [P1 V6]; [P1 V6] # ্σ.σ𐨿 +T; 󧋵\u09CDσ.ς𐨿; [P1 V6]; [P1 V6] # ্σ.ς𐨿 +N; 󧋵\u09CDσ.ς𐨿; [P1 V6]; [P1 V6] # ্σ.ς𐨿 +B; 󧋵\u09CDσ.σ𐨿; [P1 V6]; [P1 V6] # ্σ.σ𐨿 +B; 󧋵\u09CDΣ.σ𐨿; [P1 V6]; [P1 V6] # ্σ.σ𐨿 +T; 󧋵\u09CDΣ.ς𐨿; [P1 V6]; [P1 V6] # ্σ.ς𐨿 +N; 󧋵\u09CDΣ.ς𐨿; [P1 V6]; [P1 V6] # ্σ.ς𐨿 +B; 𐫓\u07D8牅\u08F8。𞦤\u1A17򱍰Ⴙ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # 𐫓ߘ牅ࣸ.ᨗႹ +B; 𐫓\u07D8牅\u08F8。𞦤\u1A17򱍰Ⴙ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # 𐫓ߘ牅ࣸ.ᨗႹ +B; 𐫓\u07D8牅\u08F8。𞦤\u1A17򱍰ⴙ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # 𐫓ߘ牅ࣸ.ᨗⴙ +B; xn--zsb09cu46vjs6f.xn--gmf469fr883am5r1e; [B2 B3 V6]; [B2 B3 V6] # 𐫓ߘ牅ࣸ.ᨗⴙ +B; xn--zsb09cu46vjs6f.xn--xnd909bv540bm5k9d; [B2 B3 V6]; [B2 B3 V6] # 𐫓ߘ牅ࣸ.ᨗႹ +B; 𐫓\u07D8牅\u08F8。𞦤\u1A17򱍰ⴙ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # 𐫓ߘ牅ࣸ.ᨗⴙ +B; 񣤒。륧; [P1 V6]; [P1 V6] +B; 񣤒。륧; [P1 V6]; [P1 V6] +B; 񣤒。륧; [P1 V6]; [P1 V6] +B; 񣤒。륧; [P1 V6]; [P1 V6] +B; xn--s264a.xn--pw2b; [V6]; [V6] +T; 𐹷\u200D。󉵢; [B1 C2 P1 V6]; [B1 P1 V6] # 𐹷. +N; 𐹷\u200D。󉵢; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 𐹷. +B; xn--vo0d.xn--8088d; [B1 V6]; [B1 V6] +B; xn--1ugx205g.xn--8088d; [B1 C2 V6]; [B1 C2 V6] # 𐹷. 
+B; Ⴘ\u06C2𑲭。-; [B1 B5 B6 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # Ⴘۂ𑲭.- +B; Ⴘ\u06C1\u0654𑲭。-; [B1 B5 B6 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # Ⴘۂ𑲭.- +B; Ⴘ\u06C2𑲭。-; [B1 B5 B6 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # Ⴘۂ𑲭.- +B; Ⴘ\u06C1\u0654𑲭。-; [B1 B5 B6 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # Ⴘۂ𑲭.- +B; ⴘ\u06C1\u0654𑲭。-; [B1 B5 B6 V3]; [B1 B5 B6 V3] # ⴘۂ𑲭.- +B; ⴘ\u06C2𑲭。-; [B1 B5 B6 V3]; [B1 B5 B6 V3] # ⴘۂ𑲭.- +B; xn--1kb147qfk3n.-; [B1 B5 B6 V3]; [B1 B5 B6 V3] # ⴘۂ𑲭.- +B; xn--1kb312c139t.-; [B1 B5 B6 V3 V6]; [B1 B5 B6 V3 V6] # Ⴘۂ𑲭.- +B; ⴘ\u06C1\u0654𑲭。-; [B1 B5 B6 V3]; [B1 B5 B6 V3] # ⴘۂ𑲭.- +B; ⴘ\u06C2𑲭。-; [B1 B5 B6 V3]; [B1 B5 B6 V3] # ⴘۂ𑲭.- +B; \uA806\u067B₆ᡐ。🛇\uFCDD; [B1 V5]; [B1 V5] # ꠆ٻ6ᡐ.🛇يم +B; \uA806\u067B6ᡐ。🛇\u064A\u0645; [B1 V5]; [B1 V5] # ꠆ٻ6ᡐ.🛇يم +B; xn--6-rrc018krt9k.xn--hhbj61429a; [B1 V5]; [B1 V5] # ꠆ٻ6ᡐ.🛇يم +B; 򸍂.㇄ᡟ𐫂\u0622; [B1 P1 V6]; [B1 P1 V6] # .㇄ᡟ𐫂آ +B; 򸍂.㇄ᡟ𐫂\u0627\u0653; [B1 P1 V6]; [B1 P1 V6] # .㇄ᡟ𐫂آ +B; xn--p292d.xn--hgb154ghrsvm2r; [B1 V6]; [B1 V6] # .㇄ᡟ𐫂آ +B; \u07DF򵚌。-\u07E9; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # ߟ.-ߩ +B; xn--6sb88139l.xn----pdd; [B1 B2 B3 V3 V6]; [B1 B2 B3 V3 V6] # ߟ.-ߩ +T; ς\u0643⾑.\u200Cᢟ\u200C⒈; [B1 B5 C1 P1 V6]; [B5 P1 V6] # ςك襾.ᢟ⒈ +N; ς\u0643⾑.\u200Cᢟ\u200C⒈; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # ςك襾.ᢟ⒈ +T; ς\u0643襾.\u200Cᢟ\u200C1.; [B1 B5 C1]; [B5] # ςك襾.ᢟ1. +N; ς\u0643襾.\u200Cᢟ\u200C1.; [B1 B5 C1]; [B1 B5 C1] # ςك襾.ᢟ1. +T; Σ\u0643襾.\u200Cᢟ\u200C1.; [B1 B5 C1]; [B5] # σك襾.ᢟ1. +N; Σ\u0643襾.\u200Cᢟ\u200C1.; [B1 B5 C1]; [B1 B5 C1] # σك襾.ᢟ1. +T; σ\u0643襾.\u200Cᢟ\u200C1.; [B1 B5 C1]; [B5] # σك襾.ᢟ1. +N; σ\u0643襾.\u200Cᢟ\u200C1.; [B1 B5 C1]; [B1 B5 C1] # σك襾.ᢟ1. +B; xn--4xa49jux8r.xn--1-4ck.; [B5]; [B5] # σك襾.ᢟ1. +B; xn--4xa49jux8r.xn--1-4ck691bba.; [B1 B5 C1]; [B1 B5 C1] # σك襾.ᢟ1. +B; xn--3xa69jux8r.xn--1-4ck691bba.; [B1 B5 C1]; [B1 B5 C1] # ςك襾.ᢟ1. 
+T; Σ\u0643⾑.\u200Cᢟ\u200C⒈; [B1 B5 C1 P1 V6]; [B5 P1 V6] # σك襾.ᢟ⒈ +N; Σ\u0643⾑.\u200Cᢟ\u200C⒈; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # σك襾.ᢟ⒈ +T; σ\u0643⾑.\u200Cᢟ\u200C⒈; [B1 B5 C1 P1 V6]; [B5 P1 V6] # σك襾.ᢟ⒈ +N; σ\u0643⾑.\u200Cᢟ\u200C⒈; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # σك襾.ᢟ⒈ +B; xn--4xa49jux8r.xn--pbf212d; [B5 V6]; [B5 V6] # σك襾.ᢟ⒈ +B; xn--4xa49jux8r.xn--pbf519aba607b; [B1 B5 C1 V6]; [B1 B5 C1 V6] # σك襾.ᢟ⒈ +B; xn--3xa69jux8r.xn--pbf519aba607b; [B1 B5 C1 V6]; [B1 B5 C1 V6] # ςك襾.ᢟ⒈ +B; ᡆ𑓝.𞵆; [P1 V6]; [P1 V6] +B; ᡆ𑓝.𞵆; [P1 V6]; [P1 V6] +B; xn--57e0440k.xn--k86h; [V6]; [V6] +T; \u0A4D𦍓\u1DEE。\u200C\u08BD񝹲; [B1 C1 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # ੍𦍓ᷮ.ࢽ +N; \u0A4D𦍓\u1DEE。\u200C\u08BD񝹲; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # ੍𦍓ᷮ.ࢽ +T; \u0A4D𦍓\u1DEE。\u200C\u08BD񝹲; [B1 C1 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # ੍𦍓ᷮ.ࢽ +N; \u0A4D𦍓\u1DEE。\u200C\u08BD񝹲; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # ੍𦍓ᷮ.ࢽ +B; xn--ybc461hph93b.xn--jzb29857e; [B1 B2 B3 V5 V6]; [B1 B2 B3 V5 V6] # ੍𦍓ᷮ.ࢽ +B; xn--ybc461hph93b.xn--jzb740j1y45h; [B1 C1 V5 V6]; [B1 C1 V5 V6] # ੍𦍓ᷮ.ࢽ +T; \u062E\u0748񅪪-.\u200C먿; [B1 B2 B3 C1 P1 V3 V6]; [B2 B3 P1 V3 V6] # خ݈-.먿 +N; \u062E\u0748񅪪-.\u200C먿; [B1 B2 B3 C1 P1 V3 V6]; [B1 B2 B3 C1 P1 V3 V6] # خ݈-.먿 +T; \u062E\u0748񅪪-.\u200C먿; [B1 B2 B3 C1 P1 V3 V6]; [B2 B3 P1 V3 V6] # خ݈-.먿 +N; \u062E\u0748񅪪-.\u200C먿; [B1 B2 B3 C1 P1 V3 V6]; [B1 B2 B3 C1 P1 V3 V6] # خ݈-.먿 +T; \u062E\u0748񅪪-.\u200C먿; [B1 B2 B3 C1 P1 V3 V6]; [B2 B3 P1 V3 V6] # خ݈-.먿 +N; \u062E\u0748񅪪-.\u200C먿; [B1 B2 B3 C1 P1 V3 V6]; [B1 B2 B3 C1 P1 V3 V6] # خ݈-.먿 +T; \u062E\u0748񅪪-.\u200C먿; [B1 B2 B3 C1 P1 V3 V6]; [B2 B3 P1 V3 V6] # خ݈-.먿 +N; \u062E\u0748񅪪-.\u200C먿; [B1 B2 B3 C1 P1 V3 V6]; [B1 B2 B3 C1 P1 V3 V6] # خ݈-.먿 +B; xn----dnc06f42153a.xn--v22b; [B2 B3 V3 V6]; [B2 B3 V3 V6] # خ݈-.먿 +B; xn----dnc06f42153a.xn--0ug1581d; [B1 B2 B3 C1 V3 V6]; [B1 B2 B3 C1 V3 V6] # خ݈-.먿 +B; 􋿦。ᠽ; [P1 V6]; [P1 V6] +B; 􋿦。ᠽ; [P1 V6]; [P1 V6] +B; xn--j890g.xn--w7e; [V6]; [V6] +T; 嬃𝍌.\u200D\u0B44; [C2]; [V5] # 嬃𝍌.ୄ +N; 嬃𝍌.\u200D\u0B44; [C2]; [C2] # 嬃𝍌.ୄ +T; 嬃𝍌.\u200D\u0B44; [C2]; [V5] # 嬃𝍌.ୄ +N; 嬃𝍌.\u200D\u0B44; [C2]; [C2] # 嬃𝍌.ୄ +B; xn--b6s0078f.xn--0ic; [V5]; [V5] # 嬃𝍌.ୄ +B; xn--b6s0078f.xn--0ic557h; [C2]; [C2] # 嬃𝍌.ୄ +B; \u0602𝌪≯.𚋲򵁨; [B1 P1 V6]; [B1 P1 V6] # 𝌪≯. +B; \u0602𝌪>\u0338.𚋲򵁨; [B1 P1 V6]; [B1 P1 V6] # 𝌪≯. +B; \u0602𝌪≯.𚋲򵁨; [B1 P1 V6]; [B1 P1 V6] # 𝌪≯. +B; \u0602𝌪>\u0338.𚋲򵁨; [B1 P1 V6]; [B1 P1 V6] # 𝌪≯. +B; xn--kfb866llx01a.xn--wp1gm3570b; [B1 V6]; [B1 V6] # 𝌪≯. +B; 򫾥\u08B7\u17CC\uA9C0.𞼠; [B5 P1 V6]; [B5 P1 V6] # ࢷ៌꧀. +B; xn--dzb638ewm4i1iy1h.xn--3m7h; [B5 V6]; [B5 V6] # ࢷ៌꧀. +T; \u200C.񟛤; [C1 P1 V6]; [P1 V6 A4_2] # . +N; \u200C.񟛤; [C1 P1 V6]; [C1 P1 V6] # . +B; .xn--q823a; [V6 A4_2]; [V6 A4_2] +B; xn--0ug.xn--q823a; [C1 V6]; [C1 V6] # . 
+B; 򺛕Ⴃ䠅.𐸑; [P1 V6]; [P1 V6] +B; 򺛕Ⴃ䠅.𐸑; [P1 V6]; [P1 V6] +B; 򺛕ⴃ䠅.𐸑; [P1 V6]; [P1 V6] +B; xn--ukju77frl47r.xn--yl0d; [V6]; [V6] +B; xn--bnd074zr557n.xn--yl0d; [V6]; [V6] +B; 򺛕ⴃ䠅.𐸑; [P1 V6]; [P1 V6] +B; \u1BF1𐹳𐹵𞤚。𝟨Ⴅ; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ᯱ𐹳𐹵𞤼.6Ⴅ +B; \u1BF1𐹳𐹵𞤚。6Ⴅ; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ᯱ𐹳𐹵𞤼.6Ⴅ +B; \u1BF1𐹳𐹵𞤼。6ⴅ; [B1 V5]; [B1 V5] # ᯱ𐹳𐹵𞤼.6ⴅ +B; xn--zzfy954hga2415t.xn--6-kvs; [B1 V5]; [B1 V5] # ᯱ𐹳𐹵𞤼.6ⴅ +B; xn--zzfy954hga2415t.xn--6-h0g; [B1 V5 V6]; [B1 V5 V6] # ᯱ𐹳𐹵𞤼.6Ⴅ +B; \u1BF1𐹳𐹵𞤼。𝟨ⴅ; [B1 V5]; [B1 V5] # ᯱ𐹳𐹵𞤼.6ⴅ +B; \u1BF1𐹳𐹵𞤚。6ⴅ; [B1 V5]; [B1 V5] # ᯱ𐹳𐹵𞤼.6ⴅ +B; \u1BF1𐹳𐹵𞤚。𝟨ⴅ; [B1 V5]; [B1 V5] # ᯱ𐹳𐹵𞤼.6ⴅ +B; -。︒; [P1 V3 V6]; [P1 V3 V6] +B; -。。; [V3 A4_2]; [V3 A4_2] +B; -..; [V3 A4_2]; [V3 A4_2] +B; -.xn--y86c; [V3 V6]; [V3 V6] +B; \u07DBჀ。-⁵--; [B1 B2 B3 P1 V2 V3 V6]; [B1 B2 B3 P1 V2 V3 V6] # ߛჀ.-5-- +B; \u07DBჀ。-5--; [B1 B2 B3 P1 V2 V3 V6]; [B1 B2 B3 P1 V2 V3 V6] # ߛჀ.-5-- +B; \u07DBⴠ。-5--; [B1 B2 B3 V2 V3]; [B1 B2 B3 V2 V3] # ߛⴠ.-5-- +B; xn--2sb691q.-5--; [B1 B2 B3 V2 V3]; [B1 B2 B3 V2 V3] # ߛⴠ.-5-- +B; xn--2sb866b.-5--; [B1 B2 B3 V2 V3 V6]; [B1 B2 B3 V2 V3 V6] # ߛჀ.-5-- +B; \u07DBⴠ。-⁵--; [B1 B2 B3 V2 V3]; [B1 B2 B3 V2 V3] # ߛⴠ.-5-- +B; ≯\uD8DD󠑕。𐹷𐹻≯𐷒; [B1 P1 V6]; [B1 P1 V6 A3] # ≯.𐹷𐹻≯ +B; >\u0338\uD8DD󠑕。𐹷𐹻>\u0338𐷒; [B1 P1 V6]; [B1 P1 V6 A3] # ≯.𐹷𐹻≯ +B; ≯\uD8DD󠑕。𐹷𐹻≯𐷒; [B1 P1 V6]; [B1 P1 V6 A3] # ≯.𐹷𐹻≯ +B; >\u0338\uD8DD󠑕。𐹷𐹻>\u0338𐷒; [B1 P1 V6]; [B1 P1 V6 A3] # ≯.𐹷𐹻≯ +B; ≯\uD8DD󠑕.xn--hdh8283gdoaqa; [B1 P1 V6]; [B1 P1 V6 A3] # ≯.𐹷𐹻≯ +B; >\u0338\uD8DD󠑕.xn--hdh8283gdoaqa; [B1 P1 V6]; [B1 P1 V6 A3] # ≯.𐹷𐹻≯ +B; >\u0338\uD8DD󠑕.XN--HDH8283GDOAQA; [B1 P1 V6]; [B1 P1 V6 A3] # ≯.𐹷𐹻≯ +B; ≯\uD8DD󠑕.XN--HDH8283GDOAQA; [B1 P1 V6]; [B1 P1 V6 A3] # ≯.𐹷𐹻≯ +B; ≯\uD8DD󠑕.Xn--Hdh8283gdoaqa; [B1 P1 V6]; [B1 P1 V6 A3] # ≯.𐹷𐹻≯ +B; >\u0338\uD8DD󠑕.Xn--Hdh8283gdoaqa; [B1 P1 V6]; [B1 P1 V6 A3] # ≯.𐹷𐹻≯ +T; ㍔\u08E6\u077C\u200D。\u0346򁳊𝅶\u0604; [B1 B5 B6 C2 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ルーブルࣦݼ.͆ +N; ㍔\u08E6\u077C\u200D。\u0346򁳊𝅶\u0604; [B1 B5 B6 C2 P1 V5 V6]; [B1 B5 B6 C2 P1 V5 V6] # ルーブルࣦݼ.͆ +T; ルーブル\u08E6\u077C\u200D。\u0346򁳊𝅶\u0604; [B1 B5 B6 C2 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ルーブルࣦݼ.͆ +N; ルーブル\u08E6\u077C\u200D。\u0346򁳊𝅶\u0604; [B1 B5 B6 C2 P1 V5 V6]; [B1 B5 B6 C2 P1 V5 V6] # ルーブルࣦݼ.͆ +T; ルーフ\u3099ル\u08E6\u077C\u200D。\u0346򁳊𝅶\u0604; [B1 B5 B6 C2 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ルーブルࣦݼ.͆ +N; ルーフ\u3099ル\u08E6\u077C\u200D。\u0346򁳊𝅶\u0604; [B1 B5 B6 C2 P1 V5 V6]; [B1 B5 B6 C2 P1 V5 V6] # ルーブルࣦݼ.͆ +B; xn--dqb73el09fncab4h.xn--kua81ls548d3608b; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # ルーブルࣦݼ.͆ +B; xn--dqb73ec22c9kp8cb1j.xn--kua81ls548d3608b; [B1 B5 B6 C2 V5 V6]; [B1 B5 B6 C2 V5 V6] # ルーブルࣦݼ.͆ +T; \u200D.F; [C2]; [A4_2] # .f +N; \u200D.F; [C2]; [C2] # .f +T; \u200D.f; [C2]; [A4_2] # .f +N; \u200D.f; [C2]; [C2] # .f +B; .f; [A4_2]; [A4_2] +B; xn--1ug.f; [C2]; [C2] # .f +B; f; ; +T; \u200D㨲。ß; [C2]; xn--9bm.ss # 㨲.ß +N; \u200D㨲。ß; [C2]; [C2] # 㨲.ß +T; \u200D㨲。ß; [C2]; xn--9bm.ss # 㨲.ß +N; \u200D㨲。ß; [C2]; [C2] # 㨲.ß +T; \u200D㨲。SS; [C2]; xn--9bm.ss # 㨲.ss +N; \u200D㨲。SS; [C2]; [C2] # 㨲.ss +T; \u200D㨲。ss; [C2]; xn--9bm.ss # 㨲.ss +N; \u200D㨲。ss; [C2]; [C2] # 㨲.ss +T; \u200D㨲。Ss; [C2]; xn--9bm.ss # 㨲.ss +N; \u200D㨲。Ss; [C2]; [C2] # 㨲.ss +B; xn--9bm.ss; 㨲.ss; xn--9bm.ss +B; 㨲.ss; ; xn--9bm.ss +B; 㨲.SS; 㨲.ss; xn--9bm.ss +B; 㨲.Ss; 㨲.ss; xn--9bm.ss +B; xn--1ug914h.ss; [C2]; [C2] # 㨲.ss +B; xn--1ug914h.xn--zca; [C2]; [C2] # 㨲.ß +T; \u200D㨲。SS; [C2]; xn--9bm.ss # 㨲.ss +N; \u200D㨲。SS; [C2]; [C2] # 㨲.ss +T; \u200D㨲。ss; [C2]; xn--9bm.ss # 㨲.ss +N; \u200D㨲。ss; [C2]; [C2] # 㨲.ss +T; \u200D㨲。Ss; [C2]; xn--9bm.ss # 㨲.ss +N; \u200D㨲。Ss; 
[C2]; [C2] # 㨲.ss +B; \u0605\u067E。\u08A8; [B1 P1 V6]; [B1 P1 V6] # پ.ࢨ +B; \u0605\u067E。\u08A8; [B1 P1 V6]; [B1 P1 V6] # پ.ࢨ +B; xn--nfb6v.xn--xyb; [B1 V6]; [B1 V6] # پ.ࢨ +B; ⾑\u0753𞤁。𐹵\u0682; [B1 B5 B6]; [B1 B5 B6] # 襾ݓ𞤣.𐹵ڂ +B; 襾\u0753𞤁。𐹵\u0682; [B1 B5 B6]; [B1 B5 B6] # 襾ݓ𞤣.𐹵ڂ +B; 襾\u0753𞤣。𐹵\u0682; [B1 B5 B6]; [B1 B5 B6] # 襾ݓ𞤣.𐹵ڂ +B; xn--6ob9577deqwl.xn--7ib5526k; [B1 B5 B6]; [B1 B5 B6] # 襾ݓ𞤣.𐹵ڂ +B; ⾑\u0753𞤣。𐹵\u0682; [B1 B5 B6]; [B1 B5 B6] # 襾ݓ𞤣.𐹵ڂ +T; 񦴻ς-\u20EB。\u0754-ꡛ; [B2 B3 B6 P1 V6]; [B2 B3 B6 P1 V6] # ς-⃫.ݔ-ꡛ +N; 񦴻ς-\u20EB。\u0754-ꡛ; [B2 B3 B6 P1 V6]; [B2 B3 B6 P1 V6] # ς-⃫.ݔ-ꡛ +T; 񦴻ς-\u20EB。\u0754-ꡛ; [B2 B3 B6 P1 V6]; [B2 B3 B6 P1 V6] # ς-⃫.ݔ-ꡛ +N; 񦴻ς-\u20EB。\u0754-ꡛ; [B2 B3 B6 P1 V6]; [B2 B3 B6 P1 V6] # ς-⃫.ݔ-ꡛ +B; 񦴻Σ-\u20EB。\u0754-ꡛ; [B2 B3 B6 P1 V6]; [B2 B3 B6 P1 V6] # σ-⃫.ݔ-ꡛ +B; 񦴻σ-\u20EB。\u0754-ꡛ; [B2 B3 B6 P1 V6]; [B2 B3 B6 P1 V6] # σ-⃫.ݔ-ꡛ +B; xn----zmb705tuo34l.xn----53c4874j; [B2 B3 B6 V6]; [B2 B3 B6 V6] # σ-⃫.ݔ-ꡛ +B; xn----xmb015tuo34l.xn----53c4874j; [B2 B3 B6 V6]; [B2 B3 B6 V6] # ς-⃫.ݔ-ꡛ +B; 񦴻Σ-\u20EB。\u0754-ꡛ; [B2 B3 B6 P1 V6]; [B2 B3 B6 P1 V6] # σ-⃫.ݔ-ꡛ +B; 񦴻σ-\u20EB。\u0754-ꡛ; [B2 B3 B6 P1 V6]; [B2 B3 B6 P1 V6] # σ-⃫.ݔ-ꡛ +T; \u200D.􀸨; [C2 P1 V6]; [P1 V6 A4_2] # . +N; \u200D.􀸨; [C2 P1 V6]; [C2 P1 V6] # . +T; \u200D.􀸨; [C2 P1 V6]; [P1 V6 A4_2] # . +N; \u200D.􀸨; [C2 P1 V6]; [C2 P1 V6] # . +B; .xn--h327f; [V6 A4_2]; [V6 A4_2] +B; xn--1ug.xn--h327f; [C2 V6]; [C2 V6] # . +B; 񣭻񌥁。≠𝟲; [P1 V6]; [P1 V6] +B; 񣭻񌥁。=\u0338𝟲; [P1 V6]; [P1 V6] +B; 񣭻񌥁。≠6; [P1 V6]; [P1 V6] +B; 񣭻񌥁。=\u03386; [P1 V6]; [P1 V6] +B; xn--h79w4z99a.xn--6-tfo; [V6]; [V6] +T; 󠅊ᡭ\u200D.𐥡; [B6 C2 P1 V6]; [P1 V6] # ᡭ. +N; 󠅊ᡭ\u200D.𐥡; [B6 C2 P1 V6]; [B6 C2 P1 V6] # ᡭ. +B; xn--98e.xn--om9c; [V6]; [V6] +B; xn--98e810b.xn--om9c; [B6 C2 V6]; [B6 C2 V6] # ᡭ. +B; \u0C40\u0855𐥛𑄴.󭰵; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ీࡕ𑄴. +B; \u0C40\u0855𐥛𑄴.󭰵; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ీࡕ𑄴. +B; xn--kwb91r5112avtg.xn--o580f; [B1 V5 V6]; [B1 V5 V6] # ీࡕ𑄴. +T; 𞤮。𑇊\u200C≯\u1CE6; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # 𞤮.𑇊≯᳦ +N; 𞤮。𑇊\u200C≯\u1CE6; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 𞤮.𑇊≯᳦ +T; 𞤮。𑇊\u200C>\u0338\u1CE6; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # 𞤮.𑇊≯᳦ +N; 𞤮。𑇊\u200C>\u0338\u1CE6; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 𞤮.𑇊≯᳦ +T; 𞤌。𑇊\u200C>\u0338\u1CE6; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # 𞤮.𑇊≯᳦ +N; 𞤌。𑇊\u200C>\u0338\u1CE6; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 𞤮.𑇊≯᳦ +T; 𞤌。𑇊\u200C≯\u1CE6; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # 𞤮.𑇊≯᳦ +N; 𞤌。𑇊\u200C≯\u1CE6; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 𞤮.𑇊≯᳦ +B; xn--me6h.xn--z6fz8ueq2v; [B1 V5 V6]; [B1 V5 V6] # 𞤮.𑇊≯᳦ +B; xn--me6h.xn--z6f16kn9b2642b; [B1 C1 V5 V6]; [B1 C1 V5 V6] # 𞤮.𑇊≯᳦ +B; 󠄀𝟕.𞤌񛗓Ⴉ; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] +B; 󠄀7.𞤌񛗓Ⴉ; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] +B; 󠄀7.𞤮񛗓ⴉ; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] +B; 7.xn--0kjz523lv1vv; [B1 B2 B3 V6]; [B1 B2 B3 V6] +B; 7.xn--hnd3403vv1vv; [B1 B2 B3 V6]; [B1 B2 B3 V6] +B; 󠄀𝟕.𞤮񛗓ⴉ; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] +B; 󠄀7.𞤌񛗓ⴉ; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] +B; 󠄀𝟕.𞤌񛗓ⴉ; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] +B; 閃9𝩍。Ↄ\u0669\u08B1\u0B4D; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 閃9𝩍.Ↄ٩ࢱ୍ +B; 閃9𝩍。ↄ\u0669\u08B1\u0B4D; [B5 B6]; [B5 B6] # 閃9𝩍.ↄ٩ࢱ୍ +B; xn--9-3j6dk517f.xn--iib28ij3c4t9a; [B5 B6]; [B5 B6] # 閃9𝩍.ↄ٩ࢱ୍ +B; xn--9-3j6dk517f.xn--iib28ij3c0t9a; [B5 B6 V6]; [B5 B6 V6] # 閃9𝩍.Ↄ٩ࢱ୍ +B; \uAAF6ᢏ\u0E3A2.𐋢\u0745\u0F9F︒; [P1 V5 V6]; [P1 V5 V6] # ꫶ᢏฺ2.𐋢݅ྟ︒ +B; \uAAF6ᢏ\u0E3A2.𐋢\u0745\u0F9F。; [V5]; [V5] # ꫶ᢏฺ2.𐋢݅ྟ. +B; xn--2-2zf840fk16m.xn--sob093b2m7s.; [V5]; [V5] # ꫶ᢏฺ2.𐋢݅ྟ. 
+B; xn--2-2zf840fk16m.xn--sob093bj62sz9d; [V5 V6]; [V5 V6] # ꫶ᢏฺ2.𐋢݅ྟ︒ +B; 󅴧。≠-󠙄⾛; [P1 V6]; [P1 V6] +B; 󅴧。=\u0338-󠙄⾛; [P1 V6]; [P1 V6] +B; 󅴧。≠-󠙄走; [P1 V6]; [P1 V6] +B; 󅴧。=\u0338-󠙄走; [P1 V6]; [P1 V6] +B; xn--gm57d.xn----tfo4949b3664m; [V6]; [V6] +B; \u076E\u0604Ⴊ。-≠\u1160; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # ݮႪ.-≠ +B; \u076E\u0604Ⴊ。-=\u0338\u1160; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # ݮႪ.-≠ +B; \u076E\u0604ⴊ。-=\u0338\u1160; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # ݮⴊ.-≠ +B; \u076E\u0604ⴊ。-≠\u1160; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # ݮⴊ.-≠ +B; xn--mfb73ek93f.xn----5bh589i; [B1 B2 B3 V3 V6]; [B1 B2 B3 V3 V6] # ݮⴊ.-≠ +B; xn--mfb73ex6r.xn----5bh589i; [B1 B2 B3 V3 V6]; [B1 B2 B3 V3 V6] # ݮႪ.-≠ +T; \uFB4F𐹧𝟒≯。\u200C; [B1 B3 B4 C1 P1 V6]; [B3 B4 P1 V6] # אל𐹧4≯. +N; \uFB4F𐹧𝟒≯。\u200C; [B1 B3 B4 C1 P1 V6]; [B1 B3 B4 C1 P1 V6] # אל𐹧4≯. +T; \uFB4F𐹧𝟒>\u0338。\u200C; [B1 B3 B4 C1 P1 V6]; [B3 B4 P1 V6] # אל𐹧4≯. +N; \uFB4F𐹧𝟒>\u0338。\u200C; [B1 B3 B4 C1 P1 V6]; [B1 B3 B4 C1 P1 V6] # אל𐹧4≯. +T; \u05D0\u05DC𐹧4≯。\u200C; [B1 B3 B4 C1 P1 V6]; [B3 B4 P1 V6] # אל𐹧4≯. +N; \u05D0\u05DC𐹧4≯。\u200C; [B1 B3 B4 C1 P1 V6]; [B1 B3 B4 C1 P1 V6] # אל𐹧4≯. +T; \u05D0\u05DC𐹧4>\u0338。\u200C; [B1 B3 B4 C1 P1 V6]; [B3 B4 P1 V6] # אל𐹧4≯. +N; \u05D0\u05DC𐹧4>\u0338。\u200C; [B1 B3 B4 C1 P1 V6]; [B1 B3 B4 C1 P1 V6] # אל𐹧4≯. +B; xn--4-zhc0by36txt0w.; [B3 B4 V6]; [B3 B4 V6] # אל𐹧4≯. +B; xn--4-zhc0by36txt0w.xn--0ug; [B1 B3 B4 C1 V6]; [B1 B3 B4 C1 V6] # אל𐹧4≯. +B; 𝟎。甯; 0.甯; 0.xn--qny +B; 0。甯; 0.甯; 0.xn--qny +B; 0.xn--qny; 0.甯; 0.xn--qny +B; 0.甯; ; 0.xn--qny +B; -⾆.\uAAF6; [V3 V5]; [V3 V5] # -舌.꫶ +B; -舌.\uAAF6; [V3 V5]; [V3 V5] # -舌.꫶ +B; xn----ef8c.xn--2v9a; [V3 V5]; [V3 V5] # -舌.꫶ +B; -。ᢘ; [V3]; [V3] +B; -。ᢘ; [V3]; [V3] +B; -.xn--ibf; [V3]; [V3] +B; 🂴Ⴋ.≮; [P1 V6]; [P1 V6] +B; 🂴Ⴋ.<\u0338; [P1 V6]; [P1 V6] +B; 🂴ⴋ.<\u0338; [P1 V6]; [P1 V6] +B; 🂴ⴋ.≮; [P1 V6]; [P1 V6] +B; xn--2kj7565l.xn--gdh; [V6]; [V6] +B; xn--jnd1986v.xn--gdh; [V6]; [V6] +T; 璼𝨭。\u200C󠇟; [C1]; xn--gky8837e. # 璼𝨭. +N; 璼𝨭。\u200C󠇟; [C1]; [C1] # 璼𝨭. +T; 璼𝨭。\u200C󠇟; [C1]; xn--gky8837e. # 璼𝨭. +N; 璼𝨭。\u200C󠇟; [C1]; [C1] # 璼𝨭. +B; xn--gky8837e.; 璼𝨭.; xn--gky8837e. +B; 璼𝨭.; ; xn--gky8837e. +B; xn--gky8837e.xn--0ug; [C1]; [C1] # 璼𝨭. +B; \u06698񂍽。-5🞥; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ٩8.-5🞥 +B; \u06698񂍽。-5🞥; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ٩8.-5🞥 +B; xn--8-qqc97891f.xn---5-rp92a; [B1 V3 V6]; [B1 V3 V6] # ٩8.-5🞥 +T; \u200C.\u200C; [C1]; [A4_2] # . +N; \u200C.\u200C; [C1]; [C1] # . +B; xn--0ug.xn--0ug; [C1]; [C1] # . 
+T; \u200D튛.\u0716; [B1 C2]; xn--157b.xn--gnb # 튛.ܖ +N; \u200D튛.\u0716; [B1 C2]; [B1 C2] # 튛.ܖ +T; \u200D튛.\u0716; [B1 C2]; xn--157b.xn--gnb # 튛.ܖ +N; \u200D튛.\u0716; [B1 C2]; [B1 C2] # 튛.ܖ +B; xn--157b.xn--gnb; 튛.\u0716; xn--157b.xn--gnb # 튛.ܖ +B; 튛.\u0716; ; xn--157b.xn--gnb # 튛.ܖ +B; 튛.\u0716; 튛.\u0716; xn--157b.xn--gnb # 튛.ܖ +B; xn--1ug4441e.xn--gnb; [B1 C2]; [B1 C2] # 튛.ܖ +B; ᡋ𐹰𞽳.\u0779ⴞ; [B2 B3 B5 B6 P1 V6]; [B2 B3 B5 B6 P1 V6] # ᡋ𐹰.ݹⴞ +B; ᡋ𐹰𞽳.\u0779Ⴞ; [B2 B3 B5 B6 P1 V6]; [B2 B3 B5 B6 P1 V6] # ᡋ𐹰.ݹႾ +B; xn--b8e0417jocvf.xn--9pb068b; [B2 B3 B5 B6 V6]; [B2 B3 B5 B6 V6] # ᡋ𐹰.ݹႾ +B; xn--b8e0417jocvf.xn--9pb883q; [B2 B3 B5 B6 V6]; [B2 B3 B5 B6 V6] # ᡋ𐹰.ݹⴞ +B; 𐷃\u0662𝅻𝟧.𐹮𐹬Ⴇ; [B1 B4 P1 V6]; [B1 B4 P1 V6] # ٢𝅻5.𐹮𐹬Ⴇ +B; 𐷃\u0662𝅻5.𐹮𐹬Ⴇ; [B1 B4 P1 V6]; [B1 B4 P1 V6] # ٢𝅻5.𐹮𐹬Ⴇ +B; 𐷃\u0662𝅻5.𐹮𐹬ⴇ; [B1 B4 P1 V6]; [B1 B4 P1 V6] # ٢𝅻5.𐹮𐹬ⴇ +B; xn--5-cqc8833rhv7f.xn--ykjz523efa; [B1 B4 V6]; [B1 B4 V6] # ٢𝅻5.𐹮𐹬ⴇ +B; xn--5-cqc8833rhv7f.xn--fnd3401kfa; [B1 B4 V6]; [B1 B4 V6] # ٢𝅻5.𐹮𐹬Ⴇ +B; 𐷃\u0662𝅻𝟧.𐹮𐹬ⴇ; [B1 B4 P1 V6]; [B1 B4 P1 V6] # ٢𝅻5.𐹮𐹬ⴇ +B; Ⴗ.\u05C2𑄴\uA9B7񘃨; [P1 V5 V6]; [P1 V5 V6] # Ⴗ.𑄴ׂꦷ +B; Ⴗ.𑄴\u05C2\uA9B7񘃨; [P1 V5 V6]; [P1 V5 V6] # Ⴗ.𑄴ׂꦷ +B; Ⴗ.𑄴\u05C2\uA9B7񘃨; [P1 V5 V6]; [P1 V5 V6] # Ⴗ.𑄴ׂꦷ +B; ⴗ.𑄴\u05C2\uA9B7񘃨; [P1 V5 V6]; [P1 V5 V6] # ⴗ.𑄴ׂꦷ +B; xn--flj.xn--qdb0605f14ycrms3c; [V5 V6]; [V5 V6] # ⴗ.𑄴ׂꦷ +B; xn--vnd.xn--qdb0605f14ycrms3c; [V5 V6]; [V5 V6] # Ⴗ.𑄴ׂꦷ +B; ⴗ.𑄴\u05C2\uA9B7񘃨; [P1 V5 V6]; [P1 V5 V6] # ⴗ.𑄴ׂꦷ +B; ⴗ.\u05C2𑄴\uA9B7񘃨; [P1 V5 V6]; [P1 V5 V6] # ⴗ.𑄴ׂꦷ +B; 𝟾𾤘.򇕛\u066C; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # 8.٬ +B; 8𾤘.򇕛\u066C; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # 8.٬ +B; xn--8-kh23b.xn--lib78461i; [B1 B5 B6 V6]; [B1 B5 B6 V6] # 8.٬ +B; ⒈酫︒。\u08D6; [P1 V5 V6]; [P1 V5 V6] # ⒈酫︒.ࣖ +B; 1.酫。。\u08D6; [V5 A4_2]; [V5 A4_2] # 1.酫..ࣖ +B; 1.xn--8j4a..xn--8zb; [V5 A4_2]; [V5 A4_2] # 1.酫..ࣖ +B; xn--tsh4490bfe8c.xn--8zb; [V5 V6]; [V5 V6] # ⒈酫︒.ࣖ +T; \u2DE3\u200C≮\u1A6B.\u200C\u0E3A; [C1 P1 V5 V6]; [P1 V5 V6] # ⷣ≮ᩫ.ฺ +N; \u2DE3\u200C≮\u1A6B.\u200C\u0E3A; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ⷣ≮ᩫ.ฺ +T; \u2DE3\u200C<\u0338\u1A6B.\u200C\u0E3A; [C1 P1 V5 V6]; [P1 V5 V6] # ⷣ≮ᩫ.ฺ +N; \u2DE3\u200C<\u0338\u1A6B.\u200C\u0E3A; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ⷣ≮ᩫ.ฺ +B; xn--uof548an0j.xn--o4c; [V5 V6]; [V5 V6] # ⷣ≮ᩫ.ฺ +B; xn--uof63xk4bf3s.xn--o4c732g; [C1 V5 V6]; [C1 V5 V6] # ⷣ≮ᩫ.ฺ +T; 𞪂。ႷႽ¹\u200D; [B6 C2 P1 V6]; [P1 V6] # .ႷႽ1 +N; 𞪂。ႷႽ¹\u200D; [B6 C2 P1 V6]; [B6 C2 P1 V6] # .ႷႽ1 +T; 𞪂。ႷႽ1\u200D; [B6 C2 P1 V6]; [P1 V6] # .ႷႽ1 +N; 𞪂。ႷႽ1\u200D; [B6 C2 P1 V6]; [B6 C2 P1 V6] # .ႷႽ1 +T; 𞪂。ⴗⴝ1\u200D; [B6 C2 P1 V6]; [P1 V6] # .ⴗⴝ1 +N; 𞪂。ⴗⴝ1\u200D; [B6 C2 P1 V6]; [B6 C2 P1 V6] # .ⴗⴝ1 +T; 𞪂。Ⴗⴝ1\u200D; [B6 C2 P1 V6]; [P1 V6] # .Ⴗⴝ1 +N; 𞪂。Ⴗⴝ1\u200D; [B6 C2 P1 V6]; [B6 C2 P1 V6] # .Ⴗⴝ1 +B; xn--co6h.xn--1-h1g429s; [V6]; [V6] +B; xn--co6h.xn--1-h1g398iewm; [B6 C2 V6]; [B6 C2 V6] # .Ⴗⴝ1 +B; xn--co6h.xn--1-kwssa; [V6]; [V6] +B; xn--co6h.xn--1-ugn710dya; [B6 C2 V6]; [B6 C2 V6] # .ⴗⴝ1 +B; xn--co6h.xn--1-h1gs; [V6]; [V6] +B; xn--co6h.xn--1-h1gs597m; [B6 C2 V6]; [B6 C2 V6] # .ႷႽ1 +T; 𞪂。ⴗⴝ¹\u200D; [B6 C2 P1 V6]; [P1 V6] # .ⴗⴝ1 +N; 𞪂。ⴗⴝ¹\u200D; [B6 C2 P1 V6]; [B6 C2 P1 V6] # .ⴗⴝ1 +T; 𞪂。Ⴗⴝ¹\u200D; [B6 C2 P1 V6]; [P1 V6] # .Ⴗⴝ1 +N; 𞪂。Ⴗⴝ¹\u200D; [B6 C2 P1 V6]; [B6 C2 P1 V6] # .Ⴗⴝ1 +B; 𑄴𑄳2.𞳿󠀳-; [B1 B3 P1 V3 V5 V6]; [B1 B3 P1 V3 V5 V6] +B; xn--2-h87ic.xn----s39r33498d; [B1 B3 V3 V5 V6]; [B1 B3 V3 V5 V6] +B; 󠕲󟶶\u0665。񀁁𑄳𞤃\u0710; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # ٥.𑄳𞤥ܐ +B; 󠕲󟶶\u0665。񀁁𑄳𞤃\u0710; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # ٥.𑄳𞤥ܐ +B; 󠕲󟶶\u0665。񀁁𑄳𞤥\u0710; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # ٥.𑄳𞤥ܐ +B; xn--eib57614py3ea.xn--9mb5737kqnpfzkwr; [B1 B5 
B6 V6]; [B1 B5 B6 V6] # ٥.𑄳𞤥ܐ +B; 󠕲󟶶\u0665。񀁁𑄳𞤥\u0710; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # ٥.𑄳𞤥ܐ +T; \u0720򲠽𐹢\u17BB。ςᢈ🝭\u200C; [B2 B6 C1 P1 V6]; [B2 B6 P1 V6] # ܠ𐹢ុ.ςᢈ🝭 +N; \u0720򲠽𐹢\u17BB。ςᢈ🝭\u200C; [B2 B6 C1 P1 V6]; [B2 B6 C1 P1 V6] # ܠ𐹢ុ.ςᢈ🝭 +T; \u0720򲠽𐹢\u17BB。ςᢈ🝭\u200C; [B2 B6 C1 P1 V6]; [B2 B6 P1 V6] # ܠ𐹢ុ.ςᢈ🝭 +N; \u0720򲠽𐹢\u17BB。ςᢈ🝭\u200C; [B2 B6 C1 P1 V6]; [B2 B6 C1 P1 V6] # ܠ𐹢ុ.ςᢈ🝭 +T; \u0720򲠽𐹢\u17BB。Σᢈ🝭\u200C; [B2 B6 C1 P1 V6]; [B2 B6 P1 V6] # ܠ𐹢ុ.σᢈ🝭 +N; \u0720򲠽𐹢\u17BB。Σᢈ🝭\u200C; [B2 B6 C1 P1 V6]; [B2 B6 C1 P1 V6] # ܠ𐹢ុ.σᢈ🝭 +T; \u0720򲠽𐹢\u17BB。σᢈ🝭\u200C; [B2 B6 C1 P1 V6]; [B2 B6 P1 V6] # ܠ𐹢ុ.σᢈ🝭 +N; \u0720򲠽𐹢\u17BB。σᢈ🝭\u200C; [B2 B6 C1 P1 V6]; [B2 B6 C1 P1 V6] # ܠ𐹢ុ.σᢈ🝭 +B; xn--qnb616fis0qzt36f.xn--4xa847hli46a; [B2 B6 V6]; [B2 B6 V6] # ܠ𐹢ុ.σᢈ🝭 +B; xn--qnb616fis0qzt36f.xn--4xa847h6ofgl44c; [B2 B6 C1 V6]; [B2 B6 C1 V6] # ܠ𐹢ុ.σᢈ🝭 +B; xn--qnb616fis0qzt36f.xn--3xa057h6ofgl44c; [B2 B6 C1 V6]; [B2 B6 C1 V6] # ܠ𐹢ុ.ςᢈ🝭 +T; \u0720򲠽𐹢\u17BB。Σᢈ🝭\u200C; [B2 B6 C1 P1 V6]; [B2 B6 P1 V6] # ܠ𐹢ុ.σᢈ🝭 +N; \u0720򲠽𐹢\u17BB。Σᢈ🝭\u200C; [B2 B6 C1 P1 V6]; [B2 B6 C1 P1 V6] # ܠ𐹢ុ.σᢈ🝭 +T; \u0720򲠽𐹢\u17BB。σᢈ🝭\u200C; [B2 B6 C1 P1 V6]; [B2 B6 P1 V6] # ܠ𐹢ុ.σᢈ🝭 +N; \u0720򲠽𐹢\u17BB。σᢈ🝭\u200C; [B2 B6 C1 P1 V6]; [B2 B6 C1 P1 V6] # ܠ𐹢ុ.σᢈ🝭 +T; \u200D--≮。𐹧; [B1 C2 P1 V6]; [B1 P1 V3 V6] # --≮.𐹧 +N; \u200D--≮。𐹧; [B1 C2 P1 V6]; [B1 C2 P1 V6] # --≮.𐹧 +T; \u200D--<\u0338。𐹧; [B1 C2 P1 V6]; [B1 P1 V3 V6] # --≮.𐹧 +N; \u200D--<\u0338。𐹧; [B1 C2 P1 V6]; [B1 C2 P1 V6] # --≮.𐹧 +B; xn-----ujv.xn--fo0d; [B1 V3 V6]; [B1 V3 V6] +B; xn-----l1tz1k.xn--fo0d; [B1 C2 V6]; [B1 C2 V6] # --≮.𐹧 +B; \uA806。𻚏\u0FB0⒕; [P1 V5 V6]; [P1 V5 V6] # ꠆.ྰ⒕ +B; \uA806。𻚏\u0FB014.; [P1 V5 V6]; [P1 V5 V6] # ꠆.ྰ14. +B; xn--l98a.xn--14-jsj57880f.; [V5 V6]; [V5 V6] # ꠆.ྰ14. +B; xn--l98a.xn--dgd218hhp28d; [V5 V6]; [V5 V6] # ꠆.ྰ⒕ +B; 򮉂\u06BC.𑆺\u0669; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ڼ.𑆺٩ +B; 򮉂\u06BC.𑆺\u0669; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ڼ.𑆺٩ +B; xn--vkb92243l.xn--iib9797k; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # ڼ.𑆺٩ +B; 󠁎\u06D0-。𞤴; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ې-.𞤴 +B; 󠁎\u06D0-。𞤒; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ې-.𞤴 +B; xn----mwc72685y.xn--se6h; [B1 V3 V6]; [B1 V3 V6] # ې-.𞤴 +T; 𝟠4󠇗𝈻.\u200D𐋵⛧\u200D; [C2]; xn--84-s850a.xn--59h6326e # 84𝈻.𐋵⛧ +N; 𝟠4󠇗𝈻.\u200D𐋵⛧\u200D; [C2]; [C2] # 84𝈻.𐋵⛧ +T; 84󠇗𝈻.\u200D𐋵⛧\u200D; [C2]; xn--84-s850a.xn--59h6326e # 84𝈻.𐋵⛧ +N; 84󠇗𝈻.\u200D𐋵⛧\u200D; [C2]; [C2] # 84𝈻.𐋵⛧ +B; xn--84-s850a.xn--59h6326e; 84𝈻.𐋵⛧; xn--84-s850a.xn--59h6326e; NV8 +B; 84𝈻.𐋵⛧; ; xn--84-s850a.xn--59h6326e; NV8 +B; xn--84-s850a.xn--1uga573cfq1w; [C2]; [C2] # 84𝈻.𐋵⛧ +B; -\u0601。ᡪ; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -.ᡪ +B; -\u0601。ᡪ; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -.ᡪ +B; xn----tkc.xn--68e; [B1 V3 V6]; [B1 V3 V6] # -.ᡪ +T; ≮𝟕.謖ß≯; [P1 V6]; [P1 V6] +N; ≮𝟕.謖ß≯; [P1 V6]; [P1 V6] +T; <\u0338𝟕.謖ß>\u0338; [P1 V6]; [P1 V6] +N; <\u0338𝟕.謖ß>\u0338; [P1 V6]; [P1 V6] +T; ≮7.謖ß≯; [P1 V6]; [P1 V6] +N; ≮7.謖ß≯; [P1 V6]; [P1 V6] +T; <\u03387.謖ß>\u0338; [P1 V6]; [P1 V6] +N; <\u03387.謖ß>\u0338; [P1 V6]; [P1 V6] +B; <\u03387.謖SS>\u0338; [P1 V6]; [P1 V6] +B; ≮7.謖SS≯; [P1 V6]; [P1 V6] +B; ≮7.謖ss≯; [P1 V6]; [P1 V6] +B; <\u03387.謖ss>\u0338; [P1 V6]; [P1 V6] +B; <\u03387.謖Ss>\u0338; [P1 V6]; [P1 V6] +B; ≮7.謖Ss≯; [P1 V6]; [P1 V6] +B; xn--7-mgo.xn--ss-xjvv174c; [V6]; [V6] +B; xn--7-mgo.xn--zca892oly5e; [V6]; [V6] +B; <\u0338𝟕.謖SS>\u0338; [P1 V6]; [P1 V6] +B; ≮𝟕.謖SS≯; [P1 V6]; [P1 V6] +B; ≮𝟕.謖ss≯; [P1 V6]; [P1 V6] +B; <\u0338𝟕.謖ss>\u0338; [P1 V6]; [P1 V6] +B; <\u0338𝟕.謖Ss>\u0338; [P1 V6]; [P1 V6] +B; ≮𝟕.謖Ss≯; [P1 V6]; [P1 V6] +B; 朶Ⴉ𞪡.𝨽\u0825📻-; [B1 B5 B6 P1 V3 V5 V6]; [B1 B5 B6 P1 V3 
V5 V6] # 朶Ⴉ.𝨽ࠥ📻- +B; 朶ⴉ𞪡.𝨽\u0825📻-; [B1 B5 B6 P1 V3 V5 V6]; [B1 B5 B6 P1 V3 V5 V6] # 朶ⴉ.𝨽ࠥ📻- +B; xn--0kjz47pd57t.xn----3gd37096apmwa; [B1 B5 B6 V3 V5 V6]; [B1 B5 B6 V3 V5 V6] # 朶ⴉ.𝨽ࠥ📻- +B; xn--hnd7245bd56p.xn----3gd37096apmwa; [B1 B5 B6 V3 V5 V6]; [B1 B5 B6 V3 V5 V6] # 朶Ⴉ.𝨽ࠥ📻- +T; 𐤎。󑿰\u200C≮\u200D; [B6 C1 C2 P1 V6]; [B6 P1 V6] # 𐤎.≮ +N; 𐤎。󑿰\u200C≮\u200D; [B6 C1 C2 P1 V6]; [B6 C1 C2 P1 V6] # 𐤎.≮ +T; 𐤎。󑿰\u200C<\u0338\u200D; [B6 C1 C2 P1 V6]; [B6 P1 V6] # 𐤎.≮ +N; 𐤎。󑿰\u200C<\u0338\u200D; [B6 C1 C2 P1 V6]; [B6 C1 C2 P1 V6] # 𐤎.≮ +B; xn--bk9c.xn--gdhx6802k; [B6 V6]; [B6 V6] +B; xn--bk9c.xn--0ugc04p2u638c; [B6 C1 C2 V6]; [B6 C1 C2 V6] # 𐤎.≮ +T; 񭜎⒈。\u200C𝟤; [C1 P1 V6]; [P1 V6] # ⒈.2 +N; 񭜎⒈。\u200C𝟤; [C1 P1 V6]; [C1 P1 V6] # ⒈.2 +T; 񭜎1.。\u200C2; [C1 P1 V6 A4_2]; [P1 V6 A4_2] # 1..2 +N; 񭜎1.。\u200C2; [C1 P1 V6 A4_2]; [C1 P1 V6 A4_2] # 1..2 +B; xn--1-ex54e..2; [V6 A4_2]; [V6 A4_2] +B; xn--1-ex54e..xn--2-rgn; [C1 V6 A4_2]; [C1 V6 A4_2] # 1..2 +B; xn--tsh94183d.2; [V6]; [V6] +B; xn--tsh94183d.xn--2-rgn; [C1 V6]; [C1 V6] # ⒈.2 +T; 󠟊𐹤\u200D.𐹳󙄵𐹶; [B1 C2 P1 V6]; [B1 P1 V6] # 𐹤.𐹳𐹶 +N; 󠟊𐹤\u200D.𐹳󙄵𐹶; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 𐹤.𐹳𐹶 +T; 󠟊𐹤\u200D.𐹳󙄵𐹶; [B1 C2 P1 V6]; [B1 P1 V6] # 𐹤.𐹳𐹶 +N; 󠟊𐹤\u200D.𐹳󙄵𐹶; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 𐹤.𐹳𐹶 +B; xn--co0d98977c.xn--ro0dga22807v; [B1 V6]; [B1 V6] +B; xn--1ugy994g7k93g.xn--ro0dga22807v; [B1 C2 V6]; [B1 C2 V6] # 𐹤.𐹳𐹶 +B; 𞤴𐹻𑓂𐭝.\u094D\uFE07􉛯; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𞤴𐹻𑓂𐭝.् +B; 𞤴𐹻𑓂𐭝.\u094D\uFE07􉛯; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𞤴𐹻𑓂𐭝.् +B; 𞤒𐹻𑓂𐭝.\u094D\uFE07􉛯; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𞤴𐹻𑓂𐭝.् +B; xn--609c96c09grp2w.xn--n3b28708s; [B1 V5 V6]; [B1 V5 V6] # 𞤴𐹻𑓂𐭝.् +B; 𞤒𐹻𑓂𐭝.\u094D\uFE07􉛯; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𞤴𐹻𑓂𐭝.् +B; \u0668。𐹠𐹽񗮶; [B1 P1 V6]; [B1 P1 V6] # ٨.𐹠𐹽 +B; \u0668。𐹠𐹽񗮶; [B1 P1 V6]; [B1 P1 V6] # ٨.𐹠𐹽 +B; xn--hib.xn--7n0d2bu9196b; [B1 V6]; [B1 V6] # ٨.𐹠𐹽 +B; \u1160񍀜.8򶾵\u069C; [B1 P1 V6]; [B1 P1 V6] # .8ڜ +B; xn--psd85033d.xn--8-otc61545t; [B1 V6]; [B1 V6] # .8ڜ +T; \u200D\u200C󠆪。ß𑓃; [C1 C2]; [A4_2] # .ß𑓃 +N; \u200D\u200C󠆪。ß𑓃; [C1 C2]; [C1 C2] # .ß𑓃 +T; \u200D\u200C󠆪。ß𑓃; [C1 C2]; [A4_2] # .ß𑓃 +N; \u200D\u200C󠆪。ß𑓃; [C1 C2]; [C1 C2] # .ß𑓃 +T; \u200D\u200C󠆪。SS𑓃; [C1 C2]; [A4_2] # .ss𑓃 +N; \u200D\u200C󠆪。SS𑓃; [C1 C2]; [C1 C2] # .ss𑓃 +T; \u200D\u200C󠆪。ss𑓃; [C1 C2]; [A4_2] # .ss𑓃 +N; \u200D\u200C󠆪。ss𑓃; [C1 C2]; [C1 C2] # .ss𑓃 +T; \u200D\u200C󠆪。Ss𑓃; [C1 C2]; [A4_2] # .ss𑓃 +N; \u200D\u200C󠆪。Ss𑓃; [C1 C2]; [C1 C2] # .ss𑓃 +B; .xn--ss-bh7o; [A4_2]; [A4_2] +B; xn--0ugb.xn--ss-bh7o; [C1 C2]; [C1 C2] # .ss𑓃 +B; xn--0ugb.xn--zca0732l; [C1 C2]; [C1 C2] # .ß𑓃 +T; \u200D\u200C󠆪。SS𑓃; [C1 C2]; [A4_2] # .ss𑓃 +N; \u200D\u200C󠆪。SS𑓃; [C1 C2]; [C1 C2] # .ss𑓃 +T; \u200D\u200C󠆪。ss𑓃; [C1 C2]; [A4_2] # .ss𑓃 +N; \u200D\u200C󠆪。ss𑓃; [C1 C2]; [C1 C2] # .ss𑓃 +T; \u200D\u200C󠆪。Ss𑓃; [C1 C2]; [A4_2] # .ss𑓃 +N; \u200D\u200C󠆪。Ss𑓃; [C1 C2]; [C1 C2] # .ss𑓃 +B; xn--ss-bh7o; ss𑓃; xn--ss-bh7o +B; ss𑓃; ; xn--ss-bh7o +B; SS𑓃; ss𑓃; xn--ss-bh7o +B; Ss𑓃; ss𑓃; xn--ss-bh7o +T; ︒\u200Cヶ䒩.ꡪ; [C1 P1 V6]; [P1 V6] # ︒ヶ䒩.ꡪ +N; ︒\u200Cヶ䒩.ꡪ; [C1 P1 V6]; [C1 P1 V6] # ︒ヶ䒩.ꡪ +T; 。\u200Cヶ䒩.ꡪ; [C1 A4_2]; [A4_2] # .ヶ䒩.ꡪ +N; 。\u200Cヶ䒩.ꡪ; [C1 A4_2]; [C1 A4_2] # .ヶ䒩.ꡪ +B; .xn--qekw60d.xn--gd9a; [A4_2]; [A4_2] +B; .xn--0ug287dj0o.xn--gd9a; [C1 A4_2]; [C1 A4_2] # .ヶ䒩.ꡪ +B; xn--qekw60dns9k.xn--gd9a; [V6]; [V6] +B; xn--0ug287dj0or48o.xn--gd9a; [C1 V6]; [C1 V6] # ︒ヶ䒩.ꡪ +B; xn--qekw60d.xn--gd9a; ヶ䒩.ꡪ; xn--qekw60d.xn--gd9a +B; ヶ䒩.ꡪ; ; xn--qekw60d.xn--gd9a +T; \u200C⒈𤮍.󢓋\u1A60; [C1 P1 V6]; [P1 V6] # ⒈𤮍.᩠ +N; \u200C⒈𤮍.󢓋\u1A60; [C1 P1 V6]; [C1 P1 V6] # ⒈𤮍.᩠ +T; \u200C1.𤮍.󢓋\u1A60; [C1 P1 V6]; [P1 V6] # 1.𤮍.᩠ +N; \u200C1.𤮍.󢓋\u1A60; 
[C1 P1 V6]; [C1 P1 V6] # 1.𤮍.᩠ +B; 1.xn--4x6j.xn--jof45148n; [V6]; [V6] # 1.𤮍.᩠ +B; xn--1-rgn.xn--4x6j.xn--jof45148n; [C1 V6]; [C1 V6] # 1.𤮍.᩠ +B; xn--tshw462r.xn--jof45148n; [V6]; [V6] # ⒈𤮍.᩠ +B; xn--0ug88o7471d.xn--jof45148n; [C1 V6]; [C1 V6] # ⒈𤮍.᩠ +T; ⒈\u200C𐫓󠀺。\u1A60񤰵\u200D; [B1 C1 C2 P1 V5 V6]; [B1 P1 V5 V6] # ⒈𐫓.᩠ +N; ⒈\u200C𐫓󠀺。\u1A60񤰵\u200D; [B1 C1 C2 P1 V5 V6]; [B1 C1 C2 P1 V5 V6] # ⒈𐫓.᩠ +T; 1.\u200C𐫓󠀺。\u1A60񤰵\u200D; [B1 C1 C2 P1 V5 V6]; [B1 B3 P1 V5 V6] # 1.𐫓.᩠ +N; 1.\u200C𐫓󠀺。\u1A60񤰵\u200D; [B1 C1 C2 P1 V5 V6]; [B1 C1 C2 P1 V5 V6] # 1.𐫓.᩠ +B; 1.xn--8w9c40377c.xn--jofz5294e; [B1 B3 V5 V6]; [B1 B3 V5 V6] # 1.𐫓.᩠ +B; 1.xn--0ug8853gk263g.xn--jof95xex98m; [B1 C1 C2 V5 V6]; [B1 C1 C2 V5 V6] # 1.𐫓.᩠ +B; xn--tsh4435fk263g.xn--jofz5294e; [B1 V5 V6]; [B1 V5 V6] # ⒈𐫓.᩠ +B; xn--0ug78ol75wzcx4i.xn--jof95xex98m; [B1 C1 C2 V5 V6]; [B1 C1 C2 V5 V6] # ⒈𐫓.᩠ +B; 𝅵。𝟫𞀈䬺⒈; [P1 V6]; [P1 V6] +B; 𝅵。9𞀈䬺1.; [P1 V6]; [P1 V6] +B; xn--3f1h.xn--91-030c1650n.; [V6]; [V6] +B; xn--3f1h.xn--9-ecp936non25a; [V6]; [V6] +B; 򡼺≯。盚\u0635; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ≯.盚ص +B; 򡼺>\u0338。盚\u0635; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ≯.盚ص +B; xn--hdh30181h.xn--0gb7878c; [B5 B6 V6]; [B5 B6 V6] # ≯.盚ص +B; -񿰭\u05B4。-󠁊𐢸≯; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ִ.-≯ +B; -񿰭\u05B4。-󠁊𐢸>\u0338; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ִ.-≯ +B; xn----fgc06667m.xn----pgoy615he5y4i; [B1 V3 V6]; [B1 V3 V6] # -ִ.-≯ +T; 󿭓\u1B44\u200C\u0A4D.𐭛񳋔; [B2 B3 B6 P1 V6]; [B2 B3 P1 V6] # ᭄੍.𐭛 +N; 󿭓\u1B44\u200C\u0A4D.𐭛񳋔; [B2 B3 B6 P1 V6]; [B2 B3 B6 P1 V6] # ᭄੍.𐭛 +T; 󿭓\u1B44\u200C\u0A4D.𐭛񳋔; [B2 B3 B6 P1 V6]; [B2 B3 P1 V6] # ᭄੍.𐭛 +N; 󿭓\u1B44\u200C\u0A4D.𐭛񳋔; [B2 B3 B6 P1 V6]; [B2 B3 B6 P1 V6] # ᭄੍.𐭛 +B; xn--ybc997fb5881a.xn--409c6100y; [B2 B3 V6]; [B2 B3 V6] # ᭄੍.𐭛 +B; xn--ybc997f6rd2n772c.xn--409c6100y; [B2 B3 B6 V6]; [B2 B3 B6 V6] # ᭄੍.𐭛 +T; ⾇.\u067D𞤴\u06BB\u200D; [B3 C2]; xn--8c1a.xn--2ib8jn539l # 舛.ٽ𞤴ڻ +N; ⾇.\u067D𞤴\u06BB\u200D; [B3 C2]; [B3 C2] # 舛.ٽ𞤴ڻ +T; 舛.\u067D𞤴\u06BB\u200D; [B3 C2]; xn--8c1a.xn--2ib8jn539l # 舛.ٽ𞤴ڻ +N; 舛.\u067D𞤴\u06BB\u200D; [B3 C2]; [B3 C2] # 舛.ٽ𞤴ڻ +T; 舛.\u067D𞤒\u06BB\u200D; [B3 C2]; xn--8c1a.xn--2ib8jn539l # 舛.ٽ𞤴ڻ +N; 舛.\u067D𞤒\u06BB\u200D; [B3 C2]; [B3 C2] # 舛.ٽ𞤴ڻ +B; xn--8c1a.xn--2ib8jn539l; 舛.\u067D𞤴\u06BB; xn--8c1a.xn--2ib8jn539l # 舛.ٽ𞤴ڻ +B; 舛.\u067D𞤴\u06BB; ; xn--8c1a.xn--2ib8jn539l # 舛.ٽ𞤴ڻ +B; 舛.\u067D𞤒\u06BB; 舛.\u067D𞤴\u06BB; xn--8c1a.xn--2ib8jn539l # 舛.ٽ𞤴ڻ +B; xn--8c1a.xn--2ib8jv19e6413b; [B3 C2]; [B3 C2] # 舛.ٽ𞤴ڻ +T; ⾇.\u067D𞤒\u06BB\u200D; [B3 C2]; xn--8c1a.xn--2ib8jn539l # 舛.ٽ𞤴ڻ +N; ⾇.\u067D𞤒\u06BB\u200D; [B3 C2]; [B3 C2] # 舛.ٽ𞤴ڻ +B; 4򭆥。\u0767≯; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 4.ݧ≯ +B; 4򭆥。\u0767>\u0338; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 4.ݧ≯ +B; xn--4-xn17i.xn--rpb459k; [B1 B3 V6]; [B1 B3 V6] # 4.ݧ≯ +B; 𲔏𞫨񺿂硲.\u06AD; [B5 P1 V6]; [B5 P1 V6] # 硲.ڭ +B; 𲔏𞫨񺿂硲.\u06AD; [B5 P1 V6]; [B5 P1 V6] # 硲.ڭ +B; xn--lcz1610fn78gk609a.xn--gkb; [B5 V6]; [B5 V6] # 硲.ڭ +T; \u200C.\uFE08\u0666Ⴆ℮; [B1 C1 P1 V6]; [B1 P1 V6 A4_2] # .٦Ⴆ℮ +N; \u200C.\uFE08\u0666Ⴆ℮; [B1 C1 P1 V6]; [B1 C1 P1 V6] # .٦Ⴆ℮ +T; \u200C.\uFE08\u0666ⴆ℮; [B1 C1]; [B1 A4_2] # .٦ⴆ℮ +N; \u200C.\uFE08\u0666ⴆ℮; [B1 C1]; [B1 C1] # .٦ⴆ℮ +B; .xn--fib628k4li; [B1 A4_2]; [B1 A4_2] # .٦ⴆ℮ +B; xn--0ug.xn--fib628k4li; [B1 C1]; [B1 C1] # .٦ⴆ℮ +B; .xn--fib263c0yn; [B1 V6 A4_2]; [B1 V6 A4_2] # .٦Ⴆ℮ +B; xn--0ug.xn--fib263c0yn; [B1 C1 V6]; [B1 C1 V6] # .٦Ⴆ℮ +T; \u06A3.\u0D4D\u200DϞ; [B1 V5]; [B1 V5] # ڣ.്ϟ +N; \u06A3.\u0D4D\u200DϞ; [B1 V5]; [B1 V5] # ڣ.്ϟ +T; \u06A3.\u0D4D\u200DϞ; [B1 V5]; [B1 V5] # ڣ.്ϟ +N; \u06A3.\u0D4D\u200DϞ; [B1 V5]; [B1 V5] # ڣ.്ϟ +T; \u06A3.\u0D4D\u200Dϟ; [B1 V5]; [B1 V5] # ڣ.്ϟ +N; 
\u06A3.\u0D4D\u200Dϟ; [B1 V5]; [B1 V5] # ڣ.്ϟ +B; xn--5jb.xn--xya149b; [B1 V5]; [B1 V5] # ڣ.്ϟ +B; xn--5jb.xn--xya149bpvp; [B1 V5]; [B1 V5] # ڣ.്ϟ +T; \u06A3.\u0D4D\u200Dϟ; [B1 V5]; [B1 V5] # ڣ.്ϟ +N; \u06A3.\u0D4D\u200Dϟ; [B1 V5]; [B1 V5] # ڣ.്ϟ +T; \u200C𞸇𑘿。\u0623𐮂-腍; [B1 B2 B3 C1]; [B2 B3] # ح𑘿.أ𐮂-腍 +N; \u200C𞸇𑘿。\u0623𐮂-腍; [B1 B2 B3 C1]; [B1 B2 B3 C1] # ح𑘿.أ𐮂-腍 +T; \u200C𞸇𑘿。\u0627\u0654𐮂-腍; [B1 B2 B3 C1]; [B2 B3] # ح𑘿.أ𐮂-腍 +N; \u200C𞸇𑘿。\u0627\u0654𐮂-腍; [B1 B2 B3 C1]; [B1 B2 B3 C1] # ح𑘿.أ𐮂-腍 +T; \u200C\u062D𑘿。\u0623𐮂-腍; [B1 B2 B3 C1]; [B2 B3] # ح𑘿.أ𐮂-腍 +N; \u200C\u062D𑘿。\u0623𐮂-腍; [B1 B2 B3 C1]; [B1 B2 B3 C1] # ح𑘿.أ𐮂-腍 +T; \u200C\u062D𑘿。\u0627\u0654𐮂-腍; [B1 B2 B3 C1]; [B2 B3] # ح𑘿.أ𐮂-腍 +N; \u200C\u062D𑘿。\u0627\u0654𐮂-腍; [B1 B2 B3 C1]; [B1 B2 B3 C1] # ح𑘿.أ𐮂-腍 +B; xn--sgb4140l.xn----qmc5075grs9e; [B2 B3]; [B2 B3] # ح𑘿.أ𐮂-腍 +B; xn--sgb953kmi8o.xn----qmc5075grs9e; [B1 B2 B3 C1]; [B1 B2 B3 C1] # ح𑘿.أ𐮂-腍 +B; -򭷙\u066B纛。𝟛񭤇🄅; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -٫纛.3🄅 +B; -򭷙\u066B纛。3񭤇4,; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -٫纛.34, +B; xn----vqc8143g0tt4i.xn--34,-8787l; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -٫纛.34, +B; xn----vqc8143g0tt4i.xn--3-os1sn476y; [B1 V3 V6]; [B1 V3 V6] # -٫纛.3🄅 +B; 🔔.Ⴂ\u07CC\u0BCD𐋮; [B1 B5 P1 V6]; [B1 B5 P1 V6] # 🔔.Ⴂߌ்𐋮 +B; 🔔.Ⴂ\u07CC\u0BCD𐋮; [B1 B5 P1 V6]; [B1 B5 P1 V6] # 🔔.Ⴂߌ்𐋮 +B; 🔔.ⴂ\u07CC\u0BCD𐋮; [B1 B5]; [B1 B5] # 🔔.ⴂߌ்𐋮 +B; xn--nv8h.xn--nsb46rvz1b222p; [B1 B5]; [B1 B5] # 🔔.ⴂߌ்𐋮 +B; xn--nv8h.xn--nsb46r83e8112a; [B1 B5 V6]; [B1 B5 V6] # 🔔.Ⴂߌ்𐋮 +B; 🔔.ⴂ\u07CC\u0BCD𐋮; [B1 B5]; [B1 B5] # 🔔.ⴂߌ்𐋮 +B; 軥\u06B3.-𖬵; [B1 B5 B6 V3]; [B1 B5 B6 V3] # 軥ڳ.-𖬵 +B; xn--mkb5480e.xn----6u5m; [B1 B5 B6 V3]; [B1 B5 B6 V3] # 軥ڳ.-𖬵 +B; 𐹤\u07CA\u06B6.𐨂-; [B1 V3 V5]; [B1 V3 V5] # 𐹤ߊڶ.𐨂- +B; xn--pkb56cn614d.xn----974i; [B1 V3 V5]; [B1 V3 V5] # 𐹤ߊڶ.𐨂- +B; -󠅱0。\u17CF\u1DFD톇십; [V3 V5]; [V3 V5] # -0.៏᷽톇십 +B; -󠅱0。\u17CF\u1DFD톇십; [V3 V5]; [V3 V5] # -0.៏᷽톇십 +B; -󠅱0。\u17CF\u1DFD톇십; [V3 V5]; [V3 V5] # -0.៏᷽톇십 +B; -󠅱0。\u17CF\u1DFD톇십; [V3 V5]; [V3 V5] # -0.៏᷽톇십 +B; -0.xn--r4e872ah77nghm; [V3 V5]; [V3 V5] # -0.៏᷽톇십 +B; ꡰ︒--。\u17CC靈𐹢񘳮; [B1 B6 P1 V2 V3 V5 V6]; [B1 B6 P1 V2 V3 V5 V6] # ꡰ︒--.៌靈𐹢 +B; ꡰ。--。\u17CC靈𐹢񘳮; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ꡰ.--.៌靈𐹢 +B; xn--md9a.--.xn--o4e6836dpxudz0v1c; [B1 V3 V5 V6]; [B1 V3 V5 V6] # ꡰ.--.៌靈𐹢 +B; xn-----bk9hu24z.xn--o4e6836dpxudz0v1c; [B1 B6 V2 V3 V5 V6]; [B1 B6 V2 V3 V5 V6] # ꡰ︒--.៌靈𐹢 +B; \u115FႿႵრ。\u0B4D; [P1 V5 V6]; [P1 V5 V6] # ႿႵრ.୍ +B; \u115FႿႵრ。\u0B4D; [P1 V5 V6]; [P1 V5 V6] # ႿႵრ.୍ +B; \u115Fⴟⴕრ。\u0B4D; [P1 V5 V6]; [P1 V5 V6] # ⴟⴕრ.୍ +B; \u115FႿⴕრ。\u0B4D; [P1 V5 V6]; [P1 V5 V6] # Ⴟⴕრ.୍ +B; xn--3nd0etsm92g.xn--9ic; [V5 V6]; [V5 V6] # Ⴟⴕრ.୍ +B; xn--1od7wz74eeb.xn--9ic; [V5 V6]; [V5 V6] # ⴟⴕრ.୍ +B; xn--tndt4hvw.xn--9ic; [V5 V6]; [V5 V6] # ႿႵრ.୍ +B; \u115Fⴟⴕრ。\u0B4D; [P1 V5 V6]; [P1 V5 V6] # ⴟⴕრ.୍ +B; \u115FႿⴕრ。\u0B4D; [P1 V5 V6]; [P1 V5 V6] # Ⴟⴕრ.୍ +B; 🄃𐹠.\u0664󠅇; [B1 P1 V6]; [B1 P1 V6] # 🄃𐹠.٤ +B; 2,𐹠.\u0664󠅇; [B1 P1 V6]; [B1 P1 V6] # 2,𐹠.٤ +B; xn--2,-5g3o.xn--dib; [B1 P1 V6]; [B1 P1 V6] # 2,𐹠.٤ +B; xn--7n0d1189a.xn--dib; [B1 V6]; [B1 V6] # 🄃𐹠.٤ +T; 򻲼\u200C\uFC5B.\u07D2\u0848\u1BF3; [B2 B3 B5 B6 C1 P1 V6]; [B2 B3 B5 B6 P1 V6] # ذٰ.ߒࡈ᯳ +N; 򻲼\u200C\uFC5B.\u07D2\u0848\u1BF3; [B2 B3 B5 B6 C1 P1 V6]; [B2 B3 B5 B6 C1 P1 V6] # ذٰ.ߒࡈ᯳ +T; 򻲼\u200C\u0630\u0670.\u07D2\u0848\u1BF3; [B2 B3 B5 B6 C1 P1 V6]; [B2 B3 B5 B6 P1 V6] # ذٰ.ߒࡈ᯳ +N; 򻲼\u200C\u0630\u0670.\u07D2\u0848\u1BF3; [B2 B3 B5 B6 C1 P1 V6]; [B2 B3 B5 B6 C1 P1 V6] # ذٰ.ߒࡈ᯳ +B; xn--vgb2kp1223g.xn--tsb0vz43c; [B2 B3 B5 B6 V6]; [B2 B3 B5 B6 V6] # ذٰ.ߒࡈ᯳ +B; xn--vgb2kq00fl213y.xn--tsb0vz43c; [B2 B3 B5 B6 C1 V6]; [B2 B3 B5 B6 C1 V6] # 
ذٰ.ߒࡈ᯳ +T; \u200D\u200D𞵪\u200C。ᡘ𑲭\u17B5; [B1 C1 C2 P1 V6]; [P1 V6] # .ᡘ𑲭 +N; \u200D\u200D𞵪\u200C。ᡘ𑲭\u17B5; [B1 C1 C2 P1 V6]; [B1 C1 C2 P1 V6] # .ᡘ𑲭 +B; xn--l96h.xn--03e93aq365d; [V6]; [V6] # .ᡘ𑲭 +B; xn--0ugba05538b.xn--03e93aq365d; [B1 C1 C2 V6]; [B1 C1 C2 V6] # .ᡘ𑲭 +B; 𞷻。⚄񗑇𑁿; [B1 P1 V6]; [B1 P1 V6] +B; xn--qe7h.xn--c7h2966f7so4a; [B1 V6]; [B1 V6] +B; \uA8C4≠.𞠨\u0667; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ꣄≠.𞠨٧ +B; \uA8C4=\u0338.𞠨\u0667; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ꣄≠.𞠨٧ +B; \uA8C4≠.𞠨\u0667; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ꣄≠.𞠨٧ +B; \uA8C4=\u0338.𞠨\u0667; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ꣄≠.𞠨٧ +B; xn--1chy504c.xn--gib1777v; [B1 V5 V6]; [B1 V5 V6] # ꣄≠.𞠨٧ +B; 𝟛𝆪\uA8C4。\uA8EA-; [V3 V5]; [V3 V5] # 3꣄𝆪.꣪- +B; 𝟛\uA8C4𝆪。\uA8EA-; [V3 V5]; [V3 V5] # 3꣄𝆪.꣪- +B; 3\uA8C4𝆪。\uA8EA-; [V3 V5]; [V3 V5] # 3꣄𝆪.꣪- +B; xn--3-sl4eu679e.xn----xn4e; [V3 V5]; [V3 V5] # 3꣄𝆪.꣪- +B; \u075F\u1BA2\u103AႧ.4; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] # ݟᮢ်Ⴇ.4 +B; \u075F\u1BA2\u103Aⴇ.4; [B1 B2 B3]; [B1 B2 B3] # ݟᮢ်ⴇ.4 +B; xn--jpb846bjzj7pr.4; [B1 B2 B3]; [B1 B2 B3] # ݟᮢ်ⴇ.4 +B; xn--jpb846bmjw88a.4; [B1 B2 B3 V6]; [B1 B2 B3 V6] # ݟᮢ်Ⴇ.4 +B; ᄹ。\u0ECA򠯤󠄞; [P1 V5 V6]; [P1 V5 V6] # ᄹ.໊ +B; ᄹ。\u0ECA򠯤󠄞; [P1 V5 V6]; [P1 V5 V6] # ᄹ.໊ +B; xn--lrd.xn--s8c05302k; [V5 V6]; [V5 V6] # ᄹ.໊ +B; Ⴆ򻢩.󠆡\uFE09𞤍; [P1 V6]; [P1 V6] +B; Ⴆ򻢩.󠆡\uFE09𞤍; [P1 V6]; [P1 V6] +B; ⴆ򻢩.󠆡\uFE09𞤯; [P1 V6]; [P1 V6] +B; xn--xkjw3965g.xn--ne6h; [V6]; [V6] +B; xn--end82983m.xn--ne6h; [V6]; [V6] +B; ⴆ򻢩.󠆡\uFE09𞤯; [P1 V6]; [P1 V6] +B; ⴆ򻢩.󠆡\uFE09𞤍; [P1 V6]; [P1 V6] +B; ⴆ򻢩.󠆡\uFE09𞤍; [P1 V6]; [P1 V6] +T; ß\u080B︒\u067B.帼F∬\u200C; [B5 B6 C1 P1 V6]; [B5 B6 P1 V6] # ßࠋ︒ٻ.帼f∫∫ +N; ß\u080B︒\u067B.帼F∬\u200C; [B5 B6 C1 P1 V6]; [B5 B6 C1 P1 V6] # ßࠋ︒ٻ.帼f∫∫ +T; ß\u080B。\u067B.帼F∫∫\u200C; [B5 B6 C1]; [B5 B6] # ßࠋ.ٻ.帼f∫∫ +N; ß\u080B。\u067B.帼F∫∫\u200C; [B5 B6 C1]; [B5 B6 C1] # ßࠋ.ٻ.帼f∫∫ +T; ß\u080B。\u067B.帼f∫∫\u200C; [B5 B6 C1]; [B5 B6] # ßࠋ.ٻ.帼f∫∫ +N; ß\u080B。\u067B.帼f∫∫\u200C; [B5 B6 C1]; [B5 B6 C1] # ßࠋ.ٻ.帼f∫∫ +T; SS\u080B。\u067B.帼F∫∫\u200C; [B5 B6 C1]; [B5 B6] # ssࠋ.ٻ.帼f∫∫ +N; SS\u080B。\u067B.帼F∫∫\u200C; [B5 B6 C1]; [B5 B6 C1] # ssࠋ.ٻ.帼f∫∫ +T; ss\u080B。\u067B.帼f∫∫\u200C; [B5 B6 C1]; [B5 B6] # ssࠋ.ٻ.帼f∫∫ +N; ss\u080B。\u067B.帼f∫∫\u200C; [B5 B6 C1]; [B5 B6 C1] # ssࠋ.ٻ.帼f∫∫ +T; Ss\u080B。\u067B.帼F∫∫\u200C; [B5 B6 C1]; [B5 B6] # ssࠋ.ٻ.帼f∫∫ +N; Ss\u080B。\u067B.帼F∫∫\u200C; [B5 B6 C1]; [B5 B6 C1] # ssࠋ.ٻ.帼f∫∫ +B; xn--ss-uze.xn--0ib.xn--f-tcoa9162d; [B5 B6]; [B5 B6] # ssࠋ.ٻ.帼f∫∫ +B; xn--ss-uze.xn--0ib.xn--f-sgn48ga6997e; [B5 B6 C1]; [B5 B6 C1] # ssࠋ.ٻ.帼f∫∫ +B; xn--zca687a.xn--0ib.xn--f-sgn48ga6997e; [B5 B6 C1]; [B5 B6 C1] # ßࠋ.ٻ.帼f∫∫ +T; ß\u080B︒\u067B.帼f∬\u200C; [B5 B6 C1 P1 V6]; [B5 B6 P1 V6] # ßࠋ︒ٻ.帼f∫∫ +N; ß\u080B︒\u067B.帼f∬\u200C; [B5 B6 C1 P1 V6]; [B5 B6 C1 P1 V6] # ßࠋ︒ٻ.帼f∫∫ +T; SS\u080B︒\u067B.帼F∬\u200C; [B5 B6 C1 P1 V6]; [B5 B6 P1 V6] # ssࠋ︒ٻ.帼f∫∫ +N; SS\u080B︒\u067B.帼F∬\u200C; [B5 B6 C1 P1 V6]; [B5 B6 C1 P1 V6] # ssࠋ︒ٻ.帼f∫∫ +T; ss\u080B︒\u067B.帼f∬\u200C; [B5 B6 C1 P1 V6]; [B5 B6 P1 V6] # ssࠋ︒ٻ.帼f∫∫ +N; ss\u080B︒\u067B.帼f∬\u200C; [B5 B6 C1 P1 V6]; [B5 B6 C1 P1 V6] # ssࠋ︒ٻ.帼f∫∫ +T; Ss\u080B︒\u067B.帼F∬\u200C; [B5 B6 C1 P1 V6]; [B5 B6 P1 V6] # ssࠋ︒ٻ.帼f∫∫ +N; Ss\u080B︒\u067B.帼F∬\u200C; [B5 B6 C1 P1 V6]; [B5 B6 C1 P1 V6] # ssࠋ︒ٻ.帼f∫∫ +B; xn--ss-k0d31nu121d.xn--f-tcoa9162d; [B5 B6 V6]; [B5 B6 V6] # ssࠋ︒ٻ.帼f∫∫ +B; xn--ss-k0d31nu121d.xn--f-sgn48ga6997e; [B5 B6 C1 V6]; [B5 B6 C1 V6] # ssࠋ︒ٻ.帼f∫∫ +B; xn--zca68zj8ac956c.xn--f-sgn48ga6997e; [B5 B6 C1 V6]; [B5 B6 C1 V6] # ßࠋ︒ٻ.帼f∫∫ +T; 󘪗。𐹴𞨌\u200D; [B1 C2 P1 V6]; [B1 P1 V6] # .𐹴 +N; 󘪗。𐹴𞨌\u200D; [B1 C2 P1 V6]; [B1 C2 P1 V6] # .𐹴 +T; 󘪗。𐹴𞨌\u200D; [B1 C2 P1 V6]; 
[B1 P1 V6] # .𐹴 +N; 󘪗。𐹴𞨌\u200D; [B1 C2 P1 V6]; [B1 C2 P1 V6] # .𐹴 +B; xn--8l83e.xn--so0dw168a; [B1 V6]; [B1 V6] +B; xn--8l83e.xn--1ug4105gsxwf; [B1 C2 V6]; [B1 C2 V6] # .𐹴 +B; 񗛨.򅟢𝟨\uA8C4; [P1 V6]; [P1 V6] # .6꣄ +B; 񗛨.򅟢6\uA8C4; [P1 V6]; [P1 V6] # .6꣄ +B; xn--mi60a.xn--6-sl4es8023c; [V6]; [V6] # .6꣄ +B; \u1AB2\uFD8E。-۹ႱႨ; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ᪲مخج.-۹ႱႨ +B; \u1AB2\u0645\u062E\u062C。-۹ႱႨ; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ᪲مخج.-۹ႱႨ +B; \u1AB2\u0645\u062E\u062C。-۹ⴑⴈ; [B1 V3 V5]; [B1 V3 V5] # ᪲مخج.-۹ⴑⴈ +B; \u1AB2\u0645\u062E\u062C。-۹Ⴑⴈ; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ᪲مخج.-۹Ⴑⴈ +B; xn--rgbd2e831i.xn----zyc875efr3a; [B1 V3 V5 V6]; [B1 V3 V5 V6] # ᪲مخج.-۹Ⴑⴈ +B; xn--rgbd2e831i.xn----zyc3430a9a; [B1 V3 V5]; [B1 V3 V5] # ᪲مخج.-۹ⴑⴈ +B; xn--rgbd2e831i.xn----zyc155e9a; [B1 V3 V5 V6]; [B1 V3 V5 V6] # ᪲مخج.-۹ႱႨ +B; \u1AB2\uFD8E。-۹ⴑⴈ; [B1 V3 V5]; [B1 V3 V5] # ᪲مخج.-۹ⴑⴈ +B; \u1AB2\uFD8E。-۹Ⴑⴈ; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ᪲مخج.-۹Ⴑⴈ +B; 𞤤.-\u08A3︒; [B1 P1 V3 V6]; [B1 P1 V3 V6] # 𞤤.-ࢣ︒ +B; 𞤤.-\u08A3。; [B1 V3]; [B1 V3] # 𞤤.-ࢣ. +B; 𞤂.-\u08A3。; [B1 V3]; [B1 V3] # 𞤤.-ࢣ. +B; xn--ce6h.xn----cod.; [B1 V3]; [B1 V3] # 𞤤.-ࢣ. +B; 𞤂.-\u08A3︒; [B1 P1 V3 V6]; [B1 P1 V3 V6] # 𞤤.-ࢣ︒ +B; xn--ce6h.xn----cod7069p; [B1 V3 V6]; [B1 V3 V6] # 𞤤.-ࢣ︒ +T; \u200C𐺨.\u0859--; [B1 C1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # .࡙-- +N; \u200C𐺨.\u0859--; [B1 C1 P1 V3 V5 V6]; [B1 C1 P1 V3 V5 V6] # .࡙-- +B; xn--9p0d.xn-----h6e; [B1 V3 V5 V6]; [B1 V3 V5 V6] # .࡙-- +B; xn--0ug7905g.xn-----h6e; [B1 C1 V3 V5 V6]; [B1 C1 V3 V5 V6] # .࡙-- +B; 𐋸󮘋Ⴢ.Ⴁ; [P1 V6]; [P1 V6] +B; 𐋸󮘋ⴢ.ⴁ; [P1 V6]; [P1 V6] +B; 𐋸󮘋Ⴢ.ⴁ; [P1 V6]; [P1 V6] +B; xn--6nd5215jr2u0h.xn--skj; [V6]; [V6] +B; xn--qlj1559dr224h.xn--skj; [V6]; [V6] +B; xn--6nd5215jr2u0h.xn--8md; [V6]; [V6] +T; 񗑿\uA806₄򩞆。𲩧󠒹ς; [P1 V6]; [P1 V6] # ꠆4.ς +N; 񗑿\uA806₄򩞆。𲩧󠒹ς; [P1 V6]; [P1 V6] # ꠆4.ς +T; 񗑿\uA8064򩞆。𲩧󠒹ς; [P1 V6]; [P1 V6] # ꠆4.ς +N; 񗑿\uA8064򩞆。𲩧󠒹ς; [P1 V6]; [P1 V6] # ꠆4.ς +B; 񗑿\uA8064򩞆。𲩧󠒹Σ; [P1 V6]; [P1 V6] # ꠆4.σ +B; 񗑿\uA8064򩞆。𲩧󠒹σ; [P1 V6]; [P1 V6] # ꠆4.σ +B; xn--4-w93ej7463a9io5a.xn--4xa31142bk3f0d; [V6]; [V6] # ꠆4.σ +B; xn--4-w93ej7463a9io5a.xn--3xa51142bk3f0d; [V6]; [V6] # ꠆4.ς +B; 񗑿\uA806₄򩞆。𲩧󠒹Σ; [P1 V6]; [P1 V6] # ꠆4.σ +B; 񗑿\uA806₄򩞆。𲩧󠒹σ; [P1 V6]; [P1 V6] # ꠆4.σ +B; 󠆀\u0723。\u1DF4\u0775; [B1 V5]; [B1 V5] # ܣ.ᷴݵ +B; xn--tnb.xn--5pb136i; [B1 V5]; [B1 V5] # ܣ.ᷴݵ +T; 𐹱\u0842𝪨。𬼖Ⴑ\u200D; [B1 B6 C2 P1 V6]; [B1 P1 V6] # 𐹱ࡂ𝪨.𬼖Ⴑ +N; 𐹱\u0842𝪨。𬼖Ⴑ\u200D; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # 𐹱ࡂ𝪨.𬼖Ⴑ +T; 𐹱\u0842𝪨。𬼖Ⴑ\u200D; [B1 B6 C2 P1 V6]; [B1 P1 V6] # 𐹱ࡂ𝪨.𬼖Ⴑ +N; 𐹱\u0842𝪨。𬼖Ⴑ\u200D; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # 𐹱ࡂ𝪨.𬼖Ⴑ +T; 𐹱\u0842𝪨。𬼖ⴑ\u200D; [B1 B6 C2]; [B1] # 𐹱ࡂ𝪨.𬼖ⴑ +N; 𐹱\u0842𝪨。𬼖ⴑ\u200D; [B1 B6 C2]; [B1 B6 C2] # 𐹱ࡂ𝪨.𬼖ⴑ +B; xn--0vb1535kdb6e.xn--8kjz186s; [B1]; [B1] # 𐹱ࡂ𝪨.𬼖ⴑ +B; xn--0vb1535kdb6e.xn--1ug742c5714c; [B1 B6 C2]; [B1 B6 C2] # 𐹱ࡂ𝪨.𬼖ⴑ +B; xn--0vb1535kdb6e.xn--pnd93707a; [B1 V6]; [B1 V6] # 𐹱ࡂ𝪨.𬼖Ⴑ +B; xn--0vb1535kdb6e.xn--pnd879eqy33c; [B1 B6 C2 V6]; [B1 B6 C2 V6] # 𐹱ࡂ𝪨.𬼖Ⴑ +T; 𐹱\u0842𝪨。𬼖ⴑ\u200D; [B1 B6 C2]; [B1] # 𐹱ࡂ𝪨.𬼖ⴑ +N; 𐹱\u0842𝪨。𬼖ⴑ\u200D; [B1 B6 C2]; [B1 B6 C2] # 𐹱ࡂ𝪨.𬼖ⴑ +T; \u1714𐭪󠙘\u200D。-𐹴; [B1 C2 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ᜔𐭪.-𐹴 +N; \u1714𐭪󠙘\u200D。-𐹴; [B1 C2 P1 V3 V5 V6]; [B1 C2 P1 V3 V5 V6] # ᜔𐭪.-𐹴 +T; \u1714𐭪󠙘\u200D。-𐹴; [B1 C2 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ᜔𐭪.-𐹴 +N; \u1714𐭪󠙘\u200D。-𐹴; [B1 C2 P1 V3 V5 V6]; [B1 C2 P1 V3 V5 V6] # ᜔𐭪.-𐹴 +B; xn--fze4126jujt0g.xn----c36i; [B1 V3 V5 V6]; [B1 V3 V5 V6] # ᜔𐭪.-𐹴 +B; xn--fze807bso0spy14i.xn----c36i; [B1 C2 V3 V5 V6]; [B1 C2 V3 V5 V6] # ᜔𐭪.-𐹴 +B; 𾢬。\u0729︒쯙𝟧; [B2 P1 V6]; [B2 P1 V6] # .ܩ︒쯙5 +B; 𾢬。\u0729︒쯙𝟧; [B2 P1 V6]; [B2 P1 
V6] # .ܩ︒쯙5 +B; 𾢬。\u0729。쯙5; [P1 V6]; [P1 V6] # .ܩ.쯙5 +B; 𾢬。\u0729。쯙5; [P1 V6]; [P1 V6] # .ܩ.쯙5 +B; xn--t92s.xn--znb.xn--5-y88f; [V6]; [V6] # .ܩ.쯙5 +B; xn--t92s.xn--5-p1c0712mm8rb; [B2 V6]; [B2 V6] # .ܩ︒쯙5 +B; 𞤟-。\u0762≮뻐; [B2 B3 P1 V3 V6]; [B2 B3 P1 V3 V6] # 𞥁-.ݢ≮뻐 +B; 𞤟-。\u0762<\u0338뻐; [B2 B3 P1 V3 V6]; [B2 B3 P1 V3 V6] # 𞥁-.ݢ≮뻐 +B; 𞥁-。\u0762<\u0338뻐; [B2 B3 P1 V3 V6]; [B2 B3 P1 V3 V6] # 𞥁-.ݢ≮뻐 +B; 𞥁-。\u0762≮뻐; [B2 B3 P1 V3 V6]; [B2 B3 P1 V3 V6] # 𞥁-.ݢ≮뻐 +B; xn----1j8r.xn--mpb269krv4i; [B2 B3 V3 V6]; [B2 B3 V3 V6] # 𞥁-.ݢ≮뻐 +B; 𞥩-򊫠.\u08B4≠; [B2 B3 P1 V6]; [B2 B3 P1 V6] # -.ࢴ≠ +B; 𞥩-򊫠.\u08B4=\u0338; [B2 B3 P1 V6]; [B2 B3 P1 V6] # -.ࢴ≠ +B; 𞥩-򊫠.\u08B4≠; [B2 B3 P1 V6]; [B2 B3 P1 V6] # -.ࢴ≠ +B; 𞥩-򊫠.\u08B4=\u0338; [B2 B3 P1 V6]; [B2 B3 P1 V6] # -.ࢴ≠ +B; xn----cm8rp3609a.xn--9yb852k; [B2 B3 V6]; [B2 B3 V6] # -.ࢴ≠ +T; -񅂏ςႼ.\u0661; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ςႼ.١ +N; -񅂏ςႼ.\u0661; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ςႼ.١ +T; -񅂏ςႼ.\u0661; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ςႼ.١ +N; -񅂏ςႼ.\u0661; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ςႼ.١ +T; -񅂏ςⴜ.\u0661; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ςⴜ.١ +N; -񅂏ςⴜ.\u0661; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ςⴜ.١ +B; -񅂏ΣႼ.\u0661; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -σႼ.١ +B; -񅂏σⴜ.\u0661; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -σⴜ.١ +B; -񅂏Σⴜ.\u0661; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -σⴜ.١ +B; xn----0mb9682aov12f.xn--9hb; [B1 V3 V6]; [B1 V3 V6] # -σⴜ.١ +B; xn----0mb770hun11i.xn--9hb; [B1 V3 V6]; [B1 V3 V6] # -σႼ.١ +B; xn----ymb2782aov12f.xn--9hb; [B1 V3 V6]; [B1 V3 V6] # -ςⴜ.١ +B; xn----ymb080hun11i.xn--9hb; [B1 V3 V6]; [B1 V3 V6] # -ςႼ.١ +T; -񅂏ςⴜ.\u0661; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ςⴜ.١ +N; -񅂏ςⴜ.\u0661; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ςⴜ.١ +B; -񅂏ΣႼ.\u0661; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -σႼ.١ +B; -񅂏σⴜ.\u0661; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -σⴜ.١ +B; -񅂏Σⴜ.\u0661; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -σⴜ.١ +T; \u17CA.\u200D𝟮𑀿; [C2 V5]; [V5] # ៊.2𑀿 +N; \u17CA.\u200D𝟮𑀿; [C2 V5]; [C2 V5] # ៊.2𑀿 +T; \u17CA.\u200D2𑀿; [C2 V5]; [V5] # ៊.2𑀿 +N; \u17CA.\u200D2𑀿; [C2 V5]; [C2 V5] # ៊.2𑀿 +B; xn--m4e.xn--2-ku7i; [V5]; [V5] # ៊.2𑀿 +B; xn--m4e.xn--2-tgnv469h; [C2 V5]; [C2 V5] # ៊.2𑀿 +B; ≯𝟖。\u1A60𐫓򟇑; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≯8.᩠𐫓 +B; >\u0338𝟖。\u1A60𐫓򟇑; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≯8.᩠𐫓 +B; ≯8。\u1A60𐫓򟇑; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≯8.᩠𐫓 +B; >\u03388。\u1A60𐫓򟇑; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≯8.᩠𐫓 +B; xn--8-ogo.xn--jof5303iv1z5d; [B1 V5 V6]; [B1 V5 V6] # ≯8.᩠𐫓 +T; 𑲫Ↄ\u0664。\u200C; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # 𑲫Ↄ٤. +N; 𑲫Ↄ\u0664。\u200C; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 𑲫Ↄ٤. +T; 𑲫Ↄ\u0664。\u200C; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # 𑲫Ↄ٤. +N; 𑲫Ↄ\u0664。\u200C; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 𑲫Ↄ٤. +T; 𑲫ↄ\u0664。\u200C; [B1 C1 V5]; [B1 V5] # 𑲫ↄ٤. +N; 𑲫ↄ\u0664。\u200C; [B1 C1 V5]; [B1 C1 V5] # 𑲫ↄ٤. +B; xn--dib100l8x1p.; [B1 V5]; [B1 V5] # 𑲫ↄ٤. +B; xn--dib100l8x1p.xn--0ug; [B1 C1 V5]; [B1 C1 V5] # 𑲫ↄ٤. +B; xn--dib999kcy1p.; [B1 V5 V6]; [B1 V5 V6] # 𑲫Ↄ٤. +B; xn--dib999kcy1p.xn--0ug; [B1 C1 V5 V6]; [B1 C1 V5 V6] # 𑲫Ↄ٤. +T; 𑲫ↄ\u0664。\u200C; [B1 C1 V5]; [B1 V5] # 𑲫ↄ٤. +N; 𑲫ↄ\u0664。\u200C; [B1 C1 V5]; [B1 C1 V5] # 𑲫ↄ٤. +T; \u0C00𝟵\u200D\uFC9D.\u200D\u0750⒈; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # ఀ9بح.ݐ⒈ +N; \u0C00𝟵\u200D\uFC9D.\u200D\u0750⒈; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # ఀ9بح.ݐ⒈ +T; \u0C009\u200D\u0628\u062D.\u200D\u07501.; [B1 C2 V5]; [B1 V5] # ఀ9بح.ݐ1. +N; \u0C009\u200D\u0628\u062D.\u200D\u07501.; [B1 C2 V5]; [B1 C2 V5] # ఀ9بح.ݐ1. +B; xn--9-1mcp570d.xn--1-x3c.; [B1 V5]; [B1 V5] # ఀ9بح.ݐ1. +B; xn--9-1mcp570dl51a.xn--1-x3c211q.; [B1 C2 V5]; [B1 C2 V5] # ఀ9بح.ݐ1. 
+B; xn--9-1mcp570d.xn--3ob470m; [B1 V5 V6]; [B1 V5 V6] # ఀ9بح.ݐ⒈ +B; xn--9-1mcp570dl51a.xn--3ob977jmfd; [B1 C2 V5 V6]; [B1 C2 V5 V6] # ఀ9بح.ݐ⒈ +T; \uAAF6。嬶ß葽; [V5]; [V5] # ꫶.嬶ß葽 +N; \uAAF6。嬶ß葽; [V5]; [V5] # ꫶.嬶ß葽 +B; \uAAF6。嬶SS葽; [V5]; [V5] # ꫶.嬶ss葽 +B; \uAAF6。嬶ss葽; [V5]; [V5] # ꫶.嬶ss葽 +B; \uAAF6。嬶Ss葽; [V5]; [V5] # ꫶.嬶ss葽 +B; xn--2v9a.xn--ss-q40dp97m; [V5]; [V5] # ꫶.嬶ss葽 +B; xn--2v9a.xn--zca7637b14za; [V5]; [V5] # ꫶.嬶ß葽 +B; 𑚶⒈。񞻡𐹺; [B5 B6 P1 V5 V6]; [B5 B6 P1 V5 V6] +B; 𑚶1.。񞻡𐹺; [B5 B6 P1 V5 V6 A4_2]; [B5 B6 P1 V5 V6 A4_2] +B; xn--1-3j0j..xn--yo0d5914s; [B5 B6 V5 V6 A4_2]; [B5 B6 V5 V6 A4_2] +B; xn--tshz969f.xn--yo0d5914s; [B5 B6 V5 V6]; [B5 B6 V5 V6] +B; 𑜤︒≮.񚕽\u05D8𞾩; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 𑜤︒≮.ט +B; 𑜤︒<\u0338.񚕽\u05D8𞾩; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 𑜤︒≮.ט +B; 𑜤。≮.񚕽\u05D8𞾩; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # 𑜤.≮.ט +B; 𑜤。<\u0338.񚕽\u05D8𞾩; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # 𑜤.≮.ט +B; xn--ci2d.xn--gdh.xn--deb0091w5q9u; [B1 B3 B5 B6 V5 V6]; [B1 B3 B5 B6 V5 V6] # 𑜤.≮.ט +B; xn--gdh5267fdzpa.xn--deb0091w5q9u; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # 𑜤︒≮.ט +T; 󠆋\u0603񏦤.⇁ς򏋈򺇥; [B1 P1 V6]; [B1 P1 V6] # .⇁ς +N; 󠆋\u0603񏦤.⇁ς򏋈򺇥; [B1 P1 V6]; [B1 P1 V6] # .⇁ς +B; 󠆋\u0603񏦤.⇁Σ򏋈򺇥; [B1 P1 V6]; [B1 P1 V6] # .⇁σ +B; 󠆋\u0603񏦤.⇁σ򏋈򺇥; [B1 P1 V6]; [B1 P1 V6] # .⇁σ +B; xn--lfb04106d.xn--4xa964mxv16m8moq; [B1 V6]; [B1 V6] # .⇁σ +B; xn--lfb04106d.xn--3xa174mxv16m8moq; [B1 V6]; [B1 V6] # .⇁ς +T; ς𑐽𵢈𑜫。𞬩\u200C𐫄; [C1 P1 V6]; [P1 V6] # ς𑐽𑜫.𐫄 +N; ς𑐽𵢈𑜫。𞬩\u200C𐫄; [C1 P1 V6]; [C1 P1 V6] # ς𑐽𑜫.𐫄 +T; ς𑐽𵢈𑜫。𞬩\u200C𐫄; [C1 P1 V6]; [P1 V6] # ς𑐽𑜫.𐫄 +N; ς𑐽𵢈𑜫。𞬩\u200C𐫄; [C1 P1 V6]; [C1 P1 V6] # ς𑐽𑜫.𐫄 +T; Σ𑐽𵢈𑜫。𞬩\u200C𐫄; [C1 P1 V6]; [P1 V6] # σ𑐽𑜫.𐫄 +N; Σ𑐽𵢈𑜫。𞬩\u200C𐫄; [C1 P1 V6]; [C1 P1 V6] # σ𑐽𑜫.𐫄 +T; σ𑐽𵢈𑜫。𞬩\u200C𐫄; [C1 P1 V6]; [P1 V6] # σ𑐽𑜫.𐫄 +N; σ𑐽𵢈𑜫。𞬩\u200C𐫄; [C1 P1 V6]; [C1 P1 V6] # σ𑐽𑜫.𐫄 +B; xn--4xa2260lk3b8z15g.xn--tw9ct349a; [V6]; [V6] +B; xn--4xa2260lk3b8z15g.xn--0ug4653g2xzf; [C1 V6]; [C1 V6] # σ𑐽𑜫.𐫄 +B; xn--3xa4260lk3b8z15g.xn--0ug4653g2xzf; [C1 V6]; [C1 V6] # ς𑐽𑜫.𐫄 +T; Σ𑐽𵢈𑜫。𞬩\u200C𐫄; [C1 P1 V6]; [P1 V6] # σ𑐽𑜫.𐫄 +N; Σ𑐽𵢈𑜫。𞬩\u200C𐫄; [C1 P1 V6]; [C1 P1 V6] # σ𑐽𑜫.𐫄 +T; σ𑐽𵢈𑜫。𞬩\u200C𐫄; [C1 P1 V6]; [P1 V6] # σ𑐽𑜫.𐫄 +N; σ𑐽𵢈𑜫。𞬩\u200C𐫄; [C1 P1 V6]; [C1 P1 V6] # σ𑐽𑜫.𐫄 +B; -򵏽。-\uFC4C\u075B; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -.-نحݛ +B; -򵏽。-\u0646\u062D\u075B; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -.-نحݛ +B; xn----o452j.xn----cnc8e38c; [B1 V3 V6]; [B1 V3 V6] # -.-نحݛ +T; ⺢򇺅𝟤。\u200D🚷; [C2 P1 V6]; [P1 V6] # ⺢2.🚷 +N; ⺢򇺅𝟤。\u200D🚷; [C2 P1 V6]; [C2 P1 V6] # ⺢2.🚷 +T; ⺢򇺅2。\u200D🚷; [C2 P1 V6]; [P1 V6] # ⺢2.🚷 +N; ⺢򇺅2。\u200D🚷; [C2 P1 V6]; [C2 P1 V6] # ⺢2.🚷 +B; xn--2-4jtr4282f.xn--m78h; [V6]; [V6] +B; xn--2-4jtr4282f.xn--1ugz946p; [C2 V6]; [C2 V6] # ⺢2.🚷 +T; \u0CF8\u200D\u2DFE𐹲。򤐶; [B5 B6 C2 P1 V6]; [B5 B6 P1 V6] # ⷾ𐹲. +N; \u0CF8\u200D\u2DFE𐹲。򤐶; [B5 B6 C2 P1 V6]; [B5 B6 C2 P1 V6] # ⷾ𐹲. +T; \u0CF8\u200D\u2DFE𐹲。򤐶; [B5 B6 C2 P1 V6]; [B5 B6 P1 V6] # ⷾ𐹲. +N; \u0CF8\u200D\u2DFE𐹲。򤐶; [B5 B6 C2 P1 V6]; [B5 B6 C2 P1 V6] # ⷾ𐹲. +B; xn--hvc220of37m.xn--3e36c; [B5 B6 V6]; [B5 B6 V6] # ⷾ𐹲. +B; xn--hvc488g69j402t.xn--3e36c; [B5 B6 C2 V6]; [B5 B6 C2 V6] # ⷾ𐹲. 
+B; 𐹢.Ⴍ₉⁸; [B1 P1 V6]; [B1 P1 V6] +B; 𐹢.Ⴍ98; [B1 P1 V6]; [B1 P1 V6] +B; 𐹢.ⴍ98; [B1]; [B1] +B; xn--9n0d.xn--98-u61a; [B1]; [B1] +B; xn--9n0d.xn--98-7ek; [B1 V6]; [B1 V6] +B; 𐹢.ⴍ₉⁸; [B1]; [B1] +T; \u200C\u034F。ß\u08E2⒚≯; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6 A4_2] # .ß⒚≯ +N; \u200C\u034F。ß\u08E2⒚≯; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .ß⒚≯ +T; \u200C\u034F。ß\u08E2⒚>\u0338; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6 A4_2] # .ß⒚≯ +N; \u200C\u034F。ß\u08E2⒚>\u0338; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .ß⒚≯ +T; \u200C\u034F。ß\u08E219.≯; [B1 B5 C1 P1 V6]; [B1 B5 P1 V6 A4_2] # .ß19.≯ +N; \u200C\u034F。ß\u08E219.≯; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # .ß19.≯ +T; \u200C\u034F。ß\u08E219.>\u0338; [B1 B5 C1 P1 V6]; [B1 B5 P1 V6 A4_2] # .ß19.≯ +N; \u200C\u034F。ß\u08E219.>\u0338; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # .ß19.≯ +T; \u200C\u034F。SS\u08E219.>\u0338; [B1 B5 C1 P1 V6]; [B1 B5 P1 V6 A4_2] # .ss19.≯ +N; \u200C\u034F。SS\u08E219.>\u0338; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # .ss19.≯ +T; \u200C\u034F。SS\u08E219.≯; [B1 B5 C1 P1 V6]; [B1 B5 P1 V6 A4_2] # .ss19.≯ +N; \u200C\u034F。SS\u08E219.≯; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # .ss19.≯ +T; \u200C\u034F。ss\u08E219.≯; [B1 B5 C1 P1 V6]; [B1 B5 P1 V6 A4_2] # .ss19.≯ +N; \u200C\u034F。ss\u08E219.≯; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # .ss19.≯ +T; \u200C\u034F。ss\u08E219.>\u0338; [B1 B5 C1 P1 V6]; [B1 B5 P1 V6 A4_2] # .ss19.≯ +N; \u200C\u034F。ss\u08E219.>\u0338; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # .ss19.≯ +T; \u200C\u034F。Ss\u08E219.>\u0338; [B1 B5 C1 P1 V6]; [B1 B5 P1 V6 A4_2] # .ss19.≯ +N; \u200C\u034F。Ss\u08E219.>\u0338; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # .ss19.≯ +T; \u200C\u034F。Ss\u08E219.≯; [B1 B5 C1 P1 V6]; [B1 B5 P1 V6 A4_2] # .ss19.≯ +N; \u200C\u034F。Ss\u08E219.≯; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # .ss19.≯ +B; .xn--ss19-w0i.xn--hdh; [B1 B5 V6 A4_2]; [B1 B5 V6 A4_2] # .ss19.≯ +B; xn--0ug.xn--ss19-w0i.xn--hdh; [B1 B5 C1 V6]; [B1 B5 C1 V6] # .ss19.≯ +B; xn--0ug.xn--19-fia813f.xn--hdh; [B1 B5 C1 V6]; [B1 B5 C1 V6] # .ß19.≯ +T; \u200C\u034F。SS\u08E2⒚>\u0338; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6 A4_2] # .ss⒚≯ +N; \u200C\u034F。SS\u08E2⒚>\u0338; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .ss⒚≯ +T; \u200C\u034F。SS\u08E2⒚≯; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6 A4_2] # .ss⒚≯ +N; \u200C\u034F。SS\u08E2⒚≯; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .ss⒚≯ +T; \u200C\u034F。ss\u08E2⒚≯; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6 A4_2] # .ss⒚≯ +N; \u200C\u034F。ss\u08E2⒚≯; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .ss⒚≯ +T; \u200C\u034F。ss\u08E2⒚>\u0338; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6 A4_2] # .ss⒚≯ +N; \u200C\u034F。ss\u08E2⒚>\u0338; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .ss⒚≯ +T; \u200C\u034F。Ss\u08E2⒚>\u0338; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6 A4_2] # .ss⒚≯ +N; \u200C\u034F。Ss\u08E2⒚>\u0338; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .ss⒚≯ +T; \u200C\u034F。Ss\u08E2⒚≯; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6 A4_2] # .ss⒚≯ +N; \u200C\u034F。Ss\u08E2⒚≯; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .ss⒚≯ +B; .xn--ss-9if872xjjc; [B5 B6 V6 A4_2]; [B5 B6 V6 A4_2] # .ss⒚≯ +B; xn--0ug.xn--ss-9if872xjjc; [B1 B5 B6 C1 V6]; [B1 B5 B6 C1 V6] # .ss⒚≯ +B; xn--0ug.xn--zca612bx9vo5b; [B1 B5 B6 C1 V6]; [B1 B5 B6 C1 V6] # .ß⒚≯ +T; \u200C𞥍ᡌ.𣃔; [B1 C1 P1 V6]; [B2 B3 P1 V6] # ᡌ.𣃔 +N; \u200C𞥍ᡌ.𣃔; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ᡌ.𣃔 +T; \u200C𞥍ᡌ.𣃔; [B1 C1 P1 V6]; [B2 B3 P1 V6] # ᡌ.𣃔 +N; \u200C𞥍ᡌ.𣃔; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ᡌ.𣃔 +B; xn--c8e5919u.xn--od1j; [B2 B3 V6]; [B2 B3 V6] +B; xn--c8e180bqz13b.xn--od1j; [B1 C1 V6]; [B1 C1 V6] # ᡌ.𣃔 +B; \u07D0򜬝-񡢬。\u0FA0Ⴛ𞷏𝆬; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # 
ߐ-.ྠႻ𝆬 +B; \u07D0򜬝-񡢬。\u0FA0ⴛ𞷏𝆬; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # ߐ-.ྠⴛ𝆬 +B; xn----8bd11730jefvw.xn--wfd802mpm20agsxa; [B1 B2 B3 V5 V6]; [B1 B2 B3 V5 V6] # ߐ-.ྠⴛ𝆬 +B; xn----8bd11730jefvw.xn--wfd08cd265hgsxa; [B1 B2 B3 V5 V6]; [B1 B2 B3 V5 V6] # ߐ-.ྠႻ𝆬 +B; 𝨥。⫟𑈾; [V5]; [V5] +B; xn--n82h.xn--63iw010f; [V5]; [V5] +T; ⾛\u0753.Ⴕ𞠬\u0604\u200D; [B5 B6 C2 P1 V6]; [B5 B6 P1 V6] # 走ݓ.Ⴕ𞠬 +N; ⾛\u0753.Ⴕ𞠬\u0604\u200D; [B5 B6 C2 P1 V6]; [B5 B6 C2 P1 V6] # 走ݓ.Ⴕ𞠬 +T; 走\u0753.Ⴕ𞠬\u0604\u200D; [B5 B6 C2 P1 V6]; [B5 B6 P1 V6] # 走ݓ.Ⴕ𞠬 +N; 走\u0753.Ⴕ𞠬\u0604\u200D; [B5 B6 C2 P1 V6]; [B5 B6 C2 P1 V6] # 走ݓ.Ⴕ𞠬 +T; 走\u0753.ⴕ𞠬\u0604\u200D; [B5 B6 C2 P1 V6]; [B5 B6 P1 V6] # 走ݓ.ⴕ𞠬 +N; 走\u0753.ⴕ𞠬\u0604\u200D; [B5 B6 C2 P1 V6]; [B5 B6 C2 P1 V6] # 走ݓ.ⴕ𞠬 +B; xn--6ob9779d.xn--mfb511rxu80a; [B5 B6 V6]; [B5 B6 V6] # 走ݓ.ⴕ𞠬 +B; xn--6ob9779d.xn--mfb444k5gjt754b; [B5 B6 C2 V6]; [B5 B6 C2 V6] # 走ݓ.ⴕ𞠬 +B; xn--6ob9779d.xn--mfb785ck569a; [B5 B6 V6]; [B5 B6 V6] # 走ݓ.Ⴕ𞠬 +B; xn--6ob9779d.xn--mfb785czmm0y85b; [B5 B6 C2 V6]; [B5 B6 C2 V6] # 走ݓ.Ⴕ𞠬 +T; ⾛\u0753.ⴕ𞠬\u0604\u200D; [B5 B6 C2 P1 V6]; [B5 B6 P1 V6] # 走ݓ.ⴕ𞠬 +N; ⾛\u0753.ⴕ𞠬\u0604\u200D; [B5 B6 C2 P1 V6]; [B5 B6 C2 P1 V6] # 走ݓ.ⴕ𞠬 +T; -ᢗ\u200C🄄.𑜢; [C1 P1 V3 V5 V6]; [P1 V3 V5 V6] # -ᢗ🄄.𑜢 +N; -ᢗ\u200C🄄.𑜢; [C1 P1 V3 V5 V6]; [C1 P1 V3 V5 V6] # -ᢗ🄄.𑜢 +T; -ᢗ\u200C3,.𑜢; [C1 P1 V3 V5 V6]; [P1 V3 V5 V6] # -ᢗ3,.𑜢 +N; -ᢗ\u200C3,.𑜢; [C1 P1 V3 V5 V6]; [C1 P1 V3 V5 V6] # -ᢗ3,.𑜢 +B; xn---3,-3eu.xn--9h2d; [P1 V3 V5 V6]; [P1 V3 V5 V6] +B; xn---3,-3eu051c.xn--9h2d; [C1 P1 V3 V5 V6]; [C1 P1 V3 V5 V6] # -ᢗ3,.𑜢 +B; xn----pck1820x.xn--9h2d; [V3 V5 V6]; [V3 V5 V6] +B; xn----pck312bx563c.xn--9h2d; [C1 V3 V5 V6]; [C1 V3 V5 V6] # -ᢗ🄄.𑜢 +T; ≠𐸁𹏁\u200C.Ⴚ򳄠; [B1 C1 P1 V6]; [B1 P1 V6] # ≠.Ⴚ +N; ≠𐸁𹏁\u200C.Ⴚ򳄠; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ≠.Ⴚ +T; =\u0338𐸁𹏁\u200C.Ⴚ򳄠; [B1 C1 P1 V6]; [B1 P1 V6] # ≠.Ⴚ +N; =\u0338𐸁𹏁\u200C.Ⴚ򳄠; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ≠.Ⴚ +T; =\u0338𐸁𹏁\u200C.ⴚ򳄠; [B1 C1 P1 V6]; [B1 P1 V6] # ≠.ⴚ +N; =\u0338𐸁𹏁\u200C.ⴚ򳄠; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ≠.ⴚ +T; ≠𐸁𹏁\u200C.ⴚ򳄠; [B1 C1 P1 V6]; [B1 P1 V6] # ≠.ⴚ +N; ≠𐸁𹏁\u200C.ⴚ򳄠; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ≠.ⴚ +B; xn--1ch2293gv3nr.xn--ilj23531g; [B1 V6]; [B1 V6] +B; xn--0ug83gn618a21ov.xn--ilj23531g; [B1 C1 V6]; [B1 C1 V6] # ≠.ⴚ +B; xn--1ch2293gv3nr.xn--ynd49496l; [B1 V6]; [B1 V6] +B; xn--0ug83gn618a21ov.xn--ynd49496l; [B1 C1 V6]; [B1 C1 V6] # ≠.Ⴚ +B; \u0669。󠇀𑇊; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ٩.𑇊 +B; \u0669。󠇀𑇊; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ٩.𑇊 +B; xn--iib.xn--6d1d; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ٩.𑇊 +B; \u1086𞶀≯⒍。-; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ႆ≯⒍.- +B; \u1086𞶀>\u0338⒍。-; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ႆ≯⒍.- +B; \u1086𞶀≯6.。-; [B1 P1 V3 V5 V6 A4_2]; [B1 P1 V3 V5 V6 A4_2] # ႆ≯6..- +B; \u1086𞶀>\u03386.。-; [B1 P1 V3 V5 V6 A4_2]; [B1 P1 V3 V5 V6 A4_2] # ႆ≯6..- +B; xn--6-oyg968k7h74b..-; [B1 V3 V5 V6 A4_2]; [B1 V3 V5 V6 A4_2] # ႆ≯6..- +B; xn--hmd482gqqb8730g.-; [B1 V3 V5 V6]; [B1 V3 V5 V6] # ႆ≯⒍.- +B; \u17B4.쮇-; [P1 V3 V5 V6]; [P1 V3 V5 V6] # .쮇- +B; \u17B4.쮇-; [P1 V3 V5 V6]; [P1 V3 V5 V6] # .쮇- +B; xn--z3e.xn----938f; [V3 V5 V6]; [V3 V5 V6] # .쮇- +T; \u200C𑓂。⒈-􀪛; [C1 P1 V6]; [P1 V5 V6] # 𑓂.⒈- +N; \u200C𑓂。⒈-􀪛; [C1 P1 V6]; [C1 P1 V6] # 𑓂.⒈- +T; \u200C𑓂。1.-􀪛; [C1 P1 V3 V6]; [P1 V3 V5 V6] # 𑓂.1.- +N; \u200C𑓂。1.-􀪛; [C1 P1 V3 V6]; [C1 P1 V3 V6] # 𑓂.1.- +B; xn--wz1d.1.xn----rg03o; [V3 V5 V6]; [V3 V5 V6] +B; xn--0ugy057g.1.xn----rg03o; [C1 V3 V6]; [C1 V3 V6] # 𑓂.1.- +B; xn--wz1d.xn----dcp29674o; [V5 V6]; [V5 V6] +B; xn--0ugy057g.xn----dcp29674o; [C1 V6]; [C1 V6] # 𑓂.⒈- +T; ⒈\uFEAE\u200C。\u20E9🖞\u200C𖬴; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # ⒈ر.⃩🖞𖬴 +N; 
⒈\uFEAE\u200C。\u20E9🖞\u200C𖬴; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # ⒈ر.⃩🖞𖬴 +T; 1.\u0631\u200C。\u20E9🖞\u200C𖬴; [B1 B3 C1 V5]; [B1 V5] # 1.ر.⃩🖞𖬴 +N; 1.\u0631\u200C。\u20E9🖞\u200C𖬴; [B1 B3 C1 V5]; [B1 B3 C1 V5] # 1.ر.⃩🖞𖬴 +B; 1.xn--wgb.xn--c1g6021kg18c; [B1 V5]; [B1 V5] # 1.ر.⃩🖞𖬴 +B; 1.xn--wgb253k.xn--0ugz6a8040fty5d; [B1 B3 C1 V5]; [B1 B3 C1 V5] # 1.ر.⃩🖞𖬴 +B; xn--wgb746m.xn--c1g6021kg18c; [B1 V5 V6]; [B1 V5 V6] # ⒈ر.⃩🖞𖬴 +B; xn--wgb253kmfd.xn--0ugz6a8040fty5d; [B1 C1 V5 V6]; [B1 C1 V5 V6] # ⒈ر.⃩🖞𖬴 +B; 󌭇。𝟐\u1BA8\u07D4; [B1 P1 V6]; [B1 P1 V6] # .2ᮨߔ +B; 󌭇。2\u1BA8\u07D4; [B1 P1 V6]; [B1 P1 V6] # .2ᮨߔ +B; xn--xm89d.xn--2-icd143m; [B1 V6]; [B1 V6] # .2ᮨߔ +T; \uFD8F򫳺.ς\u200D𐹷; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V6] # مخم.ς𐹷 +N; \uFD8F򫳺.ς\u200D𐹷; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # مخم.ς𐹷 +T; \u0645\u062E\u0645򫳺.ς\u200D𐹷; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V6] # مخم.ς𐹷 +N; \u0645\u062E\u0645򫳺.ς\u200D𐹷; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # مخم.ς𐹷 +T; \u0645\u062E\u0645򫳺.Σ\u200D𐹷; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V6] # مخم.σ𐹷 +N; \u0645\u062E\u0645򫳺.Σ\u200D𐹷; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # مخم.σ𐹷 +T; \u0645\u062E\u0645򫳺.σ\u200D𐹷; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V6] # مخم.σ𐹷 +N; \u0645\u062E\u0645򫳺.σ\u200D𐹷; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # مخم.σ𐹷 +B; xn--tgb9bb64691z.xn--4xa6667k; [B2 B3 B5 B6 V6]; [B2 B3 B5 B6 V6] # مخم.σ𐹷 +B; xn--tgb9bb64691z.xn--4xa895lrp7n; [B2 B3 B5 B6 C2 V6]; [B2 B3 B5 B6 C2 V6] # مخم.σ𐹷 +B; xn--tgb9bb64691z.xn--3xa006lrp7n; [B2 B3 B5 B6 C2 V6]; [B2 B3 B5 B6 C2 V6] # مخم.ς𐹷 +T; \uFD8F򫳺.Σ\u200D𐹷; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V6] # مخم.σ𐹷 +N; \uFD8F򫳺.Σ\u200D𐹷; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # مخم.σ𐹷 +T; \uFD8F򫳺.σ\u200D𐹷; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V6] # مخم.σ𐹷 +N; \uFD8F򫳺.σ\u200D𐹷; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # مخم.σ𐹷 +B; ⒎\u06C1\u0605。\uAAF6۵𐇽; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ⒎ہ.꫶۵𐇽 +B; 7.\u06C1\u0605。\uAAF6۵𐇽; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 7.ہ.꫶۵𐇽 +B; 7.xn--nfb98a.xn--imb3805fxt8b; [B1 V5 V6]; [B1 V5 V6] # 7.ہ.꫶۵𐇽 +B; xn--nfb98ai25e.xn--imb3805fxt8b; [B1 V5 V6]; [B1 V5 V6] # ⒎ہ.꫶۵𐇽 +B; -ᡥ᠆󍲭。\u0605\u1A5D𐹡; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ᡥ᠆.ᩝ𐹡 +B; xn----f3j6s87156i.xn--nfb035hoo2p; [B1 V3 V6]; [B1 V3 V6] # -ᡥ᠆.ᩝ𐹡 +T; \u200D.\u06BD\u0663\u0596; [B1 C2]; [A4_2] # .ڽ٣֖ +N; \u200D.\u06BD\u0663\u0596; [B1 C2]; [B1 C2] # .ڽ٣֖ +B; .xn--hcb32bni; [A4_2]; [A4_2] # .ڽ٣֖ +B; xn--1ug.xn--hcb32bni; [B1 C2]; [B1 C2] # .ڽ٣֖ +B; xn--hcb32bni; \u06BD\u0663\u0596; xn--hcb32bni # ڽ٣֖ +B; \u06BD\u0663\u0596; ; xn--hcb32bni # ڽ٣֖ +T; 㒧۱.Ⴚ\u0678\u200D; [B5 B6 C2 P1 V6]; [B5 B6 P1 V6] # 㒧۱.Ⴚيٴ +N; 㒧۱.Ⴚ\u0678\u200D; [B5 B6 C2 P1 V6]; [B5 B6 C2 P1 V6] # 㒧۱.Ⴚيٴ +T; 㒧۱.Ⴚ\u064A\u0674\u200D; [B5 B6 C2 P1 V6]; [B5 B6 P1 V6] # 㒧۱.Ⴚيٴ +N; 㒧۱.Ⴚ\u064A\u0674\u200D; [B5 B6 C2 P1 V6]; [B5 B6 C2 P1 V6] # 㒧۱.Ⴚيٴ +T; 㒧۱.ⴚ\u064A\u0674\u200D; [B5 B6 C2]; [B5 B6] # 㒧۱.ⴚيٴ +N; 㒧۱.ⴚ\u064A\u0674\u200D; [B5 B6 C2]; [B5 B6 C2] # 㒧۱.ⴚيٴ +B; xn--emb715u.xn--mhb8fy26k; [B5 B6]; [B5 B6] # 㒧۱.ⴚيٴ +B; xn--emb715u.xn--mhb8f960g03l; [B5 B6 C2]; [B5 B6 C2] # 㒧۱.ⴚيٴ +B; xn--emb715u.xn--mhb8f817a; [B5 B6 V6]; [B5 B6 V6] # 㒧۱.Ⴚيٴ +B; xn--emb715u.xn--mhb8f817ao2p; [B5 B6 C2 V6]; [B5 B6 C2 V6] # 㒧۱.Ⴚيٴ +T; 㒧۱.ⴚ\u0678\u200D; [B5 B6 C2]; [B5 B6] # 㒧۱.ⴚيٴ +N; 㒧۱.ⴚ\u0678\u200D; [B5 B6 C2]; [B5 B6 C2] # 㒧۱.ⴚيٴ +B; \u0F94ꡋ-.-𖬴; [V3 V5]; [V3 V5] # ྔꡋ-.-𖬴 +B; \u0F94ꡋ-.-𖬴; [V3 V5]; [V3 V5] # ྔꡋ-.-𖬴 +B; xn----ukg9938i.xn----4u5m; [V3 V5]; [V3 V5] # ྔꡋ-.-𖬴 +T; 񿒳-⋢\u200C.标-; [C1 P1 V3 V6]; [P1 V3 V6] # -⋢.标- +N; 
񿒳-⋢\u200C.标-; [C1 P1 V3 V6]; [C1 P1 V3 V6] # -⋢.标- +T; 񿒳-⊑\u0338\u200C.标-; [C1 P1 V3 V6]; [P1 V3 V6] # -⋢.标- +N; 񿒳-⊑\u0338\u200C.标-; [C1 P1 V3 V6]; [C1 P1 V3 V6] # -⋢.标- +T; 񿒳-⋢\u200C.标-; [C1 P1 V3 V6]; [P1 V3 V6] # -⋢.标- +N; 񿒳-⋢\u200C.标-; [C1 P1 V3 V6]; [C1 P1 V3 V6] # -⋢.标- +T; 񿒳-⊑\u0338\u200C.标-; [C1 P1 V3 V6]; [P1 V3 V6] # -⋢.标- +N; 񿒳-⊑\u0338\u200C.标-; [C1 P1 V3 V6]; [C1 P1 V3 V6] # -⋢.标- +B; xn----9mo67451g.xn----qj7b; [V3 V6]; [V3 V6] +B; xn----sgn90kn5663a.xn----qj7b; [C1 V3 V6]; [C1 V3 V6] # -⋢.标- +T; \u0671.ς\u07DC; [B5 B6]; [B5 B6] # ٱ.ςߜ +N; \u0671.ς\u07DC; [B5 B6]; [B5 B6] # ٱ.ςߜ +T; \u0671.ς\u07DC; [B5 B6]; [B5 B6] # ٱ.ςߜ +N; \u0671.ς\u07DC; [B5 B6]; [B5 B6] # ٱ.ςߜ +B; \u0671.Σ\u07DC; [B5 B6]; [B5 B6] # ٱ.σߜ +B; \u0671.σ\u07DC; [B5 B6]; [B5 B6] # ٱ.σߜ +B; xn--qib.xn--4xa21s; [B5 B6]; [B5 B6] # ٱ.σߜ +B; xn--qib.xn--3xa41s; [B5 B6]; [B5 B6] # ٱ.ςߜ +B; \u0671.Σ\u07DC; [B5 B6]; [B5 B6] # ٱ.σߜ +B; \u0671.σ\u07DC; [B5 B6]; [B5 B6] # ٱ.σߜ +T; 񼈶\u0605.\u08C1\u200D𑑂𱼱; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V6] # .𑑂 +N; 񼈶\u0605.\u08C1\u200D𑑂𱼱; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # .𑑂 +T; 񼈶\u0605.\u08C1\u200D𑑂𱼱; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V6] # .𑑂 +N; 񼈶\u0605.\u08C1\u200D𑑂𱼱; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # .𑑂 +B; xn--nfb17942h.xn--nzb6708kx3pn; [B2 B3 B5 B6 V6]; [B2 B3 B5 B6 V6] # .𑑂 +B; xn--nfb17942h.xn--nzb240jv06otevq; [B2 B3 B5 B6 C2 V6]; [B2 B3 B5 B6 C2 V6] # .𑑂 +B; 𐹾𐋩𞵜。\u1BF2; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𐹾𐋩.᯲ +B; 𐹾𐋩𞵜。\u1BF2; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𐹾𐋩.᯲ +B; xn--d97cn8rn44p.xn--0zf; [B1 V5 V6]; [B1 V5 V6] # 𐹾𐋩.᯲ +T; 6\u1160\u1C33󠸧.򟜊锰\u072Cς; [B1 B5 P1 V6]; [B1 B5 P1 V6] # 6ᰳ.锰ܬς +N; 6\u1160\u1C33󠸧.򟜊锰\u072Cς; [B1 B5 P1 V6]; [B1 B5 P1 V6] # 6ᰳ.锰ܬς +B; 6\u1160\u1C33󠸧.򟜊锰\u072CΣ; [B1 B5 P1 V6]; [B1 B5 P1 V6] # 6ᰳ.锰ܬσ +B; 6\u1160\u1C33󠸧.򟜊锰\u072Cσ; [B1 B5 P1 V6]; [B1 B5 P1 V6] # 6ᰳ.锰ܬσ +B; xn--6-5bh476ewr517a.xn--4xa95ohw6pk078g; [B1 B5 V6]; [B1 B5 V6] # 6ᰳ.锰ܬσ +B; xn--6-5bh476ewr517a.xn--3xa16ohw6pk078g; [B1 B5 V6]; [B1 B5 V6] # 6ᰳ.锰ܬς +B; \u06B3\uFE04񅎦𝟽。𐹽; [B1 B2 P1 V6]; [B1 B2 P1 V6] # ڳ7.𐹽 +B; \u06B3\uFE04񅎦7。𐹽; [B1 B2 P1 V6]; [B1 B2 P1 V6] # ڳ7.𐹽 +B; xn--7-yuc34665f.xn--1o0d; [B1 B2 V6]; [B1 B2 V6] # ڳ7.𐹽 +T; 𞮧.\u200C⫞; [B1 C1 P1 V6]; [B1 P1 V6] # .⫞ +N; 𞮧.\u200C⫞; [B1 C1 P1 V6]; [B1 C1 P1 V6] # .⫞ +T; 𞮧.\u200C⫞; [B1 C1 P1 V6]; [B1 P1 V6] # .⫞ +N; 𞮧.\u200C⫞; [B1 C1 P1 V6]; [B1 C1 P1 V6] # .⫞ +B; xn--pw6h.xn--53i; [B1 V6]; [B1 V6] +B; xn--pw6h.xn--0ug283b; [B1 C1 V6]; [B1 C1 V6] # .⫞ +B; -񕉴.\u06E0ᢚ-; [P1 V3 V5 V6]; [P1 V3 V5 V6] # -.۠ᢚ- +B; xn----qi38c.xn----jxc827k; [V3 V5 V6]; [V3 V5 V6] # -.۠ᢚ- +T; ⌁\u200D𑄴.\u200C𝟩\u066C; [B1 C1 C2]; [B1] # ⌁𑄴.7٬ +N; ⌁\u200D𑄴.\u200C𝟩\u066C; [B1 C1 C2]; [B1 C1 C2] # ⌁𑄴.7٬ +T; ⌁\u200D𑄴.\u200C7\u066C; [B1 C1 C2]; [B1] # ⌁𑄴.7٬ +N; ⌁\u200D𑄴.\u200C7\u066C; [B1 C1 C2]; [B1 C1 C2] # ⌁𑄴.7٬ +B; xn--nhh5394g.xn--7-xqc; [B1]; [B1] # ⌁𑄴.7٬ +B; xn--1ug38i2093a.xn--7-xqc297q; [B1 C1 C2]; [B1 C1 C2] # ⌁𑄴.7٬ +B; ︒\uFD05\u0E37\uFEFC。岓\u1BF2󠾃ᡂ; [B1 P1 V6]; [B1 P1 V6] # ︒صىืلا.岓᯲ᡂ +B; 。\u0635\u0649\u0E37\u0644\u0627。岓\u1BF2󠾃ᡂ; [P1 V6 A4_2]; [P1 V6 A4_2] # .صىืلا.岓᯲ᡂ +B; .xn--mgb1a7bt462h.xn--17e10qe61f9r71s; [V6 A4_2]; [V6 A4_2] # .صىืلا.岓᯲ᡂ +B; xn--mgb1a7bt462hf267a.xn--17e10qe61f9r71s; [B1 V6]; [B1 V6] # ︒صىืلا.岓᯲ᡂ +B; 𐹨。8𑁆; [B1]; [B1] +B; xn--go0d.xn--8-yu7i; [B1]; [B1] +B; 𞀕\u0D43.ꡚ\u08FA𐹰\u0D44; [B1 B3 B5 B6 V5]; [B1 B3 B5 B6 V5] # 𞀕ൃ.ꡚࣺ𐹰ൄ +B; 𞀕\u0D43.ꡚ\u08FA𐹰\u0D44; [B1 B3 B5 B6 V5]; [B1 B3 B5 B6 V5] # 𞀕ൃ.ꡚࣺ𐹰ൄ +B; xn--mxc5210v.xn--90b01t8u2p1ltd; [B1 B3 B5 B6 V5]; [B1 B3 B5 B6 V5] # 𞀕ൃ.ꡚࣺ𐹰ൄ +B; 󆩏𐦹\u0303。󠍅; [B1 B5 
B6 P1 V6]; [B1 B5 B6 P1 V6] # ̃. +B; 󆩏𐦹\u0303。󠍅; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # ̃. +B; xn--nsa1265kp9z9e.xn--xt36e; [B1 B5 B6 V6]; [B1 B5 B6 V6] # ̃. +B; ᢌ.-\u085A; [V3]; [V3] # ᢌ.-࡚ +B; ᢌ.-\u085A; [V3]; [V3] # ᢌ.-࡚ +B; xn--59e.xn----5jd; [V3]; [V3] # ᢌ.-࡚ +B; 𥛛𑘶。𐹬𐲸\u0BCD; [B1 P1 V6]; [B1 P1 V6] # 𥛛𑘶.𐹬் +B; 𥛛𑘶。𐹬𐲸\u0BCD; [B1 P1 V6]; [B1 P1 V6] # 𥛛𑘶.𐹬் +B; xn--jb2dj685c.xn--xmc5562kmcb; [B1 V6]; [B1 V6] # 𥛛𑘶.𐹬் +T; Ⴐ\u077F.\u200C; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6] # Ⴐݿ. +N; Ⴐ\u077F.\u200C; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # Ⴐݿ. +T; Ⴐ\u077F.\u200C; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6] # Ⴐݿ. +N; Ⴐ\u077F.\u200C; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # Ⴐݿ. +T; ⴐ\u077F.\u200C; [B1 B5 B6 C1]; [B5 B6] # ⴐݿ. +N; ⴐ\u077F.\u200C; [B1 B5 B6 C1]; [B1 B5 B6 C1] # ⴐݿ. +B; xn--gqb743q.; [B5 B6]; [B5 B6] # ⴐݿ. +B; xn--gqb743q.xn--0ug; [B1 B5 B6 C1]; [B1 B5 B6 C1] # ⴐݿ. +B; xn--gqb918b.; [B5 B6 V6]; [B5 B6 V6] # Ⴐݿ. +B; xn--gqb918b.xn--0ug; [B1 B5 B6 C1 V6]; [B1 B5 B6 C1 V6] # Ⴐݿ. +T; ⴐ\u077F.\u200C; [B1 B5 B6 C1]; [B5 B6] # ⴐݿ. +N; ⴐ\u077F.\u200C; [B1 B5 B6 C1]; [B1 B5 B6 C1] # ⴐݿ. +T; 🄅𑲞-⒈。\u200Dᠩ\u06A5; [B1 C2 P1 V6]; [B1 B5 B6 P1 V6] # 🄅𑲞-⒈.ᠩڥ +N; 🄅𑲞-⒈。\u200Dᠩ\u06A5; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 🄅𑲞-⒈.ᠩڥ +T; 4,𑲞-1.。\u200Dᠩ\u06A5; [B1 C2 P1 V6 A4_2]; [B1 B5 B6 P1 V6 A4_2] # 4,𑲞-1..ᠩڥ +N; 4,𑲞-1.。\u200Dᠩ\u06A5; [B1 C2 P1 V6 A4_2]; [B1 C2 P1 V6 A4_2] # 4,𑲞-1..ᠩڥ +B; xn--4,-1-w401a..xn--7jb180g; [B1 B5 B6 P1 V6 A4_2]; [B1 B5 B6 P1 V6 A4_2] # 4,𑲞-1..ᠩڥ +B; xn--4,-1-w401a..xn--7jb180gexf; [B1 C2 P1 V6 A4_2]; [B1 C2 P1 V6 A4_2] # 4,𑲞-1..ᠩڥ +B; xn----ecp8796hjtvg.xn--7jb180g; [B1 B5 B6 V6]; [B1 B5 B6 V6] # 🄅𑲞-⒈.ᠩڥ +B; xn----ecp8796hjtvg.xn--7jb180gexf; [B1 C2 V6]; [B1 C2 V6] # 🄅𑲞-⒈.ᠩڥ +B; 񗀤。𞤪򮿋; [B2 B3 P1 V6]; [B2 B3 P1 V6] +B; 񗀤。𞤈򮿋; [B2 B3 P1 V6]; [B2 B3 P1 V6] +B; xn--4240a.xn--ie6h83808a; [B2 B3 V6]; [B2 B3 V6] +B; \u05C1۲。𐮊\u066C𝨊鄨; [B1 B2 B3 V5]; [B1 B2 B3 V5] # ׁ۲.𐮊٬𝨊鄨 +B; \u05C1۲。𐮊\u066C𝨊鄨; [B1 B2 B3 V5]; [B1 B2 B3 V5] # ׁ۲.𐮊٬𝨊鄨 +B; xn--pdb42d.xn--lib6412enztdwv6h; [B1 B2 B3 V5]; [B1 B2 B3 V5] # ׁ۲.𐮊٬𝨊鄨 +B; 𞭳-ꡁ。\u1A69\u0BCD-; [B1 B2 B3 P1 V3 V5 V6]; [B1 B2 B3 P1 V3 V5 V6] # -ꡁ.ᩩ்- +B; xn----be4e4276f.xn----lze333i; [B1 B2 B3 V3 V5 V6]; [B1 B2 B3 V3 V5 V6] # -ꡁ.ᩩ்- +T; \u1039-𚮭🞢.ß; [P1 V5 V6]; [P1 V5 V6] # ္-🞢.ß +N; \u1039-𚮭🞢.ß; [P1 V5 V6]; [P1 V5 V6] # ္-🞢.ß +T; \u1039-𚮭🞢.ß; [P1 V5 V6]; [P1 V5 V6] # ္-🞢.ß +N; \u1039-𚮭🞢.ß; [P1 V5 V6]; [P1 V5 V6] # ္-🞢.ß +B; \u1039-𚮭🞢.SS; [P1 V5 V6]; [P1 V5 V6] # ္-🞢.ss +B; \u1039-𚮭🞢.ss; [P1 V5 V6]; [P1 V5 V6] # ္-🞢.ss +B; \u1039-𚮭🞢.Ss; [P1 V5 V6]; [P1 V5 V6] # ္-🞢.ss +B; xn----9tg11172akr8b.ss; [V5 V6]; [V5 V6] # ္-🞢.ss +B; xn----9tg11172akr8b.xn--zca; [V5 V6]; [V5 V6] # ္-🞢.ß +B; \u1039-𚮭🞢.SS; [P1 V5 V6]; [P1 V5 V6] # ္-🞢.ss +B; \u1039-𚮭🞢.ss; [P1 V5 V6]; [P1 V5 V6] # ္-🞢.ss +B; \u1039-𚮭🞢.Ss; [P1 V5 V6]; [P1 V5 V6] # ္-🞢.ss +T; \uFCF2-\u200C。Ⴟ\u200C␣; [B3 B6 C1 P1 V6]; [B3 B6 P1 V3 V6] # ـَّ-.Ⴟ␣ +N; \uFCF2-\u200C。Ⴟ\u200C␣; [B3 B6 C1 P1 V6]; [B3 B6 C1 P1 V6] # ـَّ-.Ⴟ␣ +T; \u0640\u064E\u0651-\u200C。Ⴟ\u200C␣; [B3 B6 C1 P1 V6]; [B3 B6 P1 V3 V6] # ـَّ-.Ⴟ␣ +N; \u0640\u064E\u0651-\u200C。Ⴟ\u200C␣; [B3 B6 C1 P1 V6]; [B3 B6 C1 P1 V6] # ـَّ-.Ⴟ␣ +T; \u0640\u064E\u0651-\u200C。ⴟ\u200C␣; [B3 B6 C1]; [B3 B6 V3] # ـَّ-.ⴟ␣ +N; \u0640\u064E\u0651-\u200C。ⴟ\u200C␣; [B3 B6 C1]; [B3 B6 C1] # ـَّ-.ⴟ␣ +B; xn----eoc6bm.xn--xph904a; [B3 B6 V3]; [B3 B6 V3] # ـَّ-.ⴟ␣ +B; xn----eoc6bm0504a.xn--0ug13nd0j; [B3 B6 C1]; [B3 B6 C1] # ـَّ-.ⴟ␣ +B; xn----eoc6bm.xn--3nd240h; [B3 B6 V3 V6]; [B3 B6 V3 V6] # ـَّ-.Ⴟ␣ +B; xn----eoc6bm0504a.xn--3nd849e05c; [B3 B6 C1 V6]; [B3 B6 C1 V6] # ـَّ-.Ⴟ␣ +T; 
\uFCF2-\u200C。ⴟ\u200C␣; [B3 B6 C1]; [B3 B6 V3] # ـَّ-.ⴟ␣ +N; \uFCF2-\u200C。ⴟ\u200C␣; [B3 B6 C1]; [B3 B6 C1] # ـَّ-.ⴟ␣ +T; \u0D4D-\u200D\u200C。񥞧₅≠; [C1 C2 P1 V5 V6]; [P1 V3 V5 V6] # ്-.5≠ +N; \u0D4D-\u200D\u200C。񥞧₅≠; [C1 C2 P1 V5 V6]; [C1 C2 P1 V5 V6] # ്-.5≠ +T; \u0D4D-\u200D\u200C。񥞧₅=\u0338; [C1 C2 P1 V5 V6]; [P1 V3 V5 V6] # ്-.5≠ +N; \u0D4D-\u200D\u200C。񥞧₅=\u0338; [C1 C2 P1 V5 V6]; [C1 C2 P1 V5 V6] # ്-.5≠ +T; \u0D4D-\u200D\u200C。񥞧5≠; [C1 C2 P1 V5 V6]; [P1 V3 V5 V6] # ്-.5≠ +N; \u0D4D-\u200D\u200C。񥞧5≠; [C1 C2 P1 V5 V6]; [C1 C2 P1 V5 V6] # ്-.5≠ +T; \u0D4D-\u200D\u200C。񥞧5=\u0338; [C1 C2 P1 V5 V6]; [P1 V3 V5 V6] # ്-.5≠ +N; \u0D4D-\u200D\u200C。񥞧5=\u0338; [C1 C2 P1 V5 V6]; [C1 C2 P1 V5 V6] # ്-.5≠ +B; xn----jmf.xn--5-ufo50192e; [V3 V5 V6]; [V3 V5 V6] # ്-.5≠ +B; xn----jmf215lda.xn--5-ufo50192e; [C1 C2 V5 V6]; [C1 C2 V5 V6] # ്-.5≠ +B; 锣。\u0A4D󠘻󠚆; [P1 V5 V6]; [P1 V5 V6] # 锣.੍ +B; xn--gc5a.xn--ybc83044ppga; [V5 V6]; [V5 V6] # 锣.੍ +T; \u063D𑈾.\u0649\u200D\uA92B; [B3 C2]; xn--8gb2338k.xn--lhb0154f # ؽ𑈾.ى꤫ +N; \u063D𑈾.\u0649\u200D\uA92B; [B3 C2]; [B3 C2] # ؽ𑈾.ى꤫ +T; \u063D𑈾.\u0649\u200D\uA92B; [B3 C2]; xn--8gb2338k.xn--lhb0154f # ؽ𑈾.ى꤫ +N; \u063D𑈾.\u0649\u200D\uA92B; [B3 C2]; [B3 C2] # ؽ𑈾.ى꤫ +B; xn--8gb2338k.xn--lhb0154f; \u063D𑈾.\u0649\uA92B; xn--8gb2338k.xn--lhb0154f # ؽ𑈾.ى꤫ +B; \u063D𑈾.\u0649\uA92B; ; xn--8gb2338k.xn--lhb0154f # ؽ𑈾.ى꤫ +B; xn--8gb2338k.xn--lhb603k060h; [B3 C2]; [B3 C2] # ؽ𑈾.ى꤫ +T; \u0666⁴Ⴅ.\u08BD\u200C; [B1 B3 C1 P1 V6]; [B1 P1 V6] # ٦4Ⴅ.ࢽ +N; \u0666⁴Ⴅ.\u08BD\u200C; [B1 B3 C1 P1 V6]; [B1 B3 C1 P1 V6] # ٦4Ⴅ.ࢽ +T; \u06664Ⴅ.\u08BD\u200C; [B1 B3 C1 P1 V6]; [B1 P1 V6] # ٦4Ⴅ.ࢽ +N; \u06664Ⴅ.\u08BD\u200C; [B1 B3 C1 P1 V6]; [B1 B3 C1 P1 V6] # ٦4Ⴅ.ࢽ +T; \u06664ⴅ.\u08BD\u200C; [B1 B3 C1]; [B1] # ٦4ⴅ.ࢽ +N; \u06664ⴅ.\u08BD\u200C; [B1 B3 C1]; [B1 B3 C1] # ٦4ⴅ.ࢽ +B; xn--4-kqc6770a.xn--jzb; [B1]; [B1] # ٦4ⴅ.ࢽ +B; xn--4-kqc6770a.xn--jzb840j; [B1 B3 C1]; [B1 B3 C1] # ٦4ⴅ.ࢽ +B; xn--4-kqc489e.xn--jzb; [B1 V6]; [B1 V6] # ٦4Ⴅ.ࢽ +B; xn--4-kqc489e.xn--jzb840j; [B1 B3 C1 V6]; [B1 B3 C1 V6] # ٦4Ⴅ.ࢽ +T; \u0666⁴ⴅ.\u08BD\u200C; [B1 B3 C1]; [B1] # ٦4ⴅ.ࢽ +N; \u0666⁴ⴅ.\u08BD\u200C; [B1 B3 C1]; [B1 B3 C1] # ٦4ⴅ.ࢽ +T; ჁႱ6\u0318。ß\u1B03; [P1 V6]; [P1 V6] # ჁႱ6̘.ßᬃ +N; ჁႱ6\u0318。ß\u1B03; [P1 V6]; [P1 V6] # ჁႱ6̘.ßᬃ +T; ⴡⴑ6\u0318。ß\u1B03; ⴡⴑ6\u0318.ß\u1B03; xn--6-8cb7433a2ba.xn--ss-2vq # ⴡⴑ6̘.ßᬃ +N; ⴡⴑ6\u0318。ß\u1B03; ⴡⴑ6\u0318.ß\u1B03; xn--6-8cb7433a2ba.xn--zca894k # ⴡⴑ6̘.ßᬃ +B; ჁႱ6\u0318。SS\u1B03; [P1 V6]; [P1 V6] # ჁႱ6̘.ssᬃ +B; ⴡⴑ6\u0318。ss\u1B03; ⴡⴑ6\u0318.ss\u1B03; xn--6-8cb7433a2ba.xn--ss-2vq # ⴡⴑ6̘.ssᬃ +B; Ⴡⴑ6\u0318。Ss\u1B03; [P1 V6]; [P1 V6] # Ⴡⴑ6̘.ssᬃ +B; xn--6-8cb306hms1a.xn--ss-2vq; [V6]; [V6] # Ⴡⴑ6̘.ssᬃ +B; xn--6-8cb7433a2ba.xn--ss-2vq; ⴡⴑ6\u0318.ss\u1B03; xn--6-8cb7433a2ba.xn--ss-2vq # ⴡⴑ6̘.ssᬃ +B; ⴡⴑ6\u0318.ss\u1B03; ; xn--6-8cb7433a2ba.xn--ss-2vq # ⴡⴑ6̘.ssᬃ +B; ჁႱ6\u0318.SS\u1B03; [P1 V6]; [P1 V6] # ჁႱ6̘.ssᬃ +B; Ⴡⴑ6\u0318.Ss\u1B03; [P1 V6]; [P1 V6] # Ⴡⴑ6̘.ssᬃ +B; xn--6-8cb555h2b.xn--ss-2vq; [V6]; [V6] # ჁႱ6̘.ssᬃ +B; xn--6-8cb7433a2ba.xn--zca894k; ⴡⴑ6\u0318.ß\u1B03; xn--6-8cb7433a2ba.xn--zca894k # ⴡⴑ6̘.ßᬃ +T; ⴡⴑ6\u0318.ß\u1B03; ; xn--6-8cb7433a2ba.xn--ss-2vq # ⴡⴑ6̘.ßᬃ +N; ⴡⴑ6\u0318.ß\u1B03; ; xn--6-8cb7433a2ba.xn--zca894k # ⴡⴑ6̘.ßᬃ +B; xn--6-8cb555h2b.xn--zca894k; [V6]; [V6] # ჁႱ6̘.ßᬃ +B; 򋡐。≯𑋪; [P1 V6]; [P1 V6] +B; 򋡐。>\u0338𑋪; [P1 V6]; [P1 V6] +B; 򋡐。≯𑋪; [P1 V6]; [P1 V6] +B; 򋡐。>\u0338𑋪; [P1 V6]; [P1 V6] +B; xn--eo08b.xn--hdh3385g; [V6]; [V6] +T; \u065A۲。\u200C-\u1BF3\u08E2; [B1 C1 P1 V5 V6]; [B1 P1 V3 V5 V6] # ٚ۲.-᯳ +N; \u065A۲。\u200C-\u1BF3\u08E2; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # ٚ۲.-᯳ +B; 
xn--2hb81a.xn----xrd657l; [B1 V3 V5 V6]; [B1 V3 V5 V6] # ٚ۲.-᯳ +B; xn--2hb81a.xn----xrd657l30d; [B1 C1 V5 V6]; [B1 C1 V5 V6] # ٚ۲.-᯳ +B; 󠄏𖬴󠲽。\uFFA0; [P1 V5 V6]; [P1 V5 V6] # 𖬴. +B; 󠄏𖬴󠲽。\u1160; [P1 V5 V6]; [P1 V5 V6] # 𖬴. +B; xn--619ep9154c.xn--psd; [V5 V6]; [V5 V6] # 𖬴. +B; xn--619ep9154c.xn--cl7c; [V5 V6]; [V5 V6] # 𖬴. +T; ß⒈\u0760\uD7AE.􉖲󠅄\u0605򉔯; [B5 P1 V6]; [B5 P1 V6] # ß⒈ݠ. +N; ß⒈\u0760\uD7AE.􉖲󠅄\u0605򉔯; [B5 P1 V6]; [B5 P1 V6] # ß⒈ݠ. +T; ß1.\u0760\uD7AE.􉖲󠅄\u0605򉔯; [B2 B3 B5 P1 V6]; [B2 B3 B5 P1 V6] # ß1.ݠ. +N; ß1.\u0760\uD7AE.􉖲󠅄\u0605򉔯; [B2 B3 B5 P1 V6]; [B2 B3 B5 P1 V6] # ß1.ݠ. +B; SS1.\u0760\uD7AE.􉖲󠅄\u0605򉔯; [B2 B3 B5 P1 V6]; [B2 B3 B5 P1 V6] # ss1.ݠ. +B; ss1.\u0760\uD7AE.􉖲󠅄\u0605򉔯; [B2 B3 B5 P1 V6]; [B2 B3 B5 P1 V6] # ss1.ݠ. +B; Ss1.\u0760\uD7AE.􉖲󠅄\u0605򉔯; [B2 B3 B5 P1 V6]; [B2 B3 B5 P1 V6] # ss1.ݠ. +B; ss1.xn--kpb6677h.xn--nfb09923ifkyyb; [B2 B3 B5 V6]; [B2 B3 B5 V6] # ss1.ݠ. +B; xn--1-pfa.xn--kpb6677h.xn--nfb09923ifkyyb; [B2 B3 B5 V6]; [B2 B3 B5 V6] # ß1.ݠ. +B; SS⒈\u0760\uD7AE.􉖲󠅄\u0605򉔯; [B5 P1 V6]; [B5 P1 V6] # ss⒈ݠ. +B; ss⒈\u0760\uD7AE.􉖲󠅄\u0605򉔯; [B5 P1 V6]; [B5 P1 V6] # ss⒈ݠ. +B; Ss⒈\u0760\uD7AE.􉖲󠅄\u0605򉔯; [B5 P1 V6]; [B5 P1 V6] # ss⒈ݠ. +B; xn--ss-6ke9690a0g1q.xn--nfb09923ifkyyb; [B5 V6]; [B5 V6] # ss⒈ݠ. +B; xn--zca444a0s1ao12n.xn--nfb09923ifkyyb; [B5 V6]; [B5 V6] # ß⒈ݠ. +B; 󠭔.𐋱₂; [P1 V6]; [P1 V6] +B; 󠭔.𐋱2; [P1 V6]; [P1 V6] +B; xn--vi56e.xn--2-w91i; [V6]; [V6] +T; \u0716\u0947。-ß\u06A5\u200C; [B1 C1 V3]; [B1 V3] # ܖे.-ßڥ +N; \u0716\u0947。-ß\u06A5\u200C; [B1 C1 V3]; [B1 C1 V3] # ܖे.-ßڥ +T; \u0716\u0947。-SS\u06A5\u200C; [B1 C1 V3]; [B1 V3] # ܖे.-ssڥ +N; \u0716\u0947。-SS\u06A5\u200C; [B1 C1 V3]; [B1 C1 V3] # ܖे.-ssڥ +T; \u0716\u0947。-ss\u06A5\u200C; [B1 C1 V3]; [B1 V3] # ܖे.-ssڥ +N; \u0716\u0947。-ss\u06A5\u200C; [B1 C1 V3]; [B1 C1 V3] # ܖे.-ssڥ +T; \u0716\u0947。-Ss\u06A5\u200C; [B1 C1 V3]; [B1 V3] # ܖे.-ssڥ +N; \u0716\u0947。-Ss\u06A5\u200C; [B1 C1 V3]; [B1 C1 V3] # ܖे.-ssڥ +B; xn--gnb63i.xn---ss-4ef; [B1 V3]; [B1 V3] # ܖे.-ssڥ +B; xn--gnb63i.xn---ss-4ef9263a; [B1 C1 V3]; [B1 C1 V3] # ܖे.-ssڥ +B; xn--gnb63i.xn----qfa845bhx4a; [B1 C1 V3]; [B1 C1 V3] # ܖे.-ßڥ +T; \u1BA9\u200D\u062A񡚈.\u1CD5䷉Ⴡ; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # ᮩت.᳕䷉Ⴡ +N; \u1BA9\u200D\u062A񡚈.\u1CD5䷉Ⴡ; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # ᮩت.᳕䷉Ⴡ +T; \u1BA9\u200D\u062A񡚈.\u1CD5䷉Ⴡ; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # ᮩت.᳕䷉Ⴡ +N; \u1BA9\u200D\u062A񡚈.\u1CD5䷉Ⴡ; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # ᮩت.᳕䷉Ⴡ +T; \u1BA9\u200D\u062A񡚈.\u1CD5䷉ⴡ; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # ᮩت.᳕䷉ⴡ +N; \u1BA9\u200D\u062A񡚈.\u1CD5䷉ⴡ; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # ᮩت.᳕䷉ⴡ +B; xn--pgb911izv33i.xn--i6f270etuy; [B1 V5 V6]; [B1 V5 V6] # ᮩت.᳕䷉ⴡ +B; xn--pgb911imgdrw34r.xn--i6f270etuy; [B1 C2 V5 V6]; [B1 C2 V5 V6] # ᮩت.᳕䷉ⴡ +B; xn--pgb911izv33i.xn--5nd792dgv3b; [B1 V5 V6]; [B1 V5 V6] # ᮩت.᳕䷉Ⴡ +B; xn--pgb911imgdrw34r.xn--5nd792dgv3b; [B1 C2 V5 V6]; [B1 C2 V5 V6] # ᮩت.᳕䷉Ⴡ +T; \u1BA9\u200D\u062A񡚈.\u1CD5䷉ⴡ; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # ᮩت.᳕䷉ⴡ +N; \u1BA9\u200D\u062A񡚈.\u1CD5䷉ⴡ; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # ᮩت.᳕䷉ⴡ +T; \u2DBF.ß\u200D; [C2 P1 V6]; [P1 V6] # .ß +N; \u2DBF.ß\u200D; [C2 P1 V6]; [C2 P1 V6] # .ß +T; \u2DBF.SS\u200D; [C2 P1 V6]; [P1 V6] # .ss +N; \u2DBF.SS\u200D; [C2 P1 V6]; [C2 P1 V6] # .ss +T; \u2DBF.ss\u200D; [C2 P1 V6]; [P1 V6] # .ss +N; \u2DBF.ss\u200D; [C2 P1 V6]; [C2 P1 V6] # .ss +T; \u2DBF.Ss\u200D; [C2 P1 V6]; [P1 V6] # .ss +N; \u2DBF.Ss\u200D; [C2 P1 V6]; [C2 P1 V6] # .ss +B; xn--7pj.ss; [V6]; [V6] # .ss +B; xn--7pj.xn--ss-n1t; [C2 V6]; [C2 V6] # .ss +B; xn--7pj.xn--zca870n; [C2 V6]; [C2 V6] # .ß +B; \u1BF3︒.\u062A≯ꡂ; [B2 B3 B6 
P1 V5 V6]; [B2 B3 B6 P1 V5 V6] # ᯳︒.ت≯ꡂ +B; \u1BF3︒.\u062A>\u0338ꡂ; [B2 B3 B6 P1 V5 V6]; [B2 B3 B6 P1 V5 V6] # ᯳︒.ت≯ꡂ +B; \u1BF3。.\u062A≯ꡂ; [B2 B3 P1 V5 V6 A4_2]; [B2 B3 P1 V5 V6 A4_2] # ᯳..ت≯ꡂ +B; \u1BF3。.\u062A>\u0338ꡂ; [B2 B3 P1 V5 V6 A4_2]; [B2 B3 P1 V5 V6 A4_2] # ᯳..ت≯ꡂ +B; xn--1zf..xn--pgb885lry5g; [B2 B3 V5 V6 A4_2]; [B2 B3 V5 V6 A4_2] # ᯳..ت≯ꡂ +B; xn--1zf8957g.xn--pgb885lry5g; [B2 B3 B6 V5 V6]; [B2 B3 B6 V5 V6] # ᯳︒.ت≯ꡂ +B; ≮≠񏻃。-𫠆\u06B7𐹪; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ≮≠.-𫠆ڷ𐹪 +B; <\u0338=\u0338񏻃。-𫠆\u06B7𐹪; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ≮≠.-𫠆ڷ𐹪 +B; ≮≠񏻃。-𫠆\u06B7𐹪; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ≮≠.-𫠆ڷ𐹪 +B; <\u0338=\u0338񏻃。-𫠆\u06B7𐹪; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ≮≠.-𫠆ڷ𐹪 +B; xn--1ch1a29470f.xn----7uc5363rc1rn; [B1 V3 V6]; [B1 V3 V6] # ≮≠.-𫠆ڷ𐹪 +B; 𐹡\u0777。ꡂ; [B1]; [B1] # 𐹡ݷ.ꡂ +B; xn--7pb5275k.xn--bc9a; [B1]; [B1] # 𐹡ݷ.ꡂ +T; Ⴉ𝆅񔻅\u0619.ß𐧦𐹳\u0775; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴉؙ𝆅.ß𐧦𐹳ݵ +N; Ⴉ𝆅񔻅\u0619.ß𐧦𐹳\u0775; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴉؙ𝆅.ß𐧦𐹳ݵ +T; ⴉ𝆅񔻅\u0619.ß𐧦𐹳\u0775; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ⴉؙ𝆅.ß𐧦𐹳ݵ +N; ⴉ𝆅񔻅\u0619.ß𐧦𐹳\u0775; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ⴉؙ𝆅.ß𐧦𐹳ݵ +B; Ⴉ𝆅񔻅\u0619.SS𐧦𐹳\u0775; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴉؙ𝆅.ss𐧦𐹳ݵ +B; ⴉ𝆅񔻅\u0619.ss𐧦𐹳\u0775; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ⴉؙ𝆅.ss𐧦𐹳ݵ +B; Ⴉ𝆅񔻅\u0619.Ss𐧦𐹳\u0775; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴉؙ𝆅.ss𐧦𐹳ݵ +B; xn--7fb125cjv87a7xvz.xn--ss-zme7575xp0e; [B5 B6 V6]; [B5 B6 V6] # Ⴉؙ𝆅.ss𐧦𐹳ݵ +B; xn--7fb940rwt3z7xvz.xn--ss-zme7575xp0e; [B5 B6 V6]; [B5 B6 V6] # ⴉؙ𝆅.ss𐧦𐹳ݵ +B; xn--7fb940rwt3z7xvz.xn--zca684a699vf2d; [B5 B6 V6]; [B5 B6 V6] # ⴉؙ𝆅.ß𐧦𐹳ݵ +B; xn--7fb125cjv87a7xvz.xn--zca684a699vf2d; [B5 B6 V6]; [B5 B6 V6] # Ⴉؙ𝆅.ß𐧦𐹳ݵ +T; \u200D\u0643𐧾↙.񊽡; [B1 C2 P1 V6]; [B3 P1 V6] # ك𐧾↙. +N; \u200D\u0643𐧾↙.񊽡; [B1 C2 P1 V6]; [B1 C2 P1 V6] # ك𐧾↙. +B; xn--fhb011lnp8n.xn--7s4w; [B3 V6]; [B3 V6] # ك𐧾↙. +B; xn--fhb713k87ag053c.xn--7s4w; [B1 C2 V6]; [B1 C2 V6] # ك𐧾↙. +T; 梉。\u200C; [C1]; xn--7zv. # 梉. +N; 梉。\u200C; [C1]; [C1] # 梉. +B; xn--7zv.; 梉.; xn--7zv. +B; 梉.; ; xn--7zv. +B; xn--7zv.xn--0ug; [C1]; [C1] # 梉. 
+T; ꡣ-≠.\u200D𞤗𐅢Ↄ; [B1 B6 C2 P1 V6]; [B2 B3 B6 P1 V6] # ꡣ-≠.𞤹𐅢Ↄ +N; ꡣ-≠.\u200D𞤗𐅢Ↄ; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # ꡣ-≠.𞤹𐅢Ↄ +T; ꡣ-=\u0338.\u200D𞤗𐅢Ↄ; [B1 B6 C2 P1 V6]; [B2 B3 B6 P1 V6] # ꡣ-≠.𞤹𐅢Ↄ +N; ꡣ-=\u0338.\u200D𞤗𐅢Ↄ; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # ꡣ-≠.𞤹𐅢Ↄ +T; ꡣ-=\u0338.\u200D𞤹𐅢ↄ; [B1 B6 C2 P1 V6]; [B2 B3 B6 P1 V6] # ꡣ-≠.𞤹𐅢ↄ +N; ꡣ-=\u0338.\u200D𞤹𐅢ↄ; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # ꡣ-≠.𞤹𐅢ↄ +T; ꡣ-≠.\u200D𞤹𐅢ↄ; [B1 B6 C2 P1 V6]; [B2 B3 B6 P1 V6] # ꡣ-≠.𞤹𐅢ↄ +N; ꡣ-≠.\u200D𞤹𐅢ↄ; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # ꡣ-≠.𞤹𐅢ↄ +T; ꡣ-≠.\u200D𞤗𐅢ↄ; [B1 B6 C2 P1 V6]; [B2 B3 B6 P1 V6] # ꡣ-≠.𞤹𐅢ↄ +N; ꡣ-≠.\u200D𞤗𐅢ↄ; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # ꡣ-≠.𞤹𐅢ↄ +T; ꡣ-=\u0338.\u200D𞤗𐅢ↄ; [B1 B6 C2 P1 V6]; [B2 B3 B6 P1 V6] # ꡣ-≠.𞤹𐅢ↄ +N; ꡣ-=\u0338.\u200D𞤗𐅢ↄ; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # ꡣ-≠.𞤹𐅢ↄ +B; xn----ufo9661d.xn--r5gy929fhm4f; [B2 B3 B6 V6]; [B2 B3 B6 V6] +B; xn----ufo9661d.xn--1ug99cj620c71sh; [B1 B6 C2 V6]; [B1 B6 C2 V6] # ꡣ-≠.𞤹𐅢ↄ +B; xn----ufo9661d.xn--q5g0929fhm4f; [B2 B3 B6 V6]; [B2 B3 B6 V6] +B; xn----ufo9661d.xn--1ug79cm620c71sh; [B1 B6 C2 V6]; [B1 B6 C2 V6] # ꡣ-≠.𞤹𐅢Ↄ +T; ς⒐𝆫⸵。𐱢🄊𝟳; [B6 P1 V6]; [B6 P1 V6] +N; ς⒐𝆫⸵。𐱢🄊𝟳; [B6 P1 V6]; [B6 P1 V6] +T; ς9.𝆫⸵。𐱢9,7; [B1 P1 V5 V6]; [B1 P1 V5 V6] +N; ς9.𝆫⸵。𐱢9,7; [B1 P1 V5 V6]; [B1 P1 V5 V6] +B; Σ9.𝆫⸵。𐱢9,7; [B1 P1 V5 V6]; [B1 P1 V5 V6] +B; σ9.𝆫⸵。𐱢9,7; [B1 P1 V5 V6]; [B1 P1 V5 V6] +B; xn--9-zmb.xn--ltj1535k.xn--9,7-r67t; [B1 P1 V5 V6]; [B1 P1 V5 V6] +B; xn--9-xmb.xn--ltj1535k.xn--9,7-r67t; [B1 P1 V5 V6]; [B1 P1 V5 V6] +B; Σ⒐𝆫⸵。𐱢🄊𝟳; [B6 P1 V6]; [B6 P1 V6] +B; σ⒐𝆫⸵。𐱢🄊𝟳; [B6 P1 V6]; [B6 P1 V6] +B; xn--4xa809nwtghi25b.xn--7-075iy877c; [B6 V6]; [B6 V6] +B; xn--3xa019nwtghi25b.xn--7-075iy877c; [B6 V6]; [B6 V6] +T; \u0853.\u200Cß; [B1 C1]; xn--iwb.ss # ࡓ.ß +N; \u0853.\u200Cß; [B1 C1]; [B1 C1] # ࡓ.ß +T; \u0853.\u200Cß; [B1 C1]; xn--iwb.ss # ࡓ.ß +N; \u0853.\u200Cß; [B1 C1]; [B1 C1] # ࡓ.ß +T; \u0853.\u200CSS; [B1 C1]; xn--iwb.ss # ࡓ.ss +N; \u0853.\u200CSS; [B1 C1]; [B1 C1] # ࡓ.ss +T; \u0853.\u200Css; [B1 C1]; xn--iwb.ss # ࡓ.ss +N; \u0853.\u200Css; [B1 C1]; [B1 C1] # ࡓ.ss +T; \u0853.\u200CSs; [B1 C1]; xn--iwb.ss # ࡓ.ss +N; \u0853.\u200CSs; [B1 C1]; [B1 C1] # ࡓ.ss +B; xn--iwb.ss; \u0853.ss; xn--iwb.ss # ࡓ.ss +B; \u0853.ss; ; xn--iwb.ss # ࡓ.ss +B; \u0853.SS; \u0853.ss; xn--iwb.ss # ࡓ.ss +B; \u0853.Ss; \u0853.ss; xn--iwb.ss # ࡓ.ss +B; xn--iwb.xn--ss-i1t; [B1 C1]; [B1 C1] # ࡓ.ss +B; xn--iwb.xn--zca570n; [B1 C1]; [B1 C1] # ࡓ.ß +T; \u0853.\u200CSS; [B1 C1]; xn--iwb.ss # ࡓ.ss +N; \u0853.\u200CSS; [B1 C1]; [B1 C1] # ࡓ.ss +T; \u0853.\u200Css; [B1 C1]; xn--iwb.ss # ࡓ.ss +N; \u0853.\u200Css; [B1 C1]; [B1 C1] # ࡓ.ss +T; \u0853.\u200CSs; [B1 C1]; xn--iwb.ss # ࡓ.ss +N; \u0853.\u200CSs; [B1 C1]; [B1 C1] # ࡓ.ss +T; 񯶣-.\u200D\u074E\uA94D󠻨; [B1 B6 C2 P1 V3 V6]; [B3 B6 P1 V3 V6] # -.ݎꥍ +N; 񯶣-.\u200D\u074E\uA94D󠻨; [B1 B6 C2 P1 V3 V6]; [B1 B6 C2 P1 V3 V6] # -.ݎꥍ +B; xn----s116e.xn--1ob6504fmf40i; [B3 B6 V3 V6]; [B3 B6 V3 V6] # -.ݎꥍ +B; xn----s116e.xn--1ob387jy90hq459k; [B1 B6 C2 V3 V6]; [B1 B6 C2 V3 V6] # -.ݎꥍ +B; 䃚蟥-。-񽒘⒈; [P1 V3 V6]; [P1 V3 V6] +B; 䃚蟥-。-񽒘1.; [P1 V3 V6]; [P1 V3 V6] +B; xn----n50a258u.xn---1-up07j.; [V3 V6]; [V3 V6] +B; xn----n50a258u.xn----ecp33805f; [V3 V6]; [V3 V6] +B; 𐹸䚵-ꡡ。⺇; [B1]; [B1] +B; xn----bm3an932a1l5d.xn--xvj; [B1]; [B1] +B; 𑄳。\u1ADC𐹻; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # 𑄳.𐹻 +B; xn--v80d.xn--2rf1154i; [B1 B3 B5 B6 V5 V6]; [B1 B3 B5 B6 V5 V6] # 𑄳.𐹻 +B; ≮𐹻.⒎𑂵\u06BA\u0602; [B1 P1 V6]; [B1 P1 V6] # ≮𐹻.⒎𑂵ں +B; <\u0338𐹻.⒎𑂵\u06BA\u0602; [B1 P1 V6]; [B1 P1 V6] # ≮𐹻.⒎𑂵ں +B; ≮𐹻.7.𑂵\u06BA\u0602; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≮𐹻.7.𑂵ں +B; 
<\u0338𐹻.7.𑂵\u06BA\u0602; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≮𐹻.7.𑂵ں +B; xn--gdhx904g.7.xn--kfb18an307d; [B1 V5 V6]; [B1 V5 V6] # ≮𐹻.7.𑂵ں +B; xn--gdhx904g.xn--kfb18a325efm3s; [B1 V6]; [B1 V6] # ≮𐹻.⒎𑂵ں +T; ᢔ≠􋉂.\u200D𐋢; [C2 P1 V6]; [P1 V6] # ᢔ≠.𐋢 +N; ᢔ≠􋉂.\u200D𐋢; [C2 P1 V6]; [C2 P1 V6] # ᢔ≠.𐋢 +T; ᢔ=\u0338􋉂.\u200D𐋢; [C2 P1 V6]; [P1 V6] # ᢔ≠.𐋢 +N; ᢔ=\u0338􋉂.\u200D𐋢; [C2 P1 V6]; [C2 P1 V6] # ᢔ≠.𐋢 +B; xn--ebf031cf7196a.xn--587c; [V6]; [V6] +B; xn--ebf031cf7196a.xn--1ug9540g; [C2 V6]; [C2 V6] # ᢔ≠.𐋢 +B; 𐩁≮񣊛≯.\u066C𞵕⳿; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] # 𐩁≮≯.٬⳿ +B; 𐩁<\u0338񣊛>\u0338.\u066C𞵕⳿; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] # 𐩁≮≯.٬⳿ +B; 𐩁≮񣊛≯.\u066C𞵕⳿; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] # 𐩁≮≯.٬⳿ +B; 𐩁<\u0338񣊛>\u0338.\u066C𞵕⳿; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] # 𐩁≮≯.٬⳿ +B; xn--gdhc0519o0y27b.xn--lib468q0d21a; [B1 B2 B3 V6]; [B1 B2 B3 V6] # 𐩁≮≯.٬⳿ +B; -。⺐; [V3]; [V3] +B; -。⺐; [V3]; [V3] +B; -.xn--6vj; [V3]; [V3] +B; 󠰩𑲬.\u065C; [P1 V5 V6]; [P1 V5 V6] # 𑲬.ٜ +B; 󠰩𑲬.\u065C; [P1 V5 V6]; [P1 V5 V6] # 𑲬.ٜ +B; xn--sn3d59267c.xn--4hb; [V5 V6]; [V5 V6] # 𑲬.ٜ +T; 𐍺.񚇃\u200C; [C1 P1 V5 V6]; [P1 V5 V6] # 𐍺. +N; 𐍺.񚇃\u200C; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 𐍺. +B; xn--ie8c.xn--2g51a; [V5 V6]; [V5 V6] +B; xn--ie8c.xn--0ug03366c; [C1 V5 V6]; [C1 V5 V6] # 𐍺. +B; \u063D\u06E3.𐨎; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ؽۣ.𐨎 +B; xn--8gb64a.xn--mr9c; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ؽۣ.𐨎 +T; 漦Ⴙς.񡻀𐴄; [B5 B6 P1 V6]; [B5 B6 P1 V6] +N; 漦Ⴙς.񡻀𐴄; [B5 B6 P1 V6]; [B5 B6 P1 V6] +T; 漦ⴙς.񡻀𐴄; [B5 B6 P1 V6]; [B5 B6 P1 V6] +N; 漦ⴙς.񡻀𐴄; [B5 B6 P1 V6]; [B5 B6 P1 V6] +B; 漦ႹΣ.񡻀𐴄; [B5 B6 P1 V6]; [B5 B6 P1 V6] +B; 漦ⴙσ.񡻀𐴄; [B5 B6 P1 V6]; [B5 B6 P1 V6] +B; 漦Ⴙσ.񡻀𐴄; [B5 B6 P1 V6]; [B5 B6 P1 V6] +B; xn--4xa947d717e.xn--9d0d3162t; [B5 B6 V6]; [B5 B6 V6] +B; xn--4xa772sl47b.xn--9d0d3162t; [B5 B6 V6]; [B5 B6 V6] +B; xn--3xa972sl47b.xn--9d0d3162t; [B5 B6 V6]; [B5 B6 V6] +B; xn--3xa157d717e.xn--9d0d3162t; [B5 B6 V6]; [B5 B6 V6] +B; 𐹫踧\u0CCD򫚇.󜀃⒈𝨤; [B1 P1 V6]; [B1 P1 V6] # 𐹫踧್.⒈𝨤 +B; 𐹫踧\u0CCD򫚇.󜀃1.𝨤; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # 𐹫踧್.1.𝨤 +B; xn--8tc1437dro0d6q06h.xn--1-p948l.xn--m82h; [B1 B3 B6 V5 V6]; [B1 B3 B6 V5 V6] # 𐹫踧್.1.𝨤 +B; xn--8tc1437dro0d6q06h.xn--tsh2611ncu71e; [B1 V6]; [B1 V6] # 𐹫踧್.⒈𝨤 +T; \u200D≮.󠟪𹫏-; [C2 P1 V3 V6]; [P1 V3 V6] # ≮.- +N; \u200D≮.󠟪𹫏-; [C2 P1 V3 V6]; [C2 P1 V3 V6] # ≮.- +T; \u200D<\u0338.󠟪𹫏-; [C2 P1 V3 V6]; [P1 V3 V6] # ≮.- +N; \u200D<\u0338.󠟪𹫏-; [C2 P1 V3 V6]; [C2 P1 V3 V6] # ≮.- +T; \u200D≮.󠟪𹫏-; [C2 P1 V3 V6]; [P1 V3 V6] # ≮.- +N; \u200D≮.󠟪𹫏-; [C2 P1 V3 V6]; [C2 P1 V3 V6] # ≮.- +T; \u200D<\u0338.󠟪𹫏-; [C2 P1 V3 V6]; [P1 V3 V6] # ≮.- +N; \u200D<\u0338.󠟪𹫏-; [C2 P1 V3 V6]; [C2 P1 V3 V6] # ≮.- +B; xn--gdh.xn----cr99a1w710b; [V3 V6]; [V3 V6] +B; xn--1ug95g.xn----cr99a1w710b; [C2 V3 V6]; [C2 V3 V6] # ≮.- +T; \u200D\u200D襔。Ⴜ5ꡮ񵝏; [C2 P1 V6]; [P1 V6] # 襔.Ⴜ5ꡮ +N; \u200D\u200D襔。Ⴜ5ꡮ񵝏; [C2 P1 V6]; [C2 P1 V6] # 襔.Ⴜ5ꡮ +T; \u200D\u200D襔。ⴜ5ꡮ񵝏; [C2 P1 V6]; [P1 V6] # 襔.ⴜ5ꡮ +N; \u200D\u200D襔。ⴜ5ꡮ񵝏; [C2 P1 V6]; [C2 P1 V6] # 襔.ⴜ5ꡮ +B; xn--2u2a.xn--5-uws5848bpf44e; [V6]; [V6] +B; xn--1uga7691f.xn--5-uws5848bpf44e; [C2 V6]; [C2 V6] # 襔.ⴜ5ꡮ +B; xn--2u2a.xn--5-r1g7167ipfw8d; [V6]; [V6] +B; xn--1uga7691f.xn--5-r1g7167ipfw8d; [C2 V6]; [C2 V6] # 襔.Ⴜ5ꡮ +T; 𐫜𑌼\u200D.婀; [B3 C2]; xn--ix9c26l.xn--q0s # 𐫜𑌼.婀 +N; 𐫜𑌼\u200D.婀; [B3 C2]; [B3 C2] # 𐫜𑌼.婀 +T; 𐫜𑌼\u200D.婀; [B3 C2]; xn--ix9c26l.xn--q0s # 𐫜𑌼.婀 +N; 𐫜𑌼\u200D.婀; [B3 C2]; [B3 C2] # 𐫜𑌼.婀 +B; xn--ix9c26l.xn--q0s; 𐫜𑌼.婀; xn--ix9c26l.xn--q0s +B; 𐫜𑌼.婀; ; xn--ix9c26l.xn--q0s +B; xn--1ugx063g1if.xn--q0s; [B3 C2]; [B3 C2] # 𐫜𑌼.婀 +B; 󠅽︒︒𐹯。⬳\u1A78; [B1 P1 V6]; [B1 P1 V6] # ︒︒𐹯.⬳᩸ +B; 󠅽。。𐹯。⬳\u1A78; [B1 A4_2]; [B1 A4_2] # 
..𐹯.⬳᩸ +B; ..xn--no0d.xn--7of309e; [B1 A4_2]; [B1 A4_2] # ..𐹯.⬳᩸ +B; xn--y86ca186j.xn--7of309e; [B1 V6]; [B1 V6] # ︒︒𐹯.⬳᩸ +T; 𝟖ß.󠄐-\uDBDAႯ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ß.-Ⴏ +N; 𝟖ß.󠄐-\uDBDAႯ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ß.-Ⴏ +T; 8ß.󠄐-\uDBDAႯ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ß.-Ⴏ +N; 8ß.󠄐-\uDBDAႯ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ß.-Ⴏ +T; 8ß.󠄐-\uDBDAⴏ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ß.-ⴏ +N; 8ß.󠄐-\uDBDAⴏ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ß.-ⴏ +B; 8SS.󠄐-\uDBDAႯ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ss.-Ⴏ +B; 8ss.󠄐-\uDBDAⴏ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ss.-ⴏ +B; 8Ss.󠄐-\uDBDAႯ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ss.-Ⴏ +B; 8ss.-\uDBDAႯ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ss.-Ⴏ +B; 8ss.-\uDBDAⴏ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ss.-ⴏ +B; 8SS.-\uDBDAႯ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ss.-Ⴏ +B; 8Ss.-\uDBDAႯ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ss.-Ⴏ +B; xn--8-qfa.-\uDBDAⴏ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ß.-ⴏ +B; XN--8-QFA.-\uDBDAႯ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ß.-Ⴏ +B; Xn--8-Qfa.-\uDBDAႯ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ß.-Ⴏ +B; xn--8-qfa.-\uDBDAႯ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ß.-Ⴏ +T; 𝟖ß.󠄐-\uDBDAⴏ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ß.-ⴏ +N; 𝟖ß.󠄐-\uDBDAⴏ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ß.-ⴏ +B; 𝟖SS.󠄐-\uDBDAႯ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ss.-Ⴏ +B; 𝟖ss.󠄐-\uDBDAⴏ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ss.-ⴏ +B; 𝟖Ss.󠄐-\uDBDAႯ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ss.-Ⴏ +T; -\u200D󠋟.\u200C𐹣Ⴅ; [B1 C1 C2 P1 V3 V6]; [B1 P1 V3 V6] # -.𐹣Ⴅ +N; -\u200D󠋟.\u200C𐹣Ⴅ; [B1 C1 C2 P1 V3 V6]; [B1 C1 C2 P1 V3 V6] # -.𐹣Ⴅ +T; -\u200D󠋟.\u200C𐹣ⴅ; [B1 C1 C2 P1 V3 V6]; [B1 P1 V3 V6] # -.𐹣ⴅ +N; -\u200D󠋟.\u200C𐹣ⴅ; [B1 C1 C2 P1 V3 V6]; [B1 C1 C2 P1 V3 V6] # -.𐹣ⴅ +B; xn----s721m.xn--wkj1423e; [B1 V3 V6]; [B1 V3 V6] +B; xn----ugnv7071n.xn--0ugz32cgr0p; [B1 C1 C2 V3 V6]; [B1 C1 C2 V3 V6] # -.𐹣ⴅ +B; xn----s721m.xn--dnd9201k; [B1 V3 V6]; [B1 V3 V6] +B; xn----ugnv7071n.xn--dnd999e4j4p; [B1 C1 C2 V3 V6]; [B1 C1 C2 V3 V6] # -.𐹣Ⴅ +T; \uA9B9\u200D큷𻶡。₂; [C2 P1 V5 V6]; [P1 V5 V6] # ꦹ큷.2 +N; \uA9B9\u200D큷𻶡。₂; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ꦹ큷.2 +T; \uA9B9\u200D큷𻶡。₂; [C2 P1 V5 V6]; [P1 V5 V6] # ꦹ큷.2 +N; \uA9B9\u200D큷𻶡。₂; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ꦹ큷.2 +T; \uA9B9\u200D큷𻶡。2; [C2 P1 V5 V6]; [P1 V5 V6] # ꦹ큷.2 +N; \uA9B9\u200D큷𻶡。2; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ꦹ큷.2 +T; \uA9B9\u200D큷𻶡。2; [C2 P1 V5 V6]; [P1 V5 V6] # ꦹ큷.2 +N; \uA9B9\u200D큷𻶡。2; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ꦹ큷.2 +B; xn--0m9as84e2e21c.2; [V5 V6]; [V5 V6] # ꦹ큷.2 +B; xn--1ug1435cfkyaoi04d.2; [C2 V5 V6]; [C2 V5 V6] # ꦹ큷.2 +B; \uDF4D.🄄𞯘; [B1 P1 V6]; [B1 P1 V6 A3] # .🄄 +B; \uDF4D.3,𞯘; [B1 P1 V6]; [B1 P1 V6 A3] # .3, +B; \uDF4D.xn--3,-tb22a; [B1 P1 V6]; [B1 P1 V6 A3] # .3, +B; \uDF4D.XN--3,-TB22A; [B1 P1 V6]; [B1 P1 V6 A3] # .3, +B; \uDF4D.Xn--3,-Tb22a; [B1 P1 V6]; [B1 P1 V6 A3] # .3, +B; \uDF4D.xn--3x6hx6f; [B1 P1 V6]; [B1 P1 V6 A3] # .🄄 +B; \uDF4D.XN--3X6HX6F; [B1 P1 V6]; [B1 P1 V6 A3] # .🄄 +B; \uDF4D.Xn--3X6hx6f; [B1 P1 V6]; [B1 P1 V6 A3] # .🄄 +B; 𝨖𐩙。\u06DD󀡶\uA8C5⒈; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𝨖.ꣅ⒈ +B; 𝨖𐩙。\u06DD󀡶\uA8C51.; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𝨖.ꣅ1. +B; xn--rt9cl956a.xn--1-dxc8545j0693i.; [B1 V5 V6]; [B1 V5 V6] # 𝨖.ꣅ1. 
+B; xn--rt9cl956a.xn--tlb403mxv4g06s9i; [B1 V5 V6]; [B1 V5 V6] # 𝨖.ꣅ⒈ +T; 򒈣\u05E1\u06B8。Ⴈ\u200D; [B5 B6 C2 P1 V6]; [B5 B6 P1 V6] # סڸ.Ⴈ +N; 򒈣\u05E1\u06B8。Ⴈ\u200D; [B5 B6 C2 P1 V6]; [B5 B6 C2 P1 V6] # סڸ.Ⴈ +T; 򒈣\u05E1\u06B8。ⴈ\u200D; [B5 B6 C2 P1 V6]; [B5 B6 P1 V6] # סڸ.ⴈ +N; 򒈣\u05E1\u06B8。ⴈ\u200D; [B5 B6 C2 P1 V6]; [B5 B6 C2 P1 V6] # סڸ.ⴈ +B; xn--meb44b57607c.xn--zkj; [B5 B6 V6]; [B5 B6 V6] # סڸ.ⴈ +B; xn--meb44b57607c.xn--1ug232c; [B5 B6 C2 V6]; [B5 B6 C2 V6] # סڸ.ⴈ +B; xn--meb44b57607c.xn--gnd; [B5 B6 V6]; [B5 B6 V6] # סڸ.Ⴈ +B; xn--meb44b57607c.xn--gnd699e; [B5 B6 C2 V6]; [B5 B6 C2 V6] # סڸ.Ⴈ +T; 󀚶𝨱\u07E6⒈.𑗝髯\u200C; [B1 B5 C1 P1 V5 V6]; [B1 B5 P1 V5 V6] # 𝨱ߦ⒈.𑗝髯 +N; 󀚶𝨱\u07E6⒈.𑗝髯\u200C; [B1 B5 C1 P1 V5 V6]; [B1 B5 C1 P1 V5 V6] # 𝨱ߦ⒈.𑗝髯 +T; 󀚶𝨱\u07E61..𑗝髯\u200C; [B1 B5 C1 P1 V5 V6 A4_2]; [B1 B5 P1 V5 V6 A4_2] # 𝨱ߦ1..𑗝髯 +N; 󀚶𝨱\u07E61..𑗝髯\u200C; [B1 B5 C1 P1 V5 V6 A4_2]; [B1 B5 C1 P1 V5 V6 A4_2] # 𝨱ߦ1..𑗝髯 +B; xn--1-idd62296a1fr6e..xn--uj6at43v; [B1 B5 V5 V6 A4_2]; [B1 B5 V5 V6 A4_2] # 𝨱ߦ1..𑗝髯 +B; xn--1-idd62296a1fr6e..xn--0ugx259bocxd; [B1 B5 C1 V5 V6 A4_2]; [B1 B5 C1 V5 V6 A4_2] # 𝨱ߦ1..𑗝髯 +B; xn--etb477lq931a1f58e.xn--uj6at43v; [B1 B5 V5 V6]; [B1 B5 V5 V6] # 𝨱ߦ⒈.𑗝髯 +B; xn--etb477lq931a1f58e.xn--0ugx259bocxd; [B1 B5 C1 V5 V6]; [B1 B5 C1 V5 V6] # 𝨱ߦ⒈.𑗝髯 +B; 𐫀.\u0689𑌀; 𐫀.\u0689𑌀; xn--pw9c.xn--fjb8658k # 𐫀.ډ𑌀 +B; 𐫀.\u0689𑌀; ; xn--pw9c.xn--fjb8658k # 𐫀.ډ𑌀 +B; xn--pw9c.xn--fjb8658k; 𐫀.\u0689𑌀; xn--pw9c.xn--fjb8658k # 𐫀.ډ𑌀 +B; 𑋪.𐳝; [B1 B3 B6 V5]; [B1 B3 B6 V5] +B; 𑋪.𐳝; [B1 B3 B6 V5]; [B1 B3 B6 V5] +B; 𑋪.𐲝; [B1 B3 B6 V5]; [B1 B3 B6 V5] +B; xn--fm1d.xn--5c0d; [B1 B3 B6 V5]; [B1 B3 B6 V5] +B; 𑋪.𐲝; [B1 B3 B6 V5]; [B1 B3 B6 V5] +B; ≠膣。\u0F83; [P1 V5 V6]; [P1 V5 V6] # ≠膣.ྃ +B; =\u0338膣。\u0F83; [P1 V5 V6]; [P1 V5 V6] # ≠膣.ྃ +B; xn--1chy468a.xn--2ed; [V5 V6]; [V5 V6] # ≠膣.ྃ +T; 񰀎-\u077D。ß; [B5 B6 P1 V6]; [B5 B6 P1 V6] # -ݽ.ß +N; 񰀎-\u077D。ß; [B5 B6 P1 V6]; [B5 B6 P1 V6] # -ݽ.ß +T; 񰀎-\u077D。ß; [B5 B6 P1 V6]; [B5 B6 P1 V6] # -ݽ.ß +N; 񰀎-\u077D。ß; [B5 B6 P1 V6]; [B5 B6 P1 V6] # -ݽ.ß +B; 񰀎-\u077D。SS; [B5 B6 P1 V6]; [B5 B6 P1 V6] # -ݽ.ss +B; 񰀎-\u077D。ss; [B5 B6 P1 V6]; [B5 B6 P1 V6] # -ݽ.ss +B; 񰀎-\u077D。Ss; [B5 B6 P1 V6]; [B5 B6 P1 V6] # -ݽ.ss +B; xn----j6c95618k.ss; [B5 B6 V6]; [B5 B6 V6] # -ݽ.ss +B; xn----j6c95618k.xn--zca; [B5 B6 V6]; [B5 B6 V6] # -ݽ.ß +B; 񰀎-\u077D。SS; [B5 B6 P1 V6]; [B5 B6 P1 V6] # -ݽ.ss +B; 񰀎-\u077D。ss; [B5 B6 P1 V6]; [B5 B6 P1 V6] # -ݽ.ss +B; 񰀎-\u077D。Ss; [B5 B6 P1 V6]; [B5 B6 P1 V6] # -ݽ.ss +T; ς𐹠ᡚ𑄳.⾭𐹽𽐖𐫜; [B5 B6 P1 V6]; [B5 B6 P1 V6] +N; ς𐹠ᡚ𑄳.⾭𐹽𽐖𐫜; [B5 B6 P1 V6]; [B5 B6 P1 V6] +T; ς𐹠ᡚ𑄳.靑𐹽𽐖𐫜; [B5 B6 P1 V6]; [B5 B6 P1 V6] +N; ς𐹠ᡚ𑄳.靑𐹽𽐖𐫜; [B5 B6 P1 V6]; [B5 B6 P1 V6] +B; Σ𐹠ᡚ𑄳.靑𐹽𽐖𐫜; [B5 B6 P1 V6]; [B5 B6 P1 V6] +B; σ𐹠ᡚ𑄳.靑𐹽𽐖𐫜; [B5 B6 P1 V6]; [B5 B6 P1 V6] +B; xn--4xa656hp23pxmc.xn--es5a888tvjc2u15h; [B5 B6 V6]; [B5 B6 V6] +B; xn--3xa856hp23pxmc.xn--es5a888tvjc2u15h; [B5 B6 V6]; [B5 B6 V6] +B; Σ𐹠ᡚ𑄳.⾭𐹽𽐖𐫜; [B5 B6 P1 V6]; [B5 B6 P1 V6] +B; σ𐹠ᡚ𑄳.⾭𐹽𽐖𐫜; [B5 B6 P1 V6]; [B5 B6 P1 V6] +T; 𐋷。\u200D; [C2]; xn--r97c. # 𐋷. +N; 𐋷。\u200D; [C2]; [C2] # 𐋷. +B; xn--r97c.; 𐋷.; xn--r97c.; NV8 +B; 𐋷.; ; xn--r97c.; NV8 +B; xn--r97c.xn--1ug; [C2]; [C2] # 𐋷. 
+B; 𑰳𑈯。⥪; [V5]; [V5] +B; xn--2g1d14o.xn--jti; [V5]; [V5] +T; 𑆀䁴񤧣.Ⴕ𝟜\u200C\u0348; [C1 P1 V5 V6]; [P1 V5 V6] # 𑆀䁴.Ⴕ4͈ +N; 𑆀䁴񤧣.Ⴕ𝟜\u200C\u0348; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 𑆀䁴.Ⴕ4͈ +T; 𑆀䁴񤧣.Ⴕ4\u200C\u0348; [C1 P1 V5 V6]; [P1 V5 V6] # 𑆀䁴.Ⴕ4͈ +N; 𑆀䁴񤧣.Ⴕ4\u200C\u0348; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 𑆀䁴.Ⴕ4͈ +T; 𑆀䁴񤧣.ⴕ4\u200C\u0348; [C1 P1 V5 V6]; [P1 V5 V6] # 𑆀䁴.ⴕ4͈ +N; 𑆀䁴񤧣.ⴕ4\u200C\u0348; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 𑆀䁴.ⴕ4͈ +B; xn--1mnx647cg3x1b.xn--4-zfb5123a; [V5 V6]; [V5 V6] # 𑆀䁴.ⴕ4͈ +B; xn--1mnx647cg3x1b.xn--4-zfb502tlsl; [C1 V5 V6]; [C1 V5 V6] # 𑆀䁴.ⴕ4͈ +B; xn--1mnx647cg3x1b.xn--4-zfb324h; [V5 V6]; [V5 V6] # 𑆀䁴.Ⴕ4͈ +B; xn--1mnx647cg3x1b.xn--4-zfb324h32o; [C1 V5 V6]; [C1 V5 V6] # 𑆀䁴.Ⴕ4͈ +T; 𑆀䁴񤧣.ⴕ𝟜\u200C\u0348; [C1 P1 V5 V6]; [P1 V5 V6] # 𑆀䁴.ⴕ4͈ +N; 𑆀䁴񤧣.ⴕ𝟜\u200C\u0348; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 𑆀䁴.ⴕ4͈ +T; 憡\uDF1F\u200CႴ.𐋮\u200D≠; [C1 C2 P1 V6]; [P1 V6 A3] # 憡Ⴔ.𐋮≠ +N; 憡\uDF1F\u200CႴ.𐋮\u200D≠; [C1 C2 P1 V6]; [C1 C2 P1 V6 A3] # 憡Ⴔ.𐋮≠ +T; 憡\uDF1F\u200CႴ.𐋮\u200D=\u0338; [C1 C2 P1 V6]; [P1 V6 A3] # 憡Ⴔ.𐋮≠ +N; 憡\uDF1F\u200CႴ.𐋮\u200D=\u0338; [C1 C2 P1 V6]; [C1 C2 P1 V6 A3] # 憡Ⴔ.𐋮≠ +T; 憡\uDF1F\u200Cⴔ.𐋮\u200D=\u0338; [C1 C2 P1 V6]; [P1 V6 A3] # 憡ⴔ.𐋮≠ +N; 憡\uDF1F\u200Cⴔ.𐋮\u200D=\u0338; [C1 C2 P1 V6]; [C1 C2 P1 V6 A3] # 憡ⴔ.𐋮≠ +T; 憡\uDF1F\u200Cⴔ.𐋮\u200D≠; [C1 C2 P1 V6]; [P1 V6 A3] # 憡ⴔ.𐋮≠ +N; 憡\uDF1F\u200Cⴔ.𐋮\u200D≠; [C1 C2 P1 V6]; [C1 C2 P1 V6 A3] # 憡ⴔ.𐋮≠ +B; 憡\uDF1Fⴔ.xn--1chz659f; [P1 V6]; [P1 V6 A3] # 憡ⴔ.𐋮≠ +B; 憡\uDF1FႴ.XN--1CHZ659F; [P1 V6]; [P1 V6 A3] # 憡Ⴔ.𐋮≠ +B; 憡\uDF1FႴ.xn--1Chz659f; [P1 V6]; [P1 V6 A3] # 憡Ⴔ.𐋮≠ +B; 憡\uDF1FႴ.xn--1chz659f; [P1 V6]; [P1 V6 A3] # 憡Ⴔ.𐋮≠ +T; 憡\uDF1F\u200Cⴔ.xn--1ug73gl146a; [C1 C2 P1 V6]; [C2 P1 V6 A3] # 憡ⴔ.𐋮≠ +N; 憡\uDF1F\u200Cⴔ.xn--1ug73gl146a; [C1 C2 P1 V6]; [C1 C2 P1 V6 A3] # 憡ⴔ.𐋮≠ +T; 憡\uDF1F\u200CႴ.XN--1UG73GL146A; [C1 C2 P1 V6]; [C2 P1 V6 A3] # 憡Ⴔ.𐋮≠ +N; 憡\uDF1F\u200CႴ.XN--1UG73GL146A; [C1 C2 P1 V6]; [C1 C2 P1 V6 A3] # 憡Ⴔ.𐋮≠ +T; 憡\uDF1F\u200CႴ.xn--1Ug73gl146a; [C1 C2 P1 V6]; [C2 P1 V6 A3] # 憡Ⴔ.𐋮≠ +N; 憡\uDF1F\u200CႴ.xn--1Ug73gl146a; [C1 C2 P1 V6]; [C1 C2 P1 V6 A3] # 憡Ⴔ.𐋮≠ +B; 憡\uDF1FႴ.xn--1ug73gl146a; [C2 P1 V6]; [C2 P1 V6 A3] # 憡Ⴔ.𐋮≠ +B; 憡\uDF1Fⴔ.xn--1ug73gl146a; [C2 P1 V6]; [C2 P1 V6 A3] # 憡ⴔ.𐋮≠ +B; 憡\uDF1FႴ.XN--1UG73GL146A; [C2 P1 V6]; [C2 P1 V6 A3] # 憡Ⴔ.𐋮≠ +B; 憡\uDF1FႴ.xn--1Ug73gl146a; [C2 P1 V6]; [C2 P1 V6 A3] # 憡Ⴔ.𐋮≠ +T; 憡\uDF1F\u200CႴ.xn--1ug73gl146a; [C1 C2 P1 V6]; [C2 P1 V6 A3] # 憡Ⴔ.𐋮≠ +N; 憡\uDF1F\u200CႴ.xn--1ug73gl146a; [C1 C2 P1 V6]; [C1 C2 P1 V6 A3] # 憡Ⴔ.𐋮≠ diff --git a/idna/tests/punycode.rs b/idna/tests/punycode.rs new file mode 100644 index 000000000..67988e80c --- /dev/null +++ b/idna/tests/punycode.rs @@ -0,0 +1,65 @@ +// Copyright 2013 The rust-url developers. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +use idna::punycode::{decode, encode_str}; +use rustc_serialize::json::{Json, Object}; +use test::TestFn; + +fn one_test(decoded: &str, encoded: &str) { + match decode(encoded) { + None => panic!("Decoding {} failed.", encoded), + Some(result) => { + let result = result.into_iter().collect::<String>(); + assert!(result == decoded, + format!("Incorrect decoding of \"{}\":\n \"{}\"\n!= \"{}\"\n", + encoded, result, decoded)) + } + } + + match encode_str(decoded) { + None => panic!("Encoding {} failed.", decoded), + Some(result) => { + assert!(result == encoded, + format!("Incorrect encoding of \"{}\":\n \"{}\"\n!= \"{}\"\n", + decoded, result, encoded)) + } + } +} + +fn get_string<'a>(map: &'a Object, key: &str) -> &'a str { + match map.get(&key.to_string()) { + Some(&Json::String(ref s)) => s, + None => "", + _ => panic!(), + } +} + +pub fn collect_tests<F: FnMut(String, TestFn)>(add_test: &mut F) { + match Json::from_str(include_str!("punycode_tests.json")) { + Ok(Json::Array(tests)) => for (i, test) in tests.into_iter().enumerate() { + match test { + Json::Object(o) => { + let test_name = { + let desc = get_string(&o, "description"); + if desc.is_empty() { + format!("Punycode {}", i + 1) + } else { + format!("Punycode {}: {}", i + 1, desc) + } + }; + add_test(test_name, TestFn::dyn_test_fn(move || one_test( + get_string(&o, "decoded"), + get_string(&o, "encoded"), + ))) + } + _ => panic!(), + } + }, + other => panic!("{:?}", other) + } +} diff --git a/idna/tests/punycode_tests.json b/idna/tests/punycode_tests.json new file mode 100644 index 000000000..86785b124 --- /dev/null +++ b/idna/tests/punycode_tests.json @@ -0,0 +1,120 @@ +[ +{ + "description": "These tests are copied from https://github.com/bestiejs/punycode.js/blob/master/tests/tests.js , used under the MIT license.", + "decoded": "", + "encoded": "" +}, +{ + "description": "a single basic code point", + "decoded": "Bach", + "encoded": "Bach-" +}, +{ + "description": "a single non-ASCII character", + "decoded": "\u00FC", + "encoded": "tda" +}, +{ + "description": "multiple non-ASCII characters", + "decoded": "\u00FC\u00EB\u00E4\u00F6\u2665", + "encoded": "4can8av2009b" +}, +{ + "description": "mix of ASCII and non-ASCII characters", + "decoded": "b\u00FCcher", + "encoded": "bcher-kva" +}, +{ + "description": "long string with both ASCII and non-ASCII characters", + "decoded": "Willst du die Bl\u00FCthe des fr\u00FChen, die Fr\u00FCchte des sp\u00E4teren Jahres", + "encoded": "Willst du die Blthe des frhen, die Frchte des spteren Jahres-x9e96lkal" +}, +{ + "description": "Arabic (Egyptian)", + "decoded": "\u0644\u064A\u0647\u0645\u0627\u0628\u062A\u0643\u0644\u0645\u0648\u0634\u0639\u0631\u0628\u064A\u061F", + "encoded": "egbpdaj6bu4bxfgehfvwxn" +}, +{ + "description": "Chinese (simplified)", + "decoded": "\u4ED6\u4EEC\u4E3A\u4EC0\u4E48\u4E0D\u8BF4\u4E2d\u6587", + "encoded": "ihqwcrb4cv8a8dqg056pqjye" +}, +{ + "description": "Chinese (traditional)", + "decoded": "\u4ED6\u5011\u7232\u4EC0\u9EBD\u4E0D\u8AAA\u4E2D\u6587", + "encoded": "ihqwctvzc91f659drss3x8bo0yb" +}, +{ + "description": "Czech", + "decoded": "Pro\u010Dprost\u011Bnemluv\u00ED\u010Desky", + "encoded": "Proprostnemluvesky-uyb24dma41a" +}, +{ + "description": "Hebrew", + "decoded": "\u05DC\u05DE\u05D4\u05D4\u05DD\u05E4\u05E9\u05D5\u05D8\u05DC\u05D0\u05DE\u05D3\u05D1\u05E8\u05D9\u05DD\u05E2\u05D1\u05E8\u05D9\u05EA", + "encoded": "4dbcagdahymbxekheh6e0a7fei0b" +}, +{ + "description": "Hindi (Devanagari)", + "decoded": 
"\u092F\u0939\u0932\u094B\u0917\u0939\u093F\u0928\u094D\u0926\u0940\u0915\u094D\u092F\u094B\u0902\u0928\u0939\u0940\u0902\u092C\u094B\u0932\u0938\u0915\u0924\u0947\u0939\u0948\u0902", + "encoded": "i1baa7eci9glrd9b2ae1bj0hfcgg6iyaf8o0a1dig0cd" +}, +{ + "description": "Japanese (kanji and hiragana)", + "decoded": "\u306A\u305C\u307F\u3093\u306A\u65E5\u672C\u8A9E\u3092\u8A71\u3057\u3066\u304F\u308C\u306A\u3044\u306E\u304B", + "encoded": "n8jok5ay5dzabd5bym9f0cm5685rrjetr6pdxa" +}, +{ + "description": "Korean (Hangul syllables)", + "decoded": "\uC138\uACC4\uC758\uBAA8\uB4E0\uC0AC\uB78C\uB4E4\uC774\uD55C\uAD6D\uC5B4\uB97C\uC774\uD574\uD55C\uB2E4\uBA74\uC5BC\uB9C8\uB098\uC88B\uC744\uAE4C", + "encoded": "989aomsvi5e83db1d2a355cv1e0vak1dwrv93d5xbh15a0dt30a5jpsd879ccm6fea98c" +}, +{ + "description": "Russian (Cyrillic)", + "decoded": "\u043F\u043E\u0447\u0435\u043C\u0443\u0436\u0435\u043E\u043D\u0438\u043D\u0435\u0433\u043E\u0432\u043E\u0440\u044F\u0442\u043F\u043E\u0440\u0443\u0441\u0441\u043A\u0438", + "encoded": "b1abfaaepdrnnbgefbadotcwatmq2g4l" +}, +{ + "description": "Spanish", + "decoded": "Porqu\u00E9nopuedensimplementehablarenEspa\u00F1ol", + "encoded": "PorqunopuedensimplementehablarenEspaol-fmd56a" +}, +{ + "description": "Vietnamese", + "decoded": "T\u1EA1isaoh\u1ECDkh\u00F4ngth\u1EC3ch\u1EC9n\u00F3iti\u1EBFngVi\u1EC7t", + "encoded": "TisaohkhngthchnitingVit-kjcr8268qyxafd2f1b9g" +}, +{ + "decoded": "3\u5E74B\u7D44\u91D1\u516B\u5148\u751F", + "encoded": "3B-ww4c5e180e575a65lsy2b" +}, +{ + "decoded": "\u5B89\u5BA4\u5948\u7F8E\u6075-with-SUPER-MONKEYS", + "encoded": "-with-SUPER-MONKEYS-pc58ag80a8qai00g7n9n" +}, +{ + "decoded": "Hello-Another-Way-\u305D\u308C\u305E\u308C\u306E\u5834\u6240", + "encoded": "Hello-Another-Way--fc4qua05auwb3674vfr0b" +}, +{ + "decoded": "\u3072\u3068\u3064\u5C4B\u6839\u306E\u4E0B2", + "encoded": "2-u9tlzr9756bt3uc0v" +}, +{ + "decoded": "Maji\u3067Koi\u3059\u308B5\u79D2\u524D", + "encoded": "MajiKoi5-783gue6qz075azm5e" +}, +{ + "decoded": "\u30D1\u30D5\u30A3\u30FCde\u30EB\u30F3\u30D0", + "encoded": "de-jg4avhby1noc0d" +}, +{ + "decoded": "\u305D\u306E\u30B9\u30D4\u30FC\u30C9\u3067", + "encoded": "d9juau41awczczp" +}, +{ + "description": "ASCII string that breaks the existing rules for host-name labels (It's not a realistic example for IDNA, because IDNA never encodes pure ASCII labels.)", + "decoded": "-> $1.00 <-", + "encoded": "-> $1.00 <--" +} +] diff --git a/idna/tests/tests.rs b/idna/tests/tests.rs new file mode 100644 index 000000000..808ad6ba8 --- /dev/null +++ b/idna/tests/tests.rs @@ -0,0 +1,21 @@ +extern crate idna; +extern crate rustc_serialize; +extern crate rustc_test as test; + +mod punycode; +mod uts46; + +fn main() { + let mut tests = Vec::new(); + { + let mut add_test = |name, run| { + tests.push(test::TestDescAndFn { + desc: test::TestDesc::new(test::DynTestName(name)), + testfn: run, + }) + }; + punycode::collect_tests(&mut add_test); + uts46::collect_tests(&mut add_test); + } + test::test_main(&std::env::args().collect::>(), tests) +} diff --git a/idna/tests/unit.rs b/idna/tests/unit.rs new file mode 100644 index 000000000..a7d158d5c --- /dev/null +++ b/idna/tests/unit.rs @@ -0,0 +1,40 @@ +extern crate idna; +extern crate unicode_normalization; + +use idna::uts46; +use unicode_normalization::char::is_combining_mark; + + +fn _to_ascii(domain: &str) -> Result { + uts46::to_ascii(domain, uts46::Flags { + transitional_processing: false, + use_std3_ascii_rules: true, + verify_dns_length: true, + }) +} + +#[test] +fn test_v5() { + // 
IdnaTest:784 蔏。𑰺 + assert!(is_combining_mark('\u{11C3A}')); + assert!(_to_ascii("\u{11C3A}").is_err()); + assert!(_to_ascii("\u{850f}.\u{11C3A}").is_err()); + assert!(_to_ascii("\u{850f}\u{ff61}\u{11C3A}").is_err()); +} + +#[test] +fn test_v8_bidi_rules() { + assert_eq!(_to_ascii("abc").unwrap(), "abc"); + assert_eq!(_to_ascii("123").unwrap(), "123"); + assert_eq!(_to_ascii("אבּג").unwrap(), "xn--kdb3bdf"); + assert_eq!(_to_ascii("ابج").unwrap(), "xn--mgbcm"); + assert_eq!(_to_ascii("abc.ابج").unwrap(), "abc.xn--mgbcm"); + assert_eq!(_to_ascii("אבּג.ابج").unwrap(), "xn--kdb3bdf.xn--mgbcm"); + + // Bidi domain names cannot start with digits + assert!(_to_ascii("0a.\u{05D0}").is_err()); + assert!(_to_ascii("0à.\u{05D0}").is_err()); + + // Bidi chars may be punycode-encoded + assert!(_to_ascii("xn--0ca24w").is_err()); +} diff --git a/idna/tests/uts46.rs b/idna/tests/uts46.rs new file mode 100644 index 000000000..59ec1cd76 --- /dev/null +++ b/idna/tests/uts46.rs @@ -0,0 +1,124 @@ +// Copyright 2013-2014 The rust-url developers. +// +// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or +// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license +// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use std::char; +use idna::uts46; +use test::TestFn; + +pub fn collect_tests<F: FnMut(String, TestFn)>(add_test: &mut F) { + // http://www.unicode.org/Public/idna/latest/IdnaTest.txt + for (i, line) in include_str!("IdnaTest.txt").lines().enumerate() { + if line == "" || line.starts_with("#") { + continue + } + // Remove comments + let mut line = match line.find("#") { + Some(index) => &line[0..index], + None => line + }; + + let mut expected_failure = false; + if line.starts_with("XFAIL") { + expected_failure = true; + line = &line[5..line.len()]; + }; + + let mut pieces = line.split(';').map(|x| x.trim()).collect::<Vec<&str>>(); + + let test_type = pieces.remove(0); + let original = pieces.remove(0); + let source = unescape(original); + let to_unicode = pieces.remove(0); + let to_ascii = pieces.remove(0); + let nv8 = if pieces.len() > 0 { pieces.remove(0) } else { "" }; + + if expected_failure { + continue; + } + + let test_name = format!("UTS #46 line {}", i + 1); + add_test(test_name, TestFn::dyn_test_fn(move || { + let result = uts46::to_ascii(&source, uts46::Flags { + use_std3_ascii_rules: true, + transitional_processing: test_type == "T", + verify_dns_length: true, + }); + + if to_ascii.starts_with("[") { + if to_ascii.starts_with("[C") { + // http://unicode.org/reports/tr46/#Deviations + // applications that perform IDNA2008 lookup are not required to check + // for these contexts + return; + } + if to_ascii == "[V2]" { + // Everybody ignores V2 + // https://github.com/servo/rust-url/pull/240 + // https://github.com/whatwg/url/issues/53#issuecomment-181528158 + // http://www.unicode.org/review/pri317/ + return; + } + let res = result.ok(); + assert!(res == None, "Expected error. result: {} | original: {} | source: {}", + res.unwrap(), original, source); + return; + } + + let to_ascii = if to_ascii.len() > 0 { + to_ascii.to_string() + } else { + if to_unicode.len() > 0 { + to_unicode.to_string() + } else { + source.clone() + } + }; + + if nv8 == "NV8" { + // This result isn't valid under IDNA2008. 
Skip it + return; + } + + assert!(result.is_ok(), "Couldn't parse {} | original: {} | error: {:?}", + source, original, result.err()); + let output = result.ok().unwrap(); + assert!(output == to_ascii, "result: {} | expected: {} | original: {} | source: {}", + output, to_ascii, original, source); + })) + } +} + +fn unescape(input: &str) -> String { + let mut output = String::new(); + let mut chars = input.chars(); + loop { + match chars.next() { + None => return output, + Some(c) => + if c == '\\' { + match chars.next().unwrap() { + '\\' => output.push('\\'), + 'u' => { + let c1 = chars.next().unwrap().to_digit(16).unwrap(); + let c2 = chars.next().unwrap().to_digit(16).unwrap(); + let c3 = chars.next().unwrap().to_digit(16).unwrap(); + let c4 = chars.next().unwrap().to_digit(16).unwrap(); + match char::from_u32(((c1 * 16 + c2) * 16 + c3) * 16 + c4) + { + Some(c) => output.push(c), + None => { output.push_str(&format!("\\u{:X}{:X}{:X}{:X}",c1,c2,c3,c4)); } + }; + } + _ => panic!("Invalid test data input"), + } + } else { + output.push(c); + } + } + } +} diff --git a/ignore/.cargo-checksum.json b/ignore/.cargo-checksum.json new file mode 100644 index 000000000..cd6330b82 --- /dev/null +++ b/ignore/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{},"package":"36ecfc5ad80f0b1226df948c562e2cddd446096be3f644c95106400eae8a5e01"} \ No newline at end of file diff --git a/ignore/COPYING b/ignore/COPYING new file mode 100644 index 000000000..bb9c20a09 --- /dev/null +++ b/ignore/COPYING @@ -0,0 +1,3 @@ +This project is dual-licensed under the Unlicense and MIT licenses. + +You may use this code under the terms of either license. diff --git a/ignore/Cargo.toml b/ignore/Cargo.toml new file mode 100644 index 000000000..c7dc32e0b --- /dev/null +++ b/ignore/Cargo.toml @@ -0,0 +1,60 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. 
If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "ignore" +version = "0.4.4" +authors = ["Andrew Gallant "] +description = "A fast library for efficiently matching ignore files such as `.gitignore`\nagainst file paths.\n" +homepage = "https://github.com/BurntSushi/ripgrep/tree/master/ignore" +documentation = "https://docs.rs/ignore" +readme = "README.md" +keywords = ["glob", "ignore", "gitignore", "pattern", "file"] +license = "Unlicense/MIT" +repository = "https://github.com/BurntSushi/ripgrep/tree/master/ignore" + +[lib] +name = "ignore" +bench = false +[dependencies.crossbeam-channel] +version = "0.2.4" + +[dependencies.globset] +version = "0.4.2" + +[dependencies.lazy_static] +version = "1.1.0" + +[dependencies.log] +version = "0.4.5" + +[dependencies.memchr] +version = "2.0.2" + +[dependencies.regex] +version = "1.0.5" + +[dependencies.same-file] +version = "1.0.3" + +[dependencies.thread_local] +version = "0.3.6" + +[dependencies.walkdir] +version = "2.2.5" +[dev-dependencies.tempdir] +version = "0.3.7" + +[features] +simd-accel = ["globset/simd-accel"] +[target."cfg(windows)".dependencies.winapi-util] +version = "0.1.1" diff --git a/ignore/LICENSE-MIT b/ignore/LICENSE-MIT new file mode 100644 index 000000000..3b0a5dc09 --- /dev/null +++ b/ignore/LICENSE-MIT @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015 Andrew Gallant + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/ignore/README.md b/ignore/README.md new file mode 100644 index 000000000..b0e659a9c --- /dev/null +++ b/ignore/README.md @@ -0,0 +1,66 @@ +ignore +====== +The ignore crate provides a fast recursive directory iterator that respects +various filters such as globs, file types and `.gitignore` files. This crate +also provides lower level direct access to gitignore and file type matchers. + +[![Linux build status](https://api.travis-ci.org/BurntSushi/ripgrep.svg)](https://travis-ci.org/BurntSushi/ripgrep) +[![Windows build status](https://ci.appveyor.com/api/projects/status/github/BurntSushi/ripgrep?svg=true)](https://ci.appveyor.com/project/BurntSushi/ripgrep) +[![](https://img.shields.io/crates/v/ignore.svg)](https://crates.io/crates/ignore) + +Dual-licensed under MIT or the [UNLICENSE](http://unlicense.org). 
+ +### Documentation + +[https://docs.rs/ignore](https://docs.rs/ignore) + +### Usage + +Add this to your `Cargo.toml`: + +```toml +[dependencies] +ignore = "0.4" +``` + +and this to your crate root: + +```rust +extern crate ignore; +``` + +### Example + +This example shows the most basic usage of this crate. This code will +recursively traverse the current directory while automatically filtering out +files and directories according to ignore globs found in files like +`.ignore` and `.gitignore`: + + +```rust,no_run +use ignore::Walk; + +for result in Walk::new("./") { + // Each item yielded by the iterator is either a directory entry or an + // error, so either print the path or the error. + match result { + Ok(entry) => println!("{}", entry.path().display()), + Err(err) => println!("ERROR: {}", err), + } +} +``` + +### Example: advanced + +By default, the recursive directory iterator will ignore hidden files and +directories. This can be disabled by building the iterator with `WalkBuilder`: + +```rust,no_run +use ignore::WalkBuilder; + +for result in WalkBuilder::new("./").hidden(false).build() { + println!("{:?}", result); +} +``` + +See the documentation for `WalkBuilder` for many other options. diff --git a/ignore/UNLICENSE b/ignore/UNLICENSE new file mode 100644 index 000000000..68a49daad --- /dev/null +++ b/ignore/UNLICENSE @@ -0,0 +1,24 @@ +This is free and unencumbered software released into the public domain. + +Anyone is free to copy, modify, publish, use, compile, sell, or +distribute this software, either in source code form or as a compiled +binary, for any purpose, commercial or non-commercial, and by any +means. + +In jurisdictions that recognize copyright laws, the author or authors +of this software dedicate any and all copyright interest in the +software to the public domain. We make this dedication for the benefit +of the public at large and to the detriment of our heirs and +successors. We intend this dedication to be an overt act of +relinquishment in perpetuity of all present and future rights to this +software under copyright law. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR +OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. 
+ +For more information, please refer to diff --git a/ignore/examples/walk.rs b/ignore/examples/walk.rs new file mode 100644 index 000000000..67432b71a --- /dev/null +++ b/ignore/examples/walk.rs @@ -0,0 +1,84 @@ +extern crate crossbeam_channel as channel; +extern crate ignore; +extern crate walkdir; + +use std::env; +use std::io::{self, Write}; +use std::path::Path; +use std::thread; + +use ignore::WalkBuilder; +use walkdir::WalkDir; + +fn main() { + let mut path = env::args().nth(1).unwrap(); + let mut parallel = false; + let mut simple = false; + let (tx, rx) = channel::bounded::(100); + if path == "parallel" { + path = env::args().nth(2).unwrap(); + parallel = true; + } else if path == "walkdir" { + path = env::args().nth(2).unwrap(); + simple = true; + } + + let stdout_thread = thread::spawn(move || { + let mut stdout = io::BufWriter::new(io::stdout()); + for dent in rx { + write_path(&mut stdout, dent.path()); + } + }); + + if parallel { + let walker = WalkBuilder::new(path).threads(6).build_parallel(); + walker.run(|| { + let tx = tx.clone(); + Box::new(move |result| { + use ignore::WalkState::*; + + tx.send(DirEntry::Y(result.unwrap())); + Continue + }) + }); + } else if simple { + let walker = WalkDir::new(path); + for result in walker { + tx.send(DirEntry::X(result.unwrap())); + } + } else { + let walker = WalkBuilder::new(path).build(); + for result in walker { + tx.send(DirEntry::Y(result.unwrap())); + } + } + drop(tx); + stdout_thread.join().unwrap(); +} + +enum DirEntry { + X(walkdir::DirEntry), + Y(ignore::DirEntry), +} + +impl DirEntry { + fn path(&self) -> &Path { + match *self { + DirEntry::X(ref x) => x.path(), + DirEntry::Y(ref y) => y.path(), + } + } +} + +#[cfg(unix)] +fn write_path(mut wtr: W, path: &Path) { + use std::os::unix::ffi::OsStrExt; + wtr.write(path.as_os_str().as_bytes()).unwrap(); + wtr.write(b"\n").unwrap(); +} + +#[cfg(not(unix))] +fn write_path(mut wtr: W, path: &Path) { + wtr.write(path.to_string_lossy().as_bytes()).unwrap(); + wtr.write(b"\n").unwrap(); +} diff --git a/ignore/src/dir.rs b/ignore/src/dir.rs new file mode 100644 index 000000000..162b4c035 --- /dev/null +++ b/ignore/src/dir.rs @@ -0,0 +1,940 @@ +// This module provides a data structure, `Ignore`, that connects "directory +// traversal" with "ignore matchers." Specifically, it knows about gitignore +// semantics and precedence, and is organized based on directory hierarchy. +// Namely, every matcher logically corresponds to ignore rules from a single +// directory, and points to the matcher for its corresponding parent directory. +// In this sense, `Ignore` is a *persistent* data structure. +// +// This design was specifically chosen to make it possible to use this data +// structure in a parallel directory iterator. +// +// My initial intention was to expose this module as part of this crate's +// public API, but I think the data structure's public API is too complicated +// with non-obvious failure modes. Alas, such things haven't been documented +// well. + +use std::collections::HashMap; +use std::ffi::{OsString, OsStr}; +use std::path::{Path, PathBuf}; +use std::sync::{Arc, RwLock}; + +use gitignore::{self, Gitignore, GitignoreBuilder}; +use pathutil::{is_hidden, strip_prefix}; +use overrides::{self, Override}; +use types::{self, Types}; +use {Error, Match, PartialErrorBuilder}; + +/// IgnoreMatch represents information about where a match came from when using +/// the `Ignore` matcher. 
+#[derive(Clone, Debug)] +pub struct IgnoreMatch<'a>(IgnoreMatchInner<'a>); + +/// IgnoreMatchInner describes precisely where the match information came from. +/// This is private to allow expansion to more matchers in the future. +#[derive(Clone, Debug)] +enum IgnoreMatchInner<'a> { + Override(overrides::Glob<'a>), + Gitignore(&'a gitignore::Glob), + Types(types::Glob<'a>), + Hidden, +} + +impl<'a> IgnoreMatch<'a> { + fn overrides(x: overrides::Glob<'a>) -> IgnoreMatch<'a> { + IgnoreMatch(IgnoreMatchInner::Override(x)) + } + + fn gitignore(x: &'a gitignore::Glob) -> IgnoreMatch<'a> { + IgnoreMatch(IgnoreMatchInner::Gitignore(x)) + } + + fn types(x: types::Glob<'a>) -> IgnoreMatch<'a> { + IgnoreMatch(IgnoreMatchInner::Types(x)) + } + + fn hidden() -> IgnoreMatch<'static> { + IgnoreMatch(IgnoreMatchInner::Hidden) + } +} + +/// Options for the ignore matcher, shared between the matcher itself and the +/// builder. +#[derive(Clone, Copy, Debug)] +struct IgnoreOptions { + /// Whether to ignore hidden file paths or not. + hidden: bool, + /// Whether to read .ignore files. + ignore: bool, + /// Whether to respect any ignore files in parent directories. + parents: bool, + /// Whether to read git's global gitignore file. + git_global: bool, + /// Whether to read .gitignore files. + git_ignore: bool, + /// Whether to read .git/info/exclude files. + git_exclude: bool, +} + +/// Ignore is a matcher useful for recursively walking one or more directories. +#[derive(Clone, Debug)] +pub struct Ignore(Arc); + +#[derive(Clone, Debug)] +struct IgnoreInner { + /// A map of all existing directories that have already been + /// compiled into matchers. + /// + /// Note that this is never used during matching, only when adding new + /// parent directory matchers. This avoids needing to rebuild glob sets for + /// parent directories if many paths are being searched. + compiled: Arc>>, + /// The path to the directory that this matcher was built from. + dir: PathBuf, + /// An override matcher (default is empty). + overrides: Arc, + /// A file type matcher. + types: Arc, + /// The parent directory to match next. + /// + /// If this is the root directory or there are otherwise no more + /// directories to match, then `parent` is `None`. + parent: Option, + /// Whether this is an absolute parent matcher, as added by add_parent. + is_absolute_parent: bool, + /// The absolute base path of this matcher. Populated only if parent + /// directories are added. + absolute_base: Option>, + /// Explicit global ignore matchers specified by the caller. + explicit_ignores: Arc>, + /// Ignore files used in addition to `.ignore` + custom_ignore_filenames: Arc>, + /// The matcher for custom ignore files + custom_ignore_matcher: Gitignore, + /// The matcher for .ignore files. + ignore_matcher: Gitignore, + /// A global gitignore matcher, usually from $XDG_CONFIG_HOME/git/ignore. + git_global_matcher: Arc, + /// The matcher for .gitignore files. + git_ignore_matcher: Gitignore, + /// Special matcher for `.git/info/exclude` files. + git_exclude_matcher: Gitignore, + /// Whether this directory contains a .git sub-directory. + has_git: bool, + /// Ignore config. + opts: IgnoreOptions, +} + +impl Ignore { + /// Return the directory path of this matcher. + pub fn path(&self) -> &Path { + &self.0.dir + } + + /// Return true if this matcher has no parent. + pub fn is_root(&self) -> bool { + self.0.parent.is_none() + } + + /// Returns true if this matcher was added via the `add_parents` method. 
+ pub fn is_absolute_parent(&self) -> bool { + self.0.is_absolute_parent + } + + /// Return this matcher's parent, if one exists. + pub fn parent(&self) -> Option { + self.0.parent.clone() + } + + /// Create a new `Ignore` matcher with the parent directories of `dir`. + /// + /// Note that this can only be called on an `Ignore` matcher with no + /// parents (i.e., `is_root` returns `true`). This will panic otherwise. + pub fn add_parents>( + &self, + path: P, + ) -> (Ignore, Option) { + if !self.0.opts.parents + && !self.0.opts.git_ignore + && !self.0.opts.git_exclude + && !self.0.opts.git_global + { + // If we never need info from parent directories, then don't do + // anything. + return (self.clone(), None); + } + if !self.is_root() { + panic!("Ignore::add_parents called on non-root matcher"); + } + let absolute_base = match path.as_ref().canonicalize() { + Ok(path) => Arc::new(path), + Err(_) => { + // There's not much we can do here, so just return our + // existing matcher. We drop the error to be consistent + // with our general pattern of ignoring I/O errors when + // processing ignore files. + return (self.clone(), None); + } + }; + // List of parents, from child to root. + let mut parents = vec![]; + let mut path = &**absolute_base; + while let Some(parent) = path.parent() { + parents.push(parent); + path = parent; + } + let mut errs = PartialErrorBuilder::default(); + let mut ig = self.clone(); + for parent in parents.into_iter().rev() { + let mut compiled = self.0.compiled.write().unwrap(); + if let Some(prebuilt) = compiled.get(parent.as_os_str()) { + ig = prebuilt.clone(); + continue; + } + let (mut igtmp, err) = ig.add_child_path(parent); + errs.maybe_push(err); + igtmp.is_absolute_parent = true; + igtmp.absolute_base = Some(absolute_base.clone()); + igtmp.has_git = parent.join(".git").exists(); + ig = Ignore(Arc::new(igtmp)); + compiled.insert(parent.as_os_str().to_os_string(), ig.clone()); + } + (ig, errs.into_error_option()) + } + + /// Create a new `Ignore` matcher for the given child directory. + /// + /// Since building the matcher may require reading from multiple + /// files, it's possible that this method partially succeeds. Therefore, + /// a matcher is always returned (which may match nothing) and an error is + /// returned if it exists. + /// + /// Note that all I/O errors are completely ignored. + pub fn add_child>( + &self, + dir: P, + ) -> (Ignore, Option) { + let (ig, err) = self.add_child_path(dir.as_ref()); + (Ignore(Arc::new(ig)), err) + } + + /// Like add_child, but takes a full path and returns an IgnoreInner. 
+ fn add_child_path(&self, dir: &Path) -> (IgnoreInner, Option) { + let mut errs = PartialErrorBuilder::default(); + let custom_ig_matcher = + if self.0.custom_ignore_filenames.is_empty() { + Gitignore::empty() + } else { + let (m, err) = + create_gitignore(&dir, &self.0.custom_ignore_filenames); + errs.maybe_push(err); + m + }; + let ig_matcher = + if !self.0.opts.ignore { + Gitignore::empty() + } else { + let (m, err) = create_gitignore(&dir, &[".ignore"]); + errs.maybe_push(err); + m + }; + let gi_matcher = + if !self.0.opts.git_ignore { + Gitignore::empty() + } else { + let (m, err) = create_gitignore(&dir, &[".gitignore"]); + errs.maybe_push(err); + m + }; + let gi_exclude_matcher = + if !self.0.opts.git_exclude { + Gitignore::empty() + } else { + let (m, err) = create_gitignore(&dir, &[".git/info/exclude"]); + errs.maybe_push(err); + m + }; + let ig = IgnoreInner { + compiled: self.0.compiled.clone(), + dir: dir.to_path_buf(), + overrides: self.0.overrides.clone(), + types: self.0.types.clone(), + parent: Some(self.clone()), + is_absolute_parent: false, + absolute_base: self.0.absolute_base.clone(), + explicit_ignores: self.0.explicit_ignores.clone(), + custom_ignore_filenames: self.0.custom_ignore_filenames.clone(), + custom_ignore_matcher: custom_ig_matcher, + ignore_matcher: ig_matcher, + git_global_matcher: self.0.git_global_matcher.clone(), + git_ignore_matcher: gi_matcher, + git_exclude_matcher: gi_exclude_matcher, + has_git: dir.join(".git").exists(), + opts: self.0.opts, + }; + (ig, errs.into_error_option()) + } + + /// Returns true if at least one type of ignore rule should be matched. + fn has_any_ignore_rules(&self) -> bool { + let opts = self.0.opts; + let has_custom_ignore_files = !self.0.custom_ignore_filenames.is_empty(); + let has_explicit_ignores = !self.0.explicit_ignores.is_empty(); + + opts.ignore || opts.git_global || opts.git_ignore + || opts.git_exclude || has_custom_ignore_files + || has_explicit_ignores + } + + /// Returns a match indicating whether the given file path should be + /// ignored or not. + /// + /// The match contains information about its origin. + pub fn matched<'a, P: AsRef>( + &'a self, + path: P, + is_dir: bool, + ) -> Match> { + // We need to be careful with our path. If it has a leading ./, then + // strip it because it causes nothing but trouble. + let mut path = path.as_ref(); + if let Some(p) = strip_prefix("./", path) { + path = p; + } + // Match against the override patterns. If an override matches + // regardless of whether it's whitelist/ignore, then we quit and + // return that result immediately. Overrides have the highest + // precedence. + if !self.0.overrides.is_empty() { + let mat = + self.0.overrides.matched(path, is_dir) + .map(IgnoreMatch::overrides); + if !mat.is_none() { + return mat; + } + } + let mut whitelisted = Match::None; + if self.has_any_ignore_rules() { + let mat = self.matched_ignore(path, is_dir); + if mat.is_ignore() { + return mat; + } else if mat.is_whitelist() { + whitelisted = mat; + } + } + if !self.0.types.is_empty() { + let mat = + self.0.types.matched(path, is_dir).map(IgnoreMatch::types); + if mat.is_ignore() { + return mat; + } else if mat.is_whitelist() { + whitelisted = mat; + } + } + if whitelisted.is_none() && self.0.opts.hidden && is_hidden(path) { + return Match::Ignore(IgnoreMatch::hidden()); + } + whitelisted + } + + /// Performs matching only on the ignore files for this directory and + /// all parent directories. 
+ fn matched_ignore<'a>( + &'a self, + path: &Path, + is_dir: bool, + ) -> Match> { + let (mut m_custom_ignore, mut m_ignore, mut m_gi, mut m_gi_exclude, mut m_explicit) = + (Match::None, Match::None, Match::None, Match::None, Match::None); + let any_git = self.parents().any(|ig| ig.0.has_git); + let mut saw_git = false; + for ig in self.parents().take_while(|ig| !ig.0.is_absolute_parent) { + if m_custom_ignore.is_none() { + m_custom_ignore = + ig.0.custom_ignore_matcher.matched(path, is_dir) + .map(IgnoreMatch::gitignore); + } + if m_ignore.is_none() { + m_ignore = + ig.0.ignore_matcher.matched(path, is_dir) + .map(IgnoreMatch::gitignore); + } + if any_git && !saw_git && m_gi.is_none() { + m_gi = + ig.0.git_ignore_matcher.matched(path, is_dir) + .map(IgnoreMatch::gitignore); + } + if any_git && !saw_git && m_gi_exclude.is_none() { + m_gi_exclude = + ig.0.git_exclude_matcher.matched(path, is_dir) + .map(IgnoreMatch::gitignore); + } + saw_git = saw_git || ig.0.has_git; + } + if self.0.opts.parents { + if let Some(abs_parent_path) = self.absolute_base() { + let path = abs_parent_path.join(path); + for ig in self.parents().skip_while(|ig|!ig.0.is_absolute_parent) { + if m_custom_ignore.is_none() { + m_custom_ignore = + ig.0.custom_ignore_matcher.matched(&path, is_dir) + .map(IgnoreMatch::gitignore); + } + if m_ignore.is_none() { + m_ignore = + ig.0.ignore_matcher.matched(&path, is_dir) + .map(IgnoreMatch::gitignore); + } + if any_git && !saw_git && m_gi.is_none() { + m_gi = + ig.0.git_ignore_matcher.matched(&path, is_dir) + .map(IgnoreMatch::gitignore); + } + if any_git && !saw_git && m_gi_exclude.is_none() { + m_gi_exclude = + ig.0.git_exclude_matcher.matched(&path, is_dir) + .map(IgnoreMatch::gitignore); + } + saw_git = saw_git || ig.0.has_git; + } + } + } + for gi in self.0.explicit_ignores.iter().rev() { + if !m_explicit.is_none() { + break; + } + m_explicit = gi.matched(&path, is_dir).map(IgnoreMatch::gitignore); + } + let m_global = + if any_git { + self.0.git_global_matcher + .matched(&path, is_dir) + .map(IgnoreMatch::gitignore) + } else { + Match::None + }; + + m_custom_ignore.or(m_ignore).or(m_gi).or(m_gi_exclude).or(m_global).or(m_explicit) + } + + /// Returns an iterator over parent ignore matchers, including this one. + pub fn parents(&self) -> Parents { + Parents(Some(self)) + } + + /// Returns the first absolute path of the first absolute parent, if + /// one exists. + fn absolute_base(&self) -> Option<&Path> { + self.0.absolute_base.as_ref().map(|p| &***p) + } +} + +/// An iterator over all parents of an ignore matcher, including itself. +/// +/// The lifetime `'a` refers to the lifetime of the initial `Ignore` matcher. +pub struct Parents<'a>(Option<&'a Ignore>); + +impl<'a> Iterator for Parents<'a> { + type Item = &'a Ignore; + + fn next(&mut self) -> Option<&'a Ignore> { + match self.0.take() { + None => None, + Some(ig) => { + self.0 = ig.0.parent.as_ref(); + Some(ig) + } + } + } +} + +/// A builder for creating an Ignore matcher. +#[derive(Clone, Debug)] +pub struct IgnoreBuilder { + /// The root directory path for this ignore matcher. + dir: PathBuf, + /// An override matcher (default is empty). + overrides: Arc, + /// A type matcher (default is empty). + types: Arc, + /// Explicit global ignore matchers. + explicit_ignores: Vec, + /// Ignore files in addition to .ignore. + custom_ignore_filenames: Vec, + /// Ignore config. + opts: IgnoreOptions, +} + +impl IgnoreBuilder { + /// Create a new builder for an `Ignore` matcher. 
+ /// + /// All relative file paths are resolved with respect to the current + /// working directory. + pub fn new() -> IgnoreBuilder { + IgnoreBuilder { + dir: Path::new("").to_path_buf(), + overrides: Arc::new(Override::empty()), + types: Arc::new(Types::empty()), + explicit_ignores: vec![], + custom_ignore_filenames: vec![], + opts: IgnoreOptions { + hidden: true, + ignore: true, + parents: true, + git_global: true, + git_ignore: true, + git_exclude: true, + }, + } + } + + /// Builds a new `Ignore` matcher. + /// + /// The matcher returned won't match anything until ignore rules from + /// directories are added to it. + pub fn build(&self) -> Ignore { + let git_global_matcher = + if !self.opts.git_global { + Gitignore::empty() + } else { + let (gi, err) = Gitignore::global(); + if let Some(err) = err { + debug!("{}", err); + } + gi + }; + + Ignore(Arc::new(IgnoreInner { + compiled: Arc::new(RwLock::new(HashMap::new())), + dir: self.dir.clone(), + overrides: self.overrides.clone(), + types: self.types.clone(), + parent: None, + is_absolute_parent: true, + absolute_base: None, + explicit_ignores: Arc::new(self.explicit_ignores.clone()), + custom_ignore_filenames: Arc::new(self.custom_ignore_filenames.clone()), + custom_ignore_matcher: Gitignore::empty(), + ignore_matcher: Gitignore::empty(), + git_global_matcher: Arc::new(git_global_matcher), + git_ignore_matcher: Gitignore::empty(), + git_exclude_matcher: Gitignore::empty(), + has_git: false, + opts: self.opts, + })) + } + + /// Add an override matcher. + /// + /// By default, no override matcher is used. + /// + /// This overrides any previous setting. + pub fn overrides(&mut self, overrides: Override) -> &mut IgnoreBuilder { + self.overrides = Arc::new(overrides); + self + } + + /// Add a file type matcher. + /// + /// By default, no file type matcher is used. + /// + /// This overrides any previous setting. + pub fn types(&mut self, types: Types) -> &mut IgnoreBuilder { + self.types = Arc::new(types); + self + } + + /// Adds a new global ignore matcher from the ignore file path given. + pub fn add_ignore(&mut self, ig: Gitignore) -> &mut IgnoreBuilder { + self.explicit_ignores.push(ig); + self + } + + /// Add a custom ignore file name + /// + /// These ignore files have higher precedence than all other ignore files. + /// + /// When specifying multiple names, earlier names have lower precedence than + /// later names. + pub fn add_custom_ignore_filename>( + &mut self, + file_name: S + ) -> &mut IgnoreBuilder { + self.custom_ignore_filenames.push(file_name.as_ref().to_os_string()); + self + } + + /// Enables ignoring hidden files. + /// + /// This is enabled by default. + pub fn hidden(&mut self, yes: bool) -> &mut IgnoreBuilder { + self.opts.hidden = yes; + self + } + + /// Enables reading `.ignore` files. + /// + /// `.ignore` files have the same semantics as `gitignore` files and are + /// supported by search tools such as ripgrep and The Silver Searcher. + /// + /// This is enabled by default. + pub fn ignore(&mut self, yes: bool) -> &mut IgnoreBuilder { + self.opts.ignore = yes; + self + } + + /// Enables reading ignore files from parent directories. + /// + /// If this is enabled, then .gitignore files in parent directories of each + /// file path given are respected. Otherwise, they are ignored. + /// + /// This is enabled by default. + pub fn parents(&mut self, yes: bool) -> &mut IgnoreBuilder { + self.opts.parents = yes; + self + } + + /// Add a global gitignore matcher. 
+ /// + /// Its precedence is lower than both normal `.gitignore` files and + /// `.git/info/exclude` files. + /// + /// This overwrites any previous global gitignore setting. + /// + /// This is enabled by default. + pub fn git_global(&mut self, yes: bool) -> &mut IgnoreBuilder { + self.opts.git_global = yes; + self + } + + /// Enables reading `.gitignore` files. + /// + /// `.gitignore` files have match semantics as described in the `gitignore` + /// man page. + /// + /// This is enabled by default. + pub fn git_ignore(&mut self, yes: bool) -> &mut IgnoreBuilder { + self.opts.git_ignore = yes; + self + } + + /// Enables reading `.git/info/exclude` files. + /// + /// `.git/info/exclude` files have match semantics as described in the + /// `gitignore` man page. + /// + /// This is enabled by default. + pub fn git_exclude(&mut self, yes: bool) -> &mut IgnoreBuilder { + self.opts.git_exclude = yes; + self + } +} + +/// Creates a new gitignore matcher for the directory given. +/// +/// Ignore globs are extracted from each of the file names in `dir` in the +/// order given (earlier names have lower precedence than later names). +/// +/// I/O errors are ignored. +pub fn create_gitignore>( + dir: &Path, + names: &[T], +) -> (Gitignore, Option) { + let mut builder = GitignoreBuilder::new(dir); + let mut errs = PartialErrorBuilder::default(); + for name in names { + let gipath = dir.join(name.as_ref()); + errs.maybe_push_ignore_io(builder.add(gipath)); + } + let gi = match builder.build() { + Ok(gi) => gi, + Err(err) => { + errs.push(err); + GitignoreBuilder::new(dir).build().unwrap() + } + }; + (gi, errs.into_error_option()) +} + +#[cfg(test)] +mod tests { + use std::fs::{self, File}; + use std::io::Write; + use std::path::Path; + + use tempdir::TempDir; + + use dir::IgnoreBuilder; + use gitignore::Gitignore; + use Error; + + fn wfile>(path: P, contents: &str) { + let mut file = File::create(path).unwrap(); + file.write_all(contents.as_bytes()).unwrap(); + } + + fn mkdirp>(path: P) { + fs::create_dir_all(path).unwrap(); + } + + fn partial(err: Error) -> Vec { + match err { + Error::Partial(errs) => errs, + _ => panic!("expected partial error but got {:?}", err), + } + } + + #[test] + fn explicit_ignore() { + let td = TempDir::new("ignore-test-").unwrap(); + wfile(td.path().join("not-an-ignore"), "foo\n!bar"); + + let (gi, err) = Gitignore::new(td.path().join("not-an-ignore")); + assert!(err.is_none()); + let (ig, err) = IgnoreBuilder::new() + .add_ignore(gi).build().add_child(td.path()); + assert!(err.is_none()); + assert!(ig.matched("foo", false).is_ignore()); + assert!(ig.matched("bar", false).is_whitelist()); + assert!(ig.matched("baz", false).is_none()); + } + + #[test] + fn git_exclude() { + let td = TempDir::new("ignore-test-").unwrap(); + mkdirp(td.path().join(".git/info")); + wfile(td.path().join(".git/info/exclude"), "foo\n!bar"); + + let (ig, err) = IgnoreBuilder::new().build().add_child(td.path()); + assert!(err.is_none()); + assert!(ig.matched("foo", false).is_ignore()); + assert!(ig.matched("bar", false).is_whitelist()); + assert!(ig.matched("baz", false).is_none()); + } + + #[test] + fn gitignore() { + let td = TempDir::new("ignore-test-").unwrap(); + mkdirp(td.path().join(".git")); + wfile(td.path().join(".gitignore"), "foo\n!bar"); + + let (ig, err) = IgnoreBuilder::new().build().add_child(td.path()); + assert!(err.is_none()); + assert!(ig.matched("foo", false).is_ignore()); + assert!(ig.matched("bar", false).is_whitelist()); + assert!(ig.matched("baz", false).is_none()); + } + + 
#[test] + fn gitignore_no_git() { + let td = TempDir::new("ignore-test-").unwrap(); + wfile(td.path().join(".gitignore"), "foo\n!bar"); + + let (ig, err) = IgnoreBuilder::new().build().add_child(td.path()); + assert!(err.is_none()); + assert!(ig.matched("foo", false).is_none()); + assert!(ig.matched("bar", false).is_none()); + assert!(ig.matched("baz", false).is_none()); + } + + #[test] + fn ignore() { + let td = TempDir::new("ignore-test-").unwrap(); + wfile(td.path().join(".ignore"), "foo\n!bar"); + + let (ig, err) = IgnoreBuilder::new().build().add_child(td.path()); + assert!(err.is_none()); + assert!(ig.matched("foo", false).is_ignore()); + assert!(ig.matched("bar", false).is_whitelist()); + assert!(ig.matched("baz", false).is_none()); + } + + #[test] + fn custom_ignore() { + let td = TempDir::new("ignore-test-").unwrap(); + let custom_ignore = ".customignore"; + wfile(td.path().join(custom_ignore), "foo\n!bar"); + + let (ig, err) = IgnoreBuilder::new() + .add_custom_ignore_filename(custom_ignore) + .build().add_child(td.path()); + assert!(err.is_none()); + assert!(ig.matched("foo", false).is_ignore()); + assert!(ig.matched("bar", false).is_whitelist()); + assert!(ig.matched("baz", false).is_none()); + } + + // Tests that a custom ignore file will override an .ignore. + #[test] + fn custom_ignore_over_ignore() { + let td = TempDir::new("ignore-test-").unwrap(); + let custom_ignore = ".customignore"; + wfile(td.path().join(".ignore"), "foo"); + wfile(td.path().join(custom_ignore), "!foo"); + + let (ig, err) = IgnoreBuilder::new() + .add_custom_ignore_filename(custom_ignore) + .build().add_child(td.path()); + assert!(err.is_none()); + assert!(ig.matched("foo", false).is_whitelist()); + } + + // Tests that earlier custom ignore files have lower precedence than later. + #[test] + fn custom_ignore_precedence() { + let td = TempDir::new("ignore-test-").unwrap(); + let custom_ignore1 = ".customignore1"; + let custom_ignore2 = ".customignore2"; + wfile(td.path().join(custom_ignore1), "foo"); + wfile(td.path().join(custom_ignore2), "!foo"); + + let (ig, err) = IgnoreBuilder::new() + .add_custom_ignore_filename(custom_ignore1) + .add_custom_ignore_filename(custom_ignore2) + .build().add_child(td.path()); + assert!(err.is_none()); + assert!(ig.matched("foo", false).is_whitelist()); + } + + // Tests that an .ignore will override a .gitignore. + #[test] + fn ignore_over_gitignore() { + let td = TempDir::new("ignore-test-").unwrap(); + wfile(td.path().join(".gitignore"), "foo"); + wfile(td.path().join(".ignore"), "!foo"); + + let (ig, err) = IgnoreBuilder::new().build().add_child(td.path()); + assert!(err.is_none()); + assert!(ig.matched("foo", false).is_whitelist()); + } + + // Tests that exclude has lower precedent than both .ignore and .gitignore. 
+ #[test] + fn exclude_lowest() { + let td = TempDir::new("ignore-test-").unwrap(); + wfile(td.path().join(".gitignore"), "!foo"); + wfile(td.path().join(".ignore"), "!bar"); + mkdirp(td.path().join(".git/info")); + wfile(td.path().join(".git/info/exclude"), "foo\nbar\nbaz"); + + let (ig, err) = IgnoreBuilder::new().build().add_child(td.path()); + assert!(err.is_none()); + assert!(ig.matched("baz", false).is_ignore()); + assert!(ig.matched("foo", false).is_whitelist()); + assert!(ig.matched("bar", false).is_whitelist()); + } + + #[test] + fn errored() { + let td = TempDir::new("ignore-test-").unwrap(); + wfile(td.path().join(".gitignore"), "f**oo"); + + let (_, err) = IgnoreBuilder::new().build().add_child(td.path()); + assert!(err.is_some()); + } + + #[test] + fn errored_both() { + let td = TempDir::new("ignore-test-").unwrap(); + wfile(td.path().join(".gitignore"), "f**oo"); + wfile(td.path().join(".ignore"), "fo**o"); + + let (_, err) = IgnoreBuilder::new().build().add_child(td.path()); + assert_eq!(2, partial(err.expect("an error")).len()); + } + + #[test] + fn errored_partial() { + let td = TempDir::new("ignore-test-").unwrap(); + mkdirp(td.path().join(".git")); + wfile(td.path().join(".gitignore"), "f**oo\nbar"); + + let (ig, err) = IgnoreBuilder::new().build().add_child(td.path()); + assert!(err.is_some()); + assert!(ig.matched("bar", false).is_ignore()); + } + + #[test] + fn errored_partial_and_ignore() { + let td = TempDir::new("ignore-test-").unwrap(); + wfile(td.path().join(".gitignore"), "f**oo\nbar"); + wfile(td.path().join(".ignore"), "!bar"); + + let (ig, err) = IgnoreBuilder::new().build().add_child(td.path()); + assert!(err.is_some()); + assert!(ig.matched("bar", false).is_whitelist()); + } + + #[test] + fn not_present_empty() { + let td = TempDir::new("ignore-test-").unwrap(); + + let (_, err) = IgnoreBuilder::new().build().add_child(td.path()); + assert!(err.is_none()); + } + + #[test] + fn stops_at_git_dir() { + // This tests that .gitignore files beyond a .git barrier aren't + // matched, but .ignore files are. + let td = TempDir::new("ignore-test-").unwrap(); + mkdirp(td.path().join(".git")); + mkdirp(td.path().join("foo/.git")); + wfile(td.path().join(".gitignore"), "foo"); + wfile(td.path().join(".ignore"), "bar"); + + let ig0 = IgnoreBuilder::new().build(); + let (ig1, err) = ig0.add_child(td.path()); + assert!(err.is_none()); + let (ig2, err) = ig1.add_child(ig1.path().join("foo")); + assert!(err.is_none()); + + assert!(ig1.matched("foo", false).is_ignore()); + assert!(ig2.matched("foo", false).is_none()); + + assert!(ig1.matched("bar", false).is_ignore()); + assert!(ig2.matched("bar", false).is_ignore()); + } + + #[test] + fn absolute_parent() { + let td = TempDir::new("ignore-test-").unwrap(); + mkdirp(td.path().join(".git")); + mkdirp(td.path().join("foo")); + wfile(td.path().join(".gitignore"), "bar"); + + // First, check that the parent gitignore file isn't detected if the + // parent isn't added. This establishes a baseline. + let ig0 = IgnoreBuilder::new().build(); + let (ig1, err) = ig0.add_child(td.path().join("foo")); + assert!(err.is_none()); + assert!(ig1.matched("bar", false).is_none()); + + // Second, check that adding a parent directory actually works. 
+ let ig0 = IgnoreBuilder::new().build(); + let (ig1, err) = ig0.add_parents(td.path().join("foo")); + assert!(err.is_none()); + let (ig2, err) = ig1.add_child(td.path().join("foo")); + assert!(err.is_none()); + assert!(ig2.matched("bar", false).is_ignore()); + } + + #[test] + fn absolute_parent_anchored() { + let td = TempDir::new("ignore-test-").unwrap(); + mkdirp(td.path().join(".git")); + mkdirp(td.path().join("src/llvm")); + wfile(td.path().join(".gitignore"), "/llvm/\nfoo"); + + let ig0 = IgnoreBuilder::new().build(); + let (ig1, err) = ig0.add_parents(td.path().join("src")); + assert!(err.is_none()); + let (ig2, err) = ig1.add_child("src"); + assert!(err.is_none()); + + assert!(ig1.matched("llvm", true).is_none()); + assert!(ig2.matched("llvm", true).is_none()); + assert!(ig2.matched("src/llvm", true).is_none()); + assert!(ig2.matched("foo", false).is_ignore()); + assert!(ig2.matched("src/foo", false).is_ignore()); + } +} diff --git a/ignore/src/gitignore.rs b/ignore/src/gitignore.rs new file mode 100644 index 000000000..15827fd29 --- /dev/null +++ b/ignore/src/gitignore.rs @@ -0,0 +1,764 @@ +/*! +The gitignore module provides a way to match globs from a gitignore file +against file paths. + +Note that this module implements the specification as described in the +`gitignore` man page from scratch. That is, this module does *not* shell out to +the `git` command line tool. +*/ + +use std::cell::RefCell; +use std::env; +use std::fs::File; +use std::io::{self, BufRead, Read}; +use std::path::{Path, PathBuf}; +use std::str; +use std::sync::Arc; + +use globset::{Candidate, GlobBuilder, GlobSet, GlobSetBuilder}; +use regex::bytes::Regex; +use thread_local::ThreadLocal; + +use pathutil::{is_file_name, strip_prefix}; +use {Error, Match, PartialErrorBuilder}; + +/// Glob represents a single glob in a gitignore file. +/// +/// This is used to report information about the highest precedent glob that +/// matched in one or more gitignore files. +#[derive(Clone, Debug)] +pub struct Glob { + /// The file path that this glob was extracted from. + from: Option, + /// The original glob string. + original: String, + /// The actual glob string used to convert to a regex. + actual: String, + /// Whether this is a whitelisted glob or not. + is_whitelist: bool, + /// Whether this glob should only match directories or not. + is_only_dir: bool, +} + +impl Glob { + /// Returns the file path that defined this glob. + pub fn from(&self) -> Option<&Path> { + self.from.as_ref().map(|p| &**p) + } + + /// The original glob as it was defined in a gitignore file. + pub fn original(&self) -> &str { + &self.original + } + + /// The actual glob that was compiled to respect gitignore + /// semantics. + pub fn actual(&self) -> &str { + &self.actual + } + + /// Whether this was a whitelisted glob or not. + pub fn is_whitelist(&self) -> bool { + self.is_whitelist + } + + /// Whether this glob must match a directory or not. + pub fn is_only_dir(&self) -> bool { + self.is_only_dir + } + + /// Returns true if and only if this glob has a `**/` prefix. + fn has_doublestar_prefix(&self) -> bool { + self.actual.starts_with("**/") + || (self.actual == "**" && self.is_only_dir) + } +} + +/// Gitignore is a matcher for the globs in one or more gitignore files +/// in the same directory. 
+#[derive(Clone, Debug)] +pub struct Gitignore { + set: GlobSet, + root: PathBuf, + globs: Vec, + num_ignores: u64, + num_whitelists: u64, + matches: Option>>>>, +} + +impl Gitignore { + /// Creates a new gitignore matcher from the gitignore file path given. + /// + /// If it's desirable to include multiple gitignore files in a single + /// matcher, or read gitignore globs from a different source, then + /// use `GitignoreBuilder`. + /// + /// This always returns a valid matcher, even if it's empty. In particular, + /// a Gitignore file can be partially valid, e.g., when one glob is invalid + /// but the rest aren't. + /// + /// Note that I/O errors are ignored. For more granular control over + /// errors, use `GitignoreBuilder`. + pub fn new>( + gitignore_path: P, + ) -> (Gitignore, Option) { + let path = gitignore_path.as_ref(); + let parent = path.parent().unwrap_or(Path::new("/")); + let mut builder = GitignoreBuilder::new(parent); + let mut errs = PartialErrorBuilder::default(); + errs.maybe_push_ignore_io(builder.add(path)); + match builder.build() { + Ok(gi) => (gi, errs.into_error_option()), + Err(err) => { + errs.push(err); + (Gitignore::empty(), errs.into_error_option()) + } + } + } + + /// Creates a new gitignore matcher from the global ignore file, if one + /// exists. + /// + /// The global config file path is specified by git's `core.excludesFile` + /// config option. + /// + /// Git's config file location is `$HOME/.gitconfig`. If `$HOME/.gitconfig` + /// does not exist or does not specify `core.excludesFile`, then + /// `$XDG_CONFIG_HOME/git/ignore` is read. If `$XDG_CONFIG_HOME` is not + /// set or is empty, then `$HOME/.config/git/ignore` is used instead. + pub fn global() -> (Gitignore, Option) { + match gitconfig_excludes_path() { + None => (Gitignore::empty(), None), + Some(path) => { + if !path.is_file() { + (Gitignore::empty(), None) + } else { + Gitignore::new(path) + } + } + } + } + + /// Creates a new empty gitignore matcher that never matches anything. + /// + /// Its path is empty. + pub fn empty() -> Gitignore { + Gitignore { + set: GlobSet::empty(), + root: PathBuf::from(""), + globs: vec![], + num_ignores: 0, + num_whitelists: 0, + matches: None, + } + } + + /// Returns the directory containing this gitignore matcher. + /// + /// All matches are done relative to this path. + pub fn path(&self) -> &Path { + &*self.root + } + + /// Returns true if and only if this gitignore has zero globs, and + /// therefore never matches any file path. + pub fn is_empty(&self) -> bool { + self.set.is_empty() + } + + /// Returns the total number of globs, which should be equivalent to + /// `num_ignores + num_whitelists`. + pub fn len(&self) -> usize { + self.set.len() + } + + /// Returns the total number of ignore globs. + pub fn num_ignores(&self) -> u64 { + self.num_ignores + } + + /// Returns the total number of whitelisted globs. + pub fn num_whitelists(&self) -> u64 { + self.num_whitelists + } + + /// Returns whether the given path (file or directory) matched a pattern in + /// this gitignore matcher. + /// + /// `is_dir` should be true if the path refers to a directory and false + /// otherwise. + /// + /// The given path is matched relative to the path given when building + /// the matcher. Specifically, before matching `path`, its prefix (as + /// determined by a common suffix of the directory containing this + /// gitignore) is stripped. If there is no common suffix/prefix overlap, + /// then `path` is assumed to be relative to this matcher. 
+ pub fn matched>( + &self, + path: P, + is_dir: bool, + ) -> Match<&Glob> { + if self.is_empty() { + return Match::None; + } + self.matched_stripped(self.strip(path.as_ref()), is_dir) + } + + /// Returns whether the given path (file or directory, and expected to be + /// under the root) or any of its parent directories (up to the root) + /// matched a pattern in this gitignore matcher. + /// + /// NOTE: This method is more expensive than walking the directory hierarchy + /// top-to-bottom and matching the entries. But, is easier to use in cases + /// when a list of paths are available without a hierarchy. + /// + /// `is_dir` should be true if the path refers to a directory and false + /// otherwise. + /// + /// The given path is matched relative to the path given when building + /// the matcher. Specifically, before matching `path`, its prefix (as + /// determined by a common suffix of the directory containing this + /// gitignore) is stripped. If there is no common suffix/prefix overlap, + /// then `path` is assumed to be relative to this matcher. + /// + /// # Panics + /// + /// This method panics if the given file path is not under the root path + /// of this matcher. + pub fn matched_path_or_any_parents>( + &self, + path: P, + is_dir: bool, + ) -> Match<&Glob> { + if self.is_empty() { + return Match::None; + } + let mut path = self.strip(path.as_ref()); + assert!(!path.has_root(), "path is expected to be under the root"); + + match self.matched_stripped(path, is_dir) { + Match::None => (), // walk up + a_match => return a_match, + } + while let Some(parent) = path.parent() { + match self.matched_stripped(parent, /* is_dir */ true) { + Match::None => path = parent, // walk up + a_match => return a_match, + } + } + Match::None + } + + /// Like matched, but takes a path that has already been stripped. + fn matched_stripped>( + &self, + path: P, + is_dir: bool, + ) -> Match<&Glob> { + if self.is_empty() { + return Match::None; + } + let path = path.as_ref(); + let _matches = self.matches.as_ref().unwrap().get_default(); + let mut matches = _matches.borrow_mut(); + let candidate = Candidate::new(path); + self.set.matches_candidate_into(&candidate, &mut *matches); + for &i in matches.iter().rev() { + let glob = &self.globs[i]; + if !glob.is_only_dir() || is_dir { + return if glob.is_whitelist() { + Match::Whitelist(glob) + } else { + Match::Ignore(glob) + }; + } + } + Match::None + } + + /// Strips the given path such that it's suitable for matching with this + /// gitignore matcher. + fn strip<'a, P: 'a + AsRef + ?Sized>( + &'a self, + path: &'a P, + ) -> &'a Path { + let mut path = path.as_ref(); + // A leading ./ is completely superfluous. We also strip it from + // our gitignore root path, so we need to strip it from our candidate + // path too. + if let Some(p) = strip_prefix("./", path) { + path = p; + } + // Strip any common prefix between the candidate path and the root + // of the gitignore, to make sure we get relative matching right. + // BUT, a file name might not have any directory components to it, + // in which case, we don't want to accidentally strip any part of the + // file name. + // + // As an additional special case, if the root is just `.`, then we + // shouldn't try to strip anything, e.g., when path begins with a `.`. + if self.root != Path::new(".") && !is_file_name(path) { + if let Some(p) = strip_prefix(&self.root, path) { + path = p; + // If we're left with a leading slash, get rid of it. 
+ if let Some(p) = strip_prefix("/", path) { + path = p; + } + } + } + path + } +} + +/// Builds a matcher for a single set of globs from a .gitignore file. +#[derive(Clone, Debug)] +pub struct GitignoreBuilder { + builder: GlobSetBuilder, + root: PathBuf, + globs: Vec, + case_insensitive: bool, +} + +impl GitignoreBuilder { + /// Create a new builder for a gitignore file. + /// + /// The path given should be the path at which the globs for this gitignore + /// file should be matched. Note that paths are always matched relative + /// to the root path given here. Generally, the root path should correspond + /// to the *directory* containing a `.gitignore` file. + pub fn new>(root: P) -> GitignoreBuilder { + let root = root.as_ref(); + GitignoreBuilder { + builder: GlobSetBuilder::new(), + root: strip_prefix("./", root).unwrap_or(root).to_path_buf(), + globs: vec![], + case_insensitive: false, + } + } + + /// Builds a new matcher from the globs added so far. + /// + /// Once a matcher is built, no new globs can be added to it. + pub fn build(&self) -> Result { + let nignore = self.globs.iter().filter(|g| !g.is_whitelist()).count(); + let nwhite = self.globs.iter().filter(|g| g.is_whitelist()).count(); + let set = + self.builder.build().map_err(|err| { + Error::Glob { + glob: None, + err: err.to_string(), + } + })?; + Ok(Gitignore { + set: set, + root: self.root.clone(), + globs: self.globs.clone(), + num_ignores: nignore as u64, + num_whitelists: nwhite as u64, + matches: Some(Arc::new(ThreadLocal::default())), + }) + } + + /// Add each glob from the file path given. + /// + /// The file given should be formatted as a `gitignore` file. + /// + /// Note that partial errors can be returned. For example, if there was + /// a problem adding one glob, an error for that will be returned, but + /// all other valid globs will still be added. + pub fn add>(&mut self, path: P) -> Option { + let path = path.as_ref(); + let file = match File::open(path) { + Err(err) => return Some(Error::Io(err).with_path(path)), + Ok(file) => file, + }; + let rdr = io::BufReader::new(file); + let mut errs = PartialErrorBuilder::default(); + for (i, line) in rdr.lines().enumerate() { + let lineno = (i + 1) as u64; + let line = match line { + Ok(line) => line, + Err(err) => { + errs.push(Error::Io(err).tagged(path, lineno)); + break; + } + }; + if let Err(err) = self.add_line(Some(path.to_path_buf()), &line) { + errs.push(err.tagged(path, lineno)); + } + } + errs.into_error_option() + } + + /// Add each glob line from the string given. + /// + /// If this string came from a particular `gitignore` file, then its path + /// should be provided here. + /// + /// The string given should be formatted as a `gitignore` file. + #[cfg(test)] + fn add_str( + &mut self, + from: Option, + gitignore: &str, + ) -> Result<&mut GitignoreBuilder, Error> { + for line in gitignore.lines() { + self.add_line(from.clone(), line)?; + } + Ok(self) + } + + /// Add a line from a gitignore file to this builder. + /// + /// If this line came from a particular `gitignore` file, then its path + /// should be provided here. + /// + /// If the line could not be parsed as a glob, then an error is returned. 
+ pub fn add_line( + &mut self, + from: Option, + mut line: &str, + ) -> Result<&mut GitignoreBuilder, Error> { + if line.starts_with("#") { + return Ok(self); + } + if !line.ends_with("\\ ") { + line = line.trim_right(); + } + if line.is_empty() { + return Ok(self); + } + let mut glob = Glob { + from: from, + original: line.to_string(), + actual: String::new(), + is_whitelist: false, + is_only_dir: false, + }; + let mut literal_separator = false; + let mut is_absolute = false; + if line.starts_with("\\!") || line.starts_with("\\#") { + line = &line[1..]; + is_absolute = line.chars().nth(0) == Some('/'); + } else { + if line.starts_with("!") { + glob.is_whitelist = true; + line = &line[1..]; + } + if line.starts_with("/") { + // `man gitignore` says that if a glob starts with a slash, + // then the glob can only match the beginning of a path + // (relative to the location of gitignore). We achieve this by + // simply banning wildcards from matching /. + literal_separator = true; + line = &line[1..]; + is_absolute = true; + } + } + // If it ends with a slash, then this should only match directories, + // but the slash should otherwise not be used while globbing. + if let Some((i, c)) = line.char_indices().rev().nth(0) { + if c == '/' { + glob.is_only_dir = true; + line = &line[..i]; + } + } + // If there is a literal slash, then we note that so that globbing + // doesn't let wildcards match slashes. + glob.actual = line.to_string(); + if is_absolute || line.chars().any(|c| c == '/') { + literal_separator = true; + } + // If there was a slash, then this is a glob that must match the entire + // path name. Otherwise, we should let it match anywhere, so use a **/ + // prefix. + if !literal_separator { + // ... but only if we don't already have a **/ prefix. + if !glob.has_doublestar_prefix() { + glob.actual = format!("**/{}", glob.actual); + } + } + // If the glob ends with `/**`, then we should only match everything + // inside a directory, but not the directory itself. Standard globs + // will match the directory. So we add `/*` to force the issue. + if glob.actual.ends_with("/**") { + glob.actual = format!("{}/*", glob.actual); + } + let parsed = + GlobBuilder::new(&glob.actual) + .literal_separator(literal_separator) + .case_insensitive(self.case_insensitive) + .backslash_escape(true) + .build() + .map_err(|err| { + Error::Glob { + glob: Some(glob.original.clone()), + err: err.kind().to_string(), + } + })?; + self.builder.add(parsed); + self.globs.push(glob); + Ok(self) + } + + /// Toggle whether the globs should be matched case insensitively or not. + /// + /// When this option is changed, only globs added after the change will be affected. + /// + /// This is disabled by default. + pub fn case_insensitive( + &mut self, yes: bool + ) -> Result<&mut GitignoreBuilder, Error> { + self.case_insensitive = yes; + Ok(self) + } +} + +/// Return the file path of the current environment's global gitignore file. +/// +/// Note that the file path returned may not exist. +fn gitconfig_excludes_path() -> Option { + // git supports $HOME/.gitconfig and $XDG_CONFIG_DIR/git/config. Notably, + // both can be active at the same time, where $HOME/.gitconfig takes + // precedent. So if $HOME/.gitconfig defines a `core.excludesFile`, then + // we're done. 
+ match gitconfig_home_contents().and_then(|x| parse_excludes_file(&x)) { + Some(path) => return Some(path), + None => {} + } + match gitconfig_xdg_contents().and_then(|x| parse_excludes_file(&x)) { + Some(path) => return Some(path), + None => {} + } + excludes_file_default() +} + +/// Returns the file contents of git's global config file, if one exists, in +/// the user's home directory. +fn gitconfig_home_contents() -> Option> { + let home = match home_dir() { + None => return None, + Some(home) => home, + }; + let mut file = match File::open(home.join(".gitconfig")) { + Err(_) => return None, + Ok(file) => io::BufReader::new(file), + }; + let mut contents = vec![]; + file.read_to_end(&mut contents).ok().map(|_| contents) +} + +/// Returns the file contents of git's global config file, if one exists, in +/// the user's XDG_CONFIG_DIR directory. +fn gitconfig_xdg_contents() -> Option> { + let path = env::var_os("XDG_CONFIG_HOME") + .and_then(|x| if x.is_empty() { None } else { Some(PathBuf::from(x)) }) + .or_else(|| home_dir().map(|p| p.join(".config"))) + .map(|x| x.join("git/config")); + let mut file = match path.and_then(|p| File::open(p).ok()) { + None => return None, + Some(file) => io::BufReader::new(file), + }; + let mut contents = vec![]; + file.read_to_end(&mut contents).ok().map(|_| contents) +} + +/// Returns the default file path for a global .gitignore file. +/// +/// Specifically, this respects XDG_CONFIG_HOME. +fn excludes_file_default() -> Option { + env::var_os("XDG_CONFIG_HOME") + .and_then(|x| if x.is_empty() { None } else { Some(PathBuf::from(x)) }) + .or_else(|| home_dir().map(|p| p.join(".config"))) + .map(|x| x.join("git/ignore")) +} + +/// Extract git's `core.excludesfile` config setting from the raw file contents +/// given. +fn parse_excludes_file(data: &[u8]) -> Option { + // N.B. This is the lazy approach, and isn't technically correct, but + // probably works in more circumstances. I guess we would ideally have + // a full INI parser. Yuck. + lazy_static! { + static ref RE: Regex = Regex::new( + r"(?im)^\s*excludesfile\s*=\s*(.+)\s*$" + ).unwrap(); + }; + let caps = match RE.captures(data) { + None => return None, + Some(caps) => caps, + }; + str::from_utf8(&caps[1]).ok().map(|s| PathBuf::from(expand_tilde(s))) +} + +/// Expands ~ in file paths to the value of $HOME. +fn expand_tilde(path: &str) -> String { + let home = match home_dir() { + None => return path.to_string(), + Some(home) => home.to_string_lossy().into_owned(), + }; + path.replace("~", &home) +} + +/// Returns the location of the user's home directory. +fn home_dir() -> Option { + // We're fine with using env::home_dir for now. Its bugs are, IMO, pretty + // minor corner cases. We should still probably eventually migrate to + // the `dirs` crate to get a proper implementation. + #![allow(deprecated)] + env::home_dir() +} + +#[cfg(test)] +mod tests { + use std::path::Path; + use super::{Gitignore, GitignoreBuilder}; + + fn gi_from_str>(root: P, s: &str) -> Gitignore { + let mut builder = GitignoreBuilder::new(root); + builder.add_str(None, s).unwrap(); + builder.build().unwrap() + } + + macro_rules! ignored { + ($name:ident, $root:expr, $gi:expr, $path:expr) => { + ignored!($name, $root, $gi, $path, false); + }; + ($name:ident, $root:expr, $gi:expr, $path:expr, $is_dir:expr) => { + #[test] + fn $name() { + let gi = gi_from_str($root, $gi); + assert!(gi.matched($path, $is_dir).is_ignore()); + } + }; + } + + macro_rules! 
not_ignored { + ($name:ident, $root:expr, $gi:expr, $path:expr) => { + not_ignored!($name, $root, $gi, $path, false); + }; + ($name:ident, $root:expr, $gi:expr, $path:expr, $is_dir:expr) => { + #[test] + fn $name() { + let gi = gi_from_str($root, $gi); + assert!(!gi.matched($path, $is_dir).is_ignore()); + } + }; + } + + const ROOT: &'static str = "/home/foobar/rust/rg"; + + ignored!(ig1, ROOT, "months", "months"); + ignored!(ig2, ROOT, "*.lock", "Cargo.lock"); + ignored!(ig3, ROOT, "*.rs", "src/main.rs"); + ignored!(ig4, ROOT, "src/*.rs", "src/main.rs"); + ignored!(ig5, ROOT, "/*.c", "cat-file.c"); + ignored!(ig6, ROOT, "/src/*.rs", "src/main.rs"); + ignored!(ig7, ROOT, "!src/main.rs\n*.rs", "src/main.rs"); + ignored!(ig8, ROOT, "foo/", "foo", true); + ignored!(ig9, ROOT, "**/foo", "foo"); + ignored!(ig10, ROOT, "**/foo", "src/foo"); + ignored!(ig11, ROOT, "**/foo/**", "src/foo/bar"); + ignored!(ig12, ROOT, "**/foo/**", "wat/src/foo/bar/baz"); + ignored!(ig13, ROOT, "**/foo/bar", "foo/bar"); + ignored!(ig14, ROOT, "**/foo/bar", "src/foo/bar"); + ignored!(ig15, ROOT, "abc/**", "abc/x"); + ignored!(ig16, ROOT, "abc/**", "abc/x/y"); + ignored!(ig17, ROOT, "abc/**", "abc/x/y/z"); + ignored!(ig18, ROOT, "a/**/b", "a/b"); + ignored!(ig19, ROOT, "a/**/b", "a/x/b"); + ignored!(ig20, ROOT, "a/**/b", "a/x/y/b"); + ignored!(ig21, ROOT, r"\!xy", "!xy"); + ignored!(ig22, ROOT, r"\#foo", "#foo"); + ignored!(ig23, ROOT, "foo", "./foo"); + ignored!(ig24, ROOT, "target", "grep/target"); + ignored!(ig25, ROOT, "Cargo.lock", "./tabwriter-bin/Cargo.lock"); + ignored!(ig26, ROOT, "/foo/bar/baz", "./foo/bar/baz"); + ignored!(ig27, ROOT, "foo/", "xyz/foo", true); + ignored!(ig28, "./src", "/llvm/", "./src/llvm", true); + ignored!(ig29, ROOT, "node_modules/ ", "node_modules", true); + ignored!(ig30, ROOT, "**/", "foo/bar", true); + ignored!(ig31, ROOT, "path1/*", "path1/foo"); + ignored!(ig32, ROOT, ".a/b", ".a/b"); + ignored!(ig33, "./", ".a/b", ".a/b"); + ignored!(ig34, ".", ".a/b", ".a/b"); + ignored!(ig35, "./.", ".a/b", ".a/b"); + ignored!(ig36, "././", ".a/b", ".a/b"); + ignored!(ig37, "././.", ".a/b", ".a/b"); + ignored!(ig38, ROOT, "\\[", "["); + ignored!(ig39, ROOT, "\\?", "?"); + ignored!(ig40, ROOT, "\\*", "*"); + ignored!(ig41, ROOT, "\\a", "a"); + + not_ignored!(ignot1, ROOT, "amonths", "months"); + not_ignored!(ignot2, ROOT, "monthsa", "months"); + not_ignored!(ignot3, ROOT, "/src/*.rs", "src/grep/src/main.rs"); + not_ignored!(ignot4, ROOT, "/*.c", "mozilla-sha1/sha1.c"); + not_ignored!(ignot5, ROOT, "/src/*.rs", "src/grep/src/main.rs"); + not_ignored!(ignot6, ROOT, "*.rs\n!src/main.rs", "src/main.rs"); + not_ignored!(ignot7, ROOT, "foo/", "foo", false); + not_ignored!(ignot8, ROOT, "**/foo/**", "wat/src/afoo/bar/baz"); + not_ignored!(ignot9, ROOT, "**/foo/**", "wat/src/fooa/bar/baz"); + not_ignored!(ignot10, ROOT, "**/foo/bar", "foo/src/bar"); + not_ignored!(ignot11, ROOT, "#foo", "#foo"); + not_ignored!(ignot12, ROOT, "\n\n\n", "foo"); + not_ignored!(ignot13, ROOT, "foo/**", "foo", true); + not_ignored!( + ignot14, "./third_party/protobuf", "m4/ltoptions.m4", + "./third_party/protobuf/csharp/src/packages/repositories.config"); + not_ignored!(ignot15, ROOT, "!/bar", "foo/bar"); + not_ignored!(ignot16, ROOT, "*\n!**/", "foo", true); + not_ignored!(ignot17, ROOT, "src/*.rs", "src/grep/src/main.rs"); + not_ignored!(ignot18, ROOT, "path1/*", "path2/path1/foo"); + + fn bytes(s: &str) -> Vec { + s.to_string().into_bytes() + } + + fn path_string>(path: P) -> String { + 
path.as_ref().to_str().unwrap().to_string() + } + + #[test] + fn parse_excludes_file1() { + let data = bytes("[core]\nexcludesFile = /foo/bar"); + let got = super::parse_excludes_file(&data).unwrap(); + assert_eq!(path_string(got), "/foo/bar"); + } + + #[test] + fn parse_excludes_file2() { + let data = bytes("[core]\nexcludesFile = ~/foo/bar"); + let got = super::parse_excludes_file(&data).unwrap(); + assert_eq!(path_string(got), super::expand_tilde("~/foo/bar")); + } + + #[test] + fn parse_excludes_file3() { + let data = bytes("[core]\nexcludeFile = /foo/bar"); + assert!(super::parse_excludes_file(&data).is_none()); + } + + // See: https://github.com/BurntSushi/ripgrep/issues/106 + #[test] + fn regression_106() { + gi_from_str("/", " "); + } + + #[test] + fn case_insensitive() { + let gi = GitignoreBuilder::new(ROOT) + .case_insensitive(true).unwrap() + .add_str(None, "*.html").unwrap() + .build().unwrap(); + assert!(gi.matched("foo.html", false).is_ignore()); + assert!(gi.matched("foo.HTML", false).is_ignore()); + assert!(!gi.matched("foo.htm", false).is_ignore()); + assert!(!gi.matched("foo.HTM", false).is_ignore()); + } + + ignored!(cs1, ROOT, "*.html", "foo.html"); + not_ignored!(cs2, ROOT, "*.html", "foo.HTML"); + not_ignored!(cs3, ROOT, "*.html", "foo.htm"); + not_ignored!(cs4, ROOT, "*.html", "foo.HTM"); +} diff --git a/ignore/src/lib.rs b/ignore/src/lib.rs new file mode 100644 index 000000000..ee3136850 --- /dev/null +++ b/ignore/src/lib.rs @@ -0,0 +1,444 @@ +/*! +The ignore crate provides a fast recursive directory iterator that respects +various filters such as globs, file types and `.gitignore` files. The precise +matching rules and precedence is explained in the documentation for +`WalkBuilder`. + +Secondarily, this crate exposes gitignore and file type matchers for use cases +that demand more fine-grained control. + +# Example + +This example shows the most basic usage of this crate. This code will +recursively traverse the current directory while automatically filtering out +files and directories according to ignore globs found in files like +`.ignore` and `.gitignore`: + + +```rust,no_run +use ignore::Walk; + +for result in Walk::new("./") { + // Each item yielded by the iterator is either a directory entry or an + // error, so either print the path or the error. + match result { + Ok(entry) => println!("{}", entry.path().display()), + Err(err) => println!("ERROR: {}", err), + } +} +``` + +# Example: advanced + +By default, the recursive directory iterator will ignore hidden files and +directories. This can be disabled by building the iterator with `WalkBuilder`: + +```rust,no_run +use ignore::WalkBuilder; + +for result in WalkBuilder::new("./").hidden(false).build() { + println!("{:?}", result); +} +``` + +See the documentation for `WalkBuilder` for many other options. +*/ + +#![deny(missing_docs)] + +extern crate crossbeam_channel as channel; +extern crate globset; +#[macro_use] +extern crate lazy_static; +#[macro_use] +extern crate log; +extern crate memchr; +extern crate regex; +extern crate same_file; +#[cfg(test)] +extern crate tempdir; +extern crate thread_local; +extern crate walkdir; +#[cfg(windows)] +extern crate winapi_util; + +use std::error; +use std::fmt; +use std::io; +use std::path::{Path, PathBuf}; + +pub use walk::{DirEntry, Walk, WalkBuilder, WalkParallel, WalkState}; + +mod dir; +pub mod gitignore; +mod pathutil; +pub mod overrides; +pub mod types; +mod walk; + +/// Represents an error that can occur when parsing a gitignore file. 
+#[derive(Debug)] +pub enum Error { + /// A collection of "soft" errors. These occur when adding an ignore + /// file partially succeeded. + Partial(Vec), + /// An error associated with a specific line number. + WithLineNumber { + /// The line number. + line: u64, + /// The underlying error. + err: Box, + }, + /// An error associated with a particular file path. + WithPath { + /// The file path. + path: PathBuf, + /// The underlying error. + err: Box, + }, + /// An error associated with a particular directory depth when recursively + /// walking a directory. + WithDepth { + /// The directory depth. + depth: usize, + /// The underlying error. + err: Box, + }, + /// An error that occurs when a file loop is detected when traversing + /// symbolic links. + Loop { + /// The ancestor file path in the loop. + ancestor: PathBuf, + /// The child file path in the loop. + child: PathBuf, + }, + /// An error that occurs when doing I/O, such as reading an ignore file. + Io(io::Error), + /// An error that occurs when trying to parse a glob. + Glob { + /// The original glob that caused this error. This glob, when + /// available, always corresponds to the glob provided by an end user. + /// e.g., It is the glob as written in a `.gitignore` file. + /// + /// (This glob may be distinct from the glob that is actually + /// compiled, after accounting for `gitignore` semantics.) + glob: Option, + /// The underlying glob error as a string. + err: String, + }, + /// A type selection for a file type that is not defined. + UnrecognizedFileType(String), + /// A user specified file type definition could not be parsed. + InvalidDefinition, +} + +impl Clone for Error { + fn clone(&self) -> Error { + match *self { + Error::Partial(ref errs) => Error::Partial(errs.clone()), + Error::WithLineNumber { line, ref err } => { + Error::WithLineNumber { line: line, err: err.clone() } + } + Error::WithPath { ref path, ref err } => { + Error::WithPath { path: path.clone(), err: err.clone() } + } + Error::WithDepth { depth, ref err } => { + Error::WithDepth { depth: depth, err: err.clone() } + } + Error::Loop { ref ancestor, ref child } => { + Error::Loop { + ancestor: ancestor.clone(), + child: child.clone() + } + } + Error::Io(ref err) => { + match err.raw_os_error() { + Some(e) => Error::Io(io::Error::from_raw_os_error(e)), + None => { + Error::Io(io::Error::new(err.kind(), err.to_string())) + } + } + } + Error::Glob { ref glob, ref err } => { + Error::Glob { glob: glob.clone(), err: err.clone() } + } + Error::UnrecognizedFileType(ref err) => { + Error::UnrecognizedFileType(err.clone()) + } + Error::InvalidDefinition => Error::InvalidDefinition, + } + } +} + +impl Error { + /// Returns true if this is a partial error. + /// + /// A partial error occurs when only some operations failed while others + /// may have succeeded. For example, an ignore file may contain an invalid + /// glob among otherwise valid globs. + pub fn is_partial(&self) -> bool { + match *self { + Error::Partial(_) => true, + Error::WithLineNumber { ref err, .. } => err.is_partial(), + Error::WithPath { ref err, .. } => err.is_partial(), + Error::WithDepth { ref err, .. } => err.is_partial(), + _ => false, + } + } + + /// Returns true if this error is exclusively an I/O error. + pub fn is_io(&self) -> bool { + match *self { + Error::Partial(ref errs) => errs.len() == 1 && errs[0].is_io(), + Error::WithLineNumber { ref err, .. } => err.is_io(), + Error::WithPath { ref err, .. } => err.is_io(), + Error::WithDepth { ref err, .. 
} => err.is_io(), + Error::Loop { .. } => false, + Error::Io(_) => true, + Error::Glob { .. } => false, + Error::UnrecognizedFileType(_) => false, + Error::InvalidDefinition => false, + } + } + + /// Returns a depth associated with recursively walking a directory (if + /// this error was generated from a recursive directory iterator). + pub fn depth(&self) -> Option { + match *self { + Error::WithPath { ref err, .. } => err.depth(), + Error::WithDepth { depth, .. } => Some(depth), + _ => None, + } + } + + /// Turn an error into a tagged error with the given file path. + fn with_path>(self, path: P) -> Error { + Error::WithPath { + path: path.as_ref().to_path_buf(), + err: Box::new(self), + } + } + + /// Turn an error into a tagged error with the given depth. + fn with_depth(self, depth: usize) -> Error { + Error::WithDepth { + depth: depth, + err: Box::new(self), + } + } + + /// Turn an error into a tagged error with the given file path and line + /// number. If path is empty, then it is omitted from the error. + fn tagged>(self, path: P, lineno: u64) -> Error { + let errline = Error::WithLineNumber { + line: lineno, + err: Box::new(self), + }; + if path.as_ref().as_os_str().is_empty() { + return errline; + } + errline.with_path(path) + } + + /// Build an error from a walkdir error. + fn from_walkdir(err: walkdir::Error) -> Error { + let depth = err.depth(); + if let (Some(anc), Some(child)) = (err.loop_ancestor(), err.path()) { + return Error::WithDepth { + depth: depth, + err: Box::new(Error::Loop { + ancestor: anc.to_path_buf(), + child: child.to_path_buf(), + }), + }; + } + let path = err.path().map(|p| p.to_path_buf()); + let mut ig_err = Error::Io(io::Error::from(err)); + if let Some(path) = path { + ig_err = Error::WithPath { + path: path, + err: Box::new(ig_err), + }; + } + ig_err + } +} + +impl error::Error for Error { + fn description(&self) -> &str { + match *self { + Error::Partial(_) => "partial error", + Error::WithLineNumber { ref err, .. } => err.description(), + Error::WithPath { ref err, .. } => err.description(), + Error::WithDepth { ref err, .. } => err.description(), + Error::Loop { .. } => "file system loop found", + Error::Io(ref err) => err.description(), + Error::Glob { ref err, .. } => err, + Error::UnrecognizedFileType(_) => "unrecognized file type", + Error::InvalidDefinition => "invalid definition", + } + } +} + +impl fmt::Display for Error { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match *self { + Error::Partial(ref errs) => { + let msgs: Vec = + errs.iter().map(|err| err.to_string()).collect(); + write!(f, "{}", msgs.join("\n")) + } + Error::WithLineNumber { line, ref err } => { + write!(f, "line {}: {}", line, err) + } + Error::WithPath { ref path, ref err } => { + write!(f, "{}: {}", path.display(), err) + } + Error::WithDepth { ref err, .. 
} => err.fmt(f), + Error::Loop { ref ancestor, ref child } => { + write!(f, "File system loop found: \ + {} points to an ancestor {}", + child.display(), ancestor.display()) + } + Error::Io(ref err) => err.fmt(f), + Error::Glob { glob: None, ref err } => write!(f, "{}", err), + Error::Glob { glob: Some(ref glob), ref err } => { + write!(f, "error parsing glob '{}': {}", glob, err) + } + Error::UnrecognizedFileType(ref ty) => { + write!(f, "unrecognized file type: {}", ty) + } + Error::InvalidDefinition => { + write!(f, "invalid definition (format is type:glob, e.g., \ + html:*.html)") + } + } + } +} + +impl From for Error { + fn from(err: io::Error) -> Error { + Error::Io(err) + } +} + +#[derive(Debug, Default)] +struct PartialErrorBuilder(Vec); + +impl PartialErrorBuilder { + fn push(&mut self, err: Error) { + self.0.push(err); + } + + fn push_ignore_io(&mut self, err: Error) { + if !err.is_io() { + self.push(err); + } + } + + fn maybe_push(&mut self, err: Option) { + if let Some(err) = err { + self.push(err); + } + } + + fn maybe_push_ignore_io(&mut self, err: Option) { + if let Some(err) = err { + self.push_ignore_io(err); + } + } + + fn into_error_option(mut self) -> Option { + if self.0.is_empty() { + None + } else if self.0.len() == 1 { + Some(self.0.pop().unwrap()) + } else { + Some(Error::Partial(self.0)) + } + } +} + +/// The result of a glob match. +/// +/// The type parameter `T` typically refers to a type that provides more +/// information about a particular match. For example, it might identify +/// the specific gitignore file and the specific glob pattern that caused +/// the match. +#[derive(Clone, Debug)] +pub enum Match { + /// The path didn't match any glob. + None, + /// The highest precedent glob matched indicates the path should be + /// ignored. + Ignore(T), + /// The highest precedent glob matched indicates the path should be + /// whitelisted. + Whitelist(T), +} + +impl Match { + /// Returns true if the match result didn't match any globs. + pub fn is_none(&self) -> bool { + match *self { + Match::None => true, + Match::Ignore(_) | Match::Whitelist(_) => false, + } + } + + /// Returns true if the match result implies the path should be ignored. + pub fn is_ignore(&self) -> bool { + match *self { + Match::Ignore(_) => true, + Match::None | Match::Whitelist(_) => false, + } + } + + /// Returns true if the match result implies the path should be + /// whitelisted. + pub fn is_whitelist(&self) -> bool { + match *self { + Match::Whitelist(_) => true, + Match::None | Match::Ignore(_) => false, + } + } + + /// Inverts the match so that `Ignore` becomes `Whitelist` and + /// `Whitelist` becomes `Ignore`. A non-match remains the same. + pub fn invert(self) -> Match { + match self { + Match::None => Match::None, + Match::Ignore(t) => Match::Whitelist(t), + Match::Whitelist(t) => Match::Ignore(t), + } + } + + /// Return the value inside this match if it exists. + pub fn inner(&self) -> Option<&T> { + match *self { + Match::None => None, + Match::Ignore(ref t) => Some(t), + Match::Whitelist(ref t) => Some(t), + } + } + + /// Apply the given function to the value inside this match. + /// + /// If the match has no value, then return the match unchanged. + pub fn map U>(self, f: F) -> Match { + match self { + Match::None => Match::None, + Match::Ignore(t) => Match::Ignore(f(t)), + Match::Whitelist(t) => Match::Whitelist(f(t)), + } + } + + /// Return the match if it is not none. Otherwise, return other. 
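Since `Match` and its combinators are used by every matcher in this crate, a small sketch of how they compose; the `&str` payload is only a stand-in for the richer glob metadata real matchers attach:

```rust
use ignore::Match;

fn main() {
    let m: Match<&str> = Match::Ignore("*.log");
    assert!(m.is_ignore());
    assert_eq!(m.inner(), Some(&"*.log"));

    // `invert` swaps Ignore and Whitelist; `map` transforms the payload.
    let w = m.clone().invert().map(|glob| glob.len());
    assert!(w.is_whitelist());
    assert_eq!(w.inner(), Some(&5));
}
```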
+ pub fn or(self, other: Self) -> Self { + if self.is_none() { + other + } else { + self + } + } +} diff --git a/ignore/src/overrides.rs b/ignore/src/overrides.rs new file mode 100644 index 000000000..c63532af8 --- /dev/null +++ b/ignore/src/overrides.rs @@ -0,0 +1,259 @@ +/*! +The overrides module provides a way to specify a set of override globs. +This provides functionality similar to `--include` or `--exclude` in command +line tools. +*/ + +use std::path::Path; + +use gitignore::{self, Gitignore, GitignoreBuilder}; +use {Error, Match}; + +/// Glob represents a single glob in an override matcher. +/// +/// This is used to report information about the highest precedent glob +/// that matched. +/// +/// Note that not all matches necessarily correspond to a specific glob. For +/// example, if there are one or more whitelist globs and a file path doesn't +/// match any glob in the set, then the file path is considered to be ignored. +/// +/// The lifetime `'a` refers to the lifetime of the matcher that produced +/// this glob. +#[derive(Clone, Debug)] +pub struct Glob<'a>(GlobInner<'a>); + +#[derive(Clone, Debug)] +enum GlobInner<'a> { + /// No glob matched, but the file path should still be ignored. + UnmatchedIgnore, + /// A glob matched. + Matched(&'a gitignore::Glob), +} + +impl<'a> Glob<'a> { + fn unmatched() -> Glob<'a> { + Glob(GlobInner::UnmatchedIgnore) + } +} + +/// Manages a set of overrides provided explicitly by the end user. +#[derive(Clone, Debug)] +pub struct Override(Gitignore); + +impl Override { + /// Returns an empty matcher that never matches any file path. + pub fn empty() -> Override { + Override(Gitignore::empty()) + } + + /// Returns the directory of this override set. + /// + /// All matches are done relative to this path. + pub fn path(&self) -> &Path { + self.0.path() + } + + /// Returns true if and only if this matcher is empty. + /// + /// When a matcher is empty, it will never match any file path. + pub fn is_empty(&self) -> bool { + self.0.is_empty() + } + + /// Returns the total number of ignore globs. + pub fn num_ignores(&self) -> u64 { + self.0.num_whitelists() + } + + /// Returns the total number of whitelisted globs. + pub fn num_whitelists(&self) -> u64 { + self.0.num_ignores() + } + + /// Returns whether the given file path matched a pattern in this override + /// matcher. + /// + /// `is_dir` should be true if the path refers to a directory and false + /// otherwise. + /// + /// If there are no overrides, then this always returns `Match::None`. + /// + /// If there is at least one whitelist override and `is_dir` is false, then + /// this never returns `Match::None`, since non-matches are interpreted as + /// ignored. + /// + /// The given path is matched to the globs relative to the path given + /// when building the override matcher. Specifically, before matching + /// `path`, its prefix (as determined by a common suffix of the directory + /// given) is stripped. If there is no common suffix/prefix overlap, then + /// `path` is assumed to reside in the same directory as the root path for + /// this set of overrides. + pub fn matched<'a, P: AsRef>( + &'a self, + path: P, + is_dir: bool, + ) -> Match> { + if self.is_empty() { + return Match::None; + } + let mat = self.0.matched(path, is_dir).invert(); + if mat.is_none() && self.num_whitelists() > 0 && !is_dir { + return Match::Ignore(Glob::unmatched()); + } + mat.map(move |giglob| Glob(GlobInner::Matched(giglob))) + } +} + +/// Builds a matcher for a set of glob overrides. 
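The `matched` semantics described above are easiest to see with a concrete builder. A sketch with an illustrative root directory; the behaviour for unmatched paths follows the `Match::Ignore(Glob::unmatched())` branch shown above:

```rust
use ignore::overrides::OverrideBuilder;

fn main() {
    let mut builder = OverrideBuilder::new("/home/foobar/project");
    builder.add("*.rs").unwrap();     // whitelist Rust sources
    builder.add("!*.lock").unwrap();  // `!` inverts: ignore lock files
    let ov = builder.build().unwrap();

    assert!(ov.matched("src/main.rs", false).is_whitelist());
    assert!(ov.matched("Cargo.lock", false).is_ignore());
    // Unmatched files are ignored once at least one whitelist glob exists...
    assert!(ov.matched("README.md", false).is_ignore());
    // ...but unmatched directories are left alone.
    assert!(ov.matched("src", true).is_none());
}
```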
+pub struct OverrideBuilder { + builder: GitignoreBuilder, +} + +impl OverrideBuilder { + /// Create a new override builder. + /// + /// Matching is done relative to the directory path provided. + pub fn new>(path: P) -> OverrideBuilder { + OverrideBuilder { + builder: GitignoreBuilder::new(path), + } + } + + /// Builds a new override matcher from the globs added so far. + /// + /// Once a matcher is built, no new globs can be added to it. + pub fn build(&self) -> Result { + Ok(Override(self.builder.build()?)) + } + + /// Add a glob to the set of overrides. + /// + /// Globs provided here have precisely the same semantics as a single + /// line in a `gitignore` file, where the meaning of `!` is inverted: + /// namely, `!` at the beginning of a glob will ignore a file. Without `!`, + /// all matches of the glob provided are treated as whitelist matches. + pub fn add(&mut self, glob: &str) -> Result<&mut OverrideBuilder, Error> { + self.builder.add_line(None, glob)?; + Ok(self) + } + + /// Toggle whether the globs should be matched case insensitively or not. + /// + /// When this option is changed, only globs added after the change will be affected. + /// + /// This is disabled by default. + pub fn case_insensitive( + &mut self, yes: bool + ) -> Result<&mut OverrideBuilder, Error> { + self.builder.case_insensitive(yes)?; + Ok(self) + } +} + +#[cfg(test)] +mod tests { + use super::{Override, OverrideBuilder}; + + const ROOT: &'static str = "/home/andrew/foo"; + + fn ov(globs: &[&str]) -> Override { + let mut builder = OverrideBuilder::new(ROOT); + for glob in globs { + builder.add(glob).unwrap(); + } + builder.build().unwrap() + } + + #[test] + fn empty() { + let ov = ov(&[]); + assert!(ov.matched("a.foo", false).is_none()); + assert!(ov.matched("a", false).is_none()); + assert!(ov.matched("", false).is_none()); + } + + #[test] + fn simple() { + let ov = ov(&["*.foo", "!*.bar"]); + assert!(ov.matched("a.foo", false).is_whitelist()); + assert!(ov.matched("a.foo", true).is_whitelist()); + assert!(ov.matched("a.rs", false).is_ignore()); + assert!(ov.matched("a.rs", true).is_none()); + assert!(ov.matched("a.bar", false).is_ignore()); + assert!(ov.matched("a.bar", true).is_ignore()); + } + + #[test] + fn only_ignores() { + let ov = ov(&["!*.bar"]); + assert!(ov.matched("a.rs", false).is_none()); + assert!(ov.matched("a.rs", true).is_none()); + assert!(ov.matched("a.bar", false).is_ignore()); + assert!(ov.matched("a.bar", true).is_ignore()); + } + + #[test] + fn precedence() { + let ov = ov(&["*.foo", "!*.bar.foo"]); + assert!(ov.matched("a.foo", false).is_whitelist()); + assert!(ov.matched("a.baz", false).is_ignore()); + assert!(ov.matched("a.bar.foo", false).is_ignore()); + } + + #[test] + fn gitignore() { + let ov = ov(&["/foo", "bar/*.rs", "baz/**"]); + assert!(ov.matched("bar/lib.rs", false).is_whitelist()); + assert!(ov.matched("bar/wat/lib.rs", false).is_ignore()); + assert!(ov.matched("wat/bar/lib.rs", false).is_ignore()); + assert!(ov.matched("foo", false).is_whitelist()); + assert!(ov.matched("wat/foo", false).is_ignore()); + assert!(ov.matched("baz", false).is_ignore()); + assert!(ov.matched("baz/a", false).is_whitelist()); + assert!(ov.matched("baz/a/b", false).is_whitelist()); + } + + #[test] + fn allow_directories() { + // This tests that directories are NOT ignored when they are unmatched. 
+ let ov = ov(&["*.rs"]); + assert!(ov.matched("foo.rs", false).is_whitelist()); + assert!(ov.matched("foo.c", false).is_ignore()); + assert!(ov.matched("foo", false).is_ignore()); + assert!(ov.matched("foo", true).is_none()); + assert!(ov.matched("src/foo.rs", false).is_whitelist()); + assert!(ov.matched("src/foo.c", false).is_ignore()); + assert!(ov.matched("src/foo", false).is_ignore()); + assert!(ov.matched("src/foo", true).is_none()); + } + + #[test] + fn absolute_path() { + let ov = ov(&["!/bar"]); + assert!(ov.matched("./foo/bar", false).is_none()); + } + + #[test] + fn case_insensitive() { + let ov = OverrideBuilder::new(ROOT) + .case_insensitive(true).unwrap() + .add("*.html").unwrap() + .build().unwrap(); + assert!(ov.matched("foo.html", false).is_whitelist()); + assert!(ov.matched("foo.HTML", false).is_whitelist()); + assert!(ov.matched("foo.htm", false).is_ignore()); + assert!(ov.matched("foo.HTM", false).is_ignore()); + } + + #[test] + fn default_case_sensitive() { + let ov = OverrideBuilder::new(ROOT) + .add("*.html").unwrap() + .build().unwrap(); + assert!(ov.matched("foo.html", false).is_whitelist()); + assert!(ov.matched("foo.HTML", false).is_ignore()); + assert!(ov.matched("foo.htm", false).is_ignore()); + assert!(ov.matched("foo.HTM", false).is_ignore()); + } +} diff --git a/ignore/src/pathutil.rs b/ignore/src/pathutil.rs new file mode 100644 index 000000000..bfd43de3e --- /dev/null +++ b/ignore/src/pathutil.rs @@ -0,0 +1,108 @@ +use std::ffi::OsStr; +use std::path::Path; + +/// Returns true if and only if this file path is considered to be hidden. +#[cfg(unix)] +pub fn is_hidden>(path: P) -> bool { + use std::os::unix::ffi::OsStrExt; + + if let Some(name) = file_name(path.as_ref()) { + name.as_bytes().get(0) == Some(&b'.') + } else { + false + } +} + +/// Returns true if and only if this file path is considered to be hidden. +#[cfg(not(unix))] +pub fn is_hidden>(path: P) -> bool { + if let Some(name) = file_name(path.as_ref()) { + name.to_str().map(|s| s.starts_with(".")).unwrap_or(false) + } else { + false + } +} + +/// Strip `prefix` from the `path` and return the remainder. +/// +/// If `path` doesn't have a prefix `prefix`, then return `None`. +#[cfg(unix)] +pub fn strip_prefix<'a, P: AsRef + ?Sized>( + prefix: &'a P, + path: &'a Path, +) -> Option<&'a Path> { + use std::os::unix::ffi::OsStrExt; + + let prefix = prefix.as_ref().as_os_str().as_bytes(); + let path = path.as_os_str().as_bytes(); + if prefix.len() > path.len() || prefix != &path[0..prefix.len()] { + None + } else { + Some(&Path::new(OsStr::from_bytes(&path[prefix.len()..]))) + } +} + +/// Strip `prefix` from the `path` and return the remainder. +/// +/// If `path` doesn't have a prefix `prefix`, then return `None`. +#[cfg(not(unix))] +pub fn strip_prefix<'a, P: AsRef + ?Sized>( + prefix: &'a P, + path: &'a Path, +) -> Option<&'a Path> { + path.strip_prefix(prefix).ok() +} + +/// Returns true if this file path is just a file name. i.e., Its parent is +/// the empty string. +#[cfg(unix)] +pub fn is_file_name>(path: P) -> bool { + use std::os::unix::ffi::OsStrExt; + use memchr::memchr; + + let path = path.as_ref().as_os_str().as_bytes(); + memchr(b'/', path).is_none() +} + +/// Returns true if this file path is just a file name. i.e., Its parent is +/// the empty string. +#[cfg(not(unix))] +pub fn is_file_name>(path: P) -> bool { + path.as_ref().parent().map(|p| p.as_os_str().is_empty()).unwrap_or(false) +} + +/// The final component of the path, if it is a normal file. 
+/// +/// If the path terminates in ., .., or consists solely of a root of prefix, +/// file_name will return None. +#[cfg(unix)] +pub fn file_name<'a, P: AsRef + ?Sized>( + path: &'a P, +) -> Option<&'a OsStr> { + use std::os::unix::ffi::OsStrExt; + use memchr::memrchr; + + let path = path.as_ref().as_os_str().as_bytes(); + if path.is_empty() { + return None; + } else if path.len() == 1 && path[0] == b'.' { + return None; + } else if path.last() == Some(&b'.') { + return None; + } else if path.len() >= 2 && &path[path.len() - 2..] == &b".."[..] { + return None; + } + let last_slash = memrchr(b'/', path).map(|i| i + 1).unwrap_or(0); + Some(OsStr::from_bytes(&path[last_slash..])) +} + +/// The final component of the path, if it is a normal file. +/// +/// If the path terminates in ., .., or consists solely of a root of prefix, +/// file_name will return None. +#[cfg(not(unix))] +pub fn file_name<'a, P: AsRef + ?Sized>( + path: &'a P, +) -> Option<&'a OsStr> { + path.as_ref().file_name() +} diff --git a/ignore/src/types.rs b/ignore/src/types.rs new file mode 100644 index 000000000..3c651ab29 --- /dev/null +++ b/ignore/src/types.rs @@ -0,0 +1,786 @@ +/*! +The types module provides a way of associating globs on file names to file +types. + +This can be used to match specific types of files. For example, among +the default file types provided, the Rust file type is defined to be `*.rs` +with name `rust`. Similarly, the C file type is defined to be `*.{c,h}` with +name `c`. + +Note that the set of default types may change over time. + +# Example + +This shows how to create and use a simple file type matcher using the default +file types defined in this crate. + +``` +use ignore::types::TypesBuilder; + +let mut builder = TypesBuilder::new(); +builder.add_defaults(); +builder.select("rust"); +let matcher = builder.build().unwrap(); + +assert!(matcher.matched("foo.rs", false).is_whitelist()); +assert!(matcher.matched("foo.c", false).is_ignore()); +``` + +# Example: negation + +This is like the previous example, but shows how negating a file type works. +That is, this will let us match file paths that *don't* correspond to a +particular file type. + +``` +use ignore::types::TypesBuilder; + +let mut builder = TypesBuilder::new(); +builder.add_defaults(); +builder.negate("c"); +let matcher = builder.build().unwrap(); + +assert!(matcher.matched("foo.rs", false).is_none()); +assert!(matcher.matched("foo.c", false).is_ignore()); +``` + +# Example: custom file type definitions + +This shows how to extend this library default file type definitions with +your own. + +``` +use ignore::types::TypesBuilder; + +let mut builder = TypesBuilder::new(); +builder.add_defaults(); +builder.add("foo", "*.foo"); +// Another way of adding a file type definition. +// This is useful when accepting input from an end user. +builder.add_def("bar:*.bar"); +// Note: we only select `foo`, not `bar`. +builder.select("foo"); +let matcher = builder.build().unwrap(); + +assert!(matcher.matched("x.foo", false).is_whitelist()); +// This is ignored because we only selected the `foo` file type. +assert!(matcher.matched("x.bar", false).is_ignore()); +``` + +We can also add file type definitions based on other definitions. 
+ +``` +use ignore::types::TypesBuilder; + +let mut builder = TypesBuilder::new(); +builder.add_defaults(); +builder.add("foo", "*.foo"); +builder.add_def("bar:include:foo,cpp"); +builder.select("bar"); +let matcher = builder.build().unwrap(); + +assert!(matcher.matched("x.foo", false).is_whitelist()); +assert!(matcher.matched("y.cpp", false).is_whitelist()); +``` +*/ + +use std::cell::RefCell; +use std::collections::HashMap; +use std::path::Path; +use std::sync::Arc; + +use globset::{GlobBuilder, GlobSet, GlobSetBuilder}; +use regex::Regex; +use thread_local::ThreadLocal; + +use pathutil::file_name; +use {Error, Match}; + +const DEFAULT_TYPES: &'static [(&'static str, &'static [&'static str])] = &[ + ("agda", &["*.agda", "*.lagda"]), + ("ats", &["*.ats", "*.dats", "*.sats", "*.hats"]), + ("aidl", &["*.aidl"]), + ("amake", &["*.mk", "*.bp"]), + ("asciidoc", &["*.adoc", "*.asc", "*.asciidoc"]), + ("asm", &["*.asm", "*.s", "*.S"]), + ("avro", &["*.avdl", "*.avpr", "*.avsc"]), + ("awk", &["*.awk"]), + ("bazel", &["*.bzl", "WORKSPACE", "BUILD"]), + ("bitbake", &["*.bb", "*.bbappend", "*.bbclass", "*.conf", "*.inc"]), + ("bzip2", &["*.bz2"]), + ("c", &["*.c", "*.h", "*.H", "*.cats"]), + ("cabal", &["*.cabal"]), + ("cbor", &["*.cbor"]), + ("ceylon", &["*.ceylon"]), + ("clojure", &["*.clj", "*.cljc", "*.cljs", "*.cljx"]), + ("cmake", &["*.cmake", "CMakeLists.txt"]), + ("coffeescript", &["*.coffee"]), + ("creole", &["*.creole"]), + ("config", &["*.cfg", "*.conf", "*.config", "*.ini"]), + ("cpp", &[ + "*.C", "*.cc", "*.cpp", "*.cxx", + "*.h", "*.H", "*.hh", "*.hpp", "*.hxx", "*.inl", + ]), + ("crystal", &["Projectfile", "*.cr"]), + ("cs", &["*.cs"]), + ("csharp", &["*.cs"]), + ("cshtml", &["*.cshtml"]), + ("css", &["*.css", "*.scss"]), + ("csv", &["*.csv"]), + ("cython", &["*.pyx"]), + ("dart", &["*.dart"]), + ("d", &["*.d"]), + ("dhall", &["*.dhall"]), + ("docker", &["*Dockerfile*"]), + ("elisp", &["*.el"]), + ("elixir", &["*.ex", "*.eex", "*.exs"]), + ("elm", &["*.elm"]), + ("erlang", &["*.erl", "*.hrl"]), + ("fidl", &["*.fidl"]), + ("fish", &["*.fish"]), + ("fortran", &[ + "*.f", "*.F", "*.f77", "*.F77", "*.pfo", + "*.f90", "*.F90", "*.f95", "*.F95", + ]), + ("fsharp", &["*.fs", "*.fsx", "*.fsi"]), + ("gn", &["*.gn", "*.gni"]), + ("go", &["*.go"]), + ("gzip", &["*.gz"]), + ("groovy", &["*.groovy", "*.gradle"]), + ("h", &["*.h", "*.hpp"]), + ("hbs", &["*.hbs"]), + ("haskell", &["*.hs", "*.lhs", "*.cpphs", "*.c2hs", "*.hsc"]), + ("hs", &["*.hs", "*.lhs"]), + ("html", &["*.htm", "*.html", "*.ejs"]), + ("idris", &["*.idr", "*.lidr"]), + ("java", &["*.java", "*.jsp"]), + ("jinja", &["*.j2", "*.jinja", "*.jinja2"]), + ("js", &[ + "*.js", "*.jsx", "*.vue", + ]), + ("json", &["*.json", "composer.lock"]), + ("jsonl", &["*.jsonl"]), + ("julia", &["*.jl"]), + ("jupyter", &["*.ipynb", "*.jpynb"]), + ("jl", &["*.jl"]), + ("kotlin", &["*.kt", "*.kts"]), + ("less", &["*.less"]), + ("license", &[ + // General + "COPYING", "COPYING[.-]*", + "COPYRIGHT", "COPYRIGHT[.-]*", + "EULA", "EULA[.-]*", + "licen[cs]e", "licen[cs]e.*", + "LICEN[CS]E", "LICEN[CS]E[.-]*", "*[.-]LICEN[CS]E*", + "NOTICE", "NOTICE[.-]*", + "PATENTS", "PATENTS[.-]*", + "UNLICEN[CS]E", "UNLICEN[CS]E[.-]*", + // GPL (gpl.txt, etc.) + "agpl[.-]*", + "gpl[.-]*", + "lgpl[.-]*", + // Other license-specific (APACHE-2.0.txt, etc.) 
+ "AGPL-*[0-9]*", + "APACHE-*[0-9]*", + "BSD-*[0-9]*", + "CC-BY-*", + "GFDL-*[0-9]*", + "GNU-*[0-9]*", + "GPL-*[0-9]*", + "LGPL-*[0-9]*", + "MIT-*[0-9]*", + "MPL-*[0-9]*", + "OFL-*[0-9]*", + ]), + ("lisp", &["*.el", "*.jl", "*.lisp", "*.lsp", "*.sc", "*.scm"]), + ("log", &["*.log"]), + ("lua", &["*.lua"]), + ("lzma", &["*.lzma"]), + ("lz4", &["*.lz4"]), + ("m4", &["*.ac", "*.m4"]), + ("make", &[ + "gnumakefile", "Gnumakefile", "GNUmakefile", + "makefile", "Makefile", + "*.mk", "*.mak" + ]), + ("mako", &["*.mako", "*.mao"]), + ("markdown", &["*.markdown", "*.md", "*.mdown", "*.mkdn"]), + ("md", &["*.markdown", "*.md", "*.mdown", "*.mkdn"]), + ("man", &["*.[0-9lnpx]", "*.[0-9][cEFMmpSx]"]), + ("matlab", &["*.m"]), + ("mk", &["mkfile"]), + ("ml", &["*.ml"]), + ("msbuild", &[ + "*.csproj", "*.fsproj", "*.vcxproj", "*.proj", "*.props", "*.targets" + ]), + ("nim", &["*.nim"]), + ("nix", &["*.nix"]), + ("objc", &["*.h", "*.m"]), + ("objcpp", &["*.h", "*.mm"]), + ("ocaml", &["*.ml", "*.mli", "*.mll", "*.mly"]), + ("org", &["*.org"]), + ("pascal", &["*.pas", "*.dpr", "*.lpr", "*.pp", "*.inc"]), + ("perl", &["*.perl", "*.pl", "*.PL", "*.plh", "*.plx", "*.pm", "*.t"]), + ("pdf", &["*.pdf"]), + ("php", &["*.php", "*.php3", "*.php4", "*.php5", "*.phtml"]), + ("pod", &["*.pod"]), + ("protobuf", &["*.proto"]), + ("ps", &["*.cdxml", "*.ps1", "*.ps1xml", "*.psd1", "*.psm1"]), + ("puppet", &["*.erb", "*.pp", "*.rb"]), + ("purs", &["*.purs"]), + ("py", &["*.py"]), + ("qmake", &["*.pro", "*.pri", "*.prf"]), + ("readme", &["README*", "*README"]), + ("r", &["*.R", "*.r", "*.Rmd", "*.Rnw"]), + ("rdoc", &["*.rdoc"]), + ("rst", &["*.rst"]), + ("ruby", &["Gemfile", "*.gemspec", ".irbrc", "Rakefile", "*.rb"]), + ("rust", &["*.rs"]), + ("sass", &["*.sass", "*.scss"]), + ("scala", &["*.scala", "*.sbt"]), + ("sh", &[ + // Portable/misc. 
init files + ".login", ".logout", ".profile", "profile", + // bash-specific init files + ".bash_login", "bash_login", + ".bash_logout", "bash_logout", + ".bash_profile", "bash_profile", + ".bashrc", "bashrc", "*.bashrc", + // csh-specific init files + ".cshrc", "*.cshrc", + // ksh-specific init files + ".kshrc", "*.kshrc", + // tcsh-specific init files + ".tcshrc", + // zsh-specific init files + ".zshenv", "zshenv", + ".zlogin", "zlogin", + ".zlogout", "zlogout", + ".zprofile", "zprofile", + ".zshrc", "zshrc", + // Extensions + "*.bash", "*.csh", "*.ksh", "*.sh", "*.tcsh", "*.zsh", + ]), + ("smarty", &["*.tpl"]), + ("sml", &["*.sml", "*.sig"]), + ("soy", &["*.soy"]), + ("spark", &["*.spark"]), + ("sql", &["*.sql", "*.psql"]), + ("stylus", &["*.styl"]), + ("sv", &["*.v", "*.vg", "*.sv", "*.svh", "*.h"]), + ("svg", &["*.svg"]), + ("swift", &["*.swift"]), + ("swig", &["*.def", "*.i"]), + ("systemd", &[ + "*.automount", "*.conf", "*.device", "*.link", "*.mount", "*.path", + "*.scope", "*.service", "*.slice", "*.socket", "*.swap", "*.target", + "*.timer", + ]), + ("taskpaper", &["*.taskpaper"]), + ("tcl", &["*.tcl"]), + ("tex", &["*.tex", "*.ltx", "*.cls", "*.sty", "*.bib"]), + ("textile", &["*.textile"]), + ("tf", &["*.tf"]), + ("ts", &["*.ts", "*.tsx"]), + ("txt", &["*.txt"]), + ("toml", &["*.toml", "Cargo.lock"]), + ("twig", &["*.twig"]), + ("vala", &["*.vala"]), + ("vb", &["*.vb"]), + ("verilog", &["*.v", "*.vh", "*.sv", "*.svh"]), + ("vhdl", &["*.vhd", "*.vhdl"]), + ("vim", &["*.vim"]), + ("vimscript", &["*.vim"]), + ("wiki", &["*.mediawiki", "*.wiki"]), + ("webidl", &["*.idl", "*.webidl", "*.widl"]), + ("xml", &["*.xml", "*.xml.dist"]), + ("xz", &["*.xz"]), + ("yacc", &["*.y"]), + ("yaml", &["*.yaml", "*.yml"]), + ("zsh", &[ + ".zshenv", "zshenv", + ".zlogin", "zlogin", + ".zlogout", "zlogout", + ".zprofile", "zprofile", + ".zshrc", "zshrc", + "*.zsh", + ]), +]; + +/// Glob represents a single glob in a set of file type definitions. +/// +/// There may be more than one glob for a particular file type. +/// +/// This is used to report information about the highest precedent glob +/// that matched. +/// +/// Note that not all matches necessarily correspond to a specific glob. +/// For example, if there are one or more selections and a file path doesn't +/// match any of those selections, then the file path is considered to be +/// ignored. +/// +/// The lifetime `'a` refers to the lifetime of the underlying file type +/// definition, which corresponds to the lifetime of the file type matcher. +#[derive(Clone, Debug)] +pub struct Glob<'a>(GlobInner<'a>); + +#[derive(Clone, Debug)] +enum GlobInner<'a> { + /// No glob matched, but the file path should still be ignored. + UnmatchedIgnore, + /// A glob matched. + Matched { + /// The file type definition which provided the glob. + def: &'a FileTypeDef, + /// The index of the glob that matched inside the file type definition. + which: usize, + /// Whether the selection was negated or not. + negated: bool, + } +} + +impl<'a> Glob<'a> { + fn unmatched() -> Glob<'a> { + Glob(GlobInner::UnmatchedIgnore) + } +} + +/// A single file type definition. +/// +/// File type definitions can be retrieved in aggregate from a file type +/// matcher. File type definitions are also reported when its responsible +/// for a match. +#[derive(Clone, Debug, Eq, PartialEq)] +pub struct FileTypeDef { + name: String, + globs: Vec, +} + +impl FileTypeDef { + /// Return the name of this file type. 
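`FileTypeDef` only exposes read access, so the most direct way to see it in action is to inspect the defaults. A sketch using `TypesBuilder::definitions` (defined further below) together with the accessors above:

```rust
use ignore::types::TypesBuilder;

fn main() {
    let mut builder = TypesBuilder::new();
    builder.add_defaults();

    // Definitions come back sorted by name, each with sorted globs.
    let defs = builder.definitions();
    let rust = defs.iter().find(|def| def.name() == "rust").unwrap();
    assert!(rust.globs().contains(&"*.rs".to_string()));
}
```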
+ pub fn name(&self) -> &str { + &self.name + } + + /// Return the globs used to recognize this file type. + pub fn globs(&self) -> &[String] { + &self.globs + } +} + +/// Types is a file type matcher. +#[derive(Clone, Debug)] +pub struct Types { + /// All of the file type definitions, sorted lexicographically by name. + defs: Vec, + /// All of the selections made by the user. + selections: Vec>, + /// Whether there is at least one Selection::Select in our selections. + /// When this is true, a Match::None is converted to Match::Ignore. + has_selected: bool, + /// A mapping from glob index in the set to two indices. The first is an + /// index into `selections` and the second is an index into the + /// corresponding file type definition's list of globs. + glob_to_selection: Vec<(usize, usize)>, + /// The set of all glob selections, used for actual matching. + set: GlobSet, + /// Temporary storage for globs that match. + matches: Arc>>>, +} + +/// Indicates the type of a selection for a particular file type. +#[derive(Clone, Debug)] +enum Selection { + Select(String, T), + Negate(String, T), +} + +impl Selection { + fn is_negated(&self) -> bool { + match *self { + Selection::Select(..) => false, + Selection::Negate(..) => true, + } + } + + fn name(&self) -> &str { + match *self { + Selection::Select(ref name, _) => name, + Selection::Negate(ref name, _) => name, + } + } + + fn map U>(self, f: F) -> Selection { + match self { + Selection::Select(name, inner) => { + Selection::Select(name, f(inner)) + } + Selection::Negate(name, inner) => { + Selection::Negate(name, f(inner)) + } + } + } + + fn inner(&self) -> &T { + match *self { + Selection::Select(_, ref inner) => inner, + Selection::Negate(_, ref inner) => inner, + } + } +} + +impl Types { + /// Creates a new file type matcher that never matches any path and + /// contains no file type definitions. + pub fn empty() -> Types { + Types { + defs: vec![], + selections: vec![], + has_selected: false, + glob_to_selection: vec![], + set: GlobSetBuilder::new().build().unwrap(), + matches: Arc::new(ThreadLocal::default()), + } + } + + /// Returns true if and only if this matcher has zero selections. + pub fn is_empty(&self) -> bool { + self.selections.is_empty() + } + + /// Returns the number of selections used in this matcher. + pub fn len(&self) -> usize { + self.selections.len() + } + + /// Return the set of current file type definitions. + /// + /// Definitions and globs are sorted. + pub fn definitions(&self) -> &[FileTypeDef] { + &self.defs + } + + /// Returns a match for the given path against this file type matcher. + /// + /// The path is considered whitelisted if it matches a selected file type. + /// The path is considered ignored if it matches a negated file type. + /// If at least one file type is selected and `path` doesn't match, then + /// the path is also considered ignored. + pub fn matched<'a, P: AsRef>( + &'a self, + path: P, + is_dir: bool, + ) -> Match> { + // File types don't apply to directories, and we can't do anything + // if our glob set is empty. + if is_dir || self.set.is_empty() { + return Match::None; + } + // We only want to match against the file name, so extract it. + // If one doesn't exist, then we can't match it. 
+ let name = match file_name(path.as_ref()) { + Some(name) => name, + None if self.has_selected => { + return Match::Ignore(Glob::unmatched()); + } + None => { + return Match::None; + } + }; + let mut matches = self.matches.get_default().borrow_mut(); + self.set.matches_into(name, &mut *matches); + // The highest precedent match is the last one. + if let Some(&i) = matches.last() { + let (isel, iglob) = self.glob_to_selection[i]; + let sel = &self.selections[isel]; + let glob = Glob(GlobInner::Matched { + def: sel.inner(), + which: iglob, + negated: sel.is_negated(), + }); + return if sel.is_negated() { + Match::Ignore(glob) + } else { + Match::Whitelist(glob) + }; + } + if self.has_selected { + Match::Ignore(Glob::unmatched()) + } else { + Match::None + } + } +} + +/// TypesBuilder builds a type matcher from a set of file type definitions and +/// a set of file type selections. +pub struct TypesBuilder { + types: HashMap, + selections: Vec>, +} + +impl TypesBuilder { + /// Create a new builder for a file type matcher. + /// + /// The builder contains *no* type definitions to start with. A set + /// of default type definitions can be added with `add_defaults`, and + /// additional type definitions can be added with `select` and `negate`. + pub fn new() -> TypesBuilder { + TypesBuilder { + types: HashMap::new(), + selections: vec![], + } + } + + /// Build the current set of file type definitions *and* selections into + /// a file type matcher. + pub fn build(&self) -> Result { + let defs = self.definitions(); + let has_selected = self.selections.iter().any(|s| !s.is_negated()); + + let mut selections = vec![]; + let mut glob_to_selection = vec![]; + let mut build_set = GlobSetBuilder::new(); + for (isel, selection) in self.selections.iter().enumerate() { + let def = match self.types.get(selection.name()) { + Some(def) => def.clone(), + None => { + let name = selection.name().to_string(); + return Err(Error::UnrecognizedFileType(name)); + } + }; + for (iglob, glob) in def.globs.iter().enumerate() { + build_set.add( + GlobBuilder::new(glob) + .literal_separator(true) + .build() + .map_err(|err| { + Error::Glob { + glob: Some(glob.to_string()), + err: err.kind().to_string(), + } + })?); + glob_to_selection.push((isel, iglob)); + } + selections.push(selection.clone().map(move |_| def)); + } + let set = build_set.build().map_err(|err| { + Error::Glob { glob: None, err: err.to_string() } + })?; + Ok(Types { + defs: defs, + selections: selections, + has_selected: has_selected, + glob_to_selection: glob_to_selection, + set: set, + matches: Arc::new(ThreadLocal::default()), + }) + } + + /// Return the set of current file type definitions. + /// + /// Definitions and globs are sorted. + pub fn definitions(&self) -> Vec { + let mut defs = vec![]; + for def in self.types.values() { + let mut def = def.clone(); + def.globs.sort(); + defs.push(def); + } + defs.sort_by(|def1, def2| def1.name().cmp(def2.name())); + defs + } + + /// Select the file type given by `name`. + /// + /// If `name` is `all`, then all file types currently defined are selected. + pub fn select(&mut self, name: &str) -> &mut TypesBuilder { + if name == "all" { + for name in self.types.keys() { + self.selections.push(Selection::Select(name.to_string(), ())); + } + } else { + self.selections.push(Selection::Select(name.to_string(), ())); + } + self + } + + /// Ignore the file type given by `name`. + /// + /// If `name` is `all`, then all file types currently defined are negated. 
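The special `all` name is easy to miss; a sketch combining a blanket negation with a single selection (the later selection takes precedence, since the highest-indexed matching glob wins in `matched` above):

```rust
use ignore::types::TypesBuilder;

fn main() {
    let mut builder = TypesBuilder::new();
    builder.add_defaults();
    builder.negate("all");   // ignore every known file type...
    builder.select("rust");  // ...except Rust sources
    let matcher = builder.build().unwrap();

    assert!(matcher.matched("main.rs", false).is_whitelist());
    assert!(matcher.matched("main.c", false).is_ignore());
}
```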
+ pub fn negate(&mut self, name: &str) -> &mut TypesBuilder { + if name == "all" { + for name in self.types.keys() { + self.selections.push(Selection::Negate(name.to_string(), ())); + } + } else { + self.selections.push(Selection::Negate(name.to_string(), ())); + } + self + } + + /// Clear any file type definitions for the type name given. + pub fn clear(&mut self, name: &str) -> &mut TypesBuilder { + self.types.remove(name); + self + } + + /// Add a new file type definition. `name` can be arbitrary and `pat` + /// should be a glob recognizing file paths belonging to the `name` type. + /// + /// If `name` is `all` or otherwise contains any character that is not a + /// Unicode letter or number, then an error is returned. + pub fn add(&mut self, name: &str, glob: &str) -> Result<(), Error> { + lazy_static! { + static ref RE: Regex = Regex::new(r"^[\pL\pN]+$").unwrap(); + }; + if name == "all" || !RE.is_match(name) { + return Err(Error::InvalidDefinition); + } + let (key, glob) = (name.to_string(), glob.to_string()); + self.types.entry(key).or_insert_with(|| { + FileTypeDef { name: name.to_string(), globs: vec![] } + }).globs.push(glob); + Ok(()) + } + + /// Add a new file type definition specified in string form. There are two + /// valid formats: + /// 1. `{name}:{glob}`. This defines a 'root' definition that associates the + /// given name with the given glob. + /// 2. `{name}:include:{comma-separated list of already defined names}. + /// This defines an 'include' definition that associates the given name + /// with the definitions of the given existing types. + /// Names may not include any characters that are not + /// Unicode letters or numbers. + pub fn add_def(&mut self, def: &str) -> Result<(), Error> { + let parts: Vec<&str> = def.split(':').collect(); + match parts.len() { + 2 => { + let name = parts[0]; + let glob = parts[1]; + if name.is_empty() || glob.is_empty() { + return Err(Error::InvalidDefinition); + } + self.add(name, glob) + } + 3 => { + let name = parts[0]; + let types_string = parts[2]; + if name.is_empty() || parts[1] != "include" || types_string.is_empty() { + return Err(Error::InvalidDefinition); + } + let types = types_string.split(','); + // Check ahead of time to ensure that all types specified are + // present and fail fast if not. + if types.clone().any(|t| !self.types.contains_key(t)) { + return Err(Error::InvalidDefinition); + } + for type_name in types { + let globs = self.types.get(type_name).unwrap().globs.clone(); + for glob in globs { + self.add(name, &glob)?; + } + } + Ok(()) + } + _ => Err(Error::InvalidDefinition) + } + } + + /// Add a set of default file type definitions. + pub fn add_defaults(&mut self) -> &mut TypesBuilder { + static MSG: &'static str = "adding a default type should never fail"; + for &(name, exts) in DEFAULT_TYPES { + for ext in exts { + self.add(name, ext).expect(MSG); + } + } + self + } +} + +#[cfg(test)] +mod tests { + use super::TypesBuilder; + + macro_rules! 
matched { + ($name:ident, $types:expr, $sel:expr, $selnot:expr, + $path:expr) => { + matched!($name, $types, $sel, $selnot, $path, true); + }; + (not, $name:ident, $types:expr, $sel:expr, $selnot:expr, + $path:expr) => { + matched!($name, $types, $sel, $selnot, $path, false); + }; + ($name:ident, $types:expr, $sel:expr, $selnot:expr, + $path:expr, $matched:expr) => { + #[test] + fn $name() { + let mut btypes = TypesBuilder::new(); + for tydef in $types { + btypes.add_def(tydef).unwrap(); + } + for sel in $sel { + btypes.select(sel); + } + for selnot in $selnot { + btypes.negate(selnot); + } + let types = btypes.build().unwrap(); + let mat = types.matched($path, false); + assert_eq!($matched, !mat.is_ignore()); + } + }; + } + + fn types() -> Vec<&'static str> { + vec![ + "html:*.html", + "html:*.htm", + "rust:*.rs", + "js:*.js", + "foo:*.{rs,foo}", + "combo:include:html,rust" + ] + } + + matched!(match1, types(), vec!["rust"], vec![], "lib.rs"); + matched!(match2, types(), vec!["html"], vec![], "index.html"); + matched!(match3, types(), vec!["html"], vec![], "index.htm"); + matched!(match4, types(), vec!["html", "rust"], vec![], "main.rs"); + matched!(match5, types(), vec![], vec![], "index.html"); + matched!(match6, types(), vec![], vec!["rust"], "index.html"); + matched!(match7, types(), vec!["foo"], vec!["rust"], "main.foo"); + matched!(match8, types(), vec!["combo"], vec![], "index.html"); + matched!(match9, types(), vec!["combo"], vec![], "lib.rs"); + + matched!(not, matchnot1, types(), vec!["rust"], vec![], "index.html"); + matched!(not, matchnot2, types(), vec![], vec!["rust"], "main.rs"); + matched!(not, matchnot3, types(), vec!["foo"], vec!["rust"], "main.rs"); + matched!(not, matchnot4, types(), vec!["rust"], vec!["foo"], "main.rs"); + matched!(not, matchnot5, types(), vec!["rust"], vec!["foo"], "main.foo"); + matched!(not, matchnot6, types(), vec!["combo"], vec![], "leftpad.js"); + + #[test] + fn test_invalid_defs() { + let mut btypes = TypesBuilder::new(); + for tydef in types() { + btypes.add_def(tydef).unwrap(); + } + // Preserve the original definitions for later comparison. + let original_defs = btypes.definitions(); + let bad_defs = vec![ + // Reference to type that does not exist + "combo:include:html,python", + // Bad format + "combo:foobar:html,rust", + "" + ]; + for def in bad_defs { + assert!(btypes.add_def(def).is_err()); + // Ensure that nothing changed, even if some of the includes were valid. + assert_eq!(btypes.definitions(), original_defs); + } + } +} diff --git a/ignore/src/walk.rs b/ignore/src/walk.rs new file mode 100644 index 000000000..3a4515cdb --- /dev/null +++ b/ignore/src/walk.rs @@ -0,0 +1,2050 @@ +use std::cmp; +use std::ffi::OsStr; +use std::fmt; +use std::fs::{self, FileType, Metadata}; +use std::io; +use std::path::{Path, PathBuf}; +use std::sync::Arc; +use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering}; +use std::thread; +use std::time::Duration; +use std::vec; + +use channel; +use same_file::Handle; +use walkdir::{self, WalkDir}; + +use dir::{Ignore, IgnoreBuilder}; +use gitignore::GitignoreBuilder; +use overrides::Override; +use types::Types; +use {Error, PartialErrorBuilder}; + +/// A directory entry with a possible error attached. +/// +/// The error typically refers to a problem parsing ignore files in a +/// particular directory. +#[derive(Clone, Debug)] +pub struct DirEntry { + dent: DirEntryInner, + err: Option, +} + +impl DirEntry { + /// The full path that this entry represents. 
+ pub fn path(&self) -> &Path { + self.dent.path() + } + + /// The full path that this entry represents. + /// Analogous to [`path`], but moves ownership of the path. + /// + /// [`path`]: struct.DirEntry.html#method.path + pub fn into_path(self) -> PathBuf { + self.dent.into_path() + } + + /// Whether this entry corresponds to a symbolic link or not. + pub fn path_is_symlink(&self) -> bool { + self.dent.path_is_symlink() + } + + /// Returns true if and only if this entry corresponds to stdin. + /// + /// i.e., The entry has depth 0 and its file name is `-`. + pub fn is_stdin(&self) -> bool { + self.dent.is_stdin() + } + + /// Return the metadata for the file that this entry points to. + pub fn metadata(&self) -> Result { + self.dent.metadata() + } + + /// Return the file type for the file that this entry points to. + /// + /// This entry doesn't have a file type if it corresponds to stdin. + pub fn file_type(&self) -> Option { + self.dent.file_type() + } + + /// Return the file name of this entry. + /// + /// If this entry has no file name (e.g., `/`), then the full path is + /// returned. + pub fn file_name(&self) -> &OsStr { + self.dent.file_name() + } + + /// Returns the depth at which this entry was created relative to the root. + pub fn depth(&self) -> usize { + self.dent.depth() + } + + /// Returns the underlying inode number if one exists. + /// + /// If this entry doesn't have an inode number, then `None` is returned. + #[cfg(unix)] + pub fn ino(&self) -> Option { + self.dent.ino() + } + + /// Returns an error, if one exists, associated with processing this entry. + /// + /// An example of an error is one that occurred while parsing an ignore + /// file. Errors related to traversing a directory tree itself are reported + /// as part of yielding the directory entry, and not with this method. + pub fn error(&self) -> Option<&Error> { + self.err.as_ref() + } + + /// Returns true if and only if this entry points to a directory. + fn is_dir(&self) -> bool { + self.dent.is_dir() + } + + fn new_stdin() -> DirEntry { + DirEntry { + dent: DirEntryInner::Stdin, + err: None, + } + } + + fn new_walkdir(dent: walkdir::DirEntry, err: Option) -> DirEntry { + DirEntry { + dent: DirEntryInner::Walkdir(dent), + err: err, + } + } + + fn new_raw(dent: DirEntryRaw, err: Option) -> DirEntry { + DirEntry { + dent: DirEntryInner::Raw(dent), + err: err, + } + } +} + +/// DirEntryInner is the implementation of DirEntry. +/// +/// It specifically represents three distinct sources of directory entries: +/// +/// 1. From the walkdir crate. +/// 2. Special entries that represent things like stdin. +/// 3. From a path. +/// +/// Specifically, (3) has to essentially re-create the DirEntry implementation +/// from WalkDir. 
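A sketch that exercises several of the `DirEntry` accessors above while iterating the simple `Walk`; the starting directory is illustrative:

```rust
use ignore::Walk;

fn main() {
    for result in Walk::new("./") {
        match result {
            Ok(entry) => {
                let is_dir = entry.file_type().map_or(false, |ft| ft.is_dir());
                if !is_dir {
                    println!("{} (depth {})", entry.path().display(), entry.depth());
                }
            }
            Err(err) => eprintln!("ERROR: {}", err),
        }
    }
}
```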
+#[derive(Clone, Debug)] +enum DirEntryInner { + Stdin, + Walkdir(walkdir::DirEntry), + Raw(DirEntryRaw), +} + +impl DirEntryInner { + fn path(&self) -> &Path { + use self::DirEntryInner::*; + match *self { + Stdin => Path::new(""), + Walkdir(ref x) => x.path(), + Raw(ref x) => x.path(), + } + } + + fn into_path(self) -> PathBuf { + use self::DirEntryInner::*; + match self { + Stdin => PathBuf::from(""), + Walkdir(x) => x.into_path(), + Raw(x) => x.into_path(), + } + } + + fn path_is_symlink(&self) -> bool { + use self::DirEntryInner::*; + match *self { + Stdin => false, + Walkdir(ref x) => x.path_is_symlink(), + Raw(ref x) => x.path_is_symlink(), + } + } + + fn is_stdin(&self) -> bool { + match *self { + DirEntryInner::Stdin => true, + _ => false, + } + } + + fn metadata(&self) -> Result { + use self::DirEntryInner::*; + match *self { + Stdin => { + let err = Error::Io(io::Error::new( + io::ErrorKind::Other, " has no metadata")); + Err(err.with_path("")) + } + Walkdir(ref x) => { + x.metadata().map_err(|err| { + Error::Io(io::Error::from(err)).with_path(x.path()) + }) + } + Raw(ref x) => x.metadata(), + } + } + + fn file_type(&self) -> Option { + use self::DirEntryInner::*; + match *self { + Stdin => None, + Walkdir(ref x) => Some(x.file_type()), + Raw(ref x) => Some(x.file_type()), + } + } + + fn file_name(&self) -> &OsStr { + use self::DirEntryInner::*; + match *self { + Stdin => OsStr::new(""), + Walkdir(ref x) => x.file_name(), + Raw(ref x) => x.file_name(), + } + } + + fn depth(&self) -> usize { + use self::DirEntryInner::*; + match *self { + Stdin => 0, + Walkdir(ref x) => x.depth(), + Raw(ref x) => x.depth(), + } + } + + #[cfg(unix)] + fn ino(&self) -> Option { + use walkdir::DirEntryExt; + use self::DirEntryInner::*; + match *self { + Stdin => None, + Walkdir(ref x) => Some(x.ino()), + Raw(ref x) => Some(x.ino()), + } + } + + /// Returns true if and only if this entry points to a directory. + fn is_dir(&self) -> bool { + self.file_type().map(|ft| ft.is_dir()).unwrap_or(false) + } +} + +/// DirEntryRaw is essentially copied from the walkdir crate so that we can +/// build `DirEntry`s from whole cloth in the parallel iterator. +#[derive(Clone)] +struct DirEntryRaw { + /// The path as reported by the `fs::ReadDir` iterator (even if it's a + /// symbolic link). + path: PathBuf, + /// The file type. Necessary for recursive iteration, so store it. + ty: FileType, + /// Is set when this entry was created from a symbolic link and the user + /// expects the iterator to follow symbolic links. + follow_link: bool, + /// The depth at which this entry was generated relative to the root. + depth: usize, + /// The underlying inode number (Unix only). + #[cfg(unix)] + ino: u64, + /// The underlying metadata (Windows only). We store this on Windows + /// because this comes for free while reading a directory. + #[cfg(windows)] + metadata: fs::Metadata, +} + +impl fmt::Debug for DirEntryRaw { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + // Leaving out FileType because it doesn't have a debug impl + // in Rust 1.9. We could add it if we really wanted to by manually + // querying each possibly file type. Meh. 
---AG + f.debug_struct("DirEntryRaw") + .field("path", &self.path) + .field("follow_link", &self.follow_link) + .field("depth", &self.depth) + .finish() + } +} + +impl DirEntryRaw { + fn path(&self) -> &Path { + &self.path + } + + fn into_path(self) -> PathBuf { + self.path + } + + fn path_is_symlink(&self) -> bool { + self.ty.is_symlink() || self.follow_link + } + + fn metadata(&self) -> Result { + self.metadata_internal() + } + + #[cfg(windows)] + fn metadata_internal(&self) -> Result { + if self.follow_link { + fs::metadata(&self.path) + } else { + Ok(self.metadata.clone()) + }.map_err(|err| Error::Io(io::Error::from(err)).with_path(&self.path)) + } + + #[cfg(not(windows))] + fn metadata_internal(&self) -> Result { + if self.follow_link { + fs::metadata(&self.path) + } else { + fs::symlink_metadata(&self.path) + }.map_err(|err| Error::Io(io::Error::from(err)).with_path(&self.path)) + } + + fn file_type(&self) -> FileType { + self.ty + } + + fn file_name(&self) -> &OsStr { + self.path.file_name().unwrap_or_else(|| self.path.as_os_str()) + } + + fn depth(&self) -> usize { + self.depth + } + + #[cfg(unix)] + fn ino(&self) -> u64 { + self.ino + } + + fn from_entry( + depth: usize, + ent: &fs::DirEntry, + ) -> Result { + let ty = ent.file_type().map_err(|err| { + let err = Error::Io(io::Error::from(err)).with_path(ent.path()); + Error::WithDepth { + depth: depth, + err: Box::new(err), + } + })?; + DirEntryRaw::from_entry_os(depth, ent, ty) + } + + #[cfg(windows)] + fn from_entry_os( + depth: usize, + ent: &fs::DirEntry, + ty: fs::FileType, + ) -> Result { + let md = ent.metadata().map_err(|err| { + let err = Error::Io(io::Error::from(err)).with_path(ent.path()); + Error::WithDepth { + depth: depth, + err: Box::new(err), + } + })?; + Ok(DirEntryRaw { + path: ent.path(), + ty: ty, + follow_link: false, + depth: depth, + metadata: md, + }) + } + + #[cfg(unix)] + fn from_entry_os( + depth: usize, + ent: &fs::DirEntry, + ty: fs::FileType, + ) -> Result { + use std::os::unix::fs::DirEntryExt; + + Ok(DirEntryRaw { + path: ent.path(), + ty: ty, + follow_link: false, + depth: depth, + ino: ent.ino(), + }) + } + + #[cfg(not(unix))] + fn from_path( + depth: usize, + pb: PathBuf, + link: bool, + ) -> Result { + let md = fs::metadata(&pb).map_err(|err| { + Error::Io(err).with_path(&pb) + })?; + Ok(DirEntryRaw { + path: pb, + ty: md.file_type(), + follow_link: link, + depth: depth, + metadata: md, + }) + } + + #[cfg(unix)] + fn from_path( + depth: usize, + pb: PathBuf, + link: bool, + ) -> Result { + use std::os::unix::fs::MetadataExt; + + let md = fs::metadata(&pb).map_err(|err| { + Error::Io(err).with_path(&pb) + })?; + Ok(DirEntryRaw { + path: pb, + ty: md.file_type(), + follow_link: link, + depth: depth, + ino: md.ino(), + }) + } +} + +/// WalkBuilder builds a recursive directory iterator. +/// +/// The builder supports a large number of configurable options. This includes +/// specific glob overrides, file type matching, toggling whether hidden +/// files are ignored or not, and of course, support for respecting gitignore +/// files. +/// +/// By default, all ignore files found are respected. This includes `.ignore`, +/// `.gitignore`, `.git/info/exclude` and even your global gitignore +/// globs, usually found in `$XDG_CONFIG_HOME/git/ignore`. +/// +/// Some standard recursive directory options are also supported, such as +/// limiting the recursive depth or whether to follow symbolic links (disabled +/// by default). 
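A sketch of the two options called out in the paragraph above, `max_depth` and `follow_links`, both of which are defined later in this file; the root path and depth limit are illustrative:

```rust
use ignore::WalkBuilder;

fn main() {
    let walker = WalkBuilder::new("./")
        .max_depth(Some(2))   // stop two levels below the root
        .follow_links(true)   // traverse through symbolic links
        .build();

    for result in walker {
        if let Ok(entry) = result {
            println!("{}", entry.path().display());
        }
    }
}
```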
+/// +/// # Ignore rules +/// +/// There are many rules that influence whether a particular file or directory +/// is skipped by this iterator. Those rules are documented here. Note that +/// the rules assume a default configuration. +/// +/// * First, glob overrides are checked. If a path matches a glob override, +/// then matching stops. The path is then only skipped if the glob that matched +/// the path is an ignore glob. (An override glob is a whitelist glob unless it +/// starts with a `!`, in which case it is an ignore glob.) +/// * Second, ignore files are checked. Ignore files currently only come from +/// git ignore files (`.gitignore`, `.git/info/exclude` and the configured +/// global gitignore file), plain `.ignore` files, which have the same format +/// as gitignore files, or explicitly added ignore files. The precedence order +/// is: `.ignore`, `.gitignore`, `.git/info/exclude`, global gitignore and +/// finally explicitly added ignore files. Note that precedence between +/// different types of ignore files is not impacted by the directory hierarchy; +/// any `.ignore` file overrides all `.gitignore` files. Within each precedence +/// level, more nested ignore files have a higher precedence than less nested +/// ignore files. +/// * Third, if the previous step yields an ignore match, then all matching +/// is stopped and the path is skipped. If it yields a whitelist match, then +/// matching continues. A whitelist match can be overridden by a later matcher. +/// * Fourth, unless the path is a directory, the file type matcher is run on +/// the path. As above, if it yields an ignore match, then all matching is +/// stopped and the path is skipped. If it yields a whitelist match, then +/// matching continues. +/// * Fifth, if the path hasn't been whitelisted and it is hidden, then the +/// path is skipped. +/// * Sixth, unless the path is a directory, the size of the file is compared +/// against the max filesize limit. If it exceeds the limit, it is skipped. +/// * Seventh, if the path has made it this far then it is yielded in the +/// iterator. +#[derive(Clone)] +pub struct WalkBuilder { + paths: Vec, + ig_builder: IgnoreBuilder, + max_depth: Option, + max_filesize: Option, + follow_links: bool, + same_file_system: bool, + sorter: Option, + threads: usize, + skip: Option>, +} + +#[derive(Clone)] +enum Sorter { + ByName(Arc cmp::Ordering + Send + Sync + 'static>), + ByPath(Arc cmp::Ordering + Send + Sync + 'static>), +} + +impl fmt::Debug for WalkBuilder { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_struct("WalkBuilder") + .field("paths", &self.paths) + .field("ig_builder", &self.ig_builder) + .field("max_depth", &self.max_depth) + .field("max_filesize", &self.max_filesize) + .field("follow_links", &self.follow_links) + .field("threads", &self.threads) + .field("skip", &self.skip) + .finish() + } +} + +impl WalkBuilder { + /// Create a new builder for a recursive directory iterator for the + /// directory given. + /// + /// Note that if you want to traverse multiple different directories, it + /// is better to call `add` on this builder than to create multiple + /// `Walk` values. + pub fn new>(path: P) -> WalkBuilder { + WalkBuilder { + paths: vec![path.as_ref().to_path_buf()], + ig_builder: IgnoreBuilder::new(), + max_depth: None, + max_filesize: None, + follow_links: false, + same_file_system: false, + sorter: None, + threads: 0, + skip: None, + } + } + + /// Build a new `Walk` iterator. 
+ pub fn build(&self) -> Walk { + let follow_links = self.follow_links; + let max_depth = self.max_depth; + let sorter = self.sorter.clone(); + let its = self.paths.iter().map(move |p| { + if p == Path::new("-") { + (p.to_path_buf(), None) + } else { + let mut wd = WalkDir::new(p); + wd = wd.follow_links(follow_links || p.is_file()); + wd = wd.same_file_system(self.same_file_system); + if let Some(max_depth) = max_depth { + wd = wd.max_depth(max_depth); + } + if let Some(ref sorter) = sorter { + match sorter.clone() { + Sorter::ByName(cmp) => { + wd = wd.sort_by(move |a, b| { + cmp(a.file_name(), b.file_name()) + }); + } + Sorter::ByPath(cmp) => { + wd = wd.sort_by(move |a, b| { + cmp(a.path(), b.path()) + }); + } + } + } + (p.to_path_buf(), Some(WalkEventIter::from(wd))) + } + }).collect::>().into_iter(); + let ig_root = self.ig_builder.build(); + Walk { + its: its, + it: None, + ig_root: ig_root.clone(), + ig: ig_root.clone(), + max_filesize: self.max_filesize, + skip: self.skip.clone(), + } + } + + /// Build a new `WalkParallel` iterator. + /// + /// Note that this *doesn't* return something that implements `Iterator`. + /// Instead, the returned value must be run with a closure. e.g., + /// `builder.build_parallel().run(|| |path| println!("{:?}", path))`. + pub fn build_parallel(&self) -> WalkParallel { + WalkParallel { + paths: self.paths.clone().into_iter(), + ig_root: self.ig_builder.build(), + max_depth: self.max_depth, + max_filesize: self.max_filesize, + follow_links: self.follow_links, + same_file_system: self.same_file_system, + threads: self.threads, + skip: self.skip.clone(), + } + } + + /// Add a file path to the iterator. + /// + /// Each additional file path added is traversed recursively. This should + /// be preferred over building multiple `Walk` iterators since this + /// enables reusing resources across iteration. + pub fn add>(&mut self, path: P) -> &mut WalkBuilder { + self.paths.push(path.as_ref().to_path_buf()); + self + } + + /// The maximum depth to recurse. + /// + /// The default, `None`, imposes no depth restriction. + pub fn max_depth(&mut self, depth: Option) -> &mut WalkBuilder { + self.max_depth = depth; + self + } + + /// Whether to follow symbolic links or not. + pub fn follow_links(&mut self, yes: bool) -> &mut WalkBuilder { + self.follow_links = yes; + self + } + + /// Whether to ignore files above the specified limit. + pub fn max_filesize(&mut self, filesize: Option) -> &mut WalkBuilder { + self.max_filesize = filesize; + self + } + + /// The number of threads to use for traversal. + /// + /// Note that this only has an effect when using `build_parallel`. + /// + /// The default setting is `0`, which chooses the number of threads + /// automatically using heuristics. + pub fn threads(&mut self, n: usize) -> &mut WalkBuilder { + self.threads = n; + self + } + + /// Add a global ignore file to the matcher. + /// + /// This has lower precedence than all other sources of ignore rules. + /// + /// If there was a problem adding the ignore file, then an error is + /// returned. Note that the error may indicate *partial* failure. For + /// example, if an ignore file contains an invalid glob, all other globs + /// are still applied. 
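A short sketch of `add_ignore`, under the same crate-root re-export assumption: the explicit ignore file has the lowest precedence, and the method returns `Some(err)` on (possibly partial) failure and `None` on success. The file name `.not-an-ignore` is borrowed from the `explicit_ignore` test further below.

extern crate ignore;

use ignore::WalkBuilder;

fn main() {
    let mut builder = WalkBuilder::new("./");
    // Load an extra ignore file; report (possibly partial) failures.
    if let Some(err) = builder.add_ignore(".not-an-ignore") {
        eprintln!("problem reading ignore file: {}", err);
    }
    for result in builder.build() {
        if let Ok(entry) = result {
            println!("{}", entry.path().display());
        }
    }
}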
+ pub fn add_ignore>(&mut self, path: P) -> Option { + let mut builder = GitignoreBuilder::new(""); + let mut errs = PartialErrorBuilder::default(); + errs.maybe_push(builder.add(path)); + match builder.build() { + Ok(gi) => { self.ig_builder.add_ignore(gi); } + Err(err) => { errs.push(err); } + } + errs.into_error_option() + } + + /// Add a custom ignore file name + /// + /// These ignore files have higher precedence than all other ignore files. + /// + /// When specifying multiple names, earlier names have lower precedence than + /// later names. + pub fn add_custom_ignore_filename>( + &mut self, + file_name: S + ) -> &mut WalkBuilder { + self.ig_builder.add_custom_ignore_filename(file_name); + self + } + + /// Add an override matcher. + /// + /// By default, no override matcher is used. + /// + /// This overrides any previous setting. + pub fn overrides(&mut self, overrides: Override) -> &mut WalkBuilder { + self.ig_builder.overrides(overrides); + self + } + + /// Add a file type matcher. + /// + /// By default, no file type matcher is used. + /// + /// This overrides any previous setting. + pub fn types(&mut self, types: Types) -> &mut WalkBuilder { + self.ig_builder.types(types); + self + } + + /// Enables all the standard ignore filters. + /// + /// This toggles, as a group, all the filters that are enabled by default: + /// + /// - [hidden()](#method.hidden) + /// - [parents()](#method.parents) + /// - [ignore()](#method.ignore) + /// - [git_ignore()](#method.git_ignore) + /// - [git_global()](#method.git_global) + /// - [git_exclude()](#method.git_exclude) + /// + /// They may still be toggled individually after calling this function. + /// + /// This is (by definition) enabled by default. + pub fn standard_filters(&mut self, yes: bool) -> &mut WalkBuilder { + self.hidden(yes) + .parents(yes) + .ignore(yes) + .git_ignore(yes) + .git_global(yes) + .git_exclude(yes) + } + + /// Enables ignoring hidden files. + /// + /// This is enabled by default. + pub fn hidden(&mut self, yes: bool) -> &mut WalkBuilder { + self.ig_builder.hidden(yes); + self + } + + /// Enables reading ignore files from parent directories. + /// + /// If this is enabled, then .gitignore files in parent directories of each + /// file path given are respected. Otherwise, they are ignored. + /// + /// This is enabled by default. + pub fn parents(&mut self, yes: bool) -> &mut WalkBuilder { + self.ig_builder.parents(yes); + self + } + + /// Enables reading `.ignore` files. + /// + /// `.ignore` files have the same semantics as `gitignore` files and are + /// supported by search tools such as ripgrep and The Silver Searcher. + /// + /// This is enabled by default. + pub fn ignore(&mut self, yes: bool) -> &mut WalkBuilder { + self.ig_builder.ignore(yes); + self + } + + /// Enables reading a global gitignore file, whose path is specified in + /// git's `core.excludesFile` config option. + /// + /// Git's config file location is `$HOME/.gitconfig`. If `$HOME/.gitconfig` + /// does not exist or does not specify `core.excludesFile`, then + /// `$XDG_CONFIG_HOME/git/ignore` is read. If `$XDG_CONFIG_HOME` is not + /// set or is empty, then `$HOME/.config/git/ignore` is used instead. + /// + /// This is enabled by default. + pub fn git_global(&mut self, yes: bool) -> &mut WalkBuilder { + self.ig_builder.git_global(yes); + self + } + + /// Enables reading `.gitignore` files. + /// + /// `.gitignore` files have match semantics as described in the `gitignore` + /// man page. + /// + /// This is enabled by default. 
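A sketch combining `standard_filters` with a custom ignore file name, mirroring the `custom_ignore_exclusive_use` test below; `.myignore` is a hypothetical file name chosen only for illustration. All default filters are switched off, so only the custom ignore file is consulted.

extern crate ignore;

use ignore::WalkBuilder;

fn main() {
    let mut builder = WalkBuilder::new("./");
    // Disable hidden/parents/.ignore/gitignore/global/exclude as a group.
    builder.standard_filters(false);
    // Hypothetical custom ignore file name, highest precedence.
    builder.add_custom_ignore_filename(".myignore");
    for result in builder.build() {
        if let Ok(entry) = result {
            println!("{}", entry.path().display());
        }
    }
}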
+ pub fn git_ignore(&mut self, yes: bool) -> &mut WalkBuilder { + self.ig_builder.git_ignore(yes); + self + } + + /// Enables reading `.git/info/exclude` files. + /// + /// `.git/info/exclude` files have match semantics as described in the + /// `gitignore` man page. + /// + /// This is enabled by default. + pub fn git_exclude(&mut self, yes: bool) -> &mut WalkBuilder { + self.ig_builder.git_exclude(yes); + self + } + + /// Set a function for sorting directory entries by their path. + /// + /// If a compare function is set, the resulting iterator will return all + /// paths in sorted order. The compare function will be called to compare + /// entries from the same directory. + /// + /// This is like `sort_by_file_name`, except the comparator accepts + /// a `&Path` instead of the base file name, which permits it to sort by + /// more criteria. + /// + /// This method will override any previous sorter set by this method or + /// by `sort_by_file_name`. + /// + /// Note that this is not used in the parallel iterator. + pub fn sort_by_file_path( + &mut self, + cmp: F, + ) -> &mut WalkBuilder + where F: Fn(&Path, &Path) -> cmp::Ordering + Send + Sync + 'static + { + self.sorter = Some(Sorter::ByPath(Arc::new(cmp))); + self + } + + /// Set a function for sorting directory entries by file name. + /// + /// If a compare function is set, the resulting iterator will return all + /// paths in sorted order. The compare function will be called to compare + /// names from entries from the same directory using only the name of the + /// entry. + /// + /// This method will override any previous sorter set by this method or + /// by `sort_by_file_path`. + /// + /// Note that this is not used in the parallel iterator. + pub fn sort_by_file_name(&mut self, cmp: F) -> &mut WalkBuilder + where F: Fn(&OsStr, &OsStr) -> cmp::Ordering + Send + Sync + 'static + { + self.sorter = Some(Sorter::ByName(Arc::new(cmp))); + self + } + + /// Do not cross file system boundaries. + /// + /// When this option is enabled, directory traversal will not descend into + /// directories that are on a different file system from the root path. + /// + /// Currently, this option is only supported on Unix and Windows. If this + /// option is used on an unsupported platform, then directory traversal + /// will immediately return an error and will not yield any entries. + pub fn same_file_system(&mut self, yes: bool) -> &mut WalkBuilder { + self.same_file_system = yes; + self + } + + /// Do not yield directory entries that are believed to correspond to + /// stdout. + /// + /// This is useful when a command is invoked via shell redirection to a + /// file that is also being read. For example, `grep -r foo ./ > results` + /// might end up trying to search `results` even though it is also writing + /// to it, which could cause an unbounded feedback loop. Setting this + /// option prevents this from happening by skipping over the `results` + /// file. + /// + /// This is disabled by default. + pub fn skip_stdout(&mut self, yes: bool) -> &mut WalkBuilder { + if yes { + self.skip = stdout_handle().map(Arc::new); + } else { + self.skip = None; + } + self + } +} + +/// Walk is a recursive directory iterator over file paths in one or more +/// directories. +/// +/// Only file and directory paths matching the rules are returned. By default, +/// ignore files like `.gitignore` are respected. The precise matching rules +/// and precedence is explained in the documentation for `WalkBuilder`. 
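A sketch of the sorter hooks defined just above: `sort_by_file_name` compares `&OsStr` file names, while `sort_by_file_path` compares full `&Path`s; as the docs note, neither is used by the parallel iterator.

extern crate ignore;

use ignore::WalkBuilder;

fn main() {
    let mut builder = WalkBuilder::new("./");
    // Yield the entries of each directory in lexicographic name order.
    builder.sort_by_file_name(|a, b| a.cmp(b));
    for result in builder.build() {
        if let Ok(entry) = result {
            println!("{}", entry.path().display());
        }
    }
}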
+pub struct Walk { + its: vec::IntoIter<(PathBuf, Option)>, + it: Option, + ig_root: Ignore, + ig: Ignore, + max_filesize: Option, + skip: Option>, +} + +impl Walk { + /// Creates a new recursive directory iterator for the file path given. + /// + /// Note that this uses default settings, which include respecting + /// `.gitignore` files. To configure the iterator, use `WalkBuilder` + /// instead. + pub fn new>(path: P) -> Walk { + WalkBuilder::new(path).build() + } + + fn skip_entry(&self, ent: &DirEntry) -> Result { + if ent.depth() == 0 { + return Ok(false); + } + + if let Some(ref stdout) = self.skip { + if path_equals(ent, stdout)? { + return Ok(true); + } + } + let is_dir = ent.file_type().map_or(false, |ft| ft.is_dir()); + let max_size = self.max_filesize; + let should_skip_path = skip_path(&self.ig, ent.path(), is_dir); + let should_skip_filesize = if !is_dir && max_size.is_some() { + skip_filesize(max_size.unwrap(), ent.path(), &ent.metadata().ok()) + } else { + false + }; + + Ok(should_skip_path || should_skip_filesize) + } +} + +impl Iterator for Walk { + type Item = Result; + + #[inline(always)] + fn next(&mut self) -> Option> { + loop { + let ev = match self.it.as_mut().and_then(|it| it.next()) { + Some(ev) => ev, + None => { + match self.its.next() { + None => return None, + Some((_, None)) => { + return Some(Ok(DirEntry::new_stdin())); + } + Some((path, Some(it))) => { + self.it = Some(it); + if path.is_dir() { + let (ig, err) = self.ig_root.add_parents(path); + self.ig = ig; + if let Some(err) = err { + return Some(Err(err)); + } + } else { + self.ig = self.ig_root.clone(); + } + } + } + continue; + } + }; + match ev { + Err(err) => { + return Some(Err(Error::from_walkdir(err))); + } + Ok(WalkEvent::Exit) => { + self.ig = self.ig.parent().unwrap(); + } + Ok(WalkEvent::Dir(ent)) => { + let mut ent = DirEntry::new_walkdir(ent, None); + let should_skip = match self.skip_entry(&ent) { + Err(err) => return Some(Err(err)), + Ok(should_skip) => should_skip, + }; + if should_skip { + self.it.as_mut().unwrap().it.skip_current_dir(); + // Still need to push this on the stack because + // we'll get a WalkEvent::Exit event for this dir. + // We don't care if it errors though. + let (igtmp, _) = self.ig.add_child(ent.path()); + self.ig = igtmp; + continue; + } + let (igtmp, err) = self.ig.add_child(ent.path()); + self.ig = igtmp; + ent.err = err; + return Some(Ok(ent)); + } + Ok(WalkEvent::File(ent)) => { + let ent = DirEntry::new_walkdir(ent, None); + let should_skip = match self.skip_entry(&ent) { + Err(err) => return Some(Err(err)), + Ok(should_skip) => should_skip, + }; + if should_skip { + continue; + } + return Some(Ok(ent)); + } + } + } + } +} + +/// WalkEventIter transforms a WalkDir iterator into an iterator that more +/// accurately describes the directory tree. Namely, it emits events that are +/// one of three types: directory, file or "exit." An "exit" event means that +/// the entire contents of a directory have been enumerated. 
+struct WalkEventIter { + depth: usize, + it: walkdir::IntoIter, + next: Option>, +} + +#[derive(Debug)] +enum WalkEvent { + Dir(walkdir::DirEntry), + File(walkdir::DirEntry), + Exit, +} + +impl From for WalkEventIter { + fn from(it: WalkDir) -> WalkEventIter { + WalkEventIter { depth: 0, it: it.into_iter(), next: None } + } +} + +impl Iterator for WalkEventIter { + type Item = walkdir::Result; + + #[inline(always)] + fn next(&mut self) -> Option> { + let dent = self.next.take().or_else(|| self.it.next()); + let depth = match dent { + None => 0, + Some(Ok(ref dent)) => dent.depth(), + Some(Err(ref err)) => err.depth(), + }; + if depth < self.depth { + self.depth -= 1; + self.next = dent; + return Some(Ok(WalkEvent::Exit)); + } + self.depth = depth; + match dent { + None => None, + Some(Err(err)) => Some(Err(err)), + Some(Ok(dent)) => { + if dent.file_type().is_dir() { + self.depth += 1; + Some(Ok(WalkEvent::Dir(dent))) + } else { + Some(Ok(WalkEvent::File(dent))) + } + } + } + } +} + +/// WalkState is used in the parallel recursive directory iterator to indicate +/// whether walking should continue as normal, skip descending into a +/// particular directory or quit the walk entirely. +#[derive(Clone, Copy, Debug, Eq, PartialEq)] +pub enum WalkState { + /// Continue walking as normal. + Continue, + /// If the directory entry given is a directory, don't descend into it. + /// In all other cases, this has no effect. + Skip, + /// Quit the entire iterator as soon as possible. + /// + /// Note that this is an inherently asynchronous action. It is possible + /// for more entries to be yielded even after instructing the iterator + /// to quit. + Quit, +} + +impl WalkState { + fn is_quit(&self) -> bool { + *self == WalkState::Quit + } +} + +/// WalkParallel is a parallel recursive directory iterator over files paths +/// in one or more directories. +/// +/// Only file and directory paths matching the rules are returned. By default, +/// ignore files like `.gitignore` are respected. The precise matching rules +/// and precedence is explained in the documentation for `WalkBuilder`. +/// +/// Unlike `Walk`, this uses multiple threads for traversing a directory. +pub struct WalkParallel { + paths: vec::IntoIter, + ig_root: Ignore, + max_filesize: Option, + max_depth: Option, + follow_links: bool, + same_file_system: bool, + threads: usize, + skip: Option>, +} + +impl WalkParallel { + /// Execute the parallel recursive directory iterator. `mkf` is called + /// for each thread used for iteration. The function produced by `mkf` + /// is then in turn called for each visited file path. + pub fn run( + self, + mut mkf: F, + ) where F: FnMut() -> Box) -> WalkState + Send + 'static> { + let mut f = mkf(); + let threads = self.threads(); + // TODO: Figure out how to use a bounded channel here. With an + // unbounded channel, the workers can run away and fill up memory + // with all of the file paths. But a bounded channel doesn't work since + // our producers are also are consumers, so they end up getting stuck. + // + // We probably need to rethink parallel traversal completely to fix + // this. The best case scenario would be finding a way to use rayon + // to do this. + let (tx, rx) = channel::unbounded(); + let mut any_work = false; + // Send the initial set of root paths to the pool of workers. + // Note that we only send directories. For files, we send to them the + // callback directly. 
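A sketch of the parallel API and the three `WalkState` values, assuming the crate-root re-exports; the directory name `target` is purely illustrative. `run` calls the outer closure once per worker thread, and each per-thread closure receives a `Result` and steers the traversal.

extern crate ignore;

use ignore::{WalkBuilder, WalkState};

fn main() {
    let walker = WalkBuilder::new("./").threads(4).build_parallel();
    walker.run(|| {
        Box::new(|result| {
            let entry = match result {
                Ok(entry) => entry,
                Err(err) => {
                    eprintln!("walk error: {}", err);
                    // Asks all workers to stop; more entries may still arrive.
                    return WalkState::Quit;
                }
            };
            if entry.path().ends_with("target") {
                // Only has an effect when the entry is a directory.
                return WalkState::Skip;
            }
            println!("{}", entry.path().display());
            WalkState::Continue
        })
    });
}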
+ for path in self.paths { + let (dent, root_device) = + if path == Path::new("-") { + (DirEntry::new_stdin(), None) + } else { + let root_device = + if !self.same_file_system { + None + } else { + match device_num(&path) { + Ok(root_device) => Some(root_device), + Err(err) => { + let err = Error::Io(err).with_path(path); + if f(Err(err)).is_quit() { + return; + } + continue; + } + } + }; + match DirEntryRaw::from_path(0, path, false) { + Ok(dent) => { + (DirEntry::new_raw(dent, None), root_device) + } + Err(err) => { + if f(Err(err)).is_quit() { + return; + } + continue; + } + } + }; + tx.send(Message::Work(Work { + dent: dent, + ignore: self.ig_root.clone(), + root_device: root_device, + })); + any_work = true; + } + // ... but there's no need to start workers if we don't need them. + if !any_work { + return; + } + // Create the workers and then wait for them to finish. + let num_waiting = Arc::new(AtomicUsize::new(0)); + let num_quitting = Arc::new(AtomicUsize::new(0)); + let quit_now = Arc::new(AtomicBool::new(false)); + let mut handles = vec![]; + for _ in 0..threads { + let worker = Worker { + f: mkf(), + tx: tx.clone(), + rx: rx.clone(), + quit_now: quit_now.clone(), + is_waiting: false, + is_quitting: false, + num_waiting: num_waiting.clone(), + num_quitting: num_quitting.clone(), + threads: threads, + max_depth: self.max_depth, + max_filesize: self.max_filesize, + follow_links: self.follow_links, + skip: self.skip.clone(), + }; + handles.push(thread::spawn(|| worker.run())); + } + drop(tx); + drop(rx); + for handle in handles { + handle.join().unwrap(); + } + } + + fn threads(&self) -> usize { + if self.threads == 0 { + 2 + } else { + self.threads + } + } +} + +/// Message is the set of instructions that a worker knows how to process. +enum Message { + /// A work item corresponds to a directory that should be descended into. + /// Work items for entries that should be skipped or ignored should not + /// be produced. + Work(Work), + /// This instruction indicates that the worker should start quitting. + Quit, +} + +/// A unit of work for each worker to process. +/// +/// Each unit of work corresponds to a directory that should be descended +/// into. +struct Work { + /// The directory entry. + dent: DirEntry, + /// Any ignore matchers that have been built for this directory's parents. + ignore: Ignore, + /// The root device number. When present, only files with the same device + /// number should be considered. + root_device: Option, +} + +impl Work { + /// Returns true if and only if this work item is a directory. + fn is_dir(&self) -> bool { + self.dent.is_dir() + } + + /// Returns true if and only if this work item is a symlink. + fn is_symlink(&self) -> bool { + self.dent.file_type().map_or(false, |ft| ft.is_symlink()) + } + + /// Adds ignore rules for parent directories. + /// + /// Note that this only applies to entries at depth 0. On all other + /// entries, this is a no-op. + fn add_parents(&mut self) -> Option { + if self.dent.depth() > 0 { + return None; + } + // At depth 0, the path of this entry is a root path, so we can + // use it directly to add parent ignore rules. + let (ig, err) = self.ignore.add_parents(self.dent.path()); + self.ignore = ig; + err + } + + /// Reads the directory contents of this work item and adds ignore + /// rules for this directory. + /// + /// If there was a problem with reading the directory contents, then + /// an error is returned. 
If there was a problem reading the ignore + /// rules for this directory, then the error is attached to this + /// work item's directory entry. + fn read_dir(&mut self) -> Result { + let readdir = match fs::read_dir(self.dent.path()) { + Ok(readdir) => readdir, + Err(err) => { + let err = Error::from(err) + .with_path(self.dent.path()) + .with_depth(self.dent.depth()); + return Err(err); + } + }; + let (ig, err) = self.ignore.add_child(self.dent.path()); + self.ignore = ig; + self.dent.err = err; + Ok(readdir) + } +} + +/// A worker is responsible for descending into directories, updating the +/// ignore matchers, producing new work and invoking the caller's callback. +/// +/// Note that a worker is *both* a producer and a consumer. +struct Worker { + /// The caller's callback. + f: Box) -> WalkState + Send + 'static>, + /// The push side of our mpmc queue. + tx: channel::Sender, + /// The receive side of our mpmc queue. + rx: channel::Receiver, + /// Whether all workers should quit at the next opportunity. Note that + /// this is distinct from quitting because of exhausting the contents of + /// a directory. Instead, this is used when the caller's callback indicates + /// that the iterator should quit immediately. + quit_now: Arc, + /// Whether this worker is waiting for more work. + is_waiting: bool, + /// Whether this worker has started to quit. + is_quitting: bool, + /// The number of workers waiting for more work. + num_waiting: Arc, + /// The number of workers waiting to quit. + num_quitting: Arc, + /// The total number of workers. + threads: usize, + /// The maximum depth of directories to descend. A value of `0` means no + /// descension at all. + max_depth: Option, + /// The maximum size a searched file can be (in bytes). If a file exceeds + /// this size it will be skipped. + max_filesize: Option, + /// Whether to follow symbolic links or not. When this is enabled, loop + /// detection is performed. + follow_links: bool, + /// A file handle to skip, currently is either `None` or stdout, if it's + /// a file and it has been requested to skip files identical to stdout. + skip: Option>, +} + +impl Worker { + /// Runs this worker until there is no more work left to do. + /// + /// The worker will call the caller's callback for all entries that aren't + /// skipped by the ignore matcher. + fn run(mut self) { + while let Some(mut work) = self.get_work() { + // If the work is not a directory, then we can just execute the + // caller's callback immediately and move on. 
+ if work.is_symlink() || !work.is_dir() { + if (self.f)(Ok(work.dent)).is_quit() { + self.quit_now(); + return; + } + continue; + } + if let Some(err) = work.add_parents() { + if (self.f)(Err(err)).is_quit() { + self.quit_now(); + return; + } + } + let readdir = match work.read_dir() { + Ok(readdir) => readdir, + Err(err) => { + if (self.f)(Err(err)).is_quit() { + self.quit_now(); + return; + } + continue; + } + }; + let descend = + if let Some(root_device) = work.root_device { + match is_same_file_system(root_device, work.dent.path()) { + Ok(true) => true, + Ok(false) => false, + Err(err) => { + if (self.f)(Err(err)).is_quit() { + self.quit_now(); + return; + } + false + } + } + } else { + true + }; + + let depth = work.dent.depth(); + match (self.f)(Ok(work.dent)) { + WalkState::Continue => {} + WalkState::Skip => continue, + WalkState::Quit => { + self.quit_now(); + return; + } + } + if !descend { + continue; + } + if self.max_depth.map_or(false, |max| depth >= max) { + continue; + } + for result in readdir { + let state = self.run_one( + &work.ignore, + depth + 1, + work.root_device, + result, + ); + if state.is_quit() { + self.quit_now(); + return; + } + } + } + } + + /// Runs the worker on a single entry from a directory iterator. + /// + /// If the entry is a path that should be ignored, then this is a no-op. + /// Otherwise, the entry is pushed on to the queue. (The actual execution + /// of the callback happens in `run`.) + /// + /// If an error occurs while reading the entry, then it is sent to the + /// caller's callback. + /// + /// `ig` is the `Ignore` matcher for the parent directory. `depth` should + /// be the depth of this entry. `result` should be the item yielded by + /// a directory iterator. + fn run_one( + &mut self, + ig: &Ignore, + depth: usize, + root_device: Option, + result: Result, + ) -> WalkState { + let fs_dent = match result { + Ok(fs_dent) => fs_dent, + Err(err) => { + return (self.f)(Err(Error::from(err).with_depth(depth))); + } + }; + let mut dent = match DirEntryRaw::from_entry(depth, &fs_dent) { + Ok(dent) => DirEntry::new_raw(dent, None), + Err(err) => { + return (self.f)(Err(err)); + } + }; + let is_symlink = dent.file_type().map_or(false, |ft| ft.is_symlink()); + if self.follow_links && is_symlink { + let path = dent.path().to_path_buf(); + dent = match DirEntryRaw::from_path(depth, path, true) { + Ok(dent) => DirEntry::new_raw(dent, None), + Err(err) => { + return (self.f)(Err(err)); + } + }; + if dent.is_dir() { + if let Err(err) = check_symlink_loop(ig, dent.path(), depth) { + return (self.f)(Err(err)); + } + } + } + if let Some(ref stdout) = self.skip { + let is_stdout = match path_equals(&dent, stdout) { + Ok(is_stdout) => is_stdout, + Err(err) => return (self.f)(Err(err)), + }; + if is_stdout { + return WalkState::Continue; + } + } + let is_dir = dent.is_dir(); + let max_size = self.max_filesize; + let should_skip_path = skip_path(ig, dent.path(), is_dir); + let should_skip_filesize = + if !is_dir && max_size.is_some() { + skip_filesize( + max_size.unwrap(), + dent.path(), + &dent.metadata().ok(), + ) + } else { + false + }; + + if !should_skip_path && !should_skip_filesize { + self.tx.send(Message::Work(Work { + dent: dent, + ignore: ig.clone(), + root_device: root_device, + })); + } + WalkState::Continue + } + + /// Returns the next directory to descend into. + /// + /// If all work has been exhausted, then this returns None. The worker + /// should then subsequently quit. 
+ fn get_work(&mut self) -> Option { + loop { + if self.is_quit_now() { + return None; + } + match self.rx.try_recv() { + Some(Message::Work(work)) => { + self.waiting(false); + self.quitting(false); + return Some(work); + } + Some(Message::Quit) => { + // We can't just quit because a Message::Quit could be + // spurious. For example, it's possible to observe that + // all workers are waiting even if there's more work to + // be done. + // + // Therefore, we do a bit of a dance to wait until all + // workers have signaled that they're ready to quit before + // actually quitting. + // + // If the Quit message turns out to be spurious, then the + // loop below will break and we'll go back to looking for + // more work. + self.waiting(true); + self.quitting(true); + while !self.is_quit_now() { + let nwait = self.num_waiting(); + let nquit = self.num_quitting(); + // If the number of waiting workers dropped, then + // abort our attempt to quit. + if nwait < self.threads { + break; + } + // If all workers are in this quit loop, then we + // can stop. + if nquit == self.threads { + return None; + } + // Otherwise, spin. + } + } + None => { + self.waiting(true); + self.quitting(false); + if self.num_waiting() == self.threads { + for _ in 0..self.threads { + self.tx.send(Message::Quit); + } + } else { + // You're right to consider this suspicious, but it's + // a useful heuristic to permit producers to catch up + // to consumers without burning the CPU. It is also + // useful as a means to prevent burning the CPU if only + // one worker is left doing actual work. It's not + // perfect and it doesn't leave the CPU completely + // idle, but it's not clear what else we can do. :-/ + thread::sleep(Duration::from_millis(1)); + } + } + } + } + } + + /// Indicates that all workers should quit immediately. + fn quit_now(&self) { + self.quit_now.store(true, Ordering::SeqCst); + } + + /// Returns true if this worker should quit immediately. + fn is_quit_now(&self) -> bool { + self.quit_now.load(Ordering::SeqCst) + } + + /// Returns the total number of workers waiting for work. + fn num_waiting(&self) -> usize { + self.num_waiting.load(Ordering::SeqCst) + } + + /// Returns the total number of workers ready to quit. + fn num_quitting(&self) -> usize { + self.num_quitting.load(Ordering::SeqCst) + } + + /// Sets this worker's "quitting" state to the value of `yes`. + fn quitting(&mut self, yes: bool) { + if yes { + if !self.is_quitting { + self.is_quitting = true; + self.num_quitting.fetch_add(1, Ordering::SeqCst); + } + } else { + if self.is_quitting { + self.is_quitting = false; + self.num_quitting.fetch_sub(1, Ordering::SeqCst); + } + } + } + + /// Sets this worker's "waiting" state to the value of `yes`. 
+ fn waiting(&mut self, yes: bool) { + if yes { + if !self.is_waiting { + self.is_waiting = true; + self.num_waiting.fetch_add(1, Ordering::SeqCst); + } + } else { + if self.is_waiting { + self.is_waiting = false; + self.num_waiting.fetch_sub(1, Ordering::SeqCst); + } + } + } +} + +fn check_symlink_loop( + ig_parent: &Ignore, + child_path: &Path, + child_depth: usize, +) -> Result<(), Error> { + let hchild = Handle::from_path(child_path).map_err(|err| { + Error::from(err).with_path(child_path).with_depth(child_depth) + })?; + for ig in ig_parent.parents().take_while(|ig| !ig.is_absolute_parent()) { + let h = Handle::from_path(ig.path()).map_err(|err| { + Error::from(err).with_path(child_path).with_depth(child_depth) + })?; + if hchild == h { + return Err(Error::Loop { + ancestor: ig.path().to_path_buf(), + child: child_path.to_path_buf(), + }.with_depth(child_depth)); + } + } + Ok(()) +} + +// Before calling this function, make sure that you ensure that is really +// necessary as the arguments imply a file stat. +fn skip_filesize( + max_filesize: u64, + path: &Path, + ent: &Option +) -> bool { + let filesize = match *ent { + Some(ref md) => Some(md.len()), + None => None + }; + + if let Some(fs) = filesize { + if fs > max_filesize { + debug!("ignoring {}: {} bytes", path.display(), fs); + true + } else { + false + } + } else { + false + } +} + +fn skip_path( + ig: &Ignore, + path: &Path, + is_dir: bool, +) -> bool { + let m = ig.matched(path, is_dir); + if m.is_ignore() { + debug!("ignoring {}: {:?}", path.display(), m); + true + } else if m.is_whitelist() { + debug!("whitelisting {}: {:?}", path.display(), m); + false + } else { + false + } +} + +/// Returns a handle to stdout for filtering search. +/// +/// A handle is returned if and only if stdout is being redirected to a file. +/// The handle returned corresponds to that file. +/// +/// This can be used to ensure that we do not attempt to search a file that we +/// may also be writing to. +fn stdout_handle() -> Option { + let h = match Handle::stdout() { + Err(_) => return None, + Ok(h) => h, + }; + let md = match h.as_file().metadata() { + Err(_) => return None, + Ok(md) => md, + }; + if !md.is_file() { + return None; + } + Some(h) +} + +/// Returns true if and only if the given directory entry is believed to be +/// equivalent to the given handle. If there was a problem querying the path +/// for information to determine equality, then that error is returned. +fn path_equals(dent: &DirEntry, handle: &Handle) -> Result { + #[cfg(unix)] + fn never_equal(dent: &DirEntry, handle: &Handle) -> bool { + dent.ino() != Some(handle.ino()) + } + + #[cfg(not(unix))] + fn never_equal(_: &DirEntry, _: &Handle) -> bool { + false + } + + // If we know for sure that these two things aren't equal, then avoid + // the costly extra stat call to determine equality. + if dent.is_stdin() || never_equal(dent, handle) { + return Ok(false); + } + Handle::from_path(dent.path()) + .map(|h| &h == handle) + .map_err(|err| Error::Io(err).with_path(dent.path())) +} + +/// Returns true if and only if the given path is on the same device as the +/// given root device. 
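A sketch of the `same_file_system` option: the parallel walker records the root's device number and, as shown by `device_num` and `is_same_file_system` below, does not descend into directories on a different device (`st_dev` on Unix, the volume serial number on Windows), while the single-threaded walker passes the option through to `walkdir`.

extern crate ignore;

use ignore::WalkBuilder;

fn main() {
    let mut builder = WalkBuilder::new("./");
    // Follow symlinks but never cross a mount point / volume boundary.
    builder.follow_links(true).same_file_system(true);
    for result in builder.build() {
        match result {
            Ok(entry) => println!("{}", entry.path().display()),
            Err(err) => eprintln!("walk error: {}", err),
        }
    }
}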
+fn is_same_file_system(root_device: u64, path: &Path) -> Result { + let dent_device = device_num(path) + .map_err(|err| Error::Io(err).with_path(path))?; + Ok(root_device == dent_device) +} + +#[cfg(unix)] +fn device_num>(path: P)-> io::Result { + use std::os::unix::fs::MetadataExt; + + path.as_ref().metadata().map(|md| md.dev()) +} + + #[cfg(windows)] +fn device_num>(path: P) -> io::Result { + use winapi_util::{Handle, file}; + + let h = Handle::from_path_any(path)?; + file::information(h).map(|info| info.volume_serial_number()) +} + +#[cfg(not(any(unix, windows)))] +fn device_num>(_: P)-> io::Result { + Err(io::Error::new( + io::ErrorKind::Other, + "walkdir: same_file_system option not supported on this platform", + )) +} + +#[cfg(test)] +mod tests { + use std::fs::{self, File}; + use std::io::Write; + use std::path::Path; + use std::sync::{Arc, Mutex}; + + use tempdir::TempDir; + + use super::{DirEntry, WalkBuilder, WalkState}; + + fn wfile>(path: P, contents: &str) { + let mut file = File::create(path).unwrap(); + file.write_all(contents.as_bytes()).unwrap(); + } + + fn wfile_size>(path: P, size: u64) { + let file = File::create(path).unwrap(); + file.set_len(size).unwrap(); + } + + #[cfg(unix)] + fn symlink, Q: AsRef>(src: P, dst: Q) { + use std::os::unix::fs::symlink; + symlink(src, dst).unwrap(); + } + + fn mkdirp>(path: P) { + fs::create_dir_all(path).unwrap(); + } + + fn normal_path(unix: &str) -> String { + if cfg!(windows) { + unix.replace("\\", "/") + } else { + unix.to_string() + } + } + + fn walk_collect(prefix: &Path, builder: &WalkBuilder) -> Vec { + let mut paths = vec![]; + for result in builder.build() { + let dent = match result { + Err(_) => continue, + Ok(dent) => dent, + }; + let path = dent.path().strip_prefix(prefix).unwrap(); + if path.as_os_str().is_empty() { + continue; + } + paths.push(normal_path(path.to_str().unwrap())); + } + paths.sort(); + paths + } + + fn walk_collect_parallel( + prefix: &Path, + builder: &WalkBuilder, + ) -> Vec { + let mut paths = vec![]; + for dent in walk_collect_entries_parallel(builder) { + let path = dent.path().strip_prefix(prefix).unwrap(); + if path.as_os_str().is_empty() { + continue; + } + paths.push(normal_path(path.to_str().unwrap())); + } + paths.sort(); + paths + } + + fn walk_collect_entries_parallel(builder: &WalkBuilder) -> Vec { + let dents = Arc::new(Mutex::new(vec![])); + builder.build_parallel().run(|| { + let dents = dents.clone(); + Box::new(move |result| { + if let Ok(dent) = result { + dents.lock().unwrap().push(dent); + } + WalkState::Continue + }) + }); + + let dents = dents.lock().unwrap(); + dents.to_vec() + } + + fn mkpaths(paths: &[&str]) -> Vec { + let mut paths: Vec<_> = paths.iter().map(|s| s.to_string()).collect(); + paths.sort(); + paths + } + + fn assert_paths( + prefix: &Path, + builder: &WalkBuilder, + expected: &[&str], + ) { + let got = walk_collect(prefix, builder); + assert_eq!(got, mkpaths(expected), "single threaded"); + let got = walk_collect_parallel(prefix, builder); + assert_eq!(got, mkpaths(expected), "parallel"); + } + + #[test] + fn no_ignores() { + let td = TempDir::new("walk-test-").unwrap(); + mkdirp(td.path().join("a/b/c")); + mkdirp(td.path().join("x/y")); + wfile(td.path().join("a/b/foo"), ""); + wfile(td.path().join("x/y/foo"), ""); + + assert_paths(td.path(), &WalkBuilder::new(td.path()), &[ + "x", "x/y", "x/y/foo", "a", "a/b", "a/b/foo", "a/b/c", + ]); + } + + #[test] + fn custom_ignore() { + let td = TempDir::new("walk-test-").unwrap(); + let custom_ignore = ".customignore"; 
+ mkdirp(td.path().join("a")); + wfile(td.path().join(custom_ignore), "foo"); + wfile(td.path().join("foo"), ""); + wfile(td.path().join("a/foo"), ""); + wfile(td.path().join("bar"), ""); + wfile(td.path().join("a/bar"), ""); + + let mut builder = WalkBuilder::new(td.path()); + builder.add_custom_ignore_filename(&custom_ignore); + assert_paths(td.path(), &builder, &["bar", "a", "a/bar"]); + } + + #[test] + fn custom_ignore_exclusive_use() { + let td = TempDir::new("walk-test-").unwrap(); + let custom_ignore = ".customignore"; + mkdirp(td.path().join("a")); + wfile(td.path().join(custom_ignore), "foo"); + wfile(td.path().join("foo"), ""); + wfile(td.path().join("a/foo"), ""); + wfile(td.path().join("bar"), ""); + wfile(td.path().join("a/bar"), ""); + + let mut builder = WalkBuilder::new(td.path()); + builder.ignore(false); + builder.git_ignore(false); + builder.git_global(false); + builder.git_exclude(false); + builder.add_custom_ignore_filename(&custom_ignore); + assert_paths(td.path(), &builder, &["bar", "a", "a/bar"]); + } + + #[test] + fn gitignore() { + let td = TempDir::new("walk-test-").unwrap(); + mkdirp(td.path().join(".git")); + mkdirp(td.path().join("a")); + wfile(td.path().join(".gitignore"), "foo"); + wfile(td.path().join("foo"), ""); + wfile(td.path().join("a/foo"), ""); + wfile(td.path().join("bar"), ""); + wfile(td.path().join("a/bar"), ""); + + assert_paths(td.path(), &WalkBuilder::new(td.path()), &[ + "bar", "a", "a/bar", + ]); + } + + #[test] + fn explicit_ignore() { + let td = TempDir::new("walk-test-").unwrap(); + let igpath = td.path().join(".not-an-ignore"); + mkdirp(td.path().join("a")); + wfile(&igpath, "foo"); + wfile(td.path().join("foo"), ""); + wfile(td.path().join("a/foo"), ""); + wfile(td.path().join("bar"), ""); + wfile(td.path().join("a/bar"), ""); + + let mut builder = WalkBuilder::new(td.path()); + assert!(builder.add_ignore(&igpath).is_none()); + assert_paths(td.path(), &builder, &["bar", "a", "a/bar"]); + } + + #[test] + fn explicit_ignore_exclusive_use() { + let td = TempDir::new("walk-test-").unwrap(); + let igpath = td.path().join(".not-an-ignore"); + mkdirp(td.path().join("a")); + wfile(&igpath, "foo"); + wfile(td.path().join("foo"), ""); + wfile(td.path().join("a/foo"), ""); + wfile(td.path().join("bar"), ""); + wfile(td.path().join("a/bar"), ""); + + let mut builder = WalkBuilder::new(td.path()); + builder.standard_filters(false); + assert!(builder.add_ignore(&igpath).is_none()); + assert_paths(td.path(), &builder, + &[".not-an-ignore", "bar", "a", "a/bar"]); + } + + #[test] + fn gitignore_parent() { + let td = TempDir::new("walk-test-").unwrap(); + mkdirp(td.path().join(".git")); + mkdirp(td.path().join("a")); + wfile(td.path().join(".gitignore"), "foo"); + wfile(td.path().join("a/foo"), ""); + wfile(td.path().join("a/bar"), ""); + + let root = td.path().join("a"); + assert_paths(&root, &WalkBuilder::new(&root), &["bar"]); + } + + #[test] + fn max_depth() { + let td = TempDir::new("walk-test-").unwrap(); + mkdirp(td.path().join("a/b/c")); + wfile(td.path().join("foo"), ""); + wfile(td.path().join("a/foo"), ""); + wfile(td.path().join("a/b/foo"), ""); + wfile(td.path().join("a/b/c/foo"), ""); + + let mut builder = WalkBuilder::new(td.path()); + assert_paths(td.path(), &builder, &[ + "a", "a/b", "a/b/c", "foo", "a/foo", "a/b/foo", "a/b/c/foo", + ]); + assert_paths(td.path(), builder.max_depth(Some(0)), &[]); + assert_paths(td.path(), builder.max_depth(Some(1)), &["a", "foo"]); + assert_paths(td.path(), builder.max_depth(Some(2)), &[ + "a", "a/b", 
"foo", "a/foo", + ]); + } + + #[test] + fn max_filesize() { + let td = TempDir::new("walk-test-").unwrap(); + mkdirp(td.path().join("a/b")); + wfile_size(td.path().join("foo"), 0); + wfile_size(td.path().join("bar"), 400); + wfile_size(td.path().join("baz"), 600); + wfile_size(td.path().join("a/foo"), 600); + wfile_size(td.path().join("a/bar"), 500); + wfile_size(td.path().join("a/baz"), 200); + + let mut builder = WalkBuilder::new(td.path()); + assert_paths(td.path(), &builder, &[ + "a", "a/b", "foo", "bar", "baz", "a/foo", "a/bar", "a/baz", + ]); + assert_paths(td.path(), builder.max_filesize(Some(0)), &[ + "a", "a/b", "foo" + ]); + assert_paths(td.path(), builder.max_filesize(Some(500)), &[ + "a", "a/b", "foo", "bar", "a/bar", "a/baz" + ]); + assert_paths(td.path(), builder.max_filesize(Some(50000)), &[ + "a", "a/b", "foo", "bar", "baz", "a/foo", "a/bar", "a/baz", + ]); + } + + #[cfg(unix)] // because symlinks on windows are weird + #[test] + fn symlinks() { + let td = TempDir::new("walk-test-").unwrap(); + mkdirp(td.path().join("a/b")); + symlink(td.path().join("a/b"), td.path().join("z")); + wfile(td.path().join("a/b/foo"), ""); + + let mut builder = WalkBuilder::new(td.path()); + assert_paths(td.path(), &builder, &[ + "a", "a/b", "a/b/foo", "z", + ]); + assert_paths(td.path(), &builder.follow_links(true), &[ + "a", "a/b", "a/b/foo", "z", "z/foo", + ]); + } + + #[cfg(unix)] // because symlinks on windows are weird + #[test] + fn first_path_not_symlink() { + let td = TempDir::new("walk-test-").unwrap(); + mkdirp(td.path().join("foo")); + + let dents = WalkBuilder::new(td.path().join("foo")) + .build() + .into_iter() + .collect::, _>>() + .unwrap(); + assert_eq!(1, dents.len()); + assert!(!dents[0].path_is_symlink()); + + let dents = walk_collect_entries_parallel( + &WalkBuilder::new(td.path().join("foo")), + ); + assert_eq!(1, dents.len()); + assert!(!dents[0].path_is_symlink()); + } + + #[cfg(unix)] // because symlinks on windows are weird + #[test] + fn symlink_loop() { + let td = TempDir::new("walk-test-").unwrap(); + mkdirp(td.path().join("a/b")); + symlink(td.path().join("a"), td.path().join("a/b/c")); + + let mut builder = WalkBuilder::new(td.path()); + assert_paths(td.path(), &builder, &[ + "a", "a/b", "a/b/c", + ]); + assert_paths(td.path(), &builder.follow_links(true), &[ + "a", "a/b", + ]); + } + + // It's a little tricky to test the 'same_file_system' option since + // we need an environment with more than one file system. We adopt a + // heuristic where /sys is typically a distinct volume on Linux and roll + // with that. + #[test] + #[cfg(target_os = "linux")] + fn same_file_system() { + use super::device_num; + + // If for some reason /sys doesn't exist or isn't a directory, just + // skip this test. + if !Path::new("/sys").is_dir() { + return; + } + + // If our test directory actually isn't a different volume from /sys, + // then this test is meaningless and we shouldn't run it. + let td = TempDir::new("walk-test-").unwrap(); + if device_num(td.path()).unwrap() == device_num("/sys").unwrap() { + return; + } + + mkdirp(td.path().join("same_file")); + symlink("/sys", td.path().join("same_file").join("alink")); + + // Create a symlink to sys and enable following symlinks. If the + // same_file_system option doesn't work, then this probably will hit a + // permission error. Otherwise, it should just skip over the symlink + // completely. 
+ let mut builder = WalkBuilder::new(td.path()); + builder.follow_links(true).same_file_system(true); + assert_paths(td.path(), &builder, &[ + "same_file", "same_file/alink", + ]); + } +} diff --git a/ignore/tests/gitignore_matched_path_or_any_parents_tests.gitignore b/ignore/tests/gitignore_matched_path_or_any_parents_tests.gitignore new file mode 100644 index 000000000..ac09e12f7 --- /dev/null +++ b/ignore/tests/gitignore_matched_path_or_any_parents_tests.gitignore @@ -0,0 +1,216 @@ +# Based on https://github.com/behnam/gitignore-test/blob/master/.gitignore + +### file in root + +# MATCH /file_root_1 +file_root_00 + +# NO_MATCH +file_root_01/ + +# NO_MATCH +file_root_02/* + +# NO_MATCH +file_root_03/** + + +# MATCH /file_root_10 +/file_root_10 + +# NO_MATCH +/file_root_11/ + +# NO_MATCH +/file_root_12/* + +# NO_MATCH +/file_root_13/** + + +# NO_MATCH +*/file_root_20 + +# NO_MATCH +*/file_root_21/ + +# NO_MATCH +*/file_root_22/* + +# NO_MATCH +*/file_root_23/** + + +# MATCH /file_root_30 +**/file_root_30 + +# NO_MATCH +**/file_root_31/ + +# NO_MATCH +**/file_root_32/* + +# NO_MATCH +**/file_root_33/** + + +### file in sub-dir + +# MATCH /parent_dir/file_deep_1 +file_deep_00 + +# NO_MATCH +file_deep_01/ + +# NO_MATCH +file_deep_02/* + +# NO_MATCH +file_deep_03/** + + +# NO_MATCH +/file_deep_10 + +# NO_MATCH +/file_deep_11/ + +# NO_MATCH +/file_deep_12/* + +# NO_MATCH +/file_deep_13/** + + +# MATCH /parent_dir/file_deep_20 +*/file_deep_20 + +# NO_MATCH +*/file_deep_21/ + +# NO_MATCH +*/file_deep_22/* + +# NO_MATCH +*/file_deep_23/** + + +# MATCH /parent_dir/file_deep_30 +**/file_deep_30 + +# NO_MATCH +**/file_deep_31/ + +# NO_MATCH +**/file_deep_32/* + +# NO_MATCH +**/file_deep_33/** + + +### dir in root + +# MATCH /dir_root_00 +dir_root_00 + +# MATCH /dir_root_01 +dir_root_01/ + +# MATCH /dir_root_02 +dir_root_02/* + +# MATCH /dir_root_03 +dir_root_03/** + + +# MATCH /dir_root_10 +/dir_root_10 + +# MATCH /dir_root_11 +/dir_root_11/ + +# MATCH /dir_root_12 +/dir_root_12/* + +# MATCH /dir_root_13 +/dir_root_13/** + + +# NO_MATCH +*/dir_root_20 + +# NO_MATCH +*/dir_root_21/ + +# NO_MATCH +*/dir_root_22/* + +# NO_MATCH +*/dir_root_23/** + + +# MATCH /dir_root_30 +**/dir_root_30 + +# MATCH /dir_root_31 +**/dir_root_31/ + +# MATCH /dir_root_32 +**/dir_root_32/* + +# MATCH /dir_root_33 +**/dir_root_33/** + + +### dir in sub-dir + +# MATCH /parent_dir/dir_deep_00 +dir_deep_00 + +# MATCH /parent_dir/dir_deep_01 +dir_deep_01/ + +# NO_MATCH +dir_deep_02/* + +# NO_MATCH +dir_deep_03/** + + +# NO_MATCH +/dir_deep_10 + +# NO_MATCH +/dir_deep_11/ + +# NO_MATCH +/dir_deep_12/* + +# NO_MATCH +/dir_deep_13/** + + +# MATCH /parent_dir/dir_deep_20 +*/dir_deep_20 + +# MATCH /parent_dir/dir_deep_21 +*/dir_deep_21/ + +# MATCH /parent_dir/dir_deep_22 +*/dir_deep_22/* + +# MATCH /parent_dir/dir_deep_23 +*/dir_deep_23/** + + +# MATCH /parent_dir/dir_deep_30 +**/dir_deep_30 + +# MATCH /parent_dir/dir_deep_31 +**/dir_deep_31/ + +# MATCH /parent_dir/dir_deep_32 +**/dir_deep_32/* + +# MATCH /parent_dir/dir_deep_33 +**/dir_deep_33/** diff --git a/ignore/tests/gitignore_matched_path_or_any_parents_tests.rs b/ignore/tests/gitignore_matched_path_or_any_parents_tests.rs new file mode 100644 index 000000000..28d8e2f84 --- /dev/null +++ b/ignore/tests/gitignore_matched_path_or_any_parents_tests.rs @@ -0,0 +1,323 @@ +extern crate ignore; + +use std::path::Path; + +use ignore::gitignore::{Gitignore, GitignoreBuilder}; + +const IGNORE_FILE: &'static str = + "tests/gitignore_matched_path_or_any_parents_tests.gitignore"; + +fn 
get_gitignore() -> Gitignore { + let mut builder = GitignoreBuilder::new("ROOT"); + let error = builder.add(IGNORE_FILE); + assert!(error.is_none(), "failed to open gitignore file"); + builder.build().unwrap() +} + +#[test] +#[should_panic(expected = "path is expected to be under the root")] +fn test_path_should_be_under_root() { + let gitignore = get_gitignore(); + let path = "/tmp/some_file"; + gitignore.matched_path_or_any_parents(Path::new(path), false); + assert!(false); +} + +#[test] +fn test_files_in_root() { + let gitignore = get_gitignore(); + let m = |path: &str| { + gitignore.matched_path_or_any_parents(Path::new(path), false) + }; + + // 0x + assert!(m("ROOT/file_root_00").is_ignore()); + assert!(m("ROOT/file_root_01").is_none()); + assert!(m("ROOT/file_root_02").is_none()); + assert!(m("ROOT/file_root_03").is_none()); + + // 1x + assert!(m("ROOT/file_root_10").is_ignore()); + assert!(m("ROOT/file_root_11").is_none()); + assert!(m("ROOT/file_root_12").is_none()); + assert!(m("ROOT/file_root_13").is_none()); + + // 2x + assert!(m("ROOT/file_root_20").is_none()); + assert!(m("ROOT/file_root_21").is_none()); + assert!(m("ROOT/file_root_22").is_none()); + assert!(m("ROOT/file_root_23").is_none()); + + // 3x + assert!(m("ROOT/file_root_30").is_ignore()); + assert!(m("ROOT/file_root_31").is_none()); + assert!(m("ROOT/file_root_32").is_none()); + assert!(m("ROOT/file_root_33").is_none()); +} + + +#[test] +fn test_files_in_deep() { + let gitignore = get_gitignore(); + let m = |path: &str| { + gitignore.matched_path_or_any_parents(Path::new(path), false) + }; + + // 0x + assert!(m("ROOT/parent_dir/file_deep_00").is_ignore()); + assert!(m("ROOT/parent_dir/file_deep_01").is_none()); + assert!(m("ROOT/parent_dir/file_deep_02").is_none()); + assert!(m("ROOT/parent_dir/file_deep_03").is_none()); + + // 1x + assert!(m("ROOT/parent_dir/file_deep_10").is_none()); + assert!(m("ROOT/parent_dir/file_deep_11").is_none()); + assert!(m("ROOT/parent_dir/file_deep_12").is_none()); + assert!(m("ROOT/parent_dir/file_deep_13").is_none()); + + // 2x + assert!(m("ROOT/parent_dir/file_deep_20").is_ignore()); + assert!(m("ROOT/parent_dir/file_deep_21").is_none()); + assert!(m("ROOT/parent_dir/file_deep_22").is_none()); + assert!(m("ROOT/parent_dir/file_deep_23").is_none()); + + // 3x + assert!(m("ROOT/parent_dir/file_deep_30").is_ignore()); + assert!(m("ROOT/parent_dir/file_deep_31").is_none()); + assert!(m("ROOT/parent_dir/file_deep_32").is_none()); + assert!(m("ROOT/parent_dir/file_deep_33").is_none()); +} + + +#[test] +fn test_dirs_in_root() { + let gitignore = get_gitignore(); + let m = |path: &str, is_dir: bool| { + gitignore.matched_path_or_any_parents(Path::new(path), is_dir) + }; + + // 00 + assert!(m("ROOT/dir_root_00", true).is_ignore()); + assert!(m("ROOT/dir_root_00/file", false).is_ignore()); + assert!(m("ROOT/dir_root_00/child_dir", true).is_ignore()); + assert!(m("ROOT/dir_root_00/child_dir/file", false).is_ignore()); + + // 01 + assert!(m("ROOT/dir_root_01", true).is_ignore()); + assert!(m("ROOT/dir_root_01/file", false).is_ignore()); + assert!(m("ROOT/dir_root_01/child_dir", true).is_ignore()); + assert!(m("ROOT/dir_root_01/child_dir/file", false).is_ignore()); + + // 02 + assert!(m("ROOT/dir_root_02", true).is_none()); // dir itself doesn't match + assert!(m("ROOT/dir_root_02/file", false).is_ignore()); + assert!(m("ROOT/dir_root_02/child_dir", true).is_ignore()); + assert!(m("ROOT/dir_root_02/child_dir/file", false).is_ignore()); + + // 03 + assert!(m("ROOT/dir_root_03", true).is_none()); 
// dir itself doesn't match + assert!(m("ROOT/dir_root_03/file", false).is_ignore()); + assert!(m("ROOT/dir_root_03/child_dir", true).is_ignore()); + assert!(m("ROOT/dir_root_03/child_dir/file", false).is_ignore()); + + // 10 + assert!(m("ROOT/dir_root_10", true).is_ignore()); + assert!(m("ROOT/dir_root_10/file", false).is_ignore()); + assert!(m("ROOT/dir_root_10/child_dir", true).is_ignore()); + assert!(m("ROOT/dir_root_10/child_dir/file", false).is_ignore()); + + // 11 + assert!(m("ROOT/dir_root_11", true).is_ignore()); + assert!(m("ROOT/dir_root_11/file", false).is_ignore()); + assert!(m("ROOT/dir_root_11/child_dir", true).is_ignore()); + assert!(m("ROOT/dir_root_11/child_dir/file", false).is_ignore()); + + // 12 + assert!(m("ROOT/dir_root_12", true).is_none()); // dir itself doesn't match + assert!(m("ROOT/dir_root_12/file", false).is_ignore()); + assert!(m("ROOT/dir_root_12/child_dir", true).is_ignore()); + assert!(m("ROOT/dir_root_12/child_dir/file", false).is_ignore()); + + // 13 + assert!(m("ROOT/dir_root_13", true).is_none()); + assert!(m("ROOT/dir_root_13/file", false).is_ignore()); + assert!(m("ROOT/dir_root_13/child_dir", true).is_ignore()); + assert!(m("ROOT/dir_root_13/child_dir/file", false).is_ignore()); + + // 20 + assert!(m("ROOT/dir_root_20", true).is_none()); + assert!(m("ROOT/dir_root_20/file", false).is_none()); + assert!(m("ROOT/dir_root_20/child_dir", true).is_none()); + assert!(m("ROOT/dir_root_20/child_dir/file", false).is_none()); + + // 21 + assert!(m("ROOT/dir_root_21", true).is_none()); + assert!(m("ROOT/dir_root_21/file", false).is_none()); + assert!(m("ROOT/dir_root_21/child_dir", true).is_none()); + assert!(m("ROOT/dir_root_21/child_dir/file", false).is_none()); + + // 22 + assert!(m("ROOT/dir_root_22", true).is_none()); + assert!(m("ROOT/dir_root_22/file", false).is_none()); + assert!(m("ROOT/dir_root_22/child_dir", true).is_none()); + assert!(m("ROOT/dir_root_22/child_dir/file", false).is_none()); + + // 23 + assert!(m("ROOT/dir_root_23", true).is_none()); + assert!(m("ROOT/dir_root_23/file", false).is_none()); + assert!(m("ROOT/dir_root_23/child_dir", true).is_none()); + assert!(m("ROOT/dir_root_23/child_dir/file", false).is_none()); + + // 30 + assert!(m("ROOT/dir_root_30", true).is_ignore()); + assert!(m("ROOT/dir_root_30/file", false).is_ignore()); + assert!(m("ROOT/dir_root_30/child_dir", true).is_ignore()); + assert!(m("ROOT/dir_root_30/child_dir/file", false).is_ignore()); + + // 31 + assert!(m("ROOT/dir_root_31", true).is_ignore()); + assert!(m("ROOT/dir_root_31/file", false).is_ignore()); + assert!(m("ROOT/dir_root_31/child_dir", true).is_ignore()); + assert!(m("ROOT/dir_root_31/child_dir/file", false).is_ignore()); + + // 32 + assert!(m("ROOT/dir_root_32", true).is_none()); // dir itself doesn't match + assert!(m("ROOT/dir_root_32/file", false).is_ignore()); + assert!(m("ROOT/dir_root_32/child_dir", true).is_ignore()); + assert!(m("ROOT/dir_root_32/child_dir/file", false).is_ignore()); + + // 33 + assert!(m("ROOT/dir_root_33", true).is_none()); // dir itself doesn't match + assert!(m("ROOT/dir_root_33/file", false).is_ignore()); + assert!(m("ROOT/dir_root_33/child_dir", true).is_ignore()); + assert!(m("ROOT/dir_root_33/child_dir/file", false).is_ignore()); +} + + +#[test] +fn test_dirs_in_deep() { + let gitignore = get_gitignore(); + let m = |path: &str, is_dir: bool| { + gitignore.matched_path_or_any_parents(Path::new(path), is_dir) + }; + + // 00 + assert!(m("ROOT/parent_dir/dir_deep_00", true).is_ignore()); + 
assert!(m("ROOT/parent_dir/dir_deep_00/file", false).is_ignore()); + assert!(m("ROOT/parent_dir/dir_deep_00/child_dir", true).is_ignore()); + assert!( + m("ROOT/parent_dir/dir_deep_00/child_dir/file", false).is_ignore() + ); + + // 01 + assert!(m("ROOT/parent_dir/dir_deep_01", true).is_ignore()); + assert!(m("ROOT/parent_dir/dir_deep_01/file", false).is_ignore()); + assert!(m("ROOT/parent_dir/dir_deep_01/child_dir", true).is_ignore()); + assert!( + m("ROOT/parent_dir/dir_deep_01/child_dir/file", false).is_ignore() + ); + + // 02 + assert!(m("ROOT/parent_dir/dir_deep_02", true).is_none()); + assert!(m("ROOT/parent_dir/dir_deep_02/file", false).is_none()); + assert!(m("ROOT/parent_dir/dir_deep_02/child_dir", true).is_none()); + assert!(m("ROOT/parent_dir/dir_deep_02/child_dir/file", false).is_none()); + + // 03 + assert!(m("ROOT/parent_dir/dir_deep_03", true).is_none()); + assert!(m("ROOT/parent_dir/dir_deep_03/file", false).is_none()); + assert!(m("ROOT/parent_dir/dir_deep_03/child_dir", true).is_none()); + assert!(m("ROOT/parent_dir/dir_deep_03/child_dir/file", false).is_none()); + + // 10 + assert!(m("ROOT/parent_dir/dir_deep_10", true).is_none()); + assert!(m("ROOT/parent_dir/dir_deep_10/file", false).is_none()); + assert!(m("ROOT/parent_dir/dir_deep_10/child_dir", true).is_none()); + assert!(m("ROOT/parent_dir/dir_deep_10/child_dir/file", false).is_none()); + + // 11 + assert!(m("ROOT/parent_dir/dir_deep_11", true).is_none()); + assert!(m("ROOT/parent_dir/dir_deep_11/file", false).is_none()); + assert!(m("ROOT/parent_dir/dir_deep_11/child_dir", true).is_none()); + assert!(m("ROOT/parent_dir/dir_deep_11/child_dir/file", false).is_none()); + + // 12 + assert!(m("ROOT/parent_dir/dir_deep_12", true).is_none()); + assert!(m("ROOT/parent_dir/dir_deep_12/file", false).is_none()); + assert!(m("ROOT/parent_dir/dir_deep_12/child_dir", true).is_none()); + assert!(m("ROOT/parent_dir/dir_deep_12/child_dir/file", false).is_none()); + + // 13 + assert!(m("ROOT/parent_dir/dir_deep_13", true).is_none()); + assert!(m("ROOT/parent_dir/dir_deep_13/file", false).is_none()); + assert!(m("ROOT/parent_dir/dir_deep_13/child_dir", true).is_none()); + assert!(m("ROOT/parent_dir/dir_deep_13/child_dir/file", false).is_none()); + + // 20 + assert!(m("ROOT/parent_dir/dir_deep_20", true).is_ignore()); + assert!(m("ROOT/parent_dir/dir_deep_20/file", false).is_ignore()); + assert!(m("ROOT/parent_dir/dir_deep_20/child_dir", true).is_ignore()); + assert!( + m("ROOT/parent_dir/dir_deep_20/child_dir/file", false).is_ignore() + ); + + // 21 + assert!(m("ROOT/parent_dir/dir_deep_21", true).is_ignore()); + assert!(m("ROOT/parent_dir/dir_deep_21/file", false).is_ignore()); + assert!(m("ROOT/parent_dir/dir_deep_21/child_dir", true).is_ignore()); + assert!( + m("ROOT/parent_dir/dir_deep_21/child_dir/file", false).is_ignore() + ); + + // 22 + // dir itself doesn't match + assert!(m("ROOT/parent_dir/dir_deep_22", true).is_none()); + assert!(m("ROOT/parent_dir/dir_deep_22/file", false).is_ignore()); + assert!(m("ROOT/parent_dir/dir_deep_22/child_dir", true).is_ignore()); + assert!( + m("ROOT/parent_dir/dir_deep_22/child_dir/file", false).is_ignore() + ); + + // 23 + // dir itself doesn't match + assert!(m("ROOT/parent_dir/dir_deep_23", true).is_none()); + assert!(m("ROOT/parent_dir/dir_deep_23/file", false).is_ignore()); + assert!(m("ROOT/parent_dir/dir_deep_23/child_dir", true).is_ignore()); + assert!( + m("ROOT/parent_dir/dir_deep_23/child_dir/file", false).is_ignore() + ); + + // 30 + assert!(m("ROOT/parent_dir/dir_deep_30", 
true).is_ignore()); + assert!(m("ROOT/parent_dir/dir_deep_30/file", false).is_ignore()); + assert!(m("ROOT/parent_dir/dir_deep_30/child_dir", true).is_ignore()); + assert!( + m("ROOT/parent_dir/dir_deep_30/child_dir/file", false).is_ignore() + ); + + // 31 + assert!(m("ROOT/parent_dir/dir_deep_31", true).is_ignore()); + assert!(m("ROOT/parent_dir/dir_deep_31/file", false).is_ignore()); + assert!(m("ROOT/parent_dir/dir_deep_31/child_dir", true).is_ignore()); + assert!( + m("ROOT/parent_dir/dir_deep_31/child_dir/file", false).is_ignore() + ); + + // 32 + // dir itself doesn't match + assert!(m("ROOT/parent_dir/dir_deep_32", true).is_none()); + assert!(m("ROOT/parent_dir/dir_deep_32/file", false).is_ignore()); + assert!(m("ROOT/parent_dir/dir_deep_32/child_dir", true).is_ignore()); + assert!( + m("ROOT/parent_dir/dir_deep_32/child_dir/file", false).is_ignore() + ); + + // 33 + // dir itself doesn't match + assert!(m("ROOT/parent_dir/dir_deep_33", true).is_none()); + assert!(m("ROOT/parent_dir/dir_deep_33/file", false).is_ignore()); + assert!(m("ROOT/parent_dir/dir_deep_33/child_dir", true).is_ignore()); + assert!( + m("ROOT/parent_dir/dir_deep_33/child_dir/file", false).is_ignore() + ); +} diff --git a/itoa/.cargo-checksum.json b/itoa/.cargo-checksum.json new file mode 100644 index 000000000..2fb389bee --- /dev/null +++ b/itoa/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{},"package":"1306f3464951f30e30d12373d31c79fbd52d236e5e896fd92f96ec7babbbe60b"} \ No newline at end of file diff --git a/itoa/Cargo.toml b/itoa/Cargo.toml new file mode 100644 index 000000000..dd56fee2c --- /dev/null +++ b/itoa/Cargo.toml @@ -0,0 +1,30 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "itoa" +version = "0.4.3" +authors = ["David Tolnay "] +exclude = ["performance.png"] +description = "Fast functions for printing integer primitives to an io::Write" +documentation = "https://github.com/dtolnay/itoa" +readme = "README.md" +categories = ["value-formatting"] +license = "MIT/Apache-2.0" +repository = "https://github.com/dtolnay/itoa" + +[features] +default = ["std"] +i128 = [] +std = [] +[badges.travis-ci] +repository = "dtolnay/itoa" diff --git a/itoa/LICENSE-APACHE b/itoa/LICENSE-APACHE new file mode 100644 index 000000000..16fe87b06 --- /dev/null +++ b/itoa/LICENSE-APACHE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
diff --git a/itoa/LICENSE-MIT b/itoa/LICENSE-MIT new file mode 100644 index 000000000..31aa79387 --- /dev/null +++ b/itoa/LICENSE-MIT @@ -0,0 +1,23 @@ +Permission is hereby granted, free of charge, to any +person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without +limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software +is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions +of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/itoa/README.md b/itoa/README.md new file mode 100644 index 000000000..759be9d20 --- /dev/null +++ b/itoa/README.md @@ -0,0 +1,82 @@ +itoa +==== + +[![Build Status](https://api.travis-ci.org/dtolnay/itoa.svg?branch=master)](https://travis-ci.org/dtolnay/itoa) +[![Latest Version](https://img.shields.io/crates/v/itoa.svg)](https://crates.io/crates/itoa) + +This crate provides fast functions for printing integer primitives to an +[`io::Write`](https://doc.rust-lang.org/std/io/trait.Write.html) or a +[`fmt::Write`](https://doc.rust-lang.org/core/fmt/trait.Write.html). The +implementation comes straight from +[libcore](https://github.com/rust-lang/rust/blob/b8214dc6c6fc20d0a660fb5700dca9ebf51ebe89/src/libcore/fmt/num.rs#L201-L254) +but avoids the performance penalty of going through +[`fmt::Formatter`](https://doc.rust-lang.org/std/fmt/struct.Formatter.html). + +See also [`dtoa`](https://github.com/dtolnay/dtoa) for printing floating point +primitives. + +## Performance (lower is better) + +![performance](https://raw.githubusercontent.com/dtolnay/itoa/master/performance.png) + +## Functions + +```rust +extern crate itoa; + +// write to a vector or other io::Write +let mut buf = Vec::new(); +itoa::write(&mut buf, 128u64)?; +println!("{:?}", buf); + +// write to a stack buffer +let mut bytes = [b'\0'; 20]; +let n = itoa::write(&mut bytes[..], 128u64)?; +println!("{:?}", &bytes[..n]); + +// write to a String +let mut s = String::new(); +itoa::fmt(&mut s, 128u64)?; +println!("{}", s); +``` + +The function signatures are: + +```rust +fn write(writer: W, value: V) -> io::Result; + +fn fmt(writer: W, value: V) -> fmt::Result; +``` + +where `itoa::Integer` is implemented for `i8`, `u8`, `i16`, `u16`, `i32`, `u32`, +`i64`, `u64`, `i128`, `u128`, `isize` and `usize`. 128-bit integer support is +only available with the nightly compiler when the `i128` feature is enabled for +this crate. The return value gives the number of bytes written. + +The `write` function is only available when the `std` feature is enabled +(default is enabled). + +## Dependency + +Itoa is available on [crates.io](https://crates.io/crates/itoa). 
Use the +following in `Cargo.toml`: + +```toml +[dependencies] +itoa = "0.4" +``` + +## License + +Licensed under either of + + * Apache License, Version 2.0 ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0) + * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT) + +at your option. + +### Contribution + +Unless you explicitly state otherwise, any contribution intentionally submitted +for inclusion in itoa by you, as defined in the Apache-2.0 license, shall be +dual licensed as above, without any additional terms or conditions. diff --git a/itoa/benches/bench.rs b/itoa/benches/bench.rs new file mode 100644 index 000000000..8f5f25d63 --- /dev/null +++ b/itoa/benches/bench.rs @@ -0,0 +1,84 @@ +#![cfg_attr(feature = "i128", feature(i128_type, i128))] +#![cfg_attr(feature = "cargo-clippy", allow(cast_lossless))] +#![feature(test)] +#![allow(non_snake_case)] + +extern crate itoa; +extern crate test; + +macro_rules! benches { + ( + $( + $(#[$attr:meta])* + $name:ident($value:expr) + ),* + ) => { + mod bench_itoa_write { + use test::{Bencher, black_box}; + $( + $(#[$attr])* + #[bench] + fn $name(b: &mut Bencher) { + use itoa; + + let mut buf = Vec::with_capacity(40); + + b.iter(|| { + buf.clear(); + itoa::write(&mut buf, black_box($value)).unwrap() + }); + } + )* + } + + mod bench_itoa_fmt { + use test::{Bencher, black_box}; + $( + $(#[$attr])* + #[bench] + fn $name(b: &mut Bencher) { + use itoa; + + let mut buf = String::with_capacity(40); + + b.iter(|| { + buf.clear(); + itoa::fmt(&mut buf, black_box($value)).unwrap() + }); + } + )* + } + + mod bench_std_fmt { + use test::{Bencher, black_box}; + $( + $(#[$attr])* + #[bench] + fn $name(b: &mut Bencher) { + use std::io::Write; + + let mut buf = Vec::with_capacity(40); + + b.iter(|| { + buf.clear(); + write!(&mut buf, "{}", black_box($value)).unwrap() + }); + } + )* + } + } +} + +benches!{ + bench_u64_0(0u64), + bench_u64_half(::max_value() as u64), + bench_u64_max(::max_value()), + + bench_i16_0(0i16), + bench_i16_min(::min_value()), + + #[cfg(feature = "i128")] + bench_u128_0(0u128), + #[cfg(feature = "i128")] + bench_u128_max(::max_value()) +} diff --git a/itoa/src/lib.rs b/itoa/src/lib.rs new file mode 100644 index 000000000..3122f7c5a --- /dev/null +++ b/itoa/src/lib.rs @@ -0,0 +1,299 @@ +// Copyright 2016 Itoa Developers +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![doc(html_root_url = "https://docs.rs/itoa/0.4.3")] +#![cfg_attr(not(feature = "std"), no_std)] +#![cfg_attr( + feature = "cargo-clippy", + allow(cast_lossless, unreadable_literal) +)] + +#[cfg(feature = "i128")] +mod udiv128; + +#[cfg(feature = "std")] +use std::{fmt, io, mem, ptr, slice, str}; + +#[cfg(not(feature = "std"))] +use core::{fmt, mem, ptr, slice, str}; + +/// Write integer to an `io::Write`. +#[cfg(feature = "std")] +#[inline] +pub fn write(mut wr: W, value: V) -> io::Result { + let mut buf = Buffer::new(); + let s = buf.format(value); + try!(wr.write_all(s.as_bytes())); + Ok(s.len()) +} + +/// Write integer to an `fmt::Write`. +#[inline] +pub fn fmt(mut wr: W, value: V) -> fmt::Result { + let mut buf = Buffer::new(); + wr.write_str(buf.format(value)) +} + +/// A safe API for formatting integers to text. 
+/// +/// # Example +/// +/// ```rust +/// let mut buffer = itoa::Buffer::new(); +/// let printed = buffer.format(1234); +/// assert_eq!(printed, "1234"); +/// ``` +#[derive(Copy)] +pub struct Buffer { + bytes: [u8; I128_MAX_LEN], +} + +impl Default for Buffer { + #[inline] + fn default() -> Buffer { + Buffer::new() + } +} + +impl Clone for Buffer { + #[inline] + fn clone(&self) -> Self { + Buffer::new() + } +} + +impl Buffer { + /// This is a cheap operation; you don't need to worry about reusing buffers + /// for efficiency. + #[inline] + pub fn new() -> Buffer { + Buffer { + bytes: unsafe { mem::uninitialized() }, + } + } + + /// Print an integer into this buffer and return a reference to its string representation + /// within the buffer. + pub fn format(&mut self, i: I) -> &str { + i.write(self) + } +} + +// Seal to prevent downstream implementations of the Integer trait. +mod private { + pub trait Sealed {} +} + +/// An integer that can be formatted by `itoa::write` and `itoa::fmt`. +/// +/// This trait is sealed and cannot be implemented for types outside of itoa. +pub trait Integer: private::Sealed { + // Not public API. + #[doc(hidden)] + fn write<'a>(self, buf: &'a mut Buffer) -> &'a str; +} + +trait IntegerPrivate { + fn write_to(self, buf: &mut B) -> &[u8]; +} + +const DEC_DIGITS_LUT: &'static [u8] = b"\ + 0001020304050607080910111213141516171819\ + 2021222324252627282930313233343536373839\ + 4041424344454647484950515253545556575859\ + 6061626364656667686970717273747576777879\ + 8081828384858687888990919293949596979899"; + +// Adaptation of the original implementation at +// https://github.com/rust-lang/rust/blob/b8214dc6c6fc20d0a660fb5700dca9ebf51ebe89/src/libcore/fmt/num.rs#L188-L266 +macro_rules! impl_IntegerCommon { + ($max_len:expr, $t:ident) => { + impl Integer for $t { + #[inline] + fn write<'a>(self, buf: &'a mut Buffer) -> &'a str { + unsafe { + debug_assert!($max_len <= I128_MAX_LEN); + let buf = mem::transmute::<&mut [u8; I128_MAX_LEN], &mut [u8; $max_len]>( + &mut buf.bytes, + ); + let bytes = self.write_to(buf); + str::from_utf8_unchecked(bytes) + } + } + } + + impl private::Sealed for $t {} + }; +} + +macro_rules! impl_Integer { + ($($max_len:expr => $t:ident),* as $conv_fn:ident) => {$( + impl_IntegerCommon!($max_len, $t); + + impl IntegerPrivate<[u8; $max_len]> for $t { + #[allow(unused_comparisons)] + #[inline] + fn write_to(self, buf: &mut [u8; $max_len]) -> &[u8] { + let is_nonnegative = self >= 0; + let mut n = if is_nonnegative { + self as $conv_fn + } else { + // convert the negative num to positive by summing 1 to it's 2 complement + (!(self as $conv_fn)).wrapping_add(1) + }; + let mut curr = buf.len() as isize; + let buf_ptr = buf.as_mut_ptr(); + let lut_ptr = DEC_DIGITS_LUT.as_ptr(); + + unsafe { + // need at least 16 bits for the 4-characters-at-a-time to work. 
+ if mem::size_of::<$t>() >= 2 { + // eagerly decode 4 characters at a time + while n >= 10000 { + let rem = (n % 10000) as isize; + n /= 10000; + + let d1 = (rem / 100) << 1; + let d2 = (rem % 100) << 1; + curr -= 4; + ptr::copy_nonoverlapping(lut_ptr.offset(d1), buf_ptr.offset(curr), 2); + ptr::copy_nonoverlapping(lut_ptr.offset(d2), buf_ptr.offset(curr + 2), 2); + } + } + + // if we reach here numbers are <= 9999, so at most 4 chars long + let mut n = n as isize; // possibly reduce 64bit math + + // decode 2 more chars, if > 2 chars + if n >= 100 { + let d1 = (n % 100) << 1; + n /= 100; + curr -= 2; + ptr::copy_nonoverlapping(lut_ptr.offset(d1), buf_ptr.offset(curr), 2); + } + + // decode last 1 or 2 chars + if n < 10 { + curr -= 1; + *buf_ptr.offset(curr) = (n as u8) + b'0'; + } else { + let d1 = n << 1; + curr -= 2; + ptr::copy_nonoverlapping(lut_ptr.offset(d1), buf_ptr.offset(curr), 2); + } + + if !is_nonnegative { + curr -= 1; + *buf_ptr.offset(curr) = b'-'; + } + } + + let len = buf.len() - curr as usize; + unsafe { slice::from_raw_parts(buf_ptr.offset(curr), len) } + } + } + )*}; +} + +const I8_MAX_LEN: usize = 4; +const U8_MAX_LEN: usize = 3; +const I16_MAX_LEN: usize = 6; +const U16_MAX_LEN: usize = 5; +const I32_MAX_LEN: usize = 11; +const U32_MAX_LEN: usize = 10; +const I64_MAX_LEN: usize = 20; +const U64_MAX_LEN: usize = 20; + +impl_Integer!( + I8_MAX_LEN => i8, + U8_MAX_LEN => u8, + I16_MAX_LEN => i16, + U16_MAX_LEN => u16, + I32_MAX_LEN => i32, + U32_MAX_LEN => u32 + as u32); + +impl_Integer!(I64_MAX_LEN => i64, U64_MAX_LEN => u64 as u64); + +#[cfg(target_pointer_width = "16")] +impl_Integer!(I16_MAX_LEN => isize, U16_MAX_LEN => usize as u16); + +#[cfg(target_pointer_width = "32")] +impl_Integer!(I32_MAX_LEN => isize, U32_MAX_LEN => usize as u32); + +#[cfg(target_pointer_width = "64")] +impl_Integer!(I64_MAX_LEN => isize, U64_MAX_LEN => usize as u64); + +#[cfg(all(feature = "i128"))] +macro_rules! impl_Integer128 { + ($($max_len:expr => $t:ident),*) => {$( + impl_IntegerCommon!($max_len, $t); + + impl IntegerPrivate<[u8; $max_len]> for $t { + #[allow(unused_comparisons)] + #[inline] + fn write_to(self, buf: &mut [u8; $max_len]) -> &[u8] { + let is_nonnegative = self >= 0; + let n = if is_nonnegative { + self as u128 + } else { + // convert the negative num to positive by summing 1 to it's 2 complement + (!(self as u128)).wrapping_add(1) + }; + let mut curr = buf.len() as isize; + let buf_ptr = buf.as_mut_ptr(); + + unsafe { + // Divide by 10^19 which is the highest power less than 2^64. + let (n, rem) = udiv128::udivmod_1e19(n); + let buf1 = buf_ptr.offset(curr - U64_MAX_LEN as isize) as *mut [u8; U64_MAX_LEN]; + curr -= rem.write_to(&mut *buf1).len() as isize; + + if n != 0 { + // Memset the base10 leading zeros of rem. + let target = buf.len() as isize - 19; + ptr::write_bytes(buf_ptr.offset(target), b'0', (curr - target) as usize); + curr = target; + + // Divide by 10^19 again. + let (n, rem) = udiv128::udivmod_1e19(n); + let buf2 = buf_ptr.offset(curr - U64_MAX_LEN as isize) as *mut [u8; U64_MAX_LEN]; + curr -= rem.write_to(&mut *buf2).len() as isize; + + if n != 0 { + // Memset the leading zeros. + let target = buf.len() as isize - 38; + ptr::write_bytes(buf_ptr.offset(target), b'0', (curr - target) as usize); + curr = target; + + // There is at most one digit left + // because u128::max / 10^19 / 10^19 is 3. 
+ curr -= 1; + *buf_ptr.offset(curr) = (n as u8) + b'0'; + } + } + + if !is_nonnegative { + curr -= 1; + *buf_ptr.offset(curr) = b'-'; + } + + let len = buf.len() - curr as usize; + slice::from_raw_parts(buf_ptr.offset(curr), len) + } + } + } + )*}; +} + +#[cfg(all(feature = "i128"))] +const U128_MAX_LEN: usize = 39; +const I128_MAX_LEN: usize = 40; + +#[cfg(all(feature = "i128"))] +impl_Integer128!(I128_MAX_LEN => i128, U128_MAX_LEN => u128); diff --git a/itoa/src/udiv128.rs b/itoa/src/udiv128.rs new file mode 100644 index 000000000..70fffc6ba --- /dev/null +++ b/itoa/src/udiv128.rs @@ -0,0 +1,63 @@ +// Copyright 2009-2016 compiler-builtins Developers +// +// The compiler-builtins crate is dual licensed under both the University of +// Illinois "BSD-Like" license and the MIT license. As a user of this code you may +// choose to use it under either license. As a contributor, you agree to allow +// your code to be used under both. +// +// Full text of the relevant licenses is found here: +// https://github.com/rust-lang-nursery/compiler-builtins/blob/master/LICENSE.TXT +// +// +// +// The following code is based on Rust’s [compiler-builtins crate] +// (https://github.com/rust-lang-nursery/compiler-builtins) which +// provides runtime functions for the Rust programs. The Rust +// compiler will automatically link your programs against this crate. +// +// We copied the implementation of '__udivmodti4()' which is an intrinsic +// implementing division with remainder for architectures without 128-bit integer support. +// We have done this two reasons, to work around [bad optimization by LLVM] +// (https://github.com/rust-lang/rust/issues/44545) and to allow function +// inlining which doesn’t happen with the intrinsic. + +#[inline] +pub fn udivmod_1e19(n: u128) -> (u128, u64) { + let d = 10_000_000_000_000_000_000_u64; // 10^19 + + let high = (n >> 64) as u64; + if high == 0 { + let low = n as u64; + return ((low / d) as u128, low % d); + } + + let sr = 65 - high.leading_zeros(); + + // 2 <= sr <= 65 + let mut q: u128 = n << (128 - sr); + let mut r: u128 = n >> sr; + let mut carry: u64 = 0; + + // Don't use a range because they may generate references to memcpy in unoptimized code + // + // Loop invariants: r < d; carry is 0 or 1 + let mut i = 0; + while i < sr { + i += 1; + + // r:q = ((r:q) << 1) | carry + r = (r << 1) | (q >> 127); + q = (q << 1) | carry as u128; + + // carry = 0 + // if r >= d { + // r -= d; + // carry = 1; + // } + let s = (d as u128).wrapping_sub(r).wrapping_sub(1) as i128 >> 127; + carry = (s & 1) as u64; + r -= (d as u128) & s as u128; + } + + ((q << 1) | carry as u128, r as u64) +} diff --git a/itoa/tests/test.rs b/itoa/tests/test.rs new file mode 100644 index 000000000..8047f3312 --- /dev/null +++ b/itoa/tests/test.rs @@ -0,0 +1,51 @@ +#![cfg_attr(feature = "i128", feature(i128_type, i128))] +#![cfg_attr( + feature = "cargo-clippy", + allow(cast_lossless, string_lit_as_bytes) +)] +#![allow(non_snake_case)] + +extern crate itoa; + +macro_rules! 
test { + ( + $( + $(#[$attr:meta])* + $name:ident($value:expr, $expected:expr) + ),* + ) => { + $( + $(#[$attr])* + #[test] + fn $name() { + #[cfg(feature = "std")] + { + let mut buf = [b'\0'; 40]; + let len = itoa::write(&mut buf[..], $value).unwrap(); + assert_eq!(&buf[0..len], $expected.as_bytes()); + } + + let mut s = String::new(); + itoa::fmt(&mut s, $value).unwrap(); + assert_eq!(s, $expected); + } + )* + } +} + +test!{ + test_u64_0(0u64, "0"), + test_u64_half(::max_value() as u64, "4294967295"), + test_u64_max(::max_value(), "18446744073709551615"), + test_i64_min(::min_value(), "-9223372036854775808"), + + test_i16_0(0i16, "0"), + test_i16_min(::min_value(), "-32768"), + + #[cfg(feature = "i128")] + test_u128_0(0u128, "0"), + #[cfg(feature = "i128")] + test_u128_max(::max_value(), "340282366920938463463374607431768211455"), + #[cfg(feature = "i128")] + test_i128_min(::min_value(), "-170141183460469231731687303715884105728") +} diff --git a/jobserver/.cargo-checksum.json b/jobserver/.cargo-checksum.json new file mode 100644 index 000000000..856bba2d3 --- /dev/null +++ b/jobserver/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{},"package":"60af5f849e1981434e4a31d3d782c4774ae9b434ce55b101a96ecfd09147e8be"} \ No newline at end of file diff --git a/jobserver/.pc/.quilt_patches b/jobserver/.pc/.quilt_patches new file mode 100644 index 000000000..6857a8d44 --- /dev/null +++ b/jobserver/.pc/.quilt_patches @@ -0,0 +1 @@ +debian/patches diff --git a/jobserver/.pc/.quilt_series b/jobserver/.pc/.quilt_series new file mode 100644 index 000000000..c2067066a --- /dev/null +++ b/jobserver/.pc/.quilt_series @@ -0,0 +1 @@ +series diff --git a/jobserver/.pc/.version b/jobserver/.pc/.version new file mode 100644 index 000000000..0cfbf0888 --- /dev/null +++ b/jobserver/.pc/.version @@ -0,0 +1 @@ +2 diff --git a/jobserver/.pc/applied-patches b/jobserver/.pc/applied-patches new file mode 100644 index 000000000..90590f229 --- /dev/null +++ b/jobserver/.pc/applied-patches @@ -0,0 +1 @@ +relax-dep-version.patch diff --git a/jobserver/.pc/relax-dep-version.patch/.timestamp b/jobserver/.pc/relax-dep-version.patch/.timestamp new file mode 100644 index 000000000..e69de29bb diff --git a/jobserver/.pc/relax-dep-version.patch/Cargo.toml b/jobserver/.pc/relax-dep-version.patch/Cargo.toml new file mode 100644 index 000000000..5499c5114 --- /dev/null +++ b/jobserver/.pc/relax-dep-version.patch/Cargo.toml @@ -0,0 +1,64 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. 
If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "jobserver" +version = "0.1.11" +authors = ["Alex Crichton "] +description = "An implementation of the GNU make jobserver for Rust\n" +homepage = "https://github.com/alexcrichton/jobserver-rs" +documentation = "https://docs.rs/jobserver" +license = "MIT/Apache-2.0" +repository = "https://github.com/alexcrichton/jobserver-rs" + +[[test]] +name = "client" +path = "tests/client.rs" +harness = false + +[[test]] +name = "server" +path = "tests/server.rs" + +[[test]] +name = "client-of-myself" +path = "tests/client-of-myself.rs" +harness = false + +[[test]] +name = "make-as-a-client" +path = "tests/make-as-a-client.rs" +harness = false + +[[test]] +name = "helper" +path = "tests/helper.rs" +[dependencies.log] +version = "0.4" +[dev-dependencies.futures] +version = "0.1" + +[dev-dependencies.num_cpus] +version = "1.0" + +[dev-dependencies.tempdir] +version = "0.3" + +[dev-dependencies.tokio-core] +version = "0.1" + +[dev-dependencies.tokio-process] +version = "0.1" +[target."cfg(unix)".dependencies.libc] +version = "0.2" +[target."cfg(windows)".dependencies.rand] +version = "0.4" diff --git a/jobserver/Cargo.toml b/jobserver/Cargo.toml new file mode 100644 index 000000000..7eabdb4dc --- /dev/null +++ b/jobserver/Cargo.toml @@ -0,0 +1,64 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "jobserver" +version = "0.1.11" +authors = ["Alex Crichton "] +description = "An implementation of the GNU make jobserver for Rust\n" +homepage = "https://github.com/alexcrichton/jobserver-rs" +documentation = "https://docs.rs/jobserver" +license = "MIT/Apache-2.0" +repository = "https://github.com/alexcrichton/jobserver-rs" + +[[test]] +name = "client" +path = "tests/client.rs" +harness = false + +[[test]] +name = "server" +path = "tests/server.rs" + +[[test]] +name = "client-of-myself" +path = "tests/client-of-myself.rs" +harness = false + +[[test]] +name = "make-as-a-client" +path = "tests/make-as-a-client.rs" +harness = false + +[[test]] +name = "helper" +path = "tests/helper.rs" +[dependencies.log] +version = "0.4" +[dev-dependencies.futures] +version = "0.1" + +[dev-dependencies.num_cpus] +version = "1.0" + +[dev-dependencies.tempdir] +version = "0.3" + +[dev-dependencies.tokio-core] +version = "0.1" + +[dev-dependencies.tokio-process] +version = "0.1" +[target."cfg(unix)".dependencies.libc] +version = "0.2" +[target."cfg(windows)".dependencies.rand] +version = "< 0.6, >= 0.4" diff --git a/jobserver/LICENSE-APACHE b/jobserver/LICENSE-APACHE new file mode 100644 index 000000000..16fe87b06 --- /dev/null +++ b/jobserver/LICENSE-APACHE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
diff --git a/jobserver/LICENSE-MIT b/jobserver/LICENSE-MIT new file mode 100644 index 000000000..39e0ed660 --- /dev/null +++ b/jobserver/LICENSE-MIT @@ -0,0 +1,25 @@ +Copyright (c) 2014 Alex Crichton + +Permission is hereby granted, free of charge, to any +person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without +limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software +is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions +of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/jobserver/README.md b/jobserver/README.md new file mode 100644 index 000000000..53cf6de30 --- /dev/null +++ b/jobserver/README.md @@ -0,0 +1,41 @@ +# jobserver-rs + +An implementation of the GNU make jobserver for Rust + +[![Build Status](https://travis-ci.org/alexcrichton/jobserver-rs.svg?branch=master)](https://travis-ci.org/alexcrichton/jobserver-rs) +[![Build status](https://ci.appveyor.com/api/projects/status/h5jc30hohp7ejd9c/branch/master?svg=true)](https://ci.appveyor.com/project/alexcrichton/jobserver-rs/branch/master) +[![Crates.io](https://img.shields.io/crates/v/jobserver.svg?maxAge=2592000)](https://crates.io/crates/jobserver) + +[Documentation](https://docs.rs/jobserver) + +## Usage + +First, add this to your `Cargo.toml`: + +```toml +[dependencies] +jobserver = "0.1" +``` + +Next, add this to your crate: + +```rust +extern crate jobserver; +``` + +# License + +This project is licensed under either of + + * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or + http://www.apache.org/licenses/LICENSE-2.0) + * MIT license ([LICENSE-MIT](LICENSE-MIT) or + http://opensource.org/licenses/MIT) + +at your option. + +### Contribution + +Unless you explicitly state otherwise, any contribution intentionally submitted +for inclusion in Serde by you, as defined in the Apache-2.0 license, shall be +dual licensed as above, without any additional terms or conditions. 
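For orientation, here is a minimal sketch of how the calls that the jobserver crate documents fit together; it is illustrative only and not taken from the vendored sources (the `make` child process and the parallelism limit of 4 are placeholders), but every API used (`Client::from_env`, `Client::new`, `configure`, `acquire`) appears in the crate documentation included further below in this import.

```rust
// Illustrative sketch: reuse an inherited jobserver if one is present,
// otherwise create a fresh one, hand it to a child process, and gate a
// unit of local work on an acquired token.
extern crate jobserver;

use std::process::Command;
use jobserver::Client;

fn main() {
    // `from_env` is unsafe because on Unix it takes ownership of file
    // descriptors inherited from the parent; see the crate docs.
    let client = match unsafe { Client::from_env() } {
        Some(client) => client,
        None => Client::new(4).expect("failed to create jobserver"),
    };

    // Export the jobserver to a child process (placeholder command).
    let mut cmd = Command::new("make");
    client.configure(&mut cmd);

    // Block until a token is available, do the work, then release the
    // token back to the jobserver by dropping it.
    let token = client.acquire().expect("failed to acquire token");
    // ... perform one unit of parallel work here ...
    drop(token);
}
```

The token is released in `Drop`, so holding the `Acquired` value for exactly the duration of the work is what keeps the process within the shared parallelism limit.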
diff --git a/jobserver/debian/patches/relax-dep-version.patch b/jobserver/debian/patches/relax-dep-version.patch new file mode 100644 index 000000000..b3218cc33 --- /dev/null +++ b/jobserver/debian/patches/relax-dep-version.patch @@ -0,0 +1,8 @@ +--- a/Cargo.toml 2018-08-03 01:58:48.002962262 -0700 ++++ b/Cargo.toml 2018-08-03 01:58:54.275006248 -0700 +@@ -61,4 +61,4 @@ + [target."cfg(unix)".dependencies.libc] + version = "0.2" + [target."cfg(windows)".dependencies.rand] +-version = "0.4" ++version = "< 0.6, >= 0.4" diff --git a/jobserver/debian/patches/relax-dep-version.patch~ b/jobserver/debian/patches/relax-dep-version.patch~ new file mode 100644 index 000000000..b3218cc33 --- /dev/null +++ b/jobserver/debian/patches/relax-dep-version.patch~ @@ -0,0 +1,8 @@ +--- a/Cargo.toml 2018-08-03 01:58:48.002962262 -0700 ++++ b/Cargo.toml 2018-08-03 01:58:54.275006248 -0700 +@@ -61,4 +61,4 @@ + [target."cfg(unix)".dependencies.libc] + version = "0.2" + [target."cfg(windows)".dependencies.rand] +-version = "0.4" ++version = "< 0.6, >= 0.4" diff --git a/jobserver/debian/patches/series b/jobserver/debian/patches/series new file mode 100644 index 000000000..90590f229 --- /dev/null +++ b/jobserver/debian/patches/series @@ -0,0 +1 @@ +relax-dep-version.patch diff --git a/jobserver/src/lib.rs b/jobserver/src/lib.rs new file mode 100644 index 000000000..caa88c2fa --- /dev/null +++ b/jobserver/src/lib.rs @@ -0,0 +1,942 @@ +//! An implementation of the GNU make jobserver. +//! +//! This crate is an implementation, in Rust, of the GNU `make` jobserver for +//! CLI tools that are interoperating with make or otherwise require some form +//! of parallelism limiting across process boundaries. This was originally +//! written for usage in Cargo to both (a) work when `cargo` is invoked from +//! `make` (using `make`'s jobserver) and (b) work when `cargo` invokes build +//! scripts, exporting a jobserver implementation for `make` processes to +//! transitively use. +//! +//! The jobserver implementation can be found in [detail online][docs] but +//! basically boils down to a cross-process semaphore. On Unix this is +//! implemented with the `pipe` syscall and read/write ends of a pipe and on +//! Windows this is implemented literally with IPC semaphores. +//! +//! The jobserver protocol in `make` also dictates when tokens are acquire to +//! run child work, and clients using this crate should take care to implement +//! such details to ensure correct interoperation with `make` itself. +//! +//! ## Examples +//! +//! Connect to a jobserver that was set up by `make` or a different process: +//! +//! ```no_run +//! use jobserver::Client; +//! +//! // See API documentation for why this is `unsafe` +//! let client = match unsafe { Client::from_env() } { +//! Some(client) => client, +//! None => panic!("client not configured"), +//! }; +//! ``` +//! +//! Acquire and release token from a jobserver: +//! +//! ```no_run +//! use jobserver::Client; +//! +//! let client = unsafe { Client::from_env().unwrap() }; +//! let token = client.acquire().unwrap(); // blocks until it is available +//! drop(token); // releases the token when the work is done +//! ``` +//! +//! Create a new jobserver and configure a child process to have access: +//! +//! ``` +//! use std::process::Command; +//! use jobserver::Client; +//! +//! let client = Client::new(4).expect("failed to create jobserver"); +//! let mut cmd = Command::new("make"); +//! client.configure(&mut cmd); +//! ``` +//! +//! ## Caveats +//! +//! 
This crate makes no attempt to release tokens back to a jobserver on +//! abnormal exit of a process. If a process which acquires a token is killed +//! with ctrl-c or some similar signal then tokens will not be released and the +//! jobserver may be in a corrupt state. +//! +//! Note that this is typically ok as ctrl-c means that an entire build process +//! is being torn down, but it's worth being aware of at least! +//! +//! ## Windows caveats +//! +//! There appear to be two implementations of `make` on Windows. On MSYS2 one +//! typically comes as `mingw32-make` and the other as `make` itself. I'm not +//! personally too familiar with what's going on here, but for jobserver-related +//! information the `mingw32-make` implementation uses Windows semaphores +//! whereas the `make` program does not. The `make` program appears to use file +//! descriptors and I'm not really sure how it works, so this crate is not +//! compatible with `make` on Windows. It is, however, compatible with +//! `mingw32-make`. +//! +//! [docs]: http://make.mad-scientist.net/papers/jobserver-implementation/ + +#![deny(missing_docs, missing_debug_implementations)] +#![doc(html_root_url = "https://docs.rs/jobserver/0.1")] + +#[macro_use] +extern crate log; + +use std::env; +use std::io; +use std::process::Command; +use std::sync::Arc; +use std::sync::mpsc::{self, Sender}; + +/// A client of a jobserver +/// +/// This structure is the main type exposed by this library, and is where +/// interaction to a jobserver is configured through. Clients are either created +/// from scratch in which case the internal semphore is initialied on the spot, +/// or a client is created from the environment to connect to a jobserver +/// already created. +/// +/// Some usage examples can be found in the crate documentation for using a +/// client. +/// +/// Note that a `Client` implements the `Clone` trait, and all instances of a +/// `Client` refer to the same jobserver instance. +#[derive(Clone, Debug)] +pub struct Client { + inner: Arc, +} + +/// An acquired token from a jobserver. +/// +/// This token will be released back to the jobserver when it is dropped and +/// otherwise represents the ability to spawn off another thread of work. +#[derive(Debug)] +pub struct Acquired { + client: Arc, + data: imp::Acquired, +} + +impl Client { + /// Creates a new jobserver initialized with the given parallelism limit. + /// + /// A client to the jobserver created will be returned. This client will + /// allow at most `limit` tokens to be acquired from it in parallel. More + /// calls to `acquire` will cause the calling thread to block. + /// + /// Note that the created `Client` is not automatically inherited into + /// spawned child processes from this program. Manual usage of the + /// `configure` function is required for a child process to have access to a + /// job server. + /// + /// # Examples + /// + /// ``` + /// use jobserver::Client; + /// + /// let client = Client::new(4).expect("failed to create jobserver"); + /// ``` + /// + /// # Errors + /// + /// Returns an error if any I/O error happens when attempting to create the + /// jobserver client. + pub fn new(limit: usize) -> io::Result { + Ok(Client { + inner: Arc::new(imp::Client::new(limit)?), + }) + } + + /// Attempts to connect to the jobserver specified in this process's + /// environment. + /// + /// When the a `make` executable calls a child process it will configure the + /// environment of the child to ensure that it has handles to the jobserver + /// it's passing down. 
This function will attempt to look for these details + /// and connect to the jobserver. + /// + /// Note that the created `Client` is not automatically inherited into + /// spawned child processes from this program. Manual usage of the + /// `configure` function is required for a child process to have access to a + /// job server. + /// + /// # Return value + /// + /// If a jobserver was found in the environment and it looks correct then + /// `Some` of the connected client will be returned. If no jobserver was + /// found then `None` will be returned. + /// + /// Note that on Unix the `Client` returned **takes ownership of the file + /// descriptors specified in the environment**. Jobservers on Unix are + /// implemented with `pipe` file descriptors, and they're inherited from + /// parent processes. This `Client` returned takes ownership of the file + /// descriptors for this process and will close the file descriptors after + /// this value is dropped. + /// + /// Additionally on Unix this function will configure the file descriptors + /// with `CLOEXEC` so they're not automatically inherited by spawned + /// children. + /// + /// # Unsafety + /// + /// This function is `unsafe` to call on Unix specifically as it + /// transitively requires usage of the `from_raw_fd` function, which is + /// itself unsafe in some circumstances. + /// + /// It's recommended to call this function very early in the lifetime of a + /// program before any other file descriptors are opened. That way you can + /// make sure to take ownership properly of the file descriptors passed + /// down, if any. + /// + /// It's generally unsafe to call this function twice in a program if the + /// previous invocation returned `Some`. + /// + /// Note, though, that on Windows it should be safe to call this function + /// any number of times. + pub unsafe fn from_env() -> Option { + let var = match env::var("CARGO_MAKEFLAGS") + .or(env::var("MAKEFLAGS")) + .or(env::var("MFLAGS")) { + Ok(s) => s, + Err(_) => return None, + }; + let mut arg = "--jobserver-fds="; + let pos = match var.find(arg) { + Some(i) => i, + None => { + arg = "--jobserver-auth="; + match var.find(arg) { + Some(i) => i, + None => return None, + } + } + }; + + let s = var[pos + arg.len()..].split(' ').next().unwrap(); + imp::Client::open(s).map(|c| { + Client { inner: Arc::new(c) } + }) + } + + /// Acquires a token from this jobserver client. + /// + /// This function will block the calling thread until a new token can be + /// acquired from the jobserver. + /// + /// # Return value + /// + /// On successful acquisition of a token an instance of `Acquired` is + /// returned. This structure, when dropped, will release the token back to + /// the jobserver. It's recommended to avoid leaking this value. + /// + /// # Errors + /// + /// If an I/O error happens while acquiring a token then this function will + /// return immediately with the error. If an error is returned then a token + /// was not acquired. + pub fn acquire(&self) -> io::Result { + let data = try!(self.inner.acquire()); + Ok(Acquired { + client: self.inner.clone(), + data: data, + }) + } + + /// Configures a child process to have access to this client's jobserver as + /// well. + /// + /// This function is required to be called to ensure that a jobserver is + /// properly inherited to a child process. If this function is *not* called + /// then this `Client` will not be accessible in the child process. 
In other + /// words, if not called, then `Client::from_env` will return `None` in the + /// child process (or the equivalent of `Child::from_env` that `make` uses). + /// + /// ## Platform-specific behavior + /// + /// On Unix and Windows this will clobber the `CARGO_MAKEFLAGS` environment + /// variables for the child process, and on Unix this will also allow the + /// two file descriptors for this client to be inherited to the child. + pub fn configure(&self, cmd: &mut Command) { + let arg = self.inner.string_arg(); + // Older implementations of make use `--jobserver-fds` and newer + // implementations use `--jobserver-auth`, pass both to try to catch + // both implementations. + let value = format!("--jobserver-fds={0} --jobserver-auth={0}", arg); + cmd.env("CARGO_MAKEFLAGS", &value); + self.inner.configure(cmd); + } + + /// Converts this `Client` into a helper thread to deal with a blocking + /// `acquire` function a little more easily. + /// + /// The fact that the `acquire` function on `Client` blocks isn't always + /// the easiest to work with. Typically you're using a jobserver to + /// manage running other events in parallel! This means that you need to + /// either (a) wait for an existing job to finish or (b) wait for a + /// new token to become available. + /// + /// Unfortunately the blocking in `acquire` happens at the implementation + /// layer of jobservers. On Unix this requires a blocking call to `read` + /// and on Windows this requires one of the `WaitFor*` functions. Both + /// of these situations aren't the easiest to deal with: + /// + /// * On Unix there's basically only one way to wake up a `read` early, and + /// that's through a signal. This is what the `make` implementation + /// itself uses, relying on `SIGCHLD` to wake up a blocking acquisition + /// of a new job token. Unfortunately nonblocking I/O is not an option + /// here, so it means that "waiting for one of two events" means that + /// the latter event must generate a signal! This is not always the case + /// on unix for all jobservers. + /// + /// * On Windows you'd have to basically use the `WaitForMultipleObjects` + /// which means that you've got to canonicalize all your event sources + /// into a `HANDLE` which also isn't the easiest thing to do + /// unfortunately. + /// + /// This function essentially attempts to ease these limitations by + /// converting this `Client` into a helper thread spawned into this + /// process. The application can then request that the helper thread + /// acquires tokens and the provided closure will be invoked for each token + /// acquired. + /// + /// The intention is that this function can be used to translate the event + /// of a token acquisition into an arbitrary user-defined event. + /// + /// # Arguments + /// + /// This function will consume the `Client` provided to be transferred to + /// the helper thread that is spawned. Additionally a closure `f` is + /// provided to be invoked whenever a token is acquired. + /// + /// This closure is only invoked after calls to + /// `HelperThread::request_token` have been made and a token itself has + /// been acquired. If an error happens while acquiring the token then + /// an error will be yielded to the closure as well. + /// + /// # Return Value + /// + /// This function will return an instance of the `HelperThread` structure + /// which is used to manage the helper thread associated with this client. + /// Through the `HelperThread` you'll request that tokens are acquired. 
+ /// When acquired, the closure provided here is invoked. + /// + /// When the `HelperThread` structure is returned it will be gracefully + /// torn down, and the calling thread will be blocked until the thread is + /// torn down (which should be prompt). + /// + /// # Errors + /// + /// This function may fail due to creation of the helper thread or + /// auxiliary I/O objects to manage the helper thread. In any of these + /// situations the error is propagated upwards. + /// + /// # Platform-specific behavior + /// + /// On Windows this function behaves pretty normally as expected, but on + /// Unix the implementation is... a little heinous. As mentioned above + /// we're forced into blocking I/O for token acquisition, namely a blocking + /// call to `read`. We must be able to unblock this, however, to tear down + /// the helper thread gracefully! + /// + /// Essentially what happens is that we'll send a signal to the helper + /// thread spawned and rely on `EINTR` being returned to wake up the helper + /// thread. This involves installing a global `SIGUSR1` handler that does + /// nothing along with sending signals to that thread. This may cause + /// odd behavior in some applications, so it's recommended to review and + /// test thoroughly before using this. + pub fn into_helper_thread(self, f: F) -> io::Result + where F: FnMut(io::Result) + Send + 'static, + { + let (tx, rx) = mpsc::channel(); + Ok(HelperThread { + inner: Some(imp::spawn_helper(self, rx, Box::new(f))?), + tx: Some(tx), + }) + } +} + +impl Drop for Acquired { + fn drop(&mut self) { + drop(self.client.release(&self.data)); + } +} + +/// Structure returned from `Client::into_helper_thread` to manage the lifetime +/// of the helper thread returned, see those associated docs for more info. +#[derive(Debug)] +pub struct HelperThread { + inner: Option, + tx: Option>, +} + +impl HelperThread { + /// Request that the helper thread acquires a token, eventually calling the + /// original closure with a token when it's available. + /// + /// For more information, see the docs on that function. + pub fn request_token(&self) { + self.tx.as_ref().unwrap().send(()).unwrap(); + } +} + +impl Drop for HelperThread { + fn drop(&mut self) { + drop(self.tx.take()); + self.inner.take().unwrap().join(); + } +} + +#[cfg(unix)] +mod imp { + extern crate libc; + + use std::fs::File; + use std::io::{self, Read, Write}; + use std::mem; + use std::os::unix::prelude::*; + use std::process::Command; + use std::ptr; + use std::sync::atomic::{AtomicBool, AtomicUsize, + ATOMIC_BOOL_INIT, ATOMIC_USIZE_INIT, + Ordering}; + use std::sync::mpsc::{self, Receiver, RecvTimeoutError}; + use std::sync::{Arc, Once, ONCE_INIT}; + use std::thread::{self, JoinHandle, Builder}; + use std::time::Duration; + + use self::libc::c_int; + + #[derive(Debug)] + pub struct Client { + read: File, + write: File, + } + + #[derive(Debug)] + pub struct Acquired { + byte: u8, + } + + impl Client { + pub fn new(limit: usize) -> io::Result { + let client = unsafe { Client::mk()? }; + // I don't think the character written here matters, but I could be + // wrong! + for _ in 0..limit { + (&client.write).write(&[b'|'])?; + } + info!("created a jobserver: {:?}", client); + Ok(client) + } + + unsafe fn mk() -> io::Result { + static INVALID: AtomicBool = ATOMIC_BOOL_INIT; + let mut pipes = [0; 2]; + + // Attempt atomically-create-with-cloexec if we can. 
Note that even + // when libc has the symbol, `pipe2` might still not be supported on + // the running kernel -> `ENOSYS`, then we need to use the fallback. + if cfg!(target_os = "linux") && !INVALID.load(Ordering::SeqCst) { + if let Some(pipe2) = pipe2() { + match cvt(pipe2(pipes.as_mut_ptr(), libc::O_CLOEXEC)) { + Ok(_) => return Ok(Client::from_fds(pipes[0], pipes[1])), + Err(ref e) if e.raw_os_error() == Some(libc::ENOSYS) => { + INVALID.store(true, Ordering::SeqCst); + } + Err(e) => return Err(e), + } + } + } + + cvt(libc::pipe(pipes.as_mut_ptr()))?; + drop(set_cloexec(pipes[0], true)); + drop(set_cloexec(pipes[1], true)); + Ok(Client::from_fds(pipes[0], pipes[1])) + } + + pub unsafe fn open(s: &str) -> Option { + let mut parts = s.splitn(2, ','); + let read = parts.next().unwrap(); + let write = match parts.next() { + Some(s) => s, + None => return None, + }; + + let read = match read.parse() { + Ok(n) => n, + Err(_) => return None, + }; + let write = match write.parse() { + Ok(n) => n, + Err(_) => return None, + }; + + // Ok so we've got two integers that look like file descriptors, but + // for extra sanity checking let's see if they actually look like + // instances of a pipe before we return the client. + // + // If we're called from `make` *without* the leading + on our rule + // then we'll have `MAKEFLAGS` env vars but won't actually have + // access to the file descriptors. + if is_valid_fd(read) && is_valid_fd(write) { + info!("using env fds {} and {}", read, write); + drop(set_cloexec(read, true)); + drop(set_cloexec(write, true)); + Some(Client::from_fds(read, write)) + } else { + info!("one of {} or {} isn't a pipe", read, write); + None + } + } + + unsafe fn from_fds(read: c_int, write: c_int) -> Client { + Client { + read: File::from_raw_fd(read), + write: File::from_raw_fd(write), + } + } + + pub fn acquire(&self) -> io::Result { + // We don't actually know if the file descriptor here is set in + // blocking or nonblocking mode. AFAIK all released versions of + // `make` use blocking fds for the jobserver, but the unreleased + // version of `make` doesn't. In the unreleased version jobserver + // fds are set to nonblocking and combined with `pselect` + // internally. + // + // Here we try to be compatible with both strategies. We + // unconditionally expect the file descriptor to be in nonblocking + // mode and if it happens to be in blocking mode then most of this + // won't end up actually being necessary! + // + // We use `poll` here to block this thread waiting for read + // readiness, and then afterwards we perform the `read` itself. If + // the `read` returns that it would block then we start over and try + // again. + // + // Also note that we explicitly don't handle EINTR here. That's used + // to shut us down, so we otherwise punt all errors upwards. 
+ unsafe { + let mut fd: libc::pollfd = mem::zeroed(); + fd.fd = self.read.as_raw_fd(); + fd.events = libc::POLLIN; + loop { + fd.revents = 0; + if libc::poll(&mut fd, 1, -1) == -1 { + return Err(io::Error::last_os_error()) + } + if fd.revents == 0 { + continue + } + let mut buf = [0]; + match (&self.read).read(&mut buf) { + Ok(1) => return Ok(Acquired { byte: buf[0] }), + Ok(_) => { + return Err(io::Error::new(io::ErrorKind::Other, + "early EOF on jobserver pipe")) + } + Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => {} + Err(e) => return Err(e), + } + } + } + } + + pub fn release(&self, data: &Acquired) -> io::Result<()> { + // Note that the fd may be nonblocking but we're going to go ahead + // and assume that the writes here are always nonblocking (we can + // always quickly release a token). If that turns out to not be the + // case we'll get an error anyway! + match (&self.write).write(&[data.byte])? { + 1 => Ok(()), + _ => Err(io::Error::new(io::ErrorKind::Other, + "failed to write token back to jobserver")), + } + } + + pub fn string_arg(&self) -> String { + format!("{},{} -j", self.read.as_raw_fd(), self.write.as_raw_fd()) + } + + pub fn configure(&self, cmd: &mut Command) { + // Here we basically just want to say that in the child process + // we'll configure the read/write file descriptors to *not* be + // cloexec, so they're inherited across the exec and specified as + // integers through `string_arg` above. + let read = self.read.as_raw_fd(); + let write = self.write.as_raw_fd(); + cmd.before_exec(move || { + set_cloexec(read, false)?; + set_cloexec(write, false)?; + Ok(()) + }); + } + } + + #[derive(Debug)] + pub struct Helper { + thread: JoinHandle<()>, + quitting: Arc, + rx_done: Receiver<()>, + } + + pub fn spawn_helper(client: ::Client, + rx: Receiver<()>, + mut f: Box) + Send>) + -> io::Result + { + static USR1_INIT: Once = ONCE_INIT; + let mut err = None; + USR1_INIT.call_once(|| unsafe { + let mut new: libc::sigaction = mem::zeroed(); + new.sa_sigaction = sigusr1_handler as usize; + new.sa_flags = libc::SA_SIGINFO as _; + if libc::sigaction(libc::SIGUSR1, &new, ptr::null_mut()) != 0 { + err = Some(io::Error::last_os_error()); + } + }); + + if let Some(e) = err.take() { + return Err(e) + } + + let quitting = Arc::new(AtomicBool::new(false)); + let quitting2 = quitting.clone(); + let (tx_done, rx_done) = mpsc::channel(); + let thread = Builder::new().spawn(move || { + 'outer: + for () in rx { + loop { + let res = client.acquire(); + if let Err(ref e) = res { + if e.kind() == io::ErrorKind::Interrupted { + if quitting2.load(Ordering::SeqCst) { + break 'outer + } else { + continue + } + } + } + f(res); + break + } + } + tx_done.send(()).unwrap(); + })?; + + Ok(Helper { + thread: thread, + quitting: quitting, + rx_done: rx_done, + }) + } + + impl Helper { + pub fn join(self) { + self.quitting.store(true, Ordering::SeqCst); + let dur = Duration::from_millis(10); + let mut done = false; + for _ in 0..100 { + unsafe { + // Ignore the return value here of `pthread_kill`, + // apparently on OSX if you kill a dead thread it will + // return an error, but on other platforms it may not. In + // that sense we don't actually know if this will succeed or + // not! 
+ libc::pthread_kill(self.thread.as_pthread_t(), libc::SIGUSR1); + match self.rx_done.recv_timeout(dur) { + Ok(()) | + Err(RecvTimeoutError::Disconnected) => { + done = true; + break + } + Err(RecvTimeoutError::Timeout) => {} + } + } + thread::yield_now(); + } + if done { + drop(self.thread.join()); + } + } + } + + fn is_valid_fd(fd: c_int) -> bool { + unsafe { + return libc::fcntl(fd, libc::F_GETFD) != -1; + } + } + + fn set_cloexec(fd: c_int, set: bool) -> io::Result<()> { + unsafe { + let previous = cvt(libc::fcntl(fd, libc::F_GETFD))?; + let new = if set { + previous | libc::FD_CLOEXEC + } else { + previous & !libc::FD_CLOEXEC + }; + if new != previous { + cvt(libc::fcntl(fd, libc::F_SETFD, new))?; + } + Ok(()) + } + } + + fn cvt(t: c_int) -> io::Result { + if t == -1 { + Err(io::Error::last_os_error()) + } else { + Ok(t) + } + } + + unsafe fn pipe2() -> Option<&'static fn(*mut c_int, c_int) -> c_int> { + static PIPE2: AtomicUsize = ATOMIC_USIZE_INIT; + + if PIPE2.load(Ordering::SeqCst) == 0 { + let name = "pipe2\0"; + let n = match libc::dlsym(libc::RTLD_DEFAULT, name.as_ptr() as *const _) as usize { + 0 => 1, + n => n, + }; + PIPE2.store(n, Ordering::SeqCst); + } + if PIPE2.load(Ordering::SeqCst) == 1 { + None + } else { + mem::transmute(&PIPE2) + } + } + + extern fn sigusr1_handler(_signum: c_int, + _info: *mut libc::siginfo_t, + _ptr: *mut libc::c_void) { + // nothing to do + } +} + +#[cfg(windows)] +mod imp { + extern crate rand; + + use std::ffi::CString; + use std::io; + use std::process::Command; + use std::ptr; + use std::sync::Arc; + use std::sync::mpsc::Receiver; + use std::thread::{Builder, JoinHandle}; + + #[derive(Debug)] + pub struct Client { + sem: Handle, + name: String, + } + + #[derive(Debug)] + pub struct Acquired; + + type BOOL = i32; + type DWORD = u32; + type HANDLE = *mut u8; + type LONG = i32; + + const ERROR_ALREADY_EXISTS: DWORD = 183; + const FALSE: BOOL = 0; + const INFINITE: DWORD = 0xffffffff; + const SEMAPHORE_MODIFY_STATE: DWORD = 0x2; + const SYNCHRONIZE: DWORD = 0x00100000; + const TRUE: BOOL = 1; + const WAIT_OBJECT_0: DWORD = 0; + + extern "system" { + fn CloseHandle(handle: HANDLE) -> BOOL; + fn SetEvent(hEvent: HANDLE) -> BOOL; + fn WaitForMultipleObjects(ncount: DWORD, + lpHandles: *const HANDLE, + bWaitAll: BOOL, + dwMilliseconds: DWORD) -> DWORD; + fn CreateEventA(lpEventAttributes: *mut u8, + bManualReset: BOOL, + bInitialState: BOOL, + lpName: *const i8) -> HANDLE; + fn ReleaseSemaphore(hSemaphore: HANDLE, + lReleaseCount: LONG, + lpPreviousCount: *mut LONG) -> BOOL; + fn CreateSemaphoreA(lpEventAttributes: *mut u8, + lInitialCount: LONG, + lMaximumCount: LONG, + lpName: *const i8) -> HANDLE; + fn OpenSemaphoreA(dwDesiredAccess: DWORD, + bInheritHandle: BOOL, + lpName: *const i8) -> HANDLE; + fn WaitForSingleObject(hHandle: HANDLE, + dwMilliseconds: DWORD) -> DWORD; + } + + impl Client { + pub fn new(limit: usize) -> io::Result { + // Try a bunch of random semaphore names until we get a unique one, + // but don't try for too long. + // + // Note that `limit == 0` is a valid argument above but Windows + // won't let us create a semaphore with 0 slots available to it. Get + // `limit == 0` working by creating a semaphore instead with one + // slot and then immediately acquire it (without ever releaseing it + // back). 
+ for _ in 0..100 { + let mut name = format!("__rust_jobserver_semaphore_{}\0", + rand::random::()); + unsafe { + let create_limit = if limit == 0 {1} else {limit}; + let r = CreateSemaphoreA(ptr::null_mut(), + create_limit as LONG, + create_limit as LONG, + name.as_ptr() as *const _); + if r.is_null() { + return Err(io::Error::last_os_error()) + } + let handle = Handle(r); + + let err = io::Error::last_os_error(); + if err.raw_os_error() == Some(ERROR_ALREADY_EXISTS as i32) { + continue + } + name.pop(); // chop off the trailing nul + let client = Client { + sem: handle, + name: name, + }; + if create_limit != limit { + client.acquire()?; + } + info!("created jobserver {:?}", client); + return Ok(client) + } + } + + Err(io::Error::new(io::ErrorKind::Other, + "failed to find a unique name for a semaphore")) + } + + pub unsafe fn open(s: &str) -> Option { + let name = match CString::new(s) { + Ok(s) => s, + Err(_) => return None, + }; + + let sem = OpenSemaphoreA(SYNCHRONIZE | SEMAPHORE_MODIFY_STATE, + FALSE, + name.as_ptr()); + if sem.is_null() { + info!("failed to open environment semaphore {}", s); + None + } else { + info!("opened environment semaphore {}", s); + Some(Client { + sem: Handle(sem), + name: s.to_string(), + }) + } + } + + pub fn acquire(&self) -> io::Result { + unsafe { + let r = WaitForSingleObject(self.sem.0, INFINITE); + if r == WAIT_OBJECT_0 { + Ok(Acquired) + } else { + Err(io::Error::last_os_error()) + } + } + } + + pub fn release(&self, _data: &Acquired) -> io::Result<()> { + unsafe { + let r = ReleaseSemaphore(self.sem.0, 1, ptr::null_mut()); + if r != 0 { + Ok(()) + } else { + Err(io::Error::last_os_error()) + } + } + } + + pub fn string_arg(&self) -> String { + self.name.clone() + } + + pub fn configure(&self, _cmd: &mut Command) { + // nothing to do here, we gave the name of our semaphore to the + // child above + } + } + + #[derive(Debug)] + struct Handle(HANDLE); + // HANDLE is a raw ptr, but we're send/sync + unsafe impl Sync for Handle {} + unsafe impl Send for Handle {} + + impl Drop for Handle { + fn drop(&mut self) { + unsafe { + CloseHandle(self.0); + } + } + } + + #[derive(Debug)] + pub struct Helper { + event: Arc, + thread: JoinHandle<()>, + } + + pub fn spawn_helper(client: ::Client, + rx: Receiver<()>, + mut f: Box) + Send>) + -> io::Result + { + let event = unsafe { + let r = CreateEventA(ptr::null_mut(), TRUE, FALSE, ptr::null()); + if r.is_null() { + return Err(io::Error::last_os_error()) + } else { + Handle(r) + } + }; + let event = Arc::new(event); + let event2 = event.clone(); + let thread = Builder::new().spawn(move || { + let objects = [event2.0, client.inner.sem.0]; + for () in rx { + let r = unsafe { + WaitForMultipleObjects(2, objects.as_ptr(), FALSE, INFINITE) + }; + if r == WAIT_OBJECT_0 { + break + } + if r == WAIT_OBJECT_0 + 1 { + f(Ok(::Acquired { + client: client.inner.clone(), + data: Acquired, + })) + } else { + f(Err(io::Error::last_os_error())) + } + } + })?; + Ok(Helper { + thread: thread, + event: event, + }) + } + + impl Helper { + pub fn join(self) { + let r = unsafe { SetEvent(self.event.0) }; + if r == 0 { + panic!("failed to set event: {}", io::Error::last_os_error()); + } + drop(self.thread.join()); + } + } +} diff --git a/jobserver/tests/client-of-myself.rs b/jobserver/tests/client-of-myself.rs new file mode 100644 index 000000000..106368949 --- /dev/null +++ b/jobserver/tests/client-of-myself.rs @@ -0,0 +1,60 @@ +extern crate jobserver; + +use std::env; +use std::io::BufReader; +use std::io::prelude::*; +use 
std::process::{Command, Stdio}; +use std::sync::mpsc; +use std::thread; + +use jobserver::Client; + +macro_rules! t { + ($e:expr) => (match $e { + Ok(e) => e, + Err(e) => panic!("{} failed with {}", stringify!($e), e), + }) +} + +fn main() { + if env::var("I_AM_THE_CLIENT").is_ok() { + client(); + } else { + server(); + } +} + +fn server() { + let me = t!(env::current_exe()); + let client = t!(Client::new(1)); + let mut cmd = Command::new(me); + cmd.env("I_AM_THE_CLIENT", "1") + .stdout(Stdio::piped()); + client.configure(&mut cmd); + let acq = client.acquire().unwrap(); + let mut child = t!(cmd.spawn()); + let stdout = child.stdout.take().unwrap(); + let (tx, rx) = mpsc::channel(); + let t = thread::spawn(move || { + for line in BufReader::new(stdout).lines() { + tx.send(t!(line)).unwrap(); + } + }); + + for _ in 0..100 { + assert!(rx.try_recv().is_err()); + } + + drop(acq); + assert_eq!(rx.recv().unwrap(), "hello!"); + t.join().unwrap(); + assert!(rx.recv().is_err()); + client.acquire().unwrap(); +} + +fn client() { + let client = unsafe { Client::from_env().unwrap() }; + let acq = client.acquire().unwrap(); + println!("hello!"); + drop(acq); +} diff --git a/jobserver/tests/client.rs b/jobserver/tests/client.rs new file mode 100644 index 000000000..4147191f7 --- /dev/null +++ b/jobserver/tests/client.rs @@ -0,0 +1,191 @@ +extern crate futures; +extern crate jobserver; +extern crate num_cpus; +extern crate tempdir; +extern crate tokio_core; +extern crate tokio_process; + +use std::env; +use std::fs::File; +use std::io::Write; +use std::process::Command; +use std::sync::Arc; +use std::sync::atomic::{AtomicBool, Ordering}; +use std::sync::mpsc; +use std::thread; + +use futures::future::{self, Future}; +use futures::stream::{self, Stream}; +use jobserver::Client; +use tempdir::TempDir; +use tokio_core::reactor::Core; +use tokio_process::CommandExt; + +macro_rules! 
t { + ($e:expr) => (match $e { + Ok(e) => e, + Err(e) => panic!("{} failed with {}", stringify!($e), e), + }) +} + +struct Test { + name: &'static str, + f: &'static Fn(), + make_args: &'static [&'static str], + rule: &'static Fn(&str) -> String, +} + +const TESTS: &[Test] = &[ + Test { + name: "no j args", + make_args: &[], + rule: &|me| format!("{}", me), + f: &|| { + assert!(unsafe { Client::from_env().is_none() }); + }, + }, + Test { + name: "no j args with plus", + make_args: &[], + rule: &|me| format!("+{}", me), + f: &|| { + assert!(unsafe { Client::from_env().is_none() }); + }, + }, + Test { + name: "j args with plus", + make_args: &["-j2"], + rule: &|me| format!("+{}", me), + f: &|| { + assert!(unsafe { Client::from_env().is_some() }); + }, + }, + Test { + name: "acquire", + make_args: &["-j2"], + rule: &|me| format!("+{}", me), + f: &|| { + let c = unsafe { Client::from_env().unwrap() }; + drop(c.acquire().unwrap()); + drop(c.acquire().unwrap()); + }, + }, + Test { + name: "acquire3", + make_args: &["-j3"], + rule: &|me| format!("+{}", me), + f: &|| { + let c = unsafe { Client::from_env().unwrap() }; + let a = c.acquire().unwrap(); + let b = c.acquire().unwrap(); + drop((a, b)); + }, + }, + Test { + name: "acquire blocks", + make_args: &["-j2"], + rule: &|me| format!("+{}", me), + f: &|| { + let c = unsafe { Client::from_env().unwrap() }; + let a = c.acquire().unwrap(); + let hit = Arc::new(AtomicBool::new(false)); + let hit2 = hit.clone(); + let (tx, rx) = mpsc::channel(); + let t = thread::spawn(move || { + tx.send(()).unwrap(); + let _b = c.acquire().unwrap(); + hit2.store(true, Ordering::SeqCst); + }); + rx.recv().unwrap(); + assert!(!hit.load(Ordering::SeqCst)); + drop(a); + t.join().unwrap(); + assert!(hit.load(Ordering::SeqCst)); + }, + }, +]; + +fn main() { + if let Ok(test) = env::var("TEST_TO_RUN") { + return (TESTS.iter().find(|t| t.name == test).unwrap().f)() + } + + let me = t!(env::current_exe()); + let me = me.to_str().unwrap(); + let filter = env::args().skip(1).next(); + + let mut core = t!(Core::new()); + let ref handle = core.handle(); + + let futures = TESTS.iter().filter(|test| { + match filter { + Some(ref s) => test.name.contains(s), + None => true, + } + }).map(|test| { + let td = t!(TempDir::new("foo")); + let makefile = format!("\ +all: export TEST_TO_RUN={} +all: +\t{} +", test.name, (test.rule)(me)); + t!(t!(File::create(td.path().join("Makefile"))) + .write_all(makefile.as_bytes())); + let prog = env::var("MAKE").unwrap_or("make".to_string()); + let mut cmd = Command::new(prog); + cmd.args(test.make_args); + cmd.current_dir(td.path()); + future::lazy(move || { + cmd.output_async(&handle).map(move |e| { + drop(td); + (test, e) + }) + }) + }).collect::>(); + + println!("\nrunning {} tests\n", futures.len()); + + let stream = stream::iter(futures.into_iter().map(Ok)) + .buffer_unordered(num_cpus::get()); + + let mut failures = Vec::new(); + t!(core.run(stream.for_each(|(test, output)| { + if output.status.success() { + println!("test {} ... ok", test.name); + } else { + println!("test {} ... 
FAIL", test.name); + failures.push((test, output)); + } + Ok(()) + }))); + + if failures.len() == 0 { + println!("\ntest result: ok\n"); + return + } + + println!("\n----------- failures"); + + for (test, output) in failures { + println!("test {}", test.name); + let stdout = String::from_utf8_lossy(&output.stdout); + let stderr = String::from_utf8_lossy(&output.stderr); + + println!("\texit status: {}", output.status); + if !stdout.is_empty() { + println!("\tstdout ==="); + for line in stdout.lines() { + println!("\t\t{}", line); + } + } + + if !stderr.is_empty() { + println!("\tstderr ==="); + for line in stderr.lines() { + println!("\t\t{}", line); + } + } + } + + std::process::exit(4); +} diff --git a/jobserver/tests/helper.rs b/jobserver/tests/helper.rs new file mode 100644 index 000000000..ed046ae59 --- /dev/null +++ b/jobserver/tests/helper.rs @@ -0,0 +1,44 @@ +extern crate jobserver; + +use std::sync::mpsc; + +use jobserver::Client; + +macro_rules! t { + ($e:expr) => (match $e { + Ok(e) => e, + Err(e) => panic!("{} failed with {}", stringify!($e), e), + }) +} + +#[test] +fn helper_smoke() { + let client = t!(Client::new(1)); + drop(client.clone().into_helper_thread(|_| ()).unwrap()); + drop(client.clone().into_helper_thread(|_| ()).unwrap()); + drop(client.clone().into_helper_thread(|_| ()).unwrap()); + drop(client.clone().into_helper_thread(|_| ()).unwrap()); + drop(client.clone().into_helper_thread(|_| ()).unwrap()); + drop(client.clone().into_helper_thread(|_| ()).unwrap()); +} + +#[test] +fn acquire() { + let (tx, rx) = mpsc::channel(); + let client = t!(Client::new(1)); + let helper = client.into_helper_thread(move |a| drop(tx.send(a))).unwrap(); + assert!(rx.try_recv().is_err()); + helper.request_token(); + rx.recv().unwrap().unwrap(); + helper.request_token(); + rx.recv().unwrap().unwrap(); + + helper.request_token(); + helper.request_token(); + rx.recv().unwrap().unwrap(); + rx.recv().unwrap().unwrap(); + + helper.request_token(); + helper.request_token(); + drop(helper); +} diff --git a/jobserver/tests/make-as-a-client.rs b/jobserver/tests/make-as-a-client.rs new file mode 100644 index 000000000..1f029555f --- /dev/null +++ b/jobserver/tests/make-as-a-client.rs @@ -0,0 +1,77 @@ +extern crate jobserver; +extern crate tempdir; + +use std::env; +use std::fs::File; +use std::io::prelude::*; +use std::net::{TcpStream, TcpListener}; +use std::process::Command; + +use jobserver::Client; +use tempdir::TempDir; + +macro_rules! 
t { + ($e:expr) => (match $e { + Ok(e) => e, + Err(e) => panic!("{} failed with {}", stringify!($e), e), + }) +} + +fn main() { + if env::var("_DO_THE_TEST").is_ok() { + std::process::exit( + Command::new(env::var_os("MAKE").unwrap()) + .env("MAKEFLAGS", env::var_os("CARGO_MAKEFLAGS").unwrap()) + .env_remove("_DO_THE_TEST") + .args(&env::args_os().skip(1).collect::>()) + .status() + .unwrap() + .code() + .unwrap_or(1) + ); + } + + if let Ok(s) = env::var("TEST_ADDR") { + let mut contents = Vec::new(); + t!(t!(TcpStream::connect(&s)).read_to_end(&mut contents)); + return + } + + let c = t!(Client::new(1)); + let td = TempDir::new("foo").unwrap(); + + let prog = env::var("MAKE").unwrap_or("make".to_string()); + + let me = t!(env::current_exe()); + let me = me.to_str().unwrap(); + + let mut cmd = Command::new(&me); + cmd.current_dir(td.path()); + cmd.env("MAKE", prog); + cmd.env("_DO_THE_TEST", "1"); + + t!(t!(File::create(td.path().join("Makefile"))).write_all(format!("\ +all: foo bar +foo: +\t{0} +bar: +\t{0} +", me).as_bytes())); + + // We're leaking one extra token to `make` sort of violating the makefile + // jobserver protocol. It has the desired effect though. + c.configure(&mut cmd); + + let listener = t!(TcpListener::bind("127.0.0.1:0")); + let addr = t!(listener.local_addr()); + cmd.env("TEST_ADDR", addr.to_string()); + let mut child = t!(cmd.spawn()); + + // We should get both connections as the two programs should be run + // concurrently. + let a = t!(listener.accept()); + let b = t!(listener.accept()); + drop((a, b)); + + assert!(t!(child.wait()).success()); +} diff --git a/jobserver/tests/server.rs b/jobserver/tests/server.rs new file mode 100644 index 000000000..94cd71c9c --- /dev/null +++ b/jobserver/tests/server.rs @@ -0,0 +1,145 @@ +extern crate jobserver; +extern crate tempdir; + +use std::env; +use std::fs::File; +use std::io::prelude::*; +use std::process::Command; +use std::sync::Arc; +use std::sync::atomic::{AtomicBool, Ordering}; +use std::sync::mpsc; +use std::thread; + +use jobserver::Client; +use tempdir::TempDir; + +macro_rules! t { + ($e:expr) => (match $e { + Ok(e) => e, + Err(e) => panic!("{} failed with {}", stringify!($e), e), + }) +} + +#[test] +fn server_smoke() { + let c = t!(Client::new(1)); + drop(c.acquire().unwrap()); + drop(c.acquire().unwrap()); +} + +#[test] +fn server_multiple() { + let c = t!(Client::new(2)); + let a = c.acquire().unwrap(); + let b = c.acquire().unwrap(); + drop((a, b)); +} + +#[test] +fn server_blocks() { + let c = t!(Client::new(1)); + let a = c.acquire().unwrap(); + let hit = Arc::new(AtomicBool::new(false)); + let hit2 = hit.clone(); + let (tx, rx) = mpsc::channel(); + let t = thread::spawn(move || { + tx.send(()).unwrap(); + let _b = c.acquire().unwrap(); + hit2.store(true, Ordering::SeqCst); + }); + rx.recv().unwrap(); + assert!(!hit.load(Ordering::SeqCst)); + drop(a); + t.join().unwrap(); + assert!(hit.load(Ordering::SeqCst)); +} + +#[test] +fn make_as_a_single_thread_client() { + let c = t!(Client::new(1)); + let td = TempDir::new("foo").unwrap(); + + let prog = env::var("MAKE").unwrap_or("make".to_string()); + let mut cmd = Command::new(prog); + cmd.current_dir(td.path()); + + t!(t!(File::create(td.path().join("Makefile"))).write_all(b" +all: foo bar +foo: +\techo foo +bar: +\techo bar +")); + + // The jobserver protocol means that the `make` process itself "runs with a + // token", so we acquire our one token to drain the jobserver, and this + // should mean that `make` itself never has a second token available to it. 
+ let _a = c.acquire(); + c.configure(&mut cmd); + let output = t!(cmd.output()); + println!("\n\t=== stderr\n\t\t{}", + String::from_utf8_lossy(&output.stderr).replace("\n", "\n\t\t")); + println!("\t=== stdout\n\t\t{}", + String::from_utf8_lossy(&output.stdout).replace("\n", "\n\t\t")); + + assert!(output.status.success()); + assert!(output.stderr.is_empty()); + + let stdout = String::from_utf8_lossy(&output.stdout).replace("\r\n", "\n"); + let a = "\ +echo foo +foo +echo bar +bar +"; + let b = "\ +echo bar +bar +echo foo +foo +"; + + assert!(stdout == a || stdout == b); +} + +#[test] +fn make_as_a_multi_thread_client() { + let c = t!(Client::new(1)); + let td = TempDir::new("foo").unwrap(); + + let prog = env::var("MAKE").unwrap_or("make".to_string()); + let mut cmd = Command::new(prog); + cmd.current_dir(td.path()); + + t!(t!(File::create(td.path().join("Makefile"))).write_all(b" +all: foo bar +foo: +\techo foo +bar: +\techo bar +")); + + // We're leaking one extra token to `make` sort of violating the makefile + // jobserver protocol. It has the desired effect though. + c.configure(&mut cmd); + let output = t!(cmd.output()); + println!("\n\t=== stderr\n\t\t{}", + String::from_utf8_lossy(&output.stderr).replace("\n", "\n\t\t")); + println!("\t=== stdout\n\t\t{}", + String::from_utf8_lossy(&output.stdout).replace("\n", "\n\t\t")); + + assert!(output.status.success()); +} + +#[test] +fn zero_client() { + let client = t!(Client::new(0)); + let (tx, rx) = mpsc::channel(); + let helper = client.into_helper_thread(move |a| drop(tx.send(a))).unwrap(); + helper.request_token(); + helper.request_token(); + + for _ in 0..1000 { + assert!(rx.try_recv().is_err()); + } +} diff --git a/lazy_static/.cargo-checksum.json b/lazy_static/.cargo-checksum.json new file mode 100644 index 000000000..d2e4af05b --- /dev/null +++ b/lazy_static/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{},"package":"ca488b89a5657b0a2ecd45b95609b3e848cf1755da332a0da46e2b2b1cb371a7"} \ No newline at end of file diff --git a/lazy_static/Cargo.toml b/lazy_static/Cargo.toml new file mode 100644 index 000000000..dfa585a73 --- /dev/null +++ b/lazy_static/Cargo.toml @@ -0,0 +1,47 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "lazy_static" +version = "1.1.0" +authors = ["Marvin Löbel "] +build = "build.rs" +description = "A macro for declaring lazily evaluated statics in Rust." 
+documentation = "https://docs.rs/lazy_static" +readme = "README.md" +keywords = ["macro", "lazy", "static"] +categories = ["no-std", "rust-patterns", "memory-management"] +license = "MIT/Apache-2.0" +repository = "https://github.com/rust-lang-nursery/lazy-static.rs" +[dependencies.spin] +version = "0.4.6" +optional = true +[build-dependencies.version_check] +version = "0.1.4" + +[features] +nightly = [] +spin_no_std = ["nightly", "spin"] +[badges.appveyor] +repository = "rust-lang-nursery/lazy-static.rs" + +[badges.is-it-maintained-issue-resolution] +repository = "rust-lang-nursery/lazy-static.rs" + +[badges.is-it-maintained-open-issues] +repository = "rust-lang-nursery/lazy-static.rs" + +[badges.maintenance] +status = "passively-maintained" + +[badges.travis-ci] +repository = "rust-lang-nursery/lazy-static.rs" diff --git a/lazy_static/LICENSE-APACHE b/lazy_static/LICENSE-APACHE new file mode 100644 index 000000000..16fe87b06 --- /dev/null +++ b/lazy_static/LICENSE-APACHE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/lazy_static/LICENSE-MIT b/lazy_static/LICENSE-MIT new file mode 100644 index 000000000..25597d583 --- /dev/null +++ b/lazy_static/LICENSE-MIT @@ -0,0 +1,25 @@ +Copyright (c) 2010 The Rust Project Developers + +Permission is hereby granted, free of charge, to any +person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without +limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software +is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions +of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/lazy_static/README.md b/lazy_static/README.md new file mode 100644 index 000000000..b91f73352 --- /dev/null +++ b/lazy_static/README.md @@ -0,0 +1,73 @@ +lazy-static.rs +============== + +A macro for declaring lazily evaluated statics in Rust. + +Using this macro, it is possible to have `static`s that require code to be +executed at runtime in order to be initialized. +This includes anything requiring heap allocations, like vectors or hash maps, +as well as anything that requires non-const function calls to be computed. + +[![Travis-CI Status](https://travis-ci.org/rust-lang-nursery/lazy-static.rs.svg?branch=master)](https://travis-ci.org/rust-lang-nursery/lazy-static.rs) +[![Latest version](https://img.shields.io/crates/v/lazy_static.svg)](https://crates.io/crates/lazy_static) +[![Documentation](https://docs.rs/lazy_static/badge.svg)](https://docs.rs/lazy_static) +[![License](https://img.shields.io/crates/l/lazy_static.svg)](https://github.com/rust-lang-nursery/lazy-static.rs#license) + + +# Getting Started + +[lazy-static.rs is available on crates.io](https://crates.io/crates/lazy_static). 
+It is recommended to look there for the newest released version, as well as links to the newest builds of the docs. + +At the point of the last update of this README, the latest published version could be used like this: + +Add the following dependency to your Cargo manifest... + +```toml +[dependencies] +lazy_static = "1.1.0" +``` + +...and see the [docs](https://docs.rs/lazy_static) for how to use it. + +# Example + +```rust +#[macro_use] +extern crate lazy_static; + +use std::collections::HashMap; + +lazy_static! { + static ref HASHMAP: HashMap = { + let mut m = HashMap::new(); + m.insert(0, "foo"); + m.insert(1, "bar"); + m.insert(2, "baz"); + m + }; +} + +fn main() { + // First access to `HASHMAP` initializes it + println!("The entry for `0` is \"{}\".", HASHMAP.get(&0).unwrap()); + + // Any further access to `HASHMAP` just returns the computed value + println!("The entry for `1` is \"{}\".", HASHMAP.get(&1).unwrap()); +} +``` + +## License + +Licensed under either of + + * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0) + * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT) + +at your option. + +### Contribution + +Unless you explicitly state otherwise, any contribution intentionally submitted +for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any +additional terms or conditions. diff --git a/lazy_static/appveyor.yml b/lazy_static/appveyor.yml new file mode 100644 index 000000000..b13845248 --- /dev/null +++ b/lazy_static/appveyor.yml @@ -0,0 +1,61 @@ +environment: + global: + PROJECT_NAME: lazy_static + # When this was added there were revocation check failures when using the + # libcurl backend as libcurl checks by default, but rustup doesn't provide the + # switch to turn this off. Switch to Hyper which looks to not check for + # revocation by default like libcurl does. 
+ RUSTUP_USE_REQWEST: 1 + CARGO_HTTP_CHECK_REVOKE: false + matrix: + # Stable channel + - TARGET: i686-pc-windows-gnu + CHANNEL: stable + - TARGET: i686-pc-windows-msvc + CHANNEL: stable + - TARGET: x86_64-pc-windows-gnu + CHANNEL: stable + - TARGET: x86_64-pc-windows-msvc + CHANNEL: stable + # Beta channel + - TARGET: i686-pc-windows-gnu + CHANNEL: beta + - TARGET: i686-pc-windows-msvc + CHANNEL: beta + - TARGET: x86_64-pc-windows-gnu + CHANNEL: beta + - TARGET: x86_64-pc-windows-msvc + CHANNEL: beta + # Nightly channel + - TARGET: i686-pc-windows-gnu + CHANNEL: nightly + - TARGET: i686-pc-windows-msvc + CHANNEL: nightly + - TARGET: x86_64-pc-windows-gnu + CHANNEL: nightly + - TARGET: x86_64-pc-windows-msvc + CHANNEL: nightly + +# Install Rust and Cargo +# (Based on from https://github.com/rust-lang/libc/blob/master/appveyor.yml) +install: + - appveyor-retry appveyor DownloadFile https://win.rustup.rs/ -FileName rustup-init.exe + - rustup-init.exe -y --default-toolchain %CHANNEL% --default-host %TARGET% + - set PATH=%PATH%;C:\Users\appveyor\.cargo\bin + - if "%TARGET%" == "i686-pc-windows-gnu" set PATH=%PATH%;C:\msys64\mingw32\bin + - if "%TARGET%" == "x86_64-pc-windows-gnu" set PATH=%PATH%;C:\msys64\mingw64\bin + - rustc -V + - cargo -V + +build: false + +test_script: + - cargo build --verbose + - cargo test + - if [%CHANNEL%]==[nightly] ( + cd compiletest && + cargo clean && + cargo build --verbose && + cargo test && + cd ../ + ) diff --git a/lazy_static/build.rs b/lazy_static/build.rs new file mode 100644 index 000000000..3d579f551 --- /dev/null +++ b/lazy_static/build.rs @@ -0,0 +1,44 @@ +extern crate version_check; + +fn main() { + let is_var_set = |s| std::env::var_os(s).is_some(); + + // one can manually set a cfg to force an impl -- mostly useful for our own testing + let force_heap_cfg = is_var_set("CARGO_CFG_LAZY_STATIC_HEAP_IMPL"); + let force_inline_cfg = is_var_set("CARGO_CFG_LAZY_STATIC_INLINE_IMPL"); + let force_spin_cfg = is_var_set("CARGO_CFG_LAZY_STATIC_SPIN_IMPL"); + + let impls_forced = [force_heap_cfg, force_inline_cfg, force_spin_cfg] + .into_iter() + .filter(|&&f| f) + .count(); + + assert!( + impls_forced <= 1, + "lazy_static can only be built with one configuration at a time." + ); + + let nightly_feature_enabled = is_var_set("CARGO_FEATURE_NIGHTLY"); + let spin_feature_enabled = is_var_set("CARGO_FEATURE_SPIN_NO_STD"); + + let version_geq_122 = version_check::is_min_version("1.22.0").unwrap().0; + let drop_in_static_supported = version_geq_122 || nightly_feature_enabled; + + // precedence: + // 1. explicit requests via cfg or spin_no_std feature + // 2. inline impl with newer rustc version or nightly feature (latter for backcompat) + // 3. fallback to allocating implementation + let impl_name = if force_heap_cfg { + "heap" + } else if force_inline_cfg { + "inline" + } else if force_spin_cfg || spin_feature_enabled { + "spin" + } else if drop_in_static_supported { + "inline" + } else { + "heap" + }; + + println!("cargo:rustc-cfg=lazy_static_{}_impl", impl_name); +} diff --git a/lazy_static/src/core_lazy.rs b/lazy_static/src/core_lazy.rs new file mode 100644 index 000000000..ba496a68b --- /dev/null +++ b/lazy_static/src/core_lazy.rs @@ -0,0 +1,34 @@ +// Copyright 2016 lazy-static.rs Developers +// +// Licensed under the Apache License, Version 2.0, or the MIT license , at your option. This file may not be +// copied, modified, or distributed except according to those terms. 
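+
+// `core_lazy.rs` is the `spin`-based backend: build.rs selects it by emitting
+// `cargo:rustc-cfg=lazy_static_spin_impl` when the `spin_no_std` feature (or its
+// override cfg) is enabled, so `lazy_static!` can initialize statics without
+// `std` by deferring to `spin::Once`.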
+ +extern crate spin; + +use self::spin::Once; + +pub struct Lazy(Once); + +impl Lazy { + #[inline(always)] + pub const fn new() -> Self { + Lazy(Once::new()) + } + + #[inline(always)] + pub fn get(&'static self, builder: F) -> &T + where F: FnOnce() -> T + { + self.0.call_once(builder) + } +} + +#[macro_export] +#[doc(hidden)] +macro_rules! __lazy_static_create { + ($NAME:ident, $T:ty) => { + static $NAME: $crate::lazy::Lazy<$T> = $crate::lazy::Lazy::new(); + } +} diff --git a/lazy_static/src/heap_lazy.rs b/lazy_static/src/heap_lazy.rs new file mode 100644 index 000000000..c67a30189 --- /dev/null +++ b/lazy_static/src/heap_lazy.rs @@ -0,0 +1,43 @@ +// Copyright 2016 lazy-static.rs Developers +// +// Licensed under the Apache License, Version 2.0, or the MIT license , at your option. This file may not be +// copied, modified, or distributed except according to those terms. + +extern crate std; + +use self::std::prelude::v1::*; +use self::std::sync::Once; +pub use self::std::sync::ONCE_INIT; + +pub struct Lazy(*const T, Once); + +impl Lazy { + pub const INIT: Self = Lazy(0 as *const T, ONCE_INIT); + + #[inline(always)] + pub fn get(&'static mut self, f: F) -> &T + where + F: FnOnce() -> T, + { + unsafe { + let r = &mut self.0; + self.1.call_once(|| { + *r = Box::into_raw(Box::new(f())); + }); + + &*self.0 + } + } +} + +unsafe impl Sync for Lazy {} + +#[macro_export] +#[doc(hidden)] +macro_rules! __lazy_static_create { + ($NAME:ident, $T:ty) => { + static mut $NAME: $crate::lazy::Lazy<$T> = $crate::lazy::Lazy::INIT; + }; +} diff --git a/lazy_static/src/inline_lazy.rs b/lazy_static/src/inline_lazy.rs new file mode 100644 index 000000000..201a3c0b5 --- /dev/null +++ b/lazy_static/src/inline_lazy.rs @@ -0,0 +1,59 @@ +// Copyright 2016 lazy-static.rs Developers +// +// Licensed under the Apache License, Version 2.0, or the MIT license , at your option. This file may not be +// copied, modified, or distributed except according to those terms. + +extern crate core; +extern crate std; + +use self::std::prelude::v1::*; +use self::std::sync::Once; +pub use self::std::sync::ONCE_INIT; + +pub struct Lazy(Option, Once); + +impl Lazy { + pub const INIT: Self = Lazy(None, ONCE_INIT); + + #[inline(always)] + pub fn get(&'static mut self, f: F) -> &T + where + F: FnOnce() -> T, + { + { + let r = &mut self.0; + self.1.call_once(|| { + *r = Some(f()); + }); + } + unsafe { + match self.0 { + Some(ref x) => x, + None => unreachable_unchecked(), + } + } + } +} + +unsafe impl Sync for Lazy {} + +#[macro_export] +#[doc(hidden)] +macro_rules! __lazy_static_create { + ($NAME:ident, $T:ty) => { + static mut $NAME: $crate::lazy::Lazy<$T> = $crate::lazy::Lazy::INIT; + }; +} + +/// Polyfill for std::hint::unreachable_unchecked. There currently exists a +/// [crate](https://docs.rs/unreachable) for an equivalent to std::hint::unreachable_unchecked, but +/// lazy_static currently doesn't include any runtime dependencies and we've chosen to include this +/// short polyfill rather than include a new crate in every consumer's build. +/// +/// This should be replaced by std's version when lazy_static starts to require at least Rust 1.27. +unsafe fn unreachable_unchecked() -> ! 
{ + enum Void {} + match std::mem::uninitialized::() {} +} diff --git a/lazy_static/src/lib.rs b/lazy_static/src/lib.rs new file mode 100644 index 000000000..a01dd4bc0 --- /dev/null +++ b/lazy_static/src/lib.rs @@ -0,0 +1,223 @@ +// Copyright 2016 lazy-static.rs Developers +// +// Licensed under the Apache License, Version 2.0, or the MIT license , at your option. This file may not be +// copied, modified, or distributed except according to those terms. + +/*! +A macro for declaring lazily evaluated statics. + +Using this macro, it is possible to have `static`s that require code to be +executed at runtime in order to be initialized. +This includes anything requiring heap allocations, like vectors or hash maps, +as well as anything that requires function calls to be computed. + +# Syntax + +```ignore +lazy_static! { + [pub] static ref NAME_1: TYPE_1 = EXPR_1; + [pub] static ref NAME_2: TYPE_2 = EXPR_2; + ... + [pub] static ref NAME_N: TYPE_N = EXPR_N; +} +``` + +Attributes (including doc comments) are supported as well: + +```rust +# #[macro_use] +# extern crate lazy_static; +# fn main() { +lazy_static! { + /// This is an example for using doc comment attributes + static ref EXAMPLE: u8 = 42; +} +# } +``` + +# Semantics + +For a given `static ref NAME: TYPE = EXPR;`, the macro generates a unique type that +implements `Deref` and stores it in a static with name `NAME`. (Attributes end up +attaching to this type.) + +On first deref, `EXPR` gets evaluated and stored internally, such that all further derefs +can return a reference to the same object. Note that this can lead to deadlocks +if you have multiple lazy statics that depend on each other in their initialization. + +Apart from the lazy initialization, the resulting "static ref" variables +have generally the same properties as regular "static" variables: + +- Any type in them needs to fulfill the `Sync` trait. +- If the type has a destructor, then it will not run when the process exits. + +# Example + +Using the macro: + +```rust +#[macro_use] +extern crate lazy_static; + +use std::collections::HashMap; + +lazy_static! { + static ref HASHMAP: HashMap = { + let mut m = HashMap::new(); + m.insert(0, "foo"); + m.insert(1, "bar"); + m.insert(2, "baz"); + m + }; + static ref COUNT: usize = HASHMAP.len(); + static ref NUMBER: u32 = times_two(21); +} + +fn times_two(n: u32) -> u32 { n * 2 } + +fn main() { + println!("The map has {} entries.", *COUNT); + println!("The entry for `0` is \"{}\".", HASHMAP.get(&0).unwrap()); + println!("A expensive calculation on a static results in: {}.", *NUMBER); +} +``` + +# Implementation details + +The `Deref` implementation uses a hidden static variable that is guarded by an atomic check on each access. + +# Cargo features + +This crate provides two cargo features: + +- `nightly`: This uses unstable language features only available on the nightly release channel for a more optimal implementation. In practice this currently means avoiding a heap allocation per static. This feature might get deprecated at a later point once all relevant optimizations are usable from stable. +- `spin_no_std` (implies `nightly`): This allows using this crate in a no-std environment, by depending on the standalone `spin` crate. + +Both features depend on unstable language features, which means +no guarantees can be made about them in regard to SemVer stability. + +*/ + +// NOTE: see build.rs for where these cfg values are set. 
+#![cfg_attr(lazy_static_spin_impl, feature(const_fn))] + +#![doc(html_root_url = "https://docs.rs/lazy_static/1.1.0")] +#![no_std] + +#[cfg(lazy_static_heap_impl)] +#[path="heap_lazy.rs"] +#[doc(hidden)] +pub mod lazy; + +#[cfg(lazy_static_inline_impl)] +#[path="inline_lazy.rs"] +#[doc(hidden)] +pub mod lazy; + +#[cfg(lazy_static_spin_impl)] +#[path="core_lazy.rs"] +#[doc(hidden)] +pub mod lazy; + +#[doc(hidden)] +pub use core::ops::Deref as __Deref; + +#[macro_export(local_inner_macros)] +#[doc(hidden)] +macro_rules! __lazy_static_internal { + // optional visibility restrictions are wrapped in `()` to allow for + // explicitly passing otherwise implicit information about private items + ($(#[$attr:meta])* ($($vis:tt)*) static ref $N:ident : $T:ty = $e:expr; $($t:tt)*) => { + __lazy_static_internal!(@MAKE TY, $(#[$attr])*, ($($vis)*), $N); + __lazy_static_internal!(@TAIL, $N : $T = $e); + lazy_static!($($t)*); + }; + (@TAIL, $N:ident : $T:ty = $e:expr) => { + impl $crate::__Deref for $N { + type Target = $T; + #[allow(unsafe_code)] + fn deref(&self) -> &$T { + unsafe { + #[inline(always)] + fn __static_ref_initialize() -> $T { $e } + + #[inline(always)] + unsafe fn __stability() -> &'static $T { + __lazy_static_create!(LAZY, $T); + LAZY.get(__static_ref_initialize) + } + __stability() + } + } + } + impl $crate::LazyStatic for $N { + fn initialize(lazy: &Self) { + let _ = &**lazy; + } + } + }; + // `vis` is wrapped in `()` to prevent parsing ambiguity + (@MAKE TY, $(#[$attr:meta])*, ($($vis:tt)*), $N:ident) => { + #[allow(missing_copy_implementations)] + #[allow(non_camel_case_types)] + #[allow(dead_code)] + $(#[$attr])* + $($vis)* struct $N {__private_field: ()} + #[doc(hidden)] + $($vis)* static $N: $N = $N {__private_field: ()}; + }; + () => () +} + +#[macro_export(local_inner_macros)] +macro_rules! lazy_static { + ($(#[$attr:meta])* static ref $N:ident : $T:ty = $e:expr; $($t:tt)*) => { + // use `()` to explicitly forward the information about private items + __lazy_static_internal!($(#[$attr])* () static ref $N : $T = $e; $($t)*); + }; + ($(#[$attr:meta])* pub static ref $N:ident : $T:ty = $e:expr; $($t:tt)*) => { + __lazy_static_internal!($(#[$attr])* (pub) static ref $N : $T = $e; $($t)*); + }; + ($(#[$attr:meta])* pub ($($vis:tt)+) static ref $N:ident : $T:ty = $e:expr; $($t:tt)*) => { + __lazy_static_internal!($(#[$attr])* (pub ($($vis)+)) static ref $N : $T = $e; $($t)*); + }; + () => () +} + +/// Support trait for enabling a few common operation on lazy static values. +/// +/// This is implemented by each defined lazy static, and +/// used by the free functions in this crate. +pub trait LazyStatic { + #[doc(hidden)] + fn initialize(lazy: &Self); +} + +/// Takes a shared reference to a lazy static and initializes +/// it if it has not been already. +/// +/// This can be used to control the initialization point of a lazy static. +/// +/// Example: +/// +/// ```rust +/// #[macro_use] +/// extern crate lazy_static; +/// +/// lazy_static! { +/// static ref BUFFER: Vec = (0..65537).collect(); +/// } +/// +/// fn main() { +/// lazy_static::initialize(&BUFFER); +/// +/// // ... 
+/// work_with_initialized_data(&BUFFER); +/// } +/// # fn work_with_initialized_data(_: &[u8]) {} +/// ``` +pub fn initialize(lazy: &T) { + LazyStatic::initialize(lazy); +} diff --git a/lazy_static/tests/no_std.rs b/lazy_static/tests/no_std.rs new file mode 100644 index 000000000..b460e7932 --- /dev/null +++ b/lazy_static/tests/no_std.rs @@ -0,0 +1,21 @@ +#![cfg(feature="spin_no_std")] +#![feature(const_fn)] + +#![no_std] + +#[macro_use] +extern crate lazy_static; + +lazy_static! { + /// Documentation! + pub static ref NUMBER: u32 = times_two(3); +} + +fn times_two(n: u32) -> u32 { + n * 2 +} + +#[test] +fn test_basic() { + assert_eq!(*NUMBER, 6); +} diff --git a/lazy_static/tests/test.rs b/lazy_static/tests/test.rs new file mode 100644 index 000000000..654abc541 --- /dev/null +++ b/lazy_static/tests/test.rs @@ -0,0 +1,162 @@ +#[macro_use] +extern crate lazy_static; +use std::collections::HashMap; + +lazy_static! { + /// Documentation! + pub static ref NUMBER: u32 = times_two(3); + + static ref ARRAY_BOXES: [Box; 3] = [Box::new(1), Box::new(2), Box::new(3)]; + + /// More documentation! + #[allow(unused_variables)] + #[derive(Copy, Clone, Debug)] + pub static ref STRING: String = "hello".to_string(); + + static ref HASHMAP: HashMap = { + let mut m = HashMap::new(); + m.insert(0, "abc"); + m.insert(1, "def"); + m.insert(2, "ghi"); + m + }; + + // This should not compile if the unsafe is removed. + static ref UNSAFE: u32 = unsafe { + std::mem::transmute::(-1) + }; +} + +lazy_static! { + static ref S1: &'static str = "a"; + static ref S2: &'static str = "b"; +} +lazy_static! { + static ref S3: String = [*S1, *S2].join(""); +} + +#[test] +fn s3() { + assert_eq!(&*S3, "ab"); +} + +fn times_two(n: u32) -> u32 { + n * 2 +} + +#[test] +fn test_basic() { + assert_eq!(&**STRING, "hello"); + assert_eq!(*NUMBER, 6); + assert!(HASHMAP.get(&1).is_some()); + assert!(HASHMAP.get(&3).is_none()); + assert_eq!(&*ARRAY_BOXES, &[Box::new(1), Box::new(2), Box::new(3)]); + assert_eq!(*UNSAFE, std::u32::MAX); +} + +#[test] +fn test_repeat() { + assert_eq!(*NUMBER, 6); + assert_eq!(*NUMBER, 6); + assert_eq!(*NUMBER, 6); +} + +#[test] +fn test_meta() { + // this would not compile if STRING were not marked #[derive(Copy, Clone)] + let copy_of_string = STRING; + // just to make sure it was copied + assert!(&STRING as *const _ != ©_of_string as *const _); + + // this would not compile if STRING were not marked #[derive(Debug)] + assert_eq!(format!("{:?}", STRING), "STRING { __private_field: () }".to_string()); +} + +mod visibility { + lazy_static! { + pub static ref FOO: Box = Box::new(0); + static ref BAR: Box = Box::new(98); + } + + pub mod inner { + lazy_static! { + pub(in visibility) static ref BAZ: Box = Box::new(42); + pub(crate) static ref BAG: Box = Box::new(37); + } + } + + #[test] + fn sub_test() { + assert_eq!(**FOO, 0); + assert_eq!(**BAR, 98); + assert_eq!(**inner::BAZ, 42); + assert_eq!(**inner::BAG, 37); + } +} + +#[test] +fn test_visibility() { + assert_eq!(*visibility::FOO, Box::new(0)); + assert_eq!(*visibility::inner::BAG, Box::new(37)); +} + +// This should not cause a warning about a missing Copy implementation +lazy_static! { + pub static ref VAR: i32 = { 0 }; +} + +#[derive(Copy, Clone, Debug, PartialEq)] +struct X; +struct Once(X); +const ONCE_INIT: Once = Once(X); +static DATA: X = X; +static ONCE: X = X; +fn require_sync() -> X { X } +fn transmute() -> X { X } +fn __static_ref_initialize() -> X { X } +fn test(_: Vec) -> X { X } + +// All these names should not be shadowed +lazy_static! 
{ + static ref ITEM_NAME_TEST: X = { + test(vec![X, Once(X).0, ONCE_INIT.0, DATA, ONCE, + require_sync(), transmute(), + // Except this, which will sadly be shadowed by internals: + // __static_ref_initialize() + ]) + }; +} + +#[test] +fn item_name_shadowing() { + assert_eq!(*ITEM_NAME_TEST, X); +} + +use std::sync::atomic::AtomicBool; +use std::sync::atomic::ATOMIC_BOOL_INIT; +use std::sync::atomic::Ordering::SeqCst; + +static PRE_INIT_FLAG: AtomicBool = ATOMIC_BOOL_INIT; + +lazy_static! { + static ref PRE_INIT: () = { + PRE_INIT_FLAG.store(true, SeqCst); + () + }; +} + +#[test] +fn pre_init() { + assert_eq!(PRE_INIT_FLAG.load(SeqCst), false); + lazy_static::initialize(&PRE_INIT); + assert_eq!(PRE_INIT_FLAG.load(SeqCst), true); +} + +lazy_static! { + static ref LIFETIME_NAME: for<'a> fn(&'a u8) = { fn f(_: &u8) {} f }; +} + +#[test] +fn lifetime_name() { + let _ = LIFETIME_NAME; +} diff --git a/lazycell/.cargo-checksum.json b/lazycell/.cargo-checksum.json new file mode 100644 index 000000000..3f261de16 --- /dev/null +++ b/lazycell/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{},"package":"ddba4c30a78328befecec92fc94970e53b3ae385827d28620f0f5bb2493081e0"} \ No newline at end of file diff --git a/lazycell/.pc/.quilt_patches b/lazycell/.pc/.quilt_patches new file mode 100644 index 000000000..6857a8d44 --- /dev/null +++ b/lazycell/.pc/.quilt_patches @@ -0,0 +1 @@ +debian/patches diff --git a/lazycell/.pc/.quilt_series b/lazycell/.pc/.quilt_series new file mode 100644 index 000000000..c2067066a --- /dev/null +++ b/lazycell/.pc/.quilt_series @@ -0,0 +1 @@ +series diff --git a/lazycell/.pc/.version b/lazycell/.pc/.version new file mode 100644 index 000000000..0cfbf0888 --- /dev/null +++ b/lazycell/.pc/.version @@ -0,0 +1 @@ +2 diff --git a/lazycell/.pc/applied-patches b/lazycell/.pc/applied-patches new file mode 100644 index 000000000..bce906376 --- /dev/null +++ b/lazycell/.pc/applied-patches @@ -0,0 +1 @@ +no-clippy.patch diff --git a/lazycell/.pc/no-clippy.patch/.timestamp b/lazycell/.pc/no-clippy.patch/.timestamp new file mode 100644 index 000000000..e69de29bb diff --git a/lazycell/.pc/no-clippy.patch/Cargo.toml b/lazycell/.pc/no-clippy.patch/Cargo.toml new file mode 100644 index 000000000..14bb08880 --- /dev/null +++ b/lazycell/.pc/no-clippy.patch/Cargo.toml @@ -0,0 +1,30 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. 
If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "lazycell" +version = "1.2.0" +authors = ["Alex Crichton ", "Nikita Pekin "] +include = ["CHANGELOG.md", "Cargo.toml", "LICENSE-MIT", "LICENSE-APACHE", "README.md", "src/**/*.rs"] +description = "A library providing a lazily filled Cell struct" +documentation = "http://indiv0.github.io/lazycell/lazycell/" +readme = "README.md" +keywords = ["lazycell", "lazy", "cell", "library"] +license = "MIT/Apache-2.0" +repository = "https://github.com/indiv0/lazycell" +[dependencies.clippy] +version = "0.0" +optional = true + +[features] +nightly = [] +nightly-testing = ["clippy", "nightly"] diff --git a/lazycell/CHANGELOG.md b/lazycell/CHANGELOG.md new file mode 100644 index 000000000..45974b07b --- /dev/null +++ b/lazycell/CHANGELOG.md @@ -0,0 +1,168 @@ + +## v1.2.0 (2018-09-19) + + +#### Features + +* add `LazyCell::replace` for infallible access ([a63ffb90](https://github.com/indiv0/lazycell/commit/a63ffb9040a5e0683a9bbf9d3d5ef589f2ca8b7c)) + + + + +## v1.1.0 (2018-09-10) + + +#### Documentation + +* add note regarding LazyCell::borrow_mut ([9d634d1f](https://github.com/indiv0/lazycell/commit/9d634d1fd9a21b7aa075d407bedf9fe77ba8b79f)) +* describe mutability more consistently ([b8078029](https://github.com/indiv0/lazycell/commit/b80780294611e92efddcdd33a701b3049ab5c5eb), closes [#78](https://github.com/indiv0/lazycell/issues/78)) + +#### Improvements + +* add NONE constant for an empty AtomicLazyCell ([31aff0da](https://github.com/indiv0/lazycell/commit/31aff0dacf824841c5f38ef4acf0aa71ec4c36eb), closes [#87](https://github.com/indiv0/lazycell/issues/87)) +* add `LazyCell::borrow_mut_with` and `LazyCell::try_borrow_mut_with` ([fdc6d268](https://github.com/indiv0/lazycell/commit/fdc6d268f0e9a6668768302f45fe2bb4aa9a7c34), closes [#79](https://github.com/indiv0/lazycell/issues/79), [#80](https://github.com/indiv0/lazycell/issues/80)) + + + + +## v1.0.0 (2018-06-06) + + +#### Features + +* Add #![no_std] ([e59f6b55](https://github.com/indiv0/lazycell/commit/e59f6b5531e310d3df26b0eb40b1431937f38096)) + + + + +## 0.6.0 (2017-11-25) + + +#### Bug Fixes + +* fix soundness hole in borrow_with ([d1f46bef](https://github.com/indiv0/lazycell/commit/d1f46bef9d1397570aa9c3e87e18e0d16e6d1585)) + +#### Features + +* add Default derives ([71bc5088](https://github.com/indiv0/lazycell/commit/71bc50880cd8e20002038197c9b890f5b76ad096)) +* add LazyCell::try_borrow_with ([bffa4028](https://github.com/indiv0/lazycell/commit/bffa402896670b5c78a9ec050d82a58ee98de6fb)) +* add LazyCell::borrow_mut method ([fd419dea](https://github.com/indiv0/lazycell/commit/fd419dea965ff1ad3853f26f37e8d107c6ca096c)) + +#### Breaking Changes + +* add `T: Send` for `AtomicLazyCell` `Sync` impl ([668bb2fa](https://github.com/indiv0/lazycell/commit/668bb2fa974fd6707c4c7edad292c76a9017d74d), closes [#67](https://github.com/indiv0/lazycell/issues/67)) + +#### Improvements + +* add `T: Send` for `AtomicLazyCell` `Sync` impl ([668bb2fa](https://github.com/indiv0/lazycell/commit/668bb2fa974fd6707c4c7edad292c76a9017d74d), closes [#67](https://github.com/indiv0/lazycell/issues/67)) + + + + +## v0.5.1 (2017-03-24) + + +#### Documentation + +* fix missing backticks ([44bafaaf](https://github.com/indiv0/lazycell/commit/44bafaaf93a91641261f58ee38adadcd4af6458e)) + +#### Improvements + +* derive `Debug` impls ([9da0a5a2](https://github.com/indiv0/lazycell/commit/9da0a5a2ffac1fef03ef02851c2c89d26d67d225)) + 
+#### Features + +* Add get method for Copy types ([dc8f8209](https://github.com/indiv0/lazycell/commit/dc8f8209888b6eba6d18717eba6a22614629b997)) + + + + +## v0.5.0 (2016-12-08) + + +#### Features + +* add borrow_with to LazyCell ([a15efa35](https://github.com/indiv0/lazycell/commit/a15efa359ea5a31a66ba57fc5b25f90c87b4b0dd)) + + + + +## (2016-08-17) + + +#### Breaking Changes + +* **LazyCell:** return Err(value) on full cell ([68f3415d](https://github.com/indiv0/lazycell/commit/68f3415dd5d6a66ba047a133b7028ebe4f1c5070), breaks [#](https://github.com/indiv0/lazycell/issues/)) + +#### Improvements + +* **LazyCell:** return Err(value) on full cell ([68f3415d](https://github.com/indiv0/lazycell/commit/68f3415dd5d6a66ba047a133b7028ebe4f1c5070), breaks [#](https://github.com/indiv0/lazycell/issues/)) + + + + +## (2016-08-16) + + +#### Features + +* add AtomicLazyCell which is thread-safe ([85afbd36](https://github.com/indiv0/lazycell/commit/85afbd36d8a148e14cc53654b39ddb523980124d)) + +#### Improvements + +* Use UnsafeCell instead of RefCell ([3347a8e9](https://github.com/indiv0/lazycell/commit/3347a8e97d2215a47e25c1e2fc953e8052ad8eb6)) + + + + +## (2016-04-18) + + +#### Documentation + +* put types in between backticks ([607cf939](https://github.com/indiv0/lazycell/commit/607cf939b05e35001ba3070ec7a0b17b064e7be1)) + + + + +## v0.2.0 (2016-03-28) + + +#### Features + +* **lazycell:** + * add tests for `LazyCell` struct ([38f1313d](https://github.com/indiv0/lazycell/commit/38f1313d98542ca8c98b424edfa9ba9c3975f99e), closes [#30](https://github.com/indiv0/lazycell/issues/30)) + * remove unnecessary `Default` impl ([68c16d2d](https://github.com/indiv0/lazycell/commit/68c16d2df4e9d13d5298162c06edf918246fd758)) + +#### Documentation + +* **CHANGELOG:** removed unnecessary sections ([1cc0555d](https://github.com/indiv0/lazycell/commit/1cc0555d875898a01b0832ff967aed6b40e720eb)) +* **README:** add link to documentation ([c8dc33f0](https://github.com/indiv0/lazycell/commit/c8dc33f01f2c0dc187f59ee53a2b73081053012b), closes [#13](https://github.com/indiv0/lazycell/issues/13)) + + + + +## v0.1.0 (2016-03-16) + + +#### Features + +* **lib.rs:** implement Default trait for LazyCell ([150a6304](https://github.com/indiv0/LazyCell/commit/150a6304a230ee1de8424e49c447ec1b2d6578ce)) + + + + +## v0.0.1 (2016-03-16) + + +#### Bug Fixes + +* **Cargo.toml:** loosen restrictions on Clippy version ([84dd8f96](https://github.com/indiv0/LazyCell/commit/84dd8f960000294f9dad47d776a41b98ed812981)) + +#### Features + +* add initial implementation ([4b39764a](https://github.com/indiv0/LazyCell/commit/4b39764a575bcb701dbd8047b966f72720fd18a4)) +* add initial commit ([a80407a9](https://github.com/indiv0/LazyCell/commit/a80407a907ef7c9401f120104663172f6965521a)) + + + diff --git a/lazycell/Cargo.toml b/lazycell/Cargo.toml new file mode 100644 index 000000000..0303a40e1 --- /dev/null +++ b/lazycell/Cargo.toml @@ -0,0 +1,23 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. 
If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "lazycell" +version = "1.2.0" +authors = ["Alex Crichton ", "Nikita Pekin "] +include = ["CHANGELOG.md", "Cargo.toml", "LICENSE-MIT", "LICENSE-APACHE", "README.md", "src/**/*.rs"] +description = "A library providing a lazily filled Cell struct" +documentation = "http://indiv0.github.io/lazycell/lazycell/" +readme = "README.md" +keywords = ["lazycell", "lazy", "cell", "library"] +license = "MIT/Apache-2.0" +repository = "https://github.com/indiv0/lazycell" diff --git a/lazycell/LICENSE-APACHE b/lazycell/LICENSE-APACHE new file mode 100644 index 000000000..16fe87b06 --- /dev/null +++ b/lazycell/LICENSE-APACHE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/lazycell/LICENSE-MIT b/lazycell/LICENSE-MIT new file mode 100644 index 000000000..9a871b847 --- /dev/null +++ b/lazycell/LICENSE-MIT @@ -0,0 +1,26 @@ +Original work Copyright (c) 2014 The Rust Project Developers +Modified work Copyright (c) 2016-2017 Nikita Pekin and lazycell contributors + +Permission is hereby granted, free of charge, to any +person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without +limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software +is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions +of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/lazycell/README.md b/lazycell/README.md new file mode 100644 index 000000000..4e1dd993f --- /dev/null +++ b/lazycell/README.md @@ -0,0 +1,72 @@ +# lazycell + + + + + + + + + +
+<!-- badge table (HTML stripped): travis-badge (Linux build), api-docs-badge, crates-io, license-badge, coveralls-badge -->
+ +Rust library providing a lazily filled Cell. + +# Table of Contents + +* [Usage](#usage) +* [Contributing](#contributing) +* [Credits](#credits) +* [License](#license) + +## Usage + +Add the following to your `Cargo.toml`: + +```toml +[dependencies] +lazycell = "1.2" +``` + +And in your `lib.rs` or `main.rs`: + +```rust +extern crate lazycell; +``` + +See the [API docs][api-docs] for information on using the crate in your library. + +## Contributing + +Contributions are always welcome! +If you have an idea for something to add (code, documentation, tests, examples, +etc.) feel free to give it a shot. + +Please read [CONTRIBUTING.md][contributing] before you start contributing. + +## Credits + +The LazyCell library is based originally on work by The Rust Project Developers +for the project [crates.io][crates-io-repo]. + +The list of contributors to this project can be found at +[CONTRIBUTORS.md][contributors]. + +## License + +LazyCell is distributed under the terms of both the MIT license and the Apache +License (Version 2.0). + +See [LICENSE-APACHE][license-apache], and [LICENSE-MIT][license-mit] for details. + +[api-docs]: https://indiv0.github.io/lazycell/lazycell +[contributing]: https://github.com/indiv0/lazycell/blob/master/CONTRIBUTING.md "Contribution Guide" +[contributors]: https://github.com/indiv0/lazycell/blob/master/CONTRIBUTORS.md "List of Contributors" +[crates-io-repo]: https://github.com/rust-lang/crates.io "rust-lang/crates.io: Source code for crates.io" +[license-apache]: https://github.com/indiv0/lazycell/blob/master/LICENSE-APACHE "Apache-2.0 License" +[license-mit]: https://github.com/indiv0/lazycell/blob/master/LICENSE-MIT "MIT License" diff --git a/lazycell/debian/patches/no-clippy.patch b/lazycell/debian/patches/no-clippy.patch new file mode 100644 index 000000000..5881ed6da --- /dev/null +++ b/lazycell/debian/patches/no-clippy.patch @@ -0,0 +1,11 @@ +--- a/Cargo.toml ++++ b/Cargo.toml +@@ -23,8 +23 @@ license = "MIT/Apache-2.0" + repository = "https://github.com/indiv0/lazycell" +-[dependencies.clippy] +-version = "0.0" +-optional = true +- +-[features] +-nightly = [] +-nightly-testing = ["clippy", "nightly"] diff --git a/lazycell/debian/patches/no-clippy.patch~ b/lazycell/debian/patches/no-clippy.patch~ new file mode 100644 index 000000000..5881ed6da --- /dev/null +++ b/lazycell/debian/patches/no-clippy.patch~ @@ -0,0 +1,11 @@ +--- a/Cargo.toml ++++ b/Cargo.toml +@@ -23,8 +23 @@ license = "MIT/Apache-2.0" + repository = "https://github.com/indiv0/lazycell" +-[dependencies.clippy] +-version = "0.0" +-optional = true +- +-[features] +-nightly = [] +-nightly-testing = ["clippy", "nightly"] diff --git a/lazycell/debian/patches/series b/lazycell/debian/patches/series new file mode 100644 index 000000000..bce906376 --- /dev/null +++ b/lazycell/debian/patches/series @@ -0,0 +1 @@ +no-clippy.patch diff --git a/lazycell/src/lib.rs b/lazycell/src/lib.rs new file mode 100644 index 000000000..b72cfffaa --- /dev/null +++ b/lazycell/src/lib.rs @@ -0,0 +1,583 @@ +// Original work Copyright (c) 2014 The Rust Project Developers +// Modified work Copyright (c) 2016-2018 Nikita Pekin and the lazycell contributors +// See the README.md file at the top-level directory of this distribution. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +#![cfg_attr(not(test), no_std)] + +#![deny(missing_docs)] +#![cfg_attr(feature = "nightly", feature(plugin))] +#![cfg_attr(feature = "clippy", plugin(clippy))] + +//! This crate provides a `LazyCell` struct which acts as a lazily filled +//! `Cell`. +//! +//! With a `RefCell`, the inner contents cannot be borrowed for the lifetime of +//! the entire object, but only of the borrows returned. A `LazyCell` is a +//! variation on `RefCell` which allows borrows to be tied to the lifetime of +//! the outer object. +//! +//! # Example +//! +//! The following example shows a quick example of the basic functionality of +//! `LazyCell`. +//! +//! ``` +//! use lazycell::LazyCell; +//! +//! let lazycell = LazyCell::new(); +//! +//! assert_eq!(lazycell.borrow(), None); +//! assert!(!lazycell.filled()); +//! lazycell.fill(1).ok(); +//! assert!(lazycell.filled()); +//! assert_eq!(lazycell.borrow(), Some(&1)); +//! assert_eq!(lazycell.into_inner(), Some(1)); +//! ``` +//! +//! `AtomicLazyCell` is a variant that uses an atomic variable to manage +//! coordination in a thread-safe fashion. The limitation of an `AtomicLazyCell` +//! is that after it is initialized, it can't be modified. + + +#[cfg(not(test))] +#[macro_use] +extern crate core as std; + +use std::cell::UnsafeCell; +use std::mem; +use std::sync::atomic::{AtomicUsize, Ordering}; + +/// A lazily filled `Cell`, with mutable contents. +/// +/// A `LazyCell` is completely frozen once filled, **unless** you have `&mut` +/// access to it, in which case `LazyCell::borrow_mut` may be used to mutate the +/// contents. +#[derive(Debug, Default)] +pub struct LazyCell { + inner: UnsafeCell>, +} + +impl LazyCell { + /// Creates a new, empty, `LazyCell`. + pub fn new() -> LazyCell { + LazyCell { inner: UnsafeCell::new(None) } + } + + /// Put a value into this cell. + /// + /// This function will return `Err(value)` is the cell is already full. + pub fn fill(&self, value: T) -> Result<(), T> { + let slot = unsafe { &mut *self.inner.get() }; + if slot.is_some() { + return Err(value); + } + *slot = Some(value); + + Ok(()) + } + + /// Put a value into this cell. + /// + /// Note that this function is infallible but requires `&mut self`. By + /// requiring `&mut self` we're guaranteed that no active borrows to this + /// cell can exist so we can always fill in the value. This may not always + /// be usable, however, as `&mut self` may not be possible to borrow. + /// + /// # Return value + /// + /// This function returns the previous value, if any. + pub fn replace(&mut self, value: T) -> Option { + mem::replace(unsafe { &mut *self.inner.get() }, Some(value)) + } + + /// Test whether this cell has been previously filled. + pub fn filled(&self) -> bool { + self.borrow().is_some() + } + + /// Borrows the contents of this lazy cell for the duration of the cell + /// itself. + /// + /// This function will return `Some` if the cell has been previously + /// initialized, and `None` if it has not yet been initialized. + pub fn borrow(&self) -> Option<&T> { + unsafe { &*self.inner.get() }.as_ref() + } + + /// Borrows the contents of this lazy cell mutably for the duration of the cell + /// itself. + /// + /// This function will return `Some` if the cell has been previously + /// initialized, and `None` if it has not yet been initialized. + pub fn borrow_mut(&mut self) -> Option<&mut T> { + unsafe { &mut *self.inner.get() }.as_mut() + } + + /// Borrows the contents of this lazy cell for the duration of the cell + /// itself. 
+ /// + /// If the cell has not yet been filled, the cell is first filled using the + /// function provided. + /// + /// # Panics + /// + /// Panics if the cell becomes filled as a side effect of `f`. + pub fn borrow_with T>(&self, f: F) -> &T { + if let Some(value) = self.borrow() { + return value; + } + let value = f(); + if self.fill(value).is_err() { + panic!("borrow_with: cell was filled by closure") + } + self.borrow().unwrap() + } + + /// Borrows the contents of this `LazyCell` mutably for the duration of the + /// cell itself. + /// + /// If the cell has not yet been filled, the cell is first filled using the + /// function provided. + /// + /// # Panics + /// + /// Panics if the cell becomes filled as a side effect of `f`. + pub fn borrow_mut_with T>(&mut self, f: F) -> &mut T { + if !self.filled() { + let value = f(); + if self.fill(value).is_err() { + panic!("borrow_mut_with: cell was filled by closure") + } + } + + self.borrow_mut().unwrap() + } + + /// Same as `borrow_with`, but allows the initializing function to fail. + /// + /// # Panics + /// + /// Panics if the cell becomes filled as a side effect of `f`. + pub fn try_borrow_with(&self, f: F) -> Result<&T, E> + where F: FnOnce() -> Result + { + if let Some(value) = self.borrow() { + return Ok(value); + } + let value = f()?; + if self.fill(value).is_err() { + panic!("try_borrow_with: cell was filled by closure") + } + Ok(self.borrow().unwrap()) + } + + /// Same as `borrow_mut_with`, but allows the initializing function to fail. + /// + /// # Panics + /// + /// Panics if the cell becomes filled as a side effect of `f`. + pub fn try_borrow_mut_with(&mut self, f: F) -> Result<&mut T, E> + where F: FnOnce() -> Result + { + if self.filled() { + return Ok(self.borrow_mut().unwrap()); + } + let value = f()?; + if self.fill(value).is_err() { + panic!("try_borrow_mut_with: cell was filled by closure") + } + Ok(self.borrow_mut().unwrap()) + } + + /// Consumes this `LazyCell`, returning the underlying value. + pub fn into_inner(self) -> Option { + // Rust 1.25 changed UnsafeCell::into_inner() from unsafe to safe + // function. This unsafe can be removed when supporting Rust older than + // 1.25 is not needed. + #[allow(unused_unsafe)] + unsafe { self.inner.into_inner() } + } +} + +impl LazyCell { + /// Returns a copy of the contents of the lazy cell. + /// + /// This function will return `Some` if the cell has been previously initialized, + /// and `None` if it has not yet been initialized. + pub fn get(&self) -> Option { + unsafe { *self.inner.get() } + } +} + +// Tracks the AtomicLazyCell inner state +const NONE: usize = 0; +const LOCK: usize = 1; +const SOME: usize = 2; + +/// A lazily filled and thread-safe `Cell`, with frozen contents. +#[derive(Debug, Default)] +pub struct AtomicLazyCell { + inner: UnsafeCell>, + state: AtomicUsize, +} + +impl AtomicLazyCell { + /// An empty `AtomicLazyCell`. + pub const NONE: Self = Self { + inner: UnsafeCell::new(None), + state: AtomicUsize::new(NONE), + }; + + /// Creates a new, empty, `AtomicLazyCell`. + pub fn new() -> AtomicLazyCell { + Self::NONE + } + + /// Put a value into this cell. + /// + /// This function will return `Err(value)` is the cell is already full. 
+ pub fn fill(&self, t: T) -> Result<(), T> { + if NONE != self.state.compare_and_swap(NONE, LOCK, Ordering::Acquire) { + return Err(t); + } + + unsafe { *self.inner.get() = Some(t) }; + + if LOCK != self.state.compare_and_swap(LOCK, SOME, Ordering::Release) { + panic!("unable to release lock"); + } + + Ok(()) + } + + /// Put a value into this cell. + /// + /// Note that this function is infallible but requires `&mut self`. By + /// requiring `&mut self` we're guaranteed that no active borrows to this + /// cell can exist so we can always fill in the value. This may not always + /// be usable, however, as `&mut self` may not be possible to borrow. + /// + /// # Return value + /// + /// This function returns the previous value, if any. + pub fn replace(&mut self, value: T) -> Option { + match mem::replace(self.state.get_mut(), SOME) { + NONE | SOME => {} + _ => panic!("cell in inconsistent state"), + } + mem::replace(unsafe { &mut *self.inner.get() }, Some(value)) + } + + /// Test whether this cell has been previously filled. + pub fn filled(&self) -> bool { + self.state.load(Ordering::Acquire) == SOME + } + + /// Borrows the contents of this lazy cell for the duration of the cell + /// itself. + /// + /// This function will return `Some` if the cell has been previously + /// initialized, and `None` if it has not yet been initialized. + pub fn borrow(&self) -> Option<&T> { + match self.state.load(Ordering::Acquire) { + SOME => unsafe { &*self.inner.get() }.as_ref(), + _ => None, + } + } + + /// Consumes this `LazyCell`, returning the underlying value. + pub fn into_inner(self) -> Option { + // Rust 1.25 changed UnsafeCell::into_inner() from unsafe to safe + // function. This unsafe can be removed when supporting Rust older than + // 1.25 is not needed. + #[allow(unused_unsafe)] + unsafe { self.inner.into_inner() } + } +} + +impl AtomicLazyCell { + /// Returns a copy of the contents of the lazy cell. + /// + /// This function will return `Some` if the cell has been previously initialized, + /// and `None` if it has not yet been initialized. 
+ pub fn get(&self) -> Option { + match self.state.load(Ordering::Acquire) { + SOME => unsafe { *self.inner.get() }, + _ => None, + } + } +} + +unsafe impl Sync for AtomicLazyCell {} + +unsafe impl Send for AtomicLazyCell {} + +#[cfg(test)] +mod tests { + use super::{AtomicLazyCell, LazyCell}; + + #[test] + fn test_borrow_from_empty() { + let lazycell: LazyCell = LazyCell::new(); + + let value = lazycell.borrow(); + assert_eq!(value, None); + + let value = lazycell.get(); + assert_eq!(value, None); + } + + #[test] + fn test_fill_and_borrow() { + let lazycell = LazyCell::new(); + + assert!(!lazycell.filled()); + lazycell.fill(1).unwrap(); + assert!(lazycell.filled()); + + let value = lazycell.borrow(); + assert_eq!(value, Some(&1)); + + let value = lazycell.get(); + assert_eq!(value, Some(1)); + } + + #[test] + fn test_borrow_mut() { + let mut lazycell = LazyCell::new(); + assert!(lazycell.borrow_mut().is_none()); + + lazycell.fill(1).unwrap(); + assert_eq!(lazycell.borrow_mut(), Some(&mut 1)); + + *lazycell.borrow_mut().unwrap() = 2; + assert_eq!(lazycell.borrow_mut(), Some(&mut 2)); + + // official way to reset the cell + lazycell = LazyCell::new(); + assert!(lazycell.borrow_mut().is_none()); + } + + #[test] + fn test_already_filled_error() { + let lazycell = LazyCell::new(); + + lazycell.fill(1).unwrap(); + assert_eq!(lazycell.fill(1), Err(1)); + } + + #[test] + fn test_borrow_with() { + let lazycell = LazyCell::new(); + + let value = lazycell.borrow_with(|| 1); + assert_eq!(&1, value); + } + + #[test] + fn test_borrow_with_already_filled() { + let lazycell = LazyCell::new(); + lazycell.fill(1).unwrap(); + + let value = lazycell.borrow_with(|| 1); + assert_eq!(&1, value); + } + + #[test] + fn test_borrow_with_not_called_when_filled() { + let lazycell = LazyCell::new(); + + lazycell.fill(1).unwrap(); + + let value = lazycell.borrow_with(|| 2); + assert_eq!(&1, value); + } + + #[test] + #[should_panic] + fn test_borrow_with_sound_with_reentrancy() { + // Kudos to dbaupp for discovering this issue + // https://www.reddit.com/r/rust/comments/5vs9rt/lazycell_a_rust_library_providing_a_lazilyfilled/de527xm/ + let lazycell: LazyCell> = LazyCell::new(); + + let mut reference: Option<&i32> = None; + + lazycell.borrow_with(|| { + let _ = lazycell.fill(Box::new(1)); + reference = lazycell.borrow().map(|r| &**r); + Box::new(2) + }); + } + + #[test] + fn test_borrow_mut_with() { + let mut lazycell = LazyCell::new(); + + { + let value = lazycell.borrow_mut_with(|| 1); + assert_eq!(&mut 1, value); + *value = 2; + } + assert_eq!(&2, lazycell.borrow().unwrap()); + } + + #[test] + fn test_borrow_mut_with_already_filled() { + let mut lazycell = LazyCell::new(); + lazycell.fill(1).unwrap(); + + let value = lazycell.borrow_mut_with(|| 1); + assert_eq!(&1, value); + } + + #[test] + fn test_borrow_mut_with_not_called_when_filled() { + let mut lazycell = LazyCell::new(); + + lazycell.fill(1).unwrap(); + + let value = lazycell.borrow_mut_with(|| 2); + assert_eq!(&1, value); + } + + #[test] + fn test_try_borrow_with_ok() { + let lazycell = LazyCell::new(); + let result = lazycell.try_borrow_with::<(), _>(|| Ok(1)); + assert_eq!(result, Ok(&1)); + } + + #[test] + fn test_try_borrow_with_err() { + let lazycell = LazyCell::<()>::new(); + let result = lazycell.try_borrow_with(|| Err(1)); + assert_eq!(result, Err(1)); + } + + #[test] + fn test_try_borrow_with_already_filled() { + let lazycell = LazyCell::new(); + lazycell.fill(1).unwrap(); + let result = lazycell.try_borrow_with::<(), _>(|| unreachable!()); + 
assert_eq!(result, Ok(&1)); + } + + #[test] + #[should_panic] + fn test_try_borrow_with_sound_with_reentrancy() { + let lazycell: LazyCell> = LazyCell::new(); + + let mut reference: Option<&i32> = None; + + let _ = lazycell.try_borrow_with::<(), _>(|| { + let _ = lazycell.fill(Box::new(1)); + reference = lazycell.borrow().map(|r| &**r); + Ok(Box::new(2)) + }); + } + + #[test] + fn test_try_borrow_mut_with_ok() { + let mut lazycell = LazyCell::new(); + { + let result = lazycell.try_borrow_mut_with::<(), _>(|| Ok(1)); + assert_eq!(result, Ok(&mut 1)); + *result.unwrap() = 2; + } + assert_eq!(&mut 2, lazycell.borrow().unwrap()); + } + + #[test] + fn test_try_borrow_mut_with_err() { + let mut lazycell = LazyCell::<()>::new(); + let result = lazycell.try_borrow_mut_with(|| Err(1)); + assert_eq!(result, Err(1)); + } + + #[test] + fn test_try_borrow_mut_with_already_filled() { + let mut lazycell = LazyCell::new(); + lazycell.fill(1).unwrap(); + let result = lazycell.try_borrow_mut_with::<(), _>(|| unreachable!()); + assert_eq!(result, Ok(&mut 1)); + } + + #[test] + fn test_into_inner() { + let lazycell = LazyCell::new(); + + lazycell.fill(1).unwrap(); + let value = lazycell.into_inner(); + assert_eq!(value, Some(1)); + } + + #[test] + fn test_atomic_borrow_from_empty() { + let lazycell: AtomicLazyCell = AtomicLazyCell::new(); + + let value = lazycell.borrow(); + assert_eq!(value, None); + + let value = lazycell.get(); + assert_eq!(value, None); + } + + #[test] + fn test_atomic_fill_and_borrow() { + let lazycell = AtomicLazyCell::new(); + + assert!(!lazycell.filled()); + lazycell.fill(1).unwrap(); + assert!(lazycell.filled()); + + let value = lazycell.borrow(); + assert_eq!(value, Some(&1)); + + let value = lazycell.get(); + assert_eq!(value, Some(1)); + } + + #[test] + fn test_atomic_already_filled_panic() { + let lazycell = AtomicLazyCell::new(); + + lazycell.fill(1).unwrap(); + assert_eq!(1, lazycell.fill(1).unwrap_err()); + } + + #[test] + fn test_atomic_into_inner() { + let lazycell = AtomicLazyCell::new(); + + lazycell.fill(1).unwrap(); + let value = lazycell.into_inner(); + assert_eq!(value, Some(1)); + } + + #[test] + fn normal_replace() { + let mut cell = LazyCell::new(); + assert_eq!(cell.fill(1), Ok(())); + assert_eq!(cell.replace(2), Some(1)); + assert_eq!(cell.replace(3), Some(2)); + assert_eq!(cell.borrow(), Some(&3)); + + let mut cell = LazyCell::new(); + assert_eq!(cell.replace(2), None); + } + + #[test] + fn atomic_replace() { + let mut cell = AtomicLazyCell::new(); + assert_eq!(cell.fill(1), Ok(())); + assert_eq!(cell.replace(2), Some(1)); + assert_eq!(cell.replace(3), Some(2)); + assert_eq!(cell.borrow(), Some(&3)); + } +} diff --git a/libc/.cargo-checksum.json b/libc/.cargo-checksum.json new file mode 100644 index 000000000..23b2d5b48 --- /dev/null +++ b/libc/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{},"package":"76e3a3ef172f1a0b9a9ff0dd1491ae5e6c948b94479a3021819ba7d860c8645d"} \ No newline at end of file diff --git a/libc/Cargo.toml b/libc/Cargo.toml new file mode 100644 index 000000000..876b127c0 --- /dev/null +++ b/libc/Cargo.toml @@ -0,0 +1,33 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. 
If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "libc" +version = "0.2.43" +authors = ["The Rust Project Developers"] +description = "A library for types and bindings to native C functions often found in libc or\nother common platform libraries.\n" +homepage = "https://github.com/rust-lang/libc" +documentation = "http://doc.rust-lang.org/libc" +readme = "README.md" +license = "MIT/Apache-2.0" +repository = "https://github.com/rust-lang/libc" + +[features] +align = [] +default = ["use_std"] +use_std = [] +[badges.appveyor] +project_name = "rust-lang-libs/libc" +repository = "rust-lang/libc" + +[badges.travis-ci] +repository = "rust-lang/libc" diff --git a/libc/LICENSE-APACHE b/libc/LICENSE-APACHE new file mode 100644 index 000000000..16fe87b06 --- /dev/null +++ b/libc/LICENSE-APACHE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/libc/LICENSE-MIT b/libc/LICENSE-MIT new file mode 100644 index 000000000..39d4bdb5a --- /dev/null +++ b/libc/LICENSE-MIT @@ -0,0 +1,25 @@ +Copyright (c) 2014 The Rust Project Developers + +Permission is hereby granted, free of charge, to any +person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without +limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software +is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions +of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/libc/README.md b/libc/README.md new file mode 100644 index 000000000..a19a56ee0 --- /dev/null +++ b/libc/README.md @@ -0,0 +1,174 @@ +libc +==== + +A Rust library with native bindings to the types and functions commonly found on +various systems, including libc. 
+ +[![Build Status](https://travis-ci.org/rust-lang/libc.svg?branch=master)](https://travis-ci.org/rust-lang/libc) +[![Build status](https://ci.appveyor.com/api/projects/status/github/rust-lang/libc?svg=true)](https://ci.appveyor.com/project/rust-lang-libs/libc) +[![Latest version](https://img.shields.io/crates/v/libc.svg)](https://crates.io/crates/libc) +[![Documentation](https://docs.rs/libc/badge.svg)](https://docs.rs/libc) +![License](https://img.shields.io/crates/l/libc.svg) + + +## Usage + +First, add the following to your `Cargo.toml`: + +```toml +[dependencies] +libc = "0.2" +``` + +Next, add this to your crate root: + +```rust +extern crate libc; +``` + +Currently libc by default links to the standard library, but if you would +instead like to use libc in a `#![no_std]` situation or crate you can request +this via: + +```toml +[dependencies] +libc = { version = "0.2", default-features = false } +``` + +By default libc uses private fields in structs in order to enforce a certain +memory alignment on them. These structs can be hard to instantiate outside of +libc. To make libc use `#[repr(align(x))]`, instead of the private fields, +activate the *align* feature. This requires Rust 1.25 or newer: + +```toml +[dependencies] +libc = { version = "0.2", features = ["align"] } +``` + +## What is libc? + +The primary purpose of this crate is to provide all of the definitions necessary +to easily interoperate with C code (or "C-like" code) on each of the platforms +that Rust supports. This includes type definitions (e.g. `c_int`), constants +(e.g. `EINVAL`) as well as function headers (e.g. `malloc`). + +This crate does not strive to have any form of compatibility across platforms, +but rather it is simply a straight binding to the system libraries on the +platform in question. + +## Public API + +This crate exports all underlying platform types, functions, and constants under +the crate root, so all items are accessible as `libc::foo`. The types and values +of all the exported APIs match the platform that libc is compiled for. + +More detailed information about the design of this library can be found in its +[associated RFC][rfc]. + +[rfc]: https://github.com/rust-lang/rfcs/blob/master/text/1291-promote-libc.md + +## Adding an API + +Want to use an API which currently isn't bound in `libc`? It's quite easy to add +one! + +The internal structure of this crate is designed to minimize the number of +`#[cfg]` attributes in order to easily be able to add new items which apply +to all platforms in the future. As a result, the crate is organized +hierarchically based on platform. Each module has a number of `#[cfg]`'d +children, but only one is ever actually compiled. Each module then reexports all +the contents of its children. + +This means that for each platform that libc supports, the path from a +leaf module to the root will contain all bindings for the platform in question. +Consequently, this indicates where an API should be added! Adding an API at a +particular level in the hierarchy means that it is supported on all the child +platforms of that level. For example, when adding a Unix API it should be added +to `src/unix/mod.rs`, but when adding a Linux-only API it should be added to +`src/unix/notbsd/linux/mod.rs`. + +If you're not 100% sure at what level of the hierarchy an API should be added +at, fear not! 
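For a concrete picture of what such an addition usually amounts to, here is a small hypothetical, standalone sketch (it is not actual libc source; a real addition lives in the module hierarchy described above and uses the crate's own type aliases rather than `std::os::raw`):

```rust
// Hypothetical sketch of the kinds of items a new binding adds.
use std::os::raw::c_int;

// A constant mirroring something the C headers define (name is illustrative).
pub const EXAMPLE_FLAG: c_int = 0x1;

extern "C" {
    // A foreign function is declared as-is; libc adds no wrappers around it.
    pub fn getuid() -> u32;
}

fn main() {
    let uid = unsafe { getuid() };
    println!("uid = {}, flag = {}", uid, EXAMPLE_FLAG);
}
```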
This crate has CI support which tests any binding against all +platforms supported, so you'll see failures if an API is added at the wrong +level or has different signatures across platforms. + +With that in mind, the steps for adding a new API are: + +1. Determine where in the module hierarchy your API should be added. +2. Add the API. +3. Send a PR to this repo. +4. Wait for CI to pass, fixing errors. +5. Wait for a merge! + +### Test before you commit + +We have two automated tests running on [Travis](https://travis-ci.org/rust-lang/libc): + +1. [`libc-test`](https://github.com/alexcrichton/ctest) + - `cd libc-test && cargo test` + - Use the `skip_*()` functions in `build.rs` if you really need a workaround. +2. Style checker + - `rustc ci/style.rs && ./style src` + +### Releasing your change to crates.io + +Now that you've done the amazing job of landing your new API or your new +platform in this crate, the next step is to get that sweet, sweet usage from +crates.io! The only next step is to bump the version of libc and then publish +it. If you'd like to get a release out ASAP you can follow these steps: + +1. Update the version number in `Cargo.toml`, you'll just be bumping the patch + version number. +2. Run `cargo update` to regenerate the lockfile to encode your version bump in + the lock file. You may pull in some other updated dependencies, that's ok. +3. Send a PR to this repository. It should [look like this][example], but it'd + also be nice to fill out the description with a small rationale for the + release (any rationale is ok though!) +4. Once merged the release will be tagged and published by one of the libc crate + maintainers. + +[example]: https://github.com/rust-lang/libc/pull/583 + +## Platforms and Documentation + +The following platforms are currently tested and have documentation available: + +Tested: + * [`i686-pc-windows-msvc`](https://rust-lang.github.io/libc/i686-pc-windows-msvc/libc/) + * [`x86_64-pc-windows-msvc`](https://rust-lang.github.io/libc/x86_64-pc-windows-msvc/libc/) + (Windows) + * [`i686-pc-windows-gnu`](https://rust-lang.github.io/libc/i686-pc-windows-gnu/libc/) + * [`x86_64-pc-windows-gnu`](https://rust-lang.github.io/libc/x86_64-pc-windows-gnu/libc/) + * [`i686-apple-darwin`](https://rust-lang.github.io/libc/i686-apple-darwin/libc/) + * [`x86_64-apple-darwin`](https://rust-lang.github.io/libc/x86_64-apple-darwin/libc/) + (OSX) + * `i386-apple-ios` + * `x86_64-apple-ios` + * [`i686-unknown-linux-gnu`](https://rust-lang.github.io/libc/i686-unknown-linux-gnu/libc/) + * [`x86_64-unknown-linux-gnu`](https://rust-lang.github.io/libc/x86_64-unknown-linux-gnu/libc/) + (Linux) + * [`x86_64-unknown-linux-musl`](https://rust-lang.github.io/libc/x86_64-unknown-linux-musl/libc/) + (Linux MUSL) + * [`aarch64-unknown-linux-gnu`](https://rust-lang.github.io/libc/aarch64-unknown-linux-gnu/libc/) + (Linux) + * `aarch64-unknown-linux-musl` + (Linux MUSL) + * [`sparc64-unknown-linux-gnu`](https://rust-lang.github.io/libc/sparc64-unknown-linux-gnu/libc/) + (Linux) + * [`mips-unknown-linux-gnu`](https://rust-lang.github.io/libc/mips-unknown-linux-gnu/libc/) + * [`arm-unknown-linux-gnueabihf`](https://rust-lang.github.io/libc/arm-unknown-linux-gnueabihf/libc/) + * [`arm-linux-androideabi`](https://rust-lang.github.io/libc/arm-linux-androideabi/libc/) + (Android) + * [`x86_64-unknown-freebsd`](https://rust-lang.github.io/libc/x86_64-unknown-freebsd/libc/) + * [`x86_64-unknown-openbsd`](https://rust-lang.github.io/libc/x86_64-unknown-openbsd/libc/) + * 
[`x86_64-rumprun-netbsd`](https://rust-lang.github.io/libc/x86_64-unknown-netbsd/libc/) + +The following may be supported, but are not guaranteed to always work: + + * `i686-unknown-freebsd` + * [`x86_64-unknown-bitrig`](https://rust-lang.github.io/libc/x86_64-unknown-bitrig/libc/) + * [`x86_64-unknown-dragonfly`](https://rust-lang.github.io/libc/x86_64-unknown-dragonfly/libc/) + * `i686-unknown-haiku` + * `x86_64-unknown-haiku` + * [`x86_64-unknown-netbsd`](https://rust-lang.github.io/libc/x86_64-unknown-netbsd/libc/) + * [`x86_64-sun-solaris`](https://rust-lang.github.io/libc/x86_64-sun-solaris/libc/) diff --git a/libc/appveyor.yml b/libc/appveyor.yml new file mode 100644 index 000000000..fe2a332a1 --- /dev/null +++ b/libc/appveyor.yml @@ -0,0 +1,28 @@ +environment: + # When this was added there were revocation check failures when using the + # libcurl backend as libcurl checks by default, but rustup doesn't provide the + # switch to turn this off. Switch to Hyper which looks to not check for + # revocation by default like libcurl does. + RUSTUP_USE_HYPER: 1 + CARGO_HTTP_CHECK_REVOKE: false + matrix: + - TARGET: x86_64-pc-windows-gnu + MSYS2_BITS: 64 + - TARGET: i686-pc-windows-gnu + MSYS2_BITS: 32 + - TARGET: x86_64-pc-windows-msvc + - TARGET: i686-pc-windows-msvc +install: + - appveyor-retry appveyor DownloadFile https://win.rustup.rs/ -FileName rustup-init.exe + - rustup-init.exe -y --default-host %TARGET% + - set PATH=%PATH%;C:\Users\appveyor\.cargo\bin + - if defined MSYS2_BITS set PATH=%PATH%;C:\msys64\mingw%MSYS2_BITS%\bin + - rustc -V + - cargo -V + +build: false + +test_script: + - cargo test --target %TARGET% + - cargo test --no-default-features --target %TARGET% + - cargo test --manifest-path libc-test/Cargo.toml --target %TARGET% diff --git a/libc/ci/README.md b/libc/ci/README.md new file mode 100644 index 000000000..28152e5d0 --- /dev/null +++ b/libc/ci/README.md @@ -0,0 +1,243 @@ +The goal of the libc crate is to have CI running everywhere to have the +strongest guarantees about the definitions that this library contains, and as a +result the CI is pretty complicated and also pretty large! Hopefully this can +serve as a guide through the sea of scripts in this directory and elsewhere in +this project. + +# Files + +First up, let's talk about the files in this directory: + +* `run-travis.sh` - a shell script run by all Travis builders, this is + responsible for setting up the rest of the environment such as installing new + packages, downloading Rust target libraries, etc. + +* `run.sh` - the actual script which runs tests for a particular architecture. + Called from the `run-travis.sh` script this will run all tests for the target + specified. + +* `cargo-config` - Cargo configuration of linkers to use copied into place by + the `run-travis.sh` script before builds are run. + +* `dox.sh` - script called from `run-travis.sh` on only the linux 64-bit nightly + Travis bots to build documentation for this crate. + +* `landing-page-*.html` - used by `dox.sh` to generate a landing page for all + architectures' documentation. + +* `run-qemu.sh` - see discussion about QEMU below + +* `mips`, `rumprun` - instructions to build the docker image for each respective + CI target + +# CI Systems + +Currently this repository leverages a combination of Travis CI and AppVeyor for +running tests. 
The triples tested are: + +* AppVeyor + * `{i686,x86_64}-pc-windows-{msvc,gnu}` +* Travis + * `{i686,x86_64,mips,aarch64}-unknown-linux-gnu` + * `{x86_64,aarch64}-unknown-linux-musl` + * `arm-unknown-linux-gnueabihf` + * `arm-linux-androideabi` + * `{i686,x86_64}-apple-{darwin,ios}` + * `x86_64-rumprun-netbsd` + * `x86_64-unknown-freebsd` + * `x86_64-unknown-openbsd` + +The Windows triples are all pretty standard, they just set up their environment +then run tests, no need for downloading any extra target libs (we just download +the right installer). The Intel Linux/OSX builds are similar in that we just +download the right target libs and run tests. Note that the Intel Linux/OSX +builds are run on stable/beta/nightly, but are the only ones that do so. + +The remaining architectures look like: + +* Android runs in a [docker image][android-docker] with an emulator, the NDK, + and the SDK already set up. The entire build happens within the docker image. +* The MIPS, ARM, and AArch64 builds all use the QEMU userspace emulator to run + the generated binary to actually verify the tests pass. +* The MUSL build just has to download a MUSL compiler and target libraries and + then otherwise runs tests normally. +* iOS builds need an extra linker flag currently, but beyond that they're built + as standard as everything else. +* The rumprun target builds an entire kernel from the test suite and then runs + it inside QEMU using the serial console to test whether it succeeded or + failed. +* The BSD builds, currently OpenBSD and FreeBSD, use QEMU to boot up a system + and compile/run tests. More information on that below. + +[android-docker]: https://github.com/rust-lang/rust-buildbot/blob/master/slaves/android/Dockerfile + +## QEMU + +Lots of the architectures tested here use QEMU in the tests, so it's worth going +over all the crazy capabilities QEMU has and the various flavors in which we use +it! + +First up, QEMU has userspace emulation where it doesn't boot a full kernel, it +just runs a binary from another architecture (using the `qemu-` wrappers). +We provide it the runtime path for the dynamically loaded system libraries, +however. This strategy is used for all Linux architectures that aren't intel. +Note that one downside of this QEMU system is that threads are barely +implemented, so we're careful to not spawn many threads. + +For the rumprun target the only output is a kernel image, so we just use that +plus the `rumpbake` command to create a full kernel image which is then run from +within QEMU. + +Finally, the fun part, the BSDs. Quite a few hoops are jumped through to get CI +working for these platforms, but the gist of it looks like: + +* Cross compiling from Linux to any of the BSDs seems to be quite non-standard. + We may be able to get it working but it might be difficult at that point to + ensure that the libc definitions align with what you'd get on the BSD itself. + As a result, we try to do compiles within the BSD distro. +* On Travis we can't run a VM-in-a-VM, so we resort to userspace emulation + (QEMU). +* Unfortunately on Travis we also can't use KVM, so the emulation is super slow. + +With all that in mind, the way BSD is tested looks like: + +1. Download a pre-prepared image for the OS being tested. +2. Generate the tests for the OS being tested. This involves running the `ctest` + library over libc to generate a Rust file and a C file which will then be + compiled into the final test. +3. Generate a disk image which will later be mounted by the OS being tested. 
+ This image is mostly just the libc directory, but some modifications are made + to compile the generated files from step 2. +4. The kernel is booted in QEMU, and it is configured to detect the libc-test + image being available, run the test script, and then shut down afterwards. +5. Look for whether the tests passed in the serial console output of the kernel. + +There's some pretty specific instructions for setting up each image (detailed +below), but the main gist of this is that we must avoid a vanilla `cargo run` +inside of the `libc-test` directory (which is what it's intended for) because +that would compile `syntex_syntax`, a large library, with userspace emulation. +This invariably times out on Travis, so we can't do that. + +Once all those hoops are jumped through, however, we can be happy that we're +testing almost everything! + +Below are some details of how to set up the initial OS images which are +downloaded. Each image must be enabled have input/output over the serial +console, log in automatically at the serial console, detect if a second drive in +QEMU is available, and if so mount it, run a script (it'll specifically be +`run-qemu.sh` in this folder which is copied into the generated image talked +about above), and then shut down. + +### QEMU Setup - FreeBSD + +1. [Download the latest stable amd64-bootonly release ISO](https://www.freebsd.org/where.html). + E.g. FreeBSD-11.1-RELEASE-amd64-bootonly.iso +2. Create the disk image: `qemu-img create -f qcow2 FreeBSD-11.1-RELEASE-amd64.qcow2 2G` +3. Boot the machine: `qemu-system-x86_64 -cdrom FreeBSD-11.1-RELEASE-amd64-bootonly.iso -drive if=virtio,file=FreeBSD-11.1-RELEASE-amd64.qcow2 -net nic,model=virtio -net user` +4. Run the installer, and install FreeBSD: + 1. Install + 1. Continue with default keymap + 1. Set Hostname: freebsd-ci + 1. Distribution Select: + 1. Uncheck lib32 + 1. Uncheck ports + 1. Network Configuration: vtnet0 + 1. Configure IPv4? Yes + 1. DHCP? Yes + 1. Configure IPv6? No + 1. Resolver Configuration: Ok + 1. Mirror Selection: Main Site + 1. Partitioning: Auto (UFS) + 1. Partition: Entire Disk + 1. Partition Scheme: MBR + 1. App Partition: Ok + 1. Partition Editor: Finish + 1. Confirmation: Commit + 1. Wait for sets to install + 1. Set the root password to nothing (press enter twice) + 1. Set time zone to UTC + 1. Set Date: Skip + 1. Set Time: Skip + 1. System Configuration: + 1. Disable sshd + 1. Disable dumpdev + 1. System Hardening + 1. Disable Sendmail service + 1. Add User Accounts: No + 1. Final Configuration: Exit + 1. Manual Configuration: Yes + 1. `echo 'console="comconsole"' >> /boot/loader.conf` + 1. `echo 'autoboot_delay="0"' >> /boot/loader.conf` + 1. `echo 'ext2fs_load="YES"' >> /boot/loader.conf` + 1. Look at `/etc/ttys`, see what getty argument is for `ttyu0` (E.g. `3wire`) + 1. Edit `/etc/gettytab` (with `vi` for example), look for `ttyu0` argument, + prepend `:al=root` to the line beneath to have the machine auto-login as + root. E.g. + + 3wire:\ + :np:nc:sp#0: + becomes: + + 3wire:\ + :al=root:np:nc:sp#0: + + 1. Edit `/root/.login` and put this in it: + + [ -e /dev/vtbd1 ] || exit 0 + mount -t ext2fs /dev/vtbd1 /mnt + sh /mnt/run.sh /mnt + poweroff + + 1. Exit the post install shell: `exit` + 1. Back in in the installer choose Reboot + 1. If all went well the machine should reboot and show a login prompt. + If you switch to the serial console by choosing View > serial0 in + the qemu menu, you should be logged in as root. + 1. 
Shutdown the machine: `shutdown -p now` + +Helpful links + +* https://en.wikibooks.org/wiki/QEMU/Images +* https://blog.nekoconeko.nl/blog/2015/06/04/creating-an-openstack-freebsd-image.html +* https://www.freebsd.org/doc/handbook/serialconsole-setup.html + + +### QEMU setup - OpenBSD + +1. Download CD installer +2. `qemu-img create -f qcow2 foo.qcow2 2G` +3. `qemu -cdrom foo.iso -drive if=virtio,file=foo.qcow2 -net nic,model=virtio -net user` +4. run installer +5. `echo 'set tty com0' >> /etc/boot.conf` +6. `echo 'boot' >> /etc/boot.conf` +7. Modify /etc/ttys, change the `tty00` at the end from 'unknown off' to + 'vt220 on secure' +8. Modify same line in /etc/ttys to have `"/root/foo.sh"` as the shell +9. Add this script to `/root/foo.sh` + +``` +#!/bin/sh +exec 1>/dev/tty00 +exec 2>&1 + +if mount -t ext2fs /dev/sd1c /mnt; then + sh /mnt/run.sh /mnt + shutdown -ph now +fi + +# limited shell... +exec /bin/sh < /dev/tty00 +``` + +10. `chmod +x /root/foo.sh` + +Helpful links: + +* https://en.wikibooks.org/wiki/QEMU/Images +* http://www.openbsd.org/faq/faq7.html#SerCon + +# Questions? + +Hopefully that's at least somewhat of an introduction to everything going on +here, and feel free to ping @alexcrichton with questions! + diff --git a/libc/ci/android-install-ndk.sh b/libc/ci/android-install-ndk.sh new file mode 100644 index 000000000..873f6c52c --- /dev/null +++ b/libc/ci/android-install-ndk.sh @@ -0,0 +1,37 @@ +#!/bin/sh +# Copyright 2016 The Rust Project Developers. See the COPYRIGHT +# file at the top-level directory of this distribution and at +# http://rust-lang.org/COPYRIGHT. +# +# Licensed under the Apache License, Version 2.0 or the MIT license +# , at your +# option. This file may not be copied, modified, or distributed +# except according to those terms. + +set -ex + +curl -O https://dl.google.com/android/repository/android-ndk-r15b-linux-x86_64.zip +unzip -q android-ndk-r15b-linux-x86_64.zip + +case "$1" in + aarch64) + arch=arm64 + ;; + + i686) + arch=x86 + ;; + + *) + arch=$1 + ;; +esac; + +android-ndk-r15b/build/tools/make_standalone_toolchain.py \ + --unified-headers \ + --install-dir /android/ndk-$1 \ + --arch $arch \ + --api 24 + +rm -rf ./android-ndk-r15b-linux-x86_64.zip ./android-ndk-r15b diff --git a/libc/ci/android-install-sdk.sh b/libc/ci/android-install-sdk.sh new file mode 100644 index 000000000..ab7e14d95 --- /dev/null +++ b/libc/ci/android-install-sdk.sh @@ -0,0 +1,60 @@ +#!/bin/sh +# Copyright 2016 The Rust Project Developers. See the COPYRIGHT +# file at the top-level directory of this distribution and at +# http://rust-lang.org/COPYRIGHT. +# +# Licensed under the Apache License, Version 2.0 or the MIT license +# , at your +# option. This file may not be copied, modified, or distributed +# except according to those terms. + +set -ex + +# Prep the SDK and emulator +# +# Note that the update process requires that we accept a bunch of licenses, and +# we can't just pipe `yes` into it for some reason, so we take the same strategy +# located in https://github.com/appunite/docker by just wrapping it in a script +# which apparently magically accepts the licenses. 
+ +mkdir sdk +curl https://dl.google.com/android/repository/sdk-tools-linux-3859397.zip -O +unzip -d sdk sdk-tools-linux-3859397.zip + +case "$1" in + arm | armv7) + abi=armeabi-v7a + ;; + + aarch64) + abi=arm64-v8a + ;; + + i686) + abi=x86 + ;; + + x86_64) + abi=x86_64 + ;; + + *) + echo "invalid arch: $1" + exit 1 + ;; +esac; + +# --no_https avoids +# javax.net.ssl.SSLHandshakeException: sun.security.validator.ValidatorException: No trusted certificate found +echo "yes" | \ + ./sdk/tools/bin/sdkmanager --no_https \ + "emulator" \ + "platform-tools" \ + "platforms;android-24" \ + "system-images;android-24;default;$abi" + +echo "no" | + ./sdk/tools/bin/avdmanager create avd \ + --name $1 \ + --package "system-images;android-24;default;$abi" diff --git a/libc/ci/android-sysimage.sh b/libc/ci/android-sysimage.sh new file mode 100644 index 000000000..9611dfeb0 --- /dev/null +++ b/libc/ci/android-sysimage.sh @@ -0,0 +1,52 @@ +# Copyright 2017 The Rust Project Developers. See the COPYRIGHT +# file at the top-level directory of this distribution and at +# http://rust-lang.org/COPYRIGHT. +# +# Licensed under the Apache License, Version 2.0 or the MIT license +# , at your +# option. This file may not be copied, modified, or distributed +# except according to those terms. + +set -ex + +URL=https://dl.google.com/android/repository/sys-img/android + +main() { + local arch=$1 + local name=$2 + local dest=/system + local td=$(mktemp -d) + + apt-get install --no-install-recommends e2tools + + pushd $td + curl -O $URL/$name + unzip -q $name + + local system=$(find . -name system.img) + mkdir -p $dest/{bin,lib,lib64} + + # Extract android linker and libraries to /system + # This allows android executables to be run directly (or with qemu) + if [ $arch = "x86_64" -o $arch = "arm64" ]; then + e2cp -p $system:/bin/linker64 $dest/bin/ + e2cp -p $system:/lib64/libdl.so $dest/lib64/ + e2cp -p $system:/lib64/libc.so $dest/lib64/ + e2cp -p $system:/lib64/libm.so $dest/lib64/ + else + e2cp -p $system:/bin/linker $dest/bin/ + e2cp -p $system:/lib/libdl.so $dest/lib/ + e2cp -p $system:/lib/libc.so $dest/lib/ + e2cp -p $system:/lib/libm.so $dest/lib/ + fi + + # clean up + apt-get purge --auto-remove -y e2tools + + popd + + rm -rf $td +} + +main "${@}" diff --git a/libc/ci/docker/aarch64-linux-android/Dockerfile b/libc/ci/docker/aarch64-linux-android/Dockerfile new file mode 100644 index 000000000..5fc83aadb --- /dev/null +++ b/libc/ci/docker/aarch64-linux-android/Dockerfile @@ -0,0 +1,45 @@ +FROM ubuntu:16.04 + +RUN dpkg --add-architecture i386 && \ + apt-get update && \ + apt-get install -y --no-install-recommends \ + file \ + curl \ + ca-certificates \ + python \ + unzip \ + expect \ + openjdk-9-jre \ + libstdc++6:i386 \ + libpulse0 \ + gcc \ + libc6-dev + +WORKDIR /android/ +COPY android* /android/ + +ENV ANDROID_ARCH=aarch64 +ENV PATH=$PATH:/android/ndk-$ANDROID_ARCH/bin:/android/sdk/tools:/android/sdk/platform-tools + +RUN sh /android/android-install-ndk.sh $ANDROID_ARCH +RUN sh /android/android-install-sdk.sh $ANDROID_ARCH +RUN mv /root/.android /tmp +RUN chmod 777 -R /tmp/.android +RUN chmod 755 /android/sdk/tools/* /android/sdk/emulator/qemu/linux-x86_64/* + +ENV PATH=$PATH:/rust/bin \ + CARGO_TARGET_AARCH64_LINUX_ANDROID_LINKER=aarch64-linux-android-gcc \ + CARGO_TARGET_AARCH64_LINUX_ANDROID_RUNNER=/tmp/runtest \ + HOME=/tmp + +ADD runtest-android.rs /tmp/runtest.rs +ENTRYPOINT [ \ + "bash", \ + "-c", \ + # set SHELL so android can detect a 64bits system, see + # http://stackoverflow.com/a/41789144 + 
"SHELL=/bin/dash /android/sdk/emulator/emulator @aarch64 -no-window & \ + rustc /tmp/runtest.rs -o /tmp/runtest && \ + exec \"$@\"", \ + "--" \ +] diff --git a/libc/ci/docker/aarch64-unknown-linux-gnu/Dockerfile b/libc/ci/docker/aarch64-unknown-linux-gnu/Dockerfile new file mode 100644 index 000000000..18214a3e6 --- /dev/null +++ b/libc/ci/docker/aarch64-unknown-linux-gnu/Dockerfile @@ -0,0 +1,7 @@ +FROM ubuntu:17.10 +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc libc6-dev ca-certificates \ + gcc-aarch64-linux-gnu libc6-dev-arm64-cross qemu-user +ENV CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER=aarch64-linux-gnu-gcc \ + CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_RUNNER="qemu-aarch64 -L /usr/aarch64-linux-gnu" \ + PATH=$PATH:/rust/bin diff --git a/libc/ci/docker/aarch64-unknown-linux-musl/Dockerfile b/libc/ci/docker/aarch64-unknown-linux-musl/Dockerfile new file mode 100644 index 000000000..caec1572c --- /dev/null +++ b/libc/ci/docker/aarch64-unknown-linux-musl/Dockerfile @@ -0,0 +1,27 @@ +FROM ubuntu:17.10 + +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc make libc6-dev git curl ca-certificates \ + gcc-aarch64-linux-gnu qemu-user +RUN curl https://www.musl-libc.org/releases/musl-1.1.19.tar.gz | \ + tar xzf - && \ + cd musl-1.1.19 && \ + CC=aarch64-linux-gnu-gcc \ + ./configure --prefix=/musl-aarch64 --enable-wrapper=yes && \ + make install -j4 && \ + cd .. && \ + rm -rf musl-1.1.19 +# Install linux kernel headers sanitized for use with musl +RUN curl -L https://github.com/sabotage-linux/kernel-headers/archive/v3.12.6-6.tar.gz | \ + tar xzf - && \ + cd kernel-headers-3.12.6-6 && \ + make ARCH=arm64 prefix=/musl-aarch64 install -j4 && \ + cd .. && \ + rm -rf kernel-headers-3.12.6-6 + +# FIXME: shouldn't need the `-lgcc` here, shouldn't that be in libstd? 
+ENV PATH=$PATH:/musl-aarch64/bin:/rust/bin \ + CC_aarch64_unknown_linux_musl=musl-gcc \ + RUSTFLAGS='-Clink-args=-lgcc' \ + CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_LINKER=musl-gcc \ + CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_RUNNER="qemu-aarch64 -L /musl-aarch64" diff --git a/libc/ci/docker/arm-linux-androideabi/Dockerfile b/libc/ci/docker/arm-linux-androideabi/Dockerfile new file mode 100644 index 000000000..a3fc64bfd --- /dev/null +++ b/libc/ci/docker/arm-linux-androideabi/Dockerfile @@ -0,0 +1,45 @@ +FROM ubuntu:16.04 + +RUN dpkg --add-architecture i386 && \ + apt-get update && \ + apt-get install -y --no-install-recommends \ + file \ + curl \ + ca-certificates \ + python \ + unzip \ + expect \ + openjdk-9-jre \ + libstdc++6:i386 \ + libpulse0 \ + gcc \ + libc6-dev + +WORKDIR /android/ +COPY android* /android/ + +ENV ANDROID_ARCH=arm +ENV PATH=$PATH:/android/ndk-$ANDROID_ARCH/bin:/android/sdk/tools:/android/sdk/platform-tools + +RUN sh /android/android-install-ndk.sh $ANDROID_ARCH +RUN sh /android/android-install-sdk.sh $ANDROID_ARCH +RUN mv /root/.android /tmp +RUN chmod 777 -R /tmp/.android +RUN chmod 755 /android/sdk/tools/* /android/sdk/emulator/qemu/linux-x86_64/* + +ENV PATH=$PATH:/rust/bin \ + CARGO_TARGET_ARM_LINUX_ANDROIDEABI_LINKER=arm-linux-androideabi-gcc \ + CARGO_TARGET_ARM_LINUX_ANDROIDEABI_RUNNER=/tmp/runtest \ + HOME=/tmp + +ADD runtest-android.rs /tmp/runtest.rs +ENTRYPOINT [ \ + "bash", \ + "-c", \ + # set SHELL so android can detect a 64bits system, see + # http://stackoverflow.com/a/41789144 + "SHELL=/bin/dash /android/sdk/emulator/emulator @arm -no-window & \ + rustc /tmp/runtest.rs -o /tmp/runtest && \ + exec \"$@\"", \ + "--" \ +] diff --git a/libc/ci/docker/arm-unknown-linux-gnueabihf/Dockerfile b/libc/ci/docker/arm-unknown-linux-gnueabihf/Dockerfile new file mode 100644 index 000000000..9fe71dcf8 --- /dev/null +++ b/libc/ci/docker/arm-unknown-linux-gnueabihf/Dockerfile @@ -0,0 +1,7 @@ +FROM ubuntu:17.10 +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc libc6-dev ca-certificates \ + gcc-arm-linux-gnueabihf libc6-dev-armhf-cross qemu-user +ENV CARGO_TARGET_ARM_UNKNOWN_LINUX_GNUEABIHF_LINKER=arm-linux-gnueabihf-gcc \ + CARGO_TARGET_ARM_UNKNOWN_LINUX_GNUEABIHF_RUNNER="qemu-arm -L /usr/arm-linux-gnueabihf" \ + PATH=$PATH:/rust/bin diff --git a/libc/ci/docker/arm-unknown-linux-musleabihf/Dockerfile b/libc/ci/docker/arm-unknown-linux-musleabihf/Dockerfile new file mode 100644 index 000000000..86304130f --- /dev/null +++ b/libc/ci/docker/arm-unknown-linux-musleabihf/Dockerfile @@ -0,0 +1,25 @@ +FROM ubuntu:17.10 + +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc make libc6-dev git curl ca-certificates \ + gcc-arm-linux-gnueabihf qemu-user + +RUN curl https://www.musl-libc.org/releases/musl-1.1.19.tar.gz | tar xzf - +WORKDIR /musl-1.1.19 +RUN CC=arm-linux-gnueabihf-gcc \ + CFLAGS="-march=armv6 -marm" \ + ./configure --prefix=/musl-arm --enable-wrapper=yes +RUN make install -j4 + +# Install linux kernel headers sanitized for use with musl +RUN curl -L https://github.com/sabotage-linux/kernel-headers/archive/v3.12.6-6.tar.gz | \ + tar xzf - && \ + cd kernel-headers-3.12.6-6 && \ + make ARCH=arm prefix=/musl-arm install -j4 && \ + cd .. 
&& \ + rm -rf kernel-headers-3.12.6-6 + +ENV PATH=$PATH:/musl-arm/bin:/rust/bin \ + CC_arm_unknown_linux_musleabihf=musl-gcc \ + CARGO_TARGET_ARM_UNKNOWN_LINUX_MUSLEABIHF_LINKER=musl-gcc \ + CARGO_TARGET_ARM_UNKNOWN_LINUX_MUSLEABIHF_RUNNER="qemu-arm -L /musl-arm" diff --git a/libc/ci/docker/asmjs-unknown-emscripten/Dockerfile b/libc/ci/docker/asmjs-unknown-emscripten/Dockerfile new file mode 100644 index 000000000..3088fc53c --- /dev/null +++ b/libc/ci/docker/asmjs-unknown-emscripten/Dockerfile @@ -0,0 +1,20 @@ +FROM ubuntu:16.04 + +RUN apt-get update && \ + apt-get install -y --no-install-recommends \ + ca-certificates \ + curl \ + gcc \ + git \ + libc6-dev \ + python \ + xz-utils + +COPY emscripten.sh / +RUN bash /emscripten.sh + +ENV PATH=$PATH:/rust/bin \ + CARGO_TARGET_ASMJS_UNKNOWN_EMSCRIPTEN_RUNNER=node + +COPY emscripten-entry.sh / +ENTRYPOINT ["/emscripten-entry.sh"] diff --git a/libc/ci/docker/i686-linux-android/Dockerfile b/libc/ci/docker/i686-linux-android/Dockerfile new file mode 100644 index 000000000..f0836c385 --- /dev/null +++ b/libc/ci/docker/i686-linux-android/Dockerfile @@ -0,0 +1,45 @@ +FROM ubuntu:16.04 + +RUN dpkg --add-architecture i386 && \ + apt-get update && \ + apt-get install -y --no-install-recommends \ + file \ + curl \ + ca-certificates \ + python \ + unzip \ + expect \ + openjdk-9-jre \ + libstdc++6:i386 \ + libpulse0 \ + gcc \ + libc6-dev + +WORKDIR /android/ +COPY android* /android/ + +ENV ANDROID_ARCH=i686 +ENV PATH=$PATH:/android/ndk-$ANDROID_ARCH/bin:/android/sdk/tools:/android/sdk/platform-tools + +RUN sh /android/android-install-ndk.sh $ANDROID_ARCH +RUN sh /android/android-install-sdk.sh $ANDROID_ARCH +RUN mv /root/.android /tmp +RUN chmod 777 -R /tmp/.android +RUN chmod 755 /android/sdk/tools/* /android/sdk/emulator/qemu/linux-x86_64/* + +ENV PATH=$PATH:/rust/bin \ + CARGO_TARGET_I686_LINUX_ANDROID_LINKER=i686-linux-android-gcc \ + CARGO_TARGET_I686_LINUX_ANDROID_RUNNER=/tmp/runtest \ + HOME=/tmp + +ADD runtest-android.rs /tmp/runtest.rs +ENTRYPOINT [ \ + "bash", \ + "-c", \ + # set SHELL so android can detect a 64bits system, see + # http://stackoverflow.com/a/41789144 + "SHELL=/bin/dash /android/sdk/emulator/emulator @i686 -no-window -no-accel & \ + rustc /tmp/runtest.rs -o /tmp/runtest && \ + exec \"$@\"", \ + "--" \ +] diff --git a/libc/ci/docker/i686-unknown-linux-gnu/Dockerfile b/libc/ci/docker/i686-unknown-linux-gnu/Dockerfile new file mode 100644 index 000000000..03f3e8e69 --- /dev/null +++ b/libc/ci/docker/i686-unknown-linux-gnu/Dockerfile @@ -0,0 +1,5 @@ +FROM ubuntu:18.04 +RUN apt-get update +RUN apt-get install -y --no-install-recommends \ + gcc-multilib libc6-dev ca-certificates +ENV PATH=$PATH:/rust/bin diff --git a/libc/ci/docker/i686-unknown-linux-musl/Dockerfile b/libc/ci/docker/i686-unknown-linux-musl/Dockerfile new file mode 100644 index 000000000..49f37d70f --- /dev/null +++ b/libc/ci/docker/i686-unknown-linux-musl/Dockerfile @@ -0,0 +1,31 @@ +FROM ubuntu:17.10 + +RUN dpkg --add-architecture i386 +RUN apt-get update +RUN apt-get install -y --no-install-recommends \ + gcc-multilib make libc6-dev git curl ca-certificates libc6:i386 +# Below we're cross-compiling musl for i686 using the system compiler on an +# x86_64 system. This is an awkward thing to be doing and so we have to jump +# through a couple hoops to get musl to be happy. In particular: +# +# * We specifically pass -m32 in CFLAGS and override CC when running ./configure, +# since otherwise the script will fail to find a compiler. 
+# * We manually unset CROSS_COMPILE when running make; otherwise the makefile +# will call the non-existent binary 'i686-ar'. +RUN curl https://www.musl-libc.org/releases/musl-1.1.19.tar.gz | \ + tar xzf - && \ + cd musl-1.1.19 && \ + CC=gcc CFLAGS=-m32 ./configure --prefix=/musl-i686 --disable-shared --target=i686 && \ + make CROSS_COMPILE= install -j4 && \ + cd .. && \ + rm -rf musl-1.1.19 +# Install linux kernel headers sanitized for use with musl +RUN curl -L https://github.com/sabotage-linux/kernel-headers/archive/v3.12.6-6.tar.gz | \ + tar xzf - && \ + cd kernel-headers-3.12.6-6 && \ + make ARCH=i386 prefix=/musl-i686 install -j4 && \ + cd .. && \ + rm -rf kernel-headers-3.12.6-6 + +ENV PATH=$PATH:/musl-i686/bin:/rust/bin \ + CC_i686_unknown_linux_musl=musl-gcc diff --git a/libc/ci/docker/mips-unknown-linux-gnu/Dockerfile b/libc/ci/docker/mips-unknown-linux-gnu/Dockerfile new file mode 100644 index 000000000..c66abd471 --- /dev/null +++ b/libc/ci/docker/mips-unknown-linux-gnu/Dockerfile @@ -0,0 +1,10 @@ +FROM ubuntu:17.10 + +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc libc6-dev qemu-user ca-certificates \ + gcc-mips-linux-gnu libc6-dev-mips-cross \ + qemu-system-mips + +ENV CARGO_TARGET_MIPS_UNKNOWN_LINUX_GNU_LINKER=mips-linux-gnu-gcc \ + CARGO_TARGET_MIPS_UNKNOWN_LINUX_GNU_RUNNER="qemu-mips -L /usr/mips-linux-gnu" \ + PATH=$PATH:/rust/bin diff --git a/libc/ci/docker/mips-unknown-linux-musl/Dockerfile b/libc/ci/docker/mips-unknown-linux-musl/Dockerfile new file mode 100644 index 000000000..91ffd5817 --- /dev/null +++ b/libc/ci/docker/mips-unknown-linux-musl/Dockerfile @@ -0,0 +1,17 @@ +FROM ubuntu:17.10 + +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc libc6-dev qemu-user ca-certificates qemu-system-mips curl \ + bzip2 + +RUN mkdir /toolchain + +# Note that this originally came from: +# https://downloads.openwrt.org/snapshots/trunk/ar71xx/generic/OpenWrt-SDK-ar71xx-generic_gcc-5.3.0_musl-1.1.15.Linux-x86_64.tar.bz2 +RUN curl -L https://s3-us-west-1.amazonaws.com/rust-lang-ci2/libc/OpenWrt-SDK-ar71xx-generic_gcc-5.3.0_musl-1.1.15.Linux-x86_64.tar.bz2 | \ + tar xjf - -C /toolchain --strip-components=1 + +ENV PATH=$PATH:/rust/bin:/toolchain/staging_dir/toolchain-mips_34kc_gcc-5.3.0_musl-1.1.15/bin \ + CC_mips_unknown_linux_musl=mips-openwrt-linux-gcc \ + CARGO_TARGET_MIPS_UNKNOWN_LINUX_MUSL_LINKER=mips-openwrt-linux-gcc \ + CARGO_TARGET_MIPS_UNKNOWN_LINUX_MUSL_RUNNER="qemu-mips -L /toolchain/staging_dir/toolchain-mips_34kc_gcc-5.3.0_musl-1.1.15" diff --git a/libc/ci/docker/mips64-unknown-linux-gnuabi64/Dockerfile b/libc/ci/docker/mips64-unknown-linux-gnuabi64/Dockerfile new file mode 100644 index 000000000..b9921fcc5 --- /dev/null +++ b/libc/ci/docker/mips64-unknown-linux-gnuabi64/Dockerfile @@ -0,0 +1,11 @@ +FROM ubuntu:17.10 + +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc libc6-dev qemu-user ca-certificates \ + gcc-mips64-linux-gnuabi64 libc6-dev-mips64-cross \ + qemu-system-mips64 + +ENV CARGO_TARGET_MIPS64_UNKNOWN_LINUX_GNUABI64_LINKER=mips64-linux-gnuabi64-gcc \ + CARGO_TARGET_MIPS64_UNKNOWN_LINUX_GNUABI64_RUNNER="qemu-mips64 -L /usr/mips64-linux-gnuabi64" \ + CC_mips64_unknown_linux_gnuabi64=mips64-linux-gnuabi64-gcc \ + PATH=$PATH:/rust/bin diff --git a/libc/ci/docker/mips64el-unknown-linux-gnuabi64/Dockerfile b/libc/ci/docker/mips64el-unknown-linux-gnuabi64/Dockerfile new file mode 100644 index 000000000..434c90819 --- /dev/null +++ b/libc/ci/docker/mips64el-unknown-linux-gnuabi64/Dockerfile @@ 
-0,0 +1,11 @@ +FROM ubuntu:17.10 + +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc libc6-dev qemu-user ca-certificates \ + gcc-mips64el-linux-gnuabi64 libc6-dev-mips64el-cross \ + qemu-system-mips64el + +ENV CARGO_TARGET_MIPS64EL_UNKNOWN_LINUX_GNUABI64_LINKER=mips64el-linux-gnuabi64-gcc \ + CARGO_TARGET_MIPS64EL_UNKNOWN_LINUX_GNUABI64_RUNNER="qemu-mips64el -L /usr/mips64el-linux-gnuabi64" \ + CC_mips64el_unknown_linux_gnuabi64=mips64el-linux-gnuabi64-gcc \ + PATH=$PATH:/rust/bin diff --git a/libc/ci/docker/mipsel-unknown-linux-musl/Dockerfile b/libc/ci/docker/mipsel-unknown-linux-musl/Dockerfile new file mode 100644 index 000000000..3642fa8ca --- /dev/null +++ b/libc/ci/docker/mipsel-unknown-linux-musl/Dockerfile @@ -0,0 +1,17 @@ +FROM ubuntu:17.10 + +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc libc6-dev qemu-user ca-certificates qemu-system-mips curl \ + bzip2 + +RUN mkdir /toolchain + +# Note that this originally came from: +# https://downloads.openwrt.org/snapshots/trunk/malta/generic/OpenWrt-Toolchain-malta-le_gcc-5.3.0_musl-1.1.15.Linux-x86_64.tar.bz2 +RUN curl -L https://s3-us-west-1.amazonaws.com/rust-lang-ci2/libc/OpenWrt-Toolchain-malta-le_gcc-5.3.0_musl-1.1.15.Linux-x86_64.tar.bz2 | \ + tar xjf - -C /toolchain --strip-components=2 + +ENV PATH=$PATH:/rust/bin:/toolchain/bin \ + CC_mipsel_unknown_linux_musl=mipsel-openwrt-linux-gcc \ + CARGO_TARGET_MIPSEL_UNKNOWN_LINUX_MUSL_LINKER=mipsel-openwrt-linux-gcc \ + CARGO_TARGET_MIPSEL_UNKNOWN_LINUX_MUSL_RUNNER="qemu-mipsel -L /toolchain" diff --git a/libc/ci/docker/powerpc-unknown-linux-gnu/Dockerfile b/libc/ci/docker/powerpc-unknown-linux-gnu/Dockerfile new file mode 100644 index 000000000..106ada444 --- /dev/null +++ b/libc/ci/docker/powerpc-unknown-linux-gnu/Dockerfile @@ -0,0 +1,10 @@ +FROM ubuntu:17.10 + +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc libc6-dev qemu-user ca-certificates \ + gcc-powerpc-linux-gnu libc6-dev-powerpc-cross \ + qemu-system-ppc + +ENV CARGO_TARGET_POWERPC_UNKNOWN_LINUX_GNU_LINKER=powerpc-linux-gnu-gcc \ + CARGO_TARGET_POWERPC_UNKNOWN_LINUX_GNU_RUNNER="qemu-ppc -L /usr/powerpc-linux-gnu" \ + PATH=$PATH:/rust/bin diff --git a/libc/ci/docker/powerpc64-unknown-linux-gnu/Dockerfile b/libc/ci/docker/powerpc64-unknown-linux-gnu/Dockerfile new file mode 100644 index 000000000..a6ab66a9a --- /dev/null +++ b/libc/ci/docker/powerpc64-unknown-linux-gnu/Dockerfile @@ -0,0 +1,11 @@ +FROM ubuntu:17.10 + +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc libc6-dev qemu-user ca-certificates \ + gcc-powerpc64-linux-gnu libc6-dev-ppc64-cross \ + qemu-system-ppc + +ENV CARGO_TARGET_POWERPC64_UNKNOWN_LINUX_GNU_LINKER=powerpc64-linux-gnu-gcc \ + CARGO_TARGET_POWERPC64_UNKNOWN_LINUX_GNU_RUNNER="qemu-ppc64 -L /usr/powerpc64-linux-gnu" \ + CC=powerpc64-linux-gnu-gcc \ + PATH=$PATH:/rust/bin diff --git a/libc/ci/docker/powerpc64le-unknown-linux-gnu/Dockerfile b/libc/ci/docker/powerpc64le-unknown-linux-gnu/Dockerfile new file mode 100644 index 000000000..627123e9a --- /dev/null +++ b/libc/ci/docker/powerpc64le-unknown-linux-gnu/Dockerfile @@ -0,0 +1,11 @@ +FROM ubuntu:17.10 + +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc libc6-dev qemu-user ca-certificates \ + gcc-powerpc64le-linux-gnu libc6-dev-ppc64el-cross \ + qemu-system-ppc + +ENV CARGO_TARGET_POWERPC64LE_UNKNOWN_LINUX_GNU_LINKER=powerpc64le-linux-gnu-gcc \ + CARGO_TARGET_POWERPC64LE_UNKNOWN_LINUX_GNU_RUNNER="qemu-ppc64le -L 
/usr/powerpc64le-linux-gnu" \ + CC=powerpc64le-linux-gnu-gcc \ + PATH=$PATH:/rust/bin diff --git a/libc/ci/docker/s390x-unknown-linux-gnu/Dockerfile b/libc/ci/docker/s390x-unknown-linux-gnu/Dockerfile new file mode 100644 index 000000000..861f4f9b0 --- /dev/null +++ b/libc/ci/docker/s390x-unknown-linux-gnu/Dockerfile @@ -0,0 +1,18 @@ +FROM ubuntu:17.10 + +RUN apt-get update && apt-get install -y --no-install-recommends \ + curl ca-certificates \ + gcc libc6-dev \ + gcc-s390x-linux-gnu libc6-dev-s390x-cross \ + qemu-system-s390x \ + cpio + +COPY linux-s390x.sh / +RUN bash /linux-s390x.sh + +COPY test-runner-linux / + +ENV CARGO_TARGET_S390X_UNKNOWN_LINUX_GNU_LINKER=s390x-linux-gnu-gcc \ + CARGO_TARGET_S390X_UNKNOWN_LINUX_GNU_RUNNER="/test-runner-linux s390x" \ + CC_s390x_unknown_linux_gnu=s390x-linux-gnu-gcc \ + PATH=$PATH:/rust/bin diff --git a/libc/ci/docker/sparc64-unknown-linux-gnu/Dockerfile b/libc/ci/docker/sparc64-unknown-linux-gnu/Dockerfile new file mode 100644 index 000000000..d9edaab42 --- /dev/null +++ b/libc/ci/docker/sparc64-unknown-linux-gnu/Dockerfile @@ -0,0 +1,21 @@ +FROM debian:stretch + +RUN apt-get update && apt-get install -y --no-install-recommends \ + curl ca-certificates \ + gcc libc6-dev \ + gcc-sparc64-linux-gnu libc6-dev-sparc64-cross \ + qemu-system-sparc64 openbios-sparc seabios ipxe-qemu \ + p7zip-full cpio linux-libc-dev-sparc64-cross linux-headers-4.9.0-3-common + +# Put linux/module.h into the right spot as it is not shipped by debian +RUN cp /usr/src/linux-headers-4.9.0-3-common/include/uapi/linux/module.h /usr/sparc64-linux-gnu/include/linux/ + +COPY linux-sparc64.sh / +RUN bash /linux-sparc64.sh + +COPY test-runner-linux / + +ENV CARGO_TARGET_SPARC64_UNKNOWN_LINUX_GNU_LINKER=sparc64-linux-gnu-gcc \ + CARGO_TARGET_SPARC64_UNKNOWN_LINUX_GNU_RUNNER="/test-runner-linux sparc64" \ + CC_sparc64_unknown_linux_gnu=sparc64-linux-gnu-gcc \ + PATH=$PATH:/rust/bin diff --git a/libc/ci/docker/wasm32-unknown-emscripten/Dockerfile b/libc/ci/docker/wasm32-unknown-emscripten/Dockerfile new file mode 100644 index 000000000..59bf7d9a2 --- /dev/null +++ b/libc/ci/docker/wasm32-unknown-emscripten/Dockerfile @@ -0,0 +1,21 @@ +FROM ubuntu:16.04 + +RUN apt-get update && \ + apt-get install -y --no-install-recommends \ + ca-certificates \ + curl \ + gcc \ + git \ + libc6-dev \ + python \ + xz-utils + +COPY emscripten.sh / +RUN bash /emscripten.sh + +ENV PATH=$PATH:/rust/bin \ + CARGO_TARGET_WASM32_UNKNOWN_EMSCRIPTEN_RUNNER=node-wrapper.sh + +COPY emscripten-entry.sh / +COPY docker/wasm32-unknown-emscripten/node-wrapper.sh /usr/local/bin/node-wrapper.sh +ENTRYPOINT ["/emscripten-entry.sh"] diff --git a/libc/ci/docker/wasm32-unknown-emscripten/node-wrapper.sh b/libc/ci/docker/wasm32-unknown-emscripten/node-wrapper.sh new file mode 100755 index 000000000..3122e2e23 --- /dev/null +++ b/libc/ci/docker/wasm32-unknown-emscripten/node-wrapper.sh @@ -0,0 +1,15 @@ +#!/bin/sh + +set -e + +me=$1 +shift +dir=$(dirname $me) +file=$(basename $me) + +if echo $file | grep -q wasm; then + exit 0 # FIXME(rust-lang/cargo#4750) +fi + +cd $dir +exec node $file "$@" diff --git a/libc/ci/docker/x86_64-linux-android/Dockerfile b/libc/ci/docker/x86_64-linux-android/Dockerfile new file mode 100644 index 000000000..0cfbc4820 --- /dev/null +++ b/libc/ci/docker/x86_64-linux-android/Dockerfile @@ -0,0 +1,26 @@ +FROM ubuntu:16.04 + +RUN apt-get update && \ + apt-get install -y --no-install-recommends \ + ca-certificates \ + curl \ + gcc \ + libc-dev \ + python \ + unzip + +WORKDIR /android/ +ENV 
ANDROID_ARCH=x86_64 +COPY android-install-ndk.sh /android/ +RUN sh /android/android-install-ndk.sh $ANDROID_ARCH + +# We do not run x86_64-linux-android tests on an android emulator. +# See ci/android-sysimage.sh for informations about how tests are run. +COPY android-sysimage.sh /android/ +RUN bash /android/android-sysimage.sh x86_64 x86_64-24_r07.zip + +ENV PATH=$PATH:/rust/bin:/android/ndk-$ANDROID_ARCH/bin \ + CARGO_TARGET_X86_64_LINUX_ANDROID_LINKER=x86_64-linux-android-gcc \ + CC_x86_64_linux_android=x86_64-linux-android-gcc \ + CXX_x86_64_linux_android=x86_64-linux-android-g++ \ + HOME=/tmp diff --git a/libc/ci/docker/x86_64-rumprun-netbsd/Dockerfile b/libc/ci/docker/x86_64-rumprun-netbsd/Dockerfile new file mode 100644 index 000000000..a486d05b2 --- /dev/null +++ b/libc/ci/docker/x86_64-rumprun-netbsd/Dockerfile @@ -0,0 +1,10 @@ +FROM mato/rumprun-toolchain-hw-x86_64 +USER root +RUN apt-get update +RUN apt-get install -y --no-install-recommends \ + qemu +ENV PATH=$PATH:/rust/bin \ + CARGO_TARGET_X86_64_RUMPRUN_NETBSD_RUNNER=/tmp/runtest + +ADD docker/x86_64-rumprun-netbsd/runtest.rs /tmp/ +ENTRYPOINT ["sh", "-c", "rustc /tmp/runtest.rs -o /tmp/runtest && exec \"$@\"", "--"] diff --git a/libc/ci/docker/x86_64-rumprun-netbsd/runtest.rs b/libc/ci/docker/x86_64-rumprun-netbsd/runtest.rs new file mode 100644 index 000000000..94b594608 --- /dev/null +++ b/libc/ci/docker/x86_64-rumprun-netbsd/runtest.rs @@ -0,0 +1,54 @@ +use std::env; +use std::process::{Command, Stdio}; +use std::sync::mpsc; +use std::thread; +use std::time::Duration; +use std::io::{BufRead, BufReader, Read}; + +fn main() { + assert_eq!(env::args().len(), 2); + + let status = Command::new("rumprun-bake") + .arg("hw_virtio") + .arg("/tmp/libc-test.img") + .arg(env::args().nth(1).unwrap()) + .status() + .expect("failed to run rumprun-bake"); + assert!(status.success()); + + let mut child = Command::new("qemu-system-x86_64") + .arg("-nographic") + .arg("-vga").arg("none") + .arg("-m").arg("64") + .arg("-kernel").arg("/tmp/libc-test.img") + .stdout(Stdio::piped()) + .stderr(Stdio::piped()) + .spawn() + .expect("failed to spawn qemu"); + + let mut stdout = child.stdout.take().unwrap(); + let mut stderr = child.stderr.take().unwrap(); + let (tx, rx) = mpsc::channel(); + let tx2 = tx.clone(); + let t1 = thread::spawn(move || find_ok(&mut stdout, tx)); + let t2 = thread::spawn(move || find_ok(&mut stderr, tx2)); + + let res = rx.recv_timeout(Duration::new(5, 0)); + child.kill().unwrap(); + t1.join().unwrap(); + t2.join().unwrap(); + + if res.is_err() { + panic!("didn't find success"); + } +} + +fn find_ok(input: &mut Read, tx: mpsc::Sender<()>) { + for line in BufReader::new(input).lines() { + let line = line.unwrap(); + println!("{}", line); + if line.starts_with("PASSED ") && line.contains(" tests") { + tx.send(()).unwrap(); + } + } +} diff --git a/libc/ci/docker/x86_64-unknown-freebsd/Dockerfile b/libc/ci/docker/x86_64-unknown-freebsd/Dockerfile new file mode 100644 index 000000000..35f103657 --- /dev/null +++ b/libc/ci/docker/x86_64-unknown-freebsd/Dockerfile @@ -0,0 +1,13 @@ +FROM wezm/port-prebuilt-freebsd11@sha256:43553e2265ec702ec72a63a765df333f50b1858b896e69385749e96d8624e9b0 + +RUN apt-get update +RUN apt-get install -y --no-install-recommends \ + qemu genext2fs xz-utils +RUN apt-get install -y curl ca-certificates gcc + +ENTRYPOINT ["sh"] + +ENV PATH=$PATH:/rust/bin \ + QEMU=2018-03-15/FreeBSD-11.1-RELEASE-amd64.qcow2.xz \ + CAN_CROSS=1 \ + CARGO_TARGET_X86_64_UNKNOWN_FREEBSD_LINKER=x86_64-unknown-freebsd11-gcc diff 
--git a/libc/ci/docker/x86_64-unknown-linux-gnu/Dockerfile b/libc/ci/docker/x86_64-unknown-linux-gnu/Dockerfile new file mode 100644 index 000000000..6ab9c9231 --- /dev/null +++ b/libc/ci/docker/x86_64-unknown-linux-gnu/Dockerfile @@ -0,0 +1,5 @@ +FROM ubuntu:18.04 +RUN apt-get update +RUN apt-get install -y --no-install-recommends \ + gcc libc6-dev ca-certificates +ENV PATH=$PATH:/rust/bin diff --git a/libc/ci/docker/x86_64-unknown-linux-gnux32/Dockerfile b/libc/ci/docker/x86_64-unknown-linux-gnux32/Dockerfile new file mode 100644 index 000000000..03f3e8e69 --- /dev/null +++ b/libc/ci/docker/x86_64-unknown-linux-gnux32/Dockerfile @@ -0,0 +1,5 @@ +FROM ubuntu:18.04 +RUN apt-get update +RUN apt-get install -y --no-install-recommends \ + gcc-multilib libc6-dev ca-certificates +ENV PATH=$PATH:/rust/bin diff --git a/libc/ci/docker/x86_64-unknown-linux-musl/Dockerfile b/libc/ci/docker/x86_64-unknown-linux-musl/Dockerfile new file mode 100644 index 000000000..6e2b7d9e5 --- /dev/null +++ b/libc/ci/docker/x86_64-unknown-linux-musl/Dockerfile @@ -0,0 +1,20 @@ +FROM ubuntu:17.10 + +RUN apt-get update +RUN apt-get install -y --no-install-recommends \ + gcc make libc6-dev git curl ca-certificates +RUN curl https://www.musl-libc.org/releases/musl-1.1.19.tar.gz | \ + tar xzf - && \ + cd musl-1.1.19 && \ + ./configure --prefix=/musl-x86_64 && \ + make install -j4 && \ + cd .. && \ + rm -rf musl-1.1.19 +# Install linux kernel headers sanitized for use with musl +RUN curl -L https://github.com/sabotage-linux/kernel-headers/archive/v3.12.6-6.tar.gz | \ + tar xzf - && \ + cd kernel-headers-3.12.6-6 && \ + make ARCH=x86_64 prefix=/musl-x86_64 install -j4 && \ + cd .. && \ + rm -rf kernel-headers-3.12.6-6 +ENV PATH=$PATH:/musl-x86_64/bin:/rust/bin diff --git a/libc/ci/dox.sh b/libc/ci/dox.sh new file mode 100644 index 000000000..b8ffa7dd0 --- /dev/null +++ b/libc/ci/dox.sh @@ -0,0 +1,33 @@ +#!/bin/sh + +# Builds documentation for all target triples that we have a registered URL for +# in liblibc. This scrapes the list of triples to document from `src/lib.rs` +# which has a bunch of `html_root_url` directives we pick up. + +set -e + +TARGETS=`grep html_root_url src/lib.rs | sed 's/.*".*\/\(.*\)"/\1/'` + +rm -rf target/doc +mkdir -p target/doc + +cp ci/landing-page-head.html target/doc/index.html + +for target in $TARGETS; do + echo documenting $target + + rustdoc -o target/doc/$target --target $target src/lib.rs --cfg cross_platform_docs \ + --crate-name libc + + echo "
<li><a href=\"$target/libc/index.html\">$target</a></li>" \ + >> target/doc/index.html +done + +cat ci/landing-page-footer.html >> target/doc/index.html + +# If we're on travis, not a PR, and on the right branch, publish! +if [ "$TRAVIS_PULL_REQUEST" = "false" ] && [ "$TRAVIS_BRANCH" = "master" ]; then + pip install ghp_import --install-option="--prefix=$HOME/.local" + $HOME/.local/bin/ghp-import -n target/doc + git push -qf https://${GH_TOKEN}@github.com/${TRAVIS_REPO_SLUG}.git gh-pages +fi
diff --git a/libc/ci/emscripten-entry.sh b/libc/ci/emscripten-entry.sh new file mode 100755 index 000000000..22ae8b08a --- /dev/null +++ b/libc/ci/emscripten-entry.sh @@ -0,0 +1,19 @@ +#!/usr/bin/env bash +# Copyright 2017 The Rust Project Developers. See the COPYRIGHT +# file at the top-level directory of this distribution and at +# http://rust-lang.org/COPYRIGHT. +# +# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or +# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license +# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your +# option. This file may not be copied, modified, or distributed +# except according to those terms. + +set -ex + +source /emsdk-portable/emsdk_env.sh &> /dev/null + +# emsdk-portable provides a node binary, but we need version 8 to run wasm +export PATH="/node-v8.0.0-linux-x64/bin:$PATH" + +exec "$@"
diff --git a/libc/ci/emscripten.sh b/libc/ci/emscripten.sh new file mode 100644 index 000000000..d80258584 --- /dev/null +++ b/libc/ci/emscripten.sh @@ -0,0 +1,54 @@ +# Copyright 2017 The Rust Project Developers. See the COPYRIGHT +# file at the top-level directory of this distribution and at +# http://rust-lang.org/COPYRIGHT. +# +# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or +# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license +# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your +# option. This file may not be copied, modified, or distributed +# except according to those terms. + +set -ex + +hide_output() { + set +x + on_err=" +echo ERROR: An error was encountered with the build. +cat /tmp/build.log +exit 1 +" + trap "$on_err" ERR + bash -c "while true; do sleep 30; echo \$(date) - building ...; done" & + PING_LOOP_PID=$! + $@ &> /tmp/build.log + trap - ERR + kill $PING_LOOP_PID + rm -f /tmp/build.log + set -x +} + +cd / +curl -L https://s3.amazonaws.com/mozilla-games/emscripten/releases/emsdk-portable.tar.gz | \ + tar -xz + +cd /emsdk-portable +./emsdk update +hide_output ./emsdk install sdk-1.37.20-64bit +./emsdk activate sdk-1.37.20-64bit + +# Compile and cache libc +source ./emsdk_env.sh +echo "main(){}" > a.c +HOME=/emsdk-portable/ emcc a.c +HOME=/emsdk-portable/ emcc -s BINARYEN=1 a.c +rm -f a.* + +# Make emsdk usable by any user +cp /root/.emscripten /emsdk-portable +chmod a+rxw -R /emsdk-portable + +# node 8 is required to run wasm +cd / +curl -L https://nodejs.org/dist/v8.0.0/node-v8.0.0-linux-x64.tar.xz | \ + tar -xJ +
diff --git a/libc/ci/ios/deploy_and_run_on_ios_simulator.rs b/libc/ci/ios/deploy_and_run_on_ios_simulator.rs new file mode 100644 index 000000000..95df52d76 --- /dev/null +++ b/libc/ci/ios/deploy_and_run_on_ios_simulator.rs @@ -0,0 +1,172 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or +// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license +// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// This is a script to deploy and execute a binary on an iOS simulator. +// The primary use of this is to be able to run unit tests on the simulator and +// retrieve the results.
+// +// To do this through Cargo instead, use Dinghy +// (https://github.com/snipsco/dinghy): cargo dinghy install, then cargo dinghy +// test. + +use std::env; +use std::fs::{self, File}; +use std::io::Write; +use std::path::Path; +use std::process; +use std::process::Command; + +macro_rules! t { + ($e:expr) => (match $e { + Ok(e) => e, + Err(e) => panic!("{} failed with: {}", stringify!($e), e), + }) +} + +// Step one: Wrap as an app +fn package_as_simulator_app(crate_name: &str, test_binary_path: &Path) { + println!("Packaging simulator app"); + drop(fs::remove_dir_all("ios_simulator_app")); + t!(fs::create_dir("ios_simulator_app")); + t!(fs::copy(test_binary_path, + Path::new("ios_simulator_app").join(crate_name))); + + let mut f = t!(File::create("ios_simulator_app/Info.plist")); + t!(f.write_all(format!(r#" + + + + + CFBundleExecutable + {} + CFBundleIdentifier + com.rust.unittests + + + "#, crate_name).as_bytes())); +} + +// Step two: Start the iOS simulator +fn start_simulator() { + println!("Looking for iOS simulator"); + let output = t!(Command::new("xcrun").arg("simctl").arg("list").output()); + assert!(output.status.success()); + let mut simulator_exists = false; + let mut simulator_booted = false; + let mut found_rust_sim = false; + let stdout = t!(String::from_utf8(output.stdout)); + for line in stdout.lines() { + if line.contains("rust_ios") { + if found_rust_sim { + panic!("Duplicate rust_ios simulators found. Please \ + double-check xcrun simctl list."); + } + simulator_exists = true; + simulator_booted = line.contains("(Booted)"); + found_rust_sim = true; + } + } + + if simulator_exists == false { + println!("Creating iOS simulator"); + Command::new("xcrun") + .arg("simctl") + .arg("create") + .arg("rust_ios") + .arg("com.apple.CoreSimulator.SimDeviceType.iPhone-SE") + .arg("com.apple.CoreSimulator.SimRuntime.iOS-10-2") + .check_status(); + } else if simulator_booted == true { + println!("Shutting down already-booted simulator"); + Command::new("xcrun") + .arg("simctl") + .arg("shutdown") + .arg("rust_ios") + .check_status(); + } + + println!("Starting iOS simulator"); + // We can't uninstall the app (if present) as that will hang if the + // simulator isn't completely booted; just erase the simulator instead. 
+ Command::new("xcrun").arg("simctl").arg("erase").arg("rust_ios").check_status(); + Command::new("xcrun").arg("simctl").arg("boot").arg("rust_ios").check_status(); +} + +// Step three: Install the app +fn install_app_to_simulator() { + println!("Installing app to simulator"); + Command::new("xcrun") + .arg("simctl") + .arg("install") + .arg("booted") + .arg("ios_simulator_app/") + .check_status(); +} + +// Step four: Run the app +fn run_app_on_simulator() { + println!("Running app"); + let output = t!(Command::new("xcrun") + .arg("simctl") + .arg("launch") + .arg("--console") + .arg("booted") + .arg("com.rust.unittests") + .output()); + + println!("status: {}", output.status); + println!("stdout --\n{}\n", String::from_utf8_lossy(&output.stdout)); + println!("stderr --\n{}\n", String::from_utf8_lossy(&output.stderr)); + + let stdout = String::from_utf8_lossy(&output.stdout); + let passed = stdout.lines() + .find(|l| l.contains("PASSED")) + .map(|l| l.contains("tests")) + .unwrap_or(false); + + println!("Shutting down simulator"); + Command::new("xcrun") + .arg("simctl") + .arg("shutdown") + .arg("rust_ios") + .check_status(); + if !passed { + panic!("tests didn't pass"); + } +} + +trait CheckStatus { + fn check_status(&mut self); +} + +impl CheckStatus for Command { + fn check_status(&mut self) { + println!("\trunning: {:?}", self); + assert!(t!(self.status()).success()); + } +} + +fn main() { + let args: Vec = env::args().collect(); + if args.len() != 2 { + println!("Usage: {} ", args[0]); + process::exit(-1); + } + + let test_binary_path = Path::new(&args[1]); + let crate_name = test_binary_path.file_name().unwrap(); + + package_as_simulator_app(crate_name.to_str().unwrap(), test_binary_path); + start_simulator(); + install_app_to_simulator(); + run_app_on_simulator(); +} diff --git a/libc/ci/landing-page-footer.html b/libc/ci/landing-page-footer.html new file mode 100644 index 000000000..941cc8d2b --- /dev/null +++ b/libc/ci/landing-page-footer.html @@ -0,0 +1,3 @@ + + + diff --git a/libc/ci/landing-page-head.html b/libc/ci/landing-page-head.html new file mode 100644 index 000000000..fc69fa88e --- /dev/null +++ b/libc/ci/landing-page-head.html @@ -0,0 +1,7 @@ + + + + + + +
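Many of the Dockerfiles in this import wire `CARGO_TARGET_<TRIPLE>_RUNNER` either to a `qemu-<arch>` command line or to a small wrapper binary (such as the `/tmp/runtest` built from `runtest-android.rs`, or the rumprun `runtest.rs` above). A minimal hypothetical runner in the same spirit might look like the sketch below; the `qemu-aarch64` command and sysroot path mirror the aarch64 Dockerfile above, and everything else is illustrative rather than part of this import:

```rust
// Hypothetical minimal Cargo "target runner": Cargo invokes it with the
// cross-compiled test binary as the first argument, and it re-runs that
// binary under QEMU user emulation.
use std::env;
use std::process::{exit, Command};

fn main() {
    let mut args = env::args().skip(1);
    let test_binary = args.next().expect("usage: runner <test-binary> [args...]");

    let status = Command::new("qemu-aarch64")
        .arg("-L")
        .arg("/usr/aarch64-linux-gnu") // sysroot holding the target's shared libraries
        .arg(&test_binary)
        .args(args) // forward any extra test harness arguments
        .status()
        .expect("failed to spawn qemu-aarch64");

    // Propagate the test binary's exit code back to Cargo.
    exit(status.code().unwrap_or(1));
}
```

Pointing `CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_RUNNER` at such a wrapper (or at the `qemu-aarch64 -L ...` command line directly, as the images above do) makes `cargo test --target aarch64-unknown-linux-gnu` execute every test binary through it.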