--- /dev/null
+Updating the package
+====================
+
+1. Run d/make_orig_multi.sh <version>.
+2. Verify the -vendor component tarball to make sure it looks good.
+ If not, edit d/make_orig_multi.sh and the surrounding files (such as patches
+ and exclude files) and repeat the above until it looks good.
+3. $ git fetch upstream
+ You might have to first run:
+ $ git remote add upstream https://github.com/rust-lang/cargo
+4. $ gbp import-orig ../cargo_<version>.orig.tar.gz
+ If you get errors, check the extra default flags in d/gbp.conf
+5. Check that no old versions remain in vendor/. If there are, then your git
+ repo was messed up when you ran (4). Rewind the debian/sid, upstream, and
+ pristine-tar branches, delete the upstream/<version> tag; this reverts step
+ (4). Clean up your git repo, and then try (4) again.
+6. Update d/patches and the rest of the packaging, as normal.
+
+General info
+============
+
+Current packaging of cargo is sub-optimal due to the fact that
+both the language (Rust) and its package manager (Cargo)
+are involved into self-dependency loops to bootstrap.
+
+Moreover, the current approach to modules and registry by cargo is
+biased towards a developer-friendly always-online use.
+
This package currently resorts to several workarounds to build cargo:
+ 1. we use a custom script (debian/bootstrap.py) to build a local
+ stage0, instead of downloading/embedding a snapshotted binary.
+ 2. we embed all dependencies crates, because cargo needs external
+ modules (which need cargo themself to build).
 3. we generate a .cargo/config at build-time, to override paths and
    registry.
 4. we create a temporary git repository at build-time for the
    registry, as this is needed by cargo.
+
As such, the original source is composed of two tarballs:
+ * cargo source
+ * dependencies crates (under vendor/), stripped of unused embedded
+ C libraries
+
+Uploading Package
+=================
+
Please use the dgit push-source command to upload new versions of the package
to the archive. This command by itself creates a source-only upload, uploads
the package to ftpmaster, and records the version history on dgit.
+
+ dgit push-source --gbp
+
+ -- Vasudev Kamath <vasudev@copyninja.info>, Sat, 9 Jun 2018 14:41:17 +0530
--- /dev/null
+#!/usr/bin/python3
+"""
+Wrapper around cargo to have it build using Debian settings.
+
+Usage:
+ export CARGO_HOME=debian/cargo_home
+ cargo prepare-debian /path/to/local/registry
+ cargo build
+ cargo test
+ cargo install
+
+See cargo:d/rules and dh-cargo:cargo.pm for more examples.
+
+Make sure you add "Build-Depends: python3:native" if you use this directly.
+OTOH, you only need "Build-Depends: dh-cargo" if you use that.
+
+If CARGO_HOME doesn't end with debian/cargo_home, then this script does nothing
+and passes through directly to cargo.
+
+Otherwise, you *must* set the following environment variables:
+
+- DEB_CARGO_CRATE
+ ${crate}_${version} of whatever you're building.
+
+- DEB_CARGO_PACKAGE
+ Debian binary package that the result is to be installed into.
+
+- DEB_CARGO_CRATE_IN_REGISTRY
+ Whether the crate is in the local-registry (1) or the cwd (0, empty).
+
+- CFLAGS CXXFLAGS CPPFLAGS LDFLAGS
+- DEB_HOST_GNU_TYPE DEB_HOST_RUST_TYPE
+- (optional) DEB_BUILD_OPTIONS DEB_BUILD_PROFILES
+"""
+
+import os
+import os.path
+import shutil
+import subprocess
+import sys
+
+FLAGS = "CFLAGS CXXFLAGS CPPFLAGS LDFLAGS"
+ARCHES = "DEB_HOST_GNU_TYPE DEB_HOST_RUST_TYPE"
+
def log(*args):
    """Write a diagnostic line to stderr, prefixed so the wrapper's own
    messages are distinguishable from cargo's output."""
    parts = ["debian cargo wrapper:"] + [str(a) for a in args]
    sys.stderr.write(" ".join(parts) + "\n")
+
def logrun(*args, **kwargs):
    """Log the subprocess invocation, then run it via subprocess.run and
    return the CompletedProcess."""
    log("running subprocess", args, kwargs)
    result = subprocess.run(*args, **kwargs)
    return result
+
def sourcepath(p=None):
    """Return the current working directory, or *p* resolved against it."""
    cwd = os.getcwd()
    if not p:
        return cwd
    return os.path.join(cwd, p)
+
def prepare_debian(cargo_home, registry, cratespec, host_gnu_type, ldflags):
    """Generate $CARGO_HOME/config so cargo builds offline against the local
    Debian crate registry.

    cargo_home: directory to create and write the config file into
    registry: path of the vendored crate registry (relative to the cwd)
    cratespec: "<crate>_<version>"; used in the --remap-path-prefix target
    host_gnu_type: GNU host triplet; selects the <triplet>-gcc linker
    ldflags: linker flags forwarded to rustc as -C link-arg=...

    Returns 0 on success; raises ValueError if the registry does not exist.
    """
    if not os.path.exists(sourcepath(registry)):
        raise ValueError("non-existent registry: %s" % registry)

    # Assemble RUSTFLAGS: debug info, capped lints, the cross linker, the
    # pass-through LDFLAGS, and a source-path remap for reproducibility.
    rustflags = ["-C", "debuginfo=2", "--cap-lints", "warn",
                 "-C", "linker=%s-gcc" % host_gnu_type]
    for flag in ldflags:
        rustflags += ["-C", "link-arg=%s" % flag]
    rustflags += ["--remap-path-prefix",
                  "%s=/usr/share/cargo/registry/%s" % (sourcepath(), cratespec.replace("_", "-"))]

    # TODO: we cannot enable this until dh_shlibdeps works correctly; atm we get:
    # dpkg-shlibdeps: warning: can't extract name and version from library name 'libstd-XXXXXXXX.so'
    # and the resulting cargo.deb does not depend on the correct version of libstd-rust-1.XX
    # We probably need to add override_dh_makeshlibs to d/rules of rustc
    #rustflags.extend(["-C", "prefer-dynamic"])

    os.makedirs(cargo_home, exist_ok=True)
    config_text = """[source.crates-io]
replace-with = "dh-cargo-registry"

[source.dh-cargo-registry]
directory = "{0}"

[build]
rustflags = {1}
""".format(sourcepath(registry), repr(rustflags))
    with open("%s/config" % cargo_home, "w") as fp:
        fp.write(config_text)

    return 0
+
def install(binpkg, cratespec, host_rust_type, crate_in_registry, *args):
    """Run `cargo install` into debian/<binpkg>/usr and, if a build script
    produced an output directory, symlink it to debian/cargo_out_dir.

    binpkg: Debian binary package name receiving the installed files
    cratespec: "<crate>_<version>"
    host_rust_type: rust target triple (locates target/<triple>/release/build)
    crate_in_registry: install by name/--vers from the registry if true,
        otherwise install from the current directory (--path)
    args: remaining cargo arguments, passed through verbatim

    Returns 0; raises CalledProcessError if cargo fails.
    """
    crate, version = cratespec.rsplit("_", 1)
    install_target = sourcepath("debian/%s/usr" % binpkg)

    # Choose the install source: registry crate vs. the working tree.
    if crate_in_registry:
        source_args = [crate, "--vers", version]
    else:
        source_args = ["--path", sourcepath()]

    cmd = (["env", "RUST_BACKTRACE=1",
            # set CARGO_TARGET_DIR so build products are saved in target/
            # normally `cargo install` deletes them when it exits
            "CARGO_TARGET_DIR=" + sourcepath("target"),
            "/usr/bin/cargo"]
           + list(args) + source_args + ["--root", install_target])
    logrun(cmd, check=True)
    # Cargo's bookkeeping file must not be shipped in the .deb.
    logrun(["rm", "-f", "%s/.crates.toml" % install_target])

    # if there was a custom build output, symlink it to debian/cargo_out_dir
    # hopefully cargo will provide a better solution in future https://github.com/rust-lang/cargo/issues/5457
    out = logrun('''ls -td "target/%s/release/build/%s"-*/out 2>/dev/null | head -n1'''
                 % (host_rust_type, crate), shell=True, stdout=subprocess.PIPE).stdout
    out = out.decode("utf-8").rstrip()
    if out:
        logrun(["ln", "-sfT", "../%s" % out, "debian/cargo_out_dir"], check=True)
    return 0
+
def main(*args):
    """Entry point: dispatch the cargo-style command line in *args*.

    Behaviour depends on CARGO_HOME: outside a debian/cargo_home build this
    process is replaced by the real cargo (execv, never returns); inside one,
    the arguments are rewritten with Debian build settings before dispatch.
    Returns a process exit status for sys.exit().
    """
    cargo_home = os.getenv("CARGO_HOME", "")
    # Pass-through mode: not a Debian build, just become cargo.
    if not cargo_home.endswith("/debian/cargo_home"):
        os.execv("/usr/bin/cargo", ["cargo"] + list(args))

    # Sanity-check the environment promised by the module docstring.
    if any(f not in os.environ for f in FLAGS.split()):
        raise ValueError("not all of %s set; did you call dpkg-buildflags?" % FLAGS)

    if any(f not in os.environ for f in ARCHES.split()):
        raise ValueError("not all of %s set; did you include architecture.mk?" % ARCHES)

    build_options = os.getenv("DEB_BUILD_OPTIONS", "").split()
    build_profiles = os.getenv("DEB_BUILD_PROFILES", "").split()

    # Translate DEB_BUILD_OPTIONS "parallel=N" into cargo's -jN (the last
    # occurrence wins).
    parallel = []
    for o in build_options:
        if o.startswith("parallel="):
            parallel = ["-j" + o[9:]]
    nodoc = "nodoc" in build_options or "nodoc" in build_profiles
    nocheck = "nocheck" in build_options or "nocheck" in build_profiles

    # note this is actually the "build target" type, see rustc's README.Debian
    # for full details of the messed-up terminology here
    host_rust_type = os.getenv("DEB_HOST_RUST_TYPE", "")
    host_gnu_type = os.getenv("DEB_HOST_GNU_TYPE", "")

    log("options, profiles, parallel:", build_options, build_profiles, parallel)
    log("rust_type, gnu_type:", ", ".join([host_rust_type, host_gnu_type]))

    # NOTE(review): assumes at least one argument; a bare invocation under
    # debian/cargo_home would raise IndexError here.
    if args[0] == "prepare-debian":
        registry = args[1]
        return prepare_debian(cargo_home, registry,
        os.environ["DEB_CARGO_CRATE"], host_gnu_type, os.getenv("LDFLAGS", "").split())

    # Rewrite the command line: on a recognised subcommand inject verbosity,
    # parallelism and the build target; pass all other arguments through.
    newargs = []
    subcmd = None
    for a in args:
        if a in ("build", "rustc", "doc", "test", "bench", "install"):
            subcmd = a
            newargs.extend(["-Zavoid-dev-deps", a, "--verbose", "--verbose"] +
                parallel + ["--target", host_rust_type])
        elif a == "clean":
            subcmd = a
            newargs.extend([a, "--verbose", "--verbose"])
        else:
            newargs.append(a)

    # Honour the Debian nodoc/nocheck opt-outs by succeeding without work.
    if nodoc and subcmd == "doc":
        return 0
    if nocheck and subcmd in ("test", "bench"):
        return 0

    if subcmd == "clean":
        logrun(["env", "RUST_BACKTRACE=1", "/usr/bin/cargo"] + list(newargs), check=True)
        # Also remove the generated cargo home so a rebuild starts clean.
        if os.path.exists(cargo_home):
            shutil.rmtree(cargo_home)
        return 0

    # All remaining subcommands need the config written by prepare-debian.
    cargo_config = "%s/config" % cargo_home
    if not os.path.exists(cargo_config):
        raise ValueError("does not exist: %s, did you run `cargo prepare-debian <registry>`?" % cargo_config)

    if subcmd == "install":
        return install(os.environ["DEB_CARGO_PACKAGE"],
                       os.environ["DEB_CARGO_CRATE"],
                       host_rust_type,
                       os.getenv("DEB_CARGO_CRATE_IN_REGISTRY", "") == "1",
                       *newargs)
    else:
        return logrun(["env", "RUST_BACKTRACE=1", "/usr/bin/cargo"] + list(newargs)).returncode
+
# Script entry point: forward the CLI arguments to main() and use its return
# value as the process exit status.
if __name__ == "__main__":
    sys.exit(main(*sys.argv[1:]))
+
--- /dev/null
+#!/usr/bin/env python
+"""
+NOTE: This script has not been used for a very long time and very likely won't
+work. Please read the code before attempting to run it and hoping that "just
+fixing the errors" will work. -- infinity0
+
+About
+=====
+
+This python script is design to do the bare minimum to compile and link the
+Cargo binary for the purposes of bootstrapping itself on a new platform for
+which cross-compiling isn't possible. I wrote this specifically to bootstrap
+Cargo on [Bitrig](https://bitrig.org). Bitrig is a fork of OpenBSD that uses
+clang/clang++ and other BSD licensed tools instead of GNU licensed software.
+Cross compiling from another platform is extremely difficult because of the
+alternative toolchain Bitrig uses.
+
+With this script, all that should be necessary to run this is a working Rust
+toolchain, Python, and Git.
+
+This script will not set up a full cargo cache or anything. It works by
+cloning the cargo index and then starting with the cargo dependencies, it
+recursively builds the dependency tree. Once it has the dependency tree, it
+starts with the leaves of the tree, doing a breadth first traversal and for
+each dependency, it clones the repo, sets the repo's head to the correct
+revision and then executes the build command specified in the cargo config.
+
+This bootstrap script uses a temporary directory to store the built dependency
+libraries and uses that as a link path when linking dependencies and the
+cargo binary. The goal is to create a statically linked cargo binary that is
+capable of being used as a "local cargo" when running the main cargo Makefiles.
+
+Dependencies
+============
+
+* pytoml -- used for parsing toml files.
+ https://github.com/avakar/pytoml
+
+* dulwich -- used for working with git repos.
+ https://git.samba.org/?p=jelmer/dulwich.git;a=summary
+
+Both can be installed via the pip tool:
+
+```sh
+sudo pip install pytoml dulwich
+```
+
+Command Line Options
+====================
+
+```
+--cargo-root <path> specify the path to the cargo repo root.
+--target-dir <path> specify the location to store build results.
--crate-index <path> path to where crates.io index should be cloned
+--no-clone don't clone crates.io index, --crate-index must point to existing clone.
+--no-clean don't remove the folders created during bootstrapping.
+--download only download the crates needed to bootstrap cargo.
+--graph output dot format graph of dependencies.
+--target <triple> build target: e.g. x86_64-unknown-bitrig
+--host <triple> host machine: e.g. x86_64-unknown-linux-gnu
+--urls-file <file> file to write crate URLs to
+--blacklist <crates> list of blacklisted crates to skip
+--include-optional <crates> list of optional crates to include
+--patchdir <dir> directory containing patches to apply to crates after fetching them
+--save-crate if set, save .crate file when downloading
+```
+
+The `--cargo-root` option defaults to the current directory if unspecified. The
target directory defaults to Python equivalent of `mktemp -d` if unspecified.
+The `--crate-index` option specifies where the crates.io index will be cloned. Or,
+if you already have a clone of the index, the crates index should point there
+and you should also specify `--no-clone`. The `--target` option is used to
+specify which platform you are bootstrapping for. The `--host` option defaults
+to the value of the `--target` option when not specified.
+
+Examples
+========
+
To bootstrap Cargo on [Bitrig](https://bitrig.org) I followed these steps:
+
+* Cloned this [bootstrap script repo](https://github.com/dhuseby/cargo-bootstra)
+to `/tmp/bootstrap`.
+* Cloned the [crates.io index](https://github.com/rust-lang/crates.io-index)
+to `/tmp/index`.
+* Created a target folder, `/tmp/out`, for the output.
* Cloned the [Cargo](https://github.com/rust-lang/cargo) repo to `/tmp/cargo`.
+* Copied the bootstrap.py script to the cargo repo root.
+* Ran the bootstrap.py script like so:
+```sh
+./bootstrap.py --crate-index /tmp/index --target-dir /tmp/out --no-clone --no-clean --target x86_64-unknown-bitrig
+```
+
+After the script completed, there is a Cargo executable named `cargo-0_2_0` in
+`/tmp/out`. That executable can then be used to bootstrap Cargo from source by
+specifying it as the `--local-cargo` option to Cargo's `./configure` script.
+"""
+
+import argparse
+import cStringIO
+import hashlib
+import inspect
+import json
+import os
+import re
+import shutil
+import subprocess
+import sys
+import tarfile
+import tempfile
+import urlparse
+import socket
+# In Debian crates are already downloaded when we bootstrap cargo.
+# import requests
+import pytoml as toml
+import dulwich.porcelain as git
+from glob import glob
+
+
+TARGET = None
+HOST = None
+GRAPH = None
+URLS_FILE = None
+CRATE_CACHE = None
+CRATES_INDEX = 'git://github.com/rust-lang/crates.io-index.git'
+CARGO_REPO = 'git://github.com/rust-lang/cargo.git'
+CRATE_API_DL = 'https://crates.io/api/v1/crates/%s/%s/download'
+SV_RANGE = re.compile(r'^(?P<op>(?:\<=|\>=|=|\<|\>|\^|\~))?\s*'
+ r'(?P<major>(?:\*|0|[1-9][0-9]*))'
+ r'(\.(?P<minor>(?:\*|0|[1-9][0-9]*)))?'
+ r'(\.(?P<patch>(?:\*|0|[1-9][0-9]*)))?'
+ r'(\-(?P<prerelease>[0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*))?'
+ r'(\+(?P<build>[0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*))?$')
+SEMVER = re.compile(r'^\s*(?P<major>(?:0|[1-9][0-9]*))'
+ r'(\.(?P<minor>(?:0|[1-9][0-9]*)))?'
+ r'(\.(?P<patch>(?:0|[1-9][0-9]*)))?'
+ r'(\-(?P<prerelease>[0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*))?'
+ r'(\+(?P<build>[0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*))?$')
+BSCRIPT = re.compile(r'^cargo:(?P<key>([^\s=]+))(=(?P<value>.+))?$')
+BNAME = re.compile('^(lib)?(?P<name>([^_]+))(_.*)?$')
+BUILT = {}
+CRATES = {}
+CVER = re.compile("-([^-]+)$")
+UNRESOLVED = []
+PFX = []
+BLACKLIST = []
+INCLUDE_OPTIONAL = []
+
def dbgCtx(f):
    """Decorator for Crate methods: pushes the crate's name onto the global
    debug-prefix stack (PFX) for the duration of the call, so dbg() output
    shows which crate is being processed."""
    def wrapper(self, *cargs):
        PFX.append(self.name())
        result = f(self, *cargs)
        PFX.pop()
        return result
    return wrapper
+
def dbg(s):
    """Print a debug message prefixed with the current PFX context stack."""
    print('%s: %s' % (':'.join(PFX), s))
+
+
class PreRelease(object):
    """Ordered container for the dot-separated pre-release component of a
    semver string (e.g. "alpha.1" in "1.0.0-alpha.1").

    Comparison follows semver precedence: a version with no pre-release
    ranks above one with a pre-release; otherwise longer pre-releases rank
    higher, then parts are compared piecewise (numeric parts numerically
    and below non-numeric parts, non-numeric parts lexically).
    """

    def __init__(self, pr):
        # pr may be None (no pre-release component) or a dotted string.
        self._container = []
        if pr is not None:
            self._container += str(pr).split('.')

    def __str__(self):
        return '.'.join(self._container)

    def __repr__(self):
        # BUG FIX: __repr__ must return a string; it previously returned the
        # list itself, which raises TypeError whenever repr() is invoked.
        return repr(self._container)

    def __getitem__(self, key):
        return self._container[key]

    def __len__(self):
        return len(self._container)

    def __gt__(self, rhs):
        return not ((self < rhs) or (self == rhs))

    def __ge__(self, rhs):
        return not (self < rhs)

    def __le__(self, rhs):
        return not (self > rhs)

    def __eq__(self, rhs):
        return self._container == rhs._container

    def __ne__(self, rhs):
        return not (self == rhs)

    def __lt__(self, rhs):
        if self == rhs:
            return False

        # not having a pre-release is higher precedence
        if len(self) == 0:
            # self has no pre-release: 1.0.0 > 1.0.0-alpha, so never less.
            return False
        if len(rhs) == 0:
            # BUG FIX: this test was `len(rhs) is None`, which can never be
            # true (len() returns an int), so "1.0.0-alpha < 1.0.0" fell
            # through to the length comparison and wrongly returned False.
            # 1.0.0-alpha < 1.0.0
            return True

        # both have a pre-release: longer ones are higher precedence
        if len(self) > len(rhs):
            # 1.0.0-alpha.1 > 1.0.0-alpha
            return False
        elif len(self) < len(rhs):
            # 1.0.0-alpha < 1.0.0-alpha.1
            return True

        # if both have the same length pre-release, must check each piece
        # numeric sub-parts have lower precedence than non-numeric sub-parts
        # non-numeric sub-parts are compared lexically in ASCII sort order
        for l, r in zip(self, rhs):
            if l.isdigit():
                if r.isdigit():
                    if int(l) < int(r):
                        # 1 < 2
                        return True
                    elif int(l) > int(r):
                        # 2 > 1
                        return False
                    else:
                        # 1 == 1
                        continue
                else:
                    # 1 < 'foo'
                    return True
            else:
                if r.isdigit():
                    # 'foo' > 1
                    return False
                # both are non-numeric
                if l < r:
                    return True
                elif l > r:
                    return False

        raise RuntimeError('PreRelease __lt__ failed')
+
+
class Semver(dict):
    """A parsed semantic-version string.

    Subclasses dict: the regex capture groups (major/minor/patch/prerelease/
    build) are stored as items.  Supports semver-precedence ordering and is
    hashable via its normalized string form.
    """

    def __init__(self, sv):
        match = SEMVER.match(str(sv))
        if match is None:
            raise ValueError('%s is not a valid semver string' % sv)

        self._input = sv
        self.update(match.groupdict())
        self.prerelease = PreRelease(self['prerelease'])

    def __str__(self):
        major, minor, patch, prerelease, build = self.parts_raw()
        # Missing components normalize to '0'.
        text = '.'.join([major or '0', minor or '0', patch or '0'])
        if len(self.prerelease):
            text += '-' + str(self.prerelease)
        if build is not None:
            text += '+' + build
        return text

    def __hash__(self):
        return hash(str(self))

    def as_range(self):
        """Return an exact-match ('=') SemverRange for this version."""
        return SemverRange('=%s' % self)

    def parts(self):
        """(major, minor, patch) as ints, plus raw prerelease/build."""
        major, minor, patch, prerelease, build = self.parts_raw()
        return (int(major or '0'), int(minor or '0'), int(patch or '0'),
                prerelease, build)

    def parts_raw(self):
        """The raw capture groups; missing components are None."""
        return (self['major'], self['minor'], self['patch'],
                self['prerelease'], self['build'])

    def __lt__(self, rhs):
        lmaj, lmin, lpat, lpre, _ = self.parts()
        rmaj, rmin, rpat, rpre, _ = rhs.parts()
        if (lmaj, lmin, lpat) != (rmaj, rmin, rpat):
            return (lmaj, lmin, lpat) < (rmaj, rmin, rpat)
        # Same numeric triple: a pre-release sorts below no pre-release.
        if lpre is not None and rpre is None:
            return True
        if lpre is not None and rpre is not None:
            return self.prerelease < rhs.prerelease
        return False

    def __le__(self, rhs):
        return not (self > rhs)

    def __gt__(self, rhs):
        return not ((self < rhs) or (self == rhs))

    def __ge__(self, rhs):
        return not (self < rhs)

    def __eq__(self, rhs):
        # build metadata is only considered for equality
        return self.parts() == rhs.parts()

    def __ne__(self, rhs):
        return not (self == rhs)
+
+
class SemverRange(object):
    """A cargo-style version requirement ('^1.2', '~0.3', '*', '>=0.5, <2.0').

    Parses the requirement into an operator, an optional exact Semver, and
    (for range-style operators) lower/upper Semver bounds; compare() tests
    whether a concrete version satisfies the requirement.
    """

    def __init__(self, sv):
        self._input = sv
        self._lower = None
        self._upper = None
        self._op = None
        self._semver = None

        sv = str(sv)
        # Comma-separated requirements ("> a, < b") are handled recursively.
        svs = [x.strip() for x in sv.split(',')]

        if len(svs) > 1:
            self._op = '^'
            # NOTE(review): this merges bounds by taking the minimum lower
            # and maximum upper across the parts (a union, not the semver
            # intersection) — confirm this is the intended semantics.
            for sr in svs:
                rang = SemverRange(sr)
                if rang.lower() is not None:
                    if self._lower is None or rang.lower() < self._lower:
                        self._lower = rang.lower()
                if rang.upper() is not None:
                    if self._upper is None or rang.upper() > self._upper:
                        self._upper = rang.upper()
                op, semver = rang.op_semver()
                if semver is not None:
                    if op == '>=':
                        if self._lower is None or semver < self._lower:
                            self._lower = semver
                    if op == '<':
                        if self._upper is None or semver > self._upper:
                            self._upper = semver
            return

        match = SV_RANGE.match(sv)
        if match is None:
            raise ValueError('%s is not a valid semver range string' % sv)

        svm = match.groupdict()
        op, major, minor, patch, prerelease, build = svm['op'], svm['major'], svm['minor'], svm['patch'], svm['prerelease'], svm['build']
        prerelease = PreRelease(prerelease)

        # fix up the op
        if op is None:
            if major == '*' or minor == '*' or patch == '*':
                op = '*'
            else:
                # if no op was specified and there are no wildcards, then op
                # defaults to '^'
                op = '^'
        else:
            # An explicit operator carries an exact version after it.
            self._semver = Semver(sv[len(op):])

        if op not in ('<=', '>=', '<', '>', '=', '^', '~', '*'):
            raise ValueError('%s is not a valid semver operator' % op)

        self._op = op

        # lower bound
        def find_lower():
            # Comparison operators keep the bare semver; no derived bound.
            if op in ('<=', '<', '=', '>', '>='):
                return None

            if op == '*':
                # wildcards specify a range
                if major == '*':
                    return Semver('0.0.0')
                elif minor == '*':
                    return Semver(major + '.0.0')
                elif patch == '*':
                    return Semver(major + '.' + minor + '.0')
            elif op == '^':
                # caret specifies a range
                if patch is None:
                    if minor is None:
                        # ^0 means >=0.0.0 and <1.0.0
                        return Semver(major + '.0.0')
                    else:
                        # ^0.0 means >=0.0.0 and <0.1.0
                        return Semver(major + '.' + minor + '.0')
                else:
                    # ^0.0.1 means >=0.0.1 and <0.0.2
                    # ^0.1.2 means >=0.1.2 and <0.2.0
                    # ^1.2.3 means >=1.2.3 and <2.0.0
                    if int(major) == 0:
                        if int(minor) == 0:
                            # ^0.0.1
                            return Semver('0.0.' + patch)
                        else:
                            # ^0.1.2
                            return Semver('0.' + minor + '.' + patch)
                    else:
                        # ^1.2.3
                        return Semver(major + '.' + minor + '.' + patch)
            elif op == '~':
                # tilde specifies a minimal range
                if patch is None:
                    if minor is None:
                        # ~0 means >=0.0.0 and <1.0.0
                        return Semver(major + '.0.0')
                    else:
                        # ~0.0 means >=0.0.0 and <0.1.0
                        return Semver(major + '.' + minor + '.0')
                else:
                    # ~0.0.1 means >=0.0.1 and <0.1.0
                    # ~0.1.2 means >=0.1.2 and <0.2.0
                    # ~1.2.3 means >=1.2.3 and <1.3.0
                    return Semver(major + '.' + minor + '.' + patch)

            raise RuntimeError('No lower bound')
        self._lower = find_lower()

        def find_upper():
            # Comparison operators keep the bare semver; no derived bound.
            if op in ('<=', '<', '=', '>', '>='):
                return None

            if op == '*':
                # wildcards specify a range
                if major == '*':
                    return None
                elif minor == '*':
                    return Semver(str(int(major) + 1) + '.0.0')
                elif patch == '*':
                    return Semver(major + '.' + str(int(minor) + 1) + '.0')
            elif op == '^':
                # caret specifies a range
                if patch is None:
                    if minor is None:
                        # ^0 means >=0.0.0 and <1.0.0
                        return Semver(str(int(major) + 1) + '.0.0')
                    else:
                        # ^0.0 means >=0.0.0 and <0.1.0
                        return Semver(major + '.' + str(int(minor) + 1) + '.0')
                else:
                    # ^0.0.1 means >=0.0.1 and <0.0.2
                    # ^0.1.2 means >=0.1.2 and <0.2.0
                    # ^1.2.3 means >=1.2.3 and <2.0.0
                    if int(major) == 0:
                        if int(minor) == 0:
                            # ^0.0.1
                            return Semver('0.0.' + str(int(patch) + 1))
                        else:
                            # ^0.1.2
                            return Semver('0.' + str(int(minor) + 1) + '.0')
                    else:
                        # ^1.2.3
                        return Semver(str(int(major) + 1) + '.0.0')
            elif op == '~':
                # tilde specifies a minimal range
                if patch is None:
                    if minor is None:
                        # ~0 means >=0.0.0 and <1.0.0
                        return Semver(str(int(major) + 1) + '.0.0')
                    else:
                        # ~0.0 means >=0.0.0 and <0.1.0
                        return Semver(major + '.' + str(int(minor) + 1) + '.0')
                else:
                    # ~0.0.1 means >=0.0.1 and <0.1.0
                    # ~0.1.2 means >=0.1.2 and <0.2.0
                    # ~1.2.3 means >=1.2.3 and <1.3.0
                    return Semver(major + '.' + str(int(minor) + 1) + '.0')

            raise RuntimeError('No upper bound')
        self._upper = find_upper()

    def __repr__(self):
        return "SemverRange(%s, op=%s, semver=%s, lower=%s, upper=%s)" % (repr(self._input), self._op, self._semver, self._lower, self._upper)

    def __str__(self):
        return self._input

    def lower(self):
        # Inclusive lower bound (Semver) or None.
        return self._lower

    def upper(self):
        # Exclusive upper bound (Semver) or None.
        return self._upper

    def op_semver(self):
        # The parsed operator and its exact Semver (None for range ops).
        return self._op, self._semver

    def compare(self, sv):
        """Return True if version *sv* (Semver or string) satisfies this range."""
        if not isinstance(sv, Semver):
            sv = Semver(sv)

        op = self._op
        if op == '*':
            if self._semver is not None and self._semver['major'] == '*':
                return sv >= Semver('0.0.0')
            if self._lower is not None and sv < self._lower:
                return False
            if self._upper is not None and sv >= self._upper:
                return False
            return True
        elif op == '^':
            return (sv >= self._lower) and (sv < self._upper)
        elif op == '~':
            return (sv >= self._lower) and (sv < self._upper)
        elif op == '<=':
            return sv <= self._semver
        elif op == '>=':
            return sv >= self._semver
        elif op == '<':
            return sv < self._semver
        elif op == '>':
            return sv > self._semver
        elif op == '=':
            return sv == self._semver

        raise RuntimeError('Semver comparison failed to find a matching op')
+
+
def test_semver():
    """
    Tests for Semver parsing. Run using py.test: py.test bootstrap.py
    """
    cases = [
        ("1", "1.0.0"),
        ("1.1", "1.1.0"),
        ("1.1.1", "1.1.1"),
        ("1.1.1-alpha", "1.1.1-alpha"),
        ("1.1.1-alpha.1", "1.1.1-alpha.1"),
        ("1.1.1-alpha+beta", "1.1.1-alpha+beta"),
        ("1.1.1-alpha+beta.1", "1.1.1-alpha+beta.1"),
    ]
    for raw, normalized in cases:
        assert str(Semver(raw)) == normalized
+
def test_semver_eq():
    """Equivalent spellings of the same version compare equal."""
    pairs = [
        ("1", "1.0.0"),
        ("1.1", "1.1.0"),
        ("1.1.1", "1.1.1"),
        ("1.1.1-alpha", "1.1.1-alpha"),
        ("1.1.1-alpha.1", "1.1.1-alpha.1"),
        ("1.1.1-alpha+beta", "1.1.1-alpha+beta"),
        ("1.1.1-alpha.1+beta", "1.1.1-alpha.1+beta"),
        ("1.1.1-alpha.1+beta.1", "1.1.1-alpha.1+beta.1"),
    ]
    for a, b in pairs:
        assert Semver(a) == Semver(b)
+
def test_semver_comparison():
    """Ordering follows semver precedence rules."""
    ordered_pairs = [
        ("1", "2.0.0"),
        ("1.1", "1.2.0"),
        ("1.1.1", "1.1.2"),
        ("1.1.1-alpha", "1.1.1"),
        ("1.1.1-alpha", "1.1.1-beta"),
        ("1.1.1-alpha", "1.1.1-beta"),
        ("1.1.1-alpha", "1.1.1-alpha.1"),
        ("1.1.1-alpha.1", "1.1.1-alpha.2"),
        ("1.1.1-alpha+beta", "1.1.1+beta"),
        ("1.1.1-alpha+beta", "1.1.1-beta+beta"),
        ("1.1.1-alpha+beta", "1.1.1-beta+beta"),
        ("1.1.1-alpha+beta", "1.1.1-alpha.1+beta"),
        ("1.1.1-alpha.1+beta", "1.1.1-alpha.2+beta"),
        ("0.5", "2.0"),
    ]
    for low, high in ordered_pairs:
        assert Semver(low) < Semver(high)
    # Converse and strictness checks.
    assert not (Semver("2.0") < Semver("0.5"))
    assert not (Semver("0.5") > Semver("2.0"))
    assert not (Semver("0.5") >= Semver("2.0"))
    assert Semver("2.0") >= Semver("0.5")
    assert Semver("2.0") > Semver("0.5")
    assert not (Semver("2.0") > Semver("2.0"))
    assert not (Semver("2.0") < Semver("2.0"))
+
def test_semver_range():
    """Lower/upper bounds derived from caret/tilde/wildcard specifiers."""
    def bounds(spec, lowe, high):
        lowe = Semver(lowe) if lowe is not None else lowe
        high = Semver(high) if high is not None else high
        assert SemverRange(spec).lower() == lowe and SemverRange(spec).upper() == high
    table = [
        ('0', '0.0.0', '1.0.0'),
        ('0.0', '0.0.0', '0.1.0'),
        ('0.0.0', '0.0.0', '0.0.1'),
        ('0.0.1', '0.0.1', '0.0.2'),
        ('0.1.1', '0.1.1', '0.2.0'),
        ('1.1.1', '1.1.1', '2.0.0'),
        ('^0', '0.0.0', '1.0.0'),
        ('^0.0', '0.0.0', '0.1.0'),
        ('^0.0.0', '0.0.0', '0.0.1'),
        ('^0.0.1', '0.0.1', '0.0.2'),
        ('^0.1.1', '0.1.1', '0.2.0'),
        ('^1.1.1', '1.1.1', '2.0.0'),
        ('~0', '0.0.0', '1.0.0'),
        ('~0.0', '0.0.0', '0.1.0'),
        ('~0.0.0', '0.0.0', '0.1.0'),
        ('~0.0.1', '0.0.1', '0.1.0'),
        ('~0.1.1', '0.1.1', '0.2.0'),
        ('~1.1.1', '1.1.1', '1.2.0'),
        ('*', '0.0.0', None),
        ('0.*', '0.0.0', '1.0.0'),
        ('0.0.*', '0.0.0', '0.1.0'),
    ]
    for spec, lowe, high in table:
        bounds(spec, lowe, high)
+
+
def test_semver_multirange():
    """Comma-joined and wildcard ranges match the expected versions."""
    multi = SemverRange(">= 0.5, < 2.0")
    assert multi.compare("1.0.0")
    wildcard = SemverRange("*")
    assert wildcard.compare("0.2.7")
+
+
class Runner(object):
    """Run an external command, capturing its output (Python 2 only:
    relies on dict.iteritems and on Popen returning str, not bytes).

    Holds a base command and environment; each __call__ appends extra
    arguments and environment entries, runs the process, logs and stores
    its stdout/stderr line-by-line, and records the exit status.
    """

    def __init__(self, c, e, cwd=None):
        # c: base command (string or argv list); e: base environment dict;
        # cwd: optional working directory for the subprocess.
        self._cmd = c
        if not isinstance(self._cmd, list):
            self._cmd = [self._cmd]
        self._env = e
        self._stdout = []
        self._stderr = []
        self._returncode = 0
        self._cwd = cwd

    def __call__(self, c, e):
        # Merge per-call extras: arguments appended, env entries override.
        cmd = self._cmd + c
        env = dict(self._env, **e)
        #dbg(' env: %s' % env)
        #dbg(' cwd: %s' % self._cwd)
        # Render an approximate shell command line for the debug log.
        envstr = ''
        for k, v in env.iteritems():
            envstr += ' %s="%s"' % (k, v)
        if self._cwd is not None:
            dbg('cd %s && %s %s' % (self._cwd, envstr, ' '.join(cmd)))
        else:
            dbg('%s %s' % (envstr, ' '.join(cmd)))

        proc = subprocess.Popen(cmd, env=env,
            stdout=subprocess.PIPE, stderr=subprocess.PIPE,
            cwd=self._cwd)
        out, err = proc.communicate()

        # Keep only non-empty output lines; stderr is echoed to the log.
        for lo in out.split('\n'):
            if len(lo) > 0:
                self._stdout.append(lo)
                #dbg('out: %s' % lo)

        for le in err.split('\n'):
            if len(le) > 0:
                self._stderr.append(le)
                dbg(le)

        """
        while proc.poll() is None:
            lo = proc.stdout.readline().rstrip('\n')
            le = proc.stderr.readline().rstrip('\n')
            if len(lo) > 0:
                self._stdout.append(lo)
                dbg(lo)
                sys.stdout.flush()
            if len(le) > 0:
                self._stderr.append(le)
                dbg('err: %s', le)
                sys.stdout.flush()
        """
        self._returncode = proc.wait()
        #dbg(' ret: %s' % self._returncode)
        return self._stdout

    def output(self):
        # Captured stdout lines from the most recent run.
        return self._stdout

    def returncode(self):
        # Exit status from the most recent run.
        return self._returncode
+
class RustcRunner(Runner):
    """Runner for rustc invocations.

    rustc emits no 'cargo:' directives, so after running the command this
    always returns an empty (extra_cmd, extra_env, dep_env) triple, matching
    the shape produced by BuildScriptRunner.__call__.
    """

    def __call__(self, c, e):
        super(RustcRunner, self).__call__(c, e)
        return ([], {}, {})
+
class BuildScriptRunner(Runner):
    """Runner for crate build scripts.

    Executes the script via the base Runner, then parses every 'cargo:'
    directive from its stdout into a triple:
      extra_cmd -- additional rustc arguments (-l / -L / --cfg)
      extra_env -- CARGO_FEATURE_* entries derived from rustc-cfg values
      dep_env   -- all other key=value directives, for dependent crates
    """

    def __call__(self, c, e):
        super(BuildScriptRunner, self).__call__(c, e)

        extra_cmd = []
        extra_env = {}
        dep_env = {}
        for line in self.output():
            match = BSCRIPT.match(str(line))
            if match is None:
                continue
            groups = match.groupdict()
            key = groups['key']
            value = groups['value']

            if key == 'rustc-link-lib':
                extra_cmd.extend(['-l', value])
            elif key == 'rustc-link-search':
                extra_cmd.extend(['-L', value])
            elif key == 'rustc-cfg':
                extra_cmd.extend(['--cfg', value])
                extra_env['CARGO_FEATURE_%s' % value.upper().replace('-', '_')] = 1
            else:
                dep_env[key] = value
        return (extra_cmd, extra_env, dep_env)
+
+class Crate(object):
+
+ def __init__(self, crate, ver, deps, cdir, build):
+ self._crate = str(crate)
+ self._version = Semver(ver)
+ self._dep_info = deps
+ self._dir = cdir
+ # put the build scripts first
+ self._build = [x for x in build if x.get('type') == 'build_script']
+ # then add the lib/bin builds
+ self._build += [x for x in build if x.get('type') != 'build_script']
+ self._resolved = False
+ self._deps = {}
+ self._refs = []
+ self._env = {}
+ self._dep_env = {}
+ self._extra_flags = []
+
    def name(self):
        """The crate's name (string)."""
        return self._crate
+
    def dep_info(self):
        """Raw dependency records gathered from the index/Cargo.toml."""
        return self._dep_info
+
    def version(self):
        """The crate's version as a Semver."""
        return self._version
+
    def dir(self):
        """Directory containing the unpacked crate source."""
        return self._dir
+
+ def __str__(self):
+ return '%s-%s' % (self.name(), self.version())
+
+ def add_dep(self, crate, features):
+ if str(crate) in self._deps:
+ return
+
+ features = [str(x) for x in features]
+ self._deps[str(crate)] = { 'features': features }
+ crate.add_ref(self)
+
+ def add_ref(self, crate):
+ if str(crate) not in self._refs:
+ self._refs.append(str(crate))
+
    def resolved(self):
        """True once resolve() has completed for this crate."""
        return self._resolved
+
    @dbgCtx
    def resolve(self, tdir, idir, nodl, graph=None):
        """Resolve this crate's dependencies one level deep.

        tdir: directory where crate sources are downloaded/unpacked
        idir: path to the crates.io index clone
        nodl: if true, only use already-downloaded crates (no network)
        graph: optional file object receiving DOT-format dependency edges

        Newly discovered dependency crates are appended to the global
        UNRESOLVED work list; on completion this crate registers itself in
        the global CRATES map.
        """
        if self._resolved:
            return
        if str(self) in CRATES:
            return

        if self._dep_info is not None:
            print ''
            dbg('Resolving dependencies for: %s' % str(self))
            for d in self._dep_info:
                # Only normal and build dependencies matter for bootstrapping.
                kind = d.get('kind', 'normal')
                if kind not in ('normal', 'build'):
                    print ''
                    dbg('Skipping %s dep %s' % (kind, d['name']))
                    continue

                optional = d.get('optional', False)
                if optional and d['name'] not in INCLUDE_OPTIONAL:
                    print ''
                    dbg('Skipping optional dep %s' % d['name'])
                    continue

                svr = SemverRange(d['req'])
                print ''
                deps = []
                dbg('Looking up info for %s %s' % (d['name'], str(svr)))
                # NOTE(review): dcrate and ftrs are only assigned on this
                # non-local branch; on the 'local' branch below they keep
                # values leaked from a previous loop iteration (or are
                # unbound on the first one) — confirm local deps are handled.
                if d.get('local', None) is None:
                    # go through crates first to see if the is satisfied already
                    dcrate = find_crate_by_name_and_semver(d['name'], svr)
                    if dcrate is not None:
                        #import pdb; pdb.set_trace()
                        # Pin the requirement to the already-chosen version.
                        svr = dcrate.version().as_range()
                    name, ver, ideps, ftrs, cksum = crate_info_from_index(idir, d['name'], svr)
                    if name in BLACKLIST:
                        dbg('Found in blacklist, skipping %s' % (name))
                    elif dcrate is None:
                        if nodl:
                            cdir = find_downloaded_crate(tdir, name, svr)
                        else:
                            cdir = dl_and_check_crate(tdir, name, ver, cksum)
                        # Merge index-declared and Cargo.toml-declared deps.
                        _, tver, tdeps, build = crate_info_from_toml(cdir)
                        deps += ideps
                        deps += tdeps
                    else:
                        dbg('Found crate already satisfying %s %s' % (d['name'], str(svr)))
                        deps += dcrate.dep_info()
                else:
                    cdir = d['path']
                    name, ver, ideps, build = crate_info_from_toml(cdir)
                    deps += ideps

                if name not in BLACKLIST:
                    # NOTE(review): bare except silently discards any failure
                    # constructing/registering the crate.
                    try:
                        if dcrate is None:
                            dcrate = Crate(name, ver, deps, cdir, build)
                        if str(dcrate) in CRATES:
                            dcrate = CRATES[str(dcrate)]
                        UNRESOLVED.append(dcrate)
                        if graph is not None:
                            print >> graph, '"%s" -> "%s";' % (str(self), str(dcrate))

                    except:
                        dcrate = None

                # clean up the list of features that are enabled
                tftrs = d.get('features', [])
                if isinstance(tftrs, dict):
                    tftrs = tftrs.keys()
                else:
                    tftrs = [x for x in tftrs if len(x) > 0]

                # add 'default' if default_features is true
                if d.get('default_features', True):
                    tftrs.append('default')

                features = []
                if isinstance(ftrs, dict):
                    # add any available features that are activated by the
                    # dependency entry in the parent's dependency record,
                    # and any features they depend on recursively
                    def add_features(f):
                        if f in ftrs:
                            for k in ftrs[f]:
                                # guard against infinite recursion
                                if not k in features:
                                    features.append(k)
                                    add_features(k)
                    for k in tftrs:
                        add_features(k)
                else:
                    features += [x for x in ftrs if (len(x) > 0) and (x in tftrs)]

                if dcrate is not None:
                    self.add_dep(dcrate, features)

        self._resolved = True
        CRATES[str(self)] = self
+
+ @dbgCtx
+ def build(self, by, out_dir, features=[]):
+ extra_filename = '-' + str(self.version()).replace('.','_')
+ output_name = self.name().replace('-','_')
+ output = os.path.join(out_dir, 'lib%s%s.rlib' % (output_name, extra_filename))
+
+ if str(self) in BUILT:
+ return ({'name':self.name(), 'lib':output}, self._env, self._extra_flags)
+
+ externs = []
+ extra_flags = []
+ for dep,info in self._deps.iteritems():
+ if dep in CRATES:
+ extern, env, extra_flags = CRATES[dep].build(self, out_dir, info['features'])
+ externs.append(extern)
+ self._dep_env[CRATES[dep].name()] = env
+ self._extra_flags += extra_flags
+
+ if os.path.isfile(output):
+ print ''
+ dbg('Skipping %s, already built (needed by: %s)' % (str(self), str(by)))
+ BUILT[str(self)] = str(by)
+ return ({'name':self.name(), 'lib':output}, self._env, self._extra_flags)
+
+ # build the environment for subcommands
+ tenv = dict(os.environ)
+ env = {}
+ env['PATH'] = tenv['PATH']
+ env['OUT_DIR'] = out_dir
+ env['TARGET'] = TARGET
+ env['HOST'] = HOST
+ env['NUM_JOBS'] = '1'
+ env['OPT_LEVEL'] = '0'
+ env['DEBUG'] = '0'
+ env['PROFILE'] = 'release'
+ env['CARGO_MANIFEST_DIR'] = self.dir()
+ env['CARGO_PKG_VERSION_MAJOR'] = self.version()['major']
+ env['CARGO_PKG_VERSION_MINOR'] = self.version()['minor']
+ env['CARGO_PKG_VERSION_PATCH'] = self.version()['patch']
+ pre = self.version()['prerelease']
+ if pre is None:
+ pre = ''
+ env['CARGO_PKG_VERSION_PRE'] = pre
+ env['CARGO_PKG_VERSION'] = str(self.version())
+ for f in features:
+ env['CARGO_FEATURE_%s' % f.upper().replace('-','_')] = '1'
+ for l,e in self._dep_env.iteritems():
+ for k,v in e.iteritems():
+ if type(v) is not str and type(v) is not unicode:
+ v = str(v)
+ env['DEP_%s_%s' % (l.upper(), v.upper())] = v
+
+ # create the builders, build scrips are first
+ cmds = []
+ for b in self._build:
+ v = str(self._version).replace('.','_')
+ cmd = ['rustc']
+ cmd.append(os.path.join(self._dir, b['path']))
+ cmd.append('--crate-name')
+ if b['type'] == 'lib':
+ b.setdefault('name', self.name())
+ cmd.append(b['name'].replace('-','_'))
+ cmd.append('--crate-type')
+ cmd.append('lib')
+ elif b['type'] == 'build_script':
+ cmd.append('build_script_%s' % b['name'].replace('-','_'))
+ cmd.append('--crate-type')
+ cmd.append('bin')
+ else:
+ cmd.append(b['name'].replace('-','_'))
+ cmd.append('--crate-type')
+ cmd.append('bin')
+
+ for f in features:
+ cmd.append('--cfg')
+ cmd.append('feature=\"%s\"' % f)
+
+ cmd.append('-C')
+ cmd.append('extra-filename=' + extra_filename)
+
+ cmd.append('--out-dir')
+ cmd.append('%s' % out_dir)
+ cmd.append('--emit=dep-info,link')
+ cmd.append('--target')
+ cmd.append(TARGET)
+ cmd.append('-L')
+ cmd.append('%s' % out_dir)
+ cmd.append('-L')
+ cmd.append('%s/lib' % out_dir)
+
+
+ # add in the flags from dependencies
+ cmd += self._extra_flags
+
+ for e in externs:
+ cmd.append('--extern')
+ cmd.append('%s=%s' % (e['name'].replace('-','_'), e['lib']))
+
+ # get the pkg key name
+ match = BNAME.match(b['name'])
+ if match is not None:
+ match = match.groupdict()['name'].replace('-','_')
+
+ # queue up the runner
+ cmds.append({'name':b['name'], 'env_key':match, 'cmd':RustcRunner(cmd, env)})
+
+ # queue up the build script runner
+ if b['type'] == 'build_script':
+ bcmd = os.path.join(out_dir, 'build_script_%s-%s' % (b['name'], v))
+ cmds.append({'name':b['name'], 'env_key':match, 'cmd':BuildScriptRunner(bcmd, env, self._dir)})
+
+ print ''
+ dbg('Building %s (needed by: %s)' % (str(self), str(by)))
+
+ bcmd = []
+ benv = {}
+ for c in cmds:
+ runner = c['cmd']
+
+ (c1, e1, e2) = runner(bcmd, benv)
+
+ if runner.returncode() != 0:
+ raise RuntimeError('build command failed: %s' % runner.returncode())
+
+ bcmd += c1
+ benv = dict(benv, **e1)
+
+ key = c['env_key']
+ for k,v in e2.iteritems():
+ self._env['DEP_%s_%s' % (key.upper(), k.upper())] = v
+
+ #dbg('XXX cmd: %s' % bcmd)
+ #dbg('XXX env: %s' % benv)
+ #dbg('XXX denv: %s' % self._env)
+ #print ''
+
+ BUILT[str(self)] = str(by)
+ return ({'name':self.name(), 'lib':output}, self._env, bcmd)
+
+
def dl_crate(url, depth=0):
    """Fetch *url* and return the response body as bytes.

    depth -- recursion depth for manual redirect handling; more than 10
             levels aborts the download.
    Appends the final URL to URLS_FILE when that global is set.
    Raises requests.HTTPError on a non-2xx response.
    """
    if depth > 10:
        raise RuntimeError('too many redirects')

    r = requests.get(url)
    try:
        dbg('%sconnected to %s...%s' % ((' ' * depth), r.url, r.status_code))

        # fail loudly on HTTP errors instead of silently returning the
        # error-page body as if it were crate content
        r.raise_for_status()

        if URLS_FILE is not None:
            with open(URLS_FILE, "a") as f:
                f.write(r.url + "\n")

        return r.content
    finally:
        r.close()
+
def dl_and_check_crate(tdir, name, ver, cksum):
    """Download (or fetch from the local cache) crate name-ver, verify its
    sha256 digest against *cksum*, and unpack it under *tdir*.

    Returns the directory the crate is unpacked into.
    """
    cname = '%s-%s' % (name, ver)
    cdir = os.path.join(tdir, cname)
    # a crate already registered in CRATES was unpacked previously
    if cname in CRATES:
        dbg('skipping %s...already downloaded' % cname)
        return cdir

    def check_checksum(buf):
        # NOTE: a mismatch is only logged, not fatal -- kept as best-effort
        if (cksum is not None):
            h = hashlib.sha256()
            h.update(buf)
            if h.hexdigest() == cksum:
                dbg('Checksum is good...%s' % cksum)
            else:
                dbg('Checksum is BAD (%s != %s)' % (h.hexdigest(), cksum))

    if CRATE_CACHE:
        cachename = os.path.join(CRATE_CACHE, "%s.crate" % (cname))
        if os.path.isfile(cachename):
            dbg('found crate in cache...%s.crate' % (cname))
            # BUGFIX: read the tarball in binary mode and close the handle
            # (was: buf = open(cachename).read())
            with open(cachename, 'rb') as f:
                buf = f.read()
            check_checksum(buf)
            with tarfile.open(fileobj=cStringIO.StringIO(buf)) as tf:
                dbg('unpacking result to %s...' % cdir)
                tf.extractall(path=tdir)
            return cdir

    if not os.path.isdir(cdir):
        dbg('Downloading %s source to %s' % (cname, cdir))
        dl = CRATE_API_DL % (name, ver)
        buf = dl_crate(dl)
        check_checksum(buf)

        if CRATE_CACHE:
            dbg("saving crate to %s/%s.crate..." % (CRATE_CACHE, cname))
            with open(os.path.join(CRATE_CACHE, "%s.crate" % (cname)), "wb") as f:
                f.write(buf)

        fbuf = cStringIO.StringIO(buf)
        with tarfile.open(fileobj=fbuf) as tf:
            dbg('unpacking result to %s...' % cdir)
            tf.extractall(path=tdir)

    return cdir
+
+
def find_downloaded_crate(tdir, name, svr):
    """Locate an already-unpacked crate directory for *name* whose version
    satisfies the semver range *svr*; used in --no-download mode.

    Returns the path of the best (highest) matching version, or raises
    RuntimeError when nothing suitable is on disk.
    """
    candidates = glob("%s/%s-[0-9]*" % (tdir, name))
    if not candidates:
        raise RuntimeError("crate does not exist and have --no-download: %s" % name)

    # First, grok the available versions.
    available = sorted([Semver(CVER.search(c).group(1)) for c in candidates])

    # Now filter the "suitable" versions based on our version range.
    suitable = [ver for ver in available if svr.compare(ver)]
    if not suitable:
        raise RuntimeError("unable to satisfy dependency %s %s from %s; try running without --no-download" % (name, svr, map(str, available)))

    # highest acceptable version wins
    return "%s/%s-%s" % (tdir, name, suitable[-1])
+
+
+def crate_info_from_toml(cdir):
+ try:
+ with open(os.path.join(cdir, 'Cargo.toml'), 'rb') as ctoml:
+ #import pdb; pdb.set_trace()
+ cfg = toml.load(ctoml)
+ build = []
+ p = cfg.get('package',cfg.get('project', {}))
+ name = p.get('name', None)
+ #if name == 'num_cpus':
+ # import pdb; pdb.set_trace()
+ ver = p.get('version', None)
+ if (name is None) or (ver is None):
+ import pdb; pdb.set_trace()
+ raise RuntimeError('invalid .toml file format')
+
+ # look for a "links" item
+ lnks = p.get('links', [])
+ if type(lnks) is not list:
+ lnks = [lnks]
+
+ # look for a "build" item
+ bf = p.get('build', None)
+
+ # if we have a 'links', there must be a 'build'
+ if len(lnks) > 0 and bf is None:
+ import pdb; pdb.set_trace()
+ raise RuntimeError('cargo requires a "build" item if "links" is specified')
+
+ # there can be target specific build script overrides
+ boverrides = {}
+ for lnk in lnks:
+ boverrides.update(cfg.get('target', {}).get(TARGET, {}).get(lnk, {}))
+
+ bmain = False
+ if bf is not None:
+ build.append({'type':'build_script', \
+ 'path':[ bf ], \
+ 'name':name.replace('-','_'), \
+ 'links': lnks, \
+ 'overrides': boverrides})
+
+ # look for libs array
+ libs = cfg.get('lib', [])
+ if type(libs) is not list:
+ libs = [libs]
+ for l in libs:
+ l['type'] = 'lib'
+ l['links'] = lnks
+ if l.get('path', None) is None:
+ l['path'] = [ 'lib.rs' ]
+ build.append(l)
+ bmain = True
+
+ # look for bins array
+ bins = cfg.get('bin', [])
+ if type(bins) is not list:
+ bins = [bins]
+ for b in bins:
+ if b.get('path', None) is None:
+ b['path'] = [ os.path.join('bin', '%s.rs' % b['name']), os.path.join('bin', 'main.rs'), '%s.rs' % b['name'], 'main.rs' ]
+ build.append({'type': 'bin', \
+ 'name':b['name'], \
+ 'path':b['path'], \
+ 'links': lnks})
+ bmain = True
+
+ # if no explicit directions on what to build, then add a default
+ if bmain == False:
+ build.append({'type':'lib', 'path':'lib.rs', 'name':name.replace('-','_')})
+
+ for b in build:
+ # make sure the path is a list of possible paths
+ if type(b['path']) is not list:
+ b['path'] = [ b['path'] ]
+ bin_paths = []
+ for p in b['path']:
+ bin_paths.append(os.path.join(cdir, p))
+ bin_paths.append(os.path.join(cdir, 'src', p))
+
+ found_path = None
+ for p in bin_paths:
+ if os.path.isfile(p):
+ found_path = p
+ break
+
+ if found_path == None:
+ import pdb; pdb.set_trace()
+ raise RuntimeError('could not find %s to build in %s', (build, cdir))
+ else:
+ b['path'] = found_path
+
+ d = cfg.get('build-dependencies', {})
+ d.update(cfg.get('dependencies', {}))
+ d.update(cfg.get('target', {}).get(TARGET, {}).get('dependencies', {}))
+ deps = []
+ for k,v in d.iteritems():
+ if type(v) is not dict:
+ deps.append({'name':k, 'req': v})
+ elif 'path' in v:
+ if v.get('version', None) is None:
+ deps.append({'name':k, 'path':os.path.join(cdir, v['path']), 'local':True, 'req':0})
+ else:
+ opts = v.get('optional',False)
+ ftrs = v.get('features',[])
+ deps.append({'name':k, 'path': v['path'], 'req':v['version'], 'features':ftrs, 'optional':opts})
+ else:
+ opts = v.get('optional',False)
+ ftrs = v.get('features',[])
+ deps.append({'name':k, 'req':v['version'], 'features':ftrs, 'optional':opts})
+
+ return (name, ver, deps, build)
+
+ except Exception, e:
+ dbg('failed to load toml file for: %s (%s)' % (cdir, str(e)))
+ import pdb; pdb.set_trace()
+
+ return (None, None, [], 'lib.rs')
+
+
def crate_info_from_index(idir, name, svr):
    """Look up crate *name* in the local registry index checkout *idir* and
    return (name, ver, deps, ftrs, cksum) for the highest version matching
    the semver range *svr*.

    Raises RuntimeError when no indexed version satisfies the range.
    """
    # index path layout per the crates.io registry spec:
    # 1/<name>, 2/<name>, 3/<c>/<name>, <ab>/<cd>/<name>
    if len(name) == 1:
        ipath = os.path.join(idir, '1', name)
    elif len(name) == 2:
        ipath = os.path.join(idir, '2', name)
    elif len(name) == 3:
        ipath = os.path.join(idir, '3', name[0:1], name)
    else:
        ipath = os.path.join(idir, name[0:2], name[2:4], name)

    dbg('opening crate info: %s' % ipath)
    # one JSON record per published version, one per line
    dep_infos = []
    with open(ipath, 'rb') as fin:
        lines = fin.readlines()
        for l in lines:
            dep_infos.append(json.loads(l))

    passed = {}
    for info in dep_infos:
        if 'vers' not in info:
            continue
        sv = Semver(info['vers'])
        if svr.compare(sv):
            passed[sv] = info

    # fail with a useful message rather than an IndexError from pop() below
    if not passed:
        raise RuntimeError('no version of %s in the index satisfies %s' % (name, str(svr)))

    keys = sorted(passed.iterkeys())
    best_match = keys.pop()
    dbg('best match is %s-%s' % (name, best_match))
    best_info = passed[best_match]
    name = best_info.get('name', None)
    ver = best_info.get('vers', None)
    deps = best_info.get('deps', [])
    ftrs = best_info.get('features', [])
    cksum = best_info.get('cksum', None)

    # only include deps without a 'target' or ones with matching 'target'
    deps = [x for x in deps if x.get('target', TARGET) == TARGET]

    return (name, ver, deps, ftrs, cksum)
+
+
def find_crate_by_name_and_semver(name, svr):
    """Return the first known crate called *name* whose version satisfies the
    semver range *svr*, searching resolved crates (CRATES) before the
    still-unresolved queue (UNRESOLVED); None when no match exists."""
    for pool in (CRATES.itervalues(), UNRESOLVED):
        for candidate in pool:
            if candidate.name() == name and svr.compare(candidate.version()):
                return candidate
    return None
+
+
def args_parser():
    """Build and return the argparse parser for the bootstrap tool.

    Note: --target-dir's default eagerly creates a fresh temporary
    directory every time the parser is constructed.
    """
    p = argparse.ArgumentParser(description='Cargo Bootstrap Tool')
    p.add_argument('--cargo-root', type=str, default=os.getcwd(),
                   help="specify the cargo repo root path")
    p.add_argument('--target-dir', type=str, default=tempfile.mkdtemp(),
                   help="specify the path for storing built dependency libs")
    p.add_argument('--crate-index', type=str, default=None,
                   help="path to where the crate index should be cloned")
    p.add_argument('--target', type=str, default=None,
                   help="target triple for machine we're bootstrapping for")
    p.add_argument('--host', type=str, default=None,
                   help="host triple for machine we're bootstrapping on")
    p.add_argument('--no-clone', action='store_true',
                   help="skip cloning crates index, --crate-index must point to an existing clone of the crates index")
    p.add_argument('--no-git', action='store_true',
                   help="don't assume that the crates index and cargo root are git repos; implies --no-clone")
    p.add_argument('--no-clean', action='store_true',
                   help="don't delete the target dir and crate index")
    p.add_argument('--download', action='store_true',
                   help="only download the crates needed to build cargo")
    p.add_argument('--no-download', action='store_true',
                   help="don't download any crates (fail if any do not exist)")
    p.add_argument('--graph', action='store_true',
                   help="output a dot graph of the dependencies")
    p.add_argument('--urls-file', type=str, default=None,
                   help="file to write crate URLs to")
    p.add_argument('--blacklist', type=str, default="",
                   help="space-separated list of crates to skip")
    p.add_argument('--include-optional', type=str, default="",
                   help="space-separated list of optional crates to include")
    p.add_argument('--patchdir', type=str,
                   help="directory with patches to apply after downloading crates. organized by crate/NNNN-description.patch")
    p.add_argument('--crate-cache', type=str,
                   help="download and save crates to crate cache (directory)")
    return p
+
+
def open_or_clone_repo(rdir, rurl, no_clone):
    """Open the git repo at *rdir*, cloning it from *rurl* if absent.

    With no_clone=True the bare path is returned instead of cloning.
    Returns the repo object (or path), or None when nothing usable exists.
    """
    try:
        repo = git.open_repo(rdir)
        return repo
    # narrowed from a bare except: so Ctrl-C / SystemExit still propagate
    except Exception:
        repo = None

    if repo is None and no_clone is False:
        dbg('Cloning %s to %s' % (rurl, rdir))
        return git.clone(rurl, rdir)

    if repo is None and no_clone is True:
        # caller promised the directory is already a usable checkout
        repo = rdir

    return repo
+
+
def patch_crates(targetdir, patchdir):
    """
    Apply patches in patchdir to downloaded crates
    patchdir organization:

    <patchdir>/
        <crate>/
            <patch>.patch
    """
    for patch in glob(os.path.join(patchdir, '*', '*.patch')):
        crateid = os.path.basename(os.path.dirname(patch))
        m = re.match(r'^([A-Za-z0-9_-]+?)(?:-([\d.]+))?$', crateid)
        cratename = m.group(1) if m else crateid
        # a versioned directory name targets that exact crate dir; a bare
        # name targets every unpacked version of the crate
        if cratename != crateid:
            pattern = crateid
        else:
            pattern = '%s-*' % (cratename)
        patchpath = os.path.abspath(patch)
        for cratedir in glob(os.path.join(targetdir, pattern)):
            # dry-run first: only apply when the patch still fits cleanly
            rc = subprocess.Popen(['patch', '--dry-run', '-s', '-f', '-F', '10', '-p1', '-i', patchpath], cwd=cratedir).wait()
            if rc == 0:
                dbg("patching %s with patch %s" % (os.path.basename(cratedir), os.path.basename(patch)))
                rc = subprocess.Popen(['patch', '-s', '-F', '10', '-p1', '-i', patchpath], cwd=cratedir).wait()
                if rc != 0:
                    dbg("%s: failed to apply %s (rc=%s)" % (os.path.basename(cratedir), os.path.basename(patch), rc))
            else:
                dbg("%s: %s does not apply (rc=%s)" % (os.path.basename(cratedir), os.path.basename(patch), rc))
+
+
+if __name__ == "__main__":
+ try:
+ # parse args
+ parser = args_parser()
+ args = parser.parse_args()
+
+ # clone the cargo index
+ if args.crate_index is None:
+ args.crate_index = os.path.normpath(os.path.join(args.target_dir, 'index'))
+ dbg('cargo: %s, target: %s, index: %s' % \
+ (args.cargo_root, args.target_dir, args.crate_index))
+
+ TARGET = args.target
+ HOST = args.host
+ URLS_FILE = args.urls_file
+ BLACKLIST = args.blacklist.split()
+ INCLUDE_OPTIONAL = args.include_optional.split()
+ if args.crate_cache and os.path.isdir(args.crate_cache):
+ CRATE_CACHE = os.path.abspath(args.crate_cache)
+
+ if not args.no_git:
+ index = open_or_clone_repo(args.crate_index, CRATES_INDEX, args.no_clone)
+ cargo = open_or_clone_repo(args.cargo_root, CARGO_REPO, args.no_clone)
+
+ if index is None:
+ raise RuntimeError('You must have a local clone of the crates index, ' \
+ 'omit --no-clone to allow this script to clone it for ' \
+ 'you, or pass --no-git to bypass this check.')
+ if cargo is None:
+ raise RuntimeError('You must have a local clone of the cargo repo ' \
+ 'so that this script can read the cargo toml file.')
+
+ if TARGET is None:
+ raise RuntimeError('You must specify the target triple of this machine')
+ if HOST is None:
+ HOST = TARGET
+
+ except Exception, e:
+ frame = inspect.trace()[-1]
+ print >> sys.stderr, "\nException:\n from %s, line %d:\n %s\n" % (frame[1], frame[2], e)
+ parser.print_help()
+ if not args.no_clean:
+ print "cleaning up %s" % (args.target_dir)
+ shutil.rmtree(args.target_dir)
+ sys.exit(1)
+
+ try:
+
+ # load cargo deps
+ name, ver, deps, build = crate_info_from_toml(args.cargo_root)
+ cargo_crate = Crate(name, ver, deps, args.cargo_root, build)
+ UNRESOLVED.append(cargo_crate)
+
+ if args.graph:
+ GRAPH = open(os.path.join(args.target_dir, 'deps.dot'), 'wb')
+ print >> GRAPH, "digraph %s {" % name
+
+ # resolve and download all of the dependencies
+ print ''
+ print '===================================='
+ print '===== DOWNLOADING DEPENDENCIES ====='
+ print '===================================='
+ while len(UNRESOLVED) > 0:
+ crate = UNRESOLVED.pop(0)
+ crate.resolve(args.target_dir, args.crate_index, args.no_download, GRAPH)
+
+ if args.graph:
+ print >> GRAPH, "}"
+ GRAPH.close()
+
+ if args.patchdir:
+ print ''
+ print '========================'
+ print '===== PATCH CRATES ====='
+ print '========================'
+ patch_crates(args.target_dir, args.patchdir)
+
+ if args.download:
+ print "done downloading..."
+ sys.exit(0)
+
+ # build cargo
+ print ''
+ print '=========================='
+ print '===== BUILDING CARGO ====='
+ print '=========================='
+ cargo_crate.build('bootstrap.py', args.target_dir)
+
+ # cleanup
+ if not args.no_clean:
+ print "cleaning up %s..." % (args.target_dir)
+ shutil.rmtree(args.target_dir)
+ print "done"
+
+ except Exception, e:
+ frame = inspect.trace()[-1]
+ print >> sys.stderr, "\nException:\n from %s, line %d:\n %s\n" % (frame[1], frame[2], e)
+ if not args.no_clean:
+ print "cleaning up %s..." % (args.target_dir)
+ shutil.rmtree(args.target_dir)
+ sys.exit(1)
+
+
--- /dev/null
+target/doc
--- /dev/null
+src/etc/cargo.bashcomp.sh
--- /dev/null
+src/etc/man/cargo-*.1
+src/etc/man/cargo.1
--- /dev/null
+cargo (0.31.0-3) unstable; urgency=medium
+
+ * Tweak the cargo wrapper script to be more robust.
+
+ -- Ximin Luo <infinity0@debian.org> Fri, 09 Nov 2018 02:56:54 -0800
+
+cargo (0.31.0-2) unstable; urgency=medium
+
+ * Fix test failure on some architectures due to hash ordering.
+ * Bump libgit2 dependency version constraint. (Closes: #899038)
+
+ -- Ximin Luo <infinity0@debian.org> Sun, 04 Nov 2018 11:32:26 -0800
+
+cargo (0.31.0-1) unstable; urgency=medium
+
+ * Don't run tests when doing arch-indep build.
+ * Fix package include/exclude tests.
+
+ -- Ximin Luo <infinity0@debian.org> Sun, 04 Nov 2018 10:10:24 -0800
+
+cargo (0.31.0-1~exp1) experimental; urgency=medium
+
+ * New upstream release.
+ * Simplify build scripts and add a Debian wrapper for cargo.
+
+ -- Ximin Luo <infinity0@debian.org> Sun, 04 Nov 2018 03:50:33 -0800
+
+cargo (0.30.0-1) unstable; urgency=medium
+
+ * Upload to unstable.
+
+ -- Vasudev Kamath <vasudev@copyninja.info> Sun, 21 Oct 2018 11:59:11 +0530
+
+cargo (0.30.0-1~exp1) experimental; urgency=medium
+
+ [ upstream ]
+ * Cargo can silently fix some bad lockfiles. You can use --locked to
+ disable this behaviour.
+ * cargo-install will now allow you to cross compile an install using
+ --target.
+ * Added the cargo-fix subcommand to automatically move project code from
+ 2015 edition to 2018.
+
+ [ Vasudev Kamath ]
+ * Refresh patch 2004 for new release.
+ * Add openssl crates fuzz,test doc and apps file to unsuspicious list.
+ * debian/patches:
+ + Drop patch 0774e97da3894f07ed5b6f7db175027a9bc4718b.patch for adding
+ cross compile support. Its merged upstream.
+ + Refresh patch 2001 to newer version of libgit2-sys.
+ + Refresh patch 2003 to newer version of libssh2-sys.
+ + Drop patch 1001 which is merged upstream.
+ + Refresh patch 2005 and 2007 to remove fuzz.
+ + Refresh patch 2002 with newer release.
+ * debian/control:
+ + Mark package compliance with Debian policy 4.2.1.
+ * Update copyright information for new release.
+ * debian/rules:
+ + Use DEB_BUILD_OPTIONS to disable tests on powerpc and powerpcspe
+ architecture.
+ Closes: bug#908961, Thanks to Helmut Grohne.
+
+ -- Vasudev Kamath <vasudev@copyninja.info> Sun, 07 Oct 2018 10:39:49 +0530
+
+cargo (0.29.0-1) unstable; urgency=medium
+
+ * Merge changes of 0.28.0-2, which was missed in first release of 0.29.0
+ * Upload to unstable.
+ * Refresh patch for `install --target` feature for release 0.29.0
+
+ -- Vasudev Kamath <vasudev@copyninja.info> Mon, 13 Aug 2018 20:37:13 +0530
+
+cargo (0.29.0-1~exp1) experimental; urgency=medium
+ [ upstream ]
+ * Cargo will now no longer allow you to publish crates with build
+ scripts that modify the src directory. The src directory in a crate
+ should be considered to be immutable.
+
+ [ Vasudev Kamath ]
+ * Update unsuspicious text for new release 0.29.0
+ * Change pattern for embedded zlib
+ * debian/patches:
+ + Refresh patches 2001, 2002, 2003, 2004 to work with new release and
+ new vendor files.
+ + Add patch 1001 to fix deprecated warnings on usage of "causes" from
+ failure crate.
+ * Update copyright information for new release.
+ * Make package compliant with policy 4.2.0.
+
+ -- Vasudev Kamath <vasudev@copyninja.info> Sun, 12 Aug 2018 22:25:42 +0530
+
+cargo (0.28.0-2) unstable; urgency=medium
+
+ * Re-add `install --target` functionality, needed by dh-cargo.
+
+ -- Ximin Luo <infinity0@debian.org> Sun, 12 Aug 2018 00:15:23 -0700
+
+cargo (0.28.0-1) unstable; urgency=medium
+
+ * Upload to unstable
+
+ -- Vasudev Kamath <vasudev@copyninja.info> Tue, 07 Aug 2018 22:14:46 +0530
+
+cargo (0.28.0-1~exp3) experimental; urgency=medium
+
+ * Disable incremental compilation tests on sparc64 architecture.
+ Closes: bug#905623, Thanks to John Paul Adrian Glaubitz.
+
+ -- Vasudev Kamath <vasudev@copyninja.info> Tue, 07 Aug 2018 22:01:31 +0530
+
+cargo (0.28.0-1~exp2) experimental; urgency=medium
+
+ * Add patch 2005 to disable fetch_platform_specific_dependencies unit
+ tests. Upstream issue #5864.
+
+ -- Vasudev Kamath <vasudev@copyninja.info> Sun, 05 Aug 2018 17:17:00 +0530
+
+cargo (0.28.0-1~exp1) experimental; urgency=medium
+
+ [ upstream ]
+ * cargo-metadata now includes authors, categories, keywords, readme, and
+ repository fields.
+ * cargo-metadata now includes a package's metadata table.
+ * Added the --target-dir optional argument. This allows you to specify a
+ different directory than target for placing compilation artifacts.
+ * Cargo will be adding automatic target inference for binaries,
+ benchmarks, examples, and tests in the Rust 2018 edition. If your
+ project specifies specific targets e.g. using [[bin]] and have other
+ binaries in locations where cargo would infer a binary, Cargo will
+ produce a warning. You can disable this feature ahead of time by
+ setting any of the following autobins, autobenches, autoexamples,
+ autotests to false.
+ * Cargo will now cache compiler information. This can be disabled by
+ setting CARGO_CACHE_RUSTC_INFO=0 in your environment.
+
+ [ Sylvestre Ledru ]
+ * Update of the alioth ML address.
+
+ [ Vasudev Kamath ]
+ * Update README.source to mention preferred way of upload.
+ * Update unsuspicious files for new release.
+ * debian/patches:
+ + Refresh patch 2007 for new release.
+ + Refresh patch 2001 for new version of libgit2-sys
+ + Drop patch 2008 as its merged upstream.
+ + Add patch 2003 for forcing use of libssh2 from system, which was now
+ controlled by environment variable.
+ * debian/copyright:
+ + Update copyright information for new release.
+ * Mark package compliance with Debian Policy 4.1.5.
+
+ -- Vasudev Kamath <vasudev@copyninja.info> Sun, 29 Jul 2018 22:07:36 +0530
+
+cargo (0.27.0-2) unstable; urgency=medium
+
+ * Support cross-compile install (upstream PR #5614).
+
+ -- Ximin Luo <infinity0@debian.org> Wed, 06 Jun 2018 22:35:30 -0700
+
+cargo (0.27.0-1) unstable; urgency=medium
+
+ * Upload to unstable.
+
+ -- Vasudev Kamath <vasudev@copyninja.info> Sun, 03 Jun 2018 20:42:13 +0530
+
+cargo (0.27.0-1~exp1) experimental; urgency=medium
+
+ [ upstream ]
+ * Cargo will now output path to custom commands when -v is passed with
+ --list.
+ * Cargo binary version is now same as the Rust version.
+ * Cargo.lock files are now included in published crates.
+
+ [ Vasudev Kamath ]
+ * Update patch 2004 for the new release.
+ * Add files from clap and vec_map to unsuspicious list.
+ * debian/patches:
+ + Update path to libgit2-sys in patch 2001.
+ + Adjust file name and paths to test files to be patched in patch
+ 2002.
+ + Drop all unused imports and comment out functions not just drop
+ #[test] in patch 2002.
+ + Drop patch 1001 as its now part of new cargo release.
+ + Refresh patch 2007.
+ * debian/copyright:
+ + Update copyright information for new vendored crates.
+
+ -- Vasudev Kamath <vasudev@copyninja.info> Sat, 02 Jun 2018 15:10:38 +0530
+
+cargo (0.26.0-1) unstable; urgency=medium
+
+ * Upload to unstable.
+
+ -- Vasudev Kamath <vasudev@copyninja.info> Tue, 01 May 2018 13:02:05 +0530
+
+cargo (0.26.0-1~exp1) experimental; urgency=medium
+
+ [upstream]
+ * cargo new now defaults to create binary crate instead of library
+ crate.
+ * cargo new will no longer name crates with name starting with rust- or
+ ending with -rs.
+ * cargo doc is faster as it uses cargo check instead of full rebuild.
+
+ [Vasudev Kamath]
+ * Refresh the patch 2004 against newer Cargo.toml
+ * Mark package compliance with Debian Policy 4.1.4
+ * debian/patches:
+ + Drop patch 2003 and 2005, the doc should be built from source using
+ mdbook.
+ + Drop patch 2006, the wasm32 related test seems to be dropped
+ upstream.
+ + Drop patch 1002, merged upstream.
+ + Add tests/generate_lock_file.rs to patch 2002 to disable
+ no_index_update test, this tries to access network.
+ + Refresh patch 1001 with new upstream release.
+ * debian/rules: disable execution of src/ci/dox.sh, this script is no
+ longer present in new release.
+ * debian/copyright:
+ + Add copyright for humantime crate.
+ + Add copyright for lazycell crate.
+ + Add copyright for quick-error crate
+ + Add copyright for proc-macro2 crate.
+
+ -- Vasudev Kamath <vasudev@copyninja.info> Sat, 21 Apr 2018 20:59:39 +0530
+
+cargo (0.25.0-3) unstable; urgency=medium
+
+ [ Ximin Luo ]
+ * Update Vcs-* fields to salsa
+
+ [ Vasudev Kamath ]
+ * Add patch to prevent incremental builds on sparc64.
+ Closes: bug#895300, Thanks to John Paul Adrian Glaubitz.
+
+ -- Vasudev Kamath <vasudev@copyninja.info> Sun, 15 Apr 2018 12:28:29 +0530
+
+cargo (0.25.0-2) unstable; urgency=medium
+
+ [ Ximin Luo ]
+ * Depend on rustc 1.24 or later.
+ * Backport a patch to not require dev-dependencies when not needed.
+
+ -- Vasudev Kamath <vasudev@copyninja.info> Thu, 22 Mar 2018 20:08:17 +0530
+
+cargo (0.25.0-1) unstable; urgency=medium
+
+ * Upload to unstable.
+
+ -- Vasudev Kamath <vasudev@copyninja.info> Fri, 09 Mar 2018 21:09:38 +0530
+
+cargo (0.25.0-1~exp2) experimental; urgency=medium
+
+ * Disable test running on powerpc and powerpcspe for now. Will be
+ enabled once issue in test suites are fixed.
+ Request from John Paul Adrian Glaubitz in IRC.
+
+ -- Vasudev Kamath <vasudev@copyninja.info> Sun, 25 Feb 2018 10:27:23 +0530
+
+cargo (0.25.0-1~exp1) experimental; urgency=medium
+
+ [upstream]
+ * Added a workspace.default-members config that overrides implied --all
+ in virtual workspaces.
+ * Enable incremental by default on development builds.
+
+ [ Vasudev Kamath ]
+ * debian/vendor-tarball-filter.txt: Filter out git test data from
+ libgit2-sys crate.
+ * debian/vendor-tarball-unsuspicious.txt: Audit unsuspicious files for
+ 0.25.0 release.
+ * debian/make_orig_multi.sh: Make sure we take filter and unsuspicious
+ texts from debian folder.
+ * debian/patches:
+ + Drop patch 0001 it is merged upstream.
+ + Fix the typo in description of patch 2006.
+ * Drop source/lintian-override. README under patches directory is no
+ longer considered as a patch file by lintian.
+ * debian/copyright:
+ + Drop unused vendor crates copyright information.
+ + Add new crates copyright information to copyright.
+
+ -- Vasudev Kamath <vasudev@copyninja.info> Sat, 24 Feb 2018 14:43:48 +0530
+
+cargo (0.24.0-1) unstable; urgency=medium
+
+ * Upload to unstable.
+
+ -- Ximin Luo <infinity0@debian.org> Sat, 27 Jan 2018 10:41:06 +0100
+
+cargo (0.24.0-1~exp1) experimental; urgency=medium
+
+ [upstream]
+ * Supports uninstallation of multiple crates.
+ * `cargo check` unit testing.
+ * Install a specific version using `cargo install --version`
+
+ [ Vasudev Kamath ]
+ * Update vendor-tarball-unsuspicious.txt vendor-tarball-filter.txt for
+ new upstream release.
+ * debian/control:
+ + Mark package compliance with Debian Policy 4.1.3.
+ * debian/patches:
+ + Update patch 2001 to work with libgit2-sys-0.6.19.
+ + Update 1002 patch to drop url crate specific hunk as its merged
+ upstream.
+ + Add patch 0001 to fix bad_git_dependency test failure.
+ * debian/copyright:
+ + Add new vendor crates to copyright.
+ + Track rustfmt.toml in top level copyright section.
+ * Add lintian-override for ignoring README from
+ patch-file-present-but-not-mentioned-in-series tag.
+
+ -- Vasudev Kamath <vasudev@copyninja.info> Thu, 25 Jan 2018 14:57:43 +0530
+
+cargo (0.23.0-1) unstable; urgency=medium
+
+ * Upload to unstable.
+ * Mark package as compliant with Debian Policy 4.1.2.
+ No change required to source.
+
+ -- Vasudev Kamath <vasudev@copyninja.info> Sun, 10 Dec 2017 15:33:55 +0530
+
+cargo (0.23.0-1~exp1) experimental; urgency=medium
+
+ * [upstream]
+ + Cargo will now build multi file examples in subdirectories of the
+ examples folder that have a main.rs file.
+ + Changed [root] to [package] in Cargo.lock. Old format packages will
+ continue to work and can be updated using cargo update.
+ + Supports vendoring git repositories.
+ * Refresh patch 2004 for new release.
+ * Audit logo.svg file from termion crate.
+ * debian/patches:
+ + Drop patch 1001, its merged upstream.
+ + Refresh patch 2002 with new upstream changes.
+ + Refresh patch 2001 with newer libgit2-sys changes.
+ + Add patch 2005 to prevent executing non-existing mdbook command
+ during build.
+ + Move part of typo fix for url crate to patch 1001 to 1002. url crate
+ is not updated in new cargo release.
+ * debian/copyright:
+ + Remove copyright for gcc crate.
+ + Add copyright information for cc, commoncrypto, crypto-hash,
+ redox_syscall. redox_termios and termion crate.
+ + Add CONTRIBUTING.md to top Files section.
+ + Drop magnet-sys from copyright.
+
+
+ -- Vasudev Kamath <vasudev@copyninja.info> Tue, 05 Dec 2017 22:03:49 +0530
+
+cargo (0.22.0-1~exp1) experimental; urgency=medium
+
+ * New upstream release.
+ + Can now install multiple crates with cargo install.
+ + cargo commands inside a virtual workspace will now implicitly pass
+ --all.
+ + Added [patch] section to Cargo.toml to handle prepublication
+ dependencies RFC 1969.
+ + include and exclude fields in Cargo.toml now accept gitignore like
+ patterns.
+ + Added --all-target option.
+ + Using required dependencies as a feature is now deprecated and emits
+ a warning.
+ * Put upstream PR url for patch 1001.
+ * Add conv crate file to unsuspicious files.
+ * debian/patches:
+ + Refresh patches 1001, 2002 and 2004 with new upstream release.
+ + Fix typo in cargo search command and related tests.
+ * debian/control:
+ + Mark package compliance with Debian Policy 4.1.1.
+ + Mark priority for package as optional from extra. Priority extra is
+ deprecated from Debian Policy 4.0.1.
+ * debian/copyright:
+ + Add newly added vendor copyright information.
+
+ -- Vasudev Kamath <vasudev@copyninja.info> Sun, 29 Oct 2017 19:50:42 +0530
+
+cargo (0.21.1-2) unstable; urgency=medium
+
+ * Upload to unstable.
+
+ -- Ximin Luo <infinity0@debian.org> Wed, 25 Oct 2017 23:30:46 +0200
+
+cargo (0.21.1-1) experimental; urgency=medium
+
+ * debian/control:
+ + Add myself as uploader for cargo package.
+ + Mark package compliance with Debian Policy 4.1.0.
+ * Mark tables.rs from unicode-normalization as unsuspicious.
+ * Ignore sublime workspace file from hex crate.
+ * debian/copyright:
+ + Drop wildcards representing the old crates under vendor folder.
+ + Add copyright information for newer crates under vendor
+ + Add ARCHITECTURE.* to copyright.
+ * debian/patches:
+ + Rename patches to follow patch naming guidelines mentioned in
+ debian/patches/README.
+ + Add patch 1001 to fix spelling errors in cargo output messages.
+ + Make patch 2003 DEP-3 compliant.
+ + Adjust make_orig_multi.sh to renamed clean-cargo-deps.patch
+
+ -- Vasudev Kamath <vasudev@copyninja.info> Sat, 23 Sep 2017 10:41:07 +0530
+
+cargo (0.20.0-2) unstable; urgency=medium
+
+ * Work around #865549, fixes FTBFS on ppc64el.
+
+ -- Ximin Luo <infinity0@debian.org> Thu, 14 Sep 2017 15:47:55 +0200
+
+cargo (0.20.0-1) unstable; urgency=medium
+
+ * New upstream release.
+ * Fix cross-compiling declarations, Multi-Arch: foreign => allowed
+ * Un-embed libgit2 0.25.1 again. (Closes: #860990)
+ * Update to latest Standards-Version; no changes required.
+
+ -- Ximin Luo <infinity0@debian.org> Thu, 24 Aug 2017 19:13:00 +0200
+
+cargo (0.17.0-2) unstable; urgency=medium
+
+ * Re-embed libgit2 0.25.1 due to the Debian testing freeze. It will be
+ removed again after the freeze is over, when libgit2 0.25.1 can again
+ enter Debian unstable.
+
+ -- Ximin Luo <infinity0@debian.org> Wed, 03 May 2017 16:56:03 +0200
+
+cargo (0.17.0-1) unstable; urgency=medium
+
+ * Upload to unstable so we have something to build rustc 1.17.0 with.
+
+ -- Ximin Luo <infinity0@debian.org> Wed, 03 May 2017 11:24:08 +0200
+
+cargo (0.17.0-1~exp3) experimental; urgency=medium
+
+ * Add git to Build-Depends to fix FTBFS.
+ * Mention cross-compiling in the previous changelog entry.
+
+ -- Ximin Luo <infinity0@debian.org> Tue, 02 May 2017 13:18:53 +0200
+
+cargo (0.17.0-1~exp2) experimental; urgency=medium
+
+ * Bring in some changes from Ubuntu.
+ - Rename deps/ to vendor/ as that's what upstream uses, and update
+ other files with the new paths too.
+ - Remove cargo-vendor-unpack since we no longer need to postprocess
+ cargo-vendor output in that way.
+ * Document that bootstrap.py probably doesn't work now.
+ * Include /usr/share/rustc/architecture.mk in d/rules instead of duplicating
+ awkward arch-dependent Makefile snippets.
+ * Don't embed libgit2, add a versioned B-D to libgit2-dev.
+ * Add support for cross-compiling bootstrap.
+
+ -- Ximin Luo <infinity0@debian.org> Mon, 01 May 2017 20:49:45 +0200
+
+cargo (0.17.0-1~exp1) experimental; urgency=medium
+
+ * New upstream release. (Closes: #851089, #859312)
+
+ -- Ximin Luo <infinity0@debian.org> Thu, 20 Apr 2017 03:16:04 +0200
+
+cargo (0.15.0~dev-1) unstable; urgency=medium
+
+ * New upstream snapshot (git 1877f59d6b2cb057f7ef6c6b34b926fd96a683c1)
+ - Compatible with OpenSSL 1.1.0 (Closes: #828259)
+ * rules: use new link-arg options (Closes: #834980, #837433)
+ - Requires rustc >= 1.13
+
+ -- Luca Bruno <lucab@debian.org> Fri, 25 Nov 2016 23:30:03 +0000
+
+cargo (0.11.0-2) unstable; urgency=high
+
+ * debian/rules: fix RUSTFLAGS quoting (Closes: #834980)
+
+ -- Luca Bruno <lucab@debian.org> Sun, 21 Aug 2016 18:21:21 +0000
+
+cargo (0.11.0-1) unstable; urgency=medium
+
+ [ Daniele Tricoli ]
+ * New upstream release. (Closes: #826938)
+ - Update deps tarball.
+ - Refresh patches.
+ - Drop clean-win-crates.patch since time crate is not a dependency
+ anymore.
+ - Drop deps-url-fix-toml.patch since merged upstream.
+
+ [ Luca Bruno ]
+ * Install subcommand manpages too
+ * Move to a bootstrapped (stage1) build by default
+
+ -- Luca Bruno <lucab@debian.org> Mon, 15 Aug 2016 13:59:04 +0000
+
+cargo (0.9.0-1) unstable; urgency=medium
+
+ * New upstream version
+ + Fix deprecation errors (Closes: #822178, #823652)
+ + Updated deps tarball
+ + Refreshed patches
+
+ -- Luca Bruno <lucab@debian.org> Sat, 07 May 2016 17:56:28 +0200
+
+cargo (0.8.0-2) unstable; urgency=low
+
+ * Prefer libcurl4-gnutls-dev for building (Closes: #819831)
+
+ -- Luca Bruno <lucab@debian.org> Tue, 05 Apr 2016 22:23:44 +0200
+
+cargo (0.8.0-1) unstable; urgency=medium
+
+ * New upstream version 0.8.0
+ + Updated deps tarball
+ + Refreshed patches
+ * cargo: removed unused lintian overrides
+
+ -- Luca Bruno <lucab@debian.org> Sat, 05 Mar 2016 22:39:06 +0100
+
+cargo (0.7.0-2) unstable; urgency=medium
+
+ * Bump standards version
+ * cargo:
+ + add a new stage2 profile
+ + preserve original Cargo.lock for clean
+ + clean environment to allow multiple builds
+ * cargo-doc:
+ + update docbase paths after package split
+ + do not reference remote jquery
+ + do not build under nodoc profile
+ * control: update build-deps for build-profiles
+
+ -- Luca Bruno <lucab@debian.org> Thu, 03 Mar 2016 22:18:32 +0100
+
+cargo (0.7.0-1) unstable; urgency=medium
+
+ * New upstream version 0.7.0
+ + Updated deps tarball and repack filter
+ + Refreshed patches
+ * Fixes to debian packaging
+ + Updated deps repack script
+ + index packing: use the same TAR format as cargo
+ + rules: ask cargo to build verbosely
+ * Update README.source to match current packaging
+
+ -- Luca Bruno <lucab@debian.org> Sun, 14 Feb 2016 16:12:55 +0100
+
+cargo (0.6.0-2) unstable; urgency=medium
+
+ * Introduce a cargo-doc package
+ * Fails to build when wget is installed. Force curl
+ (Closes: #809298)
+ * Add the missing VCS- fields
+
+ -- Sylvestre Ledru <sylvestre@debian.org> Tue, 26 Jan 2016 13:01:16 +0100
+
+cargo (0.6.0-1) unstable; urgency=medium
+
+ * New upstream version 0.6.0
+ + Updated deps tarball
+ + Not shipping a registry index anymore
+ * Refreshed bootstrap.py script
+ + Skip optional dependencies in stage0
+ * Added some crude pack/unpack helpers
+ * copyright: cleaned up unused entries
+ * rules: major update for new 0.6.0 bootstrap
+
+ -- Luca Bruno <lucab@debian.org> Fri, 04 Dec 2015 00:42:55 +0100
+
+cargo (0.3.0-2) unstable; urgency=medium
+
+ * Fix install target, removing arch-specific path
+
+ -- Luca Bruno <lucab@debian.org> Sat, 14 Nov 2015 19:46:57 +0100
+
+cargo (0.3.0-1) unstable; urgency=medium
+
+ * Team upload.
+ * First upload to unstable.
+ * Update gbp.conf according to git repo structure.
+ * patches: downgrade missing_docs lints to simple warnings
+ to avoid build failures on newer rustc.
+
+ -- Luca Bruno <lucab@debian.org> Sat, 14 Nov 2015 17:29:15 +0100
+
+cargo (0.3.0-0~exp1) experimental; urgency=low
+
+ * Team upload.
+ * Initial Debian release. (Closes: #786432)
+
+ -- Luca Bruno <lucab@debian.org> Tue, 11 Aug 2015 20:15:54 +0200
--- /dev/null
+Source: cargo
+Section: devel
+Maintainer: Rust Maintainers <pkg-rust-maintainers@alioth-lists.debian.net>
+Uploaders: Luca Bruno <lucab@debian.org>,
+ Angus Lees <gus@debian.org>,
+ Ximin Luo <infinity0@debian.org>,
+ Vasudev Kamath <vasudev@copyninja.info>
+Priority: optional
+# :native annotations are to support cross-compiling, see README.Debian of rustc
+Build-Depends: debhelper (>= 9.20141010),
+ dpkg-dev (>= 1.17.14),
+ cargo:native (>= 0.17.0),
+ rustc:native (>= 1.16),
+ libstd-rust-dev (>= 1.16),
+ pkg-config,
+ cmake,
+ bash-completion,
+ python3:native,
+ libcurl4-gnutls-dev | libcurl4-openssl-dev,
+ libssh2-1-dev,
+ libgit2-dev (>= 0.27),
+ libhttp-parser-dev,
+ libssl-dev,
+ zlib1g-dev,
+ git <!nocheck>
+Homepage: https://crates.io/
+Standards-Version: 4.2.1
+Vcs-Git: https://salsa.debian.org/rust-team/cargo.git
+Vcs-Browser: https://salsa.debian.org/rust-team/cargo
+
+Package: cargo
+Architecture: any
+Multi-Arch: allowed
+Depends: ${shlibs:Depends}, ${misc:Depends},
+ rustc (>= 1.24),
+ binutils,
+ gcc | clang | c-compiler
+Suggests: cargo-doc, python3
+Description: Rust package manager
+ Cargo is a tool that allows Rust projects to declare their various
+ dependencies, and ensure that you'll always get a repeatable build.
+ .
+ To accomplish this goal, Cargo does four things:
+ * Introduces two metadata files with various bits of project information.
+ * Fetches and builds your project's dependencies.
+ * Invokes rustc or another build tool with the correct parameters to build
+ your project.
+ * Introduces conventions, making working with Rust projects easier.
+ .
+ Cargo downloads your Rust project’s dependencies and compiles your
+ project.
+
+Package: cargo-doc
+Section: doc
+Architecture: all
+Build-Profiles: <!nodoc>
+Depends: ${misc:Depends}
+Description: Rust package manager, documentation
+ Cargo is a tool that allows Rust projects to declare their various
+ dependencies, and ensure that you'll always get a repeatable build.
+ .
+ To accomplish this goal, Cargo does four things:
+ * Introduces two metadata files with various bits of project information.
+ * Fetches and builds your project's dependencies.
+ * Invokes rustc or another build tool with the correct parameters to build
+ your project.
+ * Introduces conventions, making working with Rust projects easier.
+ .
+ Cargo downloads your Rust project’s dependencies and compiles your
+ project.
+ .
+ This package contains the documentation.
--- /dev/null
+Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
+Upstream-Name: cargo
+Source: https://github.com/rust-lang/cargo
+
+Files: src/*
+ tests/*
+ .*
+ Cargo.*
+ LICENSE-*
+ README.*
+ ARCHITECTURE.*
+ CONTRIBUTING.*
+ appveyor.yml
+Copyright: 2014 The Rust Project Developers
+License: MIT or Apache-2.0
+Comment: please do not add * to the above paragraph, so we can use lintian to
+ help us update this file properly
+
+Files: vendor/bitflags/*
+ vendor/bufstream/*
+ vendor/env_logger/*
+ vendor/flate2/*
+ vendor/fs2/*
+ vendor/glob/*
+ vendor/libc/*
+ vendor/log/*
+ vendor/rand/*
+ vendor/regex/*
+ vendor/regex-syntax/*
+ vendor/semver/*
+ vendor/shell-escape/*
+ vendor/vec_map/*
+ vendor/unicode-width/*
+ vendor/rand_core/*
+ vendor/rand_core-0*/*
+Copyright: 2014-2018 The Rust Project Developers
+License: MIT or Apache-2.0
+Comment:
+ This is a collection of external crates embedded here to bootstrap cargo.
+ Most of them come from the original upstream Rust project, thus share the
+ same MIT/Apache-2.0 dual-license. See https://github.com/rust-lang.
+ Exceptions are noted below.
+
+Files: vendor/backtrace/*
+ vendor/backtrace-sys/*
+ vendor/cfg-if/*
+ vendor/filetime/*
+ vendor/git2/*
+ vendor/git2-curl/*
+ vendor/jobserver/*
+ vendor/libz-sys/*
+ vendor/libgit2-sys/*
+ vendor/libssh2-sys/*
+ vendor/miniz-sys/*
+ vendor/miow/*
+ vendor/openssl-probe/*
+ vendor/pkg-config/*
+ vendor/rustc-demangle/*
+ vendor/tar/*
+ vendor/toml/*
+ vendor/socket2/*
+Copyright: 2014-2018 Alex Crichton <alex@alexcrichton.com>
+ 2014-2018 The Rust Project Developers
+License: MIT or Apache-2.0
+Comment: see https://github.com/alexcrichton/
+
+Files:
+ vendor/aho-corasick/*
+ vendor/memchr/*
+ vendor/utf8-ranges/*
+ vendor/wincolor/*
+ vendor/termcolor/*
+ vendor/globset/*
+ vendor/ignore/*
+ vendor/same-file/*
+ vendor/walkdir/*
+ vendor/winapi-util/*
+Copyright: 2015-2018 Andrew Gallant <jamslam@gmail.com>
+License: MIT or Unlicense
+Comment: see upstream projects,
+ * https://github.com/docopt/docopt.rs
+ * https://github.com/BurntSushi/aho-corasick
+ * https://github.com/BurntSushi/rust-memchr
+ * https://github.com/BurntSushi/utf8-ranges
+ * https://github.com/BurntSushi/ripgrep/tree/master/wincolor
+ * https://github.com/BurntSushi/ripgrep/tree/master/termcolor
+ * https://github.com/BurntSushi/ripgrep/tree/master/globset
+ * https://github.com/BurntSushi/ripgrep/tree/master/ignore
+ * https://github.com/BurntSushi/same-file
+ * https://github.com/BurntSushi/walkdir
+ * https://github.com/BurntSushi/winapi-util
+
+Files:
+ vendor/crossbeam-channel/*
+ vendor/crossbeam-deque/*
+ vendor/crossbeam-epoch/*
+ vendor/crossbeam-utils/*
+Copyright: 2017-2018 The Crossbeam Project Developers
+License: MIT or Apache-2.0
+Comment: see https://github.com/crossbeam-rs/crossbeam-deque
+
+Files: vendor/ucd-util/*
+Copyright: 2015 Andrew Gallant <jamslam@gmail.com>
+License: MIT or Apache-2.0
+Comment: see https://github.com/BurntSushi/rucd
+
+Files: vendor/kernel32/*
+ vendor/winapi/*
+Copyright: 2014-2017 Peter Atashian <retep998@gmail.com>
+ 2014-2017 winapi-rs developers
+License: MIT
+Comment: see https://github.com/retep998/winapi-rs
+
+Files: vendor/winapi-*-pc-windows-gnu/*
+Copyright: 2014-2018 Peter Atashian <retep998@gmail.com>
+License: MIT or Apache-2.0
+Comment: see https://github.com/retep998/winapi-rs
+
+Files: vendor/curl/*
+ vendor/curl-sys/*
+Copyright: 2014-2016 Carl Lerche <me@carllerche.com>
+ 2014-2016 Alex Crichton <alex@alexcrichton.com>
+License: MIT
+Comment: see https://github.com/alexcrichton/curl-rust
+
+Files: vendor/itoa/*
+ vendor/quote/*
+Copyright: 2016-2017 David Tolnay <dtolnay@gmail.com>
+License: MIT or Apache-2.0
+Comment: see https://github.com/dtolnay
+
+Files: vendor/foreign-types/*
+ vendor/foreign-types-shared/*
+Copyright: 2017-2017 Steven Fackler <sfackler@gmail.com>
+License: MIT or Apache-2.0
+Comment: see https://github.com/sfackler/foreign-types
+
+Files: vendor/fwdansi/*
+Copyright: 2018-2018 kennytm <kennytm@gmail.com>
+License: MIT
+Comment: see https://github.com/kennytm/fwdansi
+
+Files: vendor/idna/*
+Copyright: 2013 Simon Sapin <simon.sapin@exyr.org>
+ 2013-2014 Valentin Gosu
+ 1991-2015 Unicode, Inc
+License: MIT or Apache-2.0
+
+Files: vendor/lazycell/*
+Copyright: 2014, The Rust Project Developers
+ 2016-2017, Nikita Pekin and lazycell contributors
+License: MIT or Apache-2.0
+
+Files: vendor/idna/*/src/IdnaMappingTable.txt
+ vendor/idna/*/tests/IdnaTest.txt
+Copyright: 1991-2017 Unicode, Inc
+License: Unicode-terms
+
+Files: vendor/lazy_static/*
+ vendor/owning_ref/*
+ vendor/rustc_version/*
+Copyright: 2014-2016 Marvin Löbel <loebel.marvin@gmail.com>
+License: MIT or Apache-2.0
+
+Files: vendor/matches/*
+Copyright: 2015 Simon Sapin <simon.sapin@exyr.org>
+License: MIT
+Comment: see https://github.com/SimonSapin/rust-std-candidates
+
+Files: vendor/smallvec/*
+Copyright: 2018 Simon Sapin <simon.sapin@exyr.org>
+License: MIT or Apache-2.0
+
+Files: vendor/miniz-sys/*/miniz.c
+Copyright: Rich Geldreich <richgel99@gmail.com>
+License: Unlicense
+
+Files: vendor/num_cpus/*
+Copyright: 2015 Sean McArthur <sean.monstar@gmail.com>
+License: MIT
+Comment: see https://github.com/seanmonstar/num_cpus
+
+Files: vendor/opener/*
+Copyright: 2018 Brian Bowman <seeker14491@gmail.com>
+License: MIT or Apache-2.0
+Comment: see https://github.com/Seeker14491/opener
+
+Files: vendor/openssl/*
+Copyright: 2013-2015 Steven Fackler <sfackler@gmail.com>
+ 2013 Jack Lloyd
+ 2011 Google Inc.
+License: Apache-2.0
+
+Files: vendor/openssl-sys/*
+Copyright: 2015 Steven Fackler <sfackler@gmail.com>
+ 2015 Alex Crichton <alex@alexcrichton.com>
+License: MIT
+Comment: see https://github.com/sfackler/rust-openssl
+
+Files: vendor/semver-parser/*
+Copyright: 2016-2018 Steve Klabnik <steve@steveklabnik.com>
+License: MIT or Apache-2.0
+Comment: see https://github.com/steveklabnik/semver-parser
+
+Files: vendor/serde/*
+ vendor/serde_derive/*
+ vendor/serde_ignored/*
+ vendor/serde_json/*
+Copyright: 2014-2017 Erick Tryzelaar <erick.tryzelaar@gmail.com>
+ 2014-2017 David Tolnay <dtolnay@gmail.com>
+License: MIT or Apache-2.0
+Comment: see https://github.com/serde-rs
+ see https://github.com/dtolnay/serde-ignored
+
+Files: vendor/strsim/*
+Copyright: 2015 Danny Guo <dannyguo91@gmail.com>
+License: MIT
+Comment: see https://github.com/dguo/strsim-rs
+
+Files: vendor/syn/*
+Copyright: 2016-2017 David Tolnay <dtolnay@gmail.com>
+License: MIT or Apache-2.0
+Comment: see https://github.com/dtolnay/syn
+
+Files: vendor/ryu/*
+Copyright: David Tolnay <dtolnay@gmail.com>
+License: BSL-1.0 or Apache-2.0
+
+Files: vendor/thread_local/*
+ vendor/lock_api/*
+ vendor/parking_lot/*
+ vendor/parking_lot_core/*
+Copyright: 2016 Amanieu d'Antras <amanieu@gmail.com>
+License: MIT or Apache-2.0
+Comment: see https://github.com/Amanieu/thread_local-rs
+
+Files: vendor/unicode-bidi/*
+Copyright: 2015 The Servo Project Developers
+License: MIT or Apache-2.0
+Comment: see https://github.com/servo/unicode-bidi
+
+Files: vendor/core-foundation/*
+ vendor/core-foundation-sys/*
+Copyright: 2012-2013, The Servo Project Developers,
+ 2012-2013, Mozilla Foundation
+License: MIT or Apache-2.0
+Comment: see https://github.com/servo/core-foundation-rs
+
+Files: vendor/unicode-xid/*
+Copyright: 2015-2017 erick.tryzelaar <erick.tryzelaar@gmail.com>
+ 2015-2017 kwantam <kwantam@gmail.com>
+License: MIT or Apache-2.0
+Comment: see https://github.com/unicode-rs/unicode-xid
+
+Files: vendor/unicode-normalization/*/scripts/unicode.py
+ vendor/unicode-xid/*/scripts/unicode.py
+Copyright: 2011-2015 The Rust Project Developers
+ 2015 The Servo Project Developers
+License: MIT or Apache-2.0
+
+Files: vendor/unicode-bidi/*/src/char_data/tables.rs
+ vendor/unicode-normalization/*/src/tables.rs
+ vendor/unicode-xid/*/src/tables.rs
+Copyright: 2011-2015 The Rust Project Developers
+ 2015 The Servo Project Developers
+License: MIT or Apache-2.0, and Unicode-terms
+Comment:
+ These files are generated using python scripts, as indicated below, from
+ Unicode data files which are licensed under the Unicode-terms. In Debian these
+ data files are available in the unicode-data package.
+ .
+ $ git grep -i generated -- vendor/unicode/* ':(exclude)*LICENSE*'
+ vendor/unicode-bidi-0.3.4/src/char_data/tables.rs:// The following code was generated by "tools/generate.py". do not edit directly
+ vendor/unicode-normalization-0.1.5/scripts/unicode.py:// NOTE: The following code was generated by "scripts/unicode.py", do not edit directly
+ vendor/unicode-normalization-0.1.5/src/tables.rs:// NOTE: The following code was generated by "scripts/unicode.py", do not edit directly
+ vendor/unicode-normalization-0.1.5/src/testdata.rs:// NOTE: The following code was generated by "scripts/unicode.py", do not edit directly
+ vendor/unicode-xid-0.0.4/scripts/unicode.py:// NOTE: The following code was generated by "scripts/unicode.py", do not edit directly
+ vendor/unicode-xid-0.0.4/src/tables.rs:// NOTE: The following code was generated by "scripts/unicode.py", do not edit directly
+
+Files: vendor/unreachable/*
+ vendor/void/*
+Copyright: 2015-2017 Jonathan Reem <jonathan.reem@gmail.com>
+License: MIT
+Comment: see https://github.com/reem/
+
+Files: vendor/unicode-normalization/*
+Copyright: 2016 kwantam <kwantam@gmail.com>
+License: MIT or Apache-2.0
+
+Files: vendor/url/*
+ vendor/percent-encoding/*
+Copyright: 2015-2016 Simon Sapin <simon.sapin@exyr.org>
+ 2013-2016 The rust-url developers
+License: MIT or Apache-2.0
+Comment: see https://github.com/servo/rust-url
+ see https://github.com/servo/rust-url/tree/master/percent_encoding
+
+Files: vendor/hex/*
+Copyright: 2015, rust-hex Developers
+License: MIT or Apache-2.0
+Comment: see https://github.com/KokaKiwi/rust-hex
+
+Files: vendor/atty/*
+Copyright: 2015-2016, Doug Tangren
+License: MIT
+Comment: see https://github.com/softprops/atty
+
+Files: vendor/fnv/*
+ vendor/cc/*
+ vendor/proc-macro2/*
+ vendor/rustc-workspace-hack/*
+Copyright: 2017-2018, Alex Crichton <alex@alexcrichton.com>
+License: MIT or Apache-2.0
+Comment:
+ see https://github.com/servo/rust-fnv/
+ see https://github.com/alexcrichton/proc-macro
+
+
+Files: vendor/home/*
+Copyright: Brian Anderson <andersb@gmail.com>
+License: MIT or Apache-2.0
+Comment: see https://github.com/brson/home
+
+Files: vendor/fuchsia-zircon/*
+ vendor/fuchsia-zircon-sys/*
+Copyright: Raph Levien <raph@google.com>
+ 2016, The Fuchsia Authors
+License: BSD-3-Clause
+Comment:
+ * see https://fuchsia.googlesource.com/magenta-rs/
+ * see https://fuchsia.googlesource.com/garnet/
+
+Files: vendor/scopeguard/*
+Copyright: 2015, The Rust Project Developers
+ bluss
+License: MIT or Apache-2.0
+Comment: see https://github.com/bluss/scopeguard
+
+Files: vendor/vcpkg/*
+Copyright: 2017-2017 Jim McGrath <jimmc2@gmail.com>
+License: MIT or Apache-2.0
+Comment: see https://github.com/mcgoo/vcpkg-rs
+
+Files: vendor/commoncrypto/*
+ vendor/commoncrypto-sys/*
+ vendor/crypto-hash/*
+Copyright: 2016, Mark Lee
+ 2015-2016, Mark Lee
+License: MIT
+Comment:
+ * see https://github.com/malept/rust-commoncrypto
+ * see https://github.com/malept/crypto-hash
+
+Files: vendor/redox_syscall/*
+ vendor/redox_termios/*
+Copyright: 2017, Redox OS Developers
+License: MIT
+
+Files: vendor/ansi_term/*
+Copyright: 2014, Benjamin Sago <ogham@bsago.me>
+ Ryan Scheel (Havvy) <ryan.havvy@gmail.com>
+ Josh Triplett <josh@joshtriplett.org>
+License: MIT
+
+Files: vendor/quick-error/*
+Copyright: 2015, The quick-error developers
+License: MIT or Apache-2.0
+
+Files: vendor/termion/*
+Copyright: 2016, Ticki
+License: MIT
+Comment: see https://github.com/ticki/termion/
+
+Files: vendor/failure/*
+ vendor/failure_derive/*
+Copyright: Without Boats <boats@mozilla.com>
+License: MIT or Apache-2.0
+Comment:
+ * see https://github.com/withoutboats/failure
+ * see https://github.com/withoutboats/failure_derive
+
+Files: vendor/remove_dir_all/*
+Copyright: 2017, Aaron Power <theaaronepower@gmail.com>
+License: MIT or Apache-2.0
+Comment: see https://github.com/Aaronepower/remove_dir_all
+
+Files: vendor/synstructure/*
+Copyright: Michael Layzell <michael@thelayzells.com>
+License: MIT
+Comment: see https://github.com/mystor/synstructure
+
+Files: vendor/schannel/*
+Copyright: 2015, Steffen Butzer <steffen.butzer@outlook.com>
+License: MIT
+Comment: see https://github.com/steffengy/schannel-rs/
+
+Files: vendor/humantime/*
+Copyright: 2016, The humantime Developers
+License: MIT or Apache-2.0
+Comment:
+ Includes parts of http date with copyright: 2016, Pyfisch and portions of musl
+ libc with copyright 2005-2013 Rich Felker.
+ .
+ See https://github.com/tailhook/humantime
+
+Files: vendor/textwrap/*
+Copyright: 2016, Martin Geisler <martin@geisler.net>
+License: MIT
+
+Files: vendor/clap/*
+Copyright: 2015-2016, Kevin B. Knapp <kbknapp@gmail.com>
+License: MIT
+
+Files: vendor/tempfile/*
+Copyright: 2015, Steven Allen
+License: MIT or Apache-2.0
+
+Files: vendor/cloudabi/*
+Copyright: Nuxi and contributors
+License: BSD-2-clause
+Comment: See https://github.com/nuxinl/cloudabi
+
+Files: vendor/arrayvec/*
+ vendor/nodrop/*
+Copyright: 2015-2017, Ulrik Sverdrup "bluss"
+License: MIT or Apache-2.0
+
+Files: vendor/memoffset/*
+Copyright: 2017, Gilad Naaman
+License: MIT
+
+Files: vendor/version_check/*
+Copyright: 2017-2018, Sergio Benitez
+License: MIT or Apache-2.0
+
+Files: vendor/stable_deref_trait/*
+Copyright: 2017, Robert Grosse
+License: MIT or Apache-2.0
+
+Files: vendor/rustfix/*
+Copyright: 2016, Pascal Hertleif
+ 2016, Oliver Schneider
+License: MIT or Apache-2.0
+
+Files: debian/*
+Copyright: 2017 Ximin Luo <infinity0@debian.org>
+ 2015-2016 Luca Bruno <lucab@debian.org>
+License: MIT or Apache-2.0
+
+Files: debian/bootstrap.py
+Copyright: 2015 David Huseby
+License: BSD-2-clause
+Comment: See LICENSE at https://github.com/dhuseby/cargo-bootstrap/
+
+License: BSD-2-clause
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are met:
+ .
+ 1. Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+ 2. Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+ .
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ POSSIBILITY OF SUCH DAMAGE.
+
+License: BSD-3-clause
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are
+ met:
+ .
+ 1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ 2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the
+ distribution.
+ 3. Neither the name of the Creytiv.com nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+ .
+ THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
+ IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+ WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
+ INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
+ IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ POSSIBILITY OF SUCH DAMAGE.
+
+License: MIT
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+ .
+ The above copyright notice and this permission notice shall be included in
+ all copies or substantial portions of the Software.
+ .
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ THE SOFTWARE.
+
+License: Apache-2.0
+ On Debian systems, see /usr/share/common-licenses/Apache-2.0 for
+ the full text of the Apache License version 2.
+
+License: Unlicense
+ This is free and unencumbered software released into the public domain.
+ .
+ Anyone is free to copy, modify, publish, use, compile, sell, or
+ distribute this software, either in source code form or as a compiled
+ binary, for any purpose, commercial or non-commercial, and by any
+ means.
+ .
+ In jurisdictions that recognize copyright laws, the author or authors
+ of this software dedicate any and all copyright interest in the
+ software to the public domain. We make this dedication for the benefit
+ of the public at large and to the detriment of our heirs and
+ successors. We intend this dedication to be an overt act of
+ relinquishment in perpetuity of all present and future rights to this
+ software under copyright law.
+ .
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+ OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+ ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ OTHER DEALINGS IN THE SOFTWARE.
+
+License: Unicode-terms
+ Distributed under the Terms of Use in http://www.unicode.org/copyright.html.
+ .
+ Permission is hereby granted, free of charge, to any person obtaining
+ a copy of the Unicode data files and any associated documentation
+ (the "Data Files") or Unicode software and any associated documentation
+ (the "Software") to deal in the Data Files or Software
+ without restriction, including without limitation the rights to use,
+ copy, modify, merge, publish, distribute, and/or sell copies of
+ the Data Files or Software, and to permit persons to whom the Data Files
+ or Software are furnished to do so, provided that either
+ (a) this copyright and permission notice appear with all copies
+ of the Data Files or Software, or
+ (b) this copyright and permission notice appear in associated
+ Documentation.
+ .
+ THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF
+ ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+ WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ NONINFRINGEMENT OF THIRD PARTY RIGHTS.
+ IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS
+ NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL
+ DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
+ DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
+ TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+ PERFORMANCE OF THE DATA FILES OR SOFTWARE.
+ .
+ Except as contained in this notice, the name of a copyright holder
+ shall not be used in advertising or otherwise to promote the sale,
+ use or other dealings in these Data Files or Software without prior
+ written authorization of the copyright holder.
+Comment: see http://www.unicode.org/copyright.html
+
+License: BSL-1.0
+ Permission is hereby granted, free of charge, to any person or organization
+ obtaining a copy of the software and accompanying documentation covered by this
+ license (the "Software") to use, reproduce, display, distribute, execute, and
+ transmit the Software, and to prepare derivative works of the Software, and to
+ permit third-parties to whom the Software is furnished to do so, all subject to
+ the following:
+ .
+ The copyright notices in the Software and this entire statement, including the
+ above license grant, this restriction and the following disclaimer, must be
+ included in all copies of the Software, in whole or in part, and all derivative
+ works of the Software, unless such copies or derivative works are solely in the
+ form of machine-executable object code generated by a source language
+ processor.
+ .
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT SHALL
+ THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE FOR ANY
+ DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, ARISING
+ FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ THE SOFTWARE.
--- /dev/null
+[DEFAULT]
+upstream-tag = upstream/%(version)s
+debian-tag = debian/%(version)s
+pristine-tar = True
+upstream-branch = upstream
+component = vendor
+
+[buildpackage]
+submodules = True
+ignore-branch = True
+sign-tags = True
+
+[import-orig]
+upstream-vcs-tag = %(version)s
+debian-branch = debian/experimental
--- /dev/null
+debian/scripts/* usr/share/cargo
+debian/bin/cargo usr/share/cargo/bin
--- /dev/null
+#!/bin/sh
+set -e
+echo ""
+echo "This needs a local copy of cargo-vendor, and the following packages:"
+echo "python-dulwich python-pytoml devscripts"
+echo ""
+
+TMPDIR=`mktemp -d`
+echo "Using '${TMPDIR}'..."
+cat > "${TMPDIR}/Makefile" <<'EOF'
+include /usr/share/dpkg/pkg-info.mk
+all:
+ @echo $(DEB_VERSION_UPSTREAM)
+EOF
+WORKDIR=${PWD}
+
+if [ -z "$1" ]
+ then
+ USCAN_ARGS="";
+ CARGO_VER=$(make -f "${TMPDIR}/Makefile");
+ else
+ USCAN_ARGS="--download-version $1";
+ CARGO_VER="$1";
+fi;
+
+BOOTSTRAP_PY=$(find "${PWD}" -name bootstrap.py -type f)
+VENDOR_FILTER=$(find "${PWD}/debian" -name vendor-tarball-filter.txt -type f)
+VENDOR_SUS_WHITELIST=$(find "${PWD}/debian" -name vendor-tarball-unsuspicious.txt -type f)
+
+# Download cargo tarball
+uscan --rename ${USCAN_ARGS} --force-download --destdir "${TMPDIR}/"
+
+# Extract cargo source
+cd "${TMPDIR}"
+mkdir cargo
+tar -xaf "${TMPDIR}/cargo_${CARGO_VER}.orig.tar.gz" -C cargo --strip-components=1
+cd cargo
+
+# Download build-deps via cargo-vendor
+export GIT_AUTHOR_NAME="deb-build"
+export GIT_AUTHOR_EMAIL="<>"
+export GIT_COMMITTER_NAME="${GIT_AUTHOR_NAME}"
+export GIT_COMMITTER_EMAIL="${GIT_AUTHOR_EMAIL}"
+
+${WORKDIR}/debian/scripts/debian-cargo-vendor
+
+# Clean embedded libs and update checksums
+grep -v '^#' ${VENDOR_FILTER} | xargs -I% sh -c 'rm -rf vendor/%'
+
+# Report any suspicious files
+cp -R vendor vendor-scan
+grep -v '^#' ${VENDOR_SUS_WHITELIST} | xargs -I% sh -c 'rm -rf vendor-scan/%'
+echo "Checking for suspicious files..."
+# The following shell snippet is a bit more strict than suspicious-source(1)
+find vendor-scan -type f -and -not -name '.cargo-checksum.json' -exec file '{}' \; | \
+ sed -e 's/\btext\b\(.*\), with very long lines/verylongtext\1/g' | \
+ grep -v '\b\(text\|empty\)\b' || true
+echo "The above files (if any) seem suspicious, please audit them."
+echo "If good, add them to ${VENDOR_SUS_WHITELIST}."
+echo "If bad, add them to ${VENDOR_FILTER} and/or the relevant debcargo.toml in debcargo-conf.git"
+rm -rf vendor-scan
+
+# Pack it up, reproducibly
+GZIP=-9n tar --sort=name \
+ --mtime="./Cargo.lock" \
+ --owner=root --group=root \
+ -czf "${TMPDIR}/cargo_${CARGO_VER}.orig-vendor.tar.gz" vendor
+
+# All is good, we are done!
+echo "Your files are available at:"
+echo "${TMPDIR}/cargo_${CARGO_VER}.orig.tar.gz \\"
+echo "${TMPDIR}/cargo_${CARGO_VER}.orig-vendor.tar.gz"
+echo ""
+echo "Unpacked cargo sources are available under:"
+echo "${TMPDIR}/cargo/"
--- /dev/null
+/* http://prismjs.com/download.html?themes=prism&languages=markup+css+clike+javascript */
+var _self = (typeof window !== 'undefined')
+ ? window // if in browser
+ : (
+ (typeof WorkerGlobalScope !== 'undefined' && self instanceof WorkerGlobalScope)
+ ? self // if in worker
+ : {} // if in node js
+ );
+
+/**
+ * Prism: Lightweight, robust, elegant syntax highlighting
+ * MIT license http://www.opensource.org/licenses/mit-license.php/
+ * @author Lea Verou http://lea.verou.me
+ */
+
+var Prism = (function(){
+
+// Private helper vars
+var lang = /\blang(?:uage)?-(?!\*)(\w+)\b/i;
+
+var _ = _self.Prism = {
+ util: {
+ encode: function (tokens) {
+ if (tokens instanceof Token) {
+ return new Token(tokens.type, _.util.encode(tokens.content), tokens.alias);
+ } else if (_.util.type(tokens) === 'Array') {
+ return tokens.map(_.util.encode);
+ } else {
+ return tokens.replace(/&/g, '&').replace(/</g, '<').replace(/\u00a0/g, ' ');
+ }
+ },
+
+ type: function (o) {
+ return Object.prototype.toString.call(o).match(/\[object (\w+)\]/)[1];
+ },
+
+ // Deep clone a language definition (e.g. to extend it)
+ clone: function (o) {
+ var type = _.util.type(o);
+
+ switch (type) {
+ case 'Object':
+ var clone = {};
+
+ for (var key in o) {
+ if (o.hasOwnProperty(key)) {
+ clone[key] = _.util.clone(o[key]);
+ }
+ }
+
+ return clone;
+
+ case 'Array':
+ // Check for existence for IE8
+ return o.map && o.map(function(v) { return _.util.clone(v); });
+ }
+
+ return o;
+ }
+ },
+
+ languages: {
+ extend: function (id, redef) {
+ var lang = _.util.clone(_.languages[id]);
+
+ for (var key in redef) {
+ lang[key] = redef[key];
+ }
+
+ return lang;
+ },
+
+ /**
+ * Insert a token before another token in a language literal
+ * As this needs to recreate the object (we cannot actually insert before keys in object literals),
+ * we cannot just provide an object, we need an object and a key.
+ * @param inside The key (or language id) of the parent
+ * @param before The key to insert before. If not provided, the function appends instead.
+ * @param insert Object with the key/value pairs to insert
+ * @param root The object that contains `inside`. If equal to Prism.languages, it can be omitted.
+ */
+ insertBefore: function (inside, before, insert, root) {
+ root = root || _.languages;
+ var grammar = root[inside];
+
+ if (arguments.length == 2) {
+ insert = arguments[1];
+
+ for (var newToken in insert) {
+ if (insert.hasOwnProperty(newToken)) {
+ grammar[newToken] = insert[newToken];
+ }
+ }
+
+ return grammar;
+ }
+
+ var ret = {};
+
+ for (var token in grammar) {
+
+ if (grammar.hasOwnProperty(token)) {
+
+ if (token == before) {
+
+ for (var newToken in insert) {
+
+ if (insert.hasOwnProperty(newToken)) {
+ ret[newToken] = insert[newToken];
+ }
+ }
+ }
+
+ ret[token] = grammar[token];
+ }
+ }
+
+ // Update references in other language definitions
+ _.languages.DFS(_.languages, function(key, value) {
+ if (value === root[inside] && key != inside) {
+ this[key] = ret;
+ }
+ });
+
+ return root[inside] = ret;
+ },
+
+ // Traverse a language definition with Depth First Search
+ DFS: function(o, callback, type) {
+ for (var i in o) {
+ if (o.hasOwnProperty(i)) {
+ callback.call(o, i, o[i], type || i);
+
+ if (_.util.type(o[i]) === 'Object') {
+ _.languages.DFS(o[i], callback);
+ }
+ else if (_.util.type(o[i]) === 'Array') {
+ _.languages.DFS(o[i], callback, i);
+ }
+ }
+ }
+ }
+ },
+
+ highlightAll: function(async, callback) {
+ var elements = document.querySelectorAll('code[class*="language-"], [class*="language-"] code, code[class*="lang-"], [class*="lang-"] code');
+
+ for (var i=0, element; element = elements[i++];) {
+ _.highlightElement(element, async === true, callback);
+ }
+ },
+
+ highlightElement: function(element, async, callback) {
+ // Find language
+ var language, grammar, parent = element;
+
+ while (parent && !lang.test(parent.className)) {
+ parent = parent.parentNode;
+ }
+
+ if (parent) {
+ language = (parent.className.match(lang) || [,''])[1];
+ grammar = _.languages[language];
+ }
+
+ // Set language on the element, if not present
+ element.className = element.className.replace(lang, '').replace(/\s+/g, ' ') + ' language-' + language;
+
+ // Set language on the parent, for styling
+ parent = element.parentNode;
+
+ if (/pre/i.test(parent.nodeName)) {
+ parent.className = parent.className.replace(lang, '').replace(/\s+/g, ' ') + ' language-' + language;
+ }
+
+ if (!grammar) {
+ return;
+ }
+
+ var code = element.textContent;
+
+ if(!code) {
+ return;
+ }
+
+ code = code.replace(/^(?:\r?\n|\r)/,'');
+
+ var env = {
+ element: element,
+ language: language,
+ grammar: grammar,
+ code: code
+ };
+
+ _.hooks.run('before-highlight', env);
+
+ if (async && _self.Worker) {
+ var worker = new Worker(_.filename);
+
+ worker.onmessage = function(evt) {
+ env.highlightedCode = Token.stringify(JSON.parse(evt.data), language);
+
+ _.hooks.run('before-insert', env);
+
+ env.element.innerHTML = env.highlightedCode;
+
+ callback && callback.call(env.element);
+ _.hooks.run('after-highlight', env);
+ };
+
+ worker.postMessage(JSON.stringify({
+ language: env.language,
+ code: env.code
+ }));
+ }
+ else {
+ env.highlightedCode = _.highlight(env.code, env.grammar, env.language);
+
+ _.hooks.run('before-insert', env);
+
+ env.element.innerHTML = env.highlightedCode;
+
+ callback && callback.call(element);
+
+ _.hooks.run('after-highlight', env);
+ }
+ },
+
+ highlight: function (text, grammar, language) {
+ var tokens = _.tokenize(text, grammar);
+ return Token.stringify(_.util.encode(tokens), language);
+ },
+
+ tokenize: function(text, grammar, language) {
+ var Token = _.Token;
+
+ var strarr = [text];
+
+ var rest = grammar.rest;
+
+ if (rest) {
+ for (var token in rest) {
+ grammar[token] = rest[token];
+ }
+
+ delete grammar.rest;
+ }
+
+ tokenloop: for (var token in grammar) {
+ if(!grammar.hasOwnProperty(token) || !grammar[token]) {
+ continue;
+ }
+
+ var patterns = grammar[token];
+ patterns = (_.util.type(patterns) === "Array") ? patterns : [patterns];
+
+ for (var j = 0; j < patterns.length; ++j) {
+ var pattern = patterns[j],
+ inside = pattern.inside,
+ lookbehind = !!pattern.lookbehind,
+ lookbehindLength = 0,
+ alias = pattern.alias;
+
+ pattern = pattern.pattern || pattern;
+
+ for (var i=0; i<strarr.length; i++) { // Don’t cache length as it changes during the loop
+
+ var str = strarr[i];
+
+ if (strarr.length > text.length) {
+ // Something went terribly wrong, ABORT, ABORT!
+ break tokenloop;
+ }
+
+ if (str instanceof Token) {
+ continue;
+ }
+
+ pattern.lastIndex = 0;
+
+ var match = pattern.exec(str);
+
+ if (match) {
+ if(lookbehind) {
+ lookbehindLength = match[1].length;
+ }
+
+ var from = match.index - 1 + lookbehindLength,
+ match = match[0].slice(lookbehindLength),
+ len = match.length,
+ to = from + len,
+ before = str.slice(0, from + 1),
+ after = str.slice(to + 1);
+
+ var args = [i, 1];
+
+ if (before) {
+ args.push(before);
+ }
+
+ var wrapped = new Token(token, inside? _.tokenize(match, inside) : match, alias);
+
+ args.push(wrapped);
+
+ if (after) {
+ args.push(after);
+ }
+
+ Array.prototype.splice.apply(strarr, args);
+ }
+ }
+ }
+ }
+
+ return strarr;
+ },
+
+ hooks: {
+ all: {},
+
+ add: function (name, callback) {
+ var hooks = _.hooks.all;
+
+ hooks[name] = hooks[name] || [];
+
+ hooks[name].push(callback);
+ },
+
+ run: function (name, env) {
+ var callbacks = _.hooks.all[name];
+
+ if (!callbacks || !callbacks.length) {
+ return;
+ }
+
+ for (var i=0, callback; callback = callbacks[i++];) {
+ callback(env);
+ }
+ }
+ }
+};
+
+var Token = _.Token = function(type, content, alias) {
+ this.type = type;
+ this.content = content;
+ this.alias = alias;
+};
+
+Token.stringify = function(o, language, parent) {
+ if (typeof o == 'string') {
+ return o;
+ }
+
+ if (_.util.type(o) === 'Array') {
+ return o.map(function(element) {
+ return Token.stringify(element, language, o);
+ }).join('');
+ }
+
+ var env = {
+ type: o.type,
+ content: Token.stringify(o.content, language, parent),
+ tag: 'span',
+ classes: ['token', o.type],
+ attributes: {},
+ language: language,
+ parent: parent
+ };
+
+ if (env.type == 'comment') {
+ env.attributes['spellcheck'] = 'true';
+ }
+
+ if (o.alias) {
+ var aliases = _.util.type(o.alias) === 'Array' ? o.alias : [o.alias];
+ Array.prototype.push.apply(env.classes, aliases);
+ }
+
+ _.hooks.run('wrap', env);
+
+ var attributes = '';
+
+ for (var name in env.attributes) {
+ attributes += name + '="' + (env.attributes[name] || '') + '"';
+ }
+
+ return '<' + env.tag + ' class="' + env.classes.join(' ') + '" ' + attributes + '>' + env.content + '</' + env.tag + '>';
+
+};
+
+if (!_self.document) {
+ if (!_self.addEventListener) {
+ // in Node.js
+ return _self.Prism;
+ }
+ // In worker
+ _self.addEventListener('message', function(evt) {
+ var message = JSON.parse(evt.data),
+ lang = message.language,
+ code = message.code;
+
+ _self.postMessage(JSON.stringify(_.util.encode(_.tokenize(code, _.languages[lang]))));
+ _self.close();
+ }, false);
+
+ return _self.Prism;
+}
+
+// Get current script and highlight
+var script = document.getElementsByTagName('script');
+
+script = script[script.length - 1];
+
+if (script) {
+ _.filename = script.src;
+
+ if (document.addEventListener && !script.hasAttribute('data-manual')) {
+ document.addEventListener('DOMContentLoaded', _.highlightAll);
+ }
+}
+
+return _self.Prism;
+
+})();
+
+if (typeof module !== 'undefined' && module.exports) {
+ module.exports = Prism;
+}
+;
+Prism.languages.markup = {
+ 'comment': /<!--[\w\W]*?-->/,
+ 'prolog': /<\?[\w\W]+?\?>/,
+ 'doctype': /<!DOCTYPE[\w\W]+?>/,
+ 'cdata': /<!\[CDATA\[[\w\W]*?]]>/i,
+ 'tag': {
+ pattern: /<\/?[^\s>\/]+(?:\s+[^\s>\/=]+(?:=(?:("|')(?:\\\1|\\?(?!\1)[\w\W])*\1|[^\s'">=]+))?)*\s*\/?>/i,
+ inside: {
+ 'tag': {
+ pattern: /^<\/?[^\s>\/]+/i,
+ inside: {
+ 'punctuation': /^<\/?/,
+ 'namespace': /^[^\s>\/:]+:/
+ }
+ },
+ 'attr-value': {
+ pattern: /=(?:('|")[\w\W]*?(\1)|[^\s>]+)/i,
+ inside: {
+ 'punctuation': /[=>"']/
+ }
+ },
+ 'punctuation': /\/?>/,
+ 'attr-name': {
+ pattern: /[^\s>\/]+/,
+ inside: {
+ 'namespace': /^[^\s>\/:]+:/
+ }
+ }
+
+ }
+ },
+ 'entity': /&#?[\da-z]{1,8};/i
+};
+
+// Plugin to make entity title show the real entity, idea by Roman Komarov
+Prism.hooks.add('wrap', function(env) {
+
+ if (env.type === 'entity') {
+ env.attributes['title'] = env.content.replace(/&/, '&');
+ }
+});
+;
+Prism.languages.css = {
+ 'comment': /\/\*[\w\W]*?\*\//,
+ 'atrule': {
+ pattern: /@[\w-]+?.*?(;|(?=\s*\{))/i,
+ inside: {
+ 'rule': /@[\w-]+/
+ // See rest below
+ }
+ },
+ 'url': /url\((?:(["'])(\\(?:\r\n|[\w\W])|(?!\1)[^\\\r\n])*\1|.*?)\)/i,
+ 'selector': /[^\{\}\s][^\{\};]*?(?=\s*\{)/,
+ 'string': /("|')(\\(?:\r\n|[\w\W])|(?!\1)[^\\\r\n])*\1/,
+ 'property': /(\b|\B)[\w-]+(?=\s*:)/i,
+ 'important': /\B!important\b/i,
+ 'function': /[-a-z0-9]+(?=\()/i,
+ 'punctuation': /[(){};:]/
+};
+
+Prism.languages.css['atrule'].inside.rest = Prism.util.clone(Prism.languages.css);
+
+if (Prism.languages.markup) {
+ Prism.languages.insertBefore('markup', 'tag', {
+ 'style': {
+ pattern: /<style[\w\W]*?>[\w\W]*?<\/style>/i,
+ inside: {
+ 'tag': {
+ pattern: /<style[\w\W]*?>|<\/style>/i,
+ inside: Prism.languages.markup.tag.inside
+ },
+ rest: Prism.languages.css
+ },
+ alias: 'language-css'
+ }
+ });
+
+ Prism.languages.insertBefore('inside', 'attr-value', {
+ 'style-attr': {
+ pattern: /\s*style=("|').*?\1/i,
+ inside: {
+ 'attr-name': {
+ pattern: /^\s*style/i,
+ inside: Prism.languages.markup.tag.inside
+ },
+ 'punctuation': /^\s*=\s*['"]|['"]\s*$/,
+ 'attr-value': {
+ pattern: /.+/i,
+ inside: Prism.languages.css
+ }
+ },
+ alias: 'language-css'
+ }
+ }, Prism.languages.markup.tag);
+};
+Prism.languages.clike = {
+ 'comment': [
+ {
+ pattern: /(^|[^\\])\/\*[\w\W]*?\*\//,
+ lookbehind: true
+ },
+ {
+ pattern: /(^|[^\\:])\/\/.*/,
+ lookbehind: true
+ }
+ ],
+ 'string': /("|')(\\(?:\r\n|[\s\S])|(?!\1)[^\\\r\n])*\1/,
+ 'class-name': {
+ pattern: /((?:(?:class|interface|extends|implements|trait|instanceof|new)\s+)|(?:catch\s+\())[a-z0-9_\.\\]+/i,
+ lookbehind: true,
+ inside: {
+ punctuation: /(\.|\\)/
+ }
+ },
+ 'keyword': /\b(if|else|while|do|for|return|in|instanceof|function|new|try|throw|catch|finally|null|break|continue)\b/,
+ 'boolean': /\b(true|false)\b/,
+ 'function': /[a-z0-9_]+(?=\()/i,
+ 'number': /\b-?(0x[\dA-Fa-f]+|\d*\.?\d+([Ee]-?\d+)?)\b/,
+ 'operator': /[-+]{1,2}|!|<=?|>=?|={1,3}|&{1,2}|\|?\||\?|\*|\/|~|\^|%/,
+ 'punctuation': /[{}[\];(),.:]/
+};
+;
+Prism.languages.javascript = Prism.languages.extend('clike', {
+ 'keyword': /\b(as|async|await|break|case|catch|class|const|continue|debugger|default|delete|do|else|enum|export|extends|false|finally|for|from|function|get|if|implements|import|in|instanceof|interface|let|new|null|of|package|private|protected|public|return|set|static|super|switch|this|throw|true|try|typeof|var|void|while|with|yield)\b/,
+ 'number': /\b-?(0x[\dA-Fa-f]+|0b[01]+|0o[0-7]+|\d*\.?\d+([Ee][+-]?\d+)?|NaN|Infinity)\b/,
+ 'function': /(?!\d)[a-z0-9_$]+(?=\()/i
+});
+
+Prism.languages.insertBefore('javascript', 'keyword', {
+ 'regex': {
+ pattern: /(^|[^/])\/(?!\/)(\[.+?]|\\.|[^/\\\r\n])+\/[gimyu]{0,5}(?=\s*($|[\r\n,.;})]))/,
+ lookbehind: true
+ }
+});
+
+Prism.languages.insertBefore('javascript', 'class-name', {
+ 'template-string': {
+ pattern: /`(?:\\`|\\?[^`])*`/,
+ inside: {
+ 'interpolation': {
+ pattern: /\$\{[^}]+\}/,
+ inside: {
+ 'interpolation-punctuation': {
+ pattern: /^\$\{|\}$/,
+ alias: 'punctuation'
+ },
+ rest: Prism.languages.javascript
+ }
+ },
+ 'string': /[\s\S]+/
+ }
+ }
+});
+
+if (Prism.languages.markup) {
+ Prism.languages.insertBefore('markup', 'tag', {
+ 'script': {
+ pattern: /<script[\w\W]*?>[\w\W]*?<\/script>/i,
+ inside: {
+ 'tag': {
+ pattern: /<script[\w\W]*?>|<\/script>/i,
+ inside: Prism.languages.markup.tag.inside
+ },
+ rest: Prism.languages.javascript
+ },
+ alias: 'language-javascript'
+ }
+ });
+}
+;
--- /dev/null
+Author: Ximin Luo <infinity0@debian.org>
+Forwarded: https://github.com/rust-lang/cargo/pull/6260
+diff --git a/tests/testsuite/package.rs b/tests/testsuite/package.rs
+index f52b3cece..2d655f0a0 100644
+--- a/tests/testsuite/package.rs
++++ b/tests/testsuite/package.rs
+@@ -285,7 +285,8 @@ dependency `bar` does not specify a version.
+
+ #[test]
+ fn exclude() {
+- let p = project()
++ let root = paths::root().join("exclude");
++ let repo = git::repo(&root)
+ .file("Cargo.toml", r#"
+ [project]
+ name = "foo"
+@@ -348,7 +349,8 @@ fn exclude() {
+ .file("some_dir/dir_deep_5/some_dir/file", "")
+ .build();
+
+- p.cargo("package --no-verify -v")
++ cargo_process("package --no-verify -v")
++ .cwd(repo.root())
+ .with_stdout("")
+ .with_stderr(
+ "\
+@@ -366,7 +368,6 @@ See [..]
+ See [..]
+ [WARNING] [..] file `some_dir/file_deep_1` WILL be excluded [..]
+ See [..]
+-[WARNING] No (git) Cargo.toml found at `[..]` in workdir `[..]`
+ [PACKAGING] foo v0.0.1 ([..])
+ [ARCHIVING] [..]
+ [ARCHIVING] [..]
+@@ -386,14 +387,17 @@ See [..]
+ [ARCHIVING] [..]
+ [ARCHIVING] [..]
+ [ARCHIVING] [..]
++[ARCHIVING] .cargo_vcs_info.json
+ ",
+ ).run();
+
+- assert!(p.root().join("target/package/foo-0.0.1.crate").is_file());
++ assert!(repo.root().join("target/package/foo-0.0.1.crate").is_file());
+
+- p.cargo("package -l")
++ cargo_process("package -l")
++ .cwd(repo.root())
+ .with_stdout(
+ "\
++.cargo_vcs_info.json
+ Cargo.toml
+ dir_root_1/some_dir/file
+ dir_root_2/some_dir/file
+@@ -418,7 +422,8 @@ src/main.rs
+
+ #[test]
+ fn include() {
+- let p = project()
++ let root = paths::root().join("include");
++ let repo = git::repo(&root)
+ .file("Cargo.toml", r#"
+ [project]
+ name = "foo"
+@@ -432,16 +437,17 @@ fn include() {
+ .file("src/bar.txt", "") // should be ignored when packaging
+ .build();
+
+- p.cargo("package --no-verify -v")
++ cargo_process("package --no-verify -v")
++ .cwd(repo.root())
+ .with_stderr(
+ "\
+ [WARNING] manifest has no description[..]
+ See http://doc.crates.io/manifest.html#package-metadata for more info.
+-[WARNING] No (git) Cargo.toml found at `[..]` in workdir `[..]`
+ [PACKAGING] foo v0.0.1 ([..])
+ [ARCHIVING] [..]
+ [ARCHIVING] [..]
+ [ARCHIVING] [..]
++[ARCHIVING] .cargo_vcs_info.json
+ ",
+ ).run();
+ }
--- /dev/null
+From f075f9c6cbf11c6484330d24b8a49fbbdb465d66 Mon Sep 17 00:00:00 2001
+From: Eric Huss <eric@huss.org>
+Date: Sun, 4 Nov 2018 10:38:51 -0800
+Subject: [PATCH] Fix can_run_doc_tests order depends on hash.
+
+The deps are sorted, but the name is the same so the order depends on the metadata hash.
+Fix by sorting by the actual name, too.
+---
+ src/cargo/core/compiler/context/mod.rs | 2 ++
+ tests/testsuite/rename_deps.rs | 2 +-
+ 2 files changed, 3 insertions(+), 1 deletion(-)
+
+diff --git a/src/cargo/core/compiler/context/mod.rs b/src/cargo/core/compiler/context/mod.rs
+index 225e6ebd53..334a3876aa 100644
+--- a/src/cargo/core/compiler/context/mod.rs
++++ b/src/cargo/core/compiler/context/mod.rs
+@@ -234,6 +234,8 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
+ }
+ }
+ }
++ // Help with tests to get a stable order with renamed deps.
++ doctest_deps.sort();
+ self.compilation.to_doc_test.push(compilation::Doctest {
+ package: unit.pkg.clone(),
+ target: unit.target.clone(),
+diff --git a/tests/testsuite/rename_deps.rs b/tests/testsuite/rename_deps.rs
+index c444739b0f..209987b89c 100644
+--- a/tests/testsuite/rename_deps.rs
++++ b/tests/testsuite/rename_deps.rs
+@@ -334,8 +334,8 @@ fn can_run_doc_tests() {
+ [DOCTEST] foo
+ [RUNNING] `rustdoc --test [CWD]/src/lib.rs \
+ [..] \
+- --extern baz=[CWD]/target/debug/deps/libbar-[..].rlib \
+ --extern bar=[CWD]/target/debug/deps/libbar-[..].rlib \
++ --extern baz=[CWD]/target/debug/deps/libbar-[..].rlib \
+ [..]`
+ ",
+ ).run();
--- /dev/null
+Description: Disable network tests
+Author: Ximin Luo <infinity0@debian.org>
+Forwarded: TODO
+---
+This patch header follows DEP-3: http://dep.debian.net/deps/dep3/
+--- a/tests/testsuite/build_auth.rs
++++ b/tests/testsuite/build_auth.rs
+@@ -10,7 +10,7 @@
+ use support::{basic_manifest, project};
+
+ // Test that HTTP auth is offered from `credential.helper`
+-#[test]
++#[allow(dead_code)]
+ fn http_auth_offered() {
+ let server = TcpListener::bind("127.0.0.1:0").unwrap();
+ let addr = server.local_addr().unwrap();
+@@ -141,7 +141,7 @@
+ }
+
+ // Boy, sure would be nice to have a TLS implementation in rust!
+-#[test]
++#[allow(dead_code)]
+ fn https_something_happens() {
+ let server = TcpListener::bind("127.0.0.1:0").unwrap();
+ let addr = server.local_addr().unwrap();
+--- a/tests/testsuite/net_config.rs
++++ b/tests/testsuite/net_config.rs
+@@ -1,6 +1,6 @@
+ use support::project;
+
+-#[test]
++#[allow(dead_code)]
+ fn net_retry_loads_from_config() {
+ let p = project()
+ .file(
+@@ -33,7 +33,7 @@
+ ).run();
+ }
+
+-#[test]
++#[allow(dead_code)]
+ fn net_retry_git_outputs_warning() {
+ let p = project()
+ .file(
--- /dev/null
+Description: Disable fetch tests to allow building on arm and ppc architectures
+ These tests are disabled to allow building on the release architectures.
+Author: Vasudev Kamath <vasudev@copyninja.info>
+Bug: https://github.com/rust-lang/cargo/issues/5864
+Last-Update: 2018-08-05
+
+--- a/tests/testsuite/fetch.rs
++++ b/tests/testsuite/fetch.rs
+@@ -12,8 +12,8 @@
+ p.cargo("fetch").with_stdout("").run();
+ }
+
+-#[test]
+-fn fetch_all_platform_dependencies_when_no_target_is_given() {
++#[allow(dead_code)]
++fn _fetch_all_platform_dependencies_when_no_target_is_given() {
+ if cross_compile::disabled() {
+ return;
+ }
+@@ -58,8 +58,8 @@
+ .run();
+ }
+
+-#[test]
+-fn fetch_platform_specific_dependencies() {
++#[allow(dead_code)]
++fn _fetch_platform_specific_dependencies() {
+ if cross_compile::disabled() {
+ return;
+ }
--- /dev/null
+Description: Disable incremental builds on sparc64
+ Incremental builds are currently unreliable on sparc64,
+ disable them by default for the time being.
+Last-Update: 2018-08-07
+
+--- a/src/cargo/core/profiles.rs
++++ b/src/cargo/core/profiles.rs
+@@ -458,6 +458,9 @@
+ debuginfo: Some(2),
+ debug_assertions: true,
+ overflow_checks: true,
++ #[cfg(target_arch = "sparc64")]
++ incremental: false,
++ #[cfg(not(target_arch = "sparc64"))]
+ incremental: true,
+ ..Profile::default()
+ }
+--- a/tests/testsuite/build.rs
++++ b/tests/testsuite/build.rs
+@@ -38,6 +38,7 @@
+
+ /// Check that the `CARGO_INCREMENTAL` environment variable results in
+ /// `rustc` getting `-Zincremental` passed to it.
++#[cfg(not(target_arch = "sparc64"))]
+ #[test]
+ fn cargo_compile_incremental() {
+ let p = project()
+@@ -58,6 +59,7 @@
+ ).run();
+ }
+
++#[cfg(not(target_arch = "sparc64"))]
+ #[test]
+ fn incremental_profile() {
+ let p = project()
+@@ -99,6 +101,7 @@
+ .run();
+ }
+
++#[cfg(not(target_arch = "sparc64"))]
+ #[test]
+ fn incremental_config() {
+ let p = project()
--- /dev/null
+0xxx: Grabbed from upstream development.
+1xxx: Possibly relevant for upstream adoption.
+2xxx: Only relevant for official Debian release.
--- /dev/null
+1001_fix_inc_exc_tests.patch
+1002_fix_doc_tests_order_hash.patch
+2007_sparc64_disable_incremental_build.patch
+2002_disable-net-tests.patch
+2005_disable_fetch_cross_tests.patch
--- /dev/null
+#!/usr/bin/make -f
+# debian/rules for the cargo package. All build/test/install steps are
+# delegated to the wrapper script in debian/bin/cargo.
+
+include /usr/share/dpkg/pkg-info.mk
+include /usr/share/dpkg/architecture.mk
+include /usr/share/dpkg/buildflags.mk
+include /usr/share/rustc/architecture.mk
+
+export CFLAGS CXXFLAGS CPPFLAGS LDFLAGS RUSTFLAGS
+export DEB_HOST_RUST_TYPE DEB_HOST_GNU_TYPE
+
+# Wrapper that drives the actual cargo invocations.
+CARGO = $(CURDIR)/debian/bin/cargo
+
+export CARGO_HOME = $(CURDIR)/debian/cargo_home
+export DEB_CARGO_CRATE=cargo_$(DEB_VERSION_UPSTREAM)
+export DEB_CARGO_PACKAGE=cargo
+
+# To run a specific test, run something like:
+# $ debian/rules override_dh_auto_test-arch \
+# CARGO_TEST_FLAGS="package::include -- <args ...>"
+# See `cargo test --help` for more options.
+CARGO_TEST_FLAGS =
+
+%:
+ dh $@ --with bash-completion
+
+override_dh_auto_configure:
+ $(CARGO) prepare-debian $(CURDIR)/vendor
+
+override_dh_auto_build-arch:
+ $(CARGO) build
+
+# Arch-indep build produces only the documentation.
+override_dh_auto_build-indep:
+ $(CARGO) doc
+
+# Cross-compilation tests are disabled via CFG_DISABLE_CROSS_TESTS.
+override_dh_auto_test-arch:
+ CFG_DISABLE_CROSS_TESTS=1 $(CARGO) test $(CARGO_TEST_FLAGS)
+
+override_dh_auto_test-indep:
+ # no arch-indep tests
+
+override_dh_auto_install:
+ $(CARGO) install
+
+override_dh_auto_clean:
+ $(CARGO) clean
+
+override_dh_clean:
+ # Upstream contains a lot of these
+ dh_clean -XCargo.toml.orig
--- /dev/null
+#!/bin/bash
+# To run this, you need to first install cargo-vendor.
+set -e
+
+SCRIPTDIR="$(dirname "$(readlink -f "$0")")"
+
+# Print "name version" pairs of crates that dropped out of Cargo.lock after
+# the second `cargo update` run, i.e. crates the debcargo-conf patches made
+# obsolete.
+not_needed() {
+ diff -ru Cargo.lock.orig Cargo.lock | grep '^-"checksum' | cut '-d ' -f2-3
+}
+
+# Crude Cargo.toml parser: print "name version" from the [package] (or
+# [project]) section of the given manifest. Not a real TOML parser -- it
+# flattens the file onto one line per section and pattern-matches, which is
+# good enough for the vendored crates' generated manifests.
+ghetto_parse_cargo() {
+ cat "$1" \
+ | tr '\n' '\t' \
+ | sed -e 's/\t\[/\n[/g' \
+ | perl -ne 'print if s/^\[(?:package|project)\].*\tname\s*=\s*"(.*?)".*\tversion\s*=\s*"(.*?)".*/\1 \2/g'
+}
+
+# Print the vendor/ directories of all crates reported by not_needed, so
+# the caller can remove them. Aborts if any manifest fails to parse.
+pruned_paths() {
+ for i in vendor/*/Cargo.toml; do
+ pkgnamever=
+ pkgnamever=$(ghetto_parse_cargo "$i")
+ if [ -z "$pkgnamever" ]; then
+ echo >&2 "failed to parse: $i"
+ exit 1
+ fi
+ echo "$pkgnamever $i"
+ done | grep -F -f <(not_needed) | cut '-d ' -f3 | while read x; do
+ echo " $(dirname $x)"
+ done
+}
+
+rm -rf vendor/
+cargo vendor --verbose vendor/
+mkdir -p .cargo
+cat >.cargo/config <<eof
+[source.crates-io]
+replace-with = "vendored-sources"
+
+[source.vendored-sources]
+directory = "$PWD/vendor"
+eof
+cargo update
+cp Cargo.lock Cargo.lock.orig
+
+if [ -d debcargo-conf ]; then ( cd debcargo-conf && git pull );
+else git clone https://salsa.debian.org/rust-team/debcargo-conf; fi
+
+# keep applying patches, and drop to a subshell for manual fixing, until it succeeds
+while ! ( cd vendor
+x=true
+for i in *; do
+ cd $i
+ # if there is a d/rules then don't mess with it, it's too custom for this
+ # script to deal with - just use the upstream version. example: backtrace-sys
+ # TODO: deal with those better, especially backtrace-sys
+ if [ -e ../../debcargo-conf/src/$i/debian/rules ]; then
+ continue
+ fi
+ if [ -d ../../debcargo-conf/src/$i/debian/patches ]; then
+ echo >&2 "$0: patching $i"
+ mkdir -p debian
+ if [ ! -d debian/patches ]; then
+ cp -a -n ../../debcargo-conf/src/$i/debian/patches debian/
+ fi
+ QUILT_PATCHES=debian/patches quilt push -a
+ case $? in
+ 0|2) true;;
+ *) echo >&2 "$0: patching $i failed <<<<<<<<<<<<<<<<<<<<<<<<"; x=false;;
+ esac
+ fi
+ cd ..
+done; $x ); do
+ echo >&2 "================================================================================"
+ echo >&2 "$0: You are now in a sub-shell!"
+ echo >&2 "$0: Fix the failed patches, then exit the sub-shell by pressing ctrl-D ONCE."
+ echo >&2 "$0: If you need to abort this process, press ctrl-D then quickly ctrl-C."
+ if [ -d debian/vendor-patches ]; then
+ echo >&2 "$0: Previous patches exist, to view the diff between these and debcargo-conf run:"
+ echo >&2 " $ diff --color=always -ru vendor/ debian/vendor-patches/ | grep -v 'Only in vendor'"
+ echo >&2 "$0: Note that this is a 2-way diff not a 3-way diff, and so may be misleading."
+ fi
+ echo >&2 "================================================================================"
+ bash || true
+ echo >&2 "$0: trying patches again..."
+done
+find vendor/*/debian/patches -name '*~' -delete || true
+cargo update
+pruned_paths | while read x; do echo >&2 "$0: removing, because debcargo-conf patches makes it obsolete: $x"; rm -rf "$x"; done
+
+# remove excluded files
+( cd vendor
+for i in *; do (
+ cd $i
+ if [ -e ../../debcargo-conf/src/$i/debian/rules ]; then
+ continue
+ fi
+ if grep -q excludes ../../debcargo-conf/src/$i/debian/debcargo.toml 2>/dev/null; then
+ sed -nre 's/.*excludes\s*=\s*(\[[^]]*\]).*/\1/p' \
+ ../../debcargo-conf/src/$i/debian/debcargo.toml \
+ | python -c "import ast, sys; x=ast.literal_eval(sys.stdin.read()); print('\n'.join((i[:-3] if i.endswith('/**') else i) for i in x));" \
+ | while read x; do echo >&2 "$0: removing, since it's excluded by debcargo-conf: vendor/$i/$x"; rm -rf "$x"; done
+ fi
+); done; )
+
+# TODO: rm special logic from debcargo and put into debcargo-conf instead
+echo >&2 "$0: removing winapi archives"
+rm -rf vendor/winapi-*-pc-windows-gnu/lib/*.a
+
+echo >&2 "$0: pruning all checksums.."
+for i in vendor/*; do ${SCRIPTDIR}/prune-checksums "$i"; done
+
+echo >&2 "$0: backing up patches to debian/vendor-patches/*"
+echo >&2 "$0: if you updated any of those, you should backport them back to debcargo-conf.git"
+rm -rf debian/vendor-patches && mkdir -p debian/vendor-patches
+( cd vendor && find . -name patches -exec cp -a --parents '{}' ../debian/vendor-patches \; )
+
+echo >&2 "$0: cleaning up..."
+rm -rf .cargo debcargo-conf Cargo.lock.orig Cargo.lock
--- /dev/null
+#!/usr/bin/python3
+# Copyright: 2015-2017 The Debian Project
+# License: MIT or Apache-2.0
+#
+# Guess the copyright of a cargo crate by looking at its git history.
+#
+# Usage: guess-crate-copyright CRATE-DIR [CRATE-DIR ...]
+# Prints a debian/copyright "Files" paragraph per crate, taking authors,
+# license and repository from its Cargo.toml. When exactly one crate is
+# given, the year range is estimated from the first and last commit years
+# of the upstream git repository; otherwise "20XX-<current year>" is used.
+
+import datetime
+import pytoml
+import os
+import subprocess
+import sys
+
+this_year = datetime.datetime.now().year
+crates = sys.argv[1:]
+# Cloning the repository is slow, so only do it for a single-crate run.
+get_initial_commit = len(crates) == 1
+
+for crate in crates:
+ with open(os.path.join(crate, "Cargo.toml")) as fp:
+ data = pytoml.load(fp)
+ repo = data["package"].get("repository", None)
+ if get_initial_commit and repo:
+ # NOTE(review): repo is interpolated into a shell=True command line,
+ # so a malicious Cargo.toml could inject shell code. Acceptable for
+ # manual use on crates being packaged, but do not run on untrusted
+ # input without quoting (e.g. shlex.quote).
+ # The pipeline prints two lines: the year of the earliest commit and
+ # the year of the latest commit.
+ output = subprocess.check_output(
+ """git clone -q --bare "%s" tmp.crate-copyright >&2 &&
+cd tmp.crate-copyright &&
+git log --format=%%cI --reverse | head -n1 | cut -b1-4 &&
+git log --format=%%cI | head -n1 | cut -b1-4 &&
+cd .. &&
+rm -rf tmp.crate-copyright""" % repo, shell=True).decode("utf-8")
+ first_year, last_year = output.strip().split(maxsplit=2)
+ else:
+ # Placeholder range for the operator to fill in by hand.
+ first_year = "20XX"
+ last_year = this_year
+ print("""Files: {0}
+Copyright: {1}
+License: {2}
+Comment: see {3}
+""".format(
+ os.path.join(crate, "*"),
+ "\n ".join("%s-%s %s" % (first_year, last_year, a.replace(" <>", "")) for a in data ["package"]["authors"]),
+ data["package"].get("license", "???").replace("/", " or "),
+ repo or "???"
+ ))
--- /dev/null
+#!/usr/bin/python3
+# Copyright: 2015-2017 The Debian Project
+# License: MIT or Apache-2.0
+#
+# Helper to remove removed-files from .cargo-checksum
+# TODO: rewrite to perl and add to dh-cargo, maybe?
+
+from collections import OrderedDict
+import argparse
+import json
+import os
+import sys
+
+def prune_keep(cfile):
+ """Drop checksum entries for files that no longer exist on disk.
+
+ Entries for files that still exist are kept unchanged (so those files
+ are assumed unmodified). The checksum file is rewritten only when at
+ least one entry was removed.
+ """
+ with open(cfile) as fp:
+ sums = json.load(fp, object_pairs_hook=OrderedDict)
+
+ oldfiles = sums["files"]
+ # OrderedDict + object_pairs_hook preserve the original key order so the
+ # rewrite is a minimal, reproducible diff.
+ newfiles = OrderedDict([entry for entry in oldfiles.items() if os.path.exists(entry[0])])
+ sums["files"] = newfiles
+
+ if len(oldfiles) == len(newfiles):
+ return
+
+ with open(cfile, "w") as fp:
+ json.dump(sums, fp, separators=(',', ':'))
+
+def prune(cfile):
+ """Drop all per-file checksum entries from a .cargo-checksum.json.
+
+ The file is rewritten in place; truncate() handles the case where the
+ new JSON is shorter than the old content.
+ """
+ with open(cfile, "r+") as fp:
+ sums = json.load(fp, object_pairs_hook=OrderedDict)
+ sums["files"] = {}
+ fp.seek(0)
+ json.dump(sums, fp, separators=(',', ':'))
+ fp.truncate()
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser()
+ parser.add_argument("-k", "--keep", action="store_true", help="keep "
+ "checksums of files that still exist, and assume they haven't changed.")
+ parser.add_argument('crates', nargs=argparse.REMAINDER,
+ help="crates whose checksums to prune. (default: ./)")
+ args = parser.parse_args(sys.argv[1:])
+ crates = args.crates or ["."]
+ f = prune_keep if args.keep else prune
+ for c in crates:
+ cfile = os.path.join(c, ".cargo-checksum.json") if os.path.isdir(c) else c
+ f(cfile)
--- /dev/null
+3.0 (quilt)
--- /dev/null
+--- a/Cargo.toml 2018-09-07 20:46:25.639402810 -0700
++++ b/Cargo.toml 2018-09-07 20:46:37.055559250 -0700
+@@ -71,10 +71,6 @@
+ [dependencies.bitflags]
+ version = "1.0"
+
+-[dependencies.clippy]
+-version = "~0.0.166"
+-optional = true
+-
+ [dependencies.strsim]
+ version = "0.7.0"
+ optional = true
+@@ -110,7 +106,6 @@
+ debug = []
+ default = ["suggestions", "color", "vec_map"]
+ doc = ["yaml"]
+-lints = ["clippy"]
+ nightly = []
+ no_cargo = []
+ suggestions = ["strsim"]
--- /dev/null
+no-clippy.patch
--- /dev/null
+--- a/Cargo.toml 2017-01-24 06:56:51.000000000 +0000
++++ b/Cargo.toml 2018-09-15 15:37:47.602333479 +0000
+@@ -8,13 +8,8 @@
+ keywords = ["crypto", "hash", "digest", "osx", "commoncrypto"]
+ license = "MIT"
+
+-[features]
+-lint = ["clippy"]
+-
+ [dependencies]
+ libc = "0.2"
+
+-clippy = { version = "0.0", optional = true }
+-
+ [dev-dependencies]
+ hex = "0.2"
--- /dev/null
+no-clippy.patch
--- /dev/null
+--- a/Cargo.toml 2017-01-24 06:56:51.000000000 +0000
++++ b/Cargo.toml 2018-09-15 17:37:01.354423224 +0000
+@@ -8,13 +8,8 @@
+ keywords = ["crypto", "hash", "digest", "osx", "commoncrypto"]
+ license = "MIT"
+
+-[features]
+-lint = ["clippy"]
+-
+ [dependencies]
+ commoncrypto-sys = { version = "0.2.0", path = "../commoncrypto-sys" }
+
+-clippy = { version = "0.0", optional = true }
+-
+ [dev-dependencies]
+ hex = "0.2"
--- /dev/null
+no-clippy.patch
--- /dev/null
+winapi3.patch
--- /dev/null
+--- a/Cargo.toml 1970-01-01 00:00:00.000000000 +0000
++++ b/Cargo.toml 2018-09-21 18:54:24.693880364 +0000
+@@ -48,4 +48,2 @@ version = "0.1.2"
+ version = "0.9.33"
+-[target."cfg(target_env=\"msvc\")".dependencies.kernel32-sys]
+-version = "0.2.2"
+
+@@ -54,3 +52,4 @@ version = "0.1.13"
+ [target."cfg(windows)".dependencies.winapi]
+-version = "0.2.7"
++version = "0.3"
++features = ["winsock2", "wincrypt", "libloaderapi"]
+ [badges.appveyor]
+--- a/src/easy/windows.rs 2018-09-21 18:01:35.962553903 +0000
++++ b/src/easy/windows.rs 2018-09-21 18:01:35.962553903 +0000
+@@ -4,21 +4,21 @@ use libc::c_void;
+
+ #[cfg(target_env = "msvc")]
+ mod win {
+- use kernel32;
+ use std::ffi::CString;
+ use std::mem;
+ use std::ptr;
+ use schannel::cert_context::ValidUses;
+ use schannel::cert_store::CertStore;
+ use winapi::{self, c_void, c_uchar, c_long, c_int};
++ use winapi::um::libloaderapi::{GetModuleHandleW, GetProcAddress};
+
+ fn lookup(module: &str, symbol: &str) -> Option<*const c_void> {
+ unsafe {
+ let symbol = CString::new(symbol).unwrap();
+ let mut mod_buf: Vec<u16> = module.encode_utf16().collect();
+ mod_buf.push(0);
+- let handle = kernel32::GetModuleHandleW(mod_buf.as_mut_ptr());
+- let n = kernel32::GetProcAddress(handle, symbol.as_ptr());
++ let handle = GetModuleHandleW(mod_buf.as_mut_ptr());
++ let n = GetProcAddress(handle, symbol.as_ptr());
+ if n == ptr::null() {
+ None
+ } else {
+--- a/src/lib.rs 2018-09-21 18:01:35.962553903 +0000
++++ b/src/lib.rs 2018-09-21 18:01:35.962553903 +0000
+@@ -61,8 +61,6 @@ extern crate openssl_probe;
+ #[cfg(windows)]
+ extern crate winapi;
+
+-#[cfg(target_env = "msvc")]
+-extern crate kernel32;
+ #[cfg(target_env = "msvc")]
+ extern crate schannel;
+
+--- a/src/multi.rs 2018-09-21 18:01:35.962553903 +0000
++++ b/src/multi.rs 2018-09-21 18:01:35.962553903 +0000
+@@ -8,7 +8,7 @@ use libc::{c_int, c_char, c_void, c_long, c_short};
+ use curl_sys;
+
+ #[cfg(windows)]
+-use winapi::winsock2::fd_set;
++use winapi::um::winsock2::fd_set;
+ #[cfg(unix)]
+ use libc::{fd_set, pollfd, POLLIN, POLLPRI, POLLOUT};
+
--- /dev/null
+--- a/Cargo.toml 2018-08-14 01:22:34.279727720 -0700
++++ b/Cargo.toml 2018-08-14 01:22:59.212095676 -0700
+@@ -33,10 +33,6 @@
+ version = "1.0"
+ optional = true
+
+-[dependencies.miniz-sys]
+-version = "0.1.11"
+-optional = true
+-
+ [dependencies.miniz_oxide_c_api]
+ version = "0.2"
+ features = ["no_c_export"]
+@@ -56,7 +52,8 @@
+ version = "0.1"
+
+ [features]
+-default = ["miniz-sys"]
++default = ["zlib"]
++miniz-sys = ["zlib"]
+ rust_backend = ["miniz_oxide_c_api"]
+ tokio = ["tokio-io", "futures"]
+ zlib = ["libz-sys"]
--- /dev/null
+--- flate2.orig/Cargo.toml
++++ flate2/Cargo.toml
+@@ -33,11 +33,6 @@
+ version = "1.0"
+ optional = true
+
+-[dependencies.miniz_oxide_c_api]
+-version = "0.2"
+-features = ["no_c_export"]
+-optional = true
+-
+ [dependencies.tokio-io]
+ version = "0.1"
+ optional = true
+@@ -54,12 +49,9 @@
+ [features]
+ default = ["zlib"]
+ miniz-sys = ["zlib"]
+-rust_backend = ["miniz_oxide_c_api"]
++rust_backend = ["zlib"]
+ tokio = ["tokio-io", "futures"]
+ zlib = ["libz-sys"]
+-[target."cfg(all(target_arch = \"wasm32\", not(target_os = \"emscripten\")))".dependencies.miniz_oxide_c_api]
+-version = "0.2"
+-features = ["no_c_export"]
+ [badges.appveyor]
+ repository = "alexcrichton/flate2-rs"
+
--- /dev/null
+disable-miniz.patch
+drop-deps-for-cargo.patch
--- /dev/null
+--- a/Cargo.toml 2018-08-03 01:58:48.002962262 -0700
++++ b/Cargo.toml 2018-08-03 01:58:54.275006248 -0700
+@@ -61,4 +61,4 @@
+ [target."cfg(unix)".dependencies.libc]
+ version = "0.2"
+ [target."cfg(windows)".dependencies.rand]
+-version = "0.4"
++version = "< 0.6, >= 0.4"
--- /dev/null
+relax-dep-version.patch
--- /dev/null
+--- a/Cargo.toml
++++ b/Cargo.toml
+@@ -23,8 +23 @@ license = "MIT/Apache-2.0"
+ repository = "https://github.com/indiv0/lazycell"
+-[dependencies.clippy]
+-version = "0.0"
+-optional = true
+-
+-[features]
+-nightly = []
+-nightly-testing = ["clippy", "nightly"]
--- /dev/null
+no-clippy.patch
--- /dev/null
+--- libgit2-sys-0.6.19/build.rs.orig 2018-02-06 14:11:05.758487595 +0100
++++ libgit2-sys-0.6.19/build.rs 2018-02-06 14:11:09.514541164 +0100
+@@ -31,10 +31,8 @@
+ }
+ let has_pkgconfig = Command::new("pkg-config").output().is_ok();
+
+- if env::var("LIBGIT2_SYS_USE_PKG_CONFIG").is_ok() {
+- if pkg_config::find_library("libgit2").is_ok() {
+- return
+- }
++ if pkg_config::find_library("libgit2").is_ok() {
++ return
+ }
+
+ if !Path::new("libgit2/.git").exists() {
--- /dev/null
+no-special-snowflake-env.patch
--- /dev/null
+Description: Use libssh2 from system by default
+Author: Vasudev Kamath <vasudev@copyninja.info>
+Bug: https://github.com/alexcrichton/ssh2-rs/issues/88
+Forwarded: not-needed
+Last-Update: 2018-07-28
+
+--- a/build.rs
++++ b/build.rs
+@@ -21,19 +21,20 @@
+ // The system copy of libssh2 is not used by default because it
+ // can lead to having two copies of libssl loaded at once.
+ // See https://github.com/alexcrichton/ssh2-rs/pull/88
+- if env::var("LIBSSH2_SYS_USE_PKG_CONFIG").is_ok() {
+- if let Ok(lib) = pkg_config::find_library("libssh2") {
+- for path in &lib.include_paths {
+- println!("cargo:include={}", path.display());
+- }
+- return
++ // if env::var("LIBSSH2_SYS_USE_PKG_CONFIG").is_ok() {
++ if let Ok(lib) = pkg_config::find_library("libssh2") {
++ for path in &lib.include_paths {
++ println!("cargo:include={}", path.display());
+ }
++ return;
+ }
++ // }
+
+- if !Path::new("libssh2/.git").exists() {
+- let _ = Command::new("git").args(&["submodule", "update", "--init"])
+- .status();
+- }
++ // if !Path::new("libssh2/.git").exists() {
++ // let _ = Command::new("git")
++ // .args(&["submodule", "update", "--init"])
++ // .status();
++ // }
+
+ let mut cfg = cmake::Config::new("libssh2");
+
--- /dev/null
+no-special-snowflake-env.patch
--- /dev/null
+--- a/Cargo.toml 2018-09-03 21:17:45.506177066 -0700
++++ b/Cargo.toml 2018-09-03 21:18:00.573513575 -0700
+@@ -28,14 +28,11 @@
+ [build-dependencies.cc]
+ version = "1.0"
+
+-[build-dependencies.openssl-src]
+-version = "111.0.1"
+-optional = true
+-
+ [build-dependencies.pkg-config]
+ version = "0.3.9"
+
+ [features]
+ vendored = ["openssl-src"]
++openssl-src = []
+ [target."cfg(target_env = \"msvc\")".build-dependencies.vcpkg]
+ version = "0.2"
--- /dev/null
+disable-vendor.patch
--- /dev/null
+--- a/src/lib.rs
++++ b/src/lib.rs
+@@ -9,8 +9,6 @@
+ //! A number of environment variables are available to globally configure how
+ //! this crate will invoke `pkg-config`:
+ //!
+-//! * `PKG_CONFIG_ALLOW_CROSS` - if this variable is not set, then `pkg-config`
+-//! will automatically be disabled for all cross compiles.
+ //! * `FOO_NO_PKG_CONFIG` - if set, this will disable running `pkg-config` when
+ //! probing for the library named `foo`.
+ //!
+@@ -81,7 +79,7 @@
+
+ // Only use pkg-config in host == target situations by default (allowing an
+ // override).
+- (host == target || env::var_os("PKG_CONFIG_ALLOW_CROSS").is_some())
++ (host == target || true)
+ }
+
+ #[derive(Clone, Default)]
+@@ -113,9 +111,8 @@
+ /// Contains the name of the responsible environment variable.
+ EnvNoPkgConfig(String),
+
+- /// Cross compilation detected.
+- ///
+- /// Override with `PKG_CONFIG_ALLOW_CROSS=1`.
++ /// Cross compilation detected. Kept for compatibility;
++ /// the Debian package never emits this.
+ CrossCompilation,
+
+ /// Failed to run `pkg-config`.
+@@ -137,13 +134,9 @@
+ fn description(&self) -> &str {
+ match *self {
+ Error::EnvNoPkgConfig(_) => "pkg-config requested to be aborted",
+- Error::CrossCompilation => {
+- "pkg-config doesn't handle cross compilation. \
+- Use PKG_CONFIG_ALLOW_CROSS=1 to override"
+- }
+ Error::Command { .. } => "failed to run pkg-config",
+ Error::Failure { .. } => "pkg-config did not exit sucessfully",
+- Error::__Nonexhaustive => panic!(),
++ Error::CrossCompilation | Error::__Nonexhaustive => panic!(),
+ }
+ }
+
+@@ -214,10 +207,6 @@
+ Error::EnvNoPkgConfig(ref name) => {
+ write!(f, "Aborted because {} is set", name)
+ }
+- Error::CrossCompilation => {
+- write!(f, "Cross compilation detected. \
+- Use PKG_CONFIG_ALLOW_CROSS=1 to override")
+- }
+ Error::Command { ref command, ref cause } => {
+ write!(f, "Failed to run `{}`: {}", command, cause)
+ }
+@@ -233,7 +222,7 @@
+ }
+ Ok(())
+ }
+- Error::__Nonexhaustive => panic!(),
++ Error::CrossCompilation | Error::__Nonexhaustive => panic!(),
+ }
+ }
+ }
+@@ -388,7 +377,11 @@
+ }
+
+ fn command(&self, name: &str, args: &[&str]) -> Command {
+- let exe = self.env_var("PKG_CONFIG").unwrap_or_else(|_| String::from("pkg-config"));
++ let exe = self.env_var("PKG_CONFIG").unwrap_or_else(|_| {
++ self.env_var("DEB_HOST_GNU_TYPE")
++ .map(|t| t.to_string() + "-pkg-config")
++ .unwrap_or_else(|_| String::from("pkg-config"))
++ });
+ let mut cmd = Command::new(exe);
+ if self.is_static(name) {
+ cmd.arg("--static");
+--- a/tests/test.rs
++++ b/tests/test.rs
+@@ -29,7 +29,6 @@
+ pkg_config::probe_library(name)
+ }
+
+-#[test]
+ fn cross_disabled() {
+ let _g = LOCK.lock();
+ reset();
+@@ -41,7 +40,6 @@
+ }
+ }
+
+-#[test]
+ fn cross_enabled() {
+ let _g = LOCK.lock();
+ reset();
--- /dev/null
+no-special-snowflake-env.patch
--- /dev/null
+Index: rand/Cargo.toml
+===================================================================
+--- rand.orig/Cargo.toml
++++ rand/Cargo.toml
+@@ -48,7 +48,7 @@ default = ["std"]
+ i128_support = []
+ nightly = ["i128_support"]
+ serde1 = ["serde", "serde_derive", "rand_core/serde1"]
+-std = ["rand_core/std", "alloc", "libc", "winapi", "cloudabi", "fuchsia-zircon"]
++std = ["rand_core/std", "alloc", "libc", "winapi", "cloudabi"]
+ [target."cfg(target_os = \"cloudabi\")".dependencies.cloudabi]
+ version = "0.0.3"
+ optional = true
--- /dev/null
+disable-fuchsia-zircon-dep.diff
--- /dev/null
+--- a/Cargo.toml 2017-07-06 23:45:07.000000000 +0000
++++ b/Cargo.toml 2018-09-15 18:09:13.913086470 +0000
+@@ -23,13 +23,6 @@
+
+ [lib]
+ name = "unicode_bidi"
+-[dependencies.flame]
+-version = "0.1"
+-optional = true
+-
+-[dependencies.flamer]
+-version = "0.1"
+-optional = true
+
+ [dependencies.serde]
+ version = ">=0.8, <2.0"
+@@ -43,7 +36,6 @@
+
+ [features]
+ with_serde = ["serde"]
+-flame_it = ["flame", "flamer"]
+ unstable = []
+ default = []
+ bench_it = []
--- /dev/null
+no-flamegraphs.patch
--- /dev/null
+# This is a list of files and dirs that are omitted from our custom
+# "suspicious files" scanner
+
+# test data
+flate2/tests/
+tar/tests/archives/
+term/tests/data/
+toml/tests/
+num/ci/
+openssl/test/
+schannel/test/*
+
+# misc support data
+failure/CODE_OF_CONDUCT.md
+failure_derive/CODE_OF_CONDUCT.md
+hamcrest/LICENSE-*
+*/.travis.yml
+# "build status" link-images etc take up a lot of line-length
+*/README.md
+
+# individual files, manually audited:
+backtrace-sys/src/libbacktrace/config/libtool.m4
+backtrace-sys/src/libbacktrace/configure
+clap/CHANGELOG.md
+clap/CONTRIBUTORS.md
+clap/.github/CONTRIBUTING.md
+cloudabi/cloudabi.rs
+conv/src/errors.rs
+conv/src/impls.rs
+conv/src/lib.rs
+conv/src/macros.rs
+docopt/src/test/testcases.rs
+dtoa/performance.png
+failure/book/src/bail-and-ensure.md
+git2/src/cred.rs
+idna/src/uts46_mapping_table.rs
+idna/tests/IdnaTest.txt
+itoa/performance.png
+lazy_static/src/lib.rs
+miniz-sys/miniz.c
+num/doc/favicon.ico
+num/doc/rust-logo-128x128-blk-v2.png
+num/.travis/deploy.enc
+regex/src/testdata/basic.dat
+regex/tests/crates_regex.rs
+regex/tests/fowler.rs
+rustc-demangle/src/lib.rs
+schannel/LICENSE.md
+stable_deref_trait/src/lib.rs
+synstructure/LICENSE
+tar/Cargo.toml
+termion/logo.svg
+term/scripts/id_rsa.enc
+unicode-normalization/src/tables.rs
+url/github.png
+vec_map/Cargo.toml
+winapi/README.md
+winapi/src/lib.rs
+winapi/src/winnt.rs
--- /dev/null
+version=3
+https://github.com/rust-lang/cargo/releases /rust-lang/cargo/archive/(\d+\.\d+\.\d+)\.tar\.gz