Merge remote-tracking branch 'origin/master' into haskell-updates.

This commit is contained in:
Peter Simons 2020-06-29 10:29:04 +02:00
commit 9b0a1bfcdd
155 changed files with 4163 additions and 2947 deletions

View file

@ -166,7 +166,7 @@ hello latest de2bf4786de6 About a minute ago 25.2MB
<title>buildLayeredImage</title>
<para>
Create a Docker image with many of the store paths being on their own layer to improve sharing between images.
Create a Docker image with many of the store paths being on their own layer to improve sharing between images. The image is realized into the Nix store as a gzipped tarball. Depending on the intended usage, many users might prefer to use <function>streamLayeredImage</function> instead, which this function uses internally.
</para>
<variablelist>
@ -327,6 +327,27 @@ pkgs.dockerTools.buildLayeredImage {
</section>
</section>
<section xml:id="ssec-pkgs-dockerTools-streamLayeredImage">
<title>streamLayeredImage</title>
<para>
Builds a script which, when run, will stream an uncompressed tarball of a Docker image to stdout. The arguments to this function are as for <function>buildLayeredImage</function>. This method of constructing an image does not realize the image into the Nix store, so it saves on IO and disk/cache space, particularly with large images.
</para>
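<para>
As a rough sketch (the package, tag, and command used here are purely illustrative; the attribute names mirror those documented for <function>buildLayeredImage</function>), such a streamed image could be defined as:
<programlisting>
pkgs.dockerTools.streamLayeredImage {
  name = "hello";
  tag = "latest";
  contents = [ pkgs.hello ];
  config.Cmd = [ "/bin/hello" ];
}
</programlisting>
Running the resulting script then streams the image, as shown below.
</para>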
<para>
The image produced by running the output script can be piped directly into <command>docker load</command>, to load it into the local docker daemon:
<screen><![CDATA[
$(nix-build) | docker load
]]></screen>
</para>
<para>
Alternatively, the image can be piped via <command>gzip</command> into <command>skopeo</command>, e.g. to copy it into a registry:
<screen><![CDATA[
$(nix-build) | gzip --fast | skopeo copy docker-archive:/dev/stdin docker://some_docker_registry/myimage:tag
]]></screen>
</para>
</section>
<section xml:id="ssec-pkgs-dockerTools-fetchFromRegistry">
<title>pullImage</title>

View file

@ -462,6 +462,11 @@ lib.mapAttrs (n: v: v // { shortName = n; }) {
fullName = "GNU Lesser General Public License v3.0 or later";
};
lgpllr = spdx {
spdxId = "LGPLLR";
fullName = "Lesser General Public License For Linguistic Resources";
};
libpng = spdx {
spdxId = "Libpng";
fullName = "libpng License";
@ -482,6 +487,11 @@ lib.mapAttrs (n: v: v // { shortName = n; }) {
url = "https://opensource.franz.com/preamble.html";
};
llvm-exception = spdx {
spdxId = "LLVM-exception";
fullName = "LLVM Exception"; # LLVM exceptions to the Apache 2.0 License
};
lppl12 = spdx {
spdxId = "LPPL-1.2";
fullName = "LaTeX Project Public License v1.2";
@ -545,6 +555,12 @@ lib.mapAttrs (n: v: v // { shortName = n; }) {
fullName = "Non-Profit Open Software License 3.0";
};
obsidian = {
fullName = "Obsidian End User Agreement";
url = "https://obsidian.md/eula";
free = false;
};
ocamlpro_nc = {
fullName = "OCamlPro Non Commercial license version 1";
url = "https://alt-ergo.ocamlpro.com/http/alt-ergo-2.2.0/OCamlPro-Non-Commercial-License.pdf";

View file

@ -1604,6 +1604,12 @@
githubId = 32609395;
name = "B YI";
};
conradmearns = {
email = "conradmearns+github@pm.me";
github = "ConradMearns";
githubId = 5510514;
name = "Conrad Mearns";
};
couchemar = {
email = "couchemar@yandex.ru";
github = "couchemar";
@ -1782,6 +1788,12 @@
email = "christoph.senjak@googlemail.com";
name = "Christoph-Simon Senjak";
};
david-sawatzke = {
email = "d-nix@sawatzke.dev";
github = "david-sawatzke";
githubId = 11035569;
name = "David Sawatzke";
};
david50407 = {
email = "me@davy.tw";
github = "david50407";

View file

@ -38,7 +38,12 @@ starting VDE switch for network 1
</para>
<para>
The machine state is kept across VM restarts in
<filename>/tmp/vm-state-</filename><varname>machinename</varname>.
You can re-use the VM state from a previous run
by passing the <command>--keep-vm-state</command> flag.
<screen>
<prompt>$ </prompt>./result/bin/nixos-run-vms --keep-vm-state
</screen>
The machine state is stored in the
<filename>$TMPDIR/vm-state-</filename><varname>machinename</varname> directory.
</para>
</section>

View file

@ -504,6 +504,16 @@ systemd.services.nginx.serviceConfig.ReadWritePaths = [ "/var/www" ];
In the <literal>resilio</literal> module, <xref linkend="opt-services.resilio.httpListenAddr"/> has been changed to listen to <literal>[::1]</literal> instead of <literal>0.0.0.0</literal>.
</para>
</listitem>
<listitem>
<para>
Radicale's default package has changed from 2.x to 3.x. An upgrade
checklist can be found
<link xlink:href="https://github.com/Kozea/Radicale/blob/3.0.x/NEWS.md#upgrade-checklist">here</link>.
You can use the newer version in the NixOS service by setting the
<literal>package</literal> to <literal>radicale3</literal>, which is done
automatically if <literal>stateVersion</literal> is 20.09 or higher.
</para>
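<para>
As a minimal sketch (assuming the <literal>services.radicale</literal> module), the newer package can be selected explicitly with:
<programlisting>
services.radicale.package = pkgs.radicale3;
</programlisting>
</para>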
</listitem>
</itemizedlist>
</section>
@ -665,6 +675,12 @@ systemd.services.nginx.serviceConfig.ReadWritePaths = [ "/var/www" ];
<package>nextcloud18</package> before upgrading to <package>nextcloud19</package>
since Nextcloud doesn't support upgrades across multiple major versions.
</para>
<para>
The <literal>nixos-run-vms</literal> script now deletes the
machine states of previous runs on test startup. You can pass the
<literal>--keep-vm-state</literal> flag to match the previous
behaviour and keep the VM state between different test runs.
</para>
</listitem>
</itemizedlist>
</section>

View file

@ -4,6 +4,7 @@ from queue import Queue, Empty
from typing import Tuple, Any, Callable, Dict, Iterator, Optional, List
from xml.sax.saxutils import XMLGenerator
import _thread
import argparse
import atexit
import base64
import codecs
@ -751,6 +752,11 @@ class Machine:
self.log("QEMU running (pid {})".format(self.pid))
def cleanup_statedir(self) -> None:
self.log("delete the VM state directory")
if os.path.isdir(self.state_dir):
shutil.rmtree(self.state_dir)
def shutdown(self) -> None:
if not self.booted:
return
@ -889,6 +895,15 @@ def subtest(name: str) -> Iterator[None]:
if __name__ == "__main__":
arg_parser = argparse.ArgumentParser()
arg_parser.add_argument(
"-K",
"--keep-vm-state",
help="re-use a VM state coming from a previous run",
action="store_true",
)
(cli_args, vm_scripts) = arg_parser.parse_known_args()
log = Logger()
vlan_nrs = list(dict.fromkeys(os.environ.get("VLANS", "").split()))
@ -896,8 +911,10 @@ if __name__ == "__main__":
for nr, vde_socket, _, _ in vde_sockets:
os.environ["QEMU_VDE_SOCKET_{}".format(nr)] = vde_socket
vm_scripts = sys.argv[1:]
machines = [create_machine({"startCommand": s}) for s in vm_scripts]
for machine in machines:
if not cli_args.keep_vm_state:
machine.cleanup_statedir()
machine_eval = [
"{0} = machines[{1}]".format(m.name, idx) for idx, m in enumerate(machines)
]
@ -911,7 +928,6 @@ if __name__ == "__main__":
continue
log.log("killing {} (pid {})".format(machine.name, machine.pid))
machine.process.kill()
for _, _, process, _ in vde_sockets:
process.terminate()
log.close()

View file

@ -25,8 +25,11 @@ let
in
{
options.services.undervolt = {
enable = mkEnableOption
"Intel CPU undervolting service (WARNING: may permanently damage your hardware!)";
enable = mkEnableOption ''
Undervolting service for Intel CPUs.
Warning: This service is not endorsed by Intel and may permanently damage your hardware. Use at your own risk!
'';
verbose = mkOption {
type = types.bool;

View file

@ -11,8 +11,6 @@ let
# build nsd with the options needed for the given config
nsdPkg = pkgs.nsd.override {
configFile = "${configFile}/nsd.conf";
bind8Stats = cfg.bind8Stats;
ipv6 = cfg.ipv6;
ratelimit = cfg.ratelimit.enable;
@ -897,7 +895,10 @@ in
+ "want, please enable 'services.nsd.rootServer'.";
};
environment.systemPackages = [ nsdPkg ];
environment = {
systemPackages = [ nsdPkg ];
etc."nsd/nsd.conf".source = "${configFile}/nsd.conf";
};
users.groups.${username}.gid = config.ids.gids.nsd;

View file

@ -8,8 +8,10 @@ let
confFile = pkgs.writeText "radicale.conf" cfg.config;
# This enables us to default to version 2 while still not breaking configurations of people with version 1
defaultPackage = if versionAtLeast config.system.stateVersion "17.09" then {
defaultPackage = if versionAtLeast config.system.stateVersion "20.09" then {
pkg = pkgs.radicale3;
text = "pkgs.radicale3";
} else if versionAtLeast config.system.stateVersion "17.09" then {
pkg = pkgs.radicale2;
text = "pkgs.radicale2";
} else {
@ -35,8 +37,9 @@ in
defaultText = defaultPackage.text;
description = ''
Radicale package to use. This defaults to version 1.x if
<literal>system.stateVersion &lt; 17.09</literal> and version 2.x
otherwise.
<literal>system.stateVersion &lt; 17.09</literal>, version 2.x if
<literal>17.09 &lt;= system.stateVersion &lt; 20.09</literal>, and
version 3.x otherwise.
'';
};

View file

@ -708,6 +708,7 @@ in
wantedBy = [ "multi-user.target" ];
wants = concatLists (map (hostOpts: [ "acme-${hostOpts.hostName}.service" "acme-selfsigned-${hostOpts.hostName}.service" ]) vhostsACME);
after = [ "network.target" "fs.target" ] ++ map (hostOpts: "acme-selfsigned-${hostOpts.hostName}.service") vhostsACME;
before = map (hostOpts: "acme-${hostOpts.hostName}.service") vhostsACME;
path = [ pkg pkgs.coreutils pkgs.gnugrep ];

View file

@ -693,6 +693,10 @@ in
wantedBy = [ "multi-user.target" ];
wants = concatLists (map (vhostConfig: ["acme-${vhostConfig.serverName}.service" "acme-selfsigned-${vhostConfig.serverName}.service"]) acmeEnabledVhosts);
after = [ "network.target" ] ++ map (vhostConfig: "acme-selfsigned-${vhostConfig.serverName}.service") acmeEnabledVhosts;
# Nginx needs to be started in order to be able to request certificates
# (it's hosting the acme challenge after all)
# This fixes https://github.com/NixOS/nixpkgs/issues/81842
before = map (vhostConfig: "acme-${vhostConfig.serverName}.service") acmeEnabledVhosts;
stopIfChanged = false;
preStart = ''
${cfg.preStart}

View file

@ -20,10 +20,10 @@ let
in valueType;
dynamicConfigFile = if cfg.dynamicConfigFile == null then
pkgs.runCommand "config.toml" {
buildInputs = [ pkgs.yj ];
buildInputs = [ pkgs.remarshal ];
preferLocalBuild = true;
} ''
yj -jt -i \
remarshal -if json -of toml \
< ${
pkgs.writeText "dynamic_config.json"
(builtins.toJSON cfg.dynamicConfigOptions)

View file

@ -48,10 +48,9 @@ in import ./make-test-python.nix ({ lib, ... }: {
security.acme.certs."standalone.test" = {
webroot = "/var/lib/acme/acme-challenges";
};
systemd.targets."acme-finished-standalone.test" = {};
systemd.services."acme-standalone.test" = {
wants = [ "acme-finished-standalone.test.target" ];
before = [ "acme-finished-standalone.test.target" ];
systemd.targets."acme-finished-standalone.test" = {
after = [ "acme-standalone.test.service" ];
wantedBy = [ "acme-standalone.test.service" ];
};
services.nginx.enable = true;
services.nginx.virtualHosts."standalone.test" = {
@ -68,11 +67,9 @@ in import ./make-test-python.nix ({ lib, ... }: {
# A target remains active. Use this to probe the fact that
# a service fired even though it is not RemainAfterExit
systemd.targets."acme-finished-a.example.test" = {};
systemd.services."acme-a.example.test" = {
wants = [ "acme-finished-a.example.test.target" ];
before = [ "acme-finished-a.example.test.target" ];
after = [ "nginx.service" ];
systemd.targets."acme-finished-a.example.test" = {
after = [ "acme-a.example.test.service" ];
wantedBy = [ "acme-a.example.test.service" ];
};
services.nginx.enable = true;
@ -89,11 +86,9 @@ in import ./make-test-python.nix ({ lib, ... }: {
security.acme.server = "https://acme.test/dir";
specialisation.second-cert.configuration = {pkgs, ...}: {
systemd.targets."acme-finished-b.example.test" = {};
systemd.services."acme-b.example.test" = {
wants = [ "acme-finished-b.example.test.target" ];
before = [ "acme-finished-b.example.test.target" ];
after = [ "nginx.service" ];
systemd.targets."acme-finished-b.example.test" = {
after = [ "acme-b.example.test.service" ];
wantedBy = [ "acme-b.example.test.service" ];
};
services.nginx.virtualHosts."b.example.test" = {
enableACME = true;
@ -104,6 +99,7 @@ in import ./make-test-python.nix ({ lib, ... }: {
'';
};
};
specialisation.dns-01.configuration = {pkgs, config, nodes, lib, ...}: {
security.acme.certs."example.test" = {
domain = "*.example.test";
@ -115,10 +111,12 @@ in import ./make-test-python.nix ({ lib, ... }: {
user = config.services.nginx.user;
group = config.services.nginx.group;
};
systemd.targets."acme-finished-example.test" = {};
systemd.targets."acme-finished-example.test" = {
after = [ "acme-example.test.service" ];
wantedBy = [ "acme-example.test.service" ];
};
systemd.services."acme-example.test" = {
wants = [ "acme-finished-example.test.target" ];
before = [ "acme-finished-example.test.target" "nginx.service" ];
before = [ "nginx.service" ];
wantedBy = [ "nginx.service" ];
};
services.nginx.virtualHosts."c.example.test" = {
@ -132,6 +130,26 @@ in import ./make-test-python.nix ({ lib, ... }: {
'';
};
};
# When nginx depends on a service that is slow to start up, requesting
# certificates used to fail. Reproducer for https://github.com/NixOS/nixpkgs/issues/81842
specialisation.slow-startup.configuration = { pkgs, config, nodes, lib, ...}: {
systemd.services.my-slow-service = {
wantedBy = [ "multi-user.target" "nginx.service" ];
before = [ "nginx.service" ];
preStart = "sleep 5";
script = "${pkgs.python3}/bin/python -m http.server";
};
systemd.targets."acme-finished-d.example.com" = {
after = [ "acme-d.example.com.service" ];
wantedBy = [ "acme-d.example.com.service" ];
};
services.nginx.virtualHosts."d.example.com" = {
forceSSL = true;
enableACME = true;
locations."/".proxyPass = "http://localhost:8000";
};
};
};
client = {nodes, lib, ...}: {
@ -207,5 +225,15 @@ in import ./make-test-python.nix ({ lib, ... }: {
client.succeed(
"curl --cacert /tmp/ca.crt https://c.example.test/ | grep -qF 'hello world'"
)
with subtest("Can request certificate of nginx when startup is delayed"):
webserver.succeed(
"${switchToNewServer}"
)
webserver.succeed(
"/run/current-system/specialisation/slow-startup/bin/switch-to-configuration test"
)
webserver.wait_for_unit("acme-finished-d.example.com.target")
client.succeed("curl --cacert /tmp/ca.crt https://d.example.com/")
'';
})

View file

@ -43,7 +43,7 @@ import ./make-test-python.nix ({ pkgs, ...} : {
docker.fail("sudo -u noprivs docker ps")
docker.succeed("docker stop sleeping")
# Must match version twice to ensure client and server versions are correct
docker.succeed('[ $(docker version | grep ${pkgs.docker.version} | wc -l) = "2" ]')
# Must match version 4 times to ensure client and server git commits and versions are correct
docker.succeed('[ $(docker version | grep ${pkgs.docker.version} | wc -l) = "4" ]')
'';
})

View file

@ -14,9 +14,6 @@ let
[storage]
filesystem_folder = /tmp/collections
[logging]
debug = True
'';
};
# WARNING: DON'T DO THIS IN PRODUCTION!
@ -49,13 +46,18 @@ in
services.radicale.extraArgs = [
"--export-storage" "/tmp/collections-new"
];
system.stateVersion = "17.03";
};
radicale2_verify = lib.recursiveUpdate radicale2 {
services.radicale.extraArgs = [ "--verify-storage" ];
services.radicale.extraArgs = [ "--debug" "--verify-storage" ];
system.stateVersion = "17.09";
};
radicale2 = lib.recursiveUpdate (common args) {
system.stateVersion = "17.09";
};
radicale3 = lib.recursiveUpdate (common args) {
system.stateVersion = "20.09";
};
};
# This tests whether the web interface is accessible to an authenticated user
@ -117,6 +119,22 @@ in
retcode == 0 and "VCALENDAR" in output
), "Could not read calendar from Radicale 2"
radicale.succeed("curl --fail http://${user}:${password}@localhost:${port}/.web/")
radicale.succeed("curl --fail http://${user}:${password}@localhost:${port}/.web/")
with subtest("Check Radicale 3 functionality"):
radicale.succeed(
"${switchToConfig "radicale3"} >&2"
)
radicale.wait_for_unit("radicale.service")
radicale.wait_for_open_port(${port})
(retcode, output) = radicale.execute(
"curl --fail http://${user}:${password}@localhost:${port}/someuser/calendar.ics/"
)
assert (
retcode == 0 and "VCALENDAR" in output
), "Could not read calendar from Radicale 3"
radicale.succeed("curl --fail http://${user}:${password}@localhost:${port}/.web/")
'';
})

View file

@ -3,7 +3,7 @@
stdenv.mkDerivation rec {
pname = "cava";
version = "0.6.1";
version = "0.7.1";
buildInputs = [
alsaLib
@ -16,7 +16,7 @@ stdenv.mkDerivation rec {
owner = "karlstav";
repo = "cava";
rev = version;
sha256 = "1kvhqgijs29909w3sq9m0bslx2zxxn4b3i07kdz4hb0dqkppxpjy";
sha256 = "0p2g3xxl2n425bghs1qnff30jaj9cba94j2gbhgxmwaxhz26vbk7";
};
nativeBuildInputs = [ autoreconfHook ];

View file

@ -0,0 +1,50 @@
{ stdenv
, fetchurl
, alsaLib
, audiofile
, autoconf
, automake
, gnome2
, gtk2
, libjack2
, libtool
, libxml2
, pkg-config
}:
stdenv.mkDerivation rec {
pname = "industrializer";
version = "0.2.6";
src = fetchurl {
url = "mirror://sourceforge/project/${pname}/ps${pname}-${version}.tar.bz2";
sha256 = "0vls94hqpkk8h17da6fddgqbl5dgm6250av3raimhhzwvm5r1gfi";
};
nativeBuildInputs = [ pkg-config ];
buildInputs = [
alsaLib
audiofile
autoconf
automake
gnome2.gtkglext
gtk2
libjack2
libtool
libxml2
];
preConfigure = "./autogen.sh";
meta = {
description = "This program generates synthesized percussion sounds using physical modelling";
longDescription = ''
The range of sounds possible include but is not limited to cymbal sounds, metallic noises, bubbly sounds, and chimes.
After a sound is rendered, it can be played and then saved to a .WAV file.
'';
homepage = "https://sourceforge.net/projects/industrializer/";
license = stdenv.lib.licenses.gpl2Plus;
maintainers = [ stdenv.lib.maintainers.magnetophon ];
platforms = stdenv.lib.platforms.linux;
};
}

View file

@ -1,34 +1,24 @@
{ stdenv, lib, go, buildGoPackage, dep, fetchgit, git, cacert }:
{ lib, buildGoModule, fetchFromGitHub }:
buildGoPackage rec {
buildGoModule rec {
pname = "dcrd";
version = "1.1.2";
rev = "refs/tags/v${version}";
goPackagePath = "github.com/decred/dcrd";
version = "1.5.1";
buildInputs = [ go git dep cacert ];
GIT_SSL_CAINFO = "${cacert}/etc/ssl/certs/ca-bundle.crt";
NIX_SSL_CERT_FILE = "${cacert}/etc/ssl/certs/ca-bundle.crt";
src = fetchgit {
inherit rev;
url = "https://${goPackagePath}";
sha256 = "0xcynipdn9zmmralxj0hjrwyanvhkwfj2b1vvjk5zfc95s2xc1q9";
src = fetchFromGitHub {
owner = "decred";
repo = "dcrd";
rev = "refs/tags/release-v${version}";
sha256 = "1ggw289y1f4dqvj3w60q9bahq8bblbfjymn5xy04ldylr3qlxm9x";
};
preBuild = ''
export CWD=$(pwd)
cd go/src/github.com/decred/dcrd
dep ensure
go install . ./cmd/...
cd $CWD
'';
vendorSha256 = "03aw6mcvp1vr01ppxy673jf5hdryd5032cxndlkaiwg005mxp1dy";
subPackages = [ "." "cmd/dcrctl" "cmd/promptsecret" ];
meta = {
homepage = "https://decred.org";
description = "Decred daemon in Go (golang)";
license = with lib.licenses; [ isc ];
broken = stdenv.isLinux; # 2018-04-10
maintainers = with lib.maintainers; [ juaningan ];
};
}

View file

@ -1,43 +1,24 @@
{ stdenv, lib, go, buildGoPackage, dep, fetchgit, git, cacert }:
{ lib, buildGoModule, fetchFromGitHub }:
buildGoPackage rec {
buildGoModule rec {
pname = "dcrwallet";
version = "1.1.2";
rev = "refs/tags/v${version}";
goPackagePath = "github.com/decred/dcrwallet";
version = "1.5.1";
buildInputs = [ go git dep cacert ];
GIT_SSL_CAINFO = "${cacert}/etc/ssl/certs/ca-bundle.crt";
NIX_SSL_CERT_FILE = "${cacert}/etc/ssl/certs/ca-bundle.crt";
src = fetchgit {
inherit rev;
url = "https://${goPackagePath}";
sha256 = "058im4vmcmxcl5ir14h17wik5lagp2ay0p8qc3r99qmpfwvvz39x";
src = fetchFromGitHub {
owner = "decred";
repo = "dcrwallet";
rev = "refs/tags/v${version}";
sha256 = "0ij2mwvdxg78p9qbdf9wm7aaphfg4j8lqgrjyjsj3kyi1l458ds9";
};
preBuild = ''
export CWD=$(pwd)
cd go/src/github.com/decred/dcrwallet
dep ensure
'';
buildPhase = ''
runHook preBuild
go build
'';
installPhase = ''
mkdir -pv $out/bin
cp -v dcrwallet $out/bin
'';
vendorSha256 = "0qrrr92cad399xwr64qa9h41wqqaj0dy5mw248g5v53zars541w7";
subPackages = [ "." ];
meta = {
homepage = "https://decred.org";
description = "Decred daemon in Go (golang)";
description = "A secure Decred wallet daemon written in Go (golang)";
license = with lib.licenses; [ isc ];
broken = stdenv.isLinux; # 2018-04-10
maintainers = with lib.maintainers; [ juaningan ];
};
}

View file

@ -8,13 +8,13 @@
stdenv.mkDerivation rec {
pname = "lite";
version = "1.05";
version = "1.06";
src = fetchFromGitHub {
owner = "rxi";
repo = pname;
rev = "v${version}";
sha256 = "0xh29ddl5h92n1c4nlh4kx9mchwkzn4r3l16kklix3a1jm1c878a";
sha256 = "1lw4a6xv8pdlgwnhh870caij4iyzxdyjw4qmm4fswja9mbqkj32f";
};
nativeBuildInputs = [ makeWrapper pkg-config ];

View file

@ -11,8 +11,8 @@ let
archive_fmt = if system == "x86_64-darwin" then "zip" else "tar.gz";
sha256 = {
x86_64-linux = "0hmmqdamsjhjy1q8m85bs081cwmskpsp57rkj7vc2wj918wgissm";
x86_64-darwin = "00xwvi53h9rnwyba12jmsp6grkymmn6vjibypaxb96q7q7p894gh";
x86_64-linux = "1c8gi2001p2f0zc165cqwwf9f8ls34fgg040qn9l08za7djb9hyv";
x86_64-darwin = "06n17s3qa2jkmg5qx3zvshz6rvdx33dhxn65j0x5mi62dv93gjgg";
}.${system};
in
callPackage ./generic.nix rec {
@ -21,7 +21,7 @@ in
# Please backport all compatible updates to the stable release.
# This is important for the extension ecosystem.
version = "1.46.0";
version = "1.46.1";
pname = "vscode";
executableName = "code" + lib.optionalString isInsiders "-insiders";

View file

@ -11,8 +11,8 @@ let
archive_fmt = if system == "x86_64-darwin" then "zip" else "tar.gz";
sha256 = {
x86_64-linux = "088nsflscak315704vqnh8m4q7601fczglbhdz5i70kfyg89ar4w";
x86_64-darwin = "0fxpx1ydsag4gyn2kq5ddq55lpw15w176p3fypk80fyfix4kziqf";
x86_64-linux = "0l9sg2d0657k9dacy7k2jn8z07z50v8d1czgy4bkzbrj9vz7c6a7";
x86_64-darwin = "0mnq0ykahwfrgs28rdy1jl366qyclipsr3apnmhym1xsylk0mpfx";
}.${system};
sourceRoot = {
@ -27,7 +27,7 @@ in
# Please backport all compatible updates to the stable release.
# This is important for the extension ecosystem.
version = "1.46.0";
version = "1.46.1";
pname = "vscodium";
executableName = "codium";

View file

@ -1,7 +1,7 @@
{ stdenv
{ lib
, mkDerivation
, fetchFromGitHub
, cmake
, wrapQtAppsHook
, dxflib
, eigen
, flann
@ -16,7 +16,7 @@
, xercesc
}:
stdenv.mkDerivation rec {
mkDerivation rec {
pname = "cloudcompare";
version = "2.11.0";
@ -36,7 +36,6 @@ stdenv.mkDerivation rec {
nativeBuildInputs = [
cmake
eigen # header-only
wrapQtAppsHook
];
buildInputs = [
@ -73,7 +72,7 @@ stdenv.mkDerivation rec {
"-DPLUGIN_IO_QRDB=OFF" # Riegl rdblib is proprietary; not packaged in nixpkgs
];
meta = with stdenv.lib; {
meta = with lib; {
description = "3D point cloud and mesh processing software";
homepage = "https://cloudcompare.org";
license = licenses.gpl2Plus;

View file

@ -0,0 +1,25 @@
{ stdenv, fetchFromGitHub, libnotify, pkg-config, glib }:
stdenv.mkDerivation rec {
pname = "batsignal";
version = "1.0.0";
src = fetchFromGitHub {
owner = "electrickite";
repo = "batsignal";
rev = "${version}";
sha256 = "wy7YhgKfz07u0bp7rWpze+KmSdooOkmU7giaBX3wWkY=";
};
buildInputs = [ libnotify glib ];
nativeBuildInputs = [ pkg-config ];
installFlags = [ "PREFIX=${placeholder "out"}" ];
meta = with stdenv.lib; {
homepage = "https://github.com/electrickite/batsignal";
description = "Lightweight battery daemon written in C";
license = licenses.isc;
maintainers = with maintainers; [ SlothOfAnarchy ];
platforms = platforms.linux;
};
}

View file

@ -3,13 +3,13 @@
buildGoModule rec {
pname = "cheat";
version = "3.10.1";
version = "4.0.0";
src = fetchFromGitHub {
owner = "cheat";
repo = "cheat";
rev = version;
sha256 = "0indvycj972wng979mvia8mrb7bwdfay7wxq3lnj05qyxqafj5h2";
sha256 = "0j9w2rm8imb15njj7334xl6w0fgjvfqnrfvdq4zfsrwzl67ds86l";
};
subPackages = [ "cmd/cheat" ];

View file

@ -0,0 +1,42 @@
{ lib
, mkDerivation
, fetchFromGitHub
, cmake
, pkg-config
, qtbase
, qttools
, ddcutil
}:
mkDerivation rec {
pname = "ddcui";
version = "0.1.1";
src = fetchFromGitHub {
owner = "rockowitz";
repo = "ddcui";
rev = "v${version}";
sha256 = "02qr7i3pdq8p6lnhwihfgd9b7y9zwpdk6gwv626rz32ai6mfrfhl";
};
nativeBuildInputs = [
# Using cmake instead of the also-supported qmake because ddcui's qmake
# file is not currently written to support PREFIX installations.
cmake
pkg-config
];
buildInputs = [
qtbase
qttools
ddcutil
];
meta = with lib; {
description = "Graphical user interface for ddcutil - control monitor settings";
homepage = "https://www.ddcutil.com/ddcui_main/";
license = licenses.gpl2;
maintainers = with maintainers; [ nh2 ];
platforms = with platforms; linux;
};
}

View file

@ -11,13 +11,13 @@
perlPackages.buildPerlPackage rec {
pname = "fbmenugen";
version = "2020-05-20";
version = "0.85";
src = fetchFromGitHub {
owner = "trizen";
repo = pname;
rev = "ed9a680546edbb5b05086971b6a9f42a37cb485f";
sha256 = "1fikdl08a0s8d6k1ls1pzmw2rcwkfbbczsjfx6lr12ngd2bz222h";
rev = version;
sha256 = "1pmms3wzkm8h41a8zrkpn6gq9m9yy5wr5rrzmb84lbacprqq6q7q";
};
patches = [

View file

@ -0,0 +1,33 @@
{ mkDerivation
, extra-cmake-modules
, fetchFromGitHub
, kdoctools
, kiconthemes
, kio
, kjobwidgets
, kxmlgui
, stdenv
}:
mkDerivation rec {
pname = "k4dirstat";
version = "3.2.1";
src = fetchFromGitHub {
owner = "jeromerobert";
repo = pname;
rev = version;
sha256 = "15xjb80jq6vhzvzx4l341f40d8a23w1334qh6cczqm9adfnzycp7";
};
nativeBuildInputs = [ extra-cmake-modules ];
buildInputs = [ kiconthemes kio kjobwidgets kxmlgui ];
meta = with stdenv.lib; {
homepage = "https://github.com/jeromerobert/k4dirstat";
description = "A small utility program that sums up disk usage for directory trees";
license = licenses.gpl2;
maintainers = [ maintainers.raboof ];
platforms = platforms.linux;
};
}

View file

@ -2,13 +2,13 @@
stdenv.mkDerivation rec {
pname = "kanboard";
version = "1.2.14";
version = "1.2.15";
src = fetchFromGitHub {
owner = "kanboard";
repo = "kanboard";
rev = "v${version}";
sha256 = "11bwajzidnyagdyip7i8rwni1f66acv0k4lybdm0mc4195anivjh";
sha256 = "0lib2qlc8a59i9dak0g1j5hymwbq9vhflp5srhcjislxypfvrizs";
};
dontBuild = true;

View file

@ -0,0 +1,32 @@
{ appimageTools, fetchurl, lib, gsettings-desktop-schemas, gtk3}:
let
pname = "obsidian";
version = "0.7.3";
in
appimageTools.wrapType2 rec {
name = "${pname}-${version}";
src = fetchurl {
url = "https://github.com/obsidianmd/obsidian-releases/releases/download/v${version}/Obsidian-${version}.AppImage";
sha256 = "1qiag5szagalik72j8s2dmp7075g48jxgcdy0wgd02kfv90ai0y6";
};
profile = ''
export LC_ALL=C.UTF-8
export XDG_DATA_DIRS=${gsettings-desktop-schemas}/share/gsettings-schemas/${gsettings-desktop-schemas.name}:${gtk3}/share/gsettings-schemas/${gtk3.name}:$XDG_DATA_DIRS
'';
# Strip version from binary name.
extraInstallCommands = "mv $out/bin/{${name},${pname}}";
meta = with lib; {
description = "Obsidian is a powerful knowledge base that works on top of a local folder of plain text Markdown files.";
homepage = "https://obsidian.md";
license = licenses.obsidian;
maintainers = with maintainers; [ conradmearns ];
platforms = [ "x86_64-linux" ];
};
}

View file

@ -1,18 +1,47 @@
{ stdenv, fetchFromGitHub, cmake, libosmium, protozero, boost, bzip2, zlib, expat }:
{ stdenv
, fetchFromGitHub
, cmake
, installShellFiles
, pandoc
, boost
, bzip2
, expat
, libosmium
, protozero
, zlib
}:
stdenv.mkDerivation rec {
pname = "osmium-tool";
version = "1.12.0";
version = "1.12.1";
src = fetchFromGitHub {
owner = "osmcode";
repo = "osmium-tool";
rev = "v${version}";
sha256 = "18afn5qzdjpip176kk5pr04mj0p7dv70dbz1n36qmqnq3gyms10q";
sha256 = "13142hj8gfgj6w51a62hjzfmzic90xgrnnlnb70hpdqjy86bxv7j";
};
nativeBuildInputs = [ cmake ];
buildInputs = [ libosmium protozero boost bzip2 zlib expat ];
nativeBuildInputs = [
cmake
installShellFiles
pandoc
];
buildInputs = [
boost
bzip2
expat
libosmium
protozero
zlib
];
doCheck = true;
postInstall = ''
installShellCompletion --zsh ../zsh_completion/_osmium
'';
meta = with stdenv.lib; {
description = "Multipurpose command line tool for working with OpenStreetMap data based on the Osmium library";

View file

@ -8,7 +8,7 @@ stdenv.mkDerivation {
url = "ftp://ftp.foolabs.com/pub/xpdf/xpdf-3.02.tar.gz";
sha256 = "000zq4ddbwyxiki4vdwpmxbnw5n9hsg9hvwra2p33hslyib7sfmk";
};
patches = [
(fetchurl {
url = "ftp://ftp.foolabs.com/pub/xpdf/xpdf-3.02pl1.patch";
@ -39,14 +39,15 @@ stdenv.mkDerivation {
cp -v goo/libGoo.a $out/lib/goo
cp -v fofi/libfofi.a $out/lib/fofi
cp -v xpdf/libxpdf.a $out/lib/xpdf
cp -v *.h $out/include
cp -v goo/*.h $out/include
cp -v fofi/*.h $out/include
cp -v xpdf/*.h $out/include
'';
meta = {
platforms = stdenv.lib.platforms.unix;
meta = with stdenv.lib; {
platforms = platforms.unix;
license = licenses.gpl2;
};
}

View file

@ -1,4 +1,4 @@
{ stdenv, fetchurl, ncurses, xlibsWrapper, bzip2, zlib, openssl
{ stdenv, fetchurl, fetchpatch, ncurses, xlibsWrapper, bzip2, zlib, openssl
, gpm
, # Incompatible licenses, LGPLv3 - GPLv2
enableGuile ? false, guile ? null
@ -10,11 +10,12 @@
assert enableGuile -> guile != null;
assert enablePython -> python != null;
stdenv.mkDerivation {
name = "elinks-0.12pre6";
stdenv.mkDerivation rec {
pname = "elinks";
version = "0.12pre6";
src = fetchurl {
url = "http://elinks.or.cz/download/elinks-0.12pre6.tar.bz2";
url = "http://elinks.or.cz/download/${pname}-${version}.tar.bz2";
sha256 = "1nnakbi01g7yd3zqwprchh5yp45br8086b0kbbpmnclabcvlcdiq";
};
@ -23,7 +24,15 @@ stdenv.mkDerivation {
./openssl-1.1.patch
];
buildInputs = [ ncurses xlibsWrapper bzip2 zlib openssl spidermonkey gpm ]
postPatch = (stdenv.lib.optional stdenv.isDarwin) ''
patch -p0 < ${fetchpatch {
url = "https://raw.githubusercontent.com/macports/macports-ports/72bed7749e76b9092ddd8d9fe2d8449c5afb1d71/www/elinks/files/patch-perl.diff";
sha256 = "14q9hk3kg2n2r5b062hvrladp7b4yzysvhq07903w9kpg4zdbyqh";
}}
'';
buildInputs = [ ncurses xlibsWrapper bzip2 zlib openssl spidermonkey ]
++ stdenv.lib.optional stdenv.isLinux gpm
++ stdenv.lib.optional enableGuile guile
++ stdenv.lib.optional enablePython python
++ stdenv.lib.optional enablePerl perl
@ -44,10 +53,10 @@ stdenv.mkDerivation {
++ stdenv.lib.optional enableSpidermonkey "--with-spidermonkey=${spidermonkey}"
;
meta = {
meta = with stdenv.lib; {
description = "Full-featured text-mode web browser";
homepage = "http://elinks.or.cz";
license = stdenv.lib.licenses.gpl2;
platforms = stdenv.lib.platforms.linux;
license = licenses.gpl2;
platforms = with platforms; linux ++ darwin;
};
}

View file

@ -25,6 +25,7 @@ mkDerivation rec {
"Graphical small-internet client, supports gemini, http, https, gopher, finger";
homepage = "https://random-projects.net/projects/kristall.gemini";
maintainers = with maintainers; [ ehmry ];
license = licenses.gpl3;
inherit (qtmultimedia.meta) platforms;
};
}

View file

@ -34,6 +34,7 @@ stdenv.mkDerivation rec {
homepage = "http://links.twibright.com/";
description = "A small browser with some graphics support";
maintainers = with maintainers; [ raskin ];
license = licenses.gpl2Plus;
platforms = platforms.unix;
};
}

View file

@ -1,28 +1,23 @@
{ lib, buildGoModule, fetchFromGitHub, makeWrapper, kubernetes-helm, ... }:
{ lib, buildGoModule, fetchFromGitHub, makeWrapper, kubernetes-helm }:
let version = "0.118.6"; in
buildGoModule {
buildGoModule rec {
pname = "helmfile";
inherit version;
version = "0.119.1";
src = fetchFromGitHub {
owner = "roboll";
repo = "helmfile";
rev = "v${version}";
sha256 = "0zbvz8kn52c1q4yn8n9z4rrf761h495fhjw72x9q1nh44hr7npwd";
sha256 = "1j9b0xw59w5ailwa7dqgbsdigviw8ng5r4jbsk9b80izcig805zz";
};
goPackagePath = "github.com/roboll/helmfile";
vendorSha256 = "0xj14f0yx7x9ziijd1yka1n6kbmmhbibsk3ppp8cn1pqrwgqk7pr";
vendorSha256 = "11bw10s5wifzw2cy1100hyjv4xv7an7b05lcw6sphwyy56gsp2fy";
nativeBuildInputs = [ makeWrapper ];
buildFlagsArray = ''
-ldflags=
-X main.Version=${version}
'';
subPackages = [ "." ];
buildFlagsArray = [ "-ldflags=-s -w -X github.com/roboll/helmfile/pkg/app/version.Version=${version}" ];
postInstall = ''
wrapProgram $out/bin/helmfile \
@ -36,4 +31,4 @@ buildGoModule {
maintainers = with lib.maintainers; [ pneumaticat yurrriq ];
platforms = lib.platforms.unix;
};
}
}

View file

@ -15,13 +15,13 @@ with lib;
stdenv.mkDerivation rec {
pname = "kubernetes";
version = "1.18.4";
version = "1.18.5";
src = fetchFromGitHub {
owner = "kubernetes";
repo = "kubernetes";
rev = "v${version}";
sha256 = "11hic59ffypyky7g57p712y70nwk4qp9bx5vm0j0077c1i1i3756";
sha256 = "0pl7dzq5giwkzfyg4pzm8y76fnd62gaa6d4hswvgn87czab37lr6";
};
nativeBuildInputs = [ removeReferencesTo makeWrapper which go rsync go-bindata ];

View file

@ -1,18 +1,18 @@
{ stdenv, rustPlatform, fetchFromGitHub, stfl, sqlite, curl, gettext, pkgconfig, libxml2, json_c, ncurses
{ stdenv, rustPlatform, fetchFromGitHub, stfl, sqlite, curl, gettext, pkg-config, libxml2, json_c, ncurses
, asciidoctor, libiconv, Security, makeWrapper }:
rustPlatform.buildRustPackage rec {
pname = "newsboat";
version = "2.19";
version = "2.20.1";
src = fetchFromGitHub {
owner = "newsboat";
repo = "newsboat";
rev = "r${version}";
sha256 = "0yyrq8a90l6pkrczm9qvdg75jhsdq0niwp79vrdpm8rsxqpdmfq7";
sha256 = "1i9dpkdlsm3ya0w2x4c8kplrp3qzd8slbkcqvzfpqggb67gvczvv";
};
cargoSha256 = "1q3jf3d80c0ik38qk8jgbhfz5jxv0cy3lzmkyh2l002azp9hvv59";
cargoSha256 = "1ykffx2lhn4w56qm1wypkg9wsqpvzzrz419qkl95w1384xf3f7ix";
postPatch = ''
substituteInPlace Makefile --replace "|| true" ""
@ -23,7 +23,7 @@ rustPlatform.buildRustPackage rec {
'';
nativeBuildInputs = [
pkgconfig
pkg-config
asciidoctor
gettext
] ++ stdenv.lib.optionals stdenv.isDarwin [ makeWrapper ncurses ];

View file

@ -24,13 +24,13 @@
mkDerivation rec {
pname = "nheko";
version = "0.7.1";
version = "0.7.2";
src = fetchFromGitHub {
owner = "Nheko-Reborn";
repo = "nheko";
rev = "v${version}";
sha256 = "12sxibbrn79sxkf9jrm7jrlj7l5vz15claxrrll7pkv9mv44wady";
sha256 = "1cbhgaf9klgxdirrxj571fqwspm0byl75c1xc40l727a6qswvp7s";
};
nativeBuildInputs = [
@ -38,20 +38,6 @@ mkDerivation rec {
cmake
pkgconfig
];
cmakeFlags = [
# Can be removed once either https://github.com/NixOS/nixpkgs/pull/85254 or
# https://github.com/NixOS/nixpkgs/pull/73940 are merged
"-DBoost_NO_BOOST_CMAKE=TRUE"
];
# commit missing from latest release and recommended by upstream:
# https://github.com/NixOS/nixpkgs/pull/85922#issuecomment-619263903
patches = [
(fetchpatch {
name = "room-ids-escape-patch";
url = "https://github.com/Nheko-Reborn/nheko/commit/d94ac86816f9f325cba11f71344a3ca99591130d.patch";
sha256 = "1p0kj4a60l3jf0rfakc88adld7ccg3vfjhzia5rf2i03h35cxw8c";
})
];
buildInputs = [
nlohmann_json

View file

@ -336,5 +336,6 @@ stdenv.mkDerivation rec {
pierron
];
platforms = platforms.linux;
license = licenses.mpl20;
};
}

View file

@ -41,11 +41,11 @@ in
stdenv.mkDerivation rec {
pname = "mullvad-vpn";
version = "2020.4";
version = "2020.5";
src = fetchurl {
url = "https://www.mullvad.net/media/app/MullvadVPN-${version}_amd64.deb";
sha256 = "17xi8g2k89vi4d0j7pr33bx9zjapa2qh4pymbrqvxwli3yhq6zwr";
sha256 = "131z6qlpjwxcn5a62f1f147f2z1xg185jmr0vbin8h0dwa1182vn";
};
nativeBuildInputs = [

View file

@ -17,13 +17,13 @@ let
in stdenv.mkDerivation rec {
pname = "freerdp";
version = "2.1.1";
version = "2.1.2";
src = fetchFromGitHub {
owner = "FreeRDP";
repo = "FreeRDP";
rev = version;
sha256 = "1avfsx837rjfk50qv3xjccx2309phn04rq6imadqvqpfnyx4ma66";
sha256 = "1yvi7zd0ic0rv7njd0wi9q1mfvz4d9qrx3i45dd6hcq465wg8dp7";
};
postPatch = ''

View file

@ -38,6 +38,10 @@ stdenv.mkDerivation {
})
];
postPatch = ''
substituteInPlace Makefile.in --replace js_of_ocaml.ppx js_of_ocaml-ppx
'';
configureFlags = [ "--enable-verbose-make" ];
installTargets = [ "install" "install-lib" ];

View file

@ -25,6 +25,7 @@ stdenv.mkDerivation rec {
description = "The ultimate desktop calculator";
homepage = "http://qalculate.github.io";
maintainers = with maintainers; [ gebner ];
license = licenses.gpl2Plus;
platforms = platforms.all;
};
}

View file

@ -86,5 +86,6 @@ stdenv.mkDerivation rec {
description = "A data analysis framework";
platforms = platforms.unix;
maintainers = with maintainers; [ veprbl ];
license = licenses.lgpl21;
};
}

View file

@ -1,11 +1,8 @@
{ stdenv, fetchFromGitHub, python2, git }:
{ stdenv, python3, python3Packages, fetchFromGitHub, git }:
let
name = "stgit-${version}";
python3Packages.buildPythonApplication rec {
pname = "stgit";
version = "0.22";
in
stdenv.mkDerivation {
inherit name;
src = fetchFromGitHub {
owner = "ctmarinas";
@ -14,7 +11,7 @@ stdenv.mkDerivation {
sha256 = "0xpvs5fa50rrvl2c8naha1nblk5ip2mgg63a9srqqxfx6z8qmrfz";
};
buildInputs = [ python2 git ];
nativeBuildInputs = [ git ];
makeFlags = [ "prefix=$$out" ];

View file

@ -184,9 +184,9 @@ rec {
# Get revisions from
# https://github.com/docker/docker-ce/tree/${version}/components/engine/hack/dockerfile/install/*
docker_18_09 = makeOverridable dockerGen {
docker_18_09 = makeOverridable dockerGen rec {
version = "18.09.9";
rev = "039a7df9ba8097dd987370782fcdd6ea79b26016";
rev = "v${version}";
sha256 = "0wqhjx9qs96q2jd091wffn3cyv2aslqn2cvpdpgljk8yr9s0yg7h";
runcRev = "3e425f80a8c931f88e6d94a8c831b9d5aa481657";
runcSha256 = "18psc830b2rkwml1x6vxngam5b5wi3pj14mw817rshpzy87prspj";
@ -196,10 +196,10 @@ rec {
tiniSha256 = "1h20i3wwlbd8x4jr2gz68hgklh0lb0jj7y5xk1wvr8y58fip1rdn";
};
docker_19_03 = makeOverridable dockerGen {
version = "19.03.11";
rev = "42e35e61f352e527082521280d5ea3761f0dee50";
sha256 = "0q4l4z2bg269hj1dqaq02f84zgp3p8idw1cxczw383cjl0ra8xgq";
docker_19_03 = makeOverridable dockerGen rec {
version = "19.03.12";
rev = "v${version}";
sha256 = "0i5xr8q3yjrz5zsjcq63v4g1mzqpingjr1hbf9amk14484i2wkw7";
runcRev = "dc9208a3303feef5b3839f4323d9beb36df0a9dd"; # v1.0.0-rc10
runcSha256 = "0pi3rvj585997m4z9ljkxz2z9yxf9p2jr0pmqbqrc7bc95f5hagk";
containerdRev = "7ad184331fa3e55e52b890ea95e65ba581ae3429"; # v1.2.13

View file

@ -44,11 +44,14 @@ buildGoModule rec {
buildPhase = ''
patchShebangs .
${if stdenv.isDarwin
then "make CGO_ENABLED=0 BUILDTAGS='remoteclient containers_image_openpgp exclude_graphdriver_devicemapper' varlink_generate all"
else "make podman docs"}
then "make podman-remote"
else "make podman"}
make docs
'';
installPhase = ''
installPhase = stdenv.lib.optionalString stdenv.isDarwin ''
mv bin/{podman-remote,podman}
'' + ''
install -Dm555 bin/podman $out/bin/podman
installShellCompletion --bash completions/bash/podman
installShellCompletion --zsh completions/zsh/_podman
@ -63,6 +66,5 @@ buildGoModule rec {
license = licenses.asl20;
maintainers = with maintainers; [ marsam ] ++ teams.podman.members;
platforms = platforms.unix;
broken = stdenv.isDarwin;
};
}

View file

@ -7,13 +7,13 @@
stdenv.mkDerivation rec {
pname = "notion";
version = "4.0.0";
version = "4.0.1";
src = fetchFromGitHub {
owner = "raboof";
repo = pname;
rev = version;
sha256 = "0rqfvwkj0j862hf6i4wsmb6185xibsskfj9kwy896qcpcg8w4kk7";
sha256 = "1s0fyacygvc9iz7b9v3b2cmzzqc02nh4g1p9bfcxbg254iscd502";
};
nativeBuildInputs = [ pkgconfig makeWrapper groff ];

View file

@ -11,6 +11,7 @@
jq,
jshon,
lib,
makeWrapper,
moreutils,
nix,
pigz,
@ -29,6 +30,7 @@
writeReferencesToFile,
writeScript,
writeText,
writePython3,
}:
# WARNING: this API is unstable and may be subject to backwards-incompatible changes in the future.
@ -204,24 +206,17 @@ rec {
mkdir image
tar -C image -xpf "$fromImage"
# If the image name isn't set, read it from the image repository json.
if [[ -z "$fromImageName" ]]; then
fromImageName=$(jshon -k < image/repositories | head -n 1)
echo "From-image name wasn't set. Read $fromImageName."
if [[ -n "$fromImageName" ]] && [[ -n "$fromImageTag" ]]; then
parentID="$(
cat "image/manifest.json" |
jq -r '.[] | select(.RepoTags | contains([$desiredTag])) | .Config | rtrimstr(".json")' \
--arg desiredTag "$fromImageName:$fromImageTag"
)"
else
echo "From-image name or tag wasn't set. Reading the first ID."
parentID="$(cat "image/manifest.json" | jq -r '.[0].Config | rtrimstr(".json")')"
fi
# If the tag isn't set, use the name as an index into the json
# and read the first key found.
if [[ -z "$fromImageTag" ]]; then
fromImageTag=$(jshon -e $fromImageName -k < image/repositories \
| head -n1)
echo "From-image tag wasn't set. Read $fromImageTag."
fi
# Use the name and tag to get the parent ID field.
parentID=$(jshon -e $fromImageName -e $fromImageTag -u \
< image/repositories)
cat ./image/manifest.json | jq -r '.[0].Layers | .[]' > layer-list
else
touch layer-list
@ -305,106 +300,6 @@ rec {
${text}
'';
# Create $maxLayers worth of Docker Layers, one layer per store path
# unless there are more paths than $maxLayers. In that case, create
# $maxLayers-1 for the most popular layers, and smush the remaining
# store paths into one final layer.
#
# NOTE: the `closures` parameter is a list of closures to include.
# The TOP LEVEL store paths themselves will never be present in the
# resulting image. At this time (2019-12-16) none of these layers
# are appropriate to include, as they are all created as
# implementation details of dockerTools.
mkManyPureLayers = {
name,
# Files to add to the layer.
closures,
configJson,
# Docker has a 125-layer maximum, we pick 100 to ensure there is
# plenty of room for extension.
# https://github.com/moby/moby/blob/b3e9f7b13b0f0c414fa6253e1f17a86b2cff68b5/layer/layer_store.go#L23-L26
maxLayers ? 100
}:
let
storePathToLayer = substituteAll
{ shell = runtimeShell;
isExecutable = true;
src = ./store-path-to-layer.sh;
};
overallClosure = writeText "closure" (lib.concatStringsSep " " closures);
in
runCommand "${name}-granular-docker-layers" {
inherit maxLayers;
paths = referencesByPopularity overallClosure;
nativeBuildInputs = [ jshon rsync tarsum moreutils ];
enableParallelBuilding = true;
}
''
mkdir layers
# Delete impurities for store path layers, so they don't get
# shared and taint other projects.
cat ${configJson} \
| jshon -d config \
| jshon -s "1970-01-01T00:00:01Z" -i created > generic.json
# WARNING!
# The following code is fiddly w.r.t. ensuring every layer is
# created, and that no paths are missed. If you change the
# following head and tail call lines, double-check that your
# code behaves properly when the number of layers equals:
# maxLayers-1, maxLayers, and maxLayers+1, 0
paths() {
cat $paths ${lib.concatMapStringsSep " " (path: "| (grep -v ${path} || true)") (closures ++ [ overallClosure ])}
}
paths | head -n $((maxLayers - 1)) | cat -n | xargs -r -P$NIX_BUILD_CORES -n2 ${storePathToLayer}
if [ $(paths | wc -l) -ge $maxLayers ]; then
paths | tail -n+$maxLayers | xargs ${storePathToLayer} $maxLayers
fi
echo "Finished building layer '$name'"
mv ./layers $out
'';
# Create a "Customisation" layer which adds symlinks at the root of
# the image to the root paths of the closure. Also add the config
# data like what command to run and the environment to run it in.
mkCustomisationLayer = {
name,
# Files to add to the layer.
contents,
baseJson,
extraCommands,
uid ? 0, gid ? 0,
}:
runCommand "${name}-customisation-layer" {
nativeBuildInputs = [ jshon rsync tarsum ];
inherit extraCommands;
}
''
cp -r ${contents}/ ./layer
if [[ -n $extraCommands ]]; then
chmod ug+w layer
(cd layer; eval "$extraCommands")
fi
# Tar up the layer and throw it into 'layer.tar', while calculating its checksum.
echo "Packing layer..."
mkdir $out
tarhash=$(tar --transform='s|^\./||' -C layer --sort=name --mtime="@$SOURCE_DATE_EPOCH" --owner=${toString uid} --group=${toString gid} -cf - . | tee $out/layer.tar | tarsum)
# Add a 'checksum' field to the JSON, with the value set to the
# checksum of the tarball.
cat ${baseJson} | jshon -s "$tarhash" -i checksum > $out/json
# Indicate to docker that we're using schema version 1.0.
echo -n "1.0" > $out/VERSION
'';
# Create a "layer" (set of files).
mkPureLayer = {
# Name of the layer
@ -438,7 +333,7 @@ rec {
chmod ug+w layer
if [[ -n $extraCommands ]]; then
if [[ -n "$extraCommands" ]]; then
(cd layer; eval "$extraCommands")
fi
@ -541,131 +436,14 @@ rec {
'';
};
buildLayeredImage = {
# Image Name
name,
# Image tag, the Nix's output hash will be used if null
tag ? null,
# Files to put on the image (a nix store path or list of paths).
contents ? [],
# Docker config; e.g. what command to run on the container.
config ? {},
# Time of creation of the image. Passing "now" will make the
# created date be the time of building.
created ? "1970-01-01T00:00:01Z",
# Optional bash script to run on the files prior to fixturizing the layer.
extraCommands ? "", uid ? 0, gid ? 0,
# We pick 100 to ensure there is plenty of room for extension. I
# believe the actual maximum is 128.
maxLayers ? 100
}:
assert
(lib.assertMsg (maxLayers > 1)
"the maxLayers argument of dockerTools.buildLayeredImage function must be greather than 1 (current value: ${toString maxLayers})");
buildLayeredImage = {name, ...}@args:
let
baseName = baseNameOf name;
contentsEnv = symlinkJoin {
name = "bulk-layers";
paths = if builtins.isList contents
then contents
else [ contents ];
};
configJson = let
pure = writeText "${baseName}-config.json" (builtins.toJSON {
inherit created config;
architecture = buildPackages.go.GOARCH;
os = "linux";
});
impure = runCommand "${baseName}-standard-dynamic-date.json"
{ nativeBuildInputs = [ jq ]; }
''
jq ".created = \"$(TZ=utc date --iso-8601="seconds")\"" ${pure} > $out
'';
in if created == "now" then impure else pure;
bulkLayers = mkManyPureLayers {
name = baseName;
closures = [ contentsEnv configJson ];
# One layer will be taken up by the customisationLayer, so
# take up one less.
maxLayers = maxLayers - 1;
inherit configJson;
};
customisationLayer = mkCustomisationLayer {
name = baseName;
contents = contentsEnv;
baseJson = configJson;
inherit uid gid extraCommands;
};
result = runCommand "docker-image-${baseName}.tar.gz" {
nativeBuildInputs = [ jshon pigz coreutils findutils jq ];
# Image name and tag must be lowercase
imageName = lib.toLower name;
baseJson = configJson;
passthru.imageTag =
if tag == null
then lib.head (lib.splitString "-" (lib.last (lib.splitString "/" result)))
else lib.toLower tag;
# Docker can't be made to run darwin binaries
meta.badPlatforms = lib.platforms.darwin;
} ''
${if (tag == null) then ''
outName="$(basename "$out")"
outHash=$(echo "$outName" | cut -d - -f 1)
imageTag=$outHash
'' else ''
imageTag="${tag}"
''}
find ${bulkLayers} -mindepth 1 -maxdepth 1 | sort -t/ -k5 -n > layer-list
echo ${customisationLayer} >> layer-list
mkdir image
imageJson=$(cat ${configJson} | jq ". + {\"rootfs\": {\"diff_ids\": [], \"type\": \"layers\"}}")
manifestJson=$(jq -n "[{\"RepoTags\":[\"$imageName:$imageTag\"]}]")
for layer in $(cat layer-list); do
layerChecksum=$(sha256sum $layer/layer.tar | cut -d ' ' -f1)
layerID=$(sha256sum "$layer/json" | cut -d ' ' -f 1)
ln -s "$layer" "./image/$layerID"
manifestJson=$(echo "$manifestJson" | jq ".[0].Layers |= . + [\"$layerID/layer.tar\"]")
imageJson=$(echo "$imageJson" | jq ".history |= . + [{\"created\": \"$(jq -r .created ${configJson})\"}]")
imageJson=$(echo "$imageJson" | jq ".rootfs.diff_ids |= . + [\"sha256:$layerChecksum\"]")
done
imageJsonChecksum=$(echo "$imageJson" | sha256sum | cut -d ' ' -f1)
echo "$imageJson" > "image/$imageJsonChecksum.json"
manifestJson=$(echo "$manifestJson" | jq ".[0].Config = \"$imageJsonChecksum.json\"")
echo "$manifestJson" > image/manifest.json
jshon -n object \
-n object -s "$layerID" -i "$imageTag" \
-i "$imageName" > image/repositories
echo "Cooking the image..."
# tar exits with an exit code of 1 if files changed while it was
# reading them. It considers a change in the number of hard links
# to be a "change", which can cause this to fail if images are being
# built concurrently and the auto-optimise-store nix option is turned on.
# Since the contents of these files will not change, we can reasonably
# ignore this exit code.
set +e
tar -C image --dereference --hard-dereference --sort=name \
--mtime="@$SOURCE_DATE_EPOCH" --owner=0 --group=0 \
--mode=a-w --xform s:'^./':: --use-compress-program='pigz -nT' \
--warning=no-file-changed -cf $out .
RET=$?
if [ $RET -ne 0 ] && [ $RET -ne 1 ]; then
exit $RET
fi
set -e
echo "Finished."
'';
stream = streamLayeredImage args;
in
result;
runCommand "${name}.tar.gz" {
inherit (stream) imageName;
buildInputs = [ pigz ];
} "${stream} | pigz -nT > $out";
# 1. extract the base image
# 2. create the layer
@ -774,20 +552,22 @@ rec {
configName="$(cat ./image/manifest.json | jq -r '.[0].Config')"
baseEnvs="$(cat "./image/$configName" | jq '.config.Env // []')"
# Extract the parentID from the manifest
if [[ -n "$fromImageName" ]] && [[ -n "$fromImageTag" ]]; then
parentID="$(
cat "image/manifest.json" |
jq -r '.[] | select(.RepoTags | contains([$desiredTag])) | .Config | rtrimstr(".json")' \
--arg desiredTag "$fromImageName:$fromImageTag"
)"
else
echo "From-image name or tag wasn't set. Reading the first ID."
parentID="$(cat "image/manifest.json" | jq -r '.[0].Config | rtrimstr(".json")')"
fi
# Otherwise do not import the base image configuration and manifest
chmod a+w image image/*.json
rm -f image/*.json
if [[ -z "$fromImageName" ]]; then
fromImageName=$(jshon -k < image/repositories|head -n1)
fi
if [[ -z "$fromImageTag" ]]; then
fromImageTag=$(jshon -e $fromImageName -k \
< image/repositories|head -n1)
fi
parentID=$(jshon -e $fromImageName -e $fromImageTag -u \
< image/repositories)
for l in image/*/layer.tar; do
ls_tar $l >> baseFiles
done
@ -904,4 +684,117 @@ rec {
})
);
streamLayeredImage = {
# Image Name
name,
# Image tag, the Nix's output hash will be used if null
tag ? null,
# Files to put on the image (a nix store path or list of paths).
contents ? [],
# Docker config; e.g. what command to run on the container.
config ? {},
# Time of creation of the image. Passing "now" will make the
# created date be the time of building.
created ? "1970-01-01T00:00:01Z",
# Optional bash script to run on the files prior to fixturizing the layer.
extraCommands ? "",
# We pick 100 to ensure there is plenty of room for extension. I
# believe the actual maximum is 128.
maxLayers ? 100
}:
assert
(lib.assertMsg (maxLayers > 1)
"the maxLayers argument of dockerTools.buildLayeredImage function must be greather than 1 (current value: ${toString maxLayers})");
let
streamScript = writePython3 "stream" {} ./stream_layered_image.py;
baseJson = writeText "${name}-base.json" (builtins.toJSON {
inherit config;
architecture = buildPackages.go.GOARCH;
os = "linux";
});
customisationLayer = runCommand "${name}-customisation-layer" { inherit extraCommands; } ''
cp -r ${contentsEnv}/ $out
if [[ -n $extraCommands ]]; then
chmod u+w $out
(cd $out; eval "$extraCommands")
fi
'';
contentsEnv = symlinkJoin {
name = "${name}-bulk-layers";
paths = if builtins.isList contents
then contents
else [ contents ];
};
# NOTE: the `closures` parameter is a list of closures to include.
# The TOP LEVEL store paths themselves will never be present in the
# resulting image. At this time (2020-06-18) none of these layers
# are appropriate to include, as they are all created as
# implementation details of dockerTools.
closures = [ baseJson contentsEnv ];
overallClosure = writeText "closure" (lib.concatStringsSep " " closures);
conf = runCommand "${name}-conf.json" {
inherit maxLayers created;
imageName = lib.toLower name;
paths = referencesByPopularity overallClosure;
buildInputs = [ jq ];
} ''
paths() {
cat $paths ${lib.concatMapStringsSep " " (path: "| (grep -v ${path} || true)") (closures ++ [ overallClosure ])}
}
${if (tag == null) then ''
outName="$(basename "$out")"
outHash=$(echo "$outName" | cut -d - -f 1)
imageTag=$outHash
'' else ''
imageTag="${tag}"
''}
# convert "created" to iso format
if [[ "$created" != "now" ]]; then
created="$(date -Iseconds -d "$created")"
fi
# Create $maxLayers worth of Docker Layers, one layer per store path
# unless there are more paths than $maxLayers. In that case, create
# $maxLayers-1 for the most popular layers, and smush the remaining
# store paths into one final layer.
#
# The following code is fiddly w.r.t. ensuring every layer is
# created, and that no paths are missed. If you change the
# following lines, double-check that your code behaves properly
# when the number of layers equals:
# maxLayers-1, maxLayers, and maxLayers+1, 0
store_layers="$(
paths |
jq -sR '
rtrimstr("\n") | split("\n")
| (.[:$maxLayers-1] | map([.])) + [ .[$maxLayers-1:] ]
| map(select(length > 0))
' \
--argjson maxLayers "$(( maxLayers - 1 ))" # one layer will be taken up by the customisation layer
)"
cat ${baseJson} | jq '
. + {
"store_layers": $store_layers,
"customisation_layer", $customisation_layer,
"repo_tag": $repo_tag,
"created": $created
}
' --argjson store_layers "$store_layers" \
--arg customisation_layer ${customisationLayer} \
--arg repo_tag "$imageName:$imageTag" \
--arg created "$created" |
tee $out
'';
result = runCommand "stream-${name}" {
inherit (conf) imageName;
buildInputs = [ makeWrapper ];
} ''
makeWrapper ${streamScript} $out --add-flags ${conf}
'';
in result;
}

View file

@ -1,54 +0,0 @@
#!@shell@
set -eu
layerNumber=$1
shift
layerPath="./layers/$layerNumber"
echo "Creating layer #$layerNumber for $@"
mkdir -p "$layerPath"
# Make sure /nix and /nix/store appear first in the archive.
#
# We create the directories here and use them because
# when there are other things being added to the
# nix store, tar could fail, saying,
# "tar: /nix/store: file changed as we read it"
#
# In addition, we use `__Nix__` instead of `nix` to avoid renaming
# relative symlink destinations like
# /nix/store/...-nix-2.3.4/bin/nix-daemon -> nix
mkdir -p __Nix__/store
# Then we change into the /nix/store in order to
# avoid a similar "file changed as we read it" error
# as above. Namely, if we use the absolute path of
# /nix/store/123-pkg and something new is added to the nix
# store while tar is running, it will detect a change to
# /nix/store and fail. Instead, if we cd into the nix store
# and copy the relative nix store path, tar will ignore
# changes to /nix/store. In order to create the correct
# structure in the tar file, we transform the relative nix
# store path to the absolute store path.
tarhash=$(
basename -a "$@" |
tar --create --preserve-permissions --absolute-names nix \
--directory /nix/store --verbatim-files-from --files-from - \
--hard-dereference --sort=name \
--mtime="@$SOURCE_DATE_EPOCH" \
--owner=0 --group=0 \
--transform 's,^__Nix__$,/nix,' \
--transform 's,^__Nix__/store$,/nix/store,' \
--transform 's,^[^/],/nix/store/\0,rS' |
tee "$layerPath/layer.tar" |
tarsum
)
# Add a 'checksum' field to the JSON, with the value set to the
# checksum of the tarball.
cat ./generic.json | jshon -s "$tarhash" -i checksum > $layerPath/json
# Indicate to docker that we're using schema version 1.0.
echo -n "1.0" > $layerPath/VERSION

View file

@ -0,0 +1,308 @@
"""
This script generates a Docker image from a set of store paths. Uses
Docker Image Specification v1.2 as reference [1].
It expects a JSON file with the following properties and writes the
image as an uncompressed tarball to stdout:
* "architecture", "config", "os", "created", "repo_tag" correspond to
the fields with the same name on the image spec [2].
* "created" can be "now".
* "created" is also used as mtime for files added to the image.
* "store_layers" is a list of layers in ascending order, where each
layer is the list of store paths to include in that layer.
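For illustration only, such a JSON file might look roughly like this
(all store paths and values here are hypothetical):
  {
    "architecture": "amd64",
    "os": "linux",
    "config": {"Cmd": ["/nix/store/aaa...-hello/bin/hello"]},
    "created": "1970-01-01T00:00:01Z",
    "repo_tag": "hello:latest",
    "store_layers": [
      ["/nix/store/bbb...-libc"],
      ["/nix/store/aaa...-hello"]
    ]
  }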
The main challenge for this script is to create the final image in a
streaming fashion, without dumping any intermediate data to disk
for performance.
A docker image has each layer contents archived as separate tarballs,
and they later all get enveloped into a single big tarball in a
content addressed fashion. However, because of how the "tar" format works,
we have to know about the name (which includes the checksum in our
case) and the size of the tarball before we can start adding it to the
outer tarball. We achieve that by creating the layer tarballs twice;
on the first iteration we calculate the file size and the checksum,
and on the second one we actually stream the contents. The 'add_layer_dir'
function does all this.
[1]: https://github.com/moby/moby/blob/master/image/spec/v1.2.md
[2]: https://github.com/moby/moby/blob/4fb59c20a4fb54f944fe170d0ff1d00eb4a24d6f/image/spec/v1.2.md#image-json-field-descriptions
""" # noqa: E501
import io
import os
import re
import sys
import json
import hashlib
import pathlib
import tarfile
import threading
from datetime import datetime
from collections import namedtuple
def archive_paths_to(obj, paths, mtime, add_nix, filter=None):
"""
Writes the given store paths as a tar file to the given stream.
obj: Stream to write to. Should have a 'write' method.
paths: List of store paths.
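mtime: 'mtime' of the files added to the archive. Should be an
integer representing a POSIX time.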
add_nix: Whether /nix and /nix/store directories should be
prepended to the archive.
filter: An optional transformation to be applied to TarInfo
objects. Should take a single TarInfo object and return
another one. Defaults to identity.
"""
filter = filter if filter else lambda i: i
# gettarinfo makes the paths relative, this makes them
# absolute again
def append_root(ti):
ti.name = "/" + ti.name
return ti
def apply_filters(ti):
ti.mtime = mtime
ti.uid = 0
ti.gid = 0
ti.uname = "root"
ti.gname = "root"
return filter(ti)
def dir(path):
ti = tarfile.TarInfo(path)
ti.type = tarfile.DIRTYPE
return ti
with tarfile.open(fileobj=obj, mode="w|") as tar:
# To be consistent with the docker utilities, we need to have
# these directories first when building layer tarballs. But
# we don't need them on the customisation layer.
if add_nix:
tar.addfile(apply_filters(dir("/nix")))
tar.addfile(apply_filters(dir("/nix/store")))
for path in paths:
ti = tar.gettarinfo(os.path.join("/", path))
tar.addfile(apply_filters(append_root(ti)))
for filename in pathlib.Path(path).rglob("*"):
ti = append_root(tar.gettarinfo(filename))
# copy hardlinks as regular files
if ti.islnk():
ti.type = tarfile.REGTYPE
ti.linkname = ""
ti = apply_filters(ti)
if ti.isfile():
with open(filename, "rb") as f:
tar.addfile(ti, f)
else:
tar.addfile(ti)
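# Editor's sketch (not part of the original script): 'archive_paths_to' only
# needs an object exposing 'write', so its output can be captured in memory
# and inspected. 'store_path' is a placeholder for an existing /nix/store
# path; nothing here is called by the script itself.
def _archive_example(store_path):
    buf = io.BytesIO()
    archive_paths_to(buf, [store_path], mtime=0, add_nix=True)
    buf.seek(0)
    with tarfile.open(fileobj=buf) as t:
        # Expect '/nix', '/nix/store' and then absolute store path members.
        return t.getnames()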
class ExtractChecksum:
"""
A writable stream which only calculates the final file size and
sha256sum, while discarding the actual contents.
"""
def __init__(self):
self._digest = hashlib.sha256()
self._size = 0
def write(self, data):
self._digest.update(data)
self._size += len(data)
def extract(self):
"""
Returns: Hex-encoded sha256sum and size as a tuple.
"""
return (self._digest.hexdigest(), self._size)
# Some metadata for a layer
LayerInfo = namedtuple("LayerInfo", ["size", "checksum", "path", "paths"])
def add_layer_dir(tar, paths, mtime, add_nix=True, filter=None):
"""
Appends given store paths to a TarFile object as a new layer.
tar: 'tarfile.TarFile' object for the new layer to be added to.
paths: List of store paths.
mtime: 'mtime' of the added files and the layer tarball.
Should be an integer representing a POSIX time.
add_nix: Whether /nix and /nix/store directories should be
added to a layer.
filter: An optional transformation to be applied to TarInfo
objects inside the layer. Should take a single TarInfo
object and return another one. Defaults to identity.
Returns: A 'LayerInfo' object containing some metadata of
the layer added.
"""
invalid_paths = [i for i in paths if not i.startswith("/nix/store/")]
assert len(invalid_paths) == 0, \
"Expecting absolute store paths, but got: {invalid_paths}"
# First, calculate the tarball checksum and the size.
extract_checksum = ExtractChecksum()
archive_paths_to(
extract_checksum,
paths,
mtime=mtime,
add_nix=add_nix,
filter=filter
)
(checksum, size) = extract_checksum.extract()
path = f"{checksum}/layer.tar"
layer_tarinfo = tarfile.TarInfo(path)
layer_tarinfo.size = size
layer_tarinfo.mtime = mtime
# Then actually stream the contents to the outer tarball.
read_fd, write_fd = os.pipe()
with open(read_fd, "rb") as read, open(write_fd, "wb") as write:
def producer():
archive_paths_to(
write,
paths,
mtime=mtime,
add_nix=add_nix,
filter=filter
)
write.close()
# Closing the write end of the pipe makes the read end see EOF,
# so we don't need to wait until this thread is finished.
#
# Any exception from the thread will get printed by the default
# exception handler, and the 'addfile' call will fail since it
# won't be able to read the required amount of bytes.
threading.Thread(target=producer).start()
tar.addfile(layer_tarinfo, read)
return LayerInfo(size=size, checksum=checksum, path=path, paths=paths)
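# Editor's sketch (not part of the original script): the pipe-plus-thread
# trick used above, in isolation. 'produce' is a hypothetical callable that
# writes bytes to a file object; 'consume' reads from one. Closing the
# writer is what makes the reader see EOF and lets the consumer finish.
def _pipe_stream_sketch(produce, consume):
    read_fd, write_fd = os.pipe()
    with open(read_fd, "rb") as reader, open(write_fd, "wb") as writer:
        def _producer():
            produce(writer)
            writer.close()

        t = threading.Thread(target=_producer)
        t.start()
        result = consume(reader)
        t.join()
    return result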
def add_customisation_layer(tar, path, mtime):
"""
Adds the contents of the store path as a new layer. This differs from
the 'add_layer_dir' defaults in that the contents of the single store
path are added to the root of the layer, i.e. without the /nix/store
prefix.
tar: 'tarfile.TarFile' object for the new layer to be added to.
path: A store path.
mtime: 'mtime' of the added files and the layer tarball. Should be an
integer representing a POSIX time.
"""
def filter(ti):
ti.name = re.sub("^/nix/store/[^/]*", "", ti.name)
return ti
return add_layer_dir(
tar,
[path],
mtime=mtime,
add_nix=False,
filter=filter
)
def add_bytes(tar, path, content, mtime):
"""
Adds a file to the tarball with given path and contents.
tar: 'tarfile.TarFile' object.
path: Path of the file as a string.
content: Contents of the file.
mtime: 'mtime' of the file. Should be an integer representing a POSIX time.
"""
assert type(content) is bytes
ti = tarfile.TarInfo(path)
ti.size = len(content)
ti.mtime = mtime
tar.addfile(ti, io.BytesIO(content))
def main():
with open(sys.argv[1], "r") as f:
conf = json.load(f)
created = (
datetime.now(tz=timezone.utc)
if conf["created"] == "now"
else datetime.fromisoformat(conf["created"])
)
mtime = int(created.timestamp())
with tarfile.open(mode="w|", fileobj=sys.stdout.buffer) as tar:
layers = []
for num, store_layer in enumerate(conf["store_layers"]):
print(
"Creating layer", num,
"from paths:", store_layer,
file=sys.stderr)
info = add_layer_dir(tar, store_layer, mtime=mtime)
layers.append(info)
print("Creating the customisation layer...", file=sys.stderr)
layers.append(
add_customisation_layer(
tar,
conf["customisation_layer"],
mtime=mtime
)
)
print("Adding manifests...", file=sys.stderr)
image_json = {
"created": datetime.isoformat(created),
"architecture": conf["architecture"],
"os": "linux",
"config": conf["config"],
"rootfs": {
"diff_ids": [f"sha256:{layer.checksum}" for layer in layers],
"type": "layers",
},
"history": [
{
"created": conf["created"],
"comment": f"store paths: {layer.paths}"
}
for layer in layers
],
}
image_json = json.dumps(image_json, indent=4).encode("utf-8")
image_json_checksum = hashlib.sha256(image_json).hexdigest()
image_json_path = f"{image_json_checksum}.json"
add_bytes(tar, image_json_path, image_json, mtime=mtime)
manifest_json = [
{
"Config": image_json_path,
"RepoTags": [conf["repo_tag"]],
"Layers": [layer.path for layer in layers],
}
]
manifest_json = json.dumps(manifest_json, indent=4).encode("utf-8")
add_bytes(tar, "manifest.json", manifest_json, mtime=mtime)
print("Done.", file=sys.stderr)
if __name__ == "__main__":
main()
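# Editor's sketch (not part of the original script): a quick sanity check of
# the produced image, assuming stdout was redirected to a file whose path is
# passed as 'image_path' (a hypothetical argument).
def _inspect_image(image_path):
    with tarfile.open(image_path) as tar:
        manifest = json.load(tar.extractfile("manifest.json"))[0]
        config = json.load(tar.extractfile(manifest["Config"]))
        # Each layer tarball listed in the manifest must have a matching
        # diff_id in the image configuration.
        assert len(manifest["Layers"]) == len(config["rootfs"]["diff_ids"])
        return manifest["RepoTags"], manifest["Layers"]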

View file

@ -2,11 +2,11 @@
stdenv.mkDerivation rec {
pname = "joypixels";
version = "5.5.0";
version = "6.0.0";
src = fetchurl {
url = "https://cdn.joypixels.com/arch-linux/font/${version}/joypixels-android.ttf";
sha256 = "0w3r50l0knrncwv6zihyx01gs995y76xjcwsysx5bmvc1b43yijb";
sha256 = "1vxqsqs93g4jyp01r47lrpcm0fmib2n1vysx32ksmfxmprimb75s";
};
dontUnpack = true;

View file

@ -1,7 +1,7 @@
{ lib, fetchFromGitHub }:
let
version = "4.7.95";
version = "5.3.45";
in fetchFromGitHub {
name = "material-design-icons-${version}";
owner = "Templarian";
@ -16,10 +16,10 @@ in fetchFromGitHub {
cp fonts/*.woff $out/share/fonts/woff/
cp fonts/*.woff2 $out/share/fonts/woff2/
'';
sha256 = "0da92kz8ryy60kb5xm52md13w28ih4sfap8g3v9b4ziyww66zjhz";
sha256 = "1nwha6dbj97ybiwlf69la57l3ibmwgnzs0nr104bfqnqxjs471sx";
meta = with lib; {
description = "3200+ Material Design Icons from the Community";
description = "4600+ Material Design Icons from the Community";
longDescription = ''
Material Design Icons' growing icon collection allows designers and
developers targeting various platforms to download icons in the format,

View file

@ -2,13 +2,13 @@
stdenv.mkDerivation rec {
pname = "qogir-icon-theme";
version = "2020-02-21";
version = "2020-06-22";
src = fetchFromGitHub {
owner = "vinceliuice";
repo = pname;
rev = version;
sha256 = "0m7f26dzzz5gkxi9dbbc96pl0xcvayr1ibxbjkrlsjcdvfg7p3rr";
sha256 = "0s5fhwfhn4qgk198jw736byxdrfm42l5m681pllbhg02j8ld4iik";
};
nativeBuildInputs = [ gtk3 ];

View file

@ -65,5 +65,6 @@ stdenv.mkDerivation {
description = "A bundle of X.509 certificates of public Certificate Authorities (CA)";
platforms = platforms.all;
maintainers = with maintainers; [ fpletz ];
license = licenses.mpl20;
};
}

View file

@ -0,0 +1,252 @@
{ stdenv, fetchurl, tzdata, iana-etc, runCommand
, perl, which, pkgconfig, patch, procps, pcre, cacert, Security, Foundation
, mailcap, runtimeShell
, buildPackages, pkgsTargetTarget
, fetchpatch
}:
let
inherit (stdenv.lib) optionals optionalString;
goBootstrap = runCommand "go-bootstrap" {} ''
mkdir $out
cp -rf ${buildPackages.go_bootstrap}/* $out/
chmod -R u+w $out
find $out -name "*.c" -delete
cp -rf $out/bin/* $out/share/go/bin/
'';
goarch = platform: {
"i686" = "386";
"x86_64" = "amd64";
"aarch64" = "arm64";
"arm" = "arm";
"armv5tel" = "arm";
"armv6l" = "arm";
"armv7l" = "arm";
}.${platform.parsed.cpu.name} or (throw "Unsupported system");
in
stdenv.mkDerivation rec {
pname = "go";
version = "1.15beta1";
src = fetchurl {
url = "https://dl.google.com/go/go${version}.src.tar.gz";
sha256 = "1h1sg6j9jac5bw2pjrd13bf4nr18prs89147izdhzbhp896sikbq";
};
# perl is used for testing go vet
nativeBuildInputs = [ perl which pkgconfig patch procps ];
buildInputs = [ cacert pcre ]
++ optionals stdenv.isLinux [ stdenv.cc.libc.out ]
++ optionals (stdenv.hostPlatform.libc == "glibc") [ stdenv.cc.libc.static ];
depsTargetTargetPropagated = optionals stdenv.isDarwin [ Security Foundation ];
hardeningDisable = [ "all" ];
prePatch = ''
patchShebangs ./ # replace /bin/bash
# This source produces a shell script at run time,
# and thus is not corrected by patchShebangs.
substituteInPlace misc/cgo/testcarchive/carchive_test.go \
--replace '#!/usr/bin/env bash' '#!${runtimeShell}'
# Patch the mimetype database location, which is missing on NixOS,
# but also allow static binaries built with NixOS to run outside of Nix
sed -i 's,\"/etc/mime.types,"${mailcap}/etc/mime.types\"\,\n\t&,' src/mime/type_unix.go
# Disabling the 'os/http/net' tests (they want files not available in
# chroot builds)
rm src/net/{listen,parse}_test.go
rm src/syscall/exec_linux_test.go
# !!! substituteInPlace does not seem to be effective.
# The os test wants to read files in an existing path. Just don't let it be /usr/bin.
sed -i 's,/usr/bin,'"`pwd`", src/os/os_test.go
sed -i 's,/bin/pwd,'"`type -P pwd`", src/os/os_test.go
# Fails on aarch64
sed -i '/TestFallocate/aif true \{ return\; \}' src/cmd/link/internal/ld/fallocate_test.go
# Skip this test since ssl patches mess it up.
sed -i '/TestLoadSystemCertsLoadColonSeparatedDirs/aif true \{ return\; \}' src/crypto/x509/root_unix_test.go
# Disable another PIE test which breaks.
sed -i '/TestTrivialPIE/aif true \{ return\; \}' misc/cgo/testshared/shared_test.go
# Disable the BuildModePie test
sed -i '/TestBuildmodePIE/aif true \{ return\; \}' src/cmd/go/go_test.go
# Disable the unix socket test
sed -i '/TestShutdownUnix/aif true \{ return\; \}' src/net/net_test.go
# Disable the hostname test
sed -i '/TestHostname/aif true \{ return\; \}' src/os/os_test.go
# ParseInLocation fails the test
sed -i '/TestParseInSydney/aif true \{ return\; \}' src/time/format_test.go
# Remove the api check as it never worked
sed -i '/src\/cmd\/api\/run.go/ireturn nil' src/cmd/dist/test.go
# Remove the coverage test as we have removed this utility
sed -i '/TestCoverageWithCgo/aif true \{ return\; \}' src/cmd/go/go_test.go
# Remove the timezone naming test
sed -i '/TestLoadFixed/aif true \{ return\; \}' src/time/time_test.go
# Remove disable setgid test
sed -i '/TestRespectSetgidDir/aif true \{ return\; \}' src/cmd/go/internal/work/build_test.go
# Remove cert tests that conflict with NixOS's cert resolution
sed -i '/TestEnvVars/aif true \{ return\; \}' src/crypto/x509/root_unix_test.go
# TestWritevError hangs sometimes
sed -i '/TestWritevError/aif true \{ return\; \}' src/net/writev_test.go
# TestVariousDeadlines fails sometimes
sed -i '/TestVariousDeadlines/aif true \{ return\; \}' src/net/timeout_test.go
sed -i 's,/etc/protocols,${iana-etc}/etc/protocols,' src/net/lookup_unix.go
sed -i 's,/etc/services,${iana-etc}/etc/services,' src/net/port_unix.go
# Disable cgo lookup tests; they don't work because they depend on the resolver
rm src/net/cgo_unix_test.go
'' + optionalString stdenv.isLinux ''
# prepend the Nix path to the zoneinfo files but also leave the original value for static binaries
# that run outside of Nix
sed -i 's,\"/usr/share/zoneinfo/,"${tzdata}/share/zoneinfo/\"\,\n\t&,' src/time/zoneinfo_unix.go
'' + optionalString stdenv.isAarch32 ''
echo '#!${runtimeShell}' > misc/cgo/testplugin/test.bash
'' + optionalString stdenv.isDarwin ''
substituteInPlace src/race.bash --replace \
"sysctl machdep.cpu.extfeatures | grep -qv EM64T" true
sed -i 's,strings.Contains(.*sysctl.*,true {,' src/cmd/dist/util.go
sed -i 's,"/etc","'"$TMPDIR"'",' src/os/os_test.go
sed -i 's,/_go_os_test,'"$TMPDIR"'/_go_os_test,' src/os/path_test.go
sed -i '/TestChdirAndGetwd/aif true \{ return\; \}' src/os/os_test.go
sed -i '/TestCredentialNoSetGroups/aif true \{ return\; \}' src/os/exec/exec_posix_test.go
sed -i '/TestRead0/aif true \{ return\; \}' src/os/os_test.go
sed -i '/TestSystemRoots/aif true \{ return\; \}' src/crypto/x509/root_darwin_test.go
sed -i '/TestGoInstallRebuildsStalePackagesInOtherGOPATH/aif true \{ return\; \}' src/cmd/go/go_test.go
sed -i '/TestBuildDashIInstallsDependencies/aif true \{ return\; \}' src/cmd/go/go_test.go
sed -i '/TestDisasmExtld/aif true \{ return\; \}' src/cmd/objdump/objdump_test.go
sed -i 's/unrecognized/unknown/' src/cmd/link/internal/ld/lib.go
# TestCurrent fails because Current is not implemented on Darwin
sed -i 's/TestCurrent/testCurrent/g' src/os/user/user_test.go
sed -i 's/TestLookup/testLookup/g' src/os/user/user_test.go
touch $TMPDIR/group $TMPDIR/hosts $TMPDIR/passwd
'';
patches = [
./remove-tools-1.11.patch
./ssl-cert-file-1.15.patch
./remove-test-pie-1.15.patch
./creds-test.patch
./go-1.9-skip-flaky-19608.patch
./go-1.9-skip-flaky-20072.patch
./skip-external-network-tests-1.15.patch
./skip-nohup-tests.patch
./skip-cgo-tests-1.15.patch
] ++ [
# breaks under load: https://github.com/golang/go/issues/25628
(if stdenv.isAarch32
then ./skip-test-extra-files-on-aarch32-1.14.patch
else ./skip-test-extra-files-on-386-1.14.patch)
];
postPatch = ''
find . -name '*.orig' -exec rm {} ';'
'';
GOOS = stdenv.targetPlatform.parsed.kernel.name;
GOARCH = goarch stdenv.targetPlatform;
# GOHOSTOS/GOHOSTARCH must match the building system, not the host system.
# Go will nevertheless build for the host system, and we will copy that over in
# the install phase.
GOHOSTOS = stdenv.buildPlatform.parsed.kernel.name;
GOHOSTARCH = goarch stdenv.buildPlatform;
# {CC,CXX}_FOR_TARGET must only be set for the cross-compilation case, as Go expects
# them to be different from CC/CXX
CC_FOR_TARGET = if (stdenv.buildPlatform != stdenv.targetPlatform) then
"${pkgsTargetTarget.stdenv.cc}/bin/${pkgsTargetTarget.stdenv.cc.targetPrefix}cc"
else
null;
CXX_FOR_TARGET = if (stdenv.buildPlatform != stdenv.targetPlatform) then
"${pkgsTargetTarget.stdenv.cc}/bin/${pkgsTargetTarget.stdenv.cc.targetPrefix}c++"
else
null;
GOARM = toString (stdenv.lib.intersectLists [(stdenv.hostPlatform.parsed.cpu.version or "")] ["5" "6" "7"]);
GO386 = 387; # from Arch: don't assume sse2 on i686
CGO_ENABLED = 1;
# Hopefully avoids test timeouts on Hydra
GO_TEST_TIMEOUT_SCALE = 3;
# Indicate that we are running on build infrastructure
# Some tests assume things like home directories and users exist
GO_BUILDER_NAME = "nix";
GOROOT_BOOTSTRAP="${goBootstrap}/share/go";
postConfigure = ''
export GOCACHE=$TMPDIR/go-cache
# this is compiled into the binary
export GOROOT_FINAL=$out/share/go
export PATH=$(pwd)/bin:$PATH
# Independently of host/target, CC should produce code for the building system.
export CC=${buildPackages.stdenv.cc}/bin/cc
ulimit -a
'';
postBuild = ''
(cd src && ./make.bash)
'';
doCheck = stdenv.hostPlatform == stdenv.targetPlatform && !stdenv.isDarwin;
checkPhase = ''
runHook preCheck
(cd src && HOME=$TMPDIR GOCACHE=$TMPDIR/go-cache ./run.bash --no-rebuild)
runHook postCheck
'';
preInstall = ''
rm -r pkg/obj
# Contains the wrong perl shebang when cross compiling;
# since it is not used for anything, we can delete it as well.
rm src/regexp/syntax/make_perl_groups.pl
'' + (if (stdenv.buildPlatform != stdenv.hostPlatform) then ''
mv bin/*_*/* bin
rmdir bin/*_*
${optionalString (!(GOHOSTARCH == GOARCH && GOOS == GOHOSTOS)) ''
rm -rf pkg/${GOHOSTOS}_${GOHOSTARCH} pkg/tool/${GOHOSTOS}_${GOHOSTARCH}
''}
'' else if (stdenv.hostPlatform != stdenv.targetPlatform) then ''
rm -rf bin/*_*
${optionalString (!(GOHOSTARCH == GOARCH && GOOS == GOHOSTOS)) ''
rm -rf pkg/${GOOS}_${GOARCH} pkg/tool/${GOOS}_${GOARCH}
''}
'' else "");
installPhase = ''
runHook preInstall
mkdir -p $GOROOT_FINAL
cp -a bin pkg src lib misc api doc $GOROOT_FINAL
ln -s $GOROOT_FINAL/bin $out/bin
runHook postInstall
'';
disallowedReferences = [ goBootstrap ];
meta = with stdenv.lib; {
branch = "1.15";
homepage = "http://golang.org/";
description = "The Go Programming language";
license = licenses.bsd3;
maintainers = teams.golang.members;
platforms = platforms.linux ++ platforms.darwin;
};
}

View file

@ -0,0 +1,34 @@
diff --git a/src/cmd/dist/test.go b/src/cmd/dist/test.go
index e1cd4965c3..66bf980fc6 100644
--- a/src/cmd/dist/test.go
+++ b/src/cmd/dist/test.go
@@ -584,29 +584,6 @@ func (t *tester) registerTests() {
})
}
- // Test internal linking of PIE binaries where it is supported.
- if t.internalLinkPIE() {
- t.tests = append(t.tests, distTest{
- name: "pie_internal",
- heading: "internal linking of -buildmode=pie",
- fn: func(dt *distTest) error {
- t.addCmd(dt, "src", t.goTest(), "reflect", "-buildmode=pie", "-ldflags=-linkmode=internal", t.timeout(60))
- return nil
- },
- })
- // Also test a cgo package.
- if t.cgoEnabled && t.internalLink() {
- t.tests = append(t.tests, distTest{
- name: "pie_internal_cgo",
- heading: "internal linking of -buildmode=pie",
- fn: func(dt *distTest) error {
- t.addCmd(dt, "src", t.goTest(), "os/user", "-buildmode=pie", "-ldflags=-linkmode=internal", t.timeout(60))
- return nil
- },
- })
- }
- }
-
// sync tests
if goos != "js" { // js doesn't support -cpu=10
t.tests = append(t.tests, distTest{

View file

@ -0,0 +1,13 @@
diff --git a/src/cmd/dist/test.go b/src/cmd/dist/test.go
index e1cd4965c3..0980d044df 100644
--- a/src/cmd/dist/test.go
+++ b/src/cmd/dist/test.go
@@ -1136,7 +1136,7 @@ func (t *tester) cgoTest(dt *distTest) error {
t.addCmd(dt, "misc/cgo/test", t.goTest(), "-buildmode=pie", "-ldflags=-linkmode=internal")
}
t.addCmd(dt, "misc/cgo/testtls", t.goTest(), "-buildmode=pie")
- t.addCmd(dt, "misc/cgo/nocgo", t.goTest(), "-buildmode=pie")
+ //t.addCmd(dt, "misc/cgo/nocgo", t.goTest(), "-buildmode=pie")
}
}
}

View file

@ -0,0 +1,13 @@
diff --git a/src/net/dial_test.go b/src/net/dial_test.go
index 01582489de..5b5faa5424 100644
--- a/src/net/dial_test.go
+++ b/src/net/dial_test.go
@@ -990,6 +990,8 @@ func TestDialerControl(t *testing.T) {
// except that it won't skip testing on non-mobile builders.
func mustHaveExternalNetwork(t *testing.T) {
t.Helper()
+ t.Skipf("Nix sandbox does not have networking")
+
mobile := runtime.GOOS == "android" || runtime.GOOS == "darwin" && runtime.GOARCH == "arm64"
if testenv.Builder() == "" || mobile {
testenv.MustHaveExternalNetwork(t)

View file

@ -0,0 +1,76 @@
diff --git a/src/crypto/x509/root_darwin_amd64.go b/src/crypto/x509/root_darwin_amd64.go
index 8ad5a9607d..1d6091cf83 100644
--- a/src/crypto/x509/root_darwin_amd64.go
+++ b/src/crypto/x509/root_darwin_amd64.go
@@ -8,6 +8,7 @@ import (
"bytes"
"crypto/x509/internal/macOS"
"fmt"
+ "io/ioutil"
"os"
"strings"
)
@@ -23,6 +24,14 @@ func (c *Certificate) systemVerify(opts *VerifyOptions) (chains [][]*Certificate
var loadSystemRootsWithCgo func() (*CertPool, error)
func loadSystemRoots() (*CertPool, error) {
+ if file := os.Getenv("NIX_SSL_CERT_FILE"); file != "" {
+ data, err := ioutil.ReadFile(file)
+ if err == nil {
+ roots := NewCertPool()
+ roots.AppendCertsFromPEM(data)
+ return roots, nil
+ }
+ }
var trustedRoots []*Certificate
untrustedRoots := make(map[string]bool)
diff --git a/src/crypto/x509/root_darwin_arm64.go b/src/crypto/x509/root_darwin_arm64.go
index 2fb079ba66..6a072f3e78 100644
--- a/src/crypto/x509/root_darwin_arm64.go
+++ b/src/crypto/x509/root_darwin_arm64.go
@@ -6,6 +6,11 @@
package x509
+import (
+ "io/ioutil"
+ "os"
+)
+
func (c *Certificate) systemVerify(opts *VerifyOptions) (chains [][]*Certificate, err error) {
return nil, nil
}
@@ -14,6 +19,14 @@ func (c *Certificate) systemVerify(opts *VerifyOptions) (chains [][]*Certificate
var loadSystemRootsWithCgo func() (*CertPool, error)
func loadSystemRoots() (*CertPool, error) {
+ if file := os.Getenv("NIX_SSL_CERT_FILE"); file != "" {
+ data, err := ioutil.ReadFile(file)
+ if err == nil {
+ roots := NewCertPool()
+ roots.AppendCertsFromPEM(data)
+ return roots, nil
+ }
+ }
p := NewCertPool()
p.AppendCertsFromPEM([]byte(systemRootsPEM))
return p, nil
diff --git a/src/crypto/x509/root_unix.go b/src/crypto/x509/root_unix.go
index b48e618a65..195c1ff25a 100644
--- a/src/crypto/x509/root_unix.go
+++ b/src/crypto/x509/root_unix.go
@@ -42,6 +42,13 @@ func (c *Certificate) systemVerify(opts *VerifyOptions) (chains [][]*Certificate
func loadSystemRoots() (*CertPool, error) {
roots := NewCertPool()
+ if file := os.Getenv("NIX_SSL_CERT_FILE"); file != "" {
+ data, err := ioutil.ReadFile(file)
+ if err == nil {
+ roots.AppendCertsFromPEM(data)
+ return roots, nil
+ }
+ }
files := certFiles
if f := os.Getenv(certFileEnv); f != "" {

View file

@ -0,0 +1,59 @@
From acab088fd6af0b2ef2df1396aeb93bfc2e020fa5 Mon Sep 17 00:00:00 2001
From: "Yukihiro \"Matz\" Matsumoto" <matz@ruby.or.jp>
Date: Mon, 27 Apr 2020 18:52:43 +0900
Subject: [PATCH 1/2] Updating `parse.y for recent `bison` (retry).
---
mrbgems/mruby-compiler/core/parse.y | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/mrbgems/mruby-compiler/core/parse.y b/mrbgems/mruby-compiler/core/parse.y
index 6a1faf4e..2a4f740e 100644
--- a/mrbgems/mruby-compiler/core/parse.y
+++ b/mrbgems/mruby-compiler/core/parse.y
@@ -1323,7 +1323,7 @@ heredoc_end(parser_state *p)
%}
-%pure-parser
+%define api.pure
%parse-param {parser_state *p}
%lex-param {parser_state *p}
--
2.27.0
From 3cc682d943b29e84928a847a23f411ddbace74b7 Mon Sep 17 00:00:00 2001
From: "Yukihiro \"Matz\" Matsumoto" <matz@ruby.or.jp>
Date: Fri, 15 May 2020 12:30:13 +0900
Subject: [PATCH 2/2] Remove `YYERROR_VERBOSE` which no longer supported since
`bison 3.6`.
Instead we added `%define parse.error verbose`.
---
mrbgems/mruby-compiler/core/parse.y | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/mrbgems/mruby-compiler/core/parse.y b/mrbgems/mruby-compiler/core/parse.y
index 2a4f740e..eee6a5e5 100644
--- a/mrbgems/mruby-compiler/core/parse.y
+++ b/mrbgems/mruby-compiler/core/parse.y
@@ -9,7 +9,6 @@
#ifdef PARSER_DEBUG
# define YYDEBUG 1
#endif
-#define YYERROR_VERBOSE 1
#define YYSTACK_USE_ALLOCA 1
#include <ctype.h>
@@ -1323,6 +1322,7 @@ heredoc_end(parser_state *p)
%}
+%define parse.error verbose
%define api.pure
%parse-param {parser_state *p}
%lex-param {parser_state *p}
--
2.27.0

View file

@ -1,17 +1,19 @@
{ stdenv, ruby, bison, fetchFromGitHub }:
{ stdenv, ruby, bison, rake, fetchFromGitHub }:
stdenv.mkDerivation rec {
pname = "mruby";
version = "2.1.0";
version = "2.1.1";
src = fetchFromGitHub {
owner = "mruby";
repo = "mruby";
rev = version;
sha256 = "1y072c7dh9jf8xwy7kia6cb4dkpspq4zf24ssn7zm5f46p4waxni";
sha256 = "gEEb0Vn/G+dNgeY6r0VP8bMSPrEOf5s+0GoOcnIPtEU=";
};
nativeBuildInputs = [ ruby bison ];
nativeBuildInputs = [ ruby bison rake ];
patches = [ ./bison-36-compat.patch ];
# Necessary so it uses `gcc` instead of `ld` for linking.
# https://github.com/mruby/mruby/blob/35be8b252495d92ca811d76996f03c470ee33380/tasks/toolchains/gcc.rake#L25

View file

@ -3,9 +3,9 @@
let params =
if stdenv.lib.versionAtLeast coq.coq-version "8.7" then {
version = "3.4.1";
uid = "38104";
sha256 = "1zklv2w34k866fpwmw8q692mid5n6s75d2mmhhigrzpx5l3d4z6y";
version = "3.4.2";
uid = "38288";
sha256 = "00bgzbji0gkazwxhs4q8gz4ccqsa1y1r0m0ravr18ps2h8a8qva5";
} else {
version = "3.3.0";
uid = "37077";
@ -38,7 +38,7 @@ stdenv.mkDerivation {
};
passthru = {
compatibleCoqVersions = v: builtins.elem v [ "8.5" "8.6" "8.7" "8.8" "8.9" "8.10" ];
compatibleCoqVersions = v: builtins.elem v [ "8.5" "8.6" "8.7" "8.8" "8.9" "8.10" "8.11" ];
};

View file

@ -11,7 +11,7 @@
let deps = import ./deps.nix { inherit fetchurl; };
version = "2020-04-24";
version = "2020-06-19";
# Build the nuget source needed for the later build all by itself
# since it's a time-consuming step that only depends on ./deps.nix.
@ -49,8 +49,8 @@ stdenv.mkDerivation {
src = fetchFromGitHub {
owner = "microsoft";
repo = "python-language-server";
rev = "d480cd12649dcff78ed271c92c274fab60c00f2f";
sha256 = "0p2sw6w6fymdlxn8r5ndvija2l7rd77f5rddq9n71dxj1nicljh3";
rev = "838ba78e00173d639bd90f54d8610ec16b4ba3a2";
sha256 = "0nj8l1apcb67gqwy5i49v0f01fs4lvdfmmp4w2hvrpss9if62c1m";
};
buildInputs = [dotnet-sdk_3 openssl icu];

View file

@ -28,24 +28,6 @@ in [
sha256 = "11mpnl6aar2yn7l6b1k4m3rdnl82ydmqbsja4rn84dhz1qdzfp8x";
})
(fetchNuGet {
name = "LiteDB";
version = "4.1.4";
sha256 = "1g9258mv3jm9ps2s5wcxbmszh9nqiiw3d9nrfqis8x72jqiqi6js";
})
(fetchNuGet {
name = "MSTest.TestAdapter";
version = "2.1.0";
sha256 = "1g1v8yjnk4nr1c36k3cz116889bnpiw1i1jkmqnpb19wms7sq7cz";
})
(fetchNuGet {
name = "MSTest.TestFramework";
version = "2.1.0";
sha256 = "0mac4h7ylw953chclhz0lrn19yks3bab9dn9x9fpjqi7309gid0p";
})
(fetchNuGet {
name = "MessagePack";
version = "2.1.90";
@ -100,60 +82,24 @@ in [
sha256 = "00dx5armvkqjxvkldz3invdlck9nj7w21dlsr2aqp1rqbyrbsbbh";
})
(fetchNuGet {
name = "Microsoft.CSharp";
version = "4.0.1";
sha256 = "0zxc0apx1gcx361jlq8smc9pfdgmyjh6hpka8dypc9w23nlsh6yj";
})
(fetchNuGet {
name = "Microsoft.CodeCoverage";
version = "16.5.0";
sha256 = "0610wzn4qyywf9lb4538vwqhprxc4g0g7gjbmnjzvx97jr5nd5mf";
})
(fetchNuGet {
name = "Microsoft.CSharp";
version = "4.0.1";
sha256 = "0zxc0apx1gcx361jlq8smc9pfdgmyjh6hpka8dypc9w23nlsh6yj";
})
(fetchNuGet {
name = "Microsoft.Extensions.FileSystemGlobbing";
version = "3.1.2";
sha256 = "1zwvzp0607irs7irfbq8vnclg5nj2jpyggw9agm4a32la5ngg27m";
})
(fetchNuGet {
name = "Microsoft.NET.Test.Sdk";
version = "16.5.0";
sha256 = "19f5bvzci5mmfz81jwc4dax4qdf7w4k67n263383mn8mawf22bfq";
})
(fetchNuGet {
name = "Microsoft.NETCore.Platforms";
version = "1.1.0";
sha256 = "08vh1r12g6ykjygq5d3vq09zylgb84l63k49jc4v8faw9g93iqqm";
})
(fetchNuGet {
name = "Microsoft.NETCore.Platforms";
version = "1.1.1";
sha256 = "164wycgng4mi9zqi2pnsf1pq6gccbqvw6ib916mqizgjmd8f44pj";
})
(fetchNuGet {
name = "Microsoft.NETCore.Platforms";
version = "2.0.0";
sha256 = "1fk2fk2639i7nzy58m9dvpdnzql4vb8yl8vr19r2fp8lmj9w2jr0";
})
(fetchNuGet {
name = "Microsoft.NETCore.Platforms";
version = "3.1.0";
sha256 = "1gc1x8f95wk8yhgznkwsg80adk1lc65v9n5rx4yaa4bc5dva0z3j";
})
(fetchNuGet {
name = "Microsoft.NETCore.Targets";
version = "1.1.0";
sha256 = "193xwf33fbm0ni3idxzbr5fdq3i2dlfgihsac9jj7whj0gd902nh";
})
(fetchNuGet {
name = "Microsoft.NetCore.App.Host.linux-x64";
version = "3.0.2";
@ -208,6 +154,42 @@ in [
sha256 = "1ynhzsr8a0hfby2wjhzkdiimj18izgfzp7m2yp3pby2iwb4v3xy9";
})
(fetchNuGet {
name = "Microsoft.NETCore.Platforms";
version = "1.1.0";
sha256 = "08vh1r12g6ykjygq5d3vq09zylgb84l63k49jc4v8faw9g93iqqm";
})
(fetchNuGet {
name = "Microsoft.NETCore.Platforms";
version = "1.1.1";
sha256 = "164wycgng4mi9zqi2pnsf1pq6gccbqvw6ib916mqizgjmd8f44pj";
})
(fetchNuGet {
name = "Microsoft.NETCore.Platforms";
version = "2.0.0";
sha256 = "1fk2fk2639i7nzy58m9dvpdnzql4vb8yl8vr19r2fp8lmj9w2jr0";
})
(fetchNuGet {
name = "Microsoft.NETCore.Platforms";
version = "3.1.0";
sha256 = "1gc1x8f95wk8yhgznkwsg80adk1lc65v9n5rx4yaa4bc5dva0z3j";
})
(fetchNuGet {
name = "Microsoft.NETCore.Targets";
version = "1.1.0";
sha256 = "193xwf33fbm0ni3idxzbr5fdq3i2dlfgihsac9jj7whj0gd902nh";
})
(fetchNuGet {
name = "Microsoft.NET.Test.Sdk";
version = "16.5.0";
sha256 = "19f5bvzci5mmfz81jwc4dax4qdf7w4k67n263383mn8mawf22bfq";
})
(fetchNuGet {
name = "Microsoft.TestPlatform.ObjectModel";
version = "16.5.0";
@ -269,21 +251,15 @@ in [
})
(fetchNuGet {
name = "NETStandard.Library";
version = "1.6.1";
sha256 = "1z70wvsx2d847a2cjfii7b83pjfs34q05gb037fdjikv5kbagml8";
name = "MSTest.TestAdapter";
version = "2.1.0";
sha256 = "1g1v8yjnk4nr1c36k3cz116889bnpiw1i1jkmqnpb19wms7sq7cz";
})
(fetchNuGet {
name = "NETStandard.Library";
version = "2.0.3";
sha256 = "1fn9fxppfcg4jgypp2pmrpr6awl3qz1xmnri0cygpkwvyx27df1y";
})
(fetchNuGet {
name = "NSubstitute";
version = "4.2.1";
sha256 = "0wgfjh032qds994fmgxvsg88nhgjrx7p9rnv6z678jm62qi14asy";
name = "MSTest.TestFramework";
version = "2.1.0";
sha256 = "0mac4h7ylw953chclhz0lrn19yks3bab9dn9x9fpjqi7309gid0p";
})
(fetchNuGet {
@ -293,9 +269,15 @@ in [
})
(fetchNuGet {
name = "NewtonSoft.Json";
version = "12.0.3";
sha256 = "17dzl305d835mzign8r15vkmav2hq8l6g7942dfjpnzr17wwl89x";
name = "NETStandard.Library";
version = "1.6.1";
sha256 = "1z70wvsx2d847a2cjfii7b83pjfs34q05gb037fdjikv5kbagml8";
})
(fetchNuGet {
name = "NETStandard.Library";
version = "2.0.3";
sha256 = "1fn9fxppfcg4jgypp2pmrpr6awl3qz1xmnri0cygpkwvyx27df1y";
})
(fetchNuGet {
@ -310,18 +292,342 @@ in [
sha256 = "17dzl305d835mzign8r15vkmav2hq8l6g7942dfjpnzr17wwl89x";
})
(fetchNuGet {
name = "NewtonSoft.Json";
version = "12.0.3";
sha256 = "17dzl305d835mzign8r15vkmav2hq8l6g7942dfjpnzr17wwl89x";
})
(fetchNuGet {
name = "Newtonsoft.Json";
version = "9.0.1";
sha256 = "0mcy0i7pnfpqm4pcaiyzzji4g0c8i3a5gjz28rrr28110np8304r";
})
(fetchNuGet {
name = "NSubstitute";
version = "4.2.1";
sha256 = "0wgfjh032qds994fmgxvsg88nhgjrx7p9rnv6z678jm62qi14asy";
})
(fetchNuGet {
name = "NuGet.Frameworks";
version = "5.0.0";
sha256 = "18ijvmj13cwjdrrm52c8fpq021531zaz4mj4b4zapxaqzzxf2qjr";
})
(fetchNuGet {
name = "runtime.any.System.Collections";
version = "4.3.0";
sha256 = "0bv5qgm6vr47ynxqbnkc7i797fdi8gbjjxii173syrx14nmrkwg0";
})
(fetchNuGet {
name = "runtime.any.System.Diagnostics.Tools";
version = "4.3.0";
sha256 = "1wl76vk12zhdh66vmagni66h5xbhgqq7zkdpgw21jhxhvlbcl8pk";
})
(fetchNuGet {
name = "runtime.any.System.Diagnostics.Tracing";
version = "4.3.0";
sha256 = "00j6nv2xgmd3bi347k00m7wr542wjlig53rmj28pmw7ddcn97jbn";
})
(fetchNuGet {
name = "runtime.any.System.Globalization";
version = "4.3.0";
sha256 = "1daqf33hssad94lamzg01y49xwndy2q97i2lrb7mgn28656qia1x";
})
(fetchNuGet {
name = "runtime.any.System.Globalization.Calendars";
version = "4.3.0";
sha256 = "1ghhhk5psqxcg6w88sxkqrc35bxcz27zbqm2y5p5298pv3v7g201";
})
(fetchNuGet {
name = "runtime.any.System.IO";
version = "4.3.0";
sha256 = "0l8xz8zn46w4d10bcn3l4yyn4vhb3lrj2zw8llvz7jk14k4zps5x";
})
(fetchNuGet {
name = "runtime.any.System.Reflection";
version = "4.3.0";
sha256 = "02c9h3y35pylc0zfq3wcsvc5nqci95nrkq0mszifc0sjx7xrzkly";
})
(fetchNuGet {
name = "runtime.any.System.Reflection.Extensions";
version = "4.3.0";
sha256 = "0zyri97dfc5vyaz9ba65hjj1zbcrzaffhsdlpxc9bh09wy22fq33";
})
(fetchNuGet {
name = "runtime.any.System.Reflection.Primitives";
version = "4.3.0";
sha256 = "0x1mm8c6iy8rlxm8w9vqw7gb7s1ljadrn049fmf70cyh42vdfhrf";
})
(fetchNuGet {
name = "runtime.any.System.Resources.ResourceManager";
version = "4.3.0";
sha256 = "03kickal0iiby82wa5flar18kyv82s9s6d4xhk5h4bi5kfcyfjzl";
})
(fetchNuGet {
name = "runtime.any.System.Runtime";
version = "4.3.0";
sha256 = "1cqh1sv3h5j7ixyb7axxbdkqx6cxy00p4np4j91kpm492rf4s25b";
})
(fetchNuGet {
name = "runtime.any.System.Runtime.Handles";
version = "4.3.0";
sha256 = "0bh5bi25nk9w9xi8z23ws45q5yia6k7dg3i4axhfqlnj145l011x";
})
(fetchNuGet {
name = "runtime.any.System.Runtime.InteropServices";
version = "4.3.0";
sha256 = "0c3g3g3jmhlhw4klrc86ka9fjbl7i59ds1fadsb2l8nqf8z3kb19";
})
(fetchNuGet {
name = "runtime.any.System.Text.Encoding";
version = "4.3.0";
sha256 = "0aqqi1v4wx51h51mk956y783wzags13wa7mgqyclacmsmpv02ps3";
})
(fetchNuGet {
name = "runtime.any.System.Text.Encoding.Extensions";
version = "4.3.0";
sha256 = "0lqhgqi0i8194ryqq6v2gqx0fb86db2gqknbm0aq31wb378j7ip8";
})
(fetchNuGet {
name = "runtime.any.System.Threading.Tasks";
version = "4.3.0";
sha256 = "03mnvkhskbzxddz4hm113zsch1jyzh2cs450dk3rgfjp8crlw1va";
})
(fetchNuGet {
name = "runtime.any.System.Threading.Timer";
version = "4.3.0";
sha256 = "0aw4phrhwqz9m61r79vyfl5la64bjxj8l34qnrcwb28v49fg2086";
})
(fetchNuGet {
name = "runtime.debian.8-x64.runtime.native.System.Security.Cryptography.OpenSsl";
version = "4.3.0";
sha256 = "16rnxzpk5dpbbl1x354yrlsbvwylrq456xzpsha1n9y3glnhyx9d";
})
(fetchNuGet {
name = "runtime.debian.8-x64.runtime.native.System.Security.Cryptography.OpenSsl";
version = "4.3.2";
sha256 = "0rwpqngkqiapqc5c2cpkj7idhngrgss5qpnqg0yh40mbyflcxf8i";
})
(fetchNuGet {
name = "runtime.fedora.23-x64.runtime.native.System.Security.Cryptography.OpenSsl";
version = "4.3.0";
sha256 = "0hkg03sgm2wyq8nqk6dbm9jh5vcq57ry42lkqdmfklrw89lsmr59";
})
(fetchNuGet {
name = "runtime.fedora.23-x64.runtime.native.System.Security.Cryptography.OpenSsl";
version = "4.3.2";
sha256 = "1n06gxwlinhs0w7s8a94r1q3lwqzvynxwd3mp10ws9bg6gck8n4r";
})
(fetchNuGet {
name = "runtime.fedora.24-x64.runtime.native.System.Security.Cryptography.OpenSsl";
version = "4.3.0";
sha256 = "0c2p354hjx58xhhz7wv6div8xpi90sc6ibdm40qin21bvi7ymcaa";
})
(fetchNuGet {
name = "runtime.fedora.24-x64.runtime.native.System.Security.Cryptography.OpenSsl";
version = "4.3.2";
sha256 = "0404wqrc7f2yc0wxv71y3nnybvqx8v4j9d47hlscxy759a525mc3";
})
(fetchNuGet {
name = "runtime.native.System";
version = "4.3.0";
sha256 = "15hgf6zaq9b8br2wi1i3x0zvmk410nlmsmva9p0bbg73v6hml5k4";
})
(fetchNuGet {
name = "runtime.native.System.IO.Compression";
version = "4.3.0";
sha256 = "1vvivbqsk6y4hzcid27pqpm5bsi6sc50hvqwbcx8aap5ifrxfs8d";
})
(fetchNuGet {
name = "runtime.native.System.Net.Http";
version = "4.3.0";
sha256 = "1n6rgz5132lcibbch1qlf0g9jk60r0kqv087hxc0lisy50zpm7kk";
})
(fetchNuGet {
name = "runtime.native.System.Security.Cryptography.Apple";
version = "4.3.0";
sha256 = "1b61p6gw1m02cc1ry996fl49liiwky6181dzr873g9ds92zl326q";
})
(fetchNuGet {
name = "runtime.native.System.Security.Cryptography.OpenSsl";
version = "4.3.0";
sha256 = "18pzfdlwsg2nb1jjjjzyb5qlgy6xjxzmhnfaijq5s2jw3cm3ab97";
})
(fetchNuGet {
name = "runtime.native.System.Security.Cryptography.OpenSsl";
version = "4.3.2";
sha256 = "0zy5r25jppz48i2bkg8b9lfig24xixg6nm3xyr1379zdnqnpm8f6";
})
(fetchNuGet {
name = "runtime.opensuse.13.2-x64.runtime.native.System.Security.Cryptography.OpenSsl";
version = "4.3.0";
sha256 = "0qyynf9nz5i7pc26cwhgi8j62ps27sqmf78ijcfgzab50z9g8ay3";
})
(fetchNuGet {
name = "runtime.opensuse.13.2-x64.runtime.native.System.Security.Cryptography.OpenSsl";
version = "4.3.2";
sha256 = "096ch4n4s8k82xga80lfmpimpzahd2ip1mgwdqgar0ywbbl6x438";
})
(fetchNuGet {
name = "runtime.opensuse.42.1-x64.runtime.native.System.Security.Cryptography.OpenSsl";
version = "4.3.0";
sha256 = "1klrs545awhayryma6l7g2pvnp9xy4z0r1i40r80zb45q3i9nbyf";
})
(fetchNuGet {
name = "runtime.opensuse.42.1-x64.runtime.native.System.Security.Cryptography.OpenSsl";
version = "4.3.2";
sha256 = "1dm8fifl7rf1gy7lnwln78ch4rw54g0pl5g1c189vawavll7p6rj";
})
(fetchNuGet {
name = "runtime.osx.10.10-x64.runtime.native.System.Security.Cryptography.Apple";
version = "4.3.0";
sha256 = "10yc8jdrwgcl44b4g93f1ds76b176bajd3zqi2faf5rvh1vy9smi";
})
(fetchNuGet {
name = "runtime.osx.10.10-x64.runtime.native.System.Security.Cryptography.OpenSsl";
version = "4.3.0";
sha256 = "0zcxjv5pckplvkg0r6mw3asggm7aqzbdjimhvsasb0cgm59x09l3";
})
(fetchNuGet {
name = "runtime.osx.10.10-x64.runtime.native.System.Security.Cryptography.OpenSsl";
version = "4.3.2";
sha256 = "1m9z1k9kzva9n9kwinqxl97x2vgl79qhqjlv17k9s2ymcyv2bwr6";
})
(fetchNuGet {
name = "runtime.rhel.7-x64.runtime.native.System.Security.Cryptography.OpenSsl";
version = "4.3.0";
sha256 = "0vhynn79ih7hw7cwjazn87rm9z9fj0rvxgzlab36jybgcpcgphsn";
})
(fetchNuGet {
name = "runtime.rhel.7-x64.runtime.native.System.Security.Cryptography.OpenSsl";
version = "4.3.2";
sha256 = "1cpx56mcfxz7cpn57wvj18sjisvzq8b5vd9rw16ihd2i6mcp3wa1";
})
(fetchNuGet {
name = "runtime.ubuntu.14.04-x64.runtime.native.System.Security.Cryptography.OpenSsl";
version = "4.3.0";
sha256 = "160p68l2c7cqmyqjwxydcvgw7lvl1cr0znkw8fp24d1by9mqc8p3";
})
(fetchNuGet {
name = "runtime.ubuntu.14.04-x64.runtime.native.System.Security.Cryptography.OpenSsl";
version = "4.3.2";
sha256 = "15gsm1a8jdmgmf8j5v1slfz8ks124nfdhk2vxs2rw3asrxalg8hi";
})
(fetchNuGet {
name = "runtime.ubuntu.16.04-x64.runtime.native.System.Security.Cryptography.OpenSsl";
version = "4.3.0";
sha256 = "15zrc8fgd8zx28hdghcj5f5i34wf3l6bq5177075m2bc2j34jrqy";
})
(fetchNuGet {
name = "runtime.ubuntu.16.04-x64.runtime.native.System.Security.Cryptography.OpenSsl";
version = "4.3.2";
sha256 = "0q0n5q1r1wnqmr5i5idsrd9ywl33k0js4pngkwq9p368mbxp8x1w";
})
(fetchNuGet {
name = "runtime.ubuntu.16.10-x64.runtime.native.System.Security.Cryptography.OpenSsl";
version = "4.3.0";
sha256 = "1p4dgxax6p7rlgj4q73k73rslcnz4wdcv8q2flg1s8ygwcm58ld5";
})
(fetchNuGet {
name = "runtime.ubuntu.16.10-x64.runtime.native.System.Security.Cryptography.OpenSsl";
version = "4.3.2";
sha256 = "1x0g58pbpjrmj2x2qw17rdwwnrcl0wvim2hdwz48lixvwvp22n9c";
})
(fetchNuGet {
name = "runtime.unix.Microsoft.Win32.Primitives";
version = "4.3.0";
sha256 = "0y61k9zbxhdi0glg154v30kkq7f8646nif8lnnxbvkjpakggd5id";
})
(fetchNuGet {
name = "runtime.unix.System.Console";
version = "4.3.0";
sha256 = "1pfpkvc6x2if8zbdzg9rnc5fx51yllprl8zkm5npni2k50lisy80";
})
(fetchNuGet {
name = "runtime.unix.System.Diagnostics.Debug";
version = "4.3.0";
sha256 = "1lps7fbnw34bnh3lm31gs5c0g0dh7548wfmb8zz62v0zqz71msj5";
})
(fetchNuGet {
name = "runtime.unix.System.IO.FileSystem";
version = "4.3.0";
sha256 = "14nbkhvs7sji5r1saj2x8daz82rnf9kx28d3v2qss34qbr32dzix";
})
(fetchNuGet {
name = "runtime.unix.System.Net.Primitives";
version = "4.3.0";
sha256 = "0bdnglg59pzx9394sy4ic66kmxhqp8q8bvmykdxcbs5mm0ipwwm4";
})
(fetchNuGet {
name = "runtime.unix.System.Net.Sockets";
version = "4.3.0";
sha256 = "03npdxzy8gfv035bv1b9rz7c7hv0rxl5904wjz51if491mw0xy12";
})
(fetchNuGet {
name = "runtime.unix.System.Private.Uri";
version = "4.3.0";
sha256 = "1jx02q6kiwlvfksq1q9qr17fj78y5v6mwsszav4qcz9z25d5g6vk";
})
(fetchNuGet {
name = "runtime.unix.System.Runtime.Extensions";
version = "4.3.0";
sha256 = "0pnxxmm8whx38dp6yvwgmh22smknxmqs5n513fc7m4wxvs1bvi4p";
})
(fetchNuGet {
name = "StreamJsonRpc";
version = "2.3.103";
@ -1006,316 +1312,4 @@ in [
sha256 = "0bmz1l06dihx52jxjr22dyv5mxv6pj4852lx68grjm7bivhrbfwi";
})
(fetchNuGet {
name = "runtime.any.System.Collections";
version = "4.3.0";
sha256 = "0bv5qgm6vr47ynxqbnkc7i797fdi8gbjjxii173syrx14nmrkwg0";
})
(fetchNuGet {
name = "runtime.any.System.Diagnostics.Tools";
version = "4.3.0";
sha256 = "1wl76vk12zhdh66vmagni66h5xbhgqq7zkdpgw21jhxhvlbcl8pk";
})
(fetchNuGet {
name = "runtime.any.System.Diagnostics.Tracing";
version = "4.3.0";
sha256 = "00j6nv2xgmd3bi347k00m7wr542wjlig53rmj28pmw7ddcn97jbn";
})
(fetchNuGet {
name = "runtime.any.System.Globalization";
version = "4.3.0";
sha256 = "1daqf33hssad94lamzg01y49xwndy2q97i2lrb7mgn28656qia1x";
})
(fetchNuGet {
name = "runtime.any.System.Globalization.Calendars";
version = "4.3.0";
sha256 = "1ghhhk5psqxcg6w88sxkqrc35bxcz27zbqm2y5p5298pv3v7g201";
})
(fetchNuGet {
name = "runtime.any.System.IO";
version = "4.3.0";
sha256 = "0l8xz8zn46w4d10bcn3l4yyn4vhb3lrj2zw8llvz7jk14k4zps5x";
})
(fetchNuGet {
name = "runtime.any.System.Reflection";
version = "4.3.0";
sha256 = "02c9h3y35pylc0zfq3wcsvc5nqci95nrkq0mszifc0sjx7xrzkly";
})
(fetchNuGet {
name = "runtime.any.System.Reflection.Extensions";
version = "4.3.0";
sha256 = "0zyri97dfc5vyaz9ba65hjj1zbcrzaffhsdlpxc9bh09wy22fq33";
})
(fetchNuGet {
name = "runtime.any.System.Reflection.Primitives";
version = "4.3.0";
sha256 = "0x1mm8c6iy8rlxm8w9vqw7gb7s1ljadrn049fmf70cyh42vdfhrf";
})
(fetchNuGet {
name = "runtime.any.System.Resources.ResourceManager";
version = "4.3.0";
sha256 = "03kickal0iiby82wa5flar18kyv82s9s6d4xhk5h4bi5kfcyfjzl";
})
(fetchNuGet {
name = "runtime.any.System.Runtime";
version = "4.3.0";
sha256 = "1cqh1sv3h5j7ixyb7axxbdkqx6cxy00p4np4j91kpm492rf4s25b";
})
(fetchNuGet {
name = "runtime.any.System.Runtime.Handles";
version = "4.3.0";
sha256 = "0bh5bi25nk9w9xi8z23ws45q5yia6k7dg3i4axhfqlnj145l011x";
})
(fetchNuGet {
name = "runtime.any.System.Runtime.InteropServices";
version = "4.3.0";
sha256 = "0c3g3g3jmhlhw4klrc86ka9fjbl7i59ds1fadsb2l8nqf8z3kb19";
})
(fetchNuGet {
name = "runtime.any.System.Text.Encoding";
version = "4.3.0";
sha256 = "0aqqi1v4wx51h51mk956y783wzags13wa7mgqyclacmsmpv02ps3";
})
(fetchNuGet {
name = "runtime.any.System.Text.Encoding.Extensions";
version = "4.3.0";
sha256 = "0lqhgqi0i8194ryqq6v2gqx0fb86db2gqknbm0aq31wb378j7ip8";
})
(fetchNuGet {
name = "runtime.any.System.Threading.Tasks";
version = "4.3.0";
sha256 = "03mnvkhskbzxddz4hm113zsch1jyzh2cs450dk3rgfjp8crlw1va";
})
(fetchNuGet {
name = "runtime.any.System.Threading.Timer";
version = "4.3.0";
sha256 = "0aw4phrhwqz9m61r79vyfl5la64bjxj8l34qnrcwb28v49fg2086";
})
(fetchNuGet {
name = "runtime.debian.8-x64.runtime.native.System.Security.Cryptography.OpenSsl";
version = "4.3.0";
sha256 = "16rnxzpk5dpbbl1x354yrlsbvwylrq456xzpsha1n9y3glnhyx9d";
})
(fetchNuGet {
name = "runtime.debian.8-x64.runtime.native.System.Security.Cryptography.OpenSsl";
version = "4.3.2";
sha256 = "0rwpqngkqiapqc5c2cpkj7idhngrgss5qpnqg0yh40mbyflcxf8i";
})
(fetchNuGet {
name = "runtime.fedora.23-x64.runtime.native.System.Security.Cryptography.OpenSsl";
version = "4.3.0";
sha256 = "0hkg03sgm2wyq8nqk6dbm9jh5vcq57ry42lkqdmfklrw89lsmr59";
})
(fetchNuGet {
name = "runtime.fedora.23-x64.runtime.native.System.Security.Cryptography.OpenSsl";
version = "4.3.2";
sha256 = "1n06gxwlinhs0w7s8a94r1q3lwqzvynxwd3mp10ws9bg6gck8n4r";
})
(fetchNuGet {
name = "runtime.fedora.24-x64.runtime.native.System.Security.Cryptography.OpenSsl";
version = "4.3.0";
sha256 = "0c2p354hjx58xhhz7wv6div8xpi90sc6ibdm40qin21bvi7ymcaa";
})
(fetchNuGet {
name = "runtime.fedora.24-x64.runtime.native.System.Security.Cryptography.OpenSsl";
version = "4.3.2";
sha256 = "0404wqrc7f2yc0wxv71y3nnybvqx8v4j9d47hlscxy759a525mc3";
})
(fetchNuGet {
name = "runtime.native.System";
version = "4.3.0";
sha256 = "15hgf6zaq9b8br2wi1i3x0zvmk410nlmsmva9p0bbg73v6hml5k4";
})
(fetchNuGet {
name = "runtime.native.System.IO.Compression";
version = "4.3.0";
sha256 = "1vvivbqsk6y4hzcid27pqpm5bsi6sc50hvqwbcx8aap5ifrxfs8d";
})
(fetchNuGet {
name = "runtime.native.System.Net.Http";
version = "4.3.0";
sha256 = "1n6rgz5132lcibbch1qlf0g9jk60r0kqv087hxc0lisy50zpm7kk";
})
(fetchNuGet {
name = "runtime.native.System.Security.Cryptography.Apple";
version = "4.3.0";
sha256 = "1b61p6gw1m02cc1ry996fl49liiwky6181dzr873g9ds92zl326q";
})
(fetchNuGet {
name = "runtime.native.System.Security.Cryptography.OpenSsl";
version = "4.3.0";
sha256 = "18pzfdlwsg2nb1jjjjzyb5qlgy6xjxzmhnfaijq5s2jw3cm3ab97";
})
(fetchNuGet {
name = "runtime.native.System.Security.Cryptography.OpenSsl";
version = "4.3.2";
sha256 = "0zy5r25jppz48i2bkg8b9lfig24xixg6nm3xyr1379zdnqnpm8f6";
})
(fetchNuGet {
name = "runtime.opensuse.13.2-x64.runtime.native.System.Security.Cryptography.OpenSsl";
version = "4.3.0";
sha256 = "0qyynf9nz5i7pc26cwhgi8j62ps27sqmf78ijcfgzab50z9g8ay3";
})
(fetchNuGet {
name = "runtime.opensuse.13.2-x64.runtime.native.System.Security.Cryptography.OpenSsl";
version = "4.3.2";
sha256 = "096ch4n4s8k82xga80lfmpimpzahd2ip1mgwdqgar0ywbbl6x438";
})
(fetchNuGet {
name = "runtime.opensuse.42.1-x64.runtime.native.System.Security.Cryptography.OpenSsl";
version = "4.3.0";
sha256 = "1klrs545awhayryma6l7g2pvnp9xy4z0r1i40r80zb45q3i9nbyf";
})
(fetchNuGet {
name = "runtime.opensuse.42.1-x64.runtime.native.System.Security.Cryptography.OpenSsl";
version = "4.3.2";
sha256 = "1dm8fifl7rf1gy7lnwln78ch4rw54g0pl5g1c189vawavll7p6rj";
})
(fetchNuGet {
name = "runtime.osx.10.10-x64.runtime.native.System.Security.Cryptography.Apple";
version = "4.3.0";
sha256 = "10yc8jdrwgcl44b4g93f1ds76b176bajd3zqi2faf5rvh1vy9smi";
})
(fetchNuGet {
name = "runtime.osx.10.10-x64.runtime.native.System.Security.Cryptography.OpenSsl";
version = "4.3.0";
sha256 = "0zcxjv5pckplvkg0r6mw3asggm7aqzbdjimhvsasb0cgm59x09l3";
})
(fetchNuGet {
name = "runtime.osx.10.10-x64.runtime.native.System.Security.Cryptography.OpenSsl";
version = "4.3.2";
sha256 = "1m9z1k9kzva9n9kwinqxl97x2vgl79qhqjlv17k9s2ymcyv2bwr6";
})
(fetchNuGet {
name = "runtime.rhel.7-x64.runtime.native.System.Security.Cryptography.OpenSsl";
version = "4.3.0";
sha256 = "0vhynn79ih7hw7cwjazn87rm9z9fj0rvxgzlab36jybgcpcgphsn";
})
(fetchNuGet {
name = "runtime.rhel.7-x64.runtime.native.System.Security.Cryptography.OpenSsl";
version = "4.3.2";
sha256 = "1cpx56mcfxz7cpn57wvj18sjisvzq8b5vd9rw16ihd2i6mcp3wa1";
})
(fetchNuGet {
name = "runtime.ubuntu.14.04-x64.runtime.native.System.Security.Cryptography.OpenSsl";
version = "4.3.0";
sha256 = "160p68l2c7cqmyqjwxydcvgw7lvl1cr0znkw8fp24d1by9mqc8p3";
})
(fetchNuGet {
name = "runtime.ubuntu.14.04-x64.runtime.native.System.Security.Cryptography.OpenSsl";
version = "4.3.2";
sha256 = "15gsm1a8jdmgmf8j5v1slfz8ks124nfdhk2vxs2rw3asrxalg8hi";
})
(fetchNuGet {
name = "runtime.ubuntu.16.04-x64.runtime.native.System.Security.Cryptography.OpenSsl";
version = "4.3.0";
sha256 = "15zrc8fgd8zx28hdghcj5f5i34wf3l6bq5177075m2bc2j34jrqy";
})
(fetchNuGet {
name = "runtime.ubuntu.16.04-x64.runtime.native.System.Security.Cryptography.OpenSsl";
version = "4.3.2";
sha256 = "0q0n5q1r1wnqmr5i5idsrd9ywl33k0js4pngkwq9p368mbxp8x1w";
})
(fetchNuGet {
name = "runtime.ubuntu.16.10-x64.runtime.native.System.Security.Cryptography.OpenSsl";
version = "4.3.0";
sha256 = "1p4dgxax6p7rlgj4q73k73rslcnz4wdcv8q2flg1s8ygwcm58ld5";
})
(fetchNuGet {
name = "runtime.ubuntu.16.10-x64.runtime.native.System.Security.Cryptography.OpenSsl";
version = "4.3.2";
sha256 = "1x0g58pbpjrmj2x2qw17rdwwnrcl0wvim2hdwz48lixvwvp22n9c";
})
(fetchNuGet {
name = "runtime.unix.Microsoft.Win32.Primitives";
version = "4.3.0";
sha256 = "0y61k9zbxhdi0glg154v30kkq7f8646nif8lnnxbvkjpakggd5id";
})
(fetchNuGet {
name = "runtime.unix.System.Console";
version = "4.3.0";
sha256 = "1pfpkvc6x2if8zbdzg9rnc5fx51yllprl8zkm5npni2k50lisy80";
})
(fetchNuGet {
name = "runtime.unix.System.Diagnostics.Debug";
version = "4.3.0";
sha256 = "1lps7fbnw34bnh3lm31gs5c0g0dh7548wfmb8zz62v0zqz71msj5";
})
(fetchNuGet {
name = "runtime.unix.System.IO.FileSystem";
version = "4.3.0";
sha256 = "14nbkhvs7sji5r1saj2x8daz82rnf9kx28d3v2qss34qbr32dzix";
})
(fetchNuGet {
name = "runtime.unix.System.Net.Primitives";
version = "4.3.0";
sha256 = "0bdnglg59pzx9394sy4ic66kmxhqp8q8bvmykdxcbs5mm0ipwwm4";
})
(fetchNuGet {
name = "runtime.unix.System.Net.Sockets";
version = "4.3.0";
sha256 = "03npdxzy8gfv035bv1b9rz7c7hv0rxl5904wjz51if491mw0xy12";
})
(fetchNuGet {
name = "runtime.unix.System.Private.Uri";
version = "4.3.0";
sha256 = "1jx02q6kiwlvfksq1q9qr17fj78y5v6mwsszav4qcz9z25d5g6vk";
})
(fetchNuGet {
name = "runtime.unix.System.Runtime.Extensions";
version = "4.3.0";
sha256 = "0pnxxmm8whx38dp6yvwgmh22smknxmqs5n513fc7m4wxvs1bvi4p";
})
]

View file

@ -14,6 +14,10 @@ stdenv.mkDerivation {
sha256 = "1fslfj5d7fhj3f7kh558b8mk5wllwyq4rnhfkyd96fpy144sdcka";
};
postPatch = ''
substituteInPlace setup.ml --replace js_of_ocaml.ocamlbuild js_of_ocaml-ocamlbuild
'';
buildInputs = [ which ] ++ (with ocamlPackages; [
ocaml findlib ocamlbuild menhir js_of_ocaml js_of_ocaml-ocamlbuild
]);

View file

@ -23,5 +23,6 @@ stdenv.mkDerivation rec {
description = "A multiplatform basic GUI library";
maintainers = [ maintainers.raskin ];
platforms = platforms.linux;
license = licenses.free;
};
}

View file

@ -19,5 +19,6 @@ stdenv.mkDerivation rec {
homepage = "https://github.com/f4exb/cm256cc";
platforms = platforms.linux;
maintainers = with maintainers; [ alkeryn ];
license = licenses.gpl3;
};
}

View file

@ -10,7 +10,10 @@ stdenv.mkDerivation {
buildInputs = [libxml2 openssl bzip2];
meta = {
platforms = stdenv.lib.platforms.linux;
meta = with stdenv.lib; {
description = "Peer-to-Peer file sharing client";
homepage = "http://dcgui.berlios.de";
platforms = platforms.linux;
license = [ licenses.openssl licenses.gpl2 ];
};
}

View file

@ -118,6 +118,7 @@
#, shine ? null # Fixed-point MP3 encoder
, soxr ? null # Resampling via soxr
, speex ? null # Speex de/encoder
, srt ? null # Secure Reliable Transport (SRT) protocol
#, twolame ? null # MP2 encoder
#, utvideo ? null # Ut Video de/encoder
, vid-stab ? null # Video stabilization
@ -355,6 +356,7 @@ stdenv.mkDerivation rec {
#(enableFeature (libnut != null) "libnut")
(enableFeature (libopus != null) "libopus")
(enableFeature (librsvg != null) "librsvg")
(enableFeature (srt != null) "libsrt")
(enableFeature (libssh != null) "libssh")
(enableFeature (libtheora != null) "libtheora")
(enableFeature (if isLinux then libv4l != null else false) "libv4l2")
@ -418,7 +420,7 @@ stdenv.mkDerivation rec {
libjack2 ladspaH lame libaom libass libbluray libbs2b libcaca libdc1394 libmodplug libmysofa
libogg libopus librsvg libssh libtheora libvdpau libvorbis libvpx libwebp libX11
libxcb libXv libXext lzma openal openjpeg libpulseaudio rtmpdump opencore-amr
samba SDL2 soxr speex vid-stab vo-amrwbenc wavpack x264 x265 xavs xvidcore
samba SDL2 soxr speex srt vid-stab vo-amrwbenc wavpack x264 x265 xavs xvidcore
zeromq4 zlib
] ++ optionals openglExtlib [ libGL libGLU ]
++ optionals nonfreeLicensing [ fdk_aac openssl ]

View file

@ -4,28 +4,34 @@
stdenv.mkDerivation rec {
pname = "igraph";
version = "0.7.1";
version = "0.8.2";
src = fetchFromGitHub {
owner = "igraph";
repo = pname;
rev = version;
sha256 = "1wsy0r511gk069il6iqjs27q8cjvqz20gf0a7inybx1bw84845z8";
sha256 = "015yh9s19lmxm7l1ld8adlsqh1lrmzicl801saixdwl9w05hfva4";
};
nativeBuildInputs = [ pkgconfig autoreconfHook ];
buildInputs = [ flex yacc zlib libxml2 ];
# This file is normally generated by igraph's bootstrap.sh, but we can do it
# ourselves. ~ C.
# Normally, igraph wants us to call bootstrap.sh, which will call
# tools/getversion.sh. Instead, we're going to put the version directly
# where igraph wants, and then let autoreconfHook do the rest of the
# bootstrap. ~ C.
postPatch = ''
echo "${version}" > VERSION
echo "${version}" > IGRAPH_VERSION
'';
doCheck = true;
meta = {
description = "The network analysis package";
homepage = "https://igraph.org/";
license = lib.licenses.gpl2;
# NB: Known to fail tests on aarch64.
platforms = [ "x86_64-linux" ];
maintainers = [ lib.maintainers.MostAwesomeDude ];
};
}

View file

@ -25,6 +25,7 @@ stdenv.mkDerivation rec {
homepage = "https://github.com/json-c/json-c/wiki";
maintainers = with maintainers; [ lovek323 ];
platforms = platforms.unix;
license = licenses.mit;
longDescription = ''
JSON-C implements a reference counting object model that allows you to

View file

@ -24,4 +24,10 @@ stdenv.mkDerivation {
doCheck = false; # hasdescriptor.c test fails, hrm.
installPhase = ''prefix="/" DESTDIR=$out ./installlib'';
meta = with stdenv.lib; {
description = "Installs the BlocksRuntime library from the compiler-rt";
homepage = "https://github.com/mackyle/blocksruntime";
license = licenses.mit;
};
}

View file

@ -2,12 +2,12 @@
, xercesc, xml-security-c, pkgconfig, xsd, zlib, xalanc, xxd }:
stdenv.mkDerivation rec {
version = "3.14.2";
version = "3.14.3";
pname = "libdigidocpp";
src = fetchurl {
url = "https://github.com/open-eid/libdigidocpp/releases/download/v${version}/libdigidocpp-${version}.tar.gz";
sha256 = "0d3p72gvnj00bxl6lzh6n5x1r37wj54mzzv700gngcvr3m62lkbv";
sha256 = "1hq1q2frqnm4wxcfr7vn8kqwyfdz3hx22w40kn69zh140pig6jc5";
};
nativeBuildInputs = [ cmake pkgconfig xxd ];

View file

@ -2,18 +2,20 @@
stdenv.mkDerivation rec {
pname = "libosmium";
version = "2.15.5";
version = "2.15.6";
src = fetchFromGitHub {
owner = "osmcode";
repo = "libosmium";
rev = "v${version}";
sha256 = "1f21dzzkxzi74hv17fs9kb2w6indqvvm4lkxclz4j4x98k8q3n59";
sha256 = "0rqy18bbakp41f44y5id9ixh0ar2dby46z17p4115z8k1vv9znq2";
};
nativeBuildInputs = [ cmake ];
buildInputs = [ protozero zlib bzip2 expat boost ];
doCheck = true;
meta = with stdenv.lib; {
description = "Fast and flexible C++ library for working with OpenStreetMap data";

View file

@ -1,4 +1,4 @@
{ stdenv, fetchurl, libtool }:
{ stdenv, fetchurl, fetchpatch, libtool }:
stdenv.mkDerivation rec {
pname = "libtomcrypt";
@ -9,6 +9,14 @@ stdenv.mkDerivation rec {
sha256 = "113vfrgapyv72lalhd3nkw7jnks8az0gcb5wqn9hj19nhcxlrbcn";
};
patches = [
(fetchpatch {
name = "CVE-2019-17362.patch";
url = "https://github.com/libtom/libtomcrypt/pull/508/commits/25c26a3b7a9ad8192ccc923e15cf62bf0108ef94.patch";
sha256 = "1bwsj0pwffxw648wd713z3xcyrbxc2z646psrzp38ys564fjh5zf";
})
];
nativeBuildInputs = [ libtool ];
postPatch = ''

View file

@ -7,9 +7,9 @@ let
s = # Generated upstream information
rec {
pname = "libvncserver";
version = "0.9.12";
version = "0.9.13";
url = "https://github.com/LibVNC/libvncserver/archive/LibVNCServer-${version}.tar.gz";
sha256 = "1226hb179l914919f5nm2mlf8rhaarqbf48aa649p4rwmghyx9vm"; # unpacked archive checksum
sha256 = "0zz0hslw8b1p3crnfy3xnmrljik359h83dpk64s697dqdcrzy141"; # unpacked archive checksum
};
in
stdenv.mkDerivation {
@ -17,18 +17,7 @@ stdenv.mkDerivation {
src = fetchzip {
inherit (s) url sha256;
};
patches = [
(fetchpatch {
name = "CVE-2018-20750.patch";
url = "https://github.com/LibVNC/libvncserver/commit/09e8fc02f59f16e2583b34fe1a270c238bd9ffec.patch";
sha256 = "004h50786nvjl3y3yazpsi2b767vc9gqrwm1ralj3zgy47kwfhqm";
})
(fetchpatch {
name = "CVE-2019-15681.patch";
url = "https://github.com/LibVNC/libvncserver/commit/d01e1bb4246323ba6fcee3b82ef1faa9b1dac82a.patch";
sha256 = "0hf0ss7all2m50z2kan4mck51ws44yim4ymn8p0d991y465y6l9s";
})
];
nativeBuildInputs = [ cmake ];
buildInputs = [
libjpeg openssl libgcrypt libpng

View file

@ -2,11 +2,11 @@
stdenv.mkDerivation rec {
pname = "libxls";
version = "1.5.2";
version = "1.5.3";
src = fetchurl {
url = "https://github.com/libxls/libxls/releases/download/v${version}/libxls-${version}.tar.gz";
sha256 = "1akadsyl10rp101ccjmrxr7933c3v641k377bn74jv6cdkcm4zld";
sha256 = "0rl513wpq5qh7wkmdk4g9c68rzffv3mcbz48p4xyg4969zrx8lnm";
};
nativeBuildInputs = [ unzip ];

View file

@ -14,13 +14,13 @@
stdenv.mkDerivation rec {
pname = "mtxclient";
version = "0.3.0";
version = "0.3.1";
src = fetchFromGitHub {
owner = "Nheko-Reborn";
repo = "mtxclient";
rev = "v${version}";
sha256 = "0vf5xmn6yfi5lvskfgrdmnalvclzrapcrml92bj9qaa8vq8mfsf2";
sha256 = "1dg4dq20g0ah62j5s3gpsxqq4ny7lxkxdxa9q6g54hdwkrb9ms7x";
};
cmakeFlags = [

View file

@ -31,5 +31,6 @@ stdenv.mkDerivation rec {
description = "Free and open source C++ spell checking library";
homepage = "https://nuspell.github.io/";
maintainers = with maintainers; [ fpletz ];
license = licenses.gpl3;
};
}

View file

@ -5,13 +5,13 @@
# https://github.com/oneapi-src/oneDNN#oneapi-deep-neural-network-library-onednn
stdenv.mkDerivation rec {
pname = "oneDNN";
version = "1.4";
version = "1.5";
src = fetchFromGitHub {
owner = "oneapi-src";
repo = "oneDNN";
rev = "v${version}";
sha256 = "162fb0c7klahz2irchhyxympi4fq4yp284apc53cadbss41mzld9";
sha256 = "0diiy3g4wz5lnz5mdvka5p2nwmrpfldsz83sssr5yiir29m4lqap";
};
outputs = [ "out" "dev" "doc" ];

View file

@ -32,7 +32,9 @@ stdenv.mkDerivation {
cp exe/* $out/bin
'';
meta = {
platforms = stdenv.lib.platforms.unix;
meta = with stdenv.lib; {
description = "PDF to XML converter";
platforms = platforms.unix;
license = licenses.gpl2;
};
}

View file

@ -2,13 +2,13 @@
stdenv.mkDerivation rec {
pname = "qtstyleplugin-kvantum-qt4";
version = "0.15.2";
version = "0.15.3";
src = fetchFromGitHub {
owner = "tsujan";
repo = "Kvantum";
rev = "V${version}";
sha256 = "0cv0lxyi2sr0k7f03rsh1j28avdxd0l0480jsa95ca3d2lq392g3";
sha256 = "0jq3y63c36nr2rprg7im9ik112p5zvhj46vzgxfbnpvskmg0cchm";
};
nativeBuildInputs = [ qmake4Hook ];

View file

@ -18,6 +18,7 @@ stdenv.mkDerivation rec {
homepage = "https://github.com/f4exb/serialdv";
platforms = platforms.linux;
maintainers = with maintainers; [ alkeryn ];
license = licenses.gpl3;
};
}

View file

@ -20,10 +20,11 @@ stdenv.mkDerivation {
ln -s $out/share/wasm32-wasi/undefined-symbols.txt $out/lib/wasi.imports
'';
meta = {
meta = with stdenv.lib; {
description = "WASI libc implementation for WebAssembly";
homepage = "https://wasi.dev";
platforms = lib.platforms.wasi;
maintainers = [ lib.maintainers.matthewbauer ];
platforms = platforms.wasi;
maintainers = [ maintainers.matthewbauer ];
license = with licenses; [ asl20 mit llvm-exception ];
};
}

View file

@ -0,0 +1,33 @@
{ lib
, stdenv
, fetchFromGitHub
, cmake
, pkg-config
, bluez
}:
stdenv.mkDerivation rec {
pname = "WiiUse";
version = "0.15.5";
src = fetchFromGitHub {
owner = "wiiuse";
repo = "wiiuse";
rev = "${version}";
sha256 = "05gc3s0wxx7ga4g32yyibyxdh46rm9bbslblrc72ynrjxq98sg13";
};
nativeBuildInputs = [ cmake ];
buildInputs = [ (lib.getDev bluez) ];
cmakeFlags = [ "-DBUILD_EXAMPLE_SDL=NO" ];
meta = with lib; {
description = "Feature complete cross-platform Wii Remote access library";
license = licenses.gpl3;
homepage = "https://github.com/wiiuse/wiiuse";
maintainers = with maintainers; [ shamilton ];
platforms = with platforms; linux;
};
}

View file

@ -1,10 +1,22 @@
{ stdenv, fetchFromGitHub, libusb-compat-0_1, readline ? null }:
{ stdenv
, fetchFromGitHub
, autoPatchelfHook
, libusb-compat-0_1
, readline ? null
, enableReadline ? true
, hidapi ? null
, pkg-config ? null
, mspds ? null
, enableMspds ? false
}:
let
assert stdenv.isDarwin -> hidapi != null && pkg-config != null;
assert enableReadline -> readline != null;
assert enableMspds -> mspds != null;
stdenv.mkDerivation rec {
version = "0.25";
in stdenv.mkDerivation {
pname = "mspdebug";
inherit version;
src = fetchFromGitHub {
owner = "dlbeer";
repo = "mspdebug";
@ -12,9 +24,34 @@ in stdenv.mkDerivation {
sha256 = "0prgwb5vx6fd4bj12ss1bbb6axj2kjyriyjxqrzd58s5jyyy8d3c";
};
buildInputs = [ libusb-compat-0_1 readline ];
makeFlags = [ "PREFIX=$(out)" "INSTALL=install" ] ++
(if readline == null then [ "WITHOUT_READLINE=1" ] else []);
enableParallelBuilding = true;
nativeBuildInputs = stdenv.lib.optional stdenv.isDarwin pkg-config
++ stdenv.lib.optional (enableMspds && stdenv.isLinux) autoPatchelfHook;
buildInputs = [ libusb-compat-0_1 ]
++ stdenv.lib.optional stdenv.isDarwin hidapi
++ stdenv.lib.optional enableReadline readline;
postPatch = stdenv.lib.optionalString stdenv.isDarwin ''
# TODO: remove once a new 0.26+ release is made
substituteInPlace drivers/tilib_api.c --replace .so ${stdenv.hostPlatform.extensions.sharedLibrary}
# Makefile only uses pkg-config if it detects homebrew
substituteInPlace Makefile --replace brew true
'';
# TODO: wrap with MSPDEBUG_TILIB_PATH env var instead of these rpath fixups in 0.26+
runtimeDependencies = stdenv.lib.optional enableMspds mspds;
postFixup = stdenv.lib.optionalString (enableMspds && stdenv.isDarwin) ''
# autoPatchelfHook only works on linux so...
for dep in $runtimeDependencies; do
install_name_tool -add_rpath $dep/lib $out/bin/$pname
done
'';
installFlags = [ "PREFIX=$(out)" "INSTALL=install" ];
makeFlags = [ "UNAME_S=$(unameS)" ] ++
stdenv.lib.optional (!enableReadline) "WITHOUT_READLINE=1";
unameS = stdenv.lib.optionalString stdenv.isDarwin "Darwin";
meta = with stdenv.lib; {
description = "A free programmer, debugger, and gdb proxy for MSP430 MCUs";
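The new arguments introduced above (enableReadline, enableMspds, hidapi, mspds) are ordinary callPackage parameters, so they can be toggled downstream with the standard override mechanism. A minimal sketch, assuming a nixpkgs checkout with this change applied and the usual top-level mspdebug attribute; the file name and the commented-out option are illustrative only, not part of the diff:

# sketch.nix (hypothetical)
let
  pkgs = import <nixpkgs> { };
in
pkgs.mspdebug.override {
  enableReadline = false;  # builds with WITHOUT_READLINE=1 and drops the readline input
  # enableMspds = true;    # would additionally require an mspds package to satisfy the assert
}

Building the variant would then be a plain `nix-build sketch.nix`.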


@@ -142,7 +142,9 @@
, "prettier"
, "pscid"
, "pulp"
, "purescript-language-server"
, "purescript-psa"
, "purty"
, "react-native-cli"
, "react-tools"
, "reveal.js"

File diff suppressed because it is too large


@@ -3,6 +3,7 @@
buildDunePackage {
pname = "ocplib-json-typed-browser";
inherit (ocplib-json-typed) version src;
useDune2 = true;
propagatedBuildInputs = [ ocplib-json-typed js_of_ocaml ];


@@ -17,6 +17,10 @@ stdenv.mkDerivation rec {
patches = [ ./jsoo.patch ];
postPatch = ''
substituteInPlace Makefile --replace js_of_ocaml.ppx js_of_ocaml-ppx
'';
buildInputs = [ ocaml findlib ppx_tools js_of_ocaml js_of_ocaml-ppx ];
propagatedBuildInputs = [ iri re ];


@@ -1,7 +1,6 @@
{ lib
, buildPythonPackage
, fetchPypi
, isPy27
, six
}:
@@ -14,7 +13,7 @@ buildPythonPackage rec {
sha256 = "e5dc835582ea150e35ecd57e9d86cb707d3aa3b2505679db7332326dd49fd6b8";
};
propagatedBuildInputs = lib.optional isPy27 six;
propagatedBuildInputs = [ six ];
# mozhttpd -> moznetwork -> mozinfo -> mozfile
doCheck = false;


@@ -0,0 +1,36 @@
{ stdenv
, buildPythonPackage
, fetchPypi
, isPy3k
, cython
, numpy
}:
buildPythonPackage rec {
pname = "pkuseg";
version = "0.0.25";
disabled = !isPy3k;
src = fetchPypi {
inherit pname version;
sha256 = "148yp0l7h8cflxag62pc1iwj5b5liyljnaxwfjaiqwl96vwjn0fx";
};
# Does not seem to have actual tests, but unittest discover
# recognizes some non-tests as tests and fails.
doCheck = false;
nativeBuildInputs = [ cython ];
propagatedBuildInputs = [ numpy ];
pythonImportsCheck = [ "pkuseg" ];
meta = with stdenv.lib; {
description = "Toolkit for multi-domain Chinese word segmentation";
homepage = "https://github.com/lancopku/pkuseg-python";
license = licenses.unfree;
maintainers = with maintainers; [ danieldk ];
};
}


@@ -16,11 +16,11 @@
buildPythonPackage rec {
pname = "progressbar2";
version = "3.51.3";
version = "3.51.4";
src = fetchPypi {
inherit pname version;
sha256 = "ecf687696dd449067f69ef6730c4d4a0189db1f8d1aad9e376358354631d5b2c";
sha256 = "0dnfw8mdrz78gck4ibnv64cinbp5f7kw349wjgpwv53z6p7jiwhk";
};
propagatedBuildInputs = [ python-utils ];
@@ -29,10 +29,10 @@ buildPythonPackage rec {
pytest sphinx flake8 pytestpep8 pytest-flakes pytestcov
pytestcache freezegun
];
# ignore tests on the nix wrapped setup.py and don't flake .eggs directory
# ignore tests on the nix wrapped setup.py
checkPhase = ''
runHook preCheck
${python.interpreter} setup.py test --addopts "--ignore=.eggs"
${python.interpreter} setup.py test
runHook postCheck
'';
@@ -40,6 +40,6 @@ buildPythonPackage rec {
homepage = "https://progressbar-2.readthedocs.io/en/latest/";
description = "Text progressbar library for python";
license = licenses.bsd3;
maintainers = with maintainers; [ ashgillman ];
maintainers = with maintainers; [ ashgillman turion ];
};
}


@@ -1,6 +1,6 @@
{ stdenv, lib, buildPythonPackage, fetchPypi, astroid, six, isort,
mccabe, configparser, backports_functools_lru_cache, singledispatch,
pytest, pytestrunner, pyenchant, setuptools }:
pytest, pytestrunner, setuptools }:
buildPythonPackage rec {
pname = "pylint";
@@ -11,7 +11,7 @@ buildPythonPackage rec {
sha256 = "004kfapkqxqy2s85pmddqv0fabxdxywxrlbi549p0v237pr2v94p";
};
checkInputs = [ pytest pytestrunner pyenchant ];
checkInputs = [ pytest pytestrunner ];
propagatedBuildInputs = [ astroid six isort mccabe configparser backports_functools_lru_cache singledispatch setuptools ];


@@ -1,5 +1,6 @@
{ buildPythonPackage, fetchPypi, lib, isPy3k
, pkgconfig, igraph }:
, pkgconfig, igraph
, texttable }:
buildPythonPackage rec {
pname = "python-igraph";
@@ -7,12 +8,18 @@ buildPythonPackage rec {
nativeBuildInputs = [ pkgconfig ];
buildInputs = [ igraph ];
propagatedBuildInputs = [ texttable ];
src = fetchPypi {
inherit pname version;
sha256 = "4601638d7d22eae7608cdf793efac75e6c039770ec4bd2cecf76378c84ce7d72";
};
# NB: We want to use our igraph, not vendored igraph, but even with
# pkg-config on the PATH, their custom setup.py still needs to be explicitly
# told to do it. ~ C.
setupPyGlobalFlags = [ "--use-pkg-config" ];
doCheck = !isPy3k;
meta = {
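A rough sketch of what the flag above does in practice, assuming the behaviour of buildPythonPackage's setuptools hook (not stated in the diff itself): entries in setupPyGlobalFlags are passed as distutils global options ahead of every setup.py command, so further global flags can be appended the same way. The attribute path and the extra --verbose flag below are illustrative only:

# sketch.nix (hypothetical)
let
  pkgs = import <nixpkgs> { };
in
pkgs.python3Packages.python-igraph.overridePythonAttrs (old: {
  # roughly equivalent to invoking: setup.py --use-pkg-config --verbose <command>
  setupPyGlobalFlags = (old.setupPyGlobalFlags or [ ]) ++ [ "--verbose" ];
})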


@@ -1,21 +1,27 @@
{ buildPythonPackage, fetchPypi, lib, setuptools_scm, pytestCheckHook }:
{ buildPythonPackage, fetchPypi, isPy27, lib, setuptools, setuptools_scm
, pytestCheckHook }:
buildPythonPackage rec {
pname = "simpy";
version = "4.0.1";
disabled = isPy27;
src = fetchPypi {
inherit pname version;
sha256 = "b36542e2faab612f861c5ef4da17220ac1553f5892b3583c67281dbe4faad404";
};
nativeBuildInputs = [ setuptools_scm ];
propagatedBuildInputs = [ setuptools ];
checkInputs = [ pytestCheckHook ];
meta = with lib; {
homepage = "https://simpy.readthedocs.io/en/latest/";
description = "A process-based discrete-event simulation framework based on standard Python.";
homepage = "https://simpy.readthedocs.io/en/${version}/";
description = "Process-based discrete-event simulation framework based on standard Python";
license = [ licenses.mit ];
maintainers = with maintainers; [ shlevy ];
maintainers = with maintainers; [ dmrauh shlevy ];
};
}


@@ -2,14 +2,14 @@
buildPythonPackage rec {
pname = "SoMaJo";
version = "2.0.6";
version = "2.1.0";
disabled = !isPy3k;
src = fetchFromGitHub {
owner = "tsproisl";
repo = pname;
rev = "v${version}";
sha256 = "08nicj3nj6pi6djli26gf0kf3s2da9ysn1cpkyw7j88v8vav0p7s";
sha256 = "17ybm5rgwc6jy3i1c8nm05j7fd418n6hp44zv0q77kzhd66am6pp";
};
propagatedBuildInputs = [ regex ];


@@ -1,79 +1,145 @@
[{
"pname": "da_core_news_lg",
"version": "2.3.0",
"sha256": "18y1jany1ha27jkwb9563haxsbylm0axkh5c8009lsfxc8y2w9hr",
"license": "cc-by-sa-40"
},
{
"pname": "da_core_news_md",
"version": "2.3.0",
"sha256": "06nm5grj5jdx0rja7vw1f91vvd69p6vhafrwpfr1npqk24j6cacb",
"license": "cc-by-sa-40"
},
{
"pname": "da_core_news_sm",
"version": "2.3.0",
"sha256": "00byhlrcbg4wxplr473g9b3126pvk4vwy0q34xg0zx4994qb6rgn",
"license": "cc-by-sa-40"
},
{
"pname": "de_core_news_lg",
"version": "2.3.0",
"sha256": "0rixhgdz4z7sq6f2b1w3n5cn1645cr37g40hbd9xzlvdzdf5cg6d",
"license": "mit"
},
{
"pname": "de_core_news_md",
"version": "2.3.0",
"sha256": "0kxir1w000r5fn1kpa38m7688xinkn2mk1m82aiwqlck3r72jdi6",
"license": "cc-by-sa-40"
"license": "mit"
},
{
"pname": "de_core_news_sm",
"version": "2.3.0",
"sha256": "00cbmrf4njg28laysapdnp4rv4lw4yw03rxkynw1ain5fwb0izl7",
"license": "cc-by-sa-40"
"license": "mit"
},
{
"pname": "el_core_news_lg",
"version": "2.3.0",
"sha256": "001c19dd1kirlvzbdv2i89zw8nf6c4icv2w0chm7rd6x9m3i13gd",
"license": "cc-by-nc-sa-30"
},
{
"pname": "el_core_news_md",
"version": "2.3.0",
"sha256": "170x8bzm5nf02mhkxyxjk58yk2639hsjb5b9prcc69500c0vmnp0",
"license": "cc-by-sa-40"
"license": "cc-by-nc-sa-30"
},
{
"pname": "el_core_news_sm",
"version": "2.3.0",
"sha256": "10mh3za4jvr07rawzk8ps642rp11s3smraj9xvrxflik4fqkz18b",
"license": "cc-by-sa-40"
"license": "cc-by-nc-sa-30"
},
{
"pname": "en_core_web_lg",
"version": "2.3.0",
"sha256": "0mfa5wz31ya295jhyj489gb4qy806zmpq1zc11bvv5alv2m35if2",
"license": "cc-by-sa-40"
"license": "mit"
},
{
"pname": "en_core_web_md",
"version": "2.3.0",
"sha256": "1ys8sqkhiap1mq6mhbkbq8bc07lvl68xngbx725xkwvirzl5gabh",
"license": "cc-by-sa-40"
"license": "mit"
},
{
"pname": "en_core_web_sm",
"version": "2.3.0",
"sha256": "04icv9qf4pj53ll8vqxcjl2a723q1k00i7lifk8wx5saif28g37a",
"license": "cc-by-sa-40"
"license": "mit"
},
{
"pname": "en_vectors_web_lg",
"version": "2.3.0",
"sha256": "13g012rwh0bcxx3ii5mmygqzyryah1y3zd000zhidnacc1x1g743",
"license": "cc-by-sa-40"
"license": "cc-by-sa-30"
},
{
"pname": "es_core_news_lg",
"version": "2.3.0",
"sha256": "1r0pr0lzs3j9w7rd5z9nw87iayjm36v7f7gamvaiiphs6bc5p7ls",
"license": "gpl3"
},
{
"pname": "es_core_news_md",
"version": "2.3.0",
"sha256": "0nz33bmpr3rxqbnv6vb1id8pkfsvh8ii8vqplwgb3b8772kmpzy2",
"license": "cc-by-sa-40"
"license": "gpl3"
},
{
"pname": "es_core_news_sm",
"version": "2.3.0",
"sha256": "02xqhg4m0gg5r9yibvl02zixkll6w0nsmbdhp07y5yyaqjarc90d",
"license": "cc-by-sa-40"
"license": "gpl3"
},
{
"pname": "fr_core_news_lg",
"version": "2.3.0",
"sha256": "1yliamws8nqqjhpk9gr2dzlk0dms2mr958zbj21biv8fimbq60ik",
"license": "lgpllr"
},
{
"pname": "fr_core_news_md",
"version": "2.3.0",
"sha256": "04fk212ksac3bp9dj7dmzsdcnbqmbsgymsic6ddcv9zbfdv5d0db",
"license": "cc-by-sa-40"
"license": "lgpllr"
},
{
"pname": "fr_core_news_sm",
"version": "2.3.0",
"sha256": "0kldww855z67qfc9maa9z1lsvdf5vj5vc8gj0x3h68kv5n1xr4h0",
"license": "cc-by-sa-40"
"license": "lgpllr"
},
{
"pname": "it_core_news_lg",
"version": "2.3.0",
"sha256": "192rdmqnwl3ajxzhnw4r1cqv5bkziv0yc2bbzckmzqss64wk7k70",
"license": "cc-by-nc-sa-30"
},
{
"pname": "it_core_news_md",
"version": "2.3.0",
"sha256": "019ih4vwq1w6j38j0wc8pyyg1an6yy37wxq2w4amwppynmmcnd5w",
"license": "cc-by-nc-sa-30"
},
{
"pname": "it_core_news_sm",
"version": "2.3.0",
"sha256": "1c3ywqa8li0j7cyvd1xqbb096y61978hd6qv7rc6cxxjdhmkrrds",
"license": "cc-by-nc-sa-40"
},
{
"pname": "lt_core_news_lg",
"version": "2.3.0",
"sha256": "0hn5w8n7mgv33i6gvnaxl1j44n9gz4j86gg1a9jjlgdw5z98n0p2",
"license": "cc-by-sa-40"
},
{
"pname": "lt_core_news_md",
"version": "2.3.0",
"sha256": "1xya79cz2xd5vgzg6qg0ww5j2bmv7kppdk3mdjf6zpwrlzwdbk5d",
"license": "cc-by-sa-40"
},
{
@@ -82,27 +148,117 @@
"sha256": "0r3rbqgz4897wyhz5jli30lryb45039f4rlvn4q0364cg1pm92g9",
"license": "cc-by-sa-40"
},
{
"pname": "nb_core_news_lg",
"version": "2.3.0",
"sha256": "18mblypw3c82004qz5w1p3262iqwn99wl9b781dq7aqwxskr02d6",
"license": "mit"
},
{
"pname": "nb_core_news_md",
"version": "2.3.0",
"sha256": "0iw97k9glxbar8mrpvnmmcb1nffgdhb83akn99p53pwmqbzxy9p3",
"license": "mit"
},
{
"pname": "nb_core_news_sm",
"version": "2.3.0",
"sha256": "07b7xri2q3m7fvn9a2gjc1044a3f14231vr32hrw96h7k6vg95h7",
"license": "mit"
},
{
"pname": "nl_core_news_lg",
"version": "2.3.0",
"sha256": "016166kzpgi0p3m0x3k308a0r60a28yz7npagjvmpl1dfm9lzhnv",
"license": "cc-by-sa-40"
},
{
"pname": "nl_core_news_md",
"version": "2.3.0",
"sha256": "1anfhig531k9k14s0cbgsvmvifp3h50qi1h8dhx894kjmq10k2lg",
"license": "cc-by-sa-40"
},
{
"pname": "nl_core_news_sm",
"version": "2.3.0",
"sha256": "0alvz7pn7cj0yax8h5gp71vrdblh3mcsmyhzgiddsd44ry35nxnj",
"license": "cc-by-sa-40"
},
{
"pname": "pl_core_news_lg",
"version": "2.3.0",
"sha256": "1acchp8pv1h4c6cwvxz07lh4ychn6aw809zfg3mbbsxgsgd2ahjr",
"license": "gpl3"
},
{
"pname": "pl_core_news_md",
"version": "2.3.0",
"sha256": "19jjjjvbys3ayibkm3cx497b4bh63ll39hfq04wx116rj4ajpwwg",
"license": "gpl3"
},
{
"pname": "pl_core_news_sm",
"version": "2.3.0",
"sha256": "166mqlfkgiszcc6hwg2mr6sir9y88y22nd81a1nidq0fiif5lfji",
"license": "gpl3"
},
{
"pname": "pt_core_news_lg",
"version": "2.3.0",
"sha256": "18gvdmfwyy2sbq6206imglhghyagd6a4gb6wcfkwhm7lzbnq714d",
"license": "cc-by-sa-40"
},
{
"pname": "pt_core_news_md",
"version": "2.3.0",
"sha256": "1yxnpwby2aq6ydvd35lylc4fs141fisfnzlx8pl88pp2b2gxijvl",
"license": "cc-by-sa-40"
},
{
"pname": "pt_core_news_sm",
"version": "2.3.0",
"sha256": "1vcvzdg9f93x0vaafkk9l9xhpmaavfj0cf0l3p06c5kx2d76f9ph",
"license": "cc-by-sa-40"
},
{
"pname": "ro_core_news_sm",
"version": "2.3.0",
"sha256": "0lsmbdwsaczv37y5sa1vvgwszy2hs8jp24a0nvc5qm8vb71rxj8w",
"license": "cc-by-sa-40"
},
{
"pname": "ro_core_news_md",
"version": "2.3.0",
"sha256": "1igwkz3yd1117gi2g78yilh9ln8n5yrdimas4prfxjgzwid3q8bc",
"license": "cc-by-sa-40"
},
{
"pname": "ro_core_news_lg",
"version": "2.3.0",
"sha256": "0id1y32kjfans7llh1i55rgr4n2x6xn208y4qf6yl3pbc17i0n9z",
"license": "cc-by-sa-40"
},
{
"pname": "xx_ent_wiki_sm",
"version": "2.3.0",
"sha256": "0x3zmmybl5kh4dn5prkfmr4q5j9bh13p40qc3rhdfi0i3jxc11pn",
"license": "cc-by-sa-40"
"license": "mit"
},
{
"pname": "zh_core_web_lg",
"version": "2.3.1",
"sha256": "17zxk7cz47k07yb3qaigc3sx4dj4zwilr1lsn2jq6w7jc8k2h1ll",
"license": "mit"
},
{
"pname": "zh_core_web_md",
"version": "2.3.1",
"sha256": "1n4iwxyam4ykn0f9jdzwkhczack8r9c3kkbyga3c4h6iwqsflzcj",
"license": "mit"
},
{
"pname": "zh_core_web_sm",
"version": "2.3.1",
"sha256": "1lj5nwhx38cpwnvajwxlfkf84dr1xx2h6wwbg3scycsh459i9fpc",
"license": "mit"
}]

Some files were not shown because too many files have changed in this diff