Merge staging-next into staging

Commit fc3db34b51 by github-actions[bot] on 2023-05-22 12:02:11 +00:00, committed by GitHub
GPG key ID: 4AEE18F83AFDEB23 (no known key found for this signature in database)
96 changed files with 7241 additions and 6464 deletions


@ -618,6 +618,12 @@ in mkLicense lset) ({
fullName = "Licence Art Libre 1.3";
};
lens = {
fullName = "Lens Terms of Service Agreement";
url = "https://k8slens.dev/licenses/tos";
free = false;
};
lgpl2Only = {
spdxId = "LGPL-2.0-only";
fullName = "GNU Library General Public License v2 only";


@ -155,6 +155,8 @@ rec {
# Name for the package, shown in option description
name:
{
# Whether the package can be null, for example to disable installing a package altogether.
nullable ? false,
# The attribute path where the default package is located (may be omitted)
default ? name,
# A string or an attribute path to use as an example (may be omitted)
@ -164,19 +166,24 @@ rec {
}:
let
name' = if isList name then last name else name;
in mkOption ({
type = with lib.types; (if nullable then nullOr else lib.id) package;
description = "The ${name'} package to use."
+ (if extraDescription == "" then "" else " ") + extraDescription;
} // (if default != null then let
default' = if isList default then default else [ default ];
defaultPath = concatStringsSep "." default';
defaultValue = attrByPath default'
(throw "${defaultPath} cannot be found in pkgs") pkgs;
in mkOption {
in {
default = defaultValue;
defaultText = literalExpression ("pkgs." + defaultPath);
type = lib.types.package;
description = "The ${name'} package to use."
+ (if extraDescription == "" then "" else " ") + extraDescription;
${if default != null then "default" else null} = defaultValue;
${if example != null then "example" else null} = literalExpression
} else if nullable then {
default = null;
} else { }) // lib.optionalAttrs (example != null) {
example = literalExpression
(if isList example then "pkgs." + concatStringsSep "." example else example);
};
});
/* Like mkPackageOption, but emit an mdDoc description instead of DocBook. */
mkPackageOptionMD = pkgs: name: extra:
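The hunk above adds a `nullable` argument to `mkPackageOption`, and the test file later in this diff exercises it. As a rough usage sketch only (the service name `myservice` and the option names are made up for illustration and are not part of this change):

```nix
# Hypothetical module showing the new `nullable` flag of lib.mkPackageOption.
{ lib, pkgs, ... }:
{
  options.services.myservice = {
    # Ordinary package option: defaults to pkgs.hello.
    package = lib.mkPackageOption pkgs "hello" { };

    # Nullable package option: defaults to null, and setting it to null
    # disables installing the package altogether.
    optionalPackage = lib.mkPackageOption pkgs "hello" {
      nullable = true;
      default = null;
    };
  };
}
```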


@ -182,6 +182,11 @@ checkConfigOutput '^true$' config.enableAlias ./alias-with-priority.nix
checkConfigOutput '^false$' config.enable ./alias-with-priority-can-override.nix
checkConfigOutput '^false$' config.enableAlias ./alias-with-priority-can-override.nix
# Check mkPackageOption
checkConfigOutput '^"hello"$' config.package.pname ./declare-mkPackageOption.nix
checkConfigError 'The option .undefinedPackage. is used but not defined' config.undefinedPackage ./declare-mkPackageOption.nix
checkConfigOutput '^null$' config.nullablePackage ./declare-mkPackageOption.nix
# submoduleWith
## specialArgs should work


@ -0,0 +1,19 @@
{ lib, ... }: let
pkgs.hello = {
type = "derivation";
pname = "hello";
};
in {
options = {
package = lib.mkPackageOption pkgs "hello" { };
undefinedPackage = lib.mkPackageOption pkgs "hello" {
default = null;
};
nullablePackage = lib.mkPackageOption pkgs "hello" {
nullable = true;
default = null;
};
};
}


@ -1951,6 +1951,12 @@
githubId = 75972;
name = "Ben Booth";
};
benwis = {
name = "Ben Wishovich";
email = "ben@benw.is";
github = "benwis";
githubId = 6953353;
};
berberman = {
email = "berberman@yandex.com";
matrix = "@berberman:mozilla.org";
@ -2676,6 +2682,12 @@
}
];
};
Ch1keen = {
email = "gihoong7@gmail.com";
github = "Ch1keen";
githubId = 40013212;
name = "Han Jeongjun";
};
chaduffy = {
email = "charles@dyfis.net";
github = "charles-dyfis-net";


@ -78,7 +78,8 @@ in
environment.PYTHONPATH = pkgs.powerdns-admin.pythonPath;
serviceConfig = {
ExecStart = "${pkgs.powerdns-admin}/bin/powerdns-admin --pid /run/powerdns-admin/pid ${escapeShellArgs cfg.extraArgs}";
ExecStartPre = "${pkgs.coreutils}/bin/env FLASK_APP=${pkgs.powerdns-admin}/share/powerdnsadmin/__init__.py ${pkgs.python3Packages.flask}/bin/flask db upgrade -d ${pkgs.powerdns-admin}/share/migrations";
# Set environment variables only for starting flask database upgrade
ExecStartPre = "${pkgs.coreutils}/bin/env FLASK_APP=${pkgs.powerdns-admin}/share/powerdnsadmin/__init__.py SESSION_TYPE= ${pkgs.python3Packages.flask}/bin/flask db upgrade -d ${pkgs.powerdns-admin}/share/migrations";
ExecReload = "${pkgs.coreutils}/bin/kill -HUP $MAINPID";
ExecStop = "${pkgs.coreutils}/bin/kill -TERM $MAINPID";
PIDFile = "/run/powerdns-admin/pid";


@ -792,6 +792,7 @@ in {
v2ray = handleTest ./v2ray.nix {};
varnish60 = handleTest ./varnish.nix { package = pkgs.varnish60; };
varnish72 = handleTest ./varnish.nix { package = pkgs.varnish72; };
varnish73 = handleTest ./varnish.nix { package = pkgs.varnish73; };
vault = handleTest ./vault.nix {};
vault-agent = handleTest ./vault-agent.nix {};
vault-dev = handleTest ./vault-dev.nix {};


@ -10,6 +10,7 @@ let
defaultConfig = ''
BIND_ADDRESS = '127.0.0.1'
PORT = 8000
CAPTCHA_ENABLE = False
'';
makeAppTest = name: configs: makeTest {
@ -98,7 +99,30 @@ let
tcp = {
services.powerdns-admin.extraArgs = [ "-b" "127.0.0.1:8000" ];
system.build.testScript = ''
set -euxo pipefail
curl -sSf http://127.0.0.1:8000/
# Create account to check that the database migrations ran
csrf_token="$(curl -sSfc session http://127.0.0.1:8000/register | grep _csrf_token | cut -d\" -f6)"
# Outputs 'Redirecting' if successful
curl -sSfb session http://127.0.0.1:8000/register \
-F "_csrf_token=$csrf_token" \
-F "firstname=first" \
-F "lastname=last" \
-F "email=a@example.com" \
-F "username=user" \
-F "password=password" \
-F "rpassword=password" | grep Redirecting
# Login
# Outputs 'Redirecting' if successful
curl -sSfb session http://127.0.0.1:8000/login \
-F "_csrf_token=$csrf_token" \
-F "username=user" \
-F "password=password" | grep Redirecting
# Check that we are logged in, this redirects to /admin/setting/pdns if we are
curl -sSfb session http://127.0.0.1:8000/dashboard/ | grep /admin/setting
'';
};
unix = {


@ -47,6 +47,6 @@ in rustPlatform.buildRustPackage {
homepage = "https://helix-editor.com";
license = licenses.mpl20;
mainProgram = "hx";
maintainers = with maintainers; [ danth yusdacra ];
maintainers = with maintainers; [ danth yusdacra zowoq ];
};
}


@ -1,17 +0,0 @@
# This file has been generated by node2nix 1.11.1. Do not edit!
{pkgs ? import <nixpkgs> {
inherit system;
}, system ? builtins.currentSystem, nodejs ? pkgs."nodejs_14"}:
let
nodeEnv = import ./node-env.nix {
inherit (pkgs) stdenv lib python2 runCommand writeTextFile writeShellScript;
inherit pkgs nodejs;
libtool = if pkgs.stdenv.isDarwin then pkgs.darwin.cctools else null;
};
in
import ./node-packages.nix {
inherit (pkgs) fetchurl nix-gitignore stdenv lib fetchgit;
inherit nodeEnv;
}


@ -1,598 +0,0 @@
# This file originates from node2nix
{lib, stdenv, nodejs, python2, pkgs, libtool, runCommand, writeTextFile, writeShellScript}:
let
# Workaround to cope with utillinux in Nixpkgs 20.09 and util-linux in Nixpkgs master
utillinux = if pkgs ? utillinux then pkgs.utillinux else pkgs.util-linux;
python = if nodejs ? python then nodejs.python else python2;
# Create a tar wrapper that filters all the 'Ignoring unknown extended header keyword' noise
tarWrapper = runCommand "tarWrapper" {} ''
mkdir -p $out/bin
cat > $out/bin/tar <<EOF
#! ${stdenv.shell} -e
$(type -p tar) "\$@" --warning=no-unknown-keyword --delay-directory-restore
EOF
chmod +x $out/bin/tar
'';
# Function that generates a TGZ file from a NPM project
buildNodeSourceDist =
{ name, version, src, ... }:
stdenv.mkDerivation {
name = "node-tarball-${name}-${version}";
inherit src;
buildInputs = [ nodejs ];
buildPhase = ''
export HOME=$TMPDIR
tgzFile=$(npm pack | tail -n 1) # Hooks to the pack command will add output (https://docs.npmjs.com/misc/scripts)
'';
installPhase = ''
mkdir -p $out/tarballs
mv $tgzFile $out/tarballs
mkdir -p $out/nix-support
echo "file source-dist $out/tarballs/$tgzFile" >> $out/nix-support/hydra-build-products
'';
};
# Common shell logic
installPackage = writeShellScript "install-package" ''
installPackage() {
local packageName=$1 src=$2
local strippedName
local DIR=$PWD
cd $TMPDIR
unpackFile $src
# Make the base dir in which the target dependency resides first
mkdir -p "$(dirname "$DIR/$packageName")"
if [ -f "$src" ]
then
# Figure out what directory has been unpacked
packageDir="$(find . -maxdepth 1 -type d | tail -1)"
# Restore write permissions to make building work
find "$packageDir" -type d -exec chmod u+x {} \;
chmod -R u+w "$packageDir"
# Move the extracted tarball into the output folder
mv "$packageDir" "$DIR/$packageName"
elif [ -d "$src" ]
then
# Get a stripped name (without hash) of the source directory.
# On old nixpkgs it's already set internally.
if [ -z "$strippedName" ]
then
strippedName="$(stripHash $src)"
fi
# Restore write permissions to make building work
chmod -R u+w "$strippedName"
# Move the extracted directory into the output folder
mv "$strippedName" "$DIR/$packageName"
fi
# Change to the package directory to install dependencies
cd "$DIR/$packageName"
}
'';
# Bundle the dependencies of the package
#
# Only include dependencies if they don't exist. They may also be bundled in the package.
includeDependencies = {dependencies}:
lib.optionalString (dependencies != []) (
''
mkdir -p node_modules
cd node_modules
''
+ (lib.concatMapStrings (dependency:
''
if [ ! -e "${dependency.packageName}" ]; then
${composePackage dependency}
fi
''
) dependencies)
+ ''
cd ..
''
);
# Recursively composes the dependencies of a package
composePackage = { name, packageName, src, dependencies ? [], ... }@args:
builtins.addErrorContext "while evaluating node package '${packageName}'" ''
installPackage "${packageName}" "${src}"
${includeDependencies { inherit dependencies; }}
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
'';
pinpointDependencies = {dependencies, production}:
let
pinpointDependenciesFromPackageJSON = writeTextFile {
name = "pinpointDependencies.js";
text = ''
var fs = require('fs');
var path = require('path');
function resolveDependencyVersion(location, name) {
if(location == process.env['NIX_STORE']) {
return null;
} else {
var dependencyPackageJSON = path.join(location, "node_modules", name, "package.json");
if(fs.existsSync(dependencyPackageJSON)) {
var dependencyPackageObj = JSON.parse(fs.readFileSync(dependencyPackageJSON));
if(dependencyPackageObj.name == name) {
return dependencyPackageObj.version;
}
} else {
return resolveDependencyVersion(path.resolve(location, ".."), name);
}
}
}
function replaceDependencies(dependencies) {
if(typeof dependencies == "object" && dependencies !== null) {
for(var dependency in dependencies) {
var resolvedVersion = resolveDependencyVersion(process.cwd(), dependency);
if(resolvedVersion === null) {
process.stderr.write("WARNING: cannot pinpoint dependency: "+dependency+", context: "+process.cwd()+"\n");
} else {
dependencies[dependency] = resolvedVersion;
}
}
}
}
/* Read the package.json configuration */
var packageObj = JSON.parse(fs.readFileSync('./package.json'));
/* Pinpoint all dependencies */
replaceDependencies(packageObj.dependencies);
if(process.argv[2] == "development") {
replaceDependencies(packageObj.devDependencies);
}
replaceDependencies(packageObj.optionalDependencies);
/* Write the fixed package.json file */
fs.writeFileSync("package.json", JSON.stringify(packageObj, null, 2));
'';
};
in
''
node ${pinpointDependenciesFromPackageJSON} ${if production then "production" else "development"}
${lib.optionalString (dependencies != [])
''
if [ -d node_modules ]
then
cd node_modules
${lib.concatMapStrings (dependency: pinpointDependenciesOfPackage dependency) dependencies}
cd ..
fi
''}
'';
# Recursively traverses all dependencies of a package and pinpoints all
# dependencies in the package.json file to the versions that are actually
# being used.
pinpointDependenciesOfPackage = { packageName, dependencies ? [], production ? true, ... }@args:
''
if [ -d "${packageName}" ]
then
cd "${packageName}"
${pinpointDependencies { inherit dependencies production; }}
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
fi
'';
# Extract the Node.js source code which is used to compile packages with
# native bindings
nodeSources = runCommand "node-sources" {} ''
tar --no-same-owner --no-same-permissions -xf ${nodejs.src}
mv node-* $out
'';
# Script that adds _integrity fields to all package.json files to prevent NPM from consulting the cache (that is empty)
addIntegrityFieldsScript = writeTextFile {
name = "addintegrityfields.js";
text = ''
var fs = require('fs');
var path = require('path');
function augmentDependencies(baseDir, dependencies) {
for(var dependencyName in dependencies) {
var dependency = dependencies[dependencyName];
// Open package.json and augment metadata fields
var packageJSONDir = path.join(baseDir, "node_modules", dependencyName);
var packageJSONPath = path.join(packageJSONDir, "package.json");
if(fs.existsSync(packageJSONPath)) { // Only augment packages that exist. Sometimes we may have production installs in which development dependencies can be ignored
console.log("Adding metadata fields to: "+packageJSONPath);
var packageObj = JSON.parse(fs.readFileSync(packageJSONPath));
if(dependency.integrity) {
packageObj["_integrity"] = dependency.integrity;
} else {
packageObj["_integrity"] = "sha1-000000000000000000000000000="; // When no _integrity string has been provided (e.g. by Git dependencies), add a dummy one. It does not seem to harm and it bypasses downloads.
}
if(dependency.resolved) {
packageObj["_resolved"] = dependency.resolved; // Adopt the resolved property if one has been provided
} else {
packageObj["_resolved"] = dependency.version; // Set the resolved version to the version identifier. This prevents NPM from cloning Git repositories.
}
if(dependency.from !== undefined) { // Adopt from property if one has been provided
packageObj["_from"] = dependency.from;
}
fs.writeFileSync(packageJSONPath, JSON.stringify(packageObj, null, 2));
}
// Augment transitive dependencies
if(dependency.dependencies !== undefined) {
augmentDependencies(packageJSONDir, dependency.dependencies);
}
}
}
if(fs.existsSync("./package-lock.json")) {
var packageLock = JSON.parse(fs.readFileSync("./package-lock.json"));
if(![1, 2].includes(packageLock.lockfileVersion)) {
process.stderr.write("Sorry, I only understand lock file versions 1 and 2!\n");
process.exit(1);
}
if(packageLock.dependencies !== undefined) {
augmentDependencies(".", packageLock.dependencies);
}
}
'';
};
# Reconstructs a package-lock file from the node_modules/ folder structure and package.json files with dummy sha1 hashes
reconstructPackageLock = writeTextFile {
name = "addintegrityfields.js";
text = ''
var fs = require('fs');
var path = require('path');
var packageObj = JSON.parse(fs.readFileSync("package.json"));
var lockObj = {
name: packageObj.name,
version: packageObj.version,
lockfileVersion: 1,
requires: true,
dependencies: {}
};
function augmentPackageJSON(filePath, dependencies) {
var packageJSON = path.join(filePath, "package.json");
if(fs.existsSync(packageJSON)) {
var packageObj = JSON.parse(fs.readFileSync(packageJSON));
dependencies[packageObj.name] = {
version: packageObj.version,
integrity: "sha1-000000000000000000000000000=",
dependencies: {}
};
processDependencies(path.join(filePath, "node_modules"), dependencies[packageObj.name].dependencies);
}
}
function processDependencies(dir, dependencies) {
if(fs.existsSync(dir)) {
var files = fs.readdirSync(dir);
files.forEach(function(entry) {
var filePath = path.join(dir, entry);
var stats = fs.statSync(filePath);
if(stats.isDirectory()) {
if(entry.substr(0, 1) == "@") {
// When we encounter a namespace folder, augment all packages belonging to the scope
var pkgFiles = fs.readdirSync(filePath);
pkgFiles.forEach(function(entry) {
if(stats.isDirectory()) {
var pkgFilePath = path.join(filePath, entry);
augmentPackageJSON(pkgFilePath, dependencies);
}
});
} else {
augmentPackageJSON(filePath, dependencies);
}
}
});
}
}
processDependencies("node_modules", lockObj.dependencies);
fs.writeFileSync("package-lock.json", JSON.stringify(lockObj, null, 2));
'';
};
prepareAndInvokeNPM = {packageName, bypassCache, reconstructLock, npmFlags, production}:
let
forceOfflineFlag = if bypassCache then "--offline" else "--registry http://www.example.com";
in
''
# Pinpoint the versions of all dependencies to the ones that are actually being used
echo "pinpointing versions of dependencies..."
source $pinpointDependenciesScriptPath
# Patch the shebangs of the bundled modules to prevent them from
# calling executables outside the Nix store as much as possible
patchShebangs .
# Deploy the Node.js package by running npm install. Since the
# dependencies have been provided already by ourselves, it should not
# attempt to install them again, which is good, because we want to make
# it Nix's responsibility. If it needs to install any dependencies
# anyway (e.g. because the dependency parameters are
# incomplete/incorrect), it fails.
#
# The other responsibilities of NPM are kept -- version checks, build
# steps, postprocessing etc.
export HOME=$TMPDIR
cd "${packageName}"
runHook preRebuild
${lib.optionalString bypassCache ''
${lib.optionalString reconstructLock ''
if [ -f package-lock.json ]
then
echo "WARNING: Reconstruct lock option enabled, but a lock file already exists!"
echo "This will most likely result in version mismatches! We will remove the lock file and regenerate it!"
rm package-lock.json
else
echo "No package-lock.json file found, reconstructing..."
fi
node ${reconstructPackageLock}
''}
node ${addIntegrityFieldsScript}
''}
npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} rebuild
if [ "''${dontNpmInstall-}" != "1" ]
then
# NPM tries to download packages even when they already exist if npm-shrinkwrap is used.
rm -f npm-shrinkwrap.json
npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} install
fi
'';
# Builds and composes an NPM package including all its dependencies
buildNodePackage =
{ name
, packageName
, version ? null
, dependencies ? []
, buildInputs ? []
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
, bypassCache ? false
, reconstructLock ? false
, preRebuild ? ""
, dontStrip ? true
, unpackPhase ? "true"
, buildPhase ? "true"
, meta ? {}
, ... }@args:
let
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "preRebuild" "unpackPhase" "buildPhase" "meta" ];
in
stdenv.mkDerivation ({
name = "${name}${if version == null then "" else "-${version}"}";
buildInputs = [ tarWrapper python nodejs ]
++ lib.optional (stdenv.isLinux) utillinux
++ lib.optional (stdenv.isDarwin) libtool
++ buildInputs;
inherit nodejs;
inherit dontStrip; # Stripping may fail a build for some package deployments
inherit dontNpmInstall preRebuild unpackPhase buildPhase;
compositionScript = composePackage args;
pinpointDependenciesScript = pinpointDependenciesOfPackage args;
passAsFile = [ "compositionScript" "pinpointDependenciesScript" ];
installPhase = ''
source ${installPackage}
# Create and enter a root node_modules/ folder
mkdir -p $out/lib/node_modules
cd $out/lib/node_modules
# Compose the package and all its dependencies
source $compositionScriptPath
${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
# Create symlink to the deployed executable folder, if applicable
if [ -d "$out/lib/node_modules/.bin" ]
then
ln -s $out/lib/node_modules/.bin $out/bin
# Patch the shebang lines of all the executables
ls $out/bin/* | while read i
do
file="$(readlink -f "$i")"
chmod u+rwx "$file"
patchShebangs "$file"
done
fi
# Create symlinks to the deployed manual page folders, if applicable
if [ -d "$out/lib/node_modules/${packageName}/man" ]
then
mkdir -p $out/share
for dir in "$out/lib/node_modules/${packageName}/man/"*
do
mkdir -p $out/share/man/$(basename "$dir")
for page in "$dir"/*
do
ln -s $page $out/share/man/$(basename "$dir")
done
done
fi
# Run post install hook, if provided
runHook postInstall
'';
meta = {
# default to Node.js' platforms
platforms = nodejs.meta.platforms;
} // meta;
} // extraArgs);
# Builds a node environment (a node_modules folder and a set of binaries)
buildNodeDependencies =
{ name
, packageName
, version ? null
, src
, dependencies ? []
, buildInputs ? []
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
, bypassCache ? false
, reconstructLock ? false
, dontStrip ? true
, unpackPhase ? "true"
, buildPhase ? "true"
, ... }@args:
let
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" ];
in
stdenv.mkDerivation ({
name = "node-dependencies-${name}${if version == null then "" else "-${version}"}";
buildInputs = [ tarWrapper python nodejs ]
++ lib.optional (stdenv.isLinux) utillinux
++ lib.optional (stdenv.isDarwin) libtool
++ buildInputs;
inherit dontStrip; # Stripping may fail a build for some package deployments
inherit dontNpmInstall unpackPhase buildPhase;
includeScript = includeDependencies { inherit dependencies; };
pinpointDependenciesScript = pinpointDependenciesOfPackage args;
passAsFile = [ "includeScript" "pinpointDependenciesScript" ];
installPhase = ''
source ${installPackage}
mkdir -p $out/${packageName}
cd $out/${packageName}
source $includeScriptPath
# Create fake package.json to make the npm commands work properly
cp ${src}/package.json .
chmod 644 package.json
${lib.optionalString bypassCache ''
if [ -f ${src}/package-lock.json ]
then
cp ${src}/package-lock.json .
chmod 644 package-lock.json
fi
''}
# Go to the parent folder to make sure that all packages are pinpointed
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
# Expose the executables that were installed
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
mv ${packageName} lib
ln -s $out/lib/node_modules/.bin $out/bin
'';
} // extraArgs);
# Builds a development shell
buildNodeShell =
{ name
, packageName
, version ? null
, src
, dependencies ? []
, buildInputs ? []
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
, bypassCache ? false
, reconstructLock ? false
, dontStrip ? true
, unpackPhase ? "true"
, buildPhase ? "true"
, ... }@args:
let
nodeDependencies = buildNodeDependencies args;
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "unpackPhase" "buildPhase" ];
in
stdenv.mkDerivation ({
name = "node-shell-${name}${if version == null then "" else "-${version}"}";
buildInputs = [ python nodejs ] ++ lib.optional (stdenv.isLinux) utillinux ++ buildInputs;
buildCommand = ''
mkdir -p $out/bin
cat > $out/bin/shell <<EOF
#! ${stdenv.shell} -e
$shellHook
exec ${stdenv.shell}
EOF
chmod +x $out/bin/shell
'';
# Provide the dependencies in a development shell through the NODE_PATH environment variable
inherit nodeDependencies;
shellHook = lib.optionalString (dependencies != []) ''
export NODE_PATH=${nodeDependencies}/lib/node_modules
export PATH="${nodeDependencies}/bin:$PATH"
'';
} // extraArgs);
in
{
buildNodeSourceDist = lib.makeOverridable buildNodeSourceDist;
buildNodePackage = lib.makeOverridable buildNodePackage;
buildNodeDependencies = lib.makeOverridable buildNodeDependencies;
buildNodeShell = lib.makeOverridable buildNodeShell;
}


@ -1,11 +1,11 @@
{ pkgs, lib, stdenv, fetchFromGitHub, runCommand, rustPlatform, makeWrapper, llvmPackages
, nodePackages, cmake, nodejs, unzip, python3, pkg-config, libsecret
, buildNpmPackage, cmake, nodejs, unzip, python3, pkg-config, libsecret, darwin
}:
assert lib.versionAtLeast python3.version "3.5";
let
publisher = "vadimcn";
pname = "vscode-lldb";
version = "1.8.1";
version = "1.9.1";
vscodeExtUniqueId = "${publisher}.${pname}";
vscodeExtPublisher = publisher;
@ -15,7 +15,7 @@ let
owner = "vadimcn";
repo = "vscode-lldb";
rev = "v${version}";
sha256 = "sha256-5wrw8LNH14WAyIKIRGFbvrISb5RUXeD5Uh/weja9p4Q=";
sha256 = "sha256-DqxdZtSW8TZaOFGXOZQ7a4tmgRj6iAWDppCNomdfVxY=";
};
# need to build a custom version of lldb and llvm for enhanced rust support
@ -25,7 +25,7 @@ let
pname = "${pname}-adapter";
inherit version src;
cargoSha256 = "sha256-Lpo2jaDMaZGwSrpQBvBCscVbWi2Db1Cx1Tv84v1H4Es=";
cargoSha256 = "sha256-+hfNkr9cZbOcWdWKUWUqDj9a0PKjKeApFXYZzS1XokE=";
nativeBuildInputs = [ makeWrapper ];
@ -42,15 +42,35 @@ let
doCheck = false;
};
nodeDeps = ((import ./build-deps/default.nix {
inherit pkgs nodejs;
inherit (stdenv.hostPlatform) system;
}).nodeDependencies.override (old: {
inherit src version;
nativeBuildInputs = [ pkg-config ];
buildInputs = [libsecret];
dontNpmInstall = true;
}));
nodeDeps = buildNpmPackage {
pname = "${pname}-node-deps";
inherit version src;
npmDepsHash = "sha256-Cdlq1jxHSCfPjXhasClc6XzEUp3vlLgkStbhYtCyc7E=";
nativeBuildInputs = [
python3
pkg-config
];
buildInputs = [
libsecret
] ++ lib.optionals stdenv.isDarwin (with darwin.apple_sdk.frameworks; [
Security
AppKit
]);
dontNpmBuild = true;
installPhase = ''
runHook preInstall
mkdir -p $out/lib
cp -r node_modules $out/lib
runHook postInstall
'';
};
in stdenv.mkDerivation {
pname = "vscode-extension-${publisher}-${pname}";
@ -62,8 +82,14 @@ in stdenv.mkDerivation {
patches = [ ./cmake-build-extension-only.patch ];
postPatch = ''
# temporary patch for forgotten version updates
substituteInPlace CMakeLists.txt \
--replace "1.9.0" ${version}
'';
postConfigure = ''
cp -r ${nodeDeps}/lib/{node_modules,package-lock.json} .
cp -r ${nodeDeps}/lib/node_modules .
'';
cmakeFlags = [
@ -72,6 +98,10 @@ in stdenv.mkDerivation {
];
makeFlags = [ "vsix_bootstrap" ];
preBuild = lib.optionalString stdenv.isDarwin ''
export HOME=$TMPDIR
'';
installPhase = ''
ext=$out/$installPrefix
runHook preInstall


@ -1,5 +1,5 @@
#! /usr/bin/env nix-shell
#! nix-shell ../../update-shell.nix -i bash
#! nix-shell ../../update-shell.nix -i bash -p wget prefetch-npm-deps
set -eo pipefail
cd "$(dirname "${BASH_SOURCE[0]}")"
@ -40,6 +40,9 @@ sed -E 's#\bsha256 = ".*?"#sha256 = "'$srcHash'"#' --in-place "$nixFile"
cargoHash=$(nix-prefetch "{ sha256 }: (import $nixpkgs {}).vscode-extensions.vadimcn.vscode-lldb.adapter.cargoDeps.overrideAttrs (_: { outputHash = sha256; })")
sed -E 's#\bcargoSha256 = ".*?"#cargoSha256 = "'$cargoHash'"#' --in-place "$nixFile"
# update node dependencies
src="$(nix-build $nixpkgs -A vscode-extensions.vadimcn.vscode-lldb.src --no-out-link)"
nix-shell -p node2nix -I nixpkgs=$nixpkgs --run "cd build-deps && ls -R && node2nix -14 -d -i \"$src/package.json\" -l \"$src/package-lock.json\""
pushd $TMPDIR
wget https://raw.githubusercontent.com/$owner/$repo/v${version}/package-lock.json
npmDepsHash=$(prefetch-npm-deps ./package-lock.json)
popd
sed -E 's#\bnpmDepsHash = ".*?"#npmDepsHash = "'$npmDepsHash'"#' --in-place "$nixFile"


@ -88,7 +88,7 @@ mkDerivation rec {
"-DPLUGIN_STANDARD_QM3C2=ON"
"-DPLUGIN_STANDARD_QMPLANE=ON"
"-DPLUGIN_STANDARD_QPOISSON_RECON=ON"
"-DPLUGIN_STANDARD_QRANSAC_SD=ON"
"-DPLUGIN_STANDARD_QRANSAC_SD=OFF" # not compatible with GPL, broken on non-x86
"-DPLUGIN_STANDARD_QSRA=ON"
"-DPLUGIN_STANDARD_QCLOUDLAYERS=ON"
];


@ -19,11 +19,11 @@
stdenv.mkDerivation rec {
pname = "crow-translate";
version = "2.10.4";
version = "2.10.5";
src = fetchzip {
url = "https://github.com/${pname}/${pname}/releases/download/${version}/${pname}-${version}-source.tar.gz";
hash = "sha256-M2vAH1YAvNOhDsz+BWxvteR8YX89FHtbUcQZr1uVoCs=";
hash = "sha256-sAjgG2f0rAWakPd2cZNGXkooIxQQM5OPHm11ahyY1WU=";
};
patches = [


@ -170,6 +170,10 @@ let
deprecated = (super.deprecated.override {
sphinxHook = null;
}).overridePythonAttrs dropDocOutput;
wrapt = (super.wrapt.override {
sphinxHook = null;
sphinx-rtd-theme = null;
}).overridePythonAttrs dropDocOutput;
};
};
in


@ -0,0 +1,73 @@
# k3s versions
K3s, Kubernetes, and other clustered software have the property of not being able to update atomically. Most software in nixpkgs, for example bash, can be updated as part of a "nixos-rebuild switch" without having to worry about the old and the new bash interacting in some way.
K3s/Kubernetes, on the other hand, is typically run across several NixOS machines, and each NixOS machine is updated independently. As such, different versions of the package and NixOS module must maintain compatibility with each other through temporary version skew during updates.
The upstream Kubernetes project [documents this in their version-skew policy](https://kubernetes.io/releases/version-skew-policy/#supported-component-upgrade-order).
Within nixpkgs, we strive to maintain a valid "upgrade path" that does not run
afoul of the upstream version skew policy.
## Upstream release cadence and support
K3s is built on top of K8s, and typically provides a similar release cadence and support window (simply by cherry-picking over k8s patches). As such, we assume k3s's support lifecycle is identical to upstream K8s.
This is documented upstream [here](https://kubernetes.io/releases/patch-releases/#support-period).
In short, a new Kubernetes version is released roughly every 4 months, and each release is supported for a little over 1 year.
Any version that is not supported by upstream should be dropped from nixpkgs.
## Versions in NixOS releases
NixOS releases should avoid having deprecated software, or making major version upgrades, wherever possible.
As such, we would like to have only the newest K3s version in each NixOS
release at the time the release branch is branched off, which will ensure the
K3s version in that release will receive updates for the longest duration
possible.
However, this conflicts with another desire: we would like people to be able to upgrade between NixOS stable releases without needing to make a large enough k3s version jump that they violate the Kubernetes version skew policy.
To give an example, we may have the following timeline for k8s releases:
(Note: the exact versions and dates may be wrong; this is an illustrative example and reality may differ.)
```mermaid
gitGraph
branch k8s
commit
branch "k8s-1.24"
checkout "k8s-1.24"
commit id: "1.24.0" tag: "2022-05-03"
branch "k8s-1.25"
checkout "k8s-1.25"
commit id: "1.25.0" tag: "2022-08-23"
branch "k8s-1.26"
checkout "k8s-1.26"
commit id: "1.26.0" tag: "2022-12-08"
checkout k8s-1.24
commit id: "1.24-EOL" tag: "2023-07-28"
checkout k8s-1.25
commit id: "1.25-EOL" tag: "2023-10-27"
checkout k8s-1.26
commit id: "1.26-EOL" tag: "2024-02-28"
```
(Note: the above graph will render if you view this markdown on GitHub, or when using [mermaid](https://mermaid.js.org/))
In this scenario, even though k3s 1.24 is still technically supported when the NixOS 23.05
release is cut, since it goes EOL before the NixOS 23.11 release is made, we would
not want to include it. Similarly, k3s 1.25 would go EOL before NixOS 23.11.
As such, we should only include k3s 1.26 in the 23.05 release.
We can then make a similar argument when NixOS 23.11 comes around to not
include k3s 1.26 or 1.27. However, that means someone upgrading from the NixOS
23.05 release to the NixOS 23.11 release would not have a supported upgrade path.
In order to resolve this issue, we propose backporting not just new patch releases to older NixOS releases, but also new k3s versions, up to one version before the first version that is included in the next NixOS release.
In the above example, where NixOS 23.05 included k3s 1.26, and 23.11 included k3s 1.28, that means we would backport 1.27 to the NixOS 23.05 release, and backport all patches for 1.26 and 1.27.
This would allow someone to upgrade between those NixOS releases in a supported configuration.
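The policy above is about which k3s packages nixpkgs carries; on the consuming side, a cluster operator would step through the intermediate versions one upgrade at a time. A minimal, hypothetical sketch, assuming a versioned attribute such as `pkgs.k3s_1_26` exists in the nixpkgs revision being used (the exact attribute names may differ):

```nix
# Hypothetical NixOS configuration pinning one k3s minor version at a time.
# pkgs.k3s_1_26 is an assumed attribute name; adjust to what your nixpkgs
# revision actually provides.
{ pkgs, ... }:
{
  services.k3s = {
    enable = true;
    package = pkgs.k3s_1_26; # bump to 1.27, then 1.28, one minor step per upgrade
  };
}
```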


@ -22,7 +22,7 @@ stdenv.mkDerivation rec {
meta = with lib; {
description = "The Kubernetes IDE";
homepage = "https://k8slens.dev/";
license = licenses.mit;
license = licenses.lens;
maintainers = with maintainers; [ dbirks ];
platforms = [ "aarch64-darwin" ];
};


@ -37,7 +37,7 @@ appimageTools.wrapType2 {
meta = with lib; {
description = "The Kubernetes IDE";
homepage = "https://k8slens.dev/";
license = licenses.mit;
license = licenses.lens;
maintainers = with maintainers; [ dbirks RossComputerGuy ];
platforms = [ "x86_64-linux" ];
};


@ -17,13 +17,13 @@
stdenv.mkDerivation rec {
pname = "teams-for-linux";
version = "1.0.92";
version = "1.0.93";
src = fetchFromGitHub {
owner = "IsmaelMartinez";
repo = pname;
rev = "v${version}";
sha256 = "sha256-wRgXb0yzrpRlZkZ6RHMU2wdR11lwR5n6tTUbCEURvDQ=";
sha256 = "sha256-mWLjGednrKnEIvrL2iHQP3xoCb6SxptzbE40aJ5wH1U=";
};
offlineCache = fetchYarnDeps {


@ -6,14 +6,14 @@
}:
stdenv.mkDerivation rec {
version = "20230512";
version = "20230517";
pname = "neomutt";
src = fetchFromGitHub {
owner = "neomutt";
repo = "neomutt";
rev = version;
sha256 = "sha256-/NeY9WrPXg6sSM1jnjgQKL7vSn8dTrAnvj229KcEEro=";
sha256 = "sha256-1i0STaJulJP0LWdNfLLIEKVapfkcguYRnbc+psWlVE4=";
};
patches = [


@ -1,53 +1,55 @@
{ lib, stdenv, fetchFromGitHub, mkYarnPackage, nixosTests, writeText, python3 }:
let
version = "0.3.0";
version = "0.4.1";
src = fetchFromGitHub {
owner = "ngoduykhanh";
owner = "PowerDNS-Admin";
repo = "PowerDNS-Admin";
rev = "v${version}";
hash = "sha256-e11u0jdJr+2TDXvBAPlDfnuuDwSfBq+JtvnDUTNKp/c=";
hash = "sha256-AwqEcAPD1SF1Ma3wtH03mXlTywM0Q19hciCmTtlr3gk=";
};
python = python3;
pythonDeps = with python.pkgs; [
flask flask_assets flask-login flask-sqlalchemy flask_migrate flask-seasurf flask_mail flask-session flask-sslify
flask flask_assets flask-login flask-sqlalchemy flask_migrate flask-seasurf flask_mail flask-session flask-session-captcha flask-sslify
mysqlclient psycopg2 sqlalchemy
cffi configobj cryptography bcrypt requests python-ldap pyotp qrcode dnspython
gunicorn python3-saml pytz cssmin rjsmin authlib bravado-core
lima pytimeparse pyyaml jinja2 itsdangerous werkzeug
certifi cffi configobj cryptography bcrypt requests python-ldap pyotp qrcode dnspython
gunicorn itsdangerous python3-saml pytz rcssmin rjsmin authlib bravado-core
lima lxml passlib pyasn1 pytimeparse pyyaml jinja2 itsdangerous webcolors werkzeug zipp zxcvbn
];
assets = mkYarnPackage {
inherit src version;
packageJSON = ./package.json;
yarnNix = ./yarndeps.nix;
# Copied from package.json, see also
# https://github.com/NixOS/nixpkgs/pull/214952
packageResolutions = {
"@fortawesome/fontawesome-free" = "6.3.0";
};
nativeBuildInputs = pythonDeps;
patchPhase = ''
sed -i -r -e "s|'cssmin',\s?'cssrewrite'|'cssmin'|g" powerdnsadmin/assets.py
sed -i -r -e "s|'rcssmin',\s?'cssrewrite'|'rcssmin'|g" powerdnsadmin/assets.py
'';
buildPhase = ''
# The build process expects the directory to be writable
# with node_modules at a specific path
# https://github.com/ngoduykhanh/PowerDNS-Admin/blob/master/.yarnrc
# https://github.com/PowerDNS-Admin/PowerDNS-Admin/blob/master/.yarnrc
approot=deps/powerdns-admin-assets
ln -s $node_modules $approot/powerdnsadmin/static/node_modules
FLASK_APP=$approot/powerdnsadmin/__init__.py flask assets build
SESSION_TYPE=filesystem FLASK_APP=$approot/powerdnsadmin/__init__.py flask assets build
'';
installPhase = ''
# https://github.com/ngoduykhanh/PowerDNS-Admin/blob/54b257768f600c5548a1c7e50eac49c40df49f92/docker/Dockerfile#L43
# https://github.com/PowerDNS-Admin/PowerDNS-Admin/blob/54b257768f600c5548a1c7e50eac49c40df49f92/docker/Dockerfile#L43
mkdir $out
cp -r $approot/powerdnsadmin/static/{generated,assets,img} $out
find $node_modules -name webfonts -exec cp -r {} $out \;
find $node_modules -name fonts -exec cp -r {} $out \;
find $node_modules/icheck/skins/square -name '*.png' -exec cp {} $out/generated \;
mkdir $out/fonts
cp $node_modules/ionicons/dist/fonts/* $out/fonts
cp $node_modules/bootstrap/dist/fonts/* $out/fonts
cp $node_modules/font-awesome/fonts/* $out/fonts
'';
distPhase = "true";
};
@ -61,7 +63,7 @@ let
assets.register('js_main', 'generated/main.js')
assets.register('css_main', 'generated/main.css')
'';
in stdenv.mkDerivation rec {
in stdenv.mkDerivation {
pname = "powerdns-admin";
inherit src version;
@ -81,7 +83,13 @@ in stdenv.mkDerivation rec {
postPatch = ''
rm -r powerdnsadmin/static powerdnsadmin/assets.py
sed -i "s/id:/'id':/" migrations/versions/787bdba9e147_init_db.py
# flask-migrate 4.0 compatibility: https://github.com/PowerDNS-Admin/PowerDNS-Admin/issues/1376
substituteInPlace migrations/env.py --replace "render_as_batch=config.get_main_option('sqlalchemy.url').startswith('sqlite:')," ""
# flask-session and powerdns-admin both try to add sqlalchemy to flask.
# Reuse the database for flask-session
substituteInPlace powerdnsadmin/__init__.py --replace "sess = Session(app)" "app.config['SESSION_SQLALCHEMY'] = models.base.db; sess = Session(app)"
# Routes creates session database tables, so it needs a context
substituteInPlace powerdnsadmin/__init__.py --replace "routes.init_app(app)" "with app.app_context(): routes.init_app(app)"
'';
installPhase = ''
@ -113,7 +121,7 @@ in stdenv.mkDerivation rec {
meta = with lib; {
description = "A PowerDNS web interface with advanced features";
homepage = "https://github.com/ngoduykhanh/PowerDNS-Admin";
homepage = "https://github.com/PowerDNS-Admin/PowerDNS-Admin";
license = licenses.mit;
maintainers = with maintainers; [ Flakebi zhaofengli ];
};


@ -1,17 +1,24 @@
{
"dependencies": {
"admin-lte": "2.4.9",
"bootstrap": "^3.4.1",
"bootstrap-datepicker": "^1.8.0",
"@fortawesome/fontawesome-free": "6.3.0",
"admin-lte": "3.2.0",
"bootstrap": "4.6.2",
"bootstrap-datepicker": "^1.9.0",
"bootstrap-validator": "^0.11.9",
"datatables.net-plugins": "^1.10.19",
"datatables.net-plugins": "^1.13.1",
"icheck": "^1.0.2",
"jquery-slimscroll": "^1.3.8",
"jquery-ui-dist": "^1.12.1",
"jquery-sparkline": "^2.4.0",
"jquery-ui-dist": "^1.13.2",
"jquery.quicksearch": "^2.4.0",
"jtimeout": "^3.1.0",
"jquery-validation": "^1.19.5",
"jtimeout": "^3.2.0",
"knockout": "^3.5.1",
"multiselect": "^0.9.12"
},
"resolutions": {
"admin-lte/@fortawesome/fontawesome-free": "6.3.0"
},
"name": "powerdns-admin-assets",
"version": "0.3.0"
"version": "0.4.1"
}

File diff suppressed because it is too large.


@ -5,28 +5,36 @@
, boost
, eigen
, zlib
, llvmPackages
}:
stdenv.mkDerivation rec {
pname = "iqtree";
version = "2.2.0.4";
version = "2.2.2.4";
src = fetchFromGitHub {
owner = "iqtree";
repo = "iqtree2";
rev = "v${version}";
sha256 = "sha256:0ickw1ldpvv2m66yzbvqfhn8k07qdkhbjrlqjs6vcf3s42j5c6pq";
hash = "sha256-5NF0Ej3M19Vd08xfmOHRhZkM1YGQ/ZlFj0HsSw1sw1w=";
fetchSubmodules = true;
};
nativeBuildInputs = [ cmake ];
buildInputs = [ boost eigen zlib ];
buildInputs = [
boost
eigen
zlib
] ++ lib.optionals stdenv.isDarwin [
llvmPackages.openmp
];
meta = with lib; {
homepage = "http://www.iqtree.org/";
description = "Efficient and versatile phylogenomic software by maximum likelihood";
license = licenses.lgpl2;
maintainers = with maintainers; [ bzizou ];
platforms = [ "x86_64-linux" "x86_64-darwin" ];
};
}


@ -50,21 +50,21 @@ let
srcs = {
toolbox = fetchFromGitHub {
owner = pname + "-toolbox";
repo = pname;
rev = pname + "_" + version;
sha256 = "05s9dclmk7x5d7wnnj4qr6r6c827m72a44gizcv09lxr28pr9inz";
owner = "shogun-toolbox";
repo = "shogun";
rev = "shogun_${version}";
sha256 = "sha256-38aULxK50wQ2+/ERosSpRyBmssmYSGv5aaWfWSlrSRc=";
fetchSubmodules = true;
};
# The CMake external projects expect the packed archives
rxcpp = fetchurl {
url = "https://github.com/Reactive-Extensions/RxCpp/archive/v${rxcppVersion}.tar.gz";
sha256 = "0y2isr8dy2n1yjr9c5570kpc9lvdlch6jv0jvw000amwn5d3krsh";
sha256 = "sha256-UOc5WrG8KgAA3xJsaSCjbdPE7gSnFJay9MEK31DWUXg=";
};
gtest = fetchurl {
url = "https://github.com/google/googletest/archive/release-${gtestVersion}.tar.gz";
sha256 = "1n5p1m2m3fjrjdj752lf92f9wq3pl5cbsfrb49jqbg52ghkz99jq";
sha256 = "sha256-WKb0J3yivIVlIis7vVihd2CenEiOinJkk1m6UUUNt9g=";
};
};
in
@ -77,13 +77,6 @@ stdenv.mkDerivation rec {
src = srcs.toolbox;
patches = [
# Fix compile errors with json-c
# https://github.com/shogun-toolbox/shogun/pull/4104
(fetchpatch {
url = "https://github.com/shogun-toolbox/shogun/commit/365ce4c4c700736d2eec8ba6c975327a5ac2cd9b.patch";
sha256 = "158hqv4xzw648pmjbwrhxjp7qcppqa7kvriif87gn3zdn711c49s";
})
# Fix compile errors with GCC 9+
# https://github.com/shogun-toolbox/shogun/pull/4811
(fetchpatch {
@ -95,6 +88,20 @@ stdenv.mkDerivation rec {
sha256 = "sha256-AgJJKQA8vc5oKaTQDqMdwBR4hT4sn9+uW0jLe7GteJw=";
})
# Fix virtual destruction
(fetchpatch {
url = "https://github.com/shogun-toolbox/shogun/commit/ef0e4dc1cc4a33c9e6b17a108fa38a436de2d7ee.patch";
sha256 = "sha256-a9Rm0ytqkSAgC3dguv8m3SwOSipb+VByBHHdmV0d63w=";
})
./fix-virtual-destruction.patch
# Fix compile errors with json-c
# https://github.com/shogun-toolbox/shogun/pull/4104
(fetchpatch {
url = "https://github.com/shogun-toolbox/shogun/commit/365ce4c4c700736d2eec8ba6c975327a5ac2cd9b.patch";
sha256 = "sha256-OhEWwrHtD/sOcjHmPY/C9zJ8ruww8yXrRcTw38nGEJU=";
})
# Fix compile errors with Eigen 3.4
./eigen-3.4.patch
@ -126,6 +133,16 @@ stdenv.mkDerivation rec {
cmakeFlags = let
enableIf = cond: if cond then "ON" else "OFF";
excludeTestsRegex = lib.concatStringsSep "|" [
# sporadic segfault
"TrainedModelSerialization"
# broken by openblas 0.3.21
"mathematics_lapack"
# these take too long on CI
"evaluation_cross_validation"
"modelselection_combined_kernel"
"modelselection_grid_search"
];
in [
"-DBUILD_META_EXAMPLES=ON"
"-DCMAKE_DISABLE_FIND_PACKAGE_ARPACK=ON"
@ -134,7 +151,7 @@ stdenv.mkDerivation rec {
"-DCMAKE_DISABLE_FIND_PACKAGE_Mosek=ON"
"-DCMAKE_DISABLE_FIND_PACKAGE_TFLogger=ON"
"-DCMAKE_DISABLE_FIND_PACKAGE_ViennaCL=ON"
"-DCMAKE_CTEST_ARGUMENTS='--exclude-regex;TrainedModelSerialization'" # Sporadic segfault
"-DCMAKE_CTEST_ARGUMENTS=--exclude-regex;'${excludeTestsRegex}'"
"-DENABLE_TESTING=${enableIf doCheck}"
"-DDISABLE_META_INTEGRATION_TESTS=ON"
"-DTRAVIS_DISABLE_META_CPP=ON"


@ -0,0 +1,20 @@
From: Sebastián Mancilla <smancill@smancill.dev>
Subject: Fix virtual destruction
---
src/shogun/solver/LDASolver.h | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/src/shogun/solver/LDASolver.h b/src/shogun/solver/LDASolver.h
index 9300a85c5..d500eca5d 100644
--- a/src/shogun/solver/LDASolver.h
+++ b/src/shogun/solver/LDASolver.h
@@ -87,7 +87,7 @@ namespace shogun
compute_within_cov();
}
- ~LDASolver()
+ virtual ~LDASolver()
{
SG_UNREF(m_features)
SG_UNREF(m_labels)


@ -9,11 +9,11 @@ assert (!blas.isILP64) && (!lapack.isILP64);
stdenv.mkDerivation rec {
pname = "giac${lib.optionalString enableGUI "-with-xcas"}";
version = "1.9.0-29"; # TODO try to remove preCheck phase on upgrade
version = "1.9.0-43"; # TODO try to remove preCheck phase on upgrade
src = fetchurl {
url = "https://www-fourier.ujf-grenoble.fr/~parisse/debian/dists/stable/main/source/giac_${version}.tar.gz";
sha256 = "sha256-9jUVcsrV8jMfqrmnymZ4vIaWlabF9ppCuq7VDlZ5Cw4=";
sha256 = "sha256-466jB8ZRqHkU5XCY+j0Fh7Dq/mMaOu10rHECKbtNGrs=";
};
patches = [
@ -27,16 +27,12 @@ stdenv.mkDerivation rec {
# the compiler rightfully warns about (with an error nowadays).
(fetchpatch {
name = "fix-string-compiler-error.patch";
url = "https://salsa.debian.org/science-team/giac/-/raw/08cb807ef41f5216b712928886ebf74f69d5ddf6/debian/patches/fix-string-compiler-error.patch";
sha256 = "sha256-K4KAJY1F9Y4DTZFmVEOCXTnxBmHo4//3A10UR3Wlliw=";
url = "https://salsa.debian.org/science-team/giac/-/raw/9ca8dbf4bb16d9d96948aa4024326d32485d7917/debian/patches/fix-string-compiler-error.patch";
sha256 = "sha256-r+M+9MRPRqhHcdhYWI6inxyNvWbXUbBcPCeDY7aulvk=";
})
# increase pari stack size for test chk_fhan4
(fetchpatch {
name = "increase-pari-stack-size.patch";
url = "https://salsa.debian.org/science-team/giac/-/raw/08cb807ef41f5216b712928886ebf74f69d5ddf6/debian/patches/increase-pari-size.patch";
sha256 = "sha256-764P0IJ7ndURap7hotOmYJK0wAhYdqMbQNOnhJxVNt0=";
})
# increase pari stack size for test chk_fhan{4,6}
./increase-pari-stack-size.patch
] ++ lib.optionals (!enableGUI) [
# when enableGui is false, giac is compiled without fltk. That
# means some outputs differ in the make check. Patch around this:


@ -0,0 +1,18 @@
diff -ur a/check/chk_fhan4 b/check/chk_fhan4
--- a/check/chk_fhan4 2018-03-13 19:27:11.000000000 +0100
+++ b/check/chk_fhan4 2023-05-20 16:31:30.349063063 +0200
@@ -1,4 +1,5 @@
#! /bin/sh
unset LANG
+export PARI_SIZE=2048000
../src/icas TP04-sol.cas > TP04.tst
diff TP04.tst TP04-sol.cas.out1
diff -ur a/check/chk_fhan6 b/check/chk_fhan6
--- a/check/chk_fhan6 2018-03-13 19:27:21.000000000 +0100
+++ b/check/chk_fhan6 2023-05-20 16:32:04.199407065 +0200
@@ -1,4 +1,5 @@
#! /bin/sh
unset LANG
+export PARI_SIZE=2048000
../src/icas TP06-sol.cas > TP06.tst
diff TP06.tst TP06-sol.cas.out1


@ -1,126 +0,0 @@
{ lib
, stdenv
, fetchurl
, fetchpatch
, texinfo
, perl
, python3
, makeWrapper
, autoreconfHook
, rlwrap ? null
, tk ? null
, gnuplot ? null
, lisp-compiler
}:
let
# Allow to remove some executables from the $PATH of the wrapped binary
searchPath = lib.makeBinPath
(lib.filter (x: x != null) [ lisp-compiler rlwrap tk gnuplot ]);
in
stdenv.mkDerivation rec {
pname = "maxima";
# old version temporarily kept for sage due to
# https://github.com/sagemath/sage/issues/33718
version = "5.45.1";
src = fetchurl {
url = "mirror://sourceforge/${pname}/${pname}-${version}.tar.gz";
sha256 = "sha256-/pAWJ2lwvvIUoaJENIVYZEUU1/36pPyLnQ6Hr8u059w=";
};
nativeBuildInputs = [
autoreconfHook
lisp-compiler
makeWrapper
python3
texinfo
];
strictDeps = true;
nativeCheckInputs = [
gnuplot
];
postPatch = ''
substituteInPlace doc/info/Makefile.am --replace "/usr/bin/env perl" "${perl}/bin/perl"
'';
postInstall = ''
# Make sure that maxima can find its runtime dependencies.
for prog in "$out/bin/"*; do
wrapProgram "$prog" --prefix PATH ":" "$out/bin:${searchPath}"
done
# Move emacs modules and documentation into the right place.
mkdir -p $out/share/emacs $out/share/doc
ln -s ../maxima/${version}/emacs $out/share/emacs/site-lisp
ln -s ../maxima/${version}/doc $out/share/doc/maxima
''
+ (lib.optionalString (lisp-compiler.pname == "ecl") ''
cp src/binary-ecl/maxima.fas* "$out/lib/maxima/${version}/binary-ecl/"
'')
;
patches = [
# fix path to info dir (see https://trac.sagemath.org/ticket/11348)
(fetchpatch {
url = "https://git.sagemath.org/sage.git/plain/build/pkgs/maxima/patches/infodir.patch?id=07d6c37d18811e2b377a9689790a7c5e24da16ba";
sha256 = "09v64n60f7i6frzryrj0zd056lvdpms3ajky4f9p6kankhbiv21x";
})
# fix https://sourceforge.net/p/maxima/bugs/2596/
(fetchpatch {
url = "https://git.sagemath.org/sage.git/plain/build/pkgs/maxima/patches/matrixexp.patch?id=07d6c37d18811e2b377a9689790a7c5e24da16ba";
sha256 = "06961hn66rhjijfvyym21h39wk98sfxhp051da6gz0n9byhwc6zg";
})
# undo https://sourceforge.net/p/maxima/code/ci/f5e9b0f7eb122c4e48ea9df144dd57221e5ea0ca
# see https://trac.sagemath.org/ticket/13364#comment:93
(fetchpatch {
url = "https://git.sagemath.org/sage.git/plain/build/pkgs/maxima/patches/undoing_true_false_printing_patch.patch?id=07d6c37d18811e2b377a9689790a7c5e24da16ba";
sha256 = "0fvi3rcjv6743sqsbgdzazy9jb6r1p1yq63zyj9fx42wd1hgf7yx";
})
] ++ lib.optionals (lisp-compiler.pname == "ecl") [
# build fasl, needed for ECL support
(fetchpatch {
url = "https://git.sagemath.org/sage.git/plain/build/pkgs/maxima/patches/maxima.system.patch?id=07d6c37d18811e2b377a9689790a7c5e24da16ba";
sha256 = "18zafig8vflhkr80jq2ivk46k92dkszqlyq8cfmj0b2vcfjwwbar";
})
];
# The test suite is disabled since 5.42.2 because of the following issues:
#
# Error(s) found:
# /build/maxima-5.44.0/share/linearalgebra/rtest_matrixexp.mac problems:
# (20 21 22)
# Tests that were expected to fail but passed:
# /build/maxima-5.44.0/share/vector/rtest_vect.mac problem:
# (19)
# 3 tests failed out of 16,184 total tests.
#
# These failures don't look serious. It would be nice to fix them, but I
# don't know how and probably won't have the time to find out.
doCheck = false; # try to re-enable after next version update
enableParallelBuilding = true;
passthru = {
inherit lisp-compiler;
};
meta = with lib; {
description = "Computer algebra system";
homepage = "http://maxima.sourceforge.net";
license = licenses.gpl2Plus;
longDescription = ''
Maxima is a fairly complete computer algebra system written in
lisp with an emphasis on symbolic computation. It is based on
DOE-MACSYMA and licensed under the GPL. Its abilities include
symbolic integration, 3D plotting, and an ODE solver.
'';
maintainers = with maintainers; [ doronbehar ];
platforms = platforms.unix;
};
}


@ -15,7 +15,7 @@ assert withThread -> libpthreadstubs != null;
stdenv.mkDerivation rec {
pname = "pari";
version = "2.15.2";
version = "2.15.3";
src = fetchurl {
urls = [
@ -23,15 +23,16 @@ stdenv.mkDerivation rec {
# old versions are at the url below
"https://pari.math.u-bordeaux.fr/pub/pari/OLD/${lib.versions.majorMinor version}/${pname}-${version}.tar.gz"
];
hash = "sha256-sEYoER7iKHZRmksc2vsy/rqjTq+iT56B9Y+NBX++4N0=";
hash = "sha256-rfWlhjjNr9cqi0i8n0RJcrIzKcjVRaHT7Ru+sbZWkmg=";
};
patches = [
# https://pari.math.u-bordeaux.fr/cgi-bin/bugreport.cgi?bug=2441
# https://pari.math.u-bordeaux.fr/cgi-bin/bugreport.cgi?bug=2466
(fetchpatch {
name = "fix-find_isogenous_from_Atkin.patch";
url = "https://git.sagemath.org/sage.git/plain/build/pkgs/pari/patches/bug2441.patch?id=9.8.rc0";
hash = "sha256-DvOUFlFDnopN+MJY6GYRPNabuoHPFch/nNn+49ygznc=";
name = "incorrect-result-from-qfbclassno.patch";
url = "https://pari.math.u-bordeaux.fr/cgi-bin/gitweb.cgi?p=pari.git;a=commitdiff_plain;h=7ca0c2eae87def89fa7253c60e4791a8ef26629d";
excludes = [ "src/test/32/quadclassunit" "CHANGES" ];
hash = "sha256-CQRkIYDFMrWHCoSWGsIydPjGk3w09zzghajlNuq29Jk=";
})
];


@ -128,7 +128,7 @@ let
singular = pkgs.singular.override { inherit flint; };
maxima = pkgs.maxima-ecl-5_45.override {
maxima = pkgs.maxima-ecl.override {
lisp-compiler = pkgs.ecl.override {
# "echo syntax error | ecl > /dev/full 2>&1" segfaults in
# ECL. We apply a patch to fix it (write_error.patch), but it


@ -1,19 +0,0 @@
diff --git a/src/sage/repl/configuration.py b/src/sage/repl/configuration.py
index 67d7d2accf..18279581e2 100644
--- a/src/sage/repl/configuration.py
+++ b/src/sage/repl/configuration.py
@@ -9,10 +9,11 @@ the IPython simple prompt is being used::
sage: cmd = 'print([sys.stdin.isatty(), sys.stdout.isatty()])'
sage: import pexpect
sage: output = pexpect.run(
- ....: 'bash -c \'echo "{0}" | sage\''.format(cmd),
+ ....: 'bash -c \'export SAGE_BANNER=no; echo "{0}" | sage\''.format(cmd),
....: ).decode('utf-8', 'surrogateescape')
- sage: 'sage: [False, True]' in output
- True
+ sage: print(output)
+ sage...[False, True]
+ ...
"""
#*****************************************************************************


@ -1,87 +0,0 @@
diff --git a/src/sage/env.py b/src/sage/env.py
index c4953cfa65..47b880f9ad 100644
--- a/src/sage/env.py
+++ b/src/sage/env.py
@@ -244,81 +244,8 @@ os.environ['MPMATH_SAGE'] = '1'
SAGE_BANNER = var("SAGE_BANNER", "")
SAGE_IMPORTALL = var("SAGE_IMPORTALL", "yes")
-
-def _get_shared_lib_path(*libnames: str) -> Optional[str]:
- """
- Return the full path to a shared library file installed in
- ``$SAGE_LOCAL/lib`` or the directories associated with the
- Python sysconfig.
-
- This can also be passed more than one library name (e.g. for cases where
- some library may have multiple names depending on the platform) in which
- case the first one found is returned.
-
- This supports most *NIX variants (in which ``lib<libname>.so`` is found
- under ``$SAGE_LOCAL/lib``), macOS (same, but with the ``.dylib``
- extension), and Cygwin (under ``$SAGE_LOCAL/bin/cyg<libname>.dll``,
- or ``$SAGE_LOCAL/bin/cyg<libname>-*.dll`` for versioned DLLs).
-
- For distributions like Debian that use a multiarch layout, we also try the
- multiarch lib paths (i.e. ``/usr/lib/<arch>/``).
-
- This returns ``None`` if no matching library file could be found.
-
- EXAMPLES::
-
- sage: from sage.env import _get_shared_lib_path
- sage: "gap" in _get_shared_lib_path("gap")
- True
- sage: _get_shared_lib_path("an_absurd_lib") is None
- True
-
- """
-
- for libname in libnames:
- search_directories: List[Path] = []
- patterns: List[str] = []
- if sys.platform == 'cygwin':
- # Later down we take the first matching DLL found, so search
- # SAGE_LOCAL first so that it takes precedence
- if SAGE_LOCAL:
- search_directories.append(Path(SAGE_LOCAL) / 'bin')
- search_directories.append(Path(sysconfig.get_config_var('BINDIR')))
- # Note: The following is not very robust, since if there are multible
- # versions for the same library this just selects one more or less
- # at arbitrary. However, practically speaking, on Cygwin, there
- # will only ever be one version
- patterns = [f'cyg{libname}.dll', f'cyg{libname}-*.dll']
- else:
- if sys.platform == 'darwin':
- ext = 'dylib'
- else:
- ext = 'so'
-
- if SAGE_LOCAL:
- search_directories.append(Path(SAGE_LOCAL) / 'lib')
- libdir = sysconfig.get_config_var('LIBDIR')
- if libdir is not None:
- libdir = Path(libdir)
- search_directories.append(libdir)
-
- multiarchlib = sysconfig.get_config_var('MULTIARCH')
- if multiarchlib is not None:
- search_directories.append(libdir / multiarchlib),
-
- patterns = [f'lib{libname}.{ext}']
-
- for directory in search_directories:
- for pattern in patterns:
- path = next(directory.glob(pattern), None)
- if path is not None:
- return str(path.resolve())
-
- # Just return None if no files were found
- return None
-
# locate libgap shared object
-GAP_SO = var("GAP_SO", _get_shared_lib_path("gap", ""))
+GAP_SO = var("GAP_SO", '/default')
# post process
if DOT_SAGE is not None and ' ' in DOT_SAGE:


@ -1,19 +0,0 @@
diff --git a/src/sage/doctest/forker.py b/src/sage/doctest/forker.py
index 02e18e67e7..2ebf6eb35f 100644
--- a/src/sage/doctest/forker.py
+++ b/src/sage/doctest/forker.py
@@ -1075,6 +1075,14 @@ class SageDocTestRunner(doctest.DocTestRunner, object):
sage: set(ex2.predecessors) == set([ex0,ex1])
True
"""
+
+ # Fix ECL dir race conditions by using a separate dir for each process
+ # (https://trac.sagemath.org/ticket/26968)
+ os.environ['MAXIMA_USERDIR'] = "{}/sage-maxima-{}".format(
+ tempfile.gettempdir(),
+ os.getpid()
+ )
+
if isinstance(globs, RecordingDict):
globs.start()
example.sequence_number = len(self.history)


@ -1,58 +0,0 @@
diff --git a/src/sage/misc/persist.pyx b/src/sage/misc/persist.pyx
index 3ac5f1cc2b..cb1f327c19 100644
--- a/src/sage/misc/persist.pyx
+++ b/src/sage/misc/persist.pyx
@@ -157,7 +157,7 @@ def load(*filename, compress=True, verbose=True, **kwargs):
....: _ = f.write(code)
sage: load(t)
sage: hello
- <fortran object>
+ <fortran ...>
"""
import sage.repl.load
if len(filename) != 1:
diff --git a/src/sage/plot/complex_plot.pyx b/src/sage/plot/complex_plot.pyx
index 6f0aeab87a..b77c69b2f7 100644
--- a/src/sage/plot/complex_plot.pyx
+++ b/src/sage/plot/complex_plot.pyx
@@ -461,6 +461,8 @@ def complex_to_rgb(z_values, contoured=False, tiled=False,
rgb[i, j, 2] = b
sig_off()
+ nan_indices = np.isnan(rgb).any(-1) # Mask for undefined points
+ rgb[nan_indices] = 1 # Make nan_indices white
return rgb
diff --git a/src/sage/plot/histogram.py b/src/sage/plot/histogram.py
index 3bc2b76b58..388c2d1391 100644
--- a/src/sage/plot/histogram.py
+++ b/src/sage/plot/histogram.py
@@ -87,13 +87,8 @@ class Histogram(GraphicPrimitive):
TESTS::
- sage: h = histogram([10,3,5], normed=True)[0]
- doctest:warning...:
- DeprecationWarning: the 'normed' option is deprecated. Use 'density' instead.
- See https://trac.sagemath.org/25260 for details.
+ sage: h = histogram([10,3,5], density=True)[0]
sage: h.get_minmax_data()
- doctest:warning ...
- ...VisibleDeprecationWarning: Passing `normed=True` on non-uniform bins has always been broken, and computes neither the probability density function nor the probability mass function. The result is only correct if the bins are uniform, when density=True will produce the same result anyway. The argument will be removed in a future version of numpy.
{'xmax': 10.0, 'xmin': 3.0, 'ymax': 0.476190476190..., 'ymin': 0}
"""
import numpy
diff --git a/src/sage/repl/ipython_extension.py b/src/sage/repl/ipython_extension.py
index 798671aab4..cad6a47ca8 100644
--- a/src/sage/repl/ipython_extension.py
+++ b/src/sage/repl/ipython_extension.py
@@ -405,7 +405,7 @@ class SageMagics(Magics):
....: C END FILE FIB1.F
....: ''')
sage: fib
- <fortran object>
+ <fortran ...>
sage: from numpy import array
sage: a = array(range(10), dtype=float)
sage: fib(a, 10)


@ -1,82 +0,0 @@
diff --git a/src/sage/interfaces/tachyon.py b/src/sage/interfaces/tachyon.py
index 23671e5089..a5604a643c 100644
--- a/src/sage/interfaces/tachyon.py
+++ b/src/sage/interfaces/tachyon.py
@@ -74,14 +74,14 @@ Camera projection modes
The ``PROJECTION`` keyword must be followed by one of the supported
camera projection mode identifiers ``PERSPECTIVE``, ``PERSPECTIVE_DOF``,
``ORTHOGRAPHIC``, or ``FISHEYE``. The ``FISHEYE`` projection mode
-requires two extra parameters ``FOCALLENGTH`` and ``APERTURE`` which
+requires two extra parameters ``FOCALDIST`` and ``APERTURE`` which
precede the regular camera options.
::
Camera
projection perspective_dof
- focallength 0.75
+ focaldist 0.75
aperture 0.02
Zoom 0.666667
Aspectratio 1.000000
diff --git a/src/sage/plot/plot3d/tachyon.py b/src/sage/plot/plot3d/tachyon.py
index 88c8eba2d5..c4427dd484 100644
--- a/src/sage/plot/plot3d/tachyon.py
+++ b/src/sage/plot/plot3d/tachyon.py
@@ -92,7 +92,7 @@ angle, right angle)::
Finally there is the ``projection='perspective_dof'`` option. ::
sage: T = Tachyon(xres=800, antialiasing=4, raydepth=10,
- ....: projection='perspective_dof', focallength='1.0', aperture='.0025')
+ ....: projection='perspective_dof', focaldist='1.0', aperture='.0025')
sage: T.light((0,5,7), 1.0, (1,1,1))
sage: T.texture('t1', opacity=1, specular=.3)
sage: T.texture('t2', opacity=1, specular=.3, color=(0,0,1))
@@ -186,7 +186,7 @@ class Tachyon(WithEqualityById, SageObject):
or ``'fisheye'``.
- ``frustum`` - (default ''), otherwise list of four numbers. Only
used with projection='fisheye'.
- - ``focallength`` - (default ''), otherwise a number. Only used
+ - ``focaldist`` - (default ''), otherwise a number. Only used
with projection='perspective_dof'.
- ``aperture`` - (default ''), otherwise a number. Only used
with projection='perspective_dof'.
@@ -331,7 +331,7 @@ class Tachyon(WithEqualityById, SageObject):
Use of the ``projection='perspective_dof'`` option. This may not be
implemented correctly. ::
- sage: T = Tachyon(xres=800,antialiasing=4, raydepth=10, projection='perspective_dof', focallength='1.0', aperture='.0025')
+ sage: T = Tachyon(xres=800,antialiasing=4, raydepth=10, projection='perspective_dof', focaldist='1.0', aperture='.0025')
sage: T.light((0,5,7), 1.0, (1,1,1))
sage: T.texture('t1', opacity=1, specular=.3)
sage: T.texture('t2', opacity=1, specular=.3, color=(0,0,1))
@@ -365,7 +365,7 @@ class Tachyon(WithEqualityById, SageObject):
look_at=[0, 0, 0],
viewdir=None,
projection='PERSPECTIVE',
- focallength='',
+ focaldist='',
aperture='',
frustum=''):
r"""
@@ -391,7 +391,7 @@ class Tachyon(WithEqualityById, SageObject):
self._camera_position = (-3, 0, 0) # default value
self._updir = updir
self._projection = projection
- self._focallength = focallength
+ self._focaldist = focaldist
self._aperture = aperture
self._frustum = frustum
self._objects = []
@@ -624,9 +624,9 @@ class Tachyon(WithEqualityById, SageObject):
camera_out = r"""
camera
projection %s""" % (tostr(self._projection))
- if self._focallength != '':
+ if self._focaldist != '':
camera_out = camera_out + r"""
- focallength %s""" % (float(self._focallength))
+ focaldist %s""" % (float(self._focaldist))
if self._aperture != '':
camera_out = camera_out + r"""
aperture %s""" % (float(self._aperture))

View file

@@ -1,8 +1,9 @@
{ lib
, buildPythonPackage
, sage-src
, sphinx
, jupyter-sphinx
, sphinx
, sphinx-copybutton
}:
buildPythonPackage rec {
@@ -11,8 +12,9 @@ buildPythonPackage rec {
src = sage-src;
propagatedBuildInputs = [
sphinx
jupyter-sphinx
sphinx
sphinx-copybutton
];
preBuild = ''

View file

@@ -8,87 +8,41 @@
# This is done because multiple derivations rely on these sources and they should
# all get the same sources with the same patches applied.
let
# Fetch a diff between `base` and `rev` on sage's git server.
# Used to fetch trac tickets by setting the `base` to the last release and the
# `rev` to the last commit of the ticket.
#
# We don't use sage's own build system (which builds all its
# dependencies), so we exclude changes to "build/" from patches by
# default to avoid conflicts.
fetchSageDiff = { base, name, rev, sha256, squashed ? false, excludes ? [ "build/*" ]
, ...}@args: (
fetchpatch ({
inherit name sha256 excludes;
# There are three places to get changes from:
#
# 1) From Sage's Trac. Contains all release tags (like "9.4") and all developer
# branches (wip patches from tickets), but exports each commit as a separate
# patch, so merge commits can lead to conflicts. Used if squashed == false.
#
# The above is the preferred option. To use it, find a Trac ticket and pass the
# "Commit" field from the ticket as "rev", choosing "base" as an appropriate
# release tag, i.e. a tag that doesn't cause the patch to include a lot of
# unrelated changes. If there is no such tag (due to nonlinear history, for
# example), there are two other options, listed below.
#
# 2) From GitHub's sagemath/sage repo. This lets us use a GH feature that allows
# us to choose between a .patch file, with one patch per commit, or a .diff file,
# which squashes all commits into a single diff. This is used if squashed ==
# true. This repo has all release tags. However, it has no developer branches, so
# this option can't be used if a change wasn't yet shipped in a (possibly beta)
# release.
#
# 3) From GitHub's sagemath/sagetrac-mirror repo. Mirrors all developer branches,
# but has no release tags. The only use case not covered by 1 or 2 is when we need
# to apply a patch from an open ticket that contains merge commits.
#
# Item 3 could cover all use cases if the sagemath/sagetrac-mirror repo had
# release tags, but it requires a sha instead of a release number in "base", which
# is inconvenient.
urls = if squashed
then [
"https://github.com/sagemath/sage/compare/${base}...${rev}.diff"
"https://github.com/sagemath/sagetrac-mirror/compare/${base}...${rev}.diff"
]
else [ "https://git.sagemath.org/sage.git/patch?id2=${base}&id=${rev}" ];
} // builtins.removeAttrs args [ "rev" "base" "sha256" "squashed" "excludes" ])
);
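  # A minimal illustrative invocation of fetchSageDiff (editor's sketch, not part
  # of this commit; the ticket name, revisions and hash below are hypothetical
  # placeholders):
  #
  #   (fetchSageDiff {
  #     name = "some-ticket.patch";
  #     base = "9.8";   # a release tag close to the ticket's branch point
  #     rev = "<commit sha from the ticket's 'Commit' field>";
  #     sha256 = "<hash of the fetched diff>";
  #     # squashed = true;  # fetch a single squashed .diff from GitHub instead of Trac
  #   })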
in
stdenv.mkDerivation rec {
version = "9.8";
version = "10.0";
pname = "sage-src";
src = fetchFromGitHub {
owner = "sagemath";
repo = "sage";
rev = version;
sha256 = "sha256-dDbrzJXsOBARYfJz0r7n3LbaoXHnx7Acz6HBa95NV9o=";
sha256 = "sha256-zN/Lo/GBCjYGemuaYpgG3laufN8te3wPjXMQ+Me9zgY=";
};
# Patches needed because of particularities of nix or the way this is packaged.
# The goal is to upstream all of them and get rid of this list.
nixPatches = [
# Fixes a potential race condition which can lead to transient doctest failures.
./patches/fix-ecl-race.patch
# Not necessary since library location is set explicitly
# https://trac.sagemath.org/ticket/27660#ticket
./patches/do-not-test-find-library.patch
# Parallelize docbuild using subprocesses, fixing an isolation issue. See
# https://groups.google.com/forum/#!topic/sage-packaging/YGOm8tkADrE
./patches/sphinx-docbuild-subprocesses.patch
# After updating sympow (https://github.com/sagemath/sage/issues/3360)
# we can now set the cache dir to be within the .sage directory. This is
# not strictly necessary, but keeps us from littering in the user's HOME.
./patches/sympow-cache.patch
];
# Since sage unfortunately does not release bugfix releases, packagers must
# fix those bugs themselves. This is for critical bugfixes, where "critical"
# == "causes (transient) doctest failures / somebody complained".
bugfixPatches = [
# To help debug the transient error in
# https://trac.sagemath.org/ticket/23087 when it next occurs.
./patches/configurationpy-error-verbose.patch
# Sage uses mixed integer programs (MIPs) to find edge disjoint
# spanning trees. For some reason, aarch64 glpk takes much longer
# than x86_64 glpk to solve such MIPs. Since the MIP formulation
# has "numerous problems" and will be replaced by a polynomial
# algorithm soon, disable this test for now.
# https://github.com/sagemath/sage/issues/34575
./patches/disable-slow-glpk-test.patch
];
# Patches needed because of package updates. We could just pin the versions of
@@ -98,73 +52,33 @@ stdenv.mkDerivation rec {
# should come from or be proposed to upstream. This list will probably never
# be empty since dependencies update all the time.
packageUpgradePatches = [
# After updating sympow (https://trac.sagemath.org/ticket/3360) we can
# now set the cache dir to be within the .sage directory. This is not
# strictly necessary, but keeps us from littering in the user's HOME.
./patches/sympow-cache.patch
# Upstream will wait until Sage 9.7 to upgrade to linbox 1.7 because it
# does not support gcc 6. We can upgrade earlier.
# https://trac.sagemath.org/ticket/32959
./patches/linbox-1.7-upgrade.patch
# adapted from https://trac.sagemath.org/ticket/23712#comment:22
./patches/tachyon-renamed-focallength.patch
# https://trac.sagemath.org/ticket/34391
(fetchSageDiff {
name = "gap-4.12-upgrade.patch";
base = "9.8.beta7";
rev = "dd4a17281adcda74e11f998ef519b6bd0dafb043";
sha256 = "sha256-UQT9DO9xd5hh5RucvUkIm+rggPKu8bc1YaSI6LVYH98=";
})
# https://trac.sagemath.org/ticket/34701
(fetchSageDiff {
name = "libgap-fix-gc-crashes-on-aarch64.patch";
base = "eb8cd42feb58963adba67599bf6e311e03424328"; # TODO: update when #34391 lands
rev = "90acc7f1c13a80b8aa673469a2668feb9cd4207f";
sha256 = "sha256-9BhQLFB3wUhiXRQsK9L+I62lSjvTfrqMNi7QUIQvH4U=";
})
# https://github.com/sagemath/sage/pull/35235
# https://github.com/sagemath/sage/pull/35584, positively reviewed
(fetchpatch {
name = "ipython-8.11-upgrade.patch";
url = "https://github.com/sagemath/sage/commit/23471e2d242c4de8789d7b1fc8b07a4b1d1e595a.diff";
sha256 = "sha256-wvH4BvDiaBv7jbOP8LvOE5Vs16Kcwz/C9jLpEMohzLQ=";
name = "networkx-3.1-upgrade.patch";
url = "https://github.com/sagemath/sage/compare/10.0.rc2..e599562cf5fdfb9799a5412fac40c2f8e9f97341.diff";
sha256 = "sha256-3A90kXqNR0c7+k8xrZXAt5wqWg/VFAPNhQujwTdOyhI=";
})
# positively reviewed
# https://github.com/sagemath/sage/pull/35612, positively reviewed
(fetchpatch {
name = "matplotlib-3.7.0-upgrade.patch";
url = "https://github.com/sagemath/sage/pull/35177.diff";
sha256 = "sha256-YdPnMsjXBm9ZRm6a8hH8rSynkrABjLoIzqwp3F/rKAw=";
name = "linbox-1.7-upgrade.patch";
url = "https://github.com/sagemath/sage/compare/10.0.rc2..9c8796c7b677e3a056348e3510331ea8b8c3c42e.diff";
sha256 = "sha256-/TpvIQZUqmbUuz6wvp3ni9oRir5LBA2FKDJcmnHI1r4=";
})
# https://github.com/sagemath/sage/pull/35336, merged in 10.0.beta8
# https://github.com/sagemath/sage/pull/35619
(fetchpatch {
name = "ipywidgets-8.0.5-upgrade.patch";
url = "https://github.com/sagemath/sage/commit/7ab3e3aa81d47a35d09161b965bba8ab16fd5c9e.diff";
sha256 = "sha256-WjdsPTui6uv92RerlV0mqltmLaxADvz+3aqSvxBFmfU=";
name = "maxima-5.46.0-upgrade.patch";
url = "https://github.com/sagemath/sage/compare/10.0.rc3..7e86af5dae8f89868b25a6f57189bb5ca618da89.diff";
sha256 = "sha256-pxSxdJ2lyHoMUIxhlIn1nTHaddRxGvvTj9IbwFCTBFU=";
})
# https://github.com/sagemath/sage/pull/35499
# https://github.com/sagemath/sage/pull/35635, positively reviewed
(fetchpatch {
name = "ipywidgets-8.0.5-upgrade-part-deux.patch";
url = "https://github.com/sagemath/sage/pull/35499.diff";
sha256 = "sha256-uNCjLs9qrARTQNsq1+kTdvuV2A1M4xx5b1gWh5c55X0=";
name = "sympy-1.12-upgrade.patch";
url = "https://github.com/sagemath/sage/compare/10.0.rc2..aa4193cdc8ec9fb7bd7c49696b7f914668f7913a.diff";
sha256 = "sha256-UAmYCxHvnE5p+H2DySNZTPFVm915jHtOEoG+tZz5n7I=";
})
# rebased from https://github.com/sagemath/sage/pull/34994, merged in sage 10.0.beta2
./patches/numpy-1.24-upgrade.patch
# Sage uses mixed integer programs (MIPs) to find edge disjoint
# spanning trees. For some reason, aarch64 glpk takes much longer
# than x86_64 glpk to solve such MIPs. Since the MIP formulation
# has "numerous problems" and will be replaced by a polynomial
# algorithm soon, disable this test for now.
# https://trac.sagemath.org/ticket/34575
./patches/disable-slow-glpk-test.patch
];
patches = nixPatches ++ bugfixPatches ++ packageUpgradePatches;
@@ -178,19 +92,6 @@ stdenv.mkDerivation rec {
sed -i \
"s|var(\"SAGE_ROOT\".*|var(\"SAGE_ROOT\", \"$out\")|" \
src/sage/env.py
# src/doc/en/reference/spkg/conf.py expects index.rst in its directory,
# a list of external packages in the sage distribution (build/pkgs)
# generated by the bootstrap script (which we don't run). this is not
# relevant for other distributions, so remove it.
rm src/doc/en/reference/spkg/conf.py
sed -i "/spkg/d" src/doc/en/reference/index.rst
# the bootstrap script also generates installation instructions for
# arch, debian, fedora, cygwin and homebrew using data from build/pkgs.
# we don't run the bootstrap script, so disable including the generated
# files. docbuilding fails otherwise.
sed -i "/literalinclude/d" src/doc/en/installation/source.rst
'';
buildPhase = "# do nothing";

View file

@@ -11,6 +11,11 @@ stdenv.mkDerivation rec {
strictDeps = true;
nativeBuildInputs = [
# for patchShebangs below
python3
];
unpackPhase = ''
export SAGE_DOC_OVERRIDE="$PWD/share/doc/sage"
export SAGE_DOC_SRC_OVERRIDE="$PWD/docsrc"
@@ -24,15 +29,25 @@ stdenv.mkDerivation rec {
export HOME="$TMPDIR/sage_home"
mkdir -p "$HOME"
# run bootstrap script to generate Sage spkg docs, because unfortunately some unrelated doc
# pages link to them. it needs a few ugly (but self-contained) hacks for a standalone run.
cp -r "${src}/build" "$HOME"
chmod -R 755 "$HOME/build"
sed -i "/assert/d" "$HOME/build/sage_bootstrap/env.py"
sed -i "s|sage-bootstrap-python|python3|" "$HOME/build/bin/sage-package"
patchShebangs "$HOME/build/bin/sage-package"
pushd "$SAGE_DOC_SRC_OVERRIDE"
sed -i "s|OUTPUT_DIR=\"src/doc/|OUTPUT_DIR=\"$SAGE_DOC_SRC_OVERRIDE/|" bootstrap
PATH="$HOME/build/bin:$PATH" SAGE_ROOT="${src}" ./bootstrap
popd
# adapted from src/doc/Makefile (doc-src target), which tries to call Sage from PATH
mkdir -p $SAGE_DOC_SRC_OVERRIDE/en/reference/repl
${sage-with-env}/bin/sage -advanced > $SAGE_DOC_SRC_OVERRIDE/en/reference/repl/options.txt
# needed to link them in the sage docs using intersphinx
export PPLPY_DOCS=${python3.pkgs.pplpy.doc}/share/doc/pplpy
# adapted from src/doc/bootstrap (which we don't run)
OUTPUT_DIR="$SAGE_DOC_SRC_OVERRIDE/en/reference/repl"
mkdir -p "$OUTPUT_DIR"
OUTPUT="$OUTPUT_DIR/options.txt"
${sage-with-env}/bin/sage -advanced > "$OUTPUT"
# jupyter-sphinx calls the sagemath jupyter kernel during docbuild
export JUPYTER_PATH=${jupyter-kernel-specs}

View file

@@ -1,76 +1,83 @@
{ sage-src
, env-locations
, perl
, python
, buildPythonPackage
, m4
, perl
, pkg-config
, sage-setup
, gd
, iml
, libpng
, readline
, arb
, blas
, lapack
, boost
, brial
, cliquer
, cypari2
, cysignals
, cython
, lisp-compiler
, eclib
, ecm
, fflas-ffpack
, flint
, gd
, gap
, giac
, givaro
, glpk
, gsl
, iml
, jinja2
, libpng
, lapack
, lcalc
, lrcalc
, gap
, libbraiding
, libhomfly
, libmpc
, linbox
, lisp-compiler
, lrcalc
, m4ri
, m4rie
, memory-allocator
, libmpc
, mpfi
, mpfr
, ntl
, numpy
, pari
, pkgconfig # the python module, not the pkg-config alias
, pkg-config
, planarity
, ppl
, primecountpy
, python
, ratpoints
, readline
, rankwidth
, symmetrica
, zn_poly
, fflas-ffpack
, boost
, ratpoints
, singular
, pip
, jupyter-core
, sage-setup
, libhomfly
, libbraiding
, gmpy2
, pplpy
, sqlite
, jupyter-client
, ipywidgets
, mpmath
, rpy2
, symmetrica
, cvxopt
, cypari2
, cysignals
, cython
, fpylll
, scipy
, sympy
, matplotlib
, pillow
, gmpy2
, importlib-metadata
, importlib-resources
, ipykernel
, networkx
, ptyprocess
, ipython
, ipywidgets
, jinja2
, jupyter-client
, jupyter-core
, lrcalc-python
, sphinx # TODO: this is in setup.cfg, should we override it?
, matplotlib
, memory-allocator
, mpmath
, networkx
, numpy
, pexpect
, pillow
, pip
, pkgconfig
, pplpy
, primecountpy
, ptyprocess
, requests
, rpy2
, scipy
, sphinx
, sympy
, typing-extensions
}:
assert (!blas.isILP64) && (!lapack.isILP64);
@@ -87,83 +94,93 @@ buildPythonPackage rec {
nativeBuildInputs = [
iml
perl
jupyter-core
pkg-config
sage-setup
pip # needed to query installed packages
lisp-compiler
m4
perl
pip # needed to query installed packages
pkg-config
sage-setup
];
buildInputs = [
gd
readline
iml
libpng
readline
];
propagatedBuildInputs = [
cypari2
jinja2
numpy
pkgconfig
boost
# native dependencies (TODO: determine which ones need to be propagated)
arb
blas
boost
brial
cliquer
lisp-compiler
eclib
ecm
fflas-ffpack
flint
gap
giac
givaro
glpk
gsl
lapack
lcalc
gap
libbraiding
libhomfly
libmpc
linbox
lisp-compiler
lrcalc
m4ri
m4rie
memory-allocator
mpfi
mpfr
ntl
blas
lapack
pari
planarity
ppl
primecountpy
rankwidth
ratpoints
singular
symmetrica
zn_poly
pip
cython
cysignals
libhomfly
libbraiding
gmpy2
pplpy
sqlite
symmetrica
# from src/sage/setup.cfg and requirements.txt
cvxopt
cypari2
cysignals
cython
fpylll
gmpy2
importlib-metadata
importlib-resources
ipykernel
ipython
ipywidgets
jinja2
jupyter-client
jupyter-core
lrcalc-python
matplotlib
memory-allocator
mpmath
networkx
numpy
pexpect
pillow
pip
pkgconfig
pplpy
primecountpy
ptyprocess
requests
rpy2
scipy
sympy
matplotlib
pillow
ipykernel
fpylll
networkx
jupyter-client
ipywidgets
ptyprocess
lrcalc-python
sphinx
sympy
typing-extensions
];
preBuild = ''

View file

@@ -24,7 +24,7 @@
stdenv.mkDerivation rec {
pname = "singular";
version = "4.3.1p2";
version = "4.3.2p1";
# since the tarball does not contain tests, we fetch from GitHub.
src = fetchFromGitHub {
@@ -33,9 +33,8 @@ stdenv.mkDerivation rec {
# if a release is tagged (which sometimes does not happen), it will
# be in the format below.
# rev = "Release-${lib.replaceStrings ["."] ["-"] version}";
rev = "370a87f29e7b2a3fefe287184bd4f75b793cb03c";
sha256 = "sha256-T2tJ5yHTLzrXdozQB/XGZn4lNhpwVd9L30ZOzKAHxWs=";
rev = "Release-${lib.replaceStrings ["."] ["-"] version}";
sha256 = "sha256-fprlqJ/3vhnUBnopOhWi4TWMKjcJ4qDQGq8vaaGzy2E=";
# the repository's .gitattributes file contains the lines "/Tst/
# export-ignore" and "/doc/ export-ignore" so some directories are

View file

@@ -1,76 +1,50 @@
## FIXME: see ../../../servers/code-server/ for a proper yarn packaging
## - export ELECTRON_SKIP_BINARY_DOWNLOAD=1
## - jq "del(.scripts.preinstall)" node_modules/shellcheck/package.json | sponge node_modules/shellcheck/package.json
{
lib, stdenv, buildFHSEnv, runCommand, writeScript, fetchurl, fetchzip
}:
let
{ lib, stdenv, electron_22, buildNpmPackage, fetchFromGitHub }:
buildNpmPackage {
pname = "webtorrent-desktop";
version = "0.21.0";
in
runCommand "${pname}-${version}" rec {
inherit (stdenv) shell;
inherit pname version;
src =
if stdenv.hostPlatform.system == "x86_64-linux" then
fetchzip {
url = "https://github.com/webtorrent/webtorrent-desktop/releases/download/v${version}/WebTorrent-v${version}-linux.zip";
sha256 = "13gd8isq2l10kibsc1bsc15dbgpnwa7nw4cwcamycgx6pfz9a852";
}
else
throw "Webtorrent is not currently supported on ${stdenv.hostPlatform.system}";
version = "0.25-pre";
src = fetchFromGitHub {
owner = "webtorrent";
repo = "webtorrent-desktop";
rev = "fce078defefd575cb35a5c79d3d9f96affc8a08f";
sha256 = "sha256-gXFiG36qqR0QHTqhaxgQKDO0UCHkJLnVwUTQB/Nct/c=";
};
npmDepsHash = "sha256-pEuvstrZ9oMdJ/iU6XwEQ1BYOyQp/ce6sYBTrMCjGMc=";
makeCacheWritable = true;
npmRebuildFlags = [ "--ignore-scripts" ];
installPhase = ''
## Rebuild node_modules for production
## after babel compile has finished
rm -r node_modules
export NODE_ENV=production
npm ci --ignore-scripts
fhs = buildFHSEnv rec {
name = "fhsEnterWebTorrent";
runScript = "${src}/WebTorrent";
## use the trampoline, if you need to shell into the fhsenv
# runScript = writeScript "trampoline" ''
# #!/bin/sh
# exec "$@"
# '';
targetPkgs = pkgs: with pkgs; with xorg; [
alsa-lib atk at-spi2-core at-spi2-atk cairo cups dbus expat
fontconfig freetype gdk-pixbuf glib gtk3 pango libuuid libX11
libXScrnSaver libXcomposite libXcursor libXdamage libXext
libXfixes libXi libXrandr libXrender libXtst libxcb nspr nss
stdenv.cc.cc udev
];
# extraBwrapArgs = [
# "--ro-bind /run/user/$(id -u)/pulse /run/user/$(id -u)/pulse"
# ];
};
## delete unused files
rm -r test
desktopFile = fetchurl {
url = "https://raw.githubusercontent.com/webtorrent/webtorrent-desktop/v${version}/static/linux/share/applications/webtorrent-desktop.desktop";
sha256 = "1v16dqbxqds3cqg3xkzxsa5fyd8ssddvjhy9g3i3lz90n47916ca";
};
icon256File = fetchurl {
url = "https://raw.githubusercontent.com/webtorrent/webtorrent-desktop/v${version}/static/linux/share/icons/hicolor/256x256/apps/webtorrent-desktop.png";
sha256 = "1dapxvvp7cx52zhyaby4bxm4rll9xc7x3wk8k0il4g3mc7zzn3yk";
};
icon48File = fetchurl {
url = "https://raw.githubusercontent.com/webtorrent/webtorrent-desktop/v${version}/static/linux/share/icons/hicolor/48x48/apps/webtorrent-desktop.png";
sha256 = "00y96w9shbbrdbf6xcjlahqd08154kkrxmqraik7qshiwcqpw7p4";
};
## delete config for build time cache
npm config delete cache
## add script wrapper and desktop files; icons
mkdir -p $out/lib $out/bin $out/share/applications
cp -r . $out/lib/webtorrent-desktop
cat > $out/bin/WebTorrent <<EOF
#! ${stdenv.shell}
set -eu
exec ${electron_22}/bin/electron --no-sandbox $out/lib/webtorrent-desktop "\$@"
EOF
chmod +x $out/bin/WebTorrent
cp -r static/linux/share/icons $out/share/
sed "s#/opt/webtorrent-desktop#$out/bin#" \
< static/linux/share/applications/webtorrent-desktop.desktop \
> $out/share/applications/webtorrent-desktop.desktop
'';
meta = with lib; {
description = "Streaming torrent app for Mac, Windows, and Linux";
homepage = "https://webtorrent.io/desktop";
license = licenses.mit;
maintainers = [ maintainers.flokli maintainers.bendlas ];
platforms = [
"x86_64-linux"
];
};
} ''
mkdir -p $out/{bin,share/{applications,icons/hicolor/{48x48,256x256}/apps}}
cp $fhs/bin/fhsEnterWebTorrent $out/bin/WebTorrent
cp $icon48File $out/share/icons/hicolor/48x48/apps/webtorrent-desktop.png
cp $icon256File $out/share/icons/hicolor/256x256/apps/webtorrent-desktop.png
## Fix the desktop link
substitute $desktopFile $out/share/applications/webtorrent-desktop.desktop \
--replace /opt/webtorrent-desktop $out/libexec
''
}

View file

@@ -1,9 +0,0 @@
{ python3, qtile-unwrapped }:
(python3.withPackages (_: [ qtile-unwrapped ])).overrideAttrs (_: {
# otherwise will be exported as "env", this restores `nix search` behavior
name = "${qtile-unwrapped.pname}-${qtile-unwrapped.version}";
# export underlying qtile package
passthru = { unwrapped = qtile-unwrapped; };
# restore original qtile attrs
inherit (qtile-unwrapped) pname version meta;
})

View file

@@ -14,7 +14,7 @@ assert withFlint -> flint != null;
stdenv.mkDerivation rec {
pname = "eclib";
version = "20221012"; # upgrade might break the sage interface
version = "20230424"; # upgrade might break the sage interface
# sage tests to run:
# src/sage/interfaces/mwrank.py
# src/sage/libs/eclib
@@ -28,8 +28,8 @@ stdenv.mkDerivation rec {
#
# see https://github.com/JohnCremona/eclib/issues/64#issuecomment-789788561
# for upstream's explanation of the above
url = "https://github.com/JohnCremona/eclib/releases/download/${version}/eclib-${version}.tar.bz2";
sha256 = "sha256-TPavWyn6BMt7fAo19rrlPICPbK/XKstBruB/ka1adBc=";
url = "https://github.com/JohnCremona/eclib/releases/download/v${version}/eclib-${version}.tar.bz2";
sha256 = "sha256-FCLez8q+uwrUL39Yxa7+W9j6EXV7ReMaGGOE/QN81cE=";
};
buildInputs = [
pari

View file

@@ -2,6 +2,7 @@
, stdenv
, substituteAll
, fetchurl
, fetchpatch2
, pkg-config
, gettext
, graphene
@@ -82,6 +83,20 @@ stdenv.mkDerivation rec {
patches = [
# https://github.com/NixOS/nixpkgs/pull/218143#issuecomment-1501059486
./patches/4.0-fix-darwin-build.patch
# Fix deleting in Nautilus (part of 4.10.4)
# https://gitlab.gnome.org/GNOME/nautilus/-/issues/2945
(fetchpatch2 {
url = "https://gitlab.gnome.org/GNOME/gtk/-/commit/4f47683710bbb4b56c286c6ee6a5c394fcf2b755.patch";
sha256 = "fU9SX8MH37ZN6Ffk/YhYmipTC7+uT9JXnWggArWNkqA=";
})
# Fix border/artifact appearing in applications (part of 4.10.4)
# https://gitlab.gnome.org/GNOME/mutter/-/issues/2805
# https://gitlab.gnome.org/GNOME/gnome-shell/-/issues/6696
(fetchpatch2 {
url = "https://gitlab.gnome.org/GNOME/gtk/-/commit/b686ce1cb62dba505120e0f1116c516662a06e30.patch";
sha256 = "0zjY5s+T4CVe3WiowgWE58ruVvqBFUuY2juwBOzMRN4=";
})
];
depsBuildBuild = [

View file

@@ -10,7 +10,7 @@ stdenv.mkDerivation {
src = fetchFromGitHub {
owner = "libcpr";
repo = "cpr";
rev = "1.10.3";
rev = version;
hash = "sha256-NueZPBiICrh8GXXdCqNtVaB7PfqwtQ0WolvRij8SYbE=";
};

View file

@@ -12,7 +12,7 @@
buildPythonPackage rec {
pname = "atlassian-python-api";
version = "3.36.0";
version = "3.37.0";
format = "setuptools";
disabled = pythonOlder "3.7";
@@ -21,7 +21,7 @@ buildPythonPackage rec {
owner = "atlassian-api";
repo = pname;
rev = "refs/tags/${version}";
hash = "sha256-9xKGA9F3RLijjiEnb01QjmWA9CnN7FZGEEFEWZU4A+A=";
hash = "sha256-+lhDivbw30Oa3aB0TprRhBzv/c72IzNltFZA87LY2nM=";
};
propagatedBuildInputs = [

View file

@@ -13,7 +13,7 @@
buildPythonPackage rec {
pname = "bx-py-utils";
version = "78";
version = "80";
disabled = pythonOlder "3.9";
@@ -23,9 +23,13 @@ buildPythonPackage rec {
owner = "boxine";
repo = "bx_py_utils";
rev = "refs/tags/v${version}";
hash = "sha256-dMcbv/qf+8Qzu47MVFU2QUviT/vjKsHp+45F/6NOlWo=";
hash = "sha256-ih0tqT+3fTTgncXz4bneo4OGT0jVhybdADTy1de5VqI=";
};
postPatch = ''
rm bx_py_utils_tests/publish.py
'';
nativeBuildInputs = [
poetry-core
];

View file

@@ -0,0 +1,37 @@
{ lib
, fetchFromGitHub
, buildPythonPackage
, nose
, pillow
, wheezy-captcha
}:
buildPythonPackage rec {
pname = "captcha";
version = "0.4";
format = "setuptools";
src = fetchFromGitHub {
owner = "lepture";
repo = pname;
rev = "v${version}";
hash = "sha256-uxUjoACN65Cx5LMKpT+bZhKpf2JRSaEyysnYUgZntp8=";
};
propagatedBuildInputs = [ pillow ];
pythonImportsCheck = [ "captcha" ];
nativeCheckInputs = [ nose wheezy-captcha ];
checkPhase = ''
nosetests -s
'';
meta = with lib; {
description = "A captcha library that generates audio and image CAPTCHAs";
homepage = "https://github.com/lepture/captcha";
license = licenses.bsd3;
maintainers = with maintainers; [ Flakebi ];
};
}

View file

@@ -1,6 +1,7 @@
{ lib
, fetchFromGitHub
, buildPythonPackage
, pythonAtLeast
, pythonOlder
, pytest
, safe-pysha3
@@ -22,10 +23,14 @@ buildPythonPackage rec {
nativeCheckInputs = [
pytest
] ++ passthru.optional-dependencies.pycryptodome
++ passthru.optional-dependencies.pysha3;
# eth-hash can use either safe-pysha3 or pycryptodome;
# safe-pysha3 requires Python 3.9+ while pycryptodome does not.
# https://github.com/ethereum/eth-hash/issues/46#issuecomment-1314029211
++ lib.optional (pythonAtLeast "3.9") passthru.optional-dependencies.pysha3;
checkPhase = ''
pytest tests/backends/pycryptodome/
'' + lib.optionalString (pythonAtLeast "3.9") ''
pytest tests/backends/pysha3/
'';

View file

@@ -0,0 +1,38 @@
{ lib
, fetchFromGitHub
, buildPythonPackage
, flask
, flask-sessionstore
, flask-sqlalchemy
, captcha
, pytestCheckHook
}:
buildPythonPackage rec {
pname = "flask-session-captcha";
version = "1.3.0";
format = "setuptools";
src = fetchFromGitHub {
owner = "Tethik";
repo = pname;
rev = "v${version}";
hash = "sha256-V0f3mXCfqwH2l3OtJKOHGdrlKAFxs2ynqXvNve7Amkc=";
};
propagatedBuildInputs = [ flask flask-sessionstore captcha ];
pythonImportsCheck = [ "flask_session_captcha" ];
nativeCheckInputs = [ flask-sqlalchemy pytestCheckHook ];
# RuntimeError: Working outside of application context.
doCheck = false;
meta = with lib; {
description = "A captcha implemention for flask";
homepage = "https://github.com/Tethik/flask-session-captcha";
license = licenses.mit;
maintainers = with maintainers; [ Flakebi ];
};
}

View file

@@ -0,0 +1,35 @@
{ lib
, fetchPypi
, buildPythonPackage
, flask
, nose
}:
buildPythonPackage rec {
pname = "flask-sessionstore";
version = "0.4.5";
format = "setuptools";
src = fetchPypi {
pname = "Flask-Sessionstore";
inherit version;
hash = "sha256-AQ3jWrnw2UI8L3nFEx4AhDwGP4R8Tr7iBMsDS5jLQPQ=";
};
propagatedBuildInputs = [ flask ];
pythonImportsCheck = [ "flask_sessionstore" ];
nativeCheckInputs = [ nose ];
checkPhase = ''
nosetests -s
'';
meta = with lib; {
description = "Session Storage Backends for Flask";
homepage = "https://github.com/mcrowson/flask-sessionstore";
license = licenses.bsd3;
maintainers = with maintainers; [ Flakebi ];
};
}

View file

@@ -27,7 +27,7 @@
buildPythonPackage rec {
pname = "manifest-ml";
version = "0.1.5";
version = "0.1.7";
format = "setuptools";
disabled = pythonOlder "3.8";
@@ -36,7 +36,7 @@ buildPythonPackage rec {
owner = "HazyResearch";
repo = "manifest";
rev = "refs/tags/v${version}";
hash = "sha256-WKibIJv4eJ0IOCRgTl02Zusf0XNTPLBIyme6HMANr8I=";
hash = "sha256-wrslrzMAPBVAlb4ieB+DcLfWjZdlaUBnGXbzcMhlf34=";
};
propagatedBuildInputs = [

View file

@@ -80,7 +80,7 @@ buildPythonPackage rec {
pname = "matplotlib";
format = "pyproject";
disabled = pythonOlder "3.9";
disabled = pythonOlder "3.8";
src = fetchPypi {
inherit pname version;

View file

@@ -5,13 +5,13 @@
buildPythonPackage rec {
pname = "nvidia-ml-py";
version = "11.525.84";
version = "11.525.112";
format = "setuptools";
src = fetchPypi {
inherit pname version;
extension = "tar.gz";
hash = "sha256-WckO3WyKdkWL3JVFrLDc+Iv4igrYi2A3v8wFZDqkvVU=";
hash = "sha256-xk5HOVO2XsDMx2zzYBwxMKCsgGC7yuqRLMAPqOTJho0=";
};
patches = [

View file

@@ -7,7 +7,7 @@
buildPythonPackage rec {
pname = "pysimplegui";
version = "4.60.4";
version = "4.60.5";
format = "setuptools";
disabled = pythonOlder "3.6";
@@ -15,7 +15,7 @@ buildPythonPackage rec {
src = fetchPypi {
pname = "PySimpleGUI";
inherit version;
hash = "sha256-+IyCwwGlGuo1vmBdwGC8zrDctmguFigFRIhHAatLI7o=";
hash = "sha256-MQFNHMXu8Tc9fpNWT/JgRmJkXMd0qTmx8BqiU+f514s=";
};
propagatedBuildInputs = [

View file

@@ -6,7 +6,7 @@
, xorgserver
, pulseaudio
, pytest-asyncio
, qtile-unwrapped
, qtile
, keyring
, requests
, stravalib
@@ -34,7 +34,7 @@ buildPythonPackage rec {
];
checkInputs = [
pytest-asyncio
qtile-unwrapped
qtile
pulseaudio
keyring
requests

View file

@@ -1,20 +1,33 @@
{ lib
, buildPythonPackage
, fetchFromGitHub
, python3
, python3Packages
, mypy
, cairocffi
, dbus-next
, dbus-python
, glib
, pango
, pkg-config
, libinput
, libxkbcommon
, mpd2
, mypy
, pango
, pkg-config
, psutil
, pulseaudio
, pygobject3
, python-dateutil
, pywayland
, pywlroots
, pyxdg
, setuptools
, setuptools-scm
, wayland
, wlroots
, xcbutilcursor
, pulseaudio
, xcffib
, xkbcommon
}:
python3Packages.buildPythonPackage rec {
buildPythonPackage rec {
pname = "qtile";
version = "0.22.1";
@@ -42,14 +55,13 @@ python3Packages.buildPythonPackage rec {
nativeBuildInputs = [
pkg-config
] ++ (with python3Packages; [
setuptools-scm
]);
setuptools
];
propagatedBuildInputs = with python3Packages; [
propagatedBuildInputs = [
xcffib
(cairocffi.override { withXcffib = true; })
setuptools
python-dateutil
dbus-python
dbus-next

View file

@@ -0,0 +1,8 @@
{ python3 }:
(python3.withPackages (_: [ python3.pkgs.qtile ])).overrideAttrs (_: {
# restore some qtile attrs, beautify name
inherit (python3.pkgs.qtile) pname version meta;
name = with python3.pkgs.qtile; "${pname}-${version}";
passthru.unwrapped = python3.pkgs.qtile;
})

View file

@@ -1,4 +1,5 @@
{ lib
, stdenv
, buildPythonPackage
, fetchFromGitHub
, cmake
@@ -15,6 +16,7 @@
, numpy
, pandas
, pillow
, memorymappingHook
}:
let
external = {
@@ -82,6 +84,8 @@ buildPythonPackage rec {
buildInputs = [
boost
cairo
] ++ lib.optionals (stdenv.system == "x86_64-darwin") [
memorymappingHook
];
propagatedBuildInputs = [
@@ -149,7 +153,7 @@ buildPythonPackage rec {
meta = with lib; {
description = "Open source toolkit for cheminformatics";
maintainers = [ maintainers.rmcgibbo ];
maintainers = with maintainers; [ rmcgibbo natsukium ];
license = licenses.bsd3;
homepage = "https://www.rdkit.org";
};

View file

@@ -7,11 +7,11 @@
buildPythonPackage rec {
pname = "scikit-fmm";
version = "2022.8.15";
version = "2023.4.2";
src = fetchPypi {
inherit pname version;
hash = "sha256-BgDmxoB1QzZ/DlqIB0m66Km+fbAo5RcpjmX0BZ9985w=";
hash = "sha256-14ccR/ggdyq6kvJWUe8U5NJ96M45PArjwCqzxuJCPAs=";
};
propagatedBuildInputs = [

View file

@@ -8,12 +8,12 @@
buildPythonPackage rec {
pname = "trimesh";
version = "3.21.6";
version = "3.21.7";
format = "pyproject";
src = fetchPypi {
inherit pname version;
hash = "sha256-+gFqZAbGLoNDdOmbxElKwb0QY7BJfOUZVD7888T6eU8=";
hash = "sha256-wtGt3PUCUiSIiQRA3NxO1nPE35XQDipWfrwSKdDBhtE=";
};
nativeBuildInputs = [ setuptools ];

View file

@@ -0,0 +1,27 @@
{ lib
, buildPythonPackage
, fetchPypi
, pillow
}:
buildPythonPackage rec {
pname = "wheezy.captcha";
version = "3.0.2";
format = "setuptools";
src = fetchPypi {
inherit pname version;
hash = "sha256-PdtOhoVOopQsX2raPqh0P8meM8/MysgKsIe27HNtl3s=";
};
propagatedBuildInputs = [ pillow ];
pythonImportsCheck = [ "wheezy.captcha" ];
meta = with lib; {
homepage = "https://wheezycaptcha.readthedocs.io/en/latest/";
description = "A lightweight CAPTCHA library";
license = licenses.mit;
maintainers = with maintainers; [ Flakebi ];
};
}

View file

@@ -0,0 +1,53 @@
diff --git a/src/cmd/linuxkit/vendor/github.com/Code-Hex/vz/v3/osversion.go b/src/cmd/linuxkit/vendor/github.com/Code-Hex/vz/v3/osversion.go
index d72a7856d..b186d3aff 100644
--- a/src/cmd/linuxkit/vendor/github.com/Code-Hex/vz/v3/osversion.go
+++ b/src/cmd/linuxkit/vendor/github.com/Code-Hex/vz/v3/osversion.go
@@ -9,6 +9,7 @@ import "C"
import (
"errors"
"fmt"
+ "os/exec"
"strconv"
"strings"
"sync"
@@ -48,6 +49,40 @@ func fetchMajorMinorVersion() (float64, error) {
if err != nil {
return 0, err
}
+
+ // For backward compatibility reasons, if code compiled against an SDK
+ // earlier than macOS 11 is run on macOS 11 or later, and then tries to read
+ // value of kern.osproductversion, the OS will return the value "10.16"
+ // the value of kern.osproductversion, the OS will return the value "10.16"
+ // -productVersion` will return the real OS version string unless the
+ // environment variable SYSTEM_VERSION_COMPAT is set to 1 or 2, in which
+ // case it will respectively return "10.16" and "15.7" (the latter is for
+ // some iOS compatibility reason).
+ //
+ // The only (currently) sure way to get the real OS version string
+ // regardless of SYSTEM_VERSION_COMPAT or the SDK compiled against is
+ // apparently to parse
+ // /System/Library/CoreServices/.SystemVersionPlatform.plist if it exists,
+ // and /System/Library/CoreServices/SystemVersion.plist otherwise. Doing
+ // so, however, requires parsing XML plist files.
+ //
+ // Given what this library does, it doesn't seem likely that there would be
+ // a good reason to run its code with SYSTEM_VERSION_COMPAT set, so using
+ // `sw_vers` should be adequate until a proper parsing of plist files is
+ // added.
+ //
+ // See https://github.com/ziglang/zig/issues/7569,
+ // https://github.com/ziglang/zig/pull/7714 and
+ // https://eclecticlight.co/2020/08/13/macos-version-numbering-isnt-so-simple/
+ // for more information.
+ if osver == "10.16" {
+ out, err := exec.Command("sw_vers", "-productVersion").Output()
+ if err != nil {
+ return 0, err
+ }
+ osver = strings.TrimRight(string(out), "\r\n")
+ }
+
prefix := "v"
majorMinor := strings.TrimPrefix(semver.MajorMinor(prefix+osver), prefix)
version, err := strconv.ParseFloat(majorMinor, 64)

View file

@@ -1,4 +1,4 @@
{ lib, stdenv, buildGoModule, fetchFromGitHub, git, Virtualization, testers, linuxkit }:
{ lib, stdenv, buildGoModule, fetchFromGitHub, git, Cocoa, Virtualization, sigtool, testers, linuxkit }:
buildGoModule rec {
pname = "linuxkit";
@@ -15,7 +15,17 @@ buildGoModule rec {
modRoot = "./src/cmd/linuxkit";
buildInputs = lib.optionals stdenv.isDarwin [ Virtualization ];
patches = [
./darwin-os-version.patch
./support-apple-11-sdk.patch
];
# - On macOS, an executable must be signed with the right entitlement(s) to be
# able to use the Virtualization framework at runtime.
# - sigtool allows us to validly sign such executables with a dummy
# authority.
nativeBuildInputs = lib.optionals stdenv.isDarwin [ sigtool ];
buildInputs = lib.optionals stdenv.isDarwin [ Cocoa Virtualization ];
ldflags = [
"-s"
@@ -25,6 +35,17 @@ buildGoModule rec {
nativeCheckInputs = [ git ];
# - Because this package definition doesn't build using the source's Makefile,
# we must manually call the sign target.
# - The binary stripping that nixpkgs does by default in the
# fixup phase removes such signing and entitlements, so we have to sign
# after stripping.
# - Finally, at the start of the fixup phase, the working directory is
# $sourceRoot/src/cmd/linuxkit, so it's simpler to use the sign target from
# the Makefile in that directory rather than $sourceRoot/Makefile.
postFixup = lib.optionalString stdenv.isDarwin ''
make sign LOCAL_TARGET=$out/bin/linuxkit
'';
passthru.tests.version = testers.testVersion {
package = linuxkit;
command = "linuxkit version";

View file

@@ -0,0 +1,811 @@
diff --git a/src/cmd/linuxkit/vendor/github.com/Code-Hex/vz/v3/virtualization_12.m b/src/cmd/linuxkit/vendor/github.com/Code-Hex/vz/v3/virtualization_12.m
index 567172ba2..e2c1ac047 100644
--- a/src/cmd/linuxkit/vendor/github.com/Code-Hex/vz/v3/virtualization_12.m
+++ b/src/cmd/linuxkit/vendor/github.com/Code-Hex/vz/v3/virtualization_12.m
@@ -8,6 +8,7 @@
bool vmCanStop(void *machine, void *queue)
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
__block BOOL result;
dispatch_sync((dispatch_queue_t)queue, ^{
@@ -15,12 +16,13 @@ bool vmCanStop(void *machine, void *queue)
});
return (bool)result;
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
void stopWithCompletionHandler(void *machine, void *queue, void *completionHandler)
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
vm_completion_handler_t handler = makeVMCompletionHandler(completionHandler);
dispatch_sync((dispatch_queue_t)queue, ^{
@@ -29,7 +31,7 @@ void stopWithCompletionHandler(void *machine, void *queue, void *completionHandl
Block_release(handler);
return;
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
@@ -38,10 +40,11 @@ void stopWithCompletionHandler(void *machine, void *queue, void *completionHandl
*/
void *newVZGenericPlatformConfiguration()
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
return [[VZGenericPlatformConfiguration alloc] init];
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
@@ -51,11 +54,12 @@ void *newVZGenericPlatformConfiguration()
*/
void setDirectorySharingDevicesVZVirtualMachineConfiguration(void *config, void *directorySharingDevices)
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
[(VZVirtualMachineConfiguration *)config setDirectorySharingDevices:[(NSMutableArray *)directorySharingDevices copy]];
return;
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
@@ -66,11 +70,12 @@ void setDirectorySharingDevicesVZVirtualMachineConfiguration(void *config, void
*/
void setPlatformVZVirtualMachineConfiguration(void *config, void *platform)
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
[(VZVirtualMachineConfiguration *)config setPlatform:(VZPlatformConfiguration *)platform];
return;
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
@@ -80,11 +85,12 @@ void setPlatformVZVirtualMachineConfiguration(void *config, void *platform)
*/
void setGraphicsDevicesVZVirtualMachineConfiguration(void *config, void *graphicsDevices)
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
[(VZVirtualMachineConfiguration *)config setGraphicsDevices:[(NSMutableArray *)graphicsDevices copy]];
return;
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
@@ -94,11 +100,12 @@ void setGraphicsDevicesVZVirtualMachineConfiguration(void *config, void *graphic
*/
void setPointingDevicesVZVirtualMachineConfiguration(void *config, void *pointingDevices)
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
[(VZVirtualMachineConfiguration *)config setPointingDevices:[(NSMutableArray *)pointingDevices copy]];
return;
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
@@ -108,11 +115,12 @@ void setPointingDevicesVZVirtualMachineConfiguration(void *config, void *pointin
*/
void setKeyboardsVZVirtualMachineConfiguration(void *config, void *keyboards)
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
[(VZVirtualMachineConfiguration *)config setKeyboards:[(NSMutableArray *)keyboards copy]];
return;
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
@@ -122,11 +130,12 @@ void setKeyboardsVZVirtualMachineConfiguration(void *config, void *keyboards)
*/
void setAudioDevicesVZVirtualMachineConfiguration(void *config, void *audioDevices)
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
[(VZVirtualMachineConfiguration *)config setAudioDevices:[(NSMutableArray *)audioDevices copy]];
return;
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
@@ -136,10 +145,11 @@ void setAudioDevicesVZVirtualMachineConfiguration(void *config, void *audioDevic
*/
void *newVZVirtioSoundDeviceConfiguration()
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
return [[VZVirtioSoundDeviceConfiguration alloc] init];
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
@@ -148,11 +158,12 @@ void *newVZVirtioSoundDeviceConfiguration()
*/
void setStreamsVZVirtioSoundDeviceConfiguration(void *audioDeviceConfiguration, void *streams)
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
[(VZVirtioSoundDeviceConfiguration *)audioDeviceConfiguration setStreams:[(NSMutableArray *)streams copy]];
return;
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
@@ -162,10 +173,11 @@ void setStreamsVZVirtioSoundDeviceConfiguration(void *audioDeviceConfiguration,
*/
void *newVZVirtioSoundDeviceInputStreamConfiguration()
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
return [[VZVirtioSoundDeviceInputStreamConfiguration alloc] init];
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
@@ -174,12 +186,13 @@ void *newVZVirtioSoundDeviceInputStreamConfiguration()
*/
void *newVZVirtioSoundDeviceHostInputStreamConfiguration()
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
VZVirtioSoundDeviceInputStreamConfiguration *inputStream = (VZVirtioSoundDeviceInputStreamConfiguration *)newVZVirtioSoundDeviceInputStreamConfiguration();
[inputStream setSource:[[VZHostAudioInputStreamSource alloc] init]];
return inputStream;
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
@@ -189,10 +202,11 @@ void *newVZVirtioSoundDeviceHostInputStreamConfiguration()
*/
void *newVZVirtioSoundDeviceOutputStreamConfiguration()
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
return [[VZVirtioSoundDeviceOutputStreamConfiguration alloc] init];
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
@@ -201,12 +215,13 @@ void *newVZVirtioSoundDeviceOutputStreamConfiguration()
*/
void *newVZVirtioSoundDeviceHostOutputStreamConfiguration()
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
VZVirtioSoundDeviceOutputStreamConfiguration *outputStream = (VZVirtioSoundDeviceOutputStreamConfiguration *)newVZVirtioSoundDeviceOutputStreamConfiguration();
[outputStream setSink:[[VZHostAudioOutputStreamSink alloc] init]];
return outputStream;
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
@@ -220,12 +235,13 @@ void *newVZVirtioSoundDeviceHostOutputStreamConfiguration()
*/
void *newVZSharedDirectory(const char *dirPath, bool readOnly)
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
NSString *dirPathNSString = [NSString stringWithUTF8String:dirPath];
NSURL *dirURL = [NSURL fileURLWithPath:dirPathNSString];
return [[VZSharedDirectory alloc] initWithURL:dirURL readOnly:(BOOL)readOnly];
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
@@ -237,10 +253,11 @@ void *newVZSharedDirectory(const char *dirPath, bool readOnly)
*/
void *newVZSingleDirectoryShare(void *sharedDirectory)
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
return [[VZSingleDirectoryShare alloc] initWithDirectory:(VZSharedDirectory *)sharedDirectory];
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
@@ -252,10 +269,11 @@ void *newVZSingleDirectoryShare(void *sharedDirectory)
*/
void *newVZMultipleDirectoryShare(void *sharedDirectories)
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
return [[VZMultipleDirectoryShare alloc] initWithDirectories:(NSDictionary<NSString *, VZSharedDirectory *> *)sharedDirectories];
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
@@ -267,6 +285,7 @@ void *newVZMultipleDirectoryShare(void *sharedDirectories)
*/
void *newVZVirtioFileSystemDeviceConfiguration(const char *tag, void **error)
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
NSString *tagNSString = [NSString stringWithUTF8String:tag];
BOOL valid = [VZVirtioFileSystemDeviceConfiguration validateTag:tagNSString error:(NSError *_Nullable *_Nullable)error];
@@ -275,7 +294,7 @@ void *newVZVirtioFileSystemDeviceConfiguration(const char *tag, void **error)
}
return [[VZVirtioFileSystemDeviceConfiguration alloc] initWithTag:tagNSString];
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
@@ -284,11 +303,12 @@ void *newVZVirtioFileSystemDeviceConfiguration(const char *tag, void **error)
*/
void setVZVirtioFileSystemDeviceConfigurationShare(void *config, void *share)
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
[(VZVirtioFileSystemDeviceConfiguration *)config setShare:(VZDirectoryShare *)share];
return;
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
@@ -298,10 +318,11 @@ void setVZVirtioFileSystemDeviceConfigurationShare(void *config, void *share)
*/
void *newVZUSBScreenCoordinatePointingDeviceConfiguration()
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
return [[VZUSBScreenCoordinatePointingDeviceConfiguration alloc] init];
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
@@ -311,10 +332,11 @@ void *newVZUSBScreenCoordinatePointingDeviceConfiguration()
*/
void *newVZUSBKeyboardConfiguration()
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
return [[VZUSBKeyboardConfiguration alloc] init];
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
@@ -328,6 +350,7 @@ void sharedApplication()
void startVirtualMachineWindow(void *machine, double width, double height)
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
@autoreleasepool {
AppDelegate *appDelegate = [[[AppDelegate alloc]
@@ -340,5 +363,6 @@ void startVirtualMachineWindow(void *machine, double width, double height)
return;
}
}
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
diff --git a/src/cmd/linuxkit/vendor/github.com/Code-Hex/vz/v3/virtualization_12_arm64.m b/src/cmd/linuxkit/vendor/github.com/Code-Hex/vz/v3/virtualization_12_arm64.m
index 4fbaf6cb7..452adb747 100644
--- a/src/cmd/linuxkit/vendor/github.com/Code-Hex/vz/v3/virtualization_12_arm64.m
+++ b/src/cmd/linuxkit/vendor/github.com/Code-Hex/vz/v3/virtualization_12_arm64.m
@@ -30,6 +30,7 @@
*/
void *newVZMacAuxiliaryStorageWithCreating(const char *storagePath, void *hardwareModel, void **error)
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
NSString *storagePathNSString = [NSString stringWithUTF8String:storagePath];
NSURL *storageURL = [NSURL fileURLWithPath:storagePathNSString];
@@ -38,7 +39,7 @@ void *newVZMacAuxiliaryStorageWithCreating(const char *storagePath, void *hardwa
options:VZMacAuxiliaryStorageInitializationOptionAllowOverwrite
error:(NSError *_Nullable *_Nullable)error];
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
@@ -49,6 +50,7 @@ void *newVZMacAuxiliaryStorageWithCreating(const char *storagePath, void *hardwa
*/
void *newVZMacAuxiliaryStorage(const char *storagePath)
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
NSString *storagePathNSString = [NSString stringWithUTF8String:storagePath];
NSURL *storageURL = [NSURL fileURLWithPath:storagePathNSString];
@@ -56,7 +58,7 @@ void *newVZMacAuxiliaryStorage(const char *storagePath)
// https://developer.apple.com/documentation/virtualization/vzmacauxiliarystorage?language=objc
return [[VZMacAuxiliaryStorage alloc] initWithContentsOfURL:storageURL];
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
@@ -80,10 +82,11 @@ void *newVZMacAuxiliaryStorage(const char *storagePath)
*/
void *newVZMacPlatformConfiguration()
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
return [[VZMacPlatformConfiguration alloc] init];
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
@@ -92,17 +95,19 @@ void *newVZMacPlatformConfiguration()
*/
void setHardwareModelVZMacPlatformConfiguration(void *config, void *hardwareModel)
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
[(VZMacPlatformConfiguration *)config setHardwareModel:(VZMacHardwareModel *)hardwareModel];
return;
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
// Store the hardware model to disk so that we can retrieve them for subsequent boots.
void storeHardwareModelDataVZMacPlatformConfiguration(void *config, const char *filePath)
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
VZMacPlatformConfiguration *macPlatformConfiguration = (VZMacPlatformConfiguration *)config;
NSString *filePathNSString = [NSString stringWithUTF8String:filePath];
@@ -110,7 +115,7 @@ void storeHardwareModelDataVZMacPlatformConfiguration(void *config, const char *
[macPlatformConfiguration.hardwareModel.dataRepresentation writeToURL:fileURL atomically:YES];
return;
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
@@ -121,17 +126,19 @@ void storeHardwareModelDataVZMacPlatformConfiguration(void *config, const char *
*/
void setMachineIdentifierVZMacPlatformConfiguration(void *config, void *machineIdentifier)
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
[(VZMacPlatformConfiguration *)config setMachineIdentifier:(VZMacMachineIdentifier *)machineIdentifier];
return;
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
// Store the machine identifier to disk so that we can retrieve them for subsequent boots.
void storeMachineIdentifierDataVZMacPlatformConfiguration(void *config, const char *filePath)
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
VZMacPlatformConfiguration *macPlatformConfiguration = (VZMacPlatformConfiguration *)config;
NSString *filePathNSString = [NSString stringWithUTF8String:filePath];
@@ -139,7 +146,7 @@ void storeMachineIdentifierDataVZMacPlatformConfiguration(void *config, const ch
[macPlatformConfiguration.machineIdentifier.dataRepresentation writeToURL:fileURL atomically:YES];
return;
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
@@ -151,11 +158,12 @@ void storeMachineIdentifierDataVZMacPlatformConfiguration(void *config, const ch
*/
void setAuxiliaryStorageVZMacPlatformConfiguration(void *config, void *auxiliaryStorage)
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
[(VZMacPlatformConfiguration *)config setAuxiliaryStorage:(VZMacAuxiliaryStorage *)auxiliaryStorage];
return;
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
@@ -169,10 +177,11 @@ void setAuxiliaryStorageVZMacPlatformConfiguration(void *config, void *auxiliary
*/
void *newVZMacOSBootLoader()
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
return [[VZMacOSBootLoader alloc] init];
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
@@ -182,10 +191,11 @@ void *newVZMacOSBootLoader()
*/
void *newVZMacGraphicsDeviceConfiguration()
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
return [[VZMacGraphicsDeviceConfiguration alloc] init];
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
@@ -194,11 +204,12 @@ void *newVZMacGraphicsDeviceConfiguration()
*/
void setDisplaysVZMacGraphicsDeviceConfiguration(void *graphicsConfiguration, void *displays)
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
[(VZMacGraphicsDeviceConfiguration *)graphicsConfiguration setDisplays:[(NSMutableArray *)displays copy]];
return;
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
@@ -210,13 +221,14 @@ void setDisplaysVZMacGraphicsDeviceConfiguration(void *graphicsConfiguration, vo
*/
void *newVZMacGraphicsDisplayConfiguration(NSInteger widthInPixels, NSInteger heightInPixels, NSInteger pixelsPerInch)
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
return [[VZMacGraphicsDisplayConfiguration alloc]
initWithWidthInPixels:widthInPixels
heightInPixels:heightInPixels
pixelsPerInch:pixelsPerInch];
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
@@ -226,6 +238,7 @@ void *newVZMacGraphicsDisplayConfiguration(NSInteger widthInPixels, NSInteger he
*/
void *newVZMacHardwareModelWithPath(const char *hardwareModelPath)
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
VZMacHardwareModel *hardwareModel;
NSString *hardwareModelPathNSString = [NSString stringWithUTF8String:hardwareModelPath];
@@ -236,12 +249,13 @@ void *newVZMacHardwareModelWithPath(const char *hardwareModelPath)
}
return hardwareModel;
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
void *newVZMacHardwareModelWithBytes(void *hardwareModelBytes, int len)
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
VZMacHardwareModel *hardwareModel;
@autoreleasepool {
@@ -250,7 +264,7 @@ void *newVZMacHardwareModelWithBytes(void *hardwareModelBytes, int len)
}
return hardwareModel;
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
@@ -259,10 +273,11 @@ void *newVZMacHardwareModelWithBytes(void *hardwareModelBytes, int len)
*/
void *newVZMacMachineIdentifier()
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
return [[VZMacMachineIdentifier alloc] init];
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
@@ -274,6 +289,7 @@ void *newVZMacMachineIdentifier()
*/
void *newVZMacMachineIdentifierWithPath(const char *machineIdentifierPath)
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
VZMacMachineIdentifier *machineIdentifier;
NSString *machineIdentifierPathNSString = [NSString stringWithUTF8String:machineIdentifierPath];
@@ -284,12 +300,13 @@ void *newVZMacMachineIdentifierWithPath(const char *machineIdentifierPath)
}
return machineIdentifier;
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
void *newVZMacMachineIdentifierWithBytes(void *machineIdentifierBytes, int len)
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
VZMacMachineIdentifier *machineIdentifier;
@autoreleasepool {
@@ -298,12 +315,13 @@ void *newVZMacMachineIdentifierWithBytes(void *machineIdentifierBytes, int len)
}
return machineIdentifier;
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
nbyteslice getVZMacMachineIdentifierDataRepresentation(void *machineIdentifierPtr)
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
VZMacMachineIdentifier *machineIdentifier = (VZMacMachineIdentifier *)machineIdentifierPtr;
NSData *data = [machineIdentifier dataRepresentation];
@@ -313,12 +331,13 @@ nbyteslice getVZMacMachineIdentifierDataRepresentation(void *machineIdentifierPt
};
return ret;
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
VZMacOSRestoreImageStruct convertVZMacOSRestoreImage2Struct(void *restoreImagePtr)
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
VZMacOSRestoreImage *restoreImage = (VZMacOSRestoreImage *)restoreImagePtr;
VZMacOSRestoreImageStruct ret;
@@ -329,12 +348,13 @@ VZMacOSRestoreImageStruct convertVZMacOSRestoreImage2Struct(void *restoreImagePt
ret.mostFeaturefulSupportedConfiguration = (void *)CFBridgingRetain([restoreImage mostFeaturefulSupportedConfiguration]);
return ret;
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
void fetchLatestSupportedMacOSRestoreImageWithCompletionHandler(void *cgoHandler)
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
[VZMacOSRestoreImage fetchLatestSupportedWithCompletionHandler:^(VZMacOSRestoreImage *restoreImage, NSError *error) {
VZMacOSRestoreImageStruct restoreImageStruct = convertVZMacOSRestoreImage2Struct(restoreImage);
@@ -342,12 +362,13 @@ void fetchLatestSupportedMacOSRestoreImageWithCompletionHandler(void *cgoHandler
}];
return;
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
void loadMacOSRestoreImageFile(const char *ipswPath, void *cgoHandler)
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
NSString *ipswPathNSString = [NSString stringWithUTF8String:ipswPath];
NSURL *ipswURL = [NSURL fileURLWithPath:ipswPathNSString];
@@ -358,12 +379,13 @@ void loadMacOSRestoreImageFile(const char *ipswPath, void *cgoHandler)
}];
return;
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
VZMacOSConfigurationRequirementsStruct convertVZMacOSConfigurationRequirements2Struct(void *requirementsPtr)
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
VZMacOSConfigurationRequirements *requirements = (VZMacOSConfigurationRequirements *)requirementsPtr;
VZMacOSConfigurationRequirementsStruct ret;
@@ -373,12 +395,13 @@ VZMacOSConfigurationRequirementsStruct convertVZMacOSConfigurationRequirements2S
ret.hardwareModel = (void *)CFBridgingRetain([requirements hardwareModel]);
return ret;
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
VZMacHardwareModelStruct convertVZMacHardwareModel2Struct(void *hardwareModelPtr)
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
VZMacHardwareModel *hardwareModel = (VZMacHardwareModel *)hardwareModelPtr;
VZMacHardwareModelStruct ret;
@@ -391,7 +414,7 @@ VZMacHardwareModelStruct convertVZMacHardwareModel2Struct(void *hardwareModelPtr
ret.dataRepresentation = retByteSlice;
return ret;
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
@@ -405,6 +428,7 @@ VZMacHardwareModelStruct convertVZMacHardwareModel2Struct(void *hardwareModelPtr
*/
void *newVZMacOSInstaller(void *virtualMachine, void *vmQueue, const char *restoreImageFilePath)
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
__block VZMacOSInstaller *ret;
NSString *restoreImageFilePathNSString = [NSString stringWithUTF8String:restoreImageFilePath];
@@ -414,7 +438,7 @@ void *newVZMacOSInstaller(void *virtualMachine, void *vmQueue, const char *resto
});
return ret;
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
@@ -425,6 +449,7 @@ void *newProgressObserverVZMacOSInstaller()
void installByVZMacOSInstaller(void *installerPtr, void *vmQueue, void *progressObserverPtr, void *completionHandler, void *fractionCompletedHandler)
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
VZMacOSInstaller *installer = (VZMacOSInstaller *)installerPtr;
dispatch_sync((dispatch_queue_t)vmQueue, ^{
@@ -439,12 +464,13 @@ void installByVZMacOSInstaller(void *installerPtr, void *vmQueue, void *progress
});
return;
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
void cancelInstallVZMacOSInstaller(void *installerPtr)
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
VZMacOSInstaller *installer = (VZMacOSInstaller *)installerPtr;
if (installer.progress.cancellable) {
@@ -452,7 +478,7 @@ void cancelInstallVZMacOSInstaller(void *installerPtr)
}
return;
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
diff --git a/src/cmd/linuxkit/vendor/github.com/Code-Hex/vz/v3/virtualization_debug.m b/src/cmd/linuxkit/vendor/github.com/Code-Hex/vz/v3/virtualization_debug.m
index 67fe356ae..af81a46b0 100644
--- a/src/cmd/linuxkit/vendor/github.com/Code-Hex/vz/v3/virtualization_debug.m
+++ b/src/cmd/linuxkit/vendor/github.com/Code-Hex/vz/v3/virtualization_debug.m
@@ -12,10 +12,11 @@
*/
void *newVZGDBDebugStubConfiguration(uint32_t port)
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
return [[_VZGDBDebugStubConfiguration alloc] initWithPort:(NSInteger)port];
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
@@ -24,10 +25,11 @@ void *newVZGDBDebugStubConfiguration(uint32_t port)
*/
void setDebugStubVZVirtualMachineConfiguration(void *config, void *debugStub)
{
+#ifdef INCLUDE_TARGET_OSX_12
if (@available(macOS 12, *)) {
[(VZVirtualMachineConfiguration *)config _setDebugStub:(_VZDebugStubConfiguration *)debugStub];
return;
}
-
+#endif
RAISE_UNSUPPORTED_MACOS_EXCEPTION();
}
\ No newline at end of file
diff --git a/src/cmd/linuxkit/vendor/github.com/Code-Hex/vz/v3/virtualization_helper.h b/src/cmd/linuxkit/vendor/github.com/Code-Hex/vz/v3/virtualization_helper.h
index 995b40882..9da0700b9 100644
--- a/src/cmd/linuxkit/vendor/github.com/Code-Hex/vz/v3/virtualization_helper.h
+++ b/src/cmd/linuxkit/vendor/github.com/Code-Hex/vz/v3/virtualization_helper.h
@@ -18,6 +18,13 @@ NSDictionary *dumpProcessinfo();
__builtin_unreachable(); \
} while (0)
+// for macOS 12 API
+#if __MAC_OS_X_VERSION_MAX_ALLOWED >= 120000
+#define INCLUDE_TARGET_OSX_12 1
+#else
+#pragma message("macOS 12 API has been disabled")
+#endif
+
// for macOS 12.3 API
#if __MAC_OS_X_VERSION_MAX_ALLOWED >= 120300
#define INCLUDE_TARGET_OSX_12_3 1
diff --git a/src/cmd/linuxkit/vendor/github.com/Code-Hex/vz/v3/virtualization_view.h b/src/cmd/linuxkit/vendor/github.com/Code-Hex/vz/v3/virtualization_view.h
index ab00b9225..15d306f66 100644
--- a/src/cmd/linuxkit/vendor/github.com/Code-Hex/vz/v3/virtualization_view.h
+++ b/src/cmd/linuxkit/vendor/github.com/Code-Hex/vz/v3/virtualization_view.h
@@ -23,9 +23,11 @@
- (instancetype)init;
@end
+#ifdef INCLUDE_TARGET_OSX_12
API_AVAILABLE(macos(12.0))
@interface AppDelegate : NSObject <NSApplicationDelegate, NSWindowDelegate, VZVirtualMachineDelegate>
- (instancetype)initWithVirtualMachine:(VZVirtualMachine *)virtualMachine
windowWidth:(CGFloat)windowWidth
windowHeight:(CGFloat)windowHeight;
-@end
\ No newline at end of file
+@end
+#endif
\ No newline at end of file
diff --git a/src/cmd/linuxkit/vendor/github.com/Code-Hex/vz/v3/virtualization_view.m b/src/cmd/linuxkit/vendor/github.com/Code-Hex/vz/v3/virtualization_view.m
index 9031c44f1..33b20d91b 100644
--- a/src/cmd/linuxkit/vendor/github.com/Code-Hex/vz/v3/virtualization_view.m
+++ b/src/cmd/linuxkit/vendor/github.com/Code-Hex/vz/v3/virtualization_view.m
@@ -165,6 +165,7 @@
@end
+#ifdef INCLUDE_TARGET_OSX_12
@implementation AppDelegate {
VZVirtualMachine *_virtualMachine;
VZVirtualMachineView *_virtualMachineView;
@@ -372,3 +373,4 @@
[aboutPanel makeKeyAndOrderFront:nil];
}
@end
+#endif

View file

@@ -2,15 +2,15 @@
rustPlatform.buildRustPackage rec {
pname = "refinery-cli";
version = "0.8.9";
version = "0.8.10";
src = fetchCrate {
pname = "refinery_cli";
inherit version;
sha256 = "sha256-KNidO4HO4fcGXWJxFYsat2duZTzUA8XFcaK+Qzb1HFI=";
sha256 = "sha256-6nb/RduzoTK5UtdzYBLdKkYTUrV9A1w1ZePqr3cO534=";
};
cargoHash = "sha256-nYqOGSFQ4GdUdLkZ2Xtx+bRj2sX6joxKjNqm9CloODU=";
cargoHash = "sha256-rdxcWsLwhWuqGE5Z698NULg6Y2nkLqiIqEpBpceflk0=";
nativeBuildInputs = [ pkg-config ];

View file

@@ -0,0 +1,47 @@
{ darwin
, fetchFromGitHub
, lib
, openssl
, pkg-config
, rustPlatform
, stdenv
}:
let
inherit (darwin.apple_sdk.frameworks)
CoreServices
Security;
inherit (lib) optionals;
inherit (stdenv) isDarwin isLinux;
in
rustPlatform.buildRustPackage rec {
pname = "cargo-leptos";
version = "0.1.8";
src = fetchFromGitHub {
owner = "leptos-rs";
repo = pname;
rev = version;
hash = "sha256-z4AqxvKu9E8GGMj6jNUAAWeqoE/j+6NoAEZWeNZ+1BA=";
};
cargoHash = "sha256-w/9W4DXbh4G5DZ8IGUz4nN3LEjHhL7HgybHqODMFzHw=";
nativeBuildInputs = optionals (!isDarwin) [ pkg-config ];
buildInputs = optionals (!isDarwin) [
openssl
] ++ optionals isDarwin [
Security
CoreServices
];
# https://github.com/leptos-rs/cargo-leptos#dependencies
buildFeatures = [ "no_downloads" ]; # cargo-leptos will try to install missing dependencies on its own otherwise
doCheck = false; # Check phase tries to query crates.io
meta = with lib; {
description = "A build tool for the Leptos web framework";
homepage = "https://github.com/leptos-rs/cargo-leptos";
license = with licenses; [ mit ];
maintainers = with maintainers; [ benwis ];
};
}
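For orientation, a sketch of consuming the new attribute from a development shell; the mkShell wiring below is illustrative only, and cargo-leptos is the only name taken from this commit:

  # Hypothetical dev shell; assumes a nixpkgs checkout that already contains pkgs.cargo-leptos.
  with import <nixpkgs> { };
  mkShell {
    # cargo-leptos drives cargo itself, so the Rust toolchain is added alongside it here.
    packages = [ cargo-leptos cargo rustc ];
  }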

View file

@@ -207,7 +207,9 @@ in buildFHSEnv rec {
libpsl
nghttp2.lib
rtmpdump
] ++ steam-runtime-wrapped.overridePkgs
]
# This needs to come from pkgs as the passed-in steam-runtime-wrapped may not be the same architecture
++ pkgs.steamPackages.steam-runtime-wrapped.overridePkgs
++ extraLibraries pkgs;
extraInstallCommands = lib.optionalString (steam != null) ''

View file

@@ -15,4 +15,11 @@ stdenv.mkDerivation rec {
buildInputs = [ openssl ];
installFlags = [ "PREFIX=$(out)" ];
meta = with lib; {
description = "A tool for working with embedded signatures in Mach-O files";
homepage = "https://github.com/thefloweringash/sigtool";
license = licenses.mit;
platforms = platforms.unix;
};
}

View file

@@ -12,13 +12,13 @@
stdenv.mkDerivation rec {
pname = "intel-compute-runtime";
version = "23.05.25593.11";
version = "23.13.26032.30";
src = fetchFromGitHub {
owner = "intel";
repo = "compute-runtime";
rev = version;
sha256 = "sha256-AsJGcyVqRGz7OBWTlQeTS412iUzMAbIsA4w6CmEf1G8=";
sha256 = "sha256-KaU+11lY/chCySao1vLOejDJ9i4yjYWxaz0pzd8lWNY=";
};
nativeBuildInputs = [ cmake pkg-config ];

View file

@@ -5,22 +5,17 @@
, kernel
}:
stdenv.mkDerivation rec {
stdenv.mkDerivation {
pname = "ipu6-drivers";
version = "unstable-2023-02-20";
version = "unstable-2023-05-19";
src = fetchFromGitHub {
owner = "intel";
repo = pname;
rev = "dfedab03f3856010d37968cb384696038c73c984";
hash = "sha256-TKo04+fqY64SdDuWApuzRXBnaAW2DReubwFRsdfJMWM=";
repo = "ipu6-drivers";
rev = "8c02a846d1afe0e108964a2d3db4acb175712da9";
hash = "sha256-f2EuxVkCvEPyH0XbLCv5t/Mi0jdk7BOh1QluG/TxZr0=";
};
patches = [
# https://github.com/intel/ipu6-drivers/pull/84
./pr-84-unpatched-upstream-compatiblity.patch
];
postPatch = ''
cp --no-preserve=mode --recursive --verbose \
${ivsc-driver.src}/backport-include \
@@ -53,7 +48,6 @@ stdenv.mkDerivation rec {
maintainers = with lib.maintainers; [ hexa ];
platforms = [ "x86_64-linux" ];
# requires 6.1.7 https://github.com/intel/ipu6-drivers/pull/84
# fails to build on 6.3 https://github.com/intel/ipu6-drivers/issues/140
broken = kernel.kernelOlder "6.1.7" || kernel.kernelAtLeast "6.3";
broken = kernel.kernelOlder "6.1.7";
};
}

View file

@@ -1,365 +0,0 @@
From 8f4346915bb7e3a3ad3eea2c24b6da09dac257b2 Mon Sep 17 00:00:00 2001
From: Hans de Goede <hdegoede@redhat.com>
Date: Tue, 29 Nov 2022 15:06:23 +0100
Subject: [PATCH 1/4] sensors: Use clk-framework instead of a "clken" GPIO
Use the clk-framework to get a clk-provider reference and use
clk_prepare_enable() / clk_disable_unprepare() to control the clk.
This replaces modelling the clock as a "clken" GPIO, which is not a valid
way to model it when the clk is e.g. generated by the clk-generator of
a TPS68470 PMIC.
This relies on the following upstream bugfix for the INT3472 clk provider:
https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git/commit/?id=cf5ac2d45f6e4d11ad78e7b10ae9a4121ba5e995
"platform/x86: int3472/discrete: Ensure the clk/power enable pins are in output mode"
This patch is available since upstream kernel 6.1.7, so the new
code is only enabled for LINUX_VERSION_CODE >= KERNEL_VERSION(6, 1, 7)
This allows using the IPU6 sensor drivers with the upstream int3472
driver with unmodified upstream kernels >= 6.1.7.
Signed-off-by: Hans de Goede <hdegoede@redhat.com>
---
drivers/media/i2c/hm11b1.c | 18 ++++++++++++++++++
drivers/media/i2c/ov01a1s.c | 18 ++++++++++++++++++
2 files changed, 36 insertions(+)
diff --git a/drivers/media/i2c/hm11b1.c b/drivers/media/i2c/hm11b1.c
index 1cc5cd761fbf..e14810bdd612 100644
--- a/drivers/media/i2c/hm11b1.c
+++ b/drivers/media/i2c/hm11b1.c
@@ -468,8 +468,13 @@ struct hm11b1 {
struct gpio_desc *reset_gpio;
/* GPIO for powerdown */
struct gpio_desc *powerdown_gpio;
+#if LINUX_VERSION_CODE < KERNEL_VERSION(6, 1, 7)
/* GPIO for clock enable */
struct gpio_desc *clken_gpio;
+#else
+ /* Clock provider */
+ struct clk *clk;
+#endif
/* GPIO for privacy LED */
struct gpio_desc *pled_gpio;
#endif
@@ -508,7 +513,14 @@ static void hm11b1_set_power(struct hm11b1 *hm11b1, int on)
return;
gpiod_set_value_cansleep(hm11b1->reset_gpio, on);
gpiod_set_value_cansleep(hm11b1->powerdown_gpio, on);
+#if LINUX_VERSION_CODE < KERNEL_VERSION(6, 1, 7)
gpiod_set_value_cansleep(hm11b1->clken_gpio, on);
+#else
+ if (on)
+ clk_prepare_enable(hm11b1->clk);
+ else
+ clk_disable_unprepare(hm11b1->clk);
+#endif
gpiod_set_value_cansleep(hm11b1->pled_gpio, on);
msleep(20);
#elif IS_ENABLED(CONFIG_POWER_CTRL_LOGIC)
@@ -1093,12 +1105,18 @@ static int hm11b1_parse_dt(struct hm11b1 *hm11b1)
return ret;
}
+#if LINUX_VERSION_CODE < KERNEL_VERSION(6, 1, 7)
hm11b1->clken_gpio = devm_gpiod_get(dev, "clken", GPIOD_OUT_HIGH);
ret = PTR_ERR_OR_ZERO(hm11b1->clken_gpio);
if (ret < 0) {
dev_err(dev, "error while getting clken_gpio gpio: %d\n", ret);
return ret;
}
+#else
+ hm11b1->clk = devm_clk_get_optional(dev, "clk");
+ if (IS_ERR(hm11b1->clk))
+ return dev_err_probe(dev, PTR_ERR(hm11b1->clk), "getting clk\n");
+#endif
hm11b1->pled_gpio = devm_gpiod_get(dev, "pled", GPIOD_OUT_HIGH);
ret = PTR_ERR_OR_ZERO(hm11b1->pled_gpio);
diff --git a/drivers/media/i2c/ov01a1s.c b/drivers/media/i2c/ov01a1s.c
index e4477625ce3b..628a1dd83ddf 100644
--- a/drivers/media/i2c/ov01a1s.c
+++ b/drivers/media/i2c/ov01a1s.c
@@ -317,8 +317,13 @@ struct ov01a1s {
struct gpio_desc *reset_gpio;
/* GPIO for powerdown */
struct gpio_desc *powerdown_gpio;
+#if LINUX_VERSION_CODE < KERNEL_VERSION(6, 1, 7)
/* GPIO for clock enable */
struct gpio_desc *clken_gpio;
+#else
+ /* Clock provider */
+ struct clk *clk;
+#endif
/* GPIO for privacy LED */
struct gpio_desc *pled_gpio;
#endif
@@ -339,7 +344,14 @@ static void ov01a1s_set_power(struct ov01a1s *ov01a1s, int on)
return;
gpiod_set_value_cansleep(ov01a1s->reset_gpio, on);
gpiod_set_value_cansleep(ov01a1s->powerdown_gpio, on);
+#if LINUX_VERSION_CODE < KERNEL_VERSION(6, 1, 7)
gpiod_set_value_cansleep(ov01a1s->clken_gpio, on);
+#else
+ if (on)
+ clk_prepare_enable(ov01a1s->clk);
+ else
+ clk_disable_unprepare(ov01a1s->clk);
+#endif
gpiod_set_value_cansleep(ov01a1s->pled_gpio, on);
msleep(20);
#elif IS_ENABLED(CONFIG_POWER_CTRL_LOGIC)
@@ -945,12 +957,18 @@ static int ov01a1s_parse_dt(struct ov01a1s *ov01a1s)
return -EPROBE_DEFER;
}
+#if LINUX_VERSION_CODE < KERNEL_VERSION(6, 1, 7)
ov01a1s->clken_gpio = devm_gpiod_get(dev, "clken", GPIOD_OUT_HIGH);
ret = PTR_ERR_OR_ZERO(ov01a1s->clken_gpio);
if (ret < 0) {
dev_err(dev, "error while getting clken_gpio gpio: %d\n", ret);
return -EPROBE_DEFER;
}
+#else
+ ov01a1s->clk = devm_clk_get_optional(dev, "clk");
+ if (IS_ERR(ov01a1s->clk))
+ return dev_err_probe(dev, PTR_ERR(ov01a1s->clk), "getting clk\n");
+#endif
ov01a1s->pled_gpio = devm_gpiod_get(dev, "pled", GPIOD_OUT_HIGH);
ret = PTR_ERR_OR_ZERO(ov01a1s->pled_gpio);
From b04fdf6433f6b64840d46f92ddf3d6d18e86ede3 Mon Sep 17 00:00:00 2001
From: Hans de Goede <hdegoede@redhat.com>
Date: Tue, 29 Nov 2022 23:37:50 +0100
Subject: [PATCH 2/4] sensors: Make powerdown and reset signals active-low by
default
The powerdown and reset functions should be set to 0, as in
not-powered-down, not-in-reset when the sensor is turned on.
Adjust the gpiod_set() value parameters for the powerdown_gpio
and reset_gpio to !on to properly reflect this.
Typical sensors however have an NRESET aka /RESET pin which needs
to be driven low to put the device in reset, and they have
a powerup/enable pin rather than a powerdown pin. So at
the physical level the pins associated with the reset and
powerdown functions need to be driven low to put the chip
in reset / to power the chip down. Mark the pins as active-low
in the added gpio-lookup table entries for these pins to
reflect this.
This double negation has 0 net effect, but it uses the GPIO
subsystem functionality as intended (setting reset to 0
on poweron makes a lot more sense than setting it to 1 on poweron)
and it aligns the use of these GPIOs with that of the mainline
kernel allowing future use of the IPU6 driver with the
mainline INT3472 driver without needing to patch the mainline
kernel.
Signed-off-by: Hans de Goede <hdegoede@redhat.com>
---
drivers/media/i2c/hm11b1.c | 4 ++--
drivers/media/i2c/ov01a1s.c | 4 ++--
drivers/media/i2c/ov2740.c | 2 +-
...nt3472-support-independent-clock-and-LED-gpios-5.17+.patch | 4 ++--
patch/int3472-support-independent-clock-and-LED-gpios.patch | 4 ++--
5 files changed, 9 insertions(+), 9 deletions(-)
diff --git a/drivers/media/i2c/hm11b1.c b/drivers/media/i2c/hm11b1.c
index e14810bdd612..652e8f177044 100644
--- a/drivers/media/i2c/hm11b1.c
+++ b/drivers/media/i2c/hm11b1.c
@@ -511,8 +511,8 @@ static void hm11b1_set_power(struct hm11b1 *hm11b1, int on)
#if IS_ENABLED(CONFIG_INTEL_SKL_INT3472)
if (!(hm11b1->reset_gpio && hm11b1->powerdown_gpio))
return;
- gpiod_set_value_cansleep(hm11b1->reset_gpio, on);
- gpiod_set_value_cansleep(hm11b1->powerdown_gpio, on);
+ gpiod_set_value_cansleep(hm11b1->reset_gpio, !on);
+ gpiod_set_value_cansleep(hm11b1->powerdown_gpio, !on);
#if LINUX_VERSION_CODE < KERNEL_VERSION(6, 1, 7)
gpiod_set_value_cansleep(hm11b1->clken_gpio, on);
#else
diff --git a/drivers/media/i2c/ov01a1s.c b/drivers/media/i2c/ov01a1s.c
index 628a1dd83ddf..2ce81d04abf6 100644
--- a/drivers/media/i2c/ov01a1s.c
+++ b/drivers/media/i2c/ov01a1s.c
@@ -342,8 +342,8 @@ static void ov01a1s_set_power(struct ov01a1s *ov01a1s, int on)
#if IS_ENABLED(CONFIG_INTEL_SKL_INT3472)
if (!(ov01a1s->reset_gpio && ov01a1s->powerdown_gpio))
return;
- gpiod_set_value_cansleep(ov01a1s->reset_gpio, on);
- gpiod_set_value_cansleep(ov01a1s->powerdown_gpio, on);
+ gpiod_set_value_cansleep(ov01a1s->reset_gpio, !on);
+ gpiod_set_value_cansleep(ov01a1s->powerdown_gpio, !on);
#if LINUX_VERSION_CODE < KERNEL_VERSION(6, 1, 7)
gpiod_set_value_cansleep(ov01a1s->clken_gpio, on);
#else
diff --git a/drivers/media/i2c/ov2740.c b/drivers/media/i2c/ov2740.c
index 67fb17e08e36..a8bb101776bd 100644
--- a/drivers/media/i2c/ov2740.c
+++ b/drivers/media/i2c/ov2740.c
@@ -596,7 +596,7 @@ static void ov2740_set_power(struct ov2740 *ov2740, int on)
{
if (!(ov2740->reset_gpio && ov2740->pled_gpio))
return;
- gpiod_set_value_cansleep(ov2740->reset_gpio, on);
+ gpiod_set_value_cansleep(ov2740->reset_gpio, !on);
gpiod_set_value_cansleep(ov2740->pled_gpio, on);
msleep(20);
}
diff --git a/patch/int3472-support-independent-clock-and-LED-gpios-5.17+.patch b/patch/int3472-support-independent-clock-and-LED-gpios-5.17+.patch
index 57373ac85f39..66ed770b68a0 100644
--- a/patch/int3472-support-independent-clock-and-LED-gpios-5.17+.patch
+++ b/patch/int3472-support-independent-clock-and-LED-gpios-5.17+.patch
@@ -65,7 +65,7 @@ index ed4c9d760757..f5857ec334fa 100644
case INT3472_GPIO_TYPE_RESET:
ret = skl_int3472_map_gpio_to_sensor(int3472, agpio, "reset",
- GPIO_ACTIVE_LOW);
-+ polarity);
++ polarity ^ GPIO_ACTIVE_LOW);
if (ret)
err_msg = "Failed to map reset pin to sensor\n";
@@ -73,7 +73,7 @@ index ed4c9d760757..f5857ec334fa 100644
case INT3472_GPIO_TYPE_POWERDOWN:
ret = skl_int3472_map_gpio_to_sensor(int3472, agpio, "powerdown",
- GPIO_ACTIVE_LOW);
-+ polarity);
++ polarity ^ GPIO_ACTIVE_LOW);
if (ret)
err_msg = "Failed to map powerdown pin to sensor\n";
diff --git a/patch/int3472-support-independent-clock-and-LED-gpios.patch b/patch/int3472-support-independent-clock-and-LED-gpios.patch
index a2def0d76852..df70ce4a7117 100644
--- a/patch/int3472-support-independent-clock-and-LED-gpios.patch
+++ b/patch/int3472-support-independent-clock-and-LED-gpios.patch
@@ -65,7 +65,7 @@ index e59d79c7e82f..5cf6dd63d43f 100644
case INT3472_GPIO_TYPE_RESET:
ret = skl_int3472_map_gpio_to_sensor(int3472, agpio, "reset",
- GPIO_ACTIVE_LOW);
-+ polarity);
++ polarity ^ GPIO_ACTIVE_LOW);
if (ret)
err_msg = "Failed to map reset pin to sensor\n";
@@ -73,7 +73,7 @@ index e59d79c7e82f..5cf6dd63d43f 100644
case INT3472_GPIO_TYPE_POWERDOWN:
ret = skl_int3472_map_gpio_to_sensor(int3472, agpio, "powerdown",
- GPIO_ACTIVE_LOW);
-+ polarity);
++ polarity ^ GPIO_ACTIVE_LOW);
if (ret)
err_msg = "Failed to map powerdown pin to sensor\n";
From 90d4b2d9cb07292c6a2580572252938a836f4a86 Mon Sep 17 00:00:00 2001
From: Hans de Goede <hdegoede@redhat.com>
Date: Thu, 15 Dec 2022 16:00:31 +0100
Subject: [PATCH 3/4] sensors: Make "pled" GPIO optional
Starting with kernel 6.3 the mainline int3472 driver models the privacy
LED device as an LED class device rather than as a GPIO.
As part of these changes, the v4l2-core subdev code in 6.3 turns
the LED on/off on s_stream() on/off calls on the sensor v4l2-subdev,
so sensor drivers don't have to take care of this themselves.
Change the devm_gpiod_get() calls for the "pled" GPIO into
devm_gpiod_get_optional() calls so that the sensor drivers
can work with both older kernels (controlling the GPIO) and
newer kernels which don't have a "pled" GPIO.
Signed-off-by: Hans de Goede <hdegoede@redhat.com>
---
drivers/media/i2c/hm11b1.c | 2 +-
drivers/media/i2c/ov01a1s.c | 2 +-
drivers/media/i2c/ov2740.c | 4 +---
3 files changed, 3 insertions(+), 5 deletions(-)
diff --git a/drivers/media/i2c/hm11b1.c b/drivers/media/i2c/hm11b1.c
index 652e8f177044..6257f7987268 100644
--- a/drivers/media/i2c/hm11b1.c
+++ b/drivers/media/i2c/hm11b1.c
@@ -1118,7 +1118,7 @@ static int hm11b1_parse_dt(struct hm11b1 *hm11b1)
return dev_err_probe(dev, PTR_ERR(hm11b1->clk), "getting clk\n");
#endif
- hm11b1->pled_gpio = devm_gpiod_get(dev, "pled", GPIOD_OUT_HIGH);
+ hm11b1->pled_gpio = devm_gpiod_get_optional(dev, "pled", GPIOD_OUT_HIGH);
ret = PTR_ERR_OR_ZERO(hm11b1->pled_gpio);
if (ret < 0) {
dev_err(dev, "error while getting pled gpio: %d\n", ret);
diff --git a/drivers/media/i2c/ov01a1s.c b/drivers/media/i2c/ov01a1s.c
index 2ce81d04abf6..1bc6199713f3 100644
--- a/drivers/media/i2c/ov01a1s.c
+++ b/drivers/media/i2c/ov01a1s.c
@@ -970,7 +970,7 @@ static int ov01a1s_parse_dt(struct ov01a1s *ov01a1s)
return dev_err_probe(dev, PTR_ERR(ov01a1s->clk), "getting clk\n");
#endif
- ov01a1s->pled_gpio = devm_gpiod_get(dev, "pled", GPIOD_OUT_HIGH);
+ ov01a1s->pled_gpio = devm_gpiod_get_optional(dev, "pled", GPIOD_OUT_HIGH);
ret = PTR_ERR_OR_ZERO(ov01a1s->pled_gpio);
if (ret < 0) {
dev_err(dev, "error while getting pled gpio: %d\n", ret);
diff --git a/drivers/media/i2c/ov2740.c b/drivers/media/i2c/ov2740.c
index a8bb101776bd..08f284d4aca1 100644
--- a/drivers/media/i2c/ov2740.c
+++ b/drivers/media/i2c/ov2740.c
@@ -594,8 +594,6 @@ static u64 to_pixels_per_line(u32 hts, u32 f_index)
static void ov2740_set_power(struct ov2740 *ov2740, int on)
{
- if (!(ov2740->reset_gpio && ov2740->pled_gpio))
- return;
gpiod_set_value_cansleep(ov2740->reset_gpio, !on);
gpiod_set_value_cansleep(ov2740->pled_gpio, on);
msleep(20);
@@ -633,7 +631,7 @@ static int ov2740_parse_dt(struct ov2740 *ov2740)
return ret;
}
- ov2740->pled_gpio = devm_gpiod_get(dev, "pled", GPIOD_OUT_HIGH);
+ ov2740->pled_gpio = devm_gpiod_get_optional(dev, "pled", GPIOD_OUT_HIGH);
ret = PTR_ERR_OR_ZERO(ov2740->pled_gpio);
if (ret < 0) {
dev_err(dev, "error while getting pled gpio: %d\n", ret);
From 5ed1980822f0cb4787d1346493d126aad1bf9210 Mon Sep 17 00:00:00 2001
From: Hans de Goede <hdegoede@redhat.com>
Date: Tue, 29 Nov 2022 15:15:15 +0100
Subject: [PATCH 4/4] ov01a1s: Drop unused link_freq variable
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Drop the unused link_freq variable, fixing this compiler warning:
drivers/media/i2c/ov01a1s.c:994:13: warning: unused variable link_freq [-Wunused-variable]
994 | s64 link_freq;
| ^~~~~~~~~
Signed-off-by: Hans de Goede <hdegoede@redhat.com>
---
drivers/media/i2c/ov01a1s.c | 1 -
1 file changed, 1 deletion(-)
diff --git a/drivers/media/i2c/ov01a1s.c b/drivers/media/i2c/ov01a1s.c
index 1bc6199713f3..ab4ff255d4c1 100644
--- a/drivers/media/i2c/ov01a1s.c
+++ b/drivers/media/i2c/ov01a1s.c
@@ -988,7 +988,6 @@ static int ov01a1s_probe(struct i2c_client *client)
#if IS_ENABLED(CONFIG_INTEL_VSC)
struct vsc_mipi_config conf;
struct vsc_camera_status status;
- s64 link_freq;
#endif
ov01a1s = devm_kzalloc(&client->dev, sizeof(*ov01a1s), GFP_KERNEL);

View file

@@ -4,15 +4,15 @@
, kernel
}:
stdenv.mkDerivation rec {
pname = "ivsc-drivers";
version = "unstable-2023-01-06";
stdenv.mkDerivation {
pname = "ivsc-driver";
version = "unstable-2023-03-10";
src = fetchFromGitHub {
owner = "intel";
repo = "ivsc-driver";
rev = "94ecb88b3ac238d9145ac16230d6e0779bb4fd32";
hash = "sha256-Q7iyKw4WFSX42E4AtoW/zYRKpknWZSU66V5VPAx6AjA=";
rev = "c8db12b907e2e455d4d5586e5812d1ae0eebd571";
hash = "sha256-OM9PljvaMKrk72BFeSCqaABFeAws+tOdd3oC2jyNreE=";
};
nativeBuildInputs = kernel.moduleBuildDependencies;
@@ -33,7 +33,7 @@ stdenv.mkDerivation rec {
];
meta = {
homepage = "https://github.com/intel/ivsc-drivers";
homepage = "https://github.com/intel/ivsc-driver";
description = "Intel Vision Sensing Controller kernel driver";
license = lib.licenses.gpl2;
maintainers = with lib.maintainers; [ hexa ];

View file

@@ -12,12 +12,12 @@
stdenv.mkDerivation rec {
pname = "iwd";
version = "2.3";
version = "2.4";
src = fetchgit {
url = "https://git.kernel.org/pub/scm/network/wireless/iwd.git";
rev = version;
sha256 = "sha256-xI/zl7yYWZpoag7RZIL06aUNVgrHDOBEn93dbWBG48I=";
sha256 = "sha256-X7jPheVePOaLY2kaAdpBwi5b/YrRZVqswOBu2RzsuHc=";
};
outputs = [ "out" "man" "doc" ]

View file

@@ -3,7 +3,7 @@
with lib;
buildLinux (args // rec {
version = "6.4-rc2";
version = "6.4-rc3";
extraMeta.branch = lib.versions.majorMinor version;
# modDirVersion needs to be x.y.z, will always add .0
@@ -11,7 +11,7 @@ buildLinux (args // rec {
src = fetchzip {
url = "https://git.kernel.org/torvalds/t/linux-${version}.tar.gz";
hash = "sha256-CQwSN5LQxGO900QLMAXcjGhB2o+6rZgXHQ+gCJtVaeU=";
hash = "sha256-twGP2zNNkxJy32wCCMlkGtV1xco05FeW2s3wljwB1eM=";
};
# Should the testing kernels ever be built on Hydra?

View file

@@ -15,6 +15,11 @@ lib.makeScope
coreutils = callPackage ./coreutils { tinycc = tinycc-mes; };
gnugrep = callPackage ./gnugrep {
bash = bash_2_05;
tinycc = tinycc-mes;
};
gnumake = callPackage ./gnumake { tinycc = tinycc-mes; };
gnupatch = callPackage ./gnupatch { tinycc = tinycc-mes; };
@@ -40,6 +45,7 @@
test = kaem.runCommand "minimal-bootstrap-test" {} ''
echo ${bash_2_05.tests.get-version}
echo ${gnugrep.tests.get-version}
echo ${gnused.tests.get-version}
echo ${mes.compiler.tests.get-version}
echo ${tinycc-mes.compiler.tests.chain}

View file

@@ -0,0 +1,60 @@
{ lib
, fetchurl
, bash
, tinycc
, gnumake
}:
let
pname = "gnugrep";
version = "2.4";
src = fetchurl {
url = "mirror://gnu/grep/grep-${version}.tar.gz";
sha256 = "05iayw5sfclc476vpviz67hdy03na0pz2kb5csa50232nfx34853";
};
# Thanks to the live-bootstrap project!
# See https://github.com/fosslinux/live-bootstrap/blob/1bc4296091c51f53a5598050c8956d16e945b0f5/sysa/grep-2.4
makefile = fetchurl {
url = "https://github.com/fosslinux/live-bootstrap/raw/1bc4296091c51f53a5598050c8956d16e945b0f5/sysa/grep-2.4/mk/main.mk";
sha256 = "08an9ljlqry3p15w28hahm6swnd3jxizsd2188przvvsj093j91k";
};
in
bash.runCommand "${pname}-${version}" {
inherit pname version;
nativeBuildInputs = [
tinycc.compiler
gnumake
];
passthru.tests.get-version = result:
bash.runCommand "${pname}-get-version-${version}" {} ''
${result}/bin/grep --version
mkdir ''${out}
'';
meta = with lib; {
description = "GNU implementation of the Unix grep command";
homepage = "https://www.gnu.org/software/grep";
license = licenses.gpl3Plus;
maintainers = teams.minimal-bootstrap.members;
mainProgram = "grep";
platforms = platforms.unix;
};
} ''
# Unpack
ungz --file ${src} --output grep.tar
untar --file grep.tar
rm grep.tar
cd grep-${version}
# Configure
cp ${makefile} Makefile
# Build
make CC="tcc -static -B ${tinycc.libs}/lib"
# Install
make install PREFIX=$out
''

View file

@@ -61,4 +61,9 @@ in
version = "7.2.1";
hash = "sha256-TZN9FyCo7BnFM/ly2TA6HJiJt7/KdDeJOuXCfPIEqUA=";
};
# EOL 2024-03-15
varnish73 = common {
version = "7.3.0";
hash = "sha256-4tu7DsJwqQZHw4aGbm4iaZOu1G5I3nUacruBlzfxSuc=";
};
}

View file

@@ -45,4 +45,8 @@ in
version = "0.20.0";
sha256 = "sha256-3eH3qCa24rWqYXsTTDmm/9LjBMxcxUuozuRzZ3e8cUo=";
};
modules22 = common {
version = "0.22.0";
sha256 = "sha256-eoa6i6AuOS4pxQKA/lbJnwFc39cRiLqnBSpPM4Oitrc=";
};
}

View file

@@ -1,4 +1,4 @@
{ callPackages, callPackage, varnish60, varnish72, fetchFromGitHub }: {
{ callPackages, callPackage, varnish60, varnish72, varnish73, fetchFromGitHub }: {
varnish60Packages = rec {
varnish = varnish60;
modules = (callPackages ./modules.nix { inherit varnish; }).modules15;
@@ -17,4 +17,8 @@
varnish = varnish72;
modules = (callPackages ./modules.nix { inherit varnish; }).modules20;
};
varnish73Packages = rec {
varnish = varnish73;
modules = (callPackages ./modules.nix { inherit varnish; }).modules22;
};
}
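The default varnishPackages set stays on 7.2 (see all-packages.nix further down); as a hedged illustration, an overlay along these lines would opt a configuration into the new 7.3 set, using only attributes introduced in this commit:

  # Illustrative overlay, not part of this commit.
  final: prev: {
    varnishPackages = prev.varnish73Packages;
    varnish = final.varnishPackages.varnish;
  }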

View file

@@ -1,8 +1,8 @@
{ callPackage }: builtins.mapAttrs (_: callPackage ./generic.nix) rec {
wordpress = wordpress6_2;
wordpress6_2 = {
version = "6.2.1";
hash = "sha256-jGmOEmdj3n4bCoTJH/4DEsjTBiaEmaxBt1kA19HctU8=";
version = "6.2.2";
hash = "sha256-0qpvPauGbeP1MLHmz6gItJf80Erts7E7x28TM9AmAPk=";
};
wordpress6_1 = {
version = "6.1.2";

View file

@@ -1,25 +1,29 @@
{ lib, pkgs, stdenv, buildGoModule, fetchFromGitHub, nixosTests
, nodejs, debianutils, mkdocs, python3, python3Packages }:
, nodejs, debianutils, mkdocs, python3, python3Packages
, pkg-config, pixman, cairo, pango }:
let
nodeDependencies = (import ./node-composition.nix {
inherit pkgs nodejs;
inherit (stdenv.hostPlatform) system;
}).nodeDependencies;
}).nodeDependencies.override {
nativeBuildInputs = [ pkg-config ];
buildInputs = [ pixman cairo pango ];
};
in
buildGoModule rec {
pname = "ntfy-sh";
version = "2.4.0";
version = "2.5.0";
src = fetchFromGitHub {
owner = "binwiederhier";
repo = "ntfy";
rev = "v${version}";
sha256 = "sha256-bwYiIeDpZZpfv/HNtB/3acL0dJfegF/4OqWcEV8YGfY=";
sha256 = "sha256-C7Ko7JBiQoafos7TbVTqq6pn7NnuLOZo7Dcf6ob2IzI=";
};
vendorSha256 = "sha256-HHuj3PcIu1wsdcfd04PofoZHjRSgTfWfJcomqH3KXa8=";
vendorSha256 = "sha256-9mhMeGcAdFjzLJdsGnoTArtxVEaUznpN64j5SMBYHv8=";
doCheck = false;

File diff suppressed because it is too large

View file

@@ -7,11 +7,11 @@
stdenv.mkDerivation rec {
pname = "wget";
version = "1.21.3";
version = "1.21.4";
src = fetchurl {
url = "mirror://gnu/wget/${pname}-${version}.tar.lz";
sha256 = "sha256-29L7XkcUnUdS0Oqg2saMxJzyDUbfT44yb/yPGLKvTqU=";
hash = "sha256-NoNhml9Q7cvMsXIKeQBvo3v5uaJVqMW0gEi8PHqHS9k=";
};
patches = [

View file

@@ -0,0 +1,5 @@
# frozen_string_literal: true
source "https://rubygems.org"
gem "ronin"

View file

@@ -0,0 +1,205 @@
GEM
remote: https://rubygems.org/
specs:
activemodel (7.0.4.3)
activesupport (= 7.0.4.3)
activerecord (7.0.4.3)
activemodel (= 7.0.4.3)
activesupport (= 7.0.4.3)
activesupport (7.0.4.3)
concurrent-ruby (~> 1.0, >= 1.0.2)
i18n (>= 1.6, < 2)
minitest (>= 5.1)
tzinfo (~> 2.0)
addressable (2.8.2)
public_suffix (>= 2.0.2, < 6.0)
async (2.5.0)
console (~> 1.10)
io-event (~> 1.1)
timers (~> 4.1)
async-io (1.34.3)
async
chars (0.3.2)
combinatorics (0.4.4)
command_kit (0.4.0)
command_mapper (0.3.1)
concurrent-ruby (1.2.2)
connection_pool (2.4.0)
console (1.16.2)
fiber-local
domain_name (0.5.20190701)
unf (>= 0.0.5, < 1.0.0)
fake_io (0.1.0)
fiber-local (1.0.0)
hexdump (1.0.0)
http-cookie (1.0.5)
domain_name (~> 0.5)
i18n (1.12.0)
concurrent-ruby (~> 1.0)
io-console (0.6.0)
io-event (1.1.7)
irb (1.6.3)
reline (>= 0.3.0)
mechanize (2.8.5)
addressable (~> 2.8)
domain_name (~> 0.5, >= 0.5.20190701)
http-cookie (~> 1.0, >= 1.0.3)
mime-types (~> 3.0)
net-http-digest_auth (~> 1.4, >= 1.4.1)
net-http-persistent (>= 2.5.2, < 5.0.dev)
nokogiri (~> 1.11, >= 1.11.2)
rubyntlm (~> 0.6, >= 0.6.3)
webrick (~> 1.7)
webrobots (~> 0.1.2)
mime-types (3.4.1)
mime-types-data (~> 3.2015)
mime-types-data (3.2023.0218.1)
mini_portile2 (2.8.1)
minitest (5.18.0)
mustermann (3.0.0)
ruby2_keywords (~> 0.0.1)
net-http-digest_auth (1.4.1)
net-http-persistent (4.0.2)
connection_pool (~> 2.2)
nokogiri (1.14.2)
mini_portile2 (~> 2.8.0)
racc (~> 1.4)
nokogiri-diff (0.2.0)
nokogiri (~> 1.5)
tdiff (~> 0.3, >= 0.3.2)
nokogiri-ext (0.1.0)
nokogiri (~> 1.0)
open_namespace (0.4.1)
public_suffix (5.0.1)
racc (1.6.2)
rack (2.2.6.4)
rack-protection (3.0.5)
rack
rack-user_agent (0.5.3)
rack (>= 1.5)
woothee (>= 1.0.0)
reline (0.3.3)
io-console (~> 0.5)
ronin (2.0.1)
async-io (~> 1.0)
open_namespace (~> 0.4)
ronin-code-asm (~> 1.0)
ronin-code-sql (~> 2.0)
ronin-core (~> 0.1, >= 0.1.1)
ronin-db (~> 0.1)
ronin-exploits (~> 1.0, >= 1.0.1)
ronin-fuzzer (~> 0.1)
ronin-payloads (~> 0.1, >= 0.1.1)
ronin-repos (~> 0.1)
ronin-support (~> 1.0, >= 1.0.1)
ronin-vulns (~> 0.1, >= 0.1.2)
ronin-web (~> 1.0, >= 1.0.1)
rouge (~> 3.0)
wordlist (~> 1.0)
ronin-code-asm (1.0.0)
ruby-yasm (~> 0.3)
ronin-code-sql (2.0.0)
ronin-support (~> 1.0)
ronin-core (0.1.1)
command_kit (~> 0.4)
irb (~> 1.0)
reline (~> 0.1)
ronin-db (0.1.0)
ronin-core (~> 0.1)
ronin-db-activerecord (~> 0.1)
ronin-support (~> 1.0)
sqlite3 (~> 1.0)
ronin-db-activerecord (0.1.0)
activerecord (~> 7.0)
uri-query_params (~> 0.6)
ronin-exploits (1.0.1)
ronin-code-sql (~> 2.0)
ronin-core (~> 0.1)
ronin-payloads (~> 0.1, >= 0.1.1)
ronin-post_ex (~> 0.1)
ronin-repos (~> 0.1)
ronin-support (~> 1.0, >= 1.0.1)
ronin-vulns (~> 0.1, >= 0.1.1)
uri-query_params (~> 0.6)
ronin-fuzzer (0.1.0)
combinatorics (~> 0.4)
ronin-core (~> 0.1)
ronin-support (~> 1.0)
ronin-payloads (0.1.1)
ronin-code-asm (~> 1.0)
ronin-core (~> 0.1)
ronin-post_ex (~> 0.1)
ronin-repos (~> 0.1)
ronin-support (~> 1.0)
ronin-post_ex (0.1.0)
fake_io (~> 0.1)
hexdump (~> 1.0)
ronin-core (~> 0.1)
ronin-repos (0.1.0)
ronin-core (~> 0.1)
ronin-support (1.0.1)
addressable (~> 2.0)
chars (~> 0.3, >= 0.3.2)
combinatorics (~> 0.4)
hexdump (~> 1.0)
uri-query_params (~> 0.8)
ronin-vulns (0.1.2)
ronin-core (~> 0.1)
ronin-support (~> 1.0, >= 1.0.1)
ronin-web (1.0.1)
mechanize (~> 2.0)
nokogiri (~> 1.4)
nokogiri-diff (~> 0.2)
nokogiri-ext (~> 0.1)
open_namespace (~> 0.4)
ronin-core (~> 0.1)
ronin-support (~> 1.0)
ronin-web-server (~> 0.1, >= 0.1.1)
ronin-web-spider (~> 0.1)
ronin-web-user_agents (~> 0.1)
ronin-web-server (0.1.1)
rack (~> 2.2)
rack-user_agent (~> 0.5)
ronin-support (~> 1.0)
sinatra (~> 3.0)
webrick (~> 1.0)
ronin-web-spider (0.1.0)
ronin-support (~> 1.0)
spidr (~> 0.7)
ronin-web-user_agents (0.1.0)
rouge (3.30.0)
ruby-yasm (0.3.0)
command_mapper (~> 0.1)
ruby2_keywords (0.0.5)
rubyntlm (0.6.3)
sinatra (3.0.5)
mustermann (~> 3.0)
rack (~> 2.2, >= 2.2.4)
rack-protection (= 3.0.5)
tilt (~> 2.0)
spidr (0.7.0)
nokogiri (~> 1.3)
sqlite3 (1.6.2)
mini_portile2 (~> 2.8.0)
tdiff (0.3.4)
tilt (2.1.0)
timers (4.3.5)
tzinfo (2.0.6)
concurrent-ruby (~> 1.0)
unf (0.1.4)
unf_ext
unf_ext (0.0.8.2)
uri-query_params (0.8.1)
webrick (1.8.1)
webrobots (0.1.2)
woothee (1.13.0)
wordlist (1.0.0)
PLATFORMS
ruby
DEPENDENCIES
ronin
BUNDLED WITH
2.3.7

View file

@@ -0,0 +1,25 @@
{ pkgs, lib, bundlerApp, bundlerUpdateScript }:
bundlerApp {
pname = "ronin";
gemdir = ./.;
exes = [
"ronin"
"ronin-db"
"ronin-exploits"
"ronin-fuzzer"
"ronin-payloads"
"ronin-repos"
"ronin-vulns"
"ronin-web"
];
passthru.updateScript = bundlerUpdateScript "ronin";
meta = with lib; {
description = "A free and Open Source Ruby toolkit for security research and development";
homepage = "https://ronin-rb.dev";
license = licenses.gpl3Plus;
maintainers = with maintainers; [ Ch1keen ];
};
}
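A minimal, hypothetical NixOS snippet showing the new top-level attribute in use once it is wired into all-packages.nix below; the module itself is not part of this commit:

  # Illustrative only; assumes the standard environment.systemPackages option.
  { pkgs, ... }: {
    environment.systemPackages = [ pkgs.ronin ];
  }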

View file

@@ -0,0 +1,795 @@
{
activemodel = {
dependencies = ["activesupport"];
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "0ymhsxgdb68zgf4zp07g2bymmpgn0b9r38avn9pagz1p5zy1ql9v";
type = "gem";
};
version = "7.0.4.3";
};
activerecord = {
dependencies = ["activemodel" "activesupport"];
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "01wb98i2zsbb4jcb4i6z72vb05wiks4hv9chc66h1rsxrv0zi4dv";
type = "gem";
};
version = "7.0.4.3";
};
activesupport = {
dependencies = ["concurrent-ruby" "i18n" "minitest" "tzinfo"];
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "15m0b1im6i401ab51vzr7f8nk8kys1qa0snnl741y3sir3xd07jp";
type = "gem";
};
version = "7.0.4.3";
};
addressable = {
dependencies = ["public_suffix"];
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "0km8qw6qzximlg9iz24acqbpbzjw0r05bgavc6zqs3282xkyhimy";
type = "gem";
};
version = "2.8.2";
};
async = {
dependencies = ["console" "io-event" "timers"];
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "0gj166ij131c5d53dj51ad8v25dsrn9xym3vx8wkma1n40x3d6la";
type = "gem";
};
version = "2.5.0";
};
async-io = {
dependencies = ["async"];
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "10qxdz7hi136gp4pgzmw49vp8mz4fk89lc2319lp3d8iqn8w1swj";
type = "gem";
};
version = "1.34.3";
};
chars = {
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "18lgsszrrh3xnaym2jdz7g5gm7c8hv5faj7zyrm1ws9l107jrhr5";
type = "gem";
};
version = "0.3.2";
};
combinatorics = {
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "1sf0pj29xzriwsqv607iwzs76piac6kygqxpg0i59qwx029100fw";
type = "gem";
};
version = "0.4.4";
};
command_kit = {
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "179mlrnzj56ghviyvvwk0kdfyvr050yk4jj4nwb78izlbxw1wl1m";
type = "gem";
};
version = "0.4.0";
};
command_mapper = {
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "1v363y9g7zxfx2y7p50hdvxj6c0a8mfh30wac2rm3ibldspcjmn1";
type = "gem";
};
version = "0.3.1";
};
concurrent-ruby = {
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "0krcwb6mn0iklajwngwsg850nk8k9b35dhmc2qkbdqvmifdi2y9q";
type = "gem";
};
version = "1.2.2";
};
connection_pool = {
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "0dndngqvkm2ih3wqn5ilf9980c1cc57lqn5lywx3myalzpilq05z";
type = "gem";
};
version = "2.4.0";
};
console = {
dependencies = ["fiber-local"];
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "0y1bv3kd1l9p0k5n3anvvjxdrcq113pyngz2g29i9mvdgbbx7kq2";
type = "gem";
};
version = "1.16.2";
};
domain_name = {
dependencies = ["unf"];
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "0lcqjsmixjp52bnlgzh4lg9ppsk52x9hpwdjd53k8jnbah2602h0";
type = "gem";
};
version = "0.5.20190701";
};
fake_io = {
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "10559cnd2cqllql8ibd0zx0rvq8xk0qll5sqa4khb5963596ldmn";
type = "gem";
};
version = "0.1.0";
};
fiber-local = {
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "1vrxxb09fc7aicb9zb0pmn5akggjy21dmxkdl3w949y4q05rldr9";
type = "gem";
};
version = "1.0.0";
};
hexdump = {
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "1787w456yzmy4c13ray228n89a5wz6p6k3ibssjvy955qlr44b7g";
type = "gem";
};
version = "1.0.0";
};
http-cookie = {
dependencies = ["domain_name"];
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "13rilvlv8kwbzqfb644qp6hrbsj82cbqmnzcvqip1p6vqx36sxbk";
type = "gem";
};
version = "1.0.5";
};
i18n = {
dependencies = ["concurrent-ruby"];
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "1vdcchz7jli1p0gnc669a7bj3q1fv09y9ppf0y3k0vb1jwdwrqwi";
type = "gem";
};
version = "1.12.0";
};
io-console = {
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "0dikardh14c72gd9ypwh8dim41wvqmzfzf35mincaj5yals9m7ff";
type = "gem";
};
version = "0.6.0";
};
io-event = {
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "1mk579b88kvv5r4as0f6niq02176c6lmph305ml4piklgx6a1fsa";
type = "gem";
};
version = "1.1.7";
};
irb = {
dependencies = ["reline"];
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "1h9s07n5v3z029v18924ws9vdkdc80n6llp9ccx77yg1krv2g0f3";
type = "gem";
};
version = "1.6.3";
};
mechanize = {
dependencies = ["addressable" "domain_name" "http-cookie" "mime-types" "net-http-digest_auth" "net-http-persistent" "nokogiri" "rubyntlm" "webrick" "webrobots"];
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "1adjnzvq3rxqz7xf3qr7c0p85ccfwmn0l3fcmch6cjwz0i9vc5ah";
type = "gem";
};
version = "2.8.5";
};
mime-types = {
dependencies = ["mime-types-data"];
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "0ipw892jbksbxxcrlx9g5ljq60qx47pm24ywgfbyjskbcl78pkvb";
type = "gem";
};
version = "3.4.1";
};
mime-types-data = {
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "1pky3vzaxlgm9gw5wlqwwi7wsw3jrglrfflrppvvnsrlaiz043z9";
type = "gem";
};
version = "3.2023.0218.1";
};
mini_portile2 = {
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "1af4yarhbbx62f7qsmgg5fynrik0s36wjy3difkawy536xg343mp";
type = "gem";
};
version = "2.8.1";
};
minitest = {
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "0ic7i5z88zcaqnpzprf7saimq2f6sad57g5mkkqsrqrcd6h3mx06";
type = "gem";
};
version = "5.18.0";
};
mustermann = {
dependencies = ["ruby2_keywords"];
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "0rwbq20s2gdh8dljjsgj5s6wqqfmnbclhvv2c2608brv7jm6jdbd";
type = "gem";
};
version = "3.0.0";
};
net-http-digest_auth = {
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "1nq859b0gh2vjhvl1qh1zrk09pc7p54r9i6nnn6sb06iv07db2jb";
type = "gem";
};
version = "1.4.1";
};
net-http-persistent = {
dependencies = ["connection_pool"];
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "0i1as2lgnw7b4jid0gw5glv5hnxz36nmfsbr9rmxbcap72ijgy03";
type = "gem";
};
version = "4.0.2";
};
nokogiri = {
dependencies = ["mini_portile2" "racc"];
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "1djq4rp4m967mn6sxmiw75vz24gfp0w602xv22kk1x3cmi5afrf7";
type = "gem";
};
version = "1.14.2";
};
nokogiri-diff = {
dependencies = ["nokogiri" "tdiff"];
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "0njr1s42war0bj1axb2psjvk49l74a8wzr799wckqqdcb6n51lc1";
type = "gem";
};
version = "0.2.0";
};
nokogiri-ext = {
dependencies = ["nokogiri"];
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "0y1yflr1989vfy46lxhvs5njlskwiv08akkjybnh8n0cdqms4lhs";
type = "gem";
};
version = "0.1.0";
};
open_namespace = {
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "11j392gl62ibhkidjrjfnb3sygmqmvsc7zd5bhmnigd65x5gs310";
type = "gem";
};
version = "0.4.1";
};
public_suffix = {
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "0hz0bx2qs2pwb0bwazzsah03ilpf3aai8b7lk7s35jsfzwbkjq35";
type = "gem";
};
version = "5.0.1";
};
racc = {
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "09jgz6r0f7v84a7jz9an85q8vvmp743dqcsdm3z9c8rqcqv6pljq";
type = "gem";
};
version = "1.6.2";
};
rack = {
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "1qgwkcb8kxns8d5187cxjaxf18b7dmg9gh6cr9c1125m0bj2pnfk";
type = "gem";
};
version = "2.2.6.4";
};
rack-protection = {
dependencies = ["rack"];
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "1a12m1mv8dc0g90fs1myvis8vsgr427k1arg1q4a9qlfw6fqyhis";
type = "gem";
};
version = "3.0.5";
};
rack-user_agent = {
dependencies = ["rack" "woothee"];
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "1l1gw8xx1g04kdxc89hsy4aawdz8r2an4b78yzk9cc3y8qmw16v7";
type = "gem";
};
version = "0.5.3";
};
reline = {
dependencies = ["io-console"];
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "0zpz436h6gxyh000bdsm1m53kb5zgl97cfb45rxk2w5z2fgl30f3";
type = "gem";
};
version = "0.3.3";
};
ronin = {
dependencies = ["async-io" "open_namespace" "ronin-code-asm" "ronin-code-sql" "ronin-core" "ronin-db" "ronin-exploits" "ronin-fuzzer" "ronin-payloads" "ronin-repos" "ronin-support" "ronin-vulns" "ronin-web" "rouge" "wordlist"];
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "10jnlhacvcqhfd31hi1208xhmxv8fqa3yz6nwc0g1bb5271v2j16";
type = "gem";
};
version = "2.0.1";
};
ronin-code-asm = {
dependencies = ["ruby-yasm"];
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "0faic3m95nhr7wrh7visdj45qaah7dvnl0afl4a5gmy6ybij16zl";
type = "gem";
};
version = "1.0.0";
};
ronin-code-sql = {
dependencies = ["ronin-support"];
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "0mdnjvfvazyn0pnsjm1vdj906wmh97vvvi8mizjkvvipxkzizr40";
type = "gem";
};
version = "2.0.0";
};
ronin-core = {
dependencies = ["command_kit" "irb" "reline"];
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "0l2244i8im55mm3mdj88fg98avlmzjq581aazrhcaxm7qv0cl6bh";
type = "gem";
};
version = "0.1.1";
};
ronin-db = {
dependencies = ["ronin-core" "ronin-db-activerecord" "ronin-support" "sqlite3"];
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "0r0ybr2pw7can5sgnibmmlh97aicq1m31l8ldsswj56fkrjjn7r1";
type = "gem";
};
version = "0.1.0";
};
ronin-db-activerecord = {
dependencies = ["activerecord" "uri-query_params"];
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "13a39x9dwr4ismfrz2vf4yv7abmx9vzgfdj0diiz79ysfmbmj6a4";
type = "gem";
};
version = "0.1.0";
};
ronin-exploits = {
dependencies = ["ronin-code-sql" "ronin-core" "ronin-payloads" "ronin-post_ex" "ronin-repos" "ronin-support" "ronin-vulns" "uri-query_params"];
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "0niw585sg40wj23d9j0l98bnhyxvlaif92s7dynznf7x4igmp9rj";
type = "gem";
};
version = "1.0.1";
};
ronin-fuzzer = {
dependencies = ["combinatorics" "ronin-core" "ronin-support"];
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "19sc4kk6lwpq6fd23dmji0vf4mjkf1z5pjq4wp0xs2cby2fzld5p";
type = "gem";
};
version = "0.1.0";
};
ronin-payloads = {
dependencies = ["ronin-code-asm" "ronin-core" "ronin-post_ex" "ronin-repos" "ronin-support"];
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "0z8k5g9r0bi8mhkmzbgx4lpw1civnmc6adl5hy0k3dp9wm3qs002";
type = "gem";
};
version = "0.1.1";
};
ronin-post_ex = {
dependencies = ["fake_io" "hexdump" "ronin-core"];
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "0dcpnlz8niqjjm5d9z8khg53acl7xn5dgliv70svsncc3h0hx0w7";
type = "gem";
};
version = "0.1.0";
};
ronin-repos = {
dependencies = ["ronin-core"];
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "054zm9bcri9gklsr7xh1z8qqzm7a6n0j8m7mm0553hr1mnpah94p";
type = "gem";
};
version = "0.1.0";
};
ronin-support = {
dependencies = ["addressable" "chars" "combinatorics" "hexdump" "uri-query_params"];
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "0pysnsgdn8hxn2wikgs0x3kcz0r4a1n5fdsys6c1z0kmslh4f52k";
type = "gem";
};
version = "1.0.1";
};
ronin-vulns = {
dependencies = ["ronin-core" "ronin-support"];
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "13yyn331cj8wip99s0km17v9vcx3gpyb9v4nkcmpzhg9rq5w4x57";
type = "gem";
};
version = "0.1.2";
};
ronin-web = {
dependencies = ["mechanize" "nokogiri" "nokogiri-diff" "nokogiri-ext" "open_namespace" "ronin-core" "ronin-support" "ronin-web-server" "ronin-web-spider" "ronin-web-user_agents"];
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "0wzd7dibc7lkqvw0kqx4py6srqd3ic2mbr7jzyq7d7wrx4inbpgs";
type = "gem";
};
version = "1.0.1";
};
ronin-web-server = {
dependencies = ["rack" "rack-user_agent" "ronin-support" "sinatra" "webrick"];
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "14p1z2s20dkipb6rp2wyjc91dz6bjn5v8nv68m54my7p1vac05zk";
type = "gem";
};
version = "0.1.1";
};
ronin-web-spider = {
dependencies = ["ronin-support" "spidr"];
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "0592llhzm8miy0lj4xsb4h0ppy18wmwqi54rjzzsm7h3d2py7iv9";
type = "gem";
};
version = "0.1.0";
};
ronin-web-user_agents = {
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "1shca7bsc09hag7ax3js9xszw71mnf1ywrf0l0pk40hfqmnnaxcl";
type = "gem";
};
version = "0.1.0";
};
rouge = {
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "1dnfkrk8xx2m8r3r9m2p5xcq57viznyc09k7r3i4jbm758i57lx3";
type = "gem";
};
version = "3.30.0";
};
ruby-yasm = {
dependencies = ["command_mapper"];
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "1vf0kdaaysx9kr7v8rl0hl0j73zkfkg7zqvg0b41sgfg3zfib0ap";
type = "gem";
};
version = "0.3.0";
};
ruby2_keywords = {
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "1vz322p8n39hz3b4a9gkmz9y7a5jaz41zrm2ywf31dvkqm03glgz";
type = "gem";
};
version = "0.0.5";
};
rubyntlm = {
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "0b8hczk8hysv53ncsqzx4q6kma5gy5lqc7s5yx8h64x3vdb18cjv";
type = "gem";
};
version = "0.6.3";
};
sinatra = {
dependencies = ["mustermann" "rack" "rack-protection" "tilt"];
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "1ryfja9yd3fq8n1p5yi3qnd0pjk7bkycmxxmbb1bj0axlr1pdv20";
type = "gem";
};
version = "3.0.5";
};
spidr = {
dependencies = ["nokogiri"];
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "15gjqry61z93f4p84x5b1bi6f65xd4djax0563ljngmsckyg7xg5";
type = "gem";
};
version = "0.7.0";
};
sqlite3 = {
dependencies = ["mini_portile2"];
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "1i47n6nkyigkyag00yqf9f3nj11bm1lb0ds5nkvkdvm7lxbna5jq";
type = "gem";
};
version = "1.6.2";
};
tdiff = {
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "0rjvqyyxrybzhaqmgh4zjcdrvmqyqcqqbq4vda39idhrqcd2gy67";
type = "gem";
};
version = "0.3.4";
};
tilt = {
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "1qmhi6d9przjzhsyk9g5pq2j75c656msh6xzprqd2mxgphf23jxs";
type = "gem";
};
version = "2.1.0";
};
timers = {
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "0pjzipnmzfywvgsr3gxwj6nmg47lz4700g0q71jgcy1z6rb7dn7p";
type = "gem";
};
version = "4.3.5";
};
tzinfo = {
dependencies = ["concurrent-ruby"];
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "16w2g84dzaf3z13gxyzlzbf748kylk5bdgg3n1ipvkvvqy685bwd";
type = "gem";
};
version = "2.0.6";
};
unf = {
dependencies = ["unf_ext"];
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "0bh2cf73i2ffh4fcpdn9ir4mhq8zi50ik0zqa1braahzadx536a9";
type = "gem";
};
version = "0.1.4";
};
unf_ext = {
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "1yj2nz2l101vr1x9w2k83a0fag1xgnmjwp8w8rw4ik2rwcz65fch";
type = "gem";
};
version = "0.0.8.2";
};
uri-query_params = {
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "08i91q1q2fvjq7n21p4f4pryi8b9msknrgwz132spvhm4l55n6l6";
type = "gem";
};
version = "0.8.1";
};
webrick = {
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "13qm7s0gr2pmfcl7dxrmq38asaza4w0i2n9my4yzs499j731wh8r";
type = "gem";
};
version = "1.8.1";
};
webrobots = {
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "19ndcbba8s8m62hhxxfwn83nax34rg2k5x066awa23wknhnamg7b";
type = "gem";
};
version = "0.1.2";
};
woothee = {
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "0xg31qi09swgsf46b9ba38z2jav2516bg3kg7xf1wfbzw8mpd3fc";
type = "gem";
};
version = "1.13.0";
};
wordlist = {
groups = ["default"];
platforms = [];
source = {
remotes = ["https://rubygems.org"];
sha256 = "07h4kgycf72w9gbnf95d5h9zcdcgl3gjadfajjicl2xkiffvpcmf";
type = "gem";
};
version = "1.0.0";
};
}

View file

@@ -1415,6 +1415,7 @@ mapAliases ({
qt515 = qt5; # Added 2022-11-24
qt5ct = libsForQt5.qt5ct; # Added 2021-12-27
qtcurve = libsForQt5.qtcurve; # Added 2020-11-07
qtile-unwrapped = python3.pkgs.qtile; # Added 2023-05-12
qtkeychain = throw "the qtkeychain attribute (qt4 version) has been removes, use the qt5 version: libsForQt5.qtkeychain"; # Added 2021-08-04
qtscriptgenerator = throw "'qtscriptgenerator' (Qt4) is unmaintained upstream and not used in nixpkgs"; # Added 2022-06-14
quagga = throw "quagga is no longer maintained upstream"; # Added 2021-04-22

View file

@@ -1679,6 +1679,8 @@ with pkgs;
redfang = callPackage ../tools/networking/redfang { };
ronin = callPackage ../tools/security/ronin { };
s0ix-selftest-tool = callPackage ../tools/system/s0ix-selftest-tool { };
scarab = callPackage ../tools/games/scarab { };
@@ -13892,9 +13894,9 @@ with pkgs;
valum = callPackage ../development/web/valum { };
inherit (callPackages ../servers/varnish { })
varnish60 varnish72;
varnish60 varnish72 varnish73;
inherit (callPackages ../servers/varnish/packages.nix { })
varnish60Packages varnish72Packages;
varnish60Packages varnish72Packages varnish73Packages;
varnishPackages = varnish72Packages;
varnish = varnishPackages.varnish;
@@ -16320,6 +16322,7 @@ with pkgs;
cargo-edit = callPackage ../development/tools/rust/cargo-edit {
inherit (darwin.apple_sdk.frameworks) Security;
};
cargo-leptos = callPackage ../development/tools/rust/cargo-leptos { };
cargo-kcov = callPackage ../development/tools/rust/cargo-kcov { };
cargo-graph = callPackage ../development/tools/rust/cargo-graph { };
cargo-guppy = callPackage ../development/tools/rust/cargo-guppy { };
@@ -18712,7 +18715,8 @@ with pkgs;
libwtk-sdl2 = callPackage ../development/libraries/libwtk-sdl2 { };
linuxkit = callPackage ../development/tools/misc/linuxkit {
inherit (darwin.apple_sdk_11_0.frameworks) Virtualization;
inherit (darwin.apple_sdk_11_0.frameworks) Cocoa Virtualization;
inherit (darwin) sigtool;
};
listenbrainz-mpd = callPackage ../applications/audio/listenbrainz-mpd {
@@ -34816,8 +34820,7 @@ with pkgs;
qpdfview = libsForQt5.callPackage ../applications/office/qpdfview { };
qtile-unwrapped = callPackage ../applications/window-managers/qtile { };
qtile = callPackage ../applications/window-managers/qtile/wrapper.nix { };
qtile = callPackage ../development/python-modules/qtile/wrapper.nix { };
vimgolf = callPackage ../games/vimgolf { };
@@ -38406,10 +38409,6 @@ with pkgs;
maxima-ecl = maxima.override {
lisp-compiler = ecl;
};
# old version temporarily kept for sage
maxima-ecl-5_45 = callPackage ../applications/science/math/maxima/5.45.nix {
lisp-compiler = ecl;
};
mxnet = callPackage ../applications/science/math/mxnet {
inherit (linuxPackages) nvidia_x11;

View file

@@ -208,7 +208,6 @@ in {
kernelPatches = [
kernelPatches.bridge_stp_helper
kernelPatches.request_key_helper
kernelPatches.make-maple-state-reusable-after-mas_empty_area
];
};
latest = packageAliases.linux_latest.kernel;

View file

@@ -1623,6 +1623,8 @@ self: super: with self; {
inherit (pkgs) capstone;
};
captcha = callPackage ../development/python-modules/captcha { };
capturer = callPackage ../development/python-modules/capturer { };
carbon = callPackage ../development/python-modules/carbon { };
@@ -3679,6 +3681,10 @@ self: super: with self; {
flask-session = callPackage ../development/python-modules/flask-session { };
flask-session-captcha = callPackage ../development/python-modules/flask-session-captcha { };
flask-sessionstore = callPackage ../development/python-modules/flask-sessionstore { };
flask-security-too = callPackage ../development/python-modules/flask-security-too { };
flask-silk = callPackage ../development/python-modules/flask-silk { };
@@ -10246,6 +10252,7 @@ self: super: with self; {
qtconsole = callPackage ../development/python-modules/qtconsole { };
qtile = callPackage ../development/python-modules/qtile { };
qtile-extras = callPackage ../development/python-modules/qtile-extras { };
qtpy = callPackage ../development/python-modules/qtpy { };
@@ -10346,7 +10353,12 @@ self: super: with self; {
rdflib = callPackage ../development/python-modules/rdflib { };
rdkit = callPackage ../development/python-modules/rdkit { };
rdkit = callPackage ../development/python-modules/rdkit {
boost = pkgs.boost182.override {
enablePython = true;
inherit python;
};
};
re-assert = callPackage ../development/python-modules/re-assert { };
@@ -12928,6 +12940,8 @@ self: super: with self; {
wheel-inspect = callPackage ../development/python-modules/wheel-inspect { };
wheezy-captcha = callPackage ../development/python-modules/wheezy-captcha { };
wheezy-template = callPackage ../development/python-modules/wheezy-template { };
whichcraft = callPackage ../development/python-modules/whichcraft { };