Merge branch 'master' into haskell-updates

Malte Brandy 2022-01-27 17:48:01 +01:00
commit caef341934
163 changed files with 810 additions and 1650 deletions

View file

@ -192,10 +192,6 @@ meta.hydraPlatforms = [];
If set to `true`, the package is marked as "broken", meaning that it won't show up in `nix-env -qa`, and cannot be built or installed. Such packages should be removed from Nixpkgs eventually unless they are fixed.
### `updateWalker` {#var-meta-updateWalker}
If set to `true`, the package is tested to be updated correctly by the `update-walker.sh` script without additional settings. Such packages have `meta.version` set and their homepage (or the page specified by `meta.downloadPage`) contains a direct link to the package tarball.
## Licenses {#sec-meta-license}
The `meta.license` attribute should preferably contain a value from `lib.licenses` defined in [`nixpkgs/lib/licenses.nix`](https://github.com/NixOS/nixpkgs/blob/master/lib/licenses.nix), or in-place license description of the same format if the license is unlikely to be useful in another expression.
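Taken together, these attributes live in a package's `meta` block. A minimal sketch with made-up values for a hypothetical package:

    meta = with lib; {
      description = "An example tool";                  # hypothetical package
      homepage = "https://example.org";
      downloadPage = "https://example.org/releases/";   # page containing a direct link to the tarball
      license = licenses.gpl3Plus;                      # preferably a value from lib.licenses
      broken = true;                                    # hides the package from nix-env -qa and blocks builds
    };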

View file

@ -48,10 +48,6 @@
system.nixos.versionSuffix =
".${final.substring 0 8 (self.lastModifiedDate or self.lastModified or "19700101")}.${self.shortRev or "dirty"}";
system.nixos.revision = final.mkIf (self ? rev) self.rev;
# NOTE: This assumes that `nixpkgs.config` is _not_ used when
# nixpkgs.pkgs is set OR _module.args.pkgs is set.
nixpkgs.config.path = self.outPath;
}
];
});
@ -66,7 +62,7 @@
}).nixos.manual.x86_64-linux;
};
legacyPackages = forAllSystems (system: import ./. { inherit system; config.path = self.outPath; });
legacyPackages = forAllSystems (system: import ./. { inherit system; });
nixosModules = {
notDetected = import ./nixos/modules/installer/scan/not-detected.nix;

View file

@ -531,6 +531,29 @@
was added.
</para>
</listitem>
<listitem>
<para>
The configuration portion of the <literal>nix-daemon</literal>
module has been reworked and exposed as
<link xlink:href="options.html#opt-nix-settings">nix.settings</link>:
</para>
<itemizedlist spacing="compact">
<listitem>
<para>
Legacy options have been mapped to the corresponding
options under
<link xlink:href="options.html#opt-nix.settings">nix.settings</link>
but may be deprecated in the future.
</para>
</listitem>
<listitem>
<para>
<link xlink:href="options.html#opt-nix.buildMachines.publicHostKey">nix.buildMachines.publicHostKey</link>
has been added.
</para>
</listitem>
</itemizedlist>
</listitem>
<listitem>
<para>
The

View file

@ -453,7 +453,7 @@
Allow ad-hoc remote builders for building the new system. This requires
the user executing <command>nixos-rebuild</command> (usually root) to be
configured as a trusted user in the Nix daemon. This can be achieved by
using the <literal>nix.trustedUsers</literal> NixOS option. Examples
using the <literal>nix.settings.trusted-users</literal> NixOS option. Example
values for that option are described in the <literal>Remote builds</literal>
chapter of the Nix manual (e.g. <command>--builders
"ssh://bigbrother x86_64-linux"</command>). By specifying an empty string

View file

@ -183,6 +183,10 @@ In addition to numerous new and upgraded packages, this release has the followin
Similarly [virtualisation.vmVariantWithBootloader](#opt-virtualisation.vmVariantWithBootLoader) was added.
- The configuration portion of the `nix-daemon` module has been reworked and exposed as [nix.settings](options.html#opt-nix-settings):
* Legacy options have been mapped to the corresponding options under [nix.settings](options.html#opt-nix.settings) but may be deprecated in the future (see the sketch below).
* [nix.buildMachines.publicHostKey](options.html#opt-nix.buildMachines.publicHostKey) has been added.
- The `writers.writePyPy2`/`writers.writePyPy3` and corresponding `writers.writePyPy2Bin`/`writers.writePyPy3Bin` convenience functions to create executable Python 2/3 scripts using the PyPy interpreter were added.
- The `influxdb2` package was split into `influxdb2-server` and
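For the `nix.settings` rework noted above, the migration is mechanical; a before/after sketch with placeholder values:

    # Before: legacy options (still accepted, mapped automatically)
    {
      nix.maxJobs = 8;
      nix.useSandbox = true;
      nix.trustedUsers = [ "root" "@wheel" ];
    }

    # After: flat nix.conf-style keys under nix.settings
    {
      nix.settings = {
        max-jobs = 8;
        sandbox = true;
        trusted-users = [ "root" "@wheel" ];
      };
    }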

View file

@ -17,7 +17,7 @@ in
description = "Azure NixOS Test User";
openssh.authorizedKeys.keys = [ (builtins.readFile ~/.ssh/id_ed25519.pub) ];
};
nix.trustedUsers = [ username ];
nix.settings.trusted-users = [ username ];
virtualisation.azureImage.diskSize = 2500;

View file

@ -25,7 +25,7 @@ with lib;
installer.cloneConfigExtra = ''
# Let demo build as a trusted user.
# nix.trustedUsers = [ "demo" ];
# nix.settings.trusted-users = [ "demo" ];
# Mount a VirtualBox shared folder.
# This is configurable in the VirtualBox menu at

View file

@ -61,85 +61,17 @@ let
in scrubbedEval.options;
baseOptionsJSON =
let
filterIntoStore =
filter =
builtins.filterSource
(n: t:
(t == "directory" -> baseNameOf n != "tests")
&& (t == "file" -> hasSuffix ".nix" n)
);
# Figure out if Nix runs in pure evaluation mode. May return true in
# impure mode, but this is highly unlikely.
# We need to know because of https://github.com/NixOS/nix/issues/1888
# and https://github.com/NixOS/nix/issues/5868
isPureEval = builtins.getEnv "PATH" == "" && builtins.getEnv "_" == "";
# Return a nixpkgs subpath with minimal copying.
#
# The sources for the base options json derivation can come in one of
# two forms:
# - single source: a store path with all of nixpkgs, postfix with
# subpaths to access various directories. This has the benefit of
# not creating copies of these subtrees in the Nix store, but
# can cause unnecessary rebuilds if you update the Nixpkgs `pkgs`
# tree often.
# - split sources: multiple store paths with subdirectories of
# nixpkgs that exclude the bulk of the pkgs directory.
# This requires more copying and hashing during evaluation but
# requires fewer files to be copied. This method produces fewer
# unnecessary rebuilds of the base options json.
#
# Flake
#
# Flakes always put a copy of the full nixpkgs sources in the store,
# so we can use the "single source" method. This method is ideal
# for using nixpkgs as a dependency, as the base options json will be
# substitutable from cache.nixos.org.
#
# This requires that the `self.outPath` is wired into `pkgs` correctly,
# which is done for you if `pkgs` comes from the `lib.nixosSystem` or
# `legacyPackages` flake attributes.
#
# Other Nixpkgs invocation
#
# If you do not use the known-correct flake attributes, but rather
# invoke Nixpkgs yourself, set `config.path` to the correct path value,
# e.g. `import nixpkgs { config.path = nixpkgs; }`.
#
# Choosing between single or split source paths
#
# We make assumptions based on the type and contents of `pkgs.path`.
# By passing a different `config.path` to Nixpkgs, you can influence
# how your documentation cache is evaluated and rebuilt.
#
# Single source
# - If pkgs.path is a string containing a store path, the code has no
# choice but to create this store path, if it hasn't already been.
# We assume that the "single source" method is most efficient.
# - If pkgs.path is a path value containing that is a store path,
# we try to convert it to a string with context without copying.
# This occurs for example when nixpkgs was fetched and using its
# default `config.path`, which is `./.`.
# Nix currently does not allow this conversion when evaluating in
# pure mode. If the conversion is not possible, we use the
# "split source" method.
#
# Split source
# - If pkgs.path is a path value that is not a store path, we assume
# that it's unlikely for all of nixpkgs to end up in the store for
# other reasons and try to keep both the copying and rebuilds low.
pull =
if builtins.typeOf pkgs.path == "string" && isStorePath pkgs.path then
dir: "${pkgs.path}/${dir}"
else if !isPureEval && isStorePath pkgs.path then
dir: "${builtins.storePath pkgs.path}/${dir}"
else
dir: filterIntoStore "${toString pkgs.path}/${dir}";
in
pkgs.runCommand "lazy-options.json" {
libPath = pull "lib";
pkgsLibPath = pull "pkgs/pkgs-lib";
nixosPath = pull "nixos";
libPath = filter "${toString pkgs.path}/lib";
pkgsLibPath = filter "${toString pkgs.path}/pkgs/pkgs-lib";
nixosPath = filter "${toString pkgs.path}/nixos";
modules = map (p: ''"${removePrefix "${modulesPath}/" (toString p)}"'') docModules.lazy;
} ''
export NIX_STORE_DIR=$TMPDIR/store
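The `filter` helper above is a partially applied `builtins.filterSource`: it copies a directory into the store while keeping only `.nix` files and skipping `tests` directories. A standalone sketch of the same predicate (the `./lib` path is illustrative):

    let
      inherit (builtins) filterSource baseNameOf stringLength substring;
      hasSuffix = suffix: s:
        let sl = stringLength suffix; l = stringLength s;
        in l >= sl && substring (l - sl) sl s == suffix;
      onlyNix = filterSource (n: t:
        (t == "directory" -> baseNameOf n != "tests")
        && (t == "file" -> hasSuffix ".nix" n));
    in
      onlyNix ./lib   # store path containing only the *.nix files under ./lib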

View file

@ -59,8 +59,6 @@ let
inherit (cfg) config overlays localSystem crossSystem;
};
# NOTE: flake.nix assumes that nixpkgs.config is only used with ../../..
# as nixpkgs.config.path should be equivalent to ../../..
finalPkgs = if opt.pkgs.isDefined then cfg.pkgs.appendOverlays cfg.overlays else defaultPkgs;
in

View file

@ -17,7 +17,7 @@ with lib;
boot.kernelPackages = mkDefault pkgs.linuxPackages_hardened;
nix.allowedUsers = mkDefault [ "@users" ];
nix.settings.allowed-users = mkDefault [ "@users" ];
environment.memoryAllocator.provider = mkDefault "scudo";
environment.variables.SCUDO_OPTIONS = mkDefault "ZeroContents=1";

View file

@ -123,8 +123,8 @@ with lib;
boot.kernel.sysctl."user.max_user_namespaces" = 0;
assertions = [
{ assertion = config.nix.useSandbox -> config.security.allowUserNamespaces;
message = "`nix.useSandbox = true` conflicts with `!security.allowUserNamespaces`.";
{ assertion = config.nix.settings.sandbox -> config.security.allowUserNamespaces;
message = "`nix.settings.sandbox = true` conflicts with `!security.allowUserNamespaces`.";
}
];
})

View file

@ -147,7 +147,7 @@ in
concurrent = mkOption {
type = types.int;
default = 1;
example = literalExpression "config.nix.maxJobs";
example = literalExpression "config.nix.settings.max-jobs";
description = ''
Limits how many jobs globally can be run concurrently.
This is the upper limit of jobs across all defined runners.

View file

@ -67,7 +67,7 @@ in
# Trusted user allows simplified configuration and better performance
# when operating in a cluster.
nix.trustedUsers = [ config.systemd.services.hercules-ci-agent.serviceConfig.User ];
nix.settings.trusted-users = [ config.systemd.services.hercules-ci-agent.serviceConfig.User ];
services.hercules-ci-agent = {
settings = {
nixUserIsTrusted = true;

View file

@ -258,8 +258,6 @@ in
uid = config.ids.uids.hydra-www;
};
nix.trustedUsers = [ "hydra-queue-runner" ];
services.hydra.extraConfig =
''
using_frontend_proxy = 1
@ -277,16 +275,21 @@ in
environment.variables = hydraEnv;
nix.extraOptions = ''
keep-outputs = true
keep-derivations = true
nix.settings = mkMerge [
{
keep-outputs = true;
keep-derivations = true;
trusted-users = [ "hydra-queue-runner" ];
}
'' + optionalString (versionOlder (getVersion config.nix.package.out) "2.4pre") ''
# The default (`true') slows Nix down a lot since the build farm
# has so many GC roots.
gc-check-reachability = false
'';
(mkIf (versionOlder (getVersion config.nix.package.out) "2.4pre")
{
# The default (`true') slows Nix down a lot since the build farm
# has so many GC roots.
gc-check-reachability = false;
}
)
];
systemd.services.hydra-init =
{ wantedBy = [ "multi-user.target" ];

View file

@ -6,20 +6,20 @@ let
cfg = config.nix;
nix = cfg.package.out;
nixPackage = cfg.package.out;
nixVersion = getVersion nix;
isNix23 = versionAtLeast nixVersion "2.3pre";
isNixAtLeast = versionAtLeast (getVersion nixPackage);
makeNixBuildUser = nr: {
name = "nixbld${toString nr}";
name = "nixbld${toString nr}";
value = {
description = "Nix build user ${toString nr}";
/* For consistency with the setgid(2), setuid(2), and setgroups(2)
calls in `libstore/build.cc', don't add any supplementary group
here except "nixbld". */
/*
For consistency with the setgid(2), setuid(2), and setgroups(2)
calls in `libstore/build.cc', don't add any supplementary group
here except "nixbld".
*/
uid = builtins.add config.ids.uids.nixbld nr;
isSystemUser = true;
group = "nixbld";
@ -30,43 +30,82 @@ let
nixbldUsers = listToAttrs (map makeNixBuildUser (range 1 cfg.nrBuildUsers));
nixConf =
assert versionAtLeast nixVersion "2.2";
pkgs.runCommand "nix.conf" { preferLocalBuild = true; extraOptions = cfg.extraOptions; } (
''
cat > $out <<END
assert isNixAtLeast "2.2";
let
mkValueString = v:
if v == null then ""
else if isInt v then toString v
else if isBool v then boolToString v
else if isFloat v then floatToString v
else if isList v then toString v
else if isDerivation v then toString v
else if builtins.isPath v then toString v
else if isString v then v
else if isCoercibleToString v then toString v
else abort "The nix conf value: ${toPretty {} v} can not be encoded";
mkKeyValue = k: v: "${escape [ "=" ] k} = ${mkValueString v}";
mkKeyValuePairs = attrs: concatStringsSep "\n" (mapAttrsToList mkKeyValue attrs);
in
pkgs.writeTextFile {
name = "nix.conf";
text = ''
# WARNING: this file is generated from the nix.* options in
# your NixOS configuration, typically
# /etc/nixos/configuration.nix. Do not edit it!
build-users-group = nixbld
max-jobs = ${toString (cfg.maxJobs)}
cores = ${toString (cfg.buildCores)}
sandbox = ${if (builtins.isBool cfg.useSandbox) then boolToString cfg.useSandbox else cfg.useSandbox}
extra-sandbox-paths = ${toString cfg.sandboxPaths}
substituters = ${toString cfg.binaryCaches}
trusted-substituters = ${toString cfg.trustedBinaryCaches}
trusted-public-keys = ${toString cfg.binaryCachePublicKeys}
auto-optimise-store = ${boolToString cfg.autoOptimiseStore}
require-sigs = ${boolToString cfg.requireSignedBinaryCaches}
trusted-users = ${toString cfg.trustedUsers}
allowed-users = ${toString cfg.allowedUsers}
${optionalString (!cfg.distributedBuilds) ''
builders =
''}
system-features = ${toString cfg.systemFeatures}
${optionalString isNix23 ''
sandbox-fallback = false
''}
$extraOptions
END
'' + optionalString cfg.checkConfig (
if pkgs.stdenv.hostPlatform != pkgs.stdenv.buildPlatform then ''
echo "Ignore nix.checkConfig when cross-compiling"
'' else ''
echo "Checking that Nix can read nix.conf..."
ln -s $out ./nix.conf
NIX_CONF_DIR=$PWD ${cfg.package}/bin/nix show-config ${optionalString isNix23 "--no-net --option experimental-features nix-command"} >/dev/null
'')
);
${mkKeyValuePairs cfg.settings}
${cfg.extraOptions}
'';
checkPhase =
if pkgs.stdenv.hostPlatform != pkgs.stdenv.buildPlatform then ''
echo "Ignoring validation for cross-compilation"
''
else ''
echo "Validating generated nix.conf"
ln -s $out ./nix.conf
set -e
set +o pipefail
NIX_CONF_DIR=$PWD \
${cfg.package}/bin/nix show-config ${optionalString (isNixAtLeast "2.3pre") "--no-net --option experimental-features nix-command"} \
|& sed -e 's/^warning:/error:/' \
| (! grep '${if cfg.checkConfig then "^error:" else "^error: unknown setting"}')
set -o pipefail
'';
};
legacyConfMappings = {
useSandbox = "sandbox";
buildCores = "cores";
maxJobs = "max-jobs";
sandboxPaths = "extra-sandbox-paths";
binaryCaches = "substituters";
trustedBinaryCaches = "trusted-substituters";
binaryCachePublicKeys = "trusted-public-keys";
autoOptimiseStore = "auto-optimise-store";
requireSignedBinaryCaches = "require-sigs";
trustedUsers = "trusted-users";
allowedUsers = "allowed-users";
systemFeatures = "system-features";
};
semanticConfType = with types;
let
confAtom = nullOr
(oneOf [
bool
int
float
str
path
package
]) // {
description = "Nix config atom (null, bool, int, float, str, path or package)";
};
in
attrsOf (either confAtom (listOf confAtom));
in
@ -76,7 +115,7 @@ in
(mkRenamedOptionModule [ "nix" "chrootDirs" ] [ "nix" "sandboxPaths" ])
(mkRenamedOptionModule [ "nix" "daemonIONiceLevel" ] [ "nix" "daemonIOSchedPriority" ])
(mkRemovedOptionModule [ "nix" "daemonNiceLevel" ] "Consider nix.daemonCPUSchedPolicy instead.")
];
] ++ mapAttrsToList (oldConf: newConf: mkRenamedOptionModule [ "nix" oldConf ] [ "nix" "settings" newConf ]) legacyConfMappings;
###### interface
@ -102,81 +141,6 @@ in
'';
};
maxJobs = mkOption {
type = types.either types.int (types.enum ["auto"]);
default = "auto";
example = 64;
description = ''
This option defines the maximum number of jobs that Nix will try to
build in parallel. The default is auto, which means it will use all
available logical cores. It is recommended to set it to the total
number of logical cores in your system (e.g., 16 for two CPUs with 4
cores each and hyper-threading).
'';
};
autoOptimiseStore = mkOption {
type = types.bool;
default = false;
example = true;
description = ''
If set to true, Nix automatically detects files in the store that have
identical contents, and replaces them with hard links to a single copy.
This saves disk space. If set to false (the default), you can still run
nix-store --optimise to get rid of duplicate files.
'';
};
buildCores = mkOption {
type = types.int;
default = 0;
example = 64;
description = ''
This option defines the maximum number of concurrent tasks during
one build. It affects, e.g., -j option for make.
The special value 0 means that the builder should use all
available CPU cores in the system. Some builds may become
non-deterministic with this option; use with care! Packages will
only be affected if enableParallelBuilding is set for them.
'';
};
useSandbox = mkOption {
type = types.either types.bool (types.enum ["relaxed"]);
default = true;
description = "
If set, Nix will perform builds in a sandboxed environment that it
will set up automatically for each build. This prevents impurities
in builds by disallowing access to dependencies outside of the Nix
store by using network and mount namespaces in a chroot environment.
This is enabled by default even though it has a possible performance
impact due to the initial setup time of a sandbox for each build. It
doesn't affect derivation hashes, so changing this option will not
trigger a rebuild of packages.
";
};
sandboxPaths = mkOption {
type = types.listOf types.str;
default = [];
example = [ "/dev" "/proc" ];
description =
''
Directories from the host filesystem to be included
in the sandbox.
'';
};
extraOptions = mkOption {
type = types.lines;
default = "";
example = ''
keep-outputs = true
keep-derivations = true
'';
description = "Additional text appended to <filename>nix.conf</filename>.";
};
distributedBuilds = mkOption {
type = types.bool;
default = false;
@ -187,7 +151,7 @@ in
};
daemonCPUSchedPolicy = mkOption {
type = types.enum ["other" "batch" "idle"];
type = types.enum [ "other" "batch" "idle" ];
default = "other";
example = "batch";
description = ''
@ -218,7 +182,7 @@ in
};
daemonIOSchedClass = mkOption {
type = types.enum ["best-effort" "idle"];
type = types.enum [ "best-effort" "idle" ];
default = "best-effort";
example = "idle";
description = ''
@ -250,11 +214,11 @@ in
scheduling policy: With idle, priorities are not used in scheduling
decisions. best-effort supports values in the range 0 (high) to 7
(low).
'';
'';
};
buildMachines = mkOption {
type = types.listOf (types.submodule ({
type = types.listOf (types.submodule {
options = {
hostName = mkOption {
type = types.str;
@ -276,7 +240,7 @@ in
};
systems = mkOption {
type = types.listOf types.str;
default = [];
default = [ ];
example = [ "x86_64-linux" "aarch64-linux" ];
description = ''
The system types the build machine can execute derivations on.
@ -293,7 +257,7 @@ in
The username to log in as on the remote host. This user must be
able to log in and run nix commands non-interactively. It must
also be privileged to build derivations, so must be included in
<option>nix.trustedUsers</option>.
<option>nix.settings.trusted-users</option>.
'';
};
sshKey = mkOption {
@ -331,7 +295,7 @@ in
};
mandatoryFeatures = mkOption {
type = types.listOf types.str;
default = [];
default = [ ];
example = [ "big-parallel" ];
description = ''
A list of features mandatory for this builder. The builder will
@ -342,7 +306,7 @@ in
};
supportedFeatures = mkOption {
type = types.listOf types.str;
default = [];
default = [ ];
example = [ "kvm" "big-parallel" ];
description = ''
A list of features supported by this builder. The builder will
@ -350,9 +314,18 @@ in
list.
'';
};
publicHostKey = mkOption {
type = types.nullOr types.str;
default = null;
description = ''
The (base64-encoded) public host key of this builder. The field
is calculated via <command>base64 -w0 /etc/ssh/ssh_host_type_key.pub</command>.
If null, SSH will use its regular known-hosts file when connecting.
'';
};
};
}));
default = [];
});
default = [ ];
description = ''
This option lists the machines to be used if distributed builds are
enabled (see <option>nix.distributedBuilds</option>).
@ -366,7 +339,7 @@ in
envVars = mkOption {
type = types.attrs;
internal = true;
default = {};
default = { };
description = "Environment variables used by Nix.";
};
@ -391,92 +364,13 @@ in
'';
};
binaryCaches = mkOption {
type = types.listOf types.str;
description = ''
List of binary cache URLs used to obtain pre-built binaries
of Nix packages.
By default https://cache.nixos.org/ is added,
to override it use <literal>lib.mkForce []</literal>.
'';
};
trustedBinaryCaches = mkOption {
type = types.listOf types.str;
default = [ ];
example = [ "https://hydra.nixos.org/" ];
description = ''
List of binary cache URLs that non-root users can use (in
addition to those specified using
<option>nix.binaryCaches</option>) by passing
<literal>--option binary-caches</literal> to Nix commands.
'';
};
requireSignedBinaryCaches = mkOption {
type = types.bool;
default = true;
description = ''
If enabled (the default), Nix will only download binaries from binary caches if
they are cryptographically signed with any of the keys listed in
<option>nix.binaryCachePublicKeys</option>. If disabled, signatures are neither
required nor checked, so it's strongly recommended that you use only
trustworthy caches and https to prevent man-in-the-middle attacks.
'';
};
binaryCachePublicKeys = mkOption {
type = types.listOf types.str;
example = [ "hydra.nixos.org-1:CNHJZBh9K4tP3EKF6FkkgeVYsS3ohTl+oS0Qa8bezVs=" ];
description = ''
List of public keys used to sign binary caches. If
<option>nix.requireSignedBinaryCaches</option> is enabled,
then Nix will use a binary from a binary cache if and only
if it is signed by <emphasis>any</emphasis> of the keys
listed here. By default, only the key for
<uri>cache.nixos.org</uri> is included.
'';
};
trustedUsers = mkOption {
type = types.listOf types.str;
default = [ "root" ];
example = [ "root" "alice" "@wheel" ];
description = ''
A list of names of users that have additional rights when
connecting to the Nix daemon, such as the ability to specify
additional binary caches, or to import unsigned NARs. You
can also specify groups by prefixing them with
<literal>@</literal>; for instance,
<literal>@wheel</literal> means all users in the wheel
group.
'';
};
allowedUsers = mkOption {
type = types.listOf types.str;
default = [ "*" ];
example = [ "@wheel" "@builders" "alice" "bob" ];
description = ''
A list of names of users (separated by whitespace) that are
allowed to connect to the Nix daemon. As with
<option>nix.trustedUsers</option>, you can specify groups by
prefixing them with <literal>@</literal>. Also, you can
allow all users by specifying <literal>*</literal>. The
default is <literal>*</literal>. Note that trusted users are
always allowed to connect.
'';
};
nixPath = mkOption {
type = types.listOf types.str;
default =
[
"nixpkgs=/nix/var/nix/profiles/per-user/root/channels/nixos"
"nixos-config=/etc/nixos/configuration.nix"
"/nix/var/nix/profiles/per-user/root/channels"
];
default = [
"nixpkgs=/nix/var/nix/profiles/per-user/root/channels/nixos"
"nixos-config=/etc/nixos/configuration.nix"
"/nix/var/nix/profiles/per-user/root/channels"
];
description = ''
The default Nix expression search path, used by the Nix
evaluator to look up paths enclosed in angle brackets
@ -484,45 +378,44 @@ in
'';
};
systemFeatures = mkOption {
type = types.listOf types.str;
example = [ "kvm" "big-parallel" "gccarch-skylake" ];
description = ''
The supported features of a machine
'';
};
checkConfig = mkOption {
type = types.bool;
default = true;
description = ''
If enabled (the default), checks that Nix can parse the generated nix.conf.
If enabled (the default), checks for data type mismatches and that Nix
can parse the generated nix.conf.
'';
};
registry = mkOption {
type = types.attrsOf (types.submodule (
let
inputAttrs = types.attrsOf (types.oneOf [types.str types.int types.bool types.package]);
referenceAttrs = with types; attrsOf (oneOf [
str
int
bool
package
]);
in
{ config, name, ... }:
{ options = {
{
options = {
from = mkOption {
type = inputAttrs;
type = referenceAttrs;
example = { type = "indirect"; id = "nixpkgs"; };
description = "The flake reference to be rewritten.";
};
to = mkOption {
type = inputAttrs;
type = referenceAttrs;
example = { type = "github"; owner = "my-org"; repo = "my-nixpkgs"; };
description = "The flake reference to which <option>from></option> is to be rewritten.";
description = "The flake reference <option>from></option> is rewritten to.";
};
flake = mkOption {
type = types.nullOr types.attrs;
default = null;
example = literalExpression "nixpkgs";
description = ''
The flake input to which <option>from</option> is to be rewritten.
The flake input <option>from</option> is rewritten to.
'';
};
exact = mkOption {
@ -537,35 +430,232 @@ in
};
config = {
from = mkDefault { type = "indirect"; id = name; };
to = mkIf (config.flake != null)
({ type = "path";
path = config.flake.outPath;
} // lib.filterAttrs
(n: v: n == "lastModified" || n == "rev" || n == "revCount" || n == "narHash")
config.flake);
to = mkIf (config.flake != null) (mkDefault (
  {
    type = "path";
    path = config.flake.outPath;
  } // filterAttrs
    (n: _: n == "lastModified" || n == "rev" || n == "revCount" || n == "narHash")
    config.flake));
};
}
));
default = {};
default = { };
description = ''
A system-wide flake registry.
'';
};
};
extraOptions = mkOption {
type = types.lines;
default = "";
example = ''
keep-outputs = true
keep-derivations = true
'';
description = "Additional text appended to <filename>nix.conf</filename>.";
};
settings = mkOption {
type = types.submodule {
freeformType = semanticConfType;
options = {
max-jobs = mkOption {
type = types.either types.int (types.enum [ "auto" ]);
default = "auto";
example = 64;
description = ''
This option defines the maximum number of jobs that Nix will try to
build in parallel. The default is auto, which means it will use all
available logical cores. It is recommended to set it to the total
number of logical cores in your system (e.g., 16 for two CPUs with 4
cores each and hyper-threading).
'';
};
auto-optimise-store = mkOption {
type = types.bool;
default = false;
example = true;
description = ''
If set to true, Nix automatically detects files in the store that have
identical contents, and replaces them with hard links to a single copy.
This saves disk space. If set to false (the default), you can still run
nix-store --optimise to get rid of duplicate files.
'';
};
cores = mkOption {
type = types.int;
default = 0;
example = 64;
description = ''
This option defines the maximum number of concurrent tasks during
one build. It affects, e.g., -j option for make.
The special value 0 means that the builder should use all
available CPU cores in the system. Some builds may become
non-deterministic with this option; use with care! Packages will
only be affected if enableParallelBuilding is set for them.
'';
};
sandbox = mkOption {
type = types.either types.bool (types.enum [ "relaxed" ]);
default = true;
description = ''
If set, Nix will perform builds in a sandboxed environment that it
will set up automatically for each build. This prevents impurities
in builds by disallowing access to dependencies outside of the Nix
store by using network and mount namespaces in a chroot environment.
This is enabled by default even though it has a possible performance
impact due to the initial setup time of a sandbox for each build. It
doesn't affect derivation hashes, so changing this option will not
trigger a rebuild of packages.
'';
};
extra-sandbox-paths = mkOption {
type = types.listOf types.str;
default = [ ];
example = [ "/dev" "/proc" ];
description = ''
Directories from the host filesystem to be included
in the sandbox.
'';
};
substituters = mkOption {
type = types.listOf types.str;
description = ''
List of binary cache URLs used to obtain pre-built binaries
of Nix packages.
By default https://cache.nixos.org/ is added.
'';
};
trusted-substituters = mkOption {
type = types.listOf types.str;
default = [ ];
example = [ "https://hydra.nixos.org/" ];
description = ''
List of binary cache URLs that non-root users can use (in
addition to those specified using
<option>nix.settings.substituters</option>) by passing
<literal>--option binary-caches</literal> to Nix commands.
'';
};
require-sigs = mkOption {
type = types.bool;
default = true;
description = ''
If enabled (the default), Nix will only download binaries from binary caches if
they are cryptographically signed with any of the keys listed in
<option>nix.settings.trusted-public-keys</option>. If disabled, signatures are neither
required nor checked, so it's strongly recommended that you use only
trustworthy caches and https to prevent man-in-the-middle attacks.
'';
};
trusted-public-keys = mkOption {
type = types.listOf types.str;
example = [ "hydra.nixos.org-1:CNHJZBh9K4tP3EKF6FkkgeVYsS3ohTl+oS0Qa8bezVs=" ];
description = ''
List of public keys used to sign binary caches. If
<option>nix.settings.require-sigs</option> is enabled,
then Nix will use a binary from a binary cache if and only
if it is signed by <emphasis>any</emphasis> of the keys
listed here. By default, only the key for
<uri>cache.nixos.org</uri> is included.
'';
};
trusted-users = mkOption {
type = types.listOf types.str;
default = [ "root" ];
example = [ "root" "alice" "@wheel" ];
description = ''
A list of names of users that have additional rights when
connecting to the Nix daemon, such as the ability to specify
additional binary caches, or to import unsigned NARs. You
can also specify groups by prefixing them with
<literal>@</literal>; for instance,
<literal>@wheel</literal> means all users in the wheel
group.
'';
};
system-features = mkOption {
type = types.listOf types.str;
example = [ "kvm" "big-parallel" "gccarch-skylake" ];
description = ''
The set of features supported by the machine. Derivations
can express dependencies on system features through the
<literal>requiredSystemFeatures</literal> attribute.
By default, pseudo-features <literal>nixos-test</literal>, <literal>benchmark</literal>,
and <literal>big-parallel</literal> used in Nixpkgs are set; <literal>kvm</literal>
is also included if it is available.
'';
};
allowed-users = mkOption {
type = types.listOf types.str;
default = [ "*" ];
example = [ "@wheel" "@builders" "alice" "bob" ];
description = ''
A list of names of users (separated by whitespace) that are
allowed to connect to the Nix daemon. As with
<option>nix.settings.trusted-users</option>, you can specify groups by
prefixing them with <literal>@</literal>. Also, you can
allow all users by specifying <literal>*</literal>. The
default is <literal>*</literal>. Note that trusted users are
always allowed to connect.
'';
};
};
};
default = { };
example = literalExpression ''
{
use-sandbox = true;
show-trace = true;
system-features = [ "big-parallel" "kvm" "recursive-nix" ];
sandbox-paths = { "/bin/sh" = "''${pkgs.busybox-sandbox-shell.out}/bin/busybox"; };
}
'';
description = ''
Configuration for Nix, see
<link xlink:href="https://nixos.org/manual/nix/stable/#sec-conf-file"/> or
<citerefentry>
<refentrytitle>nix.conf</refentrytitle>
<manvolnum>5</manvolnum>
</citerefentry> for available options.
The value declared here will be translated directly to the key-value pairs Nix expects.
</para>
<para>
You can use <command>nix-instantiate --eval --strict '&lt;nixpkgs/nixos&gt;' -A config.nix.settings</command>
to view the current value. By default it is empty.
</para>
<para>
Nix configurations defined under <option>nix.*</option> will be translated and applied to this
option. In addition, configuration specified in <option>nix.extraOptions</option> will be appended
verbatim to the resulting config file.
'';
};
};
};
###### implementation
config = mkIf cfg.enable {
nix.binaryCachePublicKeys = [ "cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY=" ];
nix.binaryCaches = [ "https://cache.nixos.org/" ];
environment.systemPackages =
[ nix
[
nixPackage
pkgs.nix-info
]
++ optional (config.programs.bash.enableCompletion) pkgs.nix-bash-completions;
@ -579,44 +669,49 @@ in
# List of machines for distributed Nix builds in the format
# expected by build-remote.pl.
environment.etc."nix/machines" =
{ enable = cfg.buildMachines != [];
text =
concatMapStrings (machine:
"${if machine.sshUser != null then "${machine.sshUser}@" else ""}${machine.hostName} "
+ (if machine.system != null then machine.system else concatStringsSep "," machine.systems)
+ " ${if machine.sshKey != null then machine.sshKey else "-"} ${toString machine.maxJobs} "
+ toString (machine.speedFactor)
+ " "
+ concatStringsSep "," (machine.mandatoryFeatures ++ machine.supportedFeatures)
+ " "
+ concatStringsSep "," machine.mandatoryFeatures
environment.etc."nix/machines" = mkIf (cfg.buildMachines != [ ]) {
text =
concatMapStrings
(machine:
(concatStringsSep " " [
"${optionalString (machine.sshUser != null) "${machine.sshUser}@"}${machine.hostName}"
(if machine.system != null then machine.system else if machine.systems != [ ] then concatStringsSep "," machine.systems else "-")
(if machine.sshKey != null then machine.sshKey else "-")
(toString machine.maxJobs)
(toString machine.speedFactor)
(concatStringsSep "," machine.supportedFeatures)
(concatStringsSep "," machine.mandatoryFeatures)
]
++ optional (isNixAtLeast "2.4pre") (if machine.publicHostKey != null then machine.publicHostKey else "-"))
+ "\n"
) cfg.buildMachines;
};
)
cfg.buildMachines;
};
assertions =
let badMachine = m: m.system == null && m.systems == [];
in [
let badMachine = m: m.system == null && m.systems == [ ];
in
[
{
assertion = !(builtins.any badMachine cfg.buildMachines);
assertion = !(any badMachine cfg.buildMachines);
message = ''
At least one system type (via <varname>system</varname> or
<varname>systems</varname>) must be set for every build machine.
Invalid machine specifications:
'' + " " +
(builtins.concatStringsSep "\n "
(builtins.map (m: m.hostName)
(builtins.filter (badMachine) cfg.buildMachines)));
(concatStringsSep "\n "
(map (m: m.hostName)
(filter (badMachine) cfg.buildMachines)));
}
];
systemd.packages = [ nix ];
systemd.packages = [ nixPackage ];
systemd.sockets.nix-daemon.wantedBy = [ "sockets.target" ];
systemd.services.nix-daemon =
{ path = [ nix pkgs.util-linux config.programs.ssh.package ]
{
path = [ nixPackage pkgs.util-linux config.programs.ssh.package ]
++ optionals cfg.distributedBuilds [ pkgs.gzip ];
environment = cfg.envVars
@ -626,7 +721,8 @@ in
unitConfig.RequiresMountsFor = "/nix/store";
serviceConfig =
{ CPUSchedulingPolicy = cfg.daemonCPUSchedPolicy;
{
CPUSchedulingPolicy = cfg.daemonCPUSchedPolicy;
IOSchedulingClass = cfg.daemonIOSchedClass;
IOSchedulingPriority = cfg.daemonIOSchedPriority;
LimitNOFILE = 4096;
@ -636,9 +732,7 @@ in
};
# Set up the environment variables for running Nix.
environment.sessionVariables = cfg.envVars //
{ NIX_PATH = cfg.nixPath;
};
environment.sessionVariables = cfg.envVars // { NIX_PATH = cfg.nixPath; };
environment.extraInit =
''
@ -647,7 +741,7 @@ in
fi
'';
nix.nrBuildUsers = mkDefault (lib.max 32 (if cfg.maxJobs == "auto" then 0 else cfg.maxJobs));
nix.nrBuildUsers = mkDefault (max 32 (if cfg.settings.max-jobs == "auto" then 0 else cfg.settings.max-jobs));
users.users = nixbldUsers;
@ -663,14 +757,26 @@ in
fi
'';
nix.systemFeatures = mkDefault (
[ "nixos-test" "benchmark" "big-parallel" "kvm" ] ++
optionals (pkgs.hostPlatform ? gcc.arch) (
# a builder can run code for `gcc.arch` and inferior architectures
[ "gccarch-${pkgs.hostPlatform.gcc.arch}" ] ++
map (x: "gccarch-${x}") lib.systems.architectures.inferiors.${pkgs.hostPlatform.gcc.arch}
)
);
# Legacy configuration conversion.
nix.settings = mkMerge [
{
trusted-public-keys = [ "cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY=" ];
substituters = [ "https://cache.nixos.org/" ];
system-features = mkDefault (
[ "nixos-test" "benchmark" "big-parallel" "kvm" ] ++
optionals (pkgs.hostPlatform ? gcc.arch) (
# a builder can run code for `gcc.arch` and inferior architectures
[ "gccarch-${pkgs.hostPlatform.gcc.arch}" ] ++
map (x: "gccarch-${x}") systems.architectures.inferiors.${pkgs.hostPlatform.gcc.arch}
)
);
}
(mkIf (!cfg.distributedBuilds) { builders = null; })
(mkIf (isNixAtLeast "2.3pre") { sandbox-fallback = false; })
];
};
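As a rough illustration of what the generator above emits: `mkKeyValuePairs` renders the flat `cfg.settings` attrset as `key = value` lines (lists become space-separated, booleans become `true`/`false`), and `cfg.extraOptions` is appended verbatim. A sketch with placeholder values:

    # NixOS configuration fragment:
    {
      nix.settings = {
        max-jobs = "auto";
        sandbox = true;
        trusted-users = [ "root" "@wheel" ];
      };
      nix.extraOptions = ''
        keep-outputs = true
      '';
    }

    # Roughly the resulting nix.conf (after the generated warning header):
    #   max-jobs = auto
    #   sandbox = true
    #   trusted-users = root @wheel
    #   keep-outputs = true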

View file

@ -20,7 +20,7 @@ in {
write = mkOption {
type = types.bool;
default = false;
description = "Whether to enable writing to the Nix store as a remote store via SSH. Note: the sshServe user is named nix-ssh and is not a trusted-user. nix-ssh should be added to the nix.trustedUsers option in most use cases, such as allowing remote building of derivations.";
description = "Whether to enable writing to the Nix store as a remote store via SSH. Note: the sshServe user is named nix-ssh and is not a trusted-user. nix-ssh should be added to the <option>nix.settings.trusted-users</option> option in most use cases, such as allowing remote building of derivations.";
};
keys = mkOption {

View file

@ -394,7 +394,8 @@ in
# Extra UDEV rules used by Solid
services.udev.packages = [
pkgs.libmtp
# libmtp has "bin", "dev", "out" outputs. UDEV rules file is in "out".
pkgs.libmtp.out
pkgs.media-player-info
];

View file

@ -156,7 +156,7 @@ in
specialisation = mkOption {
default = {};
example = lib.literalExpression "{ fewJobsManyCores.configuration = { nix.buildCores = 0; nix.maxJobs = 1; }; }";
example = lib.literalExpression "{ fewJobsManyCores.configuration = { nix.settings = { core = 0; max-jobs = 1; }; }";
description = ''
Additional configurations to build. If
<literal>inheritParentConfig</literal> is true, the system

View file

@ -300,18 +300,15 @@ in {
interpreterSandboxPath = dirOf (dirOf interpreterReg);
} // (magics.${system} or (throw "Cannot create binfmt registration for system ${system}"));
}) cfg.emulatedSystems);
# TODO: add a nix.extraPlatforms option to NixOS!
nix.extraOptions = lib.mkIf (cfg.emulatedSystems != []) ''
extra-platforms = ${toString (cfg.emulatedSystems ++ lib.optional pkgs.stdenv.hostPlatform.isx86_64 "i686-linux")}
'';
nix.sandboxPaths = lib.mkIf (cfg.emulatedSystems != []) (
let
nix.settings = lib.mkIf (cfg.emulatedSystems != []) {
extra-platforms = cfg.emulatedSystems ++ lib.optional pkgs.stdenv.hostPlatform.isx86_64 "i686-linux";
extra-sandbox-paths = let
ruleFor = system: cfg.registrations.${system};
hasWrappedRule = lib.any (system: (ruleFor system).wrapInterpreterInShell) cfg.emulatedSystems;
in [ "/run/binfmt" ]
++ lib.optional hasWrappedRule "${pkgs.bash}"
++ (map (system: (ruleFor system).interpreterSandboxPath) cfg.emulatedSystems)
);
++ (map (system: (ruleFor system).interpreterSandboxPath) cfg.emulatedSystems);
};
environment.etc."binfmt.d/nixos.conf".source = builtins.toFile "binfmt_nixos.conf"
(lib.concatStringsSep "\n" (lib.mapAttrsToList makeBinfmtLine config.boot.binfmt.registrations));
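In practice this means enabling an emulated system now flows straight into the sandbox settings; a short sketch, with `aarch64-linux` as an illustrative target:

    { ... }:
    {
      boot.binfmt.emulatedSystems = [ "aarch64-linux" ];
      # The module above then sets, roughly:
      #   nix.settings.extra-platforms      to the emulated systems (plus i686-linux on x86_64 hosts)
      #   nix.settings.extra-sandbox-paths  to /run/binfmt and the interpreters' store paths
    }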

View file

@ -10,8 +10,8 @@ import ./make-test-python.nix ({ pkgs, lib, ... }: {
# XXX: Sandbox setup fails while trying to hardlink files from the host's
# store file system into the prepared chroot directory.
nix.useSandbox = false;
nix.binaryCaches = []; # don't try to access cache.nixos.org
nix.settings.sandbox = false;
nix.settings.substituters = []; # don't try to access cache.nixos.org
virtualisation.writableStore = true;
# Make sure we always have all the required dependencies for creating a

View file

@ -11,7 +11,7 @@ import ./make-test-python.nix ({ pkgs, ... } : {
users.users.sybil = { isNormalUser = true; group = "wheel"; };
imports = [ ../modules/profiles/hardened.nix ];
environment.memoryAllocator.provider = "graphene-hardened";
nix.useSandbox = false;
nix.settings.sandbox = false;
virtualisation.emptyDiskImages = [ 4096 ];
boot.initrd.postDeviceCommands = ''
${pkgs.dosfstools}/bin/mkfs.vfat -n EFISYS /dev/vdb

View file

@ -45,11 +45,11 @@ in makeTest {
../modules/profiles/base.nix
];
nix.binaryCaches = mkForce [ ];
nix.extraOptions = ''
hashed-mirrors =
connect-timeout = 1
'';
nix.settings = {
substituters = mkForce [];
hashed-mirrors = null;
connect-timeout = 1;
};
virtualisation.diskSize = 8 * 1024;
virtualisation.emptyDiskImages = [

View file

@ -42,7 +42,7 @@
hostName = "localhost";
systems = [ system ];
}];
binaryCaches = [];
settings.substituters = [];
};
};
}

View file

@ -334,11 +334,11 @@ let
(pkgs.grub2_efi.override { inherit zfsSupport; })
]);
nix.binaryCaches = mkForce [ ];
nix.extraOptions = ''
hashed-mirrors =
connect-timeout = 1
'';
nix.settings = {
substituters = mkForce [];
hashed-mirrors = null;
connect-timeout = 1;
};
};
};

View file

@ -111,11 +111,11 @@ import ./make-test-python.nix (
environment.etc."initiator-root-disk-closure".source = nodes.initiatorRootDisk.config.system.build.toplevel;
nix.binaryCaches = lib.mkForce [ ];
nix.extraOptions = ''
hashed-mirrors =
connect-timeout = 1
'';
nix.settings = {
substituters = lib.mkForce [ ];
hashed-mirrors = null;
connect-timeout = 1;
};
};
initiatorRootDisk = { config, pkgs, modulesPath, lib, ... }: {

View file

@ -95,11 +95,11 @@ import ./make-test-python.nix (
system.extraDependencies = [ nodes.initiatorRootDisk.config.system.build.toplevel ];
nix.binaryCaches = lib.mkForce [];
nix.extraOptions = ''
hashed-mirrors =
connect-timeout = 1
'';
nix.settings = {
substituters = lib.mkForce [];
hashed-mirrors = null;
connect-timeout = 1;
};
};
initiatorRootDisk = { config, pkgs, modulesPath, lib, ... }: {

View file

@ -23,7 +23,7 @@ let
deployer = { config, lib, nodes, pkgs, ... }: {
imports = [ ../../modules/installer/cd-dvd/channel.nix ];
environment.systemPackages = [ nixopsPkg ];
nix.binaryCaches = lib.mkForce [ ];
nix.settings.substituters = lib.mkForce [ ];
users.users.person.isNormalUser = true;
virtualisation.writableStore = true;
virtualisation.additionalPaths = [

View file

@ -16,7 +16,7 @@ in
(modulesPath + "/testing/test-instrumentation.nix")
];
virtualisation.writableStore = true;
nix.binaryCaches = lib.mkForce [ ];
nix.settings.substituters = lib.mkForce [ ];
virtualisation.graphics = false;
documentation.enable = false;
services.qemuGuest.enable = true;

View file

@ -43,11 +43,11 @@ let
# vda is a filesystem without partition table
forceInstall = true;
};
nix.binaryCaches = lib.mkForce [ ];
nix.extraOptions = ''
hashed-mirrors =
connect-timeout = 1
'';
nix.settings = {
substituters = lib.mkForce [];
hashed-mirrors = null;
connect-timeout = 1;
};
# save some memory
documentation.enable = false;
};

View file

@ -10,14 +10,14 @@ let
# If an update breaks things, one of those might have valuable info:
# https://aur.archlinux.org/packages/spotify/
# https://community.spotify.com/t5/Desktop-Linux
version = "1.1.72.439.gc253025e";
version = "1.1.77.643.g3c4c6fc6";
# To get the latest stable revision:
# curl -H 'X-Ubuntu-Series: 16' 'https://api.snapcraft.io/api/v1/snaps/details/spotify?channel=stable' | jq '.download_url,.version,.last_updated'
# To get general information:
# curl -H 'Snap-Device-Series: 16' 'https://api.snapcraft.io/v2/snaps/info/spotify' | jq '.'
# More examples of api usage:
# https://github.com/canonical-websites/snapcraft.io/blob/master/webapp/publisher/snaps/views.py
rev = "56";
rev = "57";
deps = [
alsa-lib
@ -80,7 +80,7 @@ stdenv.mkDerivation {
# https://community.spotify.com/t5/Desktop-Linux/Redistribute-Spotify-on-Linux-Distributions/td-p/1695334
src = fetchurl {
url = "https://api.snapcraft.io/api/v1/snaps/download/pOBIoZ2LrCB3rDohMxoYGnbN14EHOgD7_${rev}.snap";
sha512 = "b2bd3d49a18dfebaa4660f9c39d11d57fb80a4ef15ec7b7973e3cc07be74f74aebd2d8c66360d79fe778244c533ed02f9dfca4085f99aae0e5faae7c003ba4ef";
sha512 = "d9f8fe692db479bcce1f47c87b65c5ac6d62e16b76a0f9b2d693d82d2b9ed2c7cf370cb091ce8ecd291c47d1efdbaa897c9bffb210edd901dc3d5425995229f7";
};
nativeBuildInputs = [ makeWrapper wrapGAppsHook squashfsTools ];

View file

@ -1,5 +1,5 @@
{ lib, stdenv, fetchFromGitHub, flex, bison, pkg-config, zlib, libtiff, libpng, fftw
, cairo, readline, ffmpeg, makeWrapper, wxGTK30, netcdf, blas
, cairo, readline, ffmpeg, makeWrapper, wxGTK30, wxmac, netcdf, blas
, proj, gdal, geos, sqlite, postgresql, libmysqlclient, python3Packages, libLAS, proj-datumgrid
, zstd, pdal, wrapGAppsHook
}:
@ -16,10 +16,14 @@ stdenv.mkDerivation rec {
};
nativeBuildInputs = [ pkg-config ];
buildInputs = [ flex bison zlib proj gdal libtiff libpng fftw sqlite cairo
readline ffmpeg makeWrapper wxGTK30 netcdf geos postgresql libmysqlclient blas
libLAS proj-datumgrid zstd pdal wrapGAppsHook ]
++ (with python3Packages; [ python python-dateutil wxPython_4_1 numpy ]);
buildInputs = [ flex bison zlib proj gdal libtiff libpng fftw sqlite
readline ffmpeg makeWrapper netcdf geos postgresql libmysqlclient blas
libLAS proj-datumgrid zstd wrapGAppsHook ]
++ lib.optionals stdenv.isLinux [ cairo pdal wxGTK30 ]
++ lib.optional stdenv.isDarwin wxmac
++ (with python3Packages; [ python python-dateutil numpy ]
++ lib.optional stdenv.isDarwin wxPython_4_0
++ lib.optional stdenv.isLinux wxPython_4_1);
# On Darwin the installer tries to symlink the help files into a system
# directory
@ -33,12 +37,11 @@ stdenv.mkDerivation rec {
configureFlags = [
"--with-proj-share=${proj}/share/proj"
"--with-proj-includes=${proj.dev}/include"
"--with-proj-lib=${proj}/lib"
"--with-proj-libs=${proj}/lib"
"--without-opengl"
"--with-readline"
"--with-wxwidgets"
"--with-netcdf"
"--with-pdal"
"--with-geos"
"--with-postgres"
"--with-postgres-libs=${postgresql.lib}/lib/"
@ -51,6 +54,12 @@ stdenv.mkDerivation rec {
"--with-zstd"
"--with-fftw"
"--with-pthread"
] ++ lib.optionals stdenv.isLinux [
"--with-pdal"
] ++ lib.optionals stdenv.isDarwin [
"--without-cairo"
"--without-freetype"
"--without-x"
];
# Otherwise a very confusing "Can't load GDAL library" error

View file

@ -1,4 +1,4 @@
{ lib, stdenv, fetchFromGitHub, pkg-config, libtool
{ lib, stdenv, fetchFromGitHub, fetchpatch, pkg-config, libtool
, bzip2, zlib, libX11, libXext, libXt, fontconfig, freetype, ghostscript, libjpeg, djvulibre
, lcms2, openexr, libjxl, libpng, liblqr1, libraw, librsvg, libtiff, libxml2, openjpeg, libwebp, libheif
, ApplicationServices
@ -27,6 +27,14 @@ stdenv.mkDerivation rec {
sha256 = "0r8zmk2cfmf09l94hqzfz4aspnzn178ggdbgm7w4hr0p864cbvc3";
};
patches = [
# fix a type confusion bug introduced in 7.1.0-20 with commit 075565e93c71bcaaabf0ce70b7d1060bccdf0020
(fetchpatch {
url = "https://github.com/ImageMagick/ImageMagick/commit/62845d5672eca4446b952dd0ab2e3e0dab0309d4.patch";
sha256 = "1kni5i8b5hl69niypidm90mhir8cafi6r9i857fxdlv045h3dg4p";
})
];
outputs = [ "out" "dev" "doc" ]; # bin/ isn't really big
outputMan = "out"; # it's tiny

View file

@ -1,3 +0,0 @@
url http://spiegl.de/qiv/download/
version_link '[.]tgz$'
do_overwrite() { do_overwrite_just_version; }

View file

@ -5,14 +5,14 @@
stdenv.mkDerivation rec {
pname = "tev";
version = "1.19";
version = "1.22";
src = fetchFromGitHub {
owner = "Tom94";
repo = pname;
rev = "v${version}";
fetchSubmodules = true;
sha256 = "sha256-laP47xOND6PMA6dwTcCupcTIW+9zCaxO6rHzvDSL9JU=";
sha256 = "sha256-WLDQaN6wHnadvp0JyUzlcZVNiSbFudmmBSNYRMaE6U4=";
};
nativeBuildInputs = [ cmake wrapGAppsHook ];
@ -53,6 +53,7 @@ stdenv.mkDerivation rec {
changelog = "https://github.com/Tom94/tev/releases/tag/v${version}";
license = licenses.bsd3;
platforms = platforms.unix;
badPlatforms = [ "aarch64-linux" ]; # fails on Hydra since forever
maintainers = with maintainers; [ ];
};
}

View file

@ -19,5 +19,12 @@ mkDerivation {
kpty syntax-highlighting libmtp libssh openexr openslp
phonon qtsvg samba solid gperf
];
# org.kde.kmtpd5 DBUS service launches kiod5 binary from kio derivation, not from kio-extras
postInstall = ''
substituteInPlace $out/share/dbus-1/services/org.kde.kmtpd5.service \
--replace Exec=$out Exec=${kio}
'';
CXXFLAGS = [ "-I${ilmbase.dev}/include/OpenEXR" ];
}

View file

@ -65,7 +65,7 @@ stdenv.mkDerivation rec {
postFixup = ''
wrapProgram $out/bin/auto-multiple-choice \
''${makeWrapperArgs[@]} \
--prefix PERL5LIB : "${with perlPackages; makePerlPath [
--prefix PERL5LIB : "${with perlPackages; makeFullPerlPath [
ArchiveZip
DBDSQLite
Cairo
@ -75,6 +75,7 @@ stdenv.mkDerivation rec {
GlibObjectIntrospection
Gtk3
LocaleGettext
OpenOfficeOODoc
PerlMagick
TextCSV
XMLParser

View file

@ -2,11 +2,11 @@
buildPythonApplication rec {
pname = "gallery_dl";
version = "1.20.1";
version = "1.20.3";
src = fetchPypi {
inherit pname version;
sha256 = "a1c06625381485f82aa14a038a622d40ab9cc2c8d150dd65c66df96dbf427f62";
sha256 = "6d1d97bd08a2a0dcfb0578e759fb83c4902b395405b3e3238673f684973b0556";
};
propagatedBuildInputs = [ requests yt-dlp ];

View file

@ -1,7 +0,0 @@
url https://mupdf.com/downloads/archive/
do_overwrite(){
ensure_hash
ensure_version
set_var_value version $CURRENT_VERSION
set_var_value sha256 $CURRENT_HASH
}

View file

@ -1,43 +0,0 @@
{ stdenv, fetchurl, itstool, intltool, pkg-config
, libxml2, gnome2, atk, gtk2, glib
, mono, mono-addins, dbus-sharp-2_0, dbus-sharp-glib-2_0, gnome-sharp, gtk-sharp-2_0
, makeWrapper, lib}:
let
version = "1.15.9";
in
stdenv.mkDerivation {
pname = "tomboy";
inherit version;
src = fetchurl {
url = "https://github.com/tomboy-notes/tomboy/releases/download/${version}/tomboy-${version}.tar.xz";
sha256 = "0j5jmd079bm2fydqaic5ymbfdxna3qlx6fkp2mqhgcdr7prsbl3q";
};
nativeBuildInputs = [ pkg-config ];
buildInputs = [ itstool intltool
libxml2 gnome2.GConf atk gtk2
mono mono-addins dbus-sharp-2_0 dbus-sharp-glib-2_0 gnome-sharp gtk-sharp-2_0
makeWrapper ];
postInstall = ''
makeWrapper "${mono}/bin/mono" "$out/bin/tomboy" \
--add-flags "$out/lib/tomboy/Tomboy.exe" \
--prefix MONO_GAC_PREFIX : ${dbus-sharp-2_0} \
--prefix MONO_GAC_PREFIX : ${dbus-sharp-glib-2_0} \
--prefix MONO_GAC_PREFIX : ${gtk-sharp-2_0} \
--prefix MONO_GAC_PREFIX : ${gnome-sharp} \
--prefix MONO_GAC_PREFIX : ${mono-addins} \
--prefix LD_LIBRARY_PATH : ${lib.makeLibraryPath [ glib gtk-sharp-2_0 gtk-sharp-2_0.gtk gnome2.GConf ]}
'';
meta = with lib; {
homepage = "https://wiki.gnome.org/Apps/Tomboy";
description = "A simple note-taking application with synchronization";
platforms = platforms.linux;
license = lib.licenses.lgpl2;
maintainers = with maintainers; [ stesie ];
};
}

View file

@ -38,7 +38,6 @@ in stdenv.mkDerivation rec {
license = licenses.gpl2;
downloadPage = "https://vifm.info/downloads.shtml";
homepage = "https://vifm.info/";
updateWalker = true;
changelog = "https://github.com/vifm/vifm/blob/v${version}/ChangeLog";
};
}

View file

@ -1,9 +1,9 @@
{ lib, buildGoModule, fetchFromGitHub, fetchzip, installShellFiles }:
let
version = "0.24.1";
sha256 = "18jzf5kd06c10f45y4crvaqa5r10dhq2ashlhppzrmhigiyavxac";
manifestsSha256 = "0qbdik65irnwgw7klj5w0z00jxflm855gikpnqb9gsxd7rbw8ysk";
version = "0.25.3";
sha256 = "1j7jw6vfki67dz9lkx3f94b9hi6d2bc504yy3nfppp3hx8nwxb37";
manifestsSha256 = "1akp1i3xasfjq6zqbk7mnbkhnzmq7if7v82q6zdp2678xrg6xps5";
manifests = fetchzip {
url =
@ -23,7 +23,7 @@ in buildGoModule rec {
inherit sha256;
};
vendorSha256 = "sha256-HoAVdY+kZLpUEl3mE7obbTzAJUyt5MBPjGhs6ZDSnzU=";
vendorSha256 = "sha256-/VeJq6l3kSZ9qcYf2ypyyoXVKME+rig6aDdWDoRqNzA=";
postUnpack = ''
cp -r ${manifests} source/cmd/flux/manifests

View file

@ -87,7 +87,6 @@ python3.pkgs.buildPythonApplication rec {
license = lib.licenses.gpl3Plus;
maintainers = with lib.maintainers; [ raskin abbradar ];
downloadPage = "http://gajim.org/downloads.php";
updateWalker = true;
platforms = lib.platforms.linux;
};
}

View file

@ -38,8 +38,6 @@ stdenv.mkDerivation rec {
license = licenses.gpl2;
maintainers = with maintainers; [ pSub ];
platforms = with platforms; linux;
updateWalker = true;
downloadPage = "http://mcabber.com/files/";
downloadURLRegexp = "mcabber-[0-9.]+[.]tar[.][a-z0-9]+$";
};
}

View file

@ -3,7 +3,7 @@
} :
let
version = "0.3.3";
version = "0.3.4";
in stdenv.mkDerivation {
pname = "soapyhackrf";
@ -13,7 +13,7 @@ in stdenv.mkDerivation {
owner = "pothosware";
repo = "SoapyHackRF";
rev = "soapy-hackrf-${version}";
sha256 = "1awn89z462500gb3fjb7x61b1znkjri9n1d39bqfip1qk4s11pxc";
sha256 = "sha256-fzPYHJAPX8FkFxPXpLlUagTd/NoamRX0YnxHwkbV1nI=";
};
nativeBuildInputs = [ cmake pkg-config ];

View file

@ -1,6 +0,0 @@
url http://page.mi.fu-berlin.de/cbenzmueller/leo/download.html
version_link '[.]tgz'
version '.*_v([0-9.]+)[.][a-z0-9]+$' '\1'
do_overwrite () {
do_overwrite_just_version
}

View file

@ -1,6 +0,0 @@
target clingo.nix
attribute_name clingo
url https://github.com/potassco/clingo/releases/
ensure_choice
version '.*/v([0-9.]+)[.]tar[.].*' '\1'
minimize_overwrite

View file

@ -66,6 +66,5 @@ stdenv.mkDerivation rec {
platforms = lib.platforms.linux;
downloadPage = "http://www.ps.uni-saarland.de/~cebrown/satallax/downloads.php";
homepage = "http://www.ps.uni-saarland.de/~cebrown/satallax/index.php";
updateWalker = true;
};
}

View file

@ -82,6 +82,5 @@ stdenv.mkDerivation rec {
license = licenses.gpl2Plus;
maintainers = with maintainers; [ ertes AndersonTorres ] ++ teams.sage.members;
platforms = platforms.linux ++ platforms.darwin;
updateWalker = true;
};
}

View file

@ -31,6 +31,5 @@ stdenv.mkDerivation rec {
maintainers = [lib.maintainers.raskin];
platforms = lib.platforms.unix;
homepage = "http://www.mathe2.uni-bayreuth.de/stoll/programs/";
updateWalker = true;
};
}

View file

@ -1,5 +0,0 @@
url https://sourceforge.net/projects/golly/files/golly/
version_link '[-][0-9.]+/$'
SF_version_tarball 'src'
SF_redirect
minimize_overwrite

View file

@ -92,7 +92,6 @@ let
downloadPage = "https://www.mercurial-scm.org/release/";
license = licenses.gpl2Plus;
maintainers = with maintainers; [ eelco lukegb pacien ];
updateWalker = true;
platforms = platforms.unix;
};
};

View file

@ -1,8 +0,0 @@
url http://www.monotone.ca/downloads.php
do_overwrite(){
ensure_version
ensure_hash
set_var_value version $CURRENT_VERSION
set_var_value sha256 $CURRENT_HASH
}

View file

@ -6,11 +6,11 @@
python3Packages.buildPythonApplication rec {
pname = "streamlink";
version = "3.1.0";
version = "3.1.1";
src = python3Packages.fetchPypi {
inherit pname version;
sha256 = "sha256-T2M0vg+BYIdr21CcdrrBf7bVVlZU+tKJWG2xfBMoMlg=";
sha256 = "sha256-hVzTHpAOOuHVMoo3Ejv//irsUBoddLzdEvDSonWAYOQ=";
};
checkInputs = with python3Packages; [

View file

@ -28,7 +28,7 @@ nixosTest {
virtualisation.writableStore = true;
# Test runs without network, so we don't substitute and prepare our deps
nix.binaryCaches = lib.mkForce [];
nix.settings.substituters = lib.mkForce [];
environment.etc."pre-built-paths".source = writeText "pre-built-paths" (
builtins.toJSON [hello figlet stdenvNoCC]
);

View file

@ -1,19 +0,0 @@
a :
a.stdenv.mkDerivation {
buildCommand = ''
mkdir -p "$out/attributes"
'' + (a.lib.concatStrings (map
(n: ''
ln -s "${a.writeTextFile {name=n; text=builtins.getAttr n a.theAttrSet;}}" $out/attributes/${n};
'')
(builtins.attrNames a.theAttrSet)
));
name = "attribute-set";
meta = {
description = "Contents of an attribute set";
maintainers = [
a.lib.maintainers.raskin
];
};
}

View file

@ -1,24 +0,0 @@
#! /bin/sh
[ -z "$1" ] && {
echo "Use $0 expression-basename repo-url branch-name package-base-name"
echo "Like:"
echo "$0 default http://git.example.com/repo origin/master hello"
exit 1;
} >&2
own_dir="$(cd "$(dirname "$0")"; sh -c pwd)"
cp "$own_dir/../builder-defs/template-bdp-uud.nix" "$1.nix"
sed -e "s@src-for-default.nix@src-for-$1.nix@g;
s@fetchUrlFromSrcInfo@fetchGitFromSrcInfo@g" -i "$1.nix"
echo '{}' > "src-for-$1.nix"
cat << EOF > src-info-for-$1.nix
{
repoUrl = "$2";
rev = "$3";
baseName = "$4";
method = "fetchgit";
}
EOF

View file

@ -1,20 +0,0 @@
#! /bin/sh
[ -z "$1" ] && {
echo "Use $0 expression-basename download-page package-base-name"
echo "Like:"
echo "$0 default http://example.com/downloads hello"
exit 1;
} >&2
own_dir="$(cd "$(dirname "$0")"; sh -c pwd)"
cp "$own_dir/../builder-defs/template-auto-callable.nix" "$1.nix"
sed -e "s@src-for-default.nix@src-for-$1.nix@g" -i "$1.nix"
echo '{}' > "src-for-$1.nix"
cat << EOF > src-info-for-$1.nix
{
downloadPage = "$2";
baseName = "$3";
}
EOF

View file

@ -1,29 +0,0 @@
Next to file.nix we get src-for-file.nix
src-for-file.nix should evaluate to a flat attribute set with
string values.
It is supposed to be imported in the main expression.
In the ideal world it can export url, hash, version.
src-for-file.nix generation is directed by
src-info-for-file.nix.
Attributes:
src-info-for-file.nix:
downloadPage
rev (for repos)
baseName (default = unnamed-package)
sourceRegexp (default = '.*[.]tar[.].*')
choiceCommand (default = 'head -1')
versionExtractorSedScript (default = 's/.*-([0-9.]+)[.].*/\1/')
versionReferenceCreator (default = 's/-([0-9.]+)[.]/-${version}./')
mirrorSedScript (default = none)
src-for-file.nix:
advertisedUrl (its match is the check for update presence)
url
hash
version
name

View file

@ -1,14 +0,0 @@
# sed scripts
#http://sourceforge.net/projects/webdruid/files/webdruid/0.6.0-alpha5/webdruid-0.6.0-alpha5.tar.gz/download
#http://downloads.sourceforge.net/webdruid/files/webdruid/0.6.0-alpha5/webdruid-0.6.0-alpha5.tar.gz
skipRedirectSF='s@sourceforge.net/projects@downloads.sourceforge.net/project@; s@/files@@; s@/download$@@;'
extractReleaseSF='s@.*/([^/]+)/[^/]+@\1@'
extractVersionSF='s@.*/[^/0-9]*([0-9].*)[.](tar|tgz|tbz2|zip).*@\1@'
apacheMirror='s@http://www.apache.org/dist/@mirror://apache/@'
skipRedirectApache='s@/dyn/closer.cgi[?]path=@/dist@'
replaceAllVersionOccurences() {
echo s/"$version"/\${version}/g
}
dashDelimitedVersion='s/.*-([0-9.]+)-.*/\1/'

View file

@ -1,13 +0,0 @@
{
a=1;
b="text";
c=''
text
'';
d=''
Multi-line text with special characters -
like \ (backslash) and ''${} (dollar +
curly braces) and $ (dollar) and ' (quote)
and " (double quote).
'';
}

View file

@ -1,182 +0,0 @@
#! /bin/sh
set -x
own_dir="$(cd "$(dirname "$0")"; sh -c pwd)"
source "$own_dir/snippets.sh"
[ -z "$1" ] && {
echo "Specify main expression filename."
exit 1;
}
main_file="$1"
main_dir="$(cd "$(dirname "$main_file")" ; sh -c pwd)"
file_name="$(basename "$main_file")"
defs_file="$main_dir"/src-info-for-"$file_name"
src_file="$main_dir"/src-for-"$file_name"
# OK, [vcs] revert is always possible
new_src_file="$main_dir"/src-for-"$file_name"
forcedUrl="$2"
defs_dir="$("$own_dir"/attrset-to-dir.sh "$defs_file")"
src_defs_dir="$("$own_dir"/attrset-to-dir.sh "$src_file")"
getAttr () {
file="$defs_dir"/"$1"
data="$( ( [ -f "$file" ] && cat "$file" ) || echo "$2" )"
echo "attribute $1 obtained as: [[$data]]" >&2
echo "$data"
}
method="$(getAttr method fetchurl)"
baseName="$(getAttr baseName 'unnamed-package')"
commonPrefetchVars=" version name hash"
prefetchClause=""
[ fetchSFdirs = "$method" ] && {
if [ -z "$forcedUrl" ]; then
freshUrl="$("$own_dir"/urls-from-page.sh "$(getAttr downloadPage)" |
eval "egrep '$(getAttr sourceRegexp '[-][0-9.]+/$')'" |
eval "egrep -v '$(getAttr blacklistRegexp '^$')'" |
eval "$(getAttr choiceCommand 'head -n 1')" |
eval "$(getAttr versionToFileCommand "sed -re 's@/([^/]*-[0-9.]+)/@/\1/\1$(getAttr fileSuffix .tar.gz)@'")"
)"
if ! egrep ':' <<< "$freshUrl" ; then
freshUrl="$(dirname "$(getAttr downloadPage).")/$freshUrl"
fi
echo "Found download link: $freshUrl" >&2
else
freshUrl="$forcedUrl"
fi
freshUrl="$(echo "$freshUrl" | sed -re "$skipRedirectSF")"
echo "Sourceforge-corrected URL: $freshUrl" >&2
version="$(echo "$freshUrl" |
sed -re "$(getAttr versionExtractorSedScript "$extractVersionSF")")"
baseName="$(getAttr baseName "$(echo "$freshUrl" | sed -re 's@.*/project/([^/]+)/.*@\1@')")"
url="$freshUrl"
name="$baseName-$version"
advertisedUrl="$freshUrl"
if [ x"$freshUrl" = x"$(cat "$src_defs_dir"/advertisedUrl)" ]; then
echo "Source link not changed" >&2
exit
fi
hash=$(nix-prefetch-url "$freshUrl")
prefetchVars="url advertisedUrl";
}
[ fetchSF = "$method" ] && {
if [ -z "$forcedUrl" ]; then
freshUrl="$("$own_dir"/urls-from-page.sh "$(getAttr downloadPage)" |
eval "egrep \"$(getAttr sourceRegexp '.*[.]tar[.].*|.*[.]tgz$|.*[.]tbz2$')\"" |
eval "egrep -v \"$(getAttr blacklistRegexp '^$')\"" |
eval "$(getAttr choiceCommand 'head -1')")"
if ! egrep ':' <<< "$freshUrl" ; then
freshUrl="$(dirname "$(getAttr downloadPage).")/$freshUrl"
fi
echo "Found download link: $freshUrl" >&2
else
freshUrl="$forcedUrl"
fi
freshUrl="$(echo "$freshUrl" | sed -re "$skipRedirectSF")"
echo "Sourceforge-corrected URL: $freshUrl" >&2
version="$(echo "$freshUrl" |
sed -re "$(getAttr versionExtractorSedScript "$extractVersionSF")")"
baseName="$(getAttr baseName "$(echo "$freshUrl" | sed -re 's@.*/project/([^/]+)/.*@\1@')")"
url="$freshUrl"
name="$baseName-$version"
advertisedUrl="$freshUrl"
if [ x"$freshUrl" = x"$(cat "$src_defs_dir"/advertisedUrl)" ]; then
echo "Source link not changed" >&2
exit
fi
hash=$(nix-prefetch-url "$freshUrl")
prefetchVars="url advertisedUrl";
}
[ fetchurl = "$method" ] && {
if [ -z "$forcedUrl" ] ; then
freshUrl="$("$own_dir"/urls-from-page.sh "$(getAttr downloadPage)" |
eval "egrep \"$(getAttr sourceRegexp '.*[.]tar[.].*|.*[.]tgz$|.*[.]tbz2$')\"" |
eval "egrep -v \"$(getAttr blacklistRegexp '^$')\"" |
eval "$(getAttr choiceCommand 'head -1')")"
if ! egrep ':' <<< "$freshUrl" ; then
freshUrl="$(dirname "$(getAttr downloadPage).")/$freshUrl"
fi
echo "Found download link: $freshUrl" >&2
else
freshUrl="$forcedUrl"
fi
version="$(echo "$freshUrl" |
eval "sed -re \"$(getAttr versionExtractorSedScript \
's/.*-([0-9.]+)[.].*/\1/')\"")"
mirrorUrl="$(echo "$freshUrl" |
eval "sed -r -e \"$(getAttr versionReferenceCreator \
's/-'"${version}"'[.]/-\${version}./')\"" |
eval "sed -r -e \"$(getAttr mirrorSedScript)\"")"
url="$mirrorUrl"
name="$baseName-$version"
advertisedUrl="$freshUrl"
url="$mirrorUrl"
if [ x"$freshUrl" = x"$(cat "$src_defs_dir"/advertisedUrl)" ]; then
echo "Source link not changed" >&2
exit
fi
hash=$(nix-prefetch-url "$freshUrl")
prefetchVars="url advertisedUrl";
}
[ "fetchgit" = "$method" ] && {
repoUrl="$(getAttr repoUrl)"
export NIX_PREFETCH_GIT_CHECKOUT_HOOK="
cat .git/HEAD
"
export NIX_HASH_ALGO=sha256
rev="$(getAttr rev '')";
rev_and_hash="$("$own_dir"/../fetchgit/nix-prefetch-git "$repoUrl" "$rev" | tee /dev/stderr | tail -2)"
rev="$(echo "$rev_and_hash" | head -1)"
url="$repoUrl";
hash="$(echo "$rev_and_hash" | tail -1)"
version="$rev"
name="$baseName-$version"
prefetchVars="rev url";
}
prefetchAssignments="";
for i in $commonPrefetchVars $prefetchVars; do
prefetchAssignments="$prefetchAssignments $i=\"$(eval echo \"\$$i\")\";$(echo -e '\n ')"
done;
extraAssignments=""
for i in $(getAttr extraVars ''); do
eval "$(getAttr "eval_$i" 'i=""')"
extraAssignments="$extraAssignments $i=\"$(eval echo \"\$$i\")\";$(echo -e '\n ')"
done
cat << EOF > "$new_src_file"
rec {
$prefetchAssignments
$extraAssignments
}
EOF

View file

@ -1,20 +0,0 @@
SF_redirect () {
redirect 99
process 'http://[a-z]+[.]dl[.]sourceforge[.]net/' 'mirror://sourceforge/'
process '[?].*' ''
}
SF_version_dir () {
version_link 'http://sourceforge.net/.+/'"$1"'[0-9.]+/$'
}
SF_version_tarball () {
version_link "${1:-[.]tar[.]}.*/download\$"
}
GH_latest () {
prefetch_command_rel ../fetchgit/nix-prefetch-git
revision "$("$(dirname "$0")/urls-from-page.sh" "$CURRENT_URL/commits" | grep /commit/ | head -n 1 | xargs basename )"
version '.*' "git-$(date +%Y-%m-%d)"
NEED_TO_CHOOSE_URL=
}

View file

@ -1,320 +0,0 @@
#! /bin/sh
own_dir="$(cd "$(dirname "$0")"; pwd)"
URL_WAS_SET=
DL_URL_RE=
CURRENT_URL=
CURRENT_REV=
PREFETCH_COMMAND=
NEED_TO_CHOOSE_URL=1
url () {
URL_WAS_SET=1
CURRENT_URL="$1"
}
dl_url_re () {
DL_URL_RE="$1"
}
version_unpack () {
sed -re '
s/[.]/ /g;
s@/@ / @g
s/-(rc|pre)/ -1 \1 /g;
s/-(gamma)/ -2 \1 /g;
s/-(beta)/ -3 \1 /g;
s/-(alpha)/ -4 \1 /g;
s/[-]/ - /g;
'
}
version_repack () {
sed -re '
s/ - /-/g;
s/ -[0-9]+ ([a-z]+) /-\1/g;
s@ / @/@g
s/ /./g;
'
}
version_sort () {
version_unpack |
sort -t ' ' -n $(for i in $(seq 30); do echo " -k${i}n" ; done) | tac |
version_repack
}
position_choice () {
head -n "${1:-1}" | tail -n "${2:-1}"
}
matching_links () {
"$own_dir"/urls-from-page.sh "$CURRENT_URL" | grep -E "$1"
}
link () {
CURRENT_URL="$(matching_links "$1" | position_choice "$2" "$3")"
unset NEED_TO_CHOOSE_URL
echo "Linked by: $*"
echo "URL: $CURRENT_URL" >&2
}
version_link () {
CURRENT_URL="$(matching_links "$1" | version_sort | position_choice "$2" "$3")"
unset NEED_TO_CHOOSE_URL
echo "Linked version by: $*"
echo "URL: $CURRENT_URL" >&2
}
redirect () {
CURRENT_URL="$(curl -I -L --max-redirs "${1:-99}" "$CURRENT_URL" |
grep -E '^[Ll]ocation: ' | position_choice "${2:-999999}" "$3" |
sed -e 's/^[Ll]ocation: //; s/\r//')"
echo "Redirected: $*"
echo "URL: $CURRENT_URL" >&2
}
replace () {
sed -re "s $1 $2 g"
}
process () {
CURRENT_URL="$(echo "$CURRENT_URL" | replace "$1" "$2")"
echo "Processed: $*"
echo "URL: $CURRENT_URL" >&2
}
version () {
CURRENT_VERSION="$(echo "$CURRENT_URL" | replace "$1" "$2")"
echo "Version: $CURRENT_VERSION" >&2
}
ensure_version () {
echo "Ensuring version. CURRENT_VERSION: $CURRENT_VERSION" >&2
[ -z "$CURRENT_VERSION" ] && version '.*-([0-9.]+)[-._].*' '\1'
}
ensure_target () {
echo "Ensuring target. CURRENT_TARGET: $CURRENT_TARGET" >&2
[ -z "$CURRENT_TARGET" ] && target "$(basename "$CONFIG_NAME" .upstream).nix"
}
ensure_name () {
echo "Ensuring name. CURRENT_NAME: $CURRENT_NAME" >&2
[ -z "$CURRENT_NAME" ] && name "$(basename "$CONFIG_DIR")"
echo "Resulting name: $CURRENT_NAME"
}
ensure_attribute_name () {
echo "Ensuring attribute name. CURRENT_ATTRIBUTE_NAME: $CURRENT_ATTRIBUTE_NAME" >&2
ensure_name
[ -z "$CURRENT_ATTRIBUTE_NAME" ] && attribute_name "$CURRENT_NAME"
echo "Resulting attribute name: $CURRENT_ATTRIBUTE_NAME"
}
ensure_url () {
echo "Ensuring starting URL. CURRENT_URL: $CURRENT_URL" >&2
ensure_attribute_name
[ -z "$CURRENT_URL" ] && CURRENT_URL="$(retrieve_meta downloadPage)"
[ -z "$CURRENT_URL" ] && CURRENT_URL="$(retrieve_meta downloadpage)"
[ -z "$CURRENT_URL" ] && CURRENT_URL="$(retrieve_meta homepage)"
echo "Resulting URL: $CURRENT_URL"
}
ensure_choice () {
echo "Ensuring that choice is made." >&2
echo "NEED_TO_CHOOSE_URL: [$NEED_TO_CHOOSE_URL]." >&2
echo "CURRENT_URL: $CURRENT_URL" >&2
[ -z "$URL_WAS_SET" ] && [ -z "$CURRENT_URL" ] && ensure_url
[ -n "$NEED_TO_CHOOSE_URL" ] && {
version_link "${DL_URL_RE:-[.]tar[.]([^./])+\$}"
unset NEED_TO_CHOOSE_URL
}
[ -z "$CURRENT_URL" ] && {
echo "Error: empty CURRENT_URL"
echo "Error: empty CURRENT_URL" >&2
exit 1
}
}
revision () {
CURRENT_REV="$1"
echo "CURRENT_REV: $CURRENT_REV"
}
prefetch_command () {
PREFETCH_COMMAND="$1"
}
prefetch_command_rel () {
PREFETCH_COMMAND="$(dirname "$0")/$1"
}
ensure_hash () {
echo "Ensuring hash. CURRENT_HASH: $CURRENT_HASH" >&2
[ -z "$CURRENT_HASH" ] && hash
}
hash () {
CURRENT_HASH="$(${PREFETCH_COMMAND:-nix-prefetch-url} "$CURRENT_URL" $CURRENT_REV)"
echo "CURRENT_HASH: $CURRENT_HASH" >&2
}
name () {
CURRENT_NAME="$1"
echo "CURRENT_NAME: $CURRENT_NAME" >&2
}
attribute_name () {
CURRENT_ATTRIBUTE_NAME="$1"
echo "CURRENT_ATTRIBUTE_NAME: $CURRENT_ATTRIBUTE_NAME" >&2
}
retrieve_meta () {
nix-instantiate --eval-only '<nixpkgs>' -A "$CURRENT_ATTRIBUTE_NAME".meta."$1" | xargs
}
retrieve_version () {
PACKAGED_VERSION="$(retrieve_meta version)"
}
ensure_dl_url_re () {
echo "Ensuring DL_URL_RE. DL_URL_RE: $DL_URL_RE" >&2
[ -z "$DL_URL_RE" ] && dl_url_re "$(retrieve_meta downloadURLRegexp)"
echo "DL_URL_RE: $DL_URL_RE" >&2
}
directory_of () {
cd "$(dirname "$1")"; pwd
}
full_path () {
echo "$(directory_of "$1")/$(basename "$1")"
}
target () {
CURRENT_TARGET="$1"
{ [ "$CURRENT_TARGET" = "${CURRENT_TARGET#/}" ] && CURRENT_TARGET="$CONFIG_DIR/$CURRENT_TARGET"; }
echo "Target set to: $CURRENT_TARGET"
}
marker () {
BEGIN_EXPRESSION="$1"
}
update_found () {
echo "Compare: $CURRENT_VERSION vs $PACKAGED_VERSION"
[ "$CURRENT_VERSION" != "$PACKAGED_VERSION" ]
}
do_write_expression () {
echo "${1}rec {"
echo "${1} baseName=\"$CURRENT_NAME\";"
echo "${1} version=\"$CURRENT_VERSION\";"
echo "${1} name=\"\${baseName}-\${version}\";"
echo "${1} hash=\"$CURRENT_HASH\";"
echo "${1} url=\"$CURRENT_URL\";"
[ -n "$CURRENT_REV" ] && echo "${1} rev=\"$CURRENT_REV\";"
echo "${1} sha256=\"$CURRENT_HASH\";"
echo "$2"
}
line_position () {
file="$1"
regexp="$2"
count="${3:-1}"
grep -E "$regexp" -m "$count" -B 999999 "$file" | wc -l
}
replace_once () {
file="$1"
regexp="$2"
replacement="$3"
instance="${4:-1}"
echo "Replacing once:"
echo "file: [[$file]]"
echo "regexp: [[$regexp]]"
echo "replacement: [[$replacement]]"
echo "instance: [[$instance]]"
position="$(line_position "$file" "$regexp" "$instance")"
sed -re "${position}s $regexp $replacement " -i "$file"
}
set_var_value () {
var="${1}"
value="${2}"
instance="${3:-1}"
file="${4:-$CURRENT_TARGET}"
no_quotes="${5:-0}"
quote='"'
let "$no_quotes" && quote=""
replace_once "$file" "${var} *= *.*" "${var} = ${quote}${value}${quote};" "$instance"
}
do_regenerate () {
BEFORE="$(cat "$1" | grep -F "$BEGIN_EXPRESSION" -B 999999;)"
AFTER_EXPANDED="$(cat "$1" | grep -F "$BEGIN_EXPRESSION" -A 999999 | grep -E '^ *[}] *; *$' -A 999999;)"
AFTER="$(echo "$AFTER_EXPANDED" | tail -n +2)"
CLOSE_BRACE="$(echo "$AFTER_EXPANDED" | head -n 1)"
SPACING="$(echo "$CLOSE_BRACE" | sed -re 's/[^ ].*//')"
echo "$BEFORE"
do_write_expression "$SPACING" "$CLOSE_BRACE"
echo "$AFTER"
}
do_overwrite () {
ensure_hash
do_regenerate "$1" > "$1.new.tmp"
mv "$1.new.tmp" "$1"
}
do_overwrite_just_version () {
ensure_hash
set_var_value version $CURRENT_VERSION
set_var_value sha256 $CURRENT_HASH
}
minimize_overwrite() {
do_overwrite(){
do_overwrite_just_version
}
}
process_config () {
CONFIG_DIR="$(directory_of "$1")"
CONFIG_NAME="$(basename "$1")"
BEGIN_EXPRESSION='# Generated upstream information';
if [ -f "$CONFIG_DIR/$CONFIG_NAME" ] &&
[ "${CONFIG_NAME}" = "${CONFIG_NAME%.nix}" ]; then
source "$CONFIG_DIR/$CONFIG_NAME"
else
CONFIG_NAME="${CONFIG_NAME%.nix}"
ensure_attribute_name
[ -n "$(retrieve_meta updateWalker)" ] ||
[ -n "$FORCE_UPDATE_WALKER" ] || {
echo "Error: package not marked as safe for update-walker" >&2
echo "Set FORCE_UPDATE_WALKER=1 to override" >&2
exit 1;
}
[ -z "$(retrieve_meta fullRegenerate)" ] && eval "
minimize_overwrite
"
fi
ensure_attribute_name
retrieve_version
ensure_dl_url_re
ensure_choice
ensure_version
ensure_target
update_found && do_overwrite "$CURRENT_TARGET"
}
source "$own_dir/update-walker-service-specific.sh"
process_config "$1"

View file

@ -1,118 +0,0 @@
update-walker is an imperative semiautomated update helper.
It runs the X.upstream file to find the freshest version of the package in
the specified upstream source and updates the corresponding X.nix file.
The simplest available commands:
url: set the upstream source list URL equal to $1; the default is
meta.downloadPage with meta.homepage fallback
dl_url_re: set the regular expression used to select download links to $1; the
default is meta.downloadURLRegexp or '[.]tar[.]([^./])+\$' if it is not set
target: specify target expression; default is to replace .upstream extension
with .nix extension
name: specify the derivation name; default is the basename of the dirname
of the .upstream file
attribute_name: specify the attribute name to evaluate for getting the current
version from meta.version; default is to use the derivation name
minimize_overwrite: set config options that mean that only version= and
sha256= have to be replaced; the default is to regenerate a full upstream
description block with url, name, version, hash etc.
A lot of packages can be updated in a pseudo-declarative style using only
the commands from the previous paragraph.
Some packages do not need any non-default settings; in these cases just setting
meta.updateWalker to true is enough, and you can run update-walker directly on the
.nix file afterwards. In this case minimize_overwrite is implied unless
meta.fullRegenerate is set.
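To make the difference concrete, here is a minimal sketch of both modes inside a
target expression (the srcData binding, package, URL and hashes are invented
placeholders; the attribute names mirror the do_write_expression function shown
above). The default do_overwrite regenerates the whole '# Generated upstream
information' block:
  # "srcData" is a hypothetical binding; all values are placeholders
  srcData = # Generated upstream information
  rec {
    baseName="foo";
    version="1.2.3";
    name="${baseName}-${version}";
    hash="0000000000000000000000000000000000000000000000000000";
    url="https://example.org/foo/foo-1.2.3.tar.gz";
    sha256="0000000000000000000000000000000000000000000000000000";
  };
whereas with minimize_overwrite no such block is generated; only the existing
version and sha256 assignments in the target expression are rewritten in place
(via set_var_value):
  version = "1.2.3";
  sha256 = "0000000000000000000000000000000000000000000000000000";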
For packages that require more fine-grained control than the described options
allow, you need to take into account the default control flow of the tool.
First, the definitions from the update-walker script and additional definitions
from update-walker-service-specific.sh are loaded. Then the config is executed
as a shell script. Some of the commands it can use do remember whether they
have been used. Afterwards the following steps happen:
attribute_name is set to name unless it has already been set
meta.version is read from the NixPkgs package called attribute_name
download URL regexp is set to the default unless it has already been set in the
updater script
the download page URL gets set to the default value unless it has been set
previously
if the action of getting the download page and choosing the freshest link by
version has not yet been taken, it happens
if the version has not yet been extracted from the URL, it gets extracted
the target nix expression to update gets set to the default value unless it has
been set explicitly
if the URL version is fresher than the packaged version, the new file gets
downloaded and its hash is calculated
the do_overwrite function is called; the default calculates a big upstream data
block and puts it after the '# Generated upstream information' marker (the
marker can be changed by the marker command)
If the update needs some special logic, it is put into the updater script and
the corresponding steps are skipped because the needed action has already been
performed.
For example:
minimize_overwrite is exactly the same as
do_overwrite() { do_overwrite_just_version; }
redefinition. You can do a more complex do_overwrite redefinition, if needed.
It can probably use ensure_hash to download the source and calculate the hash
and set_var_value.
set_var_value alters the $3-th instance of assigning the $1 name in the
expression to the value $2. $3 defaults to 1. It can modify $4 instead of the
current target, it can put the value without quotes if $5 is 1.
Typical steps include:
ensure_choice: download the current URL and find the freshest version link on the
page; it is now the new URL
ensure_hash: download current URL and calculate the source package hash
ensure_version: extract version from the URL
SF_redirect: replace the current URL with a SourceForge.net mirror:// URL
SF_version_dir: assume SourceForge.net layout and choose the freshest
version-named subdirectory in the file catalog; you can optionally specify $1
as a directory name regexp (digits and periods will be required after it)
SF_version_tarball: assume SourceForge.net layout and choose the freshest
tarball download link
version: apply replacement of $1 with $2 (extended regexp format) to extract
the version from URL
version_link: choose the freshest versioned link, $1 is the regexp of
acceptable links

View file

@ -18,13 +18,13 @@
stdenv.mkDerivation rec {
pname = "switchboard-plug-network";
version = "2.4.1";
version = "2.4.2";
src = fetchFromGitHub {
owner = "elementary";
repo = pname;
rev = version;
sha256 = "0nqihsbrpjw4nx1c50g854bqybniw38adi78vzg8nyl6ikj2r0z4";
sha256 = "sha256-CdSX4p98HQNC0VF5Ae/ZnDqm000+9KJ6JhQWhSHC4CI=";
};
patches = [

View file

@ -1,7 +1,6 @@
{ lib
, stdenv
, fetchFromGitHub
, fetchpatch
, nix-update-script
, pkg-config
, meson
@ -20,24 +19,15 @@
stdenv.mkDerivation rec {
pname = "elementary-capnet-assist";
version = "2.4.0";
version = "2.4.1";
src = fetchFromGitHub {
owner = "elementary";
repo = "capnet-assist";
rev = version;
sha256 = "sha256-UdkS+w61c8z2TCJyG7YsDb0n0b2LOpFyaHzMbdCJsZI=";
sha256 = "sha256-8hhp37EBzZxEVvPaRw9PohjaPWKQZ/AfqqvwLxQCBKk=";
};
patches = [
# Fix build with meson 0.61
# https://github.com/elementary/capnet-assist/pull/76
(fetchpatch {
url = "https://github.com/elementary/capnet-assist/commit/0e77bf8023ba1b35e3a5badb72c246cabf6552b9.patch";
sha256 = "sha256-B/KEs/TCxR+i3uQSRtWxTi2+cu0n6QLcfKCbMCvSsvs=";
})
];
nativeBuildInputs = [
desktop-file-utils
meson

View file

@ -17,13 +17,13 @@
stdenv.mkDerivation rec {
pname = "xdg-desktop-portal-pantheon";
version = "1.0.1";
version = "1.1.0";
src = fetchFromGitHub {
owner = "elementary";
repo = "portals";
rev = version;
sha256 = "sha256-8gBMjCMEzrFmKHhkXsgcIESC93EOT0ADkRUIJMmerjw=";
sha256 = "sha256-YICNOeNrpO2tJFyULjQEhZQCrrMyQau59EC7c5K9q40=";
};
nativeBuildInputs = [

View file

@ -1,6 +0,0 @@
url https://sourceforge.net/projects/freepascal/files/Source/
SF_version_dir
version_link 'fpcbuild-[0-9.]+[.]tar[.]gz/download$'
SF_redirect
version '.*-([0-9.]+)[.]tar[.]gz' '\1'
do_overwrite () { do_overwrite_just_version; }

View file

@ -9,7 +9,7 @@ in
inherit mkGraal;
graalvm11-ce = mkGraal rec {
version = "21.3.0";
version = "22.0.0.2";
javaVersion = "11";
platforms = [ "x86_64-linux" "aarch64-linux" "x86_64-darwin" ];
};
@ -20,7 +20,7 @@ in
# directory"/tmp/SVM-4194439592488143713"): error=0, Failed to exec spawn
# helper: pid: 19865, exit value: 1"
graalvm17-ce = mkGraal rec {
version = "21.3.0";
version = "22.0.0.2";
javaVersion = "17";
platforms = [ "x86_64-linux" "x86_64-darwin" ];
};

View file

@ -3,55 +3,55 @@
[
{
sha256 = {
"11-linux-aarch64" = "0hsjxp6ly7jsn9k94fddcl7afc5gda66jyppcnfvslishbizqd0i";
"17-linux-aarch64" = "09hzl80m7f5ppmcvryz9aq0yw9scdkp5dqhblrqnkzyhvdjl5ycn";
"11-linux-amd64" = "1ylk5l933z813k0k1xlayiv8fa0f1gmpr66bma51532iy3mch6rs";
"17-linux-amd64" = "1xn3shwkai61vvzsg595k8776a21ds00w2pjlscvfcbs1ag07n0i";
"11-darwin-amd64" = "0qpqnnmqxvxzj3mwz05acpg4n8ffqsz0sji8lbl03fgswpvgfavc";
"17-darwin-amd64" = "1akpsrd9r2igcls0cvhpqw3jrnh59m8z80knx83lmj0cj836a8v0";
"11-linux-aarch64" = "0n1cgd9rn5aw7rzbd45nlzpnqif095zwl3999vddlhpnjlyjdh0w";
"17-linux-aarch64" = "1iw27igiyzzl43yfgid1h6h7hd0xnv0rfdkp4r7r8i51sa3q7my7";
"11-linux-amd64" = "00vipkrhc7d5xxznm07pjrdjahhfg5s5vxg49xz8qxz2nwxhi1mw";
"17-linux-amd64" = "1m4v2s1b2878r6dqpcxvbqpc3s2l8l0xcbna37bbfx6rsc73wx2g";
"11-darwin-amd64" = "08v37avg439hywx2yqx0bnfpazwaja2xhyc0kj1imib6iadib042";
"17-darwin-amd64" = "16lg3qfx7j8w8cxc3abl7c19nj6jhkni99wmff153lyhyk8zjjnm";
}.${javaVersionPlatform} or null;
url = "https://github.com/graalvm/graalvm-ce-builds/releases/download/vm-21.3.0/graalvm-ce-java${javaVersionPlatform}-21.3.0.tar.gz";
url = "https://github.com/graalvm/graalvm-ce-builds/releases/download/vm-22.0.0.2/graalvm-ce-java${javaVersionPlatform}-22.0.0.2.tar.gz";
}
{
sha256 = {
"11-linux-aarch64" = "0qlmg5fwvqsb5ab3irj2hrcd5jc94mibnlz1gvzpnq85rw1zcb6h";
"17-linux-aarch64" = "0jmarhwngs6vpbcgsix0dxhj42qj9vnk3vln8fhdxmydwnns8r1m";
"11-linux-amd64" = "0kvnjr55rizy53vn0ff9w27z1qh9d1vp3s7r1kdl0wyhrbhd8n49";
"17-linux-amd64" = "0h14sml42jda54agjs1prfnyjaxxsc67350fr51n8p20nl28lj6z";
"11-darwin-amd64" = "1mg8c8hh8wmbwsisgarmp35jd0dall1fwdv49mggp74hicbc32h3";
"17-darwin-amd64" = "0qz0xf2ph9gi45vvri7vphxh35m11nk7sa8nkwxl28l8bza0kb40";
"11-linux-aarch64" = "17h0yfw0lxsiblqv1nzpc6i71vh6hbwf1x6lp7kziass1a4ixm2i";
"17-linux-aarch64" = "1nvm04smzbis1jy9znac2a4yf9ajqvvmadcf5ffr521rm784g2br";
"11-linux-amd64" = "07g7fab0zj1h77a30kiswqm0hvr1m5s6crszcbyvha2v3x2a6145";
"17-linux-amd64" = "0c8qi7b63zkjrz3sz01bbmfni7pcz9nq1jv1f34lj9lcsm8gc9cc";
"11-darwin-amd64" = "0xn1frj1f4pzrd5gm6xwq31blgvz8l9249c97q3yh7p6rkk7vhh3";
"17-darwin-amd64" = "1dr80314fxcklmhi19jn3pqrsz3iivbvcxnphdzl978krm1afzq0";
}.${javaVersionPlatform} or null;
url = "https://github.com/graalvm/graalvm-ce-builds/releases/download/vm-21.3.0/native-image-installable-svm-java${javaVersionPlatform}-21.3.0.jar";
url = "https://github.com/graalvm/graalvm-ce-builds/releases/download/vm-22.0.0.2/native-image-installable-svm-java${javaVersionPlatform}-22.0.0.2.jar";
}
{
sha256 = {
"11-linux-aarch64" = "02rvwl1nng8d3qn226rjx5yq2blxs4yz009ab928qanhmb4vhv8b";
"17-linux-aarch64" = "13kaxbgfp9pm6s28i5hfyg957iiwzrxf0ibibkv2yndgj64vj8xg";
"11-linux-amd64" = "0zz62zr7imjaw9a3j5m66xs7c72cqb1i74ab3rnlh0dgs1mdpljg";
"17-linux-amd64" = "1v2iwznlav8dsjj30nlhvsvv7pxmyzkhkp1p7spjjma09d34q4iv";
"11-darwin-amd64" = "1wiv0299b2xrc229alczmjfj1bsn90p0wdm64rr39xnyyhbqrr80";
"17-darwin-amd64" = "095sii8ibjcvvc6wnxk77ax151c4zgj8bpp81q3kyaazgpzvrk5s";
"11-linux-aarch64" = "103d91sz2dmlc5hcbi9v3d3irgb83755hz16vkknfhbbkhm5iyz0";
"17-linux-aarch64" = "0vas98knpvpajmv8bkgcf0fh7n5fy361nd47002cpppg6hrp7k9q";
"11-linux-amd64" = "0h4s1dgx2wn63pabdckl85s70s1kw97vp0c8z7izihdn2fy7w3z9";
"17-linux-amd64" = "1g98ashyvscwyn1k8mamih6qhcbxsk62x6ynd7x81ndy1karlv6q";
"11-darwin-amd64" = "1y8d6c2ri7hrvsv3aqbcp49pmxh9yppcsfnx0jcwm88wlach0p52";
"17-darwin-amd64" = "13a6rchnaczpmxga6g405z55913ayq5gwihzlvyy6shk6gwbcppw";
}.${javaVersionPlatform} or null;
url = "https://github.com/oracle/truffleruby/releases/download/vm-21.3.0/ruby-installable-svm-java${javaVersionPlatform}-21.3.0.jar";
url = "https://github.com/oracle/truffleruby/releases/download/vm-22.0.0.2/ruby-installable-svm-java${javaVersionPlatform}-22.0.0.2.jar";
}
{
sha256 = {
"11-linux-aarch64" = "1ck4c1z98h1zn4i6xhh1hb6w2jab6n17ddykb72xxw4vig9nhlc7";
"17-linux-aarch64" = "0p9gx5iq730br9wvacxs4403anxnjln6mx8v0dl4w4lhikjxsy8x";
"11-linux-amd64" = "0gy8jj9d9msmj0i44sysiwq3j2k2w2g47fhq6y1aq47n3kmwj9kv";
"17-linux-amd64" = "0qk8rgbmnk84isnb33x5bbh17bnnmq9yqasfgy5p953min6pbxj7";
"11-darwin-amd64" = "0agw6k3jn2jh8wyc9h8rvzlgs96qh4nlj0y8nyzsmidvwq2ahl00";
"17-darwin-amd64" = "0l1il0rq48sw6sha9jr0xphjgrm7q0kywy8z94mabm9maqh7l3rn";
"11-linux-aarch64" = "135zkpqm8z5nzcyn5h6vdx3c09f9wb6jgzmp0llcnx8w76p71154";
"17-linux-aarch64" = "0pij3kh70lxrzmbzx8zw97f9nb0rr492l7x3n13wcr859cr8akas";
"11-linux-amd64" = "0ppvsgs216jmm5p8m34lqg2kv0awadh1dlkxb7qzcw2b6x0grzf0";
"17-linux-amd64" = "1gf0jfmqy8lp6w7bimjp0j5abzmi0ndsn4hcjvfv7123lbj52zkz";
"11-darwin-amd64" = "1mq6013crjmrpf3yvxwv9p4yn0rcdzg5z9hq9l6fpf47i8522k6p";
"17-darwin-amd64" = "15l7p48vsca4cvqxbpb9lcmafysmdsxpv6avrpxajz705j3nsvmp";
}.${javaVersionPlatform} or null;
url = "https://github.com/graalvm/graalvm-ce-builds/releases/download/vm-21.3.0/wasm-installable-svm-java${javaVersionPlatform}-21.3.0.jar";
url = "https://github.com/graalvm/graalvm-ce-builds/releases/download/vm-22.0.0.2/wasm-installable-svm-java${javaVersionPlatform}-22.0.0.2.jar";
}
{
sha256 = {
"11-linux-amd64" = "1l5av2v459q88zfl83877h7b3426z3d86kp6wqjvz2441brvidi0";
"17-linux-amd64" = "100p1cgw0z4yfy4axb3gr32m8jnyx1f8bj6f6kk0mf3l8fv2kb7p";
"11-darwin-amd64" = "06694n74dzsfwlli1sjdsrfbj9ngw7bhrcayvy4sgy2va5qpdjs0";
"17-darwin-amd64" = "1qwg45q0760lsa62h0nk2zdv0r1npr82bh6p1z3md6pjppm7i025";
"11-linux-amd64" = "0m8cqqqdks34b2zv7i6qw9kzqxi1rfqsmknqa9wm0b7dqaxx209g";
"17-linux-amd64" = "12nszxp2yv35y8zkm94bnd0mnanah48y41r61ypymd19plaqmdxk";
"11-darwin-amd64" = "00g6akpv0gkw8gcxfbgcyipn6gj25yr32k1lb7iqj08bq4f2zvk7";
"17-darwin-amd64" = "1hd71qg0nmklyakl4cc29vl10fxalbyd2b5yn7x9iv6m0h1pp25g";
}.${javaVersionPlatform} or null;
url = "https://github.com/graalvm/graalpython/releases/download/vm-21.3.0/python-installable-svm-java${javaVersionPlatform}-21.3.0.jar";
url = "https://github.com/graalvm/graalpython/releases/download/vm-22.0.0.2/python-installable-svm-java${javaVersionPlatform}-22.0.0.2.jar";
}
]

View file

@ -119,7 +119,5 @@ stdenv.mkDerivation rec {
}
'');
meta = sbclBootstrap.meta // {
updateWalker = true;
};
meta = sbclBootstrap.meta;
}

View file

@ -10,7 +10,7 @@ with lib; mkCoqDerivation {
inherit version;
defaultVersion = with versions; switch [ coq.version mathcomp.version ] [
{ cases = [ (range "8.10" "8.14") (isGe "1.12.0") ]; out = "1.13"; }
{ cases = [ (range "8.10" "8.15") (isGe "1.12.0") ]; out = "1.13"; }
{ cases = [ (range "8.10" "8.12") "1.11.0" ]; out = "1.11"; }
] null;

View file

@ -9,7 +9,7 @@ with lib; mkCoqDerivation rec {
defaultVersion = with versions;
switch [ coq.coq-version mathcomp-algebra.version ] [
{ cases = [ (range "8.13" "8.14") (isGe "1.12") ]; out = "1.1.0+1.12+8.13"; }
{ cases = [ (range "8.13" "8.15") (isGe "1.12") ]; out = "1.1.0+1.12+8.13"; }
] null;
release."1.0.0+1.12+8.13".sha256 = "1j533vx6lacr89bj1bf15l1a0s7rvrx4l00wyjv99aczkfbz6h6k";

View file

@ -4,10 +4,11 @@ with lib; mkCoqDerivation {
pname = "paramcoq";
inherit version;
defaultVersion = with versions; switch coq.version [
{ case = range "8.10" "8.14"; out = "1.1.3+coq${coq.coq-version}"; }
{ case = range "8.10" "8.15"; out = "1.1.3+coq${coq.coq-version}"; }
{ case = range "8.7" "8.13"; out = "1.1.2+coq${coq.coq-version}"; }
] null;
displayVersion = { paramcoq = "..."; };
release."1.1.3+coq8.15".sha256 = "0sl7ihznwz05d2x2v78w1zd4q55c1sgy06vxasbcls4v2pkw53hl";
release."1.1.3+coq8.14".sha256 = "00zqq9dc2p5v0ib1jgizl25xkwxrs9mrlylvy0zvb96dpridjc71";
release."1.1.3+coq8.13".sha256 = "06ndly736k4pmdn4baqa7fblp6lx7a9pxm9gvz1vzd6ic51825wp";
release."1.1.3+coq8.12".sha256 = "sha256:10j23ws8ymqpxhapni75sxbzz0dl4n9sgasrx618i7s7b705y2rh";

View file

@ -2,8 +2,8 @@
let
release = {
"8.14.0+0.14.0".sha256 = "sha256:1kh80yb791yl771qbqkvwhbhydfii23a7lql0jgifvllm2k8hd8d";
"8.14+rc1+0.14.0".sha256 = "1w7d7anvcfx8vz51mnrf1jkw6rlpzjkjlr06avf58wlhymww7pja";
"8.15.0+0.15.0".sha256 = "1vh99ya2dq6a8xl2jrilgs0rpj4j227qx8zvzd2v5xylx0p4bbrp";
"8.14.0+0.14.0".sha256 = "1kh80yb791yl771qbqkvwhbhydfii23a7lql0jgifvllm2k8hd8d";
"8.13.0+0.13.0".sha256 = "0k69907xn4k61w4mkhwf8kh8drw9pijk9ynijsppihw98j8w38fy";
"8.12.0+0.12.1".sha256 = "048x3sgcq4h845hi6hm4j4dsfca8zfj70dm42w68n63qcm6xf9hn";
"8.11.0+0.11.1".sha256 = "1phmh99yqv71vlwklqgfxiq2vj99zrzxmryj2j4qvg5vav3y3y6c";
@ -17,6 +17,7 @@ in
defaultVersion = with versions;
switch coq.version [
{ case = isEq "8.15"; out = "8.15.0+0.15.0"; }
{ case = isEq "8.14"; out = "8.14.0+0.14.0"; }
{ case = isEq "8.13"; out = "8.13.0+0.13.0"; }
{ case = isEq "8.12"; out = "8.12.0+0.12.1"; }
@ -55,9 +56,6 @@ in
let inherit (o) version; in {
src = fetchzip {
url =
if version == "8.14+rc1+0.14.0"
then "https://github.com/ejgallego/coq-serapi/archive/refs/tags/8.14+rc1+0.14.0.tar.gz"
else
"https://github.com/ejgallego/coq-serapi/releases/download/${version}/coq-serapi-${
if version == "8.11.0+0.11.1" then version
else builtins.replaceStrings [ "+" ] [ "." ] version

View file

@ -1,5 +0,0 @@
url https://sourceforge.net/projects/regina-rexx/files/regina-rexx/
SF_version_dir
SF_version_tarball
SF_redirect
minimize_overwrite

View file

@ -1,10 +0,0 @@
url https://botan.randombit.net/
version_link 'Botan-[0-9]+([.][0-9]+)*[.](tgz|tbz|tbz2|tar[.]bz2)$'
ensure_version
attribute_name botan2
do_overwrite(){
ensure_hash
set_var_value sha256 $CURRENT_HASH
set_var_value baseVersion ${CURRENT_VERSION%.*}
set_var_value revision ${CURRENT_VERSION##*.}
}

View file

@ -1,9 +0,0 @@
url http://botan.randombit.net/download.html
version_link 'Botan-[0-9]+[.][0-9]*[02468]([.][0-9]+)?[.](tgz|tbz|tbz2|tar[.]bz2)$'
ensure_version
do_overwrite(){
ensure_hash
set_var_value sha256 $CURRENT_HASH
set_var_value baseVersion ${CURRENT_VERSION%.*}
set_var_value revision ${CURRENT_VERSION##*.}
}

View file

@ -1,4 +0,0 @@
url http://eigen.tuxfamily.org/
ensure_choice
version '.*/([-0-9.]+)[.]tar[.].*' '\1'
do_overwrite() { do_overwrite_just_version; }

View file

@ -51,6 +51,5 @@ stdenv.mkDerivation rec {
platforms = platforms.unix;
homepage = "https://www.flintlib.org/";
downloadPage = "https://www.flintlib.org/downloads.html";
updateWalker = true;
};
}

View file

@ -24,6 +24,5 @@ stdenv.mkDerivation rec {
maintainers = [lib.maintainers.raskin];
platforms = lib.platforms.unix;
homepage = "https://cs.uwaterloo.ca/~astorjoh/iml.html";
updateWalker = true;
};
}

View file

@ -20,6 +20,5 @@ stdenv.mkDerivation rec {
platforms = lib.platforms.unix;
homepage = "https://wiki.documentfoundation.org/DLP/Libraries/libzmf";
downloadPage = "http://dev-www.libreoffice.org/src/libzmf/";
updateWalker = true;
};
}

View file

@ -22,8 +22,6 @@ stdenv.mkDerivation rec {
description = "A lightweight C library for the Jabber protocol";
platforms = platforms.all;
downloadPage = "http://mcabber.com/files/loudmouth/";
downloadURLRegexp = "loudmouth-[0-9.]+[.]tar[.]bz2$";
updateWalker = true;
license = licenses.lgpl21;
};
}

View file

@ -1,10 +0,0 @@
url https://gitlab.com/mdds/mdds
version_link 'mdds-.*[.]tar[.][a-z0-9]+$'
version '.*-([0-9.]+)[.]tar[.].*' '\1'
do_overwrite(){
ensure_hash
ensure_version
set_var_value version $CURRENT_VERSION
set_var_value sha256 $CURRENT_HASH
}

View file

@ -21,6 +21,5 @@ stdenv.mkDerivation rec {
platforms = lib.platforms.unix;
downloadPage = "https://mpir.org/downloads.html";
homepage = "https://mpir.org/";
updateWalker = true;
};
}

View file

@ -9,11 +9,6 @@
, withPerl ? stdenv.hostPlatform == stdenv.buildPlatform
}:
assert (
lib.assertMsg (!withPerl -> stdenv.hostPlatform != stdenv.buildPlatform)
"withPerl should not be disabled unless cross compiling"
);
# Note: this package is used for bootstrapping fetchurl, and thus
# cannot use fetchpatch! All mutable patches (generated by GitHub or
# cgit) that are needed here should be included directly in Nixpkgs as

View file

@ -1,3 +0,0 @@
url http://ftp.u-tx.net/gnu/osip/
attribute_name libosip
minimize_overwrite

View file

@ -31,6 +31,5 @@ stdenv.mkDerivation rec {
platforms = platforms.all;
homepage = "http://www.phash.org";
downloadPage = "https://github.com/clearscene/pHash";
updateWalker = true;
};
}

View file

@ -8,20 +8,11 @@ stdenv.mkDerivation rec {
sha256 = "1y0gikds2nr8jk8smhrl617njk23ymmpxyjb2j1xbj0k82xspv78";
};
passthru = {
updateScript = ''
#!${runtimeShell}
cd ${toString ./.}
${toString path}/pkgs/build-support/upstream-updater/update-walker.sh default.nix
'';
};
meta = {
meta = with lib; {
description = "File open routines to safely open a file when in the presence of an attack";
license = lib.licenses.asl20 ;
maintainers = [lib.maintainers.raskin];
platforms = lib.platforms.all;
license = licenses.asl20;
maintainers = with maintainers; [ raskin ];
platforms = platforms.all;
homepage = "https://research.cs.wisc.edu/mist/safefile/";
updateWalker = true;
};
}

View file

@ -29,6 +29,5 @@ stdenv.mkDerivation rec {
maintainers = with lib.maintainers; [raskin];
license = lib.licenses.gpl2;
downloadPage = "http://www.soft-switch.org/downloads/spandsp/";
updateWalker = true;
};
}

View file

@ -53,7 +53,7 @@ stdenv.mkDerivation rec {
postInstall = let
pcTemplate = fetchurl {
url = "https://github.com/oneapi-src/oneTBB/raw/master/integration/pkg-config/tbb.pc.in";
url = "https://github.com/oneapi-src/oneTBB/raw/478de5b1887c928e52f029d706af6ea640a877be/integration/pkg-config/tbb.pc.in";
sha256 = "2pCad9txSpNbzac0vp/VY3x7HNySaYkbH3Rx8LK53pI=";
};
in ''

View file

@ -73,7 +73,6 @@ stdenv.mkDerivation rec {
description = "XML Security Library in C based on libxml2";
license = lib.licenses.mit;
platforms = with lib.platforms; linux ++ darwin;
updateWalker = true;
};
}
)

View file

@ -79,8 +79,8 @@ rec {
patchSrc = fetchFromGitHub {
owner = "abathur";
repo = "nix-py-dev-oil";
rev = "v0.8.12.1";
hash = "sha256-7JVnosdcvmVFN3h6SIeeqcJFcyFkai//fFuzi7ThNMY=";
rev = "v0.8.12.2";
hash = "sha256-+dVxzPKMGNKFE+7Ggzx9iWjjvwW2Ow3UqmjjUud9Mqo=";
};
patches = [
"${patchSrc}/0001-add_setup_py.patch"
@ -88,6 +88,7 @@ rec {
"${patchSrc}/0004-disable-internal-py-yajl-for-nix-built.patch"
"${patchSrc}/0006-disable_failing_libc_tests.patch"
"${patchSrc}/0007-namespace_via_init.patch"
"${patchSrc}/0009-avoid_nix_arch64_darwin_toolchain_bug.patch"
];
buildInputs = [ readline cmark py-yajl ];

View file

@ -1,4 +1,5 @@
{ lib
, stdenv
, callPackage
, python27Packages
, installShellFiles
@ -17,7 +18,18 @@ python27Packages.buildPythonApplication {
nativeBuildInputs = [ installShellFiles ];
propagatedBuildInputs = [ oildev python27Packages.configargparse ];
propagatedBuildInputs = [
oildev
/*
Disable configargparse's tests on aarch64-darwin.
Several of py27 scandir's tests fail on aarch64-darwin. Chain:
configargparse -> pytest-check-hook -> pytest -> pathlib2 -> scandir
TODO: drop if https://github.com/NixOS/nixpkgs/issues/156807 resolves?
*/
(python27Packages.configargparse.overridePythonAttrs (old: {
doCheck = stdenv.hostPlatform.system != "aarch64-darwin";
}))
];
patchPhase = ''
for file in resholve; do

View file

@ -8,14 +8,14 @@
buildPythonPackage rec {
version = "20.0.0";
version = "20.1.0";
pname = "azure-mgmt-resource";
disabled = !isPy3k;
src = fetchPypi {
inherit pname version;
extension = "zip";
sha256 = "622dca4484be64f9f5ce335d327dffabf3e71e14e8a3f4a1051dc85a5c3ebbca";
sha256 = "4fbb2158320e0bdd367882642f266a6dfb3b4b8610792b3afbbca39089f212d7";
};
propagatedBuildInputs = [

View file

@ -11,12 +11,12 @@
buildPythonPackage rec {
pname = "azure-mgmt-web";
version = "6.0.0";
version = "6.1.0";
src = fetchPypi {
inherit pname version;
extension = "zip";
sha256 = "a58750df23d5d4cb8eff283a93312e933ee3ef4272324576005c3dc8c22ce944";
sha256 = "c26635089276515b0488fcf014aab50a0446f54800c6e0e5583cc493ac8d738f";
};
propagatedBuildInputs = [

View file

@ -61,7 +61,8 @@ buildPythonPackage rec {
postPatch = ''
sed -i setup.py -e "/pip>=/c\'pip',"
substituteInPlace setup.py \
--replace 'typing==3.6.4' 'typing'
--replace "typing==3.6.4" "typing" \
--replace "attrs>=19.3.0,<21.3.0" "attrs"
'';
disabledTestPaths = [

View file

@ -2,7 +2,8 @@
, aioredis
, async_generator
, buildPythonPackage
, fetchPypi
, fetchFromGitHub
, fetchpatch
, hypothesis
, lupa
, pytest-asyncio
@ -19,11 +20,13 @@ buildPythonPackage rec {
version = "1.7.0";
format = "setuptools";
disabled = pythonOlder "3.5";
disabled = pythonOlder "3.7";
src = fetchPypi {
inherit pname version;
sha256 = "sha256-yb0S5DAzbL0+GJ+uDpHrmZl7k+dtv91u1n+jUtxoTHE=";
src = fetchFromGitHub {
owner = "jamesls";
repo = pname;
rev = version;
hash = "sha256-P6PUg9SY0Qshlvj+iV1xdrzVLJ9JXUV4cGHUynKO3m0=";
};
propagatedBuildInputs = [
@ -42,6 +45,15 @@ buildPythonPackage rec {
pytestCheckHook
];
patches = [
# Support for redis <= 4.1.0, https://github.com/jamesls/fakeredis/pull/324
(fetchpatch {
name = "support-redis-4.1.0.patch";
url = "https://github.com/jamesls/fakeredis/commit/8ef8dc6dacc9baf571d66a25ffbf0fadd7c70f78.patch";
sha256 = "sha256-4DrF/5WEWQWlJZtAi4qobMDyRAAcO/weHIaK9waN00k=";
})
];
disabledTestPaths = [
# AttributeError: 'AsyncGenerator' object has no attribute XXXX
"test/test_aioredis2.py"

View file

@ -8,7 +8,7 @@
buildPythonPackage rec {
pname = "flux-led";
version = "0.28.11";
version = "0.28.17";
format = "setuptools";
disabled = pythonOlder "3.7";
@ -17,7 +17,7 @@ buildPythonPackage rec {
owner = "Danielhiversen";
repo = "flux_led";
rev = version;
sha256 = "sha256-6EBHFqfCCDKMY9T8suPDIOoiA2LugMJh0OJiHoICioU=";
sha256 = "1brh2wmv29vsvnndsjf1nbh2brwrkxicwcyj9brs9kgsrhvlv71s";
};
propagatedBuildInputs = [

View file

@ -1,32 +1,45 @@
{ stdenv, lib, buildPythonPackage, fetchPypi, pythonOlder, pytest, freezegun }:
{ lib
, stdenv
, buildPythonPackage
, fetchPypi
, freezegun
, pytestCheckHook
, pythonOlder
}:
buildPythonPackage rec {
version = "5.0.2";
pname = "ftputil";
version = "5.0.3";
format = "setuptools";
disabled = pythonOlder "3.6";
src = fetchPypi {
inherit pname version;
sha256 = "afa2ba402235e8c6583c1d2630269628344134c9246b961ff14f182047f3e633";
hash = "sha256-m4buZ8GYDOHYmxN1K8SLlJP+4GNJy0doKFlOduCPhIg=";
};
checkInputs = [ pytest freezegun ];
checkInputs = [
freezegun
pytestCheckHook
];
checkPhase = ''
touch Makefile
# Disable tests that require network access or access /home or assume execution before year 2020
py.test test \
-k "not test_public_servers and not test_real_ftp \
and not test_set_parser and not test_repr \
and not test_conditional_upload and not test_conditional_download_with_older_target \
''
# need until https://ftputil.sschwarzer.net/trac/ticket/140#ticket is fixed
+ lib.optionalString stdenv.isDarwin "and not test_error_message_reuse"
+ ''"'';
disabledTests = [
# Tests require network access
"test_public_servers"
"test_real_ftp"
"test_set_parser"
"test_upload"
];
pythonImportsCheck = [
"ftputil"
];
meta = with lib; {
description = "High-level FTP client library (virtual file system and more)";
homepage = "http://ftputil.sschwarzer.net/";
license = licenses.bsd2; # "Modified BSD license, says pypi"
license = licenses.bsd2;
maintainers = with maintainers; [ ];
};
}

Some files were not shown because too many files have changed in this diff.