Skip to content

Commit

Permalink
Use unit id for package key (#2239)
Browse files Browse the repository at this point in the history
Currently haskell.nix uses the package name as the key for `hsPkgs`.  This means that only one package with a given name can exist for a given plan.  When the cabal planner makes a plan it often includes more than one version of a given package.  For instance if a package is needed for a `build-tool-depends` it is likely that it may have requirements that do not match the rest of the project.

When there are two versions of the same package in the plan haskell.nix currently chooses the most recent one.  This is often the correct choice for the main plan (though it may not always be), but it can sometimes be the wrong choice for the `build-tool-depends`.

This PR aims to resolve this issue by using the unit ID from the `plan.json` file as the key for `hsPkgs`.  This means that we can much more closely match the plan.

* Use the `plan.json` as much as possible (including dependencies and cabal flag settings).
* Fall back on existing sources for information not in `plan.json`.
* Include mappings from `hsPkgs.${pkg-name}` to unit ID based entries.
* Support overrides of the form `packages.${pkg-name}...` (these are applied to all the versions of the package).
* Per-component `pre-existing` packages based on the `plan.json` dependencies.
  • Loading branch information
hamishmack authored Sep 17, 2024
1 parent a921f73 commit 61fbe40
Show file tree
Hide file tree
Showing 46 changed files with 8,535 additions and 181 deletions.
39 changes: 15 additions & 24 deletions .github/workflows/pipeline.yml
Original file line number Diff line number Diff line change
Expand Up @@ -139,49 +139,40 @@ jobs:

hydra-ifdLevel-0-and-1:
runs-on: [self-hosted, linux]
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
- name: "Check that jobset will evaluate in Hydra at ifdLevel 0 and 1"
run: |
nix-build build.nix -A maintainer-scripts.check-hydra -o check-hydra.sh
./check-hydra.sh --arg ifdLevel 0
./check-hydra.sh --arg ifdLevel 1
sed -i 's/runningHydraEvalTest = false;/runningHydraEvalTest = true;/' flake.nix
sed -i 's/ifdLevel = 3;/ifdLevel = 0;/' flake.nix
./check-hydra.sh
sed -i 's/ifdLevel = 0;/ifdLevel = 1;/' flake.nix
./check-hydra.sh
hydra-ifdLevel-2:
runs-on: [self-hosted, linux]
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
- name: "Check that jobset will evaluate in Hydra at ifdLevel 2"
run: |
nix-build build.nix -A maintainer-scripts.check-hydra -o check-hydra.sh
./check-hydra.sh --arg ifdLevel 2
sed -i 's/runningHydraEvalTest = false;/runningHydraEvalTest = true;/' flake.nix
sed -i 's/ifdLevel = 3;/ifdLevel = 2;/' flake.nix
./check-hydra.sh
hydra-ifdLevel-3-and-ghc-8-10:
hydra-ifdLevel-3:
runs-on: [self-hosted, linux]
timeout-minutes: 30
steps:
- uses: actions/checkout@v4
- name: "Check that jobset will evaluate in Hydra at ifdLevel 3 and ghc 8.10"
- name: "Check that jobset will evaluate in Hydra at ifdLevel 3"
run: |
nix-build build.nix -A maintainer-scripts.check-hydra -o check-hydra.sh
./check-hydra.sh --arg ifdLevel 3 --arg include 'x: __substring 0 6 x == "ghc810"'
hydra-ifdLevel-3-and-ghc-9-2:
runs-on: [self-hosted, linux]
steps:
- uses: actions/checkout@v4
- name: "Check that jobset will evaluate in Hydra at ifdLevel 3 and ghc 9.2"
run: |
nix-build build.nix -A maintainer-scripts.check-hydra -o check-hydra.sh
./check-hydra.sh --arg ifdLevel 3 --arg include 'x: __substring 0 5 x == "ghc92"'
hydra-ifdLevel-3-and-not-ghc-8-10-or-9-2:
runs-on: [self-hosted, linux]
steps:
- uses: actions/checkout@v4
- name: "Check that jobset will evaluate in Hydra at ifdLevel 3 and not (ghc 8.10 or ghc 9.2)"
run: |
nix-build build.nix -A maintainer-scripts.check-hydra -o check-hydra.sh
./check-hydra.sh --arg ifdLevel 3 --arg include 'x: !(__substring 0 6 x == "ghc810" || __substring 0 5 x == "ghc92")'
sed -i 's/runningHydraEvalTest = false;/runningHydraEvalTest = true;/' flake.nix
./check-hydra.sh
closure-size:
runs-on: [self-hosted, linux]
Expand Down
2 changes: 1 addition & 1 deletion builder/comp-builder.nix
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ let self =
# (not just the one we are building).
# Enable for tests in packages that use cabal-doctest.
( haskellLib.isTest componentId &&
lib.any (x: x.identifier.name or "" == "cabal-doctest") package.setup-depends
lib.any (x: x.identifier.name or "" == "cabal-doctest") (package.setup-depends ++ setup.config.depends or [])
)
, allComponent # Used when `configureAllComponents` is set to get a suitable configuration.

Expand Down
2 changes: 1 addition & 1 deletion builder/make-config-files.nix
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
let
# Sort and remove duplicates from nonReinstallablePkgs.
# That way changes to the order of nonReinstallablePkgs does not require rebuilds.
nonReinstallablePkgs' = __attrNames (lib.genAttrs nonReinstallablePkgs (x: x));
nonReinstallablePkgs' = __attrNames (lib.genAttrs (component.pre-existing or [] ++ nonReinstallablePkgs) (x: x));

ghc = if enableDWARF then defaults.ghc.dwarf else defaults.ghc;

Expand Down
10 changes: 6 additions & 4 deletions builder/shell-for.nix
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,8 @@ let
selectedComponents =
lib.filter isSelectedComponent (lib.attrValues transitiveDependenciesComponents);

allHsPkgsComponents = lib.concatMap haskellLib.getAllComponents (builtins.attrValues hsPkgs);
allHsPkgsComponents = lib.concatMap haskellLib.getAllComponents
(lib.filter (x: !(x.isRedirect or false)) (builtins.attrValues hsPkgs));

# Given a list of `depends`, removes those which are selected components
removeSelectedInputs =
Expand Down Expand Up @@ -114,9 +115,10 @@ let
# Set up a "dummy" component to use with ghcForComponent.
component = {
depends = packageInputs;
libs = [];
pkgconfig = [];
frameworks = [];
pre-existing = lib.concatMap (x: (haskellLib.dependToLib x).config.pre-existing or []) packageInputs;
libs = lib.concatMap (x: (haskellLib.dependToLib x).config.libs or []) packageInputs;
pkgconfig = lib.concatMap (x: (haskellLib.dependToLib x).config.pkgconfig or []) packageInputs;
frameworks = lib.concatMap (x: (haskellLib.dependToLib x).config.frameworks or []) packageInputs;
doExactConfig = false;
};
configFiles = makeConfigFiles {
Expand Down
76 changes: 76 additions & 0 deletions changelog.md
Original file line number Diff line number Diff line change
@@ -1,6 +1,82 @@
This file contains a summary of changes to Haskell.nix and `nix-tools`
that will impact users.

## Sep 17, 2024

Cabal projects now use the more granular Unit IDs from plan.json
to identify packages. This allows for different versions of a
package to be used when building `build-tool-depends` and setup
dependencies.

Overrides in the `modules` argument apply to all versions of
the package. However, to make this work we needed to make
each `packages.somepackage` an option (instead of using
`attrsOf` with the submodule type).

It is now an error to override a package that is not in the
plan. This can be a problem if different GHC versions, target
platforms, or cabal flag settings cause the package to be
excluded from the plan. Adding `package-keys` can tell
haskell.nix to include the option anyway:

```
modules = [{
# Tell haskell.nix that `somepackage` may exist.
package-keys = ["somepackage"];
# Now the following will not cause an error even
# if `somepackage` is not in the plan
packages.somepackage.flags.someflag = true;
}];
```

There is a helper function you can use to add `package-keys`
for all of the `builtins.attrNames` of `packages`:

```
modules = [(pkgs.haskell-nix.haskellLib.addPackageKeys {
packages.somepackage.flags.someflag = true;
})];
```

Do not use the module's `pkgs` arg to look up `addPackageKeys`,
though, or it will result in an `infinite recursion` error.

Code that uses `options.packages` will also need to be updated.
For instance the following code that uses `options.packages`
to set `-Werror` for local packages:

```
({ lib, ... }: {
options.packages = lib.mkOption {
type = lib.types.attrsOf (lib.types.submodule (
{ config, lib, ... }:
lib.mkIf config.package.isLocal
{
configureFlags = [ "--ghc-option=-Werror"];
}
));
};
})
```

Now needs to do it for each entry in `config.package-keys`
instead of using `attrsOf`:

```
({ config, lib, ... }: {
options.packages = lib.genAttrs config.package-keys (_:
lib.mkOption {
type = lib.types.submodule (
{ config, lib, ... }:
lib.mkIf config.package.isLocal
{
configureFlags = [ "--ghc-option=-Werror"];
}
);
});
})
```

## Jun 5, 2024

Haskell.nix now respects the `pre-existing` packages selected
Expand Down
8 changes: 4 additions & 4 deletions flake.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

13 changes: 8 additions & 5 deletions flake.nix
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@
"hls-2.6" = { url = "github:haskell/haskell-language-server/2.6.0.0"; flake = false; };
"hls-2.7" = { url = "github:haskell/haskell-language-server/2.7.0.0"; flake = false; };
"hls-2.8" = { url = "github:haskell/haskell-language-server/2.8.0.0"; flake = false; };
"hls-2.9" = { url = "github:haskell/haskell-language-server/2.9.0.0"; flake = false; };
"hls-2.9" = { url = "github:haskell/haskell-language-server/2.9.0.1"; flake = false; };
hydra.url = "hydra";
hackage = {
url = "github:input-output-hk/hackage.nix";
Expand Down Expand Up @@ -91,6 +91,7 @@
callFlake = import flake-compat;

ifdLevel = 3;
runningHydraEvalTest = false;
compiler = "ghc928";
config = import ./config.nix;

Expand All @@ -108,9 +109,10 @@
# systems supported by haskell.nix
systems = [
"x86_64-linux"
] ++ (if runningHydraEvalTest then [] else [
"x86_64-darwin"
"aarch64-darwin"
];
]);

nixpkgsArgs = {
inherit config;
Expand Down Expand Up @@ -282,13 +284,14 @@
traceHydraJobs (lib.recursiveUpdate flake (lib.optionalAttrs (ifdLevel > 2) {
hydraJobs.nix-tools = pkgs.releaseTools.aggregate {
name = "nix-tools";
constituents = [
constituents = (if runningHydraEvalTest then [] else [
"aarch64-darwin.nix-tools.static.zipped.nix-tools-static"
"x86_64-darwin.nix-tools.static.zipped.nix-tools-static"
"x86_64-linux.nix-tools.static.zipped.nix-tools-static"
"x86_64-linux.nix-tools.static.zipped.nix-tools-static-arm64"
"aarch64-darwin.nix-tools.static.zipped.nix-tools-static-no-ifd"
"x86_64-darwin.nix-tools.static.zipped.nix-tools-static-no-ifd"
]) ++ [
"x86_64-linux.nix-tools.static.zipped.nix-tools-static"
"x86_64-linux.nix-tools.static.zipped.nix-tools-static-arm64"
"x86_64-linux.nix-tools.static.zipped.nix-tools-static-no-ifd"
"x86_64-linux.nix-tools.static.zipped.nix-tools-static-arm64-no-ifd"
(writeText "gitrev" (self.rev or "0000000000000000000000000000000000000000"))
Expand Down
22 changes: 13 additions & 9 deletions lib/call-cabal-project-to-nix.nix
Original file line number Diff line number Diff line change
Expand Up @@ -538,11 +538,11 @@ let
'';
};

plan-nix = materialize ({
plan-json = materialize ({
inherit materialized;
sha256 = plan-sha256;
sha256Arg = "plan-sha256";
this = "project.plan-nix" + (if name != null then " for ${name}" else "");
this = "project.plan-json" + (if name != null then " for ${name}" else "");
} // pkgs.lib.optionalAttrs (checkMaterialization != null) {
inherit checkMaterialization;
}) (evalPackages.runCommand (nameAndSuffix "plan-to-nix-pkgs") {
Expand All @@ -564,7 +564,6 @@ let
# These two output will be present if in cabal configure failed.
# They are used to provide passthru.json and passthru.freeze that
# check first for cabal configure failure.
"json" # The `plan.json` file generated by cabal and used for `plan-to-nix` input
"freeze" # The `cabal.project.freeze` file created by `cabal v2-freeze`
];
} ''
Expand Down Expand Up @@ -619,7 +618,7 @@ let
export SSL_CERT_FILE=${cacert}/etc/ssl/certs/ca-bundle.crt
export GIT_SSL_CAINFO=${cacert}/etc/ssl/certs/ca-bundle.crt
CABAL_DIR=${
export CABAL_DIR=${
# This creates `.cabal` directory that is as it would have
# been at the time `cached-index-state`. We may include
# some packages that will be excluded by `index-state-max`
Expand All @@ -630,7 +629,9 @@ let
index-state = cached-index-state;
sha256 = index-sha256-found;
}
} make-install-plan ${
}
make-install-plan ${
# Setting the desired `index-state` here in case it is not
# in the cabal.project file. This will further restrict the
# packages used by the solver (cached-index-state >= index-state-max).
Expand Down Expand Up @@ -676,17 +677,20 @@ let
# proper relative paths.
(cd $out${subDir'} && plan-to-nix --full ${if ignorePackageYaml then "--ignore-package-yaml" else ""} --plan-json $tmp${subDir'}/dist-newstyle/cache/plan.json -o .)
substituteInPlace $tmp${subDir'}/dist-newstyle/cache/plan.json --replace "$out" "."
substituteInPlace $tmp${subDir'}/dist-newstyle/cache/plan.json --replace "$CABAL_DIR" ""
# Replace the /nix/store paths to minimal git repos with indexes (that will work with materialization).
${fixedProject.replaceLocations}
# Make the plan.json file available in case we need to debug plan-to-nix
cp $tmp${subDir'}/dist-newstyle/cache/plan.json $json
# Remove the non nix files ".project" ".cabal" "package.yaml" files
# as they should not be in the output hash (they may change slightly
# without affecting the nix).
find $out \( -type f -or -type l \) ! -name '*.nix' -delete
# Make the plan.json file available in case we need to debug plan-to-nix
cp $tmp${subDir'}/dist-newstyle/cache/plan.json $out
# Make the revised cabal files available (after the delete step above)
echo "Moving cabal files from $tmp${subDir'}/dist-newstyle/cabal-files to $out${subDir'}/cabal-files"
mv $tmp${subDir'}/dist-newstyle/cabal-files $out${subDir'}/cabal-files
Expand All @@ -698,7 +702,7 @@ let
mv $out${subDir'}/pkgs.nix $out${subDir'}/default.nix
'');
in {
projectNix = plan-nix;
projectNix = plan-json;
inherit index-state-max src;
inherit (fixedProject) sourceRepos extra-hackages;
}
2 changes: 1 addition & 1 deletion lib/cover-project.nix
Original file line number Diff line number Diff line change
Expand Up @@ -82,7 +82,7 @@ in pkgs.runCommand "project-coverage-report"
fi
# Copy mix, tix, and html information over from each report
for f in $report/share/hpc/vanilla/mix/$identifier*; do
for f in $report/share/hpc/vanilla/mix/*; do
cp -Rn $f $out/share/hpc/vanilla/mix
done
cp -R $report/share/hpc/vanilla/tix/* $out/share/hpc/vanilla/tix/
Expand Down
9 changes: 6 additions & 3 deletions lib/default.nix
Original file line number Diff line number Diff line change
Expand Up @@ -98,12 +98,13 @@ in {
# Was there a reference to the package source in the `cabal.project` or `stack.yaml` file.
# This is used to make the default `packages` list for `shellFor`.
isLocalPackage = p: p.isLocal or false;
selectLocalPackages = lib.filterAttrs (_n: p: p != null && isLocalPackage p);
isRedirectPackage = p: p.isRedirect or false;
selectLocalPackages = lib.filterAttrs (_n: p: p != null && isLocalPackage p && !isRedirectPackage p);

# if it's a project package it has a src attribute set with an origSubDir attribute.
# project packages are a subset of localPackages
isProjectPackage = p: p.isProject or false;
selectProjectPackages = lib.filterAttrs (_n: p: p != null && isLocalPackage p && isProjectPackage p);
selectProjectPackages = lib.filterAttrs (_n: p: p != null && isLocalPackage p && isProjectPackage p && !isRedirectPackage p);

# Format a componentId as it should appear as a target on the
# command line of the setup script.
Expand Down Expand Up @@ -341,7 +342,7 @@ in {
# Converts from a `component.depends` value to a library derivation.
# In the case of sublibs the `depends` value should already be the derivation.
dependToLib = d:
# Do simplify this to `d.components.library or d`, as that
# Do not simplify this to `d.components.library or d`, as that
# will not give a good error message if the `.library`
# is missing (happens if the package is unplanned,
# but has overrides).
Expand Down Expand Up @@ -613,4 +614,6 @@ in {
}";

types = import ./types.nix { inherit lib; };

addPackageKeys = x: x // { package-keys = builtins.attrNames x.packages; };
}
Loading

0 comments on commit 61fbe40

Please sign in to comment.